123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154 |
- #import <Foundation/Foundation.h>
- #import <AVFoundation/AVFoundation.h>
- #import <CoreMedia/CoreMedia.h>
- #import "GPUImageContext.h"
- #import "GPUImageOutput.h"
// YUV -> RGB color-conversion matrices, shared with filters that consume camera
// textures. Definitions live in the corresponding implementation file.
extern const GLfloat kColorConversion601[];          // ITU-R BT.601, video range
extern const GLfloat kColorConversion601FullRange[]; // ITU-R BT.601, full range
extern const GLfloat kColorConversion709[];          // ITU-R BT.709 (HD)

// Fragment shader sources for converting the camera's biplanar YUV output to RGB.
// Naming suggests RG vs. LA refers to the texture channel layout used for the
// chroma plane and VideoRange/FullRange to the pixel format — confirm in the .m.
extern NSString *const kGPUImageYUVVideoRangeConversionForRGFragmentShaderString;
extern NSString *const kGPUImageYUVFullRangeConversionForLAFragmentShaderString;
extern NSString *const kGPUImageYUVVideoRangeConversionForLAFragmentShaderString;
/// Delegate protocol for observing raw camera output (e.g. to run face detection
/// on the unprocessed sample buffers).
@protocol GPUImageVideoCameraDelegate <NSObject>

@optional
/// Called with each captured video sample buffer. Per the "will" naming this is
/// presumably invoked before GPU-side processing of the frame — confirm in the
/// implementation. The buffer is only guaranteed valid for the duration of the call.
/// @param sampleBuffer The CMSampleBufferRef delivered by AVFoundation.
- (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer;
@end
- /**
- A GPUImageOutput that provides frames from either camera
- */
/**
 A GPUImageOutput that provides frames from either camera
*/
@interface GPUImageVideoCamera : GPUImageOutput <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate>
{
    // Benchmarking accumulators backing -averageFrameDurationDuringCapture /
    // -resetBenchmarkAverage; only meaningful when runBenchmark is YES.
    NSUInteger numberOfFramesCaptured;
    CGFloat totalFrameTimeDuringCapture;

    // Capture pipeline objects backing the public readonly properties.
    AVCaptureSession *_captureSession;
    AVCaptureDevice *_inputCamera;
    AVCaptureDevice *_microphone;
    AVCaptureDeviceInput *videoInput;
    AVCaptureVideoDataOutput *videoOutput;

    // YES while capture is paused via -pauseCameraCapture.
    BOOL capturePaused;
    GPUImageRotationMode outputRotation, internalRotation;
    // NOTE(review): presumably used to skip incoming frames while the previous
    // frame is still rendering — confirm against the implementation.
    dispatch_semaphore_t frameRenderingSemaphore;

    // When YES, frames are captured as biplanar YUV and converted on the GPU
    // using the luminance/chrominance textures below.
    BOOL captureAsYUV;
    GLuint luminanceTexture, chrominanceTexture;

    // Backing storage for the `delegate` property; unretained by design (see note there).
    __unsafe_unretained id<GPUImageVideoCameraDelegate> _delegate;
}

/// The AVCaptureSession used to capture from the camera
// NOTE(review): `retain` on a readonly property is legacy MRC-era style; under
// ARC this is equivalent to `strong`.
@property(readonly, retain, nonatomic) AVCaptureSession *captureSession;

/// This enables the capture session preset to be changed on the fly
@property (readwrite, nonatomic, copy) NSString *captureSessionPreset;

/// This sets the frame rate of the camera (iOS 5 and above only)
/**
 Setting this to 0 or below will set the frame rate back to the default setting for a particular preset.
 */
@property (readwrite) int32_t frameRate;

/// Easy way to tell which cameras are present on device
@property (readonly, getter = isFrontFacingCameraPresent) BOOL frontFacingCameraPresent;
@property (readonly, getter = isBackFacingCameraPresent) BOOL backFacingCameraPresent;

/// This enables the benchmarking mode, which logs out instantaneous and average frame times to the console
@property(readwrite, nonatomic) BOOL runBenchmark;

/// Use this property to manage camera settings. Focus point, exposure point, etc.
@property(readonly) AVCaptureDevice *inputCamera;

/// This determines the rotation applied to the output image, based on the source material
@property(readwrite, nonatomic) UIInterfaceOrientation outputImageOrientation;

/// These properties determine whether or not the two camera orientations should be mirrored. By default, both are NO.
@property(readwrite, nonatomic) BOOL horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera;

/// Receives raw sample buffers as they are captured.
// NOTE(review): `assign` (backed by an __unsafe_unretained ivar) is NOT zeroed
// when the delegate deallocates, unlike `weak` — callers must nil this out
// before the delegate is destroyed to avoid a dangling pointer. Changing to
// `weak` would be safer but alters the declared ivar's semantics; left as-is.
@property(nonatomic, assign) id<GPUImageVideoCameraDelegate> delegate;

/// @name Initialization and teardown

/** Begin a capture session

 See AVCaptureSession for acceptable values

 @param sessionPreset Session preset to use
 @param cameraPosition Camera to capture from
 */
- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;

/** Add audio capture to the session. Adding inputs and outputs freezes the capture session momentarily, so you
 can use this method to add the audio inputs and outputs early, if you're going to set the audioEncodingTarget
 later. Returns YES if the audio inputs and outputs were added, or NO if they had already been added.
 */
- (BOOL)addAudioInputsAndOutputs;

/** Remove the audio capture inputs and outputs from this session. Returns YES if the audio inputs and outputs
 were removed, or NO if they hadn't already been added.
 */
- (BOOL)removeAudioInputsAndOutputs;

/** Tear down the capture session
 */
- (void)removeInputsAndOutputs;

/// @name Manage the camera video stream

/** Start camera capturing
 */
- (void)startCameraCapture;

/** Stop camera capturing
 */
- (void)stopCameraCapture;

/** Pause camera capturing
 */
- (void)pauseCameraCapture;

/** Resume camera capturing
 */
- (void)resumeCameraCapture;

/** Process a video sample
 @param sampleBuffer Buffer to process
 */
- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;

/** Process an audio sample
 @param sampleBuffer Buffer to process
 */
- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;

/** Get the position (front, rear) of the source camera
 */
- (AVCaptureDevicePosition)cameraPosition;

/** Get the AVCaptureConnection of the source camera
 */
- (AVCaptureConnection *)videoCaptureConnection;

/** This flips between the front and rear cameras
 */
- (void)rotateCamera;

/// @name Benchmarking

/** When benchmarking is enabled, this will keep a running average of the time from uploading, processing, and final recording or display
 */
- (CGFloat)averageFrameDurationDuringCapture;
- (void)resetBenchmarkAverage;

/// Device capability checks; also exposed as the instance-level readonly properties above.
+ (BOOL)isBackFacingCameraPresent;
+ (BOOL)isFrontFacingCameraPresent;

@end
|