//
//  GPUImageVideoCamera.h
//  GPUImage
//
  1. #import <Foundation/Foundation.h>
  2. #import <AVFoundation/AVFoundation.h>
  3. #import <CoreMedia/CoreMedia.h>
  4. #import "GPUImageContext.h"
  5. #import "GPUImageOutput.h"
  6. extern const GLfloat kColorConversion601[];
  7. extern const GLfloat kColorConversion601FullRange[];
  8. extern const GLfloat kColorConversion709[];
  9. extern NSString *const kGPUImageYUVVideoRangeConversionForRGFragmentShaderString;
  10. extern NSString *const kGPUImageYUVFullRangeConversionForLAFragmentShaderString;
  11. extern NSString *const kGPUImageYUVVideoRangeConversionForLAFragmentShaderString;
  12. //Delegate Protocal for Face Detection.
  13. @protocol GPUImageVideoCameraDelegate <NSObject>
  14. @optional
  15. - (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer;
  16. @end
  17. /**
  18. A GPUImageOutput that provides frames from either camera
  19. */
  20. @interface GPUImageVideoCamera : GPUImageOutput <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate>
  21. {
  22. NSUInteger numberOfFramesCaptured;
  23. CGFloat totalFrameTimeDuringCapture;
  24. AVCaptureSession *_captureSession;
  25. AVCaptureDevice *_inputCamera;
  26. AVCaptureDevice *_microphone;
  27. AVCaptureDeviceInput *videoInput;
  28. AVCaptureVideoDataOutput *videoOutput;
  29. BOOL capturePaused;
  30. GPUImageRotationMode outputRotation, internalRotation;
  31. dispatch_semaphore_t frameRenderingSemaphore;
  32. BOOL captureAsYUV;
  33. GLuint luminanceTexture, chrominanceTexture;
  34. __unsafe_unretained id<GPUImageVideoCameraDelegate> _delegate;
  35. }
  36. /// The AVCaptureSession used to capture from the camera
  37. @property(readonly, retain, nonatomic) AVCaptureSession *captureSession;
  38. /// This enables the capture session preset to be changed on the fly
  39. @property (readwrite, nonatomic, copy) NSString *captureSessionPreset;
  40. /// This sets the frame rate of the camera (iOS 5 and above only)
  41. /**
  42. Setting this to 0 or below will set the frame rate back to the default setting for a particular preset.
  43. */
  44. @property (readwrite) int32_t frameRate;
  45. /// Easy way to tell which cameras are present on device
  46. @property (readonly, getter = isFrontFacingCameraPresent) BOOL frontFacingCameraPresent;
  47. @property (readonly, getter = isBackFacingCameraPresent) BOOL backFacingCameraPresent;
  48. /// This enables the benchmarking mode, which logs out instantaneous and average frame times to the console
  49. @property(readwrite, nonatomic) BOOL runBenchmark;
  50. /// Use this property to manage camera settings. Focus point, exposure point, etc.
  51. @property(readonly) AVCaptureDevice *inputCamera;
  52. /// This determines the rotation applied to the output image, based on the source material
  53. @property(readwrite, nonatomic) UIInterfaceOrientation outputImageOrientation;
  54. /// These properties determine whether or not the two camera orientations should be mirrored. By default, both are NO.
  55. @property(readwrite, nonatomic) BOOL horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera;
  56. @property(nonatomic, assign) id<GPUImageVideoCameraDelegate> delegate;
  57. /// @name Initialization and teardown
  58. /** Begin a capture session
  59. See AVCaptureSession for acceptable values
  60. @param sessionPreset Session preset to use
  61. @param cameraPosition Camera to capture from
  62. */
  63. - (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;
  64. /** Add audio capture to the session. Adding inputs and outputs freezes the capture session momentarily, so you
  65. can use this method to add the audio inputs and outputs early, if you're going to set the audioEncodingTarget
  66. later. Returns YES is the audio inputs and outputs were added, or NO if they had already been added.
  67. */
  68. - (BOOL)addAudioInputsAndOutputs;
  69. /** Remove the audio capture inputs and outputs from this session. Returns YES if the audio inputs and outputs
  70. were removed, or NO is they hadn't already been added.
  71. */
  72. - (BOOL)removeAudioInputsAndOutputs;
  73. /** Tear down the capture session
  74. */
  75. - (void)removeInputsAndOutputs;
  76. /// @name Manage the camera video stream
  77. /** Start camera capturing
  78. */
  79. - (void)startCameraCapture;
  80. /** Stop camera capturing
  81. */
  82. - (void)stopCameraCapture;
  83. /** Pause camera capturing
  84. */
  85. - (void)pauseCameraCapture;
  86. /** Resume camera capturing
  87. */
  88. - (void)resumeCameraCapture;
  89. /** Process a video sample
  90. @param sampleBuffer Buffer to process
  91. */
  92. - (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
  93. /** Process an audio sample
  94. @param sampleBuffer Buffer to process
  95. */
  96. - (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;
  97. /** Get the position (front, rear) of the source camera
  98. */
  99. - (AVCaptureDevicePosition)cameraPosition;
  100. /** Get the AVCaptureConnection of the source camera
  101. */
  102. - (AVCaptureConnection *)videoCaptureConnection;
  103. /** This flips between the front and rear cameras
  104. */
  105. - (void)rotateCamera;
  106. /// @name Benchmarking
  107. /** When benchmarking is enabled, this will keep a running average of the time from uploading, processing, and final recording or display
  108. */
  109. - (CGFloat)averageFrameDurationDuringCapture;
  110. - (void)resetBenchmarkAverage;
  111. + (BOOL)isBackFacingCameraPresent;
  112. + (BOOL)isFrontFacingCameraPresent;
  113. @end