GPUImageVideoCamera.m

#import "GPUImageVideoCamera.h"
#import "GPUImageMovieWriter.h"
#import "GPUImageFilter.h"

// Color Conversion Constants (YUV to RGB) including adjustment from 16-235/16-240 (video range)

// BT.601, which is the standard for SDTV.
const GLfloat kColorConversion601[] = {
//  1.164,    1.164,    1.164,
//  0.0,     -0.392,    2.017,
//  1.596,   -0.813,    0.0,
    1,        1,        1,
    0,       -.39465,   2.03211,
    1.13983, -.58060,   0,
};

// BT.709, which is the standard for HDTV.
const GLfloat kColorConversion709[] = {
//  1.164,    1.164,    1.164,
//  0.0,     -0.213,    2.112,
//  1.793,   -0.533,    0.0,
    1,        1,        1,
    0,       -.21482,   2.12798,
    1.28033, -.38059,   0,
};

// BT.601 full range (ref: http://www.equasys.de/colorconversion.html)
const GLfloat kColorConversion601FullRange[] = {
    1.0,    1.0,    1.0,
    0.0,   -0.343,  1.765,
    1.4,   -0.711,  0.0,
};

NSString *const kGPUImageYUVVideoRangeConversionForRGFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 uniform mediump mat3 colorConversionMatrix;

 void main()
 {
     mediump vec3 yuv;
     lowp vec3 rgb;

     yuv.x = texture2D(luminanceTexture, textureCoordinate).r;
     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).rg - vec2(0.5, 0.5);
     rgb = colorConversionMatrix * yuv;

     gl_FragColor = vec4(rgb, 1);
 }
);

NSString *const kGPUImageYUVFullRangeConversionForLAFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 uniform mediump mat3 colorConversionMatrix;

 void main()
 {
     mediump vec3 yuv;
     lowp vec3 rgb;

     yuv.x = texture2D(luminanceTexture, textureCoordinate).r;
     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5);
     rgb = colorConversionMatrix * yuv;

     gl_FragColor = vec4(rgb, 1);
 }
);

NSString *const kGPUImageYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 uniform mediump mat3 colorConversionMatrix;

 void main()
 {
     mediump vec3 yuv;
     lowp vec3 rgb;

     yuv.x = texture2D(luminanceTexture, textureCoordinate).r - (16.0/255.0);
     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5);
     rgb = colorConversionMatrix * yuv;

     gl_FragColor = vec4(rgb, 1);
 }
);
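
// Illustrative note (added, not part of the original shaders): each fragment shader above computes
// rgb = colorConversionMatrix * yuv after re-centering the chroma samples around zero. Because the
// matrices are uploaded with glUniformMatrix3fv(..., GL_FALSE, ...), each written row of the arrays
// above becomes a column of the GLSL mat3. For a hypothetical full-range BT.601 sample with
// Y = 0.5, Cb = 0.6, Cr = 0.6, the centered vector is yuv = (0.5, 0.1, 0.1) and the shader produces:
//     R = Y + 1.4   * Cr'               = 0.5 + 0.140           = 0.640
//     G = Y - 0.343 * Cb' - 0.711 * Cr' = 0.5 - 0.0343 - 0.0711 = 0.395 (approximately)
//     B = Y + 1.765 * Cb'               = 0.5 + 0.1765          = 0.677 (approximately)
// The video-range LA variant additionally subtracts 16.0/255.0 from the luma sample before applying
// the matrix.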

#pragma mark -
#pragma mark Private methods and instance variables

@interface GPUImageVideoCamera ()
{
    AVCaptureDeviceInput *audioInput;
    AVCaptureAudioDataOutput *audioOutput;
    NSDate *startingCaptureTime;

    dispatch_queue_t cameraProcessingQueue, audioProcessingQueue;

    GLProgram *yuvConversionProgram;
    GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;
    GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;
    GLint yuvConversionMatrixUniform;
    const GLfloat *_preferredConversion;

    BOOL isFullYUVRange;

    int imageBufferWidth, imageBufferHeight;

    BOOL addedAudioInputsDueToEncodingTarget;
}

- (void)updateOrientationSendToTargets;
- (void)convertYUVToRGBOutput;

@end

@implementation GPUImageVideoCamera

@synthesize captureSessionPreset = _captureSessionPreset;
@synthesize captureSession = _captureSession;
@synthesize inputCamera = _inputCamera;
@synthesize runBenchmark = _runBenchmark;
@synthesize outputImageOrientation = _outputImageOrientation;
@synthesize delegate = _delegate;
@synthesize horizontallyMirrorFrontFacingCamera = _horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera = _horizontallyMirrorRearFacingCamera;
@synthesize frameRate = _frameRate;

#pragma mark -
#pragma mark Initialization and teardown

- (id)init;
{
    if (!(self = [self initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack]))
    {
        return nil;
    }

    return self;
}

- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    cameraProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0);
    audioProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0);

    frameRenderingSemaphore = dispatch_semaphore_create(1);

    _frameRate = 0; // This will not set frame rate unless this value gets set to 1 or above
    _runBenchmark = NO;
    capturePaused = NO;
    outputRotation = kGPUImageNoRotation;
    internalRotation = kGPUImageNoRotation;
    captureAsYUV = YES;
    _preferredConversion = kColorConversion709;

    // Grab the back-facing or front-facing camera
    _inputCamera = nil;
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices)
    {
        if ([device position] == cameraPosition)
        {
            _inputCamera = device;
        }
    }

    if (!_inputCamera)
    {
        return nil;
    }

    // Create the capture session
    _captureSession = [[AVCaptureSession alloc] init];
    [_captureSession beginConfiguration];

    // Add the video input
    NSError *error = nil;
    videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_inputCamera error:&error];
    if ([_captureSession canAddInput:videoInput])
    {
        [_captureSession addInput:videoInput];
    }

    // Add the video frame output
    videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [videoOutput setAlwaysDiscardsLateVideoFrames:NO];

//    if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
    if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
    {
        BOOL supportsFullYUVRange = NO;
        NSArray *supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes;
        for (NSNumber *currentPixelFormat in supportedPixelFormats)
        {
            if ([currentPixelFormat intValue] == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
            {
                supportsFullYUVRange = YES;
            }
        }

        if (supportsFullYUVRange)
        {
            [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
            isFullYUVRange = YES;
        }
        else
        {
            [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
            isFullYUVRange = NO;
        }
    }
    else
    {
        [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
    }

    runSynchronouslyOnVideoProcessingQueue(^{
        if (captureAsYUV)
        {
            [GPUImageContext useImageProcessingContext];
//            if ([GPUImageContext deviceSupportsRedTextures])
//            {
//                yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForRGFragmentShaderString];
//            }
//            else
//            {
            if (isFullYUVRange)
            {
                yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVFullRangeConversionForLAFragmentShaderString];
            }
            else
            {
                yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForLAFragmentShaderString];
            }
//            }

            if (!yuvConversionProgram.initialized)
            {
                [yuvConversionProgram addAttribute:@"position"];
                [yuvConversionProgram addAttribute:@"inputTextureCoordinate"];

                if (![yuvConversionProgram link])
                {
                    NSString *progLog = [yuvConversionProgram programLog];
                    NSLog(@"Program link log: %@", progLog);
                    NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
                    NSLog(@"Fragment shader compile log: %@", fragLog);
                    NSString *vertLog = [yuvConversionProgram vertexShaderLog];
                    NSLog(@"Vertex shader compile log: %@", vertLog);
                    yuvConversionProgram = nil;
                    NSAssert(NO, @"Filter shader link failed");
                }
            }

            yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
            yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
            yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
            yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
            yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"];

            [GPUImageContext setActiveShaderProgram:yuvConversionProgram];

            glEnableVertexAttribArray(yuvConversionPositionAttribute);
            glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
        }
    });

    [videoOutput setSampleBufferDelegate:self queue:cameraProcessingQueue];
    if ([_captureSession canAddOutput:videoOutput])
    {
        [_captureSession addOutput:videoOutput];
    }
    else
    {
        NSLog(@"Couldn't add video output");
        return nil;
    }

    _captureSessionPreset = sessionPreset;
    [_captureSession setSessionPreset:_captureSessionPreset];

    // This will let you get 60 FPS video from the 720p preset on an iPhone 4S, but only that device and that preset
//    AVCaptureConnection *conn = [videoOutput connectionWithMediaType:AVMediaTypeVideo];
//
//    if (conn.supportsVideoMinFrameDuration)
//        conn.videoMinFrameDuration = CMTimeMake(1,60);
//    if (conn.supportsVideoMaxFrameDuration)
//        conn.videoMaxFrameDuration = CMTimeMake(1,60);

    [_captureSession commitConfiguration];

    return self;
}
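
// Illustrative usage sketch (added for reference, not part of the original file): a typical capture
// pipeline wires the camera into a filter and a view, then starts capture. GPUImageSepiaFilter and
// GPUImageView are other classes from the GPUImage framework, used here only as example targets.
//
//     GPUImageVideoCamera *videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480
//                                                                            cameraPosition:AVCaptureDevicePositionBack];
//     videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
//
//     GPUImageSepiaFilter *filter = [[GPUImageSepiaFilter alloc] init];
//     [videoCamera addTarget:filter];
//     [filter addTarget:filterView]; // filterView: a GPUImageView already placed in the view hierarchy
//
//     [videoCamera startCameraCapture];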

- (GPUImageFramebuffer *)framebufferForOutput;
{
    return outputFramebuffer;
}

- (void)dealloc
{
    [self stopCameraCapture];
    [videoOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()];
    [audioOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()];

    [self removeInputsAndOutputs];

// ARC forbids explicit message sends of 'release'; since iOS 6 this also covers dispatch_release() calls, so they have to be compiled out in that case.
#if ( (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_6_0) || (!defined(__IPHONE_6_0)) )
    if (frameRenderingSemaphore != NULL)
    {
        dispatch_release(frameRenderingSemaphore);
    }
#endif
}

- (BOOL)addAudioInputsAndOutputs
{
    if (audioOutput)
        return NO;

    [_captureSession beginConfiguration];

    _microphone = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    audioInput = [AVCaptureDeviceInput deviceInputWithDevice:_microphone error:nil];
    if ([_captureSession canAddInput:audioInput])
    {
        [_captureSession addInput:audioInput];
    }
    audioOutput = [[AVCaptureAudioDataOutput alloc] init];

    if ([_captureSession canAddOutput:audioOutput])
    {
        [_captureSession addOutput:audioOutput];
    }
    else
    {
        NSLog(@"Couldn't add audio output");
    }
    [audioOutput setSampleBufferDelegate:self queue:audioProcessingQueue];

    [_captureSession commitConfiguration];
    return YES;
}

- (BOOL)removeAudioInputsAndOutputs
{
    if (!audioOutput)
        return NO;

    [_captureSession beginConfiguration];
    [_captureSession removeInput:audioInput];
    [_captureSession removeOutput:audioOutput];
    audioInput = nil;
    audioOutput = nil;
    _microphone = nil;
    [_captureSession commitConfiguration];
    return YES;
}

- (void)removeInputsAndOutputs;
{
    [_captureSession beginConfiguration];
    if (videoInput) {
        [_captureSession removeInput:videoInput];
        [_captureSession removeOutput:videoOutput];
        videoInput = nil;
        videoOutput = nil;
    }
    if (_microphone != nil)
    {
        [_captureSession removeInput:audioInput];
        [_captureSession removeOutput:audioOutput];
        audioInput = nil;
        audioOutput = nil;
        _microphone = nil;
    }
    [_captureSession commitConfiguration];
}

#pragma mark -
#pragma mark Managing targets

- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;
{
    [super addTarget:newTarget atTextureLocation:textureLocation];

    [newTarget setInputRotation:outputRotation atIndex:textureLocation];
}

#pragma mark -
#pragma mark Manage the camera video stream

- (void)startCameraCapture;
{
    if (![_captureSession isRunning])
    {
        startingCaptureTime = [NSDate date];
        [_captureSession startRunning];
    }
}

- (void)stopCameraCapture;
{
    if ([_captureSession isRunning])
    {
        [_captureSession stopRunning];
    }
}

- (void)pauseCameraCapture;
{
    capturePaused = YES;
}

- (void)resumeCameraCapture;
{
    capturePaused = NO;
}

- (void)rotateCamera
{
    if (self.frontFacingCameraPresent == NO)
        return;

    NSError *error;
    AVCaptureDeviceInput *newVideoInput;
    AVCaptureDevicePosition currentCameraPosition = [[videoInput device] position];

    if (currentCameraPosition == AVCaptureDevicePositionBack)
    {
        currentCameraPosition = AVCaptureDevicePositionFront;
    }
    else
    {
        currentCameraPosition = AVCaptureDevicePositionBack;
    }

    AVCaptureDevice *backFacingCamera = nil;
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices)
    {
        if ([device position] == currentCameraPosition)
        {
            backFacingCamera = device;
        }
    }
    newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:backFacingCamera error:&error];

    if (newVideoInput != nil)
    {
        [_captureSession beginConfiguration];

        [_captureSession removeInput:videoInput];
        if ([_captureSession canAddInput:newVideoInput])
        {
            [_captureSession addInput:newVideoInput];
            videoInput = newVideoInput;
        }
        else
        {
            [_captureSession addInput:videoInput];
        }
        //captureSession.sessionPreset = oriPreset;
        [_captureSession commitConfiguration];
    }

    _inputCamera = backFacingCamera;
    [self setOutputImageOrientation:_outputImageOrientation];
}

- (AVCaptureDevicePosition)cameraPosition
{
    return [[videoInput device] position];
}

+ (BOOL)isBackFacingCameraPresent;
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];

    for (AVCaptureDevice *device in devices)
    {
        if ([device position] == AVCaptureDevicePositionBack)
            return YES;
    }

    return NO;
}

- (BOOL)isBackFacingCameraPresent
{
    return [GPUImageVideoCamera isBackFacingCameraPresent];
}

+ (BOOL)isFrontFacingCameraPresent;
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];

    for (AVCaptureDevice *device in devices)
    {
        if ([device position] == AVCaptureDevicePositionFront)
            return YES;
    }

    return NO;
}

- (BOOL)isFrontFacingCameraPresent
{
    return [GPUImageVideoCamera isFrontFacingCameraPresent];
}

- (void)setCaptureSessionPreset:(NSString *)captureSessionPreset;
{
    [_captureSession beginConfiguration];

    _captureSessionPreset = captureSessionPreset;
    [_captureSession setSessionPreset:_captureSessionPreset];

    [_captureSession commitConfiguration];
}

- (void)setFrameRate:(int32_t)frameRate;
{
    _frameRate = frameRate;

    if (_frameRate > 0)
    {
        if ([_inputCamera respondsToSelector:@selector(setActiveVideoMinFrameDuration:)] &&
            [_inputCamera respondsToSelector:@selector(setActiveVideoMaxFrameDuration:)]) {

            NSError *error;
            [_inputCamera lockForConfiguration:&error];
            if (error == nil) {
#if defined(__IPHONE_7_0)
                [_inputCamera setActiveVideoMinFrameDuration:CMTimeMake(1, _frameRate)];
                [_inputCamera setActiveVideoMaxFrameDuration:CMTimeMake(1, _frameRate)];
#endif
            }
            [_inputCamera unlockForConfiguration];

        } else {

            for (AVCaptureConnection *connection in videoOutput.connections)
            {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
                if ([connection respondsToSelector:@selector(setVideoMinFrameDuration:)])
                    connection.videoMinFrameDuration = CMTimeMake(1, _frameRate);

                if ([connection respondsToSelector:@selector(setVideoMaxFrameDuration:)])
                    connection.videoMaxFrameDuration = CMTimeMake(1, _frameRate);
#pragma clang diagnostic pop
            }
        }
    }
    else
    {
        if ([_inputCamera respondsToSelector:@selector(setActiveVideoMinFrameDuration:)] &&
            [_inputCamera respondsToSelector:@selector(setActiveVideoMaxFrameDuration:)]) {

            NSError *error;
            [_inputCamera lockForConfiguration:&error];
            if (error == nil) {
#if defined(__IPHONE_7_0)
                [_inputCamera setActiveVideoMinFrameDuration:kCMTimeInvalid];
                [_inputCamera setActiveVideoMaxFrameDuration:kCMTimeInvalid];
#endif
            }
            [_inputCamera unlockForConfiguration];

        } else {

            for (AVCaptureConnection *connection in videoOutput.connections)
            {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
                if ([connection respondsToSelector:@selector(setVideoMinFrameDuration:)])
                    connection.videoMinFrameDuration = kCMTimeInvalid; // This sets videoMinFrameDuration back to default
                if ([connection respondsToSelector:@selector(setVideoMaxFrameDuration:)])
                    connection.videoMaxFrameDuration = kCMTimeInvalid; // This sets videoMaxFrameDuration back to default
#pragma clang diagnostic pop
            }
        }
    }
}

- (int32_t)frameRate;
{
    return _frameRate;
}

- (AVCaptureConnection *)videoCaptureConnection {
    for (AVCaptureConnection *connection in [videoOutput connections] ) {
        for ( AVCaptureInputPort *port in [connection inputPorts] ) {
            if ( [[port mediaType] isEqual:AVMediaTypeVideo] ) {
                return connection;
            }
        }
    }

    return nil;
}

#define INITIALFRAMESTOIGNOREFORBENCHMARK 5

- (void)updateTargetsForVideoCameraUsingCacheTextureAtWidth:(int)bufferWidth height:(int)bufferHeight time:(CMTime)currentTime;
{
    // First, update all the framebuffers in the targets
    for (id<GPUImageInput> currentTarget in targets)
    {
        if ([currentTarget enabled])
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];

            if (currentTarget != self.targetToIgnoreForUpdates)
            {
                [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];
                [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget];

                if ([currentTarget wantsMonochromeInput] && captureAsYUV)
                {
                    [currentTarget setCurrentlyReceivingMonochromeInput:YES];
                    // TODO: Replace optimization for monochrome output
                    [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
                }
                else
                {
                    [currentTarget setCurrentlyReceivingMonochromeInput:NO];
                    [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
                }
            }
            else
            {
                [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];
                [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
            }
        }
    }

    // Then release our hold on the local framebuffer to send it back to the cache as soon as it's no longer needed
    [outputFramebuffer unlock];
    outputFramebuffer = nil;

    // Finally, trigger rendering as needed
    for (id<GPUImageInput> currentTarget in targets)
    {
        if ([currentTarget enabled])
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];

            if (currentTarget != self.targetToIgnoreForUpdates)
            {
                [currentTarget newFrameReadyAtTime:currentTime atIndex:textureIndexOfTarget];
            }
        }
    }
}

- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
{
    if (capturePaused)
    {
        return;
    }

    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
    CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
    int bufferWidth = (int) CVPixelBufferGetWidth(cameraFrame);
    int bufferHeight = (int) CVPixelBufferGetHeight(cameraFrame);
    CFTypeRef colorAttachments = CVBufferGetAttachment(cameraFrame, kCVImageBufferYCbCrMatrixKey, NULL);
    if (colorAttachments != NULL)
    {
        if (CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo)
        {
            if (isFullYUVRange)
            {
                _preferredConversion = kColorConversion601FullRange;
            }
            else
            {
                _preferredConversion = kColorConversion601;
            }
        }
        else
        {
            _preferredConversion = kColorConversion709;
        }
    }
    else
    {
        if (isFullYUVRange)
        {
            _preferredConversion = kColorConversion601FullRange;
        }
        else
        {
            _preferredConversion = kColorConversion601;
        }
    }

    CMTime currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

    [GPUImageContext useImageProcessingContext];

    if ([GPUImageContext supportsFastTextureUpload] && captureAsYUV)
    {
        CVOpenGLESTextureRef luminanceTextureRef = NULL;
        CVOpenGLESTextureRef chrominanceTextureRef = NULL;

//        if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
        if (CVPixelBufferGetPlaneCount(cameraFrame) > 0) // Check for YUV planar inputs to do RGB conversion
        {
            CVPixelBufferLockBaseAddress(cameraFrame, 0);

            if ( (imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight) )
            {
                imageBufferWidth = bufferWidth;
                imageBufferHeight = bufferHeight;
            }

            CVReturn err;
            // Y-plane
            glActiveTexture(GL_TEXTURE4);
            if ([GPUImageContext deviceSupportsRedTextures])
            {
//                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, cameraFrame, NULL, GL_TEXTURE_2D, GL_RED_EXT, bufferWidth, bufferHeight, GL_RED_EXT, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
            }
            else
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
            }
            if (err)
            {
                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }

            luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);
            glBindTexture(GL_TEXTURE_2D, luminanceTexture);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

            // UV-plane
            glActiveTexture(GL_TEXTURE5);
            if ([GPUImageContext deviceSupportsRedTextures])
            {
//                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, cameraFrame, NULL, GL_TEXTURE_2D, GL_RG_EXT, bufferWidth/2, bufferHeight/2, GL_RG_EXT, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
            }
            else
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
            }
            if (err)
            {
                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }

            chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);
            glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

//            if (!allTargetsWantMonochromeData)
//            {
                [self convertYUVToRGBOutput];
//            }

            int rotatedImageBufferWidth = bufferWidth, rotatedImageBufferHeight = bufferHeight;

            if (GPUImageRotationSwapsWidthAndHeight(internalRotation))
            {
                rotatedImageBufferWidth = bufferHeight;
                rotatedImageBufferHeight = bufferWidth;
            }

            [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:rotatedImageBufferWidth height:rotatedImageBufferHeight time:currentTime];

            CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
            CFRelease(luminanceTextureRef);
            CFRelease(chrominanceTextureRef);
        }
        else
        {
            // TODO: Mesh this with the output framebuffer structure
//            CVPixelBufferLockBaseAddress(cameraFrame, 0);
//
//            CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_RGBA, bufferWidth, bufferHeight, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture);
//
//            if (!texture || err) {
//                NSLog(@"Camera CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err);
//                NSAssert(NO, @"Camera failure");
//                return;
//            }
//
//            outputTexture = CVOpenGLESTextureGetName(texture);
//            // glBindTexture(CVOpenGLESTextureGetTarget(texture), outputTexture);
//            glBindTexture(GL_TEXTURE_2D, outputTexture);
//            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
//            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
//            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
//            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
//
//            [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bufferWidth height:bufferHeight time:currentTime];
//
//            CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
//            CFRelease(texture);
//
//            outputTexture = 0;
        }

        if (_runBenchmark)
        {
            numberOfFramesCaptured++;
            if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)
            {
                CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
                totalFrameTimeDuringCapture += currentFrameTime;
                NSLog(@"Average frame time : %f ms", [self averageFrameDurationDuringCapture]);
                NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime);
            }
        }
    }
    else
    {
        CVPixelBufferLockBaseAddress(cameraFrame, 0);

        int bytesPerRow = (int) CVPixelBufferGetBytesPerRow(cameraFrame);
        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bytesPerRow / 4, bufferHeight) onlyTexture:YES];
        [outputFramebuffer activateFramebuffer];

        glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);

//        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));

        // Using BGRA extension to pull in video frame data directly
        // The use of bytesPerRow / 4 accounts for a display glitch present in preview video frames when using the photo preset on the camera
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bytesPerRow / 4, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));

        [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bytesPerRow / 4 height:bufferHeight time:currentTime];

        CVPixelBufferUnlockBaseAddress(cameraFrame, 0);

        if (_runBenchmark)
        {
            numberOfFramesCaptured++;
            if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)
            {
                CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
                totalFrameTimeDuringCapture += currentFrameTime;
            }
        }
    }
}

- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;
{
    [self.audioEncodingTarget processAudioBuffer:sampleBuffer];
}

- (void)convertYUVToRGBOutput;
{
    [GPUImageContext setActiveShaderProgram:yuvConversionProgram];

    int rotatedImageBufferWidth = imageBufferWidth, rotatedImageBufferHeight = imageBufferHeight;

    if (GPUImageRotationSwapsWidthAndHeight(internalRotation))
    {
        rotatedImageBufferWidth = imageBufferHeight;
        rotatedImageBufferHeight = imageBufferWidth;
    }

    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(rotatedImageBufferWidth, rotatedImageBufferHeight) textureOptions:self.outputTextureOptions onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];

    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };

    glActiveTexture(GL_TEXTURE4);
    glBindTexture(GL_TEXTURE_2D, luminanceTexture);
    glUniform1i(yuvConversionLuminanceTextureUniform, 4);

    glActiveTexture(GL_TEXTURE5);
    glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
    glUniform1i(yuvConversionChrominanceTextureUniform, 5);

    glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);

    glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
    glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [GPUImageFilter textureCoordinatesForRotation:internalRotation]);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}

#pragma mark -
#pragma mark Benchmarking

- (CGFloat)averageFrameDurationDuringCapture;
{
    return (totalFrameTimeDuringCapture / (CGFloat)(numberOfFramesCaptured - INITIALFRAMESTOIGNOREFORBENCHMARK)) * 1000.0;
}

- (void)resetBenchmarkAverage;
{
    numberOfFramesCaptured = 0;
    totalFrameTimeDuringCapture = 0.0;
}

#pragma mark -
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if (!self.captureSession.isRunning)
    {
        return;
    }
    else if (captureOutput == audioOutput)
    {
        [self processAudioSampleBuffer:sampleBuffer];
    }
    else
    {
        if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0)
        {
            return;
        }

        CFRetain(sampleBuffer);
        runAsynchronouslyOnVideoProcessingQueue(^{
            // Feature Detection Hook.
            if (self.delegate)
            {
                [self.delegate willOutputSampleBuffer:sampleBuffer];
            }

            [self processVideoSampleBuffer:sampleBuffer];

            CFRelease(sampleBuffer);
            dispatch_semaphore_signal(frameRenderingSemaphore);
        });
    }
}

#pragma mark -
#pragma mark Accessors

- (void)setAudioEncodingTarget:(GPUImageMovieWriter *)newValue;
{
    if (newValue) {
        /* Add audio inputs and outputs, if necessary */
        addedAudioInputsDueToEncodingTarget |= [self addAudioInputsAndOutputs];
    } else if (addedAudioInputsDueToEncodingTarget) {
        /* Remove audio inputs and outputs, if they were added by previously setting the audio encoding target */
        [self removeAudioInputsAndOutputs];
        addedAudioInputsDueToEncodingTarget = NO;
    }

    [super setAudioEncodingTarget:newValue];
}
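
// Illustrative sketch (added; assumes standard GPUImageMovieWriter usage, which is defined elsewhere
// in the framework): setting the audio encoding target before recording routes microphone samples
// from processAudioSampleBuffer: into the movie writer alongside the filtered video frames.
//
//     GPUImageMovieWriter *movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL
//                                                                                 size:CGSizeMake(480.0, 640.0)];
//     [filter addTarget:movieWriter];                // filter: the last filter in the camera's chain
//     videoCamera.audioEncodingTarget = movieWriter;
//     [movieWriter startRecording];
//     // ... later ...
//     [movieWriter finishRecording];
//     videoCamera.audioEncodingTarget = nil;         // tears down the audio input/output added above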

- (void)updateOrientationSendToTargets;
{
    runSynchronouslyOnVideoProcessingQueue(^{

        // From the iOS 5.0 release notes:
        // In previous iOS versions, the front-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeLeft and the back-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeRight.

        if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
        {
            outputRotation = kGPUImageNoRotation;
            if ([self cameraPosition] == AVCaptureDevicePositionBack)
            {
                if (_horizontallyMirrorRearFacingCamera)
                {
                    switch(_outputImageOrientation)
                    {
                        case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRightFlipVertical; break;
                        case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotate180; break;
                        case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageFlipHorizonal; break;
                        case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageFlipVertical; break;
                        default:internalRotation = kGPUImageNoRotation;
                    }
                }
                else
                {
                    switch(_outputImageOrientation)
                    {
                        case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRight; break;
                        case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotateLeft; break;
                        case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageRotate180; break;
                        case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageNoRotation; break;
                        default:internalRotation = kGPUImageNoRotation;
                    }
                }
            }
            else
            {
                if (_horizontallyMirrorFrontFacingCamera)
                {
                    switch(_outputImageOrientation)
                    {
                        case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRightFlipVertical; break;
                        case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotateRightFlipHorizontal; break;
                        case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageFlipHorizonal; break;
                        case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageFlipVertical; break;
                        default:internalRotation = kGPUImageNoRotation;
                    }
                }
                else
                {
                    switch(_outputImageOrientation)
                    {
                        case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRight; break;
                        case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotateLeft; break;
                        case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageNoRotation; break;
                        case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageRotate180; break;
                        default:internalRotation = kGPUImageNoRotation;
                    }
                }
            }
        }
        else
        {
            if ([self cameraPosition] == AVCaptureDevicePositionBack)
            {
                if (_horizontallyMirrorRearFacingCamera)
                {
                    switch(_outputImageOrientation)
                    {
                        case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRightFlipVertical; break;
                        case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotate180; break;
                        case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageFlipHorizonal; break;
                        case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageFlipVertical; break;
                        default:outputRotation = kGPUImageNoRotation;
                    }
                }
                else
                {
                    switch(_outputImageOrientation)
                    {
                        case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRight; break;
                        case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotateLeft; break;
                        case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageRotate180; break;
                        case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageNoRotation; break;
                        default:outputRotation = kGPUImageNoRotation;
                    }
                }
            }
            else
            {
                if (_horizontallyMirrorFrontFacingCamera)
                {
                    switch(_outputImageOrientation)
                    {
                        case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRightFlipVertical; break;
                        case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotateRightFlipHorizontal; break;
                        case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageFlipHorizonal; break;
                        case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageFlipVertical; break;
                        default:outputRotation = kGPUImageNoRotation;
                    }
                }
                else
                {
                    switch(_outputImageOrientation)
                    {
                        case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRight; break;
                        case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotateLeft; break;
                        case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageNoRotation; break;
                        case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageRotate180; break;
                        default:outputRotation = kGPUImageNoRotation;
                    }
                }
            }
        }

        for (id<GPUImageInput> currentTarget in targets)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            [currentTarget setInputRotation:outputRotation atIndex:[[targetTextureIndices objectAtIndex:indexOfObject] integerValue]];
        }
    });
}

- (void)setOutputImageOrientation:(UIInterfaceOrientation)newValue;
{
    _outputImageOrientation = newValue;
    [self updateOrientationSendToTargets];
}

- (void)setHorizontallyMirrorFrontFacingCamera:(BOOL)newValue
{
    _horizontallyMirrorFrontFacingCamera = newValue;
    [self updateOrientationSendToTargets];
}

- (void)setHorizontallyMirrorRearFacingCamera:(BOOL)newValue
{
    _horizontallyMirrorRearFacingCamera = newValue;
    [self updateOrientationSendToTargets];
}

@end