12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135 |
- #import "GPUImageVideoCamera.h"
- #import "GPUImageMovieWriter.h"
- #import "GPUImageFilter.h"
// Color-conversion constants (YUV -> RGB), including the adjustment from
// 16-235 / 16-240 (video range) where applicable.

// BT.601, the standard for SDTV.
const GLfloat kColorConversion601[] = {
        1.0,      1.0,     1.0,
        0.0,     -0.39465, 2.03211,
        1.13983, -0.58060, 0.0,
};

// BT.709, the standard for HDTV.
const GLfloat kColorConversion709[] = {
        1.0,      1.0,     1.0,
        0.0,     -0.21482, 2.12798,
        1.28033, -0.38059, 0.0,
};

// BT.601 full range (ref: http://www.equasys.de/colorconversion.html)
const GLfloat kColorConversion601FullRange[] = {
    1.0,    1.0,    1.0,
    0.0,    -0.343, 1.765,
    1.4,    -0.711, 0.0,
};
// Fragment shader: converts biplanar YUV to RGB when the chrominance plane is
// uploaded as a red/green (RG) texture. Chroma is re-centered around zero
// before applying the active conversion matrix.
NSString *const kGPUImageYUVVideoRangeConversionForRGFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 
 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 uniform mediump mat3 colorConversionMatrix;
 
 void main()
 {
     mediump vec3 yuv;
     lowp vec3 rgb;
     
     yuv.x = texture2D(luminanceTexture, textureCoordinate).r;
     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).rg - vec2(0.5, 0.5);
     rgb = colorConversionMatrix * yuv;
     
     gl_FragColor = vec4(rgb, 1);
 }
);
// Fragment shader: full-range YUV -> RGB for chrominance uploaded as a
// luminance-alpha (LA) texture, so Cb/Cr are sampled from the .r and .a
// channels. No luma offset is applied (full range).
NSString *const kGPUImageYUVFullRangeConversionForLAFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 
 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 uniform mediump mat3 colorConversionMatrix;
 
 void main()
 {
     mediump vec3 yuv;
     lowp vec3 rgb;
     
     yuv.x = texture2D(luminanceTexture, textureCoordinate).r;
     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5);
     rgb = colorConversionMatrix * yuv;
     
     gl_FragColor = vec4(rgb, 1);
 }
);
// Fragment shader: video-range YUV -> RGB for luminance-alpha chrominance
// textures. The 16/255 offset removes the video-range luma footroom before
// the matrix multiply.
NSString *const kGPUImageYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 
 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 uniform mediump mat3 colorConversionMatrix;
 
 void main()
 {
     mediump vec3 yuv;
     lowp vec3 rgb;
     
     yuv.x = texture2D(luminanceTexture, textureCoordinate).r - (16.0/255.0);
     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5);
     rgb = colorConversionMatrix * yuv;
     
     gl_FragColor = vec4(rgb, 1);
 }
);
#pragma mark -
#pragma mark Private methods and instance variables

@interface GPUImageVideoCamera () 
{
    AVCaptureDeviceInput *audioInput;
    AVCaptureAudioDataOutput *audioOutput;
    NSDate *startingCaptureTime;           // Timestamp of -startCameraCapture (informational)
    
    // Video frames are handled at high priority, audio at low priority.
    dispatch_queue_t cameraProcessingQueue, audioProcessingQueue;
    
    // YUV -> RGB conversion program and its cached attribute/uniform handles.
    GLProgram *yuvConversionProgram;
    GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;
    GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;
    GLint yuvConversionMatrixUniform;
    const GLfloat *_preferredConversion;   // Points at one of the kColorConversion* tables
    
    BOOL isFullYUVRange;                   // YES when the output delivers full-range 4:2:0 YUV
    
    // Last-seen camera frame dimensions, used to size the conversion framebuffer.
    int imageBufferWidth, imageBufferHeight;
    
    // Tracks whether audio I/O was attached implicitly by setAudioEncodingTarget:.
    BOOL addedAudioInputsDueToEncodingTarget;
}

- (void)updateOrientationSendToTargets;
- (void)convertYUVToRGBOutput;

@end
@implementation GPUImageVideoCamera

@synthesize captureSessionPreset = _captureSessionPreset;
@synthesize captureSession = _captureSession;
@synthesize inputCamera = _inputCamera;
@synthesize runBenchmark = _runBenchmark;
@synthesize outputImageOrientation = _outputImageOrientation;
@synthesize delegate = _delegate;
@synthesize horizontallyMirrorFrontFacingCamera = _horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera = _horizontallyMirrorRearFacingCamera;
@synthesize frameRate = _frameRate;
- #pragma mark -
- #pragma mark Initialization and teardown
/// Convenience initializer: 640x480 preset with the back-facing camera.
- (id)init;
{
    self = [self initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack];
    return self;
}
/// Designated initializer. Locates the camera at the requested position,
/// configures an AVCaptureSession with the given preset, negotiates the pixel
/// format (biplanar YUV when fast texture upload is available, BGRA
/// otherwise), and builds the matching YUV->RGB conversion shader program.
/// Returns nil when no camera exists at the requested position or the video
/// output cannot be attached.
- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;
{
    if (!(self = [super init]))
    {
        return nil;
    }
    
    cameraProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH,0);
    audioProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW,0);

    // Allows at most one frame in flight through the processing pipeline.
    frameRenderingSemaphore = dispatch_semaphore_create(1);

    _frameRate = 0; // This will not set frame rate unless this value gets set to 1 or above
    _runBenchmark = NO;
    capturePaused = NO;
    outputRotation = kGPUImageNoRotation;
    internalRotation = kGPUImageNoRotation;
    captureAsYUV = YES;
    _preferredConversion = kColorConversion709;
    
    // Grab the back-facing or front-facing camera
    _inputCamera = nil;
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices)
    {
        if ([device position] == cameraPosition)
        {
            _inputCamera = device;
        }
    }
    
    if (!_inputCamera) {
        return nil;
    }
    
    // Create the capture session
    _captureSession = [[AVCaptureSession alloc] init];
    
    [_captureSession beginConfiguration];
    
    // Add the video input
    NSError *error = nil;
    videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_inputCamera error:&error];
    if ([_captureSession canAddInput:videoInput])
    {
        [_captureSession addInput:videoInput];
    }
    
    // Add the video frame output
    videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [videoOutput setAlwaysDiscardsLateVideoFrames:NO];
    
    if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
    {
        // Prefer full-range YUV when the output offers it; remember which
        // variant was negotiated so the matching shader can be selected below.
        BOOL supportsFullYUVRange = NO;
        NSArray *supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes;
        for (NSNumber *currentPixelFormat in supportedPixelFormats)
        {
            if ([currentPixelFormat intValue] == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
            {
                supportsFullYUVRange = YES;
            }
        }
        
        if (supportsFullYUVRange)
        {
            [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
            isFullYUVRange = YES;
        }
        else
        {
            [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
            isFullYUVRange = NO;
        }
    }
    else
    {
        [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
    }
    
    runSynchronouslyOnVideoProcessingQueue(^{
        if (captureAsYUV)
        {
            [GPUImageContext useImageProcessingContext];
            
            // Pick the conversion shader matching the negotiated YUV range.
            if (isFullYUVRange)
            {
                yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVFullRangeConversionForLAFragmentShaderString];
            }
            else
            {
                yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForLAFragmentShaderString];
            }
            
            if (!yuvConversionProgram.initialized)
            {
                [yuvConversionProgram addAttribute:@"position"];
                [yuvConversionProgram addAttribute:@"inputTextureCoordinate"];
                
                if (![yuvConversionProgram link])
                {
                    // Dump all three logs before asserting so the failure is diagnosable.
                    NSString *progLog = [yuvConversionProgram programLog];
                    NSLog(@"Program link log: %@", progLog);
                    NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
                    NSLog(@"Fragment shader compile log: %@", fragLog);
                    NSString *vertLog = [yuvConversionProgram vertexShaderLog];
                    NSLog(@"Vertex shader compile log: %@", vertLog);
                    yuvConversionProgram = nil;
                    NSAssert(NO, @"Filter shader link failed");
                }
            }
            
            yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
            yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
            yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
            yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
            yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"];
            
            [GPUImageContext setActiveShaderProgram:yuvConversionProgram];
            
            glEnableVertexAttribArray(yuvConversionPositionAttribute);
            glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
        }
    });
    
    [videoOutput setSampleBufferDelegate:self queue:cameraProcessingQueue];
    if ([_captureSession canAddOutput:videoOutput])
    {
        [_captureSession addOutput:videoOutput];
    }
    else
    {
        NSLog(@"Couldn't add video output");
        return nil;
    }
    
    _captureSessionPreset = sessionPreset;
    [_captureSession setSessionPreset:_captureSessionPreset];
    
    [_captureSession commitConfiguration];
    
    return self;
}
/// Exposes the camera's current output framebuffer to the rendering chain.
- (GPUImageFramebuffer *)framebufferForOutput;
{
    return outputFramebuffer;
}
/// Teardown: stop the session, detach sample-buffer delegates, and remove all
/// capture inputs/outputs before the object goes away.
- (void)dealloc
{
    [self stopCameraCapture];
    [videoOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()];
    [audioOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()];
    
    [self removeInputsAndOutputs];
    
// ARC forbids explicit message send of 'release'; since iOS 6 even for dispatch_release() calls: stripping it out in that case is required.
#if ( (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_6_0) || (!defined(__IPHONE_6_0)) )
    if (frameRenderingSemaphore != NULL)
    {
        dispatch_release(frameRenderingSemaphore);
    }
#endif
}
/// Attaches the default microphone input and an audio data output to the
/// session. Returns NO when audio I/O is already attached, YES otherwise.
- (BOOL)addAudioInputsAndOutputs
{
    // Already attached — nothing to do.
    if (audioOutput)
        return NO;
    
    [_captureSession beginConfiguration];
    
    _microphone = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    audioInput = [AVCaptureDeviceInput deviceInputWithDevice:_microphone error:nil];
    if ([_captureSession canAddInput:audioInput])
    {
        [_captureSession addInput:audioInput];
    }
    
    audioOutput = [[AVCaptureAudioDataOutput alloc] init];
    if ([_captureSession canAddOutput:audioOutput])
    {
        [_captureSession addOutput:audioOutput];
    }
    else
    {
        NSLog(@"Couldn't add audio output");
    }
    [audioOutput setSampleBufferDelegate:self queue:audioProcessingQueue];
    
    [_captureSession commitConfiguration];
    return YES;
}
/// Detaches audio input/output from the session. Returns NO when no audio
/// I/O was attached, YES after a successful removal.
- (BOOL)removeAudioInputsAndOutputs
{
    if (audioOutput == nil)
        return NO;
    
    [_captureSession beginConfiguration];
    [_captureSession removeInput:audioInput];
    [_captureSession removeOutput:audioOutput];
    audioInput = nil;
    audioOutput = nil;
    _microphone = nil;
    [_captureSession commitConfiguration];
    return YES;
}
/// Strips every input and output (video, then audio) from the capture
/// session inside a single configuration transaction.
- (void)removeInputsAndOutputs;
{
    [_captureSession beginConfiguration];
    
    if (videoInput) {
        [_captureSession removeInput:videoInput];
        [_captureSession removeOutput:videoOutput];
        videoInput = nil;
        videoOutput = nil;
    }
    
    if (_microphone != nil)
    {
        [_captureSession removeInput:audioInput];
        [_captureSession removeOutput:audioOutput];
        audioInput = nil;
        audioOutput = nil;
        _microphone = nil;
    }
    
    [_captureSession commitConfiguration];
}
- #pragma mark -
- #pragma mark Managing targets
/// Registers a downstream target and immediately pushes the current output
/// rotation to it so its first frame renders with the right orientation.
- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;
{
    [super addTarget:newTarget atTextureLocation:textureLocation];
    [newTarget setInputRotation:outputRotation atIndex:textureLocation];
}
- #pragma mark -
- #pragma mark Manage the camera video stream
/// Starts the capture session if it is not already running.
- (void)startCameraCapture;
{
    if (![_captureSession isRunning])
    {
        startingCaptureTime = [NSDate date];
        [_captureSession startRunning];
    }
}
/// Stops the capture session if it is currently running.
- (void)stopCameraCapture;
{
    if ([_captureSession isRunning])
    {
        [_captureSession stopRunning];
    }
}
/// Pauses frame processing; the session keeps running but incoming frames
/// are dropped in -processVideoSampleBuffer:.
- (void)pauseCameraCapture;
{
    capturePaused = YES;
}
/// Resumes frame processing after -pauseCameraCapture.
- (void)resumeCameraCapture;
{
    capturePaused = NO;
}
/// Swaps between the front- and back-facing cameras. A no-op when the device
/// has no front camera. On failure to create the new input, the original
/// input is restored.
- (void)rotateCamera
{
    if (self.frontFacingCameraPresent == NO)
        return;
    
    NSError *error;
    AVCaptureDeviceInput *newVideoInput;
    AVCaptureDevicePosition currentCameraPosition = [[videoInput device] position];
    
    // Flip to the opposite position.
    currentCameraPosition = (currentCameraPosition == AVCaptureDevicePositionBack)
        ? AVCaptureDevicePositionFront
        : AVCaptureDevicePositionBack;
    
    // Find the device at the new position.
    AVCaptureDevice *replacementCamera = nil;
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo])
    {
        if ([device position] == currentCameraPosition)
        {
            replacementCamera = device;
        }
    }
    
    newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:replacementCamera error:&error];
    
    if (newVideoInput != nil)
    {
        [_captureSession beginConfiguration];
        
        [_captureSession removeInput:videoInput];
        if ([_captureSession canAddInput:newVideoInput])
        {
            [_captureSession addInput:newVideoInput];
            videoInput = newVideoInput;
        }
        else
        {
            // Couldn't attach the new camera — fall back to the old input.
            [_captureSession addInput:videoInput];
        }
        
        [_captureSession commitConfiguration];
    }
    
    _inputCamera = replacementCamera;
    // Re-apply the orientation so mirroring/rotation match the new camera.
    [self setOutputImageOrientation:_outputImageOrientation];
}
/// Position (front/back) of the camera currently feeding the session.
- (AVCaptureDevicePosition)cameraPosition 
{
    return [[videoInput device] position];
}
/// YES when the device has at least one back-facing camera.
+ (BOOL)isBackFacingCameraPresent;
{
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo])
    {
        if ([device position] == AVCaptureDevicePositionBack)
        {
            return YES;
        }
    }
    
    return NO;
}
/// Instance-level convenience for +isBackFacingCameraPresent.
- (BOOL)isBackFacingCameraPresent
{
    return [GPUImageVideoCamera isBackFacingCameraPresent];
}
/// YES when the device has at least one front-facing camera.
+ (BOOL)isFrontFacingCameraPresent;
{
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo])
    {
        if ([device position] == AVCaptureDevicePositionFront)
        {
            return YES;
        }
    }
    
    return NO;
}
/// Instance-level convenience for +isFrontFacingCameraPresent.
- (BOOL)isFrontFacingCameraPresent
{
    return [GPUImageVideoCamera isFrontFacingCameraPresent];
}
/// Applies a new session preset inside a configuration transaction.
- (void)setCaptureSessionPreset:(NSString *)captureSessionPreset;
{
    [_captureSession beginConfiguration];
    
    _captureSessionPreset = captureSessionPreset;
    [_captureSession setSessionPreset:_captureSessionPreset];
    
    [_captureSession commitConfiguration];
}
/// Sets the capture frame rate. A value of 0 or below restores the
/// device/connection defaults (kCMTimeInvalid). Uses the iOS 7+ device-level
/// active frame-duration API when available, otherwise falls back to the
/// deprecated per-connection properties.
///
/// Fixes over the previous version:
///  - checks the BOOL return of -lockForConfiguration: instead of testing the
///    NSError pointer (the Cocoa contract is to check the return value);
///  - only calls -unlockForConfiguration after a successful lock (the old
///    code unlocked unconditionally, unbalancing lock/unlock on failure);
///  - computes the target CMTime once instead of duplicating both branches.
- (void)setFrameRate:(int32_t)frameRate;
{
    _frameRate = frameRate;
    
    // kCMTimeInvalid resets min/max frame duration to the system default.
    CMTime frameDuration = (_frameRate > 0) ? CMTimeMake(1, _frameRate) : kCMTimeInvalid;
    
    if ([_inputCamera respondsToSelector:@selector(setActiveVideoMinFrameDuration:)] &&
        [_inputCamera respondsToSelector:@selector(setActiveVideoMaxFrameDuration:)]) {
        
        NSError *error = nil;
        if ([_inputCamera lockForConfiguration:&error]) {
#if defined(__IPHONE_7_0)
            [_inputCamera setActiveVideoMinFrameDuration:frameDuration];
            [_inputCamera setActiveVideoMaxFrameDuration:frameDuration];
#endif
            [_inputCamera unlockForConfiguration];
        } else {
            NSLog(@"Error locking camera for frame rate configuration: %@", error);
        }
        
    } else {
        
        // Pre-iOS 7 fallback: configure each video connection directly.
        for (AVCaptureConnection *connection in videoOutput.connections)
        {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
            if ([connection respondsToSelector:@selector(setVideoMinFrameDuration:)])
                connection.videoMinFrameDuration = frameDuration;
            
            if ([connection respondsToSelector:@selector(setVideoMaxFrameDuration:)])
                connection.videoMaxFrameDuration = frameDuration;
#pragma clang diagnostic pop
        }
    }
}
/// Current requested frame rate (0 means "system default").
- (int32_t)frameRate;
{
    return _frameRate;
}
/// Returns the first capture connection carrying video, or nil when none of
/// the output's connections has a video input port.
- (AVCaptureConnection *)videoCaptureConnection {
    for (AVCaptureConnection *connection in [videoOutput connections])
    {
        for (AVCaptureInputPort *port in [connection inputPorts])
        {
            if ([[port mediaType] isEqual:AVMediaTypeVideo])
            {
                return connection;
            }
        }
    }
    
    return nil;
}
#define INITIALFRAMESTOIGNOREFORBENCHMARK 5

/// Distributes the freshly rendered camera framebuffer to all enabled
/// targets in two passes: first hand every target its rotation, size and
/// framebuffer, then release the local framebuffer hold, and only after that
/// trigger rendering — so the framebuffer can return to the cache as soon as
/// the last target is done with it.
- (void)updateTargetsForVideoCameraUsingCacheTextureAtWidth:(int)bufferWidth height:(int)bufferHeight time:(CMTime)currentTime;
{
    // Pass 1: update rotation/size and attach the framebuffer.
    for (id<GPUImageInput> currentTarget in targets)
    {
        if (![currentTarget enabled])
        {
            continue;
        }
        
        NSInteger indexOfObject = [targets indexOfObject:currentTarget];
        NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
        
        if (currentTarget != self.targetToIgnoreForUpdates)
        {
            [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];
            [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget];
            
            if ([currentTarget wantsMonochromeInput] && captureAsYUV)
            {
                [currentTarget setCurrentlyReceivingMonochromeInput:YES];
                // TODO: Replace optimization for monochrome output
                [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
            }
            else
            {
                [currentTarget setCurrentlyReceivingMonochromeInput:NO];
                [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
            }
        }
        else
        {
            // The ignored target still gets rotation and the framebuffer,
            // but no size update and no render trigger.
            [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];
            [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
        }
    }
    
    // Release our hold so the framebuffer returns to the cache once targets finish.
    [outputFramebuffer unlock];
    outputFramebuffer = nil;
    
    // Pass 2: kick off rendering in every non-ignored, enabled target.
    for (id<GPUImageInput> currentTarget in targets)
    {
        if (![currentTarget enabled])
        {
            continue;
        }
        
        NSInteger indexOfObject = [targets indexOfObject:currentTarget];
        NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
        
        if (currentTarget != self.targetToIgnoreForUpdates)
        {
            [currentTarget newFrameReadyAtTime:currentTime atIndex:textureIndexOfTarget];
        }
    }
}
/// Core per-frame processing. Selects the YUV->RGB matrix from the frame's
/// color attachment, uploads the frame (fast texture-cache YUV path when
/// available, glTexImage2D BGRA path otherwise), converts to RGB, and fans
/// the result out to all targets.
///
/// Fix over the previous version: the cached-dimension check used `&&`, so
/// imageBufferWidth/Height were NOT refreshed when only one dimension
/// changed, leaving convertYUVToRGBOutput with a stale framebuffer size.
/// It now uses `||`. The two byte-identical deviceSupportsRedTextures
/// branches per plane were also collapsed into single calls.
- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
{
    if (capturePaused)
    {
        return;
    }
    
    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
    CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
    int bufferWidth = (int) CVPixelBufferGetWidth(cameraFrame);
    int bufferHeight = (int) CVPixelBufferGetHeight(cameraFrame);
    
    // Pick the conversion matrix from the frame's YCbCr matrix attachment;
    // BT.601 (video or full range) is the fallback when it is absent or 601.
    CFTypeRef colorAttachments = CVBufferGetAttachment(cameraFrame, kCVImageBufferYCbCrMatrixKey, NULL);
    if ((colorAttachments != NULL) &&
        (CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) != kCFCompareEqualTo))
    {
        _preferredConversion = kColorConversion709;
    }
    else
    {
        _preferredConversion = isFullYUVRange ? kColorConversion601FullRange : kColorConversion601;
    }
    
    CMTime currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    
    [GPUImageContext useImageProcessingContext];
    
    if ([GPUImageContext supportsFastTextureUpload] && captureAsYUV)
    {
        CVOpenGLESTextureRef luminanceTextureRef = NULL;
        CVOpenGLESTextureRef chrominanceTextureRef = NULL;
        
        if (CVPixelBufferGetPlaneCount(cameraFrame) > 0) // Check for YUV planar inputs to do RGB conversion
        {
            CVPixelBufferLockBaseAddress(cameraFrame, 0);
            
            // BUGFIX: was `&&`, which skipped the update when only ONE
            // dimension changed and left stale sizes for the conversion pass.
            if ( (imageBufferWidth != bufferWidth) || (imageBufferHeight != bufferHeight) )
            {
                imageBufferWidth = bufferWidth;
                imageBufferHeight = bufferHeight;
            }
            
            CVReturn err;
            // Y-plane, bound to texture unit 4. (The red-texture and
            // luminance code paths were identical, so no branch is needed.)
            glActiveTexture(GL_TEXTURE4);
            err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
            if (err)
            {
                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }
            
            luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);
            glBindTexture(GL_TEXTURE_2D, luminanceTexture);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
            
            // UV-plane at half resolution, bound to texture unit 5.
            glActiveTexture(GL_TEXTURE5);
            err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
            if (err)
            {
                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }
            
            chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);
            glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
            
            [self convertYUVToRGBOutput];
            
            int rotatedImageBufferWidth = bufferWidth, rotatedImageBufferHeight = bufferHeight;
            if (GPUImageRotationSwapsWidthAndHeight(internalRotation))
            {
                rotatedImageBufferWidth = bufferHeight;
                rotatedImageBufferHeight = bufferWidth;
            }
            
            [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:rotatedImageBufferWidth height:rotatedImageBufferHeight time:currentTime];
            
            CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
            CFRelease(luminanceTextureRef);
            CFRelease(chrominanceTextureRef);
        }
        else
        {
            // TODO: Mesh the non-planar fast-upload path with the output framebuffer structure
        }
        
        if (_runBenchmark)
        {
            numberOfFramesCaptured++;
            if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)
            {
                CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
                totalFrameTimeDuringCapture += currentFrameTime;
                NSLog(@"Average frame time : %f ms", [self averageFrameDurationDuringCapture]);
                NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime);
            }
        }
    }
    else
    {
        CVPixelBufferLockBaseAddress(cameraFrame, 0);
        
        int bytesPerRow = (int) CVPixelBufferGetBytesPerRow(cameraFrame);
        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bytesPerRow / 4, bufferHeight) onlyTexture:YES];
        [outputFramebuffer activateFramebuffer];
        glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
        
        // Using BGRA extension to pull in video frame data directly
        // The use of bytesPerRow / 4 accounts for a display glitch present in preview video frames when using the photo preset on the camera
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bytesPerRow / 4, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));
        
        [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bytesPerRow / 4 height:bufferHeight time:currentTime];
        
        CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
        
        if (_runBenchmark)
        {
            numberOfFramesCaptured++;
            if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)
            {
                CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
                totalFrameTimeDuringCapture += currentFrameTime;
            }
        }
    }
}
/// Forwards captured audio to the encoding target (a nil target makes this
/// message send a no-op).
- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;
{
    [self.audioEncodingTarget processAudioBuffer:sampleBuffer];
}
/// Renders the luminance + chrominance textures through the YUV->RGB shader
/// into a fresh cache framebuffer, applying the current conversion matrix
/// and the internal rotation.
- (void)convertYUVToRGBOutput;
{
    [GPUImageContext setActiveShaderProgram:yuvConversionProgram];
    
    // Swap dimensions when the internal rotation is 90/270 degrees.
    int rotatedImageBufferWidth = imageBufferWidth, rotatedImageBufferHeight = imageBufferHeight;
    if (GPUImageRotationSwapsWidthAndHeight(internalRotation))
    {
        rotatedImageBufferWidth = imageBufferHeight;
        rotatedImageBufferHeight = imageBufferWidth;
    }
    
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(rotatedImageBufferWidth, rotatedImageBufferHeight) textureOptions:self.outputTextureOptions onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];
    
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    
    // Full-screen quad in normalized device coordinates.
    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
        1.0f, -1.0f,
        -1.0f,  1.0f,
        1.0f,  1.0f,
    };
    
    // Texture units 4/5 match the bindings made in processVideoSampleBuffer:.
    glActiveTexture(GL_TEXTURE4);
    glBindTexture(GL_TEXTURE_2D, luminanceTexture);
    glUniform1i(yuvConversionLuminanceTextureUniform, 4);
    
    glActiveTexture(GL_TEXTURE5);
    glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
    glUniform1i(yuvConversionChrominanceTextureUniform, 5);
    
    glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);
    
    glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
    glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [GPUImageFilter textureCoordinatesForRotation:internalRotation]);
    
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
- #pragma mark -
- #pragma mark Benchmarking
// Returns the average per-frame processing time, in milliseconds, over the
// frames captured since the last reset, excluding the first
// INITIALFRAMESTOIGNOREFORBENCHMARK warm-up frames.
//
// Guarded against being called before any frames beyond the warm-up period
// have been captured: the original expression divided by
// (numberOfFramesCaptured - INITIALFRAMESTOIGNOREFORBENCHMARK), which is zero
// or negative (or underflows, if the counter is unsigned) in that state.
// Returns 0.0 in that case instead.
- (CGFloat)averageFrameDurationDuringCapture;
{
    if (numberOfFramesCaptured <= INITIALFRAMESTOIGNOREFORBENCHMARK)
    {
        return 0.0;
    }

    return (totalFrameTimeDuringCapture / (CGFloat)(numberOfFramesCaptured - INITIALFRAMESTOIGNOREFORBENCHMARK)) * 1000.0;
}
// Clears the accumulated benchmark statistics (frame count and total frame
// time) so a fresh measurement run can begin.
- (void)resetBenchmarkAverage;
{
    totalFrameTimeDuringCapture = 0.0;
    numberOfFramesCaptured = 0;
}
- #pragma mark -
- #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
// AVCapture(Video|Audio)DataOutputSampleBufferDelegate callback. Ignores
// buffers once the session has stopped, routes audio buffers to the audio
// pipeline, and dispatches video buffers onto the video processing queue —
// dropping frames whenever the previous frame is still being rendered.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if (!self.captureSession.isRunning)
    {
        return;
    }
    else if (captureOutput == audioOutput)
    {
        [self processAudioSampleBuffer:sampleBuffer];
    }
    else
    {
        // Drop this frame if the previous one is still in flight, rather than
        // letting frames queue up behind a slow filter chain.
        if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0)
        {
            return;
        }

        // Keep the buffer alive across the asynchronous hop onto the video
        // processing queue; balanced by the CFRelease below.
        CFRetain(sampleBuffer);
        runAsynchronouslyOnVideoProcessingQueue(^{
            // Feature detection hook: notify the delegate before processing.
            // Checked with respondsToSelector: (rather than a bare nil check)
            // so a delegate that does not implement willOutputSampleBuffer:
            // cannot trigger an unrecognized-selector crash.
            if ([self.delegate respondsToSelector:@selector(willOutputSampleBuffer:)])
            {
                [self.delegate willOutputSampleBuffer:sampleBuffer];
            }

            [self processVideoSampleBuffer:sampleBuffer];

            CFRelease(sampleBuffer);
            dispatch_semaphore_signal(frameRenderingSemaphore);
        });
    }
}
- #pragma mark -
- #pragma mark Accessors
// Installs or clears the movie writer that receives captured audio, adding or
// tearing down the session's audio inputs/outputs to match.
- (void)setAudioEncodingTarget:(GPUImageMovieWriter *)newValue;
{
    if (newValue == nil)
    {
        // Only remove audio inputs/outputs if this class added them when a
        // previous encoding target was installed.
        if (addedAudioInputsDueToEncodingTarget)
        {
            [self removeAudioInputsAndOutputs];
            addedAudioInputsDueToEncodingTarget = NO;
        }
    }
    else
    {
        // Lazily attach audio inputs/outputs, and remember whether this call
        // added them so they can be removed symmetrically later.
        addedAudioInputsDueToEncodingTarget |= [self addAudioInputsAndOutputs];
    }

    [super setAudioEncodingTarget:newValue];
}
// Maps a requested interface orientation to the rotation mode needed for
// frames from the given camera position, optionally horizontally mirrored.
// The original code duplicated these four lookup tables verbatim between the
// YUV (internalRotation) and BGRA (outputRotation) paths; they are identical,
// so both paths now share this single function.
static GPUImageRotationMode GPUImageRotationForCameraOrientation(UIInterfaceOrientation orientation, BOOL isBackCamera, BOOL horizontallyMirror)
{
    if (isBackCamera)
    {
        if (horizontallyMirror)
        {
            switch (orientation)
            {
                case UIInterfaceOrientationPortrait:           return kGPUImageRotateRightFlipVertical;
                case UIInterfaceOrientationPortraitUpsideDown: return kGPUImageRotate180;
                case UIInterfaceOrientationLandscapeLeft:      return kGPUImageFlipHorizonal;
                case UIInterfaceOrientationLandscapeRight:     return kGPUImageFlipVertical;
                default:                                       return kGPUImageNoRotation;
            }
        }

        switch (orientation)
        {
            case UIInterfaceOrientationPortrait:           return kGPUImageRotateRight;
            case UIInterfaceOrientationPortraitUpsideDown: return kGPUImageRotateLeft;
            case UIInterfaceOrientationLandscapeLeft:      return kGPUImageRotate180;
            case UIInterfaceOrientationLandscapeRight:     return kGPUImageNoRotation;
            default:                                       return kGPUImageNoRotation;
        }
    }

    if (horizontallyMirror)
    {
        switch (orientation)
        {
            case UIInterfaceOrientationPortrait:           return kGPUImageRotateRightFlipVertical;
            case UIInterfaceOrientationPortraitUpsideDown: return kGPUImageRotateRightFlipHorizontal;
            case UIInterfaceOrientationLandscapeLeft:      return kGPUImageFlipHorizonal;
            case UIInterfaceOrientationLandscapeRight:     return kGPUImageFlipVertical;
            default:                                       return kGPUImageNoRotation;
        }
    }

    switch (orientation)
    {
        case UIInterfaceOrientationPortrait:           return kGPUImageRotateRight;
        case UIInterfaceOrientationPortraitUpsideDown: return kGPUImageRotateLeft;
        case UIInterfaceOrientationLandscapeLeft:      return kGPUImageNoRotation;
        case UIInterfaceOrientationLandscapeRight:     return kGPUImageRotate180;
        default:                                       return kGPUImageNoRotation;
    }
}

// Recomputes the rotation required to honor _outputImageOrientation and the
// mirroring flags for the active camera, then pushes the resulting output
// rotation to every current target. Runs synchronously on the video
// processing queue so it cannot race frame processing.
- (void)updateOrientationSendToTargets;
{
    runSynchronouslyOnVideoProcessingQueue(^{

        // From the iOS 5.0 release notes:
        // In previous iOS versions, the front-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeLeft and the back-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeRight.

        BOOL isBackCamera = ([self cameraPosition] == AVCaptureDevicePositionBack);
        BOOL mirror = isBackCamera ? _horizontallyMirrorRearFacingCamera : _horizontallyMirrorFrontFacingCamera;
        GPUImageRotationMode rotation = GPUImageRotationForCameraOrientation(_outputImageOrientation, isBackCamera, mirror);

        if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
        {
            // Fast-texture YUV path: the rotation is applied during YUV->RGB
            // conversion (internalRotation), so downstream targets receive
            // already-rotated frames and get no additional rotation.
            outputRotation = kGPUImageNoRotation;
            internalRotation = rotation;
        }
        else
        {
            outputRotation = rotation;
        }

        // Walk targets with a running index instead of calling indexOfObject:
        // on every iteration, which made this loop quadratic in the number of
        // targets.
        NSUInteger currentTargetIndex = 0;
        for (id<GPUImageInput> currentTarget in targets)
        {
            NSInteger textureIndex = [[targetTextureIndices objectAtIndex:currentTargetIndex] integerValue];
            [currentTarget setInputRotation:outputRotation atIndex:textureIndex];
            currentTargetIndex++;
        }
    });
}
// Stores the desired output orientation and immediately propagates the
// resulting rotation to all current targets.
- (void)setOutputImageOrientation:(UIInterfaceOrientation)orientation;
{
    _outputImageOrientation = orientation;
    [self updateOrientationSendToTargets];
}
// Toggles horizontal mirroring for the front-facing camera and pushes the
// updated rotation to all current targets.
- (void)setHorizontallyMirrorFrontFacingCamera:(BOOL)shouldMirror
{
    _horizontallyMirrorFrontFacingCamera = shouldMirror;
    [self updateOrientationSendToTargets];
}
// Toggles horizontal mirroring for the rear-facing camera and pushes the
// updated rotation to all current targets.
- (void)setHorizontallyMirrorRearFacingCamera:(BOOL)shouldMirror
{
    _horizontallyMirrorRearFacingCamera = shouldMirror;
    [self updateOrientationSendToTargets];
}
- @end
|