#import "GPUImageContext.h"
#import "GPUImageFramebuffer.h"

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#else
// UIKit is unavailable on the Mac, so the UIImageOrientation values are
// redeclared here (matching UIKit's declaration) to keep the orientation-aware
// image-capture API below compiling on both platforms.
// For now, just redefine this on the Mac
typedef NS_ENUM(NSInteger, UIImageOrientation) {
    UIImageOrientationUp,            // default orientation
    UIImageOrientationDown,          // 180 deg rotation
    UIImageOrientationLeft,          // 90 deg CCW
    UIImageOrientationRight,         // 90 deg CW
    UIImageOrientationUpMirrored,    // as above but image mirrored along other axis. horizontal flip
    UIImageOrientationDownMirrored,  // horizontal flip
    UIImageOrientationLeftMirrored,  // vertical flip
    UIImageOrientationRightMirrored, // vertical flip
};
#endif
/// Executes the block on the main queue. If the caller is already on the main
/// queue the block runs immediately, avoiding the deadlock that a plain
/// dispatch_sync to the current queue would cause.
void runOnMainQueueWithoutDeadlocking(void (^block)(void));

/// Executes the block synchronously on the shared video-processing queue
/// (see GPUImageContext); returns only after the block has completed.
void runSynchronouslyOnVideoProcessingQueue(void (^block)(void));

/// Executes the block asynchronously on the shared video-processing queue and
/// returns immediately.
void runAsynchronouslyOnVideoProcessingQueue(void (^block)(void));

/// Executes the block synchronously on the given context's queue.
/// NOTE(review): presumably deadlock-safe like the functions above when
/// already on that queue — confirm against the implementation.
void runSynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void));

/// Executes the block asynchronously on the given context's queue.
void runAsynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void));

/// Logs current memory availability, prefixed with the supplied tag, for
/// debugging GPU memory pressure.
void reportAvailableMemoryForGPUImage(NSString *tag);
@class GPUImageMovieWriter;

/** GPUImage's base source object

 Images or frames of video are uploaded from source objects, which are subclasses of GPUImageOutput. These include:

 - GPUImageVideoCamera (for live video from an iOS camera)
 - GPUImageStillCamera (for taking photos with the camera)
 - GPUImagePicture (for still images)
 - GPUImageMovie (for movies)

 Source objects upload still image frames to OpenGL ES as textures, then hand those textures off to the next objects in the processing chain.
 */
@interface GPUImageOutput : NSObject
{
    // Framebuffer holding this output's most recently rendered result; handed
    // to each target as its input.
    GPUImageFramebuffer *outputFramebuffer;

    // Parallel arrays: targets[i] receives this output's texture at the
    // texture index stored in targetTextureIndices[i].
    NSMutableArray *targets, *targetTextureIndices;

    // Current input size, the cached maximum output size, and any size forced
    // via -forceProcessingAtSize: / -forceProcessingAtSizeRespectingAspectRatio:.
    CGSize inputTextureSize, cachedMaximumOutputSize, forcedMaximumSize;

    // YES when a forced processing size overrides the natural input size.
    BOOL overrideInputSize;

    // YES when every attached target only needs monochrome data.
    BOOL allTargetsWantMonochromeData;
    // Set by -useNextFrameForImageCapture; the next rendered frame is retained
    // for the image-capture methods below.
    BOOL usingNextFrameForImageCapture;
}

// Whether output should be scaled smoothly when downsampled.
// NOTE(review): the exact filtering used is implementation detail not visible
// in this header — confirm in the .m before documenting further.
@property(readwrite, nonatomic) BOOL shouldSmoothlyScaleOutput;
// When YES, new-frame notifications are not propagated to this object's targets.
@property(readwrite, nonatomic) BOOL shouldIgnoreUpdatesToThisTarget;
// Movie writer that should also receive audio from this source, if any.
@property(readwrite, nonatomic, retain) GPUImageMovieWriter *audioEncodingTarget;
// A single target to skip when notifying targets of new frames.
@property(readwrite, nonatomic, unsafe_unretained) id<GPUImageInput> targetToIgnoreForUpdates;
// Invoked after each frame is processed, with this output and the frame time.
@property(nonatomic, copy) void(^frameProcessingCompletionBlock)(GPUImageOutput*, CMTime);
// When NO, this output stops emitting frames to its targets.
@property(nonatomic) BOOL enabled;
// Texture parameters (filtering, wrap mode, format) used for the output texture.
@property(readwrite, nonatomic) GPUTextureOptions outputTextureOptions;

/// @name Managing targets

/// Hands this output's framebuffer to the given target at the given texture index.
- (void)setInputFramebufferForTarget:(id<GPUImageInput>)target atIndex:(NSInteger)inputTextureIndex;
/// Returns the framebuffer currently used for output.
- (GPUImageFramebuffer *)framebufferForOutput;
/// Releases this output's reference to its output framebuffer.
- (void)removeOutputFramebuffer;
/// Informs all attached targets that a new output texture is available.
- (void)notifyTargetsAboutNewOutputTexture;

/** Returns an array of the current targets.
 */
- (NSArray*)targets;

/** Adds a target to receive notifications when new frames are available.

 The target will be asked for its next available texture.

 See [GPUImageInput newFrameReadyAtTime:]

 @param newTarget Target to be added
 */
- (void)addTarget:(id<GPUImageInput>)newTarget;

/** Adds a target to receive notifications when new frames are available.

 See [GPUImageInput newFrameReadyAtTime:]

 @param newTarget Target to be added
 @param textureLocation Texture index on the target that this output should feed.
 */
- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;

/** Removes a target. The target will no longer receive notifications when new frames are available.

 @param targetToRemove Target to be removed
 */
- (void)removeTarget:(id<GPUImageInput>)targetToRemove;

/** Removes all targets.
 */
- (void)removeAllTargets;

/// @name Manage the output texture

/// Forces processing at the given size, overriding the natural input size.
- (void)forceProcessingAtSize:(CGSize)frameSize;
/// Like -forceProcessingAtSize:, but preserves the input's aspect ratio.
- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize;

/// @name Still image processing

/// Marks the next rendered frame for capture by the image methods below.
/// Must be called before -processImage / video processing, or those methods return nil.
- (void)useNextFrameForImageCapture;
/// Returns a new CGImage of the most recently processed frame. Caller owns the returned image.
- (CGImageRef)newCGImageFromCurrentlyProcessedOutput;
/// Runs the given CGImage through this output's processing and returns the result.
/// Caller owns the returned image.
- (CGImageRef)newCGImageByFilteringCGImage:(CGImageRef)imageToFilter;

// Platform-specific image output methods
// If you're trying to use these methods, remember that you need to set -useNextFrameForImageCapture before running -processImage or running video and calling any of these methods, or you will get a nil image
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- (UIImage *)imageFromCurrentFramebuffer;
- (UIImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;
- (UIImage *)imageByFilteringImage:(UIImage *)imageToFilter;
- (CGImageRef)newCGImageByFilteringImage:(UIImage *)imageToFilter;
#else
- (NSImage *)imageFromCurrentFramebuffer;
- (NSImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;
- (NSImage *)imageByFilteringImage:(NSImage *)imageToFilter;
- (CGImageRef)newCGImageByFilteringImage:(NSImage *)imageToFilter;
#endif

/// YES when this output produces monochrome (single-channel) data.
- (BOOL)providesMonochromeOutput;

@end