I searched all over the web and couldn't find a complete GPUImageRawDataInput demo, so here is a simple usage example. Demo project:
#import "RawDataViewController.h"
#import "GPUImage.h"
@interface RawDataViewController ()
@property (nonatomic, strong) GPUImageRawDataInput *rawDataInput;
@property (nonatomic, strong) GPUImageRawDataOutput *rawDataOutput;
@property (nonatomic, strong) GPUImageBrightnessFilter *filter;
@property (nonatomic, strong) GPUImageView *filterView;
@end
@implementation RawDataViewController
- (void)viewDidLoad {
[super viewDidLoad];
self.filterView = [[GPUImageView alloc] initWithFrame:CGRectMake(0, 100, self.view.frame.size.width, 300)];
[self.view addSubview:self.filterView];
// 1. UIImage -> CGImage -> CFDataRef -> UInt8 * data
UIImage *image = [UIImage imageNamed:@"img1.jpg"];
CGImageRef newImageSource = [image CGImage];
CFDataRef dataFromImageDataProvider = CGDataProviderCopyData(CGImageGetDataProvider(newImageSource));
GLubyte* imageData = (GLubyte *)CFDataGetBytePtr(dataFromImageDataProvider);
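// Note: this demo assumes the decoded bitmap is tightly packed RGBA with no row
// padding, which happens to hold for this sample JPEG but is not guaranteed for
// every image. Also, image.size is in points; for assets with a scale factor the
// pixel dimensions would be CGImageGetWidth()/CGImageGetHeight() instead.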
// 2. UInt8 * data -> GPUImageRawDataInput
self.rawDataInput = [[GPUImageRawDataInput alloc] initWithBytes:imageData size:image.size pixelFormat:GPUPixelFormatRGBA];
self.filter = [[GPUImageBrightnessFilter alloc] init];
self.filter.brightness = 0.1;
[self.rawDataInput addTarget:self.filter];
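// For subsequent frames, the same rawDataInput can be reused: call
// -updateDataFromBytes:size: with the new buffer, then -processData again.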
// 3. Output to the GPUImageView
[self.filter addTarget:self.filterView];
// 4. Also output to a raw data output
self.rawDataOutput = [[GPUImageRawDataOutput alloc] initWithImageSize:image.size resultsInBGRAFormat:YES];
[self.filter addTarget:self.rawDataOutput];
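// Alternatively, rawDataOutput.newFrameAvailableBlock can be set to get a callback
// whenever a processed frame is ready, instead of reading synchronously below.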
// important
[self.filter useNextFrameForImageCapture];
[self.rawDataInput processData];
// 5. read data from GPUImageRawDataOutput
[self.rawDataOutput lockFramebufferForReading];
GLubyte *outputBytes = [self.rawDataOutput rawBytesForImage];
NSInteger bytesPerRow = [self.rawDataOutput bytesPerRowInOutput];
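// bytesPerRowInOutput can be larger than width * 4 because of row padding,
// so use it (rather than width * 4) when wrapping or walking the buffer.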
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, outputBytes, bytesPerRow * image.size.height, NULL);
CGImageRef cgImage = CGImageCreate(image.size.width, image.size.height, 8, 32, bytesPerRow, rgbColorSpace, kCGImageAlphaPremultipliedFirst|kCGBitmapByteOrder32Little, provider, NULL, true, kCGRenderingIntentDefault);
[self.rawDataOutput unlockFramebufferAfterReading];
// Set a breakpoint on the next line to inspect outImage
UIImage *outImage = [UIImage imageWithCGImage:cgImage];
NSLog(@"%@", outImage);
// Release the Core Foundation objects created above to avoid leaks
CGImageRelease(cgImage);
CGDataProviderRelease(provider);
CGColorSpaceRelease(rgbColorSpace);
CFRelease(dataFromImageDataProvider);
}
@end
Use case
A scenario where this example applies:
- Use ffmpeg to decode a frame of a video stream into RGBA data;
- Feed the data into GPUImageRawDataInput -> add a filter -> output to GPUImageRawDataOutput;
- Read the RGBA data back from GPUImageRawDataOutput and hand it to ffmpeg to encode into a new video file (a sketch of this loop follows below).
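A minimal sketch of that per-frame loop, assuming hypothetical decodeNextFrameRGBA() and encodeFrameBGRA() functions stand in for the ffmpeg decode/encode side; the GPUImage calls (updateDataFromBytes:size:, processData, rawBytesForImage, bytesPerRowInOutput) are the library's actual API:
// Per-frame filtering loop; decodeNextFrameRGBA()/encodeFrameBGRA() are
// hypothetical placeholders for the ffmpeg decode/encode side.
CGSize frameSize = CGSizeMake(1280, 720); // assumed video dimensions
GLubyte *frameBytes = decodeNextFrameRGBA(); // hypothetical: first decoded RGBA frame
GPUImageRawDataInput *input = [[GPUImageRawDataInput alloc] initWithBytes:frameBytes size:frameSize pixelFormat:GPUPixelFormatRGBA];
GPUImageBrightnessFilter *brightnessFilter = [[GPUImageBrightnessFilter alloc] init];
GPUImageRawDataOutput *output = [[GPUImageRawDataOutput alloc] initWithImageSize:frameSize resultsInBGRAFormat:YES];
[input addTarget:brightnessFilter];
[brightnessFilter addTarget:output];
while (frameBytes != NULL) {
    [input updateDataFromBytes:frameBytes size:frameSize]; // upload the new frame
    [input processData];                                   // run it through the filter chain
    [output lockFramebufferForReading];
    GLubyte *filtered = [output rawBytesForImage];
    NSUInteger stride = [output bytesPerRowInOutput];
    encodeFrameBGRA(filtered, stride);                     // hypothetical: encode with ffmpeg
    [output unlockFramebufferAfterReading];
    frameBytes = decodeNextFrameRGBA();                    // hypothetical: next decoded frame
}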
Notes:
- This whole pipeline can in fact be done with GPUImage alone, without ffmpeg; when cross-platform decoding/encoding plus filters is required, the filters can be applied to the video stream with OpenGL directly;
- To inspect a UIImage at a breakpoint in Xcode, select the variable in the debugger and use Quick Look (the eye icon) to preview it.