为了通过 AVAssetWriter 将我的 opengl 渲染(我用于绿色筛选)保存到视频中,我一直在争取一个星期的休息时间。
我在下面创建了一个简单的装备来展示我在做什么。
我在苹果论坛上询问并收到了有关该过程的建议,此处也对此进行了描述:allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/并在 GPUImage 库中使用。
据我所知,我正在做同样的事情——我什至使用 GPUImage 中的方法来创建 FBO。
我已经验证绘图没问题(我在这段代码中也有绘图方法;它们被禁用),
FBO 创建正常并返回成功:glCheckFramebufferStatus
没有崩溃,没有异常,没有警告,写入器处于正常状态,所有纹理缓存,缓冲区等都创建没有错误。
但是我的视频输出仍然是黑色的。
如果我将 glClear 设置为白色,那么我会得到一个不是我请求的视频大小的白色矩形。
我从来没有把我的三角形渲染到我的视频中。
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import "TestViewController.h"
/////////////////////////////////////////////////////////////////
// This data type is used to store information for each vertex
typedef struct
{
GLKVector3 positionCoords;
}
SceneVertex;
/////////////////////////////////////////////////////////////////
// Define vertex data for a triangle to use in example
static const SceneVertex vertices[] =
{
{{-1.0f, -1.0f, 1.0}}, // lower left corner
{{1.0f, -1.0f, 0.5}}, // lower right corner
{{1.0f, 1.0f, 0.0}} // upper left corner
};
@interface TestViewController ()
@property(nonatomic, readwrite, assign) CVOpenGLESTextureCacheRef videoTextureCache;
@property(strong, nonatomic) GLKTextureInfo *background;
@property(nonatomic, strong) AVAssetWriter *assetWriter;
@property(nonatomic) BOOL isRecording;
@property(nonatomic, strong) AVAssetWriterInput *assetWriterVideoInput;
@property(nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferInput;
@property(nonatomic, assign) CFAbsoluteTime startTime;
@property(nonatomic, strong) GLKView *glkView;
@property(nonatomic, strong) GLKBaseEffect *screenGLEffect;
@property(nonatomic, strong) GLKBaseEffect *FBOGLEffect;
@property(nonatomic, strong) NSTimer *recordingTimer;
- (BOOL)isRetina;
@end
@implementation TestViewController
{
CVOpenGLESTextureCacheRef _writerTextureCache;
GLuint _writerRenderFrameBuffer;
GLuint vertexBufferID;
EAGLContext *_writerContext;
CVOpenGLESTextureRef _writerTexture;
}
- (GLKBaseEffect *)createBasicDrawingEffectInCurrentContext
{
GLKBaseEffect *basicGLEffect = [[GLKBaseEffect alloc] init];
basicGLEffect.useConstantColor = GL_TRUE;
basicGLEffect.constantColor = GLKVector4Make(
.5f, // Red
1.0f, // Green
.5f, // Blue
1.0f);// Alpha
// Set the background color stored in the current context
glClearColor(0.0f, 0.0f, 0.0f, 1.0f); // background color
// Generate, bind, and initialize contents of a buffer to be
// stored in GPU memory
glGenBuffers(1, // STEP 1
&vertexBufferID);
glBindBuffer(GL_ARRAY_BUFFER, // STEP 2
vertexBufferID);
glBufferData( // STEP 3
GL_ARRAY_BUFFER, // Initialize buffer contents
sizeof(vertices), // Number of bytes to copy
vertices, // Address of bytes to copy
GL_STATIC_DRAW); // Hint: cache in GPU memory
return basicGLEffect;
}
/////////////////////////////////////////////////////////////////
//
- (void)viewDidUnload
{
[super viewDidUnload];
// Make the view's context current
GLKView *view = (GLKView *) self.view;
[EAGLContext setCurrentContext:view.context];
// Stop using the context created in -viewDidLoad
((GLKView *) self.view).context = nil;
[EAGLContext setCurrentContext:nil];
//////////////////////////////////////////////////////////////
#pragma mark AVWriter 设置 ////////////////////////////////////// ////////////////
- (NSString *)tempFilePath
{
return [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/output2.m4v"];
}
- (void)removeTempFile
{
NSString *path = [self tempFilePath];
NSFileManager *fileManager = [NSFileManager defaultManager];
BOOL exists = [fileManager fileExistsAtPath:path];
NSLog(@">>>remove %@ Exists %d", path, exists);
NSError *error;
unlink([path UTF8String]);
NSLog(@">>>AFTER REMOVE %@ Exists %d %@", path, exists, error);
}
- (void)createWriter
{
//My setup code is based heavily on the GPUImage project, https://github.com/BradLarson/GPUImage so some of these dictionary names and structure are similar to the code from that project - I recommend you check it out if you are interested in Video filtering/recording
[self removeTempFile];
NSError *error;
self.assetWriter = [[AVAssetWriter alloc]
initWithURL:[NSURL fileURLWithPath:[self tempFilePath]]
fileType:AVFileTypeQuickTimeMovie
error:&error];
if (error)
{
NSLog(@"Couldn't create writer, %@", error.localizedDescription);
return;
}
NSDictionary *outputSettings = @{
AVVideoCodecKey : AVVideoCodecH264,
AVVideoWidthKey : @640,
AVVideoHeightKey : @480
};
self.assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:outputSettings];
self.assetWriterVideoInput.expectsMediaDataInRealTime = YES;
NSDictionary *sourcePixelBufferAttributesDictionary = @{(id) kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
(id) kCVPixelBufferWidthKey : @640,
(id) kCVPixelBufferHeightKey : @480};
self.assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.assetWriterVideoInput
sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
self.assetWriterVideoInput.transform = CGAffineTransformMakeScale(1, -1);
if ([_assetWriter canAddInput:self.assetWriterVideoInput])
{
[_assetWriter addInput:self.assetWriterVideoInput];
} else
{
NSLog(@"can't add video writer input %@", self.assetWriterVideoInput);
}
/*
_assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:nil];
if ([_assetWriter canAddInput:_assetWriterAudioInput]) {
[_assetWriter addInput:_assetWriterAudioInput];
_assetWriterAudioInput.expectsMediaDataInRealTime = YES;
}
*/
}
- (void)writeMovieToLibraryWithPath:(NSURL *)path
{
NSLog(@"writing %@ to library", path);
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:path
completionBlock:^(NSURL *assetURL, NSError *error) {
if (error)
{
NSLog(@"Error saving to library%@", [error localizedDescription]);
} else
{
NSLog(@"SAVED %@ to photo lib", path);
}
}];
}
//////////////////////////////////////////////////////////////
#pragma mark touch handling
//////////////////////////////////////////////////////////////
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
[super touchesEnded:touches withEvent:event];
if (self.isRecording)
{
[self finishRecording];
} else
{
[self startRecording];
}
}
//////////////////////////////////////////////////////////////
#pragma mark recording
//////////////////////////////////////////////////////////////
- (void)startRecording;
{
NSLog(@"started recording");
#warning debugging startrecording
// NSLog(@"bypassing usual write method");
// if (![assetWriter startWriting]){
// NSLog(@"writer not started %@, %d", assetWriter.error, assetWriter.status);
// }
self.startTime = CFAbsoluteTimeGetCurrent();
[self createWriter];
[self.assetWriter startWriting];
[self.assetWriter startSessionAtSourceTime:kCMTimeZero];
NSAssert([self.assetWriterPixelBufferInput pixelBufferPool], @"writerpixelbuffer input has no pools");
if (!_writerContext)
{
_writerContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
if (!_writerContext || ![EAGLContext setCurrentContext:_writerContext])
{
NSLog(@"Problem with OpenGL context.");
return;
}
}
[EAGLContext setCurrentContext:_writerContext];
NSLog(@"Creating FBO");
[self createDataFBOUsingGPUImagesMethod];
// [self createDataFBO];
self.isRecording = YES;
NSLog(@"Recording is started");
self.recordingTimer = [NSTimer scheduledTimerWithTimeInterval:1 / 30
target:self
selector:@selector(tick:)
userInfo:nil repeats:YES];
}
- (void)tick:(id)tick
{
[self drawBasicGLTOFBOForWriting];
}
- (void)finishRecording;
{
[self.recordingTimer invalidate];
self.recordingTimer = nil;
NSLog(@"finished recording");
if (self.assetWriter.status == AVAssetWriterStatusCompleted || !self.isRecording)
{
NSLog(@"already completed ingnoring");
return;
}
NSLog(@"Asset writer writing");
self.isRecording = NO;
// runOnMainQueueWithoutDeadlocking(^{
NSLog(@"markng inputs as finished");
//TODO - these cause an error
[self.assetWriterVideoInput markAsFinished];
__weak TestViewController *blockSelf = self;
[self.assetWriter finishWritingWithCompletionHandler:^{
if (self.assetWriter.error == nil)
{
NSLog(@"saved ok - writing to lib");
[self writeMovieToLibraryWithPath:[NSURL fileURLWithPath:[self tempFilePath]]];
} else
{
NSLog(@" did not save due to error %@", self.assetWriter.error);
}
}];
// });
}
- (void)drawBasicGLTOFBOForWriting
{
if (!self.isRecording)
{
return;
}
[EAGLContext setCurrentContext:_writerContext];
if (!self.FBOGLEffect)
{
self.FBOGLEffect = [self createBasicDrawingEffectInCurrentContext];
}
glDisable(GL_DEPTH_TEST);
glBindFramebuffer(GL_FRAMEBUFFER, _writerRenderFrameBuffer);
glClearColor(1, 1, 1, 1);
glClear(GL_COLOR_BUFFER_BIT);
[self.FBOGLEffect prepareToDraw];
// Clear Frame Buffer (erase previous drawing)
// Enable use of positions from bound vertex buffer
glEnableVertexAttribArray( // STEP 4
GLKVertexAttribPosition);
glVertexAttribPointer( // STEP 5
GLKVertexAttribPosition,
3, // three components per vertex
GL_FLOAT, // data is floating point
GL_FALSE, // no fixed point scaling
sizeof(SceneVertex), // no gaps in data
NULL); // NULL tells GPU to start at
// beginning of bound buffer
// Draw triangles using the first three vertices in the
// currently bound vertex buffer
glDrawArrays(GL_TRIANGLES, // STEP 6
0, // Start with first vertex in currently bound buffer
3); // Use three vertices from currently bound buffer
glFlush();
CFAbsoluteTime interval = (CFAbsoluteTimeGetCurrent() - self.startTime) * 1000;
CMTime currentTime = CMTimeMake((int) interval, 1000);
[self writeToFileWithTime:currentTime];
}
- (void)writeToFileWithTime:(CMTime)time
{
if (!self.assetWriterVideoInput.readyForMoreMediaData)
{
NSLog(@"Had to drop a video frame");
return;
}
if (kCVReturnSuccess == CVPixelBufferLockBaseAddress(_writerPixelBuffer,
kCVPixelBufferLock_ReadOnly))
{
uint8_t *pixels = (uint8_t *) CVPixelBufferGetBaseAddress(_writerPixelBuffer);
// process pixels how you like!
BOOL success = [self.assetWriterPixelBufferInput appendPixelBuffer:_writerPixelBuffer
withPresentationTime:time];
NSLog(@"wrote at %@ : %@", CMTimeCopyDescription(NULL, time), success ? @"YES" : @"NO");
CVPixelBufferUnlockBaseAddress(_writerPixelBuffer, kCVPixelBufferLock_ReadOnly);
}
}
//////////////////////////////////////////////////////////////
#pragma mark FBO setup
//////////////////////////////////////////////////////////////
- (void)createDataFBOUsingGPUImagesMethod;
{
glActiveTexture(GL_TEXTURE1);
glGenFramebuffers(1, &_writerRenderFrameBuffer);
glBindFramebuffer(GL_FRAMEBUFFER, _writerRenderFrameBuffer);
CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _writerContext, NULL, &_writerTextureCache);
if (err)
{
NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err);
}
// Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/
CVPixelBufferPoolCreatePixelBuffer(NULL, [self.assetWriterPixelBufferInput pixelBufferPool], &_writerPixelBuffer);
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _writerTextureCache, _writerPixelBuffer,
NULL, // texture attributes
GL_TEXTURE_2D,
GL_RGBA, // opengl format
480,
320,
GL_BGRA, // native iOS format
GL_UNSIGNED_BYTE,
0,
&_writerTexture);
if (err)
{
NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
glBindTexture(CVOpenGLESTextureGetTarget(_writerTexture), CVOpenGLESTextureGetName(_writerTexture));
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(_writerTexture), 0);
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
}
@end