
I am generating a movie by rendering OpenGL textures. Some frames of the resulting movie look only partially rendered, because they show part of the previous frame. If I add an [NSThread sleepForTimeInterval:0.05]; call, the problem does not appear, but I cannot rely on that call.

Here is the result without [NSThread sleepForTimeInterval:0.05]:

Here is the result when [NSThread sleepForTimeInterval:0.05] is added:

The code I am using is:

dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^
             {                   
               //Video writer
               AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                             [NSURL fileURLWithPath:tempVideoPath]
                                                                      fileType:AVFileTypeQuickTimeMovie
                                                                         error:&error];
               NSParameterAssert(videoWriter);

               NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                              AVVideoCodecH264, AVVideoCodecKey,
                                              [NSNumber numberWithInt:MOVsize.width], AVVideoWidthKey,
                                              [NSNumber numberWithInt:MOVsize.height], AVVideoHeightKey,
                                              nil];
               AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
                                                       assetWriterInputWithMediaType:AVMediaTypeVideo
                                                       outputSettings:videoSettings];
               videoWriterInput.expectsMediaDataInRealTime = NO;

               NSParameterAssert(videoWriterInput);
               NSParameterAssert([videoWriter canAddInput:videoWriterInput]);

               [videoWriter addInput:videoWriterInput];

               NSDictionary *pixelAttributesDict = [NSDictionary dictionaryWithObjectsAndKeys:
                                                    [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                                    [NSNumber numberWithInt:1024], kCVPixelBufferWidthKey,
                                                    [NSNumber numberWithInt:768], kCVPixelBufferHeightKey,
                                                    nil];

               AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                                assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                                sourcePixelBufferAttributes:pixelAttributesDict];
               [videoWriter startWriting];
               [videoWriter startSessionAtSourceTime:kCMTimeZero];

               if ([EAGLContext currentContext] != glContext)
                 [EAGLContext setCurrentContext:glContext];

               [self createDataFBO:adaptor];

               for (int frame = 0; frame < samplesNumber; frame++) {

                 while (!videoWriterInput.readyForMoreMediaData)
                 {
                   NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
                   [[NSRunLoop currentRunLoop] runUntilDate:maxDate];
                 }

                 //glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);

                 //Render the current frame with OpenGL
                 [self renderFrameAt:frame];

                 CVReturn lockResult = CVPixelBufferLockBaseAddress(renderTarget, 0);
                 BOOL test = (lockResult == kCVReturnSuccess) && [adaptor appendPixelBuffer:renderTarget withPresentationTime:CMTimeMake(frame, kFps)];
                 if (!test) {
                   NSLog(@"append failed!");
                 }

                 // If I add [NSThread sleepForTimeInterval:0.05]; here, the frames render fine

                 CVPixelBufferUnlockBaseAddress(renderTarget, 0);

                 dispatch_async(dispatch_get_main_queue(), ^
                                {
                                  [self updateProgressBar];
                                });
               }

               [videoWriterInput markAsFinished];
               [videoWriter endSessionAtSourceTime:CMTimeMake(samplesNumber, kFps)];
               [videoWriter finishWritingWithCompletionHandler:^{

                 [self destroyDataFBO];

                 //End of movie generation
               }];
             });

Here is the code I use to create the texture cache:

    - (void)createDataFBO:(AVAssetWriterInputPixelBufferAdaptor *)adaptor
    {
      glActiveTexture(GL_TEXTURE1);
      glGenFramebuffers(1, &movieFramebuffer);
      glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);

    #if defined(__IPHONE_6_0)
      CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [EAGLContext currentContext], NULL, &coreVideoTextureCache);
    #else
      CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[EAGLContext currentContext], NULL, &coreVideoTextureCache);
    #endif

      if (err)
      {
        NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err);
      }

      // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/
      CVPixelBufferPoolCreatePixelBuffer (NULL, [adaptor pixelBufferPool], &renderTarget);

      size_t frameWidth = CVPixelBufferGetWidth(renderTarget);
      size_t frameHeight = CVPixelBufferGetHeight(renderTarget);

      CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault, coreVideoTextureCache, renderTarget,
                                                    NULL, // texture attributes
                                                    GL_TEXTURE_2D,
                                                    GL_RGBA, // opengl format
                                                    frameWidth,
                                                    frameHeight,
                                                    GL_BGRA, // native iOS format
                                                    GL_UNSIGNED_BYTE,
                                                    0,
                                                    &renderTexture);

      glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture));
      glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
      glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

      glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0);

      GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);

      NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
    }

1 Answer


Have you tried calling glFlush() after rendering?
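
For reference, a minimal sketch of where such a synchronization call could go in the loop from the question (renderFrameAt:, renderTarget, adaptor, and kFps are the question's own symbols). glFlush() only submits the pending GL commands to the GPU; glFinish() additionally blocks until they have executed, which is the stronger guarantee if the texture-backed pixel buffer is still being written when it is appended:

    [self renderFrameAt:frame];

    // Make sure the GL commands that draw this frame have completed before
    // the CPU reads the texture-backed pixel buffer. glFlush() submits the
    // commands; glFinish() also waits for them to finish executing.
    glFinish();

    CVReturn lockResult = CVPixelBufferLockBaseAddress(renderTarget, 0);
    BOOL test = (lockResult == kCVReturnSuccess) &&
                [adaptor appendPixelBuffer:renderTarget
                      withPresentationTime:CMTimeMake(frame, kFps)];
    if (!test) {
      NSLog(@"append failed!");
    }
    CVPixelBufferUnlockBaseAddress(renderTarget, 0);

Without the synchronization call, appendPixelBuffer: can read the buffer while the GPU is still rendering into it, which would explain why an arbitrary sleep hides the problem.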

answered 2013-07-26T14:28:07.770