I'm developing an application that uses OpenGL video capture inside a UIViewController.
The first time this controller is initialized, I get this message in the console:
wait_fences: failed to receive reply: 10004003
The message appears right after the viewController is initialized.
After that, if I switch to another controller and then start this controller again, it crashes with EXC_BAD_ACCESS after starting ViewController 2/4.
The asm output doesn't help much, beyond showing that the crash happens inside memmove, i.e. in the middle of a memory copy:
0x32f56a34: bne 0x32f56ad4 ; memmove$VARIANT$CortexA9 + 276
0x32f56a38: subs r2, r2, #60
0x32f56a3c: blo 0x32f56a84 ; memmove$VARIANT$CortexA9 + 196
0x32f56a40: tst r0, #28
0x32f56a44: beq 0x32f56a5c ; memmove$VARIANT$CortexA9 + 156
0x32f56a48: ldr r3, [r1], #4
0x32f56a4c: subs r2, r2, #4
0x32f56a50: str r3, [r0], #4
0x32f56a54: bhs 0x32f56a40 ; memmove$VARIANT$CortexA9 + 128
0x32f56a58: b 0x32f56a84 ; memmove$VARIANT$CortexA9 + 196
0x32f56a5c: push {r5, r6, r8, r10}
0x32f56a60: ldm r1!, {r3, r4, r5, r6, r8, r9, r10, r12}
0x32f56a64: subs r2, r2, #64
Some relevant parts of the source code:
- (id)init
{
    self = [super init];
    if (self)
    {
        ADLog(@"AR_ogl_ViewController init");
        animating = FALSE;

        EAGLContext *aContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
        if (!aContext)
        {
            aContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
        }
        if (!aContext)
        {
            ADLog(@"Failed to create ES context");
        }
        else if (![EAGLContext setCurrentContext:aContext])
        {
            ADLog(@"Failed to set ES context current");
        }
        self.context = aContext;
        ADSafeRelease(aContext);

        [(AR_EAGLView *)self.view setContext:context];
        [(AR_EAGLView *)self.view setFramebuffer];

        [self createVBO];

        if ([context API] == kEAGLRenderingAPIOpenGLES2)
            [self loadShaders];

        animating = FALSE;
        displayLinkSupported = FALSE;
        animationFrameInterval = 1;
        displayLink = nil;
        animationTimer = nil;

        // Use of CADisplayLink requires iOS version 3.1 or greater.
        // The NSTimer object is used as fallback when it isn't available.
        /* NSString *reqSysVer = @"3.1";
           NSString *currSysVer = [[UIDevice currentDevice] systemVersion];
           if ([currSysVer compare:reqSysVer options:NSNumericSearch] != NSOrderedAscending) */
        displayLinkSupported = TRUE;

        // start capture
        [self setupCaptureSession];

        // initialize
        frameData = 0;
        pTexData = (GLuint *)malloc(TEX_SIZE * TEX_SIZE * sizeof(GLuint));
        incrementNeedsVideoFrame();

        float x = 0.0f;
        float y = 0.0f;
        float z = 1.0f;
        float angle = 90.0 * PI_OVER_180;
        afIdentity[0]  = 1 + (1 - cos(angle)) * (x*x - 1);
        afIdentity[1]  = -z * sin(angle) + (1 - cos(angle)) * x * y;
        afIdentity[2]  =  y * sin(angle) + (1 - cos(angle)) * x * z;
        afIdentity[3]  = 0;
        afIdentity[4]  =  z * sin(angle) + (1 - cos(angle)) * x * y;
        afIdentity[5]  = 1 + (1 - cos(angle)) * (y*y - 1);
        afIdentity[6]  = -x * sin(angle) + (1 - cos(angle)) * y * z;
        afIdentity[7]  = 0;
        afIdentity[8]  = -y * sin(angle) + (1 - cos(angle)) * x * z;
        afIdentity[9]  =  x * sin(angle) + (1 - cos(angle)) * y * z;
        afIdentity[10] = 1 + (1 - cos(angle)) * (z*z - 1);
        afIdentity[11] = 0;
        afIdentity[12] = 0;
        afIdentity[13] = 0;
        afIdentity[14] = 0;
        afIdentity[15] = 1;

        y = 1.0f;
        z = 0.0f;
        angle = 180.0 * PI_OVER_180;
        afIdentity2[0]  = 1 + (1 - cos(angle)) * (x*x - 1);
        afIdentity2[1]  = -z * sin(angle) + (1 - cos(angle)) * x * y;
        afIdentity2[2]  =  y * sin(angle) + (1 - cos(angle)) * x * z;
        afIdentity2[3]  = 0;
        afIdentity2[4]  =  z * sin(angle) + (1 - cos(angle)) * x * y;
        afIdentity2[5]  = 1 + (1 - cos(angle)) * (y*y - 1);
        afIdentity2[6]  = -x * sin(angle) + (1 - cos(angle)) * y * z;
        afIdentity2[7]  = 0;
        afIdentity2[8]  = -y * sin(angle) + (1 - cos(angle)) * x * z;
        afIdentity2[9]  =  x * sin(angle) + (1 - cos(angle)) * y * z;
        afIdentity2[10] = 1 + (1 - cos(angle)) * (z*z - 1);
        afIdentity2[11] = 0;
        afIdentity2[12] = 0;
        afIdentity2[13] = 0;
        afIdentity2[14] = 0;
        afIdentity2[15] = 1;
    }
    ADLog(@"AR_ogl_ViewController init end");
    return self;
}
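For reference, the two afIdentity blocks above are just the standard axis-angle rotation matrix R = I*cos(a) + sin(a)*[k]x + (1 - cos(a))*k*k^T written out element by element; an equivalent helper (only a sketch, assuming afIdentity and afIdentity2 are GLfloat[16]) would be:

// Sketch of a helper equivalent to the two matrix blocks above,
// for a unit rotation axis (x, y, z) and an angle in radians.
static void BuildAxisAngleMatrix(GLfloat *m, float x, float y, float z, float angle)
{
    float c = cosf(angle), s = sinf(angle), t = 1.0f - c;
    m[0]  = 1 + t*(x*x - 1); m[1]  = -z*s + t*x*y;    m[2]  =  y*s + t*x*z;    m[3]  = 0;
    m[4]  =  z*s + t*x*y;    m[5]  = 1 + t*(y*y - 1); m[6]  = -x*s + t*y*z;    m[7]  = 0;
    m[8]  = -y*s + t*x*z;    m[9]  =  x*s + t*y*z;    m[10] = 1 + t*(z*z - 1); m[11] = 0;
    m[12] = 0;               m[13] = 0;               m[14] = 0;               m[15] = 1;
}

// Usage matching the code above:
// BuildAxisAngleMatrix(afIdentity,  0.0f, 0.0f, 1.0f,  90.0f * PI_OVER_180);
// BuildAxisAngleMatrix(afIdentity2, 0.0f, 1.0f, 0.0f, 180.0f * PI_OVER_180);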
- (void)dealloc
{
    ADLog(@"dealloc");
    ADSafeRelease(session);
    //ADSafeRelease(input);
    //ADSafeRelease(output);
    if (program)
    {
        glDeleteProgram(program);
        program = 0;
    }
    if (m_ui32Vbo)
    {
        glDeleteBuffers(1, &m_ui32Vbo);
        m_ui32Vbo = 0;
    }
    if (frameData)
    {
        free(frameData);
        frameData = nil;
    }
    if (pTexData)
    {
        free(pTexData);
        pTexData = nil;
    }
    // Tear down context.
    if ([EAGLContext currentContext] == context)
        [EAGLContext setCurrentContext:nil];
    [context release];
    context = nil;
    [super dealloc];
}
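One thing I'm unsure about is the teardown order here: the capture callback on CaptureQueue could still be running while frameData/pTexData are freed, and the GL deletes run without guaranteeing that my context is current. A sketch of the order I suspect is needed (an assumption on my part, not my current code):

// Teardown order sketch: stop frame delivery before freeing the buffers
// the callback writes into, and make the GL context current before deleting
// GL objects.
[session stopRunning];                            // stop delivering frames
[output setSampleBufferDelegate:nil queue:NULL];  // detach the callback
ADSafeRelease(session);

if ([EAGLContext currentContext] != context)
    [EAGLContext setCurrentContext:context];      // GL deletes need a current context
if (program) { glDeleteProgram(program); program = 0; }

// Only free the CPU-side buffers once no callback can touch them.
if (frameData) { free(frameData); frameData = NULL; }
if (pTexData)  { free(pTexData);  pTexData  = NULL; }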
- (void)viewDidAppear:(BOOL)animated
{
    ADLog(@"viewDidAppear");
    [super viewDidAppear:animated];
    [self startAnimation];
}

- (void)viewDidDisappear:(BOOL)animated
{
    [super viewDidDisappear:animated];
    [self stopAnimation];
}

- (void)viewDidUnload
{
    [super viewDidUnload];
    if (program)
    {
        glDeleteProgram(program);
        program = 0;
    }
    // Tear down context.
    if ([EAGLContext currentContext] == context)
        [EAGLContext setCurrentContext:nil];
    self.context = nil;
}
- (void)startAnimation
{
    ADLog(@"startAnimation");
    if (displayLink)
    {
        [displayLink invalidate];
        displayLink = nil;
    }
    if (animating == FALSE && displayLink == nil)
    {
        ADLog(@"generating displayLink");
        displayLink = [NSClassFromString(@"CADisplayLink") displayLinkWithTarget:self selector:@selector(drawFrame)];
        [displayLink setFrameInterval:animationFrameInterval];
        [displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
        animating = TRUE;
    }
    ADLog(@"startAnimation end");
}

- (void)stopAnimation
{
    ADLog(@"stopAnimation");
    if (displayLink)
    {
        [displayLink invalidate];
        displayLink = nil;
        animating = FALSE;
    }
}
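Note that the NSTimer fallback mentioned in the comments in init is never actually wired up; if it were, I'd expect something roughly like this (a sketch, assuming a 60 Hz display):

// Hypothetical fallback path for when CADisplayLink is unavailable:
animationTimer = [NSTimer scheduledTimerWithTimeInterval:(1.0 / 60.0) * animationFrameInterval
                                                  target:self
                                                selector:@selector(drawFrame)
                                                userInfo:nil
                                                 repeats:TRUE];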
// Create and configure a capture session and start it running
- (void)setupCaptureSession
{
    NSError *error = nil;

    // Create the session
    session = [[AVCaptureSession alloc] init];

    // Configure the session to produce lower resolution video frames, if your
    // processing algorithm can cope. We'll specify medium quality for the
    // chosen device.
    // Low   : 192 x 144
    // Medium: 480 x 320
    // High  : 1280 x 720
    session.sessionPreset = AVCaptureSessionPresetMedium;

    // Find a suitable AVCaptureDevice
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // Create a device input with the device and add it to the session.
    input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input)
    {
        // Handle the error appropriately.
    }
    [session addInput:input];

    // Create a VideoDataOutput and add it to the session
    output = [[AVCaptureVideoDataOutput alloc] init];
    [session addOutput:output];

    // Configure your output.
    CaptureQueue = dispatch_queue_create("CaptureQueue", NULL);
    [output setSampleBufferDelegate:self queue:CaptureQueue];
    dispatch_release(CaptureQueue);

    // Specify the pixel format
    output.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                                       forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    output.alwaysDiscardsLateVideoFrames = NO;

    // If you wish to cap the frame rate to a known value, such as 15 fps, set
    // minFrameDuration.
    // [output setVideoMinFrameDuration:CMTimeMake(1, 20)];

    // Start the session running to start the flow of data
    [session startRunning];
}
@end
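The sample buffer delegate itself isn't shown above. For context, here is a sketch of the usual 32BGRA copy pattern for this kind of callback (not my exact code; pTexData and TEX_SIZE as declared above). A copy like this is the obvious candidate for the memmove in the crash trace:

// Sketch of a typical 32BGRA capture callback that copies the frame
// into a CPU-side texture buffer.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // If pTexData has been freed by dealloc while this queue is still
    // draining, a copy like this is exactly where an EXC_BAD_ACCESS in
    // memmove would surface.
    if (pTexData)
        memcpy(pTexData, baseAddress,
               MIN(bytesPerRow * height, TEX_SIZE * TEX_SIZE * sizeof(GLuint)));

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}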
Any suggestions on how to track down this crash?