
I am capturing video and converting it into a CGImage so I can do some processing on it. It works for about 10 seconds, then I get a memory warning and it crashes (usually it says that data formatters were temporarily unavailable). Can anyone help me figure out the problem?

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {

    // CONVERT CMSAMPLEBUFFER INTO A CGIMAGE
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef cgimage = CGBitmapContextCreateImage(newContext);
    UIImage *sourceImage = [UIImage imageWithCGImage:cgimage scale:1.0f orientation:UIImageOrientationLeftMirrored];
    CGImageRelease(cgimage);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // ***
    // Adding code after this point to do image transformation usually causes it to crash
    UIImage *rot = [self scaleAndRotateImage:sourceImage];
    self.detectImage = rot;
}

The code that transforms the image....

- (UIImage*)scaleAndRotateImage:(UIImage *)image{
    int kMaxResolution = 320; // Or whatever

    CGImageRef imgRef = image.CGImage;

    CGFloat width = CGImageGetWidth(imgRef);
    CGFloat height = CGImageGetHeight(imgRef);

    CGAffineTransform transform = CGAffineTransformIdentity;
    CGRect bounds = CGRectMake(0, 0, width, height);
    if (width > kMaxResolution || height > kMaxResolution) {
        CGFloat ratio = width/height;
        if (ratio > 1) {
            bounds.size.width = kMaxResolution;
            bounds.size.height = bounds.size.width / ratio;
        }
        else {
            bounds.size.height = kMaxResolution;
            bounds.size.width = bounds.size.height * ratio;
        }
    }

    CGFloat scaleRatio = bounds.size.width / width;
    CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
    CGFloat boundHeight;
    UIImageOrientation orient = image.imageOrientation;
    switch(orient) {

        case UIImageOrientationUp: //EXIF = 1
            transform = CGAffineTransformIdentity;
            break;

        case UIImageOrientationUpMirrored: //EXIF = 2
            transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
            transform = CGAffineTransformScale(transform, -1.0, 1.0);
            break;

        case UIImageOrientationDown: //EXIF = 3
            transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
            transform = CGAffineTransformRotate(transform, M_PI);
            break;

        case UIImageOrientationDownMirrored: //EXIF = 4
            transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
            transform = CGAffineTransformScale(transform, 1.0, -1.0);
            break;

        case UIImageOrientationLeftMirrored: //EXIF = 5
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.height);
            transform = CGAffineTransformScale(transform, -1.0, 1.0);
            transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
            break;

        case UIImageOrientationLeft: //EXIF = 6
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
            transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
            break;

        case UIImageOrientationRightMirrored: //EXIF = 7
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeScale(-1.0, 1.0);
            transform = CGAffineTransformRotate(transform, M_PI / 2.0);
            break;

        case UIImageOrientationRight: //EXIF = 8
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0);
            transform = CGAffineTransformRotate(transform, M_PI / 2.0);
            break;

        default:
            [NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];

    }

    UIGraphicsBeginImageContext(bounds.size);

    CGContextRef context = UIGraphicsGetCurrentContext();

    if (orient == UIImageOrientationRight || orient == UIImageOrientationLeft) {
        CGContextScaleCTM(context, -scaleRatio, scaleRatio);
        CGContextTranslateCTM(context, -height, 0);
    }
    else {
        CGContextScaleCTM(context, scaleRatio, -scaleRatio);
        CGContextTranslateCTM(context, 0, -height);
    }

    CGContextConcatCTM(context, transform);

    CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef);
    UIImage *imageCopy = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    //[self setRotatedImage:imageCopy];
    return imageCopy;
}

This method is just for background; here is how I set up the video output....

AVCaptureVideoDataOutput *videoOut = [[AVCaptureVideoDataOutput alloc] init];
[videoOut setAlwaysDiscardsLateVideoFrames:YES];
[videoOut setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; // BGRA is necessary for manual preview
dispatch_queue_t my_queue = dispatch_queue_create("com.example.subsystem.taskXYZ", NULL);
[videoOut setSampleBufferDelegate:self queue:my_queue];
if ([self.captureSession canAddOutput:videoOut]) [self.captureSession addOutput:videoOut];
else NSLog(@"Couldn't add video output");
[videoOut release];

2 Answers


I had a similar problem. What ended up happening was that the queue was filling up with unprocessed frames, because I wasn't processing them fast enough in my delegate object.

My solution was to do the following (once per processed frame):

proctr++;
if ((proctr % 20) == 0) {
  deferImageProcessing = true;
  dispatch_sync(queue, ^{
    [self queueFlushed];
  });
}

- (void)queueFlushed {
  deferImageProcessing = false;
}

Then, in the actual image-processing code:

- (void)captureOutput:(AVCaptureOutput *)captureOutput
     didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
     fromConnection:(AVCaptureConnection *)connection
{ 
  if (deferImageProcessing)
    return;
  // do whatever else I'm doing...
}

Essentially, we periodically pause image processing until the queue has emptied.
I hope this is useful.
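
Putting the two snippets together, a sketch of the full delegate might look like the following. The processingQueue ivar and the imageFromSampleBuffer: / processImageAsync: helpers are assumptions about how the heavy work is farmed out; they are not from the original code. The flush block is dispatched to that processing queue rather than the delegate queue, since calling dispatch_sync on the queue you are currently running on would deadlock.

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
    fromConnection:(AVCaptureConnection *)connection
{
    if (deferImageProcessing)
        return;  // skip frames while the backlog drains

    // Convert the sample buffer and hand the heavy work to a separate
    // serial queue (hypothetical helpers; not from the original answer).
    UIImage *sourceImage = [self imageFromSampleBuffer:sampleBuffer];
    [self processImageAsync:sourceImage];  // assumed to dispatch_async onto processingQueue

    proctr++;
    if ((proctr % 20) == 0) {
        deferImageProcessing = true;
        // This block runs only after everything already queued on
        // processingQueue has finished, so the flag is cleared once
        // the backlog is gone.
        dispatch_sync(processingQueue, ^{
            [self queueFlushed];
        });
    }
}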

answered 2010-09-30 at 01:12

在设置视频输出时,不会释放新创建的调度队列。你可以用

dispatch_release(queue);

but I don't believe that setup code is called very often, so the leak is probably coming from somewhere else. I've gone through your code several times and couldn't find any other culprit...
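
If you do want to balance the create with a release, one place to do it (a sketch, assuming manual reference counting as in the rest of the question, and a hypothetical videoQueue ivar that holds the queue created during setup) is dealloc:

// Sketch only: pre-ARC, with the queue stored in an ivar at setup time, e.g.
//     videoQueue = dispatch_queue_create("com.example.subsystem.taskXYZ", NULL);
//     [videoOut setSampleBufferDelegate:self queue:videoQueue];
- (void)dealloc {
    if (videoQueue) {
        dispatch_release(videoQueue);
        videoQueue = NULL;
    }
    [super dealloc];
}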

Have you tried searching for leaks with the Leaks instrument in Instruments?

answered 2010-08-04 at 08:23