8

I'm using CIDetector multiple times, as follows:

    -(NSArray *)detect:(UIImage *)inimage
    {
        UIImage *inputimage = inimage;
        UIImageOrientation exifOrientation = inimage.imageOrientation;
        // note: UIImageOrientation values (Up = 0, ...) are not the EXIF
        // orientation values (1-8) that CIDetectorImageOrientation expects,
        // so rotated images would need a mapping here
        NSNumber *orientation = [NSNumber numberWithInt:exifOrientation];

        NSDictionary *imageOptions = [NSDictionary dictionaryWithObject:orientation forKey:CIDetectorImageOrientation];
        CIImage* ciimage = [CIImage imageWithCGImage:inputimage.CGImage options:imageOptions];


        NSDictionary *detectorOptions = [NSDictionary dictionaryWithObject:orientation forKey:CIDetectorImageOrientation];

        NSArray* features = [self.detector featuresInImage:ciimage options:detectorOptions];

        if (features.count == 0)
        {
            PXLog(@"no face found");
        }

        ciimage = nil;
        NSMutableArray *returnArray = [NSMutableArray new];


        for(CIFaceFeature *feature in features)
        {
            CGRect rect = feature.bounds;
            // flip from Core Image's bottom-left origin to UIKit's top-left origin
            CGRect r = CGRectMake(rect.origin.x, inputimage.size.height - rect.origin.y - rect.size.height, rect.size.width, rect.size.height);

            FaceFeatures *ff = [[FaceFeatures alloc] initWithLeftEye:CGPointMake(feature.leftEyePosition.x, inputimage.size.height - feature.leftEyePosition.y)
                                                            rightEye:CGPointMake(feature.rightEyePosition.x, inputimage.size.height - feature.rightEyePosition.y)
                                                               mouth:CGPointMake(feature.mouthPosition.x, inputimage.size.height - feature.mouthPosition.y)];

            Face *ob = [[Face alloc] initFaceInRect:r withFaceFeatures:ff];


            [returnArray addObject:ob];
        }

        features = nil;
        return returnArray;
    }

    -(CIContext *)context
    {
        if (!_context) {
            _context = [CIContext contextWithOptions:nil];
        }
        return _context;
    }

    -(CIDetector *)detector
    {
        if (!_detector)
        {
            // "slow" selects high accuracy, anything else low
    #warning not checking for fast/slow detection operation
            NSString *str = @"fast"; //[SettingsFunctions retrieveFromUserDefaults:@"face_detection_accuracy"];

            if ([str isEqualToString:@"slow"])
            {
                //DDLogInfo(@"faceDetection: -I- Setting accuracy to high");
                _detector = [CIDetector detectorOfType:CIDetectorTypeFace context:nil
                                               options:[NSDictionary dictionaryWithObject:CIDetectorAccuracyHigh forKey:CIDetectorAccuracy]];
            } else {
                //DDLogInfo(@"faceDetection: -I- Setting accuracy to low");
                _detector = [CIDetector detectorOfType:CIDetectorTypeFace context:nil
                                               options:[NSDictionary dictionaryWithObject:CIDetectorAccuracyLow forKey:CIDetectorAccuracy]];
            }
        }
        return _detector;
    }

But after running into all kinds of memory problems, it looks, according to Instruments, like the array returned by `NSArray *features = [self.detector featuresInImage:ciimage options:detectorOptions];` is never being released.

Is there a memory leak somewhere in my code?



2 Answers

4

I had the same issue; it appears to be a bug in reusing a CIDetector (or possibly it's by design, for caching purposes).

I was able to work around it by not reusing the CIDetector: instead, I instantiate one as needed and then release it (or, in ARC terms, simply don't keep a reference to it) when detection is complete. There is some cost to this, but if you are doing the detection on a background thread as you said, that cost is probably worth it compared with unbounded memory growth.
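A minimal sketch of that workaround, restructured from the question's `detect:` method (ARC assumed; the accuracy option is hard-coded for brevity, and the `Face`/`FaceFeatures` conversion is elided):

    // Create a fresh detector per call instead of holding one in self.detector;
    // under ARC it is released when the method returns.
    - (NSArray *)detect:(UIImage *)inimage
    {
        CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace
                                                  context:nil
                                                  options:@{CIDetectorAccuracy : CIDetectorAccuracyLow}];
        CIImage *ciimage = [CIImage imageWithCGImage:inimage.CGImage];
        NSArray *features = [detector featuresInImage:ciimage];
        // ... convert features to Face objects exactly as in the question ...
        return features;
    }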

Perhaps a better solution, if you are detecting multiple images in a row, is to create one detector and use it for all of the images (or, if the growth gets too large, release it and create a new one every N images; you will have to experiment to find out what N should be).
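A rough sketch of that batching idea under ARC; the method name, the loop, and the `kImagesPerDetector` value are illustrative, not from the original answer:

    // Recreate the detector every N images to cap the memory it accumulates.
    static const NSUInteger kImagesPerDetector = 50; // tune N experimentally

    - (void)detectFacesInImages:(NSArray *)images
    {
        CIDetector *detector = nil;
        NSUInteger processed = 0;

        for (UIImage *image in images) {
            @autoreleasepool {
                if (!detector) {
                    detector = [CIDetector detectorOfType:CIDetectorTypeFace
                                                  context:nil
                                                  options:@{CIDetectorAccuracy : CIDetectorAccuracyLow}];
                }

                CIImage *ciimage = [CIImage imageWithCGImage:image.CGImage];
                NSArray *features = [detector featuresInImage:ciimage];
                NSLog(@"found %lu faces", (unsigned long)features.count);

                if (++processed % kImagesPerDetector == 0) {
                    detector = nil; // dropping the last strong reference releases it
                }
            }
        }
    }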

I have filed a Radar bug with Apple about this issue: http://openradar.appspot.com/radar?id=6645353252126720

answered 2014-02-13T20:01:55.873
1

I have fixed this issue: you should wrap the call to the detection method in an autorelease pool, like this in Swift:

    autoreleasepool {
        let result = self.detect(image: image)
        // do other things
    }
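
For the Objective-C code in the question, the equivalent wrapper is an `@autoreleasepool` block (the surrounding loop here is just illustrative):

    for (UIImage *image in images) {
        @autoreleasepool {
            NSArray *faces = [self detect:image];
            // do other things with faces
        }
    }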
answered 2016-11-29T08:10:21.137