4

我有应用程序,正在使用 Zxing。当用户进入扫描仪页面时,zxing 会立即初始化,无需按下任何“扫描”按钮。好的,所以我遇到了与此类似的问题: AVCaptureSession - 停止运行 - 需要很长时间, 建议的解决方案正在工作。不幸的是,当用户执行一些快速操作时,例如转到设置页面(正在停止捕获)并快速返回扫描仪页面(正在开始捕获),应用程序崩溃。

我通过在我的自定义队列上用异步调度(dispatch_async)来停止相机,并在同一个队列上用同步调度(dispatch_sync)来初始化它,从而解决了这个问题。它工作正常,除非初始化在相机尚未停止之前就被调用。在这种情况下,初始化代码会等待相机停止块执行完毕,但后者需要大约 10 秒!(通常只需要大约 1-2 秒)。我不知道是怎么回事。

你有什么建议吗?

这是代码:

    - (void)stopCapture
{
#if HAS_AVFF
  // Stop feeding frames to the decoder and detach the preview from the
  // view hierarchy immediately, on the caller's thread.
  decoding = NO;
  [self.prevLayer removeFromSuperlayer];

    // -stopRunning blocks until the session has fully stopped, which can
    // take seconds, so the teardown runs asynchronously on the serial
    // capture queue instead of blocking the caller.
    dispatch_async(myQueue, ^{
        NSLog(@"stop capture");
        [captureSession stopRunning];
        NSLog(@"session Stopped");

        // Drain ALL inputs and outputs, not just the object at index 0,
        // so the session is fully dismantled however many were attached.
        while (captureSession.inputs.count > 0)
        {
            [captureSession removeInput:[captureSession.inputs objectAtIndex:0]];
        }
        while (captureSession.outputs.count > 0)
        {
            [captureSession removeOutput:[captureSession.outputs objectAtIndex:0]];
        }

        // Releasing via the property setters keeps ownership handling in
        // one place (MRC retain-property semantics).
        self.prevLayer = nil;
        self.captureSession = nil;

        NSLog(@"end stop capture");
    });

#endif
}

- (void)initCapture {
// NOTE(review): dispatch_sync blocks the caller until every previously
// queued block on myQueue (including a pending -stopCapture teardown and
// its slow -stopRunning call) has drained — this is the observed delay.
dispatch_sync(myQueue, ^{
    NSLog(@"init capture");
    #if HAS_AVFF
      // Build the capture input from the default video device, surfacing
      // the failure reason instead of discarding it with error:nil.
      NSError *inputError = nil;
      AVCaptureDeviceInput *captureInput =
        [AVCaptureDeviceInput deviceInputWithDevice:
                [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
                                              error:&inputError];
        if(!captureInput)
        {
            NSLog(@"ERROR - CaptureInputNotInitialized: %@", inputError);
        }

      AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
        if(!captureOutput)
        {
            NSLog(@"ERROR - CaptureOutputNotInitialized");
        }
      // Drop frames the delegate cannot keep up with rather than queueing them.
      captureOutput.alwaysDiscardsLateVideoFrames = YES;

      // Sample buffers are delivered on the main queue; keep per-frame
      // decode work light there.
      [captureOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

      // 32BGRA is the pixel format the ZXing decoder expects.
      NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
      NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
      NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
      [captureOutput setVideoSettings:videoSettings];

      self.captureSession = [[[AVCaptureSession alloc] init] autorelease];
      self.captureSession.sessionPreset = AVCaptureSessionPresetMedium; // 480x360 on a 4

        if([self.captureSession canAddInput:captureInput])
        {
            [self.captureSession addInput:captureInput];
        }
        else
        {
            NSLog(@"ERROR - cannot add input");
        }
        if([self.captureSession canAddOutput:captureOutput])
        {
            [self.captureSession addOutput:captureOutput];
        }
        else
        {
            NSLog(@"ERROR - cannot add output");
        }

      // Session retains the output; balance the +1 from alloc/init (MRC).
      [captureOutput release];

      // BUG FIX: the original code read
      //     if (!self.prevLayer) { [self.prevLayer release]; }
      // which (a) had the condition inverted and (b) manually released an
      // object still owned by the retaining property, so the setter's own
      // release of the old value became an over-release (crash on fast
      // stop/start navigation). The property setter below already releases
      // any previous layer; no manual release is needed.
      self.prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
      self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;

      // UIKit/Core Animation work must happen on the main thread; this
      // block is executing on myQueue's worker thread.
      dispatch_async(dispatch_get_main_queue(), ^{
          self.prevLayer.frame = self.view.bounds;
          [self.view.layer addSublayer: self.prevLayer];
      });

      [self.captureSession startRunning];
    #endif
    NSLog(@"end init");
    });

}

4

0 回答 0