AVCaptureSession stopRunning 需要很长时间

时间:2013-07-05 10:43:17

标签: ios objective-c zxing avcapturesession

我有一个使用 ZXing 的应用。当用户进入扫描页面时,ZXing 会立即初始化,无需按任何“扫描”按钮。我遇到的问题与此类似:AVCaptureSession - Stop Running - take a long long time。那里建议的解决方案是有效的。不幸的是,当用户进行一些快速操作时,例如转到设置页面(此时正在停止捕获)然后快速返回扫描页面(此时正在开始捕获),应用程序会崩溃。

我通过在自定义队列上用异步派发(dispatch_async)停止摄像头、并在同一队列上用同步派发(dispatch_sync)初始化摄像头来解决这个问题。它工作正常,除非初始化在摄像头尚未停止完成之前就被调用。在这种情况下,初始化代码会等待停止摄像头的块执行完毕,但后者需要大约 10 秒!(通常只需要大约 1-2 秒)。我不知道是怎么回事。

你有什么建议吗?

这是代码:

    - (void)stopCapture
{
#if HAS_AVFF
    decoding = NO;

    // Detach the preview layer on the caller's thread (assumed main — the
    // layer hierarchy must only be mutated there; TODO confirm all callers).
    [self.prevLayer removeFromSuperlayer];

    // Tear the session down asynchronously on the serial capture queue so the
    // slow -stopRunning call never blocks the UI thread. -initCapture uses
    // dispatch_sync on the same queue, so a subsequent init waits for this
    // block to finish before reconfiguring the session.
    dispatch_async(myQueue, ^{
        NSLog(@"stop capture");
        [captureSession stopRunning];
        NSLog(@"session Stopped");

        // Remove ALL inputs and outputs, not just the one at index 0, so the
        // session is fully emptied even if more than one was ever attached.
        while (captureSession.inputs.count > 0)
        {
            [captureSession removeInput:[captureSession.inputs objectAtIndex:0]];
        }
        while (captureSession.outputs.count > 0)
        {
            [captureSession removeOutput:[captureSession.outputs objectAtIndex:0]];
        }

        // Dropping the properties releases the layer and session (MRC retain
        // setters) once this block — the last reference holder — completes.
        self.prevLayer = nil;
        self.captureSession = nil;

        NSLog(@"end stop capture");
    });

#endif
}

- (void)initCapture {
    // Synchronous dispatch: if a -stopCapture teardown block is still draining
    // on myQueue, this waits for it, guaranteeing stop/start ordering.
    // NOTE(review): this must never be called from a block already running on
    // myQueue (dispatch_sync onto the current queue deadlocks), and calling it
    // from the main thread will block the UI until any pending stop finishes.
    dispatch_sync(myQueue, ^{
        NSLog(@"init capture");
    #if HAS_AVFF
        // Capture the error instead of passing nil, so a failure can be
        // diagnosed (e.g. camera permission denied, no camera on simulator).
        NSError *inputError = nil;
        AVCaptureDeviceInput *captureInput =
            [AVCaptureDeviceInput deviceInputWithDevice:
                    [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
                                                  error:&inputError];
        if (!captureInput)
        {
            NSLog(@"ERROR - CaptureInputNotInitialized: %@", inputError);
        }

        AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
        if (!captureOutput)
        {
            NSLog(@"ERROR - CaptureOutputNotInitialized");
        }
        captureOutput.alwaysDiscardsLateVideoFrames = YES;

        [captureOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

        // Request BGRA frames (the pixel format the ZXing decoder consumes).
        NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
        NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
        NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
        [captureOutput setVideoSettings:videoSettings];

        self.captureSession = [[[AVCaptureSession alloc] init] autorelease];
        self.captureSession.sessionPreset = AVCaptureSessionPresetMedium; // 480x360 on a 4

        if ([self.captureSession canAddInput:captureInput])
        {
            [self.captureSession addInput:captureInput];
        }
        else
        {
            NSLog(@"ERROR - cannot add input");
        }
        if ([self.captureSession canAddOutput:captureOutput])
        {
            [self.captureSession addOutput:captureOutput];
        }
        else
        {
            NSLog(@"ERROR - cannot add output");
        }

        [captureOutput release];

        // BUG FIX: the old code read `if (!self.prevLayer) [self.prevLayer release];`
        // — an inverted condition that only ever messaged nil. No manual
        // release belongs here at all: assigning through the (retain) property
        // setter below releases any previous layer. (Flipping the condition
        // instead would have caused an over-release crash.)
        self.prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];

        // View/layer hierarchy mutation must happen on the main thread, not on
        // myQueue. Async (not sync) to avoid deadlocking callers that invoked
        // -initCapture from the main thread.
        dispatch_async(dispatch_get_main_queue(), ^{
            self.prevLayer.frame = self.view.bounds;
            self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
            [self.view.layer addSublayer:self.prevLayer];
        });

        [self.captureSession startRunning];
    #endif
        NSLog(@"end init");
    });

}

0 个答案:

没有答案