按钮点击在iOS中正面和背面切换相机

时间:2014-04-01 07:23:20

标签: objective-c facebook avcapturesession avcapturedevice

    // Lazily create and configure the AVCaptureSession the first time through;
    // subsequent calls are no-ops while _session exists.
    if (_session == nil)
    {
        NSLog(@"Starting up server");

        // Reset recording state before wiring up the session.
        self.isCapturing = NO;
        self.isPaused = NO;
        _currentFile = 0;
        _discont = NO;

// create the capture session; the video device is chosen below
        _session = [[AVCaptureSession alloc] init];

        // Selects the front or back camera and stores it in camaraSwitch.
        [self camaraSwitchMode];

        NSLog(@"kdhsfghdsfgs:::::%@",camaraSwitch);
        // NOTE(review): the error out-param is ignored; input (and the session
        // input) will silently be nil if the device cannot be opened.
        AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:camaraSwitch error:nil];
        [_session addInput:input];

        // audio input from default mic
        AVCaptureDevice* mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        AVCaptureDeviceInput* micinput = [AVCaptureDeviceInput deviceInputWithDevice:mic error:nil];
        [_session addInput:micinput];

// use self as the sample-buffer delegate

// create an output for YUV frames
    // Serial queue on which both video and audio sample buffers are delivered.
    _captureQueue = dispatch_queue_create("uk.co.gdcl.cameraengine.capture", DISPATCH_QUEUE_SERIAL);
    AVCaptureVideoDataOutput* videoout = [[AVCaptureVideoDataOutput alloc] init];
    [videoout setSampleBufferDelegate:self queue:_captureQueue];
    // Request bi-planar YUV 4:2:0 (video-range) pixel buffers.
    NSDictionary* setcapSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                    [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
                                    nil];
    videoout.videoSettings = setcapSettings;
    [_session addOutput:videoout];
    _videoConnection = [videoout connectionWithMediaType:AVMediaTypeVideo];
    // find the actual dimensions used so we can set up the encoder to the same.
    NSDictionary* actual = videoout.videoSettings;
    _cy = [[actual objectForKey:@"Height"] integerValue];
    _cx = [[actual objectForKey:@"Width"] integerValue];

    // Audio output shares the same delegate and serial queue as video.
    AVCaptureAudioDataOutput* audioout = [[AVCaptureAudioDataOutput alloc] init];
    [audioout setSampleBufferDelegate:self queue:_captureQueue];
    [_session addOutput:audioout];
    _audioConnection = [audioout connectionWithMediaType:AVMediaTypeAudio];

// for audio, we need the channel count and sample rate, but we can't get
// those from audioout.audioSettings on iOS, so

    // we need to wait for the first sample

    // start capture and a preview layer
    [_session startRunning];

    _preview = [AVCaptureVideoPreviewLayer layerWithSession:_session];
    _preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
}

正面和背面的camera模式

    // Toggles the capture camera between the front and back device.
    //
    // The chosen AVCaptureDevice is stored in the camaraSwitch ivar, which
    // startup uses to build the session's video input. _iscamaraSwitch tracks
    // which camera the NEXT call should pick: NO -> front, YES -> back.
    //
    // BUG FIX: the original set _iscamaraSwitch to YES in BOTH branches, so
    // after the first call every subsequent call selected the back camera and
    // the switch only ever worked once. The flag is now toggled each call so
    // the cameras alternate.
    - (void) camaraSwitchMode
{
    // Decide which position to select this time.
    AVCaptureDevicePosition wantedPosition =
        _iscamaraSwitch ? AVCaptureDevicePositionBack : AVCaptureDevicePositionFront;

    // Search the available video devices for one at the wanted position.
    AVCaptureDevice *cameraDevice = nil;
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo])
    {
        if (device.position == wantedPosition)
        {
            cameraDevice = device;
            break;
        }
    }

    // Fall back to the default camera if no device matched (e.g. hardware
    // with a single camera); the original left camaraSwitch nil in that case.
    if (cameraDevice == nil)
    {
        cameraDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    }

    camaraSwitch = cameraDevice;
    NSLog(@"%@", camaraSwitch);

    // Alternate front/back on each call instead of latching to YES.
    _iscamaraSwitch = !_iscamaraSwitch;
}

0 个答案:

没有答案