AVCaptureVideoDataOutput和AVCaptureMovieFileOutput同时?

时间:2014-02-02 20:43:16

标签: ios audio video avfoundation avassetwriter

这是一个similar question。我也有同样的问题。这是我的代码

- (void)viewDidLoad
{
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
    self.packagesBufferMutableArray = [NSMutableArray array];
    self.fps = 25;
    [self initDateFormatter];
//    [self setupCaptureSession];

    [self performSelector:@selector(stopWork) withObject:nil afterDelay:5];

}


- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

- (void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];

    self.isExtra = YES;

    [self.captureSession startRunning];
}

- (void)initDateFormatter {

    self.dateFormatter = [NSDateFormatter new];
    [_dateFormatter setDateFormat:@"yy-MM-dd--HH-mm-ss"];
}

- (NSString *)generateFilePathForMovie {

    return [NSString stringWithFormat:@"%@/%@.mov",
            [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"],
            [_dateFormatter stringFromDate:[NSDate date]]];
}

- (NSDictionary *)settingsForWriterInput {

    int bitRate = (300 + /*self.currentQuality*/5 * 90) * 1024;      //NORMAL 750 * 1024

    NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:bitRate], AVVideoAverageBitRateKey,
                                   nil];



    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:480], AVVideoWidthKey,
                                   [NSNumber numberWithInt:320], AVVideoHeightKey,
                                   codecSettings, AVVideoCompressionPropertiesKey,
                                   nil];


    return videoSettings;
}

- (AVAssetWriterInput *)createVideoWriterInput {

    NSDictionary *videoSettings = [self settingsForWriterInput];

    return [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                              outputSettings:videoSettings];
}

- (void)setupCaptureSession

{
    NSError *error = nil;

self.captureSession = [[AVCaptureSession alloc] init];
self.captureSession.sessionPreset = AVCaptureSessionPresetMedium;

AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

// Create a device input with the device and add it to the session.
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input)
{
    // Handling the error appropriately.
}
[self.captureSession addInput:input];

// Create a VideoDataOutput and add it to the session
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];

self.assetWriterInput =[self createVideoWriterInput];// [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
self.assetWriterInput.expectsMediaDataInRealTime = YES;
AVAssetWriter *assetWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:[self generateFilePathForMovie]] fileType:AVFileTypeMPEG4 error:&error];

[assetWriter addInput:self.assetWriterInput];


self.assetWriterPixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:self.assetWriterInput sourcePixelBufferAttributes:
                                                            [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA],kCVPixelBufferPixelFormatTypeKey, nil]];


[self.captureSession addOutput:output];

// Configure your output.
dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
[output setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);

// Specify the pixel format
output.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                                   forKey:(id)kCVPixelBufferPixelFormatTypeKey];

// If you wish to cap the frame rate to a known value, such as 15 fps, set
// minFrameDuration.
for (AVCaptureOutput* output in self.captureSession.outputs)
{

    if ([output isKindOfClass:[AVCaptureVideoDataOutput class]])
    {
        AVCaptureConnection* connection = [output connectionWithMediaType:AVMediaTypeVideo];

        CMTimeShow(connection.videoMinFrameDuration);
        CMTimeShow(connection.videoMaxFrameDuration);

        CMTime frameDuration = CMTimeMake(1, self.fps);

        if (connection.isVideoMinFrameDurationSupported)
            connection.videoMinFrameDuration = frameDuration;
        if (connection.isVideoMaxFrameDurationSupported)
            connection.videoMaxFrameDuration = frameDuration;

        CMTimeShow(connection.videoMinFrameDuration);
        CMTimeShow(connection.videoMaxFrameDuration);

    }
    else
    {
        AVCaptureConnection* connection = [output connectionWithMediaType:AVMediaTypeVideo];

        CMTimeShow(connection.videoMinFrameDuration);
        CMTimeShow(connection.videoMaxFrameDuration);

        if (connection.isVideoMinFrameDurationSupported)
            connection.videoMinFrameDuration = CMTimeMake(1, 20);
        if (connection.isVideoMaxFrameDurationSupported)
            connection.videoMaxFrameDuration = CMTimeMake(1, 20);

        CMTimeShow(connection.videoMinFrameDuration);
        CMTimeShow(connection.videoMaxFrameDuration);
    }
}


// Start the session running to start the flow of data
[assetWriter startWriting];
[assetWriter startSessionAtSourceTime:kCMTimeZero];
[self.captureSession startRunning];

// Assign session to an ivar.
}

- (NSMutableDictionary *)createEmptyPackage
{
    return [NSMutableDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInteger:0], @"framesCount", [NSMutableArray array], @"framesArray", nil];
}

- (void)updateCurrentPackageWithFrameImage:(UIImage *)frameImage
{
    if (self.currentPackageMutableDictionary == nil)
    {
        NSLog(@"new package with number %d", self.packagesBufferMutableArray.count);
        self.currentPackageMutableDictionary = [self createEmptyPackage];
    }
    NSInteger framesCount = [[self.currentPackageMutableDictionary objectForKey:@"framesCount"] integerValue];
    NSMutableArray *framesArray = [self.currentPackageMutableDictionary objectForKey:@"framesArray"];
    NSLog(@"added %d frame at current package", framesCount);
    framesCount ++;
    [framesArray addObject:frameImage];

    [self.currentPackageMutableDictionary setObject:[NSNumber numberWithInteger:framesCount] forKey:@"framesCount"];

    if (framesCount == self.fps)
    {
        [self.packagesBufferMutableArray addObject:[NSDictionary dictionaryWithDictionary:self.currentPackageMutableDictionary]];
        self.currentPackageMutableDictionary = nil;
        if ((self.packagesBufferMutableArray.count == 31) && !self.isExtra)
        {
            NSLog(@"remove old package");
            [self.packagesBufferMutableArray removeObjectAtIndex:0];
        }
    }


}

// Delegate routine that is called when a sample buffer was written
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // Create a UIImage from the sample buffer data
    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
//  UIImageWriteToSavedPhotosAlbum(image, self, @selector(image:didFinishSavingWithError:contextInfo:), nil);
    [self updateCurrentPackageWithFrameImage:image];

//    UIImageView *imageView = [[UIImageView alloc] initWithImage:image];
//    imageView.frame = CGRectMake(0, 0, image.size.width, image.size.height);
//    [self.view addSubview:imageView];


    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    // a very dense way to keep track of the time at which this frame
    // occurs relative to the output stream, but it's just an example!
    static int64_t frameNumber = 0;
    if(self.assetWriterInput.readyForMoreMediaData)
        [self.assetWriterPixelBufferAdaptor appendPixelBuffer:imageBuffer
                         withPresentationTime:CMTimeMake(frameNumber, 24)];
    frameNumber++;

}

- (void) image: (UIImage *) image
didFinishSavingWithError: (NSError *) error
   contextInfo: (void *) contextInfo
{
}

- (void)startWork
{

}

- (void)stopWork
{
    [self.captureSession stopRunning];
    [self.assetWriter finishWriting];
}

// Create a UIImage from sample buffer data
- (UIImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer
{
    // Get a CMSampleBuffer's Core Video image buffer for the media data
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Get the number of bytes per row for the pixel buffer
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);

    // Get the number of bytes per row for the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Create a bitmap graphics context with the sample buffer data
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    // Create a Quartz image from the pixel data in the bitmap graphics context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer,0);

    // Free up the context and color space
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // Create an image object from the Quartz image
    UIImage *image = [UIImage imageWithCGImage:quartzImage];

    // Release the Quartz image
    CGImageRelease(quartzImage);

    return (image);
}

所以,它创建了mov文件,但我无法与任何玩家一起运行。

QuickTime Player can not open the "14-02-02 - 21-29-11.mov», as the movie file format is not recognized. 

也许我必须设置一些其他参数或使用其他算法?

有人可以说出或举一个例子吗?

0 个答案:

没有答案