I am writing a video capture app for iOS 4+. It works fine on iOS 5+ devices, but on iOS 4 the delegate method didFinishRecordingToOutputFileAtURL is never called after recording stops. I checked Apple's reference documentation, which says: "This method is always called for each recording request, even if no data is successfully written to the file."
Any suggestions?
Here is the complete code:
//
// HomeViewController.m
// MyAgingBooth
//
// Created by Mahmud on 29/10/11.
// Copyright 2011 __MyCompanyName__. All rights reserved.
//
#import "HomeViewController.h"
#import "Globals.h"
#import <MobileCoreServices/MobileCoreServices.h>
#import <MediaPlayer/MPMoviePlayerController.h>
#import <AVFoundation/AVFoundation.h>   // needed for the capture classes below (unless already imported in HomeViewController.h)
#import <AssetsLibrary/AssetsLibrary.h> // needed for ALAssetsLibrary (unless already imported in HomeViewController.h)
#import "SharedData.h"
#import "ResultViewController.h"
@implementation HomeViewController
@synthesize BtnFromCamera, PreviewLayer;
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
if (self) {
// Custom initialization
isPlaying=NO;
playerScore=0;
playerTurn=0;
}
return self;
}
- (void)dealloc
{
//[levelTimer release];
[super dealloc];
}
- (void)didReceiveMemoryWarning
{
// Releases the view if it doesn't have a superview.
[super didReceiveMemoryWarning];
// Release any cached data, images, etc that aren't in use.
}
#pragma mark - View lifecycle
- (void)viewDidLoad
{
[super viewDidLoad];
self.navigationController.navigationBar.barStyle = UIBarStyleBlackTranslucent;
playerName.text=[NSString stringWithFormat: @"Player %d", (playerTurn+1)];
//add a right bar button item proceed to next.
UIBarButtonItem *proceedButton = [[UIBarButtonItem alloc] initWithTitle:@"Proceed" style:UIBarButtonItemStylePlain target:self action:@selector(proceedToNext)];
//[proceedButton setImage:[UIImage imageNamed:@"info.png"]];
self.navigationItem.rightBarButtonItem=proceedButton;
[proceedButton release];
[BtnFromCamera setTitle:@"Start" forState:UIControlStateNormal];
//[self.BtnFromCamera setImage:camera forState:UIControlStateNormal];
//[self.BtnFromCamera setImage:camera forState:UIControlStateSelected];
NSArray *words=[NSArray arrayWithObjects:@"SAY: Girls",@"SAY: Shut up", @"SAY: Tiger",@"SAY: Absurd",@"SAY: Tonight", @"SAY: Amstardam", nil];
[word setText:[words objectAtIndex:arc4random()%6]];
[self initCaptureSession];
}
-(void) proceedToNext
{
self.title=@"Back";
ResultViewController *resultViewController= [[ResultViewController alloc] initWithNibName:@"ResultViewController" bundle:nil];
[self.navigationController pushViewController:resultViewController animated:YES];
[resultViewController release];
}
- (void)viewDidUnload
{
[super viewDidUnload];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
// Return YES for supported orientations
return (interfaceOrientation == UIInterfaceOrientationPortrait);
}
//Action handlers for the buttons
// take snap with camera
-(void) initCaptureSession
{
NSLog(@"Setting up capture session");
CaptureSession = [[AVCaptureSession alloc] init];
//----- ADD INPUTS -----
NSLog(@"Adding video input");
//ADD VIDEO INPUT
AVCaptureDevice *VideoDevice = [self frontFacingCameraIfAvailable ];
//[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (VideoDevice)
{
NSError *error;
VideoInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:VideoDevice error:&error];
if (!error)
{
if ([CaptureSession canAddInput:VideoInputDevice])
[CaptureSession addInput:VideoInputDevice];
else
NSLog(@"Couldn't add video input");
}
else
{
NSLog(@"Couldn't create video input");
}
}
else
{
NSLog(@"Couldn't create video capture device");
}
//ADD AUDIO INPUT
NSLog(@"Adding audio input");
AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error = nil;
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
if (audioInput)
{
[CaptureSession addInput:audioInput];
}
//----- ADD OUTPUTS -----
//ADD VIDEO PREVIEW LAYER
NSLog(@"Adding video preview layer");
[self setPreviewLayer:[[[AVCaptureVideoPreviewLayer alloc] initWithSession:CaptureSession] autorelease]];
PreviewLayer.orientation = AVCaptureVideoOrientationPortrait; //<<SET ORIENTATION. You can deliberately set this wrong to flip the image and may actually need to set it wrong to get the right image
[[self PreviewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill];
//ADD MOVIE FILE OUTPUT
NSLog(@"Adding movie file output");
MovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
Float64 TotalSeconds = 60; //Total seconds
int32_t preferredTimeScale = 30; //Frames per second
CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale); //<<SET MAX DURATION
MovieFileOutput.maxRecordedDuration = maxDuration;
MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024; //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME
if ([CaptureSession canAddOutput:MovieFileOutput])
[CaptureSession addOutput:MovieFileOutput];
AudioOutput = [[AVCaptureAudioDataOutput alloc] init];
if([CaptureSession canAddOutput:AudioOutput])
{
[CaptureSession addOutput:AudioOutput];
NSLog(@"AudioOutput addedd");
}
//SET THE CONNECTION PROPERTIES (output properties)
[self CameraSetOutputProperties]; //(We call a method as it also has to be done after switching cameras)
//----- SET THE IMAGE QUALITY / RESOLUTION -----
//Options:
// AVCaptureSessionPresetHigh - Highest recording quality (varies per device)
// AVCaptureSessionPresetMedium - Suitable for WiFi sharing (actual values may change)
// AVCaptureSessionPresetLow - Suitable for 3G sharing (actual values may change)
// AVCaptureSessionPreset640x480 - 640x480 VGA (check it's supported before setting it)
// AVCaptureSessionPreset1280x720 - 1280x720 720p HD (check it's supported before setting it)
// AVCaptureSessionPresetPhoto - Full photo resolution (not supported for video output)
NSLog(@"Setting image quality");
[CaptureSession setSessionPreset:AVCaptureSessionPresetMedium];
if ([CaptureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) //Check size-based configs are supported before setting them
[CaptureSession setSessionPreset:AVCaptureSessionPreset640x480];
//----- DISPLAY THE PREVIEW LAYER -----
//Display it full screen under our view controller's existing controls
NSLog(@"Display the preview layer");
CGRect layerRect = CGRectMake(10,44,300,290); //[[[self view] layer] bounds];
[PreviewLayer setBounds:layerRect];
[PreviewLayer setPosition:CGPointMake(CGRectGetMidX(layerRect),
CGRectGetMidY(layerRect))];
//[[[self view] layer] addSublayer:[[self CaptureManager] previewLayer]];
//We use this instead so it goes on a layer behind our UI controls (avoids us having to manually bring each control to the front):
UIView *CameraView = [[[UIView alloc] init] autorelease];
[[self view] addSubview:CameraView];
//[self.view sendSubviewToBack:CameraView];
[[CameraView layer] addSublayer:PreviewLayer];
//----- START THE CAPTURE SESSION RUNNING -----
[CaptureSession startRunning];
}
//********** CAMERA SET OUTPUT PROPERTIES **********
- (void) CameraSetOutputProperties
{
AVCaptureConnection *CaptureConnection=nil;
//SET THE CONNECTION PROPERTIES (output properties)
NSComparisonResult order = [[UIDevice currentDevice].systemVersion compare: @"5.0.0" options: NSNumericSearch];
if (order == NSOrderedSame || order == NSOrderedDescending) {
// OS version >= 5.0.0
CaptureConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if (CaptureConnection.supportsVideoMinFrameDuration)
CaptureConnection.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
if (CaptureConnection.supportsVideoMaxFrameDuration)
CaptureConnection.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
if (CaptureConnection.supportsVideoMinFrameDuration)
{
CMTimeShow(CaptureConnection.videoMinFrameDuration);
CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}
} else {
// OS version < 5.0.0
CaptureConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:[MovieFileOutput connections]];
}
//Set landscape (if required)
if ([CaptureConnection isVideoOrientationSupported])
{
AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeRight; //<<<<<SET VIDEO ORIENTATION IF LANDSCAPE
[CaptureConnection setVideoOrientation:orientation];
}
//Set frame rate (if required)
//CMTimeShow(CaptureConnection.videoMinFrameDuration);
//CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}
- (IBAction) StartVideo
{
if (!isPlaying) {
self.navigationItem.rightBarButtonItem.enabled=NO;
[BtnFromCamera setTitle:@"Stop" forState:UIControlStateNormal];
playerName.text=[NSString stringWithFormat:@"Player %d", playerTurn+1];
playerScore=0;
count=0;
isPlaying=YES;
//Create temporary URL to record to
NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath])
{
NSError *error;
if ([fileManager removeItemAtPath:outputPath error:&error] == NO)
{
//Error - handle if required
NSLog(@"file remove error");
}
}
[outputPath release];
//Start recording
[MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
[outputURL release];
//NSString *DestFilename = @ "output.mov";
//Set the file save to URL
/* NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];
[dateFormatter release];
NSURL* saveLocationURL = [[NSURL alloc] initFileURLWithPath:destinationPath];
[MovieFileOutput startRecordingToOutputFileURL:saveLocationURL recordingDelegate:self];
[saveLocationURL release]; */
levelTimer = [NSTimer scheduledTimerWithTimeInterval:0.05 target: self selector: @selector(levelTimerCallback:) userInfo: nil repeats: YES];
}
else
{
isPlaying=NO;
NSLog(@"STOP RECORDING");
[MovieFileOutput stopRecording];
[levelTimer invalidate];
[BtnFromCamera setTitle:@"Start" forState:UIControlStateNormal];
self.navigationItem.rightBarButtonItem.enabled=YES;
}
}
//********** DID FINISH RECORDING TO OUTPUT FILE AT URL **********
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections
error:(NSError *)error
{
NSLog(@"didFinishRecordingToOutputFileAtURL - enter");
BOOL RecordedSuccessfully = YES;
if ([error code] != noErr)
{
// A problem occurred: Find out if the recording was successful.
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value)
{
RecordedSuccessfully = [value boolValue];
}
}
if (RecordedSuccessfully)
{
//----- RECORDED SUCCESSFULLY -----
NSLog(@"didFinishRecordingToOutputFileAtURL - success");
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
{
[library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
completionBlock:^(NSURL *assetURL, NSError *error)
{
if (error)
{
NSLog(@"File save error");
}
else
{
playerScore=(playerScore/count);
NSDictionary *dict = [[NSDictionary alloc] initWithObjectsAndKeys:playerName.text, @"PlayerName", [NSNumber numberWithFloat:playerScore], @"Score", assetURL, @"VideoURL",nil];
SharedData *d=[SharedData sharedManager];
[d.PlayerStats addObject:dict];
[dict release];
playerName.text=[NSString stringWithFormat:@"Score %f", playerScore];
playerTurn++;
}
}];
}
else {
NSString *assetURL=[self copyFileToDocuments:outputFileURL];
if(assetURL!=nil)
{
playerScore=(playerScore/count);
NSDictionary *dict = [[NSDictionary alloc] initWithObjectsAndKeys:playerName.text, @"PlayerName", [NSNumber numberWithFloat:playerScore], @"Score",assetURL , @"VideoURL",nil];
SharedData *d=[SharedData sharedManager];
[d.PlayerStats addObject:dict];
[dict release];
playerName.text=[NSString stringWithFormat:@"Score %f", playerScore];
playerTurn++;
}
}
[library release];
}
}
- (NSString*) copyFileToDocuments:(NSURL *)fileURL
{
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];
[dateFormatter release];
NSError *error;
if (![[NSFileManager defaultManager] copyItemAtURL:fileURL toURL:[NSURL fileURLWithPath:destinationPath] error:&error]) {
NSLog(@"File save error %@", [error localizedDescription]);
return nil;
}
return destinationPath;
}
- (void)levelTimerCallback:(NSTimer *)timer {
AVCaptureConnection *audioConnection = [self connectionWithMediaType:AVMediaTypeAudio fromConnections:[MovieFileOutput connections]];
//return [audioConnection isActive];
for (AVCaptureAudioChannel *channel in audioConnection.audioChannels) {
float avg = channel.averagePowerLevel;
// float peak = channel.peakHoldLevel;
float vol=powf(10, avg)*1000;
NSLog(@"Power: %f",vol);
if (isPlaying && vol > 0) {
playerScore=playerScore+vol;
count=count+1;
}
}
}
- (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections
{
for ( AVCaptureConnection *connection in connections ) {
for ( AVCaptureInputPort *port in [connection inputPorts] ) {
if ( [[port mediaType] isEqual:mediaType] ) {
return connection;
}
}
}
return nil;
}
- (AVCaptureDevice *)frontFacingCameraIfAvailable
{
// look at all the video devices and get the first one that's on the front
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *captureDevice = nil;
for (AVCaptureDevice *device in videoDevices)
{
if (device.position == AVCaptureDevicePositionFront)
{
captureDevice = device;
break;
}
}
// couldn't find one on the front, so just get the default video device.
if ( ! captureDevice)
{
captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
return captureDevice;
}
@end
Answer 0 (score: 0)
Fixed the problem: I had accidentally added an extra audio output. Removing the following snippet worked for me (an AVCaptureMovieFileOutput apparently does not deliver its recording delegate callback when an AVCaptureAudioDataOutput is attached to the same session on iOS 4).
AudioOutput = [[AVCaptureAudioDataOutput alloc] init];
if([CaptureSession canAddOutput:AudioOutput])
{
[CaptureSession addOutput:AudioOutput];
NSLog(@"AudioOutput addedd");
}
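For reference, here is a minimal sketch of the resulting output setup, assuming the same CaptureSession and MovieFileOutput ivars and the connectionWithMediaType:fromConnections: helper from the code above. The audio level metering in levelTimerCallback: keeps working without the extra output, because it reads the audio channels from the movie file output's own connection:

//SKETCH ONLY (assumes the CaptureSession / MovieFileOutput ivars and the
//connectionWithMediaType:fromConnections: helper shown in the question's code)
- (void) addMovieFileOutputOnly
{
    MovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
    if ([CaptureSession canAddOutput:MovieFileOutput])
        [CaptureSession addOutput:MovieFileOutput];

    //No separate AVCaptureAudioDataOutput is added; the audio input is still written
    //to the movie file, and levels can still be read from the movie file output's connection:
    AVCaptureConnection *audioConnection = [self connectionWithMediaType:AVMediaTypeAudio
                                                          fromConnections:[MovieFileOutput connections]];
    for (AVCaptureAudioChannel *channel in audioConnection.audioChannels)
    {
        NSLog(@"average power: %f", channel.averagePowerLevel);
    }
}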