将文件从.mov格式转换为.wav格式时出现“标头格式不受支持”的错误,该问题只发生在iOS 11以下的设备中(如iOS 9、10)

时间:2017-11-23 07:52:15

标签: ios objective-c avfoundation ios10 avcapturesession

以下是我将.mov转换为.wav所使用的代码:

/// Step 1 of the .mov → .wav pipeline: re-export the source movie as an
/// MPEG-4 file at <Documents>/temp.mp4 using the medium-quality preset,
/// then hand the result to the next conversion step.
/// @param videoURL File URL of the source .mov recording.
- (void)mp4ForURL:(NSURL *)videoURL {
    // Wrap the source movie in an asset and ask which export presets can
    // handle it.
    AVURLAsset *avAsset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
    NSArray<NSString *> *compatiblePresets =
        [AVAssetExportSession exportPresetsCompatibleWithAsset:avAsset];

    // Bail out early when the medium-quality preset cannot process this asset.
    if (![compatiblePresets containsObject:AVAssetExportPresetMediumQuality]) {
        NSLog(@"Video file not supported!");
        return;
    }

    AVAssetExportSession *exportSession =
        [[AVAssetExportSession alloc] initWithAsset:avAsset
                                         presetName:AVAssetExportPresetMediumQuality];

    // Target path: <Documents>/temp.mp4. Remove any stale file first —
    // AVAssetExportSession fails if the output URL already exists.
    NSString *documentsDirectory =
        [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSString *myDocumentPath = [documentsDirectory stringByAppendingPathComponent:@"temp.mp4"];
    NSURL *outputURL = [NSURL fileURLWithPath:myDocumentPath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:myDocumentPath]) {
        [[NSFileManager defaultManager] removeItemAtPath:myDocumentPath error:nil];
    }

    exportSession.outputURL = outputURL;
    exportSession.outputFileType = AVFileTypeMPEG4;
    exportSession.shouldOptimizeForNetworkUse = YES;

    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        switch (exportSession.status) {
            case AVAssetExportSessionStatusFailed:
                // Log the underlying NSError instead of a bare message so the
                // actual failure reason is visible.
                NSLog(@"Export session failed: %@", exportSession.error);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Export canceled");
                break;
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"Successful!");
                // NOTE(review): this calls -convertMP4toMP3withFile:, but the
                // next step defined in this file is -convertMP4toCAFwithFile:.
                // Verify which selector is actually intended/implemented.
                [self convertMP4toMP3withFile:myDocumentPath];
                break;
            default:
                break;
        }
    }];
}

/// Step 2: extract the audio track of the MP4 at dstPath into a Core Audio
/// Format (.caf) file via a pass-through export, rename the result to
/// Audio_<random>_<timestamp>.caf, and feed it to -convertToWavForFilePath:.
/// @param dstPath Path of the intermediate .mp4; it is overwritten in place
///                with the exported CAF data.
-(void)convertMP4toCAFwithFile:(NSString*)dstPath //Converted to Core Audio Format .caf
{
    NSURL *dstURL = [NSURL fileURLWithPath:dstPath];

    // Build a composition containing just the source file's audio track.
    AVMutableComposition *newAudioAsset = [AVMutableComposition composition];
    AVMutableCompositionTrack *dstCompositionTrack =
        [newAudioAsset addMutableTrackWithMediaType:AVMediaTypeAudio
                                   preferredTrackID:kCMPersistentTrackID_Invalid];

    AVAsset *srcAsset = [AVURLAsset URLAssetWithURL:dstURL options:nil];
    NSArray<AVAssetTrack *> *audioTracks = [srcAsset tracksWithMediaType:AVMediaTypeAudio];
    // Guard: objectAtIndex:0 on an empty array would crash for a silent video.
    if (audioTracks.count == 0) {
        NSLog(@"No audio track found in %@", dstPath);
        return;
    }
    AVAssetTrack *srcTrack = [audioTracks objectAtIndex:0];

    NSError *error = nil;
    if (![dstCompositionTrack insertTimeRange:srcTrack.timeRange
                                      ofTrack:srcTrack
                                       atTime:kCMTimeZero
                                        error:&error]) {
        NSLog(@"track insert failed: %@\n", error);
        return;
    }

    // Pass-through export: the audio is copied unmodified into a CAF container.
    AVAssetExportSession *exportSesh =
        [[AVAssetExportSession alloc] initWithAsset:newAudioAsset
                                         presetName:AVAssetExportPresetPassthrough];
    exportSesh.outputFileType = AVFileTypeCoreAudioFormat;
    exportSesh.outputURL = dstURL;

    // NOTE(review): this deletes the *source* file and then exports to the
    // very same URL, relying on the composition having already loaded the
    // track data. Exporting to a separate temp URL would be safer — confirm
    // this ordering is intentional before relying on it.
    [[NSFileManager defaultManager] removeItemAtURL:dstURL error:nil];

    [exportSesh exportAsynchronouslyWithCompletionHandler:^{
        AVAssetExportSessionStatus status = exportSesh.status;
        NSLog(@"exportAsynchronouslyWithCompletionHandler: %li\n", (long)status);

        if (AVAssetExportSessionStatusFailed == status) {
            NSLog(@"FAILURE: %@\n", exportSesh.error);
        } else if (AVAssetExportSessionStatusCompleted == status) {
            NSLog(@"SUCCESS!\n");

            // Rename the exported file to a unique, descriptive name in the
            // same directory: Audio_<random>_<timestamp>.caf.
            NSString *onlyPath = [dstPath stringByDeletingLastPathComponent];
            NSInteger randomNumber = arc4random() % 100000;
            strDateAndTime = [self getCurrentDateAndTime];
            strAudioName = [NSString stringWithFormat:@"%@_%ld_%@.%@",
                            @"Audio", (long)randomNumber, strDateAndTime, @"caf"];
            NSString *toPathString = [onlyPath stringByAppendingPathComponent:strAudioName];

            // Check the move before continuing — previously the error was
            // ignored and the WAV step ran against a non-existent path.
            NSError *moveError = nil;
            if (![[NSFileManager defaultManager] moveItemAtPath:dstPath
                                                         toPath:toPathString
                                                          error:&moveError]) {
                NSLog(@"Failed to rename CAF file: %@", moveError);
                return;
            }

            //[self uploadAudioOnAWSFromPath:toPathString];
            [self convertToWavForFilePath:toPathString];
        }
    }];
}

/// Step 3: decode the CAF file to 16-bit, 44.1 kHz, mono, little-endian
/// linear PCM and write it out as a .wav, then upload the result.
/// @param cafFilePath Path of the intermediate .caf produced by step 2.
- (void)convertToWavForFilePath:(NSString *)cafFilePath
{
    NSURL *assetURL = [NSURL fileURLWithPath:cafFilePath];
    AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];

    NSError *assetError = nil;
    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:songAsset
                                                               error:&assetError];
    // Check the returned object, not the error variable — the error is only
    // meaningful when creation fails.
    if (!assetReader) {
        NSLog(@"error: %@", assetError);
        return;
    }

    // An audio-mix output decodes all audio tracks to uncompressed PCM.
    AVAssetReaderOutput *assetReaderOutput =
        [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:songAsset.tracks
                                                                audioSettings:nil];
    if (![assetReader canAddOutput:assetReaderOutput]) {
        NSLog(@"can't add reader output... die!");
        return;
    }
    [assetReader addOutput:assetReaderOutput];

    // Build a unique output file name: Audio_<random>_<timestamp>.wav.
    NSInteger randomNumber = arc4random() % 100000;
    _finalAudioName = [NSString stringWithFormat:@"%@_%ld_%@",
                       @"Audio", (long)randomNumber, strDateAndTime];
    NSArray *docDirs = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *docDir = [docDirs objectAtIndex:0];
    __block NSString *wavFilePath = [[docDir stringByAppendingPathComponent:_finalAudioName]
                                     stringByAppendingPathExtension:@"wav"];
    _finalAudioName = [wavFilePath lastPathComponent];

    // AVAssetWriter refuses to write over an existing file.
    if ([[NSFileManager defaultManager] fileExistsAtPath:wavFilePath]) {
        [[NSFileManager defaultManager] removeItemAtPath:wavFilePath error:nil];
    }

    NSURL *exportURL = [NSURL fileURLWithPath:wavFilePath];
    assetError = nil;
    AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:exportURL
                                                          fileType:AVFileTypeWAVE
                                                             error:&assetError];
    if (!assetWriter) {
        NSLog(@"error: %@", assetError);
        return;
    }

    // Canonical WAV settings: 16-bit signed integer, interleaved,
    // little-endian, mono, 44.1 kHz.
    AudioChannelLayout channelLayout;
    memset(&channelLayout, 0, sizeof(AudioChannelLayout));
    channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    NSDictionary *outputSettings = @{
        AVFormatIDKey:            @(kAudioFormatLinearPCM),
        AVSampleRateKey:          @44100.0f,
        AVNumberOfChannelsKey:    @1,
        AVChannelLayoutKey:       [NSData dataWithBytes:&channelLayout
                                                 length:sizeof(AudioChannelLayout)],
        AVLinearPCMBitDepthKey:   @16,
        AVLinearPCMIsNonInterleaved: @NO,
        AVLinearPCMIsFloatKey:    @NO,
        AVLinearPCMIsBigEndianKey: @NO,
    };

    AVAssetWriterInput *assetWriterInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                           outputSettings:outputSettings];
    if ([assetWriter canAddInput:assetWriterInput]) {
        [assetWriter addInput:assetWriterInput];
    } else {
        NSLog(@"can't add asset writer input... die!");
        return;
    }
    assetWriterInput.expectsMediaDataInRealTime = NO;

    // Check the start calls — a silent failure here previously produced an
    // empty/corrupt file downstream.
    if (![assetWriter startWriting]) {
        NSLog(@"asset writer failed to start: %@", assetWriter.error);
        return;
    }
    if (![assetReader startReading]) {
        NSLog(@"asset reader failed to start: %@", assetReader.error);
        return;
    }
    [assetWriter startSessionAtSourceTime:kCMTimeZero];

    __block UInt64 convertedByteCount = 0;
    dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);

    [assetWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue
                                            usingBlock:^
     {
         while (assetWriterInput.readyForMoreMediaData)
         {
             CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
             if (nextBuffer)
             {
                 if (![assetWriterInput appendSampleBuffer:nextBuffer]) {
                     NSLog(@"append failed: %@", assetWriter.error);
                     CFRelease(nextBuffer);
                     [assetReader cancelReading];
                     [assetWriterInput markAsFinished];
                     break;
                 }
                 convertedByteCount += CMSampleBufferGetTotalSampleSize(nextBuffer);
                 CMTime progressTime = CMSampleBufferGetPresentationTimeStamp(nextBuffer);
                 CMTime sampleDuration = CMSampleBufferGetDuration(nextBuffer);
                 if (CMTIME_IS_NUMERIC(sampleDuration))
                     progressTime = CMTimeAdd(progressTime, sampleDuration);
                 float dProgress = CMTimeGetSeconds(progressTime) / CMTimeGetSeconds(songAsset.duration);
                 NSLog(@"%f", dProgress);
                 // copyNextSampleBuffer follows the Create rule: release the
                 // buffer or every sample in the file is leaked.
                 CFRelease(nextBuffer);
             }
             else
             {
                 // Source drained (or reader failed). Finish exactly once and
                 // leave the loop — previously this branch could re-enter,
                 // cancel a completed reader, and call
                 // finishWritingWithCompletionHandler: repeatedly, corrupting
                 // the WAV header on some OS versions.
                 [assetWriterInput markAsFinished];
                 if (assetReader.status == AVAssetReaderStatusCompleted) {
                     [assetWriter finishWritingWithCompletionHandler:^{
                         [self uploadAudioOnAWSFromPath:wavFilePath];
                     }];
                 } else {
                     NSLog(@"reader finished with status %ld: %@",
                           (long)assetReader.status, assetReader.error);
                     [assetWriter cancelWriting];
                 }
                 break;
             }
         }
     }];
}

在上面的代码中,转换后的最终文件是.wav格式,我必须将其发送到AWS服务器以从中获取文本;对于语音转文本,我们使用Google Speech API。每当我发送从iOS 11以下的设备(如iOS 9、10)转换的文件时,它就会显示这个错误:  错误:WAV标头表示格式不受支持。

在将音频文件(.wav)转换为文本格式时,Google语音API会显示以上错误。

以上代码与最新的iOS 11设备完美配合,仅在iOS 11以下(如iOS 9,10)设备无法正常工作。

0 个答案:

没有答案