iOS: Lowering the bitrate of an MPMediaItem containing iPod music

Date: 2013-07-17 06:49:38

Tags: iphone ios media-player ipod

I'm building an app that adds theme music to videos.

Some users have complained that when their music is in Apple Lossless format, the resulting video is too large.

I found that this happens because the AVMutableComposition I use simply copies the original music format into the generated video.

So is there any way to lower the bitrate of the music from an MPMediaItem, or change the format it is encoded in?

Here is the code snippet I use to add music to the video.

AVMutableComposition* mixComposition = [AVMutableComposition composition];

AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                    preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                    ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                     atTime:kCMTimeZero error:nil];

AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                               preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                               ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                atTime:kCMTimeZero error:nil];

// NOTE: the passthrough preset copies the source tracks as-is,
// so lossless audio is written to the output without re-encoding.
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:AVAssetExportPresetPassthrough];

NSURL    *exportUrl = [NSURL fileURLWithPath:_videoOutputPath];

if ([[NSFileManager defaultManager] fileExistsAtPath:_videoOutputPath]){
    [[NSFileManager defaultManager] removeItemAtPath:_videoOutputPath error:nil];
}

_assetExport.outputFileType = @"com.apple.quicktime-movie";

_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;

[_assetExport exportAsynchronouslyWithCompletionHandler:^{
    // check _assetExport.status / _assetExport.error here when the export finishes
}];
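
For context, the audioAsset referenced above has to be built from the user's MPMediaItem somehow. A minimal sketch of one common way to do that, assuming the track was picked from the iPod library (the helper name is hypothetical, and MPMediaItemPropertyAssetURL returns nil for DRM-protected or cloud-only items):

#import <MediaPlayer/MediaPlayer.h>
#import <AVFoundation/AVFoundation.h>

// Hypothetical helper, not from the original question.
static AVURLAsset *AudioAssetForMediaItem(MPMediaItem *item)
{
    // The asset URL is nil for DRM-protected or iCloud-only items.
    NSURL *assetURL = [item valueForProperty:MPMediaItemPropertyAssetURL];
    if (!assetURL) {
        return nil;
    }
    return [AVURLAsset URLAssetWithURL:assetURL options:nil];
}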

1 Answer:

Answer 0 (score: 0):

I finally figured it out. Here is the code I used:

#import <AVFoundation/AVFoundation.h>

// KVO key path used to observe when the writer input can accept more data.
static NSString * const kWriterInputIsReadyForMoreData = @"readyForMoreMediaData";

@implementation AudioUtil   // interface declared in AudioUtil.h
{
    AVAssetReader *_assetReader;
    AVAssetWriter *_assetWriter;
    AVAssetWriterInput *_assetWriterInput;
    AVAssetReaderTrackOutput *_readerOutput;
    void (^_callback)(BOOL);
    CMSampleBufferRef _sampleBufferToAppend;
}

-(void)downSamplingAudioWithSourceURL:(NSURL *)sourceURL destinationURL:(NSURL *)destURL timeRange:(CMTimeRange)timeRange  callBack:(void (^)(BOOL))callback
{
    NSError *error = nil;
    _callback = callback;

    [[NSFileManager defaultManager] removeItemAtURL:destURL error:nil];

    //initialize reader
    AVURLAsset *inputAsset = [AVURLAsset assetWithURL:sourceURL];
    _assetReader = [[AVAssetReader alloc] initWithAsset:inputAsset error:&error];
    _assetReader.timeRange = timeRange;
    AVAssetTrack *track = [[inputAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

    // decode the source track to linear PCM so it can be re-encoded below
    NSMutableDictionary *audioReadSettings = [NSMutableDictionary dictionary];
    audioReadSettings[AVFormatIDKey] = @(kAudioFormatLinearPCM);
    audioReadSettings[AVNumberOfChannelsKey] = @([QLVideoFormatProvider audioChannelCount]);

    _readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:track outputSettings:audioReadSettings];
    NSAssert([_assetReader canAddOutput:_readerOutput], @"reader can't add output");
    [_assetReader addOutput:_readerOutput];

    //initialize writer
    _assetWriter = [[AVAssetWriter alloc] initWithURL:destURL fileType:[QLVideoFormatProvider audioFileType] error:nil];

    // target format, channel count, sample rate and bitrate for the compressed output
    NSMutableDictionary *audioOutputSettings = [NSMutableDictionary dictionary];
    audioOutputSettings[AVFormatIDKey] = [QLVideoFormatProvider audioFormatKeyForEncoder];
    audioOutputSettings[AVNumberOfChannelsKey] = @([QLVideoFormatProvider audioChannelCount]);
    audioOutputSettings[AVSampleRateKey] = @([QLVideoFormatProvider audioSampleRate]);
    audioOutputSettings[AVEncoderBitRateKey] = @([QLVideoFormatProvider audioBitrate]);

    _assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
    [_assetWriter addInput:_assetWriterInput];

    //start
    [_assetWriter startWriting];
    [_assetWriter startSessionAtSourceTime:kCMTimeZero];
    BOOL canStartReading = [_assetReader startReading];
    NSLog(@"can start reading %d", canStartReading);
    if (!canStartReading) {
        callback(NO);
        return;
    }

    // observe readyForMoreMediaData, then pull the first sample buffer and start appending
    [_assetWriterInput addObserver:self forKeyPath:kWriterInputIsReadyForMoreData options:NSKeyValueObservingOptionOld|NSKeyValueObservingOptionNew context:NULL];
    _sampleBufferToAppend = [_readerOutput copyNextSampleBuffer];

    [self appendBufferToAppend];
}

-(void)appendBufferToAppend
{
    if ([_assetWriterInput isReadyForMoreMediaData]) {
        if (_sampleBufferToAppend) {
            [_assetWriterInput appendSampleBuffer:_sampleBufferToAppend];
            CFRelease(_sampleBufferToAppend);
        }
        _sampleBufferToAppend = [_readerOutput copyNextSampleBuffer];
        if (_sampleBufferToAppend) {
            [self appendBufferToAppend];
        }
        else {
            // no more sample buffers: finish the file and report the result
            [_assetWriter finishWritingWithCompletionHandler:^(){
                if (_callback) {
                    _callback(_assetWriter.status == AVAssetWriterStatusCompleted);
                }
            }];
        }
    }
    else {
        // writer input is not ready yet; the KVO notification on
        // readyForMoreMediaData will call this method again
    }
}

// KVO: resume appending as soon as the writer input becomes ready again.
-(void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    if ([keyPath isEqualToString:kWriterInputIsReadyForMoreData]) {
        if ([change[NSKeyValueChangeNewKey] boolValue] == YES) {
            [self appendBufferToAppend];
        }
    }
}

@end
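
For reference, a hedged sketch of how this class might be called from the export code in the question; the URLs, the time range, and the completion handling are assumptions, not part of the original answer:

// Hypothetical call site (assumed, not from the original answer).
AudioUtil *audioUtil = [[AudioUtil alloc] init];
[audioUtil downSamplingAudioWithSourceURL:musicAssetURL          // e.g. from MPMediaItemPropertyAssetURL
                           destinationURL:compressedAudioURL     // e.g. a temporary .m4a file
                                timeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                 callBack:^(BOOL success) {
                                     if (success) {
                                         // Rebuild the AVMutableComposition with the
                                         // compressed audio file instead of the original
                                         // lossless track, then export as before.
                                     }
                                 }];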