AVMutableAudioMixInputParameters not correctly applied to AudioMix

Time: 2016-07-11 05:20:33

Tags: ios swift avfoundation avmutablecomposition

I am developing an app that combines multiple videos together with a background music track. It also needs to set different audio levels for different videos.

Below is the code for the AssetItem class and the AssetManager class.

 // AssetItem Class 
class AssetItem : NSObject {

    var asset                   : Asset!
    var assetEffect             : AssetEffectType!   // Enum
    var assetSceneType          : SceneType!         // Enum
    var videoLength             : CMTime!
    var animationLayer          : AnyObject?
    var volumeOfVideoVoice      : Float = 0.0
    var volumeOfBGMusic         : Float = 0.0

    override init() {
        super.init()
    }
}

// AssetManager Class implementation 

class AssetManager : NSObject {    // NSObject superclass assumed so that override init() / super.init() below compile

    var assetsList                      = [AssetItem]()   // (was assetList; renamed to match its usage below)
    var composition                     : AVMutableComposition! = AVMutableComposition()
    var videoComposition                : AVMutableVideoComposition? = AVMutableVideoComposition()
    var audioMix                        : AVMutableAudioMix = AVMutableAudioMix()

    var transitionDuration              = CMTimeMakeWithSeconds(1, 600) // Default transitionDuration is 1 sec

    var compositionTimeRanges           : [NSValue] = [NSValue]()

    var passThroughTimeRangeValue       : [NSValue] = [NSValue]()
    var transitionTimeRangeValue        : [NSValue] = [NSValue]()

    var videoTracks                     = [AVMutableCompositionTrack]()
    var audioTracks                     = [AVMutableCompositionTrack]()

    // MARK: - Constructor
    override init() {

        super.init()

        let compositionTrackA           = self.composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))
        let compositionTrackB           = self.composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))

        let compositionTrackAudioA      = self.composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))
        let compositionTrackAudioB      = self.composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))

        self.videoTracks                = [compositionTrackA, compositionTrackB]
        self.audioTracks                = [compositionTrackAudioA, compositionTrackAudioB]
    }

    func buildCompositionTrack(forExport : Bool) {
        // This is the method that builds the composition (shown in full below)
    }
}

The following is the method that builds the composition:

func buildCompositionTrack(forExport : Bool) {

    var cursorTime = kCMTimeZero
    var transitionDurationForEffect = kCMTimeZero
    var error : NSError?   // holds errors thrown by the insertTimeRange calls below

    // Create a mutable composition instructions object
    var videoCompositionInstructions = [AVMutableVideoCompositionInstruction]()
    var audioMixInputParameters = [AVMutableAudioMixInputParameters]()

    let timeRanges = calculateTimeRangeForAssetLayer()
    self.passThroughTimeRangeValue = timeRanges.passThroughTimeRangeValue
    self.transitionTimeRangeValue = timeRanges.transitionTimeRangeValue

    let defaultMuteSoundTrackURL: NSURL = bundle.URLForResource("30sec", withExtension: "mp3")!
    let muteSoundTrackAsset = AVURLAsset(URL: defaultMuteSoundTrackURL, options: nil)
    let muteSoundTrack = muteSoundTrackAsset.tracksWithMediaType(AVMediaTypeAudio)[0]

    for (index,assetItem) in self.assetsList.enumerate() {
        let trackIndex = index % 2

        let assetVideoTrack = assetItem.asset.movieAsset.tracksWithMediaType(AVMediaTypeVideo)[0]

        let timeRange = CMTimeRangeMake(kCMTimeZero, assetItem.videoLength)

        do {
            try self.videoTracks[trackIndex].insertTimeRange(timeRange, ofTrack: assetVideoTrack, atTime: cursorTime)
        } catch let error1 as NSError {
            error = error1
        }
        if error != nil {
            print("Error: buildCompositionTracks for video at index \(index) of \(self.assetsList.count), error: \(error?.description)")
            error = nil
        }

        if assetItem.asset.movieAsset.tracksWithMediaType(AVMediaTypeAudio).count > 0 {
            let clipAudioTrack = assetItem.asset.movieAsset.tracksWithMediaType(AVMediaTypeAudio)[0]

            do {
                try audioTracks[trackIndex].insertTimeRange(timeRange, ofTrack: clipAudioTrack,  atTime: cursorTime)
            } catch let error1 as NSError {
                error = error1
            }
        }else {
            do {
                try audioTracks[trackIndex].insertTimeRange(timeRange, ofTrack: muteSoundTrack, atTime: cursorTime)
            }catch let error1 as NSError {
                error = error1
            }
        }

        // The end of this clip will overlap the start of the next by transitionDuration.
        // (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)

        if assetItem.assetEffect == .Default {

            transitionDurationForEffect = kCMTimeZero
            let timeRange = CMTimeRangeMake(cursorTime, assetItem.videoLength)
            self.compositionTimeRanges.append(NSValue(CMTimeRange: timeRange))
            cursorTime = CMTimeAdd(cursorTime, assetItem.videoLength)
        } else {
            transitionDurationForEffect = self.transitionDuration
            let timeRange = CMTimeRangeMake(cursorTime, CMTimeSubtract(assetItem.videoLength, transitionDurationForEffect))
            self.compositionTimeRanges.append(NSValue(CMTimeRange: timeRange))
            cursorTime = CMTimeAdd(cursorTime, assetItem.videoLength)
            cursorTime = CMTimeSubtract(cursorTime, transitionDurationForEffect)
        }

        videoCompositionInstructions.appendContentsOf(self.buildCompositionInstructions(index, assetItem: assetItem))

    }

    if self.project.hasProjectMusicTrack() && self.backgroundMusicTrack != nil {

        let url: NSURL = bundle.URLForResource("Music9", withExtension: "mp3")!
        bgMusicSound = AVURLAsset(URL: url, options: nil)
        backgroundAudioTrack = bgMusicSound.tracksWithMediaType(AVMediaTypeAudio)[0]

        let compositionBackgroundTrack = self.composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))

        let soundDuration =  CMTimeCompare(bgMusicSound.duration, self.composition.duration)

        if soundDuration == -1 {

            let bgMusicSoundTimeRange = CMTimeRangeMake(kCMTimeZero, bgMusicSound.duration)

            let noOfTimes = Int(CMTimeGetSeconds(self.composition.duration) / CMTimeGetSeconds(bgMusicSound.duration))
            let remainingTime = CMTimeGetSeconds(self.composition.duration) % CMTimeGetSeconds(bgMusicSound.duration)

            var musicCursorTime = kCMTimeZero

            for _ in 0..<noOfTimes {
                do {
                    try compositionBackgroundTrack.insertTimeRange(bgMusicSoundTimeRange, ofTrack: backgroundAudioTrack, atTime: musicCursorTime)
                } catch let error1 as NSError {
                    error = error1
                }
                musicCursorTime = CMTimeAdd(bgMusicSound.duration, musicCursorTime)
            }
        }

        let backgroundMusicMixInputParameters = AVMutableAudioMixInputParameters(track: compositionBackgroundTrack)
        backgroundMusicMixInputParameters.trackID = compositionBackgroundTrack.trackID

        // Setting up music levels for the background music track.

        for index in 0 ..< self.compositionTimeRanges.count {

            let timeRange = self.compositionTimeRanges[index].CMTimeRangeValue
            let scene = self.assetsList[index].assetSceneType

            let volumeOfBGMusic = self.assetsList[index].volumeOfBGMusic
            var nextVolumeOfBGMusic : Float = 0.0

            if let nextAsset = self.assetsList[safe: index + 1] {
                nextVolumeOfBGMusic = nextAsset.volumeOfBGMusic
            }

            backgroundMusicMixInputParameters.setVolume(volumeOfBGMusic, atTime: timeRange.start)
            backgroundMusicMixInputParameters.setVolumeRampFromStartVolume(volumeOfBGMusic, toEndVolume: nextVolumeOfBGMusic, timeRange: CMTimeRangeMake(CMTimeSubtract(timeRange.end, CMTimeMake(2, 1)), CMTimeMake(2, 1)))
        }

        audioMixInputParameters.append(backgroundMusicMixInputParameters)

    } // End of if for project music check


    for (index, assetItem) in self.assetsList.enumerate() {

        let trackIndex = index % 2

        let timeRange = self.compositionTimeRanges[index].CMTimeRangeValue
        let sceneType = assetItem.assetSceneType
        let volumeOfVideoVoice = assetItem.volumeOfVideoVoice

        let audioTrackParameters = AVMutableAudioMixInputParameters(track: self.audioTracks[trackIndex])

        audioTrackParameters.trackID = self.audioTracks[trackIndex].trackID
        audioTrackParameters.setVolume(0.0, atTime: kCMTimeZero)                    // Statement 1
        audioTrackParameters.setVolume(volumeOfVideoVoice, atTime: timeRange.start) // Statement 2
        audioTrackParameters.setVolume(0.0, atTime: timeRange.end)                  // Statement 3
        audioMixInputParameters.append(audioTrackParameters)
    }

    self.audioMix.inputParameters = audioMixInputParameters
    self.composition.naturalSize    = self.videoRenderSize
    self.videoComposition!.instructions = videoCompositionInstructions
    self.videoComposition!.renderSize = self.videoRenderSize
    self.videoComposition!.frameDuration = CMTimeMake(1, 30)
    self.videoComposition!.renderScale = 1.0        // This is an iPhone-only option.

}

In the code above, the background music levels are applied correctly, but the audio levels of the video tracks are not. I added a DebugView to help inspect the composition, and everything looks perfect in the debug view, yet apart from the background music track the video audio is no longer audible. Is there something I am doing wrong?

If I remove Statement 1 from the code above, the video audio becomes audible, but then every clip plays at a level of 1.0 and the levels that were set are not respected.

DebugView of Compositions and Audio Tracks

1 Answer:

Answer 0 (score: 4):

It looks like you are creating a new AVAudioMixInputParameters object for every "clip" in the AVMutableCompositionTrack. That does not work: each new object conflicts with the earlier objects that carry the same track ID.

You should use a single AVAudioMixInputParameters object per AVMutableCompositionTrack and update that one object with each volume ramp you need; a sketch of this is below.
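Here is a minimal sketch of that idea, reusing the names from the question (audioTracks, assetsList, compositionTimeRanges, volumeOfVideoVoice and the Swift 2-era AVFoundation calls): one input-parameters object is created per composition audio track, and every clip placed on that track adds its volume changes to that same object.

// One AVMutableAudioMixInputParameters per composition audio track (not per clip).
var audioMixInputParameters = [AVMutableAudioMixInputParameters]()

let videoAudioParameters = self.audioTracks.map { track -> AVMutableAudioMixInputParameters in
    let parameters = AVMutableAudioMixInputParameters(track: track)
    parameters.trackID = track.trackID
    parameters.setVolume(0.0, atTime: kCMTimeZero)           // keep the track silent until a clip starts
    return parameters
}

for (index, assetItem) in self.assetsList.enumerate() {
    let trackIndex = index % 2                                // clips alternate between track A and track B
    let timeRange  = self.compositionTimeRanges[index].CMTimeRangeValue
    let parameters = videoAudioParameters[trackIndex]

    // Raise the volume while this clip plays, then mute the track again afterwards,
    // always on the single parameters object that owns this track.
    parameters.setVolume(assetItem.volumeOfVideoVoice, atTime: timeRange.start)
    parameters.setVolume(0.0, atTime: timeRange.end)
}

audioMixInputParameters.appendContentsOf(videoAudioParameters)
// ... append the (single) background-music parameters object here as well, then:
self.audioMix.inputParameters = audioMixInputParameters

Since the mix associates each parameters object with a track through its trackID, the per-clip objects in the question all point at the same two composition tracks and conflict with one another, whereas here each audio track is described by exactly one object.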