通过转换将视频合成为一部电影

时间:2015-12-03 08:32:11

标签: ios swift video video-editing

我有几个 h.264 编码的 QuickTime 视频,我想将它们连成一个带有过渡效果(淡入/淡出)的视频。以下代码应接受 AVAsset 数组并将其插入 AVMutableVideoComposition。但我一直从 AVAssetExportSession 收到 -11841 错误!请帮我找出问题所在。

-- NOTE(review): this SQL snippet appears unrelated to the surrounding
-- AVFoundation question (likely a scraping artifact from another post).
-- Select shipping codes containing the digit '3', restricted to ids
-- that also exist in table2 (INNER JOIN is commutative, so driving the
-- join from table1 is equivalent).
SELECT t1.`shipping_code`
FROM `table1` AS t1
  INNER JOIN `table2` AS t2
    ON t2.`id` = t1.`id`
WHERE t1.`shipping_code` LIKE '%3%'

1 个答案:

答案 0 :(得分:0)

终于解决了问题!原来我必须把所有 AVMutableVideoCompositionLayerInstruction 都放进同一个 AVMutableVideoCompositionInstruction 的数组里,因为这些 layer instruction 的时间范围彼此重叠。

如果有人有同样的问题,我会发布工作代码。

// Stitches the clips in `movieAssets` into one movie: each asset's first
// video and audio track is inserted into an AVMutableComposition so that
// consecutive clips overlap by 0.3 s, and transform/opacity ramps over the
// overlap regions act as the transitions. All layer instructions are
// collected into a SINGLE AVMutableVideoCompositionInstruction — per the
// answer text, splitting them into multiple instructions with overlapping
// time ranges was what caused AVAssetExportSession error -11841
// (AVErrorInvalidVideoComposition).
// NOTE(review): `index`, `movieAssets`, `movieFrameSize`, and `retFileUrl`
// are defined outside this excerpt.
index = Int(0)
let mutableComposition = AVMutableComposition()
// Transition relate
// Duration of the cross-transition overlap between consecutive clips.
let timeOffsetBetweenVideos = CMTimeMakeWithSeconds(0.3, 30)
// Single instruction that will hold every clip's layer instruction.
let videoCompositionInstruction = AVMutableVideoCompositionInstruction()
// One shared audio track; all clips' audio is appended into it.
let compositionAudioTrack = mutableComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
var lastVideoEndTime = kCMTimeZero

for asset in movieAssets as! [AVAsset] {
    // Add video track into composition
    // First clip starts at zero; every later clip starts `timeOffsetBetweenVideos`
    // before the previous clip ends, creating the overlap for the transition.
    let videoStartTime = CMTimeCompare(lastVideoEndTime, kCMTimeZero) == 0 ? kCMTimeZero : CMTimeSubtract(lastVideoEndTime, timeOffsetBetweenVideos)
    // Each clip gets its own video track so overlapping ranges can coexist.
    let compositionVideoTrack = mutableComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    // NOTE(review): assumes every asset has at least one video track — [0]
    // will crash otherwise.
    let videoTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0]
    try compositionVideoTrack.insertTimeRange(videoTrack.timeRange, ofTrack: videoTrack, atTime: videoStartTime)
    if index == (movieAssets.count - 1) {
        // Stretch the last clip slightly so the composition does not end
        // inside a transition window.
        // NOTE(review): the range passed is the asset-local `videoTrack.timeRange`,
        // not the clip's range inside the composition track — verify this is
        // intentional when videoStartTime != kCMTimeZero.
        compositionVideoTrack.scaleTimeRange(videoTrack.timeRange, toDuration: CMTimeAdd(asset.duration, timeOffsetBetweenVideos))
    }

    // Add audio track into composition
    // NOTE(review): likewise assumes at least one audio track per asset.
    let audioTrack = asset.tracksWithMediaType(AVMediaTypeAudio)[0]
    try compositionAudioTrack.insertTimeRange(audioTrack.timeRange, ofTrack: audioTrack, atTime: videoStartTime)

    if movieAssets.count == 1 {
        // Single clip: no transitions (and no video composition) needed.
        break
    }
    if index == 0 {
        // First movie has ending animation only
        // Ending transition: slide the frame left while fading out, over the
        // last `timeOffsetBetweenVideos` of this clip.
        let transitionTimeRange = CMTimeRangeMake(CMTimeSubtract(compositionVideoTrack.timeRange.end, timeOffsetBetweenVideos), timeOffsetBetweenVideos)
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionVideoTrack)
        let transform = CGAffineTransformTranslate(videoTrack.preferredTransform, movieFrameSize.width / -1.0, 0)
        layerInstruction.setTransformRampFromStartTransform(videoTrack.preferredTransform, toEndTransform: transform, timeRange: transitionTimeRange)
        // Hide this track once its clip is over so the next track shows through.
        layerInstruction.setOpacity(0.0, atTime: compositionVideoTrack.timeRange.end)

        videoCompositionInstruction.layerInstructions.append(layerInstruction)
    } else if index == (movieAssets.count - 1) {
        // Last movie has begining animation only
        // Opening transition: scale up from half size, starting where the
        // previous clip began to fade.
        let transitionTimeRange = CMTimeRangeMake(lastVideoEndTime, timeOffsetBetweenVideos)
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionVideoTrack)
        var transform = CGAffineTransformScale(videoTrack.preferredTransform, 0.5, 0.5)
        transform = CGAffineTransformTranslate(transform, movieFrameSize.width / 2, movieFrameSize.height / 2)
        layerInstruction.setTransformRampFromStartTransform(transform, toEndTransform: videoTrack.preferredTransform, timeRange: transitionTimeRange)

        videoCompositionInstruction.layerInstructions.append(layerInstruction)
    } else {
        // Other movies has both begining/ending animation
        // Middle clips ramp in (scale-up) at the start and ramp out
        // (slide-left + opacity 0) at the end, on the same layer instruction.
        let transitionTimeRangeBegin = CMTimeRangeMake(lastVideoEndTime, timeOffsetBetweenVideos)
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionVideoTrack)
        var transformBegin = CGAffineTransformScale(videoTrack.preferredTransform, 0.5, 0.5)
        transformBegin = CGAffineTransformTranslate(transformBegin, movieFrameSize.width / 2, movieFrameSize.height / 2)
        layerInstruction.setTransformRampFromStartTransform(transformBegin, toEndTransform: videoTrack.preferredTransform, timeRange: transitionTimeRangeBegin)

        let transitionTimeRangeEnd = CMTimeRangeMake(CMTimeSubtract(compositionVideoTrack.timeRange.end, timeOffsetBetweenVideos), timeOffsetBetweenVideos)
        let transform = CGAffineTransformTranslate(videoTrack.preferredTransform, movieFrameSize.width / -1.0, 0)
        layerInstruction.setTransformRampFromStartTransform(videoTrack.preferredTransform, toEndTransform: transform, timeRange: transitionTimeRangeEnd)
        layerInstruction.setOpacity(0.0, atTime: compositionVideoTrack.timeRange.end)

        videoCompositionInstruction.layerInstructions.append(layerInstruction)
    }

    // Next clip will start `timeOffsetBetweenVideos` before this point.
    lastVideoEndTime = CMTimeSubtract(compositionVideoTrack.timeRange.end, timeOffsetBetweenVideos)
    // NOTE(review): `++index` was removed in Swift 3; use `index += 1` there.
    ++index
}

let exportSesstion = AVAssetExportSession(asset: mutableComposition, presetName: AVAssetExportPresetHighestQuality)
exportSesstion?.outputFileType = AVFileTypeQuickTimeMovie
exportSesstion?.outputURL = retFileUrl
if movieAssets.count > 1 {
    // One instruction spanning the whole composition — the key fix: all
    // overlapping layer instructions live inside this single instruction.
    videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, mutableComposition.duration)
    videoCompositionInstruction.enablePostProcessing = false

    let videoComposition = AVMutableVideoComposition(propertiesOfAsset: mutableComposition)
    videoComposition.instructions = [videoCompositionInstruction]
    videoComposition.renderSize = mutableComposition.naturalSize
    videoComposition.renderScale = 1.0
    // 30 fps output frame rate.
    videoComposition.frameDuration = CMTimeMake(1, 30)
    exportSesstion?.videoComposition = videoComposition
}
exportSesstion?.exportAsynchronouslyWithCompletionHandler({ () -> Void in
    if exportSesstion?.status == AVAssetExportSessionStatus.Completed {
        print("Video file exported: \(retFileUrl)")
    } else {
        print(exportSesstion!.error!)
        print("Failed exporting video: \(exportSesstion?.error?.localizedDescription)")
        print(exportSesstion?.error?.localizedFailureReason)
    }
})