在将Obj-C项目移至Swift时遇到一些问题。我有一种旨在合并视频数组的方法。但是问题是,当我向导出器添加mutablevideocomposition时,出现以下错误。
Optional(Error Domain=AVFoundationErrorDomain Code=-11841 "Operation Stopped" UserInfo={NSLocalizedFailureReason=Cannot play back the video composition., NSLocalizedDescription=Operation Stopped, NSUnderlyingError=0x28195a250 {Error Domain=NSOSStatusErrorDomain Code=-17390 "(null)"}})
如果我不把它设置到导出器上,视频可以合并在一起,但由于没有应用变换指令,各段视频的方向会完全错乱。为简单起见,我只使用了
var transform:CGAffineTransform = video.preferredTransform
而不是更复杂的转换,但这仍然没有任何区别。
// MARK: - MERGE THE VIDEOS

/// Appends every asset in `videoArray` back-to-back into a single composition,
/// applies each clip's `preferredTransform` so its orientation is preserved,
/// exports the result to the Documents directory, and saves it to the photo
/// library (showing a confirmation alert on success).
/// - Parameter videoArray: The clips to merge, in playback order.
func mergeVideos( videoArray: [AVAsset] ){
    // 1 - The composition that will hold the appended video/audio segments.
    let mixComposition = AVMutableComposition()
    // Running insertion point; advanced by each clip's duration.
    var currentTime = CMTime.zero

    guard let videoToAdd = mixComposition.addMutableTrack(withMediaType: .video,
                                                          preferredTrackID: kCMPersistentTrackID_Invalid) else {
        print("Could not add a video track to the composition")
        return
    }
    let audioToAdd = mixComposition.addMutableTrack(withMediaType: .audio,
                                                   preferredTrackID: kCMPersistentTrackID_Invalid)

    var instructions: [AVVideoCompositionInstructionProtocol] = []

    // 2 - Append each clip and build one composition instruction per segment.
    for video in videoArray {
        // `first` instead of [0]: an asset with no video track would crash on [0].
        guard let sourceVideo = video.tracks(withMediaType: .video).first else {
            print("Asset has no video track — skipping")
            continue
        }
        do {
            try videoToAdd.insertTimeRange(CMTimeRange(start: .zero, duration: video.duration),
                                           of: sourceVideo,
                                           at: currentTime)
        } catch {
            print("Failed to insert video track: \(error)")
            return
        }

        // Audio is optional: silent clips simply contribute no audio segment
        // (the original indexed [0] unconditionally and crashed on silent clips).
        if let sourceAudio = video.tracks(withMediaType: .audio).first {
            do {
                try audioToAdd?.insertTimeRange(CMTimeRange(start: .zero, duration: video.duration),
                                                of: sourceAudio,
                                                at: currentTime)
            } catch {
                print("Failed to load Audio track")
            }
        }

        // 3 - Per-segment instruction. The layer instruction MUST be bound to the
        // composition's video track via init(assetTrack:). Creating it with the
        // bare initializer leaves it targeting no track, which is what makes the
        // export fail with AVFoundationErrorDomain -11841 ("Cannot play back the
        // video composition").
        let videoCompositionInstruction = AVMutableVideoCompositionInstruction()
        videoCompositionInstruction.timeRange = CMTimeRange(start: currentTime, duration: video.duration)
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoToAdd)
        // Apply the clip's transform at this segment's start, not at .zero, so
        // each segment keeps its own orientation.
        layerInstruction.setTransform(video.preferredTransform, at: currentTime)
        videoCompositionInstruction.layerInstructions = [layerInstruction]
        instructions.append(videoCompositionInstruction)

        currentTime = CMTimeAdd(currentTime, video.duration)
    }

    // 4 - Build a unique output URL in the Documents directory.
    guard let documentDirectory = FileManager.default.urls(for: .documentDirectory,
                                                           in: .userDomainMask).first else {
        return
    }
    let dateFormatter = DateFormatter()
    dateFormatter.dateStyle = .long
    dateFormatter.timeStyle = .short
    let date = dateFormatter.string(from: Date())
    let url = documentDirectory.appendingPathComponent("mergeVideo-\(date).mov")

    // 5 - Create the exporter.
    guard let exporter = AVAssetExportSession(asset: mixComposition,
                                              presetName: AVAssetExportPresetHighestQuality) else {
        return
    }

    // 6 - Video composition carrying the per-segment transform instructions.
    let mutableVideoComposition = AVMutableVideoComposition()
    // 30 fps. The original used CMTime(seconds: 1, preferredTimescale: 12),
    // which is one full second PER FRAME (1 fps), not 1/12 s.
    mutableVideoComposition.frameDuration = CMTime(value: 1, timescale: 30)
    mutableVideoComposition.renderSize = CGSize(width: 1920, height: 1080)
    mutableVideoComposition.instructions = instructions

    exporter.outputURL = url
    exporter.outputFileType = .mov
    exporter.shouldOptimizeForNetworkUse = true
    exporter.videoComposition = mutableVideoComposition

    // 7 - Run the export, then save to the photo library and confirm to the user.
    exporter.exportAsynchronously {
        if exporter.status == .completed {
            PHPhotoLibrary.shared().performChanges({
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: url)
            }) { saved, error in
                guard saved else { return }
                // performChanges' completion handler is not guaranteed to run on
                // the main queue; hop back before touching UIKit.
                DispatchQueue.main.async {
                    let alertController = UIAlertController(title: "Your video was successfully saved",
                                                            message: nil,
                                                            preferredStyle: .alert)
                    alertController.addAction(UIAlertAction(title: "OK", style: .default, handler: nil))
                    self.present(alertController, animated: true, completion: nil)
                }
            }
        } else {
            print(exporter.status.rawValue)
            print(exporter.error as Any)
        }
    }
}