In my iOS app I want to merge two videos and change the background music. I tried it, and the merged videos play fine. However, whenever I select a time-lapse video and then try to merge it or change the background music, the resulting video is a completely black screen.
The app uses Swift 4.2 and Xcode 10. I have also tried Swift 4 and Swift 5, with the same result.
Here is my code:
class Export: NSObject {
    static let shared = Export()
    var exporter: AVAssetExportSession!
    let defaultSize = CGSize(width: 1920, height: 1920)

    typealias Completion = (URL?, Error?) -> Void

    func mergeVideos(arrayVideos: [URL], animation: Bool, exportURL: URL, completion: @escaping Completion) -> Void {
        DispatchQueue.main.async {
            UIApplication.shared.isIdleTimerDisabled = true
        }
        var errors: Error!
        var insertTime = kCMTimeZero
        var arrayLayerInstructions: [AVMutableVideoCompositionLayerInstruction] = []
        var outputSize = CGSize(width: 0, height: 0)

        // Determine video output size
        for url in arrayVideos {
            print("merge video url: \(url)")
            let videoAsset = AVAsset(url: url)
            let videoTrack = videoAsset.tracks(withMediaType: AVMediaType.video)[0]
            var videoSize = videoTrack.naturalSize.applying(videoTrack.preferredTransform)
            videoSize.width = fabs(videoSize.width)
            videoSize.height = fabs(videoSize.height)
            if outputSize.height == 0 || videoSize.height > outputSize.height {
                outputSize.height = videoSize.height
            }
            if outputSize.width == 0 || videoSize.width > outputSize.width {
                outputSize.width = videoSize.width
            }
        }

        // Silence sound (in case a video has no sound track)
        guard let silenceURL = Bundle.main.url(forResource: "silence", withExtension: "mp3") else { completion(nil, errors); return }
        let silenceAsset = AVAsset(url: silenceURL)
        let silenceSoundTrack = silenceAsset.tracks(withMediaType: AVMediaType.audio).first

        // Init composition
        let mixComposition = AVMutableComposition()

        for url in arrayVideos {
            print("merge video url: \(url)")
            let videoAsset = AVAsset(url: url)

            // Get video track
            guard let videoTrack = videoAsset.tracks(withMediaType: AVMediaType.video).first else {
                print("video asset track not found")
                continue
            }

            // Get audio track
            var audioTrack: AVAssetTrack?
            if videoAsset.tracks(withMediaType: AVMediaType.audio).count > 0 {
                audioTrack = videoAsset.tracks(withMediaType: AVMediaType.audio).first
            } else {
                audioTrack = silenceSoundTrack
            }

            // Init video & audio composition track
            guard let videoCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { completion(nil, errors); return }
            guard let audioCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { completion(nil, errors); return }

            do {
                let startTime = kCMTimeZero
                let duration = videoAsset.duration

                // Add video track to video composition at specific time
                try videoCompositionTrack.insertTimeRange(CMTimeRangeMake(startTime, duration),
                                                          of: videoTrack,
                                                          at: insertTime)

                // Add audio track to audio composition at specific time
                if let audioTrack = audioTrack {
                    try audioCompositionTrack.insertTimeRange(CMTimeRangeMake(startTime, duration),
                                                              of: audioTrack,
                                                              at: insertTime)
                }

                // Add instruction for video track
                let layerInstruction = videoCompositionInstructionForTrack(track: videoCompositionTrack, asset: videoAsset, standardSize: outputSize, atTime: insertTime)

                // Hide video track before changing to the next track
                let endTime = CMTimeAdd(insertTime, duration)
                if animation {
                    let timeScale = videoAsset.duration.timescale
                    let durationAnimation = CMTime(seconds: 1, preferredTimescale: timeScale)
                    layerInstruction.setOpacityRamp(fromStartOpacity: 1.0, toEndOpacity: 0.0, timeRange: CMTimeRange(start: endTime, duration: durationAnimation))
                } else {
                    layerInstruction.setOpacity(0, at: endTime)
                }
                arrayLayerInstructions.append(layerInstruction)

                // Increase the insert time
                insertTime = CMTimeAdd(insertTime, duration)
            } catch {
                print("Load track error \(error)")
            }
        }

        // Main video composition instruction
        let mainInstruction = AVMutableVideoCompositionInstruction()
        mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, insertTime)
        mainInstruction.layerInstructions = arrayLayerInstructions

        // Main video composition
        let mainComposition = AVMutableVideoComposition()
        mainComposition.instructions = [mainInstruction]
        mainComposition.frameDuration = CMTimeMake(1, 30)
        mainComposition.renderSize = outputSize

        // Remove the file if it already exists
        FileManager.default.removeItemIfExisted(exportURL)

        // Init exporter
        guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else {
            // Note: "..." as? Error always yields nil, so build a real Error here
            errors = NSError(domain: "Export", code: -1,
                             userInfo: [NSLocalizedDescriptionKey: "exporter initialization failed"])
            print("exporter initialization failed")
            completion(nil, errors)
            return
        }
        exporter.outputURL = exportURL
        exporter.outputFileType = AVFileType.mov
        exporter.shouldOptimizeForNetworkUse = true
        exporter.videoComposition = mainComposition
        self.exporter = exporter

        // Show progress of export
        var exportProgressBarTimer = Timer() // initialize timer
        if #available(iOS 10.0, *) {
            exportProgressBarTimer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { _ in
                // Get progress
                let progress = Float(exporter.progress)
                if progress < 1.01 {
                    let dict: [String: Float] = ["progress": progress]
                    NotificationCenter.default.post(name: Notification.Name("MergeVideosProgressBarPercentage"), object: nil, userInfo: dict)
                }
            }
        } else {
            exportProgressBarTimer = Timer.scheduledTimer(timeInterval: 0.5, target: self, selector: #selector(self.updateMergeVideosStatus), userInfo: nil, repeats: true)
        }

        // Do export
        exporter.exportAsynchronously(completionHandler: {
            DispatchQueue.main.async {
                UIApplication.shared.isIdleTimerDisabled = false
            }
            exportProgressBarTimer.invalidate()
            DispatchQueue.main.async {
                self.exportDidFinish(exporter: exporter, exportURL: exportURL, completion: completion)
            }
        })
    }
    @objc func updateMergeVideosStatus() {
        if self.exporter != nil {
            let progress = Float(exporter.progress)
            if progress < 1.01 {
                let dict: [String: Float] = ["progress": progress]
                NotificationCenter.default.post(name: Notification.Name("MergeVideosProgressBarPercentage"), object: nil, userInfo: dict)
            }
        }
    }
    func mergeVideoWithMusic(videoUrl: URL, backgroundMusic musicUrl: URL!, volumes: [Float], exportURL: URL, completion: @escaping Completion) -> Void {
        DispatchQueue.main.async {
            UIApplication.shared.isIdleTimerDisabled = true
        }
        var errors: Error!
        let video = AVAsset(url: videoUrl)
        var music: AVAsset!
        if musicUrl != nil {
            music = AVAsset(url: musicUrl)
        }

        // Silence sound (in case the video has no sound track)
        guard let silenceURL = Bundle.main.url(forResource: "silence_big", withExtension: "mp3") else { completion(nil, errors); return }
        let silenceAsset = AVAsset(url: silenceURL)
        let silenceSoundTrack = silenceAsset.tracks(withMediaType: AVMediaType.audio).first

        // Init composition
        let mixComposition = AVMutableComposition()

        // Get video track
        guard let videoTrack = video.tracks(withMediaType: AVMediaType.video).first else {
print("asset track of video found nil")
            completion(nil, nil)
            return
        }
        var outputSize = videoTrack.naturalSize.applying(videoTrack.preferredTransform)
        outputSize.width = fabs(outputSize.width)
        outputSize.height = fabs(outputSize.height)
        let insertTime = kCMTimeZero

        // Get the video's own audio track
        var videoAudioTrack: AVAssetTrack?
        if video.tracks(withMediaType: AVMediaType.audio).count > 0 {
            videoAudioTrack = video.tracks(withMediaType: AVMediaType.audio).first
        } else {
            videoAudioTrack = silenceSoundTrack
        }

        // Get the background music track
        var audioTrack: AVAssetTrack?
        if music != nil {
            if music.tracks(withMediaType: AVMediaType.audio).count > 0 {
                audioTrack = music.tracks(withMediaType: AVMediaType.audio).first
            }
        }

        // Init video & audio composition tracks
        guard let videoCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { completion(nil, errors); return }
        guard let videoAudioCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { completion(nil, errors); return }
        var audioCompositionTrack: AVMutableCompositionTrack?
        if music != nil {
            audioCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        }

        let audioMix = AVMutableAudioMix()
        var audioMixParam: [AVMutableAudioMixInputParameters] = []
        let videoAudioParam = AVMutableAudioMixInputParameters(track: videoAudioTrack)
        videoAudioParam.trackID = videoAudioCompositionTrack.trackID
        var musicParam: AVMutableAudioMixInputParameters!
        if music != nil {
            musicParam = AVMutableAudioMixInputParameters(track: audioTrack)
            if let audioCompositionTrack = audioCompositionTrack {
                musicParam.trackID = audioCompositionTrack.trackID
            }
        }

        // Set the final volumes of the video's audio and the music
        videoAudioParam.setVolume(volumes[0], at: kCMTimeZero)
        print("video volume: \(volumes[0])")
        if music != nil {
            musicParam.setVolume(volumes[1], at: kCMTimeZero)
            print("audio volume: \(volumes[1])")
        }

        // Add settings
        audioMixParam.append(videoAudioParam)
        if music != nil {
            audioMixParam.append(musicParam)
        }

        // Add parameters
        audioMix.inputParameters = audioMixParam

        let startTime = kCMTimeZero
        let duration = video.duration
        do {
            // Add video track to video composition at specific time
            try videoCompositionTrack.insertTimeRange(CMTimeRangeMake(startTime, duration),
                                                      of: videoTrack,
                                                      at: insertTime)

            // Add the video's audio track to its composition track at specific time
            if let videoAudioTrack = videoAudioTrack {
                try videoAudioCompositionTrack.insertTimeRange(CMTimeRangeMake(startTime, duration),
                                                               of: videoAudioTrack,
                                                               at: insertTime)
            }

            // Add music track to audio composition at specific time
            if music != nil, let audioTrack = audioTrack {
                try audioCompositionTrack?.insertTimeRange(CMTimeRangeMake(startTime, duration),
                                                           of: audioTrack,
                                                           at: insertTime)
            }
        } catch {
            print("Load track error \(error)")
        }
        // Init layer instruction
        let layerInstruction = videoCompositionInstructionForTrack(track: videoCompositionTrack,
                                                                   asset: video,
                                                                   standardSize: outputSize,
                                                                   atTime: insertTime)

        // Init main instruction
        let mainInstruction = AVMutableVideoCompositionInstruction()
        mainInstruction.timeRange = CMTimeRangeMake(insertTime, duration)
        mainInstruction.layerInstructions = [layerInstruction]

        // Init layer composition
        let layerComposition = AVMutableVideoComposition()
        layerComposition.instructions = [mainInstruction]
        layerComposition.frameDuration = CMTimeMake(1, 30)
        layerComposition.renderSize = outputSize

        // Remove any old file at the destination
        FileManager.default.removeItemIfExisted(exportURL)

        // Init exporter
        guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else {
errors = "exporter initialization failed" as? Error
print("exporter initialization failed")
completion(nil, errors)
return
}
exporter.outputURL = exportURL
exporter.outputFileType = AVFileType.mov
exporter.audioMix = audioMix
exporter.shouldOptimizeForNetworkUse = true
exporter.videoComposition = layerComposition
self.exporter = exporter
//show progress of export
var exportProgressBarTimer = Timer() // initialize timer
if #available(iOS 10.0, *) {
exportProgressBarTimer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { timer in
// Get Progress
let progress = Float(exporter.progress)
if (progress < 1.01) {
let dict:[String: Float] = ["progress": progress]
NotificationCenter.default.post(name: Notification.Name("AddBackgroundMusicProgressBarPercentage"), object: nil, userInfo: dict)
}
}
} else {
exportProgressBarTimer = Timer.scheduledTimer(timeInterval: 0.5, target: self, selector: #selector(self.updateAddBackgroundMusicStatus), userInfo: nil, repeats: true)
}
// Do export
exporter.exportAsynchronously(completionHandler: {
DispatchQueue.main.async {
UIApplication.shared.isIdleTimerDisabled = false
}
exportProgressBarTimer.invalidate()
DispatchQueue.main.async {
self.exportDidFinish(exporter: exporter, exportURL: exportURL, completion: completion)
}
})
}
@objc func updateAddBackgroundMusicStatus() {
if self.exporter != nil {
let progress = Float(exporter.progress)
if (progress < 1.01) {
let dict:[String: Float] = ["progress" : progress]
NotificationCenter.default.post(name: Notification.Name("AddBackgroundMusicProgressBarPercentage"), object: nil, userInfo: dict)
}
}
}
func stopExportSession() {
if exporter != nil {
exporter.cancelExport()
}
}
}
// MARK: - Private methods
extension Export {
    fileprivate func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
        var assetOrientation = UIImageOrientation.up
        var isPortrait = false
        if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
            assetOrientation = .right
            isPortrait = true
        } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
            assetOrientation = .left
            isPortrait = true
        } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
            assetOrientation = .up
        } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
            assetOrientation = .down
        }
        return (assetOrientation, isPortrait)
    }

    fileprivate func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset, standardSize: CGSize, atTime: CMTime) -> AVMutableVideoCompositionLayerInstruction {
        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        let assetTrack = asset.tracks(withMediaType: AVMediaType.video)[0]
        let assetSize = assetTrack.naturalSize
        let transform = assetTrack.preferredTransform
        let assetInfo = orientationFromTransform(transform: transform)
        let aspectFillRatio: CGFloat = 1
print("aspect ration \(aspectFillRatio)")
        if assetInfo.isPortrait {
            print("portrait")
            let scaleFactor = CGAffineTransform(scaleX: aspectFillRatio, y: aspectFillRatio)
            print("standard size width: \(standardSize.width/2)")
            print("standard size height: \(standardSize.height/2)")
            print("asset size width: \(assetSize.width/2)")
            print("asset size height: \(assetSize.height/2)")
            let posX = standardSize.width/2 - (assetSize.height * aspectFillRatio)/2
            let posY = standardSize.height/2 - (assetSize.width * aspectFillRatio)/2
            let moveFactor = CGAffineTransform(translationX: posX, y: posY)
            print("posx: \(posX)")
            print("posy: \(posY)")
            instruction.setTransform(assetTrack.preferredTransform.concatenating(scaleFactor).concatenating(moveFactor), at: atTime)
        } else {
            let scaleFactor = CGAffineTransform(scaleX: aspectFillRatio, y: aspectFillRatio)
            print("standard size width: \(standardSize.width/2)")
            print("standard size height: \(standardSize.height/2)")
            print("asset size width: \(assetSize.width/2)")
            print("asset size height: \(assetSize.height/2)")
            let posX = standardSize.width/2 - (assetSize.width * aspectFillRatio)/2
            let posY = standardSize.height/2 - (assetSize.height * aspectFillRatio)/2
            let moveFactor = CGAffineTransform(translationX: posX, y: posY)
            print("posx: \(posX)")
            print("posy: \(posY)")
            var concat = assetTrack.preferredTransform.concatenating(scaleFactor).concatenating(moveFactor)
            if assetInfo.orientation == .down {
                let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(Double.pi))
                concat = fixUpsideDown.concatenating(scaleFactor).concatenating(moveFactor)
            }
            instruction.setTransform(concat, at: atTime)
        }
        return instruction
    }

    fileprivate func exportDidFinish(exporter: AVAssetExportSession?, exportURL: URL, completion: @escaping Completion) -> Void {
        if exporter?.status == AVAssetExportSessionStatus.completed {
            print("Exported file: \(exportURL.absoluteString)")
            completion(exportURL, nil)
        } else if exporter?.status == AVAssetExportSessionStatus.failed {
            completion(exportURL, exporter?.error)
        }
    }
}
I expected the time-lapse video to work like a normal video, with no black screen.
Answer 0 (score: 0)
All assets should be loaded before they are composed. AVAsset has appropriate methods to preload asset data. Also, the composition and timing of the assets may be going wrong.
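For example, here is a minimal sketch of preloading each asset's "tracks" and "duration" keys with AVAsset's loadValuesAsynchronously(forKeys:) before running the merge. The preloadAndMerge wrapper, its parameters, and the animation: true choice are illustrative (not from the original code); only Export.shared.mergeVideos comes from the class above:

import AVFoundation

// Preload every asset, then merge only once all keys report .loaded.
func preloadAndMerge(urls: [URL], exportURL: URL,
                     completion: @escaping (URL?, Error?) -> Void) {
    let group = DispatchGroup()
    let keys = ["tracks", "duration"]
    var loadError: Error?

    for url in urls {
        let asset = AVAsset(url: url)
        group.enter()
        asset.loadValuesAsynchronously(forKeys: keys) {
            var error: NSError?
            for key in keys {
                // .loaded means the value can now be read without blocking
                if asset.statusOfValue(forKey: key, error: &error) != .loaded {
                    loadError = error // not synchronized; acceptable for a sketch
                }
            }
            group.leave()
        }
    }

    group.notify(queue: .main) {
        if let error = loadError {
            completion(nil, error)
            return
        }
        Export.shared.mergeVideos(arrayVideos: urls, animation: true,
                                  exportURL: exportURL, completion: completion)
    }
}

If the tracks still read as empty for a time-lapse asset after this, the problem is in reading the source file itself rather than in the composition timing.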