I have a local URL to a high-quality video, and I want to add an overlay to it. If I simply save the video from the URL, the quality is very good. As soon as I try to add the overlay layer, the quality becomes very poor. If I set the export preset to "Passthrough", the quality stays high, but then the overlay is gone. Even if I don't overlay anything and just export the video, the quality degrades.
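For reference, a minimal sketch I use for debugging (not part of the project code, assuming the same url) that compares the source track's pixel size with the screen bounds used as the render size further down:

import AVFoundation
import UIKit

// Sketch: compare the source video's pixel size with the screen size.
// naturalSize is in pixels; UIScreen.main.bounds is in points, so for a
// high-quality source they usually differ by a large factor.
func logDimensions(url: URL) {
    let asset = AVAsset(url: url)
    guard let track = asset.tracks(withMediaType: .video).first else { return }
    print("source pixels:", track.naturalSize)
    print("screen points:", UIScreen.main.bounds.size)
}

Here is my merge/export code: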
import UIKit
import AVFoundation
import Photos

func merge2(url: URL) {
    let composition = AVMutableComposition()
    // guard let vidAsset = firstAsset else { return }
    let vidAsset = AVAsset(url: url)

    // Get the source video track.
    let vtrack = vidAsset.tracks(withMediaType: AVMediaType.video)
    let videoTrack: AVAssetTrack = vtrack[0]
    guard let compositionvideoTrack: AVMutableCompositionTrack = composition.addMutableTrack(
        withMediaType: AVMediaType.video,
        preferredTrackID: kCMPersistentTrackID_Invalid) else { return }
    do {
        try compositionvideoTrack.insertTimeRange(
            CMTimeRangeMake(start: CMTime.zero, duration: vidAsset.duration),
            of: videoTrack,
            at: CMTime.zero)
    } catch {
        print("Failed to load first track")
        return
    }
    compositionvideoTrack.preferredTransform = videoTrack.preferredTransform
    // Watermark effect: animate a sequence of images named "frame-0"..."frame-50".
    let s = UIScreen.main.bounds
    var frames = [UIImage]()
    for i in 0...50 {
        guard let image = UIImage(named: "frame-\(i)") else { continue }
        frames.append(image)
    }
    guard let firstFrame = frames.first else { return } // avoid crashing if no images load

    let watermarkLayer = CALayer()
    watermarkLayer.contents = firstFrame.cgImage
    watermarkLayer.add(
        VideoWatermarker.getFramesAnimation(frames: frames, duration: 2.5), forKey: nil)
    let frameAspectRatio = firstFrame.size.height / firstFrame.size.width
    let newHeight = s.height / 3
    let newWidth = newHeight / frameAspectRatio
    watermarkLayer.frame = CGRect(
        x: s.width - newWidth, y: 0, width: newWidth, height: newHeight)
    watermarkLayer.opacity = 0.85
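    // Note: all of these layer frames are expressed in screen-point coordinates,
    // because the render size below is set to UIScreen.main.bounds. If the render
    // size were changed to the video's pixel size, these frames would have to be
    // recomputed for that coordinate space.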
    // Logo image layer.
    let imglogo = UIImage(named: "logo1.png")
    let imglayer = CALayer()
    imglayer.contents = imglogo?.cgImage
    imglayer.frame = CGRect(x: s.width / 2 - 50, y: s.height / 2 - 50, width: 100, height: 100)
    imglayer.opacity = 0.6

    // Create the text layer.
    let titleLayer = CATextLayer()
    titleLayer.backgroundColor = UIColor.green.cgColor
    titleLayer.string = "Dummy text"
    titleLayer.font = UIFont(name: "Helvetica", size: 28)
    titleLayer.shadowOpacity = 0.5
    titleLayer.alignmentMode = CATextLayerAlignmentMode.center
    titleLayer.frame = CGRect(x: 0, y: 50, width: s.width / 2, height: 35)
    // Video layer and parent layer for the Core Animation tool.
    let videolayer = CALayer()
    videolayer.frame = CGRect(x: 0, y: 0, width: s.width, height: s.height)
    let parentlayer = CALayer()
    parentlayer.frame = CGRect(x: 0, y: 0, width: s.width, height: s.height)
    parentlayer.addSublayer(videolayer)
    parentlayer.addSublayer(imglayer)
    parentlayer.addSublayer(titleLayer)
    parentlayer.addSublayer(watermarkLayer)

    let layercomposition = AVMutableVideoComposition()
    layercomposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
    layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(
        postProcessingAsVideoLayer: videolayer, in: parentlayer)
    layercomposition.renderSize = CGSize(width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.height)
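    // Assumption worth checking: renderSize is in screen points here (e.g. 390x844),
    // while a high-quality source is typically much larger in pixels (e.g. 1920x1080
    // or 3840x2160), so the export re-renders the video at a much smaller size.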
    // Instruction for the watermark composition.
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: composition.duration)
    let layerinstruction = VideoHelper.videoCompositionInstruction1(compositionvideoTrack, asset: vidAsset)
    instruction.layerInstructions = [layerinstruction]
    layercomposition.instructions = [instruction]
    // Export destination in the Documents directory.
    guard let documentDirectory = FileManager.default.urls(for: .documentDirectory,
                                                           in: .userDomainMask).first else {
        return
    }
    let dateFormatter = DateFormatter()
    dateFormatter.dateStyle = .long
    dateFormatter.timeStyle = .short
    let date = dateFormatter.string(from: Date())
    let url = documentDirectory.appendingPathComponent("mergeVideo-\(date).mov")

    // 5 - Create the exporter.
    guard let exporter = AVAssetExportSession(asset: composition,
                                              presetName: AVAssetExportPresetHighestQuality) else {
        return
    }
    exporter.outputURL = url
    exporter.outputFileType = AVFileType.mov
    exporter.shouldOptimizeForNetworkUse = true
    exporter.videoComposition = layercomposition
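    // Note on presets (from my experiments above): AVAssetExportPresetPassthrough
    // copies the source samples without re-encoding, which is presumably why the
    // videoComposition (and thus the overlay) is not applied with it, while
    // AVAssetExportPresetHighestQuality re-encodes at the renderSize set above.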
    // 6 - Perform the export.
    exporter.exportAsynchronously {
        DispatchQueue.main.async {
            self.exportDidFinish(exporter)
        }
    }
}
func exportDidFinish(_ session: AVAssetExportSession) {
    guard
        session.status == AVAssetExportSession.Status.completed,
        let outputURL = session.outputURL
    else {
        return
    }

    let saveVideoToPhotos = {
        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL)
        }) { saved, error in
            let success = saved && (error == nil)
            let title = success ? "Success" : "Error"
            let message = success ? "Video saved" : "Failed to save video"
            // performChanges calls back on an arbitrary queue, so hop to main for UI.
            DispatchQueue.main.async {
                let alert = UIAlertController(title: title, message: message, preferredStyle: .alert)
                alert.addAction(UIAlertAction(title: "OK", style: .cancel, handler: nil))
                self.present(alert, animated: true, completion: nil)
            }
        }
    }
    // Ensure permission to access the photo library.
    if PHPhotoLibrary.authorizationStatus() != .authorized {
        PHPhotoLibrary.requestAuthorization { status in
            if status == .authorized {
                saveVideoToPhotos()
            }
        }
    } else {
        saveVideoToPhotos()
    }
}
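For comparison, here is a sketch of what I understand would keep the source resolution: deriving the render size from the track instead of the screen (assuming videoTrack and layercomposition from merge2; the layer frames above would then need to be laid out in pixel coordinates rather than screen points):

// Sketch: size the composition from the source track, not the screen.
// CGSize.applying ignores translation, so only the rotation/scale matters here.
let natural = videoTrack.naturalSize
let transformed = natural.applying(videoTrack.preferredTransform)
layercomposition.renderSize = CGSize(width: abs(transformed.width),
                                     height: abs(transformed.height))

The helper functions used above are: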
static func orientationFromTransform(_ transform: CGAffineTransform)
    -> (orientation: UIImage.Orientation, isPortrait: Bool) {
    var assetOrientation = UIImage.Orientation.up
    var isPortrait = false
    if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
        assetOrientation = .right
        isPortrait = true
    } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
        assetOrientation = .left
        isPortrait = true
    } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
        assetOrientation = .up
    } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
        assetOrientation = .down
    }
    return (assetOrientation, isPortrait)
}
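As a sanity check (my own snippet, not in the project), a typical portrait iPhone capture carries a 90° preferredTransform and is classified like this:

// Hypothetical check: a 90-degree rotation as recorded by a portrait capture.
let portrait = CGAffineTransform(a: 0, b: 1, c: -1, d: 0, tx: 0, ty: 0)
// orientationFromTransform(portrait) returns (.right, isPortrait: true)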
static func videoCompositionInstruction1(_ track: AVCompositionTrack, asset: AVAsset)
    -> AVMutableVideoCompositionLayerInstruction {
    let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
    let assetTrack = asset.tracks(withMediaType: .video)[0]
    let transform = assetTrack.preferredTransform
    let assetInfo = orientationFromTransform(transform)

    var scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.width
    if assetInfo.isPortrait { // not hit
        scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.height
        let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
        instruction.setTransform(assetTrack.preferredTransform.concatenating(scaleFactor), at: CMTime.zero)
    } else { // hit
        let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
        var concat = assetTrack.preferredTransform.concatenating(scaleFactor)
            .concatenating(CGAffineTransform(translationX: 0, y: UIScreen.main.bounds.width / 4))
        if assetInfo.orientation == .down { // not hit
            let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(Double.pi))
            let windowBounds = UIScreen.main.bounds
            let yFix = assetTrack.naturalSize.height + windowBounds.height
            let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width, y: yFix)
            concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor)
        }
        instruction.setTransform(concat, at: CMTime.zero)
    }
    return instruction
}
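// Note: scaleToFitRatio above divides screen points by naturalSize pixels, so the
// layer instruction scales the video down to the screen-sized render target; this
// matches the renderSize choice in merge2 and, I suspect, the quality drop.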
static func getFramesAnimation(frames: [UIImage], duration: CFTimeInterval) -> CAAnimation {
    let animation = CAKeyframeAnimation(keyPath: #keyPath(CALayer.contents))
    // .discrete shows each keyframe as-is, with no interpolation between frames.
    animation.calculationMode = CAAnimationCalculationMode.discrete
    animation.duration = duration
    animation.values = frames.compactMap { $0.cgImage } // avoid force-unwrapping cgImage
    animation.repeatCount = Float(frames.count)
    animation.isRemovedOnCompletion = false
    animation.fillMode = CAMediaTimingFillMode.forwards
    // AVCoreAnimationBeginTimeAtZero is required for animations rendered during export.
    animation.beginTime = AVCoreAnimationBeginTimeAtZero
    return animation
}