When I overlay an image on a video, the video quality drops noticeably. If I don't set a videoComposition on the export session, or if I use the passthrough preset, the quality is fine (but obviously there is no overlay).
I'm passing a local .mov URL into the function that adds the overlay, and I save the result to the camera roll with PHPhotoLibrary. A couple of helper functions handle the video transform and build its layer instruction.
It all looks straightforward, yet the exported video loses quality.
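For comparison, this is roughly what the export looks like in the case where quality stays intact — a minimal sketch of the passthrough export described above, with no videoComposition set. The function name, output URL, and completion handler here are placeholders, not part of the original code:

import AVFoundation

// Minimal sketch: export the asset as-is, without a video composition.
// With AVAssetExportPresetPassthrough the samples are copied rather than re-rendered,
// so quality is preserved, but no overlay can be drawn.
func exportWithoutOverlay(url: URL, outputURL: URL, completion: @escaping (AVAssetExportSession.Status) -> Void) {
    let asset = AVAsset(url: url)
    guard let exporter = AVAssetExportSession(asset: asset,
                                              presetName: AVAssetExportPresetPassthrough) else {
        completion(.failed)
        return
    }
    exporter.outputURL = outputURL
    exporter.outputFileType = .mov
    // No exporter.videoComposition here — the frames are not re-rendered.
    exporter.exportAsynchronously {
        completion(exporter.status)
    }
}

The full merge function that adds the overlay (and degrades quality) is below: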
func merge3(url: URL) {
let firstAsset = AVAsset(url: url)
// 1 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
let mixComposition = AVMutableComposition()
// 2 - Create two video tracks
guard
let firstTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
else {
return
}
do {
try firstTrack.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: firstAsset.duration),
of: firstAsset.tracks(withMediaType: AVMediaType.video)[0],
at: CMTime.zero)
} catch {
print("Failed to load first track")
return
}
let s = UIScreen.main.bounds
let imglogo = UIImage(named: "django")?.scaleImageToSize(newSize: CGSize(width: 250, height: 125))
let imglayer = CALayer()
imglayer.contents = imglogo?.cgImage
imglayer.frame = CGRect(x: s.width / 2 - 125, y: s.height / 2 - 67.5, width: 250, height: 125)
imglayer.opacity = 1.0
let videolayer = CALayer()
videolayer.frame = CGRect(x: 0, y: 0, width: s.width, height: s.height)
let parentlayer = CALayer()
parentlayer.frame = CGRect(x: 0, y: 0, width: s.width, height: s.height)
parentlayer.addSublayer(videolayer)
parentlayer.addSublayer(imglayer)
// 2.1
let mainInstruction = AVMutableVideoCompositionInstruction()
mainInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero,
duration: firstAsset.duration)
let layercomposition = AVMutableVideoComposition()
layercomposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)
layercomposition.renderSize = CGSize(width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.height)
// instruction for watermark
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: firstAsset.duration)
_ = mixComposition.tracks(withMediaType: AVMediaType.video)[0] as AVAssetTrack
let layerinstruction = VideoHelper.videoCompositionInstruction1(firstTrack, asset: firstAsset)
instruction.layerInstructions = [layerinstruction]
layercomposition.instructions = [instruction]
// 4 - Get path
guard let documentDirectory = FileManager.default.urls(for: .documentDirectory,
in: .userDomainMask).first else {
return
}
let dateFormatter = DateFormatter()
dateFormatter.dateStyle = .long
dateFormatter.timeStyle = .short
let date = dateFormatter.string(from: Date())
let url = documentDirectory.appendingPathComponent("mergeVideo-\(date).mov")
// 5 - Create Exporter
guard let exporter = AVAssetExportSession(asset: mixComposition,
presetName: AVAssetExportPresetHighestQuality) else {
return
}
exporter.outputURL = url
exporter.outputFileType = AVFileType.mov
exporter.shouldOptimizeForNetworkUse = true
exporter.videoComposition = layercomposition
// 6 - Perform the Export
exporter.exportAsynchronously() {
DispatchQueue.main.async {
self.exportDidFinish(exporter)
}
}
}
func exportDidFinish(_ session: AVAssetExportSession) {
guard
session.status == AVAssetExportSession.Status.completed,
let outputURL = session.outputURL
else {
return
}
let saveVideoToPhotos = {
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL)
}) { saved, error in
let success = saved && (error == nil)
let title = success ? "Success" : "Error"
let message = success ? "Video saved" : "Failed to save video"
// Present the alert on the main thread; this completion handler may run on a background queue.
DispatchQueue.main.async {
let alert = UIAlertController(title: title, message: message, preferredStyle: .alert)
alert.addAction(UIAlertAction(title: "OK", style: .cancel, handler: nil))
self.present(alert, animated: true, completion: nil)
}
}
}
// Ensure permission to access Photo Library
if PHPhotoLibrary.authorizationStatus() != .authorized {
PHPhotoLibrary.requestAuthorization { status in
if status == .authorized {
saveVideoToPhotos()
}
}
} else {
saveVideoToPhotos()
}
}
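// Builds the layer instruction that scales and positions the video track to fit the screen width, taking the asset's orientation into account.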
static func videoCompositionInstruction1(_ track: AVCompositionTrack, asset: AVAsset)
-> AVMutableVideoCompositionLayerInstruction {
let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
let assetTrack = asset.tracks(withMediaType: .video)[0]
let transform = assetTrack.preferredTransform
let assetInfo = orientationFromTransform(transform)
var scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.width
if assetInfo.isPortrait { // not hit
scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.height
let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
instruction.setTransform(assetTrack.preferredTransform.concatenating(scaleFactor), at: CMTime.zero)
} else { // hit
let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
var concat = assetTrack.preferredTransform.concatenating(scaleFactor)
.concatenating(CGAffineTransform(translationX: 0, y: UIScreen.main.bounds.width / 4))
if assetInfo.orientation == .down { // not hit
let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(Double.pi))
let windowBounds = UIScreen.main.bounds
let yFix = assetTrack.naturalSize.height + windowBounds.height
let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width, y: yFix)
concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor)
}
instruction.setTransform(concat, at: CMTime.zero)
}
return instruction
}
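// Maps the track's preferred transform to a UIImage.Orientation and a portrait flag.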
static func orientationFromTransform(_ transform: CGAffineTransform)
-> (orientation: UIImage.Orientation, isPortrait: Bool) {
var assetOrientation = UIImage.Orientation.up
var isPortrait = false
if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
assetOrientation = .right
isPortrait = true
} else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
assetOrientation = .left
isPortrait = true
} else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
assetOrientation = .up
} else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
assetOrientation = .down
}
return (assetOrientation, isPortrait)
}
Answer 0 (score: 0)
You are setting
layercomposition.renderSize = CGSize(width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.height)
when it should be
layercomposition.renderSize = yourAsset.tracks(withMediaType: AVMediaType.video)[0].naturalSize
The first line sets the render resolution to the screen size instead of the actual size of the source video. The second corrects that by using the source video's own resolution.
Think of it this way: you don't want the output resolution tied to the screen, which is quite small. You want it to match the original video's size, or at least a standard video resolution.
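Putting that together, the render size and the layer frames should be derived from the video track rather than from the screen. A sketch of how that part of merge3 could look, assuming the firstAsset name and "django" logo from the question (the watermark dimensions are illustrative):

let videoTrack = firstAsset.tracks(withMediaType: .video)[0]
let naturalSize = videoTrack.naturalSize

// Size the video and parent layers to the video's own resolution, not the screen.
let videolayer = CALayer()
videolayer.frame = CGRect(origin: .zero, size: naturalSize)
let parentlayer = CALayer()
parentlayer.frame = CGRect(origin: .zero, size: naturalSize)

// Position the watermark relative to the video's dimensions.
let imglayer = CALayer()
imglayer.contents = UIImage(named: "django")?.cgImage
imglayer.frame = CGRect(x: naturalSize.width / 2 - 125,
                        y: naturalSize.height / 2 - 62.5,
                        width: 250, height: 125)
parentlayer.addSublayer(videolayer)
parentlayer.addSublayer(imglayer)

let layercomposition = AVMutableVideoComposition()
layercomposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
layercomposition.renderSize = naturalSize   // render at the source video's resolution
layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer,
                                                                     in: parentlayer)

Note that naturalSize is reported before rotation is applied, so if the source can be portrait you may need to swap width and height based on the track's preferredTransform, and the scaling in videoCompositionInstruction1 should then be computed against the render size rather than the screen bounds.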