I am new to Swift. I am trying to add a watermark, following code from SO. My original video resolution is 1280 x 720, but the output video is a scaled-down version.
Here are the before and after images.
This is the function I use to create the watermark:
private func watermark(video videoAsset: AVAsset, watermarkText text: String!, image: CGImage!, saveToLibrary flag: Bool, completion: ((_ status: AVAssetExportSessionStatus?, _ session: AVAssetExportSession?, _ outputURL: URL?) -> ())?) {
    // Requires: import UIKit, AVFoundation, Photos
    DispatchQueue.global(qos: DispatchQoS.QoSClass.default).async {
        // Composition that holds the source video track.
        let mixComposition = AVMutableComposition()
        let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        let clipVideoTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
        do {
            try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: clipVideoTrack, at: kCMTimeZero)
        } catch {
            print(error.localizedDescription)
        }

        let videoSize = clipVideoTrack.naturalSize
        print("Video size", videoSize.height) // 720
        print("Video size", videoSize.width) // 1280

        // Layer tree: the parent layer contains the video layer plus the overlay.
        let parentLayer = CALayer()
        let videoLayer = CALayer()
        parentLayer.frame = CGRect(x: 0.0, y: 0.0, width: videoSize.width, height: videoSize.height)
        videoLayer.frame = CGRect(x: 0.0, y: 0.0, width: videoSize.width, height: videoSize.height)
        parentLayer.addSublayer(videoLayer)

        if text != nil {
            let titleLayer = CATextLayer()
            titleLayer.backgroundColor = UIColor.red.cgColor
            titleLayer.string = text
            titleLayer.font = "Helvetica" as CFTypeRef
            titleLayer.fontSize = 15
            titleLayer.alignmentMode = kCAAlignmentCenter
            titleLayer.bounds = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
            parentLayer.addSublayer(titleLayer)
        } else if image != nil {
            let imageLayer = CALayer()
            imageLayer.contents = image
            let width: CGFloat = (self.imageView.image?.size.width)!
            let height: CGFloat = (self.imageView.image?.size.height)!
            print("Video size", height) // 720
            print("Video size", width) // 1280
            imageLayer.frame = CGRect(x: 0.0, y: 0.0, width: width, height: height)
            imageLayer.opacity = 0.65
            parentLayer.addSublayer(imageLayer)
        }

        // Video composition that renders the layer tree on top of the video.
        let videoComp = AVMutableVideoComposition()
        videoComp.renderSize = videoSize
        videoComp.frameDuration = CMTimeMake(1, Int32(clipVideoTrack.nominalFrameRate))
        videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)

        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
        _ = mixComposition.tracks(withMediaType: AVMediaTypeVideo)[0] as AVAssetTrack
        let layerInstruction = self.videoCompositionInstructionForTrack(track: compositionVideoTrack, asset: videoAsset)
        instruction.layerInstructions = [layerInstruction]
        videoComp.instructions = [instruction]

        // Export to a date-stamped file in the documents directory.
        let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
        let dateFormatter = DateFormatter()
        dateFormatter.dateStyle = .long
        dateFormatter.timeStyle = .short
        let date = dateFormatter.string(from: Date())
        let url = URL(fileURLWithPath: documentDirectory).appendingPathComponent("watermarkVideo-\(date).mov")

        let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
        exporter?.outputURL = url
        exporter?.outputFileType = AVFileTypeQuickTimeMovie
        exporter?.shouldOptimizeForNetworkUse = true
        exporter?.videoComposition = videoComp

        exporter?.exportAsynchronously() {
            DispatchQueue.main.async {
                if exporter?.status == AVAssetExportSessionStatus.completed {
                    let outputURL = exporter?.outputURL
                    if flag {
                        // Save to the photo library.
                        if UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL!.path) {
                            PHPhotoLibrary.shared().performChanges({
                                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL!)
                            }) { saved, error in
                                if saved {
                                    completion!(AVAssetExportSessionStatus.completed, exporter, outputURL)
                                }
                            }
                        }
                    } else {
                        completion!(AVAssetExportSessionStatus.completed, exporter, outputURL)
                    }
                } else {
                    // Error
                    completion!(exporter?.status, exporter, nil)
                }
            }
        }
    }
}
While the watermark image has the correct size, the video is scaled down.
Answer 0 (Score: 3)
You can try this function:
private func watermark(video videoAsset: AVAsset, watermarkText text: String!, image: CGImage!, saveToLibrary flag: Bool, completion: ((_ status: AVAssetExportSessionStatus?, _ session: AVAssetExportSession?, _ outputURL: URL?) -> ())?) {
    DispatchQueue.global(qos: DispatchQoS.QoSClass.default).async {
        let mixComposition = AVMutableComposition()
        let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        let clipVideoTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0] as AVAssetTrack
        do {
            try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: clipVideoTrack, at: kCMTimeZero)
        } catch {
            print(error.localizedDescription)
        }

        let videoSize = clipVideoTrack.naturalSize

        let parentLayer = CALayer()
        let videoLayer = CALayer()
        parentLayer.frame = CGRect(x: 0.0, y: 0.0, width: videoSize.width, height: videoSize.height)
        videoLayer.frame = CGRect(x: 0.0, y: 0.0, width: videoSize.width, height: videoSize.height)
        parentLayer.addSublayer(videoLayer)

        // if text != nil {
        //     let titleLayer = CATextLayer()
        //     titleLayer.backgroundColor = UIColor.red.cgColor
        //     titleLayer.string = text
        //     titleLayer.font = "Helvetica" as CFTypeRef
        //     titleLayer.fontSize = 15
        //     titleLayer.alignmentMode = kCAAlignmentCenter
        //     titleLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
        //     parentLayer.addSublayer(titleLayer)
        // } else
        if image != nil {
            let imageLayer = CALayer()
            imageLayer.contents = image
            let width: CGFloat = (self.imageView.image?.size.width)!
            let height: CGFloat = (self.imageView.image?.size.height)!

            print("Video size", height)
            print("Video size", width)
            imageLayer.frame = CGRect(x: 0, y: 0, width: width, height: height)
            // imageLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
            imageLayer.opacity = 1
            parentLayer.addSublayer(imageLayer)
        }

        let videoComp = AVMutableVideoComposition()
        videoComp.renderSize = videoSize
        videoComp.frameDuration = CMTimeMake(1, Int32(clipVideoTrack.nominalFrameRate))
        videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)

        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
        let videotrack = mixComposition.tracks(withMediaType: AVMediaTypeVideo)[0] as AVAssetTrack
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
        // let layerInstruction = self.videoCompositionInstructionForTrack(track: compositionVideoTrack, asset: videoAsset)
        instruction.layerInstructions = [layerInstruction]
        videoComp.instructions = [instruction]

        let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
        let dateFormatter = DateFormatter()
        dateFormatter.dateStyle = .long
        dateFormatter.timeStyle = .short
        let date = dateFormatter.string(from: Date())
        let url = URL(fileURLWithPath: documentDirectory).appendingPathComponent("watermarkVideo-\(date).mp4")

        guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
        exporter.videoComposition = videoComp
        exporter.outputFileType = AVFileTypeMPEG4
        exporter.outputURL = url

        exporter.exportAsynchronously() {
            DispatchQueue.main.async {
                if exporter.status == AVAssetExportSessionStatus.completed {
                    let outputURL = exporter.outputURL
                    if flag {
                        // Save to library
                        // let library = ALAssetsLibrary()
                        if UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL!.path) {
                            PHPhotoLibrary.shared().performChanges({
                                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL!)
                            }) { saved, error in
                                if saved {
                                    completion!(AVAssetExportSessionStatus.completed, exporter, outputURL)
                                }
                            }
                        }
                        // if library.videoAtPathIs(compatibleWithSavedPhotosAlbum: outputURL) {
                        //     library.writeVideoAtPathToSavedPhotosAlbum(outputURL,
                        //         completionBlock: { (assetURL: NSURL!, error: NSError!) -> Void in
                        //             completion!(AVAssetExportSessionStatus.Completed, exporter, outputURL)
                        //         })
                        // }
                    } else {
                        completion!(AVAssetExportSessionStatus.completed, exporter, outputURL)
                    }
                } else {
                    // Error
                    completion!(exporter.status, exporter, nil)
                }
            }
        }
    }
}
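For reference, a hypothetical call site for this function might look like the sketch below; someLocalVideoURL, the imageView, and the completion handling are illustrative placeholders, not part of the original answer.
let videoAsset = AVAsset(url: someLocalVideoURL) // placeholder: URL of a local video file
watermark(video: videoAsset,
          watermarkText: nil,
          image: imageView.image?.cgImage,
          saveToLibrary: true) { status, session, outputURL in
    if status == .completed {
        print("Watermarked video written to \(String(describing: outputURL))")
    }
}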
Answer 1 (Score: 2)
The code above that creates the watermarked video does not seem to be the cause of the smaller output resolution.
Problem
The resolution depends on which AVAsset is passed into the watermark method.
Example: Typically a UIImagePickerController is used. There is the delegate method
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : Any])
In it, one often sees something like this:
let url = info[UIImagePickerControllerMediaURL] as? URL
let videoAsset = AVAsset(url: url!)
self.watermark(video: videoAsset, watermarkText: nil, image: self.imageView.image?.cgImage ...
However, with the lines above, a downscaled version of the video is used as input: for example, instead of a 1920x1080 video, the video size is reduced to 1280x720.
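To verify which resolution actually arrives, one can print the natural size of the incoming asset's video track before watermarking; this diagnostic sketch (not part of the original answer) assumes a videoAsset constant as above:
if let track = videoAsset.tracks(withMediaType: AVMediaTypeVideo).first {
    print("Input resolution:", track.naturalSize) // e.g. 1280.0 x 720.0 instead of the original 1920x1080
}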
Solution
A method to get the AVAsset for a PHAsset could look like this:
private func videoAsset(for asset: PHAsset, completion: @escaping (AVAsset?) -> Void) {
    let requestOptions = PHVideoRequestOptions()
    requestOptions.version = .original
    PHImageManager.default().requestAVAsset(forVideo: asset, options: requestOptions, resultHandler: {
        (avAsset, avAudioMix, info) in
        completion(avAsset)
    })
}
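As a side note, if the selected video only exists in iCloud, it is presumably also necessary to allow network access on the request options inside this method; the extra line below is an assumption, not part of the original answer:
requestOptions.isNetworkAccessAllowed = true // assumed: lets PHImageManager download the original from iCloud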
Where does the PHAsset come from? It can also be obtained in the didFinishPickingMediaWithInfo method by using UIImagePickerControllerPHAsset:
let asset = info[UIImagePickerControllerPHAsset] as? PHAsset
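Note that, in practice, info[UIImagePickerControllerPHAsset] is only populated when the app is authorized for photo library access (which also requires an NSPhotoLibraryUsageDescription entry in Info.plist); the authorization check below is an illustrative addition, not part of the original answer:
PHPhotoLibrary.requestAuthorization { status in
    // Present the picker only after access is granted; otherwise the PHAsset may be nil.
    print("Photo library authorization:", status.rawValue)
}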
Quick test
For a quick test, one can use:
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : Any]) {
    if let asset = info[UIImagePickerControllerPHAsset] as? PHAsset {
        picker.dismiss(animated: true, completion: { [weak self] in
            self?.videoAsset(for: asset, completion: { (avAsset) in
                if let videoAsset = avAsset {
                    DispatchQueue.main.async {
                        self?.watermark(video: videoAsset, watermarkText: nil, image: self?.imageView.image?.cgImage, saveToLibrary: true) { (exportStat: AVAssetExportSessionStatus?, session: AVAssetExportSession?, url: URL?) in
                            print("url: \(String(describing: url?.debugDescription))")
                        }
                    }
                }
            })
        })
    }
}
The result is a video in the original resolution with a watermark in the lower left corner; see the screenshot of the resulting video: