After saving a selfie video, the saved file's dimensions change. The problem only occurs when recording with the front camera.
How do I correctly support portrait and landscape orientations and have them reflected correctly in the exported video file? Here is the complete source I wrote:
// File to composite
let asset = AVURLAsset(url: videoURL as URL)
let composition = AVMutableComposition()
composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)
let clipVideoTrack = asset.tracks(withMediaType: AVMediaType.video)[0]

// Rotate to portrait
let transformer = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
var videoAssetOrientation_: UIImageOrientation = .up
var isVideoAssetPortrait_: Bool = false
let videoTransform: CGAffineTransform = clipVideoTrack.preferredTransform

// Classify the track's orientation from its preferred transform
if videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0 {
    videoAssetOrientation_ = .right
    isVideoAssetPortrait_ = true
}
if videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0 {
    videoAssetOrientation_ = .left
    isVideoAssetPortrait_ = true
}
if videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0 {
    videoAssetOrientation_ = .up
}
if videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0 {
    videoAssetOrientation_ = .down
}
transformer.setTransform(clipVideoTrack.preferredTransform, at: kCMTimeZero)

// Portrait tracks store their natural size in landscape, so swap width and height
var naturalSize = CGSize()
if isVideoAssetPortrait_ {
    naturalSize = CGSize(width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.width)
} else {
    naturalSize = clipVideoTrack.naturalSize
}
let renderWidth = naturalSize.width
let renderHeight = naturalSize.height

// Layer tree: the video layer plus a watermark layer on top
let parentlayer = CALayer()
let videoLayer = CALayer()
let watermarkLayer = CALayer()
watermarkLayer.contents = tempImageView.image?.cgImage
watermarkLayer.opacity = 1.0
parentlayer.frame = CGRect(x: 0, y: 0, width: renderWidth, height: renderHeight)
videoLayer.frame = CGRect(x: 0, y: 0, width: renderWidth, height: renderHeight)
watermarkLayer.frame = CGRect(x: 0, y: 0, width: renderWidth, height: renderHeight)
parentlayer.addSublayer(videoLayer)
parentlayer.addSublayer(watermarkLayer)

let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = CGSize(width: renderWidth, height: renderHeight)
videoComposition.frameDuration = CMTimeMake(1, 30)
videoComposition.renderScale = 1.0
// Add watermark to video
videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayers: [videoLayer], in: parentlayer)

let instruction = AVMutableVideoCompositionInstruction()
// Note: this hardcodes a 60-second range; asset.duration would cover the whole clip
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30))
instruction.layerInstructions = [transformer]
videoComposition.instructions = [instruction]
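For reference, a compact version of the same orientation check, written as a helper that classifies a track from its preferredTransform. This is only a sketch; the orientation(of:) name is an assumption for illustration, not part of the original post.

// Assumed helper, not from the original post; assumes import AVFoundation and import UIKit.
// Condenses the if-chain above into a single lookup.
func orientation(of track: AVAssetTrack) -> (orientation: UIImageOrientation, isPortrait: Bool) {
    let t = track.preferredTransform
    if t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0 { return (.right, true) }  // rotated 90°
    if t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0 { return (.left, true) }   // rotated 270°
    if t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0 { return (.down, false) } // rotated 180°
    return (.up, false)                                                             // identity: landscape
}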
Here is how the video looks when I record it:
Answer 0 (score: 0)
First, declare a flag:
var switchCam = false
Then, when the camera is switched to the front, set:
switchCam = true
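The flag has to be flipped wherever your camera toggle lives. A minimal sketch, assuming a button action named switchCameraTapped and a usingFrontCamera state variable (both names are placeholders, not from the original answer):

var usingFrontCamera = false // hypothetical state variable, not in the original answer

@IBAction func switchCameraTapped(_ sender: UIButton) {
    usingFrontCamera = !usingFrontCamera // flips each time the user switches cameras
    switchCam = usingFrontCamera         // front camera active -> mirror on save
}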
In the photo editor class, write:
override public func viewDidLoad() {
    super.viewDidLoad()
    // Mirror the preview container horizontally for front-camera footage
    if switchCam {
        videoViewContainer.transform = CGAffineTransform(scaleX: -1, y: 1)
    } else {
        videoViewContainer.transform = CGAffineTransform(scaleX: 1, y: 1)
    }
}
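This mirroring matches what the user saw while recording: the live front-camera preview is displayed mirrored, but the recorded track is not, so the editor flips the preview container here, and the export code below bakes the same flip into the saved file.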
Then save the video:
// MARK: - Save video to the photo library
func convertVideoAndSaveTophotoLibrary(videoURL: URL) {
    let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    // Use .path (not .absoluteString) so FileManager gets a plain file-system path
    let myDocumentPath = documentsDirectory.appendingPathComponent("temp.mp4").path
    let filePath = documentsDirectory.appendingPathComponent("video.mp4")
    deleteFile(filePath: filePath as NSURL) // helper defined elsewhere in this class
    // If the temp file already exists, remove the previous one
    if FileManager.default.fileExists(atPath: myDocumentPath) {
        do {
            try FileManager.default.removeItem(atPath: myDocumentPath)
        } catch let error {
            print(error)
        }
    }
    // File to composite
    let asset = AVURLAsset(url: videoURL as URL)
    let composition = AVMutableComposition()
    composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)
    let clipVideoTrack = asset.tracks(withMediaType: AVMediaType.video)[0]

    // Rotate to portrait
    let transformer = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
    var videoAssetOrientation_: UIImageOrientation = .up
    var isVideoAssetPortrait_: Bool = false
    let videoTransform: CGAffineTransform = clipVideoTrack.preferredTransform

    // Classify the track's orientation from its preferred transform
    if videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0 {
        videoAssetOrientation_ = .right
        isVideoAssetPortrait_ = true
    }
    if videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0 {
        videoAssetOrientation_ = .left
        isVideoAssetPortrait_ = true
    }
    if videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0 {
        videoAssetOrientation_ = .up
    }
    if videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0 {
        videoAssetOrientation_ = .down
    }
    transformer.setTransform(clipVideoTrack.preferredTransform, at: kCMTimeZero)

    // Portrait tracks store their natural size in landscape, so swap width and height
    var naturalSize = CGSize()
    if isVideoAssetPortrait_ {
        naturalSize = CGSize(width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.width)
    } else {
        naturalSize = clipVideoTrack.naturalSize
    }
    let renderWidth = naturalSize.width
    let renderHeight = naturalSize.height

    // Layer tree: the video layer plus a watermark layer on top
    let parentlayer = CALayer()
    let videoLayer = CALayer()
    let watermarkLayer = CALayer()
    watermarkLayer.contents = self.tempImageView.image?.cgImage
    watermarkLayer.opacity = 1.0
    parentlayer.frame = CGRect(x: 0, y: 0, width: renderWidth, height: renderHeight)
    videoLayer.frame = CGRect(x: 0, y: 0, width: renderWidth, height: renderHeight)
    watermarkLayer.frame = CGRect(x: 0, y: 0, width: renderWidth, height: renderHeight)
    parentlayer.addSublayer(videoLayer)
    parentlayer.addSublayer(watermarkLayer)

    let videoComposition = AVMutableVideoComposition()
    videoComposition.renderSize = CGSize(width: renderWidth, height: renderHeight)
    videoComposition.frameDuration = CMTimeMake(1, 30)
    videoComposition.renderScale = 1.0
    // Add watermark to video
    videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayers: [videoLayer], in: parentlayer)
    if switchCam {
        // Front camera: un-mirror the track, then rotate it to portrait
        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30))
        // Flip horizontally, translate back into frame, then rotate 90°
        var transform = CGAffineTransform(scaleX: -1.0, y: 1.0)
        transform = transform.translatedBy(x: -clipVideoTrack.naturalSize.width, y: 0.0)
        transform = transform.rotated(by: CGFloat(Double.pi / 2))
        transform = transform.translatedBy(x: 0.0, y: -clipVideoTrack.naturalSize.width)
        transformer.setTransform(transform, at: kCMTimeZero)
        instruction.layerInstructions = [transformer]
        videoComposition.instructions = [instruction]
    } else {
        // Back camera: the preferredTransform set earlier is already correct
        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30))
        instruction.layerInstructions = [transformer]
        videoComposition.instructions = [instruction]
    }
    let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)
    // Note: the output path uses a .mp4 extension; AVFileType.mp4 would match it
    exporter?.outputFileType = AVFileType.mov
    exporter?.outputURL = filePath
    exporter?.videoComposition = videoComposition
    exporter?.exportAsynchronously(completionHandler: { () -> Void in
        if exporter?.status == .completed {
            let outputURL: URL? = exporter?.outputURL
            // Save the exported file to the photo library
            PHPhotoLibrary.shared().performChanges({
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL!)
            }) { saved, error in
                if saved {
                    // Fetch the asset that was just saved and log its URL
                    let fetchOptions = PHFetchOptions()
                    fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: true)]
                    let fetchResult = PHAsset.fetchAssets(with: .video, options: fetchOptions).lastObject
                    PHImageManager().requestAVAsset(forVideo: fetchResult!, options: nil, resultHandler: { (avurlAsset, audioMix, dict) in
                        let newObj = avurlAsset as! AVURLAsset
                        print(newObj.url)
                        DispatchQueue.main.async(execute: {
                            print(newObj.url.absoluteString)
                        })
                    })
                    print(fetchResult!)
                }
            }
        }
    })
}
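A minimal usage sketch for the call site (recordedURL is a placeholder for the URL your recorder hands back; saving also requires the NSPhotoLibraryUsageDescription key in Info.plist):

// Hypothetical call site once recording finishes; recordedURL is a placeholder
PHPhotoLibrary.requestAuthorization { status in
    guard status == .authorized else { return }
    DispatchQueue.main.async {
        self.convertVideoAndSaveTophotoLibrary(videoURL: recordedURL)
    }
}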