So, currently I am using this to compress a video:
func compressVideo(inputURL: NSURL, outputURL: NSURL, handler: (session: AVAssetExportSession) -> Void) {
    let urlAsset = AVURLAsset(URL: inputURL, options: nil)
    let exportSession = AVAssetExportSession(asset: urlAsset, presetName: AVAssetExportPresetMediumQuality)
    exportSession!.outputURL = outputURL
    exportSession!.outputFileType = AVFileTypeQuickTimeMovie
    exportSession!.shouldOptimizeForNetworkUse = true
    exportSession!.exportAsynchronouslyWithCompletionHandler { () -> Void in
        handler(session: exportSession!)
    }
}
When I record a 2-second video the file size is 4.3 MB, and when I record a 6-second video it is 9.3 MB.
Any tips on reducing the size?
Answer 0 (score: 8)
Although these extensions compress using the medium-quality preset, you can change that to the low or high preset if quality or file size is your main concern.
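For reference, here is a hedged sketch (in current Swift syntax) of picking a different preset. The helper name preferredPreset(for:) and the fallback order are my own; the preset constants and exportPresets(compatibleWith:) come from AVFoundation:

import AVFoundation

// Sketch only: choose the smallest preset that the asset actually supports.
// The fallback order below is an assumption, not part of this answer.
func preferredPreset(for asset: AVAsset) -> String {
    let compatible = AVAssetExportSession.exportPresets(compatibleWith: asset)
    for preset in [AVAssetExportPresetLowQuality,
                   AVAssetExportPresetMediumQuality,
                   AVAssetExportPresetHighestQuality] where compatible.contains(preset) {
        return preset
    }
    return AVAssetExportPresetPassthrough
}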
Depending on the Swift version, I use these extensions:
For the OP (Swift 2.2):
extension PreviewVideoViewController: AVCaptureFileOutputRecordingDelegate {
    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
        let data = NSData(contentsOfURL: outputFileURL)
        // Convert to Double before dividing so small files don't round down to 0 MB.
        print("File size before compression: \(Double(data!.length) / 1048576.0) mb")
        let compressedURL = NSURL.fileURLWithPath(NSTemporaryDirectory() + NSUUID().UUIDString + ".m4v")
        compressVideo(outputFileURL, outputURL: compressedURL) { (session) in
            switch session.status {
            case .Unknown:
                break
            case .Waiting:
                break
            case .Exporting:
                break
            case .Completed:
                let data = NSData(contentsOfURL: compressedURL)
                print("File size after compression: \(Double(data!.length) / 1048576.0) mb")
            case .Failed:
                break
            case .Cancelled:
                break
            }
        }
    }

    private func compressVideo(inputURL: NSURL, outputURL: NSURL, handler: (session: AVAssetExportSession) -> Void) {
        let urlAsset = AVURLAsset(URL: inputURL, options: nil)
        if let exportSession = AVAssetExportSession(asset: urlAsset, presetName: AVAssetExportPresetMediumQuality) {
            exportSession.outputURL = outputURL
            exportSession.outputFileType = AVFileTypeQuickTimeMovie
            exportSession.shouldOptimizeForNetworkUse = true
            exportSession.exportAsynchronouslyWithCompletionHandler { () -> Void in
                handler(session: exportSession)
            }
        }
    }
}
For those who need Swift 3.0:
extension PreviewVideoViewController: AVCaptureFileOutputRecordingDelegate {
    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        guard let data = NSData(contentsOf: outputFileURL as URL) else {
            return
        }
        // Convert to Double before dividing so small files don't round down to 0 MB.
        print("File size before compression: \(Double(data.length) / 1048576.0) mb")
        let compressedURL = NSURL.fileURL(withPath: NSTemporaryDirectory() + NSUUID().uuidString + ".m4v")
        compressVideo(inputURL: outputFileURL as URL, outputURL: compressedURL) { (exportSession) in
            guard let session = exportSession else {
                return
            }
            switch session.status {
            case .unknown:
                break
            case .waiting:
                break
            case .exporting:
                break
            case .completed:
                guard let compressedData = NSData(contentsOf: compressedURL) else {
                    return
                }
                print("File size after compression: \(Double(compressedData.length) / 1048576.0) mb")
            case .failed:
                break
            case .cancelled:
                break
            }
        }
    }

    func compressVideo(inputURL: URL, outputURL: URL, handler: @escaping (_ exportSession: AVAssetExportSession?) -> Void) {
        let urlAsset = AVURLAsset(url: inputURL, options: nil)
        guard let exportSession = AVAssetExportSession(asset: urlAsset, presetName: AVAssetExportPresetMediumQuality) else {
            handler(nil)
            return
        }
        exportSession.outputURL = outputURL
        exportSession.outputFileType = AVFileTypeQuickTimeMovie
        exportSession.shouldOptimizeForNetworkUse = true
        exportSession.exportAsynchronously { () -> Void in
            handler(exportSession)
        }
    }
}
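If switching presets alone is not enough, AVAssetExportSession can also cap the output size directly through its fileLengthLimit property. A minimal sketch follows; the 5 MB value is an arbitrary example and not something from the answer above:

import AVFoundation

// Sketch only: ask the session to keep the exported file under roughly 5 MB.
func applySizeCap(to exportSession: AVAssetExportSession) {
    exportSession.fileLengthLimit = 5 * 1024 * 1024   // bytes; the session tries not to exceed this length
    exportSession.shouldOptimizeForNetworkUse = true
}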
Answer 1 (score: 0)
func convertVideo(phAsset: PHAsset) {
    PHImageManager.default().requestAVAsset(forVideo: phAsset, options: PHVideoRequestOptions(), resultHandler: { (asset, audioMix, info) -> Void in
        if let asset = asset as? AVURLAsset {
            do {
                let videoData = try Data.init(contentsOf: asset.url)
                print(asset.url)
                self.orginalVideo = asset.url
                // Convert to Double before dividing so small files don't round down to 0 MB.
                print("File size before compression: \(Double(videoData.count) / 1048576.0) mb")
                let compressedURL = NSURL.fileURL(withPath: NSTemporaryDirectory() + NSUUID().uuidString + ".MP4")
                print(compressedURL)
                self.compressVideo(inputURL: asset.url, outputURL: compressedURL) { (exportSession) in
                    guard let session = exportSession else {
                        return
                    }
                    switch session.status {
                    case .unknown:
                        print("unknown")
                    case .waiting:
                        print("waiting")
                    case .exporting:
                        print("exporting")
                    case .completed:
                        do {
                            let compressedData = try Data.init(contentsOf: compressedURL)
                            self.compressVideo = compressedURL
                            print(compressedData)
                            print("File size AFTER compression: \(Double(compressedData.count) / 1048576.0) mb")
                        } catch {
                            print(error)
                        }
                    case .failed:
                        print("failed")
                    case .cancelled:
                        print("cancelled")
                    }
                }
            } catch {
                print(error)
                //return
            }
        }
    })
}

func compressVideo(inputURL: URL, outputURL: URL, handler: @escaping (_ exportSession: AVAssetExportSession?) -> Void) {
    let urlAsset = AVURLAsset(url: inputURL, options: nil)
    guard let exportSession = AVAssetExportSession(asset: urlAsset, presetName: AVAssetExportPresetMediumQuality) else {
        handler(nil)
        return
    }
    exportSession.outputURL = outputURL
    exportSession.outputFileType = AVFileType.mp4
    exportSession.shouldOptimizeForNetworkUse = true
    exportSession.exportAsynchronously { () -> Void in
        handler(exportSession)
    }
}
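Not part of the answer above, but for context, one possible way to feed convertVideo(phAsset:) is to fetch the most recent video from the photo library. The fetch options below are assumptions, and photo-library authorization is assumed to have been granted already:

import Photos

// Sketch only: grab the newest video asset and pass it to convertVideo(phAsset:).
func compressLatestLibraryVideo() {
    let options = PHFetchOptions()
    options.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
    if let newest = PHAsset.fetchAssets(with: .video, options: options).firstObject {
        convertVideo(phAsset: newest)
    }
}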
Answer 2 (score: 0)
// Use the SDAVAssetExportSession library with a dynamic bitrate.
func aVodzLatestVideoCompressor(inputURL: URL, aOutputURL: URL, aStartTime: Float, aEndTime: Float) {
    let sizeVideo: Float = inputURL.verboseFileSizeInMB()
    if sizeVideo < 5.0 {
        DispatchQueue.main.async {
            //self.directsendvideowithoutcomressor()
        }
        return
    }
    let startTime = CMTime(seconds: Double(aStartTime), preferredTimescale: 1000)
    let endTime = CMTime(seconds: Double(aEndTime), preferredTimescale: 1000)
    let timeRange = CMTimeRange(start: startTime, end: endTime)

    let anAsset = AVURLAsset(url: inputURL, options: nil)
    guard let videoTrack = anAsset.tracks(withMediaType: AVMediaType.video).first else { return }

    var aQuality: Float = 0.0
    let duration = anAsset.duration
    let totalSeconds = Int(CMTimeGetSeconds(duration))
    print("duration -\(duration) - totalSeconds -\(totalSeconds)")

    var bitrate = min(aQuality, videoTrack.estimatedDataRate)
    let landscap = self.isLandScapVideo(afileURL: inputURL)

    var originalWidth = videoTrack.naturalSize.width
    var originalHeight = videoTrack.naturalSize.height
    print("originalWidth -\(originalWidth) originalHeight- \(originalHeight) ")
    while originalWidth >= 1920 || originalHeight >= 1920 {
        originalWidth = originalWidth / 2
        originalHeight = originalHeight / 2
    }

    var setWidth = Int(originalWidth)
    var setlHeight = Int(originalHeight)
    if sizeVideo < 10.0 {
        // COMPRESS_QUALITY_HIGH:
        setWidth = Int(originalWidth)
        setlHeight = Int(originalHeight)
        aQuality = Float(setWidth * setlHeight * 20)
        bitrate = min(aQuality, videoTrack.estimatedDataRate)
    } else if sizeVideo < 20.0 {
        // COMPRESS_QUALITY_MEDIUM:
        if totalSeconds > 35 {
            setWidth = Int(originalWidth / 2.7)
            setlHeight = Int(originalHeight / 2.7)
        } else if totalSeconds > 25 {
            setWidth = Int(originalWidth / 2.3)
            setlHeight = Int(originalHeight / 2.3)
        } else {
            setWidth = Int(originalWidth / 2.0)
            setlHeight = Int(originalHeight / 2.0)
        }
        aQuality = Float(setWidth * setlHeight * 10)
        bitrate = min(aQuality, videoTrack.estimatedDataRate)
    } else if sizeVideo < 30.0 {
        // COMPRESS_QUALITY_MEDIUM:
        if totalSeconds > 35 {
            setWidth = Int(originalWidth / 3)
            setlHeight = Int(originalHeight / 3)
        } else if totalSeconds > 20 {
            setWidth = Int(originalWidth / 2.5)
            setlHeight = Int(originalHeight / 2.5)
        } else {
            setWidth = Int(originalWidth / 2.0)
            setlHeight = Int(originalHeight / 2.0)
        }
        aQuality = Float(setWidth * setlHeight * 10)
        bitrate = min(aQuality, videoTrack.estimatedDataRate)
    } else {
        if totalSeconds > 35 {
            setWidth = Int(originalWidth / 3.0)
            setlHeight = Int(originalHeight / 3.0)
        } else if totalSeconds > 25 {
            setWidth = Int(originalWidth / 2.5)
            setlHeight = Int(originalHeight / 2.5)
        } else {
            setWidth = Int(originalWidth / 2.0)
            setlHeight = Int(originalHeight / 2.0)
        }
        aQuality = Float(setWidth * setlHeight * 10)
        bitrate = min(aQuality, videoTrack.estimatedDataRate)
    }
    print("aQuality")
    print(Float(aQuality))
    print("bitrate")
    print(Float(bitrate))

    let encoder = SDAVAssetExportSession(asset: anAsset)
    encoder?.shouldOptimizeForNetworkUse = true
    encoder?.timeRange = timeRange
    encoder?.outputFileType = AVFileType.mp4.rawValue
    encoder?.outputURL = aOutputURL
    // 960 x 540, 1280 x 720, 1920 x 1080 -- size reduction parameter
    encoder?.videoSettings = [
        AVVideoCodecKey: AVVideoCodecType.h264,
        AVVideoWidthKey: landscap ? NSNumber(value: 1280) : NSNumber(value: 720),
        AVVideoHeightKey: landscap ? NSNumber(value: 720) : NSNumber(value: 1280),
        AVVideoCompressionPropertiesKey: [
            AVVideoAverageBitRateKey: NSNumber(value: bitrate),
            AVVideoProfileLevelKey: AVVideoProfileLevelH264High40
        ]
    ]
    encoder?.audioSettings = [
        AVFormatIDKey: NSNumber(value: kAudioFormatMPEG4AAC),
        AVNumberOfChannelsKey: NSNumber(value: 2),
        AVSampleRateKey: NSNumber(value: 44100),
        AVEncoderBitRateKey: NSNumber(value: 128000)
    ]
    encoder?.exportAsynchronously(completionHandler: {
        if encoder?.status == .completed {
            print("Video export succeeded")
            DispatchQueue.main.async {
                appDelegate.hideLoader()
                //NotificationCenter.default.post(name: Notification.Name("getMediaEffect"), object: "3")
                //self.sendCompletion?(UIImage(), aOutputURL)
                let text = "Original video- \(inputURL.verboseFileSizeInMB()) \n and Compressed video \(aOutputURL.verboseFileSizeInMB()) "
                let alertController = UIAlertController.init(title: "Compressed!!", message: text, preferredStyle: .alert)
                alertController.addAction(UIAlertAction.init(title: "share to server!", style: .default, handler: { (action) in
                    // Completion block
                    NotificationCenter.default.post(name: Notification.Name("getMediaEffect"), object: "3")
                    self.sendCompletion?(UIImage(), aOutputURL)
                }))
                alertController.addAction(UIAlertAction.init(title: "Save", style: .default, handler: { (action) in
                    // Completion block
                    DispatchQueue.main.async {
                        appDelegate.hideLoader()
                        self.shareVideo(aUrl: aOutputURL)
                    }
                }))
                alertController.addAction(UIAlertAction.init(title: "cancel!", style: .default, handler: { (action) in
                }))
                self.present(alertController, animated: true, completion: nil)
            }
        } else if encoder?.status == .cancelled {
            print("Video export cancelled")
            DispatchQueue.main.async {
                appDelegate.hideLoader()
                self.view.makeToast("error_something_went_wrong".localized)
            }
        } else {
            print("Video export failed with error: \(encoder!.error.localizedDescription) ")
            DispatchQueue.main.async {
                appDelegate.hideLoader()
                self.view.makeToast("error_something_went_wrong".localized)
            }
        }
    })
}
func isLandScapVideo(afileURL: URL) -> Bool {
    let resolution = self.resolutionForLocalVideo(url: afileURL)
    guard let width = resolution?.width, let height = resolution?.height else {
        return false
    }
    if abs(width) > abs(height) {
        // landscape
        return true
    } else {
        // portrait
        return false
    }
}
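// Note: resolutionForLocalVideo(url:) is referenced above but not included in the answer.
// The following is only a hypothetical sketch of such a helper, based on the video track's
// naturalSize and preferredTransform; it is not the answer author's implementation.
func resolutionForLocalVideo(url: URL) -> CGSize? {
    guard let track = AVURLAsset(url: url).tracks(withMediaType: .video).first else { return nil }
    // Apply the preferred transform so rotated (portrait) recordings report their display size.
    let size = track.naturalSize.applying(track.preferredTransform)
    return CGSize(width: abs(size.width), height: abs(size.height))
}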
extension URL {
    func verboseFileSizeInMB() -> Float {
        let p = self.path
        let attr = try? FileManager.default.attributesOfItem(atPath: p)
        if let attr = attr {
            let fileSize = Float(attr[FileAttributeKey.size] as! UInt64) / (1024.0 * 1024.0)
            print(String(format: "FILE SIZE: %.2f MB", fileSize))
            return fileSize
        } else {
            return Float.zero
        }
    }
}
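The notes below list Objective-C changes the author made inside the SDAVAssetExportSession .m file to fix orientation. For anyone who would rather keep that logic in Swift, here is a hedged sketch of the equivalent check on a track's preferredTransform; the function name and the returned tuple are my own, not part of the answer:

import AVFoundation
import UIKit

// Sketch only: mirrors the Objective-C orientation mapping in the notes below.
func orientation(for track: AVAssetTrack) -> (orientation: UIImage.Orientation, isPortrait: Bool) {
    let t = track.preferredTransform
    if t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0 { return (.right, true) }
    if t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0 { return (.left, true) }
    if t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0 { return (.down, false) }
    return (.up, false)
}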
// Extra: changes made in the SDAVAssetExportSession library's .m file (adjust as per your requirements).

// Modified line:
CGAffineTransform matrix = CGAffineTransformMakeTranslation(transx / xratio, transy / yratio - transform.ty);

// Fix orientation - 1
UIImageOrientation videoAssetOrientation = UIImageOrientationUp;
BOOL isVideoAssetPortrait = NO;
CGAffineTransform videoTransform = videoTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
    videoAssetOrientation = UIImageOrientationRight;
    isVideoAssetPortrait = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
    videoAssetOrientation = UIImageOrientationLeft;
    isVideoAssetPortrait = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
    videoAssetOrientation = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
    videoAssetOrientation = UIImageOrientationDown;
}

// [passThroughLayer setTransform:transform atTime:kCMTimeZero];
if ((videoAssetOrientation == UIImageOrientationDown) || (videoAssetOrientation == UIImageOrientationLeft)) {
    [passThroughLayer setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];
} else {
    [passThroughLayer setTransform:transform atTime:kCMTimeZero];
}