I'm building a karaoke app. I use CameraManager to record video along with the user's voice; when recording finishes, I trim the song's audio to match the duration of the audio/video recorded by CameraManager, and then merge the two.
With the camera and a wired 3.5mm-jack headset everything stays in sync, but when I record with AirPods, after I merge the recorded audio/video with the song's backing track there is a delay: the backing track runs ahead of the audio/video recorded from CameraManager.
Here is some of the code.
This is what I call when I start recording from CameraManager:
cameraManager.cameraOutputMode = .videoWithMic
cameraManager.startRecordingVideo()
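For context, the audio session is configured before recording starts. This is only a minimal sketch of what I believe the setup needs; the .allowBluetooth/.defaultToSpeaker options and the function name are my assumptions, not code lifted verbatim from the app:

import AVFoundation

func configureSessionForKaraoke() throws {
    let session = AVAudioSession.sharedInstance()
    // .playAndRecord lets the backing track play while the mic records;
    // .allowBluetooth enables the hands-free route that AirPods use for input.
    try session.setCategory(.playAndRecord,
                            mode: .videoRecording,
                            options: [.allowBluetooth, .defaultToSpeaker])
    try session.setActive(true)
}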
Playing the music:
player = try AVAudioPlayer(contentsOf: songURL)
player?.delegate = self
if player!.prepareToPlay() {
    player!.play()
}
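AVAudioPlayer can also schedule playback at an explicit device time instead of starting immediately, which makes the musical start point deterministic. A minimal sketch of that variant (the 0.2 s lead-in is an arbitrary value for illustration):

// Schedule playback slightly in the future so the jitter of calling
// play() does not move the musical start point.
if let player = player, player.prepareToPlay() {
    let startDelay: TimeInterval = 0.2 // arbitrary illustration value
    player.play(atTime: player.deviceCurrentTime + startDelay)
}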
Merging the audio and video together:
class func mergeVideoAndAudio(videoUrl: URL,
                              audioUrl: URL,
                              setExportSession: (AVAssetExportSession) -> Void,
                              completion: @escaping (_ error: Error?, _ url: URL?) -> Void) {

    let mixComposition = AVMutableComposition()
    var mutableCompositionVideoTrack = [AVMutableCompositionTrack]()
    var mutableCompositionAudioTrack = [AVMutableCompositionTrack]()
    var mutableCompositionAudioOfVideoTrack = [AVMutableCompositionTrack]()

    // Start merge
    let aVideoAsset = AVAsset(url: videoUrl)
    let aAudioAsset = AVAsset(url: audioUrl)

    let compositionAddVideo = mixComposition.addMutableTrack(withMediaType: .video,
                                                             preferredTrackID: kCMPersistentTrackID_Invalid)
    let compositionAddAudio = mixComposition.addMutableTrack(withMediaType: .audio,
                                                             preferredTrackID: kCMPersistentTrackID_Invalid)!
    let compositionAddAudioOfVideo = mixComposition.addMutableTrack(withMediaType: .audio,
                                                                    preferredTrackID: kCMPersistentTrackID_Invalid)!

    let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: .video)[0]
    let aAudioOfVideoAssetTrack: AVAssetTrack? = aVideoAsset.tracks(withMediaType: .audio).first
    let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: .audio)[0]

    // The composition must carry the source video's transform, or orientation is lost.
    compositionAddVideo!.preferredTransform = aVideoAssetTrack.preferredTransform

    // if shouldFlipHorizontally {
    //     // Flip video horizontally
    //     var frontalTransform = CGAffineTransform(scaleX: -1.0, y: 1.0)
    //     frontalTransform = frontalTransform.translatedBy(x: -aVideoAssetTrack.naturalSize.width, y: 0.0)
    //     frontalTransform = frontalTransform.translatedBy(x: 0.0, y: -aVideoAssetTrack.naturalSize.width)
    //     compositionAddVideo!.preferredTransform = frontalTransform
    // }

    mutableCompositionVideoTrack.append(compositionAddVideo!)
    mutableCompositionAudioTrack.append(compositionAddAudio)
    mutableCompositionAudioOfVideoTrack.append(compositionAddAudioOfVideo)

    do {
        try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(start: .zero,
                                                                            duration: aVideoAssetTrack.timeRange.duration),
                                                            of: aVideoAssetTrack,
                                                            at: .zero)
        // In my case the audio file is longer than the video file, so I use the
        // video asset's duration instead of the audio asset's duration.
        try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(start: .zero,
                                                                            duration: aVideoAssetTrack.timeRange.duration),
                                                            of: aAudioAssetTrack,
                                                            at: .zero)
        // Add the audio of the video (if it exists) to the final composition.
        if let aAudioOfVideoAssetTrack = aAudioOfVideoAssetTrack {
            try mutableCompositionAudioOfVideoTrack[0].insertTimeRange(CMTimeRangeMake(start: .zero,
                                                                                       duration: aVideoAssetTrack.timeRange.duration),
                                                                       of: aAudioOfVideoAssetTrack,
                                                                       at: .zero)
        }
    } catch {
        print(error.localizedDescription)
    }

    let timestamp = Int(NSDate().timeIntervalSince1970)

    // Exporting
    let savePathUrl: URL = UIUtils.getDocumentsDirectory().appendingPathComponent("newVideo-\(timestamp).mp4")
    do { // Delete any old video at the destination.
        // fileExists(atPath:) expects a file-system path, not absoluteString.
        if FileManager.default.fileExists(atPath: savePathUrl.path) {
            try FileManager.default.removeItem(at: savePathUrl)
        }
    } catch {
        print(error.localizedDescription)
    }

    let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
    setExportSession(assetExport)
    assetExport.outputFileType = .mp4
    assetExport.outputURL = savePathUrl
    assetExport.shouldOptimizeForNetworkUse = true

    assetExport.exportAsynchronously {
        switch assetExport.status {
        case .completed:
            print("success")
            completion(nil, savePathUrl)
        case .failed:
            print("failed \(assetExport.error?.localizedDescription ?? "error nil")")
            completion(assetExport.error, nil)
        case .cancelled:
            print("cancelled \(assetExport.error?.localizedDescription ?? "error nil")")
            completion(assetExport.error, nil)
        default:
            print("complete")
            completion(assetExport.error, nil)
        }
    }
}
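The call site looks roughly like this (MediaMerger, recordedVideoURL, and trimmedSongURL are placeholder names for this illustration, not the app's real identifiers):

MediaMerger.mergeVideoAndAudio(videoUrl: recordedVideoURL,   // from CameraManager
                               audioUrl: trimmedSongURL,     // from trimAudio below
                               setExportSession: { session in
                                   self.exportSession = session // keep a handle for cancellation
                               },
                               completion: { error, mergedURL in
                                   if let mergedURL = mergedURL {
                                       print("merged file at \(mergedURL)")
                                   } else {
                                       print("merge failed: \(error?.localizedDescription ?? "unknown")")
                                   }
                               })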
And trimming the audio:
class func trimAudio(audioToTrim: URL,
                     length: Double,
                     setExportSession: (AVAssetExportSession) -> Void,
                     completion: @escaping (URL?, String?) -> Void) {
    let audioFileInput = audioToTrim
    let mixedAudio = "trimmedAudio.m4a"
    let exportPath = NSTemporaryDirectory() + mixedAudio
    let audioFileOutput = URL(fileURLWithPath: exportPath)
    try? FileManager.default.removeItem(at: audioFileOutput)

    let asset = AVAsset(url: audioFileInput)
    guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetAppleM4A) else {
        completion(nil, "ExportSession is nil")
        return
    }
    setExportSession(exportSession)

    // A timescale of 1 rounds the trim point to whole seconds; 600 keeps
    // sub-second precision so the trimmed song matches the video's length.
    let startTime = CMTimeMakeWithSeconds(0, preferredTimescale: 600)
    let stopTime = CMTimeMakeWithSeconds(length, preferredTimescale: 600)
    let exportTimeRange = CMTimeRangeFromTimeToTime(start: startTime, end: stopTime)

    exportSession.outputURL = audioFileOutput
    exportSession.outputFileType = .m4a
    exportSession.timeRange = exportTimeRange

    exportSession.exportAsynchronously {
        switch exportSession.status {
        case .completed:
            completion(audioFileOutput, nil)
        default:
            completion(nil, exportSession.error?.localizedDescription)
        }
    }
}
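It is driven by the recorded video's duration, roughly like this (again with placeholder names):

// Trim the song to the recorded video's length, then merge.
let videoDuration = AVAsset(url: recordedVideoURL).duration.seconds
MediaMerger.trimAudio(audioToTrim: songURL,
                      length: videoDuration,
                      setExportSession: { _ in },
                      completion: { trimmedURL, errorMessage in
                          guard let trimmedURL = trimmedURL else {
                              print("trim failed: \(errorMessage ?? "unknown")")
                              return
                          }
                          // Hand trimmedURL to mergeVideoAndAudio above.
                      })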
When I use regular wired headphones or the device's built-in microphone everything syncs correctly; the problem only appears with AirPods. I understand that AirPods introduce latency, but I have tried other karaoke apps, for example StarMaker, and that app works fine with AirPods.
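Since Bluetooth adds latency on both the output and the input side, my current guess is that the merge step needs to offset the music track by the latency the active route reports. A minimal sketch of that idea inside mergeVideoAndAudio (the compensation strategy is an assumption I have not confirmed, not working code from the app):

import AVFoundation

// AVAudioSession reports the latency of the active route; AirPods return
// much larger values over Bluetooth than the built-in mic and speaker do.
let session = AVAudioSession.sharedInstance()
let totalLatency = session.outputLatency + session.inputLatency

// Assumption: the voice is recorded late by roughly this amount, so
// inserting the music at a matching offset should realign the two tracks.
let offset = CMTimeMakeWithSeconds(totalLatency, preferredTimescale: 600)
try mutableCompositionAudioTrack[0].insertTimeRange(
    CMTimeRangeMake(start: .zero, duration: aVideoAssetTrack.timeRange.duration),
    of: aAudioAssetTrack,
    at: offset)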