我尝试在swift中连接.wav音频文件。
这是我的代码:
/// Concatenates two .wav files into Documents/resultmerge.wav and plays the result.
/// - Parameters:
///   - audio1: URL of the clip that plays first.
///   - audio2: URL of the clip appended after it.
func merge(audio1: NSURL, audio2: NSURL) {
    var ok1 = false
    var ok2 = false

    //Create AVMutableComposition Object. This object will hold our multiple AVMutableCompositionTrack.
    var composition = AVMutableComposition()
    var compositionAudioTrack1:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    var compositionAudioTrack2:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())

    //Destination file in the Documents directory.
    var documentDirectoryURL = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask).first! as! NSURL
    var fileDestinationUrl = documentDirectoryURL.URLByAppendingPathComponent("resultmerge.wav")
    println(fileDestinationUrl)

    //AVAssetExportSession fails if the output file already exists, so delete any previous result first.
    NSFileManager.defaultManager().removeItemAtURL(fileDestinationUrl, error: nil)

    var avAsset1 = AVURLAsset(URL: audio1, options: nil)
    var avAsset2 = AVURLAsset(URL: audio2, options: nil)
    var tracks1 = avAsset1.tracksWithMediaType(AVMediaTypeAudio)
    var tracks2 = avAsset2.tracksWithMediaType(AVMediaTypeAudio)
    //Guard against inputs with no audio track (indexing [0] would crash).
    if tracks1.count == 0 || tracks2.count == 0 {
        println("one of the input files has no audio track")
        return
    }
    var assetTrack1:AVAssetTrack = tracks1[0] as! AVAssetTrack
    var assetTrack2:AVAssetTrack = tracks2[0] as! AVAssetTrack
    var duration1: CMTime = assetTrack1.timeRange.duration
    var duration2: CMTime = assetTrack2.timeRange.duration

    //BUG FIX: each insert time range is expressed in its own SOURCE asset's timeline,
    //so both must start at kCMTimeZero. The original timeRange2 started at duration1,
    //i.e. it tried to read a slice that starts past the second clip's beginning.
    var timeRange1 = CMTimeRangeMake(kCMTimeZero, duration1)
    var timeRange2 = CMTimeRangeMake(kCMTimeZero, duration2)

    ok1 = compositionAudioTrack1.insertTimeRange(timeRange1, ofTrack: assetTrack1, atTime: kCMTimeZero, error: nil)
    if ok1 {
        //The second clip is appended right where the first one ends (atTime: duration1).
        ok2 = compositionAudioTrack2.insertTimeRange(timeRange2, ofTrack: assetTrack2, atTime: duration1, error: nil)
        if ok2 {
            println("success")
        }
    }

    //AVAssetExportPresetPassthrough => concatenation without re-encoding.
    //NOTE(review): if export still fails with -11838 (operation not supported for
    //this media), switch to AVAssetExportPresetAppleM4A + AVFileTypeAppleM4A and a
    //".m4a" output name, as the accepted answer below does — confirm on device.
    var assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough)
    assetExport.outputFileType = AVFileTypeWAVE
    assetExport.outputURL = fileDestinationUrl
    assetExport.exportAsynchronouslyWithCompletionHandler({
        switch assetExport.status{
        case AVAssetExportSessionStatus.Failed:
            println("failed \(assetExport.error)")
        case AVAssetExportSessionStatus.Cancelled:
            println("cancelled \(assetExport.error)")
        default:
            println("complete")
            //Play the merged file (the redundant `AVAudioPlayer()` pre-init was removed).
            var audioPlayer = AVAudioPlayer(contentsOfURL: fileDestinationUrl, error: nil)
            audioPlayer.prepareToPlay()
            audioPlayer.play()
        }
    })
}
在终端(在iPhone上运行)中收到此错误:
file:///var/mobile/Containers/Data/Application/3F49D360-B363-4600-B3BB-EE0810501910/Documents/resultmerge.wav
成功
failed Error Domain=AVFoundationErrorDomain Code=-11838 "Opération interrompue" UserInfo=0x174269ac0 {NSLocalizedDescription=Opération interrompue, NSLocalizedFailureReason=L'opération n'est pas prise en charge pour ce contenu multimédia.}（即：操作被中断 —— 此媒体内容不支持该操作。）
但我不知道为什么我会收到这个错误。我非常感谢你能给我的任何帮助:)。
答案 0 :(得分:4)
我需要合并多个音频文件,所以我重新编写了接受NSURL数组的函数。以为我会在这里分享。
我是Swift的新手,所以请留下反馈。
在信用到期时给予信任:@Eric D. @Pierre Louis Bresson
以下是代码:
/// Concatenates the first audio track of every file in `audioFileUrls` and exports
/// the result as a timestamped .m4a in the Documents directory.
/// - Parameters:
///   - audioFileUrls: NSArray of NSURL, merged in array order.
///   - callback: receives either the output URL on success or the NSError that
///     stopped the merge/export.
func mergeAudioFiles(audioFileUrls: NSArray, callback: (url: NSURL?, error: NSError?)->()) {
    // Create the audio composition
    let composition = AVMutableComposition()

    // Merge: append each file's first audio track at the composition's current end.
    for i in 0 ..< audioFileUrls.count {  // native range; C-style `for(;;)` is deprecated/removed
        let compositionAudioTrack :AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
        let asset = AVURLAsset(URL: audioFileUrls[i] as! NSURL)
        let track = asset.tracksWithMediaType(AVMediaTypeAudio)[0]
        let timeRange = CMTimeRange(start: CMTimeMake(0, 600), duration: track.timeRange.duration)
        do {
            // Inserting at composition.duration makes the clips play back-to-back.
            try compositionAudioTrack.insertTimeRange(timeRange, ofTrack: track, atTime: composition.duration)
        } catch let insertError as NSError {
            // Report the failure through the callback instead of crashing (original used `try!`).
            callback(url: nil, error: insertError)
            return
        }
    }

    // Create output url — timestamped so successive merges never collide.
    let format = NSDateFormatter()
    format.dateFormat="yyyy-MM-dd-HH-mm-ss"
    let currentFileName = "recording-\(format.stringFromDate(NSDate()))-merge.m4a"
    print(currentFileName)
    let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
    let outputUrl = documentsDirectory.URLByAppendingPathComponent(currentFileName)

    // Export it
    let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
    assetExport?.outputFileType = AVFileTypeAppleM4A
    assetExport?.outputURL = outputUrl
    assetExport?.exportAsynchronouslyWithCompletionHandler({ () -> Void in
        switch assetExport!.status {
        case AVAssetExportSessionStatus.Failed:
            callback(url: nil, error: assetExport?.error)
        default:
            callback(url: assetExport?.outputURL, error: nil)
        }
    })
}
答案 1 :(得分:3)
适用于 Swift 3.0 —— 致谢 @Peyman（稍作修改）
// Destination URL of the most recent merge (written by mergeAudioFiles below).
var mergeAudioURL = NSURL()

/// Swift 3 version: concatenates the first audio track of every NSURL in
/// `audioFileUrls` and exports the result to Documents/FinalAudio.m4a,
/// storing that destination in `mergeAudioURL`.
func mergeAudioFiles(audioFileUrls: NSArray) {
    let composition = AVMutableComposition()
    for i in 0 ..< audioFileUrls.count {
        let compositionAudioTrack :AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
        let asset = AVURLAsset(url: (audioFileUrls[i] as! NSURL) as URL)
        let track = asset.tracks(withMediaType: AVMediaTypeAudio)[0]
        let timeRange = CMTimeRange(start: CMTimeMake(0, 600), duration: track.timeRange.duration)
        do {
            // Append at the composition's current end so clips play back-to-back.
            try compositionAudioTrack.insertTimeRange(timeRange, of: track, at: composition.duration)
        } catch {
            // Report and bail out instead of crashing (the original used `try!`).
            print("failed to insert track \(i): \(error)")
            return
        }
    }
    let documentDirectoryURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first! as NSURL
    self.mergeAudioURL = documentDirectoryURL.appendingPathComponent("FinalAudio.m4a")! as URL as NSURL
    // Bind the session once instead of force-unwrapping `assetExport!` everywhere.
    guard let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A) else {
        print("could not create AVAssetExportSession")
        return
    }
    assetExport.outputFileType = AVFileTypeAppleM4A
    assetExport.outputURL = mergeAudioURL as URL
    assetExport.exportAsynchronously(completionHandler:
    {
        switch assetExport.status
        {
        case AVAssetExportSessionStatus.failed:
            print("failed \(assetExport.error)")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(assetExport.error)")
        case AVAssetExportSessionStatus.unknown:
            print("unknown\(assetExport.error)")
        case AVAssetExportSessionStatus.waiting:
            print("waiting\(assetExport.error)")
        case AVAssetExportSessionStatus.exporting:
            print("exporting\(assetExport.error)")
        default:
            print("Audio Concatenation Complete")
        }
    })
}
答案 2 :(得分:2)
我通过改变两件事来使你的代码工作：

预设名称（presetName）：从 AVAssetExportPresetPassthrough 改为 AVAssetExportPresetAppleM4A；
输出文件类型（outputFileType）：从 AVFileTypeWAVE 改为 AVFileTypeAppleM4A。

像这样修改 assetExport 的声明：

var assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
assetExport.outputFileType = AVFileTypeAppleM4A

然后它将正确合并文件。看起来 AVAssetExportSession 只导出 M4A 格式并忽略其他预设。有没有办法让它导出其他格式（通过子类化？），虽然我还没有探索过这种可能性。
答案 3 :(得分:1)
这是代码,放在 println(fileDestinationUrl)之后:
// Build the expected absolute path of the merged file inside the Documents directory.
var file = "resultmerge.m4a"
var dirs : [String] = (NSSearchPathForDirectoriesInDomains(NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.AllDomainsMask, true) as? [String])!
var dir = dirs[0] //documents directory
var path = dir.stringByAppendingPathComponent(file)
// Re-assemble the absolute path from the URL's path components: component 0 ("/")
// is skipped, a leading "/" is prepended on the first piece, and the remaining
// pieces are joined with "/" — the result is equivalent to `path` itself.
var pathURLarray:Array = (NSURL(fileURLWithPath: path)!).pathComponents!
var pathURL:String = ""
var final = ""  // separator appended after every component except the last
var debut = ""  // leading "/" emitted only on the first component
for i in 1...(pathURLarray.count-1) {
if i == pathURLarray.count-1 {
final = ""
} else {
final = "/"
}
if i == 1 {
debut = "/"
} else {
debut = ""
}
pathURL = debut + pathURL + (pathURLarray[i] as! String) + final
}
// Delete any previous merge result before exporting again — presumably because
// AVAssetExportSession fails when the output file already exists (TODO confirm).
// NOTE(review): the existence check uses `pathURL` (resultmerge.m4a) while the
// deletion uses `fileDestinationUrl` (defined earlier as resultmerge.wav) —
// verify both actually refer to the same file.
var checkValidation = NSFileManager.defaultManager()
if checkValidation.fileExistsAtPath(pathURL) {
println("file exist")
if NSFileManager.defaultManager().removeItemAtURL(fileDestinationUrl, error: nil) {
println("delete")
}
} else {
println("no file")
}
有了这个和@Eric D.回答,它正在工作。
答案 4 :(得分:0)
只是为了帮助别人"连接"，我也改变了
INSERT INTO TABLE_A
(ID_STATUS, ID_USER, ID_Q_INI, ID_Q_FIN, PRICE )
VALUES
(
1,2,10, 17, (
SELECT SUM(PRICE)
FROM TABLE_B
WHERE (ID_Q >= 10 && ID_Q < 17)
)
进入
var timeRange2 = CMTimeRangeMake(duration1, duration2)
(除了@Eric D. @Pierre Louis Bresson代码)。
答案 5 :(得分:0)
此外,如果您希望同步处理它,只需添加一个分派
// Synchronous wrapper around the asynchronous export: a semaphore blocks the
// calling thread until the completion handler fires, then `errorExport` reports
// whether the export failed (true = failed/cancelled, false = success).
// NOTE(review): never call this on the main thread — the wait would block the UI.
var errorExport = true
// Export the composition as AppleM4A.
var assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
assetExport.outputFileType = AVFileTypeAppleM4A
assetExport.outputURL = fileDestinationUrl
// Semaphore starts at 0 so the wait below blocks until the handler signals it.
let sessionWaitSemaphore = dispatch_semaphore_create(0)
assetExport.exportAsynchronouslyWithCompletionHandler({
switch assetExport.status{
case AVAssetExportSessionStatus.Failed:
println("failed \(assetExport.error)")
case AVAssetExportSessionStatus.Cancelled:
println("cancelled \(assetExport.error)")
default:
// Any status other than Failed/Cancelled is treated as success.
errorExport = false
}
// Wake the waiting thread now that the export has finished.
dispatch_semaphore_signal(sessionWaitSemaphore)
return Void()
})
dispatch_semaphore_wait(sessionWaitSemaphore, DISPATCH_TIME_FOREVER)
return errorExport
答案 6 :(得分:0)
对于Swift 2.1:
/// Returns the absolute path of the app's Documents directory.
class func getDocumentsDirectory() -> NSString {
    // Only the first match of the user-domain Documents search path is needed.
    let searchPaths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true) as [String]
    return searchPaths[0]
}
/// Builds a unique .m4a file URL in the Documents directory,
/// using the current timestamp as the file name.
class func getFileURL() -> NSURL {
    let timestampedName = "\(NSDate()).m4a"
    let fullPath = getDocumentsDirectory().stringByAppendingPathComponent(timestampedName)
    return NSURL(fileURLWithPath: fullPath)
}
/// Trims a segment from each input (using startTime1/2 and trimmedLength1/2 state),
/// concatenates them, and exports the result as .m4a to `finalURL`.
/// - Parameters:
///   - audio1: URL of the clip that plays first.
///   - audio2: URL of the clip appended after it.
func merge(audio1: NSURL, audio2: NSURL) {
    finalURL = ProcessViewController.getFileURL()
    let preferredTimeScale : Int32 = 100
    //This object will be edited to include both audio files
    let composition = AVMutableComposition()

    //Song 1 setup
    let compositionAudioTrack1:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    let avAsset1 = AVURLAsset(URL: audio1, options: nil)
    let tracks1 = avAsset1.tracksWithMediaType(AVMediaTypeAudio)
    let assetTrack1:AVAssetTrack = tracks1[0]
    let duration1: CMTime = CMTimeMakeWithSeconds(trimmedLength1, preferredTimeScale)
    let startCMTime = CMTimeMakeWithSeconds(Double(startTime1), preferredTimeScale)
    let timeRange1 = CMTimeRangeMake(startCMTime, duration1)

    //Song 2 setup
    let compositionAudioTrack2:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    let avAsset2 = AVURLAsset(URL: audio2, options: nil)
    let tracks2 = avAsset2.tracksWithMediaType(AVMediaTypeAudio)
    let assetTrack2:AVAssetTrack = tracks2[0]
    let duration2: CMTime = CMTimeMakeWithSeconds(trimmedLength2, preferredTimeScale)
    let startCMTime2 = CMTimeMakeWithSeconds(Double(startTime2), preferredTimeScale)
    //BUG FIX: the original built timeRange2 from song 1's values (startCMTime,
    //duration1), so startCMTime2/duration2 were computed but never used and
    //song 2's own trim settings were silently ignored.
    let timeRange2 = CMTimeRangeMake(startCMTime2, duration2)

    //Insert the tracks into the composition
    do {
        try compositionAudioTrack1.insertTimeRange(timeRange1, ofTrack: assetTrack1, atTime: kCMTimeZero)
        try compositionAudioTrack2.insertTimeRange(timeRange2, ofTrack: assetTrack2, atTime: duration1)
    } catch {
        print(error)
        return  //don't export a half-built composition
    }

    //Perform the merge — bind the session once instead of force-unwrapping `assetExport!`.
    guard let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A) else {
        print("could not create AVAssetExportSession")
        return
    }
    assetExport.outputFileType = AVFileTypeAppleM4A
    assetExport.outputURL = finalURL
    assetExport.exportAsynchronouslyWithCompletionHandler({
        switch assetExport.status{
        case AVAssetExportSessionStatus.Failed:
            print("failed \(assetExport.error)")
        case AVAssetExportSessionStatus.Cancelled:
            print("cancelled \(assetExport.error)")
        default:
            print("complete")
            self.initializeAudioPlayer()
        }
    })
}