I want the user to be able to play an audio file, change the volume of certain sections, and then save the file with the new volume levels.
I changed the audio's volume with AVAssetExportSession and AVMutableAudioMixInputParameters and that worked. The problem is that I also need to build an audio loop from this audio, so first I create the loop and then I need to change the volume.
This is my code:
let type = "m4a"
let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("mixedAudio.m4a")
var backgroundUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("background.m4a")

override func viewDidLoad() {
    super.viewDidLoad()
    playButotn.isEnabled = false
    let mainUrl = Bundle.main.url(forResource: "prueba1", withExtension: type, subdirectory: "Audios")
    let mainDurations = AVAsset(url: mainUrl!).duration
    let secondAudioUrl = Bundle.main.url(forResource: "walk", withExtension: type, subdirectory: "Audios")
    let backgroundDuration = AVAsset(url: secondAudioUrl!).duration
    if mainDurations > backgroundDuration {
        var times = Int(mainDurations.seconds / backgroundDuration.seconds)
        let rem = mainDurations.seconds.truncatingRemainder(dividingBy: backgroundDuration.seconds)
        if rem > 0 {
            times = times + 1
        }
        createLoopAudio(times: times) {
            self.createFade {
                self.createMix(mainUrl: mainUrl!, backgroundUrl: self.backgroundUrl)
            }
        }
    } else {
        backgroundUrl = secondAudioUrl!
        createMix(mainUrl: mainUrl!, backgroundUrl: backgroundUrl)
    }
}
func createMix(mainUrl: URL, backgroundUrl: URL) {
    let composition = AVMutableComposition()
    let mainAsset = AVAsset(url: mainUrl)
    let backgroundAsset = AVAsset(url: backgroundUrl)
    let mainDurations = AVAsset(url: mainUrl).duration

    let mainAudioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    let backgroundAudioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    let timeRange = CMTimeRangeMake(kCMTimeZero, mainDurations)
    do {
        try mainAudioTrack.insertTimeRange(timeRange, of: mainAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
        try backgroundAudioTrack.insertTimeRange(timeRange, of: backgroundAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
    } catch {
        print(error.localizedDescription)
    }

    let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough)
    assetExport?.outputFileType = AVFileTypeMPEG4
    assetExport?.outputURL = documentsDirectory
    assetExport?.shouldOptimizeForNetworkUse = true
    if FileManager.default.fileExists(atPath: documentsDirectory.path) {
        try! FileManager.default.removeItem(atPath: documentsDirectory.path)
    }
    assetExport?.exportAsynchronously(completionHandler: {
        switch assetExport!.status {
        case AVAssetExportSessionStatus.failed:
            print("failed \(assetExport?.error)")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(assetExport?.error)")
        case AVAssetExportSessionStatus.unknown:
            print("unknown\(assetExport?.error)")
        case AVAssetExportSessionStatus.waiting:
            print("waiting\(assetExport?.error)")
        case AVAssetExportSessionStatus.exporting:
            print("exporting\(assetExport?.error)")
        case AVAssetExportSessionStatus.completed:
            print("complete")
            DispatchQueue.main.async {
                self.playButotn.isEnabled = true
            }
        }
    })
}
func createLoopAudio(times: Int, completion: @escaping () -> Void) {
    let composition = AVMutableComposition()
    var nextTimeStartClip = kCMTimeZero
    for _ in 1...times {
        let url = Bundle.main.url(forResource: "walk", withExtension: type, subdirectory: "Audios")
        let audioAsset = AVAsset(url: url!)
        print("tracks walk \(audioAsset.tracks.count)")
        // Note: a new composition track is added for every repetition of the clip.
        let audioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
        let timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
        do {
            try audioTrack.insertTimeRange(timeRange, of: audioAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: nextTimeStartClip)
        } catch {
            print(error.localizedDescription)
        }
        nextTimeStartClip = CMTimeAdd(nextTimeStartClip, audioAsset.duration)
    }

    let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough)
    assetExport?.outputFileType = AVFileTypeMPEG4
    assetExport?.outputURL = backgroundUrl
    assetExport?.shouldOptimizeForNetworkUse = true
    if FileManager.default.fileExists(atPath: backgroundUrl.path) {
        try! FileManager.default.removeItem(atPath: backgroundUrl.path)
    }
    assetExport?.exportAsynchronously(completionHandler: {
        switch assetExport!.status {
        case AVAssetExportSessionStatus.failed:
            print("failed \(assetExport?.error)")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(assetExport?.error)")
        case AVAssetExportSessionStatus.unknown:
            print("unknown\(assetExport?.error)")
        case AVAssetExportSessionStatus.waiting:
            print("waiting\(assetExport?.error)")
        case AVAssetExportSessionStatus.exporting:
            print("exporting\(assetExport?.error)")
        case AVAssetExportSessionStatus.completed:
            print("loop complete")
            completion()
        }
    })
}
func createFade(completion: @escaping () -> Void) {
    let exportAudioMix = AVMutableAudioMix()
    let audioAsset = AVAsset(url: backgroundUrl)
    let exportAudioMixInputParameters = AVMutableAudioMixInputParameters(track: audioAsset.tracks(withMediaType: AVMediaTypeAudio)[0])
    let start = 2
    let length = 3
    exportAudioMixInputParameters.setVolume(0.0, at: CMTimeMakeWithSeconds(Float64(start - 1), 1))
    exportAudioMixInputParameters.setVolume(0.1, at: CMTimeMakeWithSeconds(Float64(start), 1))
    exportAudioMixInputParameters.setVolume(0.5, at: CMTimeMakeWithSeconds(Float64(start + 1), 1))
    exportAudioMixInputParameters.setVolume(1.0, at: CMTimeMakeWithSeconds(Float64(start + 2), 1))
    exportAudioMixInputParameters.setVolume(1.0, at: CMTimeMakeWithSeconds(Float64(start + length - 2), 1))
    exportAudioMixInputParameters.setVolume(0.5, at: CMTimeMakeWithSeconds(Float64(start + length - 1), 1))
    exportAudioMixInputParameters.setVolume(0.1, at: CMTimeMakeWithSeconds(Float64(start + length), 1))
    // Note: exportAudioMix is built here but never assigned to the export session below,
    // so these volume points are not applied during the export.
    exportAudioMix.inputParameters = [exportAudioMixInputParameters]

    let composition = AVMutableComposition()
    print("tracks loop \(audioAsset.tracks.count)")
    let audioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    let timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
    do {
        try audioTrack.insertTimeRange(timeRange, of: audioAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
    } catch {
        print(error.localizedDescription)
    }

    let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
    assetExport?.outputFileType = AVFileTypeAppleM4A
    assetExport?.outputURL = backgroundUrl
    assetExport?.shouldOptimizeForNetworkUse = true
    if FileManager.default.fileExists(atPath: backgroundUrl.path) {
        try! FileManager.default.removeItem(atPath: backgroundUrl.path)
    }
    assetExport?.exportAsynchronously(completionHandler: {
        switch assetExport!.status {
        case AVAssetExportSessionStatus.failed:
            print("failed \(assetExport?.error)")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(assetExport?.error)")
        case AVAssetExportSessionStatus.unknown:
            print("unknown\(assetExport?.error)")
        case AVAssetExportSessionStatus.waiting:
            print("waiting\(assetExport?.error)")
        case AVAssetExportSessionStatus.exporting:
            print("exporting\(assetExport?.error)")
        case AVAssetExportSessionStatus.completed:
            print("faded complete")
            completion()
        }
    })
}
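Side note: createFade above builds exportAudioMix but never hands it to the export session, so the volume points cannot affect the exported file, and a passthrough preset does not re-encode the audio, so an audio mix has no effect with it anyway. A minimal sketch of how an AVMutableAudioMix is usually attached during export, using hypothetical sourceURL/outputURL placeholders rather than this question's exact code, might look like this:

import AVFoundation

// Sketch only: fade a single audio file in over its first two seconds and export it.
// "sourceURL" and "outputURL" are placeholder names; it assumes nothing exists yet at outputURL.
func exportWithFade(sourceURL: URL, outputURL: URL, completion: @escaping (Bool) -> Void) {
    let asset = AVAsset(url: sourceURL)
    guard let sourceTrack = asset.tracks(withMediaType: AVMediaTypeAudio).first else {
        completion(false)
        return
    }
    let parameters = AVMutableAudioMixInputParameters(track: sourceTrack)
    // Ramp the volume from 0 to 1 over the first two seconds.
    parameters.setVolumeRamp(fromStartVolume: 0.0, toEndVolume: 1.0,
                             timeRange: CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(2, 600)))
    let mix = AVMutableAudioMix()
    mix.inputParameters = [parameters]

    // A re-encoding preset is needed here; a passthrough export ignores the audio mix.
    guard let export = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetAppleM4A) else {
        completion(false)
        return
    }
    export.outputFileType = AVFileTypeAppleM4A
    export.outputURL = outputURL
    export.audioMix = mix  // without this assignment the volume points are never applied
    export.exportAsynchronously {
        completion(export.status == .completed)
    }
}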
Answer (score: 0):
Well, I found a way and it works fine. I have two audio files, A and B, where B is shorter than A, so I build a loop of B long enough to cover A. Then I play both audio files at the same time, and with a slider I modify B's volume. Finally, I save that volume configuration and mix the two audios. This is my code:
import UIKit
import AVFoundation

class ViewController: UIViewController {

    @IBOutlet weak var playButotn: UIButton!
    var player: AVAudioPlayer?
    var podcastPlayer: AVAudioPlayer?
    var times = 0
    @IBOutlet weak var volumeSlider: UISlider!
    var volumeRanges = [VolumeRange]()
    let type = "m4a"
    let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("mixedAudio.m4a")
    var backgroundUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("background.m4a")
    var mainUrl: URL?

    override func viewDidLoad() {
        super.viewDidLoad()
        mainUrl = Bundle.main.url(forResource: "prueba1", withExtension: type, subdirectory: "Audios")
        playButotn.isEnabled = false
        volumeSlider.value = 1
        let mainDurations = AVAsset(url: mainUrl!).duration
        print(AVAsset(url: mainUrl!).duration.seconds)
        let secondAudioUrl = Bundle.main.url(forResource: "rocking", withExtension: type, subdirectory: "Audios")
        let backgroundDuration = AVAsset(url: secondAudioUrl!).duration
        if mainDurations > backgroundDuration {
            times = Int(mainDurations.seconds / backgroundDuration.seconds)
            let rem = mainDurations.seconds.truncatingRemainder(dividingBy: backgroundDuration.seconds)
            if rem > 0 {
                times = times + 1
            }
            createLoop(times: times) {
                DispatchQueue.main.async {
                    self.playButotn.isEnabled = true
                }
            }
        } else {
            backgroundUrl = secondAudioUrl!
            createMix()
        }
    }
    func createMix() {
        let composition = AVMutableComposition()
        let mainAsset = AVAsset(url: mainUrl!)
        let backgroundAsset = AVAsset(url: backgroundUrl)
        let mainDurations = AVAsset(url: mainUrl!).duration

        let mainAudioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
        let backgroundAudioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
        let timeRange = CMTimeRangeMake(kCMTimeZero, mainDurations)
        do {
            try mainAudioTrack.insertTimeRange(timeRange, of: mainAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
            try backgroundAudioTrack.insertTimeRange(timeRange, of: backgroundAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
        } catch {
            print(error.localizedDescription)
        }

        let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetMediumQuality)
        assetExport?.outputFileType = AVFileTypeMPEG4
        assetExport?.outputURL = documentsDirectory
        assetExport?.shouldOptimizeForNetworkUse = true
        if FileManager.default.fileExists(atPath: documentsDirectory.path) {
            try! FileManager.default.removeItem(atPath: documentsDirectory.path)
        }
        assetExport?.exportAsynchronously(completionHandler: {
            switch assetExport!.status {
            case AVAssetExportSessionStatus.failed:
                print("failed \(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.cancelled:
                print("cancelled \(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.unknown:
                print("unknown\(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.waiting:
                print("waiting\(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.exporting:
                print("exporting\(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.completed:
                print("mix complete")
                DispatchQueue.main.async {
                    self.playButotn.isEnabled = true
                }
            }
        })
    }
    func createLoop(times: Int, completion: @escaping () -> Void) {
        let urlFondo = Bundle.main.url(forResource: "rocking", withExtension: type, subdirectory: "Audios")
        let acentoFile = try! JUMAudioFile(forReading: urlFondo!)
        let acentoPCM = acentoFile.getPCMArrayBufferFromURL()
        let (_, acentoCompleteData) = JUMAudioFile.convertToPoints(arrayFloatValues: acentoPCM)
        var newDraft = [Float]()
        // Concatenate the decoded samples "times" times to build the looped background file.
        for _ in 1...times {
            for array in acentoCompleteData {
                for fl in array {
                    newDraft.append(fl)
                }
            }
        }
        let _ = try! JUMAudioFile(createFileFromFloats: [newDraft], url: self.backgroundUrl)
        print("loop complete")
        completion()
    }
    func createLoopAudioWithFade(completion: @escaping () -> Void) {
        let composition = AVMutableComposition()
        let exportAudioMix = AVMutableAudioMix()
        var exportAudioMixInputParametersArry = [AVMutableAudioMixInputParameters]()
        let audioAsset = AVAsset(url: self.backgroundUrl)
        let audioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
        let timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
        do {
            try audioTrack.insertTimeRange(timeRange, of: audioAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
        } catch {
            print(error.localizedDescription)
        }

        // Turn every volume change recorded from the slider into a volume point on the mix.
        let exportAudioMixInputParameters = AVMutableAudioMixInputParameters(track: audioTrack)
        for ranges in volumeRanges {
            exportAudioMixInputParameters.setVolume(ranges.volume!, at: CMTimeMakeWithSeconds(ranges.start!, 50000))
        }
        exportAudioMixInputParametersArry.append(exportAudioMixInputParameters)
        exportAudioMix.inputParameters = exportAudioMixInputParametersArry

        let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetMediumQuality)
        assetExport?.outputFileType = AVFileTypeMPEG4
        assetExport?.outputURL = backgroundUrl
        assetExport?.shouldOptimizeForNetworkUse = true
        if FileManager.default.fileExists(atPath: backgroundUrl.path) {
            try! FileManager.default.removeItem(atPath: backgroundUrl.path)
        }
        assetExport?.audioMix = exportAudioMix
        assetExport?.exportAsynchronously(completionHandler: {
            switch assetExport!.status {
            case AVAssetExportSessionStatus.failed:
                print("failed \(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.cancelled:
                print("cancelled \(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.unknown:
                print("unknown\(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.waiting:
                print("waiting\(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.exporting:
                print("exporting\(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.completed:
                print("fade complete")
                completion()
            }
        })
    }
    @IBAction func play(_ sender: Any) {
        do {
            player = try AVAudioPlayer(contentsOf: backgroundUrl)
            player?.prepareToPlay()
            player?.volume = 1.0
            podcastPlayer = try AVAudioPlayer(contentsOf: mainUrl!)
            podcastPlayer?.prepareToPlay()
            podcastPlayer?.volume = 1
            podcastPlayer?.play()
            player?.play()
        } catch {
            print(error.localizedDescription)
        }
    }

    @IBAction func changeVolume(_ sender: UISlider) {
        if (player?.isPlaying)! {
            player?.volume = sender.value
            // Remember the new volume together with the playback time at which it was set.
            let volumeRange = VolumeRange()
            volumeRange.volume = sender.value
            volumeRange.start = player?.currentTime
            volumeRanges.append(volumeRange)
        }
    }

    @IBAction func touchUp(_ sender: UISlider) {
    }

    @IBAction func touchUpOutside(_ sender: UISlider) {
        print("ouside")
    }

    @IBAction func generar(_ sender: Any) {
        playButotn.isEnabled = false
        self.createLoopAudioWithFade() {
            self.createMix()
        }
    }

    @IBAction func playMix(_ sender: UIButton) {
        do {
            player = try AVAudioPlayer(contentsOf: documentsDirectory)
            player?.prepareToPlay()
            player?.volume = 1.0
            player?.play()
        } catch {
        }
    }
}
class VolumeRange {
    var volume: Float?
    var start: Double?
}
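Another hedged sketch, not part of the answer above: the looped background can also be built with AVFoundation alone by repeatedly inserting the short clip into a single AVMutableComposition track, instead of concatenating raw samples with JUMAudioFile. The names clipURL and outputURL below are placeholders.

import AVFoundation

// Sketch: build a looped background track by inserting a short clip "times" times
// into ONE composition track, then export it. URLs are placeholder parameters.
func exportLoop(of clipURL: URL, times: Int, to outputURL: URL,
                completion: @escaping (Bool) -> Void) {
    let clip = AVAsset(url: clipURL)
    guard times > 0,
          let sourceTrack = clip.tracks(withMediaType: AVMediaTypeAudio).first else {
        completion(false)
        return
    }

    let composition = AVMutableComposition()
    guard let loopTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio,
                                                      preferredTrackID: kCMPersistentTrackID_Invalid) else {
        completion(false)
        return
    }

    // Insert the clip back-to-back, advancing the cursor by the clip's duration each time.
    var cursor = kCMTimeZero
    let clipRange = CMTimeRangeMake(kCMTimeZero, clip.duration)
    for _ in 0..<times {
        do {
            try loopTrack.insertTimeRange(clipRange, of: sourceTrack, at: cursor)
        } catch {
            completion(false)
            return
        }
        cursor = CMTimeAdd(cursor, clip.duration)
    }

    guard let export = AVAssetExportSession(asset: composition,
                                            presetName: AVAssetExportPresetAppleM4A) else {
        completion(false)
        return
    }
    export.outputFileType = AVFileTypeAppleM4A
    export.outputURL = outputURL  // assumes no file exists yet at this URL
    export.exportAsynchronously {
        completion(export.status == .completed)
    }
}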