How do I get the URL of the modified audio file?

Date: 2019-06-11 06:37:47

Tags: ios swift avfoundation audiokit

Any idea how I can retrieve the URL of the modified (pitch-shifted) audio file so I can send it to a server? At the moment I can only play it back right after recording, but I would like some way to send the modified version to the server as well.

    Alamofire.upload(multipartFormData: { multiPartFormData in
        do {
            let soundfile = try Data(contentsOf: self.soundFileURL)
            self.setupAudio(url: self.soundFileURL)
            multiPartFormData.append(soundfile, withName: "message", fileName: "audio.wav", mimeType: "application/octet-stream")
        } catch {
            print("error reading audio file: \(error)")
        }
    }
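
The upload call above is cut off before its destination and encoding-completion handler. For reference, here is a minimal sketch of how the rest of an Alamofire 4 multipart upload typically looks; the uploadURL and the JSON response handling are hypothetical placeholders, not something from the original post:

    // Hypothetical endpoint; replace with the real server URL.
    let uploadURL = "https://example.com/api/messages"

    Alamofire.upload(multipartFormData: { multiPartFormData in
        // Same multipart body as in the snippet above.
        if let soundfile = try? Data(contentsOf: self.soundFileURL) {
            multiPartFormData.append(soundfile,
                                     withName: "message",
                                     fileName: "audio.wav",
                                     mimeType: "application/octet-stream")
        }
    }, to: uploadURL) { encodingResult in
        switch encodingResult {
        case .success(let upload, _, _):
            // Multipart body was encoded; now observe the actual request.
            upload.responseJSON { response in
                print(response)
            }
        case .failure(let error):
            print("multipart encoding failed: \(error)")
        }
    }

Whatever URL is handed to Data(contentsOf:) here is what ends up on the server, so sending the processed audio is really a question of producing a file for it first, which the sketch after playSound below tries to address.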


    func setupAudio(url: URL) {
        // initialize (recording) audio file
        do {
            audioFile = try AVAudioFile(forReading: url)
        } catch {
            showAlert("alert", message: String(describing: error))
        }
    }

I have this playSound function that applies the modifications to the sound file. I'm wondering whether I can get the modified audio out of it as a file and send that to the server (a possible offline-rendering approach is sketched after the function).

    private func playSound(rate: Float? = nil, pitch: Float? = nil, echo: Bool = false, reverb: Bool = false) {

        // initialize audio engine components
        audioEngine = AVAudioEngine()

        // node for playing audio
        audioPlayerNode = AVAudioPlayerNode()
        audioEngine.attach(audioPlayerNode)

        // node for adjusting rate/pitch
        let changeRatePitchNode = AVAudioUnitTimePitch()
        if let pitch = pitch {
            changeRatePitchNode.pitch = pitch
        }
        if let rate = rate {
            changeRatePitchNode.rate = rate
        }
        audioEngine.attach(changeRatePitchNode)

        // node for echo
        let echoNode = AVAudioUnitDistortion()
        echoNode.loadFactoryPreset(.multiEcho1)
        audioEngine.attach(echoNode)

        // node for reverb
        let reverbNode = AVAudioUnitReverb()
        reverbNode.loadFactoryPreset(.cathedral)
        reverbNode.wetDryMix = 50
        audioEngine.attach(reverbNode)

        // connect nodes
        if echo == true && reverb == true {
            connectAudioNodes(audioPlayerNode, changeRatePitchNode, echoNode, reverbNode, audioEngine.outputNode)
        } else if echo == true {
            connectAudioNodes(audioPlayerNode, changeRatePitchNode, echoNode, audioEngine.outputNode)
        } else if reverb == true {
            connectAudioNodes(audioPlayerNode, changeRatePitchNode, reverbNode, audioEngine.outputNode)
        } else {
            connectAudioNodes(audioPlayerNode, changeRatePitchNode, audioEngine.outputNode)
        }

        // schedule to play and start the engine!
        audioPlayerNode.stop()
        audioPlayerNode.scheduleFile(audioFile, at: nil) {

            var delayInSeconds: Double = 0

            if let lastRenderTime = self.audioPlayerNode.lastRenderTime, let playerTime = self.audioPlayerNode.playerTime(forNodeTime: lastRenderTime) {

                if let rate = rate {
                    delayInSeconds = Double(self.audioFile.length - playerTime.sampleTime) / Double(self.audioFile.processingFormat.sampleRate) / Double(rate)
                } else {
                    delayInSeconds = Double(self.audioFile.length - playerTime.sampleTime) / Double(self.audioFile.processingFormat.sampleRate)
                }
            }

            // schedule a stop timer for when audio finishes playing
            self.playSoundTimer = Timer(timeInterval: delayInSeconds, target: self, selector: #selector(self.stopAudio), userInfo: nil, repeats: false)
            RunLoop.main.add(self.playSoundTimer!, forMode: .defaultRunLoopMode)
        }

        do {
            try audioEngine.start()
        } catch {
            showAlert("alert", message: String(describing: error))
            return
        }

        // play the recording!
        audioPlayerNode.play()
    }
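
The pitch, rate, echo and reverb in playSound are applied live inside the engine's render chain, so the processed audio never exists on disk; only the original recording at soundFileURL does, and that is the file the upload sends. On iOS 11 and later, one way to get a URL for the processed audio is to run an equivalent node chain in AVAudioEngine's offline manual rendering mode and write the rendered buffers into a new AVAudioFile. The following is a minimal sketch of that idea, covering only the time/pitch part of the chain, with a hypothetical helper name and output location; it is not code from the original post:

    import AVFoundation

    // Hypothetical helper: renders the recording at `sourceURL` through a
    // pitch/rate chain offline (iOS 11+) and returns the URL of the result.
    func renderProcessedFile(from sourceURL: URL,
                             pitch: Float? = nil,
                             rate: Float? = nil) throws -> URL {
        let sourceFile = try AVAudioFile(forReading: sourceURL)
        let format = sourceFile.processingFormat

        // Build the same kind of graph playSound uses, but in a throwaway engine.
        let engine = AVAudioEngine()
        let player = AVAudioPlayerNode()
        let timePitch = AVAudioUnitTimePitch()
        if let pitch = pitch { timePitch.pitch = pitch }
        if let rate = rate { timePitch.rate = rate }

        engine.attach(player)
        engine.attach(timePitch)
        engine.connect(player, to: timePitch, format: format)
        engine.connect(timePitch, to: engine.mainMixerNode, format: format)

        // Offline manual rendering: output goes to buffers we pull, not the speaker.
        let maxFrames: AVAudioFrameCount = 4096
        try engine.enableManualRenderingMode(.offline, format: format,
                                             maximumFrameCount: maxFrames)
        try engine.start()
        player.scheduleFile(sourceFile, at: nil)
        player.play()

        // Hypothetical destination for the rendered audio.
        let outputURL = FileManager.default.temporaryDirectory
            .appendingPathComponent("processed_audio.caf")
        let outputFile = try AVAudioFile(forWriting: outputURL,
                                         settings: sourceFile.fileFormat.settings)

        let buffer = AVAudioPCMBuffer(pcmFormat: engine.manualRenderingFormat,
                                      frameCapacity: engine.manualRenderingMaximumFrameCount)!

        // Pull rendered frames until the whole source file has been processed.
        while engine.manualRenderingSampleTime < sourceFile.length {
            let framesLeft = sourceFile.length - engine.manualRenderingSampleTime
            let framesToRender = min(AVAudioFrameCount(framesLeft), buffer.frameCapacity)
            let status = try engine.renderOffline(framesToRender, to: buffer)
            switch status {
            case .success:
                try outputFile.write(from: buffer)
            case .insufficientDataFromInputNode, .cannotDoInCurrentContext:
                continue            // nothing rendered this pass, try again
            case .error:
                throw NSError(domain: "OfflineRender", code: -1, userInfo: nil)
            @unknown default:
                break
            }
        }

        player.stop()
        engine.stop()
        return outputURL
    }

The returned URL could then replace self.soundFileURL in the upload call. The echo and reverb nodes from playSound can be attached and connected into the same offline graph, and if a rate other than 1.0 is used, the loop bound needs adjusting because the rendered output is longer or shorter than the source.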

0 Answers:

No answers yet.