How to increase the quality of a recorded voice?

时间:2016-12-09 13:01:11

标签: ios swift avplayer trim avaudiorecorder

I have to merge the audio file and recorded voice.For example, the recorded voice is 47seconds. I have to cut or trim the 4minutes audio song to 47seconds. And merge the audio file.It's working fine now.If I hear once I merged both.. My voice is very low and Music is very high.How to increase the Voice quality?I attached the audio recorder settings also:

func mixAudio()
{
    let currentTime = CFAbsoluteTimeGetCurrent()
    let composition = AVMutableComposition()
    let compositionAudioTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    compositionAudioTrack.preferredVolume = 0.8
    print(soundFileURL)
    do {

}

catch _
{
}
    let avAsset = AVURLAsset.init(URL: soundFileURL, options: nil)
    print("\(avAsset)")
    var tracks = avAsset.tracksWithMediaType(AVMediaTypeAudio)
    let clipAudioTrack = tracks[0]
    do {
        try compositionAudioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, avAsset.duration), ofTrack: clipAudioTrack, atTime: kCMTimeZero)
    }
    catch _
    {
    }
    let compositionAudioTrack1 = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    compositionAudioTrack1.preferredVolume = 0.8
    let avAsset1 = AVURLAsset.init(URL: soundFileURL1)
    print(avAsset1)
            var tracks1 = avAsset1.tracksWithMediaType(AVMediaTypeAudio)
    let clipAudioTrack1 = tracks1[0]
    do {
        try compositionAudioTrack1.insertTimeRange(CMTimeRangeMake(kCMTimeZero, avAsset1.duration), ofTrack: clipAudioTrack1, atTime: kCMTimeZero)
    }
    catch _ {
    }
    var paths = NSSearchPathForDirectoriesInDomains(.LibraryDirectory, .UserDomainMask, true)
    let CachesDirectory = paths[0]
    let strOutputFilePath = CachesDirectory.stringByAppendingString("/Fav")
    print(" strOutputFilePath is \n \(strOutputFilePath)")

    let requiredOutputPath = CachesDirectory.stringByAppendingString("/Fav.m4a")
    print(" requiredOutputPath is \n \(requiredOutputPath)")

    soundFile1 = NSURL.fileURLWithPath(requiredOutputPath)
    print(" OUtput path is \n \(soundFile1)")
    let audioDuration = avAsset.duration
    let totalSeconds = CMTimeGetSeconds(audioDuration)
    let hours = Int64(totalSeconds / 3600)
    let minutes = Int64(totalSeconds % 3600 / 60)
    let seconds = Int64(totalSeconds % 3600 % 60)
    print("hours = \(hours), minutes = \(minutes), seconds = \(seconds)")
    print(soundFile1)
    let recordSettings:[String : AnyObject] = [

        AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
        AVSampleRateKey: 16000,
        AVNumberOfChannelsKey: 1,
        AVEncoderAudioQualityKey: AVAudioQuality.Medium.rawValue
    ]
    do {
        audioRecorder = try AVAudioRecorder(URL: soundFile1, settings: recordSettings)
        audioRecorder!.delegate = self
        audioRecorder!.meteringEnabled = true
        audioRecorder!.prepareToRecord()
    }

    catch let error as NSError
    {
        audioRecorder = nil
        print(error.localizedDescription)
    }

    do {

        try NSFileManager.defaultManager().removeItemAtURL(soundFile1)
    }
    catch _ {
    }
    let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
    exporter!.outputURL = soundFile1
    exporter!.outputFileType = AVFileTypeAppleM4A
    let duration = CMTimeGetSeconds(avAsset1.duration)
    print(duration)
    if (duration < 5.0) {
        print("sound is not long enough")
        return
    }
    // e.g. the first 30 seconds
    let startTime = CMTimeMake(0, 1)
    let stopTime = CMTimeMake(seconds,1)
    let exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime)
    print(exportTimeRange)
    exporter!.timeRange = exportTimeRange
    print(exporter!.timeRange)


    exporter!.exportAsynchronouslyWithCompletionHandler
        {() -> Void in
            print(" OUtput path is \n \(requiredOutputPath)")
            print("export complete: \(CFAbsoluteTimeGetCurrent() - currentTime)")
            var url:NSURL?
            if self.audioRecorder != nil
            {
                url = self.audioRecorder!.url
            }

            else
            {
                url = self.soundFile1!
                print(url)

            }

            print("playing \(url)")

    do
    {
        print(self.soundFile1)
        print(" OUtput path is \n \(requiredOutputPath)")


        let session:AVAudioSession = AVAudioSession.sharedInstance()

        do {
            try session.setCategory(AVAudioSessionCategoryPlayback)
        } catch let error as NSError {
            print("could not set session category")
            print(error.localizedDescription)
        }
        do {
            // try session.setActive(true)
        } catch let error as NSError
        {
            print("could not make session active")
            print(error.localizedDescription)
        }
        do
        {

        }
        self.PlayerWithHeadphone = try AVAudioPlayer(contentsOfURL: self.soundFile1)
        self.PlayerWithHeadphone.numberOfLoops = 0
        self.PlayerWithHeadphone.volume = 2.0
        self.PlayerWithHeadphone.play()
    }
    catch _
    {

    }

    }
}

0 个答案:

没有答案