How can I draw a waveform for the audio I recorded?

Date: 2019-01-11 18:43:07

Tags: ios swift avfoundation waveform avassetreader

I am recording audio with AVFoundation using the settings below. After a recording finishes successfully, I need to show the user a waveform of the recorded file. Can anyone help me with this?

Here are my recorder settings:

let recordSettings =
    [AVNumberOfChannelsKey: 1,
     AVFormatIDKey : kAudioFormatOpus,
     AVSampleRateKey: 24000.0] as [String : Any]

3 Answers:

Answer 0 (score: 0)

I would adapt the code from this project: https://github.com/VideoFlint/VIWaveformView

Alternatively, you can use a third-party library to do this. See here for examples: https://github.com/topics/waveform?l=swift
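
If you would rather build it yourself, the core job those libraries do is decoding the file to PCM and downsampling the samples into per-bucket peaks that you can draw. Below is a minimal sketch using AVAssetReader; the loadAmplitudes function, its targetSamples parameter, and the 16-bit PCM output settings are my own assumptions rather than code from the question, and error handling is omitted:

import AVFoundation

// Rough sketch only: reads the decoded PCM samples of a finished recording and
// reduces them to normalized bar heights. Assumptions: `audioURL` is the recorded
// file, the track decodes to 16-bit interleaved PCM, and `targetSamples` is a
// made-up parameter for how many bars you want to draw.
func loadAmplitudes(from audioURL: URL, targetSamples: Int = 200) throws -> [Float] {
    let asset = AVURLAsset(url: audioURL)
    guard let track = asset.tracks(withMediaType: .audio).first else { return [] }

    let reader = try AVAssetReader(asset: asset)
    let output = AVAssetReaderTrackOutput(track: track, outputSettings: [
        AVFormatIDKey: kAudioFormatLinearPCM,
        AVLinearPCMBitDepthKey: 16,
        AVLinearPCMIsFloatKey: false,
        AVLinearPCMIsBigEndianKey: false,
        AVLinearPCMIsNonInterleaved: false
    ])
    reader.add(output)
    guard reader.startReading() else { return [] }

    // Pull every decoded Int16 sample out of the sample buffers.
    var samples = [Int16]()
    while let sampleBuffer = output.copyNextSampleBuffer(),
          let blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer) {
        let length = CMBlockBufferGetDataLength(blockBuffer)
        var data = Data(count: length)
        data.withUnsafeMutableBytes { (ptr: UnsafeMutableRawBufferPointer) -> Void in
            guard let base = ptr.baseAddress else { return }
            _ = CMBlockBufferCopyDataBytes(blockBuffer, atOffset: 0, dataLength: length, destination: base)
        }
        samples.append(contentsOf: data.withUnsafeBytes { Array($0.bindMemory(to: Int16.self)) })
    }

    // Downsample: keep the peak of each bucket and normalize to 0...1.
    let bucketSize = max(samples.count / targetSamples, 1)
    return stride(from: 0, to: samples.count, by: bucketSize).map { start -> Float in
        let bucket = samples[start..<min(start + bucketSize, samples.count)]
        let peak = bucket.map { abs(Int32($0)) }.max() ?? 0
        return Float(peak) / Float(Int16.max)
    }
}

Each returned value is in 0...1, so it can be mapped directly to a bar height in whatever view does the drawing.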

Answer 1 (score: 0)
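
The code below is a self-contained scrolling waveform component. WaveformView draws the amplitude bars with Core Graphics (even-indexed amplitudes become the short lower bars, odd-indexed ones the tall upper bars), while WaveformComponent overlays a non-highlighted copy on top of a highlighted copy and masks the top one so the highlighted waveform shows through as playback progresses; it scrolls with a discrete CAKeyframeAnimation and reports scrubbing/progress through WaveformScrollDelegate. It expects normalized amplitudes with two values per second. A few symbols (RYKit, qmui_targetView, AudioPlayerViewController) are dependencies from the original project.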

import UIKit

import RYKit // external dependency from the original author's project

let normalColor = UIColor.white
let normalAlphaColor = UIColor.init(white: 1.0, alpha: 0.5)

let highlightColor = UIColor.init(red: 163.0/255.0, green: 243.0/255.0, blue: 16.0/255.0, alpha: 1.0)
let highlightAlphaColor = UIColor.init(red: 163.0/255.0, green: 243.0/255.0, blue: 16.0/255.0, alpha: 0.24)
let waveWidth = CGFloat(2.5)
let waveSpace = CGFloat(0.5)
let waveRadius = CGFloat(1.25)
let upMaxHeight = CGFloat(60)
let downMaxHeight = CGFloat(30)
let upDownSpace = CGFloat(2)
// Assumption: `screenw` (used below) is the screen width; the original project defined it elsewhere.
let screenw = UIScreen.main.bounds.width

/// Scrubbing callbacks: `didScrollToTime` fires when a drag ends, with the target second,
/// and `didScrollByPercentage` reports continuous progress in the range 0...1.
protocol WaveformScrollDelegate: NSObjectProtocol {
    func didScrollToTime(time: NSInteger)
    func didScrollByPercentage(percent: Double, animated: Bool)
}

class WaveformComponent: UIView, CAAnimationDelegate, UIGestureRecognizerDelegate {

    private var timeLine: UILabel!
    private var topView: WaveformView!
    private var topViewMask: CALayer!
    private var bottomView: WaveformView!
    private var isAnimated = false
    private let convertTime = {
        (seconds: Int) -> String in
        let minute = seconds / 60
        let minuteStr = minute > 9 ? "\(minute)" : "0\(minute)"
        let second = seconds % 60
        let secondStr = second > 9 ? "\(second)" : "0\(second)"
        return "\(minuteStr):\(secondStr)"
    }
    var animationTimer: Timer!
    weak var delegate: WaveformScrollDelegate?
    var isVisible = true
    
    /*
    // Only override draw() if you perform custom drawing.
    // An empty implementation adversely affects performance during animation.
    override func draw(_ rect: CGRect) {
        // Drawing code
    }
    */
    
    required init?(coder: NSCoder) {
        super.init(coder: coder)
    }
    
    init(frame: CGRect, amplitudes: [Double]) {
        super.init(frame: frame)
        self.backgroundColor = UIColor.clear
        self.isOpaque = true
        self.clipsToBounds = true
        let width = (waveWidth + waveSpace) * CGFloat(amplitudes.count / 2)
        let height = upMaxHeight + downMaxHeight + upDownSpace
        let waveRect = CGRect.init(x: frame.size.width/2.0, y: (frame.size.height - height)/2.0, width: width, height: height)
        
        bottomView = WaveformView.init(frame: waveRect, amplitudes: amplitudes, isHighlight: true)
        self.addSubview(bottomView)
        
        topView = WaveformView.init(frame: waveRect, amplitudes: amplitudes, isHighlight: false)
        self.addSubview(topView)
        
        topViewMask = CALayer()
        topViewMask.frame = topView.bounds
        topViewMask.backgroundColor = UIColor.white.cgColor
        topView.layer.mask = topViewMask
        
        timeLine = UILabel.init(frame: CGRect.init(x: (frame.size.width - 61.5)/2.0, y: (frame.size.height - upMaxHeight - upDownSpace - downMaxHeight)/2.0 + upMaxHeight - 19.0, width: 61.5, height: 19.0))
        timeLine.backgroundColor = UIColor.init(red: 18/255.0, green: 18/255.0, blue: 18/255.0, alpha: 0.72)
        timeLine.layer.cornerRadius = 9.5
        timeLine.layer.masksToBounds = true
        timeLine.textColor = UIColor.white
        timeLine.font = UIFont.init(name: "PingFangSC-Regular", size: 8.0)
        timeLine.textAlignment = .center
        timeLine.text = "\(convertTime(0))/\(convertTime(amplitudes.count/2))"
        self.addSubview(timeLine)
        
        let panGesture = UIPanGestureRecognizer.init(target: self, action: #selector(handleGesture(gesture:)))
        panGesture.delegate = self
        addGestureRecognizer(panGesture)
        isUserInteractionEnabled = true
    }
    
    func configureAmplitudes(amplitudes: [Double]) {
        let width = (waveWidth + waveSpace) * CGFloat(amplitudes.count / 2)
        let height = upMaxHeight + downMaxHeight + upDownSpace
        self.topView.amplitudes = amplitudes
        self.topView.frame = CGRect(x: screenw/2, y: 0, width: width, height: height)
        self.topView.setNeedsDisplay()
        topViewMask.frame = topView.bounds
        self.bottomView.amplitudes = amplitudes
        self.bottomView.frame = CGRect(x: screenw/2, y: 0, width: width, height: height)
        self.bottomView.setNeedsDisplay()
    }
    
    // Starts the scrolling keyframe animation and a 1 s timer that refreshes the
    // time label and reports progress to the delegate. Each second of audio occupies
    // waveWidth + waveSpace = 3 pt of waveform, hence the `delta / 3` conversions.
    func play() {
        if !isAnimated {
            isAnimated = true
            topView.layer.add(keyframeAnimationFrom(topView.layer.position.x, to: (self.bounds.size.width - topView.layer.bounds.size.width)/2, isTop: false), forKey: "pan")
            topViewMask.add(keyframeAnimationFrom(topViewMask.position.x, to: topViewMask.bounds.size.width*3/2, isTop: false), forKey: "pan")
            bottomView.layer.add(keyframeAnimationFrom(bottomView.layer.position.x, to: (self.bounds.size.width - bottomView.layer.bounds.size.width)/2, isTop: false), forKey: "pan")
            animationTimer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true, block: { [weak self] _ in
                guard let self = self,
                      let presentation = self.topView.layer.presentation() else { return }
                let delta = (self.bounds.size.width + self.topView.bounds.size.width)/2 - presentation.position.x
                self.timeLine.text = "\(self.convertTime(Int(round(delta / 3))))/\(self.convertTime(self.topView.amplitudes.count/2))"
                let offset = delta / 3
                let distance = self.topView.amplitudes.count/2
                if distance > 0 {
                    self.delegate?.didScrollByPercentage(percent: Double(offset) / Double(distance), animated: true)
                } else {
                    self.delegate?.didScrollByPercentage(percent: 0, animated: true)
                }
            })
        }
    }
    
    func pause() {
        if isAnimated {
            topView.layer.position = topView.layer.presentation()!.position
            topViewMask.position = topViewMask.presentation()!.position
            bottomView.layer.position = bottomView.layer.presentation()!.position
            removeAnimate()
        }
    }
    
    // Returns the waveform to the start position and cancels any running animation.
    func reset() {
        timeLine.text = "\(convertTime(0))/\(convertTime(topView.amplitudes.count/2))"
        let position = CGPoint(x: (self.bounds.width + topView.bounds.width) / 2, y: self.bounds.height / 2)
        topView.layer.position = position
        topView.layer.removeAllAnimations()
        topViewMask.position = CGPoint(x: topView.bounds.width / 2, y: topView.bounds.height / 2)
        topViewMask.removeAllAnimations()
        bottomView.layer.position = position
        bottomView.layer.removeAllAnimations()
        isAnimated = false
        stopTimer()
    }
    
    // Positions the waveform at `offset` seconds (3 pt per second) before playback starts.
    func initialOffset(offset: Int) {
        let position = CGPoint(x: (self.bounds.width + topView.bounds.width) / 2 - 3 * CGFloat(offset), y: self.bounds.height / 2)
        topView.layer.position = position
        topViewMask.position = CGPoint(x: topView.bounds.width / 2 + 3 * CGFloat(offset), y: topView.bounds.height / 2)
        bottomView.layer.position = position
        timeLine.text = "\(convertTime(offset))/\(convertTime(topView.amplitudes.count/2))"
    }
    
    func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldRequireFailureOf otherGestureRecognizer: UIGestureRecognizer) -> Bool {
        // `qmui_targetView` / `parentViewController` appear to come from QMUIKit-style extensions,
        // and AudioPlayerViewController is the original author's own view controller.
        if let swipe = otherGestureRecognizer as? UISwipeGestureRecognizer,
           swipe.direction == .up || swipe.direction == .down,
           swipe.qmui_targetView?.parentViewController?.isKind(of: AudioPlayerViewController.self) == true {
            return true
        }
        return false
    }
    
//    func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldBeRequiredToFailBy otherGestureRecognizer: UIGestureRecognizer) -> Bool {
//        return true
//    }
    
    // Pan gesture: dragging scrubs the waveform; dragging past either edge is treated
    // as "skip to next track" / "back to previous track".
    @objc private func handleGesture(gesture: UIPanGestureRecognizer) {
        if gesture.state == .changed {
            let translation = gesture.translation(in: self)
            let absX = abs(translation.x)
            let absY = abs(translation.y)
            if (absX > absY ) {
                if (translation.x < 0) {
                    // swiping left
                    if isAnimated {
                        topView.layer.position = CGPoint.init(x: max(topView.layer.presentation()!.position.x + translation.x, (self.bounds.size.width - topView.layer.bounds.size.width)/2), y: topView.layer.position.y)
                        topViewMask.position = CGPoint.init(x: min(topViewMask.presentation()!.position.x - translation.x, topViewMask.bounds.size.width*3/2), y: topViewMask.position.y)
                        bottomView.layer.position = CGPoint.init(x: max(bottomView.layer.presentation()!.position.x + translation.x, (self.bounds.size.width - bottomView.layer.bounds.size.width)/2), y: bottomView.layer.position.y)
                    }else {
                        if topView.layer.frame.origin.x + topView.layer.frame.size.width <= self.bounds.size.width / 2 {
                            print("Swiped left: skip to the next track")
                            return
                        }
                        topView.layer.position = CGPoint.init(x: max(topView.layer.position.x + translation.x, (self.bounds.size.width - topView.layer.bounds.size.width)/2), y: topView.layer.position.y)
                        topViewMask.position = CGPoint.init(x: min(topViewMask.position.x - translation.x, topViewMask.bounds.size.width*3/2), y: topViewMask.position.y)
                        bottomView.layer.position = CGPoint.init(x: max(bottomView.layer.position.x + translation.x, (self.bounds.size.width - bottomView.layer.bounds.size.width)/2), y: bottomView.layer.position.y)
                    }
                    gesture.setTranslation(CGPoint.zero, in: self)
                }else{
                    // swiping right
                    if isAnimated {
                        topView.layer.position = CGPoint.init(x: min(topView.layer.presentation()!.position.x + translation.x, (self.bounds.size.width + topView.layer.bounds.size.width)/2), y: topView.layer.position.y)
                        topViewMask.position = CGPoint.init(x: max(topViewMask.presentation()!.position.x - translation.x, topViewMask.bounds.size.width/2), y: topViewMask.position.y)
                        bottomView.layer.position = CGPoint.init(x: min(bottomView.layer.presentation()!.position.x + translation.x, (self.bounds.size.width + bottomView.layer.bounds.size.width)/2), y: bottomView.layer.position.y)
                    }else {
                        if topView.layer.frame.origin.x >= self.bounds.size.width / 2 {
                            print("Swiped right: go back to the previous track")
                            return
                        }
                        topView.layer.position = CGPoint.init(x: min(topView.layer.position.x + translation.x, (self.bounds.size.width + topView.layer.bounds.size.width)/2), y: topView.layer.position.y)
                        topViewMask.position = CGPoint.init(x: max(topViewMask.position.x - translation.x, topViewMask.bounds.size.width/2), y: topViewMask.position.y)
                        bottomView.layer.position = CGPoint.init(x: min(bottomView.layer.position.x + translation.x, (self.bounds.size.width + bottomView.layer.bounds.size.width)/2), y: bottomView.layer.position.y)
                    }
                    gesture.setTranslation(CGPoint.zero, in: self)
                }
                removeAnimate()
                scrollTimeLineWhetherNotice(notice: false)
                if let delegate = delegate {
                    let offset = (self.bounds.width + topView.bounds.width) / 2 - topView.layer.position.x
                    let distance = topView.bounds.width
                    delegate.didScrollByPercentage(percent: Double(offset) / Double(distance), animated: false)
                }
            }
        }
        if gesture.state == .ended {
            // The track may still be buffering, so call play() manually once it is ready.
//            play()
            scrollTimeLineWhetherNotice(notice: true)
        }
    }
    
    // Recomputes the current time from the scroll position, updates the label, and
    // optionally notifies the delegate of the seek target.
    private func scrollTimeLineWhetherNotice(notice: Bool) {
        let delta = (self.bounds.size.width + self.topView.bounds.size.width)/2 - self.topView.layer.position.x
        var time = NSInteger(round(delta / 3))
        if time >= topView.amplitudes.count / 2 {
            time = topView.amplitudes.count / 2 - 1
        }
        timeLine.text = "\(convertTime(time))/\(convertTime(topView.amplitudes.count/2))"
        if delegate != nil && notice {
            delegate?.didScrollToTime(time: time)
        }
    }
    
    private func removeAnimate() {
        if isAnimated {
            isAnimated = false
            topView.layer.removeAnimation(forKey: "pan")
            topViewMask.removeAnimation(forKey: "pan")
            bottomView.layer.removeAnimation(forKey: "pan")
        }
    }
    
    // Builds a discrete keyframe animation that moves the layer one pixel per keyframe;
    // the duration is chosen so one bar column (waveWidth + waveSpace points) scrolls past per second.
    private func keyframeAnimationFrom(_ start: CGFloat, to end: CGFloat, isTop: Bool) -> CAAnimation {
        let animation = CAKeyframeAnimation.init(keyPath: "position.x")
        let scale = UIScreen.main.scale
        let increment = copysign(1, end - start) / scale
        let numberOfSteps = Int(abs((end - start) / increment))
        var positions = [CGFloat]()
        positions.reserveCapacity(numberOfSteps)
        for i in 0..<numberOfSteps {
            positions.append(start + CGFloat(i) * increment)
        }
        animation.values = positions
        animation.calculationMode = .discrete
        animation.isRemovedOnCompletion = false
        animation.fillMode = .forwards
        animation.duration = Double(Int(abs(end - start) / (waveWidth + waveSpace)))
        animation.delegate = self
        
        return animation
    }
    
    func animationDidStart(_ anim: CAAnimation) {
        if anim == topView.layer.animation(forKey: "pan") {
            
        }
    }
    
    func animationDidStop(_ anim: CAAnimation, finished flag: Bool) {
        if UIApplication.shared.applicationState == .active && isVisible {
            if isAnimated {
                reset()
            }
            stopTimer()
        }
    }
    
    private func stopTimer() {
        guard let animationTimer = self.animationTimer else {
            return
        }
        if animationTimer.isValid {
            self.animationTimer.invalidate()
            self.animationTimer = nil
        }
    }
    
    deinit {
        print("release WaveformComponent")
    }
}

// Draws the full set of amplitude bars: even-indexed values become the short lower bars
// (semi-transparent), odd-indexed values the tall upper bars (solid).
class WaveformView: UIView {

    var isHighlight = false
    var amplitudes = [Double]()
    
    required init?(coder: NSCoder) {
        super.init(coder: coder)
    }
    
    init(frame: CGRect, amplitudes: [Double], isHighlight: Bool) {
        super.init(frame: frame)
        self.backgroundColor = UIColor.clear
        self.isOpaque = true
        self.amplitudes = amplitudes
        self.isHighlight = isHighlight
    }
    
    // Only override draw() if you perform custom drawing.
    // An empty implementation adversely affects performance during animation.
    override func draw(_ rect: CGRect) {
        // Drawing code
        guard let context = UIGraphicsGetCurrentContext() else { return }
        
        for i in 0..<amplitudes.count {
            if i % 2 == 0 {
                // Even index: lower (downward) bar, drawn with the semi-transparent color.
                let path = CGMutablePath()
                let height = downMaxHeight * CGFloat(abs(amplitudes[i]))
                path.addRoundedRect(in: CGRect(x: CGFloat(i / 2) * (waveWidth + waveSpace), y: upMaxHeight + upDownSpace, width: waveWidth, height: height), cornerWidth: waveRadius, cornerHeight: waveRadius >= height / 2.0 ? 0 : waveRadius)
                context.addPath(path)
                if isHighlight {
                    context.setFillColor(highlightAlphaColor.cgColor)
                }else {
                    context.setFillColor(normalAlphaColor.cgColor)
                }
                context.fillPath()
            } else {
                // Odd index: upper (upward) bar, drawn with the solid color.
                let path = CGMutablePath()
                let height = upMaxHeight * CGFloat(abs(amplitudes[i]))
                path.addRoundedRect(in: CGRect(x: CGFloat(i / 2) * (waveWidth + waveSpace), y: upMaxHeight - height, width: waveWidth, height: height), cornerWidth: waveRadius, cornerHeight: waveRadius >= height / 2.0 ? 0 : waveRadius)
                context.addPath(path)
                if isHighlight {
                    context.setFillColor(highlightColor.cgColor)
                }else {
                    context.setFillColor(normalColor.cgColor)
                }
                context.fillPath()
            }
        }
    }
}
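
A minimal usage sketch for the component above (assuming `amplitudes` already contains normalized 0...1 values as [Double], two entries per second as the component's timing math expects, and that `self` adopts WaveformScrollDelegate):

let waveform = WaveformComponent(
    frame: CGRect(x: 0, y: 0, width: view.bounds.width, height: 100),
    amplitudes: amplitudes)
waveform.delegate = self          // WaveformScrollDelegate callbacks for scrubbing/progress
view.addSubview(waveform)
waveform.play()                   // start scrolling; pause() and reset() are also available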

Answer 2 (score: 0)

I don't know whether you want to build the waveform from scratch or just customize one, but there is a library called FDWaveformView that I have used in the past. After installing it in your project, you can add a UIView whose class is set to FDWaveformView and then feed it an audio file.

Your code might look like this:

import UIKit
import FDWaveformView

class ViewController: UIViewController {
  @IBOutlet weak var mySampleWaveform: FDWaveformView!
  
  override func viewDidLoad() {
    super.viewDidLoad()
    let thisBundle = Bundle(for: type(of: self))
    let url = thisBundle.url(forResource: "myaudio", withExtension: "mp3")
    mySampleWaveform.audioURL = url
    mySampleWaveform.wavesColor = .green
    mySampleWaveform.doesAllowScrubbing = true
    mySampleWaveform.doesAllowStretch = true
    mySampleWaveform.doesAllowScroll = true
  }
}

It will render something like this:

(waveform screenshot)

This should give you a good enough sense of how the waveform works, and you can customize many things such as colors, width, and height.