Converting Objective-C AudioUnit code to Swift

Asked: 2015-10-23 06:43:55

Tags: ios, audiounit, avaudiosession, audiotoolbox

I have already managed to translate this code far enough that the render callback gets called: http://www.cocoawithlove.com/2010/10/ios-tone-generator-introduction-to.html

I am fairly sure my render callback is not implemented correctly, because I either get no sound at all or very loud noise in my headphones. I also don't see the connection between the audioSession set up in viewDidLoad and the rest of the code.

Can anyone help me figure this out?

private func performRender(
    inRefCon: UnsafeMutablePointer<Void>,
    ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
    inTimeStamp: UnsafePointer<AudioTimeStamp>,
    inBufNumber: UInt32,
    inNumberFrames: UInt32,
    ioData: UnsafeMutablePointer<AudioBufferList>) -> OSStatus
{
    // get object
    let vc = unsafeBitCast(inRefCon, ViewController.self)
    print("callback")

    let thetaIncrement = 2.0 * M_PI * vc.kFrequency / vc.kSampleRate
    var theta = vc.theta

    //    var sinValues = [Int32]()
    let amplitude : Double = 0.25

    let abl = UnsafeMutableAudioBufferListPointer(ioData)
    for buffer in abl
    {
        let val : Int32 = Int32((sin(theta) * amplitude))
        //        sinValues.append(val)
        theta += thetaIncrement

        memset(buffer.mData, val, Int(buffer.mDataByteSize))
    }

    vc.theta = theta

    return noErr
}

class ViewController: UIViewController
{
    let kSampleRate : Float64 = 44100
    let kFrequency : Double = 440
    var theta : Double = 0

    private var toneUnit = AudioUnit()
    private let kInputBus = AudioUnitElement(1)
    private let kOutputBus = AudioUnitElement(0)

    @IBAction func tooglePlay(sender: UIButton)
    {
        if(toneUnit != nil)
        {
            AudioOutputUnitStop(toneUnit)
            AudioUnitInitialize(toneUnit)
            AudioComponentInstanceDispose(toneUnit)
            toneUnit = nil
        }
        else
        {
            createToneUnit()
            var err = AudioUnitInitialize(toneUnit)
            assert(err == noErr, "error initializing audiounit!")
            err = AudioOutputUnitStart(toneUnit)
            assert(err == noErr, "error starting audiooutput unit!")
        }
    }

    func createToneUnit()
    {
        var defaultOutputDescription = AudioComponentDescription(
            componentType: kAudioUnitType_Output,
            componentSubType: kAudioUnitSubType_RemoteIO,
            componentManufacturer: kAudioUnitManufacturer_Apple,
            componentFlags: 0,
            componentFlagsMask: 0)

        let defaultOutput = AudioComponentFindNext(nil, &defaultOutputDescription)

        let fourBytesPerFloat : UInt32 = 4
        let eightBitsPerByte : UInt32 = 8

        var err = AudioComponentInstanceNew(defaultOutput, &toneUnit)
        assert(err == noErr, "error setting audio component instance!")
        var input = AURenderCallbackStruct(inputProc: performRender, inputProcRefCon: UnsafeMutablePointer(unsafeAddressOf(self)))

        err = AudioUnitSetProperty(toneUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Global, kOutputBus, &input, UInt32(sizeof(AURenderCallbackStruct)))
        assert(err == noErr, "error setting render callback!")

        var streamFormat = AudioStreamBasicDescription(
            mSampleRate: kSampleRate,
            mFormatID: kAudioFormatLinearPCM,
            mFormatFlags: kAudioFormatFlagsNativeFloatPacked,
            mBytesPerPacket: fourBytesPerFloat,
            mFramesPerPacket: 1,
            mBytesPerFrame: fourBytesPerFloat,
            mChannelsPerFrame: 1,
            mBitsPerChannel: fourBytesPerFloat*eightBitsPerByte,
            mReserved: 0)

        err = AudioUnitSetProperty(toneUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, kInputBus, &streamFormat, UInt32(sizeof(AudioStreamBasicDescription)))
        assert(err == noErr, "error setting audiounit property!")
    }

    override func viewDidLoad()
    {
        super.viewDidLoad()
        let audioSession = AVAudioSession.sharedInstance()

        do
        {
            try audioSession.setCategory(AVAudioSessionCategoryPlayback)
        }
        catch
        {
            print("Audio session setCategory failed")
        }

        do
        {
            try audioSession.setPreferredSampleRate(kSampleRate)
        }
        catch
        {
            print("Audio session samplerate error")
        }

        do
        {
            try audioSession.setPreferredIOBufferDuration(0.005)
        }
        catch
        {
            print("Audio session bufferduration error")
        }

        do
        {
            try audioSession.setActive(true)
        }
        catch
        {
            print("Audio session activate failure")
        }
    }
}

1 Answer:

Answer 0 (score: 0):

  • vc.theta isn't advanced per rendered frame - it is only incremented once per buffer.
  • memset only uses a single byte of val, so it cannot write your Int32 sample values.
  • The AudioUnit expects floats, but you're storing Int32s.
  • The range of the audio data looks funny too - why not keep it in [-1, 1]?
  • There's no need to constrain theta - sin handles that for you. (A corrected sketch of the callback is shown below.)

Are you sure this ever worked in Objective-C?
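
Putting those points together, here is a rough Swift 2-style sketch of what the callback could look like, assuming the mono, packed, 32-bit native-float stream format that createToneUnit configures. It is untested and meant as a starting point rather than a drop-in fix: it writes one Float sample per frame instead of memset-ing the buffer, keeps the values in [-1, 1], and advances theta once per frame.

private func performRender(
    inRefCon: UnsafeMutablePointer<Void>,
    ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
    inTimeStamp: UnsafePointer<AudioTimeStamp>,
    inBufNumber: UInt32,
    inNumberFrames: UInt32,
    ioData: UnsafeMutablePointer<AudioBufferList>) -> OSStatus
{
    // Sketch only: assumes the mono, packed, 32-bit native-float format
    // set up in createToneUnit.
    let vc = unsafeBitCast(inRefCon, ViewController.self)

    let thetaIncrement = 2.0 * M_PI * vc.kFrequency / vc.kSampleRate
    let amplitude: Double = 0.25
    var theta = vc.theta

    let abl = UnsafeMutableAudioBufferListPointer(ioData)
    for buffer in abl
    {
        // Reinterpret the raw buffer as Float samples.
        let samples = UnsafeMutablePointer<Float>(buffer.mData)

        // Each channel buffer restarts from the saved phase so that all
        // channels carry the same waveform.
        theta = vc.theta

        for frame in 0..<Int(inNumberFrames)
        {
            // One sample per frame, kept within [-1, 1].
            samples[frame] = Float(sin(theta) * amplitude)
            theta += thetaIncrement
        }
    }

    // Persist the phase so the next callback continues where this one stopped.
    vc.theta = theta

    return noErr
}

The key differences from the version in the question: samples are written as Floats (matching kAudioFormatFlagsNativeFloatPacked), the inner loop runs over inNumberFrames so every frame gets its own value, and the phase is carried over between callbacks through vc.theta.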