I'm trying to develop an app that analyzes the audio stream coming from the microphone, using Core Audio and AVAudioSession on SDK 8.1. Piecing together bits from Apple's documentation and from helpful bloggers, I've managed to put together a program, but it struggles to start recording - the AudioOutputUnitStart
call fails with return code -500. Why is that? [The code below runs as-is if it is saved as ViewController.m
and used in a single-view Xcode template. I've kept it as small as I could, even though it isn't strictly minimal.]
#import "ViewController.h"
@import AVFoundation;
@import AudioUnit;
#define kInputBus 1
AudioComponentInstance *audioUnit = NULL;
float *convertedSampleBuffer = NULL;
int status = 0;
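// Called by the RemoteIO unit whenever a new buffer of microphone samples is available.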
static OSStatus recordingCallback(void *inRefCon,
AudioUnitRenderActionFlags *ioActionFlags,
const AudioTimeStamp *inTimeStamp,
UInt32 inBusNumber,
UInt32 inNumberFrames,
AudioBufferList *ioData) {
AudioBufferList *bufferList;
OSStatus status;
status = AudioUnitRender(*audioUnit,
ioActionFlags,
inTimeStamp,
inBusNumber,
inNumberFrames,
bufferList);
printf("%d", status); printf("%s", " is the return code of AudioUnitRender from the recordingCallback.\n");
// DoStuffWithTheRecordedAudio(bufferList);
return noErr;
}
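// Sets up the audio session and a RemoteIO unit, then starts recording.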
int myAudio() {
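// Put the shared audio session into the Record category with measurement mode.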
AVAudioSession *mySession = [AVAudioSession sharedInstance];
[mySession setCategory: AVAudioSessionCategoryRecord error: nil];
[mySession setMode: AVAudioSessionModeMeasurement error: nil];
[mySession setPreferredSampleRate:44100 error:nil];
[mySession setPreferredIOBufferDuration:0.02 error:nil];
[mySession setActive: YES error: nil];
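// Allocate storage for the unit handle, then describe and instantiate the RemoteIO unit.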
audioUnit = (AudioUnit*)malloc(sizeof(AudioUnit));
AudioComponentDescription desc;
desc.componentType = kAudioUnitType_Output;
desc.componentSubType = kAudioUnitSubType_RemoteIO;
desc.componentManufacturer = kAudioUnitManufacturer_Apple;
desc.componentFlags = 0;
desc.componentFlagsMask = 0;
AudioComponent comp = AudioComponentFindNext(NULL, &desc);
status = AudioComponentInstanceNew(comp, audioUnit);
printf("%d", status); printf("%s", " is the return code of Instantiating a new audio component instance.\n");
UInt32 enable = 1;
status = AudioUnitSetProperty(*audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, kInputBus, &enable, sizeof(enable));
printf("%d", status); printf("%s", " is the return code of EnablingIO on the audiounit.\n");
AudioStreamBasicDescription streamDescription = {0};
streamDescription.mSampleRate = 44100;
streamDescription.mFormatID = kAudioFormatLinearPCM;
streamDescription.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
streamDescription.mFramesPerPacket = 1;
streamDescription.mChannelsPerFrame = 1;
streamDescription.mBitsPerChannel = 16;
streamDescription.mBytesPerPacket = 2;
streamDescription.mBytesPerFrame = 2;
status = AudioUnitSetProperty(*audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &streamDescription, sizeof(streamDescription));
printf("%d", status); printf("%s", " is the return code of setting the AudioStreamDescription.\n");
AURenderCallbackStruct callbackStruct;
callbackStruct.inputProc = recordingCallback;
callbackStruct.inputProcRefCon = NULL;
status = AudioUnitSetProperty(*audioUnit,
kAudioOutputUnitProperty_SetInputCallback,
kAudioUnitScope_Global,
kInputBus,
&callbackStruct,
sizeof(callbackStruct));
printf("%d", status); printf("%s", " is the return code of setting the recording callback on the audiounit\n");
status = AudioUnitInitialize(*audioUnit);
printf("%d", status); printf("%s", " is the return code of initializing the audiounit.\n");
status = AudioOutputUnitStart(*audioUnit);
printf("%d", status); printf("%s", " is the return code of Starting the audioUnit\n");
return noErr;
}
@interface ViewController ()
@end
@implementation ViewController
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
myAudio();
[NSThread sleepForTimeInterval:1];
exit(0);
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
@end
All of the printf statements report 0, except the last one, which reports -500.
Answer (score: 1)
The code provided in the question has two errors.
Here is a complete ViewController.m that is able to record, and to play back as well if the commented-out block is enabled (if playback is your goal, also activate the audio session block and change the category from Record to PlayAndRecord). Credit to Michael Tyson, whose posts form the basis of what I built.
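Since the complete listing is not included here, a minimal record-only sketch along the same lines might look like the following; the callback's buffer handling, the choice to disable the output element, and the bus/scope used for the stream format are assumptions of this sketch, not the answer's stated fixes.

@import AVFoundation;
@import AudioUnit;

#define kInputBus  1
#define kOutputBus 0

static AudioUnit gAudioUnit = NULL;

static OSStatus RecordingCallback(void *inRefCon,
                                  AudioUnitRenderActionFlags *ioActionFlags,
                                  const AudioTimeStamp *inTimeStamp,
                                  UInt32 inBusNumber,
                                  UInt32 inNumberFrames,
                                  AudioBufferList *ioData) {
    // Give AudioUnitRender a real buffer to fill (the question passed an
    // uninitialized pointer). Allocating on the audio thread is not ideal;
    // it is done here only to keep the sketch short.
    AudioBufferList bufferList;
    bufferList.mNumberBuffers = 1;
    bufferList.mBuffers[0].mNumberChannels = 1;
    bufferList.mBuffers[0].mDataByteSize = inNumberFrames * sizeof(SInt16);
    bufferList.mBuffers[0].mData = malloc(bufferList.mBuffers[0].mDataByteSize);

    OSStatus status = AudioUnitRender(gAudioUnit, ioActionFlags, inTimeStamp,
                                      inBusNumber, inNumberFrames, &bufferList);
    if (status == noErr) {
        // DoStuffWithTheRecordedAudio(&bufferList);
    }
    free(bufferList.mBuffers[0].mData);
    return status;
}

static OSStatus startRecording(void) {
    // Recording also requires the user to have granted microphone access.
    AVAudioSession *session = [AVAudioSession sharedInstance];
    [session setCategory:AVAudioSessionCategoryRecord error:nil];
    [session setMode:AVAudioSessionModeMeasurement error:nil];
    [session setPreferredSampleRate:44100 error:nil];
    [session setPreferredIOBufferDuration:0.02 error:nil];
    [session setActive:YES error:nil];

    AudioComponentDescription desc = {0};
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    AudioComponent comp = AudioComponentFindNext(NULL, &desc);
    OSStatus status = AudioComponentInstanceNew(comp, &gAudioUnit);
    if (status != noErr) return status;

    // Enable the input element and disable the output element, since this
    // sketch only records.
    UInt32 enable = 1, disable = 0;
    AudioUnitSetProperty(gAudioUnit, kAudioOutputUnitProperty_EnableIO,
                         kAudioUnitScope_Input, kInputBus, &enable, sizeof(enable));
    AudioUnitSetProperty(gAudioUnit, kAudioOutputUnitProperty_EnableIO,
                         kAudioUnitScope_Output, kOutputBus, &disable, sizeof(disable));

    // 16-bit mono PCM, applied to the output scope of the input bus, i.e.
    // the format in which the captured samples are handed back to us.
    AudioStreamBasicDescription fmt = {0};
    fmt.mSampleRate       = 44100;
    fmt.mFormatID         = kAudioFormatLinearPCM;
    fmt.mFormatFlags      = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    fmt.mFramesPerPacket  = 1;
    fmt.mChannelsPerFrame = 1;
    fmt.mBitsPerChannel   = 16;
    fmt.mBytesPerPacket   = 2;
    fmt.mBytesPerFrame    = 2;
    AudioUnitSetProperty(gAudioUnit, kAudioUnitProperty_StreamFormat,
                         kAudioUnitScope_Output, kInputBus, &fmt, sizeof(fmt));

    // Have the unit call RecordingCallback whenever new input is available.
    AURenderCallbackStruct cb = { RecordingCallback, NULL };
    AudioUnitSetProperty(gAudioUnit, kAudioOutputUnitProperty_SetInputCallback,
                         kAudioUnitScope_Global, kInputBus, &cb, sizeof(cb));

    status = AudioUnitInitialize(gAudioUnit);
    if (status != noErr) return status;
    return AudioOutputUnitStart(gAudioUnit);
}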