我尝试使用音频队列简单地播放音频文件。 我从一个教程中获得了这段代码,其中首先录制了音频然后播放,这很好。 修改声音文件路径以指向我的声音文件后,播放时只有白噪声。我尝试过不同的格式并使用我的音频格式设置,但我显然遗漏了一些东西。
另外,我一直在使用Swift学习iOS,但我没能将此代码翻译成Swift,所以我在Swift应用程序中将其桥接。
在线似乎没有太多的例子,链接到的Apple示例项目链接已经死了。
任何建议都非常感谢!
#import "ViewController.h"
typedef NS_ENUM(NSUInteger, AudioQueueState) {
AudioQueueState_Idle,
AudioQueueState_Recording,
AudioQueueState_Playing,
};
@import AVFoundation;
@interface ViewController ()
@property AudioQueueState currentState;
@property (strong, nonatomic) NSURL *audioFileURL;
@end
#define NUM_BUFFERS 10
@implementation ViewController
void AudioOutputCallback(void *inUserData,
AudioQueueRef outAQ, // a reference to the audio queue
AudioQueueBufferRef outBuffer) { // the buffers
ViewController *viewController = (__bridge ViewController*)inUserData;
if (viewController.currentState != AudioQueueState_Playing) {
return;
}
// Read the data out of the audio file in order to fill the buffers with it.
UInt32 numBytes = 16000;
OSStatus status = AudioFileReadBytes(audioFileID, false, currentByte, &numBytes, outBuffer->mAudioData);
if (status != noErr && status != kAudioFileEndOfFileError) {
printf("Error\n");
return;
}
// If data has been read successfully tell the audio queue that the buffer is ready to play.
if (numBytes > 0) {
outBuffer->mAudioDataByteSize = numBytes;
OSStatus statusOfEnqueue = AudioQueueEnqueueBuffer(queue, outBuffer, 0, NULL);
if (statusOfEnqueue != noErr) {
printf("Error\n");
return;
}
currentByte += numBytes;
}
// Check if it's at the end of the file.
if (numBytes == 0 || status == kAudioFileEndOfFileError) {
AudioQueueStop(queue, false);
AudioFileClose(audioFileID);
viewController.currentState = AudioQueueState_Idle;
}
}
static SInt64 currentByte;
static AudioStreamBasicDescription audioFormat;
static AudioQueueRef queue;
static AudioQueueBufferRef buffers [NUM_BUFFERS];
static AudioFileID audioFileID;
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
[self setupAudio];
}
- (void) setupAudio {
audioFormat.mSampleRate = 44100.00;
audioFormat.mFormatID = kAudioFormatLinearPCM;
audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
audioFormat.mFramesPerPacket = 1;
audioFormat.mChannelsPerFrame = 1;
audioFormat.mBitsPerChannel = 16;
audioFormat.mBytesPerFrame = audioFormat.mChannelsPerFrame * sizeof(SInt16);
audioFormat.mBytesPerPacket = audioFormat.mFramesPerPacket * audioFormat.mBytesPerFrame;
self.currentState = AudioQueueState_Idle;
}
- (IBAction)playButtonPressed:(id)sender {
// Set up the audio session.
[[AVAudioSession sharedInstance] setActive:YES error:&error];
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:&error];
[self startPlayback];
}
- (void) startPlayback {
NSString *resourcePath = [[NSBundle mainBundle] pathForResource:@"MyAudioFileName" ofType:@"wav"];
NSLog(@"path: %@", resourcePath);
self.audioFileURL = [NSURL fileURLWithPath:resourcePath];
currentByte = 0;
OSStatus status = AudioFileOpenURL((__bridge CFURLRef) (self.audioFileURL), kAudioFileReadPermission, kAudioFileWAVEType, &audioFileID);
status = AudioQueueNewOutput(&audioFormat, AudioOutputCallback, (__bridge void*)self, CFRunLoopGetCurrent(), kCFRunLoopCommonModes, 0, &queue);
self.currentState = AudioQueueState_Playing;
for (int i = 0; i < NUM_BUFFERS && self.currentState == AudioQueueState_Playing; i++) {
status = AudioQueueAllocateBuffer(queue, 16000, &buffers[i]);
AudioOutputCallback((__bridge void*)self, queue, buffers[i]);
}
status = AudioQueueStart(queue, NULL);
}
- (void) stopPlayback {
self.currentState = AudioQueueState_Idle;
for (int i = 0; i < NUM_BUFFERS; i++) {
AudioQueueFreeBuffer(queue, buffers[i]);
}
AudioQueueDispose(queue, true);
AudioFileClose(audioFileID);
}
@end