I will do my best to explain. I want to record sound with an Audio Queue. I have a project and have added all the frameworks it needs (so that is not the problem). I only changed 2 files.
Before I start: I am not sure about the method type, (void) or (IBAction) (I was not able to test it).
Here is the source code of ViewController.h:
#import <UIKit/UIKit.h>
//#import <AudioToolbox/AudioQueue.h> //(don't know how to use that)
//#import <AudioToolbox/AudioFile.h> //(don't know how to use that)
#import <AudioUnit/AudioUnit.h>
#import <AudioToolbox/AudioToolbox.h>
#define NUM_BUFFERS 3
#define SECONDS_TO_RECORD 10
typedef struct
{
AudioStreamBasicDescription dataFormat;
AudioQueueRef queue;
AudioQueueBufferRef buffers[NUM_BUFFERS];
AudioFileID audioFile;
SInt64 currentPacket;
bool recording;
} RecordState;
typedef struct
{
AudioStreamBasicDescription dataFormat;
AudioQueueRef queue;
AudioQueueBufferRef buffers[NUM_BUFFERS];
AudioFileID audioFile;
SInt64 currentPacket;
bool playing;
} PlayState;
@interface ViewController : UIViewController{
IBOutlet UILabel* labelStatus;
IBOutlet UIButton* buttonRecord;
IBOutlet UIButton* buttonPlay;
RecordState recordState;
PlayState playState;
CFURLRef fileURL;
}
- (BOOL)getFilename:(char*)buffer maxLenth:(int)maxBufferLength;
- (void)setupAudioFormat:(AudioStreamBasicDescription*)format;
- (void)recordPressed:(id)sender;
- (void)playPressed:(id)sender;
- (IBAction)startRecording;
- (IBAction)stopRecording;
- (IBAction)startPlayback;
- (IBAction)stopPlayback;
@end
Here is the source code of ViewController.m (I get the error in the function startPlayback, at the comment):
#import "ViewController.h"
@interface ViewController ()
@end
@implementation ViewController
void AudioInputCallback(
void *inUserData,
AudioQueueRef inAQ,
AudioQueueBufferRef inBuffer,
const AudioTimeStamp *inStartTime,
UInt32 inNumberPacketDescriptions,
const AudioStreamPacketDescription *inPacketDescs)
{
RecordState* recordState = (RecordState*)inUserData;
if(!recordState->recording)
{
printf("Not recording, returning\n");
}
//if(inNumberPacketDescriptions == 0 && recordState->dataFormat.mBytesPerPacket != 0)
//{
// inNumberPacketDescriptions = inBuffer->mAudioDataByteSize / recordState->dataFormat.mBytesPerPacket;
//}
printf("Writing buffer %lld\n", recordState->currentPacket);
OSStatus status = AudioFileWritePackets(recordState->audioFile,
false,
inBuffer->mAudioDataByteSize,
inPacketDescs,
recordState->currentPacket,
&inNumberPacketDescriptions,
inBuffer->mAudioData);
if(status == 0)
{
recordState->currentPacket += inNumberPacketDescriptions;
}
AudioQueueEnqueueBuffer(recordState->queue, inBuffer, 0, NULL);
}
void AudioOutputCallback(
void* inUserData,
AudioQueueRef outAQ,
AudioQueueBufferRef outBuffer)
{
PlayState* playState = (PlayState*)inUserData;
if(!playState->playing)
{
printf("Not playing, returning\n");
return;
}
printf("Queuing buffer %lld for playback\n", playState->currentPacket);
AudioStreamPacketDescription* packetDescs = NULL;
UInt32 bytesRead;
UInt32 numPackets = 8000;
OSStatus status;
status = AudioFileReadPackets(
playState->audioFile,
false,
&bytesRead,
packetDescs,
playState->currentPacket,
&numPackets,
outBuffer->mAudioData);
if(numPackets)
{
outBuffer->mAudioDataByteSize = bytesRead;
status = AudioQueueEnqueueBuffer(
playState->queue,
outBuffer,
0,
packetDescs);
playState->currentPacket += numPackets;
}
else
{
if(playState->playing)
{
AudioQueueStop(playState->queue, false);
AudioFileClose(playState->audioFile);
playState->playing = false;
}
AudioQueueFreeBuffer(playState->queue, outBuffer);
}
}
- (void)setupAudioFormat:(AudioStreamBasicDescription*)format
{
format->mSampleRate = 8000.0;
format->mFormatID = kAudioFormatLinearPCM;
format->mFramesPerPacket = 1;
format->mChannelsPerFrame = 1;
format->mBytesPerFrame = 2;
format->mBytesPerPacket = 2;
format->mBitsPerChannel = 16;
format->mReserved = 0;
format->mFormatFlags = kLinearPCMFormatFlagIsBigEndian |
kLinearPCMFormatFlagIsSignedInteger |
kLinearPCMFormatFlagIsPacked;
}
- (void)recordPressed:(id)sender
{
if(!playState.playing)
{
if(!recordState.recording)
{
printf("Starting recording\n");
[self startRecording];
}
else
{
printf("Stopping recording\n");
[self stopRecording];
}
}
else
{
printf("Can't start recording, currently playing\n");
}
}
- (void)playPressed:(id)sender
{
if(!recordState.recording)
{
if(!playState.playing)
{
printf("Starting playback\n");
[self startPlayback];
}
else
{
printf("Stopping playback\n");
[self stopPlayback];
}
}
}
- (IBAction)startRecording
{
[self setupAudioFormat:&recordState.dataFormat];
recordState.currentPacket = 0;
OSStatus status;
status = AudioQueueNewInput(&recordState.dataFormat,
AudioInputCallback,
&recordState,
CFRunLoopGetCurrent(),
kCFRunLoopCommonModes,
0,
&recordState.queue);
if(status == 0)
{
for(int i = 0; i < NUM_BUFFERS; i++)
{
AudioQueueAllocateBuffer(recordState.queue,
16000, &recordState.buffers[i]);
AudioQueueEnqueueBuffer(recordState.queue,
recordState.buffers[i], 0, NULL);
}
status = AudioFileCreateWithURL(fileURL,
kAudioFileAIFFType,
&recordState.dataFormat,
kAudioFileFlags_EraseFile,
&recordState.audioFile);
if(status == 0)
{
recordState.recording = true;
status = AudioQueueStart(recordState.queue, NULL);
if(status == 0)
{
labelStatus.text = @"Recording";
}
}
}
if(status != 0)
{
[self stopRecording];
labelStatus.text = @"Record Failed";
}
}
- (IBAction)stopRecording
{
recordState.recording = false;
AudioQueueStop(recordState.queue, true);
for(int i = 0; i < NUM_BUFFERS; i++)
{
AudioQueueFreeBuffer(recordState.queue,
recordState.buffers[i]);
}
AudioQueueDispose(recordState.queue, true);
AudioFileClose(recordState.audioFile);
labelStatus.text = @"Idle";
}
- (IBAction)startPlayback
{
playState.currentPacket = 0;
[self setupAudioFormat:&playState.dataFormat];
OSStatus status;
// I get here an error
// Use of undeclared identifier 'fsRdPerm'
// How to fix that?
status = AudioFileOpenURL(fileURL, fsRdPerm, kAudioFileAIFFType, &playState.audioFile);
if(status == 0)
{
status = AudioQueueNewOutput(
&playState.dataFormat,
AudioOutputCallback,
&playState,
CFRunLoopGetCurrent(),
kCFRunLoopCommonModes,
0,
&playState.queue);
if(status == 0)
{
playState.playing = true;
for(int i = 0; i < NUM_BUFFERS && playState.playing; i++)
{
if(playState.playing)
{
AudioQueueAllocateBuffer(playState.queue, 16000, &playState.buffers[i]);
AudioOutputCallback(&playState, playState.queue, playState.buffers[i]);
}
}
if(playState.playing)
{
status = AudioQueueStart(playState.queue, NULL);
if(status == 0)
{
labelStatus.text = @"Playing";
}
}
}
}
if(status != 0)
{
[self stopPlayback];
labelStatus.text = @"Play failed";
}
}
- (void)stopPlayback
{
playState.playing = false;
for(int i = 0; i < NUM_BUFFERS; i++)
{
AudioQueueFreeBuffer(playState.queue, playState.buffers[i]);
}
AudioQueueDispose(playState.queue, true);
AudioFileClose(playState.audioFile);
}
- (BOOL)getFilename:(char*)buffer maxLenth:(int)maxBufferLength
{
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
NSUserDomainMask, YES);
NSString* docDir = [paths objectAtIndex:0];
NSString* file = [docDir stringByAppendingString:@"/recording.aif"];
return [file getCString:buffer maxLength:maxBufferLength encoding:NSUTF8StringEncoding];
}
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
@end
I don't know how to fix this so that I can use my project. If I comment out the function startPlayback, I get this error instead:
Ld /Users/NAME/Library/Developer/Xcode/DerivedData/recorder_test2-gehymgoneospsldgfpxnbjdapebu/Build/Products/Debug-iphonesimulator/recorder_test2.app/recorder_test2 normal i386
    cd /Users/NAME/Desktop/recorder_test2
    setenv IPHONEOS_DEPLOYMENT_TARGET 6.0
    setenv PATH "/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/usr/bin:/Applications/Xcode.app/Contents/Developer/usr/bin:/usr/bin:/bin:/usr/sbin:/sbin"
    /Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/clang -arch i386 -isysroot /Applications/Xcode.app/Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/iPhoneSimulator6.0.sdk -L/Users/NAME/Library/Developer/Xcode/DerivedData/recorder_test2-gehymgoneospsldgfpxnbjdapebu/Build/Products/Debug-iphonesimulator -F/Users/NAME/Library/Developer/Xcode/DerivedData/recorder_test2-gehymgoneospsldgfpxnbjdapebu/Build/Products/Debug-iphonesimulator -filelist /Users/NAME/Library/Developer/Xcode/DerivedData/recorder_test2-gehymgoneospsldgfpxnbjdapebu/Build/Intermediates/recorder_test2.build/Debug-iphonesimulator/recorder_test2.build/Objects-normal/i386/recorder_test2.LinkFileList -Xlinker -objc_abi_version -Xlinker 2 -fobjc-arc -fobjc-link-runtime -Xlinker -no_implicit_dylibs -mios-simulator-version-min=6.0 -framework AudioToolbox -framework AudioUnit -framework CoreAudio -framework UIKit -framework Foundation -framework CoreGraphics -o /Users/NAME/Library/Developer/Xcode/DerivedData/recorder_test2-gehymgoneospsldgfpxnbjdapebu/Build/Products/Debug-iphonesimulator/recorder_test2.app/recorder_test2
ld: framework not found AudioUnit
clang: error: linker command failed with exit code 1 (use -v to see invocation)
Please take the 2 source files, test them yourself, and help me.
Answer 0 (score: 2)
Just add AudioUnit in your project settings and make sure you have the correct path.
Answer 1 (score: 0)
Remove AudioUnit.framework from the project and replace fsRdPerm with kAudioFileReadPermission.
The long story:
Although after a long trip through Google I could not find any proof, I am almost certain that fsRdPerm does not exist in any of the iOS 6 audio frameworks. I searched the iOS 6 simulator SDK, and it only appears in CarbonCore.framework, which is a legacy framework and therefore very old:
s2622:Frameworks mac$ pwd
/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/iPhoneSimulator6.0.sdk/System/Library/Frameworks
s2622:Frameworks mac$ grep -sr fsRdPerm .
./CoreServices.framework/Frameworks/CarbonCore.framework/Headers/Files.h: fsRdPerm = 0x01,
./CoreServices.framework/Frameworks/CarbonCore.framework/Versions/A/Headers/Files.h: fsRdPerm = 0x01,
./CoreServices.framework/Frameworks/CarbonCore.framework/Versions/Current/Headers/Files.h: fsRdPerm = 0x01,
./CoreServices.framework/Versions/A/Frameworks/CarbonCore.framework/Headers/Files.h: fsRdPerm = 0x01,
./CoreServices.framework/Versions/A/Frameworks/CarbonCore.framework/Versions/A/Headers/Files.h: fsRdPerm = 0x01,
./CoreServices.framework/Versions/A/Frameworks/CarbonCore.framework/Versions/Current/Headers/Files.h: fsRdPerm = 0x01,
./CoreServices.framework/Versions/Current/Frameworks/CarbonCore.framework/Headers/Files.h: fsRdPerm = 0x01,
./CoreServices.framework/Versions/Current/Frameworks/CarbonCore.framework/Versions/A/Headers/Files.h: fsRdPerm = 0x01,
./CoreServices.framework/Versions/Current/Frameworks/CarbonCore.framework/Versions/Current/Headers/Files.h: fsRdPerm = 0x01,
I found forum posts suggesting kAudioFileReadPermission instead of fsRdPerm. That works, and in fact the documentation for kAudioFileReadPermission says it is one of the "flags for use with the AudioFileOpenURL and AudioFileOpen functions". Read more under Audio File Permission Flags.