I've been wrestling with this problem for about a year, and I'm writing now to try to solve it for myself and on behalf of everyone else who has hit it.
I'm writing an app that depends on GarageBand-style recording. That is, I want to record the user for exactly 8 beats, and then I want them to be able to loop that recording. I'm playing a metronome for the user at the same time (the user wears headphones so they can hear the metronome while recording into the microphone on their device).
I can turn the recorder on for roughly 4.8 seconds (0.6 s per beat × 8 beats), and the timer says it ran for 4.8 seconds, but my audio recording is always shorter than 4.8. It comes out at something like 4.78 or 4.71, which makes the loop drift out of time.
I've experimented with AVAudioRecorder, AudioQueue, and AudioUnits, thinking one of the latter approaches might solve my problem.
I'm using an NSTimer that fires every 0.6 seconds to play a short metronome blip. After 4 beats, the metronome timer's function turns the recorder on, and 4.8 seconds later it stops the recording.
I'm using time intervals to measure how long the metronome ran (it looks very tight, at 4.800xxx) and comparing that to the duration of the audio file, which is always different.
I wish I could attach my project, but I guess I'll just have to settle for attaching my header and implementation. To test it you'd have to make a project with the following IB features:
Record, Play, and Stop buttons
A song/track duration label
A timer duration label
A debug label
If you launch the app and then tap Record, you are "counted in" with 4 beats, and then the recorder starts. Tap your finger on the desk until the recorder stops. After 8 more beats (12 in total), the recorder stops.
You can see in the display that the recorded track is a little shorter than 4.8 seconds, and in some cases much shorter, so the audio doesn't loop properly.
Does anyone know what I can do to tighten this up? Thanks for reading.
Here's my code:
//
// ViewController.h
// speakagain
//
// Created by NOTHING on 2014-03-18.
//
#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#import "CoreAudio/CoreAudioTypes.h"
#import <AudioToolbox/AudioQueue.h>
#import <AudioToolbox/AudioFile.h>
#import <AVFoundation/AVFoundation.h>
// NOTE: kNumberBuffers was missing from the code as posted; it is required
// by mBuffers[] below. Apple's Audio Queue Services guide uses 3, so 3 is
// assumed here.
#define kNumberBuffers 3
@interface ViewController : UIViewController
{
IBOutlet UIButton *btnRecord;
IBOutlet UIButton *btnPlay;
IBOutlet UIButton *btnStop;
IBOutlet UILabel *debugLabel;
IBOutlet UILabel *timerDuration;
IBOutlet UILabel *songDuration;
//UILabel *labelDebug;
struct AQRecorderState {
AudioStreamBasicDescription mDataFormat;
AudioQueueRef mQueue;
AudioQueueBufferRef mBuffers[kNumberBuffers];
AudioFileID mAudioFile;
UInt32 bufferByteSize;
SInt64 mCurrentPacket;
bool mIsRunning; // true while the queue is recording
};
struct AQRecorderState aqData;
AVAudioPlayer *audioPlayer;
NSString *songName;
NSTimer *recordTimer;
NSTimer *metroTimer;
NSTimeInterval startTime, endTime, elapsedTime;
int inputBuffer;
int beatNumber;
}
@property (nonatomic, retain) IBOutlet UIButton *btnRecord;
@property (nonatomic, retain) IBOutlet UIButton *btnPlay;
@property (nonatomic, retain) IBOutlet UIButton *btnStop;
@property (nonatomic, retain) IBOutlet UILabel *debugLabel;
@property (nonatomic, retain) IBOutlet UILabel *timerDuration;
@property (nonatomic, retain) IBOutlet UILabel *songDuration;
- (IBAction) record;
- (IBAction) stop;
- (IBAction) play;
static void HandleInputBuffer (void *aqData,AudioQueueRef inAQ,AudioQueueBufferRef inBuffer,const AudioTimeStamp *inStartTime, UInt32 inNumPackets,const AudioStreamPacketDescription *inPacketDesc);
@end
Implementation:
//
// ViewController.m
// speakagain
//
// Created by NOTHING on 2014-03-18.
//
#import "ViewController.h"
@interface ViewController ()
@end
@implementation ViewController
@synthesize btnPlay, btnRecord,btnStop,songDuration, timerDuration, debugLabel;
- (void)viewDidLoad
{
debugLabel.text = @"";
songName =[[NSString alloc ]init];
//NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
//NSString *documentsDirectory = [paths objectAtIndex:0];
songName = @"TestingQueue.caf";
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
}
- (void)prepareAudioQueue
{
//struct AQRecorderState *pAqData;
inputBuffer=0;
aqData.mDataFormat.mFormatID = kAudioFormatLinearPCM;
aqData.mDataFormat.mSampleRate = 44100.0;
aqData.mDataFormat.mChannelsPerFrame = 1;
aqData.mDataFormat.mBitsPerChannel = 16;
aqData.mDataFormat.mBytesPerPacket =
aqData.mDataFormat.mBytesPerFrame = aqData.mDataFormat.mChannelsPerFrame * sizeof (SInt16);
aqData.mDataFormat.mFramesPerPacket = 1;
// AudioFileTypeID fileType = kAudioFileAIFFType;
AudioFileTypeID fileType = kAudioFileCAFType;
aqData.mDataFormat.mFormatFlags = kLinearPCMFormatFlagIsBigEndian| kLinearPCMFormatFlagIsSignedInteger| kLinearPCMFormatFlagIsPacked;
AudioQueueNewInput (&aqData.mDataFormat,HandleInputBuffer, &aqData,NULL, kCFRunLoopCommonModes, 0,&aqData.mQueue);
UInt32 dataFormatSize = sizeof (aqData.mDataFormat);
// in Mac OS X, instead use
// kAudioConverterCurrentInputStreamDescription
AudioQueueGetProperty (aqData.mQueue,kAudioQueueProperty_StreamDescription,&aqData.mDataFormat,&dataFormatSize);
//Verify
NSFileManager *fileManager = [NSFileManager defaultManager];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *txtPath = [documentsDirectory stringByAppendingPathComponent:songName];
NSLog(@"INITIALIZING FILE");
if ([fileManager fileExistsAtPath:txtPath] == YES) {
NSLog(@"PREVIOUS FILE REMOVED");
[fileManager removeItemAtPath:txtPath error:nil];
}
const char *filePath = [txtPath UTF8String];
CFURLRef audioFileURL = CFURLCreateFromFileSystemRepresentation ( NULL,(const UInt8 *) filePath,strlen (filePath),false );
AudioFileCreateWithURL (audioFileURL,fileType,&aqData.mDataFormat, kAudioFileFlags_EraseFile,&aqData.mAudioFile );
DeriveBufferSize (aqData.mQueue,aqData.mDataFormat,0.5,&aqData.bufferByteSize);
for (int i = 0; i < kNumberBuffers; ++i)
{
AudioQueueAllocateBuffer (aqData.mQueue,aqData.bufferByteSize,&aqData.mBuffers[i]);
AudioQueueEnqueueBuffer (aqData.mQueue,aqData.mBuffers[i], 0,NULL );
}
}
- (void) metronomeFire
{
if(beatNumber < 5)
{
//count in time.
// just play the metro beep but don't start recording
debugLabel.text = @"count in (1,2,3,4)";
[self playSound];
}
else if(beatNumber == 5) // chained else-if: without it, beats 1-4 also fell into the < 12 branch and blipped twice
{
//start recording
aqData.mCurrentPacket = 0;
aqData.mIsRunning = true;
startTime = [NSDate timeIntervalSinceReferenceDate];
recordTimer = [NSTimer scheduledTimerWithTimeInterval:4.8 target:self selector:@selector(killTimer) userInfo:nil repeats:NO];
AudioQueueStart (aqData.mQueue,NULL);
debugLabel.text = @"Recording for 8 beats (1,2,3,4 1,2,3,4)";
[self playSound];
}
else if (beatNumber < 12)
{ //play metronome for beats 6 through 11
[self playSound];
}
if(beatNumber == 12)
{
[metroTimer invalidate]; metroTimer = nil;
[self playSound];
}
beatNumber++;
}
- (IBAction) play
{
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *txtPath = [documentsDirectory stringByAppendingPathComponent:songName];
NSURL *url = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@",txtPath]];
if (audioPlayer)
{
[audioPlayer stop];
audioPlayer = nil;
}
NSError *error;
audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:url error:&error];
if (audioPlayer == nil)
{
NSLog(@"%@",[error description]);
}
else
{
[audioPlayer play];
[audioPlayer setNumberOfLoops:-1];
}
}
- (void) killTimer
{
//this is the timer function. Runs once after 4.8 seconds.
[self stop];
}
- (IBAction) stop
{
if (audioPlayer)
{
[audioPlayer stop];
audioPlayer = nil;
}
else
{
if(metroTimer)
{
[metroTimer invalidate];metroTimer = nil;
}
//Stop the audio queue
AudioQueueStop (aqData.mQueue,true);
aqData.mIsRunning = false;
AudioQueueDispose (aqData.mQueue,true);
AudioFileClose (aqData.mAudioFile);
//Get elapsed time of timer
endTime = [NSDate timeIntervalSinceReferenceDate];
elapsedTime = endTime - startTime;
//Get elapsed time of audio file
NSArray *pathComponents = [NSArray arrayWithObjects:
[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject],
songName,
nil];
NSURL *audioFileURL = [NSURL fileURLWithPathComponents:pathComponents];
AVURLAsset* audioAsset = [AVURLAsset URLAssetWithURL:audioFileURL options:nil];
CMTime audioDuration = audioAsset.duration;
float audioDurationSeconds = CMTimeGetSeconds(audioDuration);
//Log values
NSLog(@"Track Duration: %f",audioDurationSeconds);
NSLog(@"Timer Duration: %.6f", elapsedTime);
//Show values on GUI too
songDuration.text = [NSString stringWithFormat: @"Track Duration: %f",audioDurationSeconds];
timerDuration.text = [NSString stringWithFormat:@"Timer Duration: %.6f", elapsedTime];
debugLabel.text = @"Why is the duration of the track less than the duration the timer ran?";
}
}
-(void) playSound
{
// Create the system sound once and cache it; the code as posted created a
// new SystemSoundID on every beat and never disposed of them.
static SystemSoundID soundID = 0;
if (soundID == 0)
{
NSString *path = [[NSBundle mainBundle] pathForResource:@"blip2" ofType:@"aif"];
AudioServicesCreateSystemSoundID((__bridge CFURLRef)[NSURL fileURLWithPath:path], &soundID);
}
AudioServicesPlaySystemSound (soundID);
}
- (IBAction) record
{
[self prepareAudioQueue];
songDuration.text = @"";
timerDuration.text = @"";
//debugLabel.text = @"Please wait 12 beats (The first four are count in)";
//init beat number
beatNumber = 1;
//safe guard
if(aqData.mIsRunning)
{
AudioQueueStop (aqData.mQueue,true);
aqData.mIsRunning = false;
AudioQueueDispose (aqData.mQueue,true);
AudioFileClose (aqData.mAudioFile);
}
//start count in (metro will start recording)
//aqData.mCurrentPacket = 0;
//aqData.mIsRunning = true;
startTime = [NSDate timeIntervalSinceReferenceDate];
metroTimer = [NSTimer scheduledTimerWithTimeInterval:.6 target:self selector:@selector(metronomeFire) userInfo:nil repeats:YES];
//recordTimer = [NSTimer scheduledTimerWithTimeInterval:4.8 target:self selector:@selector(killTimer) userInfo:nil repeats:NO];
//AudioQueueStart (aqData.mQueue,NULL);
}
static void HandleInputBuffer (void *aqData,AudioQueueRef inAQ,AudioQueueBufferRef inBuffer,const AudioTimeStamp *inStartTime,UInt32 inNumPackets,const AudioStreamPacketDescription *inPacketDesc)
{
//boiler plate
NSLog(@"HandleInputBuffer");
struct AQRecorderState *pAqData = (struct AQRecorderState *) aqData;
if (inNumPackets == 0 && pAqData->mDataFormat.mBytesPerPacket != 0)
inNumPackets = inBuffer->mAudioDataByteSize / pAqData->mDataFormat.mBytesPerPacket;
if (AudioFileWritePackets (pAqData->mAudioFile,false,inBuffer->mAudioDataByteSize,inPacketDesc,pAqData->mCurrentPacket,&inNumPackets,inBuffer->mAudioData) == noErr)
{
pAqData->mCurrentPacket += inNumPackets;
}
if (pAqData->mIsRunning == 0)
return;
AudioQueueEnqueueBuffer (pAqData->mQueue,inBuffer,0,NULL);
}
void DeriveBufferSize(AudioQueueRef audioQueue,AudioStreamBasicDescription ASBDescription,Float64 seconds,UInt32 *outBufferSize)
{
//boiler plate
static const int maxBufferSize = 0x50000;
int maxPacketSize = ASBDescription.mBytesPerPacket;
if(maxPacketSize == 0)
{
UInt32 maxVBRPacketSize = sizeof(maxPacketSize);
AudioQueueGetProperty(audioQueue, kAudioQueueProperty_MaximumOutputPacketSize, &maxPacketSize, &maxVBRPacketSize);
NSLog(@"max buffer = %d",maxPacketSize);
}
Float64 numBytesForTime = ASBDescription.mSampleRate * maxPacketSize * seconds;
*outBufferSize = (UInt32)(numBytesForTime < maxBufferSize ? numBytesForTime : maxBufferSize);
}
OSStatus SetMagicCookieForFile (AudioQueueRef inQueue, AudioFileID inFile)
{
//boiler plate
OSStatus result = noErr;
UInt32 cookieSize;
if (AudioQueueGetPropertySize (inQueue,kAudioQueueProperty_MagicCookie,&cookieSize) == noErr)
{
char* magicCookie =(char *) malloc (cookieSize);
if (AudioQueueGetProperty (inQueue,kAudioQueueProperty_MagicCookie,magicCookie,&cookieSize) == noErr)
{
result = AudioFileSetProperty (inFile,kAudioFilePropertyMagicCookieData,cookieSize,magicCookie);
}
free (magicCookie);
}
return result;
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
@end
Answer (score: 1)
This is a big topic, so I doubt you'll get an answer big enough to re-architect the code you've provided. But I can give you links that supply the vast majority of what you'll need.
First thing: NSTimer will never work, because of synchronization issues. Also, forget AudioQueue and AVAudioRecorder. Only AudioUnit is low-level enough for what you need.
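To make that concrete, here is a rough sketch of a RemoteIO input callback that captures a loop with an exact frame count. This is only an illustration, not code from the links below: the LoopRecorder struct and its field names are made up for the example, and it assumes a RemoteIO unit already configured elsewhere for 16-bit mono PCM at 44.1 kHz with this callback installed via kAudioOutputUnitProperty_SetInputCallback.

#import <AudioToolbox/AudioToolbox.h>
#include <string.h>

typedef struct {
    AudioUnit rioUnit;        // the RemoteIO unit this callback is attached to
    SInt16   *loop;           // pre-allocated loop storage, framesWanted samples long
    SInt16   *scratch;        // pre-allocated scratch space for one render slice
    UInt32    framesWanted;   // e.g. 8 beats * 0.6 s * 44100 Hz = 211680 frames
    UInt32    framesCaptured; // frames written into the loop so far
} LoopRecorder;

static OSStatus InputCallback(void *inRefCon,
                              AudioUnitRenderActionFlags *ioActionFlags,
                              const AudioTimeStamp *inTimeStamp,
                              UInt32 inBusNumber,
                              UInt32 inNumberFrames,
                              AudioBufferList *ioData)
{
    LoopRecorder *rec = (LoopRecorder *)inRefCon;

    // Pull this slice of microphone samples out of the audio unit.
    AudioBufferList bufList;
    bufList.mNumberBuffers = 1;
    bufList.mBuffers[0].mNumberChannels = 1;
    bufList.mBuffers[0].mDataByteSize = (UInt32)(inNumberFrames * sizeof(SInt16));
    bufList.mBuffers[0].mData = rec->scratch;
    OSStatus err = AudioUnitRender(rec->rioUnit, ioActionFlags, inTimeStamp,
                                   inBusNumber, inNumberFrames, &bufList);
    if (err != noErr) return err;

    // Copy frames until the loop is exactly full, then ignore the rest.
    // The frame count, not a wall-clock timer, decides where the loop ends.
    UInt32 remaining = rec->framesWanted - rec->framesCaptured;
    UInt32 n = (inNumberFrames < remaining) ? inNumberFrames : remaining;
    memcpy(rec->loop + rec->framesCaptured, rec->scratch, n * sizeof(SInt16));
    rec->framesCaptured += n;
    return noErr;
}

Once framesCaptured reaches framesWanted, the loop is exactly 4.8 seconds of audio, sample for sample; playing it back is a matter of feeding those same frames out again from the render callback.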
Have a look at my answer here:
iOS Stream Audio from one iOS Device to Another
But the real goldmine, and the knowledge you'll need to become intimately familiar with, is the Tasty Pixel blog. Tasty Pixel is the vendor of Loopy HD, but also someone kind enough to share some very in-depth knowledge.
See:
A simple, fast circular buffer implementation for audio processing
Developing Loopy, Part 2: Implementation
and
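To give a taste of what the circular-buffer post is about, here is a toy single-reader/single-writer ring buffer. This is only a sketch of the concept, not the blog's code; the real TPCircularBuffer described there uses a virtual-memory mirroring trick and atomic counters so the audio thread never blocks.

typedef struct {
    SInt16 *buffer;            // backing store, capacity samples long
    UInt32  capacity;
    UInt32  head;              // next write index
    UInt32  tail;              // next read index
    volatile UInt32 fillCount; // samples currently readable
} RingBuffer;

// Producer side (e.g. the input callback): returns samples actually written.
static UInt32 RingWrite(RingBuffer *rb, const SInt16 *src, UInt32 n)
{
    UInt32 space = rb->capacity - rb->fillCount;
    if (n > space) n = space;
    for (UInt32 i = 0; i < n; i++) {
        rb->buffer[rb->head] = src[i];
        rb->head = (rb->head + 1) % rb->capacity;
    }
    rb->fillCount += n; // not atomic here; a real implementation uses atomics
    return n;
}

// Consumer side (e.g. the render callback): returns samples actually read.
static UInt32 RingRead(RingBuffer *rb, SInt16 *dst, UInt32 n)
{
    if (n > rb->fillCount) n = rb->fillCount;
    for (UInt32 i = 0; i < n; i++) {
        dst[i] = rb->buffer[rb->tail];
        rb->tail = (rb->tail + 1) % rb->capacity;
    }
    rb->fillCount -= n;
    return n;
}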
Finally, make sure you get familiar with packets, frames, samples, etc. Everything needs to sync up perfectly.
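For the question's format those terms pin the loop length down exactly (my arithmetic, using the values from the question):

// With 16-bit mono linear PCM: 1 sample = 2 bytes, 1 frame = 1 sample,
// and 1 packet = 1 frame, so an 8-beat loop is a fixed frame count.
const double sampleRate     = 44100.0;  // from the question's ASBD
const double secondsPerBeat = 0.6;      // 100 BPM
const int    beats          = 8;

const UInt32 loopFrames = (UInt32)(beats * secondsPerBeat * sampleRate); // 211680 frames
const UInt32 loopBytes  = loopFrames * sizeof(SInt16);                   // 423360 bytes

A file that reads 4.78 s instead of 4.8 s is about 880 frames short, which is exactly the kind of error that starting and stopping against a wall-clock timer produces.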