mSampleRate differs between iPad mini and iPad Air

Date: 2014-05-02 12:45:55

Tags: ios7 core-audio

I am trying to capture microphone audio with the code at the bottom.

NSLog(@"sampleBuffer = %@",sampleBuffer);

returns:

sampleBuffer = CMSampleBuffer 0x155d111a0 retainCount: 1 allocator: 0x191e590c0
invalid = NO
dataReady = YES
makeDataReadyCallback = 0x0
makeDataReadyRefcon = 0x0
formatDescription = <CMAudioFormatDescription 0x170119e90 [0x191e590c0]> {
mediaType:'soun' 
mediaSubType:'lpcm' 
mediaSpecific: {
    ASBD: {
        mSampleRate: 44100.000000 
        mFormatID: 'lpcm' 
        mFormatFlags: 0xc 
        mBytesPerPacket: 2 
        mFramesPerPacket: 1 
        mBytesPerFrame: 2 
        mChannelsPerFrame: 1 
        mBitsPerChannel: 16     } 
    cookie: {(null)} 
    ACL: {(null)} 
} 
extensions: {(null)}
}
sbufToTrackReadiness = 0x0
numSamples = 1024
sampleTimingArray[1] = {
    {PTS = {810106869118125/1000000000 = 810106.869}, DTS = {INVALID}, duration = {1/44100 = 0.000}},
}
sampleSizeArray[1] = {
    sampleSize = 2,
}
dataBuffer = 0x178118f60

It reports "mSampleRate: 44100.000000" on the iPad Air, but "mSampleRate: 11025.000000" on the iPad mini (non-retina). Both devices are running iOS 7.

I don't know why. I want "mSampleRate: 11025.000000" even on the iPad Air.

How can I achieve that?
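Note that setPreferredSampleRate: is only a request; after activation the session reports the rate it was actually granted. A minimal sketch (illustrative, reusing the session setup from the code below) to compare the two:

AVAudioSession *session = [AVAudioSession sharedInstance];
[session setPreferredSampleRate:11025.0 error:nil];
[session setActive:YES error:nil];
// preferredSampleRate is what was requested; sampleRate is what was granted.
NSLog(@"preferred = %f, granted = %f",
      session.preferredSampleRate, session.sampleRate);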

After that, I want to encode the stream to AAC. The encoding works with 11025 on the iPad mini and with 44100 on both the iPad mini and the Air.
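For the AAC step, here is a minimal sketch of the source and destination formats for Audio Converter Services (an assumption about the encoding path, not necessarily the one used here). The source fields mirror the logged ASBD, where mFormatFlags 0xc is kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked:

#import <AudioToolbox/AudioToolbox.h>

AudioStreamBasicDescription srcFormat = {0};   // mirrors the captured ASBD
srcFormat.mSampleRate       = 11025.0;
srcFormat.mFormatID         = kAudioFormatLinearPCM;
srcFormat.mFormatFlags      = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
srcFormat.mBytesPerPacket   = 2;
srcFormat.mFramesPerPacket  = 1;
srcFormat.mBytesPerFrame    = 2;
srcFormat.mChannelsPerFrame = 1;
srcFormat.mBitsPerChannel   = 16;

AudioStreamBasicDescription dstFormat = {0};   // compressed fields left 0 for the codec to fill
dstFormat.mSampleRate       = 11025.0;         // desired output rate
dstFormat.mFormatID         = kAudioFormatMPEG4AAC;
dstFormat.mChannelsPerFrame = 1;
dstFormat.mFramesPerPacket  = 1024;            // AAC packs 1024 frames per packet

AudioConverterRef converter = NULL;
OSStatus status = AudioConverterNew(&srcFormat, &dstFormat, &converter);
if (status != noErr) {
    NSLog(@"AudioConverterNew failed: %d", (int)status);
}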

CODE

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

// Custom cell that captures microphone audio while it is being touched.
@interface CustomCell : UITableViewCell <AVCaptureAudioDataOutputSampleBufferDelegate>

@property AVCaptureAudioDataOutput *audioDataOutput;
@property AVCaptureSession *captureSession;
@property AVAudioSession *audioSession;

@end

@implementation CustomCell

// Start capturing when the cell is touched.
-(void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    // Configure the shared audio session and request an 11025 Hz hardware rate.
    [self.audioSession setActive:NO error:nil];
    self.audioSession = [AVAudioSession sharedInstance];
    [self.audioSession setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
    double hwSampleRate = 11025.0;
    [self.audioSession setPreferredSampleRate:hwSampleRate error:nil];
    [self.audioSession setPreferredInputNumberOfChannels:1 error:nil];
    [self.audioSession setPreferredOutputNumberOfChannels:1 error:nil];
    [self.audioSession setActive:YES error:nil];

    // Use the default microphone as the capture input.
    AVCaptureDevice *captureDevice;
    captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];

    NSError *error = nil;
    AVCaptureDeviceInput *audioInput;
    audioInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
    if (!audioInput) {
        NSLog(@"error:%@", error);
        return;
    }

    // Build the capture session; reuse the app's audio session (iOS 7).
    self.captureSession = nil;
    self.captureSession = [[AVCaptureSession alloc] init];
    self.captureSession.usesApplicationAudioSession = YES;
    [self.captureSession addInput:audioInput];

    // Deliver sample buffers to this cell on a serial queue.
    self.audioDataOutput = nil;
    self.audioDataOutput = [[AVCaptureAudioDataOutput alloc] init];
    [self.captureSession addOutput:self.audioDataOutput];
    dispatch_queue_t queue = dispatch_queue_create("AudioDataOutputQueue", DISPATCH_QUEUE_SERIAL);
    [self.audioDataOutput setSampleBufferDelegate:self queue:queue];
    [self.captureSession startRunning];
}

// Stop capturing when the touch ends.
-(void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
    [self setSelected:NO];
    [self.captureSession stopRunning];
}

// Delegate callback: receives one CMSampleBuffer of captured audio per call.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    NSLog(@"sampleBuffer = %@",sampleBuffer);
}

@end
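For reference, a sketch (illustrative) of reading the ASBD out of the buffer inside the delegate, instead of logging the whole CMSampleBuffer:

#import <CoreMedia/CoreMedia.h>

// Inside captureOutput:didOutputSampleBuffer:fromConnection:
CMFormatDescriptionRef desc = CMSampleBufferGetFormatDescription(sampleBuffer);
const AudioStreamBasicDescription *asbd =
    CMAudioFormatDescriptionGetStreamBasicDescription(desc);
if (asbd != NULL) {
    NSLog(@"sample rate seen by the output: %f", asbd->mSampleRate);
}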

0 Answers:

There are no answers yet.