How to continuously view the sound wave while recording or playing sound

Date: 2016-06-30 10:40:19

Tags: ios objective-c iphone xcode swift

In the view controller's viewDidLoad I instantiate the SoundWaveView:

- (void)viewDidLoad
{
    [super viewDidLoad];

    _soundWaveView = [[SoundWaveView alloc] initWithFrame:CGRectMake(10, 50, 300, 200)];
    [self.view addSubview:_soundWaveView];
}

In SoundWaveView I wrote the following code:

- (void)layoutSubviews {
    [super layoutSubviews];

    if (_waveImageView == nil)
    {
        _waveImageView = [[UIImageView alloc] initWithFrame:self.bounds];
        _progressImageView = [[UIImageView alloc] initWithFrame:self.bounds];

        _waveImageView.contentMode = UIViewContentModeLeft;
        _progressImageView.contentMode = UIViewContentModeLeft;
        _waveImageView.clipsToBounds = YES;
        _progressImageView.clipsToBounds = YES;

        [self addSubview:_waveImageView];
        [self addSubview:_progressImageView];
    }
}

When a sound is played, playSoundFromUrl is called:

- (void)playSoundFromUrl:(NSURL *)soundURL {
    _soundURL = soundURL;
    [self render];
}
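
Note that the rendering below reads the whole asset synchronously, so calling render directly here blocks the main thread for long files. A minimal variant that moves the decode onto a background queue (my assumption about the intended threading, not code from the question):

- (void)playSoundFromUrl:(NSURL *)soundURL {
    _soundURL = soundURL;
    dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
        // Decode and render the waveform image off the main thread.
        AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:self->_soundURL options:nil];
        UIImage *renderedImage = [self renderWaveImageFromAudioAsset:asset];
        dispatch_async(dispatch_get_main_queue(), ^{
            // Hand the finished image back to UIKit on the main thread.
            self->_waveImageView.image = renderedImage;
        });
    });
}
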
- (void)render {
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:_soundURL options:nil];
    UIImage *renderedImage = [self renderWaveImageFromAudioAsset:asset];

    _waveImageView.image = renderedImage;
    _progressImageView.image = [renderedImage tintedImageWithColor:_progressColor];

    _waveImageView.width = renderedImage.size.width;
    _waveImageView.left = (self.width - renderedImage.size.width);
    _progressImageView.left = _waveImageView.left;
    _progressImageView.width = 0;
}
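
The left, width, and height accessors used on the views above are not part of UIKit's UIView API; the question presumably relies on a frame-accessor category along these lines (my reconstruction, not shown in the original):

@interface UIView (FrameAccessors)
@property (nonatomic, assign) CGFloat left;
@property (nonatomic, assign) CGFloat width;
@property (nonatomic, readonly) CGFloat height;
@end

@implementation UIView (FrameAccessors)
- (CGFloat)left { return self.frame.origin.x; }
- (void)setLeft:(CGFloat)left {
    CGRect frame = self.frame; frame.origin.x = left; self.frame = frame;
}
- (CGFloat)width { return self.frame.size.width; }
- (void)setWidth:(CGFloat)width {
    CGRect frame = self.frame; frame.size.width = width; self.frame = frame;
}
- (CGFloat)height { return self.frame.size.height; }
@end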

- (UIImage *)renderWaveImageFromAudioAsset:(AVURLAsset *)songAsset {

    NSError* error = nil;

    AVAssetReader* reader = [[AVAssetReader alloc] initWithAsset:songAsset error:&error];

    AVAssetTrack* songTrack = [songAsset.tracks objectAtIndex:0];
    // Decode to 16-bit little-endian interleaved integer PCM. The channel
    // count is left at the source track's value so the mono/stereo handling
    // below matches the decoded buffers.
    NSDictionary* outputSettingsDict = [[NSDictionary alloc] initWithObjectsAndKeys:
                                        [NSNumber numberWithInt:kAudioFormatLinearPCM],AVFormatIDKey,
                                        [NSNumber numberWithInt:16],AVLinearPCMBitDepthKey,
                                        [NSNumber numberWithBool:NO],AVLinearPCMIsBigEndianKey,
                                        [NSNumber numberWithBool:NO],AVLinearPCMIsFloatKey,
                                        [NSNumber numberWithBool:NO],AVLinearPCMIsNonInterleaved,
                                        nil];

    AVAssetReaderTrackOutput* output = [[AVAssetReaderTrackOutput alloc] initWithTrack:songTrack outputSettings:outputSettingsDict];

    [reader addOutput:output];

    UInt32 sampleRate = 0, channelCount = 1; // default to mono if no format description is found

    NSArray* formatDesc = songTrack.formatDescriptions;

    for (int i = 0; i < [formatDesc count]; ++i)
    {
        CMAudioFormatDescriptionRef item = (__bridge CMAudioFormatDescriptionRef)[formatDesc objectAtIndex:i];
        const AudioStreamBasicDescription* fmtDesc = CMAudioFormatDescriptionGetStreamBasicDescription (item);
        if (fmtDesc)
        {
            sampleRate = fmtDesc->mSampleRate;
            channelCount = fmtDesc->mChannelsPerFrame;
        }
    }

    UInt32 bytesPerSample = 2 * channelCount; // bytes per frame: 16-bit samples times channel count
    SInt16 maxValue = 0;

    NSMutableData *fullSongData = [[NSMutableData alloc] init];

    [reader startReading];

    UInt64 totalBytes = 0;
    SInt64 totalLeft = 0;
    SInt64 totalRight = 0;
    NSInteger sampleTally = 0;

    NSInteger samplesPerPixel = 100; // fine-grained enough for most UIs, and fast

    int buffersCount = 0;
    while (reader.status == AVAssetReaderStatusReading)
    {
        AVAssetReaderTrackOutput * trackOutput = (AVAssetReaderTrackOutput *)[reader.outputs objectAtIndex:0];
        CMSampleBufferRef sampleBufferRef = [trackOutput copyNextSampleBuffer];

        if (sampleBufferRef)
        {
            CMBlockBufferRef blockBufferRef = CMSampleBufferGetDataBuffer(sampleBufferRef);

            size_t length = CMBlockBufferGetDataLength(blockBufferRef);
            totalBytes += length;

            @autoreleasepool
            {
                NSMutableData *data = [NSMutableData dataWithLength:length];
                CMBlockBufferCopyDataBytes(blockBufferRef, 0, length, data.mutableBytes);

                SInt16 * samples = (SInt16*) data.mutableBytes;
                int sampleCount = length / bytesPerSample;

                for (int i = 0; i < sampleCount; i++)
                {
                    SInt16 left = *samples++;

                    totalLeft += left;

                    SInt16 right = 0; // stays 0 for mono tracks

                    if (channelCount == 2)
                    {
                        right = *samples++;

                        totalRight += right;
                    }

                    sampleTally++;

                    if (sampleTally > samplesPerPixel)
                    {
                        left = (totalLeft / sampleTally);

                        if (channelCount == 2)
                        {
                            right = (totalRight / sampleTally);
                        }

                        SInt16 val = (channelCount == 2) ? ((right + left) / 2) : left;

                        [fullSongData appendBytes:&val length:sizeof(val)];

                        totalLeft = 0;
                        totalRight = 0;
                        sampleTally = 0;
                    }
                }
                CMSampleBufferInvalidate(sampleBufferRef);

                CFRelease(sampleBufferRef);
            }
        }

        buffersCount++;
    }

    NSMutableData *adjustedSongData = [[NSMutableData alloc] init];

    int sampleCount =  fullSongData.length / 2; // sizeof(SInt16)

    int adjustFactor = ceilf((float)sampleCount / (self.width / (_drawSpaces ? 2.0 : 1.0)));
    if (adjustFactor < 1) adjustFactor = 1; // guard against division by zero for empty input

    SInt16* samples = (SInt16*) fullSongData.mutableBytes;

    int i = 0;

    while (i < sampleCount)
    {
        SInt64 sum = 0; // wide accumulator so summing SInt16 values cannot overflow

        int j = 0;
        for (; j < adjustFactor && (i + j) < sampleCount; j++) // stay within the buffer
        {
            sum += samples[i + j];
        }
        SInt16 val = (SInt16)(sum / j);
        if (ABS(val) > maxValue)
        {
            maxValue = ABS(val);
        }
        [adjustedSongData appendBytes:&val length:sizeof(val)];
        i += adjustFactor;
    }

    sampleCount = adjustedSongData.length / 2;

    if (reader.status == AVAssetReaderStatusCompleted)
    {
        UIImage *image = [self drawImageFromSamples:(SInt16 *)adjustedSongData.bytes
                                           maxValue:maxValue
                                        sampleCount:sampleCount];
        return image;
    }
    return nil;
}
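
When the reader ends in the failed state, the method above silently returns nil; while debugging it may help to log the underlying error (a small addition of mine, not in the question):

if (reader.status == AVAssetReaderStatusFailed) {
    NSLog(@"Asset reading failed: %@", reader.error);
}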

Then the image is drawn:

- (UIImage*) drawImageFromSamples:(SInt16*)samples
                         maxValue:(SInt16)maxValue
                      sampleCount:(NSInteger)sampleCount {

    CGSize imageSize = CGSizeMake(sampleCount * (_drawSpaces ? 6 : 6), self.height);
    UIGraphicsBeginImageContextWithOptions(imageSize, NO, 0);
    CGContextRef context = UIGraphicsGetCurrentContext();

    CGContextSetFillColorWithColor(context, self.backgroundColor.CGColor);
    CGContextSetAlpha(context, 1.0);

    CGRect rect;
    rect.size = imageSize;
    rect.origin.x = 0;
    rect.origin.y = 0;

    CGColorRef waveColor = self.waveColor.CGColor;

    CGContextFillRect(context, rect);

    CGContextSetLineWidth(context, 2.0);
    CGContextSetLineCap(context, kCGLineCapRound);

    if (maxValue == 0) maxValue = 1; // avoid dividing by zero for silent input

    float channelCenterY = imageSize.height / 2;
    float sampleAdjustmentFactor = imageSize.height / (float)maxValue;

    for (NSInteger i = 0; i < sampleCount; i++) {
        float val = *samples++;
        val = val * sampleAdjustmentFactor;
        if ((int)val == 0) val = 1.0;

        CGContextMoveToPoint(context, i * (_drawSpaces ? 6 : 6), channelCenterY - val / 2.0);
        CGContextAddLineToPoint(context, i * (_drawSpaces ? 6 : 6), channelCenterY + val / 2.0);
        CGContextSetStrokeColorWithColor(context, waveColor);
        CGContextStrokePath(context);
    }

    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    return newImage;
}
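
Since _progressImageView shows the tinted copy, is left-aligned, clipped, and starts with zero width, it is presumably meant to be widened during playback so the tinted waveform is revealed up to the current position. A minimal sketch of that update, assuming an AVAudioPlayer ivar _player, a CADisplayLink ivar _displayLink, and the frame-accessor category above (all assumptions of mine):

- (void)startProgressUpdates {
    _displayLink = [CADisplayLink displayLinkWithTarget:self
                                               selector:@selector(updateProgress)];
    [_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
}

- (void)updateProgress {
    if (_player.duration > 0) {
        // Reveal the tinted waveform in proportion to the playback position.
        _progressImageView.width = _waveImageView.width * (_player.currentTime / _player.duration);
    }
}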

I want to draw the waveform continuously across the view, but it only draws up to the view's width.

[screenshot: the rendered waveform is cut off at the right edge of the view]

It does not draw anything beyond the view's width.
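
My reading of why this happens (a guess from the code above, not a confirmed answer): render pins the image's right edge to the view's right edge via _waveImageView.left = self.width - renderedImage.size.width, and clipsToBounds = YES then hides everything outside the view's bounds. One way to keep the wave moving is to slide both image views during playback instead of positioning them once, for example from the display-link callback sketched earlier:

- (void)updateScroll {
    CGFloat hidden = _waveImageView.width - self.width; // portion that cannot fit on screen
    if (hidden > 0 && _player.duration > 0) {
        // Slide the waveform leftwards as playback advances so it scrolls
        // through the visible window instead of being clipped once.
        CGFloat x = -hidden * (_player.currentTime / _player.duration);
        _waveImageView.left = x;
        _progressImageView.left = x;
    }
}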

0 Answers:

No answers yet.