iOS - Jerky video encoding

Date: 2017-10-13 13:42:55

Tags: ios opengl-es opengl-es-2.0 video-encoding avassetwriter

I am trying to encode an OpenGL scene to an mp4 video.

On the left (red), the video is encoded as fast as possible, but the result looks jerky. If I add a pause (an NSThread sleep) each time a frame is encoded, it looks correct (right, blue), but the encode takes much longer..

[Screenshots: the jerky encode (left, red) vs. the correct encode (right, blue)]

I suspect I am doing something wrong somewhere, so here is the encoder code (I can provide the full project on GitHub if needed):

The encoding loop:

// Drive the encoder as fast as the writer will accept frames.
// Note: this busy-waits until `finished` is set elsewhere.
while (!finished) {

    if ([self.encoder isReadyToEncodeNewFrame]) {

        if ([self.encoder encodeFrame]) {
            self.frameCpt++;
        }
    }
}
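
GLEncoder.h is not shown in the question. A minimal header consistent with the implementation below might look like this; the exact protocol and typedef declarations are my reconstruction, not the asker's code:

// GLEncoder.h -- reconstructed sketch, see note above
#import <Foundation/Foundation.h>
#import <OpenGLES/EAGL.h>
#import <OpenGLES/ES2/gl.h>

typedef void (^BlockRunnable)(void);

@protocol GLEncoderDelegate <NSObject>
// Called once per frame; the delegate renders the scene into the encoder's FBO.
- (void)drawOpenGLScene;
@end

@interface GLEncoder : NSObject

- (id)initWithWidth:(int)videoWidth
          andHeight:(int)videoHeight
             andFPS:(int)FPS
     andEAGLContext:(EAGLContext *)context;

- (void)setDelegate:(id<GLEncoderDelegate>)newDelegate;
- (void)setupEncoding:(nonnull NSString *)oVideoFilePath fboHook:(GLuint)fboHook;
- (BOOL)isReadyToEncodeNewFrame;
- (BOOL)encodeFrame;
- (void)finishEncoding:(BlockRunnable)completionHandler;

@end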

The encoder (GLEncoder.m):

#import "GLEncoder.h"
#import <OpenGLES/ES2/glext.h>
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

@interface GLEncoder () {
    CVOpenGLESTextureCacheRef _coreVideoTextureCache;
    CVPixelBufferRef _renderTarget;
}

@property (nonatomic, assign) GLuint fboHook;
@property (nonatomic, assign) GLuint fboTexture;
@property (nonatomic, assign) int videoWidth;
@property (nonatomic, assign) int videoHeight;
@property (nonatomic, assign) int FPS;
@property (nonatomic, assign) BOOL isEncodingFrame;
@property (nonatomic, assign) BOOL hasFinishedEncoding;
@property (nonatomic, assign) int frameCpt;

@property (nonatomic, strong) NSString * videoFilePath;
@property (nonatomic, strong) EAGLContext * eaglContext;
@property (nonatomic, strong) NSURL * videoFileURL;
@property (nonatomic, strong) AVAssetWriter *assetWriter;
@property (nonatomic, strong) AVAssetWriterInput *assetWriterInput;
@property (nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *inputPixelBufferAdapter;

@property (nonatomic, weak) id<GLEncoderDelegate> delegate; // weak: a strong delegate reference risks a retain cycle

@end

@implementation GLEncoder 

- (id)initWithWidth:(int)videoWidth
          andHeight:(int)videoHeight
             andFPS:(int)FPS
     andEAGLContext:(EAGLContext *)context {

    // Call through to the superclass initializer before touching state.
    if ((self = [super init])) {
        self.videoWidth = videoWidth;
        self.videoHeight = videoHeight;
        self.FPS = FPS;
        self.eaglContext = context;
        self.frameCpt = 0;
        self.isEncodingFrame = NO;
        self.hasFinishedEncoding = NO;
    }
    return self;
}

- (void)setDelegate:(id<GLEncoderDelegate>)newDelegate {
    // Assign the ivar directly: `self.delegate = newDelegate` would call
    // this setter again and recurse until the stack overflows.
    _delegate = newDelegate;
}

- (void)setupEncoding:(nonnull NSString *)oVideoFilePath fboHook:(GLuint)fboHook {

    self.fboHook = fboHook;
    self.videoFilePath = oVideoFilePath;
    self.videoFileURL = [NSURL fileURLWithPath:oVideoFilePath];

    if ([[NSFileManager defaultManager] fileExistsAtPath:self.videoFilePath ])
        [[NSFileManager defaultManager] removeItemAtPath:self.videoFilePath  error:nil];

    NSError *error = nil;
    self.assetWriter = [[AVAssetWriter alloc] initWithURL:self.videoFileURL
                                                 fileType:AVFileTypeMPEG4
                                                    error:&error];
    if (error)
        NSLog(@"Could not create AVAssetWriter: %@", error);

    NSDictionary *outputSettingsDictionary = @{
            AVVideoCodecKey  : AVVideoCodecH264,
            AVVideoWidthKey  : @(self.videoWidth),
            AVVideoHeightKey : @(self.videoHeight)
    };

    self.assetWriterInput = [AVAssetWriterInput
            assetWriterInputWithMediaType:AVMediaTypeVideo
                           outputSettings:outputSettingsDictionary];

    NSDictionary *sourcePixelBufferAttributesDictionary = @{
            (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
            (__bridge NSString *)kCVPixelBufferWidthKey           : @(self.videoWidth),
            (__bridge NSString *)kCVPixelBufferHeightKey          : @(self.videoHeight)
    };

    self.inputPixelBufferAdapter = [AVAssetWriterInputPixelBufferAdaptor
            assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.assetWriterInput
                                       sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];

    [self.assetWriter addInput:self.assetWriterInput];

    [self.assetWriter startWriting];
    // Start the session at t = 0; frame n is later stamped at n/FPS seconds.
    [self.assetWriter startSessionAtSourceTime:CMTimeMake(0, self.FPS)];

    _coreVideoTextureCache = NULL;
    _renderTarget = NULL;

    // Create a texture cache bound to the GL context, then draw a pixel
    // buffer from the adaptor's pool to use as the render target.
    CVOpenGLESTextureCacheCreate(kCFAllocatorDefault,
            NULL,
            self.eaglContext,
            NULL,
            &_coreVideoTextureCache);

    CVPixelBufferPoolCreatePixelBuffer(NULL,
            [self.inputPixelBufferAdapter pixelBufferPool],
            &_renderTarget);

    // Wrap the pixel buffer in an OpenGL texture so the scene renders
    // directly into the buffer that is handed to the encoder.
    CVOpenGLESTextureRef renderTexture;
    CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
            _coreVideoTextureCache,
            _renderTarget,
            NULL,
            GL_TEXTURE_2D,
            GL_RGBA,
            self.videoWidth,
            self.videoHeight,
            GL_BGRA,
            GL_UNSIGNED_BYTE,
            0,
            &renderTexture);

    self.fboTexture = CVOpenGLESTextureGetName(renderTexture);
    glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), self.fboTexture);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

    // Attach the texture to the hook FBO so every subsequent draw call
    // into it lands in _renderTarget.
    glBindFramebuffer(GL_FRAMEBUFFER, self.fboHook);

    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, self.fboTexture, 0);
}

- (BOOL)isReadyToEncodeNewFrame {
    return (self.assetWriterInput.readyForMoreMediaData && !self.isEncodingFrame);
}

- (BOOL)encodeFrame {

    BOOL success = NO; // no block captures this, so __block is unnecessary

    if (!self.hasFinishedEncoding) {
        self.isEncodingFrame = YES;

        if (self.assetWriterInput.readyForMoreMediaData) {

            // The delegate issues this frame's GL draw calls; with the FBO
            // bound above, they render straight into _renderTarget.
            if (self.delegate)
                [self.delegate drawOpenGLScene];

            CVPixelBufferLockBaseAddress(_renderTarget, 0);

            CMTime frameTime = CMTimeMake(self.frameCpt, self.FPS);
            if ([_inputPixelBufferAdapter appendPixelBuffer:_renderTarget withPresentationTime:frameTime]) {
                self.frameCpt++;
                success = YES;
            }

            CVPixelBufferUnlockBaseAddress(_renderTarget, 0);

            // This is where I pause after each frame has been encoded
            [NSThread sleepForTimeInterval:0.05];
        }

        self.isEncodingFrame = NO;
    }

    return success;
}

- (void)finishEncoding:(BlockRunnable)completionHandler {

    self.hasFinishedEncoding = YES;
    [self.assetWriterInput markAsFinished];
    [self.assetWriter finishWritingWithCompletionHandler:^{
        self.assetWriter = nil;
        completionHandler();
    }];
}

@end
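
For context, here is roughly how the class above would be driven end to end. This caller is my sketch, not the asker's code: the dimensions, frame count, output path, and the assumption that `self` adopts GLEncoderDelegate are all illustrative.

// Hypothetical caller -- illustrative sketch only, not part of the question.
NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:@"out.mp4"];

GLuint fbo;
glGenFramebuffers(1, &fbo); // the encoder attaches its render texture to this FBO

GLEncoder *encoder = [[GLEncoder alloc] initWithWidth:640
                                            andHeight:480
                                               andFPS:30
                                       andEAGLContext:[EAGLContext currentContext]];
[encoder setDelegate:self]; // assumes self implements drawOpenGLScene
[encoder setupEncoding:path fboHook:fbo];

for (int i = 0; i < 300; i++) { // 300 frames = 10 s at 30 FPS
    while (![encoder isReadyToEncodeNewFrame]) { /* spin until the writer accepts more data */ }
    [encoder encodeFrame];
}

[encoder finishEncoding:^{
    NSLog(@"Video written to %@", path);
}];

Note that CMTimeMake(self.frameCpt, self.FPS) stamps frame n at exactly n/FPS seconds, so the output timeline is uniform regardless of how long each frame takes to encode; the jerkiness in the red capture is therefore unlikely to come from the timestamps themselves.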

0 Answers:

No answers