Using FFmpeg

Date: 2017-10-30 09:15:37

Tags: c++ ffmpeg

I am currently working in C++ with the Autodesk 3ds Max 2014 SDK (toolset 100) and the FFmpeg library in Visual Studio 2015, trying to convert a DIB (Device Independent Bitmap) into an array of uint8_t and then convert that data into an AVFrame.

I do not get any errors, but my video is still black and has no metadata (no duration shown, etc.).

I convert a JPEG image sequence from disk in roughly the same way in a Visual Studio console application, and that works fine. (The only difference is that instead of using the FFmpeg library to turn a JPEG into an AVFrame, I am trying to turn raw data into an AVFrame.)

So I think the problem is either the DIB-to-uint8_t conversion or the uint8_t-to-AVFrame conversion. (The second seems more likely, because I use the SFML library to display a window with my rgb uint8_t* data for debugging, and that works fine.)
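
The Util::DebugWindow helper I use for this is not shown here; a minimal sketch of what such a debug window can look like, assuming SFML 2.x and a tightly packed RGBA buffer:

#include <SFML/Graphics.hpp>
#include <cstdint>

//Hypothetical debug helper: displays a tightly packed RGBA buffer in a window.
//sf::Texture::update expects 32-bit RGBA, which matches the output of
//Util::ToUint8_t further down.
void DebugWindow(int width, int height, const uint8_t *rgba)
{
    sf::RenderWindow window(sf::VideoMode(width, height), "RGBA debug");
    sf::Texture texture;
    texture.create(width, height);
    texture.update(rgba); //copies width * height * 4 bytes

    sf::Sprite sprite(texture);
    while (window.isOpen())
    {
        sf::Event event;
        while (window.pollEvent(event))
            if (event.type == sf::Event::Closed)
                window.close();

        window.clear();
        window.draw(sprite);
        window.display();
    }
}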

First I initialize the FFmpeg library. The following function is called once, at the start:

int Converter::Initialize(AVCodecID codec_id, int width, int height, int fps, const char *filename)
{
    avcodec_register_all();
    av_register_all();

    AVCodec *codec;
    inputFrame = NULL;
    codecContext = NULL;
    pkt = NULL;
    file = NULL;
    //+1 for the null terminator; allocating only strlen(filename) is one byte short
    outputFilename = new char[strlen(filename) + 1]();
    strcpy(outputFilename, filename);

    int ret;

    //Initializing AVCodecContext and getting PixelFormat supported by encoder
    codec = avcodec_find_encoder(codec_id);
    if (!codec)
        return 1;

    AVPixelFormat pixFormat = codec->pix_fmts[0];
    codecContext = avcodec_alloc_context3(codec);
    if (!codecContext)
        return 1;

    codecContext->bit_rate = 400000;
    codecContext->width = width;
    codecContext->height = height;
    codecContext->time_base.num = 1;
    codecContext->time_base.den = fps;
    codecContext->gop_size = 10;
    codecContext->max_b_frames = 1;
    codecContext->pix_fmt = pixFormat;

    if (codec_id == AV_CODEC_ID_H264)
        av_opt_set(codecContext->priv_data, "preset", "slow", 0);

    //Actually opening the encoder
    if (avcodec_open2(codecContext, codec, NULL) < 0)
        return 1;

    file = fopen(outputFilename, "wb");
    if (!file)
        return 1;

    inputFrame = av_frame_alloc();
    inputFrame->format = codecContext->pix_fmt;
    inputFrame->width = codecContext->width;
    inputFrame->height = codecContext->height;

    ret = av_image_alloc(inputFrame->data, inputFrame->linesize, codecContext->width, codecContext->height, codecContext->pix_fmt, 32);

    if (ret < 0)
        return 1;

    return 0;
}
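
A hypothetical call site, just to show the intended usage (the codec, resolution, fps and filename are example values):

Converter converter;
if (converter.Initialize(AV_CODEC_ID_H264, 1920, 1080, 25, "output.h264") != 0)
{
    //initialization failed: unknown encoder, unsupported parameters or unwritable file
}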

Then, for every frame, I grab the DIB and convert it to a uint8_t* with this function:

uint8_t* Util::ToUint8_t(RGBQUAD *data, int width, int height)
{
    uint8_t* buf = (uint8_t*)data;

    int imageSize = width * height;
    size_t rgbquad_size = sizeof(RGBQUAD);
    size_t total_bytes = imageSize * rgbquad_size;
    uint8_t * pCopyBuffer = new uint8_t[total_bytes];

    for (int x = 0; x < width; x++)
    {
        for (int y = 0; y < height; y++)
        {
            int index = (x + width * y) * rgbquad_size;
            //the DIB is stored bottom-up, so read from the mirrored row to flip vertically
            int invertIndex = (x + width * (height - y - 1)) * rgbquad_size;

            //BGRA to RGBA
            pCopyBuffer[index] = buf[invertIndex + 2];
            pCopyBuffer[index + 1] = buf[invertIndex + 1];
            pCopyBuffer[index + 2] = buf[invertIndex];
            pCopyBuffer[index + 3] = 0xFF;
        }
    }

    return pCopyBuffer;
}
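
To check the flip and swizzle in isolation, a tiny hypothetical sanity test like this helps (a 2x2 image with one hand-picked pixel):

#include <cassert>

//In a bottom-up 2x2 DIB, buffer row 1 holds the top of the image, and
//RGBQUAD stores its channels in B, G, R, reserved order.
void TestToUint8_t()
{
    RGBQUAD dib[4] = {};
    dib[2].rgbRed = 0xFF; //top-left pixel of the image, pure red

    uint8_t *rgba = Util::ToUint8_t(dib, 2, 2);

    //after the vertical flip and BGRA->RGBA swizzle, the first output pixel is red
    assert(rgba[0] == 0xFF && rgba[1] == 0x00 && rgba[2] == 0x00 && rgba[3] == 0xFF);
    delete[] rgba;
}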

void GetDIBBuffer(Interface* ip, BITMAPINFO *bmi, uint8_t** outBuffer)
{
    int size;

    ViewExp& view = ip->GetActiveViewExp();

    view.getGW()->getDIB(NULL, &size);

    bmi = (BITMAPINFO *)malloc(size);
    BITMAPINFOHEADER *bmih = (BITMAPINFOHEADER *)bmi;
    view.getGW()->getDIB(bmi, &size);

    uint8_t * pCopyBuffer = Util::ToUint8_t(bmi->bmiColors, bmih->biWidth, bmih->biHeight);

    *outBuffer = pCopyBuffer;
    free(bmi); //the DIB has been copied, so free it to avoid a per-frame leak
}

This function is used to get the DIB:

void GetViewportDIB(Interface* ip, BITMAPINFO *bmi, BITMAPINFOHEADER *bmih, BitmapInfo biFile, Bitmap *map)
{
    int size;

    if (!biFile.Name()[0])
        return;

    ViewExp& view = ip->GetActiveViewExp();

    view.getGW()->getDIB(NULL, &size);

    bmi = (BITMAPINFO *)malloc(size);
    bmih = (BITMAPINFOHEADER *)bmi;

    view.getGW()->getDIB(bmi, &size);

    biFile.SetWidth((WORD)bmih->biWidth);
    biFile.SetHeight((WORD)bmih->biHeight);
    biFile.SetType(BMM_TRUE_32);

    map = TheManager->Create(&biFile);
    map->OpenOutput(&biFile);
    map->FromDib(bmi);
    map->Write(&biFile);
    map->Close(&biFile);
}

Then comes the conversion to an AVFrame and the video encoding. The EncodeFromMem function is called for every frame:

int Converter::EncodeFromMem(const char *outputDir, int frameNumber, uint8_t* data)
{
    //outputDir is currently unused; pts counts frames in time_base units
    inputFrame->pts = frameNumber;
    EncodeFrame(data, codecContext, inputFrame, &pkt, file);

    return 0;
}

static void RgbToYuv(uint8_t *rgb, AVCodecContext *c, AVFrame *frame)
{
    struct SwsContext *swsCtx = NULL;
    const int in_linesize[1] = { 3 * c->width };// RGB stride
    swsCtx = sws_getCachedContext(swsCtx, c->width, c->height, AV_PIX_FMT_RGB24, c->width, c->height, AV_PIX_FMT_YUV420P, 0, 0, 0, 0);
    sws_scale(swsCtx, (const uint8_t * const *)&rgb, in_linesize, 0, c->height, frame->data, frame->linesize);
}

static void EncodeFrame(uint8_t *rgb, AVCodecContext *c, AVFrame *frame, AVPacket **pkt, FILE *file)
{
    int ret, got_output;

    RgbToYuv(rgb, c, frame);

    //av_packet_alloc already initializes the packet (data = NULL, size = 0)
    *pkt = av_packet_alloc();

    ret = avcodec_encode_video2(c, *pkt, frame, &got_output);
    if (ret < 0)
    {
        fprintf(stderr, "Error encoding frame\n");
        exit(1);
    }
    if (got_output)
    {
        fwrite((*pkt)->data, 1, (*pkt)->size, file);
        av_packet_unref(*pkt);
    }
}
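
As a side note, avcodec_encode_video2 has been deprecated since FFmpeg 3.1; the same step, including the delayed-frame flush done in Finalize below, can be written with the send/receive API. A rough sketch, not the code I am running:

//Sketch using the FFmpeg 3.1+ API; passing NULL as the frame switches the
//encoder into flush mode and drains the remaining delayed packets.
static int EncodeWithSendReceive(AVCodecContext *c, AVFrame *frame, FILE *file)
{
    int ret = avcodec_send_frame(c, frame); //frame == NULL flushes
    if (ret < 0)
        return ret;

    AVPacket *pkt = av_packet_alloc();
    while ((ret = avcodec_receive_packet(c, pkt)) == 0)
    {
        fwrite(pkt->data, 1, pkt->size, file);
        av_packet_unref(pkt);
    }
    av_packet_free(&pkt);

    //AVERROR(EAGAIN) means the encoder wants more input; AVERROR_EOF means fully flushed
    return (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) ? 0 : ret;
}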

To finish up, I have a function that writes the delayed packets and frees the memory; it is called once, at the end of the time range.

int Converter::Finalize()
{
    int ret, got_output;
    uint8_t endcode[] = { 0, 0, 1, 0xb7 };

    /* get the delayed frames */
    do
    {
        fflush(stdout);
        ret = avcodec_encode_video2(codecContext, pkt, NULL, &got_output);
        if (ret < 0)
        {
            fprintf(stderr, "Error encoding frame\n");
            return 1;
        }
        if (got_output)
        {
            fwrite(pkt->data, 1, pkt->size, file);
            av_packet_unref(pkt);
        }
    } while (got_output);

    fwrite(endcode, 1, sizeof(endcode), file);
    fclose(file);

    avcodec_close(codecContext);
    av_free(codecContext);

    av_frame_unref(inputFrame);
    av_frame_free(&inputFrame);
    //av_freep(&inputFrame->data[0]); //Crash

    delete[] outputFilename; //allocated with new[], so delete[] is required
    outputFilename = 0;

    return 0;
}

EDIT

I modified the RgbToYuv function and created another one that converts the yuv frame back to an rgb frame.

This does not solve the problem, but it may narrow the issue down to the YuvToRgb conversion.

Here is the result of the YUV to RGB conversion:

![YuvToRgb result](https://img42.com/kHqpt+)

static void YuvToRgb(AVCodecContext *c, AVFrame *frame)
{
    struct SwsContext *img_convert_ctx = sws_getContext(c->width, c->height, AV_PIX_FMT_YUV420P, c->width, c->height, AV_PIX_FMT_RGB24, SWS_BICUBIC, NULL, NULL, NULL);
    AVFrame * rgbPictInfo = av_frame_alloc();
    //reuses frame->data[0] as backing storage for the RGB24 destination
    avpicture_fill((AVPicture*)rgbPictInfo, *(frame)->data, AV_PIX_FMT_RGB24, c->width, c->height);
    sws_scale(img_convert_ctx, frame->data, frame->linesize, 0, c->height, rgbPictInfo->data, rgbPictInfo->linesize);

    Util::DebugWindow(c->width, c->height, rgbPictInfo->data[0]);
}
static void RgbToYuv(uint8_t *rgb, AVCodecContext *c, AVFrame *frame)
{
    AVFrame * rgbPictInfo = av_frame_alloc();
    avpicture_fill((AVPicture*)rgbPictInfo, rgb, AV_PIX_FMT_RGBA, c->width, c->height);

    struct SwsContext *swsCtx = sws_getContext(c->width, c->height, AV_PIX_FMT_RGBA, c->width, c->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
    //repoints frame's data planes into the rgb buffer before scaling into them
    avpicture_fill((AVPicture*)frame, rgb, AV_PIX_FMT_YUV420P, c->width, c->height);
    sws_scale(swsCtx, rgbPictInfo->data, rgbPictInfo->linesize, 0, c->height, frame->data, frame->linesize);

    YuvToRgb(c, frame);
}
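
For comparison, a plain RGBA to YUV420P conversion that scales straight into the encoder frame (which already owns its planes from av_image_alloc in Initialize) could look roughly like this sketch; it skips the second avpicture_fill on frame:

//Sketch of a direct RGBA -> YUV420P conversion with sws_scale. The source
//stride is width * 4 because the buffer from Util::ToUint8_t is tightly
//packed RGBA; the destination frame keeps the planes from av_image_alloc.
static void RgbaToYuv420p(const uint8_t *rgba, AVCodecContext *c, AVFrame *frame)
{
    const uint8_t *srcData[1] = { rgba };
    const int srcLinesize[1] = { 4 * c->width };

    struct SwsContext *swsCtx = sws_getContext(
        c->width, c->height, AV_PIX_FMT_RGBA,
        c->width, c->height, AV_PIX_FMT_YUV420P,
        SWS_BICUBIC, NULL, NULL, NULL);

    sws_scale(swsCtx, srcData, srcLinesize, 0, c->height,
              frame->data, frame->linesize);
    sws_freeContext(swsCtx);
}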

0 Answers:

No answers yet.