Video created with ffmpeg and the H264 codec does not play on Android: the player says it is not a divx file

Time: 2013-04-09 09:21:14

Tags: android ffmpeg

I have created a video from images in C using ffmpeg with the H264 codec. I can play the video on an Android device with a third-party player (VPlayer), but the native player does not play it: it says the file is not a DivX file. Below is the code I use to create the video from the images:

JNIEXPORT void Java_com_canvasm_mediclinic_VideoGenerator_generate(JNIEnv *pEnv, jobject pObj,jobjectArray stringArray,int famerate,int width,int height,jstring videoFilename)
{
    AVCodec *codec;
    AVCodecContext *c= NULL;
    //int framesnum=5;
    int i,looper, out_size, size, x, y,j;
    int ret,pts,got_pkt_ptr;

    int imagecount= (*pEnv)->GetArrayLength(pEnv, stringArray); 
    int retval=-10;
    uint8_t endcode[]={0,0,1,0xb7};
    AVPacket outpacket;
    FILE *f;
    AVFrame *picture,*encoded_avframe;  
    jbyte *raw_record;
    char logdatadata[100];
    int returnvalue = -1,numBytes =-1;
    const char *gVideoFileName = (char *)(*pEnv)->GetStringUTFChars(pEnv, videoFilename, NULL); 
        /* find the mpeg1 video encoder */  
    codec = avcodec_find_encoder_by_name("libx264");
    if (!codec) {
        __android_log_write(ANDROID_LOG_INFO, "record","codec not found");
        exit(1);
    }
    c= avcodec_alloc_context();
    c->bit_rate = 500000;
    c->width = width;
    c->height = height;
    c->time_base= (AVRational){1,famerate};
    c->gop_size = 12; // emit one intra frame every ten frames
    c->max_b_frames=1;
    c->pix_fmt = PIX_FMT_YUV420P;
    c->codec_type = AVMEDIA_TYPE_VIDEO;
    c->codec_id = codec->id;
    c->max_b_frames = 0;
    c->me_range = 16;
    c->max_qdiff = 4;
    c->qmin = 10;
    c->qmax = 26;
    c->qcompress = 0.6;
    c->trellis=0;
    c->level = 30;
    c->refs = 5;
    c->coder_type = 0;
    c->scenechange_threshold = 0;
    //new
    c->flags|=CODEC_FLAG_LOOP_FILTER;//new  
    c->scenechange_threshold = 40; //new
    c-> rc_buffer_size = 0;
    c->gop_size=250; //new
    c->max_b_frames=1;//new
    c->me_method=7;
    c->me_cmp|= 1;
    c->me_subpel_quality = 6;
    c->qmax=51;
    c->keyint_min=25;
    av_opt_set(c->priv_data,"subq","6",0);
    av_opt_set(c->priv_data,"crf","20.0",0);
    av_opt_set(c->priv_data,"weighted_p_pred","0",0);
    av_opt_set(c->priv_data,"profile","baseline",AV_OPT_SEARCH_CHILDREN);
    av_opt_set(c->priv_data,"preset","medium",0);
    av_opt_set(c->priv_data,"tune","zerolatency",0);
    av_opt_set(c->priv_data,"x264opts","rc-lookahead=0",0);




    /* open it */
    retval = avcodec_open(c, codec);
    if ( retval < 0)
    {
    __android_log_write(ANDROID_LOG_INFO, "record","could not open codec");
        exit(1);
    }   
    f = fopen(gVideoFileName, "ab");

    if (!f) {
        __android_log_write(ANDROID_LOG_INFO, "record","could not open video file");
        exit(1);
    }
    pts = 0;
    for(i=0;i<=imagecount;i++) {


        jboolean isCp;
        int progress = 0;
        float temp;

        jstring string;
        if(i==imagecount)
            string = (jstring) (*pEnv)->GetObjectArrayElement(pEnv, stringArray,
                      imagecount-1);
        else
            string = (jstring) (*pEnv)->GetObjectArrayElement(pEnv, stringArray, i);

        const char *rawString = (*pEnv)->GetStringUTFChars(pEnv, string, &isCp);        
        picture = OpenImage(rawString,width,height,i);      
        av_init_packet(&outpacket);
        fflush(stdout);


        {               
            picture->pts=i ;//c->frame_number;          
            do{
                out_size = avcodec_encode_video2(c, &outpacket, 
                                 picture,&got_pkt_ptr);             
            }while(!got_pkt_ptr);   

        }

        returnvalue = fwrite(outpacket.data, 1, outpacket.size, f);
        av_free_packet(&outpacket); 

    }


    /* get the delayed frames */
    for(got_pkt_ptr =1; got_pkt_ptr; i++) {
        fflush(stdout);
        avcodec_encode_video2(c, &outpacket, NULL,&got_pkt_ptr);

        if(got_pkt_ptr)
        {
            fwrite(outpacket.data, 1, outpacket.size, f);
            av_free_packet(&outpacket);
        }   
    }

    fwrite(endcode,1,sizeof(endcode),f);
    fclose(f);  
    avcodec_close(c);
    av_free(c);     
}
AVFrame* OpenImage(const char* imageFileName,int w,int h,int index)
{
    AVFrame *pFrame;
    AVCodec *pCodec ;
    AVFormatContext *pFormatCtx = NULL;
    AVCodecContext *pCodecCtx;
    uint8_t *buffer;
    int frameFinished,framesNumber = 0,retval = -1,numBytes=0;
    AVPacket packet;
    char logdatadata[100];
    int result = -1;



    result=avformat_open_input(&pFormatCtx,imageFileName,NULL,NULL);
    if(result!=0)       
    {       
        __android_log_write(ANDROID_LOG_INFO, "record",
                "Can't open image file ");
        return NULL;
    }
    pCodecCtx = pFormatCtx->streams[0]->codec;
    pCodecCtx->width = w;
    pCodecCtx->height = h;
    pCodecCtx->pix_fmt = PIX_FMT_YUV420P;

    // Find the decoder for the video stream
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (!pCodec)
    {
        __android_log_write(ANDROID_LOG_INFO, "record",
                "Can't open image file ");
        return NULL;
    }

    pFrame = avcodec_alloc_frame();

    numBytes = avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
    buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t)); 

    retval = avpicture_fill((AVPicture *) pFrame, buffer, PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);

    // Open codec
    if(avcodec_open(pCodecCtx, pCodec)<0)
    {
        __android_log_write(ANDROID_LOG_INFO, "record","Could not open codec");
        return NULL;
    }

    if (!pFrame)
    {
        __android_log_write(ANDROID_LOG_INFO, "record","Can't allocate memory for AVFrame\n");
        return NULL;
    }
    int readval = -5;
    while (readval = av_read_frame(pFormatCtx, &packet) >= 0)
    {
        if(packet.stream_index != 0)
            continue;       
        int ret = avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

        if (ret > 0)
        {
            __android_log_write(ANDROID_LOG_INFO, "record","Frame is decoded\n");
            pFrame->quality = 4;
            av_free_packet(&packet);
            av_close_input_file(pFormatCtx);            
            return pFrame;
        }
        else
        {
            __android_log_write(ANDROID_LOG_INFO, "record","error while decoding frame \n");
        }
    }   
}

The video generated by the above code plays fine in third-party players such as VLC. I just want to know what I am missing for playback on Android, and how to get rid of the "Not a divx file" error.

I have already checked some forum threads on this, given below, but with no luck: link1 link2

Thanks in advance.

2 Answers:

Answer 0 (score: 0):

Maybe your converted video file has an .avi extension? ffmpeg picks the video container format from the file extension, and H264 needs an MPEG-4 container format.
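
Note that the code in the question does not use a muxer at all: the H.264 packets coming out of avcodec_encode_video2() are fwrite()-ten straight to the output file, so regardless of the extension there is no container around the stream, and the stock Android player expects a real container (MP4/3GP) rather than a raw elementary stream. If the video has to be produced from the C side, the packets can instead be routed through libavformat so they end up in an MP4 file. The following is only a minimal sketch, not the poster's code: get_next_picture() is a hypothetical stand-in for the question's OpenImage() loop, error checking is trimmed, and an ffmpeg build that has avformat_alloc_output_context2() is assumed (the question already uses avcodec_encode_video2(), so that is likely).

/* Minimal sketch: encode frames with libx264 and mux them into an MP4
 * container with libavformat instead of fwrite()-ing the raw bitstream. */
#include <libavformat/avformat.h>
#include <libavutil/mathematics.h>

extern AVFrame *get_next_picture(int index);   /* stands in for OpenImage() */

static void write_packet(AVFormatContext *oc, AVStream *st,
                         AVCodecContext *c, AVPacket *pkt)
{
    /* rescale timestamps from the codec time base to the stream time base */
    if (pkt->pts != AV_NOPTS_VALUE)
        pkt->pts = av_rescale_q(pkt->pts, c->time_base, st->time_base);
    if (pkt->dts != AV_NOPTS_VALUE)
        pkt->dts = av_rescale_q(pkt->dts, c->time_base, st->time_base);
    pkt->stream_index = st->index;
    av_interleaved_write_frame(oc, pkt);       /* libavformat takes ownership of the packet */
}

void encode_images_to_mp4(const char *filename, int width, int height,
                          int framerate, int nframes)
{
    AVFormatContext *oc = NULL;
    AVStream *st;
    AVCodec *codec;
    AVCodecContext *c;
    int i, got_packet;

    av_register_all();

    /* the container is picked from the file extension, so pass something.mp4 */
    avformat_alloc_output_context2(&oc, NULL, NULL, filename);

    codec = avcodec_find_encoder_by_name("libx264");
    st = avformat_new_stream(oc, codec);
    c = st->codec;
    c->bit_rate  = 500000;
    c->width     = width;
    c->height    = height;
    c->time_base = (AVRational){1, framerate};
    c->pix_fmt   = PIX_FMT_YUV420P;
    /* ... the remaining x264 options from the question (profile, preset,
     * tune, crf, ...) would be applied here with av_opt_set() ... */
    if (oc->oformat->flags & AVFMT_GLOBALHEADER)
        c->flags |= CODEC_FLAG_GLOBAL_HEADER;  /* MP4 keeps SPS/PPS in extradata */

    avcodec_open2(c, codec, NULL);
    avio_open(&oc->pb, filename, AVIO_FLAG_WRITE);
    avformat_write_header(oc, NULL);           /* real container header, not a bare fwrite() */

    for (i = 0; i < nframes; i++) {
        AVPacket pkt;
        AVFrame *picture = get_next_picture(i);

        av_init_packet(&pkt);
        pkt.data = NULL;
        pkt.size = 0;
        picture->pts = i;

        avcodec_encode_video2(c, &pkt, picture, &got_packet);
        if (got_packet)
            write_packet(oc, st, c, &pkt);
    }

    /* flush the delayed frames */
    do {
        AVPacket pkt;
        av_init_packet(&pkt);
        pkt.data = NULL;
        pkt.size = 0;
        avcodec_encode_video2(c, &pkt, NULL, &got_packet);
        if (got_packet)
            write_packet(oc, st, c, &pkt);
    } while (got_packet);

    av_write_trailer(oc);                      /* finalizes the MP4 index */
    avcodec_close(c);
    avio_close(oc->pb);
    avformat_free_context(oc);
}

With CODEC_FLAG_GLOBAL_HEADER set, the muxer stores the SPS/PPS in the container's extradata, and av_write_trailer() finishes the file properly, so the hand-written end code from the question is no longer needed.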

Answer 1 (score: 0):

Thought I should post the solution. I solved this by using an ffmpeg Unix executable and moving to the command line. The steps are as follows:

1) I managed to build an ffmpeg executable about 8 MB in size and placed it in my assets folder.

2) When my application starts, I copy that file onto the SD card.

3) Then I generate the video from the images with the following code (the command line it assembles is spelled out after the code):

changeExeMode(SYSTEM_BIN_CHMOD + " 777 " + APP_DATA_PATH + FFMPEG_EXE); // helper defined below

String[] ffmpegCommand = new String[] {
        APP_DATA_PATH + FFMPEG_EXE,
        "-f", "image2",
        "-r", "" + frameVisibleTime,
        "-i", Environment.getExternalStorageDirectory().toString()
                + Constants.logoImageDir + "/" + "%d.jpg",
        path};
Process exeffmpeg = Runtime.getRuntime().exec(ffmpegCommand);


void changeExeMode(String exeName) throws IOException, InterruptedException {
    Process chm = Runtime.getRuntime().exec(exeName);
    chm.waitFor();
    int exitCode = chm.exitValue();
    if (exitCode != 0) {
        printFailureReason(chm);
    }
}
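
For reference, with those arguments the array in step 3 assembles a command line of roughly this shape (the executable path, frame rate, image directory and output path all come from the app's own constants, which are not shown in the answer):

    <APP_DATA_PATH + FFMPEG_EXE> -f image2 -r <frameVisibleTime> -i <external storage>/<logoImageDir>/%d.jpg <path>

so the numbered JPEGs are read as an image2 sequence and encoded into whatever container the extension of <path> implies, which is also why giving the output an .mp4 extension matters here, as noted in the other answer.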
    }