Getting frames with ffmpeg on Android

Date: 2014-08-05 11:03:24

Tags: android encoding ffmpeg

I want to use ffmpeg on Android to grab frames from a .mp4 file (recorded with the phone camera). Compiling with ndk-build works without problems. My questions are:

  1. To grab frames from a .mp4 file, what do I need to do? Encode? Decode?

  2. The encoded .mp4 video file cannot be played in the media player.

  3. Please help me.

Here is the code I am using:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>
    #include <android/log.h>
    #include <libavcodec/avcodec.h>
    #include <libavformat/avformat.h>
    #include <libswscale/swscale.h>
    #include <libavutil/opt.h>
    #include <libavutil/channel_layout.h>
    #include <libavutil/common.h>
    #include <libavutil/imgutils.h>
    #include <libavutil/mathematics.h>
    #include <libavutil/samplefmt.h>
    #include "player.h"
    
    void video_encode_example(const char *filename, int codec_id)
    {
        AVCodec *codec;
        AVCodecContext *c= NULL;
        int i, ret, x, y, got_output;
        FILE *f;
        AVFrame *frame;
        AVPacket pkt;
        uint8_t endcode[] = { 0, 0, 1, 0xb7 };
    
        __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "%s", filename);
    
        /* find the H.263 video encoder */
        codec = avcodec_find_encoder(AV_CODEC_ID_H263);
        if (!codec) {
            __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Codec not found");
            exit(1);
        }
    
        c = avcodec_alloc_context3(codec);
        if (!c) {
            __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Could not allocate video codec context");
            exit(1);
        }
    
        /* put sample parameters */
        c->bit_rate = 400000;
        /* resolution must be a multiple of two */
        c->width = 352;
        c->height = 288;
        /* frames per second */
        c->time_base= (AVRational){1,25};
        c->gop_size = 10; /* emit one intra frame every ten frames */
        c->max_b_frames = 1;
        c->pix_fmt = AV_PIX_FMT_YUV420P;
    
    
        // if (codec_id == AV_CODEC_ID_H264)
        //     av_opt_set(c->priv_data, "preset", "slow", 0);
    
        /* open it */
        if (avcodec_open2(c, codec, NULL) < 0) {
            __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Could not open codec");
            exit(1);
        }
    
        f = fopen(filename, "wb");
        if (f == NULL) {
            __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Could not open");
            exit(1);
        }
    
        frame = avcodec_alloc_frame();
        if (!frame) {
            __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Could not allocate video frame");
            exit(1);
        }
        frame->format = c->pix_fmt;
        frame->width  = c->width;
        frame->height = c->height;
    
        /* the image can be allocated by any means and av_image_alloc() is
         * just the most convenient way if av_malloc() is to be used */
        ret = av_image_alloc(frame->data, frame->linesize, c->width, c->height,
                             c->pix_fmt, 32);
        if (ret < 0) {
            __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Could not allocate raw picture buffer");
            exit(1);
        }
    
        /* encode 10 seconds of video (250 frames at 25 fps) */
        for(i=0;i<250;i++) {
            av_init_packet(&pkt);
            pkt.data = NULL;    // packet data will be allocated by the encoder
            pkt.size = 0;
    
            fflush(stdout);
            /* prepare a dummy image */
            /* Y */
            for(y=0;y<c->height;y++) {
                for(x=0;x<c->width;x++) {
                    frame->data[0][y * frame->linesize[0] + x] = x + y + i * 3;
                }
            }
    
            /* Cb and Cr */
            for(y=0;y<c->height/2;y++) {
                for(x=0;x<c->width/2;x++) {
                    frame->data[1][y * frame->linesize[1] + x] = 128 + y + i * 2;
                    frame->data[2][y * frame->linesize[2] + x] = 64 + x + i * 5;
                }
            }
    
            frame->pts = i;
    
            /* encode the image */
            ret = avcodec_encode_video2(c, &pkt, frame, &got_output);
            if (ret < 0) {
                __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Error encoding frame");
                exit(1);
            }
    
            if (got_output) {
                __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "encode the image Write frame pktsize  %d", pkt.size);
                fwrite(pkt.data, 1, pkt.size, f);
                av_free_packet(&pkt);
            }
        }
    
        /* get the delayed frames */
        for (got_output = 1; got_output; i++) {
            fflush(stdout);
    
            ret = avcodec_encode_video2(c, &pkt, NULL, &got_output);
            if (ret < 0) {
                __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Error encoding frame");
                fprintf(stderr, "Error encoding frame\n");
                exit(1);
            }
    
            if (got_output) {
                __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "get the delayed frames Write frame pktsize  %d", pkt.size);
                fwrite(pkt.data, 1, pkt.size, f);
                av_free_packet(&pkt);
            }
        }
    
        /* add sequence end code to have a real mpeg file */
        fwrite(endcode, 1, sizeof(endcode), f);
        fclose(f);
    
        avcodec_close(c);
        av_free(c);
        av_freep(&frame->data[0]);
        avcodec_free_frame(&frame);
    }
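On question 1: pulling frames out of an existing .mp4 is a decoding job, not an encoding one. libavformat demuxes the file and libavcodec decodes the video stream into raw YUV frames. Below is a minimal sketch using the same generation of the FFmpeg API as the code above (stream->codec, avcodec_decode_video2); the function name extract_frames is made up for illustration, and error handling and cleanup are simplified.

    #include <libavformat/avformat.h>
    #include <libavcodec/avcodec.h>
    
    /* Open a file, find the first video stream and decode its frames. */
    int extract_frames(const char *filename)
    {
        AVFormatContext *fmt = NULL;
        AVCodecContext *dec_ctx;
        AVCodec *dec;
        AVFrame *frame;
        AVPacket pkt;
        int i, ret, got_frame, video_stream = -1;
    
        av_register_all();
    
        if (avformat_open_input(&fmt, filename, NULL, NULL) < 0)
            return -1;                              /* could not open file */
        if (avformat_find_stream_info(fmt, NULL) < 0)
            return -1;                              /* no stream info */
    
        /* pick the first video stream */
        for (i = 0; i < (int)fmt->nb_streams; i++) {
            if (fmt->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
                video_stream = i;
                break;
            }
        }
        if (video_stream < 0)
            return -1;
    
        /* open a decoder for that stream (typically H.264 for a camera .mp4) */
        dec_ctx = fmt->streams[video_stream]->codec;
        dec = avcodec_find_decoder(dec_ctx->codec_id);
        if (!dec || avcodec_open2(dec_ctx, dec, NULL) < 0)
            return -1;
    
        frame = avcodec_alloc_frame();
    
        /* read packets and decode them into raw frames */
        while (av_read_frame(fmt, &pkt) >= 0) {
            if (pkt.stream_index == video_stream) {
                ret = avcodec_decode_video2(dec_ctx, frame, &got_frame, &pkt);
                if (ret >= 0 && got_frame) {
                    /* frame->data[0..2] now hold the Y/Cb/Cr planes;
                     * convert or save them here (e.g. with sws_scale()) */
                }
            }
            av_free_packet(&pkt);
        }
    
        avcodec_free_frame(&frame);
        avcodec_close(dec_ctx);
        avformat_close_input(&fmt);
        return 0;
    }

On question 2: video_encode_example() above fwrite()s the raw encoded packets (plus an MPEG sequence end code) straight to a file, so the output is an elementary stream rather than an MP4 container, and a player expecting .mp4 will usually refuse to open it; the packets would need to be muxed into a container with libavformat to get a playable .mp4.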
    

0 answers:

No answers