FFmpeg C++: Seeking to frame 0

Date: 2015-02-19 15:57:51

Tags: c++ video ffmpeg

I have successfully used:

avformat_seek_file(avFormatContext_, streamIndex_, 0, frame, frame, AVSEEK_FLAG_FRAME)

This, together with the sample code at the bottom, lets me seek to a specific I-frame in my video and then read frames from there until I reach the one I want.
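
For context, the declaration of avformat_seek_file (from libavformat/avformat.h) and what the arguments in the call above mean:

    int avformat_seek_file(AVFormatContext *s, int stream_index,
                           int64_t min_ts, int64_t ts, int64_t max_ts, int flags);

    // The demuxer seeks to a point (normally a keyframe) as close as possible to ts
    // while staying inside [min_ts, max_ts]. With AVSEEK_FLAG_FRAME all three values
    // are interpreted as frame numbers rather than timestamps in the stream's
    // time_base (note: not every demuxer supports frame-based seeking).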

The problem is that the video files I use have forward and backward interpolation (B-frames), so the first keyframe is not at frame 0 but at something like frame 8.

What I am looking for is a way to seek to the frames that exist before the first keyframe in my video file. Any help would be greatly appreciated.
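
For illustration, here is a minimal sketch of what such a seek could look like, written as a hypothetical helper on the same class as the code below (seekToStreamStart is not part of the original code; it only reuses the pFormatCtx, videoStream, pCodecCtx and LastFrameOk members):

    bool QVideoDecoder::seekToStreamStart()
    {
       // Hypothetical sketch: position the demuxer at the keyframe at or before the
       // stream's start time, then decode forward from there.
       if(!ok)
          return false;

       int64_t start = pFormatCtx->streams[videoStream]->start_time;
       if(start == AV_NOPTS_VALUE)
          start = 0;                       // Some containers don't report a start_time

       // AVSEEK_FLAG_BACKWARD selects the keyframe at or before 'start'
       if(ffmpeg::av_seek_frame(pFormatCtx, videoStream, start, AVSEEK_FLAG_BACKWARD) < 0)
          return false;

       avcodec_flush_buffers(pCodecCtx);
       LastFrameOk = false;

       // From here, decode packets forward (as decodeSeekFrame below does); the
       // decoder outputs pictures in presentation order, so the leading B-frames
       // should come out before the first keyframe's picture.
       return true;
    }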

seekFrame:

    bool QVideoDecoder::seekFrame(int64_t frame)
    {

       if(!ok)
          return false;

       //printf("**** seekFrame to %d. LLT: %d. LT: %d. LLF: %d. LF: %d. LastFrameOk: %d\n",(int)frame,LastLastFrameTime,LastFrameTime,LastLastFrameNumber,LastFrameNumber,(int)LastFrameOk);

       // Seek if:
       // - we don't know where we are (Ok=false)
       // - we know where we are but:
       //    - the desired frame is after the last decoded frame (this could be optimized: if the distance is small, calling decodeSeekFrame may be faster than seeking from the last key frame)
       //    - the desired frame is smaller or equal than the previous to the last decoded frame. Equal because if frame==LastLastFrameNumber we don't want the LastFrame, but the one before->we need to seek there
       if( (LastFrameOk==false) || ((LastFrameOk==true) && (frame<=LastLastFrameNumber || frame>LastFrameNumber) ) )
       {
          //printf("\t avformat_seek_file\n");
          if(ffmpeg::avformat_seek_file(pFormatCtx,videoStream,0,frame,frame,AVSEEK_FLAG_FRAME)<0)
             return false;

          avcodec_flush_buffers(pCodecCtx);

          DesiredFrameNumber = frame;
          LastFrameOk=false;
       }
       //printf("\t decodeSeekFrame\n");

       return decodeSeekFrame(frame);
    }

decodeSeekFrame:

    bool QVideoDecoder::decodeSeekFrame(int after)
    {
       if(!ok)
          return false;

       //printf("decodeSeekFrame. after: %d. LLT: %d. LT: %d. LLF: %d. LF: %d. LastFrameOk: %d.\n",after,LastLastFrameTime,LastFrameTime,LastLastFrameNumber,LastFrameNumber,(int)LastFrameOk);



       // If the last decoded frame satisfies the time condition we return it
       //if( after!=-1 && ( LastDataInvalid==false && after>=LastLastFrameTime && after <= LastFrameTime))
       if( after!=-1 && ( LastFrameOk==true && after>=LastLastFrameNumber && after <= LastFrameNumber))
       {
          // This is the frame we want to return

          // Compute desired frame time
          ffmpeg::AVRational millisecondbase = {1, 1000};
          DesiredFrameTime = ffmpeg::av_rescale_q(after,pFormatCtx->streams[videoStream]->time_base,millisecondbase);

          //printf("Returning already available frame %d @ %d. DesiredFrameTime: %d\n",LastFrameNumber,LastFrameTime,DesiredFrameTime);

          return true;
       }   

       // The last decoded frame wasn't ok; either we need any new frame (after=-1), or a specific new frame with time>after

       bool done=false;
       while(!done)
       {
          // Read a frame
          if(av_read_frame(pFormatCtx, &packet)<0)
             return false;                             // Frame read failed (e.g. end of stream)

          //printf("Packet of stream %d, size %d\n",packet.stream_index,packet.size);

          if(packet.stream_index==videoStream)
          {
             // Is this a packet from the video stream -> decode video frame

             int frameFinished = 0;
             if(avcodec_decode_video2(pCodecCtx,pFrame,&frameFinished,&packet)<0)
             {
                av_free_packet(&packet);   // Free the packet before bailing out
                return false;              // Decoding error
             }

             //printf("used %d out of %d bytes\n",len,packet.size);

             //printf("Frame type: ");
             //if(pFrame->pict_type == FF_B_TYPE)
             //   printf("B\n");
             //else if (pFrame->pict_type == FF_I_TYPE)
             //   printf("I\n");
             //else
             //   printf("P\n");


             /*printf("codecctx time base: num: %d den: %d\n",pCodecCtx->time_base.num,pCodecCtx->time_base.den);
             printf("formatctx time base: num: %d den: %d\n",pFormatCtx->streams[videoStream]->time_base.num,pFormatCtx->streams[videoStream]->time_base.den);
             printf("pts: %ld\n",pts);
             printf("dts: %ld\n",dts);*/




             // Did we get a video frame?
             if(frameFinished)
             {
                ffmpeg::AVRational millisecondbase = {1, 1000};
                int64_t f = packet.dts;    // dts and the rescaled time are int64_t; avoid truncating to int
                int64_t t = ffmpeg::av_rescale_q(packet.dts,pFormatCtx->streams[videoStream]->time_base,millisecondbase);
                if(LastFrameOk==false)
                {
                   LastFrameOk=true;
                   LastLastFrameTime=LastFrameTime=t;
                   LastLastFrameNumber=LastFrameNumber=f;
                }
                else
                {
                   // If we decoded 2 frames in a row, the last times are okay
                   LastLastFrameTime = LastFrameTime;
                   LastLastFrameNumber = LastFrameNumber;
                   LastFrameTime=t;
                   LastFrameNumber=f;
                }
                //printf("Frame %d @ %d. LastLastT: %d. LastLastF: %d. LastFrameOk: %d\n",LastFrameNumber,LastFrameTime,LastLastFrameTime,LastLastFrameNumber,(int)LastFrameOk);

                // Is this frame the desired frame?
                if(after==-1 || LastFrameNumber>=after)
                {
                   // It's the desired frame

                   // Convert the image format (init the context the first time)
                   int w = pCodecCtx->width;
                   int h = pCodecCtx->height;
                   img_convert_ctx = ffmpeg::sws_getCachedContext(img_convert_ctx,w, h, pCodecCtx->pix_fmt, w, h, ffmpeg::PIX_FMT_RGB24, SWS_BICUBIC, NULL, NULL, NULL);

                   if(img_convert_ctx == NULL)
                   {
                      printf("Cannot initialize the conversion context!\n");
                      return false;
                   }
                   ffmpeg::sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);

                   // Convert the frame to QImage
                   LastFrame=QImage(w,h,QImage::Format_RGB888);

                   for(int y=0;y<h;y++)
                      memcpy(LastFrame.scanLine(y),pFrameRGB->data[0]+y*pFrameRGB->linesize[0],w*3);

                   // Set the time
                   DesiredFrameTime = ffmpeg::av_rescale_q(after,pFormatCtx->streams[videoStream]->time_base,millisecondbase);
                   LastFrameOk=true;


                   done = true;

                } // frame of interest
             }  // frameFinished
          }  // stream_index==videoStream
          av_free_packet(&packet);      // Free the packet that was allocated by av_read_frame
       }
       //printf("Returning new frame %d @ %d. LastLastT: %d. LastLastF: %d. LastFrameOk: %d\n",LastFrameNumber,LastFrameTime,LastLastFrameTime,LastLastFrameNumber,(int)LastFrameOk);
       //printf("\n");
       return done;   // done indicates whether or not we found a frame
    }

0 Answers:

No answers yet.