Generating video using ffmpeg

Date: 2010-11-22 17:31:25

Tags: video ffmpeg using

I am trying to generate a video from a set of images using the ffmpeg library. With the code below I can produce the video in the simulator, but when I run it on the device the resulting video comes out with a strange green tint. I followed the "Encoding images to video with ffmpeg" post. Can anyone help me? A working code snippet would be much appreciated.

// Here is the code:

- (void)createVideoFromImages 
{
  NSString *fileName2 = [Utilities documentsPath:[NSString stringWithFormat:@"test.mov"]];
  NSLog(@"filename: %@",fileName2);

  AVCodec *codec;

  // Register all formats and codecs
  av_register_all();


  AVCodecContext *c= NULL; 
  int i, out_size, size, outbuf_size;
  FILE *f;
  AVFrame *picture;
  uint8_t *outbuf;

  printf("Video encoding\n");

  /* find the mpeg video encoder */
  codec = avcodec_find_encoder(CODEC_ID_MPEG2VIDEO);
  if (!codec) 
  {
    fprintf(stderr, "codec not found\n");
    exit(1);
  }

  c= avcodec_alloc_context();
  picture= avcodec_alloc_frame();
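  /* 'picture' will wrap the packed RGBA source bytes; a second frame,
     'outpic' (allocated further down), holds the YUV420P data that is
     actually handed to the encoder */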

  /* put sample parameters */
  c->bit_rate = 400000;
  /* resolution must be a multiple of two */
  c->width = 256;
  c->height = 256;

  /* frames per second */
  c->time_base= (AVRational){1,25};
  c->gop_size = 10; /* emit one intra frame every ten frames */
  c->max_b_frames=1;
  c->pix_fmt = PIX_FMT_YUV420P;
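  /* the MPEG-2 encoder consumes planar YUV 4:2:0 frames, so the RGBA
     images loaded below must be converted with swscale before encoding */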

  /* open it */
  if (avcodec_open(c, codec) < 0) {
    fprintf(stderr, "could not open codec\n");
    exit(1);
  }

  const char* filename_cstr = [fileName2 cStringUsingEncoding:NSUTF8StringEncoding];
  f = fopen(filename_cstr, "wb");
  if (!f) {
    fprintf(stderr, "could not open %s\n", fileName2);
    exit(1);
  }

  /* alloc image and output buffer */
  outbuf_size = 100000;
  outbuf = malloc(outbuf_size);
  size = c->width * c->height;
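  /* outbuf receives the encoded bitstream of each frame; note that
     'size' is computed here but never used below */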

  #pragma mark -
  AVFrame* outpic = avcodec_alloc_frame();
  int nbytes = avpicture_get_size(PIX_FMT_YUV420P, c->width, c->height);

  NSLog(@"bytes: %d",nbytes);

  //create buffer for the output image
  uint8_t* outbuffer = (uint8_t*)av_malloc(nbytes);
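  /* outbuffer is the raw storage behind outpic; avpicture_fill() inside
     the loop points outpic's three YUV planes into it */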

  for(i=100;i<104;i++) 
  {
    fflush(stdout);

    int numBytes = avpicture_get_size(PIX_FMT_YUV420P, c->width, c->height);
    NSLog(@"numBytes: %d",numBytes);
    uint8_t *buffer = (uint8_t *)av_malloc(numBytes*sizeof(uint8_t));

    UIImage *image;

    image = [UIImage imageWithContentsOfFile:[Utilities bundlePath:[NSString stringWithFormat:@"%d.png",i]]];


    /*
    if(i>=98)//for video images
    {
       NSLog(@"i: %d",i);
       image = [UIImage imageWithContentsOfFile:[Utilities documentsPath:[NSString stringWithFormat:@"image0098.png"]]]; ///@"image0098.png"]];
       //[NSString stringWithFormat:@"%d.png", i]];
    }
    else //for custom image
    {
       image = [UIImage imageWithContentsOfFile:[Utilities bundlePath:[NSString stringWithFormat:@"image%04d.png", i]]];
       //[UIImage imageNamed:[NSString stringWithFormat:@"%d.png", i]];//@"image%04d.png",i]];
    }*/

    CGImageRef newCgImage = [image CGImage];

    NSLog(@"No. of Bits per component: %d",CGImageGetBitsPerComponent([image CGImage]));
    NSLog(@"No. of Bits per pixel: %d",CGImageGetBitsPerPixel([image CGImage]));
    NSLog(@"No. of Bytes per row: %d",CGImageGetBytesPerRow([image CGImage]));


    CGDataProviderRef dataProvider = CGImageGetDataProvider(newCgImage);
    CFDataRef bitmapData = CGDataProviderCopyData(dataProvider);
    buffer = (uint8_t *)CFDataGetBytePtr(bitmapData);   
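    /* buffer now points into bitmapData, i.e. the CGImage's own pixel
       storage, so the av_malloc'ed buffer above is no longer referenced
       (it leaks); the row stride of this data is CGImageGetBytesPerRow(),
       which is not guaranteed to equal width * 4 */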

    struct SwsContext* fooContext;

    avpicture_fill((AVPicture*)picture, buffer, PIX_FMT_RGBA, c->width, c->height);

    avpicture_fill((AVPicture*)outpic, outbuffer, PIX_FMT_YUV420P, c->width, c->height);

    fooContext= sws_getContext(c->width, c->height, 
       PIX_FMT_RGBA, 
        c->width, c->height, 
        PIX_FMT_YUV420P, 
        SWS_FAST_BILINEAR , NULL, NULL, NULL);
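    /* fooContext converts one packed RGBA frame to planar YUV420P at the
       same 256x256 resolution */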

    //perform the conversion

    NSLog(@"linesize: %d", picture->linesize[0]);

    sws_scale(fooContext, picture->data, picture->linesize, 0, c->height, outpic->data, outpic->linesize);

    // Here is where I try to convert to YUV

    /* encode the image */
    out_size = avcodec_encode_video(c, outbuf, outbuf_size, outpic);
    printf("encoding frame %d (size=%d)\n", i, out_size);
    fwrite(outbuf, 1, out_size, f);

    NSLog(@"%d", sizeof(f));

    free(buffer);
    buffer = NULL;
  }

  /* get the delayed frames */
  for( ; out_size; i++) 
  {
    fflush(stdout);
    out_size = avcodec_encode_video(c, outbuf, outbuf_size, NULL);
    printf("write frame %3d (size=%5d)\n", i, out_size);
    fwrite(outbuf, 1, out_size, f);      
  }

  /* add sequence end code to have a real mpeg file */
  outbuf[0] = 0x00;
  outbuf[1] = 0x00;
  outbuf[2] = 0x01;
  outbuf[3] = 0xb7;
  fwrite(outbuf, 1, 4, f);

  fclose(f);
  free(outbuf);

  avcodec_close(c);
  av_free(c);
  av_free(picture);
  printf("\n");

}
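
For reference, below is a minimal sketch of the image-to-YUV conversion step that feeds sws_scale the CGImage's actual bytes-per-row instead of assuming a tightly packed width*4 layout. The helper name fillYUVFrameFromCGImage is made up for illustration, it assumes the PNGs decode to 32-bit RGBA, and it uses the same pre-0.7 ffmpeg API (PIX_FMT_*, avpicture-style frames) as the code above.

// Hypothetical helper (not from the original post): convert one CGImage
// into an already-allocated YUV420P AVFrame, using the image's real
// bytes-per-row as the source stride. Assumes 32-bit RGBA pixel data.
static int fillYUVFrameFromCGImage(CGImageRef cgImage, AVFrame *yuvFrame,
                                   int dstWidth, int dstHeight)
{
    CFDataRef pixelData = CGDataProviderCopyData(CGImageGetDataProvider(cgImage));
    if (!pixelData)
        return -1;

    const uint8_t *srcPlanes[4]  = { CFDataGetBytePtr(pixelData), NULL, NULL, NULL };
    int            srcStrides[4] = { (int)CGImageGetBytesPerRow(cgImage), 0, 0, 0 };

    struct SwsContext *sws = sws_getContext((int)CGImageGetWidth(cgImage),
                                            (int)CGImageGetHeight(cgImage),
                                            PIX_FMT_RGBA,
                                            dstWidth, dstHeight,
                                            PIX_FMT_YUV420P,
                                            SWS_FAST_BILINEAR, NULL, NULL, NULL);
    if (!sws) {
        CFRelease(pixelData);
        return -1;
    }

    sws_scale(sws, srcPlanes, srcStrides, 0, (int)CGImageGetHeight(cgImage),
              yuvFrame->data, yuvFrame->linesize);

    sws_freeContext(sws);
    CFRelease(pixelData);   // releases the copied image bytes
    return 0;
}

With a helper like this, the loop body would reduce to loading the UIImage, calling the helper with outpic, and encoding outpic as before.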

0 Answers:

No answers