在我的Unity游戏中,将每个帧渲染为一个纹理,然后使用FFmpeg将其合并为视频。现在我的问题是我是否这样做正确,因为每次发生异常时avcodec_send_frame都会引发。 我很确定自己做错了什么,或者顺序错了,或者只是缺少了什么。
以下是捕获纹理的代码:
void Update() {
//StartCoroutine(CaptureFrame());
if (rt == null)
{
rect = new Rect(0, 0, captureWidth, captureHeight);
rt = new RenderTexture(captureWidth, captureHeight, 24);
frame = new Texture2D(captureWidth, captureHeight, TextureFormat.RGB24, false);
}
Camera camera = this.GetComponent<Camera>(); // NOTE: added because there was no reference to camera in original script; must add this script to Camera
camera.targetTexture = rt;
camera.Render();
RenderTexture.active = rt;
frame.ReadPixels(rect, 0, 0);
frame.Apply();
camera.targetTexture = null;
RenderTexture.active = null;
byte[] fileData = null;
fileData = frame.GetRawTextureData();
encoding(fileData, fileData.Length);
}
这是用于编码和发送字节数据的代码:
private unsafe void encoding(byte[] bytes, int size)
{
Debug.Log("Encoding...");
AVCodec* codec;
codec = ffmpeg.avcodec_find_encoder(AVCodecID.AV_CODEC_ID_H264);
int ret, got_output = 0;
AVCodecContext* codecContext = null;
codecContext = ffmpeg.avcodec_alloc_context3(codec);
codecContext->bit_rate = 400000;
codecContext->width = captureWidth;
codecContext->height = captureHeight;
//codecContext->time_base.den = 25;
//codecContext->time_base.num = 1;
AVRational timeBase = new AVRational();
timeBase.num = 1;
timeBase.den = 25;
codecContext->time_base = timeBase;
//AVStream* videoAVStream = null;
//videoAVStream->time_base = timeBase;
AVRational frameRate = new AVRational();
frameRate.num = 25;
frameRate.den = 1;
codecContext->framerate = frameRate;
codecContext->gop_size = 10;
codecContext->max_b_frames = 1;
codecContext->pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
AVFrame* inputFrame;
inputFrame = ffmpeg.av_frame_alloc();
inputFrame->format = (int)codecContext->pix_fmt;
inputFrame->width = captureWidth;
inputFrame->height = captureHeight;
inputFrame->linesize[0] = inputFrame->width;
AVPixelFormat dst_pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P, src_pix_fmt = AVPixelFormat.AV_PIX_FMT_RGBA;
int src_w = 1920, src_h = 1080, dst_w = 1920, dst_h = 1080;
SwsContext* sws_ctx;
GCHandle pinned = GCHandle.Alloc(bytes, GCHandleType.Pinned);
IntPtr address = pinned.AddrOfPinnedObject();
sbyte** inputData = (sbyte**)address;
sws_ctx = ffmpeg.sws_getContext(src_w, src_h, src_pix_fmt,
dst_w, dst_h, dst_pix_fmt,
0, null, null, null);
fixed (int* lineSize = new int[1])
{
lineSize[0] = 4 * captureHeight;
// Convert RGBA to YUV420P
ffmpeg.sws_scale(sws_ctx, inputData, lineSize, 0, codecContext->width, inputFrame->extended_data, inputFrame->linesize);
}
inputFrame->pts = counter++;
if (ffmpeg.avcodec_send_frame(codecContext, inputFrame) < 0)
throw new ApplicationException("Error sending a frame for encoding!");
AVPacket pkt;
pkt = new AVPacket();
//pkt.data = inData;
AVPacket* packet = &pkt;
ffmpeg.av_init_packet(packet);
Debug.Log("pkt.size " + pkt.size);
pinned.Free();
AVDictionary* options = null;
ffmpeg.av_dict_set(&options, "pkt_size", "1300", 0);
ffmpeg.av_dict_set(&options, "buffer_size", "65535", 0);
AVIOContext* server = null;
ffmpeg.avio_open2(&server, "udp://192.168.0.1:1111", ffmpeg.AVIO_FLAG_WRITE, null, &options);
Debug.Log("encoded");
ret = ffmpeg.avcodec_encode_video2(codecContext, &pkt, inputFrame, &got_output);
ffmpeg.avio_write(server, pkt.data, pkt.size);
ffmpeg.av_free_packet(&pkt);
pkt.data = null;
pkt.size = 0;
}
每次我开始游戏
if (ffmpeg.avcodec_send_frame(codecContext, inputFrame) < 0)
throw new ApplicationException("Error sending a frame for encoding!");
引发异常。 任何解决此问题的帮助将不胜感激:)