I am developing a GStreamer plugin similar to v4l2src: it captures live video from our camera and displays it through GStreamer.
My plugin can already generate a YUV file successfully.
Now I need to copy the camera image buffer into a GstBuffer and push it to the next element. For this I used the following approach:
GstBuffer *outbuf1;
outbuf1 = gst_buffer_new_allocate (NULL, *buflen, NULL);
ret_size = gst_buffer_fill (outbuf1, 0, pImgBuf->pBuffer[0], pImgBuf->iFilledLen[0]);
ret = gst_pad_push (Video->gst_filter->srcpad, outbuf1);
Here gst_pad_push() returns the error code GST_FLOW_FLUSHING.
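As far as I understand, GST_FLOW_FLUSHING usually means the pad is still inactive or the pipeline is flushing, for example because the mandatory stream-start / caps / segment events were never sent before the first buffer. A minimal sketch of that start-up sequence (the stream id and the caps string below are placeholders, not my real values):

gst_pad_set_active (srcpad, TRUE);
gst_pad_push_event (srcpad, gst_event_new_stream_start ("camera-stream-0"));   /* placeholder id */

GstCaps *caps = gst_caps_from_string ("video/x-raw,format=I420,width=1920,height=1080,framerate=30/1");
gst_pad_push_event (srcpad, gst_event_new_caps (caps));
gst_caps_unref (caps);

GstSegment segment;
gst_segment_init (&segment, GST_FORMAT_TIME);
gst_pad_push_event (srcpad, gst_event_new_segment (&segment));

/* only after these events is gst_pad_push () expected to return GST_FLOW_OK */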
To debug this further, I also generated a YUV file from the data in the image buffer before copying the image buffer into the GstBuffer:
fwrite( pImgBuf->pBuffer[0], pImgBuf->iFilledLen[0], 1, Video->File );
This YUV file is generated correctly and plays in vooya.
But when I try to generate a YUV file from the GstBuffer, the file is not generated correctly: it does not play in vooya, which reports the error message "no good data".
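For reference, my understanding is that the buffer contents have to be read back through gst_buffer_map() for such a dump; a simplified sketch (assuming outbuf1 already holds the copied plane and Video->File is open):

GstMapInfo map;
if (gst_buffer_map (outbuf1, &map, GST_MAP_READ))
{
    /* write the mapped bytes, not the GstBuffer pointer itself */
    fwrite (map.data, map.size, 1, Video->File);
    gst_buffer_unmap (outbuf1, &map);
}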
Here is my code flow in more detail:
static void gst_imx219src_init (Gstimx219src * filter)
{
    printf("================================Inside gst_imx219src_init============== \n");

    filter->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
    gst_pad_set_event_function (filter->sinkpad,
        GST_DEBUG_FUNCPTR(gst_imx219src_sink_event));
    filter->srcpad = gst_pad_new_from_static_template (&src_factory, "src");

    /* ----------------- code for camera init and recording ----------------- */
    ret = start_processing_on_event_dequeue( hHandle, filter );
}

start_processing_on_event_dequeue() starts the recording process in the camera. Internally it calls the function below:
void Video_DeliverInput( void *hVideo, CAM_ImageBuffer *pImgBuf, CAM_StreamID strmId )
{
    VideoChain *Video = (VideoChain*)hVideo;
    CAM_Error error = CAM_ERROR_NONE;
    GstBuffer *outbuf1, *outbuf2, *outbuf3;
    GstFlowReturn ret;

    buflen = pImgBuf->iFilledLen;      /* filled length reported by the camera (per plane) */
    int retCode = *buflen;

    /* one GstBuffer per plane, zero-initialised */
    outbuf1 = gst_buffer_new_allocate( NULL, *buflen, NULL );
    outbuf2 = gst_buffer_new_allocate( NULL, *buflen, NULL );
    outbuf3 = gst_buffer_new_allocate( NULL, *buflen, NULL );
    gst_buffer_memset( outbuf1, 0, '\0', *buflen );
    gst_buffer_memset( outbuf2, 0, '\0', *buflen );
    gst_buffer_memset( outbuf3, 0, '\0', *buflen );
    if ( Video->iEncoderType == 0 ) /* yuv */
    {
        if ( Video->File != NULL )
        {
            /* dump the three planes straight from the camera buffer; this file plays fine in vooya */
            fwrite( pImgBuf->pBuffer[0], pImgBuf->iFilledLen[0], 1, Video->File );
            fwrite( pImgBuf->pBuffer[1], pImgBuf->iFilledLen[1], 1, Video->File );
            fwrite( pImgBuf->pBuffer[2], pImgBuf->iFilledLen[2], 1, Video->File );
        }
    }
    if ( outbuf1 && outbuf2 && outbuf3 )
    {
        /* copy each camera plane into its own GstBuffer */
        ret_size = gst_buffer_fill( outbuf1, 0, pImgBuf->pBuffer[0], pImgBuf->iFilledLen[0] );
        ret_size = gst_buffer_fill( outbuf2, 0, pImgBuf->pBuffer[1], pImgBuf->iFilledLen[1] );
        ret_size = gst_buffer_fill( outbuf3, 0, pImgBuf->pBuffer[2], pImgBuf->iFilledLen[2] );
    }
    else
    {
        printf("===========failed to create buffer \n");
    }
    gst_pad_set_active( Video->gst_filter->srcpad, TRUE );
    ret = gst_pad_is_active( Video->gst_filter->srcpad );

    /* only the first plane (outbuf1) is pushed downstream here */
    ret = gst_pad_push( Video->gst_filter->srcpad, outbuf1 );
#if defined( PLATFORM_PROCESSOR_ULC )
    if ( strmId == CAM_STREAM1 )
        error = CAM_PortEnqueueBuffer( Video->hCameraEngine, CAM_PORT_PREVIEW, pImgBuf );
    else
        error = CAM_PortEnqueueBuffer( Video->hCameraEngine, CAM_PORT_STILL, pImgBuf );
    ASSERT_CAM_ERROR( error );
#else
    /* hand the image buffer back to the camera engine */
    error = CAM_PortEnqueueBuffer( Video->hCameraEngine, CAM_PORT_VIDEO, pImgBuf );
    ASSERT_CAM_ERROR( error );
#endif
    return;
}
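Note that only outbuf1 (the first plane) is pushed above. A simplified sketch of packing all three planes into a single GstBuffer before the push, which is how I understand a downstream raw-video element expects planar data unless video meta is attached (variable names reused from the function above, planes assumed tightly packed):

gsize total = pImgBuf->iFilledLen[0] + pImgBuf->iFilledLen[1] + pImgBuf->iFilledLen[2];
GstBuffer *outbuf = gst_buffer_new_allocate (NULL, total, NULL);

gsize offset = 0;
for (int i = 0; i < 3; i++)
{
    /* append each camera plane at the running offset */
    gst_buffer_fill (outbuf, offset, pImgBuf->pBuffer[i], pImgBuf->iFilledLen[i]);
    offset += pImgBuf->iFilledLen[i];
}

ret = gst_pad_push (Video->gst_filter->srcpad, outbuf);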
Could someone please help me with this?
Thanks for your help.