Media Foundation frames in byte format at runtime

Date: 2015-02-11 12:55:44

Tags: ms-media-foundation

I'm using Media Foundation to capture live webcam video. Is it possible to capture the frames in byte-stream format at runtime and write them out as a bit stream to a text file after each time period?

I'm not sure whether I can get the stream in raw byte format (without a container), or whether I can do this at runtime at all.

1 Answer:

Answer 0 (score: 3)

It's not entirely clear what you're asking. If you want to capture raw frames from a webcam and save them to a file, then the answer is yes, it can be done. The Media Foundation SDK MFCaptureToFile sample does exactly that, but because it uses the SinkWriter it has to be given a container file type, such as mp4, when it is created.
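For illustration, here is a minimal native C++ sketch of that SinkWriter route; the file name capture.mp4 and the H.264 / 640x480 / 30 fps output type are assumptions chosen for the example rather than values taken from the sample, and cleanup on the error paths is omitted:

// Sketch only: the container (mp4) is fixed by the output URL at creation time.
#include <windows.h>
#include <mfapi.h>
#include <mfidl.h>
#include <mfreadwrite.h>
#pragma comment(lib, "mfplat.lib")
#pragma comment(lib, "mfreadwrite.lib")
#pragma comment(lib, "mfuuid.lib")

HRESULT CreateMp4Writer(IMFSinkWriter **ppWriter, DWORD *pStreamIndex)
{
    HRESULT hr = MFStartup(MF_VERSION);
    if (FAILED(hr)) return hr;

    // The .mp4 extension of the URL determines the container type.
    hr = MFCreateSinkWriterFromURL(L"capture.mp4", NULL, NULL, ppWriter);
    if (FAILED(hr)) return hr;

    // Describe the encoded output stream (illustrative values).
    IMFMediaType *pOutType = NULL;
    hr = MFCreateMediaType(&pOutType);
    if (FAILED(hr)) return hr;
    pOutType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
    pOutType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
    pOutType->SetUINT32(MF_MT_AVG_BITRATE, 800000);
    pOutType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
    MFSetAttributeSize(pOutType, MF_MT_FRAME_SIZE, 640, 480);
    MFSetAttributeRatio(pOutType, MF_MT_FRAME_RATE, 30, 1);
    MFSetAttributeRatio(pOutType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);

    hr = (*ppWriter)->AddStream(pOutType, pStreamIndex);
    pOutType->Release();
    return hr;
}

Each captured sample would then be handed to the writer with IMFSinkWriter::WriteSample; the container is baked in at creation, which is why the raw, container-less case needs the approach below instead.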

If you really do want to get hold of the raw frames one by one, then you need to do away with the SinkWriter (or write a custom one). Below is a code snippet that shows how to get samples from an IMFSourceReader and convert them to a byte array (along with a few other things). You could write that byte array out to a text file, but it won't be much use unless you do something like prepend a bitmap header. The IMFSourceReader and IMFMediaTypes all need to be set up correctly before ReadSample can be called (a setup sketch follows the snippet), but hopefully this gives you a rough idea of where to look next.

HRESULT MFVideoSampler::GetSample(/* out */ array<Byte> ^% buffer)
{
    if (_videoReader == NULL) {
        return -1;
    }
    else {
        IMFSample *videoSample = NULL;
        DWORD streamIndex, flags;
        LONGLONG llVideoTimeStamp;

        // Initial read results in a null pSample??
        CHECK_HR(_videoReader->ReadSample(
            //MF_SOURCE_READER_ANY_STREAM,    // Stream index.
            MF_SOURCE_READER_FIRST_VIDEO_STREAM,
            0,                              // Flags.
            &streamIndex,                   // Receives the actual stream index. 
            &flags,                         // Receives status flags.
            &llVideoTimeStamp,                   // Receives the time stamp.
            &videoSample                        // Receives the sample or NULL.
            ), L"Error reading video sample.");

        if (flags & MF_SOURCE_READERF_ENDOFSTREAM)
        {
            wprintf(L"\tEnd of stream\n");
        }
        if (flags & MF_SOURCE_READERF_NEWSTREAM)
        {
            wprintf(L"\tNew stream\n");
        }
        if (flags & MF_SOURCE_READERF_NATIVEMEDIATYPECHANGED)
        {
            wprintf(L"\tNative type changed\n");
        }
        if (flags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED)
        {
            wprintf(L"\tCurrent type changed\n");

            IMFMediaType *videoType = NULL;
            CHECK_HR(_videoReader->GetCurrentMediaType(
                (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
                &videoType), L"Error retrieving current media type from first video stream.");

            Console::WriteLine(GetMediaTypeDescription(videoType));

            // Get the frame dimensions and stride
            UINT32 nWidth, nHeight;
            MFGetAttributeSize(videoType, MF_MT_FRAME_SIZE, &nWidth, &nHeight);
            _width = nWidth;
            _height = nHeight;

            //LONG lFrameStride;
            //videoType->GetUINT32(MF_MT_DEFAULT_STRIDE, (UINT32*)&lFrameStride);

            videoType->Release();
        }
        if (flags & MF_SOURCE_READERF_STREAMTICK)
        {
            wprintf(L"\tStream tick\n");
        }

        if (!videoSample)
        {
            // A NULL sample can legitimately be returned (e.g. a stream gap); bail out without touching the buffer.
            printf("Failed to get video sample from MF.\n");
            return S_FALSE;
        }
        else
        {
            DWORD nCurrBufferCount = 0;
            CHECK_HR(videoSample->GetBufferCount(&nCurrBufferCount), L"Failed to get the buffer count from the video sample.\n");

            IMFMediaBuffer * pMediaBuffer;
            CHECK_HR(videoSample->ConvertToContiguousBuffer(&pMediaBuffer), L"Failed to extract the video sample into a raw buffer.\n");

            DWORD nCurrLen = 0;
            CHECK_HR(pMediaBuffer->GetCurrentLength(&nCurrLen), L"Failed to get the length of the raw buffer holding the video sample.\n");

            byte *imgBuff = NULL;
            DWORD buffCurrLen = 0;
            DWORD buffMaxLen = 0;
            CHECK_HR(pMediaBuffer->Lock(&imgBuff, &buffMaxLen, &buffCurrLen), L"Failed to lock the raw buffer holding the video sample.\n");

            if (Stride != -1 && Stride < 0) {
                // Bitmap needs to be flipped.
                int bmpSize = buffCurrLen; // ToDo: Don't assume RGB/BGR 24.
                int absStride = Stride * -1;
                byte *flipBuf = new byte[bmpSize];

                for (int row = 0; row < _height; row++) {
                    for (int col = 0; col < absStride; col += 3) {
                        flipBuf[row * absStride + col] = imgBuff[((_height - row - 1) * absStride) + col];
                        flipBuf[row * absStride + col + 1] = imgBuff[((_height - row - 1) * absStride) + col + 1];
                        flipBuf[row * absStride + col + 2] = imgBuff[((_height - row - 1) * absStride) + col + 2];
                    }
                }

                buffer = gcnew array<Byte>(buffCurrLen);
                Marshal::Copy((IntPtr)flipBuf, buffer, 0, buffCurrLen);

                delete[] flipBuf; // allocated with new[]
            }
            else {
                buffer = gcnew array<Byte>(buffCurrLen);
                Marshal::Copy((IntPtr)imgBuff, buffer, 0, buffCurrLen);
            }

            pMediaBuffer->Unlock();
            pMediaBuffer->Release();

            videoSample->Release();

            return S_OK;
        }
    }
}
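
Since the reader and media type setup is not shown above, here is a rough native C++ sketch (an assumption about the surrounding code, not part of the original class) of one way to create the IMFSourceReader from the first webcam and request uncompressed RGB24 frames before ReadSample is called; error handling and cleanup are deliberately thin:

#include <windows.h>
#include <mfapi.h>
#include <mfidl.h>
#include <mfreadwrite.h>
#pragma comment(lib, "mf.lib")
#pragma comment(lib, "mfplat.lib")
#pragma comment(lib, "mfreadwrite.lib")
#pragma comment(lib, "mfuuid.lib")

HRESULT CreateWebcamReader(IMFSourceReader **ppReader)
{
    HRESULT hr = MFStartup(MF_VERSION);
    if (FAILED(hr)) return hr;

    // Enumerate video capture devices and activate the first one.
    IMFAttributes *pConfig = NULL;
    hr = MFCreateAttributes(&pConfig, 1);
    if (FAILED(hr)) return hr;
    pConfig->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
                     MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);

    IMFActivate **ppDevices = NULL;
    UINT32 deviceCount = 0;
    hr = MFEnumDeviceSources(pConfig, &ppDevices, &deviceCount);
    if (FAILED(hr) || deviceCount == 0) return E_FAIL;

    IMFMediaSource *pSource = NULL;
    hr = ppDevices[0]->ActivateObject(IID_PPV_ARGS(&pSource));
    if (FAILED(hr)) return hr;

    hr = MFCreateSourceReaderFromMediaSource(pSource, NULL, ppReader);
    if (FAILED(hr)) return hr;

    // Ask the reader for uncompressed RGB24 so the locked buffer is a raw bitmap.
    IMFMediaType *pType = NULL;
    hr = MFCreateMediaType(&pType);
    if (FAILED(hr)) return hr;
    pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
    pType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24);
    hr = (*ppReader)->SetCurrentMediaType(
        (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, NULL, pType);
    pType->Release();
    return hr;
}

From the managed side, each returned array<Byte> could then be dumped per frame with something like System::IO::File::WriteAllBytes, but as noted above the raw pixels are only meaningful if you also keep track of the width, height, stride and pixel format (or prepend a bitmap header).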