DirectX screen capture and video output

Posted: 2015-11-17 09:57:23

Tags: c++ image opencv video directx

I am working on desktop screen capture and want to output it as a video file. Currently I have code from here that outputs PNG images. I modified the code slightly to output JPEG files instead, and then use OpenCV 3.0.0 to convert them into an AVI video. The reason I need JPEG files as output is that I am running on Windows 8.1, and OpenCV VideoWriter::fourcc('M','J','P','G') is the only option that works for me.

The PNG output is perfect, but the JPEG output is not. The images have vertical lines on them, and generating the JPEG output takes much longer than the PNG output.

I have two options:

  1. Improve the JPEG output so that it is generated faster and the images are clean.

  2. Get around the OpenCV 3.0.0 issue so that it accepts PNG files as input and can output a video file (preferably in AVI/MP4 format). A rough sketch of this approach follows the list.

Either solution would work for me. Please help. Thanks.
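
For option 2, this is a minimal sketch of the kind of OpenCV loop I have in mind (the "cap%d.png" file pattern and the 30 fps rate are placeholders; the frame size is taken from the first image rather than hardcoded):

    #include <opencv2/opencv.hpp>

    // Hedged sketch: read a numbered PNG sequence and write it to an AVI.
    // "cap%d.png" and the 30 fps rate are assumptions, not values from my code.
    int EncodePngSequenceToAvi()
    {
        cv::VideoCapture in_capture("cap%d.png");   // image-sequence reader
        cv::Mat frame;
        if (!in_capture.read(frame) || frame.empty())
            return -1;                              // no input frames found

        // Use the real frame size instead of a hardcoded 1920x1080.
        cv::VideoWriter out_capture("video.avi",
                                    cv::VideoWriter::fourcc('M', 'J', 'P', 'G'),
                                    30, frame.size());
        if (!out_capture.isOpened())
            return -1;

        do {
            out_capture.write(frame);
        } while (in_capture.read(frame) && !frame.empty());

        return 0;
    }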

My code:

    #include "stdafx.h"
    #include <Wincodec.h>             // we use WIC for saving images
    #include <d3d9.h>                 // DirectX 9 header
    #include <opencv\cv.h>
    #include <opencv\cxcore.hpp>
    #include <opencv2\highgui\highgui.hpp>
    #pragma comment(lib, "d3d9.lib")  // link to DirectX 9 library
    
    using namespace cv;
    
    #define WIDEN2(x) L ## x
    #define WIDEN(x) WIDEN2(x)
    #define __WFILE__ WIDEN(__FILE__)
    #define HRCHECK(__expr) {hr=(__expr);if(FAILED(hr)){wprintf(L"FAILURE 0x%08X (%i)\n\tline: %u file: '%s'\n\texpr: '" WIDEN(#__expr) L"'\n",hr, hr, __LINE__,__WFILE__);goto cleanup;}}
    #define RELEASE(__p) {if(__p!=nullptr){__p->Release();__p=nullptr;}}
    
    HRESULT Direct3D9TakeScreenshots(UINT adapter, UINT count);
    HRESULT SavePixelsToFile32bppPBGRA(UINT width, UINT height, UINT stride, LPBYTE pixels, LPWSTR filePath, const GUID &format);
    
    
    int _tmain(int argc, _TCHAR* argv[])
    {
        HRESULT hr = Direct3D9TakeScreenshots(D3DADAPTER_DEFAULT, 10);
        return 0;
    }
    
    
    HRESULT Direct3D9TakeScreenshots(UINT adapter, UINT count)
    {
        HRESULT hr = S_OK;
        IDirect3D9 *d3d = nullptr;
        IDirect3DDevice9 *device = nullptr;
        IDirect3DSurface9 *surface = nullptr;
        D3DPRESENT_PARAMETERS parameters = { 0 };
        D3DDISPLAYMODE mode;
        D3DLOCKED_RECT rc;
        UINT pitch;
        SYSTEMTIME st;
        LPBYTE *shots = nullptr;
    
        // init D3D and get screen size
        d3d = Direct3DCreate9(D3D_SDK_VERSION);
        HRCHECK(d3d->GetAdapterDisplayMode(adapter, &mode));
    
        parameters.Windowed = TRUE;
        parameters.BackBufferCount = 1;
        parameters.BackBufferHeight = mode.Height;
        parameters.BackBufferWidth = mode.Width;
        parameters.SwapEffect = D3DSWAPEFFECT_DISCARD;
        parameters.hDeviceWindow = NULL;
    
        // create device & capture surface
        HRCHECK(d3d->CreateDevice(adapter, D3DDEVTYPE_HAL, NULL, D3DCREATE_SOFTWARE_VERTEXPROCESSING, &parameters, &device));
        HRCHECK(device->CreateOffscreenPlainSurface(mode.Width, mode.Height, D3DFMT_A8R8G8B8, D3DPOOL_SYSTEMMEM, &surface, nullptr));
    
        // compute the required buffer size
        HRCHECK(surface->LockRect(&rc, NULL, 0));
        pitch = rc.Pitch;
        HRCHECK(surface->UnlockRect());
    
        // allocate screenshots buffers
        shots = new LPBYTE[count];
        for (UINT i = 0; i < count; i++)
        {
            shots[i] = new BYTE[pitch * mode.Height];
        }
    
        GetSystemTime(&st); // measure the time we spend doing <count> captures
        wprintf(L"START Capture--> %i:%i:%i.%i\n", st.wHour, st.wMinute, st.wSecond, st.wMilliseconds);
        for (UINT i = 0; i < count; i++)
        {
            // get the data
            HRCHECK(device->GetFrontBufferData(0, surface));
    
            // copy it into our buffers
            HRCHECK(surface->LockRect(&rc, NULL, 0));
            CopyMemory(shots[i], rc.pBits, rc.Pitch * mode.Height);
            HRCHECK(surface->UnlockRect());
        }
        GetSystemTime(&st);
        wprintf(L"END Capture--> %i:%i:%i.%i\n", st.wHour, st.wMinute, st.wSecond, st.wMilliseconds);
    
        // save all screenshots
        for (UINT i = 0; i < count; i++)
        {
            WCHAR file[100];
            wsprintf(file, L"cap%i.jpg", i);
            HRCHECK(SavePixelsToFile32bppPBGRA(mode.Width, mode.Height, pitch, shots[i], file, GUID_ContainerFormatJpeg));
        }
    
        cleanup:
        if (shots != nullptr)
        {
            for (UINT i = 0; i < count; i++)
            {
                delete[] shots[i];
            }
            delete[] shots;
        }
    
        RELEASE(surface);
        RELEASE(device);
        RELEASE(d3d);
        return hr;
    }
    
    HRESULT SavePixelsToFile32bppPBGRA(UINT width, UINT height, UINT stride, LPBYTE pixels, LPWSTR filePath, const GUID &format)
    {
        if (!filePath || !pixels)
            return E_INVALIDARG;
    
        HRESULT hr = S_OK;
        IWICImagingFactory *factory = nullptr;
        IWICBitmapEncoder *encoder = nullptr;
        IWICBitmapFrameEncode *frame = nullptr;
        IWICStream *stream = nullptr;
        GUID pf = GUID_WICPixelFormat32bppPBGRA;
        BOOL coInit = CoInitialize(nullptr);
    
        HRCHECK(CoCreateInstance(CLSID_WICImagingFactory, nullptr, CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&factory)));
        HRCHECK(factory->CreateStream(&stream));
        HRCHECK(stream->InitializeFromFilename(filePath, GENERIC_WRITE));
        HRCHECK(factory->CreateEncoder(format, nullptr, &encoder));
        HRCHECK(encoder->Initialize(stream, WICBitmapEncoderNoCache));
        HRCHECK(encoder->CreateNewFrame(&frame, nullptr)); // we don't use options here
        HRCHECK(frame->Initialize(nullptr)); // we don't use any options here
        HRCHECK(frame->SetSize(width, height));
        HRCHECK(frame->SetPixelFormat(&pf));
        HRCHECK(frame->WritePixels(height, stride, stride * height, pixels));
        HRCHECK(frame->Commit());
        HRCHECK(encoder->Commit());
    
        cleanup:
        RELEASE(stream);
        RELEASE(frame);
        RELEASE(encoder);
        RELEASE(factory);
        if (coInit) CoUninitialize();
    
        // This part encodes the JPEG files into a video file
        VideoCapture in_capture("cap%d.jpg");
    
        Mat img;
    
        VideoWriter out_capture("video.avi", CV_FOURCC('M','J','P','G'), 1, Size(1920,1080));
    
        while (true)
        {
            in_capture >> img;
            if(img.empty())
                break;
    
            out_capture.write(img);
        }
    
        return hr;
    }
    

1 answer:

Answer 0 (score: 3)

Your images are incorrect because the JPEG encoder only encodes three pixel formats (see JPEG Native Codec); a conversion sketch follows the list below:

  • GUID_WICPixelFormat8bppGray
  • GUID_WICPixelFormat24bppBGR
  • GUID_WICPixelFormat32bppCMYK
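
So one possible fix is to convert the captured 32bppPBGRA pixels to GUID_WICPixelFormat24bppBGR before they reach the JPEG frame. Here is a minimal sketch of mine (not your code) that replaces the frame->WritePixels call in SavePixelsToFile32bppPBGRA with an IWICFormatConverter and WriteSource, reusing the same factory, frame, width, height, stride and pixels variables:

    // Sketch only: wrap the raw capture in an IWICBitmap, convert it to
    // 24bppBGR, and let the encoder frame pull the converted pixels.
    IWICBitmap *bitmap = nullptr;
    IWICFormatConverter *converter = nullptr;

    HRCHECK(factory->CreateBitmapFromMemory(width, height,
        GUID_WICPixelFormat32bppPBGRA, stride, stride * height, pixels, &bitmap));
    HRCHECK(factory->CreateFormatConverter(&converter));
    HRCHECK(converter->Initialize(bitmap, GUID_WICPixelFormat24bppBGR,
        WICBitmapDitherTypeNone, nullptr, 0.0, WICBitmapPaletteTypeCustom));

    // Replace the frame->WritePixels(...) call with:
    HRCHECK(frame->WriteSource(converter, nullptr));

    // ... and later, in the cleanup section:
    RELEASE(converter);
    RELEASE(bitmap);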

Have you tried D3DXSaveSurfaceToFile? Is it too slow?
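
For comparison, saving directly from the captured surface would look roughly like this; it needs d3dx9.h and d3dx9.lib from the legacy DirectX SDK, and the helper name and file pattern are placeholders of mine:

    #include <d3dx9.h>
    #pragma comment(lib, "d3dx9.lib")

    // Rough sketch: let D3DX encode the off-screen surface itself.
    // 'surface' is the IDirect3DSurface9* from the capture loop above.
    HRESULT SaveSurfaceAsJpeg(IDirect3DSurface9 *surface, UINT index)
    {
        WCHAR file[100];
        wsprintf(file, L"cap%u.jpg", index);
        // D3DXIFF_PNG or D3DXIFF_BMP can be used instead of D3DXIFF_JPG.
        return D3DXSaveSurfaceToFileW(file, D3DXIFF_JPG, surface, nullptr, nullptr);
    }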

EDIT

For your second option, I have put together a program that encodes the screenshots to H.264 with Media Foundation:

  • This works on Windows 7 at a resolution of 1280*1024 with reasonable performance, but I don't know whether it meets your needs.
  • This program does not capture in real time; you may have to correct the timestamps passed to the sink writer. Consider using a Media Foundation clock (see the sketch after this list).
  • You can try changing the encoding format: MF_TRANSCODE_CONTAINERTYPE
  • Be aware that encoders only support a limited range of resolutions.
  • Changing MF_MT_AVG_BITRATE trades quality against performance. Set the value according to your requirements.
  • On my system the captured image is upside down, so I copy it row by row in reverse order (see #define REVERSE_IMAGE). This may be slower than MFCopyImage, but I am not sure even after testing both implementations.
  • We do not need WIC here, because the Media Foundation encoder handles the format captured by d3d9.
  • On Windows 7, consider using IDirect3DDevice9Ex instead of IDirect3DDevice9.
  • I mixed your d3d9 capture code with this tutorial: Using the Sink Writer to Encode Video. The design of the program could also be improved.
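
On the timestamp note above, one simple option along those lines is a minimal sketch like this (not part of the program below; the helper name is mine), which derives sample times from the system clock via MFGetSystemTime so that a real-time capture gets timestamps matching wall-clock time:

    #include <mfapi.h>

    // Hypothetical helper: sample time relative to the first capture,
    // in 100-ns units, read from the system clock instead of assuming
    // a fixed VIDEO_FRAME_DURATION per frame.
    static LONGLONG GetRelativeSampleTime()
    {
        static const MFTIME rtBase = MFGetSystemTime();
        return MFGetSystemTime() - rtBase;
    }

    // Usage idea inside the capture loop:
    //     LONGLONG rtStart = GetRelativeSampleTime();
    //     hr = WriteFrame(pDevice, pSurface, pSinkWriter, stream, rtStart, uiWidth, uiHeight);
    //     // and set the sample duration to the gap until the next capture.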

以下是代码:

#include <Windows.h>
#include <mfapi.h>
#include <mfidl.h>
#include <Mfreadwrite.h>
#include <mferror.h>
#include <d3d9.h>

#pragma comment(lib, "mfreadwrite")
#pragma comment(lib, "mfplat")
#pragma comment(lib, "mfuuid")
#pragma comment(lib, "d3d9.lib")

template <class T> void SafeRelease(T **ppT){

    if(*ppT){
        (*ppT)->Release();
        *ppT = NULL;
    }
}

#define REVERSE_IMAGE

// Format constants
const UINT32 VIDEO_FPS = 30;
const UINT64 VIDEO_FRAME_DURATION = 10 * 1000 * 1000 / VIDEO_FPS;
const UINT32 VIDEO_BIT_RATE = 2000000;
const GUID   VIDEO_ENCODING_FORMAT = MFVideoFormat_H264;
const GUID   VIDEO_INPUT_FORMAT = MFVideoFormat_RGB32;
const UINT32 VIDEO_FRAME_COUNT = 5 * VIDEO_FPS;

HRESULT InitializeDirect3D9(IDirect3DDevice9** ppDevice, IDirect3DSurface9** ppSurface, UINT32& uiWidth, UINT32& uiHeight){

    IDirect3D9* d3d = NULL;

    d3d = Direct3DCreate9(D3D_SDK_VERSION);

    if(d3d == NULL)
        return E_POINTER;

    D3DDISPLAYMODE mode;
    HRESULT hr = d3d->GetAdapterDisplayMode(D3DADAPTER_DEFAULT, &mode);

    if(FAILED(hr)){
        SafeRelease(&d3d);
        return hr;
    }

    D3DPRESENT_PARAMETERS parameters = {0};

    parameters.Windowed = TRUE;
    parameters.BackBufferCount = 1;
    uiHeight = parameters.BackBufferHeight = mode.Height;
    uiWidth = parameters.BackBufferWidth = mode.Width;
    parameters.SwapEffect = D3DSWAPEFFECT_DISCARD;
    parameters.hDeviceWindow = NULL;

    hr = d3d->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, NULL, D3DCREATE_SOFTWARE_VERTEXPROCESSING, &parameters, ppDevice);

    if(FAILED(hr)){
        SafeRelease(&d3d);
        return hr;
    }

    hr = (*ppDevice)->CreateOffscreenPlainSurface(mode.Width, mode.Height, D3DFMT_A8R8G8B8, D3DPOOL_SYSTEMMEM, ppSurface, nullptr);

    SafeRelease(&d3d);

    return hr;
}

HRESULT InitializeSinkWriter(IMFSinkWriter **ppWriter, DWORD *pStreamIndex, const UINT32 uiWidth, const UINT32 uiHeight){

    *ppWriter = NULL;
    *pStreamIndex = NULL;

    IMFSinkWriter   *pSinkWriter = NULL;
    IMFMediaType    *pMediaTypeOut = NULL;
    IMFMediaType    *pMediaTypeIn = NULL;
    DWORD           streamIndex;

    HRESULT hr = MFCreateSinkWriterFromURL(L"output.mp4", NULL, NULL, &pSinkWriter);

    // Set the output media type.
    if(SUCCEEDED(hr)){
        hr = MFCreateMediaType(&pMediaTypeOut);
    }
    if(SUCCEEDED(hr)){
        hr = pMediaTypeOut->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
    }
    if(SUCCEEDED(hr)){
        hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, VIDEO_ENCODING_FORMAT);
    }
    if(SUCCEEDED(hr)){
        hr = pMediaTypeOut->SetUINT32(MF_MT_AVG_BITRATE, VIDEO_BIT_RATE);
    }
    if(SUCCEEDED(hr)){
        hr = pMediaTypeOut->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
    }
    if(SUCCEEDED(hr)){
        hr = MFSetAttributeSize(pMediaTypeOut, MF_MT_FRAME_SIZE, uiWidth, uiHeight);
    }
    if(SUCCEEDED(hr)){
        hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_FRAME_RATE, VIDEO_FPS, 1);
    }
    if(SUCCEEDED(hr)){
        hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
    }
    if(SUCCEEDED(hr)){
        hr = pSinkWriter->AddStream(pMediaTypeOut, &streamIndex);
    }

    // Set the input media type.
    if(SUCCEEDED(hr)){
        hr = MFCreateMediaType(&pMediaTypeIn);
    }
    if(SUCCEEDED(hr)){
        hr = pMediaTypeIn->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
    }
    if(SUCCEEDED(hr)){
        hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, VIDEO_INPUT_FORMAT);
    }
    if(SUCCEEDED(hr)){
        hr = pMediaTypeIn->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
    }
    if(SUCCEEDED(hr)){
        hr = MFSetAttributeSize(pMediaTypeIn, MF_MT_FRAME_SIZE, uiWidth, uiHeight);
    }
    if(SUCCEEDED(hr)){
        hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_FRAME_RATE, VIDEO_FPS, 1);
    }
    if(SUCCEEDED(hr)){
        hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
    }
    if(SUCCEEDED(hr)){
        hr = pSinkWriter->SetInputMediaType(streamIndex, pMediaTypeIn, NULL);
    }

    // Tell the sink writer to start accepting data.
    if(SUCCEEDED(hr)){
        hr = pSinkWriter->BeginWriting();
    }

    // Return the pointer to the caller.
    if(SUCCEEDED(hr)){

        *ppWriter = pSinkWriter;
        (*ppWriter)->AddRef();
        *pStreamIndex = streamIndex;
    }

    SafeRelease(&pSinkWriter);
    SafeRelease(&pMediaTypeOut);
    SafeRelease(&pMediaTypeIn);
    return hr;
}

HRESULT WriteFrame(IDirect3DDevice9* pDevice, IDirect3DSurface9* pSurface, IMFSinkWriter* pWriter, DWORD streamIndex, const LONGLONG& rtStart, const UINT32 uiWidth, const UINT32 uiHeight){

    HRESULT hr = pDevice->GetFrontBufferData(0, pSurface);

    if(FAILED(hr)){
        return hr;
    }

    D3DLOCKED_RECT rc;
    hr = pSurface->LockRect(&rc, NULL, 0);

    if(FAILED(hr)){
        return hr;
    }

    IMFSample *pSample = NULL;
    IMFMediaBuffer *pBuffer = NULL;

    const LONG cbWidth = 4 * uiWidth;
    const DWORD cbBuffer = cbWidth * uiHeight;

    BYTE *pData = NULL;

    // Create a new memory buffer.
    hr = MFCreateMemoryBuffer(cbBuffer, &pBuffer);

    // Lock the buffer and copy the video frame to the buffer.
    if(SUCCEEDED(hr)){
        hr = pBuffer->Lock(&pData, NULL, NULL);
    }

    if(SUCCEEDED(hr)){

#ifdef REVERSE_IMAGE
        for(int i = 0, j = uiHeight - 1; i < uiHeight; i++, j--)
            for(int k = 0; k < cbWidth; k++)
                    pData[(i * cbWidth) + k] = ((BYTE*)rc.pBits)[(j * cbWidth) + k];
#else
        hr = MFCopyImage(pData, cbWidth, (BYTE*)rc.pBits, rc.Pitch, cbWidth, uiHeight);
#endif
    }

    if(pBuffer){
        pBuffer->Unlock();
    }

    // Set the data length of the buffer.
    if(SUCCEEDED(hr)){
        hr = pBuffer->SetCurrentLength(cbBuffer);
    }

    // Create a media sample and add the buffer to the sample.
    if(SUCCEEDED(hr)){
        hr = MFCreateSample(&pSample);
    }

    if(SUCCEEDED(hr)){
        hr = pSample->AddBuffer(pBuffer);
    }

    // Set the time stamp and the duration.
    if(SUCCEEDED(hr)){
        hr = pSample->SetSampleTime(rtStart);
    }

    if(SUCCEEDED(hr)){
        hr = pSample->SetSampleDuration(VIDEO_FRAME_DURATION);
    }

    // Send the sample to the Sink Writer.
    if(SUCCEEDED(hr)){
        hr = pWriter->WriteSample(streamIndex, pSample);
    }

    hr = pSurface->UnlockRect();

    SafeRelease(&pSample);
    SafeRelease(&pBuffer);
    return hr;
}

int main(){

    HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);

    if(SUCCEEDED(hr)){

        hr = MFStartup(MF_VERSION);

        if(SUCCEEDED(hr)){

            UINT32 uiWidth = 0;
            UINT32 uiHeight = 0;

            IDirect3DDevice9* pDevice = NULL;
            IDirect3DSurface9* pSurface = NULL;

            hr = InitializeDirect3D9(&pDevice, &pSurface, uiWidth, uiHeight);

            if(SUCCEEDED(hr)){

                IMFSinkWriter *pSinkWriter = NULL;
                DWORD stream;

                hr = InitializeSinkWriter(&pSinkWriter, &stream, uiWidth, uiHeight);

                if(SUCCEEDED(hr)){

                    LONGLONG rtStart = 0;

                    for(DWORD i = 0; i < VIDEO_FRAME_COUNT; ++i){

                        hr = WriteFrame(pDevice, pSurface, pSinkWriter, stream, rtStart, uiWidth, uiHeight);

                        if(FAILED(hr)){
                            break;
                        }

                        rtStart += VIDEO_FRAME_DURATION;
                    }
                }

                if(SUCCEEDED(hr)){
                    hr = pSinkWriter->Finalize();
                }

                SafeRelease(&pSinkWriter);
            }

            SafeRelease(&pDevice);
            SafeRelease(&pSurface);
            MFShutdown();
        }

        CoUninitialize();
    }

    return 0;
}