两个视频流到一个屏幕 - 媒体基金会

时间:2017-11-08 12:41:07

标签: video-streaming ms-media-foundation mixer

目的是在一个屏幕上显示两个视频流。

我使用以下MSDN示例代码。和堆栈

  1. 音频/视频播放
  2. https://msdn.microsoft.com/en-us/library/windows/desktop/dd317914(v=vs.85).aspx

    1. 创建转换节点
    2. https://msdn.microsoft.com/en-us/library/windows/desktop/bb250384(v=vs.85).aspx

      1. 将解码器添加到拓扑
      2. https://msdn.microsoft.com/en-us/library/windows/desktop/bb250371(v=vs.85).aspx

        1. Add two media sources in topology and display mix video - Windows Media Foundation

          我的媒体源在同一个文件中包含两个视频流,因此我没有使用 MFCreateAggregateSource 通过 IMFCollection 聚合多个源。但是没有任何输出。

          我也尝试过创建两个媒体接收器(media sink),这样可以工作,但两路视频会交替显示,没有按索引值区分。

          我认为问题在于 IMFTransform::AddInputStreams、GetStreamLimits 等方法没有被调用。

          我不知道该怎么做。

          下面是代码:

          // NOTE(review): fragment from the question — "...." marks code elided
          // by the author. Builds the playback topology: a single video-renderer
          // (EVR) activate is created up front and then shared by every stream
          // branch added in the loop below.
          HRESULT CreatePlaybackTopology( .... )
          {
              ....
              // Create one EVR activate for the playback window; the same
              // activate is passed to every branch so all video streams render
              // into the same window (via separate stream sinks).
              hr = MFCreateVideoRendererActivate(hVideoWnd, &pVideoRendererActivate);
              // For each stream, create the topology nodes and add them to the topology.
              for (DWORD i = 0; i < cSourceStreams; i++)
              {
                  hr = AddBranchToPartialTopology(pTopology, pSource, pPD, i, pVideoRendererActivate);
                  if (FAILED(hr))
                  {
                      goto done;
                  }
              }
          ....
          }
          

          AddBranchToPartialTopology 方法。

          //-------------------------------------------------------------------
          // AddBranchToPartialTopology
          //
          // Adds one source-stream branch to the partial playback topology:
          //     source node -> [decoder node -> custom MFT node ->] output node
          //
          // pTopology              - topology being built.
          // pSource                - media source that owns the stream.
          // pPD                    - presentation descriptor of pSource.
          // iStream                - index of the stream in pPD.
          // pVideoRendererActivate - shared EVR activate; every video stream is
          //                          routed to its own stream sink on this one
          //                          renderer.
          //
          // Unselected streams are skipped (returns the descriptor-lookup HR
          // without adding anything).
          //-------------------------------------------------------------------
          HRESULT AddBranchToPartialTopology(
          IMFTopology *pTopology,         // Topology.
          IMFMediaSource *pSource,        // Media source.
          IMFPresentationDescriptor *pPD, // Presentation descriptor.
          DWORD iStream,                  // Stream index.
          IMFActivate* pVideoRendererActivate) // Shared video-renderer activate.
          {
              IMFStreamDescriptor *pSD = NULL;
              IMFActivate         *pSinkActivate = NULL;
              IMFTopologyNode     *pSourceNode = NULL;
              IMFTopologyNode     *pOutputNode = NULL;
              IMFTopologyNode     *pDecoderNode = NULL;
              IMFTopologyNode     *pMFTNode = NULL;

              BOOL fSelected = FALSE;
              CLSID clsidDecoder = GUID_NULL;
              // Declared before the first `goto done`: jumping forward over an
              // initialized local is ill-formed in C++ (the original declared
              // this after the first goto).
              DWORD iStreamID = 0;

              HRESULT hr = pPD->GetStreamDescriptorByIndex(iStream, &fSelected, &pSD);
              if (FAILED(hr))
              {
                  goto done;
              }

              if (fSelected)
              {
                  // Create the media sink activation object. For a video stream
                  // this also assigns iStreamID (the EVR stream-sink index).
                  hr = CreateMediaSinkActivate(pSD, iStreamID, pVideoRendererActivate, &pSinkActivate);
                  if (FAILED(hr))
                  {
                      goto done;
                  }

                  // Add a source node for this stream.
                  hr = AddSourceNode(pTopology, pSource, pPD, pSD, &pSourceNode);
                  if (FAILED(hr))
                  {
                      goto done;
                  }

                  // Create the output node for the renderer, bound to the
                  // stream-sink index chosen above.
                  hr = AddOutputNode(pTopology, pSinkActivate, iStreamID, &pOutputNode);
                  if (FAILED(hr))
                  {
                      goto done;
                  }

                  // Find a decoder for the stream's media type.
                  hr = FindDecoderForStream(pSD, &clsidDecoder);

                  if (SUCCEEDED(hr))
                  {
                      if (clsidDecoder == GUID_NULL)
                      {
                          // No decoder is required.
                          // Connect the source node to the output node.
                          hr = pSourceNode->ConnectOutput(0, pOutputNode, 0);
                      }
                      else
                      {
                          // Add a decoder node.
                          hr = AddTransformNode(pTopology, clsidDecoder, &pDecoderNode);

                          // BUG FIX: the original called the second
                          // AddTransformNode unconditionally, overwriting hr and
                          // silently discarding a failure from the first call.
                          if (SUCCEEDED(hr))
                          {
                              // Add the custom MFT (mixer) node.
                              hr = AddTransformNode(pTopology, CLSID_TopviewMFT, &pMFTNode);
                          }

                          // Connect: source -> decoder -> MFT -> output.
                          if (SUCCEEDED(hr))
                          {
                              hr = pSourceNode->ConnectOutput(0, pDecoderNode, 0);
                          }

                          if (SUCCEEDED(hr))
                          {
                              hr = pDecoderNode->ConnectOutput(0, pMFTNode, 0);
                          }

                          if (SUCCEEDED(hr))
                          {
                              hr = pMFTNode->ConnectOutput(0, pOutputNode, 0);
                          }
                      }
                  }
              }
              // else: If not selected, don't add the branch.

          done:
              SafeRelease(&pSD);
              SafeRelease(&pSinkActivate);
              SafeRelease(&pSourceNode);
              SafeRelease(&pOutputNode);
              SafeRelease(&pDecoderNode);
              SafeRelease(&pMFTNode);

              return hr;
          }
          

          CreateMediaSinkActivate方法。

          // Next EVR stream-sink index to hand out to a video stream.
          // NOTE(review): this global is never reset, so a second playback
          // session keeps counting from where the previous one stopped —
          // reset it to 0 before rebuilding the topology if that matters.
          DWORD globalVideoIndex = 0;

          //-------------------------------------------------------------------
          // CreateMediaSinkActivate
          //
          // Creates (or shares) the renderer activate for one source stream.
          //
          // pSourceSD              - stream descriptor of the source stream.
          // iStreamID              - [out] stream-sink index for AddOutputNode.
          //                          Video streams receive the next value of
          //                          globalVideoIndex; otherwise it is 0.
          // pVideoRendererActivate - shared EVR activate; all video streams are
          //                          rendered by this single renderer.
          // ppActivate             - [out] receives the activate, AddRef'ed.
          //-------------------------------------------------------------------
          HRESULT CreateMediaSinkActivate(
              IMFStreamDescriptor *pSourceSD,     // Pointer to the stream descriptor.
              DWORD& iStreamID,
              IMFActivate* pVideoRendererActivate, // Shared video-renderer activate.
              IMFActivate **ppActivate
          )
          {
              IMFMediaTypeHandler *pHandler = NULL;
              IMFActivate *pActivate = NULL;

              // BUG FIX: always give the out-parameter a deterministic value.
              // The original only assigned it on the video path, so audio and
              // failure paths left the caller's variable untouched.
              iStreamID = 0;

              // Get the media type handler for the stream.
              HRESULT hr = pSourceSD->GetMediaTypeHandler(&pHandler);
              if (FAILED(hr))
              {
                  goto done;
              }

              // Get the major media type.
              GUID guidMajorType;
              hr = pHandler->GetMajorType(&guidMajorType);
              if (FAILED(hr))
              {
                  goto done;
              }

              // Create an IMFActivate object for the renderer, based on the media type.
              if (MFMediaType_Audio == guidMajorType)
              {
                  // Create the audio renderer.
                  hr = MFCreateAudioRendererActivate(&pActivate);
              }
              else if (MFMediaType_Video == guidMajorType)
              {
                  // Share the single EVR activate instead of creating a new
                  // renderer per stream; each video stream then gets its own
                  // stream-sink index on that one renderer.
                  hr = pVideoRendererActivate->QueryInterface(IID_PPV_ARGS(&pActivate));
                  iStreamID = globalVideoIndex++;
              }
              else
              {
                  // Unknown stream type.
                  hr = E_FAIL;
                  // Optionally, you could deselect this stream instead of failing.
              }
              if (FAILED(hr))
              {
                  goto done;
              }

              // Return IMFActivate pointer to caller.
              *ppActivate = pActivate;
              (*ppActivate)->AddRef();

          done:
              SafeRelease(&pHandler);
              SafeRelease(&pActivate);

              return hr;
          }
          

          MFT .h

          interface IMFVideoMixerControl;
          
          class CTopview : BaseObject, 
              public IMFVideoDeviceID,
              public IMFGetService,
              public IMFTopologyServiceLookupClient,
              public IMFTransform,
          //  public IMFVideoMixerControl,
              public IMFAttributes
          {
          public:
              static HRESULT CreateInstance(IUnknown *pUnkOuter, REFIID iid, void **ppSource);
          
              // IUnknown
              STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
              STDMETHODIMP_(ULONG) AddRef();
              STDMETHODIMP_(ULONG) Release();
          ....
          

0 个答案:

没有答案