Using MediaTranscoder

Date: 2015-07-24 08:57:47

Tags: c++-cx ms-media-foundation

I am trying to use the MediaTranscoder from Media Foundation to convert a sequence of video frames coming from a capture device from IYUV to H264. The MediaTranscoder only processes the first frame, then shuts down and closes the MediaStreamSource. How can I make it keep requesting new frames to encode? Here is the relevant code:

int InitEncoder(const VideoCodec* inst) {

    auto encoderProps = VideoEncodingProperties::CreateUncompressed(MediaEncodingSubtypes::Iyuv, inst->width, inst->height);
    streamDescriptor_ = ref new VideoStreamDescriptor(encoderProps);
    mediaStreamSource_ = ref new MediaStreamSource(streamDescriptor_);

    mediaStreamSource_->Starting += 
    ref new TypedEventHandler<MediaStreamSource ^, MediaStreamSourceStartingEventArgs ^>
    ([=](MediaStreamSource ^source, MediaStreamSourceStartingEventArgs ^ args)
    {
        args->Request->GetDeferral()->Complete();
    });

    mediaStreamSource_->SwitchStreamsRequested += 
    ref new TypedEventHandler<MediaStreamSource ^, MediaStreamSourceSwitchStreamsRequestedEventArgs ^>
    ([=](MediaStreamSource ^source, MediaStreamSourceSwitchStreamsRequestedEventArgs ^args)
    {
        OutputDebugString(L"Media stream source switch stream requested.\n");
    });

    mediaStreamSource_->Closed += 
    ref new TypedEventHandler<MediaStreamSource ^, MediaStreamSourceClosedEventArgs ^>
    ([=](MediaStreamSource ^source, MediaStreamSourceClosedEventArgs ^args)
    {
        OutputDebugString(L"Media stream source closed event, reason: ");
    });

    mediaStreamSource_->SampleRequested +=
    ref new TypedEventHandler<MediaStreamSource ^, MediaStreamSourceSampleRequestedEventArgs ^>
    ([=](MediaStreamSource ^source, MediaStreamSourceSampleRequestedEventArgs ^args)
    {
        if (framesToEncode_.size() > 0) {
            FeedOneSample(args->Request);
        }
        else {
            sampleRequest_ = args->Request;
            args->Request->ReportSampleProgress(1);
            isSampleRequestPending_ = true;
        }
    });

    mediaTranscoder_ = ref new MediaTranscoder();
    mediaTranscoder_->VideoProcessingAlgorithm = MediaVideoProcessingAlgorithm::Default;
    mediaTranscoder_->HardwareAccelerationEnabled = true;    

    encodedStream_ = ref new InMemoryRandomAccessStream();

    h264EncodingProfile_ = MediaEncodingProfile::CreateMp4(VideoEncodingQuality::HD720p);
    h264EncodingProfile_->Audio = nullptr;
    h264EncodingProfile_->Container = nullptr;

    auto prepareTranscodeResultTask = mediaTranscoder_->PrepareMediaStreamSourceTranscodeAsync(
        mediaStreamSource_,
        encodedStream_,
        h264EncodingProfile_);

    auto prepareTranscodeResultContinuationTask = create_task(prepareTranscodeResultTask).then(
        [=](PrepareTranscodeResult ^prepTransResult) {
            prepareTranscodeResult_ = prepTransResult;
            if (!prepTransResult->CanTranscode) {
                TranscodeFailureReason failureReason = TranscodeFailureReason::None;
                failureReason = prepTransResult->FailureReason;
            }
        });
    prepareTranscodeResultContinuationTask.then([this]()
    {
        if (prepareTranscodeResult_->CanTranscode) {
            inited_ = true;
        }
        return 0;
    });

    return 0;
}

int EncodeFrame(const VideoFrame& frame) {

    if (!inited_)
    {
        return -1;
    }

    framesToEncode_.push_back(&frame);

    if (framesToEncode_.size() == 1 && !transcodeStarted_)
    {
        auto transcodeOp = prepareTranscodeResult_->TranscodeAsync();
        OnTranscodeAsync(transcodeOp);
        transcodeStarted_ = true;
    }
    if (isSampleRequestPending_ && framesToEncode_.size() > 0)
    {
        FeedOneSample(sampleRequest_);
        isSampleRequestPending_ = false;
    }

    return 0;
}

void OnTranscodeAsync(IAsyncActionWithProgress<double> ^actionProgress) {
    auto transcodeContinuation = create_task(actionProgress);
    transcodeContinuation.then([=]() {
        OutputDebugString(L"Transcoding frame complete.\n");
        OutputDebugString(L"Size of encoded stream is: ");
        OutputDebugString(encodedStream_->Size.ToString()->Data());
        OutputDebugString(L"\n");
    });
}

void FeedOneSample(MediaStreamSourceSampleRequest ^sampleRequest)
{
    const VideoFrame* frame = framesToEncode_.front();
    framesToEncode_.pop_front();
    sampleRequest->Sample = TransformToMediaStreamSample(frame);
    sampleRequest->GetDeferral()->Complete();
}

MediaStreamSample^ TransformToMediaStreamSample(const VideoFrame* frame)
{
    unsigned int totalSize = frame->allocated_size(kYPlane) +
        frame->allocated_size(kUPlane) +
        frame->allocated_size(kVPlane);
    const uint8_t *bytes = frame->buffer(kYPlane);
    unsigned char *unChars = const_cast<unsigned char*>(bytes);
    auto finalArray = ref new Platform::Array<unsigned char>(unChars, totalSize);
    auto dataWriter = ref new DataWriter();
    dataWriter->WriteBytes(finalArray);
    auto timeSpan = TimeSpan();
    timeSpan.Duration = frame->timestamp();
    auto sample = MediaStreamSample::CreateFromBuffer(dataWriter->DetachBuffer(), timeSpan);

    return sample;
}

The corresponding part of the .h file is:

private:
    bool inited_;
    bool transcodeStarted_;
    bool isSampleRequestPending_;
    std::list<const I420VideoFrame*> framesToEncode_;
    EncodedImageCallback* encoded_complete_callback_;
    MediaStreamSource ^mediaStreamSource_;
    MediaTranscoder ^mediaTranscoder_;
    InMemoryRandomAccessStream ^encodedStream_;
    MediaEncodingProfile ^h264EncodingProfile_;
    PrepareTranscodeResult ^prepareTranscodeResult_;
    MediaStreamSourceSampleRequest ^sampleRequest_;
    VideoStreamDescriptor ^streamDescriptor_;

    void FeedOneSample(MediaStreamSourceSampleRequest ^sampleRequest);
    MediaStreamSample^ TransformToMediaStreamSample(const I420VideoFrame* frame);

If it helps, I can provide a log captured with MFTrace.

1 Answer:

Answer 0 (score: 0)

In the end, with the help of someone more familiar with the internals of Microsoft Media Foundation, the bug was found. When no frame is available, GetDeferral() should be called instead of the line args->Request->ReportSampleProgress(1); this call tells the transcoder that it has to wait for a new frame. In addition, after calling GetDeferral() the request must be stored and used later, together with the deferral itself:

MediaStreamSourceSampleRequestDeferral ^sampleRequestDefferal_;

Then, when a frame becomes available, the Sample of the stored request is set to the new frame and sampleRequestDefferal_->Complete() must be called.
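
For illustration, here is a minimal sketch of what the corrected SampleRequested handler and the frame-arrival path might look like, assembled from this description. It reuses the member names from the question and the answer (including sampleRequestDefferal_) and omits error handling and any locking around the shared state, so it is an outline rather than a verified fix:

mediaStreamSource_->SampleRequested +=
ref new TypedEventHandler<MediaStreamSource ^, MediaStreamSourceSampleRequestedEventArgs ^>
([=](MediaStreamSource ^source, MediaStreamSourceSampleRequestedEventArgs ^args)
{
    if (framesToEncode_.size() > 0) {
        // A frame is already queued: hand it to the transcoder right away.
        FeedOneSample(args->Request);
    }
    else {
        // No frame yet: take a deferral so the transcoder waits for us,
        // and remember the request so it can be satisfied later.
        sampleRequest_ = args->Request;
        sampleRequestDefferal_ = args->Request->GetDeferral();
        isSampleRequestPending_ = true;
    }
});

// Later, in EncodeFrame, once a new frame has been queued:
if (isSampleRequestPending_ && framesToEncode_.size() > 0)
{
    const VideoFrame* queued = framesToEncode_.front();
    framesToEncode_.pop_front();
    sampleRequest_->Sample = TransformToMediaStreamSample(queued);
    sampleRequestDefferal_->Complete();   // lets the stalled request proceed
    isSampleRequestPending_ = false;
}

The open deferral is what keeps the stream source alive between frames: instead of treating the missing sample as end of stream, the pipeline simply stalls until Complete() is called.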

With these modifications the transcoder works continuously.