从VideoPlayer获取当前帧纹理

时间:2017-03-12 12:19:26

标签: c# unity3d unity5

在这篇文章 Using new Unity VideoPlayer and VideoClip API to play video 中说明,如果需要,可以为每一帧检索纹理。

将当前帧作为Texture2D获取的正确方法是什么?

修改

按照下面的答案修改之后我写了这段代码,但它没有起作用:

using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Video;

/// <summary>
/// Plays a VideoClip on this GameObject's Renderer and, for every decoded
/// frame, averages the frame's pixels and applies that color to a Light.
/// </summary>
public class AverageColorFromTexture : MonoBehaviour {

    public VideoClip videoToPlay;   // clip assigned in the Inspector
    public Light lSource;           // light tinted with the per-frame average color

    private Color targetColor;
    private VideoPlayer videoPlayer;
    private VideoSource videoSource;
    private Renderer rend;
    private Texture tex;
    private AudioSource audioSource;

    // Reusable CPU-readable copy of the current video frame.
    // VideoPlayer.texture is a RenderTexture at runtime, so a direct
    // (Texture2D) cast fails; instead we copy the pixels into this texture.
    private Texture2D videoFrame;

    void Start()
    {
        // Start tiny; OnNewFrame resizes it to match the actual frame size.
        videoFrame = new Texture2D(2, 2);
        Application.runInBackground = true;
        StartCoroutine(playVideo());
    }

    void OnDestroy()
    {
        // Unsubscribe so the player does not keep invoking a handler
        // on a destroyed component.
        if (videoPlayer != null)
        {
            videoPlayer.frameReady -= OnNewFrame;
        }
    }

    /// <summary>
    /// Sets up video + audio playback, waits for preparation, then plays
    /// while logging the current playback time each frame.
    /// </summary>
    IEnumerator playVideo()
    {
        rend = GetComponent<Renderer>();

        videoPlayer = gameObject.AddComponent<VideoPlayer>();
        audioSource = gameObject.AddComponent<AudioSource>();

        //Disable Play on Awake for both Video and Audio
        videoPlayer.playOnAwake = false;
        audioSource.playOnAwake = false;

        videoPlayer.source = VideoSource.VideoClip;
        videoPlayer.audioOutputMode = VideoAudioOutputMode.AudioSource;
        videoPlayer.EnableAudioTrack(0, true);
        videoPlayer.SetTargetAudioSource(0, audioSource);

        //Set video To Play then prepare Audio to prevent Buffering
        videoPlayer.clip = videoToPlay;
        videoPlayer.Prepare();

        //Wait until video is prepared
        while (!videoPlayer.isPrepared)
        {
            Debug.Log("Preparing Video");
            yield return null;
        }
        Debug.Log("Done Preparing Video");

        //Assign the Texture from Video to Material texture
        tex = videoPlayer.texture;
        rend.material.mainTexture = tex;

        //Enable new frame Event (costly: only enable when every frame is needed)
        videoPlayer.sendFrameReadyEvents = true;

        //Subscribe to the new frame Event
        videoPlayer.frameReady += OnNewFrame;

        //Play Video
        videoPlayer.Play();

        //Play Sound
        audioSource.Play();

        Debug.Log("Playing Video");
        while (videoPlayer.isPlaying)
        {
            Debug.LogWarning("Video Time: " + Mathf.FloorToInt((float)videoPlayer.time));
            yield return null;
        }
        Debug.Log("Done Playing Video");
    }

    /// <summary>
    /// frameReady callback: copies the frame's pixels to a readable
    /// Texture2D and tints the light with their average color.
    /// </summary>
    void OnNewFrame(VideoPlayer source, long frameIdx)
    {
        // FIX: source.texture is a RenderTexture, not a Texture2D, so the
        // original (Texture2D)source.texture cast throws at runtime.
        RenderTexture renderTexture = source.texture as RenderTexture;
        if (renderTexture == null)
        {
            return; // frame not available as a RenderTexture yet
        }

        // Resize the reusable texture only when the frame size changes.
        if (videoFrame.width != renderTexture.width || videoFrame.height != renderTexture.height)
        {
            videoFrame.Resize(renderTexture.width, renderTexture.height);
        }

        // Read the GPU frame into the CPU-side texture, restoring whatever
        // render target was active before.
        RenderTexture previous = RenderTexture.active;
        RenderTexture.active = renderTexture;
        videoFrame.ReadPixels(new Rect(0, 0, renderTexture.width, renderTexture.height), 0, 0);
        videoFrame.Apply();
        RenderTexture.active = previous;

        targetColor = CalculateAverageColorFromTexture(videoFrame);
        lSource.color = targetColor;
    }

    /// <summary>
    /// Returns the arithmetic mean of all pixel colors in the texture,
    /// fully opaque. Returns black for an empty texture.
    /// </summary>
    Color32 CalculateAverageColorFromTexture(Texture2D tex)
    {
        Color32[] texColors = tex.GetPixels32();
        int total = texColors.Length;
        if (total == 0)
        {
            return new Color32(0, 0, 0, 255);
        }

        float r = 0;
        float g = 0;
        float b = 0;

        for(int i = 0; i < total; i++)
        {
            r += texColors[i].r;
            g += texColors[i].g;
            b += texColors[i].b;
        }
        // FIX: alpha was 0 (fully transparent) in the original; use 255.
        return new Color32((byte)(r / total) , (byte)(g / total) , (byte)(b / total) , 255);
    }
}

1 个答案:

答案 0 :(得分:2)

您可以通过三个步骤正确完成此操作:

1 。通过将 VideoPlayer.sendFrameReadyEvents 设置为 true 来启用新帧事件。

2 。订阅 VideoPlayer.frameReady 事件

3 。当新的一帧可用时,分配给 VideoPlayer.frameReady 事件的函数就会被调用。只需在该函数中访问作为参数传入的 VideoPlayer,并将其 VideoPlayer.texture 转换为 Texture2D,即可获取视频帧。

就是这样。

在代码中:

在 videoPlayer.Play() 之前添加以下内容:

//Enable new frame Event
videoPlayer.sendFrameReadyEvents = true;

//Subscribe to the new frame Event
videoPlayer.frameReady += OnNewFrame;

这是您的 OnNewFrame 函数签名。

// NOTE(review): this was the answer's first suggestion. As the edit further
// below shows, source.texture is actually a RenderTexture at runtime, so this
// direct cast fails; it is kept here to show the originally proposed approach.
void OnNewFrame(VideoPlayer source, long frameIdx)
{
    Texture2D videoFrame = (Texture2D)source.texture;
    //Do anything with the videoFrame Texture.
}

值得注意的是,启用该事件的开销很高。在执行此操作之前,请确保您确实需要处理每一帧。

修改

Texture2D videoFrame = (Texture2D)source.texture;Texture2D videoFrame = source.texture as Texture2D;都失败了。

我将Debug.Log(source.texture);放在OnNewFrame函数中并得到:

  

TempBuffer 294 320x240(UnityEngine.RenderTexture)

因此,看起来 VideoPlayer.texture 属性在运行时返回的是 RenderTexture 类型,而不是 Texture2D 类型。

我们必须将RenderTexture转换为Texture2D

void Start()
{
    videoFrame = new Texture2D(2, 2);
    ...
}

//Initialize in the Start function
Texture2D videoFrame;

// Called by VideoPlayer for each ready frame; copies the frame's pixels to a
// CPU-readable Texture2D, then tints the light with their average color.
void OnNewFrame(VideoPlayer source, long frameIdx)
{
    // VideoPlayer.texture is a RenderTexture at runtime, hence the 'as' cast.
    RenderTexture renderTexture = source.texture as RenderTexture;

    // Resize the reusable Texture2D only when the frame size changes.
    if (videoFrame.width != renderTexture.width || videoFrame.height != renderTexture.height)
    {
        videoFrame.Resize(renderTexture.width, renderTexture.height);
    }
    // Make the frame the active render target, copy its pixels into the
    // Texture2D, then clear the active render target.
    RenderTexture.active = renderTexture;
    videoFrame.ReadPixels(new Rect(0, 0, renderTexture.width, renderTexture.height), 0, 0);
    videoFrame.Apply();
    RenderTexture.active = null;

    targetColor = CalculateAverageColorFromTexture(videoFrame);
    lSource.color = targetColor;
}

您问题中的完整代码:

/// <summary>
/// Plays a VideoClip on this GameObject's Renderer and, for every decoded
/// frame, averages the frame's pixels and applies that color to a Light.
/// </summary>
public class AverageColorFromTexture : MonoBehaviour
{
    public VideoClip videoToPlay;   // clip assigned in the Inspector
    public Light lSource;           // light tinted with the per-frame average color

    private Color targetColor;
    private VideoPlayer videoPlayer;
    private VideoSource videoSource;
    private Renderer rend;
    private Texture tex;
    private AudioSource audioSource;

    // Reusable CPU-readable copy of the current video frame
    // (initialized in Start, resized lazily in OnNewFrame).
    Texture2D videoFrame;

    void Start()
    {
        videoFrame = new Texture2D(2, 2);
        Application.runInBackground = true;
        StartCoroutine(playVideo());
    }

    void OnDestroy()
    {
        // FIX: unsubscribe so the player does not keep invoking a handler
        // on a destroyed component (the original never removed it).
        if (videoPlayer != null)
        {
            videoPlayer.frameReady -= OnNewFrame;
        }
    }

    /// <summary>
    /// Sets up video + audio playback, waits for preparation, then plays
    /// while logging the current playback time each frame.
    /// </summary>
    IEnumerator playVideo()
    {
        rend = GetComponent<Renderer>();

        videoPlayer = gameObject.AddComponent<VideoPlayer>();
        audioSource = gameObject.AddComponent<AudioSource>();

        //Disable Play on Awake for both Video and Audio
        videoPlayer.playOnAwake = false;
        audioSource.playOnAwake = false;

        videoPlayer.source = VideoSource.VideoClip;
        videoPlayer.audioOutputMode = VideoAudioOutputMode.AudioSource;
        videoPlayer.EnableAudioTrack(0, true);
        videoPlayer.SetTargetAudioSource(0, audioSource);

        //Set video To Play then prepare Audio to prevent Buffering
        videoPlayer.clip = videoToPlay;
        videoPlayer.Prepare();

        //Wait until video is prepared
        while (!videoPlayer.isPrepared)
        {
            Debug.Log("Preparing Video");
            yield return null;
        }
        Debug.Log("Done Preparing Video");

        //Assign the Texture from Video to Material texture
        tex = videoPlayer.texture;
        rend.material.mainTexture = tex;

        //Enable new frame Event (costly: only enable when every frame is needed)
        videoPlayer.sendFrameReadyEvents = true;

        //Subscribe to the new frame Event
        videoPlayer.frameReady += OnNewFrame;

        //Play Video
        videoPlayer.Play();

        //Play Sound
        audioSource.Play();

        Debug.Log("Playing Video");
        while (videoPlayer.isPlaying)
        {
            Debug.LogWarning("Video Time: " + Mathf.FloorToInt((float)videoPlayer.time));
            yield return null;
        }
        Debug.Log("Done Playing Video");
    }

    /// <summary>
    /// frameReady callback: copies the frame's pixels to the reusable
    /// Texture2D and tints the light with their average color.
    /// </summary>
    void OnNewFrame(VideoPlayer source, long frameIdx)
    {
        // VideoPlayer.texture is a RenderTexture at runtime.
        RenderTexture renderTexture = source.texture as RenderTexture;
        // FIX: the original dereferenced the 'as' result without a null check.
        if (renderTexture == null)
        {
            return;
        }

        // Resize the reusable texture only when the frame size changes.
        if (videoFrame.width != renderTexture.width || videoFrame.height != renderTexture.height)
        {
            videoFrame.Resize(renderTexture.width, renderTexture.height);
        }

        // FIX: restore whatever render target was active before, instead of
        // unconditionally clearing it to null.
        RenderTexture previous = RenderTexture.active;
        RenderTexture.active = renderTexture;
        videoFrame.ReadPixels(new Rect(0, 0, renderTexture.width, renderTexture.height), 0, 0);
        videoFrame.Apply();
        RenderTexture.active = previous;

        targetColor = CalculateAverageColorFromTexture(videoFrame);
        lSource.color = targetColor;
    }

    /// <summary>
    /// Returns the arithmetic mean of all pixel colors in the texture,
    /// fully opaque. Returns black for an empty texture.
    /// </summary>
    Color32 CalculateAverageColorFromTexture(Texture2D tex)
    {
        Color32[] texColors = tex.GetPixels32();
        int total = texColors.Length;
        if (total == 0)
        {
            return new Color32(0, 0, 0, 255);
        }

        float r = 0;
        float g = 0;
        float b = 0;

        for (int i = 0; i < total; i++)
        {
            r += texColors[i].r;
            g += texColors[i].g;
            b += texColors[i].b;
        }
        // FIX: alpha was 0 (fully transparent) in the original; use 255.
        return new Color32((byte)(r / total), (byte)(g / total), (byte)(b / total), 255);
    }
}