如何在Unity中延迟屏幕输出?

时间:2017-07-10 16:45:49

标签: c# unity3d

这不是通常会需要的功能,但我的研究项目需要它。

如何使场景开始并正常运行,但使屏幕输出延迟固定的秒数。即游戏运行并实时响应用户输入,但屏幕输出延迟x秒?

2 个答案:

答案 0 :(得分:4)

经过一些优化(在Unity Answer上询问),我已经重新设计了我的解决方案。

我使用一个固定大小的数组和两个整数来跟踪捕获的帧和渲染的帧。

using UnityEngine;
using System.Collections;

/// <summary>
/// Renders what <see cref="renderCamera"/> sees with a fixed time delay,
/// using a fixed-size ring buffer of captured frames (no per-frame allocation
/// once the buffer is warm).
/// </summary>
public class DelayedCamera : MonoBehaviour
{
    /// <summary>
    /// A single captured frame: the pixels plus the time they were captured at.
    /// </summary>
    public struct Frame
    {
        /// <summary>
        /// The texture representing the frame
        /// </summary>
        private Texture2D texture;

        /// <summary>
        /// The time (in seconds) the frame has been captured at
        /// </summary>
        private float recordedTime;

        /// <summary>
        /// Captures a new frame using the given render texture.
        /// </summary>
        /// <param name="renderTexture">The render texture this frame must be captured from</param>
        public void CaptureFrom( RenderTexture renderTexture )
        {
            RenderTexture.active = renderTexture;

            // Lazily create the backing texture the first time this buffer
            // slot is used; it is reused for every later capture.
            if ( texture == null )
                texture = new Texture2D( renderTexture.width, renderTexture.height );

            // Save what the camera sees into the texture
            texture.ReadPixels( new Rect( 0, 0, renderTexture.width, renderTexture.height ), 0, 0 );
            texture.Apply();

            recordedTime = Time.time;

            RenderTexture.active = null;
        }

        /// <summary>
        /// Indicates whether the frame has been captured before the given time
        /// </summary>
        /// <param name="time">The time</param>
        /// <returns><c>true</c> if the frame has been captured before the given time, <c>false</c> otherwise</returns>
        public bool CapturedBefore( float time )
        {
            return recordedTime < time;
        }

        /// <summary>
        /// Operator to convert the frame to a texture
        /// </summary>
        /// <param name="frame">The frame to convert</param>
        public static implicit operator Texture2D( Frame frame ) { return frame.texture; }
    }

    /// <summary>
    /// The camera used to capture the frames
    /// </summary>
    [SerializeField]
    private Camera renderCamera;

    /// <summary>
    /// The delay, in seconds, between capture and on-screen display
    /// </summary>
    [SerializeField]
    private float delay = 0.5f;

    /// <summary>
    /// The size of the buffer containing the recorded images
    /// </summary>
    /// <remarks>
    /// Try to keep this value as low as possible according to the delay:
    /// it must hold at least (delay * framerate) frames, and each slot
    /// owns a full screen-sized Texture2D.
    /// </remarks>
    private int bufferSize = 256;

    /// <summary>
    /// The render texture used to record what the camera sees
    /// </summary>
    private RenderTexture renderTexture;

    /// <summary>
    /// The recorded frames
    /// </summary>
    private Frame[] frames;

    /// <summary>
    /// The index of the captured texture
    /// </summary>
    private int capturedFrameIndex;

    /// <summary>
    /// The index of the rendered texture
    /// </summary>
    private int renderedFrameIndex;

    /// <summary>
    /// The monotonically increasing frame counter (wrapped into the buffer
    /// via modulo to obtain <see cref="capturedFrameIndex"/>)
    /// </summary>
    private int frameIndex;

    private void Awake()
    {
        // Fail loudly instead of throwing a NullReferenceException below
        // when the inspector field was left unassigned.
        if ( renderCamera == null )
        {
            Debug.LogError( "DelayedCamera: no renderCamera assigned in the inspector.", this );
            enabled = false;
            return;
        }

        frames = new Frame[bufferSize];

        // Change the depth value from 24 to 16 may improve performances. And try to specify an image format with better compression.
        renderTexture = new RenderTexture( Screen.width, Screen.height, 24 );
        renderCamera.targetTexture = renderTexture;
        StartCoroutine( Render() );
    }

    /// <summary>
    /// Makes the camera render with a delay
    /// </summary>
    /// <returns>The coroutine enumerator</returns>
    private IEnumerator Render()
    {
        WaitForEndOfFrame wait = new WaitForEndOfFrame();

        while ( true )
        {
            yield return wait;

            capturedFrameIndex = frameIndex % bufferSize;

            frames[capturedFrameIndex].CaptureFrom( renderTexture );

            // Advance renderedFrameIndex to the oldest frame that is still
            // recent enough (captured at or after Time.time - delay).
            // The scan is bounded to one full lap of the ring buffer so a
            // mis-configured delay (negative, or larger than the buffer can
            // hold) cannot spin this loop forever; the original unbounded
            // loop could hang the frame in those cases.
            for ( int step = 0 ;
                  step < bufferSize && frames[renderedFrameIndex].CapturedBefore( Time.time - delay ) ;
                  step++, renderedFrameIndex = ( renderedFrameIndex + 1 ) % bufferSize )
            {
                // The loop body is **voluntarily** empty: all the work
                // happens in the iterator expression.
            }

            Graphics.Blit( frames[renderedFrameIndex], null as RenderTexture );

            frameIndex++;
        }
    }
}

原始答案

我写了这个脚本,但它远未经过优化!将以下脚本挂到一个空物体(或您的相机)上,然后把相机游戏对象拖放到检查器(Inspector)中的 renderCamera 字段。

using UnityEngine;
using System.Collections;
using System.Collections.Generic;

/// <summary>
/// Naive delayed-camera implementation: captures one Texture2D per frame
/// into a queue and blits the oldest one once the queue exceeds 100 entries.
/// </summary>
public class DelayedCamera : MonoBehaviour
{
    /// <summary>
    /// The camera used to capture the frames
    /// </summary>
    [SerializeField]
    private Camera renderCamera;

    /// <summary>
    /// The render texture used to record what the camera sees
    /// </summary>
    private RenderTexture renderTexture;

    /// <summary>
    /// The captured frames waiting to be displayed (oldest first)
    /// </summary>
    private Queue<Texture2D> frames;

    private void Awake()
    {
        frames = new Queue<Texture2D>();

        // Change the depth value from 24 to 16 may improve performances. And try to specify an image format with better compression.
        renderTexture = new RenderTexture( Screen.width, Screen.height, 24 );
        renderCamera.targetTexture = renderTexture;
        StartCoroutine( Render() );
    }

    /// <summary>
    /// Captures a frame each rendered frame and displays the one captured
    /// ~100 frames ago.
    /// </summary>
    /// <returns>The coroutine enumerator</returns>
    private IEnumerator Render()
    {
        WaitForEndOfFrame wait = new WaitForEndOfFrame();

        while ( true )
        {
            yield return wait;

            // Create new texture of the current frame
            RenderTexture.active = renderTexture;

            // Making this variable a class attribute may be more efficient
            Texture2D texture = new Texture2D( renderTexture.width, renderTexture.height );
            texture.ReadPixels( new Rect( 0, 0, renderTexture.width, renderTexture.height ), 0, 0 );
            texture.Apply();
            RenderTexture.active = null;

            // Add it to the queue to "delay" the rendering
            frames.Enqueue( texture );

            // If the queue is too big, render the first frame on the screen
            if ( frames.Count > 100 )
            {
                Texture2D oldest = frames.Dequeue();
                Graphics.Blit( oldest, null as RenderTexture );

                // Unity objects are not garbage collected: without an
                // explicit Destroy, one full-screen Texture2D leaks every
                // frame once the texture leaves the queue.
                Destroy( oldest );
            }
        }
    }
}

答案 1 :(得分:0)

您可以将每个操作都作为Coroutine执行,并使用WaitForSeconds函数等待x秒

文档:https://docs.unity3d.com/ScriptReference/WaitForSeconds.html