Unity3D IP camera GUI lag

Time: 2018-11-26 22:18:01

Tags: c# opencv user-interface unity3d ip-camera

I am trying to get an IP camera feed in Unity and project it onto the GUI (a PictureBox). To access the IP camera feed I am using an asset called OpenCVForUnity. This is the code I attached to the object:

using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System;
using OpenCVForUnity;

namespace OpenCVForUnity
{
    public class sourceControl : MonoBehaviour
    {
        /// <summary>
        /// The videocapture.
        /// </summary>
        VideoCapture capture;

        /// <summary>
        /// The rgb mat.
        /// </summary>
        Mat rgbMat;

        /// <summary>
        /// The texture.
        /// </summary>
        Texture2D texture;

        /// <summary>
        /// Indicates whether the video frame needs updating.
        /// </summary>
        bool shouldUpdateVideoFrame = false;

        /// <summary>
        /// The prev frame tick count.
        /// </summary>
        long prevFrameTickCount;

        /// <summary>
        /// The current frame tick count.
        /// </summary>
        long currentFrameTickCount;

        /// <summary>
        /// The URL of the IP camera stream.
        /// </summary>
        public string source = "http://iris.not.iac.es/axis-cgi/mjpg/video.cgi?resolution=320x240?x.mjpeg";

        // Use this for initialization
        void Start()
        {
            capture = new VideoCapture();

            Initialize();
        }

        private void Initialize()
        {
            capture.open(source);
            rgbMat = new Mat();

            if (!capture.isOpened())
            {
                Debug.LogError("capture.isOpened() is false. Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            }

            // Codec FOURCC reported by the capture (not used further here).
            double ext = capture.get(Videoio.CAP_PROP_FOURCC);

            // Grab one frame to determine the frame size, then create a matching texture.
            capture.grab();
            capture.retrieve(rgbMat, 0);
            int frameWidth = rgbMat.cols();
            int frameHeight = rgbMat.rows();
            texture = new Texture2D(frameWidth, frameHeight, TextureFormat.RGB24, false);

            capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);

            gameObject.GetComponent<UnityEngine.UI.Image>().material.mainTexture = texture;

            StartCoroutine("WaitFrameTime");
        }

        void Update()
        {
            if (shouldUpdateVideoFrame)
            {
                shouldUpdateVideoFrame = false;

                // Loop playback
                if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
                    capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);

                if (capture.grab())
                {
                    capture.retrieve(rgbMat, 0);

                    Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);

                    Utils.fastMatToTexture2D(rgbMat, texture);
                }
            }
        }

        private IEnumerator WaitFrameTime()
        {
            // Fall back to 10 FPS when the capture does not report a frame rate.
            double videoFPS = (capture.get(Videoio.CAP_PROP_FPS) <= 0) ? 10.0 : capture.get(Videoio.CAP_PROP_FPS);
            int frameTime_msec = (int)Math.Round(1000.0 / videoFPS);

            while (true)
            {
                shouldUpdateVideoFrame = true;

                prevFrameTickCount = currentFrameTickCount;
                currentFrameTickCount = Core.getTickCount();

                yield return new WaitForSeconds(frameTime_msec / 1000f);
            }
        }

        /// <summary>
        /// Raises the destroy event.
        /// </summary>
        void OnDestroy()
        {
            StopCoroutine("WaitFrameTime");

            capture.release();

            if (rgbMat != null)
                rgbMat.Dispose();
        }
    }
}

(The code above was taken from an Example with very few modifications.) I can reach the video feed and display it on the Image object, but the problem is that every Update call seems to freeze the main thread for a fraction of a second, so the whole program is sluggish.

So my questions are:

Is this the right way to implement this?

If not, how should I implement it, or how should I change the texture update, so that it doesn't cause lag?
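
For context, here is a minimal sketch of the direction I am considering: moving the blocking grab()/retrieve() calls to a worker thread and leaving only the texture upload in Update(). This assumes the OpenCV calls are safe to run off Unity's main thread (only the Texture2D must be touched on the main thread); the class and member names below are my own, not from the asset.

using UnityEngine;
using System.Threading;
using OpenCVForUnity;

// Hypothetical sketch: a worker thread does the blocking network reads so Update() never stalls.
public class ThreadedSourceControl : MonoBehaviour
{
    public string source = "http://iris.not.iac.es/axis-cgi/mjpg/video.cgi?resolution=320x240?x.mjpeg";

    VideoCapture capture;
    Mat grabMat;                                // written by the worker thread
    Mat displayMat;                             // copy of the latest frame, read by the main thread
    Texture2D texture;
    readonly object matLock = new object();     // guards displayMat
    volatile bool newFrameReady = false;
    volatile bool running = false;
    Thread grabThread;

    void Start()
    {
        capture = new VideoCapture();
        capture.open(source);

        grabMat = new Mat();
        displayMat = new Mat();

        // Grab one frame to size the texture, then hand the texture to the UI Image material.
        capture.grab();
        capture.retrieve(grabMat, 0);
        texture = new Texture2D(grabMat.cols(), grabMat.rows(), TextureFormat.RGB24, false);
        GetComponent<UnityEngine.UI.Image>().material.mainTexture = texture;

        running = true;
        grabThread = new Thread(GrabLoop);
        grabThread.Start();
    }

    // Runs on the worker thread: blocking reads from the MJPEG stream happen here.
    void GrabLoop()
    {
        while (running)
        {
            if (capture.grab())
            {
                lock (matLock)
                {
                    capture.retrieve(grabMat, 0);
                    grabMat.copyTo(displayMat);
                    newFrameReady = true;
                }
            }
        }
    }

    // Runs on the main thread: only the color conversion and texture upload happen here.
    void Update()
    {
        if (!newFrameReady)
            return;

        lock (matLock)
        {
            Imgproc.cvtColor(displayMat, displayMat, Imgproc.COLOR_BGR2RGB);
            Utils.fastMatToTexture2D(displayMat, texture);
            newFrameReady = false;
        }
    }

    void OnDestroy()
    {
        running = false;
        if (grabThread != null)
            grabThread.Join();
        if (capture != null)
            capture.release();
        if (grabMat != null)
            grabMat.Dispose();
        if (displayMat != null)
            displayMat.Dispose();
    }
}

The copyTo under the lock keeps the frame that Update() reads consistent while the worker keeps grabbing, and the flags mean no Unity objects are ever touched from the worker thread. Is something like this the right direction?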

Thanks!

(Unity version 2018.2.16f1)

0 answers:

No answers yet.