HoloLens - How to get the 2D webcam texture from Vuforia

Asked: 2019-10-22 07:43:08

Tags: unity3d vuforia hololens

I'm developing a Vuforia application for the HoloLens with Unity.

When an image target is detected, the app displays a simple 3D object.

I'm also using the fm Exhibition Tool Pack hololens from the Unity Asset Store to stream the app running on the HoloLens to a PC.

Everything works fine, but when I stream the app to the PC I only see the 3D Unity scene instead of the room.

So I tried to grab the webcam texture and attach it to a cube inside the scene, but the Vuforia ARCamera conflicts with it and nothing shows up on the cube. When I run the app in the Unity simulator, on the other hand, I can see myself on the cube.

Is it possible to get the 2D webcam texture from Vuforia and attach it to a GameObject inside the scene? Maybe with the Vuforia.Image class? I don't know how that works, though.

2 Answers:

Answer 0 (score: 1):

The following script is compatible with FMETP STREAM. It has been tested on mobile devices.

using UnityEngine;
using System.Collections;
using Vuforia;
using UnityEngine.UI;

public class VuforiaCamAccess : MonoBehaviour
{
    private bool mAccessCameraImage = true;
    public RawImage rawImage;
    public GameObject Mesh;
    private Texture2D texture;

#if UNITY_EDITOR
    private Vuforia.PIXEL_FORMAT mPixelFormat = Vuforia.PIXEL_FORMAT.GRAYSCALE;
#else
    private Vuforia.PIXEL_FORMAT mPixelFormat =  Vuforia.PIXEL_FORMAT.RGB888;
#endif

    private bool mFormatRegistered = false;

    void Start()
    {
#if UNITY_EDITOR
        texture = new Texture2D(Screen.width, Screen.height, TextureFormat.R8, false);
#else
        texture = new Texture2D(Screen.width, Screen.height, TextureFormat.RGB24, false);
#endif
        // Register Vuforia life-cycle callbacks:
        Vuforia.VuforiaARController.Instance.RegisterVuforiaStartedCallback(OnVuforiaStarted);
        Vuforia.VuforiaARController.Instance.RegisterOnPauseCallback(OnPause);
        Vuforia.VuforiaARController.Instance.RegisterTrackablesUpdatedCallback(OnTrackablesUpdated);
    }

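    /// <summary>
    /// Called when Vuforia has started: registers the desired camera pixel
    /// format so that GetCameraImage can later return frames in that format.
    /// </summary>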
    private void OnVuforiaStarted()
    {
        // Try register camera image format
        if (CameraDevice.Instance.SetFrameFormat(mPixelFormat, true))
        {
            Debug.Log("Successfully registered pixel format " + mPixelFormat.ToString());
            mFormatRegistered = true;
        }
        else
        {
            Debug.LogError("Failed to register pixel format " + mPixelFormat.ToString() +
                "\n the format may be unsupported by your device;" +
                "\n consider using a different pixel format.");
            mFormatRegistered = false;
        }
    }

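    /// <summary>
    /// Called when the app is paused / resumed; the frame format has to be
    /// unregistered on pause and registered again on resume.
    /// </summary>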
    private void OnPause(bool paused)
    {
        if (paused)
        {
            Debug.Log("App was paused");
            UnregisterFormat();
        }
        else
        {
            Debug.Log("App was resumed");
            RegisterFormat();
        }
    }

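    /// <summary>
    /// Called each time Vuforia updates its trackables: grabs the latest
    /// camera frame and hands it to the SetTexture coroutine.
    /// </summary>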
    private void OnTrackablesUpdated()
    {
        //skip if still loading image to texture2d
        if (LoadingTexture) return;

        if (mFormatRegistered)
        {
            if (mAccessCameraImage)
            {
                Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
                //if (image != null && image.IsValid())
                if (image != null)
                {
                    byte[] pixels = image.Pixels;
                    int width = image.Width;
                    int height = image.Height;
                    StartCoroutine(SetTexture(pixels, width, height));
                }
            }
        }
    }

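    // True while a captured frame is still being copied into the Texture2D;
    // OnTrackablesUpdated skips new frames until the copy has finished.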
    bool LoadingTexture = false;
    IEnumerator SetTexture(byte[] pixels, int width, int height)
    {
        if (!LoadingTexture)
        {
            LoadingTexture = true;
            if (pixels != null && pixels.Length > 0)
            {
                if (texture.width != width || texture.height != height)
                {
#if UNITY_EDITOR
                    texture = new Texture2D(width, height, TextureFormat.R8, false);
#else
                    texture = new Texture2D(width, height, TextureFormat.RGB24, false);
#endif
                }

                texture.LoadRawTextureData(pixels);
                texture.Apply();

                if (rawImage != null)
                {
                    rawImage.texture = texture;
                    rawImage.material.mainTexture = texture;
                }

                if (Mesh != null) Mesh.GetComponent<Renderer>().material.mainTexture = texture;
            }
            yield return null;
            LoadingTexture = false;
        }
    }

    private void UnregisterFormat()
    {
        Debug.Log("Unregistering camera pixel format " + mPixelFormat.ToString());
        CameraDevice.Instance.SetFrameFormat(mPixelFormat, false);
        mFormatRegistered = false;
    }

    private void RegisterFormat()
    {
        if (CameraDevice.Instance.SetFrameFormat(mPixelFormat, true))
        {
            Debug.Log("Successfully registered camera pixel format " + mPixelFormat.ToString());
            mFormatRegistered = true;
        }
        else
        {
            Debug.LogError("Failed to register camera pixel format " + mPixelFormat.ToString());
            mFormatRegistered = false;
        }
    }
}
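A minimal usage sketch (the object names "CamFeed" and "Cube" below are assumptions for illustration, not part of the script above): add the component at runtime and point it at a UI RawImage and/or a mesh. Note that the pixel format / TextureFormat pairs must stay matched byte-for-byte (GRAYSCALE with R8 in the Editor, RGB888 with RGB24 on device), otherwise LoadRawTextureData will reject the buffer.

using UnityEngine;
using UnityEngine.UI;

public class VuforiaCamAccessSetup : MonoBehaviour
{
    void Start()
    {
        // Attach the camera-access script and wire up its output targets.
        VuforiaCamAccess camAccess = gameObject.AddComponent<VuforiaCamAccess>();

        // "CamFeed" (a RawImage on a Canvas) and "Cube" are hypothetical scene objects.
        camAccess.rawImage = GameObject.Find("CamFeed").GetComponent<RawImage>();
        camAccess.Mesh = GameObject.Find("Cube");
    }
}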

Answer 1 (score: 0):

I decided to post the script that should be able to get the webcam image through Vuforia. Note that it is not optimized for low-end devices; there is a "stupid loop" in it that may affect performance.

If anyone uses the FM Exhibition Tool Pack stream with Vuforia on a first-generation HoloLens, I would suggest streaming the whole pixel byte[] data to the Mac/PC first and then using it as the background (a rough sketch of that idea follows the script below).

I have only tested this on an iPhone 6s, but in theory it should also work on the HoloLens.

using UnityEngine;
using System.Collections;
using Vuforia;
using UnityEngine.UI;

public class CamAccess : MonoBehaviour
{
    private bool mAccessCameraImage = true;
    public RawImage rawImage;
    public GameObject Mesh;
    private Texture2D texture;

    // The desired camera image pixel format
    // private Image.PIXEL_FORMAT mPixelFormat = Image.PIXEL_FORMAT.RGB565; // or RGBA8888, RGB888, RGB565, YUV
#if UNITY_EDITOR
    private Vuforia.PIXEL_FORMAT mPixelFormat = Vuforia.PIXEL_FORMAT.GRAYSCALE;
#else
    private Vuforia.PIXEL_FORMAT mPixelFormat = Vuforia.PIXEL_FORMAT.RGB888;
#endif

    // Boolean flag telling whether the pixel format has been registered
    private bool mFormatRegistered = false;

    void Start()
    {
#if UNITY_EDITOR
        texture = new Texture2D(Screen.width, Screen.height, TextureFormat.R8, false);
#else
        texture = new Texture2D(Screen.width, Screen.height, TextureFormat.RGB24, false);
#endif
        // Register Vuforia life-cycle callbacks:
        Vuforia.VuforiaARController.Instance.RegisterVuforiaStartedCallback(OnVuforiaStarted);
        Vuforia.VuforiaARController.Instance.RegisterOnPauseCallback(OnPause);
        Vuforia.VuforiaARController.Instance.RegisterTrackablesUpdatedCallback(OnTrackablesUpdated);
    }
    /// <summary>
    /// Called when Vuforia is started
    /// </summary>
    private void OnVuforiaStarted()
    {
        // Try register camera image format
        if (CameraDevice.Instance.SetFrameFormat(mPixelFormat, true))
        {
            Debug.Log("Successfully registered pixel format " + mPixelFormat.ToString());
            mFormatRegistered = true;
        }
        else
        {
            Debug.LogError("Failed to register pixel format " + mPixelFormat.ToString() +
                "\n the format may be unsupported by your device;" +
                "\n consider using a different pixel format.");
            mFormatRegistered = false;
        }
    }
    /// <summary>
    /// Called when app is paused / resumed
    /// </summary>
    private void OnPause(bool paused)
    {
        if (paused)
        {
            Debug.Log("App was paused");
            UnregisterFormat();
        }
        else
        {
            Debug.Log("App was resumed");
            RegisterFormat();
        }
    }
    /// <summary>
    /// Called each time the Vuforia state is updated
    /// </summary>
    private void OnTrackablesUpdated()
    {
        //skip if still loading image to texture2d
        if (LoadingTexture) return;

        if (mFormatRegistered)
        {
            if (mAccessCameraImage)
            {
                Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
                //if (image != null && image.IsValid())
                if (image != null)
                {
                    byte[] pixels = image.Pixels;
                    int width = image.Width;
                    int height = image.Height;
                    StartCoroutine(SetTexture(pixels, width, height));
                }
            }
        }
    }

    bool LoadingTexture = false;
    IEnumerator SetTexture(byte[] pixels, int width, int height)
    {
        if (!LoadingTexture)
        {
            LoadingTexture = true;
            if (pixels != null && pixels.Length > 0)
            {
                if (texture.width != width || texture.height != height)
                {
#if UNITY_EDITOR
                    texture = new Texture2D(width, height, TextureFormat.R8, false);
#else
                    texture = new Texture2D(width, height, TextureFormat.RGB24, false);
#endif
                }

                texture.LoadRawTextureData(pixels);
                texture.Apply();

                if (rawImage != null)
                {
                    rawImage.texture = texture;
                    rawImage.material.mainTexture = texture;
                }
                if (Mesh != null) Mesh.GetComponent<Renderer>().material.mainTexture = texture;
            }
            yield return null;
            LoadingTexture = false;
        }
    }

    /// <summary>
    /// Unregister the camera pixel format (e.g. call this when app is paused)
    /// </summary>
    private void UnregisterFormat()
    {
        Debug.Log("Unregistering camera pixel format " + mPixelFormat.ToString());
        CameraDevice.Instance.SetFrameFormat(mPixelFormat, false);
        mFormatRegistered = false;
    }
    /// <summary>
    /// Register the camera pixel format
    /// </summary>
    private void RegisterFormat()
    {
        if (CameraDevice.Instance.SetFrameFormat(mPixelFormat, true))
        {
            Debug.Log("Successfully registered camera pixel format " + mPixelFormat.ToString());
            mFormatRegistered = true;
        }
        else
        {
            Debug.LogError("Failed to register camera pixel format " + mPixelFormat.ToString());
            mFormatRegistered = false;
        }
    }
}
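As a rough sketch of the "stream the whole pixel byte[] to the PC" idea mentioned above (this is not FMETP-specific; the actual send/receive calls belong to whatever streaming layer you use, and the packet layout here is only an assumption for illustration), you can prepend the frame size to the raw pixels so the receiver can rebuild a Texture2D:

using System;
using UnityEngine;

public static class CameraFramePacker
{
    // Packs width, height and the raw pixel bytes into one byte[]
    // that can be handed to any network/streaming layer.
    public static byte[] Pack(int width, int height, byte[] pixels)
    {
        byte[] packet = new byte[8 + pixels.Length];
        Buffer.BlockCopy(BitConverter.GetBytes(width), 0, packet, 0, 4);
        Buffer.BlockCopy(BitConverter.GetBytes(height), 0, packet, 4, 4);
        Buffer.BlockCopy(pixels, 0, packet, 8, pixels.Length);
        return packet;
    }

    // On the receiving side, rebuild a Texture2D (assuming RGB888 / RGB24 frames).
    public static Texture2D Unpack(byte[] packet)
    {
        int width = BitConverter.ToInt32(packet, 0);
        int height = BitConverter.ToInt32(packet, 4);
        byte[] pixels = new byte[packet.Length - 8];
        Buffer.BlockCopy(packet, 8, pixels, 0, pixels.Length);

        Texture2D tex = new Texture2D(width, height, TextureFormat.RGB24, false);
        tex.LoadRawTextureData(pixels);
        tex.Apply();
        return tex;
    }
}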