How to change the camera input (to the DJI drone camera) in Unity and export to Android Studio

Time: 2018-07-27 07:13:42

Tags: android unity3d dji-sdk

Hi, I'm working on a simple augmented reality project with a DJI drone, using Android Studio and Unity. I'm asking this question because I have some doubts about the camera.

  1. With the SDK provided by DJI, I can control the drone's camera from Android Studio. After exporting my Unity project (a Mapbox example) to Android Studio, I added some code to connect to my drone, and found that Unity opens my phone's camera instead of the drone's camera (they run in different Activities). There is nothing camera-related in UnityPlayerActivity, so I think the change has to be made inside the Unity project, but I can't find where to change the input camera. Is there any way to solve this? The C# script ARController.cs from the Unity project is shown below (a possible workaround is sketched after the listing):

    using UnityEngine;
    using System.Collections;
    
    #if UNITY_EDITOR
    using UnityEngine.Networking.PlayerConnection;
    using UnityEditor.Networking.PlayerConnection;
    #endif
    
    namespace UnityARInterface
    {
    public class ARController : MonoBehaviour
    {
    protected ARInterface m_ARInterface;
    
    [SerializeField]
    protected Camera m_ARCamera;
    public Camera arCamera { get { return m_ARCamera; } }
    
    [SerializeField]
    private bool m_PlaneDetection;
    
    [SerializeField]
    private bool m_LightEstimation;
    
    [SerializeField]
    private bool m_PointCloud;
    
    [SerializeField]
    private bool m_BackgroundRendering = true;
    
    [SerializeField]
    private float m_Scale = 1f;
    
    public virtual bool BackgroundRendering {
        get { return m_BackgroundRendering; }
    
        set {
            if(m_ARInterface != null){
                m_ARInterface.BackgroundRendering = m_BackgroundRendering = value;
            }
        }
    }
    
    public float scale
    {
        set
        {
            m_Scale = value;
    
            var root = m_ARCamera.transform.parent;
            if (root)
            {
                var poiInRootSpace = root.InverseTransformPoint(pointOfInterest);
                root.localPosition = m_InvRotation * (-poiInRootSpace * m_Scale) + pointOfInterest;
            }
        }
    
        get { return m_Scale; }
    }
    
    public Vector3 pointOfInterest;
    private Quaternion m_Rotation = Quaternion.identity;
    private Quaternion m_InvRotation = Quaternion.identity;
    public Quaternion rotation
    {
        get { return m_Rotation; }
        set
        {
            var root = m_ARCamera.transform.parent;
            if (root)
            {
                m_Rotation = value;
                m_InvRotation = Quaternion.Inverse(rotation);
                var poiInRootSpace = root.InverseTransformPoint(pointOfInterest);
    
                root.localPosition = m_InvRotation * (-poiInRootSpace * scale) + pointOfInterest;
                root.localRotation = m_InvRotation;
            }
        }
    }
    
    public bool IsRunning
    {
        get
        {
            if (m_ARInterface == null)
                return false;
            return m_ARInterface.IsRunning;
        }
    }
    
    public void AlignWithPointOfInterest(Vector3 position)
    {
        var root = m_ARCamera.transform.parent;
        if (root)
        {
            var poiInRootSpace = root.InverseTransformPoint(position - pointOfInterest);
            root.localPosition = m_InvRotation * (-poiInRootSpace * scale);
        }
    }
    
    void OnBeforeRender()
    {
        m_ARInterface.UpdateCamera(m_ARCamera);
    
        Pose pose = new Pose();
        if (m_ARInterface.TryGetPose(ref pose))
        {
            m_ARCamera.transform.localPosition = pose.position;
            m_ARCamera.transform.localRotation = pose.rotation;
            var parent = m_ARCamera.transform.parent;
            if (parent != null)
                parent.localScale = Vector3.one * scale;
        }
    }
    
    protected virtual void SetupARInterface()
    {
        m_ARInterface = ARInterface.GetInterface();
    }
    
    private void OnEnable()
    {
        Application.targetFrameRate = 60;
        Screen.sleepTimeout = SleepTimeout.NeverSleep;
        Input.simulateMouseWithTouches = true;
    
        if (m_ARInterface == null)
            SetupARInterface();
    
        // See if we are on a camera
        if (m_ARCamera == null)
            m_ARCamera = GetComponent<Camera>();
    
        // Fallback to main camera
        if (m_ARCamera == null)
            m_ARCamera = Camera.main;
    
        StopAllCoroutines();
        StartCoroutine(StartServiceRoutine());
    
    }
    
    IEnumerator StartServiceRoutine()
    {
        yield return m_ARInterface.StartService(GetSettings());
        if (IsRunning)
        {
            m_ARInterface.SetupCamera(m_ARCamera);
            m_ARInterface.BackgroundRendering = BackgroundRendering;
            Application.onBeforeRender += OnBeforeRender;
        }
        else
        {
            enabled = false;
        }
    }
    
    
    void OnDisable()
    {
        StopAllCoroutines();
        if (IsRunning)
        {
            m_ARInterface.StopService();
            Application.onBeforeRender -= OnBeforeRender;
        }
    }
    
    void Update()
    {
        m_ARInterface.Update();
    }
    
    public ARInterface.Settings GetSettings()
    {
        return new ARInterface.Settings()
        {
            enablePointCloud = m_PointCloud,
            enablePlaneDetection = m_PlaneDetection,
            enableLightEstimation = m_LightEstimation
        };
    }
    

    }
    }
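
One possible workaround (a minimal sketch, not part of the DJI SDK or of Unity's ARInterface): turn off Background Rendering on the ARController so Unity stops drawing the phone-camera feed, and instead upload decoded frames from the drone's video feed into a Texture2D that is shown behind the AR content. The script below assumes a hypothetical Android plugin class (com.example.dji.VideoFeedBridge, which you would implement yourself on top of the DJI SDK's video feed) that decodes each frame to raw RGBA bytes and exposes them through a getLatestRgbaFrame() method; the class name, method name and frame size are placeholders.

    using UnityEngine;
    using UnityEngine.UI;

    // Sketch: shows RGBA frames pulled from a hypothetical Android plugin
    // (built around the DJI SDK video feed) on a full-screen RawImage,
    // replacing the phone-camera background that ARController would render.
    public class DroneVideoBackground : MonoBehaviour
    {
        public RawImage target;          // full-screen RawImage behind the AR content
        public int frameWidth = 1280;    // must match what the Android side decodes to
        public int frameHeight = 720;

        private Texture2D m_Texture;
    #if UNITY_ANDROID && !UNITY_EDITOR
        private AndroidJavaObject m_Plugin;
    #endif

        void Start()
        {
            m_Texture = new Texture2D(frameWidth, frameHeight, TextureFormat.RGBA32, false);
            target.texture = m_Texture;
    #if UNITY_ANDROID && !UNITY_EDITOR
            // Placeholder class: your own Java wrapper around the DJI video feed.
            m_Plugin = new AndroidJavaObject("com.example.dji.VideoFeedBridge");
    #endif
        }

        void Update()
        {
    #if UNITY_ANDROID && !UNITY_EDITOR
            // Hypothetical method: returns the latest decoded RGBA frame, or null.
            byte[] frame = m_Plugin.Call<byte[]>("getLatestRgbaFrame");
            if (frame != null && frame.Length == frameWidth * frameHeight * 4)
            {
                m_Texture.LoadRawTextureData(frame);
                m_Texture.Apply(false);
            }
    #endif
        }
    }

With something like this in place you would also set BackgroundRendering to false on the ARController (or untick it in the Inspector), so the ARInterface background renderer no longer draws the phone camera over the drone image; the phone camera is then only used for tracking, if at all.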

  2. Since my DJI drone is developed against the Android SDK, I have to do that part in Android Studio. I only use Unity because it makes it easier to handle the Mapbox AR scene and 3D game objects. Combining the two sides has made development much harder than I expected. Should I not be doing the project this way, or is there a better approach? (A minimal bridging sketch follows.)
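
For what it's worth, one common way to combine the two sides is to keep all DJI-specific code in a small Android plugin and call it from Unity through AndroidJavaClass/AndroidJavaObject, so the Unity project itself stays free of SDK code and re-exporting from Unity to Android Studio is less painful. A minimal sketch follows; com.example.dji.DroneBridge and its startVideoFeed() method are placeholders for whatever Java/Kotlin wrapper you write around the DJI Mobile SDK.

    using UnityEngine;

    // Sketch: calling from Unity into a hypothetical Android wrapper
    // around the DJI Mobile SDK.
    public class DroneBridge : MonoBehaviour
    {
    #if UNITY_ANDROID && !UNITY_EDITOR
        private AndroidJavaObject m_Bridge;

        void Start()
        {
            // Grab the current UnityPlayerActivity so the wrapper can register
            // DJI SDK callbacks against a real Android context.
            using (var unityPlayer = new AndroidJavaClass("com.unity3d.player.UnityPlayer"))
            {
                AndroidJavaObject activity = unityPlayer.GetStatic<AndroidJavaObject>("currentActivity");
                // Placeholder class and constructor: your own Java-side bridge.
                m_Bridge = new AndroidJavaObject("com.example.dji.DroneBridge", activity);
            }
        }

        public void StartVideoFeed()
        {
            // Placeholder method that would start the DJI camera / video feed.
            m_Bridge.Call("startVideoFeed");
        }
    #endif
    }

The Android side can push data back (telemetry, frame availability, connection state) with UnityPlayer.UnitySendMessage, which keeps the coupling between the two projects down to a handful of calls.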

0 Answers:

No answers yet