如何在Unity中用C#进行实时光线跟踪

时间:2015-09-28 00:34:37

标签: c# unity3d raytracing

我正在用Unity制作一个视频游戏,并决定使用光线追踪。我已经写好了代码,但正如你马上会看到的,它并不能逐帧实时渲染。这是我的光线追踪代码,下面是挂在主摄像机上的主脚本。

using UnityEngine;
using System.Collections;

public class RayTracer : MonoBehaviour
{
    // Color intended for rays that hit nothing. NOTE(review): TraceRay never
    // actually uses it — missed pixels stay at the seed color; kept for the
    // public interface.
    public Color backgroundColor = Color.black;
    public float RenderResolution = 1f; // fraction of the screen resolution to trace at
    public float maxDist = 100f;        // maximum ray length for hits and shadow tests
    public int maxRecursion = 4;        // cap on reflection/transmission bounces

    private Light[] lights;          // every scene light, gathered once in Awake
    private Texture2D renderTexture; // CPU-side target the traced image is written into
    private Camera cam;              // cached camera; GetComponent per pixel is needlessly slow

    // Allocate the render target, cache the camera and collect the scene lights.
    void Awake()
    {
        renderTexture = new Texture2D((int)(Screen.width * RenderResolution), (int)(Screen.height * RenderResolution));
        cam = GetComponent<Camera>();
        lights = FindObjectsOfType(typeof(Light)) as Light[];
    }

    void Start()
    {
        // The scene is traced exactly once, not per frame.
        RayTrace();
    }

    // Blit the traced image over the whole screen.
    void OnGUI()
    {
        GUI.DrawTexture(new Rect(0, 0, Screen.width, Screen.height), renderTexture);
    }

    // Trace one primary ray per pixel of the render target.
    void RayTrace()
    {
        for (int x = 0; x < renderTexture.width; x++)
        {
            for (int y = 0; y < renderTexture.height; y++)
            {
                // Map the (possibly reduced-resolution) pixel back to screen space.
                Ray ray = cam.ScreenPointToRay(new Vector3(x / RenderResolution, y / RenderResolution, 0));

                renderTexture.SetPixel(x, y, TraceRay(ray, Color.black, 0));
            }
        }

        renderTexture.Apply();
    }

    // Shade a single ray: surface color modulated by direct lighting, plus
    // recursive reflection and transmission bounces up to maxRecursion.
    Color TraceRay(Ray ray, Color color, int recursiveLevel)
    {
        if (recursiveLevel < maxRecursion)
        {
            RaycastHit hit;
            if (Physics.Raycast(ray, out hit, maxDist))
            {
                Vector3 viewVector = ray.direction;
                // Offset along the normal so secondary rays don't re-hit this surface.
                Vector3 pos = hit.point + hit.normal * 0.0001f;
                Vector3 normal = hit.normal;

                // Only objects carrying a RayTracerObject know their shading coefficients.
                RayTracerObject rto = hit.collider.gameObject.GetComponent<RayTracerObject>();
                if (rto == null)
                {
                    var GO = hit.collider.gameObject;
                    Debug.Log("Raycast hit failure! On " + GO.name + " position " + GO.transform.position.ToString());
                    return color; // exit out
                }

                // Surface base color: bilinear texture sample if textured, flat color otherwise.
                Material mat = hit.collider.GetComponent<Renderer>().material;
                if (mat.mainTexture)
                {
                    color += (mat.mainTexture as Texture2D).GetPixelBilinear(hit.textureCoord.x, hit.textureCoord.y);
                }
                else
                {
                    color += mat.color;
                }

                color *= TraceLight(rto, viewVector, pos, normal);

                if (rto.reflectiveCoeff > 0)
                {
                    float reflet = 2.0f * Vector3.Dot(viewVector, normal);
                    Ray newRay = new Ray(pos, viewVector - reflet * normal);
                    // BUGFIX: seed the recursive ray with black. The old code passed
                    // the accumulated color in, so the base color was counted twice.
                    color += rto.reflectiveCoeff * TraceRay(newRay, Color.black, recursiveLevel + 1);
                }

                if (rto.transparentCoeff > 0)
                {
                    // Continue straight through, offset to the far side of the surface.
                    Ray newRay = new Ray(hit.point - hit.normal * 0.0001f, viewVector);
                    color += rto.transparentCoeff * TraceRay(newRay, Color.black, recursiveLevel + 1);
                }
            }
        }

        return color;
    }

    // Sum ambient light plus the contribution of every enabled scene light.
    Color TraceLight(RayTracerObject rto, Vector3 viewVector, Vector3 pos, Vector3 normal)
    {
        Color c = RenderSettings.ambientLight;

        foreach (Light light in lights)
        {
            if (light.enabled)
            {
                c += LightTrace(rto, light, viewVector, pos, normal);
            }
        }
        return c;
    }

    // Contribution of one light at a surface point, including the shadow test.
    // The three light types only differ in direction, attenuation and cone
    // checks; the shared Lambert/Phong/Blinn math lives in SurfaceContribution.
    Color LightTrace(RayTracerObject rto, Light light, Vector3 viewVector, Vector3 pos, Vector3 normal)
    {
        switch (light.type)
        {
            case LightType.Directional:
            {
                Vector3 direction = -light.transform.forward;
                float dot = Vector3.Dot(direction, normal);
                float contribution = 0f;
                if (dot > 0)
                {
                    // Shadow test: an occluder lies between the point and the light.
                    if (Physics.Raycast(pos, direction, maxDist))
                    {
                        return Color.black;
                    }
                    contribution = SurfaceContribution(rto, viewVector, normal, direction, dot);
                }
                return light.color * light.intensity * contribution;
            }
            case LightType.Point:
            {
                Vector3 direction = (light.transform.position - pos).normalized;
                float dot = Vector3.Dot(normal, direction);
                float distance = Vector3.Distance(pos, light.transform.position);
                float contribution = 0f;
                if (distance < light.range && dot > 0)
                {
                    if (Physics.Raycast(pos, direction, distance))
                    {
                        return Color.black;
                    }
                    contribution = SurfaceContribution(rto, viewVector, normal, direction, dot);
                }
                return contribution == 0f ? Color.black : light.color * light.intensity * contribution;
            }
            case LightType.Spot:
            {
                Vector3 direction = (light.transform.position - pos).normalized;
                float dot = Vector3.Dot(normal, direction);
                float distance = Vector3.Distance(pos, light.transform.position);
                float contribution = 0f;
                if (distance < light.range && dot > 0)
                {
                    // Crude cone test: is the point inside the spot angle?
                    float dot2 = Vector3.Dot(-light.transform.forward, direction);
                    if (dot2 > (1 - light.spotAngle / 180))
                    {
                        if (Physics.Raycast(pos, direction, distance))
                        {
                            return Color.black;
                        }
                        contribution = SurfaceContribution(rto, viewVector, normal, direction, dot);
                    }
                }
                return contribution == 0f ? Color.black : light.color * light.intensity * contribution;
            }
        }
        return Color.black;
    }

    // Lambert + Phong + Blinn-Phong terms for one light direction.
    // lightDir points from the surface toward the light; lambertDot is
    // dot(normal, lightDir), already known to be positive by the caller.
    float SurfaceContribution(RayTracerObject rto, Vector3 viewVector, Vector3 normal, Vector3 lightDir, float lambertDot)
    {
        float contribution = 0f;

        if (rto.lambertCoeff > 0)
        {
            contribution += lambertDot * rto.lambertCoeff;
        }

        if (rto.reflectiveCoeff > 0)
        {
            if (rto.phongCoeff > 0)
            {
                float reflet = 2.0f * Vector3.Dot(viewVector, normal);
                Vector3 phongDir = viewVector - reflet * normal;
                // BUGFIX: compare the reflected view vector against the light
                // direction. The old code dotted it with the view vector itself,
                // which made the highlight independent of the light.
                float phongTerm = Mathf.Max(Vector3.Dot(phongDir, lightDir), 0.0f);
                contribution += rto.reflectiveCoeff * Mathf.Pow(phongTerm, rto.phongPower) * rto.phongCoeff;
            }
            if (rto.blinnPhongCoeff > 0)
            {
                // BUGFIX: build the half-vector from the actual light direction;
                // the old code used light.transform.forward even for point and
                // spot lights, whose direction varies per surface point.
                Vector3 blinnDir = lightDir - viewVector;
                float len = Mathf.Sqrt(Vector3.Dot(blinnDir, blinnDir));
                if (len != 0.0f)
                {
                    blinnDir = (1.0f / len) * blinnDir;
                    float blinnTerm = Mathf.Max(Vector3.Dot(blinnDir, normal), 0.0f);
                    contribution += rto.reflectiveCoeff * Mathf.Pow(blinnTerm, rto.blinnPhongPower) * rto.blinnPhongCoeff;
                }
            }
        }

        return contribution;
    }
}

这是附加到场景中的对象的代码

using UnityEngine;
using System.Collections;

// Per-object shading coefficients read by RayTracer when a ray hits this object.
public class RayTracerObject : MonoBehaviour
{
    // Diffuse (Lambert) weight.
    public float lambertCoeff = 1f;

    // 0 = no mirror reflection; > 0 enables reflected rays and the specular terms.
    public float reflectiveCoeff = 0f;

    // Phong specular weight and exponent.
    public float phongCoeff = 1f;
    public float phongPower = 2f;

    // Blinn-Phong specular weight and exponent.
    public float blinnPhongCoeff = 1f;
    public float blinnPhongPower = 2f;

    // 0 = opaque; > 0 lets rays continue through the surface.
    public float transparentCoeff = 0f;

    // Fallback surface color used when the material has no main texture.
    public Color baseColor = Color.gray;

    void Awake()
    {
        // Fetch the material once: each Renderer.material access instantiates a
        // per-object copy, so the old double GetComponent/.material was wasteful.
        Material mat = GetComponent<Renderer>().material;
        if (!mat.mainTexture)
        {
            mat.color = baseColor;
        }
    }
}

我该怎么做呢?代码是什么?

3 个答案:

答案 0 :(得分:6)

虽然主线程中的光线跟踪是一个完全可以接受的设计,但它可能不是你想要的,因为它阻止了其他一切。

现在你可以另起一个子线程来执行光线追踪,并让主线程呈现结果。但问题在于,这两种方法都没有用到GPU,而这首先就违背了使用Unity的初衷。

  

如何在Unity中用C#实时进行光线追踪

这完全取决于你的场景包含什么以及你打算如何渲染它。你可以在低分辨率下实时渲染一些简单的东西,然而渲染具有合理的屏幕分辨率和合理的光线反射水平,即用反射或透射材料投射的递归光线的数量可能要困难得多。

相反,我建议您关注光线追踪的最新趋势:实时光线追踪现在是在GPU上使用一种称为"通用GPU计算"(GPGPU)的技术来执行的。nVidia就此主题做过一些演讲,可以在YouTube上找到。Here is my sample Unity GPGPU galaxy simulation,可以作为了解GPGPU的背景资料。

示例GPGPU内核仅仅是为了向您展示GPGPU的内容:

// File: Galaxy1Compute.compute

// Each #kernel tells which function to compile; you can have many kernels
#pragma kernel UpdateStars

#include "Galaxy.cginc"

// Threads per thread group along X; dispatch count is rounded up to this.
#define BLOCKSIZE   128

RWStructuredBuffer<Star> stars;

Texture2D HueTexture;

// refer to http://forum.unity3d.com/threads/163591-Compute-Shader-SamplerState-confusion
SamplerState samplerHueTexture;

// time elapsed since last frame (seconds), set from the CPU each dispatch
float deltaTime;

const float Softening=3e4f;
#define Softening2  Softening * Softening

static float G = 6.67300e-11f;
static float DefaultMass = 1000000.0f;

// Do a pre-calculation assuming all the stars have the same mass
static float GMM = G*DefaultMass*DefaultMass;

// One thread per star: accumulate softened gravitational acceleration from
// every other star, then integrate velocity and position by deltaTime.
[numthreads(BLOCKSIZE,1,1)]
void UpdateStars (uint3 id : SV_DispatchThreadID)
{
    uint i = id.x;
    uint numStars, stride;
    stars.GetDimensions(numStars, stride);

    // BUGFIX: guard BEFORE any buffer read. The dispatch is rounded up to
    // BLOCKSIZE, so trailing threads have i >= numStars; the old code read
    // stars[i] out of bounds and only checked the index at write time.
    if (i >= numStars)
    {
        return;
    }

    float3 position = stars[i].position;
    float3 velocity = stars[i].velocity;

    // Accumulated acceleration on star i.
    float3 A = float3(0,0,0);

    [loop]
    for (uint j = 0; j < numStars; j++)
    {
        if (i != j)
        {
            float3 D = stars[j].position - stars[i].position;
            float r = length(D);
            // Softening2 keeps the force finite for near-coincident stars.
            float f = GMM / (r * r + Softening2);
            A += f * normalize(D);
        }
    }

    // Semi-implicit Euler integration.
    velocity += A * deltaTime;
    position += velocity * deltaTime;

    stars[i].velocity = velocity;
    stars[i].position = position;
    stars[i].accelMagnitude = length(A);
}

此外,还有一些关于这个主题的精美书籍。 Real-time Volume Graphics ,虽然它涵盖了体积,但确实涵盖了投射光线 - 光线追踪的本质。最难的范式转换是GPGPU的编写,一旦你理解了它,编写GPGPU光线跟踪器就可以轻松地从GPGPU卷着色器开始。

enter image description here

伴随任何光线跟踪作者的一本奇妙的大部头是Matt Pharr的Physically Based Rendering书(有第2版,但我没看过)

enter image description here

答案 1 :(得分:0)

Nvidia宣布了NVIDIA RTX™,这是一种光线跟踪技术,可以为内容创建者和游戏开发人员带来实时的电影品质的渲染。

它由运行在NVIDIA Volta体系结构GPU上的光线跟踪引擎组成,旨在支持通过多种接口(API)进行光线跟踪。

这些结果使游戏开发人员可以在工作中进行光线投射,以获得电影质量的输出。

https://nvidianews.nvidia.com/news/nvidia-rtx-technology-realizes-dream-of-real-time-cinematic-rendering

Unity将在未来的更新中支持这一新的DirectX Raytracing API。届时,游戏开发人员就可以在Unity渲染管线中获得逼真的渲染输出。

答案 2 :(得分:0)

因此,当我们都看到了有关RTX卡的炒作之后,我们需要回答一个问题,它实际上在做什么?好吧,基本上它是硬件加速的raycaster,它已经过优化以完成其工作。

但是没有人说您不能在其他任何显卡上做硬件加速的光线投射(raycasting)。在Unity中,您可以通过着色器访问硬件加速,并用计算着色器(compute shader)编写自己的光线投射器。它会比高度优化的RTX卡慢得多,但在某些方面也会给您带来优势。

但是,嘿,因为它比RTX慢,为什么我需要这样做。好吧,通常来说,您可以使用此方法来增强渲染效果。例如,柔化阴影,尝试全局照明,各种操作。但是要回答您的问题,如果没有RTX卡,您将无法进行全面的光线追踪。