2024-09-20 20:30:10 +02:00
commit 4fabf1a6fd
29169 changed files with 1706941 additions and 0 deletions


@@ -0,0 +1,154 @@
using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering
{
/// <summary>
/// Helper static class used by render pipelines to set up the stereo constants accessed by builtin shaders.
/// </summary>
public static class XRBuiltinShaderConstants
{
/// <summary>
/// Cached unique id for unity_StereoCameraProjection
/// </summary>
static public readonly int unity_StereoCameraProjection = Shader.PropertyToID("unity_StereoCameraProjection");
/// <summary>
/// Cached unique id for unity_StereoCameraInvProjection
/// </summary>
static public readonly int unity_StereoCameraInvProjection = Shader.PropertyToID("unity_StereoCameraInvProjection");
/// <summary>
/// Cached unique id for unity_StereoMatrixV
/// </summary>
static public readonly int unity_StereoMatrixV = Shader.PropertyToID("unity_StereoMatrixV");
/// <summary>
/// Cached unique id for unity_StereoMatrixInvV
/// </summary>
static public readonly int unity_StereoMatrixInvV = Shader.PropertyToID("unity_StereoMatrixInvV");
/// <summary>
/// Cached unique id for unity_StereoMatrixP
/// </summary>
static public readonly int unity_StereoMatrixP = Shader.PropertyToID("unity_StereoMatrixP");
/// <summary>
/// Cached unique id for unity_StereoMatrixInvP
/// </summary>
static public readonly int unity_StereoMatrixInvP = Shader.PropertyToID("unity_StereoMatrixInvP");
/// <summary>
/// Cached unique id for unity_StereoMatrixVP
/// </summary>
static public readonly int unity_StereoMatrixVP = Shader.PropertyToID("unity_StereoMatrixVP");
/// <summary>
/// Cached unique id for unity_StereoMatrixInvVP
/// </summary>
static public readonly int unity_StereoMatrixInvVP = Shader.PropertyToID("unity_StereoMatrixInvVP");
/// <summary>
/// Cached unique id for unity_StereoWorldSpaceCameraPos
/// </summary>
static public readonly int unity_StereoWorldSpaceCameraPos = Shader.PropertyToID("unity_StereoWorldSpaceCameraPos");
// Pre-allocate arrays to avoid GC
static Matrix4x4[] s_cameraProjMatrix = new Matrix4x4[2];
static Matrix4x4[] s_invCameraProjMatrix = new Matrix4x4[2];
static Matrix4x4[] s_viewMatrix = new Matrix4x4[2];
static Matrix4x4[] s_invViewMatrix = new Matrix4x4[2];
static Matrix4x4[] s_projMatrix = new Matrix4x4[2];
static Matrix4x4[] s_invProjMatrix = new Matrix4x4[2];
static Matrix4x4[] s_viewProjMatrix = new Matrix4x4[2];
static Matrix4x4[] s_invViewProjMatrix = new Matrix4x4[2];
static Vector4[] s_worldSpaceCameraPos = new Vector4[2];
/// <summary>
/// Update the shader constant data used by the C++ builtin renderer.
/// </summary>
/// <param name="viewMatrix"></param>
/// <param name="projMatrix"></param>
/// <param name="renderIntoTexture"></param>
/// <param name="viewIndex"></param>
public static void UpdateBuiltinShaderConstants(Matrix4x4 viewMatrix, Matrix4x4 projMatrix, bool renderIntoTexture, int viewIndex)
{
#if ENABLE_VR && ENABLE_XR_MODULE
s_cameraProjMatrix[viewIndex] = projMatrix;
s_viewMatrix[viewIndex] = viewMatrix;
s_projMatrix[viewIndex] = GL.GetGPUProjectionMatrix(s_cameraProjMatrix[viewIndex], renderIntoTexture);
s_viewProjMatrix[viewIndex] = s_projMatrix[viewIndex] * s_viewMatrix[viewIndex];
s_invCameraProjMatrix[viewIndex] = Matrix4x4.Inverse(s_cameraProjMatrix[viewIndex]);
s_invViewMatrix[viewIndex] = Matrix4x4.Inverse(s_viewMatrix[viewIndex]);
s_invProjMatrix[viewIndex] = Matrix4x4.Inverse(s_projMatrix[viewIndex]);
s_invViewProjMatrix[viewIndex] = Matrix4x4.Inverse(s_viewProjMatrix[viewIndex]);
s_worldSpaceCameraPos[viewIndex] = s_invViewMatrix[viewIndex].GetColumn(3);
#endif
}
/// <summary>
/// Bind the shader constants used by the C++ builtin renderer via a command buffer. `UpdateBuiltinShaderConstants` should be called beforehand to update the constants.
/// This is required to maintain compatibility with legacy code and shaders.
/// </summary>
/// <param name="cmd"></param>
public static void SetBuiltinShaderConstants(CommandBuffer cmd)
{
#if ENABLE_VR && ENABLE_XR_MODULE
cmd.SetGlobalMatrixArray(unity_StereoCameraProjection, s_cameraProjMatrix);
cmd.SetGlobalMatrixArray(unity_StereoCameraInvProjection, s_invCameraProjMatrix);
cmd.SetGlobalMatrixArray(unity_StereoMatrixV, s_viewMatrix);
cmd.SetGlobalMatrixArray(unity_StereoMatrixInvV, s_invViewMatrix);
cmd.SetGlobalMatrixArray(unity_StereoMatrixP, s_projMatrix);
cmd.SetGlobalMatrixArray(unity_StereoMatrixInvP, s_invProjMatrix);
cmd.SetGlobalMatrixArray(unity_StereoMatrixVP, s_viewProjMatrix);
cmd.SetGlobalMatrixArray(unity_StereoMatrixInvVP, s_invViewProjMatrix);
cmd.SetGlobalVectorArray(unity_StereoWorldSpaceCameraPos, s_worldSpaceCameraPos);
#endif
}
/// <summary>
/// Update and bind the shader constants used by the C++ builtin renderer for the given XRPass. For finer control over the builtin shader constants, see `UpdateBuiltinShaderConstants`
/// and `SetBuiltinShaderConstants`, which perform the same logic but can take custom projection and view matrices instead.
/// This is required to maintain compatibility with legacy code and shaders.
/// </summary>
/// <param name="xrPass"></param>
/// <param name="cmd"></param>
/// <param name="renderIntoTexture"></param>
public static void Update(XRPass xrPass, CommandBuffer cmd, bool renderIntoTexture)
{
#if ENABLE_VR && ENABLE_XR_MODULE
if (xrPass.enabled)
{
cmd.SetViewProjectionMatrices(xrPass.GetViewMatrix(), xrPass.GetProjMatrix());
if (xrPass.singlePassEnabled)
{
for (int viewIndex = 0; viewIndex < 2; ++viewIndex)
{
s_cameraProjMatrix[viewIndex] = xrPass.GetProjMatrix(viewIndex);
s_viewMatrix[viewIndex] = xrPass.GetViewMatrix(viewIndex);
s_projMatrix[viewIndex] = GL.GetGPUProjectionMatrix(s_cameraProjMatrix[viewIndex], renderIntoTexture);
s_viewProjMatrix[viewIndex] = s_projMatrix[viewIndex] * s_viewMatrix[viewIndex];
s_invCameraProjMatrix[viewIndex] = Matrix4x4.Inverse(s_cameraProjMatrix[viewIndex]);
s_invViewMatrix[viewIndex] = Matrix4x4.Inverse(s_viewMatrix[viewIndex]);
s_invProjMatrix[viewIndex] = Matrix4x4.Inverse(s_projMatrix[viewIndex]);
s_invViewProjMatrix[viewIndex] = Matrix4x4.Inverse(s_viewProjMatrix[viewIndex]);
s_worldSpaceCameraPos[viewIndex] = s_invViewMatrix[viewIndex].GetColumn(3);
}
cmd.SetGlobalMatrixArray(unity_StereoCameraProjection, s_cameraProjMatrix);
cmd.SetGlobalMatrixArray(unity_StereoCameraInvProjection, s_invCameraProjMatrix);
cmd.SetGlobalMatrixArray(unity_StereoMatrixV, s_viewMatrix);
cmd.SetGlobalMatrixArray(unity_StereoMatrixInvV, s_invViewMatrix);
cmd.SetGlobalMatrixArray(unity_StereoMatrixP, s_projMatrix);
cmd.SetGlobalMatrixArray(unity_StereoMatrixInvP, s_invProjMatrix);
cmd.SetGlobalMatrixArray(unity_StereoMatrixVP, s_viewProjMatrix);
cmd.SetGlobalMatrixArray(unity_StereoMatrixInvVP, s_invViewProjMatrix);
cmd.SetGlobalVectorArray(unity_StereoWorldSpaceCameraPos, s_worldSpaceCameraPos);
}
}
#endif
}
}
}
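
The following is a minimal usage sketch (not part of the commit) showing how a render pipeline might drive this class for a single-pass stereo pass; the helper name and the `xrPass`/`cmd` arguments are assumptions, and the `Update` helper above wraps the same two steps.

using UnityEngine.Rendering;
using UnityEngine.Experimental.Rendering;

static class StereoConstantsUsageSketch
{
    // Hypothetical pipeline helper: refresh and bind the unity_Stereo* globals for one pass.
    public static void BindStereoConstants(XRPass xrPass, CommandBuffer cmd, bool renderIntoTexture)
    {
        if (!xrPass.enabled || !xrPass.singlePassEnabled)
            return;

        // Fill the cached per-view arrays (two views in single-pass stereo).
        for (int viewIndex = 0; viewIndex < xrPass.viewCount; ++viewIndex)
        {
            XRBuiltinShaderConstants.UpdateBuiltinShaderConstants(
                xrPass.GetViewMatrix(viewIndex),
                xrPass.GetProjMatrix(viewIndex),
                renderIntoTexture,
                viewIndex);
        }

        // Push the arrays to the command buffer as global shader constants.
        XRBuiltinShaderConstants.SetBuiltinShaderConstants(cmd);
    }
}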


@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 5686dd9517f0d754da1a4a1a57784781
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:


@@ -0,0 +1,86 @@
using System;
using System.Collections.Generic;
using UnityEngine.Experimental.Rendering;
namespace UnityEngine.Rendering
{
/// <summary>
/// Utility class to connect SRP to the automated test framework.
/// </summary>
public static class XRGraphicsAutomatedTests
{
// XR tests can be enabled from the command line. Cache result to avoid GC.
static bool activatedFromCommandLine
{
#if UNITY_EDITOR
get => Array.Exists(Environment.GetCommandLineArgs(), arg => arg == "-xr-reuse-tests");
#elif XR_REUSE_TESTS_STANDALONE
get => true;
#else
get => false;
#endif
}
/// <summary>
/// Used by render pipelines to initialize XR tests.
/// </summary>
public static bool enabled { get; } = activatedFromCommandLine;
/// <summary>
/// Set by automated test framework and read by render pipelines.
/// </summary>
public static bool running = false;
// Helper function to override the XR default layout using settings of new camera
internal static void OverrideLayout(XRLayout layout, Camera camera)
{
#if ENABLE_VR && ENABLE_XR_MODULE
if (enabled && running)
{
var camProjMatrix = camera.projectionMatrix;
var camViewMatrix = camera.worldToCameraMatrix;
if (camera.TryGetCullingParameters(false, out var cullingParams))
{
cullingParams.stereoProjectionMatrix = camProjMatrix;
cullingParams.stereoViewMatrix = camViewMatrix;
cullingParams.stereoSeparationDistance = 0.0f;
List<(Camera, XRPass)> xrPasses = layout.GetActivePasses();
for (int passId = 0; passId < xrPasses.Count; passId++)
{
var xrPass = xrPasses[passId].Item2;
xrPass.AssignCullingParams(xrPass.cullingPassId, cullingParams);
for (int viewId = 0; viewId < xrPass.viewCount; viewId++)
{
var projMatrix = camProjMatrix;
var viewMatrix = camViewMatrix;
bool isFirstViewMultiPass = xrPasses.Count == 2 && passId == 0;
bool isFirstViewSinglePass = xrPasses.Count == 1 && viewId == 0;
if (isFirstViewMultiPass || isFirstViewSinglePass)
{
// Modify the render viewpoint and frustum of the first view in order to
// distinguish it from the final view used for image comparison.
// This is a trick to help detect issues related to view indexing.
var planes = projMatrix.decomposeProjection;
planes.left *= 0.44f;
planes.right *= 0.88f;
planes.top *= 0.11f;
planes.bottom *= 0.33f;
projMatrix = Matrix4x4.Frustum(planes);
viewMatrix *= Matrix4x4.Translate(new Vector3(.34f, 0.25f, -0.08f));
}
XRView xrView = new XRView(projMatrix, viewMatrix, xrPass.GetViewport(viewId), null, xrPass.GetTextureArraySlice(viewId));
xrPass.AssignView(viewId, xrView);
}
}
}
}
#endif
}
}
}
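
As a small, hedged illustration of how this hooks up in practice (the fixture type below is hypothetical and not part of the commit): the player is launched with `-xr-reuse-tests` so that `enabled` becomes true, and the test framework toggles `running` around each capture.

using UnityEngine.Rendering;

static class XRTestHarnessSketch
{
    // Hypothetical test-fixture hooks around an XR image comparison test.
    public static void BeginXRTest()
    {
        // `enabled` is true only when launched with -xr-reuse-tests
        // (or built with XR_REUSE_TESTS_STANDALONE defined).
        if (XRGraphicsAutomatedTests.enabled)
            XRGraphicsAutomatedTests.running = true;
    }

    public static void EndXRTest()
    {
        XRGraphicsAutomatedTests.running = false;
    }
}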


@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a88e3e436344c9246acd63d2edca6fdf
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:


@@ -0,0 +1,108 @@
using System.Collections.Generic;
using System.Text;
using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering
{
/// <summary>
/// Used by render pipelines to store information about the XR device layout.
/// </summary>
public class XRLayout
{
readonly List<(Camera, XRPass)> m_ActivePasses = new List<(Camera, XRPass)>();
/// <summary>
/// Configure the layout to render from the specified camera by generating passes from the connected XR device.
/// </summary>
/// <param name="camera"></param>
/// <param name="enableXR"></param>
public void AddCamera(Camera camera, bool enableXR)
{
if (camera == null)
return;
// Enable XR layout only for game camera
bool isGameCamera = (camera.cameraType == CameraType.Game || camera.cameraType == CameraType.VR);
bool xrSupported = isGameCamera && camera.targetTexture == null && enableXR;
if (XRSystem.displayActive && xrSupported)
{
XRSystem.SetDisplayZRange(camera.nearClipPlane, camera.farClipPlane);
XRSystem.CreateDefaultLayout(camera);
}
else
{
AddPass(camera, XRSystem.emptyPass);
}
}
/// <summary>
/// Used by render pipelines to reconfigure a pass from a camera.
/// </summary>
/// <param name="xrPass"></param>
/// <param name="camera"></param>
public void ReconfigurePass(XRPass xrPass, Camera camera)
{
if (xrPass.enabled)
{
XRSystem.ReconfigurePass(xrPass, camera);
xrPass.UpdateCombinedOcclusionMesh();
}
}
/// <summary>
/// Used by render pipelines to access all registered passes on this layout.
/// </summary>
/// <returns></returns>
public List<(Camera, XRPass)> GetActivePasses()
{
return m_ActivePasses;
}
internal void AddPass(Camera camera, XRPass xrPass)
{
xrPass.UpdateCombinedOcclusionMesh();
m_ActivePasses.Add((camera, xrPass));
}
internal void Clear()
{
for (int i = 0; i < m_ActivePasses.Count; i++)
{
// Pop from the back to keep initial ordering (see implementation of ObjectPool)
(Camera _, XRPass xrPass) = m_ActivePasses[m_ActivePasses.Count - i - 1];
if (xrPass != XRSystem.emptyPass)
xrPass.Release();
}
m_ActivePasses.Clear();
}
internal void LogDebugInfo()
{
var sb = new StringBuilder();
sb.AppendFormat("XRSystem setup for frame {0}, active: {1}", Time.frameCount, XRSystem.displayActive);
sb.AppendLine();
for (int passIndex = 0; passIndex < m_ActivePasses.Count; passIndex++)
{
var pass = m_ActivePasses[passIndex].Item2;
for (int viewIndex = 0; viewIndex < pass.viewCount; viewIndex++)
{
var viewport = pass.GetViewport(viewIndex);
sb.AppendFormat("XR Pass {0} Cull {1} View {2} Slice {3} : {4} x {5}",
pass.multipassId,
pass.cullingPassId,
viewIndex,
pass.GetTextureArraySlice(viewIndex),
viewport.width,
viewport.height);
sb.AppendLine();
}
}
Debug.Log(sb);
}
}
}
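
To show where the layout sits in a frame, here is a minimal sketch of a per-frame camera loop as a pipeline might write it; `RenderSingleCamera` and the `xrEnabled` flag are hypothetical placeholders, not part of this commit.

using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Experimental.Rendering;

static class LayoutLoopSketch
{
    // Hypothetical per-frame entry point of a render pipeline.
    public static void RenderFrame(Camera[] cameras, bool xrEnabled)
    {
        XRLayout layout = XRSystem.NewLayout();

        // Register every camera; XR-capable game cameras get passes generated from the
        // device, all other cameras receive XRSystem.emptyPass.
        foreach (Camera camera in cameras)
            layout.AddCamera(camera, xrEnabled);

        // Render each (camera, pass) pair produced by the layout.
        foreach ((Camera camera, XRPass xrPass) in layout.GetActivePasses())
            RenderSingleCamera(camera, xrPass); // hypothetical pipeline method

        XRSystem.EndLayout();
    }

    static void RenderSingleCamera(Camera camera, XRPass xrPass) { /* pipeline-specific */ }
}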


@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 9a524f7be46233f4ba5569b2dc52878f
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:


@@ -0,0 +1,88 @@
using System;
using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering
{
internal static class XRMirrorView
{
static readonly MaterialPropertyBlock s_MirrorViewMaterialProperty = new MaterialPropertyBlock();
static readonly ProfilingSampler k_MirrorViewProfilingSampler = new ProfilingSampler("XR Mirror View");
static readonly int k_SourceTex = Shader.PropertyToID("_SourceTex");
static readonly int k_SourceTexArraySlice = Shader.PropertyToID("_SourceTexArraySlice");
static readonly int k_ScaleBias = Shader.PropertyToID("_ScaleBias");
static readonly int k_ScaleBiasRt = Shader.PropertyToID("_ScaleBiasRt");
static readonly int k_SRGBRead = Shader.PropertyToID("_SRGBRead");
static readonly int k_SRGBWrite = Shader.PropertyToID("_SRGBWrite");
#if ENABLE_VR && ENABLE_XR_MODULE
internal static void RenderMirrorView(CommandBuffer cmd, Camera camera, Material mat, UnityEngine.XR.XRDisplaySubsystem display)
{
// XRTODO : remove this check when the Quest plugin is fixed
if (Application.platform == RuntimePlatform.Android && !XRGraphicsAutomatedTests.running)
return;
if (display == null || !display.running || mat == null)
return;
int mirrorBlitMode = display.GetPreferredMirrorBlitMode();
if (display.GetMirrorViewBlitDesc(null, out var blitDesc, mirrorBlitMode))
{
using (new ProfilingScope(cmd, k_MirrorViewProfilingSampler))
{
cmd.SetRenderTarget(camera.targetTexture != null ? camera.targetTexture : new RenderTargetIdentifier(BuiltinRenderTextureType.CameraTarget));
if (blitDesc.nativeBlitAvailable)
{
display.AddGraphicsThreadMirrorViewBlit(cmd, blitDesc.nativeBlitInvalidStates, mirrorBlitMode);
}
else
{
for (int i = 0; i < blitDesc.blitParamsCount; ++i)
{
blitDesc.GetBlitParameter(i, out var blitParam);
Vector4 scaleBias = new Vector4(blitParam.srcRect.width, blitParam.srcRect.height, blitParam.srcRect.x, blitParam.srcRect.y);
Vector4 scaleBiasRt = new Vector4(blitParam.destRect.width, blitParam.destRect.height, blitParam.destRect.x, blitParam.destRect.y);
// Deal with y-flip
if (camera.targetTexture != null || camera.cameraType == CameraType.SceneView || camera.cameraType == CameraType.Preview)
{
scaleBias.y = -scaleBias.y;
scaleBias.w += blitParam.srcRect.height;
}
// Eye textures are always gamma corrected : use explicit sRGB read in shader only if the source is not using sRGB format.
s_MirrorViewMaterialProperty.SetFloat(k_SRGBRead, blitParam.srcTex.sRGB ? 0.0f : 1.0f);
// Perform explicit sRGB write in shader if color space is gamma
s_MirrorViewMaterialProperty.SetFloat(k_SRGBWrite, (QualitySettings.activeColorSpace == ColorSpace.Linear) ? 0.0f : 1.0f);
s_MirrorViewMaterialProperty.SetTexture(k_SourceTex, blitParam.srcTex);
s_MirrorViewMaterialProperty.SetVector(k_ScaleBias, scaleBias);
s_MirrorViewMaterialProperty.SetVector(k_ScaleBiasRt, scaleBiasRt);
s_MirrorViewMaterialProperty.SetFloat(k_SourceTexArraySlice, blitParam.srcTexArraySlice);
if (XRSystem.foveatedRenderingCaps.HasFlag(FoveatedRenderingCaps.NonUniformRaster) && blitParam.foveatedRenderingInfo != IntPtr.Zero)
{
cmd.ConfigureFoveatedRendering(blitParam.foveatedRenderingInfo);
cmd.EnableShaderKeyword("_FOVEATED_RENDERING_NON_UNIFORM_RASTER");
}
int shaderPass = (blitParam.srcTex.dimension == TextureDimension.Tex2DArray) ? 1 : 0;
cmd.DrawProcedural(Matrix4x4.identity, mat, shaderPass, MeshTopology.Quads, 4, 1, s_MirrorViewMaterialProperty);
}
}
}
}
if (XRSystem.foveatedRenderingCaps.HasFlag(FoveatedRenderingCaps.NonUniformRaster))
{
cmd.DisableShaderKeyword("_FOVEATED_RENDERING_NON_UNIFORM_RASTER");
cmd.ConfigureFoveatedRendering(IntPtr.Zero);
}
}
#endif
}
}
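
Since this class is internal, pipelines reach it through `XRSystem.RenderMirrorView`. A minimal end-of-frame sketch, assuming a `ScriptableRenderContext` named `context` and a game-view camera; the method name is hypothetical.

using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Experimental.Rendering;

static class MirrorViewUsageSketch
{
    // Hypothetical end-of-frame step: blit the XR mirror view into the game view.
    public static void BlitMirrorView(ScriptableRenderContext context, Camera camera)
    {
        var cmd = new CommandBuffer { name = "XR Mirror View" };
        XRSystem.RenderMirrorView(cmd, camera);
        context.ExecuteCommandBuffer(cmd);
        cmd.Release();
    }
}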


@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: eb231ffedf4f2b24a83437fcc01640d5
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:


@@ -0,0 +1,175 @@
using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering
{
// Helper class to render occlusion meshes.
// If possible, the mesh for each view will be combined into one mesh to reduce draw calls.
internal class XROcclusionMesh
{
XRPass m_Pass;
Mesh m_CombinedMesh;
Material m_Material;
int m_CombinedMeshHashCode;
static readonly ProfilingSampler k_OcclusionMeshProfilingSampler = new ProfilingSampler("XR Occlusion Mesh");
internal XROcclusionMesh(XRPass xrPass)
{
m_Pass = xrPass;
}
internal void SetMaterial(Material mat)
{
m_Material = mat;
}
internal bool hasValidOcclusionMesh
{
get
{
if (IsOcclusionMeshSupported())
{
if (m_Pass.singlePassEnabled)
return m_CombinedMesh != null;
else
return m_Pass.GetOcclusionMesh() != null;
}
return false;
}
}
internal void RenderOcclusionMesh(CommandBuffer cmd)
{
if (IsOcclusionMeshSupported())
{
using (new ProfilingScope(cmd, k_OcclusionMeshProfilingSampler))
{
if (m_Pass.singlePassEnabled)
{
if (m_CombinedMesh != null && SystemInfo.supportsRenderTargetArrayIndexFromVertexShader)
{
m_Pass.StopSinglePass(cmd);
cmd.EnableShaderKeyword("XR_OCCLUSION_MESH_COMBINED");
cmd.DrawMesh(m_CombinedMesh, Matrix4x4.identity, m_Material);
cmd.DisableShaderKeyword("XR_OCCLUSION_MESH_COMBINED");
m_Pass.StartSinglePass(cmd);
}
}
else
{
Mesh mesh = m_Pass.GetOcclusionMesh(0);
if (mesh != null)
{
cmd.DrawMesh(mesh, Matrix4x4.identity, m_Material);
}
}
}
}
}
internal void UpdateCombinedMesh()
{
if (IsOcclusionMeshSupported() && m_Pass.singlePassEnabled && TryGetOcclusionMeshCombinedHashCode(out var hashCode))
{
if (m_CombinedMesh == null || hashCode != m_CombinedMeshHashCode)
{
CreateOcclusionMeshCombined();
m_CombinedMeshHashCode = hashCode;
}
}
else
{
m_CombinedMesh = null;
m_CombinedMeshHashCode = 0;
}
}
bool IsOcclusionMeshSupported()
{
return m_Pass.enabled && m_Material != null;
}
bool TryGetOcclusionMeshCombinedHashCode(out int hashCode)
{
hashCode = 17;
for (int viewId = 0; viewId < m_Pass.viewCount; ++viewId)
{
Mesh mesh = m_Pass.GetOcclusionMesh(viewId);
if (mesh != null)
{
hashCode = hashCode * 23 + mesh.GetHashCode();
}
else
{
hashCode = 0;
return false;
}
}
return true;
}
// Create a new mesh that contains the occlusion data from all views
void CreateOcclusionMeshCombined()
{
CoreUtils.Destroy(m_CombinedMesh);
m_CombinedMesh = new Mesh();
m_CombinedMesh.indexFormat = IndexFormat.UInt16;
int combinedVertexCount = 0;
uint combinedIndexCount = 0;
for (int viewId = 0; viewId < m_Pass.viewCount; ++viewId)
{
Mesh mesh = m_Pass.GetOcclusionMesh(viewId);
Debug.Assert(mesh != null);
Debug.Assert(mesh.subMeshCount == 1);
Debug.Assert(mesh.indexFormat == IndexFormat.UInt16);
combinedVertexCount += mesh.vertexCount;
combinedIndexCount += mesh.GetIndexCount(0);
}
Vector3[] vertices = new Vector3[combinedVertexCount];
ushort[] indices = new ushort[combinedIndexCount];
int vertexStart = 0;
int indexStart = 0;
for (int viewId = 0; viewId < m_Pass.viewCount; ++viewId)
{
Mesh mesh = m_Pass.GetOcclusionMesh(viewId);
var meshIndices = mesh.GetIndices(0);
// Encode the viewId into the z channel
{
mesh.vertices.CopyTo(vertices, vertexStart);
for (int i = 0; i < mesh.vertices.Length; i++)
vertices[vertexStart + i].z = viewId;
}
// Combine indices into one buffer
for (int i = 0; i < meshIndices.Length; i++)
{
int newIndex = vertexStart + meshIndices[i];
Debug.Assert(meshIndices[i] < ushort.MaxValue);
indices[indexStart + i] = (ushort)newIndex;
}
vertexStart += mesh.vertexCount;
indexStart += meshIndices.Length;
}
m_CombinedMesh.vertices = vertices;
m_CombinedMesh.SetIndices(indices, MeshTopology.Triangles, 0);
}
}
}
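
This class is internal and is exercised through `XRPass.RenderOcclusionMesh`. A minimal sketch of the intended call site, assuming the XR render target is already bound and the pass has been given the occlusion mesh material; drawing it before opaque geometry lets the hidden area be rejected early.

using UnityEngine.Rendering;
using UnityEngine.Experimental.Rendering;

static class OcclusionMeshUsageSketch
{
    // Hypothetical helper called right after binding the XR render target, before opaque draws.
    public static void DrawOcclusionMesh(CommandBuffer cmd, XRPass xrPass)
    {
        if (xrPass.enabled && xrPass.hasValidOcclusionMesh)
            xrPass.RenderOcclusionMesh(cmd);
    }
}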


@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 48abac0b65e9c804aa38cf4bf6340d82
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:


@@ -0,0 +1,330 @@
using System;
using System.Collections.Generic;
using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering
{
/// <summary>
/// Set of data used to create a XRPass object.
/// </summary>
public struct XRPassCreateInfo
{
internal RenderTargetIdentifier renderTarget;
internal RenderTextureDescriptor renderTargetDesc;
internal ScriptableCullingParameters cullingParameters;
internal Material occlusionMeshMaterial;
internal IntPtr foveatedRenderingInfo;
internal int multipassId;
internal int cullingPassId;
internal bool copyDepth;
#if ENABLE_VR && ENABLE_XR_MODULE
internal UnityEngine.XR.XRDisplaySubsystem.XRRenderPass xrSdkRenderPass;
#endif
}
/// <summary>
/// XRPass holds the render target information and a list of XRView.
/// XRView contains the parameters required to render (projection and view matrices, viewport, etc)
/// When a pass has 2 views or more, single-pass will be active if the platform supports it.
/// To avoid allocating every frame, XRView is a struct and XRPass is pooled.
/// </summary>
public class XRPass
{
readonly List<XRView> m_Views;
readonly XROcclusionMesh m_OcclusionMesh;
/// <summary>
/// Parameterless constructor.
/// Note: in order to avoid GC, the render pipeline should use XRPass.CreateDefault instead of this constructor.
/// </summary>
public XRPass()
{
m_Views = new List<XRView>(2);
m_OcclusionMesh = new XROcclusionMesh(this);
}
/// <summary>
/// Default allocator method for XRPass.
/// </summary>
/// <param name="createInfo"></param>
/// <returns></returns>
public static XRPass CreateDefault(XRPassCreateInfo createInfo)
{
XRPass pass = GenericPool<XRPass>.Get();
pass.InitBase(createInfo);
return pass;
}
/// <summary>
/// Default release method. Can be overridden by render pipelines.
/// </summary>
virtual public void Release()
{
GenericPool<XRPass>.Release(this);
}
/// <summary>
/// Returns true if the pass contains at least one view.
/// </summary>
public bool enabled
{
#if ENABLE_VR && ENABLE_XR_MODULE
get => viewCount > 0;
#else
get => false;
#endif
}
/// <summary>
/// Returns true if the pass can use foveated rendering commands.
/// </summary>
public bool supportsFoveatedRendering
{
#if ENABLE_VR && ENABLE_XR_MODULE
get => enabled && foveatedRenderingInfo != IntPtr.Zero && XRSystem.foveatedRenderingCaps != FoveatedRenderingCaps.None;
#else
get => false;
#endif
}
/// <summary>
/// If true, the render pipeline is expected to output a valid depth buffer to the renderTarget.
/// </summary>
public bool copyDepth { get; private set; }
/// <summary>
/// Index of the pass inside the frame.
/// </summary>
public int multipassId { get; private set; }
/// <summary>
/// Index used for culling. It can be shared between multiple passes.
/// </summary>
public int cullingPassId { get; private set; }
/// <summary>
/// Destination render target.
/// </summary>
public RenderTargetIdentifier renderTarget { get; private set; }
/// <summary>
/// Destination render target descriptor.
/// </summary>
public RenderTextureDescriptor renderTargetDesc { get; private set; }
/// <summary>
/// Parameters used for culling.
/// </summary>
public ScriptableCullingParameters cullingParams { get; private set; }
/// <summary>
/// Returns the number of views inside this pass.
/// </summary>
public int viewCount { get => m_Views.Count; }
/// <summary>
/// If true, the render pipeline is expected to use single-pass techniques to save CPU time.
/// </summary>
public bool singlePassEnabled { get => viewCount > 1; }
/// <summary>
/// Native pointer from the XR plugin to be consumed by ConfigureFoveatedRendering.
/// </summary>
public IntPtr foveatedRenderingInfo { get; private set; }
/// <summary>
/// Returns the projection matrix for a given view.
/// </summary>
/// <param name="viewIndex"></param>
/// <returns></returns>
public Matrix4x4 GetProjMatrix(int viewIndex = 0)
{
return m_Views[viewIndex].projMatrix;
}
/// <summary>
/// Returns the view matrix for a given view.
/// </summary>
/// <param name="viewIndex"></param>
/// <returns></returns>
public Matrix4x4 GetViewMatrix(int viewIndex = 0)
{
return m_Views[viewIndex].viewMatrix;
}
/// <summary>
/// Returns the viewport for a given view.
/// </summary>
/// <param name="viewIndex"></param>
/// <returns></returns>
public Rect GetViewport(int viewIndex = 0)
{
return m_Views[viewIndex].viewport;
}
/// <summary>
/// Returns the occlusion mesh for a given view.
/// </summary>
/// <param name="viewIndex"></param>
/// <returns></returns>
public Mesh GetOcclusionMesh(int viewIndex = 0)
{
return m_Views[viewIndex].occlusionMesh;
}
/// <summary>
/// Returns the destination slice index (for texture array) for a given view.
/// </summary>
/// <param name="viewIndex"></param>
/// <returns></returns>
public int GetTextureArraySlice(int viewIndex = 0)
{
return m_Views[viewIndex].textureArraySlice;
}
/// <summary>
/// Queue up render commands to enable single-pass techniques.
/// Note: depending on the platform and settings, either single-pass instancing or the multiview extension will be used.
/// </summary>
/// <param name="cmd"></param>
public void StartSinglePass(CommandBuffer cmd)
{
if (enabled)
{
if (singlePassEnabled)
{
if (viewCount <= TextureXR.slices)
{
if (SystemInfo.supportsMultiview)
{
cmd.EnableShaderKeyword("STEREO_MULTIVIEW_ON");
}
else
{
cmd.EnableShaderKeyword("STEREO_INSTANCING_ON");
cmd.SetInstanceMultiplier((uint)viewCount);
}
}
else
{
throw new NotImplementedException($"Invalid XR setup for single-pass, trying to render too many views! Max supported: {TextureXR.slices}");
}
}
}
}
/// <summary>
/// Queue up render commands to disable single-pass techniques.
/// </summary>
/// <param name="cmd"></param>
public void StopSinglePass(CommandBuffer cmd)
{
if (enabled)
{
if (singlePassEnabled)
{
if (SystemInfo.supportsMultiview)
{
cmd.DisableShaderKeyword("STEREO_MULTIVIEW_ON");
}
else
{
cmd.DisableShaderKeyword("STEREO_INSTANCING_ON");
cmd.SetInstanceMultiplier(1);
}
}
}
}
/// <summary>
/// Returns true if the pass was set up with the expected mesh and material.
/// </summary>
public bool hasValidOcclusionMesh { get => m_OcclusionMesh.hasValidOcclusionMesh; }
/// <summary>
/// Generate commands to render the occlusion mesh for this pass.
/// In single-pass mode, the meshes for all views are combined into one mesh,
/// where the corresponding view index is encoded into each vertex. The keyword
/// "XR_OCCLUSION_MESH_COMBINED" is also enabled when rendering the combined mesh.
/// </summary>
/// <param name="cmd"></param>
public void RenderOcclusionMesh(CommandBuffer cmd)
{
m_OcclusionMesh.RenderOcclusionMesh(cmd);
}
/// <summary>
/// Take a point that is center-relative (0.5, 0.5) and modify it to be placed relative to the view's center instead, respecting the asymmetric FOV (if it is used).
/// </summary>
/// <param name="center"></param>
/// <returns></returns>
public Vector4 ApplyXRViewCenterOffset(Vector2 center)
{
Vector4 result = Vector4.zero;
float centerDeltaX = 0.5f - center.x;
float centerDeltaY = 0.5f - center.y;
result.x = m_Views[0].eyeCenterUV.x - centerDeltaX;
result.y = m_Views[0].eyeCenterUV.y - centerDeltaY;
if (singlePassEnabled)
{
// With single-pass XR, we need to add the data for the 2nd view
result.z = m_Views[1].eyeCenterUV.x - centerDeltaX;
result.w = m_Views[1].eyeCenterUV.y - centerDeltaY;
}
return result;
}
internal void AssignView(int viewId, XRView xrView)
{
if (viewId < 0 || viewId >= m_Views.Count)
throw new ArgumentOutOfRangeException(nameof(viewId));
m_Views[viewId] = xrView;
}
internal void AssignCullingParams(int cullingPassId, ScriptableCullingParameters cullingParams)
{
// Disable legacy stereo culling path
cullingParams.cullingOptions &= ~CullingOptions.Stereo;
this.cullingPassId = cullingPassId;
this.cullingParams = cullingParams;
}
internal void UpdateCombinedOcclusionMesh()
{
m_OcclusionMesh.UpdateCombinedMesh();
}
/// <summary>
/// Initialize the base class fields.
/// </summary>
/// <param name="createInfo"></param>
public void InitBase(XRPassCreateInfo createInfo)
{
m_Views.Clear();
copyDepth = createInfo.copyDepth;
multipassId = createInfo.multipassId;
AssignCullingParams(createInfo.cullingPassId, createInfo.cullingParameters);
renderTarget = new RenderTargetIdentifier(createInfo.renderTarget, 0, CubemapFace.Unknown, -1);
renderTargetDesc = createInfo.renderTargetDesc;
m_OcclusionMesh.SetMaterial(createInfo.occlusionMeshMaterial);
foveatedRenderingInfo = createInfo.foveatedRenderingInfo;
}
internal void AddView(XRView xrView)
{
if (m_Views.Count < TextureXR.slices)
{
m_Views.Add(xrView);
}
else
{
throw new NotImplementedException($"Invalid XR setup for single-pass, trying to add too many views! Max supported: {TextureXR.slices}");
}
}
}
}
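
A minimal sketch of how a pipeline might render one XRPass, assuming a `ScriptableRenderContext` and a `CommandBuffer` supplied by the caller; the draw calls themselves are elided and the helper name is hypothetical.

using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Experimental.Rendering;

static class XRPassRenderSketch
{
    // Hypothetical per-pass render step of a pipeline.
    public static void RenderPass(ScriptableRenderContext context, CommandBuffer cmd, XRPass xrPass)
    {
        if (!xrPass.enabled)
            return;

        cmd.SetRenderTarget(xrPass.renderTarget);
        cmd.SetViewport(xrPass.GetViewport());

        // Update and bind the unity_Stereo* constants for all views of this pass.
        XRBuiltinShaderConstants.Update(xrPass, cmd, true);

        // Enable instancing/multiview for the duration of the stereo draws.
        xrPass.StartSinglePass(cmd);
        xrPass.RenderOcclusionMesh(cmd);
        // ... issue opaque/transparent draws here ...
        xrPass.StopSinglePass(cmd);

        context.ExecuteCommandBuffer(cmd);
        cmd.Clear();
    }
}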


@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 94c64639e39f0b84eae9a4ab5f5777bc
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:


@@ -0,0 +1,394 @@
using System;
using System.Collections.Generic;
using UnityEngine.Rendering;
#if ENABLE_VR && ENABLE_XR_MODULE
using UnityEngine.XR;
#endif
namespace UnityEngine.Experimental.Rendering
{
/// <summary>
/// Used by render pipelines to communicate with XR SDK.
/// </summary>
public static class XRSystem
{
// Keep track of only one XR layout
static XRLayout s_Layout = new XRLayout();
// Delegate allocations of XRPass to the render pipeline
static Func<XRPassCreateInfo, XRPass> s_PassAllocator = null;
#if ENABLE_VR && ENABLE_XR_MODULE
static List<XRDisplaySubsystem> s_DisplayList = new List<XRDisplaySubsystem>();
static XRDisplaySubsystem s_Display;
/// <summary>
/// Returns the active XR display.
/// </summary>
static public XRDisplaySubsystem GetActiveDisplay()
{
return s_Display;
}
#endif
// MSAA level (number of samples per pixel) shared by all XR displays
static MSAASamples s_MSAASamples = MSAASamples.None;
// Internal resources used by XR rendering
static Material s_OcclusionMeshMaterial;
static Material s_MirrorViewMaterial;
// Ability to override the default XR layout
static Action<XRLayout, Camera> s_LayoutOverride = null;
/// <summary>
/// Returns true if an XR device is connected and running.
/// </summary>
static public bool displayActive
{
#if ENABLE_VR && ENABLE_XR_MODULE
get => (s_Display != null) ? s_Display.running : false;
#else
get => false;
#endif
}
/// <summary>
/// Valid empty pass when a camera is not using XR.
/// </summary>
public static readonly XRPass emptyPass = new XRPass();
/// <summary>
/// If true, the system will try to create a layout compatible with single-pass rendering.
/// </summary>
static public bool singlePassAllowed { get; set; } = true;
/// <summary>
/// Cached value of SystemInfo.foveatedRenderingCaps.
/// </summary>
static public FoveatedRenderingCaps foveatedRenderingCaps { get; set; }
/// <summary>
/// If true, the system will log some information about the layout to the console.
/// </summary>
static public bool dumpDebugInfo { get; set; } = false;
/// <summary>
/// Use this method to assign the shaders that will be used to render the occlusion mesh for each XRPass and the final mirror view.
/// </summary>
/// <param name="passAllocator"></param>
/// <param name="occlusionMeshPS"></param>
/// <param name="mirrorViewPS"></param>
public static void Initialize(Func<XRPassCreateInfo, XRPass> passAllocator, Shader occlusionMeshPS, Shader mirrorViewPS)
{
if (passAllocator == null)
throw new ArgumentNullException(nameof(passAllocator));
s_PassAllocator = passAllocator;
RefreshDeviceInfo();
foveatedRenderingCaps = SystemInfo.foveatedRenderingCaps;
if (occlusionMeshPS != null && s_OcclusionMeshMaterial == null)
s_OcclusionMeshMaterial = CoreUtils.CreateEngineMaterial(occlusionMeshPS);
if (mirrorViewPS != null && s_MirrorViewMaterial == null)
s_MirrorViewMaterial = CoreUtils.CreateEngineMaterial(mirrorViewPS);
if (XRGraphicsAutomatedTests.enabled)
SetLayoutOverride(XRGraphicsAutomatedTests.OverrideLayout);
}
/// <summary>
/// Used by the render pipeline to communicate to the XR device how many samples are used by MSAA.
/// </summary>
/// <param name="msaaSamples"></param>
public static void SetDisplayMSAASamples(MSAASamples msaaSamples)
{
if (s_MSAASamples == msaaSamples)
return;
s_MSAASamples = msaaSamples;
#if ENABLE_VR && ENABLE_XR_MODULE
SubsystemManager.GetInstances(s_DisplayList);
foreach (var display in s_DisplayList)
display.SetMSAALevel((int)s_MSAASamples);
#endif
}
/// <summary>
/// Returns the number of samples (MSAA) currently configured on the XR device.
/// </summary>
/// <returns></returns>
public static MSAASamples GetDisplayMSAASamples()
{
return s_MSAASamples;
}
/// <summary>
/// Used by the render pipeline to scale the render target on the XR device.
/// </summary>
/// <param name="renderScale">A value of 1.0f represents 100% of the original resolution.</param>
public static void SetRenderScale(float renderScale)
{
#if ENABLE_VR && ENABLE_XR_MODULE
SubsystemManager.GetInstances(s_DisplayList);
foreach (var display in s_DisplayList)
display.scaleOfAllRenderTargets = renderScale;
#endif
}
/// <summary>
/// Used by the render pipeline to initiate a new rendering frame through an XR layout.
/// </summary>
/// <returns></returns>
public static XRLayout NewLayout()
{
RefreshDeviceInfo();
if (s_Layout.GetActivePasses().Count > 0)
{
Debug.LogWarning("Render Pipeline error : the XR layout still contains active passes. Executing XRSystem.EndLayout() right now.");
EndLayout();
}
return s_Layout;
}
/// <summary>
/// Used by the render pipeline to complete the XR layout at the end of the frame.
/// </summary>
public static void EndLayout()
{
if (dumpDebugInfo)
s_Layout.LogDebugInfo();
s_Layout.Clear();
}
/// <summary>
/// Used by the render pipeline to render the mirror view to the game view, as configured by the XR device.
/// </summary>
/// <param name="cmd"></param>
/// <param name="camera"></param>
public static void RenderMirrorView(CommandBuffer cmd, Camera camera)
{
#if ENABLE_VR && ENABLE_XR_MODULE
XRMirrorView.RenderMirrorView(cmd, camera, s_MirrorViewMaterial, s_Display);
#endif
}
/// <summary>
/// Free up resources used by the system.
/// </summary>
public static void Dispose()
{
if (s_OcclusionMeshMaterial != null)
{
CoreUtils.Destroy(s_OcclusionMeshMaterial);
s_OcclusionMeshMaterial = null;
}
if (s_MirrorViewMaterial != null)
{
CoreUtils.Destroy(s_MirrorViewMaterial);
s_MirrorViewMaterial = null;
}
}
// Used by the render pipeline to communicate to the XR device the range of the depth buffer.
internal static void SetDisplayZRange(float zNear, float zFar)
{
#if ENABLE_VR && ENABLE_XR_MODULE
if (s_Display != null)
{
s_Display.zNear = zNear;
s_Display.zFar = zFar;
}
#endif
}
// XRTODO : expose as public API
static void SetLayoutOverride(Action<XRLayout, Camera> action)
{
s_LayoutOverride = action;
}
// Disable legacy VR system before rendering first frame
[RuntimeInitializeOnLoadMethod(RuntimeInitializeLoadType.BeforeSplashScreen)]
static void XRSystemInit()
{
if (GraphicsSettings.currentRenderPipeline != null)
{
RefreshDeviceInfo();
}
}
static void RefreshDeviceInfo()
{
#if ENABLE_VR && ENABLE_XR_MODULE
SubsystemManager.GetInstances(s_DisplayList);
if (s_DisplayList.Count > 0)
{
if (s_DisplayList.Count > 1)
throw new NotImplementedException("Only one XR display is supported!");
s_Display = s_DisplayList[0];
s_Display.disableLegacyRenderer = true;
s_Display.sRGB = QualitySettings.activeColorSpace == ColorSpace.Linear;
// XRTODO : discuss this code and UI implications
s_Display.textureLayout = XRDisplaySubsystem.TextureLayout.Texture2DArray;
// XRTODO : replace by API from XR SDK, assume we have 2 views max for now
TextureXR.maxViews = Math.Max(TextureXR.slices, 2);
}
else
{
s_Display = null;
}
#endif
}
// Set up the layout to use multi-pass or single-pass based on the runtime caps
internal static void CreateDefaultLayout(Camera camera)
{
#if ENABLE_VR && ENABLE_XR_MODULE
if (s_Display == null)
throw new NullReferenceException(nameof(s_Display));
for (int renderPassIndex = 0; renderPassIndex < s_Display.GetRenderPassCount(); ++renderPassIndex)
{
s_Display.GetRenderPass(renderPassIndex, out var renderPass);
s_Display.GetCullingParameters(camera, renderPass.cullingPassIndex, out var cullingParams);
if (CanUseSinglePass(camera, renderPass))
{
var xrPass = s_PassAllocator(BuildPass(renderPass, cullingParams));
for (int renderParamIndex = 0; renderParamIndex < renderPass.GetRenderParameterCount(); ++renderParamIndex)
{
renderPass.GetRenderParameter(camera, renderParamIndex, out var renderParam);
xrPass.AddView(BuildView(renderPass, renderParam));
}
s_Layout.AddPass(camera, xrPass);
}
else
{
for (int renderParamIndex = 0; renderParamIndex < renderPass.GetRenderParameterCount(); ++renderParamIndex)
{
renderPass.GetRenderParameter(camera, renderParamIndex, out var renderParam);
var xrPass = s_PassAllocator(BuildPass(renderPass, cullingParams));
xrPass.AddView(BuildView(renderPass, renderParam));
s_Layout.AddPass(camera, xrPass);
}
}
}
if (s_LayoutOverride != null)
s_LayoutOverride.Invoke(s_Layout, camera);
#endif
}
// Update the parameters of one pass with a different camera
internal static void ReconfigurePass(XRPass xrPass, Camera camera)
{
#if ENABLE_VR && ENABLE_XR_MODULE
if (xrPass.enabled && s_Display != null)
{
s_Display.GetRenderPass(xrPass.multipassId, out var renderPass);
Debug.Assert(xrPass.singlePassEnabled || renderPass.GetRenderParameterCount() == 1);
s_Display.GetCullingParameters(camera, renderPass.cullingPassIndex, out var cullingParams);
xrPass.AssignCullingParams(renderPass.cullingPassIndex, cullingParams);
for (int renderParamIndex = 0; renderParamIndex < renderPass.GetRenderParameterCount(); ++renderParamIndex)
{
renderPass.GetRenderParameter(camera, renderParamIndex, out var renderParam);
xrPass.AssignView(renderParamIndex, BuildView(renderPass, renderParam));
}
if (s_LayoutOverride != null)
s_LayoutOverride.Invoke(s_Layout, camera);
}
#endif
}
#if ENABLE_VR && ENABLE_XR_MODULE
static bool CanUseSinglePass(Camera camera, XRDisplaySubsystem.XRRenderPass renderPass)
{
if (!singlePassAllowed)
return false;
if (renderPass.renderTargetDesc.dimension != TextureDimension.Tex2DArray)
return false;
if (renderPass.GetRenderParameterCount() != 2 || renderPass.renderTargetDesc.volumeDepth != 2)
return false;
renderPass.GetRenderParameter(camera, 0, out var renderParam0);
renderPass.GetRenderParameter(camera, 1, out var renderParam1);
if (renderParam0.textureArraySlice != 0 || renderParam1.textureArraySlice != 1)
return false;
if (renderParam0.viewport != renderParam1.viewport)
return false;
return true;
}
static XRView BuildView(XRDisplaySubsystem.XRRenderPass renderPass, XRDisplaySubsystem.XRRenderParameter renderParameter)
{
// Convert viewport from normalized to screen space
Rect viewport = renderParameter.viewport;
viewport.x *= renderPass.renderTargetDesc.width;
viewport.width *= renderPass.renderTargetDesc.width;
viewport.y *= renderPass.renderTargetDesc.height;
viewport.height *= renderPass.renderTargetDesc.height;
// XRTODO : remove this line and use XRSettings.useOcclusionMesh instead when it's fixed
Mesh occlusionMesh = XRGraphicsAutomatedTests.running ? null : renderParameter.occlusionMesh;
return new XRView(renderParameter.projection, renderParameter.view, viewport, occlusionMesh, renderParameter.textureArraySlice);
}
static XRPassCreateInfo BuildPass(XRDisplaySubsystem.XRRenderPass xrRenderPass, ScriptableCullingParameters cullingParameters)
{
// We can't use descriptor directly because y-flip is forced
// XRTODO : fix root problem
RenderTextureDescriptor xrDesc = xrRenderPass.renderTargetDesc;
RenderTextureDescriptor rtDesc = new RenderTextureDescriptor(xrDesc.width, xrDesc.height, xrDesc.colorFormat, xrDesc.depthBufferBits, xrDesc.mipCount);
rtDesc.dimension = xrRenderPass.renderTargetDesc.dimension;
rtDesc.volumeDepth = xrRenderPass.renderTargetDesc.volumeDepth;
rtDesc.vrUsage = xrRenderPass.renderTargetDesc.vrUsage;
rtDesc.sRGB = xrRenderPass.renderTargetDesc.sRGB;
XRPassCreateInfo passInfo = new XRPassCreateInfo
{
renderTarget = xrRenderPass.renderTarget,
renderTargetDesc = rtDesc,
cullingParameters = cullingParameters,
occlusionMeshMaterial = s_OcclusionMeshMaterial,
foveatedRenderingInfo = xrRenderPass.foveatedRenderingInfo,
multipassId = s_Layout.GetActivePasses().Count,
cullingPassId = xrRenderPass.cullingPassIndex,
copyDepth = xrRenderPass.shouldFillOutDepth,
xrSdkRenderPass = xrRenderPass
};
return passInfo;
}
#endif
}
}
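
A minimal lifecycle sketch, assuming the occlusion mesh and mirror view shaders come from the pipeline's resource asset; the hook names are hypothetical and `XRPass.CreateDefault` is used as the pass allocator.

using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Experimental.Rendering;

static class XRSystemLifecycleSketch
{
    // Hypothetical pipeline-construction hook.
    public static void OnPipelineCreated(Shader occlusionMeshPS, Shader mirrorViewPS)
    {
        XRSystem.Initialize(XRPass.CreateDefault, occlusionMeshPS, mirrorViewPS);

        // Example settings; real values would come from the pipeline asset.
        XRSystem.SetDisplayMSAASamples(MSAASamples.MSAA4x);
        XRSystem.SetRenderScale(1.0f);
    }

    // Hypothetical pipeline-teardown hook.
    public static void OnPipelineDisposed()
    {
        XRSystem.Dispose();
    }
}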


@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 3d520a25148b68744a8a1a3e7ff07920
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:


@@ -0,0 +1,34 @@
using System;
namespace UnityEngine.Experimental.Rendering
{
internal readonly struct XRView
{
internal readonly Matrix4x4 projMatrix;
internal readonly Matrix4x4 viewMatrix;
internal readonly Rect viewport;
internal readonly Mesh occlusionMesh;
internal readonly int textureArraySlice;
internal readonly Vector2 eyeCenterUV;
internal XRView(Matrix4x4 projMatrix, Matrix4x4 viewMatrix, Rect viewport, Mesh occlusionMesh, int textureArraySlice)
{
this.projMatrix = projMatrix;
this.viewMatrix = viewMatrix;
this.viewport = viewport;
this.occlusionMesh = occlusionMesh;
this.textureArraySlice = textureArraySlice;
eyeCenterUV = ComputeEyeCenterUV(projMatrix);
}
private static Vector2 ComputeEyeCenterUV(Matrix4x4 proj)
{
var projectionParameters = proj.decomposeProjection;
float left = Math.Abs(projectionParameters.left);
float right = Math.Abs(projectionParameters.right);
float top = Math.Abs(projectionParameters.top);
float bottom = Math.Abs(projectionParameters.bottom);
return new Vector2(left / (right + left), top / (top + bottom));
}
}
}
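
To make the eye-center math concrete, here is a standalone restatement of the formula above (ComputeEyeCenterUV itself is private); the helper and sample values are illustrative only. For a symmetric frustum the result is (0.5, 0.5); for an asymmetric one the center shifts toward the wider side.

using UnityEngine;

static class EyeCenterExample
{
    // Same formula as ComputeEyeCenterUV, re-stated here for illustration.
    static Vector2 EyeCenterUV(float left, float right, float top, float bottom)
    {
        left = Mathf.Abs(left); right = Mathf.Abs(right);
        top = Mathf.Abs(top); bottom = Mathf.Abs(bottom);
        return new Vector2(left / (right + left), top / (top + bottom));
    }

    static void Demo()
    {
        // Symmetric frustum: the eye center sits at the middle of the texture.
        Vector2 symmetric = EyeCenterUV(-1f, 1f, 1f, -1f);      // (0.5, 0.5)

        // Asymmetric HMD-style frustum: left plane is wider, so the center moves to x = 1.2 / (1.2 + 0.8) = 0.6.
        Vector2 asymmetric = EyeCenterUV(-1.2f, 0.8f, 1f, -1f); // (0.6, 0.5)

        Debug.Log($"{symmetric} {asymmetric}");
    }
}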


@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: c8920a8121c6f154b97d56f966d3c4d1
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant: