2024-09-20 20:30:10 +02:00
commit 4fabf1a6fd
29169 changed files with 1706941 additions and 0 deletions

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 686bcd471035ed641a36aa69aacb76a9
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,56 @@
#ifndef DECODE_SH
# define DECODE_SH
// TODO: We're working on irradiance instead of radiance coefficients
// Add a safety margin of 2 to avoid out-of-bounds values
#define APV_L1_ENCODING_SCALE 2.0f // Should be 3/(2*sqrt(3)) * 2, but rounded to 2 due to issues we are observing.
#define APV_L2_ENCODING_SCALE 3.5777088f // 4/sqrt(5) * 2
float3 DecodeSH(float l0, float3 l1)
{
return (l1 - 0.5f) * 2.0f * APV_L1_ENCODING_SCALE * l0;
}
void DecodeSH_L2(inout float3 l0, inout float4 l2_R, inout float4 l2_G, inout float4 l2_B, inout float4 l2_C)
{
l2_R = (l2_R - 0.5f) * APV_L2_ENCODING_SCALE * l0.r;
l2_G = (l2_G - 0.5f) * APV_L2_ENCODING_SCALE * l0.g;
l2_B = (l2_B - 0.5f) * APV_L2_ENCODING_SCALE * l0.b;
l2_C = (l2_C - 0.5f) * APV_L2_ENCODING_SCALE;
l2_C.rgb *= l0;
// Account for how L2 is encoded.
l0.r -= l2_R.z;
l0.g -= l2_G.z;
l0.b -= l2_B.z;
l2_R.z *= 3.0f;
l2_G.z *= 3.0f;
l2_B.z *= 3.0f;
}
float3 EncodeSH(float l0, float3 l1)
{
return l0 == 0.0f ? 0.5f : l1 * rcp(l0) / (2.0f * APV_L1_ENCODING_SCALE) + 0.5f;
}
void EncodeSH_L2(inout float3 l0, inout float4 l2_R, inout float4 l2_G, inout float4 l2_B, inout float3 l2_C)
{
// Account for how L2 is encoded.
l2_R.z /= 3.0f;
l2_G.z /= 3.0f;
l2_B.z /= 3.0f;
l0.r += l2_R.z;
l0.g += l2_G.z;
l0.b += l2_B.z;
float3 rcpl0 = rcp(l0);
rcpl0 = float3(l0.x == 0.0f ? 0.0f : rcpl0.x, l0.y == 0.0f ? 0.0f : rcpl0.y, l0.z == 0.0f ? 0.0f : rcpl0.z);
l2_R = 0.5f + l2_R * rcp(APV_L2_ENCODING_SCALE) * rcpl0.r;
l2_G = 0.5f + l2_G * rcp(APV_L2_ENCODING_SCALE) * rcpl0.g;
l2_B = 0.5f + l2_B * rcp(APV_L2_ENCODING_SCALE) * rcpl0.b;
l2_C = 0.5f + l2_C * rcp(APV_L2_ENCODING_SCALE) * rcpl0;
}
#endif // DECODE_SH
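For reference, the L1 coefficients are stored biased around 0.5 and scaled so they fit an unsigned-normalized texture. Below is a minimal C# sketch of the same round trip; the class and method names are illustrative assumptions that mirror the HLSL above.

using UnityEngine;

static class ApvShL1Reference
{
    const float kL1EncodingScale = 2.0f; // mirrors APV_L1_ENCODING_SCALE

    // Encode maps l1 / l0 into [0, 1] around 0.5 so it can live in a UNorm texture.
    public static Vector3 Encode(float l0, Vector3 l1)
        => l0 == 0.0f ? new Vector3(0.5f, 0.5f, 0.5f)
                      : l1 / l0 / (2.0f * kL1EncodingScale) + new Vector3(0.5f, 0.5f, 0.5f);

    // Decode undoes the bias and scale: Decode(l0, Encode(l0, l1)) == l1 whenever l0 != 0.
    public static Vector3 Decode(float l0, Vector3 encoded)
        => (encoded - new Vector3(0.5f, 0.5f, 0.5f)) * (2.0f * kL1EncodingScale * l0);
}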

View File

@@ -0,0 +1,10 @@
fileFormatVersion: 2
guid: 6646f462aeb8278489c25fda24fd2db8
ShaderImporter:
externalObjects: {}
defaultTextures: []
nonModifiableTextures: []
preprocessorOverride: 0
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,541 @@
//#define USE_INDEX_NATIVE_ARRAY
using System;
using System.Diagnostics;
using System.Collections.Generic;
using UnityEngine.Profiling;
using System.Collections;
using Unity.Collections;
using Chunk = UnityEngine.Rendering.ProbeBrickPool.BrickChunkAlloc;
using CellInfo = UnityEngine.Rendering.ProbeReferenceVolume.CellInfo;
using Cell = UnityEngine.Rendering.ProbeReferenceVolume.Cell;
namespace UnityEngine.Rendering
{
internal class ProbeBrickIndex
{
// a few constants
internal const int kMaxSubdivisionLevels = 7; // 3 bits
internal const int kIndexChunkSize = 243;
BitArray m_IndexChunks;
int m_IndexInChunks;
int m_NextFreeChunk;
int m_AvailableChunkCount;
ComputeBuffer m_PhysicalIndexBuffer;
int[] m_PhysicalIndexBufferData;
internal int estimatedVMemCost { get; private set; }
[DebuggerDisplay("Brick [{position}, {subdivisionLevel}]")]
[Serializable]
public struct Brick : IEquatable<Brick>
{
public Vector3Int position; // refspace index, indices are cell coordinates at max resolution
public int subdivisionLevel; // size as factor covered elementary cells
internal Brick(Vector3Int position, int subdivisionLevel)
{
this.position = position;
this.subdivisionLevel = subdivisionLevel;
}
public bool Equals(Brick other) => position == other.position && subdivisionLevel == other.subdivisionLevel;
}
[DebuggerDisplay("Brick [{brick.position}, {brick.subdivisionLevel}], {flattenedIdx}")]
struct ReservedBrick
{
public Brick brick;
public int flattenedIdx;
}
class VoxelMeta
{
public Cell cell;
public List<ushort> brickIndices = new List<ushort>();
public void Clear()
{
cell = null;
brickIndices.Clear();
}
}
class BrickMeta
{
public HashSet<Vector3Int> voxels = new HashSet<Vector3Int>();
public List<ReservedBrick> bricks = new List<ReservedBrick>();
public void Clear()
{
voxels.Clear();
bricks.Clear();
}
}
Vector3Int m_CenterRS; // the anchor in ref space, around which the index is defined. [IMPORTANT NOTE! For now this is always 0, so it is not passed to the shader, but it is kept here while development is active in case it proves useful]
Dictionary<Vector3Int, List<VoxelMeta>> m_VoxelToBricks;
Dictionary<Cell, BrickMeta> m_BricksToVoxels;
// Various pools for data reuse
ObjectPool<BrickMeta> m_BrickMetaPool = new ObjectPool<BrickMeta>(x => x.Clear(), null, false);
ObjectPool<List<VoxelMeta>> m_VoxelMetaListPool = new ObjectPool<List<VoxelMeta>>(x => x.Clear(), null, false);
ObjectPool<VoxelMeta> m_VoxelMetaPool = new ObjectPool<VoxelMeta>(x => x.Clear(), null, false);
int GetVoxelSubdivLevel()
{
int defaultVoxelSubdivLevel = 3;
return Mathf.Min(defaultVoxelSubdivLevel, ProbeReferenceVolume.instance.GetMaxSubdivision() - 1);
}
bool m_NeedUpdateIndexComputeBuffer;
int m_UpdateMinIndex = int.MaxValue;
int m_UpdateMaxIndex = int.MinValue;
// Static variable required to avoid allocations inside lambda functions
static Cell g_Cell = null;
int SizeOfPhysicalIndexFromBudget(ProbeVolumeTextureMemoryBudget memoryBudget)
{
switch (memoryBudget)
{
case ProbeVolumeTextureMemoryBudget.MemoryBudgetLow:
// 16 MB - 4 million bricks' worth of space. At full resolution and a distance of 1 meter between probes, this is roughly 474 * 474 * 474 meters' worth of bricks. If scaled 0.25x on the Y axis, this is equivalent to 948 * 118 * 948 meters.
return 16000000;
case ProbeVolumeTextureMemoryBudget.MemoryBudgetMedium:
// 32 MB - 8 million bricks' worth of space. At full resolution and a distance of 1 meter between probes, this is roughly 600 * 600 * 600 meters' worth of bricks. If scaled 0.25x on the Y axis, this is equivalent to 1200 * 150 * 1200 meters.
return 32000000;
case ProbeVolumeTextureMemoryBudget.MemoryBudgetHigh:
// 64 MB - 16 million bricks' worth of space. At full resolution and a distance of 1 meter between probes, this is roughly 756 * 756 * 756 meters' worth of bricks. If scaled 0.25x on the Y axis, this is equivalent to 1512 * 184 * 1512 meters.
return 64000000;
}
return 32000000;
}
internal ProbeBrickIndex(ProbeVolumeTextureMemoryBudget memoryBudget)
{
Profiler.BeginSample("Create ProbeBrickIndex");
m_CenterRS = new Vector3Int(0, 0, 0);
m_VoxelToBricks = new Dictionary<Vector3Int, List<VoxelMeta>>();
m_BricksToVoxels = new Dictionary<Cell, BrickMeta>();
m_NeedUpdateIndexComputeBuffer = false;
m_IndexInChunks = Mathf.CeilToInt((float)SizeOfPhysicalIndexFromBudget(memoryBudget) / kIndexChunkSize);
m_AvailableChunkCount = m_IndexInChunks;
m_IndexChunks = new BitArray(Mathf.Max(1, m_IndexInChunks));
int physicalBufferSize = m_IndexInChunks * kIndexChunkSize;
m_PhysicalIndexBufferData = new int[physicalBufferSize];
m_PhysicalIndexBuffer = new ComputeBuffer(physicalBufferSize, sizeof(int), ComputeBufferType.Structured);
m_NextFreeChunk = 0;
estimatedVMemCost = physicalBufferSize * sizeof(int);
// Should be done by a compute shader
Clear();
Profiler.EndSample();
}
public int GetRemainingChunkCount()
{
return m_AvailableChunkCount;
}
internal void UploadIndexData()
{
Debug.Assert(m_UpdateMinIndex >= 0 && m_UpdateMaxIndex < m_PhysicalIndexBufferData.Length);
var count = m_UpdateMaxIndex - m_UpdateMinIndex + 1;
m_PhysicalIndexBuffer.SetData(m_PhysicalIndexBufferData, m_UpdateMinIndex, m_UpdateMinIndex, count);
m_NeedUpdateIndexComputeBuffer = false;
m_UpdateMaxIndex = int.MinValue;
m_UpdateMinIndex = int.MaxValue;
}
internal void Clear()
{
Profiler.BeginSample("Clear Index");
for (int i = 0; i < m_PhysicalIndexBufferData.Length; ++i)
m_PhysicalIndexBufferData[i] = -1;
m_NeedUpdateIndexComputeBuffer = true;
m_UpdateMinIndex = 0;
m_UpdateMaxIndex = m_PhysicalIndexBufferData.Length - 1;
m_NextFreeChunk = 0;
m_IndexChunks.SetAll(false);
foreach (var value in m_VoxelToBricks.Values)
{
foreach (var voxel in value)
m_VoxelMetaPool.Release(voxel);
m_VoxelMetaListPool.Release(value);
}
m_VoxelToBricks.Clear();
foreach (var value in m_BricksToVoxels.Values)
m_BrickMetaPool.Release(value);
m_BricksToVoxels.Clear();
Profiler.EndSample();
}
void MapBrickToVoxels(ProbeBrickIndex.Brick brick, HashSet<Vector3Int> voxels)
{
// create a list of all voxels this brick will touch
int brick_subdiv = brick.subdivisionLevel;
int voxels_touched_cnt = (int)Mathf.Pow(3, Mathf.Max(0, brick_subdiv - GetVoxelSubdivLevel()));
Vector3Int ipos = brick.position;
int brick_size = ProbeReferenceVolume.CellSize(brick.subdivisionLevel);
int voxel_size = ProbeReferenceVolume.CellSize(GetVoxelSubdivLevel());
if (voxels_touched_cnt <= 1)
{
Vector3 pos = brick.position;
pos = pos * (1.0f / voxel_size);
ipos = new Vector3Int(Mathf.FloorToInt(pos.x) * voxel_size, Mathf.FloorToInt(pos.y) * voxel_size, Mathf.FloorToInt(pos.z) * voxel_size);
}
for (int z = ipos.z; z < ipos.z + brick_size; z += voxel_size)
for (int y = ipos.y; y < ipos.y + brick_size; y += voxel_size)
for (int x = ipos.x; x < ipos.x + brick_size; x += voxel_size)
{
voxels.Add(new Vector3Int(x, y, z));
}
}
void ClearVoxel(Vector3Int pos, CellIndexUpdateInfo cellInfo)
{
Vector3Int vx_min, vx_max;
ClipToIndexSpace(pos, GetVoxelSubdivLevel(), out vx_min, out vx_max, cellInfo);
UpdatePhysicalIndex(vx_min, vx_max, -1, cellInfo);
}
internal void GetRuntimeResources(ref ProbeReferenceVolume.RuntimeResources rr)
{
// If an update of the actual compute buffer is pending, we do it here
if (m_NeedUpdateIndexComputeBuffer)
{
UploadIndexData();
}
rr.index = m_PhysicalIndexBuffer;
}
internal void Cleanup()
{
CoreUtils.SafeRelease(m_PhysicalIndexBuffer);
m_PhysicalIndexBuffer = null;
}
public struct CellIndexUpdateInfo
{
public int firstChunkIndex;
public int numberOfChunks;
public int minSubdivInCell;
// IMPORTANT: These values are expressed at max resolution; the mapping to the lowest possible
// resolution is done afterwards. However, they are still in local space.
public Vector3Int minValidBrickIndexForCellAtMaxRes;
public Vector3Int maxValidBrickIndexForCellAtMaxResPlusOne;
public Vector3Int cellPositionInBricksAtMaxRes;
}
int MergeIndex(int index, int size)
{
const int mask = kMaxSubdivisionLevels;
const int shift = 28;
return (index & ~(mask << shift)) | ((size & mask) << shift);
}
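// MergeIndex above packs the brick's subdivision level into bits 28..30 of the flattened pool index
// (kMaxSubdivisionLevels = 7, i.e. a 3-bit mask). A hypothetical inverse, shown only to document the
// layout on the CPU side; this is an illustrative sketch and is not called anywhere:
static void UnpackIndex(int packed, out int flattenedIdx, out int subdivisionLevel)
{
    const int mask = kMaxSubdivisionLevels;
    const int shift = 28;
    subdivisionLevel = (packed >> shift) & mask;
    flattenedIdx = packed & ~(mask << shift);
}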
internal bool AssignIndexChunksToCell(int bricksCount, ref CellIndexUpdateInfo cellUpdateInfo, bool ignoreErrorLog)
{
// We need to better handle the case where the chunks are full; this is where streaming will need to come into play, swapping chunks in/out.
// Also, the current way of finding an empty spot might be sub-optimal; once streaming is in place it would be nice to make this more efficient
// if it is expected to happen frequently.
int numberOfChunks = Mathf.CeilToInt((float)bricksCount / kIndexChunkSize);
// Search for the first empty element with enough space.
int firstValidChunk = -1;
for (int i = 0; i < m_IndexInChunks; ++i)
{
if (!m_IndexChunks[i] && (i + numberOfChunks) < m_IndexInChunks)
{
int emptySlotsStartingHere = 0;
for (int k = i; k < (i + numberOfChunks); ++k)
{
if (!m_IndexChunks[k]) emptySlotsStartingHere++;
else break;
}
if (emptySlotsStartingHere == numberOfChunks)
{
firstValidChunk = i;
break;
}
}
}
if (firstValidChunk < 0)
{
// During baking we know we can hit this when trying to dilate all cells at the same time.
// That happens because we try to load all cells at once; if the budget is not high enough, the allocation fails.
// In that case we fall back to iterating separately over each cell and its neighbors,
// so we don't want to spam error messages during baking and we ignore them here.
// In theory this should never happen with proper streaming/defrag, but we keep the message just in case.
if (!ignoreErrorLog)
Debug.LogError("APV Index Allocation failed.");
return false;
}
// This assert will need to go away or change when streaming is allowed (we will need to find holes in the available chunks or stream data out)
cellUpdateInfo.firstChunkIndex = firstValidChunk;
cellUpdateInfo.numberOfChunks = numberOfChunks;
for (int i = firstValidChunk; i < (firstValidChunk + numberOfChunks); ++i)
{
Debug.Assert(!m_IndexChunks[i]);
m_IndexChunks[i] = true;
}
m_NextFreeChunk += Mathf.Max(0, (firstValidChunk + numberOfChunks) - m_NextFreeChunk);
m_AvailableChunkCount -= numberOfChunks;
return true;
}
public void AddBricks(Cell cell, NativeArray<Brick> bricks, List<Chunk> allocations, int allocationSize, int poolWidth, int poolHeight, CellIndexUpdateInfo cellInfo)
{
Debug.Assert(bricks.Length <= ushort.MaxValue, "Cannot add more than 65K bricks per RegId.");
int largest_cell = ProbeReferenceVolume.CellSize(kMaxSubdivisionLevels);
g_Cell = cell;
// create a new copy
BrickMeta bm = m_BrickMetaPool.Get();
m_BricksToVoxels.Add(cell, bm);
int brick_idx = 0;
// find all voxels each brick will touch
for (int i = 0; i < allocations.Count; i++)
{
Chunk alloc = allocations[i];
int cnt = Mathf.Min(allocationSize, bricks.Length - brick_idx);
for (int j = 0; j < cnt; j++, brick_idx++, alloc.x += ProbeBrickPool.kBrickProbeCountPerDim)
{
Brick brick = bricks[brick_idx];
int cellSize = ProbeReferenceVolume.CellSize(brick.subdivisionLevel);
Debug.Assert(cellSize <= largest_cell, "Cell sizes are not correctly sorted.");
largest_cell = Mathf.Min(largest_cell, cellSize);
MapBrickToVoxels(brick, bm.voxels);
ReservedBrick rbrick = new ReservedBrick();
rbrick.brick = brick;
rbrick.flattenedIdx = MergeIndex(alloc.flattenIndex(poolWidth, poolHeight), brick.subdivisionLevel);
bm.bricks.Add(rbrick);
foreach (var v in bm.voxels)
{
List<VoxelMeta> vm_list;
if (!m_VoxelToBricks.TryGetValue(v, out vm_list)) // first time the voxel is touched
{
vm_list = m_VoxelMetaListPool.Get();
m_VoxelToBricks.Add(v, vm_list);
}
VoxelMeta vm = null;
int vm_idx = vm_list.FindIndex((VoxelMeta lhs) => lhs.cell == g_Cell);
if (vm_idx == -1) // first time a brick from this id has touched this voxel
{
vm = m_VoxelMetaPool.Get();
vm.cell = cell;
vm_list.Add(vm);
}
else
{
vm = vm_list[vm_idx];
}
// add this brick to the voxel under its regId
vm.brickIndices.Add((ushort)brick_idx);
}
}
}
foreach (var voxel in bm.voxels)
{
UpdateIndexForVoxel(voxel, cellInfo);
}
}
public void RemoveBricks(CellInfo cellInfo)
{
if (!m_BricksToVoxels.ContainsKey(cellInfo.cell))
return;
var cellUpdateInfo = cellInfo.updateInfo;
g_Cell = cellInfo.cell;
BrickMeta bm = m_BricksToVoxels[cellInfo.cell];
foreach (var v in bm.voxels)
{
List<VoxelMeta> vm_list = m_VoxelToBricks[v];
int idx = vm_list.FindIndex((VoxelMeta lhs) => lhs.cell == g_Cell);
if (idx >= 0)
{
m_VoxelMetaPool.Release(vm_list[idx]);
vm_list.RemoveAt(idx);
if (vm_list.Count > 0)
{
UpdateIndexForVoxel(v, cellUpdateInfo);
}
else
{
ClearVoxel(v, cellUpdateInfo);
m_VoxelMetaListPool.Release(vm_list);
m_VoxelToBricks.Remove(v);
}
}
}
m_BrickMetaPool.Release(bm);
m_BricksToVoxels.Remove(cellInfo.cell);
// Clear allocated chunks
for (int i = cellUpdateInfo.firstChunkIndex; i < (cellUpdateInfo.firstChunkIndex + cellUpdateInfo.numberOfChunks); ++i)
{
m_IndexChunks[i] = false;
}
m_AvailableChunkCount += cellUpdateInfo.numberOfChunks;
}
void UpdateIndexForVoxel(Vector3Int voxel, CellIndexUpdateInfo cellInfo)
{
ClearVoxel(voxel, cellInfo);
List<VoxelMeta> vm_list = m_VoxelToBricks[voxel];
foreach (var vm in vm_list)
{
// get the list of bricks and indices
List<ReservedBrick> bricks = m_BricksToVoxels[vm.cell].bricks;
List<ushort> indcs = vm.brickIndices;
UpdateIndexForVoxel(voxel, bricks, indcs, cellInfo);
}
}
void UpdatePhysicalIndex(Vector3Int brickMin, Vector3Int brickMax, int value, CellIndexUpdateInfo cellInfo)
{
// We need to do our calculations in space local to the cell, so we move the brick to local space as a first step.
// Reminder that at this point we are still operating at the highest possible resolution, not necessarily the one
// that will be the final resolution for the chunk.
brickMin = brickMin - cellInfo.cellPositionInBricksAtMaxRes;
brickMax = brickMax - cellInfo.cellPositionInBricksAtMaxRes;
// Since the index is not at a single resolution (it varies per cell), we need to bring the brick coordinates to the output resolution
// before finding the locations inside the index for the current cell/chunk.
brickMin /= ProbeReferenceVolume.CellSize(cellInfo.minSubdivInCell);
brickMax /= ProbeReferenceVolume.CellSize(cellInfo.minSubdivInCell);
// Verify we are actually in local space now.
int maxCellSizeInOutputRes = ProbeReferenceVolume.CellSize(ProbeReferenceVolume.instance.GetMaxSubdivision() - 1 - cellInfo.minSubdivInCell);
Debug.Assert(brickMin.x >= 0 && brickMin.y >= 0 && brickMin.z >= 0 && brickMax.x >= 0 && brickMax.y >= 0 && brickMax.z >= 0);
Debug.Assert(brickMin.x < maxCellSizeInOutputRes && brickMin.y < maxCellSizeInOutputRes && brickMin.z < maxCellSizeInOutputRes && brickMax.x <= maxCellSizeInOutputRes && brickMax.y <= maxCellSizeInOutputRes && brickMax.z <= maxCellSizeInOutputRes);
// We are now at the right resolution, but we have not yet accounted for the valid area, so we still need to normalize against it.
// To do so, first bring the valid-area limits to the desired resolution.
var cellMinIndex = cellInfo.minValidBrickIndexForCellAtMaxRes / ProbeReferenceVolume.CellSize(cellInfo.minSubdivInCell);
var cellMaxIndex = cellInfo.maxValidBrickIndexForCellAtMaxResPlusOne / ProbeReferenceVolume.CellSize(cellInfo.minSubdivInCell);
// Then offset the local min and max indices by the valid-area minimum.
brickMin -= cellMinIndex;
brickMax -= cellMinIndex;
// In theory everything is positive now since we clipped during the voxel stage. Keeping the assert for debugging; it can go later.
Debug.Assert(brickMin.x >= 0 && brickMin.y >= 0 && brickMin.z >= 0 && brickMax.x >= 0 && brickMax.y >= 0 && brickMax.z >= 0);
// Compute the span of the valid part
var size = (cellMaxIndex - cellMinIndex);
// Analytically compute min and max because doing it in the inner loop with Math.Min/Max is costly (not inlined)
int chunkStart = cellInfo.firstChunkIndex * kIndexChunkSize;
int newMin = chunkStart + brickMin.z * (size.x * size.y) + brickMin.x * size.y + brickMin.y;
int newMax = chunkStart + Math.Max(0, (brickMax.z - 1)) * (size.x * size.y) + Math.Max(0, (brickMax.x - 1)) * size.y + Math.Max(0, (brickMax.y - 1));
m_UpdateMinIndex = Math.Min(m_UpdateMinIndex, newMin);
m_UpdateMaxIndex = Math.Max(m_UpdateMaxIndex, newMax);
// Loop through all touched indices
for (int x = brickMin.x; x < brickMax.x; ++x)
{
for (int z = brickMin.z; z < brickMax.z; ++z)
{
for (int y = brickMin.y; y < brickMax.y; ++y)
{
int localFlatIdx = z * (size.x * size.y) + x * size.y + y;
int actualIdx = chunkStart + localFlatIdx;
m_PhysicalIndexBufferData[actualIdx] = value;
}
}
}
m_NeedUpdateIndexComputeBuffer = true;
}
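// Illustration of the flattening used in the loop above (z-major, then x, then y). Hypothetical
// helper for documentation only; for example size = (4, 4, 4) and local = (1, 2, 3) give
// 3 * 16 + 1 * 4 + 2 = 54, so the write lands at m_PhysicalIndexBufferData[chunkStart + 54].
static int FlattenLocalBrickIndex(Vector3Int local, Vector3Int size)
{
    return local.z * (size.x * size.y) + local.x * size.y + local.y;
}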
void ClipToIndexSpace(Vector3Int pos, int subdiv, out Vector3Int outMinpos, out Vector3Int outMaxpos, CellIndexUpdateInfo cellInfo)
{
// to relative coordinates
int cellSize = ProbeReferenceVolume.CellSize(subdiv);
// The position here is in global space, however we want to constrain this voxel update to the valid cell area
var minValidPosition = cellInfo.cellPositionInBricksAtMaxRes + cellInfo.minValidBrickIndexForCellAtMaxRes;
var maxValidPosition = cellInfo.cellPositionInBricksAtMaxRes + cellInfo.maxValidBrickIndexForCellAtMaxResPlusOne - Vector3Int.one;
int minpos_x = pos.x - m_CenterRS.x;
int minpos_y = pos.y;
int minpos_z = pos.z - m_CenterRS.z;
int maxpos_x = minpos_x + cellSize;
int maxpos_y = minpos_y + cellSize;
int maxpos_z = minpos_z + cellSize;
// clip to valid region
minpos_x = Mathf.Max(minpos_x, minValidPosition.x);
minpos_y = Mathf.Max(minpos_y, minValidPosition.y);
minpos_z = Mathf.Max(minpos_z, minValidPosition.z);
maxpos_x = Mathf.Min(maxpos_x, maxValidPosition.x);
maxpos_y = Mathf.Min(maxpos_y, maxValidPosition.y);
maxpos_z = Mathf.Min(maxpos_z, maxValidPosition.z);
outMinpos = new Vector3Int(minpos_x, minpos_y, minpos_z);
outMaxpos = new Vector3Int(maxpos_x, maxpos_y, maxpos_z);
}
void UpdateIndexForVoxel(Vector3Int voxel, List<ReservedBrick> bricks, List<ushort> indices, CellIndexUpdateInfo cellInfo)
{
// clip voxel to index space
Vector3Int vx_min, vx_max;
ClipToIndexSpace(voxel, GetVoxelSubdivLevel(), out vx_min, out vx_max, cellInfo);
foreach (var rbrick in bricks)
{
// clip brick to clipped voxel
int brick_cell_size = ProbeReferenceVolume.CellSize(rbrick.brick.subdivisionLevel);
Vector3Int brick_min = rbrick.brick.position;
Vector3Int brick_max = rbrick.brick.position + Vector3Int.one * brick_cell_size;
brick_min.x = Mathf.Max(vx_min.x, brick_min.x - m_CenterRS.x);
brick_min.y = Mathf.Max(vx_min.y, brick_min.y);
brick_min.z = Mathf.Max(vx_min.z, brick_min.z - m_CenterRS.z);
brick_max.x = Mathf.Min(vx_max.x, brick_max.x - m_CenterRS.x);
brick_max.y = Mathf.Min(vx_max.y, brick_max.y);
brick_max.z = Mathf.Min(vx_max.z, brick_max.z - m_CenterRS.z);
UpdatePhysicalIndex(brick_min, brick_max, rbrick.flattenedIdx, cellInfo);
}
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: c1a832f1bbd6c0248844c604b55947b0
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,548 @@
using System.Diagnostics;
using System.Collections.Generic;
using UnityEngine.Profiling;
using UnityEngine.Experimental.Rendering;
namespace UnityEngine.Rendering
{
internal class ProbeBrickPool
{
const int kProbePoolChunkSizeInBricks = 128;
[DebuggerDisplay("Chunk ({x}, {y}, {z})")]
public struct BrickChunkAlloc
{
public int x, y, z;
internal int flattenIndex(int sx, int sy) { return z * (sx * sy) + y * sx + x; }
}
public struct DataLocation
{
internal Texture TexL0_L1rx;
internal Texture TexL1_G_ry;
internal Texture TexL1_B_rz;
internal Texture TexL2_0;
internal Texture TexL2_1;
internal Texture TexL2_2;
internal Texture TexL2_3;
internal Texture3D TexValidity;
internal int width;
internal int height;
internal int depth;
internal void Cleanup()
{
CoreUtils.Destroy(TexL0_L1rx);
CoreUtils.Destroy(TexL1_G_ry);
CoreUtils.Destroy(TexL1_B_rz);
CoreUtils.Destroy(TexL2_0);
CoreUtils.Destroy(TexL2_1);
CoreUtils.Destroy(TexL2_2);
CoreUtils.Destroy(TexL2_3);
CoreUtils.Destroy(TexValidity);
TexL0_L1rx = null;
TexL1_G_ry = null;
TexL1_B_rz = null;
TexL2_0 = null;
TexL2_1 = null;
TexL2_2 = null;
TexL2_3 = null;
TexValidity = null;
}
}
internal const int kBrickCellCount = 3;
internal const int kBrickProbeCountPerDim = kBrickCellCount + 1;
internal const int kBrickProbeCountTotal = kBrickProbeCountPerDim * kBrickProbeCountPerDim * kBrickProbeCountPerDim;
internal const int kChunkProbeCountPerDim = kProbePoolChunkSizeInBricks * kBrickProbeCountPerDim;
internal int estimatedVMemCost { get; private set; }
const int kMaxPoolWidth = 1 << 11; // 2048 texels is a d3d11 limit for tex3d in all dimensions
internal DataLocation m_Pool; // internal so it can be accessed from the blending pool only
BrickChunkAlloc m_NextFreeChunk;
Stack<BrickChunkAlloc> m_FreeList;
int m_AvailableChunkCount;
ProbeVolumeSHBands m_SHBands;
bool m_ContainsValidity;
internal ProbeBrickPool(ProbeVolumeTextureMemoryBudget memoryBudget, ProbeVolumeSHBands shBands, bool allocateValidityData = true)
{
Profiler.BeginSample("Create ProbeBrickPool");
m_NextFreeChunk.x = m_NextFreeChunk.y = m_NextFreeChunk.z = 0;
m_SHBands = shBands;
m_ContainsValidity = allocateValidityData;
m_FreeList = new Stack<BrickChunkAlloc>(256);
DerivePoolSizeFromBudget(memoryBudget, out int width, out int height, out int depth);
m_Pool = CreateDataLocation(width * height * depth, false, shBands, "APV", true, allocateValidityData, out int estimatedCost);
estimatedVMemCost = estimatedCost;
m_AvailableChunkCount = (m_Pool.width / (kProbePoolChunkSizeInBricks * kBrickProbeCountPerDim)) * (m_Pool.height / kBrickProbeCountPerDim) * (m_Pool.depth / kBrickProbeCountPerDim);
Profiler.EndSample();
}
public int GetRemainingChunkCount()
{
return m_AvailableChunkCount;
}
internal void EnsureTextureValidity()
{
// We assume that if one texture is null, all of them are. In any case we recreate them all together.
if (m_Pool.TexL0_L1rx == null)
{
m_Pool.Cleanup();
m_Pool = CreateDataLocation(m_Pool.width * m_Pool.height * m_Pool.depth, false, m_SHBands, "APV", true, m_ContainsValidity, out int estimatedCost);
estimatedVMemCost = estimatedCost;
}
}
internal static int GetChunkSizeInBrickCount() { return kProbePoolChunkSizeInBricks; }
internal static int GetChunkSizeInProbeCount() { return kProbePoolChunkSizeInBricks * kBrickProbeCountTotal; }
internal int GetPoolWidth() { return m_Pool.width; }
internal int GetPoolHeight() { return m_Pool.height; }
internal Vector3Int GetPoolDimensions() { return new Vector3Int(m_Pool.width, m_Pool.height, m_Pool.depth); }
internal void GetRuntimeResources(ref ProbeReferenceVolume.RuntimeResources rr)
{
rr.L0_L1rx = m_Pool.TexL0_L1rx as RenderTexture;
rr.L1_G_ry = m_Pool.TexL1_G_ry as RenderTexture;
rr.L1_B_rz = m_Pool.TexL1_B_rz as RenderTexture;
rr.L2_0 = m_Pool.TexL2_0 as RenderTexture;
rr.L2_1 = m_Pool.TexL2_1 as RenderTexture;
rr.L2_2 = m_Pool.TexL2_2 as RenderTexture;
rr.L2_3 = m_Pool.TexL2_3 as RenderTexture;
rr.Validity = m_Pool.TexValidity;
}
internal void Clear()
{
m_FreeList.Clear();
m_NextFreeChunk.x = m_NextFreeChunk.y = m_NextFreeChunk.z = 0;
}
internal static int GetChunkCount(int brickCount, int chunkSizeInBricks)
{
int chunkSize = chunkSizeInBricks;
return (brickCount + chunkSize - 1) / chunkSize;
}
internal bool Allocate(int numberOfBrickChunks, List<BrickChunkAlloc> outAllocations, bool ignoreErrorLog)
{
while (m_FreeList.Count > 0 && numberOfBrickChunks > 0)
{
outAllocations.Add(m_FreeList.Pop());
numberOfBrickChunks--;
m_AvailableChunkCount--;
}
for (uint i = 0; i < numberOfBrickChunks; i++)
{
if (m_NextFreeChunk.z >= m_Pool.depth)
{
// During baking we know we can hit this when trying to dilate all cells at the same time.
// We don't want to spam error messages during baking, so we ignore them here.
// In theory this should never happen with proper streaming/defrag, but we keep the message just in case.
if (!ignoreErrorLog)
Debug.LogError("Cannot allocate more brick chunks, probe volume brick pool is full.");
return false; // failure case, pool is full
}
outAllocations.Add(m_NextFreeChunk);
m_AvailableChunkCount--;
m_NextFreeChunk.x += kProbePoolChunkSizeInBricks * kBrickProbeCountPerDim;
if (m_NextFreeChunk.x >= m_Pool.width)
{
m_NextFreeChunk.x = 0;
m_NextFreeChunk.y += kBrickProbeCountPerDim;
if (m_NextFreeChunk.y >= m_Pool.height)
{
m_NextFreeChunk.y = 0;
m_NextFreeChunk.z += kBrickProbeCountPerDim;
}
}
}
return true;
}
internal void Deallocate(List<BrickChunkAlloc> allocations)
{
m_AvailableChunkCount += allocations.Count;
foreach (var brick in allocations)
m_FreeList.Push(brick);
}
internal void Update(DataLocation source, List<BrickChunkAlloc> srcLocations, List<BrickChunkAlloc> dstLocations, int destStartIndex, ProbeVolumeSHBands bands)
{
for (int i = 0; i < srcLocations.Count; i++)
{
BrickChunkAlloc src = srcLocations[i];
BrickChunkAlloc dst = dstLocations[destStartIndex + i];
for (int j = 0; j < kBrickProbeCountPerDim; j++)
{
int width = Mathf.Min(kProbePoolChunkSizeInBricks * kBrickProbeCountPerDim, source.width - src.x);
Graphics.CopyTexture(source.TexL0_L1rx, src.z + j, 0, src.x, src.y, width, kBrickProbeCountPerDim, m_Pool.TexL0_L1rx, dst.z + j, 0, dst.x, dst.y);
Graphics.CopyTexture(source.TexL1_G_ry, src.z + j, 0, src.x, src.y, width, kBrickProbeCountPerDim, m_Pool.TexL1_G_ry, dst.z + j, 0, dst.x, dst.y);
Graphics.CopyTexture(source.TexL1_B_rz, src.z + j, 0, src.x, src.y, width, kBrickProbeCountPerDim, m_Pool.TexL1_B_rz, dst.z + j, 0, dst.x, dst.y);
if (m_ContainsValidity)
Graphics.CopyTexture(source.TexValidity, src.z + j, 0, src.x, src.y, width, kBrickProbeCountPerDim, m_Pool.TexValidity, dst.z + j, 0, dst.x, dst.y);
if (bands == ProbeVolumeSHBands.SphericalHarmonicsL2)
{
Graphics.CopyTexture(source.TexL2_0, src.z + j, 0, src.x, src.y, width, kBrickProbeCountPerDim, m_Pool.TexL2_0, dst.z + j, 0, dst.x, dst.y);
Graphics.CopyTexture(source.TexL2_1, src.z + j, 0, src.x, src.y, width, kBrickProbeCountPerDim, m_Pool.TexL2_1, dst.z + j, 0, dst.x, dst.y);
Graphics.CopyTexture(source.TexL2_2, src.z + j, 0, src.x, src.y, width, kBrickProbeCountPerDim, m_Pool.TexL2_2, dst.z + j, 0, dst.x, dst.y);
Graphics.CopyTexture(source.TexL2_3, src.z + j, 0, src.x, src.y, width, kBrickProbeCountPerDim, m_Pool.TexL2_3, dst.z + j, 0, dst.x, dst.y);
}
}
}
}
internal void UpdateValidity(DataLocation source, List<BrickChunkAlloc> srcLocations, List<BrickChunkAlloc> dstLocations, int destStartIndex)
{
Debug.Assert(m_ContainsValidity);
for (int i = 0; i < srcLocations.Count; i++)
{
BrickChunkAlloc src = srcLocations[i];
BrickChunkAlloc dst = dstLocations[destStartIndex + i];
for (int j = 0; j < kBrickProbeCountPerDim; j++)
{
int width = Mathf.Min(kProbePoolChunkSizeInBricks * kBrickProbeCountPerDim, source.width - src.x);
Graphics.CopyTexture(source.TexValidity, src.z + j, 0, src.x, src.y, width, kBrickProbeCountPerDim, m_Pool.TexValidity, dst.z + j, 0, dst.x, dst.y);
}
}
}
internal static Vector3Int ProbeCountToDataLocSize(int numProbes)
{
Debug.Assert(numProbes != 0);
Debug.Assert(numProbes % kBrickProbeCountTotal == 0);
int numBricks = numProbes / kBrickProbeCountTotal;
int poolWidth = kMaxPoolWidth / kBrickProbeCountPerDim;
int width, height, depth;
depth = (numBricks + poolWidth * poolWidth - 1) / (poolWidth * poolWidth);
if (depth > 1)
width = height = poolWidth;
else
{
height = (numBricks + poolWidth - 1) / poolWidth;
if (height > 1)
width = poolWidth;
else
width = numBricks;
}
width *= kBrickProbeCountPerDim;
height *= kBrickProbeCountPerDim;
depth *= kBrickProbeCountPerDim;
return new Vector3Int(width, height, depth);
}
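// Worked example of the sizing above, using only the constants defined in this class. This is an
// illustrative sketch for documentation and is not called anywhere: with kBrickProbeCountTotal = 64
// probes per brick and kMaxPoolWidth = 2048, poolWidth is 2048 / 4 = 512 bricks.
static void ProbeCountToDataLocSizeExamples()
{
    // 600 bricks: depth = 1, height = (600 + 511) / 512 = 2, width = 512, each then scaled by 4 probes per dim.
    Debug.Assert(ProbeCountToDataLocSize(600 * kBrickProbeCountTotal) == new Vector3Int(2048, 8, 4));
    // A single brick stays as small as possible: one 4x4x4 block of probes.
    Debug.Assert(ProbeCountToDataLocSize(kBrickProbeCountTotal) == new Vector3Int(4, 4, 4));
}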
public static Texture CreateDataTexture(int width, int height, int depth, GraphicsFormat format, string name, bool allocateRendertexture, ref int allocatedBytes)
{
int elementSize = format == GraphicsFormat.R16G16B16A16_SFloat ? 8 :
format == GraphicsFormat.R8G8B8A8_UNorm ? 4 : 1;
Texture texture;
allocatedBytes += (width * height * depth) * elementSize;
if (allocateRendertexture)
{
texture = new RenderTexture(new RenderTextureDescriptor()
{
width = width,
height = height,
volumeDepth = depth,
graphicsFormat = format,
mipCount = 1,
enableRandomWrite = true,
dimension = TextureDimension.Tex3D,
msaaSamples = 1,
});
}
else
texture = new Texture3D(width, height, depth, format, TextureCreationFlags.None, 1);
texture.hideFlags = HideFlags.HideAndDontSave;
texture.name = name;
if (allocateRendertexture)
(texture as RenderTexture).Create();
return texture;
}
public static DataLocation CreateDataLocation(int numProbes, bool compressed, ProbeVolumeSHBands bands, string name, bool allocateRendertexture, bool allocateValidityData, out int allocatedBytes)
{
Vector3Int locSize = ProbeCountToDataLocSize(numProbes);
int width = locSize.x;
int height = locSize.y;
int depth = locSize.z;
DataLocation loc;
var L0Format = GraphicsFormat.R16G16B16A16_SFloat;
var L1L2Format = compressed ? GraphicsFormat.RGBA_BC7_UNorm : GraphicsFormat.R8G8B8A8_UNorm;
allocatedBytes = 0;
loc.TexL0_L1rx = CreateDataTexture(width, height, depth, L0Format, $"{name}_TexL0_L1rx", allocateRendertexture, ref allocatedBytes);
loc.TexL1_G_ry = CreateDataTexture(width, height, depth, L1L2Format, $"{name}_TexL1_G_ry", allocateRendertexture, ref allocatedBytes);
loc.TexL1_B_rz = CreateDataTexture(width, height, depth, L1L2Format, $"{name}_TexL1_B_rz", allocateRendertexture, ref allocatedBytes);
if (allocateValidityData)
loc.TexValidity = CreateDataTexture(width, height, depth, GraphicsFormat.R8_UNorm, $"{name}_Validity", false, ref allocatedBytes) as Texture3D;
else
loc.TexValidity = null;
if (bands == ProbeVolumeSHBands.SphericalHarmonicsL2)
{
loc.TexL2_0 = CreateDataTexture(width, height, depth, L1L2Format, $"{name}_TexL2_0", allocateRendertexture, ref allocatedBytes);
loc.TexL2_1 = CreateDataTexture(width, height, depth, L1L2Format, $"{name}_TexL2_1", allocateRendertexture, ref allocatedBytes);
loc.TexL2_2 = CreateDataTexture(width, height, depth, L1L2Format, $"{name}_TexL2_2", allocateRendertexture, ref allocatedBytes);
loc.TexL2_3 = CreateDataTexture(width, height, depth, L1L2Format, $"{name}_TexL2_3", allocateRendertexture, ref allocatedBytes);
}
else
{
loc.TexL2_0 = null;
loc.TexL2_1 = null;
loc.TexL2_2 = null;
loc.TexL2_3 = null;
}
loc.width = width;
loc.height = height;
loc.depth = depth;
return loc;
}
void DerivePoolSizeFromBudget(ProbeVolumeTextureMemoryBudget memoryBudget, out int width, out int height, out int depth)
{
// TODO: This is fairly simplistic for now and relies on the enum values being set to the desired numbers;
// the heuristic might change later on.
width = (int)memoryBudget;
height = (int)memoryBudget;
depth = kBrickProbeCountPerDim;
}
internal void Cleanup()
{
m_Pool.Cleanup();
}
}
internal class ProbeBrickBlendingPool
{
static ComputeShader stateBlendShader;
static int scenarioBlendingKernel = -1;
static readonly int _PoolDim_LerpFactor = Shader.PropertyToID("_PoolDim_LerpFactor");
static readonly int _ChunkList = Shader.PropertyToID("_ChunkList");
static readonly int _State0_L0_L1Rx = Shader.PropertyToID("_State0_L0_L1Rx");
static readonly int _State0_L1G_L1Ry = Shader.PropertyToID("_State0_L1G_L1Ry");
static readonly int _State0_L1B_L1Rz = Shader.PropertyToID("_State0_L1B_L1Rz");
static readonly int _State0_L2_0 = Shader.PropertyToID("_State0_L2_0");
static readonly int _State0_L2_1 = Shader.PropertyToID("_State0_L2_1");
static readonly int _State0_L2_2 = Shader.PropertyToID("_State0_L2_2");
static readonly int _State0_L2_3 = Shader.PropertyToID("_State0_L2_3");
static readonly int _State1_L0_L1Rx = Shader.PropertyToID("_State1_L0_L1Rx");
static readonly int _State1_L1G_L1Ry = Shader.PropertyToID("_State1_L1G_L1Ry");
static readonly int _State1_L1B_L1Rz = Shader.PropertyToID("_State1_L1B_L1Rz");
static readonly int _State1_L2_0 = Shader.PropertyToID("_State1_L2_0");
static readonly int _State1_L2_1 = Shader.PropertyToID("_State1_L2_1");
static readonly int _State1_L2_2 = Shader.PropertyToID("_State1_L2_2");
static readonly int _State1_L2_3 = Shader.PropertyToID("_State1_L2_3");
static readonly int _Out_L0_L1Rx = Shader.PropertyToID("_Out_L0_L1Rx");
static readonly int _Out_L1G_L1Ry = Shader.PropertyToID("_Out_L1G_L1Ry");
static readonly int _Out_L1B_L1Rz = Shader.PropertyToID("_Out_L1B_L1Rz");
static readonly int _Out_L2_0 = Shader.PropertyToID("_Out_L2_0");
static readonly int _Out_L2_1 = Shader.PropertyToID("_Out_L2_1");
static readonly int _Out_L2_2 = Shader.PropertyToID("_Out_L2_2");
static readonly int _Out_L2_3 = Shader.PropertyToID("_Out_L2_3");
internal static bool isSupported => stateBlendShader != null;
internal static void Initialize(in ProbeVolumeSystemParameters parameters)
{
stateBlendShader = parameters.scenarioBlendingShader;
scenarioBlendingKernel = stateBlendShader ? stateBlendShader.FindKernel("BlendScenarios") : -1;
}
Vector4[] m_ChunkList;
int m_MappedChunks;
ProbeBrickPool m_State0, m_State1;
ProbeVolumeTextureMemoryBudget m_MemoryBudget;
ProbeVolumeSHBands m_ShBands;
internal bool isAllocated => m_State0 != null;
internal int estimatedVMemCost => isAllocated ? m_State0.estimatedVMemCost + m_State1.estimatedVMemCost : 0;
internal int GetPoolWidth() { return m_State0.m_Pool.width; }
internal int GetPoolHeight() { return m_State0.m_Pool.height; }
internal int GetPoolDepth() { return m_State0.m_Pool.depth; }
internal ProbeBrickBlendingPool(ProbeVolumeBlendingTextureMemoryBudget memoryBudget, ProbeVolumeSHBands shBands)
{
// Casting to the other memory budget enum works because it is cast to int in the end anyway
m_MemoryBudget = (ProbeVolumeTextureMemoryBudget)memoryBudget;
m_ShBands = shBands;
}
internal void AllocateResourcesIfNeeded()
{
if (isAllocated)
return;
m_State0 = new ProbeBrickPool(m_MemoryBudget, m_ShBands, false);
m_State1 = new ProbeBrickPool(m_MemoryBudget, m_ShBands, false);
int maxAvailablebrickCount = (GetPoolWidth() / ProbeBrickPool.kChunkProbeCountPerDim)
* (GetPoolHeight() / ProbeBrickPool.kBrickProbeCountPerDim)
* (GetPoolDepth() / ProbeBrickPool.kBrickProbeCountPerDim);
m_ChunkList = new Vector4[maxAvailablebrickCount];
m_MappedChunks = 0;
}
internal void Update(ProbeBrickPool.DataLocation source, List<ProbeBrickPool.BrickChunkAlloc> srcLocations, List<ProbeBrickPool.BrickChunkAlloc> dstLocations, int destStartIndex, ProbeVolumeSHBands bands, int state)
{
(state == 0 ? m_State0 : m_State1).Update(source, srcLocations, dstLocations, destStartIndex, bands);
}
static int DivRoundUp(int x, int y) => (x + y - 1) / y;
internal void PerformBlending(CommandBuffer cmd, float factor, ProbeBrickPool dstPool)
{
if (m_MappedChunks == 0)
return;
cmd.SetComputeTextureParam(stateBlendShader, scenarioBlendingKernel, _State0_L0_L1Rx, m_State0.m_Pool.TexL0_L1rx);
cmd.SetComputeTextureParam(stateBlendShader, scenarioBlendingKernel, _State0_L1G_L1Ry, m_State0.m_Pool.TexL1_G_ry);
cmd.SetComputeTextureParam(stateBlendShader, scenarioBlendingKernel, _State0_L1B_L1Rz, m_State0.m_Pool.TexL1_B_rz);
cmd.SetComputeTextureParam(stateBlendShader, scenarioBlendingKernel, _State1_L0_L1Rx, m_State1.m_Pool.TexL0_L1rx);
cmd.SetComputeTextureParam(stateBlendShader, scenarioBlendingKernel, _State1_L1G_L1Ry, m_State1.m_Pool.TexL1_G_ry);
cmd.SetComputeTextureParam(stateBlendShader, scenarioBlendingKernel, _State1_L1B_L1Rz, m_State1.m_Pool.TexL1_B_rz);
cmd.SetComputeTextureParam(stateBlendShader, scenarioBlendingKernel, _Out_L0_L1Rx, dstPool.m_Pool.TexL0_L1rx);
cmd.SetComputeTextureParam(stateBlendShader, scenarioBlendingKernel, _Out_L1G_L1Ry, dstPool.m_Pool.TexL1_G_ry);
cmd.SetComputeTextureParam(stateBlendShader, scenarioBlendingKernel, _Out_L1B_L1Rz, dstPool.m_Pool.TexL1_B_rz);
if (m_ShBands == ProbeVolumeSHBands.SphericalHarmonicsL2)
{
stateBlendShader.EnableKeyword("PROBE_VOLUMES_L2");
cmd.SetComputeTextureParam(stateBlendShader, scenarioBlendingKernel, _State0_L2_0, m_State0.m_Pool.TexL2_0);
cmd.SetComputeTextureParam(stateBlendShader, scenarioBlendingKernel, _State0_L2_1, m_State0.m_Pool.TexL2_1);
cmd.SetComputeTextureParam(stateBlendShader, scenarioBlendingKernel, _State0_L2_2, m_State0.m_Pool.TexL2_2);
cmd.SetComputeTextureParam(stateBlendShader, scenarioBlendingKernel, _State0_L2_3, m_State0.m_Pool.TexL2_3);
cmd.SetComputeTextureParam(stateBlendShader, scenarioBlendingKernel, _State1_L2_0, m_State1.m_Pool.TexL2_0);
cmd.SetComputeTextureParam(stateBlendShader, scenarioBlendingKernel, _State1_L2_1, m_State1.m_Pool.TexL2_1);
cmd.SetComputeTextureParam(stateBlendShader, scenarioBlendingKernel, _State1_L2_2, m_State1.m_Pool.TexL2_2);
cmd.SetComputeTextureParam(stateBlendShader, scenarioBlendingKernel, _State1_L2_3, m_State1.m_Pool.TexL2_3);
cmd.SetComputeTextureParam(stateBlendShader, scenarioBlendingKernel, _Out_L2_0, dstPool.m_Pool.TexL2_0);
cmd.SetComputeTextureParam(stateBlendShader, scenarioBlendingKernel, _Out_L2_1, dstPool.m_Pool.TexL2_1);
cmd.SetComputeTextureParam(stateBlendShader, scenarioBlendingKernel, _Out_L2_2, dstPool.m_Pool.TexL2_2);
cmd.SetComputeTextureParam(stateBlendShader, scenarioBlendingKernel, _Out_L2_3, dstPool.m_Pool.TexL2_3);
}
else
stateBlendShader.DisableKeyword("PROBE_VOLUMES_L2");
var poolDim_LerpFactor = new Vector4(dstPool.GetPoolWidth(), dstPool.GetPoolHeight(), factor, 0.0f);
const int numthreads = 4;
int threadX = DivRoundUp(ProbeBrickPool.kChunkProbeCountPerDim, numthreads);
int threadY = DivRoundUp(ProbeBrickPool.kBrickProbeCountPerDim, numthreads);
int threadZ = DivRoundUp(ProbeBrickPool.kBrickProbeCountPerDim, numthreads);
cmd.SetComputeVectorArrayParam(stateBlendShader, _ChunkList, m_ChunkList);
cmd.SetComputeVectorParam(stateBlendShader, _PoolDim_LerpFactor, poolDim_LerpFactor);
cmd.DispatchCompute(stateBlendShader, scenarioBlendingKernel, threadX, threadY, threadZ * m_MappedChunks);
m_MappedChunks = 0;
}
internal void BlendChunks(ProbeReferenceVolume.BlendingCellInfo blendingCell, ProbeBrickPool dstPool)
{
for (int c = 0; c < blendingCell.chunkList.Count; c++)
{
var chunk = blendingCell.chunkList[c];
int dst = blendingCell.cellInfo.chunkList[c].flattenIndex(dstPool.GetPoolWidth(), dstPool.GetPoolHeight());
m_ChunkList[m_MappedChunks++] = new Vector4(chunk.x, chunk.y, chunk.z, dst);
}
}
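// Each _ChunkList entry written above packs the source chunk origin in this blending pool (xyz, in texels)
// together with the flattened destination chunk index in the target pool (w). Hypothetical decode, shown
// only to document the packing on the CPU side; it is not called anywhere:
static void UnpackChunkListEntry(Vector4 entry, out Vector3Int srcChunk, out int dstFlattenedIndex)
{
    srcChunk = new Vector3Int((int)entry.x, (int)entry.y, (int)entry.z);
    dstFlattenedIndex = (int)entry.w;
}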
internal void Clear()
=> m_State0?.Clear();
internal bool Allocate(int numberOfBrickChunks, List<ProbeBrickPool.BrickChunkAlloc> outAllocations)
{
AllocateResourcesIfNeeded();
if (numberOfBrickChunks > m_State0.GetRemainingChunkCount())
return false;
return m_State0.Allocate(numberOfBrickChunks, outAllocations, false);
}
internal void Deallocate(List<ProbeBrickPool.BrickChunkAlloc> allocations)
{
if (allocations.Count == 0)
return;
m_State0.Deallocate(allocations);
}
internal void EnsureTextureValidity()
{
if (isAllocated)
{
m_State0.EnsureTextureValidity();
m_State1.EnsureTextureValidity();
}
}
internal void Cleanup()
{
if (isAllocated)
{
m_State0.Cleanup();
m_State1.Cleanup();
}
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 079fb321dd4edfd42bee1376c8c48c05
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,144 @@
namespace UnityEngine.Rendering
{
internal class ProbeCellIndices
{
const int kUintPerEntry = 3;
internal int estimatedVMemCost { get; private set; }
internal struct IndexMetaData
{
static uint[] s_PackedValues = new uint[kUintPerEntry];
internal Vector3Int minLocalIdx;
internal Vector3Int maxLocalIdx;
internal int firstChunkIndex;
internal int minSubdiv;
internal void Pack(out uint[] vals)
{
vals = s_PackedValues;
for (int i = 0; i < kUintPerEntry; ++i)
{
vals[i] = 0;
}
// Note this packing is very generous; we could probably drop at least one uint if we assume we don't go to extremes,
// but this layout covers all scenarios.
//
// UINT 0:
// FirstChunkIndex 29 bit
// MinSubdiv 3 bit
// UINT 1:
// minLocalIdx.x 10 bit
// minLocalIdx.y 10 bit
// minLocalIdx.z 10 bit
// UINT 2:
// maxLocalIdx.x 10 bit
// maxLocalIdx.y 10 bit
// maxLocalIdx.z 10 bit
vals[0] = (uint)firstChunkIndex & 0x1FFFFFFF;
vals[0] |= ((uint)minSubdiv & 0x7) << 29;
vals[1] = (uint)minLocalIdx.x & 0x3FF;
vals[1] |= ((uint)minLocalIdx.y & 0x3FF) << 10;
vals[1] |= ((uint)minLocalIdx.z & 0x3FF) << 20;
vals[2] = (uint)maxLocalIdx.x & 0x3FF;
vals[2] |= ((uint)maxLocalIdx.y & 0x3FF) << 10;
vals[2] |= ((uint)maxLocalIdx.z & 0x3FF) << 20;
}
}
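// Hypothetical inverse of IndexMetaData.Pack, shown only to document the bit layout described above
// (29 + 3 bits in the first uint, 3 x 10 bits in each of the other two); a consumer of
// m_IndexOfIndicesBuffer would be expected to extract the fields in roughly this way. Not called anywhere:
static void UnpackIndexMetaData(uint[] vals, out int firstChunkIndex, out int minSubdiv, out Vector3Int minLocalIdx, out Vector3Int maxLocalIdx)
{
    firstChunkIndex = (int)(vals[0] & 0x1FFFFFFF);
    minSubdiv = (int)((vals[0] >> 29) & 0x7);
    minLocalIdx = new Vector3Int((int)(vals[1] & 0x3FF), (int)((vals[1] >> 10) & 0x3FF), (int)((vals[1] >> 20) & 0x3FF));
    maxLocalIdx = new Vector3Int((int)(vals[2] & 0x3FF), (int)((vals[2] >> 10) & 0x3FF), (int)((vals[2] >> 20) & 0x3FF));
}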
ComputeBuffer m_IndexOfIndicesBuffer;
uint[] m_IndexOfIndicesData;
Vector3Int m_CellCount;
Vector3Int m_CellMin;
int m_CellSizeInMinBricks;
bool m_NeedUpdateComputeBuffer;
internal Vector3Int GetCellIndexDimension() => m_CellCount;
internal Vector3Int GetCellMinPosition() => m_CellMin;
int GetFlatIndex(Vector3Int normalizedPos)
{
return normalizedPos.z * (m_CellCount.x * m_CellCount.y) + normalizedPos.y * m_CellCount.x + normalizedPos.x;
}
internal ProbeCellIndices(Vector3Int cellMin, Vector3Int cellMax, int cellSizeInMinBricks)
{
Vector3Int cellCount = cellMax + Vector3Int.one - cellMin;
m_CellCount = cellCount;
m_CellMin = cellMin;
m_CellSizeInMinBricks = cellSizeInMinBricks;
int flatCellCount = cellCount.x * cellCount.y * cellCount.z;
int bufferSize = kUintPerEntry * flatCellCount;
m_IndexOfIndicesBuffer = new ComputeBuffer(flatCellCount, kUintPerEntry * sizeof(uint));
m_IndexOfIndicesData = new uint[bufferSize];
m_NeedUpdateComputeBuffer = false;
estimatedVMemCost = flatCellCount * kUintPerEntry * sizeof(uint);
}
internal int GetFlatIdxForCell(Vector3Int cellPosition)
{
Vector3Int normalizedPos = cellPosition - m_CellMin;
Debug.Assert(normalizedPos.x >= 0 && normalizedPos.y >= 0 && normalizedPos.z >= 0);
return GetFlatIndex(normalizedPos);
}
internal void UpdateCell(int cellFlatIdx, ProbeBrickIndex.CellIndexUpdateInfo cellUpdateInfo)
{
int minSubdivCellSize = ProbeReferenceVolume.CellSize(cellUpdateInfo.minSubdivInCell);
IndexMetaData metaData = new IndexMetaData();
metaData.minSubdiv = cellUpdateInfo.minSubdivInCell;
metaData.minLocalIdx = cellUpdateInfo.minValidBrickIndexForCellAtMaxRes / minSubdivCellSize;
metaData.maxLocalIdx = cellUpdateInfo.maxValidBrickIndexForCellAtMaxResPlusOne / minSubdivCellSize;
metaData.firstChunkIndex = cellUpdateInfo.firstChunkIndex;
metaData.Pack(out uint[] packedVals);
for (int i = 0; i < kUintPerEntry; ++i)
{
m_IndexOfIndicesData[cellFlatIdx * kUintPerEntry + i] = packedVals[i];
}
m_NeedUpdateComputeBuffer = true;
}
internal void MarkCellAsUnloaded(int cellFlatIdx)
{
for (int i = 0; i < kUintPerEntry; ++i)
{
m_IndexOfIndicesData[cellFlatIdx * kUintPerEntry + i] = 0xFFFFFFFF;
}
m_NeedUpdateComputeBuffer = true;
}
internal void PushComputeData()
{
m_IndexOfIndicesBuffer.SetData(m_IndexOfIndicesData);
m_NeedUpdateComputeBuffer = false;
}
internal void GetRuntimeResources(ref ProbeReferenceVolume.RuntimeResources rr)
{
// If an update of the actual compute buffer is pending, we do it here
if (m_NeedUpdateComputeBuffer)
{
PushComputeData();
}
rr.cellIndices = m_IndexOfIndicesBuffer;
}
internal void Cleanup()
{
CoreUtils.SafeRelease(m_IndexOfIndicesBuffer);
m_IndexOfIndicesBuffer = null;
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 58562d806ae1e3b4ba9cc4b895239782
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,647 @@
using System;
using System.Collections.Generic;
using System.Linq;
using UnityEngine.Experimental.Rendering.RenderGraphModule;
using UnityEditor;
namespace UnityEngine.Rendering
{
/// <summary>
/// Modes for Debugging Probes
/// </summary>
[GenerateHLSL]
public enum DebugProbeShadingMode
{
/// <summary>
/// Based on Spherical Harmonics
/// </summary>
SH,
/// <summary>
/// Based on Spherical Harmonics first band only (ambient)
/// </summary>
SHL0,
/// <summary>
/// Based on Spherical Harmonics band zero and one only
/// </summary>
SHL0L1,
/// <summary>
/// Based on validity
/// </summary>
Validity,
/// <summary>
/// Based on validity over a dilation threshold
/// </summary>
ValidityOverDilationThreshold,
/// <summary>
/// Shows in red the probes that have been made invalid by touch-up volumes. Note that this debug view only shows results for volumes still present in the scene.
/// </summary>
InvalidatedByTouchupVolumes,
/// <summary>
/// Based on size
/// </summary>
Size
}
class ProbeVolumeDebug : IDebugData
{
public bool drawProbes;
public bool drawBricks;
public bool drawCells;
public bool realtimeSubdivision;
public int subdivisionCellUpdatePerFrame = 4;
public float subdivisionDelayInSeconds = 1;
public DebugProbeShadingMode probeShading;
public float probeSize = 0.3f;
public float subdivisionViewCullingDistance = 500.0f;
public float probeCullingDistance = 200.0f;
public int maxSubdivToVisualize = ProbeBrickIndex.kMaxSubdivisionLevels;
public int minSubdivToVisualize = 0;
public float exposureCompensation;
public bool drawVirtualOffsetPush;
public float offsetSize = 0.025f;
public bool freezeStreaming;
public int otherStateIndex = 0;
public ProbeVolumeDebug()
{
Init();
}
void Init()
{
drawProbes = false;
drawBricks = false;
drawCells = false;
realtimeSubdivision = false;
subdivisionCellUpdatePerFrame = 4;
subdivisionDelayInSeconds = 1;
probeShading = DebugProbeShadingMode.SH;
probeSize = 0.3f;
subdivisionViewCullingDistance = 500.0f;
probeCullingDistance = 200.0f;
maxSubdivToVisualize = ProbeBrickIndex.kMaxSubdivisionLevels;
minSubdivToVisualize = 0;
exposureCompensation = 0.0f;
drawVirtualOffsetPush = false;
offsetSize = 0.025f;
freezeStreaming = false;
otherStateIndex = 0;
}
public Action GetReset() => () => Init();
}
public partial class ProbeReferenceVolume
{
internal class CellInstancedDebugProbes
{
public List<Matrix4x4[]> probeBuffers;
public List<Matrix4x4[]> offsetBuffers;
public List<MaterialPropertyBlock> props;
}
const int kProbesPerBatch = 511;
/// <summary>Name of debug panel for Probe Volume</summary>
public static readonly string k_DebugPanelName = "Probe Volume";
internal ProbeVolumeDebug probeVolumeDebug { get; } = new ProbeVolumeDebug();
/// <summary>Colors that can be used for debug visualization of the brick structure subdivision.</summary>
public Color[] subdivisionDebugColors { get; } = new Color[ProbeBrickIndex.kMaxSubdivisionLevels];
DebugUI.Widget[] m_DebugItems;
Mesh m_DebugMesh;
Material m_DebugMaterial;
Mesh m_DebugOffsetMesh;
Material m_DebugOffsetMaterial;
Plane[] m_DebugFrustumPlanes = new Plane[6];
// Scenario blending debug data
GUIContent[] m_DebugScenarioNames = new GUIContent[0];
int[] m_DebugScenarioValues = new int[0];
string m_DebugActiveSceneGUID, m_DebugActiveScenario;
DebugUI.EnumField m_DebugScenarioField;
internal ProbeVolumeBakingProcessSettings bakingProcessSettings; /* DEFAULTS would be better but is implemented in PR#6174 = ProbeVolumeBakingProcessSettings.Defaults; */
// Field used for the realtime subdivision preview
internal Dictionary<Bounds, ProbeBrickIndex.Brick[]> realtimeSubdivisionInfo = new ();
bool m_MaxSubdivVisualizedIsMaxAvailable = false;
/// <summary>
/// Render Probe Volume related debug
/// </summary>
/// <param name="camera">The <see cref="Camera"/></param>
public void RenderDebug(Camera camera)
{
if (camera.cameraType != CameraType.Reflection && camera.cameraType != CameraType.Preview)
{
DrawProbeDebug(camera);
}
}
void InitializeDebug(in ProbeVolumeSystemParameters parameters)
{
if (parameters.supportsRuntimeDebug)
{
m_DebugMesh = parameters.probeDebugMesh;
m_DebugMaterial = CoreUtils.CreateEngineMaterial(parameters.probeDebugShader);
m_DebugMaterial.enableInstancing = true;
m_DebugOffsetMesh = parameters.offsetDebugMesh;
m_DebugOffsetMaterial = CoreUtils.CreateEngineMaterial(parameters.offsetDebugShader);
m_DebugOffsetMaterial.enableInstancing = true;
// Hard-coded colors for now.
Debug.Assert(ProbeBrickIndex.kMaxSubdivisionLevels == 7); // Update list if this changes.
subdivisionDebugColors[0] = new Color(1.0f, 0.0f, 0.0f);
subdivisionDebugColors[1] = new Color(0.0f, 1.0f, 0.0f);
subdivisionDebugColors[2] = new Color(0.0f, 0.0f, 1.0f);
subdivisionDebugColors[3] = new Color(1.0f, 1.0f, 0.0f);
subdivisionDebugColors[4] = new Color(1.0f, 0.0f, 1.0f);
subdivisionDebugColors[5] = new Color(0.0f, 1.0f, 1.0f);
subdivisionDebugColors[6] = new Color(0.5f, 0.5f, 0.5f);
}
RegisterDebug(parameters);
#if UNITY_EDITOR
UnityEditor.Lightmapping.lightingDataCleared += OnClearLightingdata;
#endif
}
void CleanupDebug()
{
UnregisterDebug(true);
CoreUtils.Destroy(m_DebugMaterial);
CoreUtils.Destroy(m_DebugOffsetMaterial);
#if UNITY_EDITOR
UnityEditor.Lightmapping.lightingDataCleared -= OnClearLightingdata;
#endif
}
void DebugCellIndexChanged<T>(DebugUI.Field<T> field, T value)
{
ClearDebugData();
}
void RegisterDebug(ProbeVolumeSystemParameters parameters)
{
void RefreshDebug<T>(DebugUI.Field<T> field, T value)
{
UnregisterDebug(false);
RegisterDebug(parameters);
}
const float kProbeSizeMin = 0.05f, kProbeSizeMax = 10.0f;
const float kOffsetSizeMin = 0.001f, kOffsetSizeMax = 0.1f;
var widgetList = new List<DebugUI.Widget>();
var subdivContainer = new DebugUI.Container() { displayName = "Subdivision Visualization" };
subdivContainer.children.Add(new DebugUI.BoolField { displayName = "Display Cells", getter = () => probeVolumeDebug.drawCells, setter = value => probeVolumeDebug.drawCells = value, onValueChanged = RefreshDebug });
subdivContainer.children.Add(new DebugUI.BoolField { displayName = "Display Bricks", getter = () => probeVolumeDebug.drawBricks, setter = value => probeVolumeDebug.drawBricks = value, onValueChanged = RefreshDebug });
#if UNITY_EDITOR
subdivContainer.children.Add(new DebugUI.BoolField { displayName = "Realtime Update", getter = () => probeVolumeDebug.realtimeSubdivision, setter = value => probeVolumeDebug.realtimeSubdivision = value, onValueChanged = RefreshDebug });
if (probeVolumeDebug.realtimeSubdivision)
{
var cellUpdatePerFrame = new DebugUI.IntField { displayName = "Number Of Cell Update Per Frame", getter = () => probeVolumeDebug.subdivisionCellUpdatePerFrame, setter = value => probeVolumeDebug.subdivisionCellUpdatePerFrame = value, min = () => 1, max = () => 100 };
var delayBetweenUpdates = new DebugUI.FloatField { displayName = "Delay Between Two Updates In Seconds", getter = () => probeVolumeDebug.subdivisionDelayInSeconds, setter = value => probeVolumeDebug.subdivisionDelayInSeconds = value, min = () => 0.1f, max = () => 10 };
subdivContainer.children.Add(new DebugUI.Container { children = { cellUpdatePerFrame, delayBetweenUpdates } });
}
#endif
subdivContainer.children.Add(new DebugUI.FloatField { displayName = "Culling Distance", getter = () => probeVolumeDebug.subdivisionViewCullingDistance, setter = value => probeVolumeDebug.subdivisionViewCullingDistance = value, min = () => 0.0f });
var probeContainer = new DebugUI.Container() { displayName = "Probe Visualization" };
probeContainer.children.Add(new DebugUI.BoolField { displayName = "Display Probes", getter = () => probeVolumeDebug.drawProbes, setter = value => probeVolumeDebug.drawProbes = value, onValueChanged = RefreshDebug });
if (probeVolumeDebug.drawProbes)
{
var probeContainerChildren = new DebugUI.Container();
probeContainerChildren.children.Add(new DebugUI.EnumField
{
displayName = "Probe Shading Mode",
getter = () => (int)probeVolumeDebug.probeShading,
setter = value => probeVolumeDebug.probeShading = (DebugProbeShadingMode)value,
autoEnum = typeof(DebugProbeShadingMode),
getIndex = () => (int)probeVolumeDebug.probeShading,
setIndex = value => probeVolumeDebug.probeShading = (DebugProbeShadingMode)value,
onValueChanged = RefreshDebug
});
probeContainerChildren.children.Add(new DebugUI.FloatField { displayName = "Probe Size", getter = () => probeVolumeDebug.probeSize, setter = value => probeVolumeDebug.probeSize = value, min = () => kProbeSizeMin, max = () => kProbeSizeMax });
if (probeVolumeDebug.probeShading == DebugProbeShadingMode.SH || probeVolumeDebug.probeShading == DebugProbeShadingMode.SHL0 || probeVolumeDebug.probeShading == DebugProbeShadingMode.SHL0L1)
probeContainerChildren.children.Add(new DebugUI.FloatField { displayName = "Probe Exposure Compensation", getter = () => probeVolumeDebug.exposureCompensation, setter = value => probeVolumeDebug.exposureCompensation = value });
probeContainerChildren.children.Add(new DebugUI.IntField
{
displayName = "Max subdivision displayed",
getter = () => probeVolumeDebug.maxSubdivToVisualize,
setter = (v) => probeVolumeDebug.maxSubdivToVisualize = Mathf.Min(v, ProbeReferenceVolume.instance.GetMaxSubdivision() - 1),
min = () => 0,
max = () => ProbeReferenceVolume.instance.GetMaxSubdivision()-1,
});
probeContainerChildren.children.Add(new DebugUI.IntField
{
displayName = "Min subdivision displayed",
getter = () => probeVolumeDebug.minSubdivToVisualize,
setter = (v) => probeVolumeDebug.minSubdivToVisualize = Mathf.Max(v, 0),
min = () => 0,
max = () => ProbeReferenceVolume.instance.GetMaxSubdivision()-1,
});
probeContainer.children.Add(probeContainerChildren);
}
probeContainer.children.Add(new DebugUI.BoolField
{
displayName = "Virtual Offset",
getter = () => probeVolumeDebug.drawVirtualOffsetPush,
setter = value =>
{
probeVolumeDebug.drawVirtualOffsetPush = value;
if (probeVolumeDebug.drawVirtualOffsetPush && probeVolumeDebug.drawProbes)
{
// If probes are being drawn when enabling offset, automatically scale them down to a reasonable size so the arrows aren't obscured by the probes.
var searchDistance = CellSize(0) * MinBrickSize() / ProbeBrickPool.kBrickCellCount * bakingProcessSettings.virtualOffsetSettings.searchMultiplier + bakingProcessSettings.virtualOffsetSettings.outOfGeoOffset;
probeVolumeDebug.probeSize = Mathf.Min(probeVolumeDebug.probeSize, Mathf.Clamp(searchDistance, kProbeSizeMin, kProbeSizeMax));
}
},
onValueChanged = RefreshDebug
});
if (probeVolumeDebug.drawVirtualOffsetPush)
{
var voOffset = new DebugUI.FloatField { displayName = "Offset Size", getter = () => probeVolumeDebug.offsetSize, setter = value => probeVolumeDebug.offsetSize = value, min = () => kOffsetSizeMin, max = () => kOffsetSizeMax };
probeContainer.children.Add(new DebugUI.Container { children = { voOffset } });
}
probeContainer.children.Add(new DebugUI.FloatField { displayName = "Culling Distance", getter = () => probeVolumeDebug.probeCullingDistance, setter = value => probeVolumeDebug.probeCullingDistance = value, min = () => 0.0f });
var streamingContainer = new DebugUI.Container() { displayName = "Streaming" };
streamingContainer.children.Add(new DebugUI.BoolField { displayName = "Freeze Streaming", getter = () => probeVolumeDebug.freezeStreaming, setter = value => probeVolumeDebug.freezeStreaming = value });
streamingContainer.children.Add(new DebugUI.IntField { displayName = "Number Of Cells Loaded Per Frame", getter = () => instance.numberOfCellsLoadedPerFrame, setter = value => instance.SetNumberOfCellsLoadedPerFrame(value), min = () => 0 });
if (parameters.supportsRuntimeDebug)
{
// Cells / Bricks visualization is not implemented in a runtime-compatible way at the moment.
if (Application.isEditor)
widgetList.Add(subdivContainer);
widgetList.Add(probeContainer);
}
if (parameters.supportStreaming)
{
widgetList.Add(streamingContainer);
}
if (parameters.scenarioBlendingShader != null && parameters.blendingMemoryBudget != 0)
{
var blendingContainer = new DebugUI.Container() { displayName = "Scenario Blending" };
blendingContainer.children.Add(new DebugUI.IntField { displayName = "Number Of Cells Blended Per Frame", getter = () => instance.numberOfCellsBlendedPerFrame, setter = value => instance.numberOfCellsBlendedPerFrame = value, min = () => 0 });
blendingContainer.children.Add(new DebugUI.FloatField { displayName = "Turnover Rate", getter = () => instance.turnoverRate, setter = value => instance.turnoverRate = value, min = () => 0, max = () => 1 });
void RefreshScenarioNames(string guid)
{
HashSet<string> allScenarios = new();
foreach (var set in parameters.sceneData.bakingSets)
{
if (!set.sceneGUIDs.Contains(guid))
continue;
foreach (var scenario in set.lightingScenarios)
allScenarios.Add(scenario);
}
allScenarios.Remove(sceneData.lightingScenario);
if (m_DebugActiveSceneGUID == guid && allScenarios.Count + 1 == m_DebugScenarioNames.Length && m_DebugActiveScenario == sceneData.lightingScenario)
return;
int i = 0;
ArrayExtensions.ResizeArray(ref m_DebugScenarioNames, allScenarios.Count + 1);
ArrayExtensions.ResizeArray(ref m_DebugScenarioValues, allScenarios.Count + 1);
m_DebugScenarioNames[0] = new GUIContent("None");
m_DebugScenarioValues[0] = 0;
foreach (var scenario in allScenarios)
{
i++;
m_DebugScenarioNames[i] = new GUIContent(scenario);
m_DebugScenarioValues[i] = i;
}
m_DebugActiveSceneGUID = guid;
m_DebugActiveScenario = sceneData.lightingScenario;
m_DebugScenarioField.enumNames = m_DebugScenarioNames;
m_DebugScenarioField.enumValues = m_DebugScenarioValues;
if (probeVolumeDebug.otherStateIndex >= m_DebugScenarioNames.Length)
probeVolumeDebug.otherStateIndex = 0;
}
m_DebugScenarioField = new DebugUI.EnumField
{
displayName = "Scenario To Blend With",
enumNames = m_DebugScenarioNames,
enumValues = m_DebugScenarioValues,
getIndex = () =>
{
RefreshScenarioNames(ProbeVolumeSceneData.GetSceneGUID(SceneManagement.SceneManager.GetActiveScene()));
probeVolumeDebug.otherStateIndex = 0;
if (!string.IsNullOrEmpty(sceneData.otherScenario))
{
for (int i = 1; i < m_DebugScenarioNames.Length; i++)
{
if (m_DebugScenarioNames[i].text == sceneData.otherScenario)
{
probeVolumeDebug.otherStateIndex = i;
break;
}
}
}
return probeVolumeDebug.otherStateIndex;
},
setIndex = value =>
{
string other = value == 0 ? null : m_DebugScenarioNames[value].text;
sceneData.BlendLightingScenario(other, sceneData.scenarioBlendingFactor);
probeVolumeDebug.otherStateIndex = value;
},
getter = () => probeVolumeDebug.otherStateIndex,
setter = (value) => probeVolumeDebug.otherStateIndex = value,
};
blendingContainer.children.Add(m_DebugScenarioField);
blendingContainer.children.Add(new DebugUI.FloatField { displayName = "Scenario Blending Factor", getter = () => instance.scenarioBlendingFactor, setter = value => instance.scenarioBlendingFactor = value, min = () => 0.0f, max = () => 1.0f });
widgetList.Add(blendingContainer);
}
if (widgetList.Count > 0)
{
m_DebugItems = widgetList.ToArray();
var panel = DebugManager.instance.GetPanel(k_DebugPanelName, true);
panel.children.Add(m_DebugItems);
}
DebugManager debugManager = DebugManager.instance;
debugManager.RegisterData(probeVolumeDebug);
}
void UnregisterDebug(bool destroyPanel)
{
if (destroyPanel)
DebugManager.instance.RemovePanel(k_DebugPanelName);
else
DebugManager.instance.GetPanel(k_DebugPanelName, false).children.Remove(m_DebugItems);
}
bool ShouldCullCell(Vector3 cellPosition, Transform cameraTransform, Plane[] frustumPlanes)
{
var cellSize = MaxBrickSize();
var originWS = GetTransform().posWS;
Vector3 cellCenterWS = cellPosition * cellSize + originWS + Vector3.one * (cellSize / 2.0f);
// We do coarse culling with cell, finer culling later.
float distanceRoundedUpWithCellSize = Mathf.CeilToInt(probeVolumeDebug.probeCullingDistance / cellSize) * cellSize;
if (Vector3.Distance(cameraTransform.position, cellCenterWS) > distanceRoundedUpWithCellSize)
return true;
var volumeAABB = new Bounds(cellCenterWS, cellSize * Vector3.one);
return !GeometryUtility.TestPlanesAABB(frustumPlanes, volumeAABB);
}
void DrawProbeDebug(Camera camera)
{
if (!enabledBySRP || !isInitialized)
return;
if (!probeVolumeDebug.drawProbes && !probeVolumeDebug.drawVirtualOffsetPush)
return;
GeometryUtility.CalculateFrustumPlanes(camera, m_DebugFrustumPlanes);
m_DebugMaterial.shaderKeywords = null;
if (m_SHBands == ProbeVolumeSHBands.SphericalHarmonicsL1)
m_DebugMaterial.EnableKeyword("PROBE_VOLUMES_L1");
else if (m_SHBands == ProbeVolumeSHBands.SphericalHarmonicsL2)
m_DebugMaterial.EnableKeyword("PROBE_VOLUMES_L2");
// This is to force the debug meshes not to be drawn in the depth prepass while still behaving correctly.
// They are going to be rendered opaque anyhow; the transparent render queue is only used to make sure
// they behave properly w.r.t. fog.
m_DebugMaterial.renderQueue = (int)RenderQueue.Transparent;
m_DebugOffsetMaterial.renderQueue = (int)RenderQueue.Transparent;
// Sanitize the min max subdiv levels with what is available
int minAvailableSubdiv = ProbeReferenceVolume.instance.cells.Count > 0 ? ProbeReferenceVolume.instance.GetMaxSubdivision()-1 : 0;
foreach (var cellInfo in ProbeReferenceVolume.instance.cells.Values)
{
minAvailableSubdiv = Mathf.Min(minAvailableSubdiv, cellInfo.cell.minSubdiv);
}
probeVolumeDebug.maxSubdivToVisualize = Mathf.Min(probeVolumeDebug.maxSubdivToVisualize, ProbeReferenceVolume.instance.GetMaxSubdivision() - 1);
m_MaxSubdivVisualizedIsMaxAvailable = probeVolumeDebug.maxSubdivToVisualize == ProbeReferenceVolume.instance.GetMaxSubdivision() - 1;
probeVolumeDebug.minSubdivToVisualize = Mathf.Clamp(probeVolumeDebug.minSubdivToVisualize, minAvailableSubdiv, probeVolumeDebug.maxSubdivToVisualize);
foreach (var cellInfo in ProbeReferenceVolume.instance.cells.Values)
{
if (ShouldCullCell(cellInfo.cell.position, camera.transform, m_DebugFrustumPlanes))
continue;
var debug = CreateInstancedProbes(cellInfo);
if (debug == null)
continue;
for (int i = 0; i < debug.probeBuffers.Count; ++i)
{
var props = debug.props[i];
props.SetInt("_ShadingMode", (int)probeVolumeDebug.probeShading);
props.SetFloat("_ExposureCompensation", probeVolumeDebug.exposureCompensation);
props.SetFloat("_ProbeSize", probeVolumeDebug.probeSize);
props.SetFloat("_CullDistance", probeVolumeDebug.probeCullingDistance);
props.SetInt("_MaxAllowedSubdiv", probeVolumeDebug.maxSubdivToVisualize);
props.SetInt("_MinAllowedSubdiv", probeVolumeDebug.minSubdivToVisualize);
props.SetFloat("_ValidityThreshold", bakingProcessSettings.dilationSettings.dilationValidityThreshold);
props.SetFloat("_OffsetSize", probeVolumeDebug.offsetSize);
if (probeVolumeDebug.drawProbes)
{
var probeBuffer = debug.probeBuffers[i];
Graphics.DrawMeshInstanced(m_DebugMesh, 0, m_DebugMaterial, probeBuffer, probeBuffer.Length, props, ShadowCastingMode.Off, false, 0, camera, LightProbeUsage.Off, null);
}
if (probeVolumeDebug.drawVirtualOffsetPush)
{
var offsetBuffer = debug.offsetBuffers[i];
Graphics.DrawMeshInstanced(m_DebugOffsetMesh, 0, m_DebugOffsetMaterial, offsetBuffer, offsetBuffer.Length, props, ShadowCastingMode.Off, false, 0, camera, LightProbeUsage.Off, null);
}
}
}
}
internal void ResetDebugViewToMaxSubdiv()
{
if (m_MaxSubdivVisualizedIsMaxAvailable)
probeVolumeDebug.maxSubdivToVisualize = ProbeReferenceVolume.instance.GetMaxSubdivision() - 1;
}
void ClearDebugData()
{
realtimeSubdivisionInfo.Clear();
}
CellInstancedDebugProbes CreateInstancedProbes(CellInfo cellInfo)
{
if (cellInfo.debugProbes != null)
return cellInfo.debugProbes;
int maxSubdiv = ProbeReferenceVolume.instance.GetMaxSubdivision() - 1;
var cell = cellInfo.cell;
if (!cell.bricks.IsCreated || cell.bricks.Length == 0 || !cellInfo.loaded)
return null;
List<Matrix4x4[]> probeBuffers = new List<Matrix4x4[]>();
List<Matrix4x4[]> offsetBuffers = new List<Matrix4x4[]>();
List<MaterialPropertyBlock> props = new List<MaterialPropertyBlock>();
var chunks = cellInfo.chunkList;
Vector4[] texels = new Vector4[kProbesPerBatch];
float[] validity = new float[kProbesPerBatch];
float[] relativeSize = new float[kProbesPerBatch];
float[] touchupUpVolumeAction = cell.touchupVolumeInteraction.Length > 0 ? new float[kProbesPerBatch] : null;
Vector4[] offsets = cell.offsetVectors.Length > 0 ? new Vector4[kProbesPerBatch] : null;
List<Matrix4x4> probeBuffer = new List<Matrix4x4>();
List<Matrix4x4> offsetBuffer = new List<Matrix4x4>();
var debugData = new CellInstancedDebugProbes();
debugData.probeBuffers = probeBuffers;
debugData.offsetBuffers = offsetBuffers;
debugData.props = props;
var chunkSizeInProbes = m_CurrentProbeVolumeChunkSizeInBricks * ProbeBrickPool.kBrickProbeCountTotal;
var loc = ProbeBrickPool.ProbeCountToDataLocSize(chunkSizeInProbes);
int idxInBatch = 0;
int globalIndex = 0;
int brickCount = cell.probeCount / ProbeBrickPool.kBrickProbeCountTotal;
int bx = 0, by = 0, bz = 0;
for (int brickIndex = 0; brickIndex < brickCount; ++brickIndex)
{
Debug.Assert(bz < loc.z);
int brickSize = cell.bricks[brickIndex].subdivisionLevel;
int chunkIndex = brickIndex / m_CurrentProbeVolumeChunkSizeInBricks;
var chunk = chunks[chunkIndex];
Vector3Int brickStart = new Vector3Int(chunk.x + bx, chunk.y + by, chunk.z + bz);
for (int z = 0; z < ProbeBrickPool.kBrickProbeCountPerDim; ++z)
{
for (int y = 0; y < ProbeBrickPool.kBrickProbeCountPerDim; ++y)
{
for (int x = 0; x < ProbeBrickPool.kBrickProbeCountPerDim; ++x)
{
Vector3Int texelLoc = new Vector3Int(brickStart.x + x, brickStart.y + y, brickStart.z + z);
int probeFlatIndex = chunkIndex * chunkSizeInProbes + (bx + x) + loc.x * ((by + y) + loc.y * (bz + z));
probeBuffer.Add(Matrix4x4.TRS(cell.probePositions[probeFlatIndex], Quaternion.identity, Vector3.one * (0.3f * (brickSize + 1))));
validity[idxInBatch] = cell.validity[probeFlatIndex];
texels[idxInBatch] = new Vector4(texelLoc.x, texelLoc.y, texelLoc.z, brickSize);
relativeSize[idxInBatch] = (float)brickSize / (float)maxSubdiv;
if (touchupUpVolumeAction != null)
{
touchupUpVolumeAction[idxInBatch] = cell.touchupVolumeInteraction[probeFlatIndex];
}
if (offsets != null)
{
const float kOffsetThresholdSqr = 1e-6f;
var offset = cell.offsetVectors[probeFlatIndex];
offsets[idxInBatch] = offset;
if (offset.sqrMagnitude < kOffsetThresholdSqr)
{
offsetBuffer.Add(Matrix4x4.identity);
}
else
{
var position = cell.probePositions[probeFlatIndex] + offset;
var orientation = Quaternion.LookRotation(-offset);
var scale = new Vector3(0.5f, 0.5f, offset.magnitude);
offsetBuffer.Add(Matrix4x4.TRS(position, orientation, scale));
}
}
idxInBatch++;
if (probeBuffer.Count >= kProbesPerBatch || globalIndex == cell.probeCount - 1)
{
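// Flush the current batch: per-probe data is packed into a MaterialPropertyBlock and the matrices into fixed-size arrays, at most kProbesPerBatch probes per instanced draw.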
idxInBatch = 0;
MaterialPropertyBlock prop = new MaterialPropertyBlock();
prop.SetFloatArray("_Validity", validity);
prop.SetFloatArray("_TouchupedByVolume", touchupUpVolumeAction);
prop.SetFloatArray("_RelativeSize", relativeSize);
prop.SetVectorArray("_IndexInAtlas", texels);
if (offsets != null)
prop.SetVectorArray("_Offset", offsets);
props.Add(prop);
probeBuffers.Add(probeBuffer.ToArray());
probeBuffer = new List<Matrix4x4>();
offsetBuffers.Add(offsetBuffer.ToArray());
offsetBuffer.Clear();
}
globalIndex++;
}
}
}
bx += ProbeBrickPool.kBrickProbeCountPerDim;
if (bx >= loc.x)
{
bx = 0;
by += ProbeBrickPool.kBrickProbeCountPerDim;
if (by >= loc.y)
{
by = 0;
bz += ProbeBrickPool.kBrickProbeCountPerDim;
if (bz >= loc.z)
{
bx = 0;
by = 0;
bz = 0;
}
}
}
}
cellInfo.debugProbes = debugData;
return debugData;
}
void OnClearLightingdata()
{
ClearDebugData();
}
}
}

View File

@@ -0,0 +1,19 @@
//
// This file was automatically generated. Please don't edit by hand. Execute Editor command [ Edit > Rendering > Generate Shader Includes ] instead
//
#ifndef PROBEREFERENCEVOLUME_DEBUG_CS_HLSL
#define PROBEREFERENCEVOLUME_DEBUG_CS_HLSL
//
// UnityEngine.Rendering.DebugProbeShadingMode: static fields
//
#define DEBUGPROBESHADINGMODE_SH (0)
#define DEBUGPROBESHADINGMODE_SHL0 (1)
#define DEBUGPROBESHADINGMODE_SHL0L1 (2)
#define DEBUGPROBESHADINGMODE_VALIDITY (3)
#define DEBUGPROBESHADINGMODE_VALIDITY_OVER_DILATION_THRESHOLD (4)
#define DEBUGPROBESHADINGMODE_INVALIDATED_BY_TOUCHUP_VOLUMES (5)
#define DEBUGPROBESHADINGMODE_SIZE (6)
#endif

View File

@@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: f6e06ceb4cc6a38458f4b614e8000dc8
ShaderIncludeImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 2f2e5e8e23a1880449ebe8c1baaa136f
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,215 @@
using System.Collections.Generic;
using Unity.Collections;
#if UNITY_EDITOR
using UnityEditor;
#endif
namespace UnityEngine.Rendering
{
#if UNITY_EDITOR
/// <summary>
/// A manager to enqueue extra probe rendering outside of probe volumes.
/// </summary>
public class AdditionalGIBakeRequestsManager
{
// The baking ID for the extra requests
// TODO: Need to ensure this never conflicts with bake IDs from others interacting with the API.
// In our project, this is ProbeVolumes.
internal static readonly int s_BakingID = 912345678;
private static AdditionalGIBakeRequestsManager s_Instance = new AdditionalGIBakeRequestsManager();
/// <summary>
/// Get the manager that governs the additional light probe rendering requests.
/// </summary>
public static AdditionalGIBakeRequestsManager instance { get { return s_Instance; } }
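// Example usage from a hypothetical probe component (illustrative only; names outside this class are placeholders):
//   AdditionalGIBakeRequestsManager.instance.EnqueueRequest(transform.position, GetInstanceID());
//   // ... after baking ...
//   if (AdditionalGIBakeRequestsManager.instance.RetrieveProbeSH(GetInstanceID(), out var sh, out var pos))
//   {
//       // Use the baked SphericalHarmonicsL2 coefficients.
//   }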
internal void Init()
{
SubscribeOnBakeStarted();
}
internal void Cleanup()
{
UnsubscribeOnBakeStarted();
}
const float kInvalidSH = 1f;
const float kValidSHThresh = 0.33f;
private static Dictionary<int, SphericalHarmonicsL2> m_SHCoefficients = new Dictionary<int, SphericalHarmonicsL2>();
private static Dictionary<int, float> m_SHValidity = new Dictionary<int, float>();
private static Dictionary<int, Vector3> m_RequestPositions = new Dictionary<int, Vector3>();
private static readonly Vector2 s_FreelistSentinel = new Vector2(float.MaxValue, float.MaxValue);
/// <summary>
/// Enqueue a request for probe rendering at the specified location.
/// </summary>
/// <param name ="capturePosition"> The position at which a probe is baked.</param>
/// <param name ="probeInstanceID"> The instance ID of the probe doing the request.</param>
public void EnqueueRequest(Vector3 capturePosition, int probeInstanceID)
{
m_SHCoefficients[probeInstanceID] = new SphericalHarmonicsL2();
m_SHValidity[probeInstanceID] = kInvalidSH;
m_RequestPositions[probeInstanceID] = capturePosition;
}
/// <summary>
/// Dequeue a request for probe rendering.
/// </summary>
/// <param name ="probeInstanceID">The instance ID of the probe for which we want to dequeue a request. </param>
public void DequeueRequest(int probeInstanceID)
{
if (m_SHCoefficients.ContainsKey(probeInstanceID))
{
m_SHCoefficients.Remove(probeInstanceID);
m_SHValidity.Remove(probeInstanceID);
m_RequestPositions.Remove(probeInstanceID);
}
}
/// <summary>
/// Retrieve the result of a capture request. Returns false if the request has not been fulfilled yet or the request ID is invalid.
/// </summary>
/// <param name ="probeInstanceID"> The instance ID of the probe doing the request.</param>
/// <param name ="sh"> The output SH coefficients that have been computed.</param>
/// <param name ="pos"> The position for which the computed SH coefficients are valid.</param>
/// <returns>Whether the request for light probe rendering has been fulfilled and sh is valid.</returns>
public bool RetrieveProbeSH(int probeInstanceID, out SphericalHarmonicsL2 sh, out Vector3 pos)
{
if (m_SHCoefficients.ContainsKey(probeInstanceID))
{
sh = m_SHCoefficients[probeInstanceID];
pos = m_RequestPositions[probeInstanceID];
return m_SHValidity[probeInstanceID] < kValidSHThresh;
}
sh = new SphericalHarmonicsL2();
pos = Vector3.negativeInfinity;
return false;
}
/// <summary>
/// Update the capture location for the probe request.
/// </summary>
/// <param name ="probeInstanceID"> The instance ID of the probe doing the request and that wants the capture position updated.</param>
/// <param name ="newPositionnewPosition"> The position at which a probe is baked.</param>
public void UpdatePositionForRequest(int probeInstanceID, Vector3 newPosition)
{
if (m_SHCoefficients.ContainsKey(probeInstanceID))
{
m_RequestPositions[probeInstanceID] = newPosition;
m_SHCoefficients[probeInstanceID] = new SphericalHarmonicsL2();
m_SHValidity[probeInstanceID] = kInvalidSH;
}
else
{
EnqueueRequest(newPosition, probeInstanceID);
}
}
private void SubscribeOnBakeStarted()
{
UnsubscribeOnBakeStarted();
Lightmapping.bakeStarted += AddRequestsToLightmapper;
}
private void UnsubscribeOnBakeStarted()
{
Lightmapping.bakeStarted -= AddRequestsToLightmapper;
RemoveRequestsFromLightmapper();
}
internal void AddRequestsToLightmapper()
{
UnityEditor.Experimental.Lightmapping.SetAdditionalBakedProbes(s_BakingID, (new List<Vector3>(m_RequestPositions.Values)).ToArray());
Lightmapping.bakeCompleted -= OnAdditionalProbesBakeCompleted;
Lightmapping.bakeCompleted += OnAdditionalProbesBakeCompleted;
}
private void RemoveRequestsFromLightmapper()
{
UnityEditor.Experimental.Lightmapping.SetAdditionalBakedProbes(s_BakingID, null);
}
private void OnAdditionalProbesBakeCompleted()
{
Lightmapping.bakeCompleted -= OnAdditionalProbesBakeCompleted;
if (m_RequestPositions.Count == 0) return;
var sh = new NativeArray<SphericalHarmonicsL2>(m_RequestPositions.Count, Allocator.Temp, NativeArrayOptions.UninitializedMemory);
var validity = new NativeArray<float>(m_RequestPositions.Count, Allocator.Temp, NativeArrayOptions.UninitializedMemory);
var bakedProbeOctahedralDepth = new NativeArray<float>(m_RequestPositions.Count * 64, Allocator.Temp, NativeArrayOptions.UninitializedMemory);
if (UnityEditor.Experimental.Lightmapping.GetAdditionalBakedProbes(s_BakingID, sh, validity, bakedProbeOctahedralDepth))
{
SetSHCoefficients(sh, validity);
}
else
{
Debug.LogWarning($"Failed to collect results for additional probes. (Bake Id {s_BakingID})");
ClearSHCoefficients();
}
ProbeReferenceVolume.instance.retrieveExtraDataAction?.Invoke(new ProbeReferenceVolume.ExtraDataActionInput());
sh.Dispose();
validity.Dispose();
bakedProbeOctahedralDepth.Dispose();
}
private void SetSHCoefficients(NativeArray<SphericalHarmonicsL2> sh, NativeArray<float> validity)
{
Debug.Assert(sh.Length == m_SHCoefficients.Count);
Debug.Assert(sh.Length == validity.Length);
List<int> requestsInstanceIDs = new List<int>(m_SHCoefficients.Keys);
for (int i = 0; i < sh.Length; ++i)
{
var v = validity[i];
var s = sh[i];
if (v < kValidSHThresh)
{
var hasNonZeroValue = false;
for (var r = 0; r < 3; ++r)
{
for (var c = 0; c < 9; ++c)
{
if (s[r, c] != 0f)
{
hasNonZeroValue = true;
goto doubleBreak;
}
}
}
doubleBreak:
if (!hasNonZeroValue)
{
// Use max value as a sentinel to explicitly pass coefficients to light loop that cancel out reflection probe contribution
const float k = float.MaxValue;
s.AddAmbientLight(new Color(k, k, k));
}
}
m_SHCoefficients[requestsInstanceIDs[i]] = s;
m_SHValidity[requestsInstanceIDs[i]] = v;
}
}
private void ClearSHCoefficients()
{
foreach (var key in m_SHCoefficients.Keys)
{
m_SHCoefficients[key] = default;
m_SHValidity[key] = kInvalidSH;
}
}
}
#endif
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 41e89eb9ee118734dab014dfd6bb55fc
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,317 @@
namespace UnityEngine.Rendering
{
public partial class ProbeReferenceVolume
{
DynamicArray<CellInfo> m_LoadedCells = new DynamicArray<CellInfo>();
DynamicArray<CellInfo> m_ToBeLoadedCells = new DynamicArray<CellInfo>();
DynamicArray<CellInfo> m_TempCellToLoadList = new DynamicArray<CellInfo>();
DynamicArray<CellInfo> m_TempCellToUnloadList = new DynamicArray<CellInfo>();
DynamicArray<BlendingCellInfo> m_LoadedBlendingCells = new();
DynamicArray<BlendingCellInfo> m_ToBeLoadedBlendingCells = new();
DynamicArray<BlendingCellInfo> m_TempBlendingCellToLoadList = new();
DynamicArray<BlendingCellInfo> m_TempBlendingCellToUnloadList = new();
Vector3 m_FrozenCameraPosition;
bool m_HasRemainingCellsToBlend = false;
internal void ScenarioBlendingChanged(bool scenarioChanged)
{
m_HasRemainingCellsToBlend = true;
if (scenarioChanged)
{
UnloadAllBlendingCells();
for (int i = 0; i < m_ToBeLoadedBlendingCells.size; ++i)
m_ToBeLoadedBlendingCells[i].ForceReupload();
}
}
/// <summary>
/// Set the number of cells that are loaded per frame when needed.
/// </summary>
/// <param name="numberOfCells"></param>
public void SetNumberOfCellsLoadedPerFrame(int numberOfCells)
{
m_NumberOfCellsLoadedPerFrame = Mathf.Max(1, numberOfCells);
}
void ComputeCellCameraDistance(Vector3 cameraPosition, DynamicArray<CellInfo> cells)
{
for (int i = 0; i < cells.size; ++i)
{
var cellInfo = cells[i];
// For now streaming score is only distance based.
cellInfo.streamingScore = Vector3.Distance(cameraPosition, cellInfo.cell.position);
}
}
void ComputeStreamingScoreForBlending(DynamicArray<BlendingCellInfo> cells, float worstScore)
{
float factor = scenarioBlendingFactor;
for (int i = 0; i < cells.size; ++i)
{
var blendingCell = cells[i];
if (factor == blendingCell.blendingFactor)
blendingCell.MarkUpToDate();
else
{
blendingCell.streamingScore = blendingCell.cellInfo.streamingScore;
if (blendingCell.ShouldPrioritize())
blendingCell.streamingScore -= worstScore;
}
}
}
bool TryLoadCell(CellInfo cellInfo, ref int shBudget, ref int indexBudget, DynamicArray<CellInfo> loadedCells)
{
// Are we within budget?
if (cellInfo.cell.shChunkCount <= shBudget && cellInfo.cell.indexChunkCount <= indexBudget)
{
// This can still fail because of fragmentation.
// TODO: Handle defrag
if (LoadCell(cellInfo))
{
loadedCells.Add(cellInfo);
shBudget -= cellInfo.cell.shChunkCount;
indexBudget -= cellInfo.cell.indexChunkCount;
return true;
}
}
return false;
}
void UnloadBlendingCell(BlendingCellInfo blendingCell, DynamicArray<BlendingCellInfo> unloadedCells)
{
UnloadBlendingCell(blendingCell);
unloadedCells.Add(blendingCell);
}
bool TryLoadBlendingCell(BlendingCellInfo blendingCell, DynamicArray<BlendingCellInfo> loadedCells)
{
if (!AddBlendingBricks(blendingCell))
return false;
loadedCells.Add(blendingCell);
return true;
}
/// <summary>
/// Updates the cell streaming for a <see cref="Camera"/>
/// </summary>
/// <param name="cmd">The <see cref="CommandBuffer"/></param>
/// <param name="camera">The <see cref="Camera"/></param>
public void UpdateCellStreaming(CommandBuffer cmd, Camera camera)
{
if (!isInitialized) return;
using (new ProfilingScope(null, ProfilingSampler.Get(CoreProfileId.APVCellStreamingUpdate)))
{
var cameraPosition = camera.transform.position;
if (!probeVolumeDebug.freezeStreaming)
{
m_FrozenCameraPosition = cameraPosition;
}
// Cell position in cell space is the top left corner. So we need to shift the camera position by half a cell to make things comparable.
var cameraPositionCellSpace = (m_FrozenCameraPosition - m_Transform.posWS) / MaxBrickSize() - Vector3.one * 0.5f;
ComputeCellCameraDistance(cameraPositionCellSpace, m_ToBeLoadedCells);
ComputeCellCameraDistance(cameraPositionCellSpace, m_LoadedCells);
m_ToBeLoadedCells.QuickSort();
m_LoadedCells.QuickSort();
// This is only a rough budget estimate at first.
// It doesn't account for fragmentation.
int indexChunkBudget = m_Index.GetRemainingChunkCount();
int shChunkBudget = m_Pool.GetRemainingChunkCount();
int cellCountToLoad = Mathf.Min(m_NumberOfCellsLoadedPerFrame, m_ToBeLoadedCells.size);
if (m_SupportStreaming)
{
while (m_TempCellToLoadList.size < cellCountToLoad)
{
// Enough memory, we can safely load the cell.
var cellInfo = m_ToBeLoadedCells[m_TempCellToLoadList.size];
if (!TryLoadCell(cellInfo, ref shChunkBudget, ref indexChunkBudget, m_TempCellToLoadList))
break;
}
// Budget reached. We need to figure out if we can safely unload other cells to make room.
if (m_TempCellToLoadList.size != cellCountToLoad)
{
int pendingUnloadCount = 0;
while (m_TempCellToLoadList.size < cellCountToLoad)
{
if (m_LoadedCells.size - pendingUnloadCount == 0)
break;
var furthestLoadedCell = m_LoadedCells[m_LoadedCells.size - pendingUnloadCount - 1];
var closestUnloadedCell = m_ToBeLoadedCells[m_TempCellToLoadList.size];
// The most distant loaded cell is further than the closest unloaded cell, we can unload it.
if (furthestLoadedCell.streamingScore > closestUnloadedCell.streamingScore)
{
pendingUnloadCount++;
UnloadCell(furthestLoadedCell);
shChunkBudget += furthestLoadedCell.cell.shChunkCount;
indexChunkBudget += furthestLoadedCell.cell.indexChunkCount;
m_TempCellToUnloadList.Add(furthestLoadedCell);
TryLoadCell(closestUnloadedCell, ref shChunkBudget, ref indexChunkBudget, m_TempCellToLoadList);
}
else // We are in a "stable" state, all the closest cells are loaded within the budget.
break;
}
if (pendingUnloadCount > 0)
{
m_LoadedCells.RemoveRange(m_LoadedCells.size - pendingUnloadCount, pendingUnloadCount);
RecomputeMinMaxLoadedCellPos();
}
}
}
else
{
for (int i = 0; i < cellCountToLoad; ++i)
{
var cellInfo = m_ToBeLoadedCells[m_TempCellToLoadList.size]; // m_TempCellToLoadList.size gets incremented in TryLoadCell
TryLoadCell(cellInfo, ref shChunkBudget, ref indexChunkBudget, m_TempCellToLoadList);
}
}
// Remove the cells we successfully loaded.
m_ToBeLoadedCells.RemoveRange(0, m_TempCellToLoadList.size);
m_LoadedCells.AddRange(m_TempCellToLoadList);
m_ToBeLoadedCells.AddRange(m_TempCellToUnloadList);
m_TempCellToLoadList.Clear();
m_TempCellToUnloadList.Clear();
}
// Handle cell streaming for blending
if (enableScenarioBlending)
{
using (new ProfilingScope(cmd, ProfilingSampler.Get(CoreProfileId.APVScenarioBlendingUpdate)))
UpdateBlendingCellStreaming(cmd);
}
}
int FindWorstBlendingCellToBeLoaded()
{
int idx = -1;
float worstBlending = -1;
float factor = scenarioBlendingFactor;
for (int i = m_TempBlendingCellToLoadList.size; i < m_ToBeLoadedBlendingCells.size; ++i)
{
float score = Mathf.Abs(m_ToBeLoadedBlendingCells[i].blendingFactor - factor);
if (score > worstBlending)
{
idx = i;
if (m_ToBeLoadedBlendingCells[i].ShouldReupload()) // We are not going to find anything worse than this
break;
worstBlending = score;
}
}
return idx;
}
void UpdateBlendingCellStreaming(CommandBuffer cmd)
{
if (!m_HasRemainingCellsToBlend)
return;
// Compute the worst score to offset score of cells to prioritize
float worstLoaded = m_LoadedCells.size != 0 ? m_LoadedCells[m_LoadedCells.size - 1].streamingScore : 0.0f;
float worstToBeLoaded = m_ToBeLoadedCells.size != 0 ? m_ToBeLoadedCells[m_ToBeLoadedCells.size - 1].streamingScore : 0.0f;
float worstScore = Mathf.Max(worstLoaded, worstToBeLoaded);
ComputeStreamingScoreForBlending(m_ToBeLoadedBlendingCells, worstScore);
ComputeStreamingScoreForBlending(m_LoadedBlendingCells, worstScore);
m_ToBeLoadedBlendingCells.QuickSort();
m_LoadedBlendingCells.QuickSort();
int cellCountToLoad = Mathf.Min(m_NumberOfCellsLoadedPerFrame, m_ToBeLoadedBlendingCells.size);
while (m_TempBlendingCellToLoadList.size < cellCountToLoad)
{
var blendingCell = m_ToBeLoadedBlendingCells[m_TempBlendingCellToLoadList.size];
if (!TryLoadBlendingCell(blendingCell, m_TempBlendingCellToLoadList))
break;
}
// Budget reached
if (m_TempBlendingCellToLoadList.size != cellCountToLoad)
{
// Turnover allows a percentage of the pool to be replaced by cells with a lower streaming score
// once the system is in a stable state. This ensures all cells get updated regularly.
int turnoverOffset = -1;
int idx = (int)(m_LoadedBlendingCells.size * (1.0f - turnoverRate));
var worstNoTurnover = idx < m_LoadedBlendingCells.size ? m_LoadedBlendingCells[idx] : null;
while (m_TempBlendingCellToLoadList.size < cellCountToLoad)
{
if (m_LoadedBlendingCells.size - m_TempBlendingCellToUnloadList.size == 0) // We unloaded everything
break;
var worstCellLoaded = m_LoadedBlendingCells[m_LoadedBlendingCells.size - m_TempBlendingCellToUnloadList.size - 1];
var bestCellToBeLoaded = m_ToBeLoadedBlendingCells[m_TempBlendingCellToLoadList.size];
if (bestCellToBeLoaded.streamingScore >= (worstNoTurnover ?? worstCellLoaded).streamingScore) // We are in a "stable" state
{
if (worstNoTurnover == null) // Disable turnover
break;
// Find worst cell and assume contiguous cells have roughly the same blending factor
// (contiguous cells are spatially close by, so it's good anyway to update them together)
if (turnoverOffset == -1)
turnoverOffset = FindWorstBlendingCellToBeLoaded();
bestCellToBeLoaded = m_ToBeLoadedBlendingCells[turnoverOffset];
if (bestCellToBeLoaded.IsUpToDate()) // Every single cell is blended :)
break;
}
UnloadBlendingCell(worstCellLoaded, m_TempBlendingCellToUnloadList);
// Loading can still fail because not all cells have the same chunk count
if (TryLoadBlendingCell(bestCellToBeLoaded, m_TempBlendingCellToLoadList) && turnoverOffset != -1)
{
// swap to ensure loaded cells are at the start of m_ToBeLoadedBlendingCells
m_ToBeLoadedBlendingCells[turnoverOffset] = m_ToBeLoadedBlendingCells[m_TempBlendingCellToLoadList.size-1];
m_ToBeLoadedBlendingCells[m_TempBlendingCellToLoadList.size-1] = bestCellToBeLoaded;
if (++turnoverOffset >= m_ToBeLoadedBlendingCells.size)
turnoverOffset = m_TempBlendingCellToLoadList.size;
}
}
m_LoadedBlendingCells.RemoveRange(m_LoadedBlendingCells.size - m_TempBlendingCellToUnloadList.size, m_TempBlendingCellToUnloadList.size);
}
m_ToBeLoadedBlendingCells.RemoveRange(0, m_TempBlendingCellToLoadList.size);
m_LoadedBlendingCells.AddRange(m_TempBlendingCellToLoadList);
m_TempBlendingCellToLoadList.Clear();
m_ToBeLoadedBlendingCells.AddRange(m_TempBlendingCellToUnloadList);
m_TempBlendingCellToUnloadList.Clear();
if (m_LoadedBlendingCells.size != 0)
{
float factor = scenarioBlendingFactor;
int cellCountToBlend = Mathf.Min(numberOfCellsBlendedPerFrame, m_LoadedBlendingCells.size);
for (int i = 0; i < cellCountToBlend; ++i)
{
m_LoadedBlendingCells[i].blendingFactor = factor;
m_BlendingPool.BlendChunks(m_LoadedBlendingCells[i], m_Pool);
}
m_BlendingPool.PerformBlending(cmd, factor, m_Pool);
}
if (m_ToBeLoadedBlendingCells.size == 0)
m_HasRemainingCellsToBlend = false;
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 17f23bb13fc806a40b1c1ec8a9b6cb75
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 5206b91ddf5158c47918dbb51d045103
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,84 @@
namespace UnityEngine.Rendering
{
/// <summary>
/// An Asset which holds a set of settings to use with a <see cref="ProbeReferenceVolume"/>.
/// </summary>
public sealed class ProbeReferenceVolumeProfile : ScriptableObject
{
internal enum Version
{
Initial,
}
[SerializeField]
Version version = CoreUtils.GetLastEnumValue<Version>();
// TODO: This is here just to find a place where to serialize it. It might not be the best spot.
[SerializeField]
internal bool freezePlacement = false;
/// <summary>
/// How many levels the probe hierarchical structure contains.
/// </summary>
[Range(2, 5)]
public int simplificationLevels = 3;
/// <summary>
/// The size of a Cell in number of bricks.
/// </summary>
public int cellSizeInBricks => (int)Mathf.Pow(3, simplificationLevels);
/// <summary>
/// The minimum distance between two probes in meters.
/// </summary>
[Min(0.1f)]
public float minDistanceBetweenProbes = 1.0f;
/// <summary>
/// Maximum subdivision in the structure.
/// </summary>
public int maxSubdivision => simplificationLevels + 1; // we add one for the top subdiv level which is the same size as a cell
/// <summary>
/// Minimum size of a brick in meters.
/// </summary>
public float minBrickSize => Mathf.Max(0.01f, minDistanceBetweenProbes * 3.0f);
/// <summary>
/// Size of the cell in meters.
/// </summary>
public float cellSizeInMeters => (float)cellSizeInBricks * minBrickSize;
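// Worked example with the default values (simplificationLevels = 3, minDistanceBetweenProbes = 1.0):
// minBrickSize = 3 m, cellSizeInBricks = 3^3 = 27, cellSizeInMeters = 27 * 3 = 81 m, maxSubdivision = 4.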
/// <summary>
/// Layer mask filter for all renderers.
/// </summary>
public LayerMask renderersLayerMask = -1;
/// <summary>
/// Specifies the minimum bounding box volume of renderers to consider placing probes around.
/// </summary>
[Min(0)]
public float minRendererVolumeSize = 0.1f;
void OnEnable()
{
if (version != CoreUtils.GetLastEnumValue<Version>())
{
// Migration code
}
}
/// <summary>
/// Determines if the Probe Reference Volume Profile is equivalent to another one.
/// </summary>
/// <param name ="otherProfile">The profile to compare with.</param>
/// <returns>Whether the Probe Reference Volume Profile is equivalent to another one.</returns>
public bool IsEquivalent(ProbeReferenceVolumeProfile otherProfile)
{
return minDistanceBetweenProbes == otherProfile.minDistanceBetweenProbes &&
cellSizeInMeters == otherProfile.cellSizeInMeters &&
simplificationLevels == otherProfile.simplificationLevels &&
renderersLayerMask == otherProfile.renderersLayerMask;
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 4881f9a2c4d568047b316028d20a8dca
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,61 @@
using System.Collections.Generic;
using UnityEngine.Rendering;
using UnityEngine.SceneManagement;
#if UNITY_EDITOR
using UnityEditor;
#endif
namespace UnityEngine.Rendering
{
/// <summary>
/// A volume used to adjust or invalidate the probes that fall within its bounds.
/// </summary>
[CoreRPHelpURL("probevolumes-settings#probe-adjustment-volume", "com.unity.render-pipelines.high-definition")]
[ExecuteAlways]
[AddComponentMenu("Rendering/Probe Volume Touchup")]
public class ProbeTouchupVolume : MonoBehaviour
{
/// <summary>
/// A scale to apply to probes falling within this volume. Use with caution, as it can lead to inconsistent lighting.
/// </summary>
[Range(0.0001f, 2.0f)]
public float intensityScale = 1.0f;
/// <summary>
/// Whether to invalidate all probes falling within this volume.
/// </summary>
public bool invalidateProbes = false;
/// <summary>
/// Whether to use a custom threshold for dilation for probes falling within this volume.
/// </summary>
public bool overrideDilationThreshold = false;
/// <summary>
/// The overridden dilation threshold.
/// </summary>
[Range(0.0f, 0.99f)]
public float overriddenDilationThreshold = 0.75f;
/// <summary>
/// The size of the volume.
/// </summary>
public Vector3 size = new Vector3(1, 1, 1);
#if UNITY_EDITOR
/// <summary>
/// Returns the extents of the volume.
/// </summary>
/// <returns>The extents of the ProbeVolume.</returns>
public Vector3 GetExtents()
{
return size;
}
internal void GetOBBandAABB(out ProbeReferenceVolume.Volume volume, out Bounds bounds)
{
volume = new ProbeReferenceVolume.Volume(Matrix4x4.TRS(transform.position, transform.rotation, GetExtents()), 0, 0);
bounds = volume.CalculateAABB();
}
#endif
}
} // UnityEngine.Rendering

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 55f90731af2ebc94fb7fdaebe2a9a409
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,40 @@
using System;
namespace UnityEngine.Rendering
{
public partial class ProbeVolume : MonoBehaviour
{
enum Version
{
Initial,
LocalMode,
Count
}
[SerializeField]
Version version = Version.Initial;
void Awake()
{
if (version == Version.Count)
return;
if (version == Version.Initial)
{
#pragma warning disable 618 // Type or member is obsolete
mode = globalVolume ? Mode.Scene : Mode.Local;
#pragma warning restore 618
version++;
}
}
/// <summary>
/// Whether this is a global volume. Obsolete, use <see cref="mode"/> instead.
/// </summary>
[SerializeField, Obsolete("Use mode instead")]
public bool globalVolume = false;
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 66d9341ed4713be40a82b8eac8efe939
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,375 @@
using System.Collections.Generic;
using UnityEngine.SceneManagement;
#if UNITY_EDITOR
using UnityEditor;
using ProbeVolumeWithBounds = System.Collections.Generic.List<(UnityEngine.Rendering.ProbeVolume component, UnityEngine.Rendering.ProbeReferenceVolume.Volume volume)>;
#endif
namespace UnityEngine.Rendering
{
/// <summary>
/// A marker to determine what area of the scene is considered by the Probe Volumes system
/// </summary>
[CoreRPHelpURL("probevolumes-settings#probe-volume-properties", "com.unity.render-pipelines.high-definition")]
[ExecuteAlways]
[AddComponentMenu("Rendering/Probe Volume")]
public partial class ProbeVolume : MonoBehaviour
{
/// <summary>Indicates which renderers should be considerer for the Probe Volume bounds when baking</summary>
public enum Mode
{
/// <summary>Encapsulate all renderers in the baking set.</summary>
Global,
/// <summary>Encapsulate all renderers in the scene.</summary>
Scene,
/// <summary>Encapsulate all renderers in the bounding box.</summary>
Local
}
/// <summary>
/// The mode determining which renderers this Probe Volume considers when computing its bounds for baking.
/// </summary>
[Tooltip("When set to Global this Probe Volume considers all renderers with Contribute Global Illumination enabled. Local only considers renderers in the scene.\nThis list updates every time the Scene is saved or the lighting is baked.")]
public Mode mode = Mode.Scene;
/// <summary>
/// The size of the volume.
/// </summary>
public Vector3 size = new Vector3(10, 10, 10);
/// <summary>
/// Override the renderer filters.
/// </summary>
[HideInInspector, Min(0)]
public bool overrideRendererFilters = false;
/// <summary>
/// The minimum renderer bounding box volume size. This value is used to discard small renderers when the overrideMinRendererVolumeSize is enabled.
/// </summary>
[HideInInspector, Min(0)]
public float minRendererVolumeSize = 0.1f;
/// <summary>
/// The <see cref="LayerMask"/>
/// </summary>
public LayerMask objectLayerMask = -1;
/// <summary>
/// The lowest subdivision level override
/// </summary>
[HideInInspector]
public int lowestSubdivLevelOverride = 0;
/// <summary>
/// The highest subdivision level override
/// </summary>
[HideInInspector]
public int highestSubdivLevelOverride = -1;
/// <summary>
/// Whether the subdivision levels need to be overridden.
/// </summary>
[HideInInspector]
public bool overridesSubdivLevels = false;
[SerializeField] internal bool mightNeedRebaking = false;
[SerializeField] internal Matrix4x4 cachedTransform;
[SerializeField] internal int cachedHashCode;
/// <summary>Whether spaces with no renderers need to be filled with bricks at lowest subdivision level.</summary>
[HideInInspector]
[Tooltip("Whether spaces with no renderers need to be filled with bricks at lowest subdivision level.")]
public bool fillEmptySpaces = false;
#if UNITY_EDITOR
/// <summary>
/// Returns the extents of the volume.
/// </summary>
/// <returns>The extents of the ProbeVolume.</returns>
public Vector3 GetExtents()
{
return size;
}
internal Bounds ComputeBounds(GIContributors.ContributorFilter filter, Scene? scene = null)
{
Bounds bounds = new Bounds();
bool foundABound = false;
void ExpandBounds(Bounds bound)
{
if (!foundABound)
{
bounds = bound;
foundABound = true;
}
else
{
bounds.Encapsulate(bound);
}
}
var contributors = GIContributors.Find(filter, scene);
foreach (var renderer in contributors.renderers)
ExpandBounds(renderer.component.bounds);
foreach (var terrain in contributors.terrains)
ExpandBounds(terrain.boundsWithTrees);
return bounds;
}
internal void UpdateGlobalVolume(GIContributors.ContributorFilter filter)
{
var scene = gameObject.scene;
// Get minBrickSize from scene profile if available
float minBrickSize = ProbeReferenceVolume.instance.MinBrickSize();
if (ProbeReferenceVolume.instance.sceneData != null)
{
var profile = ProbeReferenceVolume.instance.sceneData.GetProfileForScene(scene);
if (profile != null)
minBrickSize = profile.minBrickSize;
}
var bounds = ComputeBounds(filter, scene);
transform.position = bounds.center;
size = Vector3.Max(bounds.size + new Vector3(minBrickSize, minBrickSize, minBrickSize), Vector3.zero);
}
internal void OnLightingDataAssetCleared()
{
mightNeedRebaking = true;
}
internal void OnBakeCompleted()
{
// We cache the data of last bake completed.
cachedTransform = gameObject.transform.worldToLocalMatrix;
cachedHashCode = GetHashCode();
mightNeedRebaking = false;
}
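/// <summary>
/// Returns the hash of the Probe Volume, computed from the settings that affect baking.
/// </summary>
/// <returns>The hash of the Probe Volume.</returns>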
public override int GetHashCode()
{
int hash = 17;
unchecked
{
hash = hash * 23 + size.GetHashCode();
hash = hash * 23 + gameObject.transform.worldToLocalMatrix.GetHashCode();
hash = hash * 23 + overridesSubdivLevels.GetHashCode();
hash = hash * 23 + highestSubdivLevelOverride.GetHashCode();
hash = hash * 23 + lowestSubdivLevelOverride.GetHashCode();
hash = hash * 23 + overrideRendererFilters.GetHashCode();
if (overrideRendererFilters)
{
hash = hash * 23 + minRendererVolumeSize.GetHashCode();
hash = hash * 23 + objectLayerMask.value.GetHashCode();
}
hash = hash * 23 + fillEmptySpaces.GetHashCode();
}
return hash;
}
internal float GetMinSubdivMultiplier()
{
float maxSubdiv = ProbeReferenceVolume.instance.GetMaxSubdivision() - 1;
return overridesSubdivLevels ? Mathf.Clamp(lowestSubdivLevelOverride / maxSubdiv, 0.0f, 1.0f) : 0.0f;
}
internal float GetMaxSubdivMultiplier()
{
float maxSubdiv = ProbeReferenceVolume.instance.GetMaxSubdivision() - 1;
return overridesSubdivLevels ? Mathf.Clamp(highestSubdivLevelOverride / maxSubdiv, 0.0f, 1.0f) : 1.0f;
}
// Temporarily moving the gizmo rendering for bricks and cells to Probe Volume itself;
// only the first probe volume in the scene will render them. The reason is that we don't have any
// other non-hidden component related to APV.
#region APVGizmo
static List<ProbeVolume> sProbeVolumeInstances = new();
MeshGizmo brickGizmos;
MeshGizmo cellGizmo;
void DisposeGizmos()
{
brickGizmos?.Dispose();
brickGizmos = null;
cellGizmo?.Dispose();
cellGizmo = null;
}
void OnEnable()
{
sProbeVolumeInstances.Add(this);
}
void OnDisable()
{
sProbeVolumeInstances.Remove(this);
DisposeGizmos();
}
// Only the first PV of the available ones will draw gizmos.
bool IsResponsibleToDrawGizmo() => sProbeVolumeInstances.Count > 0 && sProbeVolumeInstances[0] == this;
internal bool ShouldCullCell(Vector3 cellPosition, Vector3 originWS = default(Vector3))
{
var cellSizeInMeters = ProbeReferenceVolume.instance.MaxBrickSize();
var debugDisplay = ProbeReferenceVolume.instance.probeVolumeDebug;
if (debugDisplay.realtimeSubdivision)
{
var profile = ProbeReferenceVolume.instance.sceneData.GetProfileForScene(gameObject.scene);
if (profile == null)
return true;
cellSizeInMeters = profile.cellSizeInMeters;
}
var cameraTransform = Camera.current.transform;
Vector3 cellCenterWS = cellPosition * cellSizeInMeters + originWS + Vector3.one * (cellSizeInMeters / 2.0f);
// Round down to cell size distance
float roundedDownDist = Mathf.Floor(Vector3.Distance(cameraTransform.position, cellCenterWS) / cellSizeInMeters) * cellSizeInMeters;
if (roundedDownDist > ProbeReferenceVolume.instance.probeVolumeDebug.subdivisionViewCullingDistance)
return true;
var frustumPlanes = GeometryUtility.CalculateFrustumPlanes(Camera.current);
var volumeAABB = new Bounds(cellCenterWS, cellSizeInMeters * Vector3.one);
return !GeometryUtility.TestPlanesAABB(frustumPlanes, volumeAABB);
}
// TODO: We need to get rid of Handles.DrawWireCube to be able to have those at runtime as well.
void OnDrawGizmos()
{
if (!ProbeReferenceVolume.instance.isInitialized || !IsResponsibleToDrawGizmo() || ProbeReferenceVolume.instance.sceneData == null)
return;
var debugDisplay = ProbeReferenceVolume.instance.probeVolumeDebug;
var cellSizeInMeters = ProbeReferenceVolume.instance.MaxBrickSize();
if (debugDisplay.realtimeSubdivision)
{
var profile = ProbeReferenceVolume.instance.sceneData.GetProfileForScene(gameObject.scene);
if (profile == null)
return;
cellSizeInMeters = profile.cellSizeInMeters;
}
if (debugDisplay.drawBricks)
{
var subdivColors = ProbeReferenceVolume.instance.subdivisionDebugColors;
IEnumerable<ProbeBrickIndex.Brick> GetVisibleBricks()
{
if (debugDisplay.realtimeSubdivision)
{
// realtime subdiv cells are already culled
foreach (var kp in ProbeReferenceVolume.instance.realtimeSubdivisionInfo)
{
var cellVolume = kp.Key;
foreach (var brick in kp.Value)
{
yield return brick;
}
}
}
else
{
foreach (var cellInfo in ProbeReferenceVolume.instance.cells.Values)
{
if (!cellInfo.loaded)
continue;
if (ShouldCullCell(cellInfo.cell.position, ProbeReferenceVolume.instance.GetTransform().posWS))
continue;
if (cellInfo.cell.bricks == null)
continue;
foreach (var brick in cellInfo.cell.bricks)
yield return brick;
}
}
}
if (brickGizmos == null)
brickGizmos = new MeshGizmo((int)(Mathf.Pow(3, ProbeBrickIndex.kMaxSubdivisionLevels) * MeshGizmo.vertexCountPerCube));
brickGizmos.Clear();
foreach (var brick in GetVisibleBricks())
{
if (brick.subdivisionLevel < 0)
continue;
float brickSize = ProbeReferenceVolume.instance.BrickSize(brick.subdivisionLevel);
float minBrickSize = ProbeReferenceVolume.instance.MinBrickSize();
Vector3 scaledSize = new Vector3(brickSize, brickSize, brickSize);
Vector3 scaledPos = new Vector3(brick.position.x * minBrickSize, brick.position.y * minBrickSize, brick.position.z * minBrickSize) + scaledSize / 2;
brickGizmos.AddWireCube(scaledPos, scaledSize, subdivColors[brick.subdivisionLevel]);
}
brickGizmos.RenderWireframe(Matrix4x4.identity, gizmoName: "Brick Gizmo Rendering");
}
if (debugDisplay.drawCells)
{
IEnumerable<Vector4> GetVisibleCellCentersAndState()
{
if (debugDisplay.realtimeSubdivision)
{
foreach (var kp in ProbeReferenceVolume.instance.realtimeSubdivisionInfo)
{
var center = kp.Key.center;
yield return new Vector4(center.x, center.y, center.z, 1.0f);
}
}
else
{
foreach (var cellInfo in ProbeReferenceVolume.instance.cells.Values)
{
if (ShouldCullCell(cellInfo.cell.position, ProbeReferenceVolume.instance.GetTransform().posWS))
continue;
var cell = cellInfo.cell;
var positionF = new Vector4(cell.position.x, cell.position.y, cell.position.z, 0.0f);
var center = positionF * cellSizeInMeters + cellSizeInMeters * 0.5f * Vector4.one;
center.w = cellInfo.loaded ? 1.0f : 0.0f;
yield return center;
}
}
}
Matrix4x4 trs = Matrix4x4.TRS(ProbeReferenceVolume.instance.GetTransform().posWS, ProbeReferenceVolume.instance.GetTransform().rot, Vector3.one);
var oldGizmoMatrix = Gizmos.matrix;
if (cellGizmo == null)
cellGizmo = new MeshGizmo();
cellGizmo.Clear();
foreach (var center in GetVisibleCellCentersAndState())
{
bool loaded = center.w == 1.0f;
Gizmos.color = loaded ? new Color(0, 1, 0.5f, 0.2f) : new Color(1, 0.0f, 0.0f, 0.2f);
Gizmos.matrix = trs;
Gizmos.DrawCube(center, Vector3.one * cellSizeInMeters);
cellGizmo.AddWireCube(center, Vector3.one * cellSizeInMeters, loaded ? new Color(0, 1, 0.5f, 1) : new Color(1, 0.0f, 0.0f, 1));
}
cellGizmo.RenderWireframe(Gizmos.matrix, gizmoName: "Cell Gizmo Rendering");
Gizmos.matrix = oldGizmoMatrix;
}
}
#endregion
#endif // UNITY_EDITOR
}
} // UnityEngine.Rendering

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: cded085d155cde949b60f67a11dbc3bd
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,697 @@
#ifndef __PROBEVOLUME_HLSL__
#define __PROBEVOLUME_HLSL__
#include "Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ShaderVariablesProbeVolumes.cs.hlsl"
#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/SphericalHarmonics.hlsl"
#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Color.hlsl"
// Unpack variables
#define _PoolDim _PoolDim_CellInMeters.xyz
#define _CellInMeters _PoolDim_CellInMeters.w
#define _MinCellPosition _MinCellPos_Noise.xyz
#define _PVSamplingNoise _MinCellPos_Noise.w
#define _CellIndicesDim _IndicesDim_IndexChunkSize.xyz
#define _IndexChunkSize _IndicesDim_IndexChunkSize.w
#define _NormalBias _Biases_CellInMinBrick_MinBrickSize.x
#define _ViewBias _Biases_CellInMinBrick_MinBrickSize.y
#define _MinBrickSize _Biases_CellInMinBrick_MinBrickSize.w
#define _Weight _Weight_MinLoadedCell.x
#define _MinLoadedCell _Weight_MinLoadedCell.yzw
#define _MaxLoadedCell _MaxLoadedCell_FrameIndex.xyz
#define _NoiseFrameIndex _MaxLoadedCell_FrameIndex.w
#define _MinReflProbeNormalizationFactor _NormalizationClamp_Padding12.x
#define _MaxReflProbeNormalizationFactor _NormalizationClamp_Padding12.y
#ifndef DECODE_SH
#include "Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/DecodeSH.hlsl"
#endif
#ifndef UNITY_SHADER_VARIABLES_INCLUDED
SAMPLER(s_linear_clamp_sampler);
SAMPLER(s_point_clamp_sampler);
#endif
// TODO: Remove define when we are sure about what to do with this.
#define MANUAL_FILTERING 0
struct APVResources
{
StructuredBuffer<int> index;
Texture3D L0_L1Rx;
Texture3D L1G_L1Ry;
Texture3D L1B_L1Rz;
Texture3D L2_0;
Texture3D L2_1;
Texture3D L2_2;
Texture3D L2_3;
Texture3D Validity;
};
struct APVSample
{
float3 L0;
float3 L1_R;
float3 L1_G;
float3 L1_B;
#ifdef PROBE_VOLUMES_L2
float4 L2_R;
float4 L2_G;
float4 L2_B;
float3 L2_C;
#endif
#define APV_SAMPLE_STATUS_INVALID -1
#define APV_SAMPLE_STATUS_ENCODED 0
#define APV_SAMPLE_STATUS_DECODED 1
int status;
// Note: this is currently called when the struct is built, but it is kept as a separate step
// as ideally it should be called as far as possible from the sample to allow for latency hiding.
void Decode()
{
if (status == APV_SAMPLE_STATUS_ENCODED)
{
L1_R = DecodeSH(L0.r, L1_R);
L1_G = DecodeSH(L0.g, L1_G);
L1_B = DecodeSH(L0.b, L1_B);
#ifdef PROBE_VOLUMES_L2
float4 outL2_C = float4(L2_C, 0.0f);
DecodeSH_L2(L0, L2_R, L2_G, L2_B, outL2_C);
L2_C = outL2_C.xyz;
#endif
status = APV_SAMPLE_STATUS_DECODED;
}
}
void Encode()
{
if (status == APV_SAMPLE_STATUS_DECODED)
{
L1_R = EncodeSH(L0.r, L1_R);
L1_G = EncodeSH(L0.g, L1_G);
L1_B = EncodeSH(L0.b, L1_B);
#ifdef PROBE_VOLUMES_L2
EncodeSH_L2(L0, L2_R, L2_G, L2_B, L2_C);
#endif
status = APV_SAMPLE_STATUS_ENCODED;
}
}
};
// Resources required for APV
StructuredBuffer<int> _APVResIndex;
StructuredBuffer<uint3> _APVResCellIndices;
TEXTURE3D(_APVResL0_L1Rx);
TEXTURE3D(_APVResL1G_L1Ry);
TEXTURE3D(_APVResL1B_L1Rz);
TEXTURE3D(_APVResL2_0);
TEXTURE3D(_APVResL2_1);
TEXTURE3D(_APVResL2_2);
TEXTURE3D(_APVResL2_3);
TEXTURE3D(_APVResValidity);
// -------------------------------------------------------------
// Various weighting functions for occlusion or helper functions.
// -------------------------------------------------------------
float3 AddNoiseToSamplingPosition(float3 posWS, float2 positionSS)
{
float3 outPos = posWS;
if (_PVSamplingNoise > 0)
{
float noise1D_0 = (InterleavedGradientNoise(positionSS, _NoiseFrameIndex) * 2.0f - 1.0f) * _PVSamplingNoise;
outPos += noise1D_0;
}
return outPos;
}
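// Maps a corner index i in [0, 7] to its offset within the trilinear footprint: bit 0 selects x, bit 1 selects y, bit 2 selects z.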
uint3 GetSampleOffset(uint i)
{
return uint3(i, i >> 1, i >> 2) & 1;
}
// The validity mask is sampled once and contains binary information on whether a probe neighbour (relevant for trilinear sampling) is to be used
// or not. The entry in the mask uses the same mapping as GetSampleOffset above.
float GetValidityWeight(int offset, uint validityMask)
{
int mask = 1 << offset;
return (validityMask & mask) > 0 ? 1 : 0;
}
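// Distance between two adjacent probes of a brick at the given subdivision level: a brick spans pow(3, subdiv) * _MinBrickSize and 3 cells per axis, so the probe spacing is a third of the brick size.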
float ProbeDistance(uint subdiv)
{
return pow(3, subdiv) * _MinBrickSize / 3.0f;
}
float3 GetSnappedProbePosition(float3 posWS, uint subdiv)
{
float distBetweenProbes = ProbeDistance(subdiv);
float3 dividedPos = posWS / distBetweenProbes;
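// dividedPos - frac(dividedPos) is floor(dividedPos): snap down to the probe lattice for this subdivision level.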
return (dividedPos - frac(dividedPos)) * distBetweenProbes;
}
float GetNormalWeight(int3 offset, float3 posWS, float3 sample0Pos, float3 normalWS, int subdiv)
{
// TODO: This can be optimized.
float3 samplePos = sample0Pos + offset * ProbeDistance(subdiv);
float3 vecToProbe = normalize((samplePos)-posWS);
float weight = saturate(dot(vecToProbe, normalWS) - _LeakReductionParams.z);
return weight;
}
// -------------------------------------------------------------
// Indexing functions
// -------------------------------------------------------------
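// Per-cell index metadata layout (as unpacked below): metaData.x holds the chunk index in its lower 29 bits and the brick subdivision (used to derive the step size) in its upper 3 bits, with 0xFFFFFFFF meaning the cell is not loaded; metaData.y/.z hold the min/max brick indices relative to the cell, 10 bits per axis.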
bool LoadCellIndexMetaData(int cellFlatIdx, out int chunkIndex, out int stepSize, out int3 minRelativeIdx, out int3 maxRelativeIdx)
{
bool cellIsLoaded = false;
uint3 metaData = _APVResCellIndices[cellFlatIdx];
if (metaData.x != 0xFFFFFFFF)
{
chunkIndex = metaData.x & 0x1FFFFFFF;
stepSize = pow(3, (metaData.x >> 29) & 0x7);
minRelativeIdx.x = metaData.y & 0x3FF;
minRelativeIdx.y = (metaData.y >> 10) & 0x3FF;
minRelativeIdx.z = (metaData.y >> 20) & 0x3FF;
maxRelativeIdx.x = metaData.z & 0x3FF;
maxRelativeIdx.y = (metaData.z >> 10) & 0x3FF;
maxRelativeIdx.z = (metaData.z >> 20) & 0x3FF;
cellIsLoaded = true;
}
else
{
chunkIndex = -1;
stepSize = -1;
minRelativeIdx = -1;
maxRelativeIdx = -1;
}
return cellIsLoaded;
}
uint GetIndexData(APVResources apvRes, float3 posWS)
{
int3 cellPos = floor(posWS / _CellInMeters);
float3 topLeftCellWS = cellPos * _CellInMeters;
bool isALoadedCell = all(cellPos <= _MaxLoadedCell) && all(cellPos >= _MinLoadedCell);
// Make sure we start from 0
cellPos -= (int3)_MinCellPosition;
int flatIdx = cellPos.z * (_CellIndicesDim.x * _CellIndicesDim.y) + cellPos.y * _CellIndicesDim.x + cellPos.x;
int stepSize = 0;
int3 minRelativeIdx, maxRelativeIdx;
int chunkIdx = -1;
bool isValidBrick = true;
int locationInPhysicalBuffer = 0;
if (isALoadedCell && LoadCellIndexMetaData(flatIdx, chunkIdx, stepSize, minRelativeIdx, maxRelativeIdx))
{
float3 residualPosWS = posWS - topLeftCellWS;
int3 localBrickIndex = floor(residualPosWS / (_MinBrickSize * stepSize));
// Out of bounds.
if (any(localBrickIndex < minRelativeIdx || localBrickIndex >= maxRelativeIdx))
{
isValidBrick = false;
}
int3 sizeOfValid = maxRelativeIdx - minRelativeIdx;
// Relative to valid region
int3 localRelativeIndexLoc = (localBrickIndex - minRelativeIdx);
int flattenedLocationInCell = localRelativeIndexLoc.z * (sizeOfValid.x * sizeOfValid.y) + localRelativeIndexLoc.x * sizeOfValid.y + localRelativeIndexLoc.y;
locationInPhysicalBuffer = chunkIdx * _IndexChunkSize + flattenedLocationInCell;
}
else
{
isValidBrick = false;
}
return isValidBrick ? apvRes.index[locationInPhysicalBuffer] : 0xffffffff;
}
// -------------------------------------------------------------
// Loading functions
// -------------------------------------------------------------
APVResources FillAPVResources()
{
APVResources apvRes;
apvRes.index = _APVResIndex;
apvRes.L0_L1Rx = _APVResL0_L1Rx;
apvRes.L1G_L1Ry = _APVResL1G_L1Ry;
apvRes.L1B_L1Rz = _APVResL1B_L1Rz;
apvRes.L2_0 = _APVResL2_0;
apvRes.L2_1 = _APVResL2_1;
apvRes.L2_2 = _APVResL2_2;
apvRes.L2_3 = _APVResL2_3;
apvRes.Validity = _APVResValidity;
return apvRes;
}
bool TryToGetPoolUVWAndSubdiv(APVResources apvRes, float3 posWS, float3 normalWS, float3 viewDirWS, out float3 uvw, out uint subdiv, out float3 biasedPosWS)
{
uvw = 0;
// Note: we could instead early return when we know we'll have invalid UVs, but some bad code gen on Vulkan generates shader warnings if we do.
bool hasValidUVW = true;
float4 posWSForSample = float4(posWS + normalWS * _NormalBias
+ viewDirWS * _ViewBias, 1.0);
biasedPosWS = posWSForSample.xyz;
uint3 poolDim = (uint3)_PoolDim;
// resolve the index
float3 posRS = posWSForSample.xyz / _MinBrickSize;
uint packed_pool_idx = GetIndexData(apvRes, posWSForSample.xyz);
// no valid brick loaded for this index, fallback to ambient probe
if (packed_pool_idx == 0xffffffff)
{
hasValidUVW = false;
}
// unpack pool idx
// size is encoded in the upper 4 bits
subdiv = (packed_pool_idx >> 28) & 15;
float cellSize = pow(3.0, subdiv);
uint flattened_pool_idx = packed_pool_idx & ((1 << 28) - 1);
uint3 pool_idx;
pool_idx.z = flattened_pool_idx / (poolDim.x * poolDim.y);
flattened_pool_idx -= pool_idx.z * (poolDim.x * poolDim.y);
pool_idx.y = flattened_pool_idx / poolDim.x;
pool_idx.x = flattened_pool_idx - (pool_idx.y * poolDim.x);
uvw = ((float3) pool_idx + 0.5) / _PoolDim;
// calculate uv offset and scale
float3 offset = frac(posRS / (float)cellSize); // [0;1] in brick space
//offset = clamp( offset, 0.25, 0.75 ); // [0.25;0.75] in brick space (is this actually necessary?)
offset *= 3.0 / _PoolDim; // convert brick footprint to texels footprint in pool texel space
uvw += offset; // add the final offset
return hasValidUVW;
}
bool TryToGetPoolUVW(APVResources apvRes, float3 posWS, float3 normalWS, float3 viewDir, out float3 uvw)
{
uint unusedSubdiv;
float3 unusedPos;
return TryToGetPoolUVWAndSubdiv(apvRes, posWS, normalWS, viewDir, uvw, unusedSubdiv, unusedPos);
}
APVSample SampleAPV(APVResources apvRes, float3 uvw)
{
APVSample apvSample;
float4 L0_L1Rx = SAMPLE_TEXTURE3D_LOD(apvRes.L0_L1Rx, s_linear_clamp_sampler, uvw, 0).rgba;
float4 L1G_L1Ry = SAMPLE_TEXTURE3D_LOD(apvRes.L1G_L1Ry, s_linear_clamp_sampler, uvw, 0).rgba;
float4 L1B_L1Rz = SAMPLE_TEXTURE3D_LOD(apvRes.L1B_L1Rz, s_linear_clamp_sampler, uvw, 0).rgba;
apvSample.L0 = L0_L1Rx.xyz;
apvSample.L1_R = float3(L0_L1Rx.w, L1G_L1Ry.w, L1B_L1Rz.w);
apvSample.L1_G = L1G_L1Ry.xyz;
apvSample.L1_B = L1B_L1Rz.xyz;
#ifdef PROBE_VOLUMES_L2
apvSample.L2_R = SAMPLE_TEXTURE3D_LOD(apvRes.L2_0, s_linear_clamp_sampler, uvw, 0).rgba;
apvSample.L2_G = SAMPLE_TEXTURE3D_LOD(apvRes.L2_1, s_linear_clamp_sampler, uvw, 0).rgba;
apvSample.L2_B = SAMPLE_TEXTURE3D_LOD(apvRes.L2_2, s_linear_clamp_sampler, uvw, 0).rgba;
apvSample.L2_C = SAMPLE_TEXTURE3D_LOD(apvRes.L2_3, s_linear_clamp_sampler, uvw, 0).rgb;
#endif
apvSample.status = APV_SAMPLE_STATUS_ENCODED;
return apvSample;
}
APVSample LoadAndDecodeAPV(APVResources apvRes, int3 loc)
{
APVSample apvSample;
float4 L0_L1Rx = LOAD_TEXTURE3D(apvRes.L0_L1Rx, loc).rgba;
float4 L1G_L1Ry = LOAD_TEXTURE3D(apvRes.L1G_L1Ry, loc).rgba;
float4 L1B_L1Rz = LOAD_TEXTURE3D(apvRes.L1B_L1Rz, loc).rgba;
apvSample.L0 = L0_L1Rx.xyz;
apvSample.L1_R = float3(L0_L1Rx.w, L1G_L1Ry.w, L1B_L1Rz.w);
apvSample.L1_G = L1G_L1Ry.xyz;
apvSample.L1_B = L1B_L1Rz.xyz;
#ifdef PROBE_VOLUMES_L2
apvSample.L2_R = LOAD_TEXTURE3D(apvRes.L2_0, loc).rgba;
apvSample.L2_G = LOAD_TEXTURE3D(apvRes.L2_1, loc).rgba;
apvSample.L2_B = LOAD_TEXTURE3D(apvRes.L2_2, loc).rgba;
apvSample.L2_C = LOAD_TEXTURE3D(apvRes.L2_3, loc).rgb;
#endif
apvSample.status = APV_SAMPLE_STATUS_ENCODED;
apvSample.Decode();
return apvSample;
}
void WeightSample(inout APVSample apvSample, float weight)
{
apvSample.L0 *= weight;
apvSample.L1_R *= weight;
apvSample.L1_G *= weight;
apvSample.L1_B *= weight;
#ifdef PROBE_VOLUMES_L2
apvSample.L2_R *= weight;
apvSample.L2_G *= weight;
apvSample.L2_B *= weight;
apvSample.L2_C *= weight;
#endif
}
void AccumulateSamples(inout APVSample dst, APVSample other, float weight)
{
WeightSample(other, weight);
dst.L0 += other.L0;
dst.L1_R += other.L1_R;
dst.L1_G += other.L1_G;
dst.L1_B += other.L1_B;
#ifdef PROBE_VOLUMES_L2
dst.L2_R += other.L2_R;
dst.L2_G += other.L2_G;
dst.L2_B += other.L2_B;
dst.L2_C += other.L2_C;
#endif
}
APVSample ManuallyFilteredSample(APVResources apvRes, float3 posWS, float3 normalWS, int subdiv, float3 biasedPosWS, float3 uvw)
{
float3 texCoordFloat = uvw * _PoolDim - .5f;
int3 texCoordInt = texCoordFloat;
float3 texFrac = frac(texCoordFloat);
float3 oneMinTexFrac = 1.0f - texFrac;
bool sampled = false;
float totalW = 0.0f;
APVSample baseSample;
float3 positionCentralProbe = GetSnappedProbePosition(biasedPosWS, subdiv);
ZERO_INITIALIZE(APVSample, baseSample);
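// The validity texture stores an 8-bit mask (presumably one bit per corner probe of the trilinear cell); rescale the normalized value to recover it.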
uint validityMask = LOAD_TEXTURE3D(apvRes.Validity, texCoordInt).x * 255;
for (int i = 0; i < 8; ++i)
{
uint3 offset = GetSampleOffset(i);
float trilinearW =
((offset.x == 1) ? texFrac.x : oneMinTexFrac.x) *
((offset.y == 1) ? texFrac.y : oneMinTexFrac.y) *
((offset.z == 1) ? texFrac.z : oneMinTexFrac.z);
float validityWeight = GetValidityWeight(i, validityMask);
if (validityWeight > 0)
{
APVSample apvSample = LoadAndDecodeAPV(apvRes, texCoordInt + offset);
float geoW = GetNormalWeight(offset, posWS, positionCentralProbe, normalWS, subdiv);
float finalW = geoW * trilinearW;
AccumulateSamples(baseSample, apvSample, finalW);
totalW += finalW;
}
}
WeightSample(baseSample, rcp(totalW));
return baseSample;
}
void WarpUVWLeakReduction(APVResources apvRes, float3 posWS, float3 normalWS, int subdiv, float3 biasedPosWS, inout float3 uvw)
{
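// Rather than filtering manually, warp the trilinear sampling position toward valid probes:
// each of the 8 corners gets a weight (trilinear * geometric * validity) and the fractional offset is rebuilt as their weighted average.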
float3 texCoordFloat = uvw * _PoolDim - .5f;
int3 texCoordInt = texCoordFloat;
float3 texFrac = frac(texCoordFloat);
float3 oneMinTexFrac = 1.0f - texFrac;
uint validityMask = LOAD_TEXTURE3D(apvRes.Validity, texCoordInt).x * 255;
float3 newFrac = 0.0f;
float weights[8] = { 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f };
float totalW = 0.0f;
int i = 0;
float3 positionCentralProbe = GetSnappedProbePosition(biasedPosWS, subdiv);
for (i = 0; i < 8; ++i)
{
uint3 offset = GetSampleOffset(i);
float trilinearW =
((offset.x == 1) ? texFrac.x : oneMinTexFrac.x) *
((offset.y == 1) ? texFrac.y : oneMinTexFrac.y) *
((offset.z == 1) ? texFrac.z : oneMinTexFrac.z);
float validityWeight = GetValidityWeight(i, validityMask);
float geoW = GetNormalWeight(offset, posWS, positionCentralProbe, normalWS, subdiv);
weights[i] = max(0.0001f, saturate(trilinearW * geoW * validityWeight));
totalW += weights[i];
}
for (i = 0; i < 8; ++i)
{
uint3 offset = GetSampleOffset(i);
newFrac += (float3)offset * weights[i] * rcp(totalW);
}
uvw = ((texCoordFloat - texFrac + newFrac + 0.5) * rcp(_PoolDim));
}
APVSample SampleAPV(APVResources apvRes, float3 posWS, float3 biasNormalWS, float3 viewDir)
{
APVSample outSample;
float3 pool_uvw;
uint subdiv;
float3 biasedPosWS;
if (TryToGetPoolUVWAndSubdiv(apvRes, posWS, biasNormalWS, viewDir, pool_uvw, subdiv, biasedPosWS))
{
#if MANUAL_FILTERING == 1
if (_LeakReductionParams.x != 0)
outSample = ManuallyFilteredSample(apvRes, posWS, biasNormalWS, subdiv, biasedPosWS, pool_uvw);
else
outSample = SampleAPV(apvRes, pool_uvw);
#else
if (_LeakReductionParams.x != 0)
{
WarpUVWLeakReduction(apvRes, posWS, biasNormalWS, subdiv, biasedPosWS, pool_uvw);
}
outSample = SampleAPV(apvRes, pool_uvw);
#endif
}
else
{
ZERO_INITIALIZE(APVSample, outSample);
outSample.status = APV_SAMPLE_STATUS_INVALID;
}
return outSample;
}
APVSample SampleAPV(float3 posWS, float3 biasNormalWS, float3 viewDir)
{
APVResources apvRes = FillAPVResources();
return SampleAPV(apvRes, posWS, biasNormalWS, viewDir);
}
// -------------------------------------------------------------
// Internal Evaluation functions (avoid usage in caller code outside this file)
// -------------------------------------------------------------
float3 EvaluateAPVL0(APVSample apvSample)
{
return apvSample.L0;
}
void EvaluateAPVL1(APVSample apvSample, float3 N, out float3 diffuseLighting)
{
diffuseLighting = SHEvalLinearL1(N, apvSample.L1_R, apvSample.L1_G, apvSample.L1_B);
}
#ifdef PROBE_VOLUMES_L2
void EvaluateAPVL1L2(APVSample apvSample, float3 N, out float3 diffuseLighting)
{
EvaluateAPVL1(apvSample, N, diffuseLighting);
diffuseLighting += SHEvalLinearL2(N, apvSample.L2_R, apvSample.L2_G, apvSample.L2_B, float4(apvSample.L2_C, 0.0f));
}
#endif
// -------------------------------------------------------------
// "Public" Evaluation functions, the one that callers outside this file should use
// -------------------------------------------------------------
void EvaluateAdaptiveProbeVolume(APVSample apvSample, float3 normalWS, float3 backNormalWS, out float3 bakeDiffuseLighting, out float3 backBakeDiffuseLighting)
{
if (apvSample.status != APV_SAMPLE_STATUS_INVALID)
{
apvSample.Decode();
#ifdef PROBE_VOLUMES_L1
EvaluateAPVL1(apvSample, normalWS, bakeDiffuseLighting);
EvaluateAPVL1(apvSample, backNormalWS, backBakeDiffuseLighting);
#elif PROBE_VOLUMES_L2
EvaluateAPVL1L2(apvSample, normalWS, bakeDiffuseLighting);
EvaluateAPVL1L2(apvSample, backNormalWS, backBakeDiffuseLighting);
#endif
bakeDiffuseLighting += apvSample.L0;
backBakeDiffuseLighting += apvSample.L0;
if (_Weight < 1.f)
{
bakeDiffuseLighting = lerp(EvaluateAmbientProbe(normalWS), bakeDiffuseLighting, _Weight);
backBakeDiffuseLighting = lerp(EvaluateAmbientProbe(backNormalWS), backBakeDiffuseLighting, _Weight);
}
}
else
{
// no valid brick, fall back to the ambient probe
bakeDiffuseLighting = EvaluateAmbientProbe(normalWS);
backBakeDiffuseLighting = EvaluateAmbientProbe(backNormalWS);
}
}
void EvaluateAdaptiveProbeVolume(in float3 posWS, in float3 normalWS, in float3 backNormalWS, in float3 reflDir, in float3 viewDir,
in float2 positionSS, out float3 bakeDiffuseLighting, out float3 backBakeDiffuseLighting, out float3 lightingInReflDir)
{
APVResources apvRes = FillAPVResources();
posWS = AddNoiseToSamplingPosition(posWS, positionSS);
APVSample apvSample = SampleAPV(posWS, normalWS, viewDir);
if (apvSample.status != APV_SAMPLE_STATUS_INVALID)
{
#if MANUAL_FILTERING == 0
apvSample.Decode();
#endif
#ifdef PROBE_VOLUMES_L1
EvaluateAPVL1(apvSample, normalWS, bakeDiffuseLighting);
EvaluateAPVL1(apvSample, backNormalWS, backBakeDiffuseLighting);
EvaluateAPVL1(apvSample, reflDir, lightingInReflDir);
#elif PROBE_VOLUMES_L2
EvaluateAPVL1L2(apvSample, normalWS, bakeDiffuseLighting);
EvaluateAPVL1L2(apvSample, backNormalWS, backBakeDiffuseLighting);
EvaluateAPVL1L2(apvSample, reflDir, lightingInReflDir);
#endif
bakeDiffuseLighting += apvSample.L0;
backBakeDiffuseLighting += apvSample.L0;
lightingInReflDir += apvSample.L0;
if (_Weight < 1.f)
{
bakeDiffuseLighting = lerp(EvaluateAmbientProbe(normalWS), bakeDiffuseLighting, _Weight);
backBakeDiffuseLighting = lerp(EvaluateAmbientProbe(backNormalWS), backBakeDiffuseLighting, _Weight);
}
}
else
{
bakeDiffuseLighting = EvaluateAmbientProbe(normalWS);
backBakeDiffuseLighting = EvaluateAmbientProbe(backNormalWS);
lightingInReflDir = -1;
}
}
void EvaluateAdaptiveProbeVolume(in float3 posWS, in float3 normalWS, in float3 backNormalWS, in float3 viewDir,
in float2 positionSS, out float3 bakeDiffuseLighting, out float3 backBakeDiffuseLighting)
{
bakeDiffuseLighting = float3(0.0, 0.0, 0.0);
backBakeDiffuseLighting = float3(0.0, 0.0, 0.0);
posWS = AddNoiseToSamplingPosition(posWS, positionSS);
APVSample apvSample = SampleAPV(posWS, normalWS, viewDir);
EvaluateAdaptiveProbeVolume(apvSample, normalWS, backNormalWS, bakeDiffuseLighting, backBakeDiffuseLighting);
}
void EvaluateAdaptiveProbeVolume(in float3 posWS, in float2 positionSS, out float3 bakeDiffuseLighting)
{
APVResources apvRes = FillAPVResources();
posWS = AddNoiseToSamplingPosition(posWS, positionSS);
float3 uvw;
if (TryToGetPoolUVW(apvRes, posWS, 0, 0, uvw))
{
bakeDiffuseLighting = SAMPLE_TEXTURE3D_LOD(apvRes.L0_L1Rx, s_linear_clamp_sampler, uvw, 0).rgb;
}
else
{
bakeDiffuseLighting = EvaluateAmbientProbe(0);
}
}
// -------------------------------------------------------------
// Reflection Probe Normalization functions
// -------------------------------------------------------------
// Same idea as in Rendering of COD:IW [Drobot 2017]
float EvaluateReflectionProbeSH(float3 sampleDir, float4 reflProbeSHL0L1, float4 reflProbeSHL2_1, float reflProbeSHL2_2)
{
float outFactor = 0;
float L0 = reflProbeSHL0L1.x;
float L1 = dot(reflProbeSHL0L1.yzw, sampleDir);
outFactor = L0 + L1;
#ifdef PROBE_VOLUMES_L2
// IMPORTANT: The encoding is unravelled C# side before being sent
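// vB packs the quadratic basis products (x*y, y*z, z*z, z*x); together with vC = x*x - y*y below, these cover the five L2 terms (per the note above, the remaining factors are folded in on the C# side).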
float4 vB = sampleDir.xyzz * sampleDir.yzzx;
// First 4 coeff.
float L2 = dot(reflProbeSHL2_1, vB);
float vC = sampleDir.x * sampleDir.x - sampleDir.y * sampleDir.y;
L2 += reflProbeSHL2_2 * vC;
outFactor += L2;
#endif
return outFactor;
}
float GetReflectionProbeNormalizationFactor(float3 lightingInReflDir, float3 sampleDir, float4 reflProbeSHL0L1, float4 reflProbeSHL2_1, float reflProbeSHL2_2)
{
float refProbeNormalization = EvaluateReflectionProbeSH(sampleDir, reflProbeSHL0L1, reflProbeSHL2_1, reflProbeSHL2_2);
float localNormalization = Luminance(lightingInReflDir);
return lerp(1.f, clamp(SafeDiv(localNormalization, refProbeNormalization), _MinReflProbeNormalizationFactor, _MaxReflProbeNormalizationFactor), _Weight);
}
#endif // __PROBEVOLUME_HLSL__

View File

@@ -0,0 +1,10 @@
fileFormatVersion: 2
guid: 688129ab15b222340a2c7fa427269889
ShaderImporter:
externalObjects: {}
defaultTextures: []
nonModifiableTextures: []
preprocessorOverride: 0
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,276 @@
using System;
using System.IO;
using UnityEngine.SceneManagement;
using Unity.Collections.LowLevel.Unsafe;
using Unity.Collections;
namespace UnityEngine.Rendering
{
[PreferBinarySerialization]
internal class ProbeVolumeAsset : ScriptableObject
{
[Serializable]
internal enum AssetVersion
{
First,
AddProbeVolumesAtlasEncodingModes,
PV2,
ChunkBasedIndex,
BinaryRuntimeDebugSplit,
BinaryTextureData,
Max,
Current = Max - 1
}
public int Version => m_Version;
[SerializeField] protected internal int m_Version = (int)AssetVersion.Current;
[SerializeField] internal ProbeReferenceVolume.Cell[] cells;
[SerializeField] internal CellCounts[] cellCounts;
[SerializeField] internal CellCounts totalCellCounts;
[SerializeField] internal Vector3Int maxCellPosition;
[SerializeField] internal Vector3Int minCellPosition;
[SerializeField] internal Bounds globalBounds;
[SerializeField] internal ProbeVolumeSHBands bands;
[SerializeField] internal int chunkSizeInBricks;
[SerializeField] string m_AssetFullPath = "UNINITIALIZED!";
// Profile info
[SerializeField] internal int cellSizeInBricks;
[SerializeField] internal int simplificationLevels;
[SerializeField] internal float minDistanceBetweenProbes;
[Serializable]
internal struct CellCounts
{
public int bricksCount;
public int probesCount;
public int offsetsCount;
public int chunksCount;
public void Add(CellCounts o)
{
bricksCount += o.bricksCount;
probesCount += o.probesCount;
offsetsCount += o.offsetsCount;
chunksCount += o.chunksCount;
}
}
internal int maxSubdivision => simplificationLevels + 1; // we add one for the top subdiv level which is the same size as a cell
internal float minBrickSize => Mathf.Max(0.01f, minDistanceBetweenProbes * 3.0f);
internal bool CompatibleWith(ProbeVolumeAsset otherAsset)
{
return (maxSubdivision == otherAsset.maxSubdivision) && (minBrickSize == otherAsset.minBrickSize) && (cellSizeInBricks == otherAsset.cellSizeInBricks)
&& (chunkSizeInBricks == otherAsset.chunkSizeInBricks);
}
internal bool IsInvalid()
{
return maxCellPosition.x < minCellPosition.x || maxCellPosition.y < minCellPosition.y || maxCellPosition.z < minCellPosition.z;
}
public string GetSerializedFullPath()
{
return m_AssetFullPath;
}
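// Rounds a byte count up to the next multiple of 16 (e.g. AlignUp16(20) == 32); the baked data blobs are assumed to be 16-byte aligned per section.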
static int AlignUp16(int count)
{
var alignment = 16;
var remainder = count % alignment;
return count + (remainder == 0 ? 0 : alignment - remainder);
}
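// Reinterprets the next 'count' elements of type T from the raw byte blob, then advances the read offset to the next 16-byte boundary.
// Returns a default (uncreated) array if the blob is too short.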
NativeArray<T> GetSubArray<T>(NativeArray<byte> input, int count, ref int offset) where T : struct
{
var size = count * UnsafeUtility.SizeOf<T>();
if (offset + size > input.Length)
return default;
var result = input.GetSubArray(offset, size).Reinterpret<T>(1);
offset = AlignUp16(offset + size);
return result;
}
// The unpacking in Resolve functions is the "inverse" of ProbeBakingGI.WriteBakingCells flattening
internal bool ResolveSharedCellData(TextAsset cellSharedDataAsset, TextAsset cellSupportDataAsset)
{
if (cellSharedDataAsset == null)
return false;
var chunkSizeInProbeCount = chunkSizeInBricks * ProbeBrickPool.kBrickProbeCountTotal;
var totalProbeCount = totalCellCounts.chunksCount * chunkSizeInProbeCount;
// Shared Data
var cellSharedData = cellSharedDataAsset.GetData<byte>();
var offset = 0;
var bricksData = GetSubArray<ProbeBrickIndex.Brick>(cellSharedData, totalCellCounts.bricksCount, ref offset);
var validityNeighMaskData = GetSubArray<byte>(cellSharedData, totalProbeCount, ref offset);
if (offset != AlignUp16(cellSharedData.Length))
return false;
// Support Data
var cellSupportData = cellSupportDataAsset ? cellSupportDataAsset.GetData<byte>() : default;
var hasSupportData = cellSupportData.IsCreated;
offset = 0;
var positionsData = hasSupportData ? GetSubArray<Vector3>(cellSupportData, totalProbeCount, ref offset) : default;
var touchupInteractionData = hasSupportData ? GetSubArray<float>(cellSupportData, totalProbeCount, ref offset) : default;
var validityData = hasSupportData ? GetSubArray<float>(cellSupportData, totalProbeCount, ref offset) : default;
var offsetsData = hasSupportData ? GetSubArray<Vector3>(cellSupportData, totalProbeCount, ref offset) : default;
if (hasSupportData && offset != AlignUp16(cellSupportData.Length))
return false;
// Resolve per cell
var startCounts = new CellCounts();
for (var i = 0; i < cells.Length; ++i)
{
var cell = cells[i];
var counts = cellCounts[i];
var chunksOffset = startCounts.chunksCount * chunkSizeInProbeCount;
var chunksSize = counts.chunksCount * chunkSizeInProbeCount;
cell.bricks = bricksData.GetSubArray(startCounts.bricksCount, counts.bricksCount);
cell.validityNeighMaskData = validityNeighMaskData.GetSubArray(chunksOffset, chunksSize);
if (hasSupportData)
{
cell.probePositions = positionsData.GetSubArray(chunksOffset, chunksSize);
cell.touchupVolumeInteraction = touchupInteractionData.GetSubArray(chunksOffset, chunksSize);
cell.offsetVectors = offsetsData.GetSubArray(chunksOffset, chunksSize);
cell.validity = validityData.GetSubArray(chunksOffset, chunksSize);
}
startCounts.Add(counts);
}
return true;
}
internal bool ResolvePerScenarioCellData(TextAsset cellDataAsset, TextAsset cellOptionalDataAsset, int stateIndex)
{
if (cellDataAsset == null)
return false;
var chunkSizeInProbeCount = chunkSizeInBricks * ProbeBrickPool.kBrickProbeCountTotal;
var totalProbeCount = totalCellCounts.chunksCount * chunkSizeInProbeCount;
// L0L1 Data
var cellData = cellDataAsset.GetData<byte>();
// Three 4-component textures: one with half-precision channels and two with byte channels. Aligned on the size of a chunk.
var offset = 0;
var shL0L1RxData = GetSubArray<ushort>(cellData, totalProbeCount * 4, ref offset);
var shL1GL1RyData = GetSubArray<byte>(cellData, totalProbeCount * 4, ref offset);
var shL1BL1RzData = GetSubArray<byte>(cellData, totalProbeCount * 4, ref offset);
if (offset != AlignUp16(cellData.Length))
return false;
// Optional L2 data
var cellOptionalData = cellOptionalDataAsset ? cellOptionalDataAsset.GetData<byte>() : default;
var hasOptionalData = cellOptionalData.IsCreated;
offset = 0;
var shL2Data_0 = GetSubArray<byte>(cellOptionalData, totalProbeCount * 4, ref offset);
var shL2Data_1 = GetSubArray<byte>(cellOptionalData, totalProbeCount * 4, ref offset);
var shL2Data_2 = GetSubArray<byte>(cellOptionalData, totalProbeCount * 4, ref offset);
var shL2Data_3 = GetSubArray<byte>(cellOptionalData, totalProbeCount * 4, ref offset);
if (hasOptionalData && offset != AlignUp16(cellOptionalData.Length))
return false;
var startCounts = new CellCounts();
for (var i = 0; i < cells.Length; ++i)
{
var counts = cellCounts[i];
var cellState = new ProbeReferenceVolume.Cell.PerScenarioData();
var chunksOffset = startCounts.chunksCount * chunkSizeInProbeCount * 4;
var chunksSize = counts.chunksCount * chunkSizeInProbeCount * 4;
cellState.shL0L1RxData = shL0L1RxData.GetSubArray(chunksOffset, chunksSize);
cellState.shL1GL1RyData = shL1GL1RyData.GetSubArray(chunksOffset, chunksSize);
cellState.shL1BL1RzData = shL1BL1RzData.GetSubArray(chunksOffset, chunksSize);
if (hasOptionalData)
{
cellState.shL2Data_0 = shL2Data_0.GetSubArray(chunksOffset, chunksSize);
cellState.shL2Data_1 = shL2Data_1.GetSubArray(chunksOffset, chunksSize);
cellState.shL2Data_2 = shL2Data_2.GetSubArray(chunksOffset, chunksSize);
cellState.shL2Data_3 = shL2Data_3.GetSubArray(chunksOffset, chunksSize);
}
if (stateIndex == 0)
cells[i].scenario0 = cellState;
else
cells[i].scenario1 = cellState;
startCounts.Add(counts);
}
return true;
}
#if UNITY_EDITOR
public void OnEnable()
{
m_AssetFullPath = UnityEditor.AssetDatabase.GetAssetPath(this);
}
internal const string assetName = "ProbeVolumeData";
public static string GetPath(Scene scene)
=> Path.Combine(GetDirectory(scene.path, scene.name), assetName + ".asset");
public static string GetDirectory(string scenePath, string sceneName)
{
string sceneDir = Path.GetDirectoryName(scenePath);
string assetPath = Path.Combine(sceneDir, sceneName);
if (!UnityEditor.AssetDatabase.IsValidFolder(assetPath))
UnityEditor.AssetDatabase.CreateFolder(sceneDir, sceneName);
return assetPath;
}
public static ProbeVolumeAsset CreateAsset(ProbeVolumePerSceneData data)
{
ProbeVolumeAsset asset = CreateInstance<ProbeVolumeAsset>();
if (data.asset != null) asset.m_AssetFullPath = UnityEditor.AssetDatabase.GetAssetPath(data.asset);
if (string.IsNullOrEmpty(asset.m_AssetFullPath)) asset.m_AssetFullPath = GetPath(data.gameObject.scene);
UnityEditor.AssetDatabase.CreateAsset(asset, asset.m_AssetFullPath);
return asset;
}
internal void StoreProfileData(ProbeReferenceVolumeProfile profile)
{
cellSizeInBricks = profile.cellSizeInBricks;
simplificationLevels = profile.simplificationLevels;
minDistanceBetweenProbes = profile.minDistanceBetweenProbes;
}
internal int GetBakingHashCode()
{
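// Simple hash combine (hash * 23 + next) over the values that identify this bake's configuration.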
int hash = maxCellPosition.GetHashCode();
hash = hash * 23 + minCellPosition.GetHashCode();
hash = hash * 23 + globalBounds.GetHashCode();
hash = hash * 23 + bands.GetHashCode();
hash = hash * 23 + cellSizeInBricks.GetHashCode();
hash = hash * 23 + simplificationLevels.GetHashCode();
hash = hash * 23 + minDistanceBetweenProbes.GetHashCode();
return hash;
}
#endif
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: c1043e08cdc375146bdb853794c4c0fd
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,99 @@
namespace UnityEngine.Rendering
{
[System.Serializable]
internal struct ProbeDilationSettings
{
public bool enableDilation;
public float dilationDistance;
public float dilationValidityThreshold;
public int dilationIterations;
public bool squaredDistWeighting;
internal void SetDefaults()
{
enableDilation = true;
dilationDistance = 1;
dilationValidityThreshold = 0.25f;
dilationIterations = 1;
squaredDistWeighting = true;
}
internal void UpgradeFromTo(ProbeVolumeBakingProcessSettings.SettingsVersion from, ProbeVolumeBakingProcessSettings.SettingsVersion to) { }
}
[System.Serializable]
internal struct VirtualOffsetSettings
{
public bool useVirtualOffset;
[Range(0f, 1f)] public float outOfGeoOffset;
[Range(0f, 2f)] public float searchMultiplier;
[Range(-0.05f, 0f)] public float rayOriginBias;
[Range(4, 24)] public int maxHitsPerRay;
public LayerMask collisionMask;
internal void SetDefaults()
{
useVirtualOffset = true;
outOfGeoOffset = 0.01f;
searchMultiplier = 0.2f;
UpgradeFromTo(ProbeVolumeBakingProcessSettings.SettingsVersion.Initial, ProbeVolumeBakingProcessSettings.SettingsVersion.ThreadedVirtualOffset);
}
internal void UpgradeFromTo(ProbeVolumeBakingProcessSettings.SettingsVersion from, ProbeVolumeBakingProcessSettings.SettingsVersion to)
{
if (from < ProbeVolumeBakingProcessSettings.SettingsVersion.ThreadedVirtualOffset && to >= ProbeVolumeBakingProcessSettings.SettingsVersion.ThreadedVirtualOffset)
{
rayOriginBias = -0.001f;
maxHitsPerRay = 10;
collisionMask = Physics.DefaultRaycastLayers;
}
}
}
// TODO: Use this structure in the actual authoring component rather than just as a means to group output parameters.
[System.Serializable]
internal struct ProbeVolumeBakingProcessSettings
{
internal static ProbeVolumeBakingProcessSettings Default { get { var s = new ProbeVolumeBakingProcessSettings(); s.SetDefaults(); return s; } }
internal enum SettingsVersion
{
Initial,
ThreadedVirtualOffset,
Max,
Current = Max - 1
}
internal ProbeVolumeBakingProcessSettings(ProbeDilationSettings dilationSettings, VirtualOffsetSettings virtualOffsetSettings)
{
m_Version = SettingsVersion.Current;
this.dilationSettings = dilationSettings;
this.virtualOffsetSettings = virtualOffsetSettings;
}
internal void SetDefaults()
{
m_Version = SettingsVersion.Current;
dilationSettings.SetDefaults();
virtualOffsetSettings.SetDefaults();
}
internal void Upgrade()
{
if (m_Version != SettingsVersion.Current)
{
// Debug.Log($"Upgrading probe volume baking process settings from '{m_Version}' to '{SettingsVersion.Current}'.");
dilationSettings.UpgradeFromTo(m_Version, SettingsVersion.Current);
virtualOffsetSettings.UpgradeFromTo(m_Version, SettingsVersion.Current);
m_Version = SettingsVersion.Current;
}
}
[SerializeField] SettingsVersion m_Version;
public ProbeDilationSettings dilationSettings;
public VirtualOffsetSettings virtualOffsetSettings;
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 2ca179cc72ecf4f4fa7d4ed3bb83ffd2
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,82 @@
#ifndef BLEND_STATES
# define BLEND_STATES
#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Common.hlsl"
#ifndef __BUILTINGIUTILITIES_HLSL__
// The real ambient probe evaluation is not needed in this context, so provide a stub that returns 0 when BuiltinGIUtilities is not included.
real3 EvaluateAmbientProbe(real3 normalWS)
{
return 0;
}
#endif
#include "Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolume.hlsl"
struct APVResourcesRW
{
RWTexture3D<float4> L0_L1Rx;
RWTexture3D<float4> L1G_L1Ry;
RWTexture3D<float4> L1B_L1Rz;
RWTexture3D<float4> L2_0;
RWTexture3D<float4> L2_1;
RWTexture3D<float4> L2_2;
RWTexture3D<float4> L2_3;
};
#define LOAD_APV_RES_L1(res, target) \
res.L0_L1Rx = CALL_MERGE_NAME(target, _L0_L1Rx); \
res.L1G_L1Ry = CALL_MERGE_NAME(target, _L1G_L1Ry); \
res.L1B_L1Rz = CALL_MERGE_NAME(target, _L1B_L1Rz);
#define LOAD_APV_RES_L2(res, target) \
res.L2_0 = CALL_MERGE_NAME(target, _L2_0); \
res.L2_1 = CALL_MERGE_NAME(target, _L2_1); \
res.L2_2 = CALL_MERGE_NAME(target, _L2_2); \
res.L2_3 = CALL_MERGE_NAME(target, _L2_3);
#ifndef PROBE_VOLUMES_L2
# define LOAD_APV_RES(res, target) LOAD_APV_RES_L1(res, target)
#else
# define LOAD_APV_RES(res, target) \
LOAD_APV_RES_L1(res, target) \
LOAD_APV_RES_L2(res, target)
#endif
APVSample BlendAPVSamples(APVSample state0, APVSample state1, float factor)
{
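// Linearly interpolate every SH band between the two scenario samples; the result is treated as already decoded.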
APVSample result;
result.L0 = lerp(state0.L0, state1.L0, factor);
result.L1_R = lerp(state0.L1_R, state1.L1_R, factor);
result.L1_G = lerp(state0.L1_G, state1.L1_G, factor);
result.L1_B = lerp(state0.L1_B, state1.L1_B, factor);
#ifdef PROBE_VOLUMES_L2
result.L2_R = lerp(state0.L2_R, state1.L2_R, factor);
result.L2_G = lerp(state0.L2_G, state1.L2_G, factor);
result.L2_B = lerp(state0.L2_B, state1.L2_B, factor);
result.L2_C = lerp(state0.L2_C, state1.L2_C, factor);
#endif
result.status = APV_SAMPLE_STATUS_DECODED;
return result;
}
void EncodeAndStoreAPV(APVResourcesRW apvRes, APVSample apvSample, int3 loc)
{
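// Re-encode the decoded sample and write it back to the pool textures using the same packed layout that sampling expects (L1_R split across the w channels).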
apvSample.Encode();
float4 L0_L1Rx = float4(apvSample.L0, apvSample.L1_R.x);
float4 L1G_L1Ry = float4(apvSample.L1_G, apvSample.L1_R.y);
float4 L1B_L1Rz = float4(apvSample.L1_B, apvSample.L1_R.z);
apvRes.L0_L1Rx [loc].rgba = L0_L1Rx;
apvRes.L1G_L1Ry[loc].rgba = L1G_L1Ry;
apvRes.L1B_L1Rz[loc].rgba = L1B_L1Rz;
#ifdef PROBE_VOLUMES_L2
apvRes.L2_0[loc].rgba = apvSample.L2_R;
apvRes.L2_1[loc].rgba = apvSample.L2_G;
apvRes.L2_2[loc].rgba = apvSample.L2_B;
apvRes.L2_3[loc].rgba = float4(apvSample.L2_C, 0.0f);
#endif
}
#endif

View File

@@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: e478f7f7b57c7ab46a2933b408f63161
ShaderIncludeImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,416 @@
using System.Collections.Generic;
using UnityEngine.SceneManagement;
#if UNITY_EDITOR
using UnityEditor;
using ProbeVolumeWithBoundsList = System.Collections.Generic.List<(UnityEngine.Rendering.ProbeVolume component, UnityEngine.Rendering.ProbeReferenceVolume.Volume volume, UnityEngine.Bounds bounds)>;
#endif
namespace UnityEngine.Rendering
{
struct GIContributors
{
#if UNITY_EDITOR
public struct TerrainContributor
{
public struct TreePrototype
{
public MeshRenderer component;
public Matrix4x4 transform;
public Bounds prefabBounds;
public List<(Matrix4x4 transform, Bounds boundsWS)> instances;
}
public Terrain component;
public Bounds boundsWithTrees;
public Bounds boundsTerrainOnly;
public TreePrototype[] treePrototypes;
}
public List<(Renderer component, Bounds bounds)> renderers;
public List<TerrainContributor> terrains;
public int Count => renderers.Count + terrains.Count;
internal enum ContributorFilter { All, Scene, Selection };
internal static bool ContributesGI(GameObject go) =>
(GameObjectUtility.GetStaticEditorFlags(go) & StaticEditorFlags.ContributeGI) != 0;
internal static Vector3[] m_Vertices = new Vector3[8];
static Bounds TransformBounds(Bounds bounds, Matrix4x4 transform)
{
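// Transform the 8 corners of the local bounds and rebuild an axis-aligned box around the results.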
Vector3 boundsMin = bounds.min, boundsMax = bounds.max;
m_Vertices[0] = new Vector3(boundsMin.x, boundsMin.y, boundsMin.z);
m_Vertices[1] = new Vector3(boundsMax.x, boundsMin.y, boundsMin.z);
m_Vertices[2] = new Vector3(boundsMax.x, boundsMax.y, boundsMin.z);
m_Vertices[3] = new Vector3(boundsMin.x, boundsMax.y, boundsMin.z);
m_Vertices[4] = new Vector3(boundsMin.x, boundsMin.y, boundsMax.z);
m_Vertices[5] = new Vector3(boundsMax.x, boundsMin.y, boundsMax.z);
m_Vertices[6] = new Vector3(boundsMax.x, boundsMax.y, boundsMax.z);
m_Vertices[7] = new Vector3(boundsMin.x, boundsMax.y, boundsMax.z);
Vector3 min = transform.MultiplyPoint(m_Vertices[0]);
Vector3 max = min;
for (int i = 1; i < 8; i++)
{
var point = transform.MultiplyPoint(m_Vertices[i]);
min = Vector3.Min(min, point);
max = Vector3.Max(max, point);
}
Bounds result = default;
result.SetMinMax(min, max);
return result;
}
static internal Matrix4x4 GetTreeInstanceTransform(Terrain terrain, TreeInstance tree)
{
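// TreeInstance.position is normalized over the terrain size; rotation is around Y only and widthScale applies to both X and Z.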
var position = terrain.GetPosition() + Vector3.Scale(tree.position, terrain.terrainData.size);
var rotation = Quaternion.Euler(0, tree.rotation * Mathf.Rad2Deg, 0);
var scale = new Vector3(tree.widthScale, tree.heightScale, tree.widthScale);
return Matrix4x4.TRS(position, rotation, scale);
}
public static GIContributors Find(ContributorFilter filter, Scene? scene = null)
{
if (filter == ContributorFilter.Scene && scene == null)
return default;
Profiling.Profiler.BeginSample("GIContributors.Find");
var contributors = new GIContributors()
{
renderers = new(),
terrains = new(),
};
void PushRenderer(Renderer renderer)
{
if (!ContributesGI(renderer.gameObject) || !renderer.gameObject.activeInHierarchy || !renderer.enabled)
return;
var bounds = renderer.bounds;
bounds.size += Vector3.one * 0.01f;
contributors.renderers.Add((renderer, bounds));
}
void PushTerrain(Terrain terrain)
{
if (!ContributesGI(terrain.gameObject) || !terrain.gameObject.activeInHierarchy || !terrain.enabled)
return;
var terrainData = terrain.terrainData;
var terrainBounds = terrainData.bounds;
terrainBounds.center += terrain.GetPosition();
terrainBounds.size += Vector3.one * 0.01f;
var prototypes = terrainData.treePrototypes;
var treePrototypes = new TerrainContributor.TreePrototype[prototypes.Length];
for (int i = 0; i < prototypes.Length; i++)
{
MeshRenderer renderer = null;
var prefab = prototypes[i].prefab;
if (prefab == null)
continue;
if (prefab.TryGetComponent<LODGroup>(out var lodGroup))
{
var groups = lodGroup.GetLODs();
if (groups.Length != 0 && groups[0].renderers.Length != 0)
renderer = groups[0].renderers[0] as MeshRenderer;
}
if (renderer == null)
renderer = prefab.GetComponent<MeshRenderer>();
if (renderer != null && renderer.enabled && ContributesGI(renderer.gameObject))
{
var tr = prefab.transform;
// For some reason, tree instances are not affected by the rotation and position of the prefab root,
// but they are affected by its scale, and by any other transform in the hierarchy.
var transform = Matrix4x4.TRS(tr.position, tr.rotation, Vector3.one).inverse * renderer.localToWorldMatrix;
// Compute the prefab bounds. They are used to find the highest tree (to expand the terrain bounds)
// and to approximate the bounds of tree instances for culling during voxelization.
var prefabBounds = TransformBounds(renderer.localBounds, transform);
treePrototypes[i] = new TerrainContributor.TreePrototype()
{
component = renderer,
transform = transform,
prefabBounds = prefabBounds,
instances = new List<(Matrix4x4 transform, Bounds boundsWS)>(),
};
}
}
Vector3 totalMax = terrainBounds.max;
foreach (var tree in terrainData.treeInstances)
{
var prototype = treePrototypes[tree.prototypeIndex];
if (prototype.component == null)
continue;
// Approximate the instance bounds, since the rotation can only be around the Y axis
var transform = GetTreeInstanceTransform(terrain, tree);
var boundsCenter = transform.MultiplyPoint(prototype.prefabBounds.center);
var boundsSize = prototype.prefabBounds.size;
float maxTreeWidth = Mathf.Max(boundsSize.x, boundsSize.z) * tree.widthScale * Mathf.Sqrt(2.0f);
boundsSize = new Vector3(maxTreeWidth, boundsSize.y * tree.heightScale, maxTreeWidth);
prototype.instances.Add((transform, new Bounds(boundsCenter, boundsSize)));
totalMax.y = Mathf.Max(boundsCenter.y + boundsSize.y * 0.5f, totalMax.y);
}
var totalBounds = new Bounds();
totalBounds.SetMinMax(terrainBounds.min, totalMax);
contributors.terrains.Add(new TerrainContributor()
{
component = terrain,
boundsWithTrees = totalBounds,
boundsTerrainOnly = terrainBounds,
treePrototypes = treePrototypes,
});
}
if (filter == ContributorFilter.Selection)
{
var transforms = Selection.transforms;
foreach (var transform in transforms)
{
var childTransforms = transform.gameObject.GetComponentsInChildren<Transform>();
foreach (var child in childTransforms)
{
if (child.gameObject.TryGetComponent(out Renderer renderer))
PushRenderer(renderer);
else if (child.gameObject.TryGetComponent(out Terrain terrain))
PushTerrain(terrain);
}
}
}
else
{
var renderers = Object.FindObjectsOfType<Renderer>();
Profiling.Profiler.BeginSample($"Find Renderers ({renderers.Length})");
foreach (var renderer in renderers)
{
if (filter != ContributorFilter.Scene || renderer.gameObject.scene == scene)
PushRenderer(renderer);
}
Profiling.Profiler.EndSample();
var terrains = Object.FindObjectsOfType<Terrain>();
Profiling.Profiler.BeginSample($"Find Terrains ({terrains.Length})");
foreach (var terrain in terrains)
{
if (filter != ContributorFilter.Scene || terrain.gameObject.scene == scene)
PushTerrain(terrain);
}
Profiling.Profiler.EndSample();
}
Profiling.Profiler.EndSample();
return contributors;
}
static bool DiscardedByProbeVolume(ProbeVolume pv, ProbeReferenceVolumeProfile profile, float boundsVolume, int layerMask)
{
if (profile == null)
return false;
float minRendererBoundingBoxSize = profile.minRendererVolumeSize;
var renderersLayerMask = profile.renderersLayerMask;
if (pv.overrideRendererFilters)
{
minRendererBoundingBoxSize = pv.minRendererVolumeSize;
renderersLayerMask = pv.objectLayerMask;
}
// Skip renderers that have a smaller volume than the min volume size from the profile or probe volume component
// And renderers whose layer mask is excluded
return (boundsVolume < minRendererBoundingBoxSize) || (layerMask & renderersLayerMask) == 0;
}
public GIContributors Filter(ProbeReferenceVolumeProfile profile, Bounds cellBounds, ProbeVolumeWithBoundsList probeVolumes)
{
Profiling.Profiler.BeginSample("Filter GIContributors");
var contributors = new GIContributors()
{
renderers = new(),
terrains = new(),
};
Profiling.Profiler.BeginSample($"Filter Renderers ({renderers.Count})");
foreach (var renderer in renderers)
{
if (!cellBounds.Intersects(renderer.bounds))
continue;
var volumeSize = renderer.bounds.size;
float rendererBoundsVolume = volumeSize.x * volumeSize.y * volumeSize.z;
int rendererLayerMask = 1 << renderer.component.gameObject.layer;
foreach (var probeVolume in probeVolumes)
{
if (DiscardedByProbeVolume(probeVolume.component, profile, rendererBoundsVolume, rendererLayerMask) ||
!ProbeVolumePositioning.OBBAABBIntersect(probeVolume.volume, renderer.bounds, probeVolume.bounds))
continue;
contributors.renderers.Add(renderer);
break;
}
}
Profiling.Profiler.EndSample();
Profiling.Profiler.BeginSample($"Filter Terrains ({terrains.Count})");
foreach (var terrain in terrains)
{
if (!cellBounds.Intersects(terrain.boundsWithTrees))
continue;
var volumeSize = terrain.boundsWithTrees.size;
float terrainBoundsVolume = volumeSize.x * volumeSize.y * volumeSize.z;
int terrainLayerMask = 1 << terrain.component.gameObject.layer;
// Find if terrain with trees hits at least one PV
bool contributes = false;
foreach (var probeVolume in probeVolumes)
{
if (DiscardedByProbeVolume(probeVolume.component, profile, terrainBoundsVolume, terrainLayerMask) ||
!ProbeVolumePositioning.OBBAABBIntersect(probeVolume.volume, terrain.boundsWithTrees, probeVolume.bounds))
continue;
contributes = true;
break;
}
if (!contributes)
continue;
// Cull trees - this iterates over all instances for each probe volume and may be very slow
var probeVolumesForProto = new List<Bounds>();
Vector3 totalMax = terrain.boundsTerrainOnly.max;
var treePrototypes = new TerrainContributor.TreePrototype[terrain.treePrototypes.Length];
for (int i = 0; i < treePrototypes.Length; i++)
{
var srcProto = terrain.treePrototypes[i];
// This prototype may have been previously filtered out
if (srcProto.component == null)
continue;
// Find which pv may intersect instances of this proto
probeVolumesForProto.Clear();
int prototypeLayerMask = 1 << srcProto.component.gameObject.layer;
foreach (var probeVolume in probeVolumes)
{
// Ignore the bounds volume check for trees and assume they are always big enough;
// otherwise we would have to do the more involved math to compute the actual tree bounds.
if (!DiscardedByProbeVolume(probeVolume.component, profile, float.MaxValue, prototypeLayerMask))
probeVolumesForProto.Add(probeVolume.bounds);
}
if (probeVolumesForProto.Count == 0)
continue;
treePrototypes[i] = new TerrainContributor.TreePrototype()
{
component = srcProto.component,
transform = srcProto.transform,
prefabBounds = srcProto.prefabBounds,
instances = new List<(Matrix4x4 transform, Bounds boundsWS)>(),
};
// Cull tree instances
for (int j = 0; j < srcProto.instances.Count; j++)
{
var treeBounds = srcProto.instances[j].boundsWS;
if (!treeBounds.Intersects(cellBounds))
continue;
foreach (var pvAABB in probeVolumesForProto)
{
if (treeBounds.Intersects(pvAABB))
{
treePrototypes[i].instances.Add(srcProto.instances[j]);
totalMax.y = Mathf.Max(treeBounds.max.y, totalMax.y);
break;
}
}
}
}
// Recompute terrain bounds by excluding trees that were filtered out
var totalBounds = new Bounds();
totalBounds.SetMinMax(terrain.boundsTerrainOnly.min, totalMax);
var terrainContrib = new TerrainContributor()
{
component = terrain.component,
boundsWithTrees = totalBounds,
boundsTerrainOnly = terrain.boundsTerrainOnly,
treePrototypes = treePrototypes,
};
contributors.terrains.Add(terrainContrib);
}
Profiling.Profiler.EndSample();
Profiling.Profiler.EndSample();
return contributors;
}
public GIContributors FilterLayerMaskOnly(LayerMask layerMask)
{
Profiling.Profiler.BeginSample("Filter GIContributors LayerMask");
var contributors = new GIContributors()
{
renderers = new(),
terrains = new(),
};
foreach (var renderer in renderers)
{
int rendererLayerMask = 1 << renderer.component.gameObject.layer;
if ((rendererLayerMask & layerMask) != 0)
contributors.renderers.Add(renderer);
}
foreach (var terrain in terrains)
{
int terrainLayerMask = 1 << terrain.component.gameObject.layer;
if ((terrainLayerMask & layerMask) != 0)
{
// Filter out trees
var filteredPrototypes = new List<TerrainContributor.TreePrototype>();
foreach (var treeProto in terrain.treePrototypes)
{
int treeProtoLayerMask = 1 << treeProto.component.gameObject.layer;
if ((treeProtoLayerMask & layerMask) != 0)
filteredPrototypes.Add(treeProto);
}
var terrainContrib = new TerrainContributor()
{
component = terrain.component,
boundsWithTrees = terrain.boundsWithTrees,
boundsTerrainOnly = terrain.boundsTerrainOnly,
treePrototypes = filteredPrototypes.ToArray(),
};
contributors.terrains.Add(terrainContrib);
}
}
Profiling.Profiler.EndSample();
return contributors;
}
#endif
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: d6b61523d2d66384485b5ef359496dd9
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,274 @@
using System;
using System.Collections.Generic;
#if UNITY_EDITOR
using System.IO;
using UnityEditor;
#endif
namespace UnityEngine.Rendering
{
/// <summary>
/// A component that stores baked probe volume state and data references. Normally hidden from the user.
/// </summary>
[ExecuteAlways]
[AddComponentMenu("")] // Hide.
public class ProbeVolumePerSceneData : MonoBehaviour, ISerializationCallbackReceiver
{
[Serializable]
internal struct PerScenarioData
{
public int sceneHash;
public TextAsset cellDataAsset; // Contains L0 L1 SH data
public TextAsset cellOptionalDataAsset; // Contains L2 SH data
}
[Serializable]
struct SerializablePerScenarioDataItem
{
public string scenario;
public PerScenarioData data;
}
[SerializeField] internal ProbeVolumeAsset asset;
[SerializeField] internal TextAsset cellSharedDataAsset; // Contains bricks and validity data
[SerializeField] internal TextAsset cellSupportDataAsset; // Contains debug data
[SerializeField] List<SerializablePerScenarioDataItem> serializedScenarios = new();
internal Dictionary<string, PerScenarioData> scenarios = new();
bool assetLoaded = false;
string activeScenario = null, otherScenario = null;
/// <summary>
/// OnAfterDeserialize implementation.
/// </summary>
void ISerializationCallbackReceiver.OnAfterDeserialize()
{
scenarios.Clear();
foreach (var scenarioData in serializedScenarios)
scenarios.Add(scenarioData.scenario, scenarioData.data);
}
/// <summary>
/// OnBeforeSerialize implementation.
/// </summary>
void ISerializationCallbackReceiver.OnBeforeSerialize()
{
serializedScenarios.Clear();
foreach (var kvp in scenarios)
{
serializedScenarios.Add(new SerializablePerScenarioDataItem()
{
scenario = kvp.Key,
data = kvp.Value,
});
}
}
#if UNITY_EDITOR
void DeleteAsset(Object asset)
{
if (asset != null && AssetDatabase.TryGetGUIDAndLocalFileIdentifier(asset, out string guid, out long instanceID))
{
var assetPath = AssetDatabase.GUIDToAssetPath(guid);
AssetDatabase.DeleteAsset(assetPath);
}
}
#endif
internal void Clear()
{
QueueAssetRemoval();
#if UNITY_EDITOR
try
{
AssetDatabase.StartAssetEditing();
DeleteAsset(asset);
DeleteAsset(cellSharedDataAsset);
DeleteAsset(cellSupportDataAsset);
foreach (var scenarioData in scenarios.Values)
{
DeleteAsset(scenarioData.cellDataAsset);
DeleteAsset(scenarioData.cellOptionalDataAsset);
}
}
finally
{
AssetDatabase.StopAssetEditing();
AssetDatabase.Refresh();
EditorUtility.SetDirty(this);
}
#endif
scenarios.Clear();
}
internal void RemoveScenario(string scenario)
{
#if UNITY_EDITOR
if (scenarios.TryGetValue(scenario, out var scenarioData))
{
AssetDatabase.DeleteAsset(AssetDatabase.GetAssetPath(scenarioData.cellDataAsset));
AssetDatabase.DeleteAsset(AssetDatabase.GetAssetPath(scenarioData.cellOptionalDataAsset));
EditorUtility.SetDirty(this);
}
#endif
scenarios.Remove(scenario);
}
internal void RenameScenario(string scenario, string newName)
{
if (!scenarios.TryGetValue(scenario, out var data))
return;
scenarios.Remove(scenario);
scenarios.Add(newName, data);
#if UNITY_EDITOR
EditorUtility.SetDirty(this);
var baseName = ProbeVolumeAsset.assetName + "-" + newName;
void RenameAsset(Object asset, string extension)
{
var oldPath = AssetDatabase.GetAssetPath(asset);
AssetDatabase.RenameAsset(oldPath, baseName + extension);
}
RenameAsset(data.cellDataAsset, ".CellData.bytes");
RenameAsset(data.cellOptionalDataAsset, ".CellOptionalData.bytes");
#endif
}
internal bool ResolveCells() => ResolveSharedCellData() && ResolvePerScenarioCellData();
internal bool ResolveSharedCellData() => asset != null && asset.ResolveSharedCellData(cellSharedDataAsset, cellSupportDataAsset);
bool ResolvePerScenarioCellData()
{
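// The active scenario resolves into slot 0; the other scenario, if any, resolves into the next free slot so the two can be blended.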
int loadedCount = 0, targetLoaded = otherScenario == null ? 1 : 2;
if (activeScenario != null && scenarios.TryGetValue(activeScenario, out var data0))
{
if (asset.ResolvePerScenarioCellData(data0.cellDataAsset, data0.cellOptionalDataAsset, 0))
loadedCount++;
}
if (otherScenario != null && scenarios.TryGetValue(otherScenario, out var data1))
{
if (asset.ResolvePerScenarioCellData(data1.cellDataAsset, data1.cellOptionalDataAsset, loadedCount))
loadedCount++;
}
for (var i = 0; i < asset.cells.Length; ++i)
asset.cells[i].hasTwoScenarios = loadedCount == 2;
return loadedCount == targetLoaded;
}
internal void QueueAssetLoading()
{
if (asset == null || asset.IsInvalid() || !ResolvePerScenarioCellData())
return;
var refVol = ProbeReferenceVolume.instance;
refVol.AddPendingAssetLoading(asset);
assetLoaded = true;
#if UNITY_EDITOR
if (refVol.sceneData != null)
refVol.bakingProcessSettings = refVol.sceneData.GetBakeSettingsForScene(gameObject.scene);
#endif
}
internal void QueueAssetRemoval()
{
if (asset != null)
ProbeReferenceVolume.instance.AddPendingAssetRemoval(asset);
assetLoaded = false;
}
void OnEnable()
{
ProbeReferenceVolume.instance.RegisterPerSceneData(this);
if (ProbeReferenceVolume.instance.sceneData != null)
Initialize();
// otherwise baking state will be initialized in ProbeReferenceVolume.Initialize when sceneData is loaded
}
void OnDisable()
{
QueueAssetRemoval();
activeScenario = otherScenario = null;
ProbeReferenceVolume.instance.UnregisterPerSceneData(this);
}
internal void Initialize()
{
ResolveSharedCellData();
QueueAssetRemoval();
activeScenario = ProbeReferenceVolume.instance.sceneData.lightingScenario;
otherScenario = ProbeReferenceVolume.instance.sceneData.otherScenario;
QueueAssetLoading();
}
internal void UpdateActiveScenario(string activeScenario, string otherScenario)
{
if (asset == null)
return;
// If we just change the scenario, there is no need to queue anything:
// just load the cells from disk and wait for blending to stream the updates to the GPU.
this.activeScenario = activeScenario;
this.otherScenario = otherScenario;
if (!assetLoaded)
QueueAssetLoading();
else if (!ResolvePerScenarioCellData())
QueueAssetRemoval();
}
#if UNITY_EDITOR
internal string GetAssetPathSafe(Object asset)
{
if (asset != null && AssetDatabase.TryGetGUIDAndLocalFileIdentifier(asset, out string guid, out long instanceID))
return AssetDatabase.GUIDToAssetPath(guid);
return "";
}
internal void GetBlobFileNames(out string cellDataFilename, out string cellOptionalDataFilename, out string cellSharedDataFilename, out string cellSupportDataFilename)
{
var scenario = ProbeReferenceVolume.instance.lightingScenario;
string basePath = Path.Combine(ProbeVolumeAsset.GetDirectory(gameObject.scene.path, gameObject.scene.name), ProbeVolumeAsset.assetName);
string GetOrCreateFileName(Object o, string extension)
{
var res = AssetDatabase.GetAssetPath(o);
if (string.IsNullOrEmpty(res)) res = basePath + extension;
return res;
}
cellDataFilename = GetOrCreateFileName(scenarios[scenario].cellDataAsset, "-" + scenario + ".CellData.bytes");
cellOptionalDataFilename = GetOrCreateFileName(scenarios[scenario].cellOptionalDataAsset, "-" + scenario + ".CellOptionalData.bytes");
cellSharedDataFilename = GetOrCreateFileName(cellSharedDataAsset, ".CellSharedData.bytes");
cellSupportDataFilename = GetOrCreateFileName(cellSupportDataAsset, ".CellSupportData.bytes");
}
// Returns the file size in bytes
long GetFileSize(string path) => File.Exists(path) ? new FileInfo(path).Length : 0;
internal long GetDiskSizeOfSharedData()
{
return GetFileSize(GetAssetPathSafe(cellSharedDataAsset)) + GetFileSize(GetAssetPathSafe(cellSupportDataAsset));
}
internal long GetDiskSizeOfScenarioData(string scenario)
{
if (scenario == null || !scenarios.TryGetValue(scenario, out var data))
return 0;
return GetFileSize(GetAssetPathSafe(data.cellDataAsset)) + GetFileSize(GetAssetPathSafe(data.cellOptionalDataAsset));
}
/// <summary>
/// Call this function during OnProcessScene to strip debug support data from builds.
/// </summary>
public void StripSupportData()
{
cellSupportDataAsset = null;
}
#endif
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a83d2f7ae04ab6f4f99b0d85377be998
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,124 @@
#if UNITY_EDITOR
namespace UnityEngine.Rendering
{
internal static class ProbeVolumePositioning
{
internal static Vector3[] m_Axes = new Vector3[6];
internal static Vector3[] m_AABBCorners = new Vector3[8];
public static bool OBBIntersect(in ProbeReferenceVolume.Volume a, in ProbeReferenceVolume.Volume b)
{
// First we test whether the bounding spheres intersect; only then do we run the more complex OBB test.
a.CalculateCenterAndSize(out var aCenter, out var aSize);
b.CalculateCenterAndSize(out var bCenter, out var bSize);
var aRadius = aSize.sqrMagnitude / 2.0f;
var bRadius = bSize.sqrMagnitude / 2.0f;
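// sqrMagnitude / 2 is twice the squared bounding-sphere radius, so this comparison is a conservative rejection: it only skips the OBB test when the spheres definitely do not overlap.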
if (Vector3.SqrMagnitude(aCenter - bCenter) > aRadius + bRadius)
return false;
m_Axes[0] = a.X.normalized;
m_Axes[1] = a.Y.normalized;
m_Axes[2] = a.Z.normalized;
m_Axes[3] = b.X.normalized;
m_Axes[4] = b.Y.normalized;
m_Axes[5] = b.Z.normalized;
for (int i = 0; i < 6; i++)
{
Vector2 aProj = ProjectOBB(in a, m_Axes[i]);
Vector2 bProj = ProjectOBB(in b, m_Axes[i]);
if (aProj.y < bProj.x || bProj.y < aProj.x)
{
return false;
}
}
return true;
}
// Test between an OBB and an AABB. The AABB of the OBB is passed in to avoid recalculating it.
public static bool OBBAABBIntersect(in ProbeReferenceVolume.Volume a, in Bounds b, in Bounds aAABB)
{
// First perform fast AABB test
if (!aAABB.Intersects(b))
return false;
// Perform complex OBB test
Vector3 boundsMin = b.min, boundsMax = b.max;
m_AABBCorners[0] = new Vector3(boundsMin.x, boundsMin.y, boundsMin.z);
m_AABBCorners[1] = new Vector3(boundsMax.x, boundsMin.y, boundsMin.z);
m_AABBCorners[2] = new Vector3(boundsMax.x, boundsMax.y, boundsMin.z);
m_AABBCorners[3] = new Vector3(boundsMin.x, boundsMax.y, boundsMin.z);
m_AABBCorners[4] = new Vector3(boundsMin.x, boundsMin.y, boundsMax.z);
m_AABBCorners[5] = new Vector3(boundsMax.x, boundsMin.y, boundsMax.z);
m_AABBCorners[6] = new Vector3(boundsMax.x, boundsMax.y, boundsMax.z);
m_AABBCorners[7] = new Vector3(boundsMin.x, boundsMax.y, boundsMax.z);
m_Axes[0] = a.X.normalized;
m_Axes[1] = a.Y.normalized;
m_Axes[2] = a.Z.normalized;
for (int i = 0; i < 3; i++)
{
Vector2 aProj = ProjectOBB(in a, m_Axes[i]);
Vector2 bProj = ProjectAABB(m_AABBCorners, m_Axes[i]);
if (aProj.y < bProj.x || bProj.y < aProj.x)
{
return false;
}
}
return true;
}
static Vector2 ProjectOBB(in ProbeReferenceVolume.Volume a, Vector3 axis)
{
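// Project the 8 corners of the OBB onto the axis and return the (min, max) interval.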
float min = Vector3.Dot(axis, a.corner);
float max = min;
for (int x = 0; x < 2; x++)
{
for (int y = 0; y < 2; y++)
{
for (int z = 0; z < 2; z++)
{
Vector3 vert = a.corner + a.X * x + a.Y * y + a.Z * z;
float proj = Vector3.Dot(axis, vert);
if (proj < min)
{
min = proj;
}
else if (proj > max)
{
max = proj;
}
}
}
}
return new Vector2(min, max);
}
static Vector2 ProjectAABB(in Vector3[] corners, Vector3 axis)
{
float min = Vector3.Dot(axis, corners[0]);
float max = min;
for (int i = 1; i < 8; i++)
{
float proj = Vector3.Dot(axis, corners[i]);
if (proj < min) min = proj;
else if (proj > max) max = proj;
}
return new Vector2(min, max);
}
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a55ec696682683040ab8b67b9175f1a0
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,596 @@
using System.Linq;
using System.Reflection;
using System.Collections.Generic;
using UnityEngine.SceneManagement;
#if UNITY_EDITOR
using UnityEditor;
#endif
namespace UnityEngine.Rendering
{
// Add Profile and baking settings.
/// <summary> A class containing info about the bounds defined by the probe volumes in various scenes. </summary>
[System.Serializable]
public class ProbeVolumeSceneData : ISerializationCallbackReceiver
{
static PropertyInfo s_SceneGUID = typeof(Scene).GetProperty("guid", System.Reflection.BindingFlags.NonPublic | BindingFlags.Instance);
static internal string GetSceneGUID(Scene scene)
{
Debug.Assert(s_SceneGUID != null, "Reflection for scene GUID failed");
return (string)s_SceneGUID.GetValue(scene);
}
[System.Serializable]
struct SerializableBoundItem
{
[SerializeField] public string sceneGUID;
[SerializeField] public Bounds bounds;
}
[System.Serializable]
struct SerializableHasPVItem
{
[SerializeField] public string sceneGUID;
[SerializeField] public bool hasProbeVolumes;
}
[System.Serializable]
struct SerializablePVProfile
{
[SerializeField] public string sceneGUID;
[SerializeField] public ProbeReferenceVolumeProfile profile;
}
[System.Serializable]
struct SerializablePVBakeSettings
{
public string sceneGUID;
public ProbeVolumeBakingProcessSettings settings;
}
[System.Serializable]
internal class BakingSet
{
public string name;
public List<string> sceneGUIDs = new List<string>();
public ProbeVolumeBakingProcessSettings settings;
public ProbeReferenceVolumeProfile profile;
public List<string> lightingScenarios = new List<string>();
internal string CreateScenario(string name)
{
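// Ensure the scenario name is unique within this baking set by appending an increasing index.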
if (lightingScenarios.Contains(name))
{
string renamed;
int index = 1;
do
renamed = $"{name} ({index++})";
while (lightingScenarios.Contains(renamed));
name = renamed;
}
lightingScenarios.Add(name);
return name;
}
internal bool RemoveScenario(string name)
{
return lightingScenarios.Remove(name);
}
}
[SerializeField] List<SerializableBoundItem> serializedBounds;
[SerializeField] List<SerializableHasPVItem> serializedHasVolumes;
[SerializeField] List<SerializablePVProfile> serializedProfiles;
[SerializeField] List<SerializablePVBakeSettings> serializedBakeSettings;
[SerializeField] List<BakingSet> serializedBakingSets;
internal Object parentAsset = null;
internal string parentSceneDataPropertyName;
/// <summary> A dictionary containing the Bounds defined by probe volumes for each scene (scene path is the key of the dictionary). </summary>
public Dictionary<string, Bounds> sceneBounds;
internal Dictionary<string, bool> hasProbeVolumes;
internal Dictionary<string, ProbeReferenceVolumeProfile> sceneProfiles;
internal Dictionary<string, ProbeVolumeBakingProcessSettings> sceneBakingSettings;
internal List<BakingSet> bakingSets;
[SerializeField] string m_LightingScenario = ProbeReferenceVolume.defaultLightingScenario;
string m_OtherScenario = null;
float m_ScenarioBlendingFactor = 0.0f;
internal string lightingScenario => m_LightingScenario;
internal string otherScenario => m_OtherScenario;
internal float scenarioBlendingFactor => m_ScenarioBlendingFactor;
internal void SetActiveScenario(string scenario)
{
if (m_LightingScenario == scenario && m_ScenarioBlendingFactor == 0.0f)
return;
m_LightingScenario = scenario;
m_OtherScenario = null;
m_ScenarioBlendingFactor = 0.0f;
foreach (var data in ProbeReferenceVolume.instance.perSceneDataList)
data.UpdateActiveScenario(m_LightingScenario, m_OtherScenario);
if (ProbeReferenceVolume.instance.enableScenarioBlending)
{
// Trigger blending system to replace old cells with the one from the new active scenario.
// Although we technically don't need blending for that, it is better than unloading all cells
// because it will replace them progressively. There is no real performance cost to using blending
// rather than regular load thanks to the bypassBlending branch in AddBlendingBricks.
ProbeReferenceVolume.instance.ScenarioBlendingChanged(true);
}
else
ProbeReferenceVolume.instance.UnloadAllCells();
}
internal void BlendLightingScenario(string otherScenario, float blendingFactor)
{
if (!ProbeReferenceVolume.instance.enableScenarioBlending)
{
if (!ProbeBrickBlendingPool.isSupported)
Debug.LogError("Blending between lighting scenarios is not supported by this render pipeline.");
else
Debug.LogError("Blending between lighting scenarios is disabled in the render pipeline settings.");
return;
}
blendingFactor = Mathf.Clamp01(blendingFactor);
if (otherScenario == m_LightingScenario || string.IsNullOrEmpty(otherScenario))
otherScenario = null;
if (otherScenario == null)
blendingFactor = 0.0f;
if (otherScenario == m_OtherScenario && Mathf.Approximately(blendingFactor, m_ScenarioBlendingFactor))
return;
bool scenarioChanged = otherScenario != m_OtherScenario;
m_OtherScenario = otherScenario;
m_ScenarioBlendingFactor = blendingFactor;
if (scenarioChanged)
{
foreach (var data in ProbeReferenceVolume.instance.perSceneDataList)
data.UpdateActiveScenario(m_LightingScenario, m_OtherScenario);
}
ProbeReferenceVolume.instance.ScenarioBlendingChanged(scenarioChanged);
}
/// <summary>
/// Constructor for ProbeVolumeSceneData.
/// </summary>
/// <param name="parentAsset">The asset holding this ProbeVolumeSceneData, it will be dirtied every time scene bounds or settings are changed.</param>
/// <param name="parentSceneDataPropertyName">The name of the property holding the ProbeVolumeSceneData in the parentAsset.</param>
public ProbeVolumeSceneData(Object parentAsset, string parentSceneDataPropertyName)
{
this.parentAsset = parentAsset;
this.parentSceneDataPropertyName = parentSceneDataPropertyName;
sceneBounds = new Dictionary<string, Bounds>();
hasProbeVolumes = new Dictionary<string, bool>();
sceneProfiles = new Dictionary<string, ProbeReferenceVolumeProfile>();
sceneBakingSettings = new Dictionary<string, ProbeVolumeBakingProcessSettings>();
bakingSets = new List<BakingSet>();
serializedBounds = new List<SerializableBoundItem>();
serializedHasVolumes = new List<SerializableHasPVItem>();
serializedProfiles = new List<SerializablePVProfile>();
serializedBakeSettings = new List<SerializablePVBakeSettings>();
UpdateBakingSets();
}
/// <summary>Set a reference to the object holding this ProbeVolumeSceneData.</summary>
/// <param name="parent">The object holding this ProbeVolumeSceneData, it will be dirtied every time scene bounds or settings are changed. </param>
/// <param name="parentSceneDataPropertyName">The name of the property holding the ProbeVolumeSceneData in the parentAsset.</param>
public void SetParentObject(Object parent, string parentSceneDataPropertyName)
{
parentAsset = parent;
this.parentSceneDataPropertyName = parentSceneDataPropertyName;
UpdateBakingSets();
}
/// <summary>
/// OnAfterDeserialize implementation.
/// </summary>
public void OnAfterDeserialize()
{
// We haven't initialized the bounds, no need to do anything here.
if (serializedBounds == null || serializedHasVolumes == null ||
serializedProfiles == null || serializedBakeSettings == null) return;
sceneBounds = new Dictionary<string, Bounds>();
hasProbeVolumes = new Dictionary<string, bool>();
sceneProfiles = new Dictionary<string, ProbeReferenceVolumeProfile>();
sceneBakingSettings = new Dictionary<string, ProbeVolumeBakingProcessSettings>();
bakingSets = new List<BakingSet>();
foreach (var boundItem in serializedBounds)
{
sceneBounds.Add(boundItem.sceneGUID, boundItem.bounds);
}
foreach (var boundItem in serializedHasVolumes)
{
hasProbeVolumes.Add(boundItem.sceneGUID, boundItem.hasProbeVolumes);
}
foreach (var profileItem in serializedProfiles)
{
sceneProfiles.Add(profileItem.sceneGUID, profileItem.profile);
}
foreach (var settingsItem in serializedBakeSettings)
{
sceneBakingSettings.Add(settingsItem.sceneGUID, settingsItem.settings);
}
if (string.IsNullOrEmpty(m_LightingScenario))
m_LightingScenario = ProbeReferenceVolume.defaultLightingScenario;
foreach (var set in serializedBakingSets)
{
// Ensure baking set settings are up to date
set.settings.Upgrade();
bakingSets.Add(set);
}
if (m_OtherScenario == "")
m_OtherScenario = null;
}
// This function must not be called during the serialization (because of asset creation)
void UpdateBakingSets()
{
foreach (var set in serializedBakingSets)
{
// Small migration code to ensure that old sets have correct settings
if (set.profile == null)
InitializeBakingSet(set, set.name);
if (set.lightingScenarios.Count == 0)
InitializeScenarios(set);
}
SyncBakingSetSettings();
}
/// <summary>
/// OnBeforeSerialize implementation.
/// </summary>
public void OnBeforeSerialize()
{
// We haven't initialized the bounds, no need to do anything here.
if (sceneBounds == null || hasProbeVolumes == null || sceneBakingSettings == null || sceneProfiles == null ||
serializedBounds == null || serializedHasVolumes == null || serializedBakeSettings == null || serializedProfiles == null
|| serializedBakingSets == null) return;
serializedBounds.Clear();
serializedHasVolumes.Clear();
serializedProfiles.Clear();
serializedBakeSettings.Clear();
serializedBakingSets.Clear();
foreach (var k in sceneBounds.Keys)
{
SerializableBoundItem item;
item.sceneGUID = k;
item.bounds = sceneBounds[k];
serializedBounds.Add(item);
}
foreach (var k in hasProbeVolumes.Keys)
{
SerializableHasPVItem item;
item.sceneGUID = k;
item.hasProbeVolumes = hasProbeVolumes[k];
serializedHasVolumes.Add(item);
}
foreach (var k in sceneBakingSettings.Keys)
{
SerializablePVBakeSettings item;
item.sceneGUID = k;
item.settings = sceneBakingSettings[k];
serializedBakeSettings.Add(item);
}
foreach (var k in sceneProfiles.Keys)
{
SerializablePVProfile item;
item.sceneGUID = k;
item.profile = sceneProfiles[k];
serializedProfiles.Add(item);
}
foreach (var set in bakingSets)
serializedBakingSets.Add(set);
}
internal BakingSet CreateNewBakingSet(string name)
{
BakingSet set = new BakingSet();
// Initialize new baking set settings
InitializeBakingSet(set, name);
bakingSets.Add(set);
return set;
}
void InitializeBakingSet(BakingSet set, string name)
{
var newProfile = ScriptableObject.CreateInstance<ProbeReferenceVolumeProfile>();
#if UNITY_EDITOR
var path = AssetDatabase.GenerateUniqueAssetPath($"Assets/{name}.asset");
AssetDatabase.CreateAsset(newProfile, path);
#endif
set.name = newProfile.name;
set.profile = newProfile;
set.settings = ProbeVolumeBakingProcessSettings.Default;
InitializeScenarios(set);
}
void InitializeScenarios(BakingSet set)
{
set.lightingScenarios = new List<string>() { ProbeReferenceVolume.defaultLightingScenario };
}
internal void SyncBakingSetSettings()
{
// Sync all the scene settings in the set to avoid config mismatch.
foreach (var set in bakingSets)
{
foreach (var guid in set.sceneGUIDs)
{
sceneBakingSettings[guid] = set.settings;
sceneProfiles[guid] = set.profile;
}
}
}
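// A minimal sketch, not part of the original API: how a baking set is typically created
// and kept in sync. The scene GUID is a placeholder supplied by the caller.
internal void ExampleCreateBakingSetSketch(string sceneGUID)
{
// Creates the set, its profile (as an asset in the editor) and the default scenario list.
var set = CreateNewBakingSet("Example Baking Set");
// Register the scene, then propagate the shared settings and profile to it.
set.sceneGUIDs.Add(sceneGUID);
SyncBakingSetSettings();
}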
#if UNITY_EDITOR
static internal int MaxSubdivLevelInProbeVolume(Vector3 volumeSize, int maxSubdiv)
{
float maxSizedDim = Mathf.Max(volumeSize.x, Mathf.Max(volumeSize.y, volumeSize.z));
float maxSideInBricks = maxSizedDim / ProbeReferenceVolume.instance.MinDistanceBetweenProbes();
int absoluteMaxSubdiv = ProbeReferenceVolume.instance.GetMaxSubdivision() - 1;
int subdivLevel = Mathf.FloorToInt(Mathf.Log(maxSideInBricks, 3)) - 1;
return Mathf.Max(subdivLevel, absoluteMaxSubdiv - maxSubdiv);
}
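// Worked example for MaxSubdivLevelInProbeVolume (hypothetical numbers): assuming
// MinDistanceBetweenProbes() == 1 and GetMaxSubdivision() == 6, a call such as
//   MaxSubdivLevelInProbeVolume(new Vector3(90, 30, 10), maxSubdiv: 2)
// gives maxSideInBricks = 90, subdivLevel = FloorToInt(Log(90, 3)) - 1 = 4 - 1 = 3 and
// absoluteMaxSubdiv = 6 - 1 = 5, so the result is Max(3, 5 - 2) = 3.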
private void InflateBound(ref Bounds bounds, ProbeVolume pv)
{
Bounds originalBounds = bounds;
// Round the probe volume bounds to cell size
float cellSize = ProbeReferenceVolume.instance.MaxBrickSize();
// Expand the probe volume bounds to snap on the cell size grid
bounds.Encapsulate(new Vector3(cellSize * Mathf.Floor(bounds.min.x / cellSize),
cellSize * Mathf.Floor(bounds.min.y / cellSize),
cellSize * Mathf.Floor(bounds.min.z / cellSize)));
bounds.Encapsulate(new Vector3(cellSize * Mathf.Ceil(bounds.max.x / cellSize),
cellSize * Mathf.Ceil(bounds.max.y / cellSize),
cellSize * Mathf.Ceil(bounds.max.z / cellSize)));
// calculate how much padding we need to remove according to the brick generation in ProbePlacement.cs:
var cellSizeVector = new Vector3(cellSize, cellSize, cellSize);
var minPadding = (bounds.min - originalBounds.min);
var maxPadding = (bounds.max - originalBounds.max);
minPadding = cellSizeVector - new Vector3(Mathf.Abs(minPadding.x), Mathf.Abs(minPadding.y), Mathf.Abs(minPadding.z));
maxPadding = cellSizeVector - new Vector3(Mathf.Abs(maxPadding.x), Mathf.Abs(maxPadding.y), Mathf.Abs(maxPadding.z));
// Find the size of the brick we can put for every axis given the padding size
int maxSubdiv = (pv.overridesSubdivLevels ? pv.highestSubdivLevelOverride : 0);
float rightPaddingSubdivLevel = ProbeReferenceVolume.instance.BrickSize(MaxSubdivLevelInProbeVolume(new Vector3(maxPadding.x, originalBounds.size.y, originalBounds.size.z), maxSubdiv));
float leftPaddingSubdivLevel = ProbeReferenceVolume.instance.BrickSize(MaxSubdivLevelInProbeVolume(new Vector3(minPadding.x, originalBounds.size.y, originalBounds.size.z), maxSubdiv));
float topPaddingSubdivLevel = ProbeReferenceVolume.instance.BrickSize(MaxSubdivLevelInProbeVolume(new Vector3(originalBounds.size.x, maxPadding.y, originalBounds.size.z), maxSubdiv));
float bottomPaddingSubdivLevel = ProbeReferenceVolume.instance.BrickSize(MaxSubdivLevelInProbeVolume(new Vector3(originalBounds.size.x, minPadding.y, originalBounds.size.z), maxSubdiv));
float forwardPaddingSubdivLevel = ProbeReferenceVolume.instance.BrickSize(MaxSubdivLevelInProbeVolume(new Vector3(originalBounds.size.x, originalBounds.size.y, maxPadding.z), maxSubdiv));
float backPaddingSubdivLevel = ProbeReferenceVolume.instance.BrickSize(MaxSubdivLevelInProbeVolume(new Vector3(originalBounds.size.x, originalBounds.size.y, minPadding.z), maxSubdiv));
// Remove the extra padding caused by cell rounding
bounds.min = bounds.min + new Vector3(
leftPaddingSubdivLevel * Mathf.Floor(Mathf.Abs(bounds.min.x - originalBounds.min.x) / (float)leftPaddingSubdivLevel),
bottomPaddingSubdivLevel * Mathf.Floor(Mathf.Abs(bounds.min.y - originalBounds.min.y) / (float)bottomPaddingSubdivLevel),
backPaddingSubdivLevel * Mathf.Floor(Mathf.Abs(bounds.min.z - originalBounds.min.z) / (float)backPaddingSubdivLevel)
);
bounds.max = bounds.max - new Vector3(
rightPaddingSubdivLevel * Mathf.Floor(Mathf.Abs(bounds.max.x - originalBounds.max.x) / (float)rightPaddingSubdivLevel),
topPaddingSubdivLevel * Mathf.Floor(Mathf.Abs(bounds.max.y - originalBounds.max.y) / (float)topPaddingSubdivLevel),
forwardPaddingSubdivLevel * Mathf.Floor(Mathf.Abs(bounds.max.z - originalBounds.max.z) / (float)forwardPaddingSubdivLevel)
);
}
// Should be called after EnsureSceneIsInBakingSet, otherwise GetProfileForScene might be out of date
internal void UpdateSceneBounds(Scene scene, bool updateGlobalVolumes)
{
var volumes = Object.FindObjectsOfType<ProbeVolume>();
float prevBrickSize = ProbeReferenceVolume.instance.MinBrickSize();
int prevMaxSubdiv = ProbeReferenceVolume.instance.GetMaxSubdivision();
{
var profile = GetProfileForScene(scene);
if (profile == null)
{
if (volumes.Length > 0)
Debug.LogWarning("A probe volume is present in the scene but a profile has not been set. Please configure a profile for your scene in the Probe Volume Baking settings.");
return;
}
ProbeReferenceVolume.instance.SetMinBrickAndMaxSubdiv(profile.minBrickSize, profile.maxSubdivision);
}
var sceneGUID = GetSceneGUID(scene);
bool boundFound = false;
Bounds newBound = new Bounds();
foreach (var volume in volumes)
{
bool forceUpdate = updateGlobalVolumes && volume.mode == ProbeVolume.Mode.Global;
if (!forceUpdate && volume.gameObject.scene != scene)
continue;
if (volume.mode != ProbeVolume.Mode.Local)
volume.UpdateGlobalVolume(volume.mode == ProbeVolume.Mode.Global ? GIContributors.ContributorFilter.All : GIContributors.ContributorFilter.Scene);
var transform = volume.gameObject.transform;
var obb = new ProbeReferenceVolume.Volume(Matrix4x4.TRS(transform.position, transform.rotation, volume.GetExtents()), 0, 0);
Bounds localBounds = obb.CalculateAABB();
InflateBound(ref localBounds, volume);
if (!boundFound)
{
newBound = localBounds;
boundFound = true;
}
else
{
newBound.Encapsulate(localBounds);
}
}
hasProbeVolumes[sceneGUID] = boundFound;
if (boundFound)
sceneBounds[sceneGUID] = newBound;
ProbeReferenceVolume.instance.SetMinBrickAndMaxSubdiv(prevBrickSize, prevMaxSubdiv);
if (parentAsset != null)
EditorUtility.SetDirty(parentAsset);
}
// It is important that this is called after UpdateSceneBounds, otherwise SceneHasProbeVolumes might be out of date
internal void EnsurePerSceneData(Scene scene)
{
var sceneGUID = GetSceneGUID(scene);
if (SceneHasProbeVolumes(sceneGUID))
{
bool foundPerSceneData = false;
foreach (var data in ProbeReferenceVolume.instance.perSceneDataList)
{
if (GetSceneGUID(data.gameObject.scene) == sceneGUID)
{
foundPerSceneData = true;
break;
}
}
if (!foundPerSceneData)
{
GameObject go = new GameObject("ProbeVolumePerSceneData");
go.hideFlags |= HideFlags.HideInHierarchy;
go.AddComponent<ProbeVolumePerSceneData>();
SceneManager.MoveGameObjectToScene(go, scene);
}
}
}
internal void EnsureSceneIsInBakingSet(Scene scene)
{
var sceneGUID = GetSceneGUID(scene);
foreach (var set in bakingSets)
if (set.sceneGUIDs.Contains(sceneGUID))
return;
// The scene is not in a baking set, so we need to add it
if (bakingSets.Count == 0)
return; // Technically shouldn't be possible since it's blocked in the UI
bakingSets[0].sceneGUIDs.Add(sceneGUID);
SyncBakingSetSettings();
}
internal string GetFirstProbeVolumeSceneGUID(ProbeVolumeSceneData.BakingSet set)
{
foreach (var guid in set.sceneGUIDs)
{
if (sceneBakingSettings.ContainsKey(guid) && sceneProfiles.ContainsKey(guid))
return guid;
}
return null;
}
internal void OnSceneSaving(Scene scene, string path = null)
{
// If we are called from the scene callback, we want to update all global volumes that are potentially affected
bool updateGlobalVolumes = path != null;
EnsureSceneIsInBakingSet(scene);
UpdateSceneBounds(scene, updateGlobalVolumes);
EnsurePerSceneData(scene);
}
internal void SetProfileForScene(Scene scene, ProbeReferenceVolumeProfile profile)
{
if (sceneProfiles == null) sceneProfiles = new Dictionary<string, ProbeReferenceVolumeProfile>();
var sceneGUID = GetSceneGUID(scene);
sceneProfiles[sceneGUID] = profile;
}
internal void SetProfileForScene(string sceneGUID, ProbeReferenceVolumeProfile profile)
{
if (sceneProfiles == null) sceneProfiles = new Dictionary<string, ProbeReferenceVolumeProfile>();
sceneProfiles[sceneGUID] = profile;
}
internal void SetBakeSettingsForScene(Scene scene, ProbeDilationSettings dilationSettings, VirtualOffsetSettings virtualOffsetSettings)
{
if (sceneBakingSettings == null) sceneBakingSettings = new Dictionary<string, ProbeVolumeBakingProcessSettings>();
var sceneGUID = GetSceneGUID(scene);
sceneBakingSettings[sceneGUID] = new ProbeVolumeBakingProcessSettings(dilationSettings, virtualOffsetSettings);
}
internal ProbeReferenceVolumeProfile GetProfileForScene(Scene scene)
{
var sceneGUID = GetSceneGUID(scene);
if (sceneProfiles != null && sceneProfiles.ContainsKey(sceneGUID))
return sceneProfiles[sceneGUID];
return null;
}
internal bool BakeSettingsDefinedForScene(Scene scene)
{
var sceneGUID = GetSceneGUID(scene);
return sceneBakingSettings.ContainsKey(sceneGUID);
}
internal ProbeVolumeBakingProcessSettings GetBakeSettingsForScene(Scene scene)
{
var sceneGUID = GetSceneGUID(scene);
if (sceneBakingSettings != null && sceneBakingSettings.ContainsKey(sceneGUID))
return sceneBakingSettings[sceneGUID];
return ProbeVolumeBakingProcessSettings.Default;
}
// This is sub-optimal, but acceptable because it is only called once, when kicking off a bake
internal BakingSet GetBakingSetForScene(Scene scene)
{
var sceneGUID = GetSceneGUID(scene);
foreach (var set in bakingSets)
{
foreach (var guidInSet in set.sceneGUIDs)
{
if (guidInSet == sceneGUID)
return set;
}
}
return null;
}
internal bool SceneHasProbeVolumes(string sceneGUID) => hasProbeVolumes != null && hasProbeVolumes.TryGetValue(sceneGUID, out var hasPV) && hasPV;
internal bool SceneHasProbeVolumes(Scene scene) => SceneHasProbeVolumes(GetSceneGUID(scene));
#endif
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 04fb4740d8c6dbc469086e13c435cd4b
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,46 @@
using UnityEngine.Rendering;
namespace UnityEngine.Rendering
{
/// <summary>
/// Defines the constant buffer register that will be used as the binding point for the Probe Volumes constant buffer.
/// </summary>
public enum APVConstantBufferRegister
{
/// <summary>
/// Global register
/// </summary>
GlobalRegister = 5
}
/// <summary>
/// Defines the method used to reduce leaking.
/// </summary>
[GenerateHLSL]
public enum APVLeakReductionMode
{
/// <summary>
/// Nothing is done to prevent leaking. Cheapest option in terms of cost of sampling.
/// </summary>
None = 0,
/// <summary>
/// The uvw used to sample APV data are warped so that invalid probes do not contribute to lighting. In addition, a geometric weight based on the normal at the sampling position and the vector to the probes is used.
/// This only modifies the uvw used, but still samples a single time. It is effective in some situations (especially when occluding objects contain probes inside) but ineffective in many others.
/// </summary>
ValidityAndNormalBased = 1,
}
[GenerateHLSL(needAccessors = false, generateCBuffer = true, constantRegister = (int)APVConstantBufferRegister.GlobalRegister)]
internal unsafe struct ShaderVariablesProbeVolumes
{
public Vector4 _PoolDim_CellInMeters;
public Vector4 _MinCellPos_Noise;
public Vector4 _IndicesDim_IndexChunkSize;
public Vector4 _Biases_CellInMinBrick_MinBrickSize;
public Vector4 _LeakReductionParams;
public Vector4 _Weight_MinLoadedCell;
public Vector4 _MaxLoadedCell_FrameIndex;
public Vector4 _NormalizationClamp_Padding12;
}
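// A minimal upload sketch (assumptions flagged): thanks to [GenerateHLSL(generateCBuffer = true)],
// the fields above map one-to-one onto the generated ShaderVariablesProbeVolumes cbuffer, so a
// render pipeline can fill the struct on the CPU and push it in a single call. The commented code
// below assumes the core ConstantBuffer helper and a CommandBuffer named cmd; the property name
// passed to Shader.PropertyToID is given here as an illustration only.
//
// var cb = new ShaderVariablesProbeVolumes();
// cb._Biases_CellInMinBrick_MinBrickSize = new Vector4(0.0f, 0.0f, 3.0f, 1.0f); // placeholder values
// // ... fill the remaining Vector4 fields ...
// ConstantBuffer.PushGlobal(cmd, cb, Shader.PropertyToID("ShaderVariablesProbeVolumes"));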
}

View File

@@ -0,0 +1,27 @@
//
// This file was automatically generated. Please don't edit by hand. Execute Editor command [ Edit > Rendering > Generate Shader Includes ] instead
//
#ifndef SHADERVARIABLESPROBEVOLUMES_CS_HLSL
#define SHADERVARIABLESPROBEVOLUMES_CS_HLSL
//
// UnityEngine.Rendering.APVLeakReductionMode: static fields
//
#define APVLEAKREDUCTIONMODE_NONE (0)
#define APVLEAKREDUCTIONMODE_VALIDITY_AND_NORMAL_BASED (1)
// Generated from UnityEngine.Rendering.ShaderVariablesProbeVolumes
// PackingRules = Exact
GLOBAL_CBUFFER_START(ShaderVariablesProbeVolumes, b5)
float4 _PoolDim_CellInMeters;
float4 _MinCellPos_Noise;
float4 _IndicesDim_IndexChunkSize;
float4 _Biases_CellInMinBrick_MinBrickSize;
float4 _LeakReductionParams;
float4 _Weight_MinLoadedCell;
float4 _MaxLoadedCell_FrameIndex;
float4 _NormalizationClamp_Padding12;
CBUFFER_END
#endif

View File

@@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: dcbd446e83bc87d40b82dd90f8814c3c
ShaderIncludeImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 6df0bea367f474d46865e94282c99503
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,280 @@
using System;
namespace UnityEngine.Rendering
{
/// <summary>
/// Structure holding Spherical Harmonic L1 coefficients.
/// </summary>
[Serializable]
public struct SphericalHarmonicsL1
{
/// <summary>
/// Red channel of each of the three L1 SH coefficients.
/// </summary>
public Vector4 shAr;
/// <summary>
/// Green channel of each of the three L1 SH coefficients.
/// </summary>
public Vector4 shAg;
/// <summary>
/// Blue channel of each of the three L1 SH coefficients.
/// </summary>
public Vector4 shAb;
/// <summary>
/// A set of L1 coefficients initialized to zero.
/// </summary>
public static readonly SphericalHarmonicsL1 zero = new SphericalHarmonicsL1
{
shAr = Vector4.zero,
shAg = Vector4.zero,
shAb = Vector4.zero
};
// These operators are implemented so that SphericalHarmonicsL1 matches the API of SphericalHarmonicsL2.
/// <summary>
/// Sum two SphericalHarmonicsL1.
/// </summary>
/// <param name="lhs">First SphericalHarmonicsL1.</param>
/// <param name="rhs">Second SphericalHarmonicsL1.</param>
/// <returns>The resulting SphericalHarmonicsL1.</returns>
public static SphericalHarmonicsL1 operator +(SphericalHarmonicsL1 lhs, SphericalHarmonicsL1 rhs) => new SphericalHarmonicsL1()
{
shAr = lhs.shAr + rhs.shAr,
shAg = lhs.shAg + rhs.shAg,
shAb = lhs.shAb + rhs.shAb
};
/// <summary>
/// Subtract two SphericalHarmonicsL1.
/// </summary>
/// <param name="lhs">First SphericalHarmonicsL1.</param>
/// <param name="rhs">Second SphericalHarmonicsL1.</param>
/// <returns>The resulting SphericalHarmonicsL1.</returns>
public static SphericalHarmonicsL1 operator -(SphericalHarmonicsL1 lhs, SphericalHarmonicsL1 rhs) => new SphericalHarmonicsL1()
{
shAr = lhs.shAr - rhs.shAr,
shAg = lhs.shAg - rhs.shAg,
shAb = lhs.shAb - rhs.shAb
};
/// <summary>
/// Multiply a SphericalHarmonicsL1 by a scalar.
/// </summary>
/// <param name="lhs">The SphericalHarmonicsL1 to scale.</param>
/// <param name="rhs">The scalar to multiply with.</param>
/// <returns>The resulting SphericalHarmonicsL1.</returns>
public static SphericalHarmonicsL1 operator *(SphericalHarmonicsL1 lhs, float rhs) => new SphericalHarmonicsL1()
{
shAr = lhs.shAr * rhs,
shAg = lhs.shAg * rhs,
shAb = lhs.shAb * rhs
};
/// <summary>
/// Divide a SphericalHarmonicsL1 by a scalar.
/// </summary>
/// <param name="lhs">The SphericalHarmonicsL1 to divide.</param>
/// <param name="rhs">The scalar to divide by.</param>
/// <returns>The resulting SphericalHarmonicsL1.</returns>
public static SphericalHarmonicsL1 operator /(SphericalHarmonicsL1 lhs, float rhs) => new SphericalHarmonicsL1()
{
shAr = lhs.shAr / rhs,
shAg = lhs.shAg / rhs,
shAb = lhs.shAb / rhs
};
/// <summary>
/// Compare two SphericalHarmonicsL1.
/// </summary>
/// <param name="lhs">First SphericalHarmonicsL1.</param>
/// <param name="rhs">Second SphericalHarmonicsL1.</param>
/// <returns>Whether the SphericalHarmonicsL1 match.</returns>
public static bool operator ==(SphericalHarmonicsL1 lhs, SphericalHarmonicsL1 rhs)
{
return lhs.shAr == rhs.shAr
&& lhs.shAg == rhs.shAg
&& lhs.shAb == rhs.shAb;
}
/// <summary>
/// Check two SphericalHarmonicsL1 inequality.
/// </summary>
/// <param name="lhs">First SphericalHarmonicsL1.</param>
/// <param name="rhs">Second SphericalHarmonicsL1.</param>
/// <returns>Whether the SphericalHarmonicsL1 are different.</returns>
public static bool operator !=(SphericalHarmonicsL1 lhs, SphericalHarmonicsL1 rhs)
{
return !(lhs == rhs);
}
/// <summary>
/// Compare this SphericalHarmonicsL1 with an object.
/// </summary>
/// <param name="other">The object to compare with.</param>
/// <returns>Whether the SphericalHarmonicsL1 is equal to the object passed.</returns>
public override bool Equals(object other)
{
if (!(other is SphericalHarmonicsL1)) return false;
return this == (SphericalHarmonicsL1)other;
}
/// <summary>
/// Produces a hash code for the SphericalHarmonicsL1.
/// </summary>
/// <returns>The hash code for this SphericalHarmonicsL1.</returns>
public override int GetHashCode()
{
return ((17 * 23 + shAr.GetHashCode()) * 23 + shAg.GetHashCode()) * 23 + shAb.GetHashCode();
}
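// A minimal usage sketch, not part of the original API: blends two L1 sets with the
// operators above. Inputs are placeholders supplied by the caller.
internal static SphericalHarmonicsL1 ExampleLerpSketch(SphericalHarmonicsL1 a, SphericalHarmonicsL1 b, float t)
{
// Component-wise linear interpolation: a * (1 - t) + b * t.
return a * (1.0f - t) + b * t;
}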
}
/// <summary>
/// A collection of utility functions used to access and set SphericalHarmonicsL2 in a more verbose way.
/// </summary>
public class SphericalHarmonicsL2Utils
{
/// <summary>
/// Returns the L1 coefficients organized so that they are swizzled per channel rather than per coefficient.
/// </summary>
/// <param name ="sh"> The SphericalHarmonicsL2 data structure to use to query the information.</param>
/// <param name ="L1_R">The red channel of all coefficient for the L1 band.</param>
/// <param name ="L1_G">The green channel of all coefficient for the L1 band.</param>
/// <param name ="L1_B">The blue channel of all coefficient for the L1 band.</param>
public static void GetL1(SphericalHarmonicsL2 sh, out Vector3 L1_R, out Vector3 L1_G, out Vector3 L1_B)
{
L1_R = new Vector3(sh[0, 1],
sh[0, 2],
sh[0, 3]);
L1_G = new Vector3(sh[1, 1],
sh[1, 2],
sh[1, 3]);
L1_B = new Vector3(sh[2, 1],
sh[2, 2],
sh[2, 3]);
}
/// <summary>
/// Returns all the L2 coefficients.
/// </summary>
/// <param name ="sh"> The SphericalHarmonicsL2 data structure to use to query the information.</param>
/// <param name ="L2_0">The first coefficient for the L2 band.</param>
/// <param name ="L2_1">The second coefficient for the L2 band.</param>
/// <param name ="L2_2">The third coefficient for the L2 band.</param>
/// <param name ="L2_3">The fourth coefficient for the L2 band.</param>
/// <param name ="L2_4">The fifth coefficient for the L2 band.</param>
public static void GetL2(SphericalHarmonicsL2 sh, out Vector3 L2_0, out Vector3 L2_1, out Vector3 L2_2, out Vector3 L2_3, out Vector3 L2_4)
{
L2_0 = new Vector3(sh[0, 4],
sh[1, 4],
sh[2, 4]);
L2_1 = new Vector3(sh[0, 5],
sh[1, 5],
sh[2, 5]);
L2_2 = new Vector3(sh[0, 6],
sh[1, 6],
sh[2, 6]);
L2_3 = new Vector3(sh[0, 7],
sh[1, 7],
sh[2, 7]);
L2_4 = new Vector3(sh[0, 8],
sh[1, 8],
sh[2, 8]);
}
/// <summary>
/// Set L0 coefficient.
/// </summary>
/// <param name ="sh">The SphericalHarmonicsL2 data structure to store information on.</param>
/// <param name ="L0">The L0 coefficient to set.</param>
public static void SetL0(ref SphericalHarmonicsL2 sh, Vector3 L0)
{
sh[0, 0] = L0.x;
sh[1, 0] = L0.y;
sh[2, 0] = L0.z;
}
/// <summary>
/// Set the red channel for each of the L1 coefficients.
/// </summary>
/// <param name ="sh">The SphericalHarmonicsL2 data structure to store information on.</param>
/// <param name ="L1_R">The red channels for each L1 coefficient.</param>
public static void SetL1R(ref SphericalHarmonicsL2 sh, Vector3 L1_R)
{
sh[0, 1] = L1_R.x;
sh[0, 2] = L1_R.y;
sh[0, 3] = L1_R.z;
}
/// <summary>
/// Set the green channel for each of the L1 coefficients.
/// </summary>
/// <param name ="sh">The SphericalHarmonicsL2 data structure to store information on.</param>
/// <param name ="L1_G">The green channels for each L1 coefficient.</param>
public static void SetL1G(ref SphericalHarmonicsL2 sh, Vector3 L1_G)
{
sh[1, 1] = L1_G.x;
sh[1, 2] = L1_G.y;
sh[1, 3] = L1_G.z;
}
/// <summary>
/// Set the blue channel for each of the L1 coefficients.
/// </summary>
/// <param name ="sh">The SphericalHarmonicsL2 data structure to store information on.</param>
/// <param name ="L1_B">The blue channels for each L1 coefficient.</param>
public static void SetL1B(ref SphericalHarmonicsL2 sh, Vector3 L1_B)
{
sh[2, 1] = L1_B.x;
sh[2, 2] = L1_B.y;
sh[2, 3] = L1_B.z;
}
/// <summary>
/// Set all L1 coefficients per channel.
/// </summary>
/// <param name ="sh">The SphericalHarmonicsL2 data structure to store information on.</param>
/// <param name ="L1_R">The red channels for each L1 coefficient.</param>
/// <param name ="L1_G">The green channels for each L1 coefficient.</param>
/// <param name ="L1_B">The blue channels for each L1 coefficient.</param>
public static void SetL1(ref SphericalHarmonicsL2 sh, Vector3 L1_R, Vector3 L1_G, Vector3 L1_B)
{
SetL1R(ref sh, L1_R);
SetL1G(ref sh, L1_G);
SetL1B(ref sh, L1_B);
}
/// <summary>
/// Set a spherical harmonics coefficient.
/// </summary>
/// <param name ="sh">The SphericalHarmonicsL2 data structure to store information on.</param>
/// <param name ="index">The index of the coefficient that is set (must be less than 9).</param>
/// <param name ="coefficient">The values of the coefficient is set.</param>
public static void SetCoefficient(ref SphericalHarmonicsL2 sh, int index, Vector3 coefficient)
{
Debug.Assert(index < 9);
sh[0, index] = coefficient.x;
sh[1, index] = coefficient.y;
sh[2, index] = coefficient.z;
}
/// <summary>
/// Get a spherical harmonics coefficient.
/// </summary>
/// <param name ="sh">The SphericalHarmonicsL2 data structure to get information from.</param>
/// <param name ="index">The index of the coefficient that is requested (must be less than 9).</param>
/// <returns>The value of the requested coefficient.</returns>
public static Vector3 GetCoefficient(SphericalHarmonicsL2 sh, int index)
{
Debug.Assert(index < 9);
return new Vector3(sh[0, index], sh[1, index], sh[2, index]);
}
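// A minimal round-trip sketch, not part of the original API: write the L0 and one L2
// coefficient through the setters above, then read them back. Values are placeholders.
internal static void ExampleRoundTripSketch()
{
var sh = new SphericalHarmonicsL2();
SetL0(ref sh, new Vector3(0.5f, 0.5f, 0.5f));
SetCoefficient(ref sh, 4, new Vector3(0.1f, 0.2f, 0.3f));
// GetCoefficient reads back exactly what SetCoefficient wrote...
Debug.Assert(GetCoefficient(sh, 4) == new Vector3(0.1f, 0.2f, 0.3f));
// ...and index 4 is also the first L2 coefficient returned by GetL2.
GetL2(sh, out var L2_0, out _, out _, out _, out _);
Debug.Assert(L2_0 == GetCoefficient(sh, 4));
}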
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: f89ae72a038989e4780e2087fe3911c7
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant: