// ThiefLightmapper/KeepersCompound.Lightmapper/PotentiallyVisibleSet.cs
using System.Numerics;
using KeepersCompound.LGS.Database.Chunks;
namespace KeepersCompound.Lightmapper;
public class PotentiallyVisibleSet
{
2025-01-05 14:49:02 +00:00
private struct Edge
2025-01-04 20:22:36 +00:00
{
2025-01-04 21:17:53 +00:00
public int Destination;
2025-01-04 20:22:36 +00:00
public Poly Poly;
2025-01-05 14:49:02 +00:00
public override string ToString()
{
return $"<Destination: {Destination}, Poly: {Poly}";
}
2025-01-04 20:22:36 +00:00
}
2025-01-05 14:49:02 +00:00
private struct Poly
2025-01-04 20:22:36 +00:00
{
2025-01-06 21:26:24 +00:00
public List<Vector3> Vertices;
2025-01-05 14:49:02 +00:00
public readonly Plane Plane;
2025-01-04 20:22:36 +00:00
2025-01-06 21:26:24 +00:00
public Poly(List<Vector3> vertices, Plane plane)
2025-01-04 20:22:36 +00:00
{
2025-01-05 14:49:02 +00:00
Vertices = vertices;
Plane = plane;
}
2025-01-04 20:22:36 +00:00
2025-01-05 14:49:02 +00:00
public Poly(Poly other)
{
2025-01-06 21:26:24 +00:00
Vertices = [..other.Vertices];
Plane = other.Plane;
2025-01-05 14:49:02 +00:00
}
public bool IsCoplanar(Poly other)
{
return MathUtils.IsCoplanar(Plane, other.Plane);
}
public override string ToString()
{
return $"<Plane: {Plane}, Vertices: [{string.Join(", ", Vertices)}]";
2025-01-04 20:22:36 +00:00
}
}
    // Adjacency list: _portalGraph[cellIdx] holds indices into _edges for every portal leaving that cell.
    private readonly List<int>[] _portalGraph;
    // Flat store of all portal edges; referenced by index from _portalGraph.
    private readonly List<Edge> _edges;
    // Cache of computed visibility sets, keyed by source cell index (filled lazily by GetVisible).
    private readonly Dictionary<int, HashSet<int>> _visibilitySet;
    // Tolerance used for plane-distance and degeneracy tests throughout the PVS computation.
    private const float Epsilon = 0.1f;

    // Scratch buffers reused by ClipPolygonByPlane to avoid per-call allocations.
    // This is yucky and means we're not thread safe
    private readonly List<float> _clipDistances = new(32);
    private readonly List<Side> _clipSides = new(32);
    private readonly int[] _clipCounts = [0, 0, 0];
// TODO:
// - This is a conservative algorithm based on Matt's Ramblings Quake PVS video
// - Build portal graph (or just use WR directly)
// - A cell can always see it's self and any immediate neighbours
// - The third depth cell is also visible unless the portal to it is coplanar with the second cells portal (do I need to think about this?)
// - For all further cells:
// - Generate separating planes between the source cell portal and the previously passed (clipped) portal
// - Clip the target portal to the new cell using the separating planes
// - If anything is left of the clipped portal, we can see, otherwise we discard that cell
// - The full process is a recursive depth first search
public PotentiallyVisibleSet(WorldRep.Cell[] cells)
{
_edges = [];
_visibilitySet = new Dictionary<int, HashSet<int>>();
_portalGraph = new List<int>[cells.Length];
for (var i = 0; i < cells.Length; i++)
{
_portalGraph[i] = [];
var cell = cells[i];
// If a cell is "blocks vision" flagged, we can never see out of it
// We can see into it though, so we still want the edges coming in
if ((cell.Flags & 8) != 0)
{
continue;
}
// We have to cycle through *all* polys rather than just portals to calculate the correct poly vertex offsets
var indicesOffset = 0;
var portalStartIdx = cell.PolyCount - cell.PortalPolyCount;
for (var j = 0; j < cell.PolyCount; j++)
{
var poly = cell.Polys[j];
if (j < portalStartIdx)
{
indicesOffset += poly.VertexCount;
continue;
}
var other = poly.Destination;
// Checking if there's already an edge is super slow. It's much faster to just add a new edge, even with
// the duplicated poly
2025-01-06 21:26:24 +00:00
var vs = new List<Vector3>(poly.VertexCount);
for (var vIdx = 0; vIdx < poly.VertexCount; vIdx++)
2025-01-04 20:22:36 +00:00
{
2025-01-06 21:26:24 +00:00
vs.Add(cell.Vertices[cell.Indices[indicesOffset + vIdx]]);
2025-01-04 20:22:36 +00:00
}
var edge = new Edge
{
2025-01-04 21:17:53 +00:00
Destination = other,
Poly = new Poly(vs, cell.Planes[poly.PlaneId]),
};
_edges.Add(edge);
_portalGraph[i].Add(_edges.Count - 1);
2025-01-04 20:22:36 +00:00
indicesOffset += poly.VertexCount;
}
}
}
public int[] GetVisible(int cellIdx)
{
if (_visibilitySet.TryGetValue(cellIdx, out var value))
{
return [..value];
}
var visibleCells = ComputeVisibility(cellIdx);
_visibilitySet.Add(cellIdx, visibleCells);
return [..visibleCells];
}
private HashSet<int> ComputeVisibility(int cellIdx)
{
if (cellIdx >= _portalGraph.Length)
{
return [];
}
// A cell can always see itself, so we'll add that now
var visible = new HashSet<int>();
visible.Add(cellIdx);
// Additionally a cell can always see it's direct neighbours (obviously)
foreach (var edgeIndex in _portalGraph[cellIdx])
{
var edge = _edges[edgeIndex];
2025-01-04 21:17:53 +00:00
var neighbourIdx = edge.Destination;
2025-01-04 20:22:36 +00:00
visible.Add(neighbourIdx);
// Neighbours of our direct neighbour are always visible, unless they're coplanar
foreach (var innerEdgeIndex in _portalGraph[neighbourIdx])
{
var innerEdge = _edges[innerEdgeIndex];
2025-01-04 21:17:53 +00:00
if (innerEdge.Destination == cellIdx || edge.Poly.IsCoplanar(innerEdge.Poly))
2025-01-04 20:22:36 +00:00
{
continue;
}
2025-01-05 14:49:02 +00:00
ExplorePortalRecursive(visible, edge.Poly, new Poly(innerEdge.Poly), neighbourIdx, innerEdge.Destination, 0);
2025-01-04 20:22:36 +00:00
}
}
return visible;
}
2025-01-05 14:49:02 +00:00
    /// <summary>
    /// Recursive depth-first step of the PVS walk. Marks <paramref name="currentCellIdx"/>
    /// visible, builds separating planes between the root portal
    /// (<paramref name="sourcePoly"/>) and the already-clipped portal we entered through
    /// (<paramref name="previousPoly"/>), then clips each onward portal against those planes
    /// and recurses into any portal with vertices remaining.
    /// </summary>
    private void ExplorePortalRecursive(
        HashSet<int> visible,
        Poly sourcePoly,
        Poly previousPoly,
        int previousCellIdx,
        int currentCellIdx,
        int depth)
    {
        // TODO: Might need to lose this
        // Hard recursion cap to guard against runaway exploration.
        if (depth > 1024)
        {
            return;
        }

        visible.Add(currentCellIdx);

        // Only one edge out of the cell means we'd be going back on ourselves
        if (_portalGraph[currentCellIdx].Count <= 1)
        {
            return;
        }

        // TODO: If all neighbours are already in `visible` skip exploring?
        // Separators are generated in both directions (source->previous and previous->source,
        // the latter flipped) so the clip region is bounded on both sides.
        var separators = new List<Plane>();
        GetSeparatingPlanes(separators, sourcePoly, previousPoly, false);
        GetSeparatingPlanes(separators, previousPoly, sourcePoly, true);

        // The case for this occuring is... interesting ( idk )
        if (separators.Count == 0)
        {
            return;
        }

        // Clip all new polys and recurse
        foreach (var edgeIndex in _portalGraph[currentCellIdx])
        {
            var edge = _edges[edgeIndex];
            // Skip the portal we came through, and coplanar portals (no visibility through them).
            if (edge.Destination == previousCellIdx || previousPoly.IsCoplanar(edge.Poly) || sourcePoly.IsCoplanar(edge.Poly))
            {
                continue;
            }

            // Clip a copy so the shared edge poly is never mutated.
            var poly = new Poly(edge.Poly);
            foreach (var separator in separators)
            {
                ClipPolygonByPlane(ref poly, separator);
            }

            // Fully clipped away: this portal is not visible from the source.
            if (poly.Vertices.Count == 0)
            {
                continue;
            }

            ExplorePortalRecursive(visible, sourcePoly, poly, currentCellIdx, edge.Destination, depth + 1);
        }
    }
    // TODO: We're getting multiple separating planes that are the same, let's not somehow?
    /// <summary>
    /// Appends to <paramref name="separators"/> every valid separating plane formed by an edge
    /// of <paramref name="p0"/> and a vertex of <paramref name="p1"/>. A candidate is kept only
    /// if all of p1's other vertices lie on or in front of it (at least one strictly in front).
    /// When <paramref name="flip"/> is set, the stored plane is negated — used by the caller
    /// when it swaps the roles of the two portals.
    /// </summary>
    private static void GetSeparatingPlanes(List<Plane> separators, Poly p0, Poly p1, bool flip)
    {
        for (var i = 0; i < p0.Vertices.Count; i++)
        {
            // brute force all combinations
            // there's probably some analytical way to choose the "correct" v2 but I couldn't find anything online
            var v0 = p0.Vertices[i];
            var v1 = p0.Vertices[(i + 1) % p0.Vertices.Count];
            for (var j = 0; j < p1.Vertices.Count; j++)
            {
                var v2 = p1.Vertices[j];
                var normal = Vector3.Cross(v1 - v0, v2 - v0);
                if (normal.LengthSquared() < Epsilon)
                {
                    // colinear (or near colinear) points will produce an invalid plane
                    continue;
                }

                normal = Vector3.Normalize(normal);
                var d = -Vector3.Dot(v2, normal);

                // Depending on how the edges were built, the resulting plane might be facing the wrong way
                // (flip it so v2 — and hence the source portal — sits behind the plane).
                var distanceToSource = MathUtils.DistanceFromPlane(p0.Plane, v2);
                if (distanceToSource > Epsilon)
                {
                    normal = -normal;
                    d = -d;
                }

                var plane = new Plane(normal, d);

                // A plane coplanar with one of the portals can't separate anything useful.
                if (MathUtils.IsCoplanar(plane, flip ? p0.Plane : p1.Plane))
                {
                    continue;
                }

                // All points should be in front of the plane (except for the point used to create it)
                var invalid = false;
                var count = 0;
                for (var k = 0; k < p1.Vertices.Count; k++)
                {
                    if (k == j)
                    {
                        continue;
                    }

                    var dist = MathUtils.DistanceFromPlane(plane, p1.Vertices[k]);
                    if (dist > Epsilon)
                    {
                        count++;
                    }
                    else if (dist < -Epsilon)
                    {
                        // A vertex strictly behind the plane disqualifies this candidate.
                        invalid = true;
                        break;
                    }
                }

                // count == 0 means p1 lies entirely on the plane — degenerate, discard.
                if (invalid || count == 0)
                {
                    continue;
                }

                if (flip)
                {
                    plane.Normal = -normal;
                    plane.D = -d;
                }
                separators.Add(plane);
            }
        }
    }
    // Classification of a vertex relative to a clipping plane (within +/- Epsilon counts as On).
    // The underlying values (0/1/2) are used directly to index the _clipCounts scratch array.
    private enum Side
    {
        Front,
        On,
        Back
    }
    // TODO: is this reference type poly going to fuck me?
    // TODO: Should this and Poly be in MathUtils?
    /// <summary>
    /// Clips <paramref name="poly"/> against <paramref name="plane"/> in place, keeping the
    /// part on the front side (positive distance). A fully-clipped polygon ends up with an
    /// empty vertex list. Uses the shared _clip* scratch buffers, so not thread safe.
    /// </summary>
    private void ClipPolygonByPlane(ref Poly poly, Plane plane)
    {
        var vertexCount = poly.Vertices.Count;
        if (vertexCount == 0)
        {
            return;
        }

        // Firstly we want to tally up what side of the plane each point of the poly is on
        // This is used both to early out if nothing/everything is clipped, and to aid the clipping
        _clipDistances.Clear();
        _clipSides.Clear();
        _clipCounts[0] = 0;
        _clipCounts[1] = 0;
        _clipCounts[2] = 0;
        for (var i = 0; i < vertexCount; i++)
        {
            var distance = MathUtils.DistanceFromPlane(plane, poly.Vertices[i]);
            _clipDistances.Add(distance);
            _clipSides.Add(distance switch {
                > Epsilon => Side.Front,
                <-Epsilon => Side.Back,
                _ => Side.On,
            });
            _clipCounts[(int)_clipSides[i]]++;
        }

        // Everything is within the half-space, so we don't need to clip anything
        if (_clipCounts[(int)Side.Back] == 0 && _clipCounts[(int)Side.On] != vertexCount)
        {
            return;
        }

        // Everything is outside the half-space, so we clip everything
        // NOTE(review): a polygon lying entirely *on* the plane also reaches this branch
        // (Front == 0) and is discarded — confirm that degenerate case is intentional.
        if (_clipCounts[(int)Side.Front] == 0)
        {
            poly.Vertices.Clear();
            return;
        }

        var vertices = new List<Vector3>();
        for (var i = 0; i < vertexCount; i++)
        {
            var i1 = (i + 1) % vertexCount;
            var v0 = poly.Vertices[i];
            var v1 = poly.Vertices[i1];
            var side = _clipSides[i];
            var nextSide = _clipSides[i1];

            // Vertices that are inside/on the half-space don't get clipped
            if (_clipSides[i] != Side.Back)
            {
                vertices.Add(v0);
            }

            // We only need to do any clipping if we've swapped from front-to-back or vice versa
            // If either the current or next side is On then that's where we would have clipped to
            // anyway so we also don't need to do anything
            if (side == Side.On || nextSide == Side.On || side == nextSide)
            {
                continue;
            }

            // This is how far along the vector v0 -> v1 the front/back crossover occurs
            var frac = _clipDistances[i] / (_clipDistances[i] - _clipDistances[i1]);
            var splitVertex = v0 + frac * (v1 - v0);
            vertices.Add(splitVertex);
        }

        poly.Vertices = vertices;
    }
}