2019-12-01 03:53:09 +01:00
|
|
|
using Ryujinx.Common;
|
2019-10-13 08:02:07 +02:00
|
|
|
using Ryujinx.Graphics.GAL;
|
|
|
|
using Ryujinx.Graphics.Gpu.State;
|
|
|
|
using Ryujinx.Graphics.Shader;
|
|
|
|
using System;
|
|
|
|
|
|
|
|
namespace Ryujinx.Graphics.Gpu.Memory
|
|
|
|
{
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
|
|
|
|
/// Buffer manager.
|
|
|
|
/// </summary>
|
2019-10-13 08:02:07 +02:00
|
|
|
class BufferManager
|
|
|
|
{
|
2019-11-25 01:29:37 +01:00
|
|
|
// Initial and maximum capacity of the scratch array used to collect
// range-list overlap results (see ShrinkOverlapsBufferIfNeeded).
private const int OverlapsBufferInitialCapacity = 10;
private const int OverlapsBufferMaxCapacity = 10000;

// Buffers are created aligned to this size (the 4KB guest page size).
private const ulong BufferAlignmentSize = 0x1000;
private const ulong BufferAlignmentMask = BufferAlignmentSize - 1;

// GPU context this buffer manager belongs to.
private GpuContext _context;

// Cache of all buffers, indexed by guest memory range.
private RangeList<Buffer> _buffers;

// Scratch array reused across CreateBuffer calls for overlap queries.
private Buffer[] _bufferOverlaps;

// Currently bound index buffer (address, size and element type).
private IndexBuffer _indexBuffer;

// Currently bound vertex buffers, one slot per binding index.
private VertexBuffer[] _vertexBuffers;
|
|
|
|
|
|
|
|
/// <summary>
/// Holds the buffer bindings (address/size pairs) of a single shader stage.
/// </summary>
private class BuffersPerStage
{
    // Bit N set means the buffer at binding index N is enabled.
    public uint EnableMask { get; set; }

    // Guest address and size of each bound buffer.
    public BufferBounds[] Buffers { get; }

    public BuffersPerStage(int count)
    {
        Buffers = new BufferBounds[count];
    }

    /// <summary>
    /// Records the guest memory range of the buffer bound at the given index.
    /// </summary>
    public void Bind(int index, ulong address, ulong size)
    {
        ref BufferBounds bounds = ref Buffers[index];

        bounds.Address = address;
        bounds.Size = size;
    }
}
|
|
|
|
|
|
|
|
// Storage and uniform buffer bindings for the compute pipeline.
private BuffersPerStage _cpStorageBuffers;
private BuffersPerStage _cpUniformBuffers;

// Storage and uniform buffer bindings, one entry per graphics shader stage.
private BuffersPerStage[] _gpStorageBuffers;
private BuffersPerStage[] _gpUniformBuffers;

// Set when graphics bindings changed and must be re-sent to the host API
// on the next CommitBindings call.
private bool _gpStorageBuffersDirty;
private bool _gpUniformBuffersDirty;

private bool _indexBufferDirty;
private bool _vertexBuffersDirty;

// Bit N set means vertex buffer N currently has a non-zero address.
private uint _vertexBuffersEnableMask;

// Forces every binding to be re-bound on the next CommitBindings call.
private bool _rebind;
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Creates a new instance of the buffer manager.
/// </summary>
/// <param name="context">The GPU context that the buffer manager belongs to</param>
public BufferManager(GpuContext context)
{
    _context = context;

    _buffers = new RangeList<Buffer>();

    _bufferOverlaps = new Buffer[OverlapsBufferInitialCapacity];

    _vertexBuffers = new VertexBuffer[Constants.TotalVertexBuffers];

    // Compute has its own (single stage) binding sets.
    _cpStorageBuffers = new BuffersPerStage(Constants.TotalCpStorageBuffers);
    _cpUniformBuffers = new BuffersPerStage(Constants.TotalCpUniformBuffers);

    // Graphics has one binding set per shader stage.
    _gpStorageBuffers = new BuffersPerStage[Constants.ShaderStages];
    _gpUniformBuffers = new BuffersPerStage[Constants.ShaderStages];

    for (int stage = 0; stage < Constants.ShaderStages; stage++)
    {
        _gpStorageBuffers[stage] = new BuffersPerStage(Constants.TotalGpStorageBuffers);
        _gpUniformBuffers[stage] = new BuffersPerStage(Constants.TotalGpUniformBuffers);
    }
}
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Sets the memory range with the index buffer data, to be used for subsequent draw calls.
/// </summary>
/// <param name="gpuVa">Start GPU virtual address of the index buffer</param>
/// <param name="size">Size, in bytes, of the index buffer</param>
/// <param name="type">Type of each index buffer element</param>
public void SetIndexBuffer(ulong gpuVa, ulong size, IndexType type)
{
    _indexBuffer.Address = TranslateAndCreateBuffer(gpuVa, size);
    _indexBuffer.Size = size;
    _indexBuffer.Type = type;

    _indexBufferDirty = true;
}
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Sets the memory range with vertex buffer data, to be used for subsequent draw calls.
/// </summary>
/// <param name="index">Index of the vertex buffer (up to 16)</param>
/// <param name="gpuVa">GPU virtual address of the buffer</param>
/// <param name="size">Size in bytes of the buffer</param>
/// <param name="stride">Stride of the buffer, defined as the number of bytes of each vertex</param>
/// <param name="divisor">Vertex divisor of the buffer, for instanced draws</param>
public void SetVertexBuffer(int index, ulong gpuVa, ulong size, int stride, int divisor)
{
    ulong address = TranslateAndCreateBuffer(gpuVa, size);

    ref VertexBuffer vb = ref _vertexBuffers[index];

    vb.Address = address;
    vb.Size = size;
    vb.Stride = stride;
    vb.Divisor = divisor;

    _vertexBuffersDirty = true;

    // Track which slots hold a valid buffer (address 0 means unmapped/unbound).
    uint bit = 1u << index;

    if (address == 0)
    {
        _vertexBuffersEnableMask &= ~bit;
    }
    else
    {
        _vertexBuffersEnableMask |= bit;
    }
}
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Sets a storage buffer on the compute pipeline.
/// Storage buffers can be read and written to on shaders.
/// </summary>
/// <param name="index">Index of the storage buffer</param>
/// <param name="gpuVa">Start GPU virtual address of the buffer</param>
/// <param name="size">Size in bytes of the storage buffer</param>
public void SetComputeStorageBuffer(int index, ulong gpuVa, ulong size)
{
    // Align the start address down to the host storage buffer offset
    // alignment, growing the size to compensate for the moved start.
    int alignment = _context.Capabilities.StorageBufferOffsetAlignment;

    size += gpuVa & ((ulong)alignment - 1);
    gpuVa = BitUtils.AlignDown(gpuVa, alignment);

    ulong address = TranslateAndCreateBuffer(gpuVa, size);

    _cpStorageBuffers.Bind(index, address, size);
}
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Sets a storage buffer on the graphics pipeline.
/// Storage buffers can be read and written to on shaders.
/// </summary>
/// <param name="stage">Index of the shader stage</param>
/// <param name="index">Index of the storage buffer</param>
/// <param name="gpuVa">Start GPU virtual address of the buffer</param>
/// <param name="size">Size in bytes of the storage buffer</param>
public void SetGraphicsStorageBuffer(int stage, int index, ulong gpuVa, ulong size)
{
    // Align the start address down to the host storage buffer offset
    // alignment, growing the size to compensate for the moved start.
    int alignment = _context.Capabilities.StorageBufferOffsetAlignment;

    size += gpuVa & ((ulong)alignment - 1);
    gpuVa = BitUtils.AlignDown(gpuVa, alignment);

    ulong address = TranslateAndCreateBuffer(gpuVa, size);

    // Only flag the bindings as dirty if this range actually changed.
    BufferBounds current = _gpStorageBuffers[stage].Buffers[index];

    if (current.Address != address || current.Size != size)
    {
        _gpStorageBuffersDirty = true;
    }

    _gpStorageBuffers[stage].Bind(index, address, size);
}
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Sets a uniform buffer on the compute pipeline.
/// Uniform buffers are read-only from shaders, and have a small capacity.
/// </summary>
/// <param name="index">Index of the uniform buffer</param>
/// <param name="gpuVa">Start GPU virtual address of the buffer</param>
/// <param name="size">Size in bytes of the uniform buffer</param>
public void SetComputeUniformBuffer(int index, ulong gpuVa, ulong size)
{
    _cpUniformBuffers.Bind(index, TranslateAndCreateBuffer(gpuVa, size), size);
}
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Sets a uniform buffer on the graphics pipeline.
/// Uniform buffers are read-only from shaders, and have a small capacity.
/// </summary>
/// <param name="stage">Index of the shader stage</param>
/// <param name="index">Index of the uniform buffer</param>
/// <param name="gpuVa">Start GPU virtual address of the buffer</param>
/// <param name="size">Size in bytes of the uniform buffer</param>
public void SetGraphicsUniformBuffer(int stage, int index, ulong gpuVa, ulong size)
{
    _gpUniformBuffersDirty = true;

    ulong address = TranslateAndCreateBuffer(gpuVa, size);

    _gpUniformBuffers[stage].Bind(index, address, size);
}
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Sets the enabled storage buffers mask on the compute pipeline.
/// Each bit set on the mask indicates that the respective buffer index is enabled.
/// </summary>
/// <param name="mask">Buffer enable mask</param>
public void SetComputeStorageBufferEnableMask(uint mask) => _cpStorageBuffers.EnableMask = mask;
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Sets the enabled storage buffers mask on the graphics pipeline.
/// Each bit set on the mask indicates that the respective buffer index is enabled.
/// </summary>
/// <param name="stage">Index of the shader stage</param>
/// <param name="mask">Buffer enable mask</param>
public void SetGraphicsStorageBufferEnableMask(int stage, uint mask)
{
    _gpStorageBuffersDirty = true;

    _gpStorageBuffers[stage].EnableMask = mask;
}
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Sets the enabled uniform buffers mask on the compute pipeline.
/// Each bit set on the mask indicates that the respective buffer index is enabled.
/// </summary>
/// <param name="mask">Buffer enable mask</param>
public void SetComputeUniformBufferEnableMask(uint mask) => _cpUniformBuffers.EnableMask = mask;
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Sets the enabled uniform buffers mask on the graphics pipeline.
/// Each bit set on the mask indicates that the respective buffer index is enabled.
/// </summary>
/// <param name="stage">Index of the shader stage</param>
/// <param name="mask">Buffer enable mask</param>
public void SetGraphicsUniformBufferEnableMask(int stage, uint mask)
{
    _gpUniformBuffersDirty = true;

    _gpUniformBuffers[stage].EnableMask = mask;
}
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Performs address translation of the GPU virtual address, and creates a
/// new buffer, if needed, for the specified range.
/// </summary>
/// <param name="gpuVa">Start GPU virtual address of the buffer</param>
/// <param name="size">Size in bytes of the buffer</param>
/// <returns>CPU virtual address of the buffer, after address translation</returns>
private ulong TranslateAndCreateBuffer(ulong gpuVa, ulong size)
{
    // A null GPU virtual address means there is nothing to translate.
    if (gpuVa == 0)
    {
        return 0;
    }

    ulong address = _context.MemoryManager.Translate(gpuVa);

    if (address == MemoryManager.BadAddress)
    {
        return 0;
    }

    // Expand the range to page granularity before creating the buffer.
    ulong alignedAddress = address & ~BufferAlignmentMask;
    ulong alignedEndAddress = (address + size + BufferAlignmentMask) & ~BufferAlignmentMask;

    // The buffer must have the size of at least one page.
    if (alignedEndAddress == alignedAddress)
    {
        alignedEndAddress += BufferAlignmentSize;
    }

    CreateBuffer(alignedAddress, alignedEndAddress - alignedAddress);

    return address;
}
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Creates a new buffer for the specified range, if needed.
/// If a buffer where this range can be fully contained already exists,
/// then the creation of a new buffer is not necessary.
/// </summary>
/// <param name="address">Address of the buffer in guest memory</param>
/// <param name="size">Size in bytes of the buffer</param>
private void CreateBuffer(ulong address, ulong size)
{
    int overlapsCount = _buffers.FindOverlapsNonOverlapping(address, size, ref _bufferOverlaps);

    if (overlapsCount != 0)
    {
        // The buffer already exists. We can just return the existing buffer
        // if the buffer we need is fully contained inside the overlapping buffer.
        // Otherwise, we must delete the overlapping buffers and create a bigger buffer
        // that fits all the data we need. We also need to copy the contents from the
        // old buffer(s) to the new buffer.
        ulong endAddress = address + size;

        // Check containment against the first overlap only; presumably the range
        // list guarantees non-overlapping entries, so a fully-containing buffer
        // must be the first (and only) overlap — TODO confirm against RangeList.
        if (_bufferOverlaps[0].Address > address || _bufferOverlaps[0].EndAddress < endAddress)
        {
            // First pass: grow the target range to cover every overlapping
            // buffer, pull in their latest guest data, and remove them from
            // the cache. The removal must happen before the new buffer is
            // added so the range list never holds overlapping entries.
            for (int index = 0; index < overlapsCount; index++)
            {
                Buffer buffer = _bufferOverlaps[index];

                address = Math.Min(address, buffer.Address);
                endAddress = Math.Max(endAddress, buffer.EndAddress);

                // Synchronize before copying so the host copy below sees
                // up-to-date data.
                buffer.SynchronizeMemory(buffer.Address, buffer.Size);

                _buffers.Remove(buffer);
            }

            Buffer newBuffer = new Buffer(_context, address, endAddress - address);

            _buffers.Add(newBuffer);

            // Second pass: migrate the contents of each old buffer into the
            // merged buffer, then dispose the old host resources.
            for (int index = 0; index < overlapsCount; index++)
            {
                Buffer buffer = _bufferOverlaps[index];

                int dstOffset = (int)(buffer.Address - newBuffer.Address);

                buffer.CopyTo(newBuffer, dstOffset);

                buffer.Dispose();
            }

            // Existing bindings may reference the disposed buffers, so force
            // everything to be re-bound on the next commit.
            _rebind = true;
        }
    }
    else
    {
        // No overlap, just create a new buffer.
        Buffer buffer = new Buffer(_context, address, size);

        _buffers.Add(buffer);
    }

    ShrinkOverlapsBufferIfNeeded();
}
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Resizes the temporary buffer used for range list intersection results, if it has grown too much.
/// </summary>
private void ShrinkOverlapsBufferIfNeeded()
{
    if (_bufferOverlaps.Length <= OverlapsBufferMaxCapacity)
    {
        return;
    }

    // A single huge query shouldn't keep a huge scratch array alive forever.
    Array.Resize(ref _bufferOverlaps, OverlapsBufferMaxCapacity);
}
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Gets the address of the compute uniform buffer currently bound at the given index.
/// </summary>
/// <param name="index">Index of the uniform buffer binding</param>
/// <returns>The uniform buffer address, or an undefined value if the buffer is not currently bound</returns>
public ulong GetComputeUniformBufferAddress(int index) => _cpUniformBuffers.Buffers[index].Address;
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Gets the address of the graphics uniform buffer currently bound at the given index.
/// </summary>
/// <param name="stage">Index of the shader stage</param>
/// <param name="index">Index of the uniform buffer binding</param>
/// <returns>The uniform buffer address, or an undefined value if the buffer is not currently bound</returns>
public ulong GetGraphicsUniformBufferAddress(int stage, int index) => _gpUniformBuffers[stage].Buffers[index].Address;
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Ensures that the compute engine bindings are visible to the host GPU.
/// Note: this actually performs the binding using the host graphics API.
/// </summary>
public void CommitComputeBindings()
{
    uint enableMask = _cpStorageBuffers.EnableMask;

    // The "index < 32" guard prevents an infinite loop: C# masks uint shift
    // counts to 5 bits, so (enableMask >> 32) == enableMask and the shift
    // condition alone never becomes false when bit 31 of the mask is set.
    for (int index = 0; index < 32 && (enableMask >> index) != 0; index++)
    {
        if ((enableMask & (1u << index)) == 0)
        {
            continue;
        }

        BufferBounds bounds = _cpStorageBuffers.Buffers[index];

        // Address 0 means no buffer is bound at this index.
        if (bounds.Address == 0)
        {
            continue;
        }

        BufferRange buffer = GetBufferRange(bounds.Address, bounds.Size);

        _context.Renderer.Pipeline.SetStorageBuffer(index, ShaderStage.Compute, buffer);
    }

    enableMask = _cpUniformBuffers.EnableMask;

    for (int index = 0; index < 32 && (enableMask >> index) != 0; index++)
    {
        if ((enableMask & (1u << index)) == 0)
        {
            continue;
        }

        BufferBounds bounds = _cpUniformBuffers.Buffers[index];

        if (bounds.Address == 0)
        {
            continue;
        }

        BufferRange buffer = GetBufferRange(bounds.Address, bounds.Size);

        _context.Renderer.Pipeline.SetUniformBuffer(index, ShaderStage.Compute, buffer);
    }

    // Force rebind after doing compute work.
    _rebind = true;
}
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Ensures that the graphics engine bindings are visible to the host GPU.
/// Note: this actually performs the binding using the host graphics API.
/// </summary>
public void CommitBindings()
{
    // Index buffer: rebind when dirty (or a full rebind was requested),
    // otherwise just synchronize its guest data.
    if (_indexBufferDirty || _rebind)
    {
        _indexBufferDirty = false;

        if (_indexBuffer.Address != 0)
        {
            BufferRange buffer = GetBufferRange(_indexBuffer.Address, _indexBuffer.Size);

            _context.Renderer.Pipeline.SetIndexBuffer(buffer, _indexBuffer.Type);
        }
    }
    else if (_indexBuffer.Address != 0)
    {
        SynchronizeBufferRange(_indexBuffer.Address, _indexBuffer.Size);
    }

    uint vbEnableMask = _vertexBuffersEnableMask;

    if (_vertexBuffersDirty || _rebind)
    {
        _vertexBuffersDirty = false;

        VertexBufferDescriptor[] vertexBuffers = new VertexBufferDescriptor[Constants.TotalVertexBuffers];

        // The "index < 32" guard prevents an infinite loop: C# masks uint
        // shift counts to 5 bits, so (mask >> 32) == mask and the shift
        // condition alone never becomes false when bit 31 of the mask is set.
        for (int index = 0; index < 32 && (vbEnableMask >> index) != 0; index++)
        {
            VertexBuffer vb = _vertexBuffers[index];

            if (vb.Address == 0)
            {
                continue;
            }

            BufferRange buffer = GetBufferRange(vb.Address, vb.Size);

            vertexBuffers[index] = new VertexBufferDescriptor(buffer, vb.Stride, vb.Divisor);
        }

        _context.Renderer.Pipeline.SetVertexBuffers(vertexBuffers);
    }
    else
    {
        for (int index = 0; index < 32 && (vbEnableMask >> index) != 0; index++)
        {
            VertexBuffer vb = _vertexBuffers[index];

            if (vb.Address == 0)
            {
                continue;
            }

            SynchronizeBufferRange(vb.Address, vb.Size);
        }
    }

    // Storage buffers: rebind when dirty, otherwise just synchronize data.
    if (_gpStorageBuffersDirty || _rebind)
    {
        _gpStorageBuffersDirty = false;

        BindBuffers(_gpStorageBuffers, isStorage: true);
    }
    else
    {
        UpdateBuffers(_gpStorageBuffers);
    }

    // Uniform buffers: same policy as storage buffers.
    if (_gpUniformBuffersDirty || _rebind)
    {
        _gpUniformBuffersDirty = false;

        BindBuffers(_gpUniformBuffers, isStorage: false);
    }
    else
    {
        UpdateBuffers(_gpUniformBuffers);
    }

    _rebind = false;
}
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Bind respective buffer bindings on the host API.
/// </summary>
/// <param name="bindings">Bindings to bind</param>
/// <param name="isStorage">True to bind as storage buffer, false to bind as uniform buffers</param>
private void BindBuffers(BuffersPerStage[] bindings, bool isStorage) => BindOrUpdateBuffers(bindings, bind: true, isStorage);
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Updates data for the already bound buffer bindings.
/// </summary>
/// <param name="bindings">Bindings to update</param>
private void UpdateBuffers(BuffersPerStage[] bindings) => BindOrUpdateBuffers(bindings, bind: false);
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// This binds buffers into the host API, or updates data for already bound buffers.
/// </summary>
/// <param name="bindings">Bindings to bind or update</param>
/// <param name="bind">True to bind, false to update</param>
/// <param name="isStorage">True to bind as storage buffer, false to bind as uniform buffer</param>
private void BindOrUpdateBuffers(BuffersPerStage[] bindings, bool bind, bool isStorage = false)
{
    // Stages start at Vertex (enum value 1), so "stage - 1" indexes the array.
    for (ShaderStage stage = ShaderStage.Vertex; stage <= ShaderStage.Fragment; stage++)
    {
        uint enableMask = bindings[(int)stage - 1].EnableMask;

        if (enableMask == 0)
        {
            continue;
        }

        // The "index < 32" guard prevents an infinite loop: C# masks uint
        // shift counts to 5 bits, so (enableMask >> 32) == enableMask and
        // the shift condition alone never becomes false when bit 31 is set.
        for (int index = 0; index < 32 && (enableMask >> index) != 0; index++)
        {
            if ((enableMask & (1u << index)) == 0)
            {
                continue;
            }

            BufferBounds bounds = bindings[(int)stage - 1].Buffers[index];

            // Address 0 means no buffer is bound at this index.
            if (bounds.Address == 0)
            {
                continue;
            }

            if (bind)
            {
                BindBuffer(index, stage, bounds, isStorage);
            }
            else
            {
                SynchronizeBufferRange(bounds.Address, bounds.Size);
            }
        }
    }
}
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Binds a buffer on the host API.
/// </summary>
/// <param name="index">Index to bind the buffer into</param>
/// <param name="stage">Shader stage to bind the buffer into</param>
/// <param name="bounds">Buffer address and size</param>
/// <param name="isStorage">True to bind as storage buffer, false to bind as uniform buffer</param>
private void BindBuffer(int index, ShaderStage stage, BufferBounds bounds, bool isStorage)
{
    BufferRange range = GetBufferRange(bounds.Address, bounds.Size);

    if (isStorage)
    {
        _context.Renderer.Pipeline.SetStorageBuffer(index, stage, range);
    }
    else
    {
        _context.Renderer.Pipeline.SetUniformBuffer(index, stage, range);
    }
}
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Copy a buffer data from a given address to another.
/// </summary>
/// <remarks>
/// This does a GPU side copy.
/// </remarks>
/// <param name="srcVa">GPU virtual address of the copy source</param>
/// <param name="dstVa">GPU virtual address of the copy destination</param>
/// <param name="size">Size in bytes of the copy</param>
public void CopyBuffer(GpuVa srcVa, GpuVa dstVa, ulong size)
{
    ulong srcAddress = TranslateAndCreateBuffer(srcVa.Pack(), size);
    ulong dstAddress = TranslateAndCreateBuffer(dstVa.Pack(), size);

    Buffer src = GetBuffer(srcAddress, size);
    Buffer dst = GetBuffer(dstAddress, size);

    src.HostBuffer.CopyTo(
        dst.HostBuffer,
        (int)(srcAddress - src.Address),
        (int)(dstAddress - dst.Address),
        (int)size);

    // The copy happened on the host buffer, so flush the destination range
    // to make the result visible to guest memory reads.
    dst.Flush(dstAddress, size);
}
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Gets a buffer sub-range for a given memory range.
/// </summary>
/// <param name="address">Start address of the memory range</param>
/// <param name="size">Size in bytes of the memory range</param>
/// <returns>The buffer sub-range for the given range</returns>
private BufferRange GetBufferRange(ulong address, ulong size) => GetBuffer(address, size).GetRange(address, size);
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Gets a buffer for a given memory range.
/// A buffer overlapping with the specified range is assumed to already exist on the cache.
/// </summary>
/// <param name="address">Start address of the memory range</param>
/// <param name="size">Size in bytes of the memory range</param>
/// <returns>The buffer where the range is fully contained</returns>
private Buffer GetBuffer(ulong address, ulong size)
{
    if (size == 0)
    {
        // Zero-sized range: probe a single byte to find the containing
        // buffer, without synchronizing any data.
        return _buffers.FindFirstOverlap(address, 1);
    }

    Buffer buffer = _buffers.FindFirstOverlap(address, size);

    buffer.SynchronizeMemory(address, size);

    return buffer;
}
|
|
|
|
|
2019-12-31 04:22:58 +01:00
|
|
|
/// <summary>
/// Performs guest to host memory synchronization of a given memory range.
/// </summary>
/// <param name="address">Start address of the memory range</param>
/// <param name="size">Size in bytes of the memory range</param>
private void SynchronizeBufferRange(ulong address, ulong size)
{
    if (size != 0)
    {
        // GetBuffer already finds the containing buffer and synchronizes the
        // requested range, so delegate to it instead of duplicating the
        // lookup + SynchronizeMemory sequence here.
        GetBuffer(address, size);
    }
}
|
2019-12-31 23:09:49 +01:00
|
|
|
|
|
|
|
/// <summary>
/// Disposes all buffers in the cache.
/// It's an error to use the buffer manager after disposal.
/// </summary>
public void Dispose()
{
    // Release the host resources of every cached buffer.
    foreach (Buffer cachedBuffer in _buffers)
    {
        cachedBuffer.Dispose();
    }
}
|
2019-10-13 08:02:07 +02:00
|
|
|
}
|
|
|
|
}
|