Ryujinx/Ryujinx.Memory/AddressSpaceManager.cs

using Ryujinx.Memory.Range;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

namespace Ryujinx.Memory
{
    /// <summary>
    /// Represents an address space manager.
    /// Supports virtual memory region mapping, address translation and read/write access to mapped regions.
    /// </summary>
    public sealed class AddressSpaceManager : IVirtualMemoryManager, IWritableBlock
    {
        public const int PageBits = PageTable<ulong>.PageBits;
        public const int PageSize = PageTable<ulong>.PageSize;
        public const int PageMask = PageTable<ulong>.PageMask;
        /// <summary>
        /// Address space width in bits.
        /// </summary>
        public int AddressSpaceBits { get; }

        private readonly ulong _addressSpaceSize;

        private readonly MemoryBlock _backingMemory;
        private readonly PageTable<ulong> _pageTable;

        /// <summary>
        /// Creates a new instance of the memory manager.
        /// </summary>
        /// <param name="backingMemory">Physical backing memory where virtual memory will be mapped to</param>
        /// <param name="addressSpaceSize">Size of the address space</param>
        public AddressSpaceManager(MemoryBlock backingMemory, ulong addressSpaceSize)
        {
            ulong asSize = PageSize;
            int asBits = PageBits;

            while (asSize < addressSpaceSize)
            {
                asSize <<= 1;
                asBits++;
            }

            AddressSpaceBits = asBits;
            _addressSpaceSize = asSize;
            _backingMemory = backingMemory;
            _pageTable = new PageTable<ulong>();
        }
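
        // Illustrative example: a requested addressSpaceSize of 0x180000000 (6 GiB)
        // is rounded up to the next power of two by the loop above, so asSize becomes
        // 0x200000000 and AddressSpaceBits becomes 33. Address validation is performed
        // against the rounded size.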
        /// <inheritdoc/>
        public void Map(ulong va, ulong pa, ulong size)
        {
            AssertValidAddressAndSize(va, size);

            while (size != 0)
            {
                _pageTable.Map(va, pa);

                va += PageSize;
                pa += PageSize;
                size -= PageSize;
            }
        }
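
        // Illustrative example: Map(va: 0x1000, pa: 0x4000, size: 0x3000) creates
        // three page table entries, 0x1000 -> 0x4000, 0x2000 -> 0x5000 and
        // 0x3000 -> 0x6000. The loop assumes va, pa and size are page aligned.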
        /// <inheritdoc/>
        public void Unmap(ulong va, ulong size)
        {
            AssertValidAddressAndSize(va, size);

            while (size != 0)
            {
                _pageTable.Unmap(va);

                va += PageSize;
                size -= PageSize;
            }
        }
        /// <inheritdoc/>
        public T Read<T>(ulong va) where T : unmanaged
        {
            return MemoryMarshal.Cast<byte, T>(GetSpan(va, Unsafe.SizeOf<T>()))[0];
        }

        /// <inheritdoc/>
        public void Read(ulong va, Span<byte> data)
        {
            ReadImpl(va, data);
        }

        /// <inheritdoc/>
        public void Write<T>(ulong va, T value) where T : unmanaged
        {
            Write(va, MemoryMarshal.Cast<T, byte>(MemoryMarshal.CreateSpan(ref value, 1)));
        }
        /// <inheritdoc/>
        public void Write(ulong va, ReadOnlySpan<byte> data)
        {
            if (data.Length == 0)
            {
                return;
            }

            AssertValidAddressAndSize(va, (ulong)data.Length);

            if (IsContiguousAndMapped(va, data.Length))
            {
                data.CopyTo(_backingMemory.GetSpan(GetPhysicalAddressInternal(va), data.Length));
            }
            else
            {
                int offset = 0, size;

                if ((va & PageMask) != 0)
                {
                    ulong pa = GetPhysicalAddressInternal(va);

                    size = Math.Min(data.Length, PageSize - (int)(va & PageMask));

                    data.Slice(0, size).CopyTo(_backingMemory.GetSpan(pa, size));

                    offset += size;
                }

                for (; offset < data.Length; offset += size)
                {
                    ulong pa = GetPhysicalAddressInternal(va + (ulong)offset);

                    size = Math.Min(data.Length - offset, PageSize);

                    data.Slice(offset, size).CopyTo(_backingMemory.GetSpan(pa, size));
                }
            }
        }
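
        // Illustrative example of the non-contiguous path above: Write(0x1FF0, data)
        // with a 0x20-byte buffer first copies 0x10 bytes into the page backing
        // 0x1FF0, then copies the remaining 0x10 bytes into the page backing 0x2000.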
        /// <inheritdoc/>
        public ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false)
        {
            if (size == 0)
            {
                return ReadOnlySpan<byte>.Empty;
            }

            if (IsContiguousAndMapped(va, size))
            {
                return _backingMemory.GetSpan(GetPhysicalAddressInternal(va), size);
            }
            else
            {
                Span<byte> data = new byte[size];

                ReadImpl(va, data);

                return data;
            }
        }
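
        // Note: when the range is not physically contiguous, the span above is backed
        // by a heap copy, so it reflects guest memory at the time of the call and
        // later guest writes will not be visible through it.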
        /// <inheritdoc/>
        public unsafe WritableRegion GetWritableRegion(ulong va, int size, bool tracked = false)
        {
            if (size == 0)
            {
                return new WritableRegion(null, va, Memory<byte>.Empty);
            }

            if (IsContiguousAndMapped(va, size))
            {
                return new WritableRegion(null, va, _backingMemory.GetMemory(GetPhysicalAddressInternal(va), size));
            }
            else
            {
                Memory<byte> memory = new byte[size];

                GetSpan(va, size).CopyTo(memory.Span);

                return new WritableRegion(this, va, memory);
            }
        }
        /// <inheritdoc/>
        public ref T GetRef<T>(ulong va) where T : unmanaged
        {
            if (!IsContiguous(va, Unsafe.SizeOf<T>()))
            {
                ThrowMemoryNotContiguous();
            }

            return ref _backingMemory.GetRef<T>(GetPhysicalAddressInternal(va));
        }
        /// <summary>
        /// Computes the number of pages in a virtual address range.
        /// </summary>
        /// <param name="va">Virtual address of the range</param>
        /// <param name="size">Size of the range</param>
        /// <param name="startVa">The virtual address of the beginning of the first page</param>
        /// <remarks>This function does not differentiate between allocated and unallocated pages.</remarks>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private int GetPagesCount(ulong va, uint size, out ulong startVa)
        {
            // WARNING: Always check that the ulong arithmetic below cannot overflow.
            startVa = va & ~(ulong)PageMask;
            ulong vaSpan = (va - startVa + size + PageMask) & ~(ulong)PageMask;

            return (int)(vaSpan / PageSize);
        }
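
        // Illustrative example: for va = 0x1234 and size = 0x2000, startVa becomes
        // 0x1000 and vaSpan becomes 0x3000, so the method returns 3: the range
        // [0x1234, 0x3234) touches the pages at 0x1000, 0x2000 and 0x3000.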
        private static void ThrowMemoryNotContiguous() => throw new MemoryNotContiguousException();

        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private bool IsContiguousAndMapped(ulong va, int size) => IsContiguous(va, size) && IsMapped(va);

        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private bool IsContiguous(ulong va, int size)
        {
            if (!ValidateAddress(va) || !ValidateAddressAndSize(va, (ulong)size))
            {
                return false;
            }

            int pages = GetPagesCount(va, (uint)size, out va);

            for (int page = 0; page < pages - 1; page++)
            {
                if (!ValidateAddress(va + PageSize))
                {
                    return false;
                }

                if (GetPhysicalAddressInternal(va) + PageSize != GetPhysicalAddressInternal(va + PageSize))
                {
                    return false;
                }

                va += PageSize;
            }

            return true;
        }
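
        // Illustrative example: with 0x1000 -> 0x4000, 0x2000 -> 0x5000 and
        // 0x3000 -> 0x9000 mapped, IsContiguous(0x1000, 0x2000) returns true while
        // IsContiguous(0x1000, 0x3000) returns false, since the third page breaks
        // the physical run.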
        /// <inheritdoc/>
        public IEnumerable<MemoryRange> GetPhysicalRegions(ulong va, ulong size)
        {
            if (size == 0)
            {
                return Enumerable.Empty<MemoryRange>();
            }

            if (!ValidateAddress(va) || !ValidateAddressAndSize(va, size))
            {
                return null;
            }

            int pages = GetPagesCount(va, (uint)size, out va);

            var regions = new List<MemoryRange>();

            ulong regionStart = GetPhysicalAddressInternal(va);
            ulong regionSize = PageSize;

            for (int page = 0; page < pages - 1; page++)
            {
                if (!ValidateAddress(va + PageSize))
                {
                    return null;
                }

                ulong newPa = GetPhysicalAddressInternal(va + PageSize);

                if (GetPhysicalAddressInternal(va) + PageSize != newPa)
                {
                    regions.Add(new MemoryRange(regionStart, regionSize));
                    regionStart = newPa;
                    regionSize = 0;
                }

                va += PageSize;
                regionSize += PageSize;
            }

            regions.Add(new MemoryRange(regionStart, regionSize));

            return regions;
        }
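
        // Illustrative example: with the mappings 0x1000 -> 0x4000, 0x2000 -> 0x5000
        // and 0x3000 -> 0x9000, GetPhysicalRegions(0x1000, 0x3000) coalesces the
        // first two pages and returns two ranges: (0x4000, 0x2000) and (0x9000, 0x1000).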
        private void ReadImpl(ulong va, Span<byte> data)
        {
            if (data.Length == 0)
            {
                return;
            }

            AssertValidAddressAndSize(va, (ulong)data.Length);

            int offset = 0, size;

            if ((va & PageMask) != 0)
            {
                ulong pa = GetPhysicalAddressInternal(va);

                size = Math.Min(data.Length, PageSize - (int)(va & PageMask));

                _backingMemory.GetSpan(pa, size).CopyTo(data.Slice(0, size));

                offset += size;
            }

            for (; offset < data.Length; offset += size)
            {
                ulong pa = GetPhysicalAddressInternal(va + (ulong)offset);

                size = Math.Min(data.Length - offset, PageSize);

                _backingMemory.GetSpan(pa, size).CopyTo(data.Slice(offset, size));
            }
        }
        /// <inheritdoc/>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public bool IsMapped(ulong va)
        {
            if (!ValidateAddress(va))
            {
                return false;
            }

            return _pageTable.Read(va) != 0;
        }

        /// <inheritdoc/>
        public bool IsRangeMapped(ulong va, ulong size)
        {
            if (size == 0UL)
            {
                return true;
            }

            if (!ValidateAddressAndSize(va, size))
            {
                return false;
            }

            int pages = GetPagesCount(va, (uint)size, out va);

            for (int page = 0; page < pages; page++)
            {
                if (!IsMapped(va))
                {
                    return false;
                }

                va += PageSize;
            }

            return true;
        }
        private bool ValidateAddress(ulong va)
        {
            return va < _addressSpaceSize;
        }

        /// <summary>
        /// Checks if the combination of virtual address and size is part of the addressable space.
        /// </summary>
        /// <param name="va">Virtual address of the range</param>
        /// <param name="size">Size of the range in bytes</param>
        /// <returns>True if the combination of virtual address and size is part of the addressable space</returns>
        private bool ValidateAddressAndSize(ulong va, ulong size)
        {
            ulong endVa = va + size;
            return endVa >= va && endVa >= size && endVa <= _addressSpaceSize;
        }
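
        // Illustrative example of the overflow guard: for va = 0xFFFFFFFFFFFFF000 and
        // size = 0x2000, endVa wraps around to 0x1000, so the endVa >= va check fails
        // and the range is rejected even though endVa <= _addressSpaceSize would hold.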
        /// <summary>
        /// Ensures the combination of virtual address and size is part of the addressable space.
        /// </summary>
        /// <param name="va">Virtual address of the range</param>
        /// <param name="size">Size of the range in bytes</param>
        /// <exception cref="InvalidMemoryRegionException">Thrown when the specified memory region falls outside the addressable space</exception>
        private void AssertValidAddressAndSize(ulong va, ulong size)
        {
            if (!ValidateAddressAndSize(va, size))
            {
                throw new InvalidMemoryRegionException($"va=0x{va:X16}, size=0x{size:X16}");
            }
        }

        private ulong GetPhysicalAddressInternal(ulong va)
        {
            return _pageTable.Read(va) + (va & PageMask);
        }
        /// <summary>
        /// Reprotects a region of virtual memory for tracking. Sets software protection bits.
        /// </summary>
        /// <param name="va">Virtual address base</param>
        /// <param name="size">Size of the region to protect</param>
        /// <param name="protection">Memory protection to set</param>
        public void TrackingReprotect(ulong va, ulong size, MemoryPermission protection)
        {
            throw new NotImplementedException();
        }

        /// <inheritdoc/>
        public void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false)
        {
            // Only the ARM Memory Manager has tracking for now.
        }
    }
}
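
// Usage sketch (illustrative only; it assumes the MemoryBlock(ulong size)
// constructor from Ryujinx.Memory and page-aligned addresses):
//
//     var backing = new MemoryBlock(0x100000);
//     var manager = new AddressSpaceManager(backing, 1UL << 32);
//
//     manager.Map(va: 0x1000, pa: 0x0, size: 0x2000);   // map two pages
//     manager.Write(0x1000, 0xCAFEBABEu);               // write through the VA
//     uint value = manager.Read<uint>(0x1000);          // value == 0xCAFEBABE
//     manager.Unmap(0x1000, 0x2000);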