Defer address space mirror mapping and use it only if strictly needed
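This change replaces the per-page PageTable<ulong> in MemoryManagerHostMapped with a MappingTree, an IntrusiveRedBlackTree of address ranges that records whether each range is Unmapped, Mapped, or MappedWithMirror. Instead of mapping the address space mirror eagerly on every Map call, accesses first try to resolve a physically contiguous backing range; only when the range turns out to be non-contiguous does GetContiguousBlock map the affected pages on the mirror view and route the access through it. Below is a minimal, self-contained sketch of that decision; LazyMirrorExample, Access, and the single-mapping fast path are illustrative assumptions, not the Ryujinx API.

using System;
using System.Collections.Generic;

// Simplified stand-in for the deferred-mirror decision introduced by this commit.
// The real implementation lives in MappingTree / MemoryManagerHostMapped and works
// on MemoryBlock views; everything below is illustrative only.
class LazyMirrorExample
{
    private const ulong PageSize = 0x1000;

    // (va, pa, size) triples standing in for the tree's Mapped nodes.
    private readonly List<(ulong Va, ulong Pa, ulong Size)> _mappings = new();

    // Pages already mapped on the mirror view.
    private readonly HashSet<ulong> _mirroredPages = new();

    public void Map(ulong va, ulong pa, ulong size) => _mappings.Add((va, pa, size));

    // Mirrors the intent of GetContiguousBlock: serve the access from backing
    // memory when the range is physically contiguous, otherwise populate the
    // mirror lazily and serve it from there.
    public string Access(ulong va, ulong size)
    {
        if (TryGetContiguousPa(va, size, out ulong pa))
        {
            return $"backing memory at PA 0x{pa:X}";
        }

        ulong firstPage = va & ~(PageSize - 1);
        ulong endPage = (va + size + PageSize - 1) & ~(PageSize - 1);

        for (ulong page = firstPage; page < endPage; page += PageSize)
        {
            if (_mirroredPages.Add(page))
            {
                // The real code calls mirror.MapView(backingMemory, ...) here and
                // flips the tree nodes to MappedWithMirror under the writer lock.
            }
        }

        return $"mirror view at VA 0x{va:X}";
    }

    // Fast path: the whole range falls inside a single mapping, so its physical
    // backing is contiguous. The real tree also detects contiguity across
    // adjacent mappings; this sketch keeps only the simple case.
    private bool TryGetContiguousPa(ulong va, ulong size, out ulong pa)
    {
        foreach (var (mapVa, mapPa, mapSize) in _mappings)
        {
            if (va >= mapVa && va + size <= mapVa + mapSize)
            {
                pa = mapPa + (va - mapVa);
                return true;
            }
        }

        pa = 0;
        return false;
    }
}

For example, with 0x10000..0x12000 mapped to PA 0x4000 and 0x12000..0x13000 mapped to PA 0x9000, an access inside the first mapping resolves directly to backing memory, while an access spanning 0x11F00..0x12100 falls back to the mirror, which is only mapped at that point.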

gdk 2022-06-22 18:48:41 -03:00 committed by Mary-nyan
parent 619ac86bd0
commit 5b5810a46a
2 changed files with 382 additions and 82 deletions


@@ -0,0 +1,353 @@
using Ryujinx.Common.Collections;
using Ryujinx.Memory;
using Ryujinx.Memory.Range;
using System;
using System.Collections.Generic;
using System.Threading;
namespace Ryujinx.Cpu.Jit
{
class MappingTree
{
private const ulong PageSize = 0x1000;
private enum MappingState : byte
{
Unmapped,
Mapped,
MappedWithMirror
}
private class Mapping : IntrusiveRedBlackTreeNode<Mapping>, IComparable<Mapping>
{
public ulong Address { get; private set; }
public ulong Size { get; private set; }
public ulong EndAddress => Address + Size;
public ulong BackingOffset { get; private set; }
public MappingState State { get; private set; }
public Mapping(ulong address, ulong size, ulong backingOffset, MappingState state)
{
Address = address;
Size = size;
BackingOffset = backingOffset;
State = state;
}
public Mapping Split(ulong splitAddress)
{
ulong leftSize = splitAddress - Address;
ulong rightSize = EndAddress - splitAddress;
Mapping left = new Mapping(Address, leftSize, BackingOffset, State);
Address = splitAddress;
Size = rightSize;
if (State != MappingState.Unmapped)
{
BackingOffset += leftSize;
}
return left;
}
public void UpdateState(ulong newBackingOffset, MappingState newState)
{
BackingOffset = newBackingOffset;
State = newState;
}
public void Extend(ulong sizeDelta)
{
Size += sizeDelta;
}
public int CompareTo(Mapping other)
{
if (Address < other.Address)
{
return -1;
}
else if (Address <= other.EndAddress - 1UL)
{
return 0;
}
else
{
return 1;
}
}
}
private readonly IntrusiveRedBlackTree<Mapping> _tree;
private readonly ReaderWriterLock _treeLock;
public MappingTree(ulong addressSpaceSize)
{
_tree = new IntrusiveRedBlackTree<Mapping>();
_treeLock = new ReaderWriterLock();
_tree.Add(new Mapping(0UL, addressSpaceSize, 0UL, MappingState.Unmapped));
}
public void Map(ulong va, ulong pa, ulong size)
{
_treeLock.AcquireWriterLock(Timeout.Infinite);
Update(va, pa, size, MappingState.Mapped);
_treeLock.ReleaseWriterLock();
}
public void Unmap(ulong va, ulong size)
{
_treeLock.AcquireWriterLock(Timeout.Infinite);
Update(va, 0UL, size, MappingState.Unmapped);
_treeLock.ReleaseWriterLock();
}
public IEnumerable<MemoryRange> GetPhysicalRegions(ulong va, ulong size)
{
_treeLock.AcquireReaderLock(Timeout.Infinite);
var regions = GetPhysicalRegionsImpl(va, size);
_treeLock.ReleaseReaderLock();
return regions;
}
public bool TryGetContiguousPa(ulong va, ulong size, out ulong pa)
{
_treeLock.AcquireReaderLock(Timeout.Infinite);
bool result = TryGetContiguousPaImpl(va, size, out pa);
_treeLock.ReleaseReaderLock();
return result;
}
public (MemoryBlock, ulong) GetContiguousBlock(MemoryBlock backingMemory, MemoryBlock mirror, ulong va, ulong size)
{
_treeLock.AcquireReaderLock(Timeout.Infinite);
var result = GetContiguousBlockImpl(backingMemory, mirror, va, size);
_treeLock.ReleaseReaderLock();
return result;
}
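// Replaces the state of every node overlapping [va, va + size), splitting nodes at the range boundaries and coalescing neighbors that end up with the same state and contiguous backing.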
private void Update(ulong va, ulong pa, ulong size, MappingState state)
{
Mapping map = _tree.GetNode(new Mapping(va, 1UL, 0UL, MappingState.Unmapped));
Update(map, va, pa, size, state);
}
private Mapping Update(Mapping map, ulong va, ulong pa, ulong size, MappingState state)
{
ulong endAddress = va + size;
for (; map != null; map = map.Successor)
{
if (map.Address < va)
{
_tree.Add(map.Split(va));
}
if (map.EndAddress > endAddress)
{
Mapping newMap = map.Split(endAddress);
_tree.Add(newMap);
map = newMap;
}
map.UpdateState(pa, state);
map = TryCoalesce(map);
if (map.EndAddress >= endAddress)
{
break;
}
}
return map;
}
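// Merges the node with its predecessor and/or successor when they have the same state and contiguous backing offsets.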
private Mapping TryCoalesce(Mapping map)
{
Mapping previousMap = map.Predecessor;
Mapping nextMap = map.Successor;
if (previousMap != null && CanCoalesce(previousMap, map))
{
previousMap.Extend(map.Size);
_tree.Remove(map);
map = previousMap;
}
if (nextMap != null && CanCoalesce(map, nextMap))
{
map.Extend(nextMap.Size);
_tree.Remove(nextMap);
}
return map;
}
private static bool CanCoalesce(Mapping left, Mapping right)
{
if (left.State != right.State)
{
return false;
}
return left.State == MappingState.Unmapped || (left.BackingOffset + left.Size == right.BackingOffset);
}
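// Collects the physical ranges backing [va, va + size), merging physically contiguous nodes; throws if any part of the range is unmapped.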
private IEnumerable<MemoryRange> GetPhysicalRegionsImpl(ulong va, ulong size)
{
Mapping map = _tree.GetNode(new Mapping(va, 1UL, 0UL, MappingState.Unmapped));
if (map == null)
{
ThrowInvalidMemoryRegionException($"Not mapped: va=0x{va:X16}, size=0x{size:X16}");
}
var regions = new List<MemoryRange>();
ulong endAddress = va + size;
ulong regionStart = 0;
ulong regionSize = 0;
for (; map != null; map = map.Successor)
{
if (map.State == MappingState.Unmapped)
{
ThrowInvalidMemoryRegionException($"Not mapped: va=0x{va:X16}, size=0x{size:X16}");
}
ulong clampedAddress = Math.Max(map.Address, va);
ulong clampedEndAddress = Math.Min(map.EndAddress, endAddress);
ulong clampedSize = clampedEndAddress - clampedAddress;
ulong pa = map.BackingOffset + (clampedAddress - map.Address);
if (pa != regionStart + regionSize)
{
if (regionSize != 0)
{
regions.Add(new MemoryRange(regionStart, regionSize));
}
regionStart = pa;
regionSize = clampedSize;
}
else
{
regionSize += clampedSize;
}
if (map.EndAddress >= endAddress)
{
break;
}
}
if (regionSize != 0)
{
regions.Add(new MemoryRange(regionStart, regionSize));
}
return regions;
}
private bool TryGetContiguousPaImpl(ulong va, ulong size, out ulong pa)
{
Mapping map = _tree.GetNode(new Mapping(va, 1UL, 0UL, MappingState.Unmapped));
ulong endAddress = va + size;
if (map != null && map.Address <= va && map.EndAddress >= endAddress)
{
pa = map.BackingOffset + (va - map.Address);
return true;
}
pa = 0;
return false;
}
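// Returns (backing memory, physical offset) when the whole range is physically contiguous; otherwise maps the affected pages on the mirror (upgrading to the writer lock) and returns (mirror, va).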
private (MemoryBlock, ulong) GetContiguousBlockImpl(MemoryBlock backingMemory, MemoryBlock mirror, ulong va, ulong size)
{
Mapping map = _tree.GetNode(new Mapping(va, 1UL, 0UL, MappingState.Unmapped));
ulong endAddress = va + size;
if (map != null && map.Address <= va && map.EndAddress >= endAddress)
{
ulong pa = map.BackingOffset + (va - map.Address);
return (backingMemory, pa);
}
if (map != null)
{
Mapping firstMap = map;
bool contiguous = true;
ulong expectedPa = map.BackingOffset + map.Size;
while ((map = map.Successor) != null && map.Address < endAddress)
{
if (map.State == MappingState.Unmapped || map.BackingOffset != expectedPa)
{
contiguous = false;
break;
}
if (map.EndAddress >= endAddress)
{
break;
}
expectedPa = map.BackingOffset + map.Size;
}
if (contiguous && map != null)
{
ulong pa = firstMap.BackingOffset + (va - firstMap.Address);
return (backingMemory, pa);
}
map = firstMap;
}
ulong endVaAligned = (endAddress + PageSize - 1) & ~(PageSize - 1);
ulong vaAligned = va & ~(PageSize - 1);
// Make sure the range that will be accessed on the mirror is fully mapped.
for (; map != null; map = map.Successor)
{
if (map.State == MappingState.Mapped)
{
ulong clampedAddress = Math.Max(map.Address, vaAligned);
ulong clampedEndAddress = Math.Min(map.EndAddress, endVaAligned);
ulong clampedSize = clampedEndAddress - clampedAddress;
ulong backingOffset = map.BackingOffset + (clampedAddress - map.Address);
LockCookie lockCookie = _treeLock.UpgradeToWriterLock(Timeout.Infinite);
mirror.MapView(backingMemory, backingOffset, clampedAddress, clampedSize);
map = Update(map, clampedAddress, backingOffset, clampedSize, MappingState.MappedWithMirror);
_treeLock.DowngradeFromWriterLock(ref lockCookie);
}
if (map.EndAddress >= endAddress)
{
break;
}
}
return (mirror, va);
}
private static void ThrowInvalidMemoryRegionException(string message) => throw new InvalidMemoryRegionException(message);
}
}
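
GetPhysicalRegions and TryGetContiguousPa only ever hold the reader lock, while GetContiguousBlockImpl upgrades to the writer lock just for the ranges it has to map on the mirror and downgrades again afterwards. A standalone sketch of that ReaderWriterLock upgrade/downgrade pattern, assuming a placeholder _mirrored flag in place of the actual mirror mapping and tree update:

using System;
using System.Threading;

class LockUpgradeExample
{
    private static readonly ReaderWriterLock _lock = new ReaderWriterLock();
    private static bool _mirrored;

    static void Main()
    {
        _lock.AcquireReaderLock(Timeout.Infinite);
        try
        {
            if (!_mirrored)
            {
                // Take the writer lock only for the mutation, then return to
                // the reader lock that was held on entry.
                LockCookie cookie = _lock.UpgradeToWriterLock(Timeout.Infinite);
                try
                {
                    _mirrored = true; // placeholder for mirror.MapView + tree update
                }
                finally
                {
                    _lock.DowngradeFromWriterLock(ref cookie);
                }
            }

            Console.WriteLine($"Mirrored: {_mirrored}");
        }
        finally
        {
            _lock.ReleaseReaderLock();
        }
    }
}

The try/finally blocks are defensive additions for this standalone sketch; the MappingTree methods above release their locks directly.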


@@ -42,7 +42,7 @@ namespace Ryujinx.Cpu.Jit
private readonly ulong _addressSpaceSize;
private readonly MemoryBlock _backingMemory;
private readonly PageTable<ulong> _pageTable;
private readonly MappingTree _mappingTree;
private readonly MemoryEhMeilleure _memoryEh;
@@ -68,7 +68,6 @@ namespace Ryujinx.Cpu.Jit
public MemoryManagerHostMapped(MemoryBlock backingMemory, ulong addressSpaceSize, bool unsafeMode, InvalidAccessHandler invalidAccessHandler = null)
{
_backingMemory = backingMemory;
_pageTable = new PageTable<ulong>();
_invalidAccessHandler = invalidAccessHandler;
_unsafeMode = unsafeMode;
_addressSpaceSize = addressSpaceSize;
@@ -84,6 +83,8 @@ namespace Ryujinx.Cpu.Jit
AddressSpaceBits = asBits;
_mappingTree = new MappingTree(asSize);
_pageBitmap = new ulong[1 << (AddressSpaceBits - (PageBits + PageToPteShift))];
MemoryAllocationFlags asFlags = MemoryAllocationFlags.Reserve | MemoryAllocationFlags.ViewCompatible;
@@ -150,9 +151,8 @@ namespace Ryujinx.Cpu.Jit
AssertValidAddressAndSize(va, size);
_addressSpace.MapView(_backingMemory, pa, va, size);
_addressSpaceMirror.MapView(_backingMemory, pa, va, size);
AddMapping(va, size);
PtMap(va, pa, size);
_mappingTree.Map(va, pa, size);
Tracking.Map(va, size);
}
@@ -166,34 +166,11 @@ namespace Ryujinx.Cpu.Jit
Tracking.Unmap(va, size);
RemoveMapping(va, size);
PtUnmap(va, size);
_mappingTree.Unmap(va, size);
_addressSpace.UnmapView(_backingMemory, va, size);
_addressSpaceMirror.UnmapView(_backingMemory, va, size);
}
private void PtMap(ulong va, ulong pa, ulong size)
{
while (size != 0)
{
_pageTable.Map(va, pa);
va += PageSize;
pa += PageSize;
size -= PageSize;
}
}
private void PtUnmap(ulong va, ulong size)
{
while (size != 0)
{
_pageTable.Unmap(va);
va += PageSize;
size -= PageSize;
}
}
/// <inheritdoc/>
public T Read<T>(ulong va) where T : unmanaged
{
@@ -201,7 +178,8 @@ namespace Ryujinx.Cpu.Jit
{
AssertMapped(va, (ulong)Unsafe.SizeOf<T>());
return _addressSpaceMirror.Read<T>(va);
(MemoryBlock block, ulong offset) = GetContiguousBlock(va, (ulong)Unsafe.SizeOf<T>());
return block.Read<T>(offset);
}
catch (InvalidMemoryRegionException)
{
@@ -241,7 +219,8 @@ namespace Ryujinx.Cpu.Jit
{
AssertMapped(va, (ulong)data.Length);
_addressSpaceMirror.Read(va, data);
(MemoryBlock block, ulong offset) = GetContiguousBlock(va, (ulong)data.Length);
block.Read(offset, data);
}
catch (InvalidMemoryRegionException)
{
@@ -260,7 +239,8 @@ namespace Ryujinx.Cpu.Jit
{
SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), write: true);
_addressSpaceMirror.Write(va, value);
(MemoryBlock block, ulong offset) = GetContiguousBlock(va, (ulong)Unsafe.SizeOf<T>());
block.Write(offset, value);
}
catch (InvalidMemoryRegionException)
{
@@ -274,10 +254,12 @@ namespace Ryujinx.Cpu.Jit
/// <inheritdoc/>
public void Write(ulong va, ReadOnlySpan<byte> data)
{
try {
try
{
SignalMemoryTracking(va, (ulong)data.Length, write: true);
_addressSpaceMirror.Write(va, data);
(MemoryBlock block, ulong offset) = GetContiguousBlock(va, (ulong)data.Length);
block.Write(offset, data);
}
catch (InvalidMemoryRegionException)
{
@@ -295,7 +277,8 @@ namespace Ryujinx.Cpu.Jit
{
AssertMapped(va, (ulong)data.Length);
_addressSpaceMirror.Write(va, data);
(MemoryBlock block, ulong offset) = GetContiguousBlock(va, (ulong)data.Length);
block.Write(offset, data);
}
catch (InvalidMemoryRegionException)
{
@@ -318,7 +301,8 @@ namespace Ryujinx.Cpu.Jit
AssertMapped(va, (ulong)size);
}
return _addressSpaceMirror.GetSpan(va, size);
(MemoryBlock block, ulong offset) = GetContiguousBlock(va, (ulong)size);
return block.GetSpan(offset, size);
}
/// <inheritdoc/>
@@ -333,7 +317,8 @@ namespace Ryujinx.Cpu.Jit
AssertMapped(va, (ulong)size);
}
return _addressSpaceMirror.GetWritableRegion(va, size);
(MemoryBlock block, ulong offset) = GetContiguousBlock(va, (ulong)size);
return block.GetWritableRegion(offset, size);
}
/// <inheritdoc/>
@@ -341,7 +326,8 @@ namespace Ryujinx.Cpu.Jit
{
SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), true);
return ref _addressSpaceMirror.GetRef<T>(va);
(MemoryBlock block, ulong offset) = GetContiguousBlock(va, (ulong)Unsafe.SizeOf<T>());
return ref block.GetRef<T>(offset);
}
/// <inheritdoc/>
@@ -428,51 +414,7 @@ namespace Ryujinx.Cpu.Jit
/// <inheritdoc/>
public IEnumerable<MemoryRange> GetPhysicalRegions(ulong va, ulong size)
{
int pages = GetPagesCount(va, (uint)size, out va);
var regions = new List<MemoryRange>();
ulong regionStart = GetPhysicalAddressChecked(va);
ulong regionSize = PageSize;
for (int page = 0; page < pages - 1; page++)
{
if (!ValidateAddress(va + PageSize))
{
return null;
}
ulong newPa = GetPhysicalAddressChecked(va + PageSize);
if (GetPhysicalAddressChecked(va) + PageSize != newPa)
{
regions.Add(new MemoryRange(regionStart, regionSize));
regionStart = newPa;
regionSize = 0;
}
va += PageSize;
regionSize += PageSize;
}
regions.Add(new MemoryRange(regionStart, regionSize));
return regions;
}
private ulong GetPhysicalAddressChecked(ulong va)
{
if (!IsMapped(va))
{
ThrowInvalidMemoryRegionException($"Not mapped: va=0x{va:X16}");
}
return GetPhysicalAddressInternal(va);
}
private ulong GetPhysicalAddressInternal(ulong va)
{
return _pageTable.Read(va) + (va & PageMask);
return _mappingTree.GetPhysicalRegions(va, size);
}
/// <inheritdoc/>
@@ -684,6 +626,11 @@ namespace Ryujinx.Cpu.Jit
return new CpuSmartMultiRegionHandle(Tracking.BeginSmartGranularTracking(address, size, granularity));
}
private (MemoryBlock, ulong) GetContiguousBlock(ulong va, ulong size)
{
return _mappingTree.GetContiguousBlock(_backingMemory, _addressSpaceMirror, va, size);
}
/// <summary>
/// Adds the given address mapping to the page table.
/// </summary>