using Ryujinx.Cpu.Signal;
using Ryujinx.Memory;
using System;
using System.Diagnostics;
using System.Numerics;
using System.Runtime.InteropServices;
namespace Ryujinx.Cpu.Jit.HostTracked
{
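    /// <summary>
    /// Flat page table of host addresses that JIT-compiled code reads directly.
    /// Each 8-byte entry stores the difference between the host pointer and the
    /// guest virtual address of a 4 KiB page, so translation is a single table
    /// read plus an add. Table pages are committed lazily on first use, and
    /// entries for unmapped guest pages point at a trailing guard page so that
    /// accesses through them fault.
    /// </summary>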
    sealed class NativePageTable : IDisposable
    {
        private delegate ulong TrackingEventDelegate(ulong address, ulong size, bool write);
        private const int PageBits = 12;
        private const int PageSize = 1 << PageBits;
        private const int PageMask = PageSize - 1;

        private const int PteSize = 8;
        private readonly int _bitsPerPtPage;
        private readonly int _entriesPerPtPage;
        private readonly int _pageCommitmentBits;

        private readonly PageTable<ulong> _pageTable;
        private readonly MemoryBlock _nativePageTable;
        private readonly ulong[] _pageCommitmentBitmap;
        private readonly ulong _hostPageSize;
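        // Kept in a field so the delegate stays alive while native code holds
        // the function pointer registered with the signal handler.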
        private readonly TrackingEventDelegate _trackingEvent;

        private bool _disposed;
        public IntPtr PageTablePointer => _nativePageTable.Pointer;
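        // The table reserves one 8-byte entry per guest page, plus one extra host
        // page at the end that serves as the guard page for unmapped entries.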
        public NativePageTable(ulong asSize)
        {
            ulong hostPageSize = MemoryBlock.GetPageSize();
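            // One host page of the table holds hostPageSize / PteSize entries, and
            // therefore covers PageSize * _entriesPerPtPage bytes of guest address
            // space. The commitment bitmap tracks one bit per table page.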
            _entriesPerPtPage = (int)(hostPageSize / sizeof(ulong));
            _bitsPerPtPage = BitOperations.Log2((uint)_entriesPerPtPage);
            _pageCommitmentBits = PageBits + _bitsPerPtPage;
            _hostPageSize = hostPageSize;
            _pageTable = new PageTable<ulong>();
            _nativePageTable = new MemoryBlock((asSize / PageSize) * PteSize + _hostPageSize, MemoryAllocationFlags.Reserve);
            _pageCommitmentBitmap = new ulong[(asSize >> _pageCommitmentBits) / (sizeof(ulong) * 8)];

            ulong ptStart = (ulong)_nativePageTable.Pointer;
            ulong ptEnd = ptStart + _nativePageTable.Size;
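            // Track the table's own memory region, so that the first access to an
            // uncommitted table page traps into VirtualMemoryEvent and commits it.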
            _trackingEvent = VirtualMemoryEvent;

            bool added = NativeSignalHandler.AddTrackedRegion((nuint)ptStart, (nuint)ptEnd, Marshal.GetFunctionPointerForDelegate(_trackingEvent));

            if (!added)
            {
                throw new InvalidOperationException("Number of allowed tracked regions exceeded.");
            }
        }
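        // Maps a range of guest pages, writing one native PTE per page. Private
        // mappings translate through the partitioned address space, while shared
        // ones point directly into the backing memory block.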
        public void Map(ulong va, ulong pa, ulong size, AddressSpacePartitioned addressSpace, MemoryBlock backingMemory, bool privateMap)
        {
            while (size != 0)
            {
                _pageTable.Map(va, pa);

                EnsureCommitment(va);

                if (privateMap)
                {
                    _nativePageTable.Write((va / PageSize) * PteSize, GetPte(va, addressSpace.GetPointer(va, PageSize)));
                }
                else
                {
                    _nativePageTable.Write((va / PageSize) * PteSize, GetPte(va, backingMemory.GetPointer(pa, PageSize)));
                }

                va += PageSize;
                pa += PageSize;
                size -= PageSize;
            }
        }
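        // Unmaps a range of guest pages, pointing their native PTEs back at the
        // guard page so any further access through them faults.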
        public void Unmap(ulong va, ulong size)
        {
            IntPtr guardPagePtr = GetGuardPagePointer();

            while (size != 0)
            {
                _pageTable.Unmap(va);
                _nativePageTable.Write((va / PageSize) * PteSize, GetPte(va, guardPagePtr));

                va += PageSize;
                size -= PageSize;
            }
        }
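        // Translates a guest VA to a host address. The PTE holds
        // (hostPointer - pageStartVa), so adding back the page-aligned VA and the
        // page offset yields the host address.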
        public ulong Read(ulong va)
        {
            ulong pte = _nativePageTable.Read<ulong>((va / PageSize) * PteSize);

            pte += va & ~(ulong)PageMask;

            return pte + (va & PageMask);
        }
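        // Rewrites the native PTEs of a mapped range to point at a new host
        // region, e.g. when the host mapping backing the range changes.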
        public void Update(ulong va, IntPtr ptr, ulong size)
        {
            ulong remainingSize = size;

            while (remainingSize != 0)
            {
                EnsureCommitment(va);

                _nativePageTable.Write((va / PageSize) * PteSize, GetPte(va, ptr));

                va += PageSize;
                ptr += PageSize;
                remainingSize -= PageSize;
            }
        }
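        // Commits the host page of the table containing the PTE for the given VA,
        // if not committed yet. Double-checks the commitment bitmap under a lock,
        // and initializes every entry of a newly committed page to the guard page.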
        private void EnsureCommitment(ulong va)
        {
            ulong bit = va >> _pageCommitmentBits;

            int index = (int)(bit / (sizeof(ulong) * 8));
            int shift = (int)(bit % (sizeof(ulong) * 8));

            ulong mask = 1UL << shift;

            ulong oldMask = _pageCommitmentBitmap[index];

            if ((oldMask & mask) == 0)
            {
                lock (_pageCommitmentBitmap)
                {
                    oldMask = _pageCommitmentBitmap[index];

                    if ((oldMask & mask) != 0)
                    {
                        return;
                    }

                    _nativePageTable.Commit(bit * _hostPageSize, _hostPageSize);

                    Span<ulong> pageSpan = MemoryMarshal.Cast<byte, ulong>(_nativePageTable.GetSpan(bit * _hostPageSize, (int)_hostPageSize));

                    Debug.Assert(pageSpan.Length == _entriesPerPtPage);

                    IntPtr guardPagePtr = GetGuardPagePointer();

                    for (int i = 0; i < pageSpan.Length; i++)
                    {
                        pageSpan[i] = GetPte((bit << _pageCommitmentBits) | ((ulong)i * PageSize), guardPagePtr);
                    }

                    _pageCommitmentBitmap[index] = oldMask | mask;
                }
            }
        }
        private IntPtr GetGuardPagePointer()
        {
            return _nativePageTable.GetPointer(_nativePageTable.Size - _hostPageSize, _hostPageSize);
        }
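        // Encodes a PTE as the difference between the host pointer and the guest
        // VA, so translation needs only a table load and an add.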
        private static ulong GetPte(ulong va, IntPtr ptr)
        {
            Debug.Assert((va & PageMask) == 0);

            return (ulong)ptr - va;
        }
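        // Translates a guest VA to a guest physical address using the software
        // page table, rather than the native one.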
        public ulong GetPhysicalAddress(ulong va)
        {
            return _pageTable.Read(va) + (va & PageMask);
        }
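        // Fault handler for the tracked table region. A fault below the guard page
        // means an uncommitted table page was touched: commit it and return the
        // host address so the access can be retried. A fault on the guard page
        // means the guest access itself targeted unmapped memory.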
        private ulong VirtualMemoryEvent(ulong address, ulong size, bool write)
        {
            if (address < _nativePageTable.Size - _hostPageSize)
            {
                // Some prefetch instructions do not cause faults with invalid addresses.
                // Retry if we hit a case where the page table is unmapped; the next run
                // will execute the actual instruction. The address loaded from the page
                // table will be invalid, and it should hit the else case if the
                // instruction faults on unmapped or protected memory.
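                // Each 8-byte PTE covers one guest page, so the guest VA is the
                // table offset scaled by PageSize / PteSize.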
                ulong va = address * (PageSize / sizeof(ulong));

                EnsureCommitment(va);

                return (ulong)_nativePageTable.Pointer + address;
            }
            else
            {
                throw new InvalidMemoryRegionException();
            }
        }
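        // Unregisters the fault handler region before releasing the table memory.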
        private void Dispose(bool disposing)
        {
            if (!_disposed)
            {
                if (disposing)
                {
                    NativeSignalHandler.RemoveTrackedRegion((nuint)_nativePageTable.Pointer);

                    _nativePageTable.Dispose();
                }

                _disposed = true;
            }
        }

        public void Dispose()
        {
            Dispose(disposing: true);
            GC.SuppressFinalize(this);
        }
    }
}