Merge Latest Ryujinx (Unstable)

Stossy11 2025-03-08 10:13:40 +11:00
parent aaefc0a9e5
commit 12ab8bc3e2
1237 changed files with 48656 additions and 21399 deletions

View file

@@ -1,5 +1,3 @@
using Ryujinx.Common;
using Ryujinx.Common.Collections;
using Ryujinx.Memory;
using System;
@@ -7,175 +5,23 @@ namespace Ryujinx.Cpu
{
public class AddressSpace : IDisposable
{
private const int DefaultBlockAlignment = 1 << 20;
private enum MappingType : byte
{
None,
Private,
Shared,
}
private class Mapping : IntrusiveRedBlackTreeNode<Mapping>, IComparable<Mapping>
{
public ulong Address { get; private set; }
public ulong Size { get; private set; }
public ulong EndAddress => Address + Size;
public MappingType Type { get; private set; }
public Mapping(ulong address, ulong size, MappingType type)
{
Address = address;
Size = size;
Type = type;
}
public Mapping Split(ulong splitAddress)
{
ulong leftSize = splitAddress - Address;
ulong rightSize = EndAddress - splitAddress;
Mapping left = new(Address, leftSize, Type);
Address = splitAddress;
Size = rightSize;
return left;
}
public void UpdateState(MappingType newType)
{
Type = newType;
}
public void Extend(ulong sizeDelta)
{
Size += sizeDelta;
}
public int CompareTo(Mapping other)
{
if (Address < other.Address)
{
return -1;
}
else if (Address <= other.EndAddress - 1UL)
{
return 0;
}
else
{
return 1;
}
}
}
private class PrivateMapping : IntrusiveRedBlackTreeNode<PrivateMapping>, IComparable<PrivateMapping>
{
public ulong Address { get; private set; }
public ulong Size { get; private set; }
public ulong EndAddress => Address + Size;
public PrivateMemoryAllocation PrivateAllocation { get; private set; }
public PrivateMapping(ulong address, ulong size, PrivateMemoryAllocation privateAllocation)
{
Address = address;
Size = size;
PrivateAllocation = privateAllocation;
}
public PrivateMapping Split(ulong splitAddress)
{
ulong leftSize = splitAddress - Address;
ulong rightSize = EndAddress - splitAddress;
(var leftAllocation, PrivateAllocation) = PrivateAllocation.Split(leftSize);
PrivateMapping left = new(Address, leftSize, leftAllocation);
Address = splitAddress;
Size = rightSize;
return left;
}
public void Map(MemoryBlock baseBlock, MemoryBlock mirrorBlock, PrivateMemoryAllocation newAllocation)
{
baseBlock.MapView(newAllocation.Memory, newAllocation.Offset, Address, Size);
mirrorBlock.MapView(newAllocation.Memory, newAllocation.Offset, Address, Size);
PrivateAllocation = newAllocation;
}
public void Unmap(MemoryBlock baseBlock, MemoryBlock mirrorBlock)
{
if (PrivateAllocation.IsValid)
{
baseBlock.UnmapView(PrivateAllocation.Memory, Address, Size);
mirrorBlock.UnmapView(PrivateAllocation.Memory, Address, Size);
PrivateAllocation.Dispose();
}
PrivateAllocation = default;
}
public void Extend(ulong sizeDelta)
{
Size += sizeDelta;
}
public int CompareTo(PrivateMapping other)
{
if (Address < other.Address)
{
return -1;
}
else if (Address <= other.EndAddress - 1UL)
{
return 0;
}
else
{
return 1;
}
}
}
private readonly MemoryBlock _backingMemory;
private readonly PrivateMemoryAllocator _privateMemoryAllocator;
private readonly IntrusiveRedBlackTree<Mapping> _mappingTree;
private readonly IntrusiveRedBlackTree<PrivateMapping> _privateTree;
private readonly object _treeLock;
private readonly bool _supports4KBPages;
public MemoryBlock Base { get; }
public MemoryBlock Mirror { get; }
public ulong AddressSpaceSize { get; }
public AddressSpace(MemoryBlock backingMemory, MemoryBlock baseMemory, MemoryBlock mirrorMemory, ulong addressSpaceSize, bool supports4KBPages)
public AddressSpace(MemoryBlock backingMemory, MemoryBlock baseMemory, MemoryBlock mirrorMemory, ulong addressSpaceSize)
{
if (!supports4KBPages)
{
_privateMemoryAllocator = new PrivateMemoryAllocator(DefaultBlockAlignment, MemoryAllocationFlags.Mirrorable | MemoryAllocationFlags.NoMap);
_mappingTree = new IntrusiveRedBlackTree<Mapping>();
_privateTree = new IntrusiveRedBlackTree<PrivateMapping>();
_treeLock = new object();
_mappingTree.Add(new Mapping(0UL, addressSpaceSize, MappingType.None));
_privateTree.Add(new PrivateMapping(0UL, addressSpaceSize, default));
}
_backingMemory = backingMemory;
_supports4KBPages = supports4KBPages;
Base = baseMemory;
Mirror = mirrorMemory;
AddressSpaceSize = addressSpaceSize;
}
public static bool TryCreate(MemoryBlock backingMemory, ulong asSize, bool supports4KBPages, out AddressSpace addressSpace)
public static bool TryCreate(MemoryBlock backingMemory, ulong asSize, out AddressSpace addressSpace)
{
addressSpace = null;
@@ -193,7 +39,7 @@ namespace Ryujinx.Cpu
{
baseMemory = new MemoryBlock(addressSpaceSize, AsFlags);
mirrorMemory = new MemoryBlock(addressSpaceSize, AsFlags);
addressSpace = new AddressSpace(backingMemory, baseMemory, mirrorMemory, addressSpaceSize, supports4KBPages);
addressSpace = new AddressSpace(backingMemory, baseMemory, mirrorMemory, addressSpaceSize);
break;
}
@@ -209,289 +55,20 @@ namespace Ryujinx.Cpu
public void Map(ulong va, ulong pa, ulong size, MemoryMapFlags flags)
{
if (_supports4KBPages)
{
Base.MapView(_backingMemory, pa, va, size);
Mirror.MapView(_backingMemory, pa, va, size);
return;
}
lock (_treeLock)
{
ulong alignment = MemoryBlock.GetPageSize();
bool isAligned = ((va | pa | size) & (alignment - 1)) == 0;
if (flags.HasFlag(MemoryMapFlags.Private) && !isAligned)
{
Update(va, pa, size, MappingType.Private);
}
else
{
// The update method assumes that shared mappings are already aligned.
if (!flags.HasFlag(MemoryMapFlags.Private))
{
if ((va & (alignment - 1)) != (pa & (alignment - 1)))
{
throw new InvalidMemoryRegionException($"Virtual address 0x{va:X} and physical address 0x{pa:X} are misaligned and can't be aligned.");
}
ulong endAddress = va + size;
va = BitUtils.AlignDown(va, alignment);
pa = BitUtils.AlignDown(pa, alignment);
size = BitUtils.AlignUp(endAddress, alignment) - va;
}
Update(va, pa, size, MappingType.Shared);
}
}
Base.MapView(_backingMemory, pa, va, size);
Mirror.MapView(_backingMemory, pa, va, size);
}
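Note: a worked example (not part of the diff) of the shared-mapping alignment above, assuming a 16 KiB host page size: va and pa must be misaligned by the same amount relative to the host page, and the range is then expanded outwards to whole host pages.

    ulong alignment = 0x4000; // assumed MemoryBlock.GetPageSize()
    ulong va = 0x5000, pa = 0x9000, size = 0x2000;
    // (va & (alignment - 1)) == (pa & (alignment - 1)) == 0x1000, so alignment is possible.
    ulong endAddress = va + size;                         // 0x7000
    va = BitUtils.AlignDown(va, alignment);               // 0x4000
    pa = BitUtils.AlignDown(pa, alignment);               // 0x8000
    size = BitUtils.AlignUp(endAddress, alignment) - va;  // 0x8000 - 0x4000 = 0x4000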
public void Unmap(ulong va, ulong size)
{
if (_supports4KBPages)
{
Base.UnmapView(_backingMemory, va, size);
Mirror.UnmapView(_backingMemory, va, size);
return;
}
lock (_treeLock)
{
Update(va, 0UL, size, MappingType.None);
}
}
private void Update(ulong va, ulong pa, ulong size, MappingType type)
{
Mapping map = _mappingTree.GetNode(new Mapping(va, 1UL, MappingType.None));
Update(map, va, pa, size, type);
}
private Mapping Update(Mapping map, ulong va, ulong pa, ulong size, MappingType type)
{
ulong endAddress = va + size;
for (; map != null; map = map.Successor)
{
if (map.Address < va)
{
_mappingTree.Add(map.Split(va));
}
if (map.EndAddress > endAddress)
{
Mapping newMap = map.Split(endAddress);
_mappingTree.Add(newMap);
map = newMap;
}
switch (type)
{
case MappingType.None:
if (map.Type == MappingType.Shared)
{
ulong startOffset = map.Address - va;
ulong mapVa = va + startOffset;
ulong mapSize = Math.Min(size - startOffset, map.Size);
ulong mapEndAddress = mapVa + mapSize;
ulong alignment = MemoryBlock.GetPageSize();
mapVa = BitUtils.AlignDown(mapVa, alignment);
mapEndAddress = BitUtils.AlignUp(mapEndAddress, alignment);
mapSize = mapEndAddress - mapVa;
Base.UnmapView(_backingMemory, mapVa, mapSize);
Mirror.UnmapView(_backingMemory, mapVa, mapSize);
}
else
{
UnmapPrivate(va, size);
}
break;
case MappingType.Private:
if (map.Type == MappingType.Shared)
{
throw new InvalidMemoryRegionException($"Private mapping request at 0x{va:X} with size 0x{size:X} overlaps shared mapping at 0x{map.Address:X} with size 0x{map.Size:X}.");
}
else
{
MapPrivate(va, size);
}
break;
case MappingType.Shared:
if (map.Type != MappingType.None)
{
throw new InvalidMemoryRegionException($"Shared mapping request at 0x{va:X} with size 0x{size:X} overlaps mapping at 0x{map.Address:X} with size 0x{map.Size:X}.");
}
else
{
ulong startOffset = map.Address - va;
ulong mapPa = pa + startOffset;
ulong mapVa = va + startOffset;
ulong mapSize = Math.Min(size - startOffset, map.Size);
Base.MapView(_backingMemory, mapPa, mapVa, mapSize);
Mirror.MapView(_backingMemory, mapPa, mapVa, mapSize);
}
break;
}
map.UpdateState(type);
map = TryCoalesce(map);
if (map.EndAddress >= endAddress)
{
break;
}
}
return map;
}
private Mapping TryCoalesce(Mapping map)
{
Mapping previousMap = map.Predecessor;
Mapping nextMap = map.Successor;
if (previousMap != null && CanCoalesce(previousMap, map))
{
previousMap.Extend(map.Size);
_mappingTree.Remove(map);
map = previousMap;
}
if (nextMap != null && CanCoalesce(map, nextMap))
{
map.Extend(nextMap.Size);
_mappingTree.Remove(nextMap);
}
return map;
}
private static bool CanCoalesce(Mapping left, Mapping right)
{
return left.Type == right.Type;
}
private void MapPrivate(ulong va, ulong size)
{
ulong endAddress = va + size;
ulong alignment = MemoryBlock.GetPageSize();
// Expand the range outwards based on page size to ensure that at least the requested region is mapped.
ulong vaAligned = BitUtils.AlignDown(va, alignment);
ulong endAddressAligned = BitUtils.AlignUp(endAddress, alignment);
PrivateMapping map = _privateTree.GetNode(new PrivateMapping(va, 1UL, default));
for (; map != null; map = map.Successor)
{
if (!map.PrivateAllocation.IsValid)
{
if (map.Address < vaAligned)
{
_privateTree.Add(map.Split(vaAligned));
}
if (map.EndAddress > endAddressAligned)
{
PrivateMapping newMap = map.Split(endAddressAligned);
_privateTree.Add(newMap);
map = newMap;
}
map.Map(Base, Mirror, _privateMemoryAllocator.Allocate(map.Size, MemoryBlock.GetPageSize()));
}
if (map.EndAddress >= endAddressAligned)
{
break;
}
}
}
private void UnmapPrivate(ulong va, ulong size)
{
ulong endAddress = va + size;
ulong alignment = MemoryBlock.GetPageSize();
// Shrink the range inwards based on page size to ensure we won't unmap memory that might still be in use.
ulong vaAligned = BitUtils.AlignUp(va, alignment);
ulong endAddressAligned = BitUtils.AlignDown(endAddress, alignment);
if (endAddressAligned <= vaAligned)
{
return;
}
PrivateMapping map = _privateTree.GetNode(new PrivateMapping(va, 1UL, default));
for (; map != null; map = map.Successor)
{
if (map.PrivateAllocation.IsValid)
{
if (map.Address < vaAligned)
{
_privateTree.Add(map.Split(vaAligned));
}
if (map.EndAddress > endAddressAligned)
{
PrivateMapping newMap = map.Split(endAddressAligned);
_privateTree.Add(newMap);
map = newMap;
}
map.Unmap(Base, Mirror);
map = TryCoalesce(map);
}
if (map.EndAddress >= endAddressAligned)
{
break;
}
}
}
private PrivateMapping TryCoalesce(PrivateMapping map)
{
PrivateMapping previousMap = map.Predecessor;
PrivateMapping nextMap = map.Successor;
if (previousMap != null && CanCoalesce(previousMap, map))
{
previousMap.Extend(map.Size);
_privateTree.Remove(map);
map = previousMap;
}
if (nextMap != null && CanCoalesce(map, nextMap))
{
map.Extend(nextMap.Size);
_privateTree.Remove(nextMap);
}
return map;
}
private static bool CanCoalesce(PrivateMapping left, PrivateMapping right)
{
return !left.PrivateAllocation.IsValid && !right.PrivateAllocation.IsValid;
}
Base.UnmapView(_backingMemory, va, size);
Mirror.UnmapView(_backingMemory, va, size);
}
public void Dispose()
{
GC.SuppressFinalize(this);
_privateMemoryAllocator?.Dispose();
Base.Dispose();
Mirror.Dispose();
}
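Note: the sketch below is not part of the diff. It illustrates the interval semantics of Mapping.CompareTo above: a mapping compares as equal to anything that overlaps it, which lets the plain red-black tree double as an interval lookup, so GetNode with a one-byte probe returns the mapping containing that address.

    // Illustrative only; Mapping is the private nested class shown above.
    Mapping region = new(0x2000, 0x2000, MappingType.Private); // covers 0x2000..0x3FFF
    Mapping probe = new(0x3000, 1UL, MappingType.None);        // one-byte probe, as used by Update()
    int cmp = probe.CompareTo(region); // 0: the probe overlaps the region, so _mappingTree.GetNode(probe) finds it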

View file

@@ -1,62 +0,0 @@
using System;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
namespace Ryujinx.Cpu.AppleHv
{
static class HvCodePatcher
{
private const uint XMask = 0x3f808000u;
private const uint XValue = 0x8000000u;
private const uint ZrIndex = 31u;
public static void RewriteUnorderedExclusiveInstructions(Span<byte> code)
{
Span<uint> codeUint = MemoryMarshal.Cast<byte, uint>(code);
Span<Vector128<uint>> codeVector = MemoryMarshal.Cast<byte, Vector128<uint>>(code);
Vector128<uint> mask = Vector128.Create(XMask);
Vector128<uint> value = Vector128.Create(XValue);
for (int index = 0; index < codeVector.Length; index++)
{
Vector128<uint> v = codeVector[index];
if (Vector128.EqualsAny(Vector128.BitwiseAnd(v, mask), value))
{
int baseIndex = index * 4;
for (int instIndex = baseIndex; instIndex < baseIndex + 4; instIndex++)
{
ref uint inst = ref codeUint[instIndex];
if ((inst & XMask) != XValue)
{
continue;
}
bool isPair = (inst & (1u << 21)) != 0;
bool isLoad = (inst & (1u << 22)) != 0;
uint rt2 = (inst >> 10) & 0x1fu;
uint rs = (inst >> 16) & 0x1fu;
if (isLoad && rs != ZrIndex)
{
continue;
}
if (!isPair && rt2 != ZrIndex)
{
continue;
}
// Set the ordered flag.
inst |= 1u << 15;
}
}
}
}
}
}
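Note: the code above scans instructions in 16-byte vector chunks and only rescans a chunk instruction by instruction when it may contain a load/store-exclusive encoding. Matching instructions get bit 15 set, which selects the ordered (acquire/release) variants, e.g. LDXR becomes LDAXR and STXR becomes STLXR. A scalar sketch of the prefilter (not part of the diff; constants copied from HvCodePatcher):

    static bool MayContainExclusiveAccess(ReadOnlySpan<uint> instructions)
    {
        foreach (uint inst in instructions)
        {
            if ((inst & 0x3f808000u) == 0x08000000u) // (inst & XMask) == XValue
            {
                return true; // Candidate found; rescan this block one instruction at a time.
            }
        }
        return false;
    }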

View file

@@ -3,12 +3,11 @@ using Ryujinx.Memory;
using Ryujinx.Memory.Range;
using Ryujinx.Memory.Tracking;
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Versioning;
using System.Threading;
namespace Ryujinx.Cpu.AppleHv
{
@@ -17,31 +16,10 @@ namespace Ryujinx.Cpu.AppleHv
/// </summary>
[SupportedOSPlatform("macos")]
[SupportedOSPlatform("ios")]
public class HvMemoryManager : MemoryManagerBase, IMemoryManager, IVirtualMemoryManagerTracked, IWritableBlock
public sealed class HvMemoryManager : VirtualMemoryManagerRefCountedBase, IMemoryManager, IVirtualMemoryManagerTracked
{
public const int PageBits = 12;
public const int PageSize = 1 << PageBits;
public const int PageMask = PageSize - 1;
public const int PageToPteShift = 5; // 32 pages (2 bits each) in one ulong page table entry.
public const ulong BlockMappedMask = 0x5555555555555555; // First bit of each table entry set.
private enum HostMappedPtBits : ulong
{
Unmapped = 0,
Mapped,
WriteTracked,
ReadWriteTracked,
MappedReplicated = 0x5555555555555555,
WriteTrackedReplicated = 0xaaaaaaaaaaaaaaaa,
ReadWriteTrackedReplicated = ulong.MaxValue,
}
private readonly InvalidAccessHandler _invalidAccessHandler;
private readonly ulong _addressSpaceSize;
private readonly HvAddressSpace _addressSpace;
internal HvAddressSpace AddressSpace => _addressSpace;
@@ -49,9 +27,9 @@ namespace Ryujinx.Cpu.AppleHv
private readonly MemoryBlock _backingMemory;
private readonly PageTable<ulong> _pageTable;
private readonly ulong[] _pageBitmap;
private readonly ManagedPageFlags _pages;
public bool Supports4KBPages => true;
public bool UsesPrivateAllocations => false;
public int AddressSpaceBits { get; }
@@ -63,6 +41,8 @@ namespace Ryujinx.Cpu.AppleHv
public event Action<ulong, ulong> UnmapEvent;
protected override ulong AddressSpaceSize { get; }
/// <summary>
/// Creates a new instance of the Hypervisor memory manager.
/// </summary>
@@ -74,7 +54,7 @@ namespace Ryujinx.Cpu.AppleHv
_backingMemory = backingMemory;
_pageTable = new PageTable<ulong>();
_invalidAccessHandler = invalidAccessHandler;
_addressSpaceSize = addressSpaceSize;
AddressSpaceSize = addressSpaceSize;
ulong asSize = PageSize;
int asBits = PageBits;
@@ -89,46 +69,10 @@ namespace Ryujinx.Cpu.AppleHv
AddressSpaceBits = asBits;
_pageBitmap = new ulong[1 << (AddressSpaceBits - (PageBits + PageToPteShift))];
_pages = new ManagedPageFlags(AddressSpaceBits);
Tracking = new MemoryTracking(this, PageSize, invalidAccessHandler);
}
/// <summary>
/// Checks if the virtual address is part of the addressable space.
/// </summary>
/// <param name="va">Virtual address</param>
/// <returns>True if the virtual address is part of the addressable space</returns>
private bool ValidateAddress(ulong va)
{
return va < _addressSpaceSize;
}
/// <summary>
/// Checks if the combination of virtual address and size is part of the addressable space.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range in bytes</param>
/// <returns>True if the combination of virtual address and size is part of the addressable space</returns>
private bool ValidateAddressAndSize(ulong va, ulong size)
{
ulong endVa = va + size;
return endVa >= va && endVa >= size && endVa <= _addressSpaceSize;
}
/// <summary>
/// Ensures the combination of virtual address and size is part of the addressable space.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range in bytes</param>
/// <exception cref="InvalidMemoryRegionException">Thrown when the specified memory region is outside the addressable space</exception>
private void AssertValidAddressAndSize(ulong va, ulong size)
{
if (!ValidateAddressAndSize(va, size))
{
throw new InvalidMemoryRegionException($"va=0x{va:X16}, size=0x{size:X16}");
}
}
/// <inheritdoc/>
public void Map(ulong va, ulong pa, ulong size, MemoryMapFlags flags)
{
@@ -136,7 +80,7 @@ namespace Ryujinx.Cpu.AppleHv
PtMap(va, pa, size);
_addressSpace.MapUser(va, pa, size, MemoryPermission.ReadWriteExecute);
AddMapping(va, size);
_pages.AddMapping(va, size);
Tracking.Map(va, size);
}
@@ -153,12 +97,6 @@ namespace Ryujinx.Cpu.AppleHv
}
}
/// <inheritdoc/>
public void MapForeign(ulong va, nuint hostPointer, ulong size)
{
throw new NotSupportedException();
}
/// <inheritdoc/>
public void Unmap(ulong va, ulong size)
{
@@ -167,7 +105,7 @@ namespace Ryujinx.Cpu.AppleHv
UnmapEvent?.Invoke(va, size);
Tracking.Unmap(va, size);
RemoveMapping(va, size);
_pages.RemoveMapping(va, size);
_addressSpace.UnmapUser(va, size);
PtUnmap(va, size);
}
@@ -183,20 +121,11 @@ namespace Ryujinx.Cpu.AppleHv
}
}
/// <inheritdoc/>
public T Read<T>(ulong va) where T : unmanaged
{
return MemoryMarshal.Cast<byte, T>(GetSpan(va, Unsafe.SizeOf<T>()))[0];
}
/// <inheritdoc/>
public T ReadTracked<T>(ulong va) where T : unmanaged
public override T ReadTracked<T>(ulong va)
{
try
{
SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), false);
return Read<T>(va);
return base.ReadTracked<T>(va);
}
catch (InvalidMemoryRegionException)
{
@@ -209,107 +138,11 @@ namespace Ryujinx.Cpu.AppleHv
}
}
/// <inheritdoc/>
public void Read(ulong va, Span<byte> data)
{
ReadImpl(va, data);
}
/// <inheritdoc/>
public void Write<T>(ulong va, T value) where T : unmanaged
{
Write(va, MemoryMarshal.Cast<T, byte>(MemoryMarshal.CreateSpan(ref value, 1)));
}
/// <inheritdoc/>
public void Write(ulong va, ReadOnlySpan<byte> data)
{
if (data.Length == 0)
{
return;
}
SignalMemoryTracking(va, (ulong)data.Length, true);
WriteImpl(va, data);
}
/// <inheritdoc/>
public void WriteUntracked(ulong va, ReadOnlySpan<byte> data)
{
if (data.Length == 0)
{
return;
}
WriteImpl(va, data);
}
/// <inheritdoc/>
public bool WriteWithRedundancyCheck(ulong va, ReadOnlySpan<byte> data)
{
if (data.Length == 0)
{
return false;
}
SignalMemoryTracking(va, (ulong)data.Length, false);
if (IsContiguousAndMapped(va, data.Length))
{
var target = _backingMemory.GetSpan(GetPhysicalAddressInternal(va), data.Length);
bool changed = !data.SequenceEqual(target);
if (changed)
{
data.CopyTo(target);
}
return changed;
}
else
{
WriteImpl(va, data);
return true;
}
}
private void WriteImpl(ulong va, ReadOnlySpan<byte> data)
public override void Read(ulong va, Span<byte> data)
{
try
{
AssertValidAddressAndSize(va, (ulong)data.Length);
if (IsContiguousAndMapped(va, data.Length))
{
data.CopyTo(_backingMemory.GetSpan(GetPhysicalAddressInternal(va), data.Length));
}
else
{
int offset = 0, size;
if ((va & PageMask) != 0)
{
ulong pa = GetPhysicalAddressChecked(va);
size = Math.Min(data.Length, PageSize - (int)(va & PageMask));
data[..size].CopyTo(_backingMemory.GetSpan(pa, size));
offset += size;
}
for (; offset < data.Length; offset += size)
{
ulong pa = GetPhysicalAddressChecked(va + (ulong)offset);
size = Math.Min(data.Length - offset, PageSize);
data.Slice(offset, size).CopyTo(_backingMemory.GetSpan(pa, size));
}
}
base.Read(va, data);
}
catch (InvalidMemoryRegionException)
{
@@ -320,61 +153,53 @@ namespace Ryujinx.Cpu.AppleHv
}
}
/// <inheritdoc/>
public ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false)
{
if (size == 0)
{
return ReadOnlySpan<byte>.Empty;
}
if (tracked)
{
SignalMemoryTracking(va, (ulong)size, false);
}
if (IsContiguousAndMapped(va, size))
{
return _backingMemory.GetSpan(GetPhysicalAddressInternal(va), size);
}
else
{
Span<byte> data = new byte[size];
ReadImpl(va, data);
return data;
}
}
public override void Write(ulong va, ReadOnlySpan<byte> data)
{
try
{
base.Write(va, data);
}
catch (InvalidMemoryRegionException)
{
if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
{
throw;
}
}
}
/// <inheritdoc/>
public WritableRegion GetWritableRegion(ulong va, int size, bool tracked = false)
{
if (size == 0)
{
return new WritableRegion(null, va, Memory<byte>.Empty);
}
if (tracked)
{
SignalMemoryTracking(va, (ulong)size, true);
}
if (IsContiguousAndMapped(va, size))
{
return new WritableRegion(null, va, _backingMemory.GetMemory(GetPhysicalAddressInternal(va), size));
}
else
{
Memory<byte> memory = new byte[size];
ReadImpl(va, memory.Span);
return new WritableRegion(this, va, memory);
}
}
public override void WriteUntracked(ulong va, ReadOnlySpan<byte> data)
{
try
{
base.WriteUntracked(va, data);
}
catch (InvalidMemoryRegionException)
{
if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
{
throw;
}
}
}
public override ReadOnlySequence<byte> GetReadOnlySequence(ulong va, int size, bool tracked = false)
{
try
{
return base.GetReadOnlySequence(va, size, tracked);
}
catch (InvalidMemoryRegionException)
{
if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
{
throw;
}
return ReadOnlySequence<byte>.Empty;
}
}
/// <inheritdoc/>
public ref T GetRef<T>(ulong va) where T : unmanaged
{
if (!IsContiguous(va, Unsafe.SizeOf<T>()))
@@ -387,26 +212,10 @@ namespace Ryujinx.Cpu.AppleHv
return ref _backingMemory.GetRef<T>(GetPhysicalAddressChecked(va));
}
/// <inheritdoc/>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public bool IsMapped(ulong va)
public override bool IsMapped(ulong va)
{
return ValidateAddress(va) && IsMappedImpl(va);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private bool IsMappedImpl(ulong va)
{
ulong page = va >> PageBits;
int bit = (int)((page & 31) << 1);
int pageIndex = (int)(page >> PageToPteShift);
ref ulong pageRef = ref _pageBitmap[pageIndex];
ulong pte = Volatile.Read(ref pageRef);
return ((pte >> bit) & 3) != 0;
return ValidateAddress(va) && _pages.IsMapped(va);
}
/// <inheritdoc/>
@@ -414,91 +223,7 @@ namespace Ryujinx.Cpu.AppleHv
{
AssertValidAddressAndSize(va, size);
return IsRangeMappedImpl(va, size);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static void GetPageBlockRange(ulong pageStart, ulong pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex)
{
startMask = ulong.MaxValue << ((int)(pageStart & 31) << 1);
endMask = ulong.MaxValue >> (64 - ((int)(pageEnd & 31) << 1));
pageIndex = (int)(pageStart >> PageToPteShift);
pageEndIndex = (int)((pageEnd - 1) >> PageToPteShift);
}
private bool IsRangeMappedImpl(ulong va, ulong size)
{
int pages = GetPagesCount(va, size, out _);
if (pages == 1)
{
return IsMappedImpl(va);
}
ulong pageStart = va >> PageBits;
ulong pageEnd = pageStart + (ulong)pages;
GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
// Check if either bit in each 2 bit page entry is set.
// OR the block with itself shifted down by 1, and check the first bit of each entry.
ulong mask = BlockMappedMask & startMask;
while (pageIndex <= pageEndIndex)
{
if (pageIndex == pageEndIndex)
{
mask &= endMask;
}
ref ulong pageRef = ref _pageBitmap[pageIndex++];
ulong pte = Volatile.Read(ref pageRef);
pte |= pte >> 1;
if ((pte & mask) != mask)
{
return false;
}
mask = BlockMappedMask;
}
return true;
}
private static void ThrowMemoryNotContiguous() => throw new MemoryNotContiguousException();
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private bool IsContiguousAndMapped(ulong va, int size) => IsContiguous(va, size) && IsMapped(va);
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private bool IsContiguous(ulong va, int size)
{
if (!ValidateAddress(va) || !ValidateAddressAndSize(va, (ulong)size))
{
return false;
}
int pages = GetPagesCount(va, (uint)size, out va);
for (int page = 0; page < pages - 1; page++)
{
if (!ValidateAddress(va + PageSize))
{
return false;
}
if (GetPhysicalAddressInternal(va) + PageSize != GetPhysicalAddressInternal(va + PageSize))
{
return false;
}
va += PageSize;
}
return true;
return _pages.IsRangeMapped(va, size);
}
/// <inheritdoc/>
@@ -577,53 +302,10 @@ namespace Ryujinx.Cpu.AppleHv
return regions;
}
private void ReadImpl(ulong va, Span<byte> data)
{
if (data.Length == 0)
{
return;
}
try
{
AssertValidAddressAndSize(va, (ulong)data.Length);
int offset = 0, size;
if ((va & PageMask) != 0)
{
ulong pa = GetPhysicalAddressChecked(va);
size = Math.Min(data.Length, PageSize - (int)(va & PageMask));
_backingMemory.GetSpan(pa, size).CopyTo(data[..size]);
offset += size;
}
for (; offset < data.Length; offset += size)
{
ulong pa = GetPhysicalAddressChecked(va + (ulong)offset);
size = Math.Min(data.Length - offset, PageSize);
_backingMemory.GetSpan(pa, size).CopyTo(data.Slice(offset, size));
}
}
catch (InvalidMemoryRegionException)
{
if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
{
throw;
}
}
}
/// <inheritdoc/>
/// <remarks>
/// This function also validates that the given range is both valid and mapped, and will throw if it is not.
/// </remarks>
public void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null)
public override void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null)
{
AssertValidAddressAndSize(va, size);
@@ -633,211 +315,37 @@ namespace Ryujinx.Cpu.AppleHv
return;
}
// Software table, used for managed memory tracking.
int pages = GetPagesCount(va, size, out _);
ulong pageStart = va >> PageBits;
if (pages == 1)
{
ulong tag = (ulong)(write ? HostMappedPtBits.WriteTracked : HostMappedPtBits.ReadWriteTracked);
int bit = (int)((pageStart & 31) << 1);
int pageIndex = (int)(pageStart >> PageToPteShift);
ref ulong pageRef = ref _pageBitmap[pageIndex];
ulong pte = Volatile.Read(ref pageRef);
ulong state = ((pte >> bit) & 3);
if (state >= tag)
{
Tracking.VirtualMemoryEvent(va, size, write, precise: false, exemptId);
return;
}
else if (state == 0)
{
ThrowInvalidMemoryRegionException($"Not mapped: va=0x{va:X16}, size=0x{size:X16}");
}
}
else
{
ulong pageEnd = pageStart + (ulong)pages;
GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
ulong mask = startMask;
ulong anyTrackingTag = (ulong)HostMappedPtBits.WriteTrackedReplicated;
while (pageIndex <= pageEndIndex)
{
if (pageIndex == pageEndIndex)
{
mask &= endMask;
}
ref ulong pageRef = ref _pageBitmap[pageIndex++];
ulong pte = Volatile.Read(ref pageRef);
ulong mappedMask = mask & BlockMappedMask;
ulong mappedPte = pte | (pte >> 1);
if ((mappedPte & mappedMask) != mappedMask)
{
ThrowInvalidMemoryRegionException($"Not mapped: va=0x{va:X16}, size=0x{size:X16}");
}
pte &= mask;
if ((pte & anyTrackingTag) != 0) // Search for any tracking.
{
// Writes trigger any tracking.
// Only trigger tracking from reads if both bits are set on any page.
if (write || (pte & (pte >> 1) & BlockMappedMask) != 0)
{
Tracking.VirtualMemoryEvent(va, size, write, precise: false, exemptId);
break;
}
}
mask = ulong.MaxValue;
}
}
_pages.SignalMemoryTracking(Tracking, va, size, write, exemptId);
}
/// <summary>
/// Computes the number of pages in a virtual address range.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range</param>
/// <param name="startVa">The virtual address of the beginning of the first page</param>
/// <remarks>This function does not differentiate between allocated and unallocated pages.</remarks>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static int GetPagesCount(ulong va, ulong size, out ulong startVa)
{
// WARNING: Always check that the ulong arithmetic below cannot overflow.
startVa = va & ~(ulong)PageMask;
ulong vaSpan = (va - startVa + size + PageMask) & ~(ulong)PageMask;
return (int)(vaSpan / PageSize);
}
/// <inheritdoc/>
public void Reprotect(ulong va, ulong size, MemoryPermission protection)
{
if (protection.HasFlag(MemoryPermission.Execute))
{
// Some applications use unordered exclusive memory access instructions
// where it is not valid to do so, leading to memory re-ordering that
// makes the code behave incorrectly on some CPUs.
// To work around this, we force all such accesses to be ordered.
using WritableRegion writableRegion = GetWritableRegion(va, (int)size);
HvCodePatcher.RewriteUnorderedExclusiveInstructions(writableRegion.Memory.Span);
}
// TODO
}
/// <inheritdoc/>
public void TrackingReprotect(ulong va, ulong size, MemoryPermission protection)
{
// Protection is inverted on software pages, since the default value is 0.
protection = (~protection) & MemoryPermission.ReadAndWrite;
int pages = GetPagesCount(va, size, out va);
ulong pageStart = va >> PageBits;
if (pages == 1)
{
ulong protTag = protection switch
{
MemoryPermission.None => (ulong)HostMappedPtBits.Mapped,
MemoryPermission.Write => (ulong)HostMappedPtBits.WriteTracked,
_ => (ulong)HostMappedPtBits.ReadWriteTracked,
};
int bit = (int)((pageStart & 31) << 1);
ulong tagMask = 3UL << bit;
ulong invTagMask = ~tagMask;
ulong tag = protTag << bit;
int pageIndex = (int)(pageStart >> PageToPteShift);
ref ulong pageRef = ref _pageBitmap[pageIndex];
ulong pte;
do
{
pte = Volatile.Read(ref pageRef);
}
while ((pte & tagMask) != 0 && Interlocked.CompareExchange(ref pageRef, (pte & invTagMask) | tag, pte) != pte);
}
else
{
ulong pageEnd = pageStart + (ulong)pages;
GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
ulong mask = startMask;
ulong protTag = protection switch
{
MemoryPermission.None => (ulong)HostMappedPtBits.MappedReplicated,
MemoryPermission.Write => (ulong)HostMappedPtBits.WriteTrackedReplicated,
_ => (ulong)HostMappedPtBits.ReadWriteTrackedReplicated,
};
while (pageIndex <= pageEndIndex)
{
if (pageIndex == pageEndIndex)
{
mask &= endMask;
}
ref ulong pageRef = ref _pageBitmap[pageIndex++];
ulong pte;
ulong mappedMask;
// Change the protection of all 2-bit entries that are mapped.
do
{
pte = Volatile.Read(ref pageRef);
mappedMask = pte | (pte >> 1);
mappedMask |= (mappedMask & BlockMappedMask) << 1;
mappedMask &= mask; // Only update mapped pages within the given range.
}
while (Interlocked.CompareExchange(ref pageRef, (pte & (~mappedMask)) | (protTag & mappedMask), pte) != pte);
mask = ulong.MaxValue;
}
}
protection = protection switch
{
MemoryPermission.None => MemoryPermission.ReadAndWrite,
MemoryPermission.Write => MemoryPermission.Read,
_ => MemoryPermission.None,
};
_addressSpace.ReprotectUser(va, size, protection);
}
public void TrackingReprotect(ulong va, ulong size, MemoryPermission protection, bool guest)
{
if (guest)
{
_addressSpace.ReprotectUser(va, size, protection);
}
else
{
_pages.TrackingReprotect(va, size, protection);
}
}
/// <inheritdoc/>
public RegionHandle BeginTracking(ulong address, ulong size, int id)
public RegionHandle BeginTracking(ulong address, ulong size, int id, RegionFlags flags = RegionFlags.None)
{
return Tracking.BeginTracking(address, size, id);
return Tracking.BeginTracking(address, size, id, flags);
}
/// <inheritdoc/>
public MultiRegionHandle BeginGranularTracking(ulong address, ulong size, IEnumerable<IRegionHandle> handles, ulong granularity, int id)
public MultiRegionHandle BeginGranularTracking(ulong address, ulong size, IEnumerable<IRegionHandle> handles, ulong granularity, int id, RegionFlags flags = RegionFlags.None)
{
return Tracking.BeginGranularTracking(address, size, handles, granularity, id);
return Tracking.BeginGranularTracking(address, size, handles, granularity, id, flags);
}
/// <inheritdoc/>
@@ -846,87 +354,7 @@ namespace Ryujinx.Cpu.AppleHv
return Tracking.BeginSmartGranularTracking(address, size, granularity, id);
}
/// <summary>
/// Adds the given address mapping to the page table.
/// </summary>
/// <param name="va">Virtual memory address</param>
/// <param name="size">Size to be mapped</param>
private void AddMapping(ulong va, ulong size)
{
int pages = GetPagesCount(va, size, out _);
ulong pageStart = va >> PageBits;
ulong pageEnd = pageStart + (ulong)pages;
GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
ulong mask = startMask;
while (pageIndex <= pageEndIndex)
{
if (pageIndex == pageEndIndex)
{
mask &= endMask;
}
ref ulong pageRef = ref _pageBitmap[pageIndex++];
ulong pte;
ulong mappedMask;
// Map all 2-bit entries that are unmapped.
do
{
pte = Volatile.Read(ref pageRef);
mappedMask = pte | (pte >> 1);
mappedMask |= (mappedMask & BlockMappedMask) << 1;
mappedMask |= ~mask; // Treat everything outside the range as mapped, thus unchanged.
}
while (Interlocked.CompareExchange(ref pageRef, (pte & mappedMask) | (BlockMappedMask & (~mappedMask)), pte) != pte);
mask = ulong.MaxValue;
}
}
/// <summary>
/// Removes the given address mapping from the page table.
/// </summary>
/// <param name="va">Virtual memory address</param>
/// <param name="size">Size to be unmapped</param>
private void RemoveMapping(ulong va, ulong size)
{
int pages = GetPagesCount(va, size, out _);
ulong pageStart = va >> PageBits;
ulong pageEnd = pageStart + (ulong)pages;
GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
startMask = ~startMask;
endMask = ~endMask;
ulong mask = startMask;
while (pageIndex <= pageEndIndex)
{
if (pageIndex == pageEndIndex)
{
mask |= endMask;
}
ref ulong pageRef = ref _pageBitmap[pageIndex++];
ulong pte;
do
{
pte = Volatile.Read(ref pageRef);
}
while (Interlocked.CompareExchange(ref pageRef, pte & mask, pte) != pte);
mask = 0;
}
}
private ulong GetPhysicalAddressChecked(ulong va)
private nuint GetPhysicalAddressChecked(ulong va)
{
if (!IsMapped(va))
{
@@ -936,9 +364,9 @@ namespace Ryujinx.Cpu.AppleHv
return GetPhysicalAddressInternal(va);
}
private ulong GetPhysicalAddressInternal(ulong va)
private nuint GetPhysicalAddressInternal(ulong va)
{
return _pageTable.Read(va) + (va & PageMask);
return (nuint)(_pageTable.Read(va) + (va & PageMask));
}
/// <summary>
@@ -949,6 +377,17 @@ namespace Ryujinx.Cpu.AppleHv
_addressSpace.Dispose();
}
private static void ThrowInvalidMemoryRegionException(string message) => throw new InvalidMemoryRegionException(message);
protected override Memory<byte> GetPhysicalAddressMemory(nuint pa, int size)
=> _backingMemory.GetMemory(pa, size);
protected override Span<byte> GetPhysicalAddressSpan(nuint pa, int size)
=> _backingMemory.GetSpan(pa, size);
protected override nuint TranslateVirtualAddressChecked(ulong va)
=> GetPhysicalAddressChecked(va);
protected override nuint TranslateVirtualAddressUnchecked(ulong va)
=> GetPhysicalAddressInternal(va);
}
}
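Note: most of the code removed from this file maintained _pageBitmap, a software page table that packs the state of 32 pages into each ulong, two bits per page; the commit replaces it with the shared ManagedPageFlags type. A sketch of the packing arithmetic (not part of the diff; variable names are illustrative):

    ulong[] pageBitmap = new ulong[1 << (39 - (12 + 5))]; // e.g. a 39-bit address space
    ulong va = 0x12345000UL;
    ulong page = va >> 12;              // PageBits = 12
    int bit = (int)((page & 31) << 1);  // two bits per page, 32 pages per ulong
    int wordIndex = (int)(page >> 5);   // PageToPteShift = 5
    ulong state = (pageBitmap[wordIndex] >> bit) & 3; // 0 = unmapped, 1 = mapped, 2 = write tracked, 3 = read/write tracked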

View file

@@ -28,8 +28,9 @@ namespace Ryujinx.Cpu
/// <param name="address">CPU virtual address of the region</param>
/// <param name="size">Size of the region</param>
/// <param name="id">Handle ID</param>
/// <param name="flags">Region flags</param>
/// <returns>The memory tracking handle</returns>
RegionHandle BeginTracking(ulong address, ulong size, int id);
RegionHandle BeginTracking(ulong address, ulong size, int id, RegionFlags flags = RegionFlags.None);
/// <summary>
/// Obtains a memory tracking handle for the given virtual region, with a specified granularity. This should be disposed when finished with.
@@ -39,8 +40,9 @@ namespace Ryujinx.Cpu
/// <param name="handles">Handles to inherit state from or reuse. When none are present, provide null</param>
/// <param name="granularity">Desired granularity of write tracking</param>
/// <param name="id">Handle ID</param>
/// <param name="flags">Region flags</param>
/// <returns>The memory tracking handle</returns>
MultiRegionHandle BeginGranularTracking(ulong address, ulong size, IEnumerable<IRegionHandle> handles, ulong granularity, int id);
MultiRegionHandle BeginGranularTracking(ulong address, ulong size, IEnumerable<IRegionHandle> handles, ulong granularity, int id, RegionFlags flags = RegionFlags.None);
/// <summary>
/// Obtains a smart memory tracking handle for the given virtual region, with a specified granularity. This should be disposed when finished with.
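Note: because the new parameter defaults to RegionFlags.None, existing call sites compile unchanged. A hypothetical caller (the memoryManager variable and the RegionFlags.Mapped member are assumptions, not taken from this diff):

    RegionHandle plain = memoryManager.BeginTracking(address, size, id);                       // as before
    RegionHandle flagged = memoryManager.BeginTracking(address, size, id, RegionFlags.Mapped); // opts in to a flag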

View file

@@ -133,28 +133,34 @@ namespace Ryujinx.Cpu.Jit
_mappingTree.Remove(_mappingTree.GetNode(offset));
}
private bool VirtualMemoryEvent(ulong address, ulong size, bool write)
private ulong VirtualMemoryEvent(ulong address, ulong size, bool write)
{
Mapping map;
lock (_lock)
{
map = _mappingTree.GetNode(address);
}
if (map == null)
{
return false;
return 0; // No mapping found (the previous implementation returned false here).
}
address -= map.Address;
if (address >= (map.EndVa - map.Va))
{
address -= (ulong)(map.BridgeSize / 2);
}
return _tracking.VirtualMemoryEvent(map.Va + address, size, write);
// The callback now reports the faulting virtual address instead of a bool.
if (_tracking.VirtualMemoryEvent(map.Va + address, size, write))
{
return map.Va + address;
}
return 0; // Tracking did not handle the event.
}
public override void Destroy()
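Note: the change above replaces the handled/not-handled bool with a ulong carrying an address, using 0 as the failure value. A hypothetical caller under the new contract:

    ulong result = VirtualMemoryEvent(address, size, write: true);
    if (result == 0)
    {
        // No mapping covered the address, or tracking did not handle the event.
    }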

View file

@@ -0,0 +1,35 @@
using Ryujinx.Common.Collections;
using System;
namespace Ryujinx.Cpu.Jit.HostTracked
{
internal class AddressIntrusiveRedBlackTree<T> : IntrusiveRedBlackTree<T> where T : IntrusiveRedBlackTreeNode<T>, IComparable<T>, IComparable<ulong>
{
/// <summary>
/// Retrieve the node that is considered equal to the specified address by the comparator.
/// </summary>
/// <param name="address">Address to compare with</param>
/// <returns>Node that is equal to <paramref name="address"/></returns>
public T GetNode(ulong address)
{
T node = Root;
while (node != null)
{
int cmp = node.CompareTo(address);
if (cmp < 0)
{
node = node.Left;
}
else if (cmp > 0)
{
node = node.Right;
}
else
{
return node;
}
}
return null;
}
}
}
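Note: a minimal usage sketch (not part of the diff), assuming a node type like the Mapping class in AddressSpacePartition below, which implements IComparable<ulong> with range semantics:

    var tree = new AddressIntrusiveRedBlackTree<Mapping>();
    tree.Add(new Mapping(0x2000, 0x2000, MappingType.None)); // covers 0x2000..0x3FFF
    Mapping hit = tree.GetNode(0x3000UL);  // found: CompareTo(0x3000) returns 0 inside the range
    Mapping miss = tree.GetNode(0x4000UL); // null: 0x4000 is past EndAddress - 1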

View file

@@ -0,0 +1,708 @@
using Ryujinx.Common;
using Ryujinx.Common.Collections;
using Ryujinx.Memory;
using System;
using System.Diagnostics;
using System.Threading;
namespace Ryujinx.Cpu.Jit.HostTracked
{
readonly struct PrivateRange
{
public readonly MemoryBlock Memory;
public readonly ulong Offset;
public readonly ulong Size;
public static PrivateRange Empty => new(null, 0, 0);
public PrivateRange(MemoryBlock memory, ulong offset, ulong size)
{
Memory = memory;
Offset = offset;
Size = size;
}
}
class AddressSpacePartition : IDisposable
{
public const ulong GuestPageSize = 0x1000;
private const int DefaultBlockAlignment = 1 << 20;
private enum MappingType : byte
{
None,
Private,
}
private class Mapping : IntrusiveRedBlackTreeNode<Mapping>, IComparable<Mapping>, IComparable<ulong>
{
public ulong Address { get; private set; }
public ulong Size { get; private set; }
public ulong EndAddress => Address + Size;
public MappingType Type { get; private set; }
public Mapping(ulong address, ulong size, MappingType type)
{
Address = address;
Size = size;
Type = type;
}
public Mapping Split(ulong splitAddress)
{
ulong leftSize = splitAddress - Address;
ulong rightSize = EndAddress - splitAddress;
Mapping left = new(Address, leftSize, Type);
Address = splitAddress;
Size = rightSize;
return left;
}
public void UpdateState(MappingType newType)
{
Type = newType;
}
public void Extend(ulong sizeDelta)
{
Size += sizeDelta;
}
public int CompareTo(Mapping other)
{
if (Address < other.Address)
{
return -1;
}
else if (Address <= other.EndAddress - 1UL)
{
return 0;
}
else
{
return 1;
}
}
public int CompareTo(ulong address)
{
if (address < Address)
{
return -1;
}
else if (address <= EndAddress - 1UL)
{
return 0;
}
else
{
return 1;
}
}
}
private class PrivateMapping : IntrusiveRedBlackTreeNode<PrivateMapping>, IComparable<PrivateMapping>, IComparable<ulong>
{
public ulong Address { get; private set; }
public ulong Size { get; private set; }
public ulong EndAddress => Address + Size;
public PrivateMemoryAllocation PrivateAllocation { get; private set; }
public PrivateMapping(ulong address, ulong size, PrivateMemoryAllocation privateAllocation)
{
Address = address;
Size = size;
PrivateAllocation = privateAllocation;
}
public PrivateMapping Split(ulong splitAddress)
{
ulong leftSize = splitAddress - Address;
ulong rightSize = EndAddress - splitAddress;
Debug.Assert(leftSize > 0);
Debug.Assert(rightSize > 0);
(var leftAllocation, PrivateAllocation) = PrivateAllocation.Split(leftSize);
PrivateMapping left = new(Address, leftSize, leftAllocation);
Address = splitAddress;
Size = rightSize;
return left;
}
public void Map(AddressSpacePartitionMultiAllocation baseBlock, ulong baseAddress, PrivateMemoryAllocation newAllocation)
{
baseBlock.MapView(newAllocation.Memory, newAllocation.Offset, Address - baseAddress, Size);
PrivateAllocation = newAllocation;
}
public void Unmap(AddressSpacePartitionMultiAllocation baseBlock, ulong baseAddress)
{
if (PrivateAllocation.IsValid)
{
baseBlock.UnmapView(PrivateAllocation.Memory, Address - baseAddress, Size);
PrivateAllocation.Dispose();
}
PrivateAllocation = default;
}
public void Extend(ulong sizeDelta)
{
Size += sizeDelta;
}
public int CompareTo(PrivateMapping other)
{
if (Address < other.Address)
{
return -1;
}
else if (Address <= other.EndAddress - 1UL)
{
return 0;
}
else
{
return 1;
}
}
public int CompareTo(ulong address)
{
if (address < Address)
{
return -1;
}
else if (address <= EndAddress - 1UL)
{
return 0;
}
else
{
return 1;
}
}
}
private readonly MemoryBlock _backingMemory;
private readonly AddressSpacePartitionMultiAllocation _baseMemory;
private readonly PrivateMemoryAllocator _privateMemoryAllocator;
private readonly AddressIntrusiveRedBlackTree<Mapping> _mappingTree;
private readonly AddressIntrusiveRedBlackTree<PrivateMapping> _privateTree;
private readonly ReaderWriterLockSlim _treeLock;
private readonly ulong _hostPageSize;
private ulong? _firstPagePa;
private ulong? _lastPagePa;
private ulong _cachedFirstPagePa;
private ulong _cachedLastPagePa;
private MemoryBlock _firstPageMemoryForUnmap;
private ulong _firstPageOffsetForLateMap;
private MemoryPermission _firstPageMemoryProtection;
public ulong Address { get; }
public ulong Size { get; }
public ulong EndAddress => Address + Size;
public AddressSpacePartition(AddressSpacePartitionAllocation baseMemory, MemoryBlock backingMemory, ulong address, ulong size)
{
_privateMemoryAllocator = new PrivateMemoryAllocator(DefaultBlockAlignment, MemoryAllocationFlags.Mirrorable);
_mappingTree = new AddressIntrusiveRedBlackTree<Mapping>();
_privateTree = new AddressIntrusiveRedBlackTree<PrivateMapping>();
_treeLock = new ReaderWriterLockSlim();
_mappingTree.Add(new Mapping(address, size, MappingType.None));
_privateTree.Add(new PrivateMapping(address, size, default));
_hostPageSize = MemoryBlock.GetPageSize();
_backingMemory = backingMemory;
_baseMemory = new(baseMemory);
_cachedFirstPagePa = ulong.MaxValue;
_cachedLastPagePa = ulong.MaxValue;
Address = address;
Size = size;
}
public bool IsEmpty()
{
_treeLock.EnterReadLock();
try
{
Mapping map = _mappingTree.GetNode(Address);
return map != null && map.Address == Address && map.Size == Size && map.Type == MappingType.None;
}
finally
{
_treeLock.ExitReadLock();
}
}
public void Map(ulong va, ulong pa, ulong size)
{
Debug.Assert(va >= Address);
Debug.Assert(va + size <= EndAddress);
if (va == Address)
{
_firstPagePa = pa;
}
if (va <= EndAddress - GuestPageSize && va + size > EndAddress - GuestPageSize)
{
_lastPagePa = pa + ((EndAddress - GuestPageSize) - va);
}
Update(va, pa, size, MappingType.Private);
}
public void Unmap(ulong va, ulong size)
{
Debug.Assert(va >= Address);
Debug.Assert(va + size <= EndAddress);
if (va == Address)
{
_firstPagePa = null;
}
if (va <= EndAddress - GuestPageSize && va + size > EndAddress - GuestPageSize)
{
_lastPagePa = null;
}
Update(va, 0UL, size, MappingType.None);
}
public void ReprotectAligned(ulong va, ulong size, MemoryPermission protection)
{
Debug.Assert(va >= Address);
Debug.Assert(va + size <= EndAddress);
_baseMemory.Reprotect(va - Address, size, protection, false);
if (va == Address)
{
_firstPageMemoryProtection = protection;
}
}
public void Reprotect(
ulong va,
ulong size,
MemoryPermission protection,
AddressSpacePartitioned addressSpace,
Action<ulong, IntPtr, ulong> updatePtCallback)
{
if (_baseMemory.LazyInitMirrorForProtection(addressSpace, Address, Size, protection))
{
LateMap();
}
updatePtCallback(va, _baseMemory.GetPointerForProtection(va - Address, size, protection), size);
}
public IntPtr GetPointer(ulong va, ulong size)
{
Debug.Assert(va >= Address);
Debug.Assert(va + size <= EndAddress);
return _baseMemory.GetPointer(va - Address, size);
}
public void InsertBridgeAtEnd(AddressSpacePartition partitionAfter, bool useProtectionMirrors)
{
ulong firstPagePa = partitionAfter?._firstPagePa ?? ulong.MaxValue;
ulong lastPagePa = _lastPagePa ?? ulong.MaxValue;
if (firstPagePa != _cachedFirstPagePa || lastPagePa != _cachedLastPagePa)
{
if (partitionAfter != null && partitionAfter._firstPagePa.HasValue)
{
(MemoryBlock firstPageMemory, ulong firstPageOffset) = partitionAfter.GetFirstPageMemoryAndOffset();
_baseMemory.MapView(firstPageMemory, firstPageOffset, Size, _hostPageSize);
if (!useProtectionMirrors)
{
_baseMemory.Reprotect(Size, _hostPageSize, partitionAfter._firstPageMemoryProtection, throwOnFail: false);
}
_firstPageMemoryForUnmap = firstPageMemory;
_firstPageOffsetForLateMap = firstPageOffset;
}
else
{
MemoryBlock firstPageMemoryForUnmap = _firstPageMemoryForUnmap;
if (firstPageMemoryForUnmap != null)
{
_baseMemory.UnmapView(firstPageMemoryForUnmap, Size, _hostPageSize);
_firstPageMemoryForUnmap = null;
}
}
_cachedFirstPagePa = firstPagePa;
_cachedLastPagePa = lastPagePa;
}
}
public void ReprotectBridge(MemoryPermission protection)
{
if (_firstPageMemoryForUnmap != null)
{
_baseMemory.Reprotect(Size, _hostPageSize, protection, throwOnFail: false);
}
}
private (MemoryBlock, ulong) GetFirstPageMemoryAndOffset()
{
_treeLock.EnterReadLock();
try
{
PrivateMapping map = _privateTree.GetNode(Address);
if (map != null && map.PrivateAllocation.IsValid)
{
return (map.PrivateAllocation.Memory, map.PrivateAllocation.Offset + (Address - map.Address));
}
}
finally
{
_treeLock.ExitReadLock();
}
return (_backingMemory, _firstPagePa.Value);
}
public PrivateRange GetPrivateAllocation(ulong va)
{
_treeLock.EnterReadLock();
try
{
PrivateMapping map = _privateTree.GetNode(va);
if (map != null && map.PrivateAllocation.IsValid)
{
return new(map.PrivateAllocation.Memory, map.PrivateAllocation.Offset + (va - map.Address), map.Size - (va - map.Address));
}
}
finally
{
_treeLock.ExitReadLock();
}
return PrivateRange.Empty;
}
private void Update(ulong va, ulong pa, ulong size, MappingType type)
{
_treeLock.EnterWriteLock();
try
{
Mapping map = _mappingTree.GetNode(va);
Update(map, va, pa, size, type);
}
finally
{
_treeLock.ExitWriteLock();
}
}
private Mapping Update(Mapping map, ulong va, ulong pa, ulong size, MappingType type)
{
ulong endAddress = va + size;
for (; map != null; map = map.Successor)
{
if (map.Address < va)
{
_mappingTree.Add(map.Split(va));
}
if (map.EndAddress > endAddress)
{
Mapping newMap = map.Split(endAddress);
_mappingTree.Add(newMap);
map = newMap;
}
switch (type)
{
case MappingType.None:
ulong alignment = _hostPageSize;
bool unmappedBefore = map.Predecessor == null ||
(map.Predecessor.Type == MappingType.None && map.Predecessor.Address <= BitUtils.AlignDown(va, alignment));
bool unmappedAfter = map.Successor == null ||
(map.Successor.Type == MappingType.None && map.Successor.EndAddress >= BitUtils.AlignUp(endAddress, alignment));
UnmapPrivate(va, size, unmappedBefore, unmappedAfter);
break;
case MappingType.Private:
MapPrivate(va, size);
break;
}
map.UpdateState(type);
map = TryCoalesce(map);
if (map.EndAddress >= endAddress)
{
break;
}
}
return map;
}
private Mapping TryCoalesce(Mapping map)
{
Mapping previousMap = map.Predecessor;
Mapping nextMap = map.Successor;
if (previousMap != null && CanCoalesce(previousMap, map))
{
previousMap.Extend(map.Size);
_mappingTree.Remove(map);
map = previousMap;
}
if (nextMap != null && CanCoalesce(map, nextMap))
{
map.Extend(nextMap.Size);
_mappingTree.Remove(nextMap);
}
return map;
}
private static bool CanCoalesce(Mapping left, Mapping right)
{
return left.Type == right.Type;
}
private void MapPrivate(ulong va, ulong size)
{
ulong endAddress = va + size;
ulong alignment = _hostPageSize;
// Expand the range outwards based on page size to ensure that at least the requested region is mapped.
ulong vaAligned = BitUtils.AlignDown(va, alignment);
ulong endAddressAligned = BitUtils.AlignUp(endAddress, alignment);
PrivateMapping map = _privateTree.GetNode(va);
for (; map != null; map = map.Successor)
{
if (!map.PrivateAllocation.IsValid)
{
if (map.Address < vaAligned)
{
_privateTree.Add(map.Split(vaAligned));
}
if (map.EndAddress > endAddressAligned)
{
PrivateMapping newMap = map.Split(endAddressAligned);
_privateTree.Add(newMap);
map = newMap;
}
map.Map(_baseMemory, Address, _privateMemoryAllocator.Allocate(map.Size, _hostPageSize));
}
if (map.EndAddress >= endAddressAligned)
{
break;
}
}
}
private void UnmapPrivate(ulong va, ulong size, bool unmappedBefore, bool unmappedAfter)
{
ulong endAddress = va + size;
ulong alignment = _hostPageSize;
// If the adjacent mappings are unmapped, expand the range outwards,
// otherwise shrink it inwards. We must ensure we won't unmap pages that might still be in use.
ulong vaAligned = unmappedBefore ? BitUtils.AlignDown(va, alignment) : BitUtils.AlignUp(va, alignment);
ulong endAddressAligned = unmappedAfter ? BitUtils.AlignUp(endAddress, alignment) : BitUtils.AlignDown(endAddress, alignment);
if (endAddressAligned <= vaAligned)
{
return;
}
PrivateMapping map = _privateTree.GetNode(vaAligned);
for (; map != null; map = map.Successor)
{
if (map.PrivateAllocation.IsValid)
{
if (map.Address < vaAligned)
{
_privateTree.Add(map.Split(vaAligned));
}
if (map.EndAddress > endAddressAligned)
{
PrivateMapping newMap = map.Split(endAddressAligned);
_privateTree.Add(newMap);
map = newMap;
}
map.Unmap(_baseMemory, Address);
map = TryCoalesce(map);
}
if (map.EndAddress >= endAddressAligned)
{
break;
}
}
}
private PrivateMapping TryCoalesce(PrivateMapping map)
{
PrivateMapping previousMap = map.Predecessor;
PrivateMapping nextMap = map.Successor;
if (previousMap != null && CanCoalesce(previousMap, map))
{
previousMap.Extend(map.Size);
_privateTree.Remove(map);
map = previousMap;
}
if (nextMap != null && CanCoalesce(map, nextMap))
{
map.Extend(nextMap.Size);
_privateTree.Remove(nextMap);
}
return map;
}
private static bool CanCoalesce(PrivateMapping left, PrivateMapping right)
{
return !left.PrivateAllocation.IsValid && !right.PrivateAllocation.IsValid;
}
private void LateMap()
{
// Map all existing private allocations.
// This is necessary to ensure mirrors that are lazily created have the same mappings as the main one.
PrivateMapping map = _privateTree.GetNode(Address);
for (; map != null; map = map.Successor)
{
if (map.PrivateAllocation.IsValid)
{
_baseMemory.LateMapView(map.PrivateAllocation.Memory, map.PrivateAllocation.Offset, map.Address - Address, map.Size);
}
}
MemoryBlock firstPageMemory = _firstPageMemoryForUnmap;
ulong firstPageOffset = _firstPageOffsetForLateMap;
if (firstPageMemory != null)
{
_baseMemory.LateMapView(firstPageMemory, firstPageOffset, Size, _hostPageSize);
}
}
public PrivateRange GetFirstPrivateAllocation(ulong va, ulong size, out ulong nextVa)
{
_treeLock.EnterReadLock();
try
{
PrivateMapping map = _privateTree.GetNode(va);
nextVa = map.EndAddress;
if (map != null && map.PrivateAllocation.IsValid)
{
ulong startOffset = va - map.Address;
return new(
map.PrivateAllocation.Memory,
map.PrivateAllocation.Offset + startOffset,
Math.Min(map.PrivateAllocation.Size - startOffset, size));
}
}
finally
{
_treeLock.ExitReadLock();
}
return PrivateRange.Empty;
}
public bool HasPrivateAllocation(ulong va, ulong size, ulong startVa, ulong startSize, ref PrivateRange range)
{
ulong endVa = va + size;
_treeLock.EnterReadLock();
try
{
for (PrivateMapping map = _privateTree.GetNode(va); map != null && map.Address < endVa; map = map.Successor)
{
if (map.PrivateAllocation.IsValid)
{
if (map.Address <= startVa && map.EndAddress >= startVa + startSize)
{
ulong startOffset = startVa - map.Address;
range = new(
map.PrivateAllocation.Memory,
map.PrivateAllocation.Offset + startOffset,
Math.Min(map.PrivateAllocation.Size - startOffset, startSize));
}
return true;
}
}
}
finally
{
_treeLock.ExitReadLock();
}
return false;
}
public void Dispose()
{
GC.SuppressFinalize(this);
_privateMemoryAllocator.Dispose();
_baseMemory.Dispose();
}
}
}
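Note: a worked example (not part of the diff) of the alignment rule in UnmapPrivate above, assuming a 16 KiB host page size: an unmapped neighbor lets the range expand outwards to a page boundary, while a live neighbor forces it to shrink inwards so no page still in use gets unmapped.

    bool unmappedBefore = true;   // predecessor region is free
    bool unmappedAfter = false;   // successor region is still in use
    ulong alignment = 0x4000;     // assumed host page size
    ulong va = 0x5000, endAddress = 0x9000;
    ulong vaAligned = unmappedBefore
        ? BitUtils.AlignDown(va, alignment)    // 0x4000: expand outwards
        : BitUtils.AlignUp(va, alignment);
    ulong endAddressAligned = unmappedAfter
        ? BitUtils.AlignUp(endAddress, alignment)
        : BitUtils.AlignDown(endAddress, alignment); // 0x8000: shrink inwards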

View file

@@ -0,0 +1,202 @@
using Ryujinx.Common;
using Ryujinx.Common.Collections;
using Ryujinx.Memory;
using Ryujinx.Memory.Tracking;
using System;
namespace Ryujinx.Cpu.Jit.HostTracked
{
readonly struct AddressSpacePartitionAllocation : IDisposable
{
private readonly AddressSpacePartitionAllocator _owner;
private readonly PrivateMemoryAllocatorImpl<AddressSpacePartitionAllocator.Block>.Allocation _allocation;
public IntPtr Pointer => (IntPtr)((ulong)_allocation.Block.Memory.Pointer + _allocation.Offset);
public bool IsValid => _owner != null;
public AddressSpacePartitionAllocation(
AddressSpacePartitionAllocator owner,
PrivateMemoryAllocatorImpl<AddressSpacePartitionAllocator.Block>.Allocation allocation)
{
_owner = owner;
_allocation = allocation;
}
public void RegisterMapping(ulong va, ulong endVa)
{
_allocation.Block.AddMapping(_allocation.Offset, _allocation.Size, va, endVa);
}
public void MapView(MemoryBlock srcBlock, ulong srcOffset, ulong dstOffset, ulong size)
{
_allocation.Block.Memory.MapView(srcBlock, srcOffset, _allocation.Offset + dstOffset, size);
}
public void UnmapView(MemoryBlock srcBlock, ulong offset, ulong size)
{
_allocation.Block.Memory.UnmapView(srcBlock, _allocation.Offset + offset, size);
}
public void Reprotect(ulong offset, ulong size, MemoryPermission permission, bool throwOnFail)
{
_allocation.Block.Memory.Reprotect(_allocation.Offset + offset, size, permission, throwOnFail);
}
public IntPtr GetPointer(ulong offset, ulong size)
{
return _allocation.Block.Memory.GetPointer(_allocation.Offset + offset, size);
}
public void Dispose()
{
_allocation.Block.RemoveMapping(_allocation.Offset, _allocation.Size);
_owner.Free(_allocation.Block, _allocation.Offset, _allocation.Size);
}
}
class AddressSpacePartitionAllocator : PrivateMemoryAllocatorImpl<AddressSpacePartitionAllocator.Block>
{
private const ulong DefaultBlockAlignment = 1UL << 32; // 4GB
public class Block : PrivateMemoryAllocator.Block
{
private readonly MemoryTracking _tracking;
private readonly Func<ulong, ulong> _readPtCallback;
private readonly MemoryEhMeilleure _memoryEh;
private class Mapping : IntrusiveRedBlackTreeNode<Mapping>, IComparable<Mapping>, IComparable<ulong>
{
public ulong Address { get; }
public ulong Size { get; }
public ulong EndAddress => Address + Size;
public ulong Va { get; }
public ulong EndVa { get; }
public Mapping(ulong address, ulong size, ulong va, ulong endVa)
{
Address = address;
Size = size;
Va = va;
EndVa = endVa;
}
public int CompareTo(Mapping other)
{
if (Address < other.Address)
{
return -1;
}
else if (Address <= other.EndAddress - 1UL)
{
return 0;
}
else
{
return 1;
}
}
public int CompareTo(ulong address)
{
if (address < Address)
{
return -1;
}
else if (address <= EndAddress - 1UL)
{
return 0;
}
else
{
return 1;
}
}
}
private readonly AddressIntrusiveRedBlackTree<Mapping> _mappingTree;
private readonly object _lock;
public Block(MemoryTracking tracking, Func<ulong, ulong> readPtCallback, MemoryBlock memory, ulong size, object locker) : base(memory, size)
{
_tracking = tracking;
_readPtCallback = readPtCallback;
_memoryEh = new(memory, null, tracking, VirtualMemoryEvent);
_mappingTree = new();
_lock = locker;
}
public void AddMapping(ulong offset, ulong size, ulong va, ulong endVa)
{
_mappingTree.Add(new(offset, size, va, endVa));
}
public void RemoveMapping(ulong offset, ulong size)
{
_mappingTree.Remove(_mappingTree.GetNode(offset));
}
private ulong VirtualMemoryEvent(ulong address, ulong size, bool write)
{
Mapping map;
lock (_lock)
{
map = _mappingTree.GetNode(address);
}
if (map == null)
{
return 0;
}
address -= map.Address;
ulong addressAligned = BitUtils.AlignDown(address, AddressSpacePartition.GuestPageSize);
ulong endAddressAligned = BitUtils.AlignUp(address + size, AddressSpacePartition.GuestPageSize);
ulong sizeAligned = endAddressAligned - addressAligned;
if (!_tracking.VirtualMemoryEvent(map.Va + addressAligned, sizeAligned, write))
{
return 0;
}
return _readPtCallback(map.Va + address);
}
public override void Destroy()
{
_memoryEh.Dispose();
base.Destroy();
}
}
private readonly MemoryTracking _tracking;
private readonly Func<ulong, ulong> _readPtCallback;
private readonly object _lock;
public AddressSpacePartitionAllocator(
MemoryTracking tracking,
Func<ulong, ulong> readPtCallback,
object locker) : base(DefaultBlockAlignment, MemoryAllocationFlags.Reserve | MemoryAllocationFlags.ViewCompatible)
{
_tracking = tracking;
_readPtCallback = readPtCallback;
_lock = locker;
}
public AddressSpacePartitionAllocation Allocate(ulong va, ulong size)
{
AddressSpacePartitionAllocation allocation = new(this, Allocate(size, MemoryBlock.GetPageSize(), CreateBlock));
allocation.RegisterMapping(va, va + size);
return allocation;
}
private Block CreateBlock(MemoryBlock memory, ulong size)
{
return new Block(_tracking, _readPtCallback, memory, size, _lock);
}
}
}
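
For orientation, the fault handler above reduces to a rebase plus realignment to the guest page size; the following hypothetical sketch (not part of the commit) restates just that arithmetic:

static class FaultMath
{
    // Rebase a fault offset inside a mapping to a guest VA and realign the
    // access to the guest page size (mirrors Block.VirtualMemoryEvent above).
    public static (ulong Va, ulong Size) Resolve(
        ulong faultOffset, ulong accessSize,
        ulong mappingOffset, ulong mappingVa, ulong guestPageSize)
    {
        ulong inMapping = faultOffset - mappingOffset;
        ulong start = inMapping & ~(guestPageSize - 1);                                  // AlignDown
        ulong end = (inMapping + accessSize + guestPageSize - 1) & ~(guestPageSize - 1); // AlignUp
        return (mappingVa + start, end - start);
    }
}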


@ -0,0 +1,101 @@
using Ryujinx.Memory;
using System;
using System.Diagnostics;
namespace Ryujinx.Cpu.Jit.HostTracked
{
class AddressSpacePartitionMultiAllocation : IDisposable
{
private readonly AddressSpacePartitionAllocation _baseMemory;
private AddressSpacePartitionAllocation _baseMemoryRo;
private AddressSpacePartitionAllocation _baseMemoryNone;
public AddressSpacePartitionMultiAllocation(AddressSpacePartitionAllocation baseMemory)
{
_baseMemory = baseMemory;
}
public void MapView(MemoryBlock srcBlock, ulong srcOffset, ulong dstOffset, ulong size)
{
_baseMemory.MapView(srcBlock, srcOffset, dstOffset, size);
if (_baseMemoryRo.IsValid)
{
_baseMemoryRo.MapView(srcBlock, srcOffset, dstOffset, size);
_baseMemoryRo.Reprotect(dstOffset, size, MemoryPermission.Read, false);
}
}
public void LateMapView(MemoryBlock srcBlock, ulong srcOffset, ulong dstOffset, ulong size)
{
_baseMemoryRo.MapView(srcBlock, srcOffset, dstOffset, size);
_baseMemoryRo.Reprotect(dstOffset, size, MemoryPermission.Read, false);
}
public void UnmapView(MemoryBlock srcBlock, ulong offset, ulong size)
{
_baseMemory.UnmapView(srcBlock, offset, size);
if (_baseMemoryRo.IsValid)
{
_baseMemoryRo.UnmapView(srcBlock, offset, size);
}
}
public void Reprotect(ulong offset, ulong size, MemoryPermission permission, bool throwOnFail)
{
_baseMemory.Reprotect(offset, size, permission, throwOnFail);
}
public IntPtr GetPointer(ulong offset, ulong size)
{
return _baseMemory.GetPointer(offset, size);
}
public bool LazyInitMirrorForProtection(AddressSpacePartitioned addressSpace, ulong blockAddress, ulong blockSize, MemoryPermission permission)
{
if (permission == MemoryPermission.None && !_baseMemoryNone.IsValid)
{
_baseMemoryNone = addressSpace.CreateAsPartitionAllocation(blockAddress, blockSize);
}
else if (permission == MemoryPermission.Read && !_baseMemoryRo.IsValid)
{
_baseMemoryRo = addressSpace.CreateAsPartitionAllocation(blockAddress, blockSize);
return true;
}
return false;
}
public IntPtr GetPointerForProtection(ulong offset, ulong size, MemoryPermission permission)
{
AddressSpacePartitionAllocation allocation = permission switch
{
MemoryPermission.ReadAndWrite => _baseMemory,
MemoryPermission.Read => _baseMemoryRo,
MemoryPermission.None => _baseMemoryNone,
_ => throw new ArgumentException($"Invalid protection \"{permission}\"."),
};
Debug.Assert(allocation.IsValid);
return allocation.GetPointer(offset, size);
}
public void Dispose()
{
_baseMemory.Dispose();
if (_baseMemoryRo.IsValid)
{
_baseMemoryRo.Dispose();
}
if (_baseMemoryNone.IsValid)
{
_baseMemoryNone.Dispose();
}
}
}
}
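
The three allocations above implement protection by swapping which view of the same memory is exposed, rather than reprotecting pages on the hot path. A self-contained sketch of the idea, with hypothetical names (not from the commit):

using System;

static class ProtectionMirrors
{
    // Pick the host view matching the requested permission; the RW view is
    // always present, the others are created lazily on first demotion.
    public static IntPtr Select(IntPtr rw, IntPtr ro, IntPtr none, bool canRead, bool canWrite)
        => canWrite ? rw : canRead ? ro : none;
}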


@ -0,0 +1,407 @@
using Ryujinx.Common;
using Ryujinx.Memory;
using Ryujinx.Memory.Tracking;
using System;
using System.Collections.Generic;
using System.Diagnostics;
namespace Ryujinx.Cpu.Jit.HostTracked
{
class AddressSpacePartitioned : IDisposable
{
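// The address space is carved into fixed 32 MiB (1 << 25) partitions.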
private const int PartitionBits = 25;
private const ulong PartitionSize = 1UL << PartitionBits;
private readonly MemoryBlock _backingMemory;
private readonly List<AddressSpacePartition> _partitions;
private readonly AddressSpacePartitionAllocator _asAllocator;
private readonly Action<ulong, IntPtr, ulong> _updatePtCallback;
private readonly bool _useProtectionMirrors;
public AddressSpacePartitioned(MemoryTracking tracking, MemoryBlock backingMemory, NativePageTable nativePageTable, bool useProtectionMirrors)
{
_backingMemory = backingMemory;
_partitions = new();
_asAllocator = new(tracking, nativePageTable.Read, _partitions);
_updatePtCallback = nativePageTable.Update;
_useProtectionMirrors = useProtectionMirrors;
}
public void Map(ulong va, ulong pa, ulong size)
{
ulong endVa = va + size;
lock (_partitions)
{
EnsurePartitionsLocked(va, size);
while (va < endVa)
{
int partitionIndex = FindPartitionIndexLocked(va);
AddressSpacePartition partition = _partitions[partitionIndex];
(ulong clampedVa, ulong clampedEndVa) = ClampRange(partition, va, endVa);
partition.Map(clampedVa, pa, clampedEndVa - clampedVa);
ulong currentSize = clampedEndVa - clampedVa;
va += currentSize;
pa += currentSize;
InsertOrRemoveBridgeIfNeeded(partitionIndex);
}
}
}
public void Unmap(ulong va, ulong size)
{
ulong endVa = va + size;
while (va < endVa)
{
AddressSpacePartition partition;
lock (_partitions)
{
int partitionIndex = FindPartitionIndexLocked(va);
if (partitionIndex < 0)
{
va += PartitionSize - (va & (PartitionSize - 1));
continue;
}
partition = _partitions[partitionIndex];
(ulong clampedVa, ulong clampedEndVa) = ClampRange(partition, va, endVa);
partition.Unmap(clampedVa, clampedEndVa - clampedVa);
va += clampedEndVa - clampedVa;
InsertOrRemoveBridgeIfNeeded(partitionIndex);
if (partition.IsEmpty())
{
_partitions.Remove(partition);
partition.Dispose();
}
}
}
}
public void Reprotect(ulong va, ulong size, MemoryPermission protection)
{
ulong endVa = va + size;
lock (_partitions)
{
while (va < endVa)
{
AddressSpacePartition partition = FindPartitionWithIndex(va, out int partitionIndex);
if (partition == null)
{
va += PartitionSize - (va & (PartitionSize - 1));
continue;
}
(ulong clampedVa, ulong clampedEndVa) = ClampRange(partition, va, endVa);
if (_useProtectionMirrors)
{
partition.Reprotect(clampedVa, clampedEndVa - clampedVa, protection, this, _updatePtCallback);
}
else
{
partition.ReprotectAligned(clampedVa, clampedEndVa - clampedVa, protection);
if (clampedVa == partition.Address &&
partitionIndex > 0 &&
_partitions[partitionIndex - 1].EndAddress == partition.Address)
{
_partitions[partitionIndex - 1].ReprotectBridge(protection);
}
}
va += clampedEndVa - clampedVa;
}
}
}
public PrivateRange GetPrivateAllocation(ulong va)
{
AddressSpacePartition partition = FindPartition(va);
if (partition == null)
{
return PrivateRange.Empty;
}
return partition.GetPrivateAllocation(va);
}
public PrivateRange GetFirstPrivateAllocation(ulong va, ulong size, out ulong nextVa)
{
AddressSpacePartition partition = FindPartition(va);
if (partition == null)
{
nextVa = (va & ~(PartitionSize - 1)) + PartitionSize;
return PrivateRange.Empty;
}
return partition.GetFirstPrivateAllocation(va, size, out nextVa);
}
public bool HasAnyPrivateAllocation(ulong va, ulong size, out PrivateRange range)
{
range = PrivateRange.Empty;
ulong startVa = va;
ulong endVa = va + size;
while (va < endVa)
{
AddressSpacePartition partition = FindPartition(va);
if (partition == null)
{
va += PartitionSize - (va & (PartitionSize - 1));
continue;
}
(ulong clampedVa, ulong clampedEndVa) = ClampRange(partition, va, endVa);
if (partition.HasPrivateAllocation(clampedVa, clampedEndVa - clampedVa, startVa, size, ref range))
{
return true;
}
va += clampedEndVa - clampedVa;
}
return false;
}
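// Adjacent partitions that touch end-to-end get a "bridge" so accesses
// crossing the boundary stay contiguous; it is inserted or cleared whenever
// mappings change near either edge.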
private void InsertOrRemoveBridgeIfNeeded(int partitionIndex)
{
if (partitionIndex > 0)
{
if (_partitions[partitionIndex - 1].EndAddress == _partitions[partitionIndex].Address)
{
_partitions[partitionIndex - 1].InsertBridgeAtEnd(_partitions[partitionIndex], _useProtectionMirrors);
}
else
{
_partitions[partitionIndex - 1].InsertBridgeAtEnd(null, _useProtectionMirrors);
}
}
if (partitionIndex + 1 < _partitions.Count && _partitions[partitionIndex].EndAddress == _partitions[partitionIndex + 1].Address)
{
_partitions[partitionIndex].InsertBridgeAtEnd(_partitions[partitionIndex + 1], _useProtectionMirrors);
}
else
{
_partitions[partitionIndex].InsertBridgeAtEnd(null, _useProtectionMirrors);
}
}
public IntPtr GetPointer(ulong va, ulong size)
{
AddressSpacePartition partition = FindPartition(va);
return partition.GetPointer(va, size);
}
private static (ulong, ulong) ClampRange(AddressSpacePartition partition, ulong va, ulong endVa)
{
if (va < partition.Address)
{
va = partition.Address;
}
if (endVa > partition.EndAddress)
{
endVa = partition.EndAddress;
}
return (va, endVa);
}
private AddressSpacePartition FindPartition(ulong va)
{
lock (_partitions)
{
int index = FindPartitionIndexLocked(va);
if (index >= 0)
{
return _partitions[index];
}
}
return null;
}
private AddressSpacePartition FindPartitionWithIndex(ulong va, out int index)
{
lock (_partitions)
{
index = FindPartitionIndexLocked(va);
if (index >= 0)
{
return _partitions[index];
}
}
return null;
}
private int FindPartitionIndexLocked(ulong va)
{
int left = 0;
int middle;
int right = _partitions.Count - 1;
while (left <= right)
{
middle = left + ((right - left) >> 1);
AddressSpacePartition partition = _partitions[middle];
if (partition.Address <= va && partition.EndAddress > va)
{
return middle;
}
if (partition.Address >= va)
{
right = middle - 1;
}
else
{
left = middle + 1;
}
}
return -1;
}
private void EnsurePartitionsLocked(ulong va, ulong size)
{
ulong endVa = BitUtils.AlignUp(va + size, PartitionSize);
va = BitUtils.AlignDown(va, PartitionSize);
for (int i = 0; i < _partitions.Count && va < endVa; i++)
{
AddressSpacePartition partition = _partitions[i];
if (partition.Address <= va && partition.EndAddress > va)
{
if (partition.EndAddress >= endVa)
{
// Fully mapped already.
va = endVa;
break;
}
ulong gapSize;
if (i + 1 < _partitions.Count)
{
AddressSpacePartition nextPartition = _partitions[i + 1];
if (partition.EndAddress == nextPartition.Address)
{
va = partition.EndAddress;
continue;
}
gapSize = Math.Min(endVa, nextPartition.Address) - partition.EndAddress;
}
else
{
gapSize = endVa - partition.EndAddress;
}
_partitions.Insert(i + 1, CreateAsPartition(partition.EndAddress, gapSize));
va = partition.EndAddress + gapSize;
i++;
}
else if (partition.EndAddress > va)
{
Debug.Assert(partition.Address > va);
ulong gapSize;
if (partition.Address < endVa)
{
gapSize = partition.Address - va;
}
else
{
gapSize = endVa - va;
}
_partitions.Insert(i, CreateAsPartition(va, gapSize));
va = Math.Min(partition.EndAddress, endVa);
i++;
}
}
if (va < endVa)
{
_partitions.Add(CreateAsPartition(va, endVa - va));
}
ValidatePartitionList();
}
[Conditional("DEBUG")]
private void ValidatePartitionList()
{
for (int i = 1; i < _partitions.Count; i++)
{
Debug.Assert(_partitions[i].Address > _partitions[i - 1].Address);
Debug.Assert(_partitions[i].EndAddress > _partitions[i - 1].EndAddress);
}
}
private AddressSpacePartition CreateAsPartition(ulong va, ulong size)
{
return new(CreateAsPartitionAllocation(va, size), _backingMemory, va, size);
}
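// Note: each partition allocation is one host page larger than the partition
// itself; the extra page appears to back the bridge into the next partition.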
public AddressSpacePartitionAllocation CreateAsPartitionAllocation(ulong va, ulong size)
{
return _asAllocator.Allocate(va, size + MemoryBlock.GetPageSize());
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
foreach (AddressSpacePartition partition in _partitions)
{
partition.Dispose();
}
_partitions.Clear();
_asAllocator.Dispose();
}
}
public void Dispose()
{
Dispose(disposing: true);
GC.SuppressFinalize(this);
}
}
}
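
With PartitionBits = 25, each partition spans 32 MiB, and the clamp-and-skip stepping above is plain bit arithmetic. A small illustrative sketch under those constants (not from the commit):

static class PartitionMath
{
    public const int PartitionBits = 25;
    public const ulong PartitionSize = 1UL << PartitionBits; // 32 MiB

    // First VA of the partition containing va.
    public static ulong PartitionStart(ulong va) => va & ~(PartitionSize - 1);

    // First VA past that partition (the "skip unmapped hole" step above).
    public static ulong NextPartitionStart(ulong va) => PartitionStart(va) + PartitionSize;
}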


@ -0,0 +1,223 @@
using Ryujinx.Cpu.Signal;
using Ryujinx.Memory;
using System;
using System.Diagnostics;
using System.Numerics;
using System.Runtime.InteropServices;
namespace Ryujinx.Cpu.Jit.HostTracked
{
sealed class NativePageTable : IDisposable
{
private delegate ulong TrackingEventDelegate(ulong address, ulong size, bool write);
private const int PageBits = 12;
private const int PageSize = 1 << PageBits;
private const int PageMask = PageSize - 1;
private const int PteSize = 8;
private readonly int _bitsPerPtPage;
private readonly int _entriesPerPtPage;
private readonly int _pageCommitmentBits;
private readonly PageTable<ulong> _pageTable;
private readonly MemoryBlock _nativePageTable;
private readonly ulong[] _pageCommitmentBitmap;
private readonly ulong _hostPageSize;
private readonly TrackingEventDelegate _trackingEvent;
private bool _disposed;
public IntPtr PageTablePointer => _nativePageTable.Pointer;
public NativePageTable(ulong asSize)
{
ulong hostPageSize = MemoryBlock.GetPageSize();
_entriesPerPtPage = (int)(hostPageSize / sizeof(ulong));
_bitsPerPtPage = BitOperations.Log2((uint)_entriesPerPtPage);
_pageCommitmentBits = PageBits + _bitsPerPtPage;
_hostPageSize = hostPageSize;
_pageTable = new PageTable<ulong>();
_nativePageTable = new MemoryBlock((asSize / PageSize) * PteSize + _hostPageSize, MemoryAllocationFlags.Reserve);
_pageCommitmentBitmap = new ulong[(asSize >> _pageCommitmentBits) / (sizeof(ulong) * 8)];
ulong ptStart = (ulong)_nativePageTable.Pointer;
ulong ptEnd = ptStart + _nativePageTable.Size;
_trackingEvent = VirtualMemoryEvent;
bool added = NativeSignalHandler.AddTrackedRegion((nuint)ptStart, (nuint)ptEnd, Marshal.GetFunctionPointerForDelegate(_trackingEvent));
if (!added)
{
throw new InvalidOperationException("Number of allowed tracked regions exceeded.");
}
}
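// Faults raised inside the reserved page table region are routed back to
// VirtualMemoryEvent below, which commits the missing page on demand.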
public void Map(ulong va, ulong pa, ulong size, AddressSpacePartitioned addressSpace, MemoryBlock backingMemory, bool privateMap)
{
while (size != 0)
{
_pageTable.Map(va, pa);
EnsureCommitment(va);
if (privateMap)
{
_nativePageTable.Write((va / PageSize) * PteSize, GetPte(va, addressSpace.GetPointer(va, PageSize)));
}
else
{
_nativePageTable.Write((va / PageSize) * PteSize, GetPte(va, backingMemory.GetPointer(pa, PageSize)));
}
va += PageSize;
pa += PageSize;
size -= PageSize;
}
}
public void Unmap(ulong va, ulong size)
{
IntPtr guardPagePtr = GetGuardPagePointer();
while (size != 0)
{
_pageTable.Unmap(va);
_nativePageTable.Write((va / PageSize) * PteSize, GetPte(va, guardPagePtr));
va += PageSize;
size -= PageSize;
}
}
public ulong Read(ulong va)
{
ulong pte = _nativePageTable.Read<ulong>((va / PageSize) * PteSize);
pte += va & ~(ulong)PageMask;
return pte + (va & PageMask);
}
public void Update(ulong va, IntPtr ptr, ulong size)
{
ulong remainingSize = size;
while (remainingSize != 0)
{
EnsureCommitment(va);
_nativePageTable.Write((va / PageSize) * PteSize, GetPte(va, ptr));
va += PageSize;
ptr += PageSize;
remainingSize -= PageSize;
}
}
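// Commits the host page backing a PTE on first touch, double-checked: a
// lockless bitmap test on the fast path, re-read under the lock before
// committing and seeding the fresh page with guard PTEs.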
private void EnsureCommitment(ulong va)
{
ulong bit = va >> _pageCommitmentBits;
int index = (int)(bit / (sizeof(ulong) * 8));
int shift = (int)(bit % (sizeof(ulong) * 8));
ulong mask = 1UL << shift;
ulong oldMask = _pageCommitmentBitmap[index];
if ((oldMask & mask) == 0)
{
lock (_pageCommitmentBitmap)
{
oldMask = _pageCommitmentBitmap[index];
if ((oldMask & mask) != 0)
{
return;
}
_nativePageTable.Commit(bit * _hostPageSize, _hostPageSize);
Span<ulong> pageSpan = MemoryMarshal.Cast<byte, ulong>(_nativePageTable.GetSpan(bit * _hostPageSize, (int)_hostPageSize));
Debug.Assert(pageSpan.Length == _entriesPerPtPage);
IntPtr guardPagePtr = GetGuardPagePointer();
for (int i = 0; i < pageSpan.Length; i++)
{
pageSpan[i] = GetPte((bit << _pageCommitmentBits) | ((ulong)i * PageSize), guardPagePtr);
}
_pageCommitmentBitmap[index] = oldMask | mask;
}
}
}
private IntPtr GetGuardPagePointer()
{
return _nativePageTable.GetPointer(_nativePageTable.Size - _hostPageSize, _hostPageSize);
}
private static ulong GetPte(ulong va, IntPtr ptr)
{
Debug.Assert((va & PageMask) == 0);
return (ulong)ptr - va;
}
public ulong GetPhysicalAddress(ulong va)
{
return _pageTable.Read(va) + (va & PageMask);
}
private ulong VirtualMemoryEvent(ulong address, ulong size, bool write)
{
if (address < _nativePageTable.Size - _hostPageSize)
{
// Some prefetch instructions do not cause faults with invalid addresses.
// Retry if we are hitting a case where the page table is unmapped; the
// next run will execute the actual instruction.
// The address loaded from the page table will be invalid, and it should hit the else case
// if the instruction faults on unmapped or protected memory.
ulong va = address * (PageSize / sizeof(ulong));
EnsureCommitment(va);
return (ulong)_nativePageTable.Pointer + address;
}
else
{
throw new InvalidMemoryRegionException();
}
}
private void Dispose(bool disposing)
{
if (!_disposed)
{
if (disposing)
{
NativeSignalHandler.RemoveTrackedRegion((nuint)_nativePageTable.Pointer);
_nativePageTable.Dispose();
}
_disposed = true;
}
}
public void Dispose()
{
Dispose(disposing: true);
GC.SuppressFinalize(this);
}
}
}
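
Each entry in this table stores (host pointer - guest VA), so translating a guest address is a single load plus an add, as Read shows. An illustrative restatement (hypothetical helper, not from the commit):

static class DeltaPte
{
    public const int PageBits = 12;

    // Each entry stores (host pointer - guest VA); va must be page aligned here.
    public static ulong Make(ulong hostPtr, ulong pageAlignedVa) => hostPtr - pageAlignedVa;

    // One load plus an add; the in-page offset needs no separate masking.
    public static ulong Translate(ulong[] pageTable, ulong va)
        => pageTable[va >> PageBits] + va;
}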


@ -1,5 +1,7 @@
using ARMeilleure.Memory;
using ARMeilleure.Translation;
using Ryujinx.Cpu.Signal;
using Ryujinx.Memory;
namespace Ryujinx.Cpu.Jit
{
@ -12,6 +14,12 @@ namespace Ryujinx.Cpu.Jit
{
_tickSource = tickSource;
_translator = new Translator(new JitMemoryAllocator(forJit: true), memory, for64Bit);
if (memory.Type.IsHostMappedOrTracked())
{
NativeSignalHandler.InitializeSignalHandler();
}
memory.UnmapEvent += UnmapHandler;
}


@ -14,7 +14,5 @@ namespace Ryujinx.Cpu.Jit
public IJitMemoryBlock Allocate(ulong size) => new JitMemoryBlock(size, MemoryAllocationFlags.None);
public IJitMemoryBlock Reserve(ulong size) => new JitMemoryBlock(size, MemoryAllocationFlags.Reserve | _jitFlag);
public ulong GetPageSize() => MemoryBlock.GetPageSize();
}
}


@ -3,6 +3,7 @@ using Ryujinx.Memory;
using Ryujinx.Memory.Range;
using Ryujinx.Memory.Tracking;
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
@ -14,12 +15,8 @@ namespace Ryujinx.Cpu.Jit
/// <summary>
/// Represents a CPU memory manager.
/// </summary>
public sealed class MemoryManager : MemoryManagerBase, IMemoryManager, IVirtualMemoryManagerTracked, IWritableBlock
public sealed class MemoryManager : VirtualMemoryManagerRefCountedBase, IMemoryManager, IVirtualMemoryManagerTracked
{
public const int PageBits = 12;
public const int PageSize = 1 << PageBits;
public const int PageMask = PageSize - 1;
private const int PteSize = 8;
private const int PointerTagBit = 62;
@ -28,17 +25,17 @@ namespace Ryujinx.Cpu.Jit
private readonly InvalidAccessHandler _invalidAccessHandler;
/// <inheritdoc/>
public bool Supports4KBPages => true;
public bool UsesPrivateAllocations => false;
/// <summary>
/// Address space width in bits.
/// </summary>
public int AddressSpaceBits { get; }
private readonly ulong _addressSpaceSize;
private readonly MemoryBlock _pageTable;
private readonly ManagedPageFlags _pages;
/// <summary>
/// Page table base pointer.
/// </summary>
@ -50,6 +47,8 @@ namespace Ryujinx.Cpu.Jit
public event Action<ulong, ulong> UnmapEvent;
protected override ulong AddressSpaceSize { get; }
/// <summary>
/// Creates a new instance of the memory manager.
/// </summary>
@ -71,9 +70,11 @@ namespace Ryujinx.Cpu.Jit
}
AddressSpaceBits = asBits;
_addressSpaceSize = asSize;
AddressSpaceSize = asSize;
_pageTable = new MemoryBlock((asSize / PageSize) * PteSize);
_pages = new ManagedPageFlags(AddressSpaceBits);
Tracking = new MemoryTracking(this, PageSize);
}
@ -93,15 +94,10 @@ namespace Ryujinx.Cpu.Jit
remainingSize -= PageSize;
}
_pages.AddMapping(oVa, size);
Tracking.Map(oVa, size);
}
/// <inheritdoc/>
public void MapForeign(ulong va, nuint hostPointer, ulong size)
{
throw new NotSupportedException();
}
/// <inheritdoc/>
public void Unmap(ulong va, ulong size)
{
@ -115,6 +111,7 @@ namespace Ryujinx.Cpu.Jit
UnmapEvent?.Invoke(va, size);
Tracking.Unmap(va, size);
_pages.RemoveMapping(va, size);
ulong remainingSize = size;
while (remainingSize != 0)
@ -126,18 +123,29 @@ namespace Ryujinx.Cpu.Jit
}
}
/// <inheritdoc/>
public T Read<T>(ulong va) where T : unmanaged
{
return MemoryMarshal.Cast<byte, T>(GetSpan(va, Unsafe.SizeOf<T>()))[0];
}
/// <inheritdoc/>
public T ReadTracked<T>(ulong va) where T : unmanaged
public override T ReadTracked<T>(ulong va)
{
try
{
SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), false);
return base.ReadTracked<T>(va);
}
catch (InvalidMemoryRegionException)
{
if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
{
throw;
}
return default;
}
}
/// <inheritdoc/>
public T ReadGuest<T>(ulong va) where T : unmanaged
{
try
{
SignalMemoryTrackingImpl(va, (ulong)Unsafe.SizeOf<T>(), false, true);
return Read<T>(va);
}
@ -153,112 +161,26 @@ namespace Ryujinx.Cpu.Jit
}
/// <inheritdoc/>
public void Read(ulong va, Span<byte> data)
{
ReadImpl(va, data);
}
/// <inheritdoc/>
public void Write<T>(ulong va, T value) where T : unmanaged
{
Write(va, MemoryMarshal.Cast<T, byte>(MemoryMarshal.CreateSpan(ref value, 1)));
}
/// <inheritdoc/>
public void Write(ulong va, ReadOnlySpan<byte> data)
{
if (data.Length == 0)
{
return;
}
SignalMemoryTracking(va, (ulong)data.Length, true);
WriteImpl(va, data);
}
/// <inheritdoc/>
public void WriteUntracked(ulong va, ReadOnlySpan<byte> data)
{
if (data.Length == 0)
{
return;
}
WriteImpl(va, data);
}
/// <inheritdoc/>
public bool WriteWithRedundancyCheck(ulong va, ReadOnlySpan<byte> data)
{
if (data.Length == 0)
{
return false;
}
SignalMemoryTracking(va, (ulong)data.Length, false);
if (IsContiguousAndMapped(va, data.Length))
{
var target = _backingMemory.GetSpan(GetPhysicalAddressInternal(va), data.Length);
bool changed = !data.SequenceEqual(target);
if (changed)
{
data.CopyTo(target);
}
return changed;
}
else
{
WriteImpl(va, data);
return true;
}
}
/// <summary>
/// Writes data to CPU mapped memory.
/// </summary>
/// <param name="va">Virtual address to write the data into</param>
/// <param name="data">Data to be written</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void WriteImpl(ulong va, ReadOnlySpan<byte> data)
public override void Read(ulong va, Span<byte> data)
{
try
{
AssertValidAddressAndSize(va, (ulong)data.Length);
if (IsContiguousAndMapped(va, data.Length))
base.Read(va, data);
}
catch (InvalidMemoryRegionException)
{
if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
{
data.CopyTo(_backingMemory.GetSpan(GetPhysicalAddressInternal(va), data.Length));
throw;
}
else
{
int offset = 0, size;
}
}
if ((va & PageMask) != 0)
{
ulong pa = GetPhysicalAddressInternal(va);
size = Math.Min(data.Length, PageSize - (int)(va & PageMask));
data[..size].CopyTo(_backingMemory.GetSpan(pa, size));
offset += size;
}
for (; offset < data.Length; offset += size)
{
ulong pa = GetPhysicalAddressInternal(va + (ulong)offset);
size = Math.Min(data.Length - offset, PageSize);
data.Slice(offset, size).CopyTo(_backingMemory.GetSpan(pa, size));
}
}
public override void Write(ulong va, ReadOnlySpan<byte> data)
{
try
{
base.Write(va, data);
}
catch (InvalidMemoryRegionException)
{
@ -270,60 +192,47 @@ namespace Ryujinx.Cpu.Jit
}
/// <inheritdoc/>
public ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false)
public void WriteGuest<T>(ulong va, T value) where T : unmanaged
{
if (size == 0)
Span<byte> data = MemoryMarshal.Cast<T, byte>(MemoryMarshal.CreateSpan(ref value, 1));
SignalMemoryTrackingImpl(va, (ulong)data.Length, true, true);
Write(va, data);
}
public override void WriteUntracked(ulong va, ReadOnlySpan<byte> data)
{
try
{
return ReadOnlySpan<byte>.Empty;
base.WriteUntracked(va, data);
}
if (tracked)
catch (InvalidMemoryRegionException)
{
SignalMemoryTracking(va, (ulong)size, false);
}
if (IsContiguousAndMapped(va, size))
{
return _backingMemory.GetSpan(GetPhysicalAddressInternal(va), size);
}
else
{
Span<byte> data = new byte[size];
ReadImpl(va, data);
return data;
if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
{
throw;
}
}
}
/// <inheritdoc/>
public WritableRegion GetWritableRegion(ulong va, int size, bool tracked = false)
public override ReadOnlySequence<byte> GetReadOnlySequence(ulong va, int size, bool tracked = false)
{
if (size == 0)
try
{
return new WritableRegion(null, va, Memory<byte>.Empty);
return base.GetReadOnlySequence(va, size, tracked);
}
if (IsContiguousAndMapped(va, size))
catch (InvalidMemoryRegionException)
{
if (tracked)
if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
{
SignalMemoryTracking(va, (ulong)size, true);
throw;
}
return new WritableRegion(null, va, _backingMemory.GetMemory(GetPhysicalAddressInternal(va), size));
}
else
{
Memory<byte> memory = new byte[size];
GetSpan(va, size).CopyTo(memory.Span);
return new WritableRegion(this, va, memory, tracked);
return ReadOnlySequence<byte>.Empty;
}
}
/// <inheritdoc/>
public ref T GetRef<T>(ulong va) where T : unmanaged
{
if (!IsContiguous(va, Unsafe.SizeOf<T>()))
@ -336,56 +245,6 @@ namespace Ryujinx.Cpu.Jit
return ref _backingMemory.GetRef<T>(GetPhysicalAddressInternal(va));
}
/// <summary>
/// Computes the number of pages in a virtual address range.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range</param>
/// <param name="startVa">The virtual address of the beginning of the first page</param>
/// <remarks>This function does not differentiate between allocated and unallocated pages.</remarks>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static int GetPagesCount(ulong va, uint size, out ulong startVa)
{
// WARNING: Always check that the ulong arithmetic cannot overflow during these operations.
startVa = va & ~(ulong)PageMask;
ulong vaSpan = (va - startVa + size + PageMask) & ~(ulong)PageMask;
return (int)(vaSpan / PageSize);
}
private static void ThrowMemoryNotContiguous() => throw new MemoryNotContiguousException();
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private bool IsContiguousAndMapped(ulong va, int size) => IsContiguous(va, size) && IsMapped(va);
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private bool IsContiguous(ulong va, int size)
{
if (!ValidateAddress(va) || !ValidateAddressAndSize(va, (ulong)size))
{
return false;
}
int pages = GetPagesCount(va, (uint)size, out va);
for (int page = 0; page < pages - 1; page++)
{
if (!ValidateAddress(va + PageSize))
{
return false;
}
if (GetPhysicalAddressInternal(va) + PageSize != GetPhysicalAddressInternal(va + PageSize))
{
return false;
}
va += PageSize;
}
return true;
}
/// <inheritdoc/>
public IEnumerable<HostMemoryRange> GetHostRegions(ulong va, ulong size)
{
@ -462,48 +321,6 @@ namespace Ryujinx.Cpu.Jit
return regions;
}
private void ReadImpl(ulong va, Span<byte> data)
{
if (data.Length == 0)
{
return;
}
try
{
AssertValidAddressAndSize(va, (ulong)data.Length);
int offset = 0, size;
if ((va & PageMask) != 0)
{
ulong pa = GetPhysicalAddressInternal(va);
size = Math.Min(data.Length, PageSize - (int)(va & PageMask));
_backingMemory.GetSpan(pa, size).CopyTo(data[..size]);
offset += size;
}
for (; offset < data.Length; offset += size)
{
ulong pa = GetPhysicalAddressInternal(va + (ulong)offset);
size = Math.Min(data.Length - offset, PageSize);
_backingMemory.GetSpan(pa, size).CopyTo(data.Slice(offset, size));
}
}
catch (InvalidMemoryRegionException)
{
if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
{
throw;
}
}
}
/// <inheritdoc/>
public bool IsRangeMapped(ulong va, ulong size)
{
@ -532,9 +349,8 @@ namespace Ryujinx.Cpu.Jit
return true;
}
/// <inheritdoc/>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public bool IsMapped(ulong va)
public override bool IsMapped(ulong va)
{
if (!ValidateAddress(va))
{
@ -544,40 +360,9 @@ namespace Ryujinx.Cpu.Jit
return _pageTable.Read<ulong>((va / PageSize) * PteSize) != 0;
}
private bool ValidateAddress(ulong va)
private nuint GetPhysicalAddressInternal(ulong va)
{
return va < _addressSpaceSize;
}
/// <summary>
/// Checks if the combination of virtual address and size is part of the addressable space.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range in bytes</param>
/// <returns>True if the combination of virtual address and size is part of the addressable space</returns>
private bool ValidateAddressAndSize(ulong va, ulong size)
{
ulong endVa = va + size;
return endVa >= va && endVa >= size && endVa <= _addressSpaceSize;
}
/// <summary>
/// Ensures the combination of virtual address and size is part of the addressable space.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range in bytes</param>
/// <exception cref="InvalidMemoryRegionException">Thrown when the specified memory region is outside the addressable space</exception>
private void AssertValidAddressAndSize(ulong va, ulong size)
{
if (!ValidateAddressAndSize(va, size))
{
throw new InvalidMemoryRegionException($"va=0x{va:X16}, size=0x{size:X16}");
}
}
private ulong GetPhysicalAddressInternal(ulong va)
{
return PteToPa(_pageTable.Read<ulong>((va / PageSize) * PteSize) & ~(0xffffUL << 48)) + (va & PageMask);
return (nuint)(PteToPa(_pageTable.Read<ulong>((va / PageSize) * PteSize) & ~(0xffffUL << 48)) + (va & PageMask));
}
/// <inheritdoc/>
@ -587,50 +372,57 @@ namespace Ryujinx.Cpu.Jit
}
/// <inheritdoc/>
public void TrackingReprotect(ulong va, ulong size, MemoryPermission protection)
public void TrackingReprotect(ulong va, ulong size, MemoryPermission protection, bool guest)
{
AssertValidAddressAndSize(va, size);
// Protection is inverted on software pages, since the default value is 0.
protection = (~protection) & MemoryPermission.ReadAndWrite;
long tag = protection switch
if (guest)
{
MemoryPermission.None => 0L,
MemoryPermission.Write => 2L << PointerTagBit,
_ => 3L << PointerTagBit,
};
// Protection is inverted on software pages, since the default value is 0.
protection = (~protection) & MemoryPermission.ReadAndWrite;
int pages = GetPagesCount(va, (uint)size, out va);
ulong pageStart = va >> PageBits;
long invTagMask = ~(0xffffL << 48);
for (int page = 0; page < pages; page++)
{
ref long pageRef = ref _pageTable.GetRef<long>(pageStart * PteSize);
long pte;
do
long tag = protection switch
{
pte = Volatile.Read(ref pageRef);
}
while (pte != 0 && Interlocked.CompareExchange(ref pageRef, (pte & invTagMask) | tag, pte) != pte);
MemoryPermission.None => 0L,
MemoryPermission.Write => 2L << PointerTagBit,
_ => 3L << PointerTagBit,
};
pageStart++;
int pages = GetPagesCount(va, (uint)size, out va);
ulong pageStart = va >> PageBits;
long invTagMask = ~(0xffffL << 48);
for (int page = 0; page < pages; page++)
{
ref long pageRef = ref _pageTable.GetRef<long>(pageStart * PteSize);
long pte;
do
{
pte = Volatile.Read(ref pageRef);
}
while (pte != 0 && Interlocked.CompareExchange(ref pageRef, (pte & invTagMask) | tag, pte) != pte);
pageStart++;
}
}
else
{
_pages.TrackingReprotect(va, size, protection);
}
}
/// <inheritdoc/>
public RegionHandle BeginTracking(ulong address, ulong size, int id)
public RegionHandle BeginTracking(ulong address, ulong size, int id, RegionFlags flags = RegionFlags.None)
{
return Tracking.BeginTracking(address, size, id);
return Tracking.BeginTracking(address, size, id, flags);
}
/// <inheritdoc/>
public MultiRegionHandle BeginGranularTracking(ulong address, ulong size, IEnumerable<IRegionHandle> handles, ulong granularity, int id)
public MultiRegionHandle BeginGranularTracking(ulong address, ulong size, IEnumerable<IRegionHandle> handles, ulong granularity, int id, RegionFlags flags = RegionFlags.None)
{
return Tracking.BeginGranularTracking(address, size, handles, granularity, id);
return Tracking.BeginGranularTracking(address, size, handles, granularity, id, flags);
}
/// <inheritdoc/>
@ -639,8 +431,7 @@ namespace Ryujinx.Cpu.Jit
return Tracking.BeginSmartGranularTracking(address, size, granularity, id);
}
/// <inheritdoc/>
public void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null)
private void SignalMemoryTrackingImpl(ulong va, ulong size, bool write, bool guest, bool precise = false, int? exemptId = null)
{
AssertValidAddressAndSize(va, size);
@ -650,31 +441,45 @@ namespace Ryujinx.Cpu.Jit
return;
}
// We emulate guard pages for software memory access. This makes for an easy transition to
// tracking using host guard pages in the future, while also supporting platforms where this is not possible.
// If the memory tracking is coming from the guest, use the tag bits in the page table entry.
// Otherwise, use the managed page flags.
// Write tag includes read protection, since we don't have any read actions that aren't performed before write too.
long tag = (write ? 3L : 1L) << PointerTagBit;
int pages = GetPagesCount(va, (uint)size, out _);
ulong pageStart = va >> PageBits;
for (int page = 0; page < pages; page++)
if (guest)
{
ref long pageRef = ref _pageTable.GetRef<long>(pageStart * PteSize);
// We emulate guard pages for software memory access. This makes for an easy transition to
// tracking using host guard pages in the future, while also supporting platforms where this is not possible.
long pte;
// Write tag includes read protection, since we don't have any read actions that aren't performed before write too.
long tag = (write ? 3L : 1L) << PointerTagBit;
pte = Volatile.Read(ref pageRef);
int pages = GetPagesCount(va, (uint)size, out _);
ulong pageStart = va >> PageBits;
if ((pte & tag) != 0)
for (int page = 0; page < pages; page++)
{
Tracking.VirtualMemoryEvent(va, size, write, precise: false, exemptId);
break;
}
ref long pageRef = ref _pageTable.GetRef<long>(pageStart * PteSize);
pageStart++;
long pte = Volatile.Read(ref pageRef);
if ((pte & tag) != 0)
{
Tracking.VirtualMemoryEvent(va, size, write, precise: false, exemptId, true);
break;
}
pageStart++;
}
}
else
{
_pages.SignalMemoryTracking(Tracking, va, size, write, exemptId);
}
}
/// <inheritdoc/>
public override void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null)
{
SignalMemoryTrackingImpl(va, size, write, false, precise, exemptId);
}
private ulong PaToPte(ulong pa)
@ -691,5 +496,17 @@ namespace Ryujinx.Cpu.Jit
/// Disposes of resources used by the memory manager.
/// </summary>
protected override void Destroy() => _pageTable.Dispose();
protected override Memory<byte> GetPhysicalAddressMemory(nuint pa, int size)
=> _backingMemory.GetMemory(pa, size);
protected override Span<byte> GetPhysicalAddressSpan(nuint pa, int size)
=> _backingMemory.GetSpan(pa, size);
protected override nuint TranslateVirtualAddressChecked(ulong va)
=> GetPhysicalAddressInternal(va);
protected override nuint TranslateVirtualAddressUnchecked(ulong va)
=> GetPhysicalAddressInternal(va);
}
}
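
This manager's software page table marks tracked pages in PTE bits 62-63, letting SignalMemoryTrackingImpl detect a tracked access with a single AND. A compact sketch of the check (not from the commit):

static class SoftPteTags
{
    public const int PointerTagBit = 62;

    // Writes test both tag bits, reads only the low one, matching the
    // "(write ? 3L : 1L) << PointerTagBit" check in SignalMemoryTrackingImpl.
    public static bool IsTracked(long pte, bool write)
        => (pte & ((write ? 3L : 1L) << PointerTagBit)) != 0;
}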


@ -3,52 +3,31 @@ using Ryujinx.Memory;
using Ryujinx.Memory.Range;
using Ryujinx.Memory.Tracking;
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
namespace Ryujinx.Cpu.Jit
{
/// <summary>
/// Represents a CPU memory manager which maps guest virtual memory directly onto a host virtual region.
/// </summary>
public sealed class MemoryManagerHostMapped : MemoryManagerBase, IMemoryManager, IVirtualMemoryManagerTracked, IWritableBlock
public sealed class MemoryManagerHostMapped : VirtualMemoryManagerRefCountedBase, IMemoryManager, IVirtualMemoryManagerTracked
{
public const int PageBits = 12;
public const int PageSize = 1 << PageBits;
public const int PageMask = PageSize - 1;
public const int PageToPteShift = 5; // 32 pages (2 bits each) in one ulong page table entry.
public const ulong BlockMappedMask = 0x5555555555555555; // First bit of each table entry set.
private enum HostMappedPtBits : ulong
{
Unmapped = 0,
Mapped,
WriteTracked,
ReadWriteTracked,
MappedReplicated = 0x5555555555555555,
WriteTrackedReplicated = 0xaaaaaaaaaaaaaaaa,
ReadWriteTrackedReplicated = ulong.MaxValue,
}
private readonly InvalidAccessHandler _invalidAccessHandler;
private readonly bool _unsafeMode;
private readonly AddressSpace _addressSpace;
public ulong AddressSpaceSize { get; }
private readonly PageTable<ulong> _pageTable;
private readonly MemoryEhMeilleure _memoryEh;
private readonly ulong[] _pageBitmap;
private readonly ManagedPageFlags _pages;
/// <inheritdoc/>
public bool Supports4KBPages => MemoryBlock.GetPageSize() == PageSize;
public bool UsesPrivateAllocations => false;
public int AddressSpaceBits { get; }
@ -60,6 +39,8 @@ namespace Ryujinx.Cpu.Jit
public event Action<ulong, ulong> UnmapEvent;
protected override ulong AddressSpaceSize { get; }
/// <summary>
/// Creates a new instance of the host mapped memory manager.
/// </summary>
@ -85,48 +66,12 @@ namespace Ryujinx.Cpu.Jit
AddressSpaceBits = asBits;
_pageBitmap = new ulong[1 << (AddressSpaceBits - (PageBits + PageToPteShift))];
_pages = new ManagedPageFlags(AddressSpaceBits);
Tracking = new MemoryTracking(this, (int)MemoryBlock.GetPageSize(), invalidAccessHandler);
_memoryEh = new MemoryEhMeilleure(_addressSpace.Base, _addressSpace.Mirror, Tracking);
}
/// <summary>
/// Checks if the virtual address is part of the addressable space.
/// </summary>
/// <param name="va">Virtual address</param>
/// <returns>True if the virtual address is part of the addressable space</returns>
private bool ValidateAddress(ulong va)
{
return va < AddressSpaceSize;
}
/// <summary>
/// Checks if the combination of virtual address and size is part of the addressable space.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range in bytes</param>
/// <returns>True if the combination of virtual address and size is part of the addressable space</returns>
private bool ValidateAddressAndSize(ulong va, ulong size)
{
ulong endVa = va + size;
return endVa >= va && endVa >= size && endVa <= AddressSpaceSize;
}
/// <summary>
/// Ensures the combination of virtual address and size is part of the addressable space.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range in bytes</param>
/// <exception cref="InvalidMemoryRegionException">Thrown when the specified memory region is outside the addressable space</exception>
private void AssertValidAddressAndSize(ulong va, ulong size)
{
if (!ValidateAddressAndSize(va, size))
{
throw new InvalidMemoryRegionException($"va=0x{va:X16}, size=0x{size:X16}");
}
}
/// <summary>
/// Ensures the combination of virtual address and size is part of the addressable space and fully mapped.
/// </summary>
@ -134,7 +79,7 @@ namespace Ryujinx.Cpu.Jit
/// <param name="size">Size of the range in bytes</param>
private void AssertMapped(ulong va, ulong size)
{
if (!ValidateAddressAndSize(va, size) || !IsRangeMappedImpl(va, size))
if (!ValidateAddressAndSize(va, size) || !_pages.IsRangeMapped(va, size))
{
throw new InvalidMemoryRegionException($"Not mapped: va=0x{va:X16}, size=0x{size:X16}");
}
@ -146,18 +91,12 @@ namespace Ryujinx.Cpu.Jit
AssertValidAddressAndSize(va, size);
_addressSpace.Map(va, pa, size, flags);
AddMapping(va, size);
_pages.AddMapping(va, size);
PtMap(va, pa, size);
Tracking.Map(va, size);
}
/// <inheritdoc/>
public void MapForeign(ulong va, nuint hostPointer, ulong size)
{
throw new NotSupportedException();
}
/// <inheritdoc/>
public void Unmap(ulong va, ulong size)
{
@ -166,7 +105,7 @@ namespace Ryujinx.Cpu.Jit
UnmapEvent?.Invoke(va, size);
Tracking.Unmap(va, size);
RemoveMapping(va, size);
_pages.RemoveMapping(va, size);
PtUnmap(va, size);
_addressSpace.Unmap(va, size);
}
@ -194,8 +133,7 @@ namespace Ryujinx.Cpu.Jit
}
}
/// <inheritdoc/>
public T Read<T>(ulong va) where T : unmanaged
public override T Read<T>(ulong va)
{
try
{
@ -214,14 +152,11 @@ namespace Ryujinx.Cpu.Jit
}
}
/// <inheritdoc/>
public T ReadTracked<T>(ulong va) where T : unmanaged
public override T ReadTracked<T>(ulong va)
{
try
{
SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), false);
return Read<T>(va);
return base.ReadTracked<T>(va);
}
catch (InvalidMemoryRegionException)
{
@ -234,8 +169,7 @@ namespace Ryujinx.Cpu.Jit
}
}
/// <inheritdoc/>
public void Read(ulong va, Span<byte> data)
public override void Read(ulong va, Span<byte> data)
{
try
{
@ -252,9 +186,7 @@ namespace Ryujinx.Cpu.Jit
}
}
/// <inheritdoc/>
public void Write<T>(ulong va, T value) where T : unmanaged
public override void Write<T>(ulong va, T value)
{
try
{
@ -271,8 +203,7 @@ namespace Ryujinx.Cpu.Jit
}
}
/// <inheritdoc/>
public void Write(ulong va, ReadOnlySpan<byte> data)
public override void Write(ulong va, ReadOnlySpan<byte> data)
{
try
{
@ -289,8 +220,7 @@ namespace Ryujinx.Cpu.Jit
}
}
/// <inheritdoc/>
public void WriteUntracked(ulong va, ReadOnlySpan<byte> data)
public override void WriteUntracked(ulong va, ReadOnlySpan<byte> data)
{
try
{
@ -307,8 +237,7 @@ namespace Ryujinx.Cpu.Jit
}
}
/// <inheritdoc/>
public bool WriteWithRedundancyCheck(ulong va, ReadOnlySpan<byte> data)
public override bool WriteWithRedundancyCheck(ulong va, ReadOnlySpan<byte> data)
{
try
{
@ -335,8 +264,21 @@ namespace Ryujinx.Cpu.Jit
}
}
/// <inheritdoc/>
public ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false)
public override ReadOnlySequence<byte> GetReadOnlySequence(ulong va, int size, bool tracked = false)
{
if (tracked)
{
SignalMemoryTracking(va, (ulong)size, write: false);
}
else
{
AssertMapped(va, (ulong)size);
}
return new ReadOnlySequence<byte>(_addressSpace.Mirror.GetMemory(va, size));
}
public override ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false)
{
if (tracked)
{
@ -350,8 +292,7 @@ namespace Ryujinx.Cpu.Jit
return _addressSpace.Mirror.GetSpan(va, size);
}
/// <inheritdoc/>
public WritableRegion GetWritableRegion(ulong va, int size, bool tracked = false)
public override WritableRegion GetWritableRegion(ulong va, int size, bool tracked = false)
{
if (tracked)
{
@ -365,7 +306,6 @@ namespace Ryujinx.Cpu.Jit
return _addressSpace.Mirror.GetWritableRegion(va, size);
}
/// <inheritdoc/>
public ref T GetRef<T>(ulong va) where T : unmanaged
{
SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), true);
@ -373,26 +313,10 @@ namespace Ryujinx.Cpu.Jit
return ref _addressSpace.Mirror.GetRef<T>(va);
}
/// <inheritdoc/>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public bool IsMapped(ulong va)
public override bool IsMapped(ulong va)
{
return ValidateAddress(va) && IsMappedImpl(va);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private bool IsMappedImpl(ulong va)
{
ulong page = va >> PageBits;
int bit = (int)((page & 31) << 1);
int pageIndex = (int)(page >> PageToPteShift);
ref ulong pageRef = ref _pageBitmap[pageIndex];
ulong pte = Volatile.Read(ref pageRef);
return ((pte >> bit) & 3) != 0;
return ValidateAddress(va) && _pages.IsMapped(va);
}
/// <inheritdoc/>
@ -400,58 +324,7 @@ namespace Ryujinx.Cpu.Jit
{
AssertValidAddressAndSize(va, size);
return IsRangeMappedImpl(va, size);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static void GetPageBlockRange(ulong pageStart, ulong pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex)
{
startMask = ulong.MaxValue << ((int)(pageStart & 31) << 1);
endMask = ulong.MaxValue >> (64 - ((int)(pageEnd & 31) << 1));
pageIndex = (int)(pageStart >> PageToPteShift);
pageEndIndex = (int)((pageEnd - 1) >> PageToPteShift);
}
private bool IsRangeMappedImpl(ulong va, ulong size)
{
int pages = GetPagesCount(va, size, out _);
if (pages == 1)
{
return IsMappedImpl(va);
}
ulong pageStart = va >> PageBits;
ulong pageEnd = pageStart + (ulong)pages;
GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
// Check if either bit in each 2 bit page entry is set.
// OR the block with itself shifted down by 1, and check the first bit of each entry.
ulong mask = BlockMappedMask & startMask;
while (pageIndex <= pageEndIndex)
{
if (pageIndex == pageEndIndex)
{
mask &= endMask;
}
ref ulong pageRef = ref _pageBitmap[pageIndex++];
ulong pte = Volatile.Read(ref pageRef);
pte |= pte >> 1;
if ((pte & mask) != mask)
{
return false;
}
mask = BlockMappedMask;
}
return true;
return _pages.IsRangeMapped(va, size);
}
/// <inheritdoc/>
@ -512,11 +385,10 @@ namespace Ryujinx.Cpu.Jit
return _pageTable.Read(va) + (va & PageMask);
}
/// <inheritdoc/>
/// <remarks>
/// This function also checks that the given range is both valid and mapped, and will throw if it is not.
/// </remarks>
public void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null)
public override void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null)
{
AssertValidAddressAndSize(va, size);
@ -526,93 +398,7 @@ namespace Ryujinx.Cpu.Jit
return;
}
// Software table, used for managed memory tracking.
int pages = GetPagesCount(va, size, out _);
ulong pageStart = va >> PageBits;
if (pages == 1)
{
ulong tag = (ulong)(write ? HostMappedPtBits.WriteTracked : HostMappedPtBits.ReadWriteTracked);
int bit = (int)((pageStart & 31) << 1);
int pageIndex = (int)(pageStart >> PageToPteShift);
ref ulong pageRef = ref _pageBitmap[pageIndex];
ulong pte = Volatile.Read(ref pageRef);
ulong state = ((pte >> bit) & 3);
if (state >= tag)
{
Tracking.VirtualMemoryEvent(va, size, write, precise: false, exemptId);
return;
}
else if (state == 0)
{
ThrowInvalidMemoryRegionException($"Not mapped: va=0x{va:X16}, size=0x{size:X16}");
}
}
else
{
ulong pageEnd = pageStart + (ulong)pages;
GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
ulong mask = startMask;
ulong anyTrackingTag = (ulong)HostMappedPtBits.WriteTrackedReplicated;
while (pageIndex <= pageEndIndex)
{
if (pageIndex == pageEndIndex)
{
mask &= endMask;
}
ref ulong pageRef = ref _pageBitmap[pageIndex++];
ulong pte = Volatile.Read(ref pageRef);
ulong mappedMask = mask & BlockMappedMask;
ulong mappedPte = pte | (pte >> 1);
if ((mappedPte & mappedMask) != mappedMask)
{
ThrowInvalidMemoryRegionException($"Not mapped: va=0x{va:X16}, size=0x{size:X16}");
}
pte &= mask;
if ((pte & anyTrackingTag) != 0) // Search for any tracking.
{
// Writes trigger any tracking.
// Only trigger tracking from reads if both bits are set on any page.
if (write || (pte & (pte >> 1) & BlockMappedMask) != 0)
{
Tracking.VirtualMemoryEvent(va, size, write, precise: false, exemptId);
break;
}
}
mask = ulong.MaxValue;
}
}
}
/// <summary>
/// Computes the number of pages in a virtual address range.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range</param>
/// <param name="startVa">The virtual address of the beginning of the first page</param>
/// <remarks>This function does not differentiate between allocated and unallocated pages.</remarks>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static int GetPagesCount(ulong va, ulong size, out ulong startVa)
{
// WARNING: Always check that the ulong arithmetic cannot overflow during these operations.
startVa = va & ~(ulong)PageMask;
ulong vaSpan = (va - startVa + size + PageMask) & ~(ulong)PageMask;
return (int)(vaSpan / PageSize);
_pages.SignalMemoryTracking(Tracking, va, size, write, exemptId);
}
/// <inheritdoc/>
@ -622,103 +408,28 @@ namespace Ryujinx.Cpu.Jit
}
/// <inheritdoc/>
public void TrackingReprotect(ulong va, ulong size, MemoryPermission protection)
public void TrackingReprotect(ulong va, ulong size, MemoryPermission protection, bool guest)
{
// Protection is inverted on software pages, since the default value is 0.
protection = (~protection) & MemoryPermission.ReadAndWrite;
int pages = GetPagesCount(va, size, out va);
ulong pageStart = va >> PageBits;
if (pages == 1)
if (guest)
{
ulong protTag = protection switch
{
MemoryPermission.None => (ulong)HostMappedPtBits.Mapped,
MemoryPermission.Write => (ulong)HostMappedPtBits.WriteTracked,
_ => (ulong)HostMappedPtBits.ReadWriteTracked,
};
int bit = (int)((pageStart & 31) << 1);
ulong tagMask = 3UL << bit;
ulong invTagMask = ~tagMask;
ulong tag = protTag << bit;
int pageIndex = (int)(pageStart >> PageToPteShift);
ref ulong pageRef = ref _pageBitmap[pageIndex];
ulong pte;
do
{
pte = Volatile.Read(ref pageRef);
}
while ((pte & tagMask) != 0 && Interlocked.CompareExchange(ref pageRef, (pte & invTagMask) | tag, pte) != pte);
_addressSpace.Base.Reprotect(va, size, protection, false);
}
else
{
ulong pageEnd = pageStart + (ulong)pages;
GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
ulong mask = startMask;
ulong protTag = protection switch
{
MemoryPermission.None => (ulong)HostMappedPtBits.MappedReplicated,
MemoryPermission.Write => (ulong)HostMappedPtBits.WriteTrackedReplicated,
_ => (ulong)HostMappedPtBits.ReadWriteTrackedReplicated,
};
while (pageIndex <= pageEndIndex)
{
if (pageIndex == pageEndIndex)
{
mask &= endMask;
}
ref ulong pageRef = ref _pageBitmap[pageIndex++];
ulong pte;
ulong mappedMask;
// Change the protection of all 2 bit entries that are mapped.
do
{
pte = Volatile.Read(ref pageRef);
mappedMask = pte | (pte >> 1);
mappedMask |= (mappedMask & BlockMappedMask) << 1;
mappedMask &= mask; // Only update mapped pages within the given range.
}
while (Interlocked.CompareExchange(ref pageRef, (pte & (~mappedMask)) | (protTag & mappedMask), pte) != pte);
mask = ulong.MaxValue;
}
_pages.TrackingReprotect(va, size, protection);
}
protection = protection switch
{
MemoryPermission.None => MemoryPermission.ReadAndWrite,
MemoryPermission.Write => MemoryPermission.Read,
_ => MemoryPermission.None,
};
_addressSpace.Base.Reprotect(va, size, protection, false);
}
/// <inheritdoc/>
public RegionHandle BeginTracking(ulong address, ulong size, int id)
public RegionHandle BeginTracking(ulong address, ulong size, int id, RegionFlags flags = RegionFlags.None)
{
return Tracking.BeginTracking(address, size, id);
return Tracking.BeginTracking(address, size, id, flags);
}
/// <inheritdoc/>
public MultiRegionHandle BeginGranularTracking(ulong address, ulong size, IEnumerable<IRegionHandle> handles, ulong granularity, int id)
public MultiRegionHandle BeginGranularTracking(ulong address, ulong size, IEnumerable<IRegionHandle> handles, ulong granularity, int id, RegionFlags flags = RegionFlags.None)
{
return Tracking.BeginGranularTracking(address, size, handles, granularity, id);
return Tracking.BeginGranularTracking(address, size, handles, granularity, id, flags);
}
/// <inheritdoc/>
@ -727,86 +438,6 @@ namespace Ryujinx.Cpu.Jit
return Tracking.BeginSmartGranularTracking(address, size, granularity, id);
}
/// <summary>
/// Adds the given address mapping to the page table.
/// </summary>
/// <param name="va">Virtual memory address</param>
/// <param name="size">Size to be mapped</param>
private void AddMapping(ulong va, ulong size)
{
int pages = GetPagesCount(va, size, out _);
ulong pageStart = va >> PageBits;
ulong pageEnd = pageStart + (ulong)pages;
GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
ulong mask = startMask;
while (pageIndex <= pageEndIndex)
{
if (pageIndex == pageEndIndex)
{
mask &= endMask;
}
ref ulong pageRef = ref _pageBitmap[pageIndex++];
ulong pte;
ulong mappedMask;
// Map all 2-bit entries that are unmapped.
do
{
pte = Volatile.Read(ref pageRef);
mappedMask = pte | (pte >> 1);
mappedMask |= (mappedMask & BlockMappedMask) << 1;
mappedMask |= ~mask; // Treat everything outside the range as mapped, thus unchanged.
}
while (Interlocked.CompareExchange(ref pageRef, (pte & mappedMask) | (BlockMappedMask & (~mappedMask)), pte) != pte);
mask = ulong.MaxValue;
}
}
/// <summary>
/// Removes the given address mapping from the page table.
/// </summary>
/// <param name="va">Virtual memory address</param>
/// <param name="size">Size to be unmapped</param>
private void RemoveMapping(ulong va, ulong size)
{
int pages = GetPagesCount(va, size, out _);
ulong pageStart = va >> PageBits;
ulong pageEnd = pageStart + (ulong)pages;
GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
startMask = ~startMask;
endMask = ~endMask;
ulong mask = startMask;
while (pageIndex <= pageEndIndex)
{
if (pageIndex == pageEndIndex)
{
mask |= endMask;
}
ref ulong pageRef = ref _pageBitmap[pageIndex++];
ulong pte;
do
{
pte = Volatile.Read(ref pageRef);
}
while (Interlocked.CompareExchange(ref pageRef, pte & mask, pte) != pte);
mask = 0;
}
}
/// <summary>
/// Disposes of resources used by the memory manager.
/// </summary>
@ -816,6 +447,16 @@ namespace Ryujinx.Cpu.Jit
_memoryEh.Dispose();
}
private static void ThrowInvalidMemoryRegionException(string message) => throw new InvalidMemoryRegionException(message);
protected override Memory<byte> GetPhysicalAddressMemory(nuint pa, int size)
=> _addressSpace.Mirror.GetMemory(pa, size);
protected override Span<byte> GetPhysicalAddressSpan(nuint pa, int size)
=> _addressSpace.Mirror.GetSpan(pa, size);
protected override nuint TranslateVirtualAddressChecked(ulong va)
=> (nuint)GetPhysicalAddressChecked(va);
protected override nuint TranslateVirtualAddressUnchecked(ulong va)
=> (nuint)GetPhysicalAddressInternal(va);
}
}
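
The page bitmap this diff moves into ManagedPageFlags packs 32 pages per ulong, two bits each: the low bit of a pair means mapped, and the pair values 2 and 3 mean write- and read-write-tracked. A sketch of the mapped check it performed (not from the commit):

static class PageBitmap
{
    public const int PageToPteShift = 5; // 32 two-bit entries per ulong

    // Low bit of a pair: mapped. Pair values 2/3: write-/read-write-tracked.
    public static bool IsMapped(ulong[] bitmap, ulong page)
    {
        int bit = (int)((page & 31) << 1);
        return ((bitmap[page >> PageToPteShift] >> bit) & 3) != 0;
    }
}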


@ -1,82 +1,64 @@
using ARMeilleure.Memory;
using Ryujinx.Common.Memory;
using Ryujinx.Cpu.Jit.HostTracked;
using Ryujinx.Cpu.Signal;
using Ryujinx.Memory;
using Ryujinx.Memory.Range;
using Ryujinx.Memory.Tracking;
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Threading;
namespace Ryujinx.Cpu.Jit
{
/// <summary>
/// Represents a CPU memory manager which maps guest virtual memory directly onto a host virtual region.
/// </summary>
public sealed class MemoryManagerHostTracked : MemoryManagerBase, IWritableBlock, IMemoryManager, IVirtualMemoryManagerTracked
public sealed class MemoryManagerHostTracked : VirtualMemoryManagerRefCountedBase, IMemoryManager, IVirtualMemoryManagerTracked
{
public const int PageBits = 12;
public const int PageSize = 1 << PageBits;
public const int PageMask = PageSize - 1;
public const int PageToPteShift = 5; // 32 pages (2 bits each) in one ulong page table entry.
public const ulong BlockMappedMask = 0x5555555555555555; // First bit of each table entry set.
private enum HostMappedPtBits : ulong
{
Unmapped = 0,
Mapped,
WriteTracked,
ReadWriteTracked,
MappedReplicated = 0x5555555555555555,
WriteTrackedReplicated = 0xaaaaaaaaaaaaaaaa,
ReadWriteTrackedReplicated = ulong.MaxValue
}
private readonly InvalidAccessHandler _invalidAccessHandler;
private readonly bool _unsafeMode;
private readonly MemoryBlock _backingMemory;
private readonly PageTable<ulong> _pageTable;
private readonly ulong[] _pageBitmap;
public int AddressSpaceBits { get; }
public MemoryTracking Tracking { get; private set; }
public MemoryTracking Tracking { get; }
private const int PteSize = 8;
private readonly NativePageTable _nativePageTable;
private readonly Ryujinx.Cpu.Jit.HostTracked.AddressSpacePartitioned _addressSpace;
private readonly AddressSpacePartitioned _addressSpace;
private readonly ManagedPageFlags _pages;
public ulong AddressSpaceSize { get; }
private readonly MemoryBlock _flatPageTable;
protected override ulong AddressSpaceSize { get; }
/// <inheritdoc/>
public bool Supports4KBPages => false;
public bool UsesPrivateAllocations => true;
public IntPtr PageTablePointer => _flatPageTable.Pointer;
public IntPtr PageTablePointer => _nativePageTable.PageTablePointer;
public MemoryManagerType Type => MemoryManagerType.HostTracked;
public MemoryManagerType Type => _unsafeMode ? MemoryManagerType.HostTrackedUnsafe : MemoryManagerType.HostTracked;
public event Action<ulong, ulong> UnmapEvent;
/// <summary>
/// Creates a new instance of the host mapped memory manager.
/// Creates a new instance of the host tracked memory manager.
/// </summary>
/// <param name="backingMemory">Physical backing memory where virtual memory will be mapped to</param>
/// <param name="addressSpaceSize">Size of the address space</param>
/// <param name="unsafeMode">True if unmanaged access should not be masked (unsafe), false otherwise.</param>
/// <param name="invalidAccessHandler">Optional function to handle invalid memory accesses</param>
public MemoryManagerHostTracked(MemoryBlock backingMemory, ulong addressSpaceSize, InvalidAccessHandler invalidAccessHandler)
public MemoryManagerHostTracked(MemoryBlock backingMemory, ulong addressSpaceSize, bool unsafeMode, InvalidAccessHandler invalidAccessHandler)
{
Tracking = new MemoryTracking(this, AddressSpacePartitioned.Use4KBProtection ? PageSize : (int)MemoryBlock.GetPageSize(), invalidAccessHandler);
bool useProtectionMirrors = MemoryBlock.GetPageSize() > PageSize;
Tracking = new MemoryTracking(this, PageSize, invalidAccessHandler, useProtectionMirrors);
_backingMemory = backingMemory;
_pageTable = new PageTable<ulong>();
_invalidAccessHandler = invalidAccessHandler;
_addressSpace = new(Tracking, backingMemory, UpdatePt);
_unsafeMode = unsafeMode;
AddressSpaceSize = addressSpaceSize;
ulong asSize = PageSize;
@ -90,8 +72,81 @@ namespace Ryujinx.Cpu.Jit
AddressSpaceBits = asBits;
_pageBitmap = new ulong[1 << (AddressSpaceBits - (PageBits + PageToPteShift))];
_flatPageTable = new MemoryBlock((asSize / PageSize) * PteSize);
if (useProtectionMirrors && !NativeSignalHandler.SupportsFaultAddressPatching())
{
// Currently we require being able to change the fault address to something else
// in order to "emulate" 4KB granularity protection on systems with a larger page size.
throw new PlatformNotSupportedException();
}
_pages = new ManagedPageFlags(asBits);
_nativePageTable = new(asSize);
_addressSpace = new(Tracking, backingMemory, _nativePageTable, useProtectionMirrors);
}
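For scale, a rough size check of the flat page table being removed here, which allocated (asSize / PageSize) * PteSize bytes, one 8-byte entry per 4 KiB page (the address space size below is assumed for illustration):
ulong asSize = 1UL << 30;               // hypothetical 1 GiB address space
ulong tableBytes = (asSize / 4096) * 8; // 2 MiB of PTEs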
public override ReadOnlySequence<byte> GetReadOnlySequence(ulong va, int size, bool tracked = false)
{
if (size == 0)
{
return ReadOnlySequence<byte>.Empty;
}
try
{
if (tracked)
{
SignalMemoryTracking(va, (ulong)size, false);
}
else
{
AssertValidAddressAndSize(va, (ulong)size);
}
ulong endVa = va + (ulong)size;
int offset = 0;
BytesReadOnlySequenceSegment first = null, last = null;
while (va < endVa)
{
(MemoryBlock memory, ulong rangeOffset, ulong copySize) = GetMemoryOffsetAndSize(va, (ulong)(size - offset));
Memory<byte> physicalMemory = memory.GetMemory(rangeOffset, (int)copySize);
if (first is null)
{
first = last = new BytesReadOnlySequenceSegment(physicalMemory);
}
else
{
if (last.IsContiguousWith(physicalMemory, out nuint contiguousStart, out int contiguousSize))
{
Memory<byte> contiguousPhysicalMemory = new NativeMemoryManager<byte>(contiguousStart, contiguousSize).Memory;
last.Replace(contiguousPhysicalMemory);
}
else
{
last = last.Append(physicalMemory);
}
}
va += copySize;
offset += (int)copySize;
}
return new ReadOnlySequence<byte>(first, 0, last, (int)(size - last.RunningIndex));
}
catch (InvalidMemoryRegionException)
{
if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
{
throw;
}
return ReadOnlySequence<byte>.Empty;
}
}
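A hypothetical caller, to show why the segment coalescing above matters: the sequence is consumed without copying, and contiguous physical ranges surface as a single segment.
// Assumed usage; Process is a placeholder consumer.
ReadOnlySequence<byte> seq = memoryManager.GetReadOnlySequence(va, size);
foreach (ReadOnlyMemory<byte> segment in seq)
{
    Process(segment.Span);
}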
/// <inheritdoc/>
@@ -104,52 +159,12 @@ namespace Ryujinx.Cpu.Jit
_addressSpace.Map(va, pa, size);
}
AddMapping(va, size);
PtMap(va, pa, size, flags.HasFlag(MemoryMapFlags.Private));
_pages.AddMapping(va, size);
_nativePageTable.Map(va, pa, size, _addressSpace, _backingMemory, flags.HasFlag(MemoryMapFlags.Private));
Tracking.Map(va, size);
}
private void PtMap(ulong va, ulong pa, ulong size, bool privateMap)
{
while (size != 0)
{
_pageTable.Map(va, pa);
if (privateMap)
{
_flatPageTable.Write((va / PageSize) * PteSize, (ulong)_addressSpace.GetPointer(va, PageSize) - va);
}
else
{
_flatPageTable.Write((va / PageSize) * PteSize, (ulong)_backingMemory.GetPointer(pa, PageSize) - va);
}
va += PageSize;
pa += PageSize;
size -= PageSize;
}
}
private void UpdatePt(ulong va, IntPtr ptr, ulong size)
{
ulong remainingSize = size;
while (remainingSize != 0)
{
_flatPageTable.Write((va / PageSize) * PteSize, (ulong)ptr - va);
va += PageSize;
ptr += PageSize;
remainingSize -= PageSize;
}
}
/// <inheritdoc/>
public void MapForeign(ulong va, nuint hostPointer, ulong size)
{
throw new NotSupportedException();
}
/// <inheritdoc/>
public void Unmap(ulong va, ulong size)
{
@@ -160,70 +175,15 @@ namespace Ryujinx.Cpu.Jit
UnmapEvent?.Invoke(va, size);
Tracking.Unmap(va, size);
RemoveMapping(va, size);
PtUnmap(va, size);
_pages.RemoveMapping(va, size);
_nativePageTable.Unmap(va, size);
}
private void PtUnmap(ulong va, ulong size)
{
while (size != 0)
{
_pageTable.Unmap(va);
_flatPageTable.Write((va / PageSize) * PteSize, 0UL);
va += PageSize;
size -= PageSize;
}
}
/// <summary>
/// Checks if the virtual address is part of the addressable space.
/// </summary>
/// <param name="va">Virtual address</param>
/// <returns>True if the virtual address is part of the addressable space</returns>
private bool ValidateAddress(ulong va)
{
return va < AddressSpaceSize;
}
/// <summary>
/// Checks if the combination of virtual address and size is part of the addressable space.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range in bytes</param>
/// <returns>True if the combination of virtual address and size is part of the addressable space</returns>
private bool ValidateAddressAndSize(ulong va, ulong size)
{
ulong endVa = va + size;
return endVa >= va && endVa >= size && endVa <= AddressSpaceSize;
}
/// <summary>
/// Ensures the combination of virtual address and size is part of the addressable space.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range in bytes</param>
/// <exception cref="InvalidMemoryRegionException">Thrown when the memory region specified is outside the addressable space</exception>
private void AssertValidAddressAndSize(ulong va, ulong size)
{
if (!ValidateAddressAndSize(va, size))
{
throw new InvalidMemoryRegionException($"va=0x{va:X16}, size=0x{size:X16}");
}
}
public T Read<T>(ulong va) where T : unmanaged
{
return MemoryMarshal.Cast<byte, T>(GetSpan(va, Unsafe.SizeOf<T>()))[0];
}
public T ReadTracked<T>(ulong va) where T : unmanaged
public override T ReadTracked<T>(ulong va)
{
try
{
SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), false);
return Read<T>(va);
return base.ReadTracked<T>(va);
}
catch (InvalidMemoryRegionException)
{
@@ -236,39 +196,40 @@ namespace Ryujinx.Cpu.Jit
}
}
public void Read(ulong va, Span<byte> data)
{
ReadImpl(va, data);
}
public void Write<T>(ulong va, T value) where T : unmanaged
{
Write(va, MemoryMarshal.Cast<T, byte>(MemoryMarshal.CreateSpan(ref value, 1)));
}
public void Write(ulong va, ReadOnlySpan<byte> data)
public override void Read(ulong va, Span<byte> data)
{
if (data.Length == 0)
{
return;
}
SignalMemoryTracking(va, (ulong)data.Length, true);
WriteImpl(va, data);
}
public void WriteUntracked(ulong va, ReadOnlySpan<byte> data)
{
if (data.Length == 0)
try
{
return;
}
AssertValidAddressAndSize(va, (ulong)data.Length);
WriteImpl(va, data);
ulong endVa = va + (ulong)data.Length;
int offset = 0;
while (va < endVa)
{
(MemoryBlock memory, ulong rangeOffset, ulong copySize) = GetMemoryOffsetAndSize(va, (ulong)(data.Length - offset));
memory.GetSpan(rangeOffset, (int)copySize).CopyTo(data.Slice(offset, (int)copySize));
va += copySize;
offset += (int)copySize;
}
}
catch (InvalidMemoryRegionException)
{
if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
{
throw;
}
}
}
public bool WriteWithRedundancyCheck(ulong va, ReadOnlySpan<byte> data)
public override bool WriteWithRedundancyCheck(ulong va, ReadOnlySpan<byte> data)
{
if (data.Length == 0)
{
@@ -298,35 +259,7 @@ namespace Ryujinx.Cpu.Jit
}
}
private void WriteImpl(ulong va, ReadOnlySpan<byte> data)
{
try
{
AssertValidAddressAndSize(va, (ulong)data.Length);
ulong endVa = va + (ulong)data.Length;
int offset = 0;
while (va < endVa)
{
(MemoryBlock memory, ulong rangeOffset, ulong copySize) = GetMemoryOffsetAndSize(va, (ulong)(data.Length - offset));
data.Slice(offset, (int)copySize).CopyTo(memory.GetSpan(rangeOffset, (int)copySize));
va += copySize;
offset += (int)copySize;
}
}
catch (InvalidMemoryRegionException)
{
if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
{
throw;
}
}
}
public ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false)
public override ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false)
{
if (size == 0)
{
@@ -346,13 +279,13 @@ namespace Ryujinx.Cpu.Jit
{
Span<byte> data = new byte[size];
ReadImpl(va, data);
Read(va, data);
return data;
}
}
public WritableRegion GetWritableRegion(ulong va, int size, bool tracked = false)
public override WritableRegion GetWritableRegion(ulong va, int size, bool tracked = false)
{
if (size == 0)
{
@@ -370,11 +303,11 @@ namespace Ryujinx.Cpu.Jit
}
else
{
Memory<byte> memory = new byte[size];
MemoryOwner<byte> memoryOwner = MemoryOwner<byte>.Rent(size);
ReadImpl(va, memory.Span);
Read(va, memoryOwner.Span);
return new WritableRegion(this, va, memory);
return new WritableRegion(this, va, memoryOwner);
}
}
@@ -391,92 +324,24 @@ namespace Ryujinx.Cpu.Jit
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public bool IsMapped(ulong va)
public override bool IsMapped(ulong va)
{
return ValidateAddress(va) && IsMappedImpl(va);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private bool IsMappedImpl(ulong va)
{
ulong page = va >> PageBits;
int bit = (int)((page & 31) << 1);
int pageIndex = (int)(page >> PageToPteShift);
ref ulong pageRef = ref _pageBitmap[pageIndex];
ulong pte = Volatile.Read(ref pageRef);
return ((pte >> bit) & 3) != 0;
return ValidateAddress(va) && _pages.IsMapped(va);
}
public bool IsRangeMapped(ulong va, ulong size)
{
AssertValidAddressAndSize(va, size);
return IsRangeMappedImpl(va, size);
return _pages.IsRangeMapped(va, size);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void GetPageBlockRange(ulong pageStart, ulong pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex)
{
startMask = ulong.MaxValue << ((int)(pageStart & 31) << 1);
endMask = ulong.MaxValue >> (64 - ((int)(pageEnd & 31) << 1));
pageIndex = (int)(pageStart >> PageToPteShift);
pageEndIndex = (int)((pageEnd - 1) >> PageToPteShift);
}
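Worked example of the masks above, with assumed values: for a range covering page fields 3..4 of a single word (pageStart = 3, pageEnd = 5), the masks intersect to select exactly those fields.
// startMask = ulong.MaxValue << ((3 & 31) << 1)        -> clears bits 0..5
// endMask   = ulong.MaxValue >> (64 - ((5 & 31) << 1)) -> keeps bits 0..9
// startMask & endMask keeps bits 6..9, the 2-bit fields for pages 3 and 4.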
private bool IsRangeMappedImpl(ulong va, ulong size)
{
int pages = GetPagesCount(va, size, out _);
if (pages == 1)
{
return IsMappedImpl(va);
}
ulong pageStart = va >> PageBits;
ulong pageEnd = pageStart + (ulong)pages;
GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
// Check if either bit in each 2-bit page entry is set.
// OR the block with itself shifted down by 1, and check the first bit of each entry.
ulong mask = BlockMappedMask & startMask;
while (pageIndex <= pageEndIndex)
{
if (pageIndex == pageEndIndex)
{
mask &= endMask;
}
ref ulong pageRef = ref _pageBitmap[pageIndex++];
ulong pte = Volatile.Read(ref pageRef);
pte |= pte >> 1;
if ((pte & mask) != mask)
{
return false;
}
mask = BlockMappedMask;
}
return true;
}
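To make the fold concrete (values assumed): ORing an entry with itself shifted right by one sets the low bit of every 2-bit field whose state is non-zero, so one mask comparison checks up to 32 pages at once.
// Four example pages, MSB first: WriteTracked (10), Unmapped (00), Mapped (01), ReadWriteTracked (11).
ulong pte = 0b10_00_01_11;
ulong folded = pte | (pte >> 1);          // low bit now set wherever the state != 0
ulong mask = 0b01_01_01_01;               // BlockMappedMask restricted to these four fields
bool allMapped = (folded & mask) == mask; // false: the second page is unmapped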
private static void ThrowMemoryNotContiguous() => throw new MemoryNotContiguousException();
private bool TryGetVirtualContiguous(ulong va, int size, out MemoryBlock memory, out ulong offset)
{
if (_addressSpace.HasAnyPrivateAllocation(va, (ulong)size, out PrivateRange range))
if (_addressSpace.HasAnyPrivateAllocation(va, (ulong)size, out Ryujinx.Cpu.Jit.HostTracked.PrivateRange range))
{
// If we have a private allocation overlapping the range,
// this the access is only considered contiguous if it covers the entire range.
// then the access is only considered contiguous if it covers the entire range.
if (range.Memory != null)
{
@@ -559,9 +424,7 @@ namespace Ryujinx.Cpu.Jit
private (MemoryBlock, ulong, ulong) GetMemoryOffsetAndSize(ulong va, ulong size)
{
ulong endVa = va + size;
PrivateRange privateRange = _addressSpace.GetFirstPrivateAllocation(va, size, out ulong nextVa);
Ryujinx.Cpu.Jit.HostTracked.PrivateRange privateRange = _addressSpace.GetFirstPrivateAllocation(va, size, out ulong nextVa);
if (privateRange.Memory != null)
{
@@ -570,7 +433,7 @@ namespace Ryujinx.Cpu.Jit
ulong physSize = GetContiguousSize(va, Math.Min(size, nextVa - va));
return new(_backingMemory, GetPhysicalAddressChecked(va), physSize);
return (_backingMemory, GetPhysicalAddressChecked(va), physSize);
}
public IEnumerable<HostMemoryRange> GetHostRegions(ulong va, ulong size)
@@ -651,44 +514,11 @@ namespace Ryujinx.Cpu.Jit
return regions;
}
private void ReadImpl(ulong va, Span<byte> data)
{
if (data.Length == 0)
{
return;
}
try
{
AssertValidAddressAndSize(va, (ulong)data.Length);
ulong endVa = va + (ulong)data.Length;
int offset = 0;
while (va < endVa)
{
(MemoryBlock memory, ulong rangeOffset, ulong copySize) = GetMemoryOffsetAndSize(va, (ulong)(data.Length - offset));
memory.GetSpan(rangeOffset, (int)copySize).CopyTo(data.Slice(offset, (int)copySize));
va += copySize;
offset += (int)copySize;
}
}
catch (InvalidMemoryRegionException)
{
if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
{
throw;
}
}
}
/// <inheritdoc/>
/// <remarks>
/// This function also validates that the given range is both valid and mapped, and will throw if it is not.
/// </remarks>
public void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null)
public override void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null)
{
AssertValidAddressAndSize(va, size);
@@ -700,101 +530,17 @@ namespace Ryujinx.Cpu.Jit
// Software table, used for managed memory tracking.
int pages = GetPagesCount(va, size, out _);
ulong pageStart = va >> PageBits;
if (pages == 1)
{
ulong tag = (ulong)(write ? HostMappedPtBits.WriteTracked : HostMappedPtBits.ReadWriteTracked);
int bit = (int)((pageStart & 31) << 1);
int pageIndex = (int)(pageStart >> PageToPteShift);
ref ulong pageRef = ref _pageBitmap[pageIndex];
ulong pte = Volatile.Read(ref pageRef);
ulong state = ((pte >> bit) & 3);
if (state >= tag)
{
Tracking.VirtualMemoryEvent(va, size, write, precise: false, exemptId);
return;
}
else if (state == 0)
{
ThrowInvalidMemoryRegionException($"Not mapped: va=0x{va:X16}, size=0x{size:X16}");
}
}
else
{
ulong pageEnd = pageStart + (ulong)pages;
GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
ulong mask = startMask;
ulong anyTrackingTag = (ulong)HostMappedPtBits.WriteTrackedReplicated;
while (pageIndex <= pageEndIndex)
{
if (pageIndex == pageEndIndex)
{
mask &= endMask;
}
ref ulong pageRef = ref _pageBitmap[pageIndex++];
ulong pte = Volatile.Read(ref pageRef);
ulong mappedMask = mask & BlockMappedMask;
ulong mappedPte = pte | (pte >> 1);
if ((mappedPte & mappedMask) != mappedMask)
{
ThrowInvalidMemoryRegionException($"Not mapped: va=0x{va:X16}, size=0x{size:X16}");
}
pte &= mask;
if ((pte & anyTrackingTag) != 0) // Search for any tracking.
{
// Writes trigger any tracking.
// Only trigger tracking from reads if both bits are set on any page.
if (write || (pte & (pte >> 1) & BlockMappedMask) != 0)
{
Tracking.VirtualMemoryEvent(va, size, write, precise: false, exemptId);
break;
}
}
mask = ulong.MaxValue;
}
}
_pages.SignalMemoryTracking(Tracking, va, size, write, exemptId);
}
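Recap of the 2-bit states consulted above, as an illustrative predicate (not from the source): 00 unmapped, 01 mapped, 10 write-tracked, 11 read/write-tracked. Writes must fire for states 10 and 11, reads only for 11.
static bool TriggersTracking(ulong state, bool write)
{
    // Mirrors the `state >= tag` comparison: tag is 2 (WriteTracked) for writes
    // and 3 (ReadWriteTracked) for reads.
    return state >= (write ? 2UL : 3UL);
}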
/// <summary>
/// Computes the number of pages in a virtual address range.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range</param>
/// <param name="startVa">The virtual address of the beginning of the first page</param>
/// <remarks>This function does not differentiate between allocated and unallocated pages.</remarks>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private int GetPagesCount(ulong va, ulong size, out ulong startVa)
public RegionHandle BeginTracking(ulong address, ulong size, int id, RegionFlags flags = RegionFlags.None)
{
// WARNING: Always check if ulong does not overflow during the operations.
startVa = va & ~(ulong)PageMask;
ulong vaSpan = (va - startVa + size + PageMask) & ~(ulong)PageMask;
return (int)(vaSpan / PageSize);
return Tracking.BeginTracking(address, size, id, flags);
}
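Worked numbers for the page-count math above (inputs assumed): an unaligned range is widened to page boundaries before dividing.
// va = 0x1801, size = 0x3000, PageSize = 0x1000:
// startVa = 0x1801 & ~0xFFF                    = 0x1000
// vaSpan  = (0x801 + 0x3000 + 0xFFF) & ~0xFFF  = 0x4000
// pages   = 0x4000 / 0x1000                    = 4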
public RegionHandle BeginTracking(ulong address, ulong size, int id)
public MultiRegionHandle BeginGranularTracking(ulong address, ulong size, IEnumerable<IRegionHandle> handles, ulong granularity, int id, RegionFlags flags = RegionFlags.None)
{
return Tracking.BeginTracking(address, size, id);
}
public MultiRegionHandle BeginGranularTracking(ulong address, ulong size, IEnumerable<IRegionHandle> handles, ulong granularity, int id)
{
return Tracking.BeginGranularTracking(address, size, handles, granularity, id);
return Tracking.BeginGranularTracking(address, size, handles, granularity, id, flags);
}
public SmartMultiRegionHandle BeginSmartGranularTracking(ulong address, ulong size, ulong granularity, int id)
@@ -802,86 +548,6 @@ namespace Ryujinx.Cpu.Jit
return Tracking.BeginSmartGranularTracking(address, size, granularity, id);
}
/// <summary>
/// Adds the given address mapping to the page table.
/// </summary>
/// <param name="va">Virtual memory address</param>
/// <param name="size">Size to be mapped</param>
private void AddMapping(ulong va, ulong size)
{
int pages = GetPagesCount(va, size, out _);
ulong pageStart = va >> PageBits;
ulong pageEnd = pageStart + (ulong)pages;
GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
ulong mask = startMask;
while (pageIndex <= pageEndIndex)
{
if (pageIndex == pageEndIndex)
{
mask &= endMask;
}
ref ulong pageRef = ref _pageBitmap[pageIndex++];
ulong pte;
ulong mappedMask;
// Map all 2-bit entries that are unmapped.
do
{
pte = Volatile.Read(ref pageRef);
mappedMask = pte | (pte >> 1);
mappedMask |= (mappedMask & BlockMappedMask) << 1;
mappedMask |= ~mask; // Treat everything outside the range as mapped, thus unchanged.
}
while (Interlocked.CompareExchange(ref pageRef, (pte & mappedMask) | (BlockMappedMask & (~mappedMask)), pte) != pte);
mask = ulong.MaxValue;
}
}
/// <summary>
/// Removes the given address mapping from the page table.
/// </summary>
/// <param name="va">Virtual memory address</param>
/// <param name="size">Size to be unmapped</param>
private void RemoveMapping(ulong va, ulong size)
{
int pages = GetPagesCount(va, size, out _);
ulong pageStart = va >> PageBits;
ulong pageEnd = pageStart + (ulong)pages;
GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
startMask = ~startMask;
endMask = ~endMask;
ulong mask = startMask;
while (pageIndex <= pageEndIndex)
{
if (pageIndex == pageEndIndex)
{
mask |= endMask;
}
ref ulong pageRef = ref _pageBitmap[pageIndex++];
ulong pte;
do
{
pte = Volatile.Read(ref pageRef);
}
while (Interlocked.CompareExchange(ref pageRef, pte & mask, pte) != pte);
mask = 0;
}
}
private ulong GetPhysicalAddressChecked(ulong va)
{
if (!IsMapped(va))
@@ -894,11 +560,9 @@ namespace Ryujinx.Cpu.Jit
private ulong GetPhysicalAddressInternal(ulong va)
{
return _pageTable.Read(va) + (va & PageMask);
return _nativePageTable.GetPhysicalAddress(va);
}
private static void ThrowInvalidMemoryRegionException(string message) => throw new InvalidMemoryRegionException(message);
/// <inheritdoc/>
public void Reprotect(ulong va, ulong size, MemoryPermission protection)
{
@@ -906,91 +570,16 @@ namespace Ryujinx.Cpu.Jit
}
/// <inheritdoc/>
public void TrackingReprotect(ulong va, ulong size, MemoryPermission protection)
public void TrackingReprotect(ulong va, ulong size, MemoryPermission protection, bool guest)
{
// Protection is inverted on software pages, since the default value is 0.
protection = (~protection) & MemoryPermission.ReadAndWrite;
int pages = GetPagesCount(va, size, out va);
ulong pageStart = va >> PageBits;
if (pages == 1)
if (guest)
{
ulong protTag = protection switch
{
MemoryPermission.None => (ulong)HostMappedPtBits.Mapped,
MemoryPermission.Write => (ulong)HostMappedPtBits.WriteTracked,
_ => (ulong)HostMappedPtBits.ReadWriteTracked,
};
int bit = (int)((pageStart & 31) << 1);
ulong tagMask = 3UL << bit;
ulong invTagMask = ~tagMask;
ulong tag = protTag << bit;
int pageIndex = (int)(pageStart >> PageToPteShift);
ref ulong pageRef = ref _pageBitmap[pageIndex];
ulong pte;
do
{
pte = Volatile.Read(ref pageRef);
}
while ((pte & tagMask) != 0 && Interlocked.CompareExchange(ref pageRef, (pte & invTagMask) | tag, pte) != pte);
_addressSpace.Reprotect(va, size, protection);
}
else
{
ulong pageEnd = pageStart + (ulong)pages;
GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
ulong mask = startMask;
ulong protTag = protection switch
{
MemoryPermission.None => (ulong)HostMappedPtBits.MappedReplicated,
MemoryPermission.Write => (ulong)HostMappedPtBits.WriteTrackedReplicated,
_ => (ulong)HostMappedPtBits.ReadWriteTrackedReplicated,
};
while (pageIndex <= pageEndIndex)
{
if (pageIndex == pageEndIndex)
{
mask &= endMask;
}
ref ulong pageRef = ref _pageBitmap[pageIndex++];
ulong pte;
ulong mappedMask;
// Change the protection of all 2-bit entries that are mapped.
do
{
pte = Volatile.Read(ref pageRef);
mappedMask = pte | (pte >> 1);
mappedMask |= (mappedMask & BlockMappedMask) << 1;
mappedMask &= mask; // Only update mapped pages within the given range.
}
while (Interlocked.CompareExchange(ref pageRef, (pte & (~mappedMask)) | (protTag & mappedMask), pte) != pte);
mask = ulong.MaxValue;
}
_pages.TrackingReprotect(va, size, protection);
}
protection = protection switch
{
MemoryPermission.None => MemoryPermission.ReadAndWrite,
MemoryPermission.Write => MemoryPermission.Read,
_ => MemoryPermission.None,
};
_addressSpace.Reprotect(va, size, protection);
}
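The double inversion above, modeled in one place (sketch; the helper name is made up): the software table stores which accesses are tracked, so the host protection handed to the address space is the complement of the requested permission.
static MemoryPermission ToHostProtection(MemoryPermission requested) => requested switch
{
    MemoryPermission.None => MemoryPermission.ReadAndWrite, // nothing tracked
    MemoryPermission.Write => MemoryPermission.Read,        // writes tracked
    _ => MemoryPermission.None,                             // reads and writes tracked
};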
/// <summary>
@@ -999,6 +588,47 @@ namespace Ryujinx.Cpu.Jit
protected override void Destroy()
{
_addressSpace.Dispose();
_nativePageTable.Dispose();
}
protected override Memory<byte> GetPhysicalAddressMemory(nuint pa, int size)
=> _backingMemory.GetMemory(pa, size);
protected override Span<byte> GetPhysicalAddressSpan(nuint pa, int size)
=> _backingMemory.GetSpan(pa, size);
protected override void WriteImpl(ulong va, ReadOnlySpan<byte> data)
{
try
{
AssertValidAddressAndSize(va, (ulong)data.Length);
ulong endVa = va + (ulong)data.Length;
int offset = 0;
while (va < endVa)
{
(MemoryBlock memory, ulong rangeOffset, ulong copySize) = GetMemoryOffsetAndSize(va, (ulong)(data.Length - offset));
data.Slice(offset, (int)copySize).CopyTo(memory.GetSpan(rangeOffset, (int)copySize));
va += copySize;
offset += (int)copySize;
}
}
catch (InvalidMemoryRegionException)
{
if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
{
throw;
}
}
}
protected override nuint TranslateVirtualAddressChecked(ulong va)
=> (nuint)GetPhysicalAddressChecked(va);
protected override nuint TranslateVirtualAddressUnchecked(ulong va)
=> (nuint)GetPhysicalAddressInternal(va);
}
}

View file

@@ -10,6 +10,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm32
public readonly List<InstInfo> Instructions;
public readonly bool EndsWithBranch;
public readonly bool HasHostCall;
public readonly bool HasHostCallSkipContext;
public readonly bool IsTruncated;
public readonly bool IsLoopEnd;
public readonly bool IsThumb;
@@ -20,6 +21,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm32
List<InstInfo> instructions,
bool endsWithBranch,
bool hasHostCall,
bool hasHostCallSkipContext,
bool isTruncated,
bool isLoopEnd,
bool isThumb)
@@ -31,6 +33,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm32
Instructions = instructions;
EndsWithBranch = endsWithBranch;
HasHostCall = hasHostCall;
HasHostCallSkipContext = hasHostCallSkipContext;
IsTruncated = isTruncated;
IsLoopEnd = isLoopEnd;
IsThumb = isThumb;
@@ -57,6 +60,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm32
Instructions.GetRange(0, splitIndex),
false,
HasHostCall,
HasHostCallSkipContext,
false,
false,
IsThumb);
@@ -67,6 +71,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm32
Instructions.GetRange(splitIndex, splitCount),
EndsWithBranch,
HasHostCall,
HasHostCallSkipContext,
IsTruncated,
IsLoopEnd,
IsThumb);

View file

@@ -208,6 +208,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm32
InstMeta meta;
InstFlags extraFlags = InstFlags.None;
bool hasHostCall = false;
bool hasHostCallSkipContext = false;
bool isTruncated = false;
do
@@ -246,9 +247,17 @@ namespace Ryujinx.Cpu.LightningJit.Arm32
meta = InstTableA32<T>.GetMeta(encoding, cpuPreset.Version, cpuPreset.Features);
}
if (meta.Name.IsSystemOrCall() && !hasHostCall)
if (meta.Name.IsSystemOrCall())
{
hasHostCall = meta.Name.IsCall() || InstEmitSystem.NeedsCall(meta.Name);
if (!hasHostCall)
{
hasHostCall = InstEmitSystem.NeedsCall(meta.Name);
}
if (!hasHostCallSkipContext)
{
hasHostCallSkipContext = meta.Name.IsCall() || InstEmitSystem.NeedsCallSkipContext(meta.Name);
}
}
insts.Add(new(encoding, meta.Name, meta.EmitFunc, meta.Flags | extraFlags));
@@ -259,8 +268,8 @@ namespace Ryujinx.Cpu.LightningJit.Arm32
if (!isTruncated && IsBackwardsBranch(meta.Name, encoding))
{
hasHostCall = true;
isLoopEnd = true;
hasHostCallSkipContext = true;
}
return new(
@@ -269,6 +278,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm32
insts,
!isTruncated,
hasHostCall,
hasHostCallSkipContext,
isTruncated,
isLoopEnd,
isThumb);
@@ -415,7 +425,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm32
private static bool IsRtWrite(InstName name, uint encoding)
{
// Some instruction can move GPR to FP/SIMD or FP/SIMD to GPR depending on the encoding.
// Some instructions can move GPR to FP/SIMD or FP/SIMD to GPR depending on the encoding.
// Detect those cases so that we can tell if we're actually doing a register write.
switch (name)

View file

@@ -6,6 +6,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm32
{
public readonly List<Block> Blocks;
public readonly bool HasHostCall;
public readonly bool HasHostCallSkipContext;
public readonly bool IsTruncated;
public MultiBlock(List<Block> blocks)
@@ -15,12 +16,14 @@ namespace Ryujinx.Cpu.LightningJit.Arm32
Block block = blocks[0];
HasHostCall = block.HasHostCall;
HasHostCallSkipContext = block.HasHostCallSkipContext;
for (int index = 1; index < blocks.Count; index++)
{
block = blocks[index];
HasHostCall |= block.HasHostCall;
HasHostCallSkipContext |= block.HasHostCallSkipContext;
}
block = blocks[^1];

View file

@@ -106,6 +106,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm32
if ((regMask & AbiConstants.ReservedRegsMask) == 0)
{
_gprMask |= regMask;
UsedGprsMask |= regMask;
return firstCalleeSaved;
}

View file

@@ -305,12 +305,23 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
ForceConditionalEnd(cgContext, ref lastCondition, lastConditionIp);
}
int reservedStackSize = 0;
if (multiBlock.HasHostCall)
{
reservedStackSize = CalculateStackSizeForCallSpill(regAlloc.UsedGprsMask, regAlloc.UsedFpSimdMask, UsablePStateMask);
}
else if (multiBlock.HasHostCallSkipContext)
{
reservedStackSize = 2 * sizeof(ulong); // Context and page table pointers.
}
RegisterSaveRestore rsr = new(
regAlloc.UsedGprsMask & AbiConstants.GprCalleeSavedRegsMask,
regAlloc.UsedFpSimdMask & AbiConstants.FpSimdCalleeSavedRegsMask,
OperandType.FP64,
multiBlock.HasHostCall,
multiBlock.HasHostCall ? CalculateStackSizeForCallSpill(regAlloc.UsedGprsMask, regAlloc.UsedFpSimdMask, UsablePStateMask) : 0);
multiBlock.HasHostCall || multiBlock.HasHostCallSkipContext,
reservedStackSize);
TailMerger tailMerger = new();
@@ -596,7 +607,8 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
name == InstName.Ldm ||
name == InstName.Ldmda ||
name == InstName.Ldmdb ||
name == InstName.Ldmib)
name == InstName.Ldmib ||
name == InstName.Pop)
{
// Arm32 does not have a return instruction; instead, returns are implemented
// either using BX LR (for leaf functions), or POP { ... PC }.
@@ -711,7 +723,14 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
switch (type)
{
case BranchType.SyncPoint:
InstEmitSystem.WriteSyncPoint(context.Writer, context.RegisterAllocator, context.TailMerger, context.GetReservedStackOffset());
InstEmitSystem.WriteSyncPoint(
context.Writer,
ref asm,
context.RegisterAllocator,
context.TailMerger,
context.GetReservedStackOffset(),
context.StoreToContext,
context.LoadFromContext);
break;
case BranchType.SoftwareInterrupt:
context.StoreToContext();

View file

@@ -199,12 +199,12 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
}
}
private static void WriteSpillSkipContext(ref Assembler asm, RegisterAllocator regAlloc, int spillOffset)
public static void WriteSpillSkipContext(ref Assembler asm, RegisterAllocator regAlloc, int spillOffset)
{
WriteSpillOrFillSkipContext(ref asm, regAlloc, spillOffset, spill: true);
}
private static void WriteFillSkipContext(ref Assembler asm, RegisterAllocator regAlloc, int spillOffset)
public static void WriteFillSkipContext(ref Assembler asm, RegisterAllocator regAlloc, int spillOffset)
{
WriteSpillOrFillSkipContext(ref asm, regAlloc, spillOffset, spill: false);
}

View file

@@ -1126,7 +1126,10 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
Operand destination64 = new(destination.Kind, OperandType.I64, destination.Value);
Operand basePointer = new(regAlloc.FixedPageTableRegister, RegisterType.Integer, OperandType.I64);
if (mmType == MemoryManagerType.HostTracked)
// We don't need to mask the address for the safe mode, since it is already naturally limited to 32-bit
// and can never reach out of the guest address space.
if (mmType.IsHostTracked())
{
int tempRegister = regAlloc.AllocateTempGprRegister();
@@ -1138,7 +1141,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
regAlloc.FreeTempGprRegister(tempRegister);
}
else if (mmType == MemoryManagerType.HostMapped || mmType == MemoryManagerType.HostMappedUnsafe)
else if (mmType.IsHostMapped())
{
asm.Add(destination64, basePointer, guestAddress);
}

View file

@@ -1,6 +1,5 @@
using Ryujinx.Cpu.LightningJit.CodeGen;
using Ryujinx.Cpu.LightningJit.CodeGen.Arm64;
using System.Diagnostics;
namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
{

View file

@@ -717,6 +717,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
}
}
[Conditional("DEBUG")]
private static void AssertSequentialRegisters(ReadOnlySpan<ScopedRegister> registers)
{
for (int index = 1; index < registers.Length; index++)

View file

@@ -114,7 +114,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
InstEmitCommon.EmitUnsigned16BitPair(context, rd, rn, rm, (d, n, m) =>
{
context.Arm64Assembler.Add(d, n, m);
EmitSaturateUnsignedRange(context, d, 16);
EmitSaturateUqadd(context, d, 16);
});
}
@@ -123,7 +123,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
InstEmitCommon.EmitUnsigned8BitPair(context, rd, rn, rm, (d, n, m) =>
{
context.Arm64Assembler.Add(d, n, m);
EmitSaturateUnsignedRange(context, d, 8);
EmitSaturateUqadd(context, d, 8);
});
}
@@ -140,7 +140,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
context.Arm64Assembler.Add(d, n, m);
}
EmitSaturateUnsignedRange(context, d, 16);
EmitSaturateUq(context, d, 16, e == 0);
});
}
@@ -157,25 +157,25 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
context.Arm64Assembler.Sub(d, n, m);
}
EmitSaturateUnsignedRange(context, d, 16);
EmitSaturateUq(context, d, 16, e != 0);
});
}
public static void Uqsub16(CodeGenContext context, uint rd, uint rn, uint rm)
{
InstEmitCommon.EmitSigned16BitPair(context, rd, rn, rm, (d, n, m) =>
InstEmitCommon.EmitUnsigned16BitPair(context, rd, rn, rm, (d, n, m) =>
{
context.Arm64Assembler.Sub(d, n, m);
EmitSaturateUnsignedRange(context, d, 16);
EmitSaturateUqsub(context, d, 16);
});
}
public static void Uqsub8(CodeGenContext context, uint rd, uint rn, uint rm)
{
InstEmitCommon.EmitSigned8BitPair(context, rd, rn, rm, (d, n, m) =>
InstEmitCommon.EmitUnsigned8BitPair(context, rd, rn, rm, (d, n, m) =>
{
context.Arm64Assembler.Sub(d, n, m);
EmitSaturateUnsignedRange(context, d, 8);
EmitSaturateUqsub(context, d, 8);
});
}
@@ -358,7 +358,17 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
}
}
private static void EmitSaturateUnsignedRange(CodeGenContext context, Operand value, uint saturateTo)
private static void EmitSaturateUqadd(CodeGenContext context, Operand value, uint saturateTo)
{
EmitSaturateUq(context, value, saturateTo, isSub: false);
}
private static void EmitSaturateUqsub(CodeGenContext context, Operand value, uint saturateTo)
{
EmitSaturateUq(context, value, saturateTo, isSub: true);
}
private static void EmitSaturateUq(CodeGenContext context, Operand value, uint saturateTo, bool isSub)
{
Debug.Assert(saturateTo <= 32);
@@ -379,7 +389,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
return;
}
context.Arm64Assembler.Lsr(tempRegister.Operand, value, InstEmitCommon.Const(32 - (int)saturateTo));
context.Arm64Assembler.Lsr(tempRegister.Operand, value, InstEmitCommon.Const((int)saturateTo));
int branchIndex = context.CodeWriter.InstructionPointer;
@@ -387,7 +397,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
context.Arm64Assembler.Cbz(tempRegister.Operand, 0);
// Saturate.
context.Arm64Assembler.Mov(value, uint.MaxValue >> (32 - (int)saturateTo));
context.Arm64Assembler.Mov(value, isSub ? 0u : uint.MaxValue >> (32 - (int)saturateTo));
int delta = context.CodeWriter.InstructionPointer - branchIndex;
context.CodeWriter.WriteInstructionAt(branchIndex, context.CodeWriter.ReadInstructionAt(branchIndex) | (uint)((delta & 0x7ffff) << 5));
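A scalar model of what the emitted Lsr/Cbz/Mov sequence computes (sketch, not part of the change): if any bit at or above the saturation width is set, the result is out of range and clamps to zero for subtractions or to the type maximum for additions.
static uint SaturateUq(ulong result, int bits, bool isSub)
{
    bool outOfRange = (result >> bits) != 0; // the Lsr/Cbz pair above
    uint max = uint.MaxValue >> (32 - bits); // e.g. 0xFFFF for 16 bits
    return outOfRange ? (isSub ? 0u : max) : (uint)result;
}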

View file

@@ -354,11 +354,18 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
// All instructions that might do a host call should be included here.
// That is required to reserve space on the stack for caller saved registers.
return name == InstName.Mrrc;
}
public static bool NeedsCallSkipContext(InstName name)
{
// All instructions that might do a host call should be included here.
// That is required to reserve space on the stack for caller saved registers.
switch (name)
{
case InstName.Mcr:
case InstName.Mrc:
case InstName.Mrrc:
case InstName.Svc:
case InstName.Udf:
return true;
@@ -372,7 +379,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
Assembler asm = new(writer);
WriteCall(ref asm, regAlloc, GetBkptHandlerPtr(), skipContext: true, spillBaseOffset, null, pc, imm);
WriteSyncPoint(writer, ref asm, regAlloc, tailMerger, skipContext: true, spillBaseOffset);
WriteSyncPoint(writer, ref asm, regAlloc, tailMerger, spillBaseOffset);
}
public static void WriteSvc(CodeWriter writer, RegisterAllocator regAlloc, TailMerger tailMerger, int spillBaseOffset, uint pc, uint svcId)
@@ -380,7 +387,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
Assembler asm = new(writer);
WriteCall(ref asm, regAlloc, GetSvcHandlerPtr(), skipContext: true, spillBaseOffset, null, pc, svcId);
WriteSyncPoint(writer, ref asm, regAlloc, tailMerger, skipContext: true, spillBaseOffset);
WriteSyncPoint(writer, ref asm, regAlloc, tailMerger, spillBaseOffset);
}
public static void WriteUdf(CodeWriter writer, RegisterAllocator regAlloc, TailMerger tailMerger, int spillBaseOffset, uint pc, uint imm)
@@ -388,7 +395,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
Assembler asm = new(writer);
WriteCall(ref asm, regAlloc, GetUdfHandlerPtr(), skipContext: true, spillBaseOffset, null, pc, imm);
WriteSyncPoint(writer, ref asm, regAlloc, tailMerger, skipContext: true, spillBaseOffset);
WriteSyncPoint(writer, ref asm, regAlloc, tailMerger, spillBaseOffset);
}
public static void WriteReadCntpct(CodeWriter writer, RegisterAllocator regAlloc, int spillBaseOffset, int rt, int rt2)
@@ -422,14 +429,14 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
WriteFill(ref asm, regAlloc, resultMask, skipContext: false, spillBaseOffset, tempRegister);
}
public static void WriteSyncPoint(CodeWriter writer, RegisterAllocator regAlloc, TailMerger tailMerger, int spillBaseOffset)
{
Assembler asm = new(writer);
WriteSyncPoint(writer, ref asm, regAlloc, tailMerger, skipContext: false, spillBaseOffset);
}
private static void WriteSyncPoint(CodeWriter writer, ref Assembler asm, RegisterAllocator regAlloc, TailMerger tailMerger, bool skipContext, int spillBaseOffset)
public static void WriteSyncPoint(
CodeWriter writer,
ref Assembler asm,
RegisterAllocator regAlloc,
TailMerger tailMerger,
int spillBaseOffset,
Action storeToContext = null,
Action loadFromContext = null)
{
int tempRegister = regAlloc.AllocateTempGprRegister();
@ -440,7 +447,8 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
int branchIndex = writer.InstructionPointer;
asm.Cbnz(rt, 0);
WriteSpill(ref asm, regAlloc, 1u << tempRegister, skipContext, spillBaseOffset, tempRegister);
storeToContext?.Invoke();
WriteSpill(ref asm, regAlloc, 1u << tempRegister, skipContext: true, spillBaseOffset, tempRegister);
Operand rn = Register(tempRegister == 0 ? 1 : 0);
@ -449,7 +457,8 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
tailMerger.AddConditionalZeroReturn(writer, asm, Register(0, OperandType.I32));
WriteFill(ref asm, regAlloc, 1u << tempRegister, skipContext, spillBaseOffset, tempRegister);
WriteFill(ref asm, regAlloc, 1u << tempRegister, skipContext: true, spillBaseOffset, tempRegister);
loadFromContext?.Invoke();
asm.LdrRiUn(rt, Register(regAlloc.FixedContextRegister), NativeContextOffsets.CounterOffset);
@@ -489,8 +498,8 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
// We only support up to 7 arguments right now.
// ABI defines the first 8 integer arguments to be passed on registers X0-X7.
// We need at least one regiser to put the function address on, so that reduces the amount of
// register we can use for that by one.
// We need at least one register to put the function address on, so that reduces the number of
// registers we can use for that by one.
Debug.Assert(callArgs.Length < 8);
@@ -514,18 +523,31 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
private static void WriteSpill(ref Assembler asm, RegisterAllocator regAlloc, uint exceptMask, bool skipContext, int spillOffset, int tempRegister)
{
WriteSpillOrFill(ref asm, regAlloc, skipContext, exceptMask, spillOffset, tempRegister, spill: true);
if (skipContext)
{
InstEmitFlow.WriteSpillSkipContext(ref asm, regAlloc, spillOffset);
}
else
{
WriteSpillOrFill(ref asm, regAlloc, exceptMask, spillOffset, tempRegister, spill: true);
}
}
private static void WriteFill(ref Assembler asm, RegisterAllocator regAlloc, uint exceptMask, bool skipContext, int spillOffset, int tempRegister)
{
WriteSpillOrFill(ref asm, regAlloc, skipContext, exceptMask, spillOffset, tempRegister, spill: false);
if (skipContext)
{
InstEmitFlow.WriteFillSkipContext(ref asm, regAlloc, spillOffset);
}
else
{
WriteSpillOrFill(ref asm, regAlloc, exceptMask, spillOffset, tempRegister, spill: false);
}
}
private static void WriteSpillOrFill(
ref Assembler asm,
RegisterAllocator regAlloc,
bool skipContext,
uint exceptMask,
int spillOffset,
int tempRegister,
@@ -533,11 +555,6 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
{
uint gprMask = regAlloc.UsedGprsMask & ~(AbiConstants.GprCalleeSavedRegsMask | exceptMask);
if (skipContext)
{
gprMask &= ~Compiler.UsableGprsMask;
}
if (!spill)
{
// We must reload the status register before reloading the GPRs,
@@ -600,11 +617,6 @@ namespace Ryujinx.Cpu.LightningJit.Arm32.Target.Arm64
uint fpSimdMask = regAlloc.UsedFpSimdMask;
if (skipContext)
{
fpSimdMask &= ~Compiler.UsableFpSimdMask;
}
while (fpSimdMask != 0)
{
int reg = BitOperations.TrailingZeroCount(fpSimdMask);

View file

@@ -1106,6 +1106,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64
case InstName.Mrs:
case InstName.MsrImm:
case InstName.MsrReg:
case InstName.Sysl:
return true;
}
@@ -1130,5 +1131,37 @@ namespace Ryujinx.Cpu.LightningJit.Arm64
return false;
}
public static bool IsPartialRegisterUpdateMemory(this InstName name)
{
switch (name)
{
case InstName.Ld1AdvsimdSnglAsNoPostIndex:
case InstName.Ld1AdvsimdSnglAsPostIndex:
case InstName.Ld2AdvsimdSnglAsNoPostIndex:
case InstName.Ld2AdvsimdSnglAsPostIndex:
case InstName.Ld3AdvsimdSnglAsNoPostIndex:
case InstName.Ld3AdvsimdSnglAsPostIndex:
case InstName.Ld4AdvsimdSnglAsNoPostIndex:
case InstName.Ld4AdvsimdSnglAsPostIndex:
return true;
}
return false;
}
public static bool IsPrefetchMemory(this InstName name)
{
switch (name)
{
case InstName.PrfmImm:
case InstName.PrfmLit:
case InstName.PrfmReg:
case InstName.Prfum:
return true;
}
return false;
}
}
}

View file

@@ -1,15 +1,12 @@
using ARMeilleure.Memory;
using Ryujinx.Cpu.LightningJit.CodeGen.Arm64;
using System;
using System.Diagnostics;
using System.Numerics;
namespace Ryujinx.Cpu.LightningJit.Arm64
{
class RegisterAllocator
{
public const int MaxTemps = 2;
public const int MaxTempsInclFixed = MaxTemps + 2;
private uint _gprMask;
private readonly uint _fpSimdMask;
private readonly uint _pStateMask;
@@ -25,7 +22,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64
public uint AllFpSimdMask => _fpSimdMask;
public uint AllPStateMask => _pStateMask;
public RegisterAllocator(uint gprMask, uint fpSimdMask, uint pStateMask, bool hasHostCall)
public RegisterAllocator(MemoryManagerType mmType, uint gprMask, uint fpSimdMask, uint pStateMask, bool hasHostCall)
{
_gprMask = gprMask;
_fpSimdMask = fpSimdMask;
@@ -56,7 +53,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64
BuildRegisterMap(_registerMap);
Span<int> tempRegisters = stackalloc int[MaxTemps];
Span<int> tempRegisters = stackalloc int[CalculateMaxTemps(mmType)];
for (int index = 0; index < tempRegisters.Length; index++)
{
@@ -150,5 +147,15 @@ namespace Ryujinx.Cpu.LightningJit.Arm64
{
mask &= ~(1u << index);
}
public static int CalculateMaxTemps(MemoryManagerType mmType)
{
return mmType.IsHostMapped() ? 1 : 2;
}
public static int CalculateMaxTempsInclFixed(MemoryManagerType mmType)
{
return CalculateMaxTemps(mmType) + 2;
}
}
}

View file

@@ -247,7 +247,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64
}
}
if (!flags.HasFlag(InstFlags.ReadRt))
if (!flags.HasFlag(InstFlags.ReadRt) || name.IsPartialRegisterUpdateMemory())
{
if (flags.HasFlag(InstFlags.Rt))
{
@@ -281,7 +281,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64
gprMask |= MaskFromIndex(ExtractRd(flags, encoding));
}
if (!flags.HasFlag(InstFlags.ReadRt))
if (!flags.HasFlag(InstFlags.ReadRt) || name.IsPartialRegisterUpdateMemory())
{
if (flags.HasFlag(InstFlags.Rt))
{

View file

@@ -0,0 +1,48 @@
using System.Diagnostics;
namespace Ryujinx.Cpu.LightningJit.Arm64
{
static class SysUtils
{
public static (uint, uint, uint, uint) UnpackOp1CRnCRmOp2(uint encoding)
{
uint op1 = (encoding >> 16) & 7;
uint crn = (encoding >> 12) & 0xf;
uint crm = (encoding >> 8) & 0xf;
uint op2 = (encoding >> 5) & 7;
return (op1, crn, crm, op2);
}
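Assumed usage of the unpacker, showing how the lookup keys in the methods below are formed:
(uint op1, uint crn, uint crm, uint op2) = SysUtils.UnpackOp1CRnCRmOp2(encoding);
uint key = (op1 << 11) | (crn << 7) | (crm << 3) | op2;
bool isDcCvau = key == 0b011_0111_1011_001; // DC CVAU, one of the EL0 cache ops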
public static bool IsCacheInstEl0(uint encoding)
{
(uint op1, uint crn, uint crm, uint op2) = UnpackOp1CRnCRmOp2(encoding);
return ((op1 << 11) | (crn << 7) | (crm << 3) | op2) switch
{
0b011_0111_0100_001 => true, // DC ZVA
0b011_0111_1010_001 => true, // DC CVAC
0b011_0111_1100_001 => true, // DC CVAP
0b011_0111_1011_001 => true, // DC CVAU
0b011_0111_1110_001 => true, // DC CIVAC
0b011_0111_0101_001 => true, // IC IVAU
_ => false,
};
}
public static bool IsCacheInstUciTrapped(uint encoding)
{
(uint op1, uint crn, uint crm, uint op2) = UnpackOp1CRnCRmOp2(encoding);
return ((op1 << 11) | (crn << 7) | (crm << 3) | op2) switch
{
0b011_0111_1010_001 => true, // DC CVAC
0b011_0111_1100_001 => true, // DC CVAP
0b011_0111_1011_001 => true, // DC CVAU
0b011_0111_1110_001 => true, // DC CIVAC
0b011_0111_0101_001 => true, // IC IVAU
_ => false,
};
}
}
}

View file

@@ -316,7 +316,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
uint pStateUseMask = multiBlock.GlobalUseMask.PStateMask;
CodeWriter writer = new();
RegisterAllocator regAlloc = new(gprUseMask, fpSimdUseMask, pStateUseMask, multiBlock.HasHostCall);
RegisterAllocator regAlloc = new(memoryManager.Type, gprUseMask, fpSimdUseMask, pStateUseMask, multiBlock.HasHostCall);
RegisterSaveRestore rsr = new(
regAlloc.AllGprMask & AbiConstants.GprCalleeSavedRegsMask,
regAlloc.AllFpSimdMask & AbiConstants.FpSimdCalleeSavedRegsMask,
@@ -350,11 +350,20 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
if (instInfo.AddressForm != AddressForm.None)
{
InstEmitMemory.RewriteInstruction(memoryManager.Type, writer, regAlloc, instInfo.Name, instInfo.Flags, instInfo.AddressForm, pc, encoding);
InstEmitMemory.RewriteInstruction(
memoryManager.AddressSpaceBits,
memoryManager.Type,
writer,
regAlloc,
instInfo.Name,
instInfo.Flags,
instInfo.AddressForm,
pc,
encoding);
}
else if (instInfo.Name == InstName.Sys)
{
InstEmitMemory.RewriteSysInstruction(memoryManager.Type, writer, regAlloc, encoding);
InstEmitMemory.RewriteSysInstruction(memoryManager.AddressSpaceBits, memoryManager.Type, writer, regAlloc, encoding);
}
else if (instInfo.Name.IsSystem())
{

View file

@@ -8,7 +8,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
{
static class Decoder
{
private const int MaxInstructionsPerBlock = 1000;
private const int MaxInstructionsPerFunction = 10000;
private const uint NzcvFlags = 0xfu << 28;
private const uint CFlag = 0x1u << 29;
@@ -22,10 +22,11 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
bool hasHostCall = false;
bool hasMemoryInstruction = false;
int totalInsts = 0;
while (true)
{
Block block = Decode(cpuPreset, memoryManager, address, ref useMask, ref hasHostCall, ref hasMemoryInstruction);
Block block = Decode(cpuPreset, memoryManager, address, ref totalInsts, ref useMask, ref hasHostCall, ref hasMemoryInstruction);
if (!block.IsTruncated && TryGetBranchTarget(block, out ulong targetAddress))
{
@@ -230,6 +231,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
CpuPreset cpuPreset,
IMemoryManager memoryManager,
ulong address,
ref int totalInsts,
ref RegisterMask useMask,
ref bool hasHostCall,
ref bool hasMemoryInstruction)
@@ -255,7 +257,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
(name, flags, AddressForm addressForm) = InstTable.GetInstNameAndFlags(encoding, cpuPreset.Version, cpuPreset.Features);
if (name.IsPrivileged())
if (name.IsPrivileged() || (name == InstName.Sys && IsPrivilegedSys(encoding)))
{
name = InstName.UdfPermUndef;
flags = InstFlags.None;
@@ -272,7 +274,8 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
uint tempGprUseMask = gprUseMask | instGprReadMask | instGprWriteMask;
if (CalculateAvailableTemps(tempGprUseMask) < CalculateRequiredGprTemps(tempGprUseMask) || insts.Count >= MaxInstructionsPerBlock)
if (CalculateAvailableTemps(tempGprUseMask) < CalculateRequiredGprTemps(memoryManager.Type, tempGprUseMask) ||
totalInsts++ >= MaxInstructionsPerFunction)
{
isTruncated = true;
address -= 4UL;
@@ -339,6 +342,11 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
return new(startAddress, address, insts, !isTruncated && !name.IsException(), isTruncated, isLoopEnd);
}
private static bool IsPrivilegedSys(uint encoding)
{
return !SysUtils.IsCacheInstEl0(encoding);
}
private static bool IsMrsNzcv(uint encoding)
{
return (encoding & ~0x1fu) == 0xd53b4200u;
@@ -371,9 +379,9 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
return false;
}
private static int CalculateRequiredGprTemps(uint gprUseMask)
private static int CalculateRequiredGprTemps(MemoryManagerType mmType, uint gprUseMask)
{
return BitOperations.PopCount(gprUseMask & RegisterUtils.ReservedRegsMask) + RegisterAllocator.MaxTempsInclFixed;
return BitOperations.PopCount(gprUseMask & RegisterUtils.ReservedRegsMask) + RegisterAllocator.CalculateMaxTempsInclFixed(mmType);
}
private static int CalculateAvailableTemps(uint gprUseMask)

View file

@@ -11,8 +11,16 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
private const uint XMask = 0x3f808000u;
private const uint XValue = 0x8000000u;
public static void RewriteSysInstruction(MemoryManagerType mmType, CodeWriter writer, RegisterAllocator regAlloc, uint encoding)
public static void RewriteSysInstruction(int asBits, MemoryManagerType mmType, CodeWriter writer, RegisterAllocator regAlloc, uint encoding)
{
// TODO: Handle the IC instruction; it should invalidate the JIT cache.
if (InstEmitSystem.IsCacheInstForbidden(encoding))
{
// The current OS does not allow cache maintenance instructions from user mode, so just do nothing.
return;
}
int rtIndex = RegisterUtils.ExtractRt(encoding);
if (rtIndex == RegisterUtils.ZrIndex)
{
@@ -27,7 +35,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
Assembler asm = new(writer);
WriteAddressTranslation(mmType, regAlloc, ref asm, rt, guestAddress);
WriteAddressTranslation(asBits, mmType, regAlloc, ref asm, rt, guestAddress);
encoding = RegisterUtils.ReplaceRt(encoding, tempRegister);
@@ -37,6 +45,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
}
public static void RewriteInstruction(
int asBits,
MemoryManagerType mmType,
CodeWriter writer,
RegisterAllocator regAlloc,
@@ -46,22 +55,32 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
ulong pc,
uint encoding)
{
if (name.IsPrefetchMemory() && mmType == MemoryManagerType.HostTrackedUnsafe)
{
// Prefetches to invalid addresses do not cause faults, so for memory manager
// types where we need to access the page table before doing the prefetch,
// we should make sure we won't try to access an out of bounds page table region.
// To do this, we force the masked memory manager variant to be used.
mmType = MemoryManagerType.HostTracked;
}
switch (addressForm)
{
case AddressForm.OffsetReg:
RewriteOffsetRegMemoryInstruction(mmType, writer, regAlloc, flags, encoding);
RewriteOffsetRegMemoryInstruction(asBits, mmType, writer, regAlloc, flags, encoding);
break;
case AddressForm.PostIndexed:
RewritePostIndexedMemoryInstruction(mmType, writer, regAlloc, flags, encoding);
RewritePostIndexedMemoryInstruction(asBits, mmType, writer, regAlloc, flags, encoding);
break;
case AddressForm.PreIndexed:
RewritePreIndexedMemoryInstruction(mmType, writer, regAlloc, flags, encoding);
RewritePreIndexedMemoryInstruction(asBits, mmType, writer, regAlloc, flags, encoding);
break;
case AddressForm.SignedScaled:
RewriteSignedScaledMemoryInstruction(mmType, writer, regAlloc, flags, encoding);
RewriteSignedScaledMemoryInstruction(asBits, mmType, writer, regAlloc, flags, encoding);
break;
case AddressForm.UnsignedScaled:
RewriteUnsignedScaledMemoryInstruction(mmType, writer, regAlloc, flags, encoding);
RewriteUnsignedScaledMemoryInstruction(asBits, mmType, writer, regAlloc, flags, encoding);
break;
case AddressForm.BaseRegister:
// Some applications use unordered memory instructions in places where
@@ -74,19 +93,19 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
encoding |= 1u << 15;
}
RewriteBaseRegisterMemoryInstruction(mmType, writer, regAlloc, encoding);
RewriteBaseRegisterMemoryInstruction(asBits, mmType, writer, regAlloc, encoding);
break;
case AddressForm.StructNoOffset:
RewriteBaseRegisterMemoryInstruction(mmType, writer, regAlloc, encoding);
RewriteBaseRegisterMemoryInstruction(asBits, mmType, writer, regAlloc, encoding);
break;
case AddressForm.BasePlusOffset:
RewriteBasePlusOffsetMemoryInstruction(mmType, writer, regAlloc, encoding);
RewriteBasePlusOffsetMemoryInstruction(asBits, mmType, writer, regAlloc, encoding);
break;
case AddressForm.Literal:
RewriteLiteralMemoryInstruction(mmType, writer, regAlloc, name, pc, encoding);
RewriteLiteralMemoryInstruction(asBits, mmType, writer, regAlloc, name, pc, encoding);
break;
case AddressForm.StructPostIndexedReg:
RewriteStructPostIndexedRegMemoryInstruction(mmType, writer, regAlloc, encoding);
RewriteStructPostIndexedRegMemoryInstruction(asBits, mmType, writer, regAlloc, encoding);
break;
default:
writer.WriteInstruction(encoding);
@@ -94,7 +113,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
}
}
private static void RewriteOffsetRegMemoryInstruction(MemoryManagerType mmType, CodeWriter writer, RegisterAllocator regAlloc, InstFlags flags, uint encoding)
private static void RewriteOffsetRegMemoryInstruction(int asBits, MemoryManagerType mmType, CodeWriter writer, RegisterAllocator regAlloc, InstFlags flags, uint encoding)
{
// TODO: Some unallocated encoding cases.
@@ -118,7 +137,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
asm.Add(rn, guestAddress, guestOffset, extensionType, shift);
WriteAddressTranslation(mmType, regAlloc, ref asm, rn, rn);
WriteAddressTranslation(asBits, mmType, regAlloc, ref asm, rn, rn);
encoding = RegisterUtils.ReplaceRn(encoding, tempRegister);
encoding = (encoding & ~(0xfffu << 10)) | (1u << 24); // Register -> Unsigned offset
@@ -128,7 +147,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
regAlloc.FreeTempGprRegister(tempRegister);
}
private static void RewritePostIndexedMemoryInstruction(MemoryManagerType mmType, CodeWriter writer, RegisterAllocator regAlloc, InstFlags flags, uint encoding)
private static void RewritePostIndexedMemoryInstruction(int asBits, MemoryManagerType mmType, CodeWriter writer, RegisterAllocator regAlloc, InstFlags flags, uint encoding)
{
bool isPair = flags.HasFlag(InstFlags.Rt2);
int imm = isPair ? ExtractSImm7Scaled(flags, encoding) : ExtractSImm9(encoding);
@@ -139,7 +158,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
Assembler asm = new(writer);
WriteAddressTranslation(mmType, regAlloc, ref asm, rn, guestAddress);
WriteAddressTranslation(asBits, mmType, regAlloc, ref asm, rn, guestAddress);
encoding = RegisterUtils.ReplaceRn(encoding, tempRegister);
@@ -162,7 +181,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
regAlloc.FreeTempGprRegister(tempRegister);
}
private static void RewritePreIndexedMemoryInstruction(MemoryManagerType mmType, CodeWriter writer, RegisterAllocator regAlloc, InstFlags flags, uint encoding)
private static void RewritePreIndexedMemoryInstruction(int asBits, MemoryManagerType mmType, CodeWriter writer, RegisterAllocator regAlloc, InstFlags flags, uint encoding)
{
bool isPair = flags.HasFlag(InstFlags.Rt2);
int imm = isPair ? ExtractSImm7Scaled(flags, encoding) : ExtractSImm9(encoding);
@@ -174,7 +193,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
Assembler asm = new(writer);
WriteAddConstant(ref asm, guestAddress, guestAddress, imm);
WriteAddressTranslation(mmType, regAlloc, ref asm, rn, guestAddress);
WriteAddressTranslation(asBits, mmType, regAlloc, ref asm, rn, guestAddress);
encoding = RegisterUtils.ReplaceRn(encoding, tempRegister);
@@ -195,27 +214,27 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
regAlloc.FreeTempGprRegister(tempRegister);
}
private static void RewriteSignedScaledMemoryInstruction(MemoryManagerType mmType, CodeWriter writer, RegisterAllocator regAlloc, InstFlags flags, uint encoding)
private static void RewriteSignedScaledMemoryInstruction(int asBits, MemoryManagerType mmType, CodeWriter writer, RegisterAllocator regAlloc, InstFlags flags, uint encoding)
{
RewriteMemoryInstruction(mmType, writer, regAlloc, encoding, ExtractSImm7Scaled(flags, encoding), 0x7fu << 15);
RewriteMemoryInstruction(asBits, mmType, writer, regAlloc, encoding, ExtractSImm7Scaled(flags, encoding), 0x7fu << 15);
}
private static void RewriteUnsignedScaledMemoryInstruction(MemoryManagerType mmType, CodeWriter writer, RegisterAllocator regAlloc, InstFlags flags, uint encoding)
private static void RewriteUnsignedScaledMemoryInstruction(int asBits, MemoryManagerType mmType, CodeWriter writer, RegisterAllocator regAlloc, InstFlags flags, uint encoding)
{
RewriteMemoryInstruction(mmType, writer, regAlloc, encoding, ExtractUImm12Scaled(flags, encoding), 0xfffu << 10);
RewriteMemoryInstruction(asBits, mmType, writer, regAlloc, encoding, ExtractUImm12Scaled(flags, encoding), 0xfffu << 10);
}
private static void RewriteBaseRegisterMemoryInstruction(MemoryManagerType mmType, CodeWriter writer, RegisterAllocator regAlloc, uint encoding)
private static void RewriteBaseRegisterMemoryInstruction(int asBits, MemoryManagerType mmType, CodeWriter writer, RegisterAllocator regAlloc, uint encoding)
{
RewriteMemoryInstruction(mmType, writer, regAlloc, encoding, 0, 0u);
RewriteMemoryInstruction(asBits, mmType, writer, regAlloc, encoding, 0, 0u);
}
private static void RewriteBasePlusOffsetMemoryInstruction(MemoryManagerType mmType, CodeWriter writer, RegisterAllocator regAlloc, uint encoding)
private static void RewriteBasePlusOffsetMemoryInstruction(int asBits, MemoryManagerType mmType, CodeWriter writer, RegisterAllocator regAlloc, uint encoding)
{
RewriteMemoryInstruction(mmType, writer, regAlloc, encoding, ExtractSImm9(encoding), 0x1ffu << 12);
RewriteMemoryInstruction(asBits, mmType, writer, regAlloc, encoding, ExtractSImm9(encoding), 0x1ffu << 12);
}
private static void RewriteMemoryInstruction(MemoryManagerType mmType, CodeWriter writer, RegisterAllocator regAlloc, uint encoding, int imm, uint immMask)
private static void RewriteMemoryInstruction(int asBits, MemoryManagerType mmType, CodeWriter writer, RegisterAllocator regAlloc, uint encoding, int imm, uint immMask)
{
int tempRegister = regAlloc.AllocateTempGprRegister();
Operand rn = new(tempRegister, RegisterType.Integer, OperandType.I64);
@@ -229,7 +248,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
imm = 0;
}
WriteAddressTranslation(mmType, regAlloc, ref asm, rn, guestAddress, imm);
WriteAddressTranslation(asBits, mmType, regAlloc, ref asm, rn, guestAddress, imm);
encoding = RegisterUtils.ReplaceRn(encoding, tempRegister);
@@ -243,7 +262,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
regAlloc.FreeTempGprRegister(tempRegister);
}
private static void RewriteLiteralMemoryInstruction(MemoryManagerType mmType, CodeWriter writer, RegisterAllocator regAlloc, InstName name, ulong pc, uint encoding)
private static void RewriteLiteralMemoryInstruction(int asBits, MemoryManagerType mmType, CodeWriter writer, RegisterAllocator regAlloc, InstName name, ulong pc, uint encoding)
{
Assembler asm = new(writer);
@@ -308,7 +327,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
int tempRegister = regAlloc.AllocateTempGprRegister();
Operand rn = new(tempRegister, RegisterType.Integer, OperandType.I64);
WriteAddressTranslation(mmType, regAlloc, ref asm, rn, targetAddress);
WriteAddressTranslation(asBits, mmType, regAlloc, ref asm, rn, targetAddress);
switch (name)
{
@@ -332,7 +351,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
}
}
private static void RewriteStructPostIndexedRegMemoryInstruction(MemoryManagerType mmType, CodeWriter writer, RegisterAllocator regAlloc, uint encoding)
private static void RewriteStructPostIndexedRegMemoryInstruction(int asBits, MemoryManagerType mmType, CodeWriter writer, RegisterAllocator regAlloc, uint encoding)
{
// TODO: Some unallocated encoding cases.
@@ -344,7 +363,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
Assembler asm = new(writer);
WriteAddressTranslation(mmType, regAlloc, ref asm, rn, guestAddress);
WriteAddressTranslation(asBits, mmType, regAlloc, ref asm, rn, guestAddress);
encoding = RegisterUtils.ReplaceRn(encoding, tempRegister);
encoding &= ~((0x1fu << 16) | (1u << 23)); // Post-index -> No offset
@ -471,6 +490,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
}
private static void WriteAddressTranslation(
int asBits,
MemoryManagerType mmType,
RegisterAllocator regAlloc,
ref Assembler asm,
@ -498,34 +518,58 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
guestAddress = destination;
}
WriteAddressTranslation(mmType, regAlloc, ref asm, destination, guestAddress);
WriteAddressTranslation(asBits, mmType, regAlloc, ref asm, destination, guestAddress);
}
private static void WriteAddressTranslation(MemoryManagerType mmType, RegisterAllocator regAlloc, ref Assembler asm, Operand destination, ulong guestAddress)
private static void WriteAddressTranslation(
int asBits,
MemoryManagerType mmType,
RegisterAllocator regAlloc,
ref Assembler asm,
Operand destination,
ulong guestAddress)
{
asm.Mov(destination, guestAddress);
WriteAddressTranslation(mmType, regAlloc, ref asm, destination, destination);
WriteAddressTranslation(asBits, mmType, regAlloc, ref asm, destination, destination);
}
private static void WriteAddressTranslation(MemoryManagerType mmType, RegisterAllocator regAlloc, ref Assembler asm, Operand destination, Operand guestAddress)
private static void WriteAddressTranslation(
int asBits,
MemoryManagerType mmType,
RegisterAllocator regAlloc,
ref Assembler asm,
Operand destination,
Operand guestAddress)
{
Operand basePointer = new(regAlloc.FixedPageTableRegister, RegisterType.Integer, OperandType.I64);
if (mmType == MemoryManagerType.HostTracked)
if (mmType.IsHostTracked())
{
int tempRegister = regAlloc.AllocateTempGprRegister();
Operand pte = new(tempRegister, RegisterType.Integer, OperandType.I64);
asm.Lsr(pte, guestAddress, new Operand(OperandKind.Constant, OperandType.I32, 12));
if (mmType == MemoryManagerType.HostTracked)
{
asm.And(pte, pte, new Operand(OperandKind.Constant, OperandType.I64, ulong.MaxValue >> (64 - (asBits - 12))));
}
asm.LdrRr(pte, basePointer, pte, ArmExtensionType.Uxtx, true);
asm.Add(destination, pte, guestAddress);
regAlloc.FreeTempGprRegister(tempRegister);
}
else if (mmType == MemoryManagerType.HostMapped || mmType == MemoryManagerType.HostMappedUnsafe)
else if (mmType.IsHostMapped())
{
if (mmType == MemoryManagerType.HostMapped)
{
asm.And(destination, guestAddress, new Operand(OperandKind.Constant, OperandType.I64, ulong.MaxValue >> (64 - asBits)));
guestAddress = destination;
}
asm.Add(destination, basePointer, guestAddress);
}
else
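// Illustrative sketch of the address computation the two branches above emit,
// written as plain C# (hypothetical names; the real code generates Arm64
// instructions through the Assembler, and the unsafe memory manager variants
// skip the masking):
static unsafe ulong TranslateSketch(ulong guestVa, int asBits, bool hostTracked, ulong* pageTable, ulong hostBase)
{
    if (hostTracked)
    {
        // Index a flat table of 8-byte entries by guest page number, masked
        // to the address-space size; the loaded entry plus the guest VA
        // yields the host VA.
        ulong index = (guestVa >> 12) & (ulong.MaxValue >> (64 - (asBits - 12)));
        return pageTable[index] + guestVa;
    }

    // Host mapped: the guest address space is one contiguous host region.
    return hostBase + (guestVa & (ulong.MaxValue >> (64 - asBits)));
}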


@ -69,7 +69,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
asm.LdrRiUn(Register((int)rd), Register(regAlloc.FixedContextRegister), NativeContextOffsets.TpidrEl0Offset);
}
}
else if ((encoding & ~0x1f) == 0xd53b0020 && (OperatingSystem.IsMacOS() || OperatingSystem.IsIOS())) // mrs x0, ctr_el0
else if ((encoding & ~0x1f) == 0xd53b0020 && IsCtrEl0AccessForbidden()) // mrs x0, ctr_el0
{
uint rd = encoding & 0x1f;
@ -115,7 +115,7 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
{
return true;
}
else if ((encoding & ~0x1f) == 0xd53b0020 && (OperatingSystem.IsMacOS() || OperatingSystem.IsIOS())) // mrs x0, ctr_el0
else if ((encoding & ~0x1f) == 0xd53b0020 && IsCtrEl0AccessForbidden()) // mrs x0, ctr_el0
{
return true;
}
@ -127,6 +127,18 @@ namespace Ryujinx.Cpu.LightningJit.Arm64.Target.Arm64
return false;
}
private static bool IsCtrEl0AccessForbidden()
{
// Only Linux allows accessing CTR_EL0 from user mode.
return OperatingSystem.IsWindows() || OperatingSystem.IsMacOS() || OperatingSystem.IsIOS();
}
public static bool IsCacheInstForbidden(uint encoding)
{
// Windows does not allow the cache maintenance instructions to be used from user mode.
return OperatingSystem.IsWindows() && SysUtils.IsCacheInstUciTrapped(encoding);
}
public static bool NeedsContextStoreLoad(InstName name)
{
return name == InstName.Svc;


@ -8,7 +8,6 @@ namespace Ryujinx.Cpu.LightningJit.Cache
public int Offset { get; }
public int Size { get; }
public CacheEntry(int offset, int size)
{
Offset = offset;


@ -10,7 +10,7 @@ namespace Ryujinx.Cpu.LightningJit.Cache
class NoWxCache : IDisposable
{
private const int CodeAlignment = 4; // Bytes.
private const int SharedCacheSize = 192 * 1024 * 1024;
private const int SharedCacheSize = 2047 * 1024 * 1024;
private const int LocalCacheSize = 128 * 1024 * 1024;
// Number of calls to the same function we allow before we pad the shared cache to force the function to become available there
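// Arithmetic note on the new constant above: 2047 * 1024 * 1024 evaluates to
// 2,146,435,072, which still fits in Int32 (int.MaxValue is 2,147,483,647).
// A full 2048 MiB cannot be written this way:
//
//     private const int SharedCacheSize = 2047 * 1024 * 1024; // OK
//     private const int TooBig = 2048 * 1024 * 1024; // error CS0220: overflows at compile time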


@ -12,7 +12,7 @@ namespace Ryujinx.Cpu.LightningJit
public LightningJitCpuContext(ITickSource tickSource, IMemoryManager memory, bool for64Bit)
{
_tickSource = tickSource;
_translator = new Translator(new JitMemoryAllocator(forJit: true), memory, for64Bit);
_translator = new Translator(memory, for64Bit);
memory.UnmapEvent += UnmapHandler;
}


@ -1,10 +1,11 @@
using ARMeilleure.Common;
using ARMeilleure.Memory;
using ARMeilleure.Signal;
using Ryujinx.Cpu.Jit;
using Ryujinx.Cpu.LightningJit.Cache;
using Ryujinx.Cpu.LightningJit.CodeGen.Arm64;
using Ryujinx.Cpu.LightningJit.State;
using Ryujinx.Cpu.Signal;
using Ryujinx.Memory;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
@ -46,7 +47,7 @@ namespace Ryujinx.Cpu.LightningJit
internal TranslatorStubs Stubs { get; }
internal IMemoryManager Memory { get; }
public Translator(IJitMemoryAllocator allocator, IMemoryManager memory, bool for64Bits)
public Translator(IMemoryManager memory, bool for64Bits)
{
Memory = memory;
@ -58,22 +59,18 @@ namespace Ryujinx.Cpu.LightningJit
}
else
{
JitCache.Initialize(allocator);
JitCache.Initialize(new JitMemoryAllocator(forJit: true));
}
NativeSignalHandler.Initialize(allocator);
Functions = new TranslatorCache<TranslatedFunction>();
FunctionTable = new AddressTable<ulong>(for64Bits ? _levels64Bit : _levels32Bit);
Stubs = new TranslatorStubs(FunctionTable, _noWxCache);
FunctionTable.Fill = (ulong)Stubs.SlowDispatchStub;
if (memory.Type == MemoryManagerType.HostTracked ||
memory.Type == MemoryManagerType.HostMapped ||
memory.Type == MemoryManagerType.HostMappedUnsafe)
if (memory.Type.IsHostMappedOrTracked())
{
NativeSignalHandler.InitializeSignalHandler(allocator.GetPageSize());
NativeSignalHandler.InitializeSignalHandler();
}
}
@ -141,7 +138,7 @@ namespace Ryujinx.Cpu.LightningJit
}
}
internal TranslatedFunction Translate(ulong address, ExecutionMode mode)
private TranslatedFunction Translate(ulong address, ExecutionMode mode)
{
CompiledFunction func = Compile(address, mode);
IntPtr funcPointer = JitCache.Map(func.Code);
@ -149,7 +146,7 @@ namespace Ryujinx.Cpu.LightningJit
return new TranslatedFunction(funcPointer, (ulong)func.GuestCodeLength);
}
internal CompiledFunction Compile(ulong address, ExecutionMode mode)
private CompiledFunction Compile(ulong address, ExecutionMode mode)
{
return AarchCompiler.Compile(CpuPresets.CortexA57, Memory, address, FunctionTable, Stubs.DispatchStub, mode, RuntimeInformation.ProcessArchitecture);
}


@ -0,0 +1,389 @@
using Ryujinx.Memory;
using Ryujinx.Memory.Tracking;
using System;
using System.Runtime.CompilerServices;
using System.Threading;
namespace Ryujinx.Cpu
{
/// <summary>
/// A page bitmap that keeps track of mapped state and tracking protection
/// for managed memory accesses (not using host page protection).
/// </summary>
internal readonly struct ManagedPageFlags
{
public const int PageBits = 12;
public const int PageSize = 1 << PageBits;
public const int PageMask = PageSize - 1;
private readonly ulong[] _pageBitmap;
public const int PageToPteShift = 5; // 32 pages (2 bits each) in one ulong page table entry.
public const ulong BlockMappedMask = 0x5555555555555555; // First bit of each table entry set.
private enum ManagedPtBits : ulong
{
Unmapped = 0,
Mapped,
WriteTracked,
ReadWriteTracked,
MappedReplicated = 0x5555555555555555,
WriteTrackedReplicated = 0xaaaaaaaaaaaaaaaa,
ReadWriteTrackedReplicated = ulong.MaxValue,
}
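// The *Replicated values are the single-entry tags broadcast across all
// 32 two-bit slots of a ulong; a hypothetical helper showing the relationship:
private static ulong ReplicateTag(ulong twoBitTag)
{
    ulong result = 0;
    for (int i = 0; i < 64; i += 2)
    {
        result |= twoBitTag << i; // Mapped (01) -> 0x5555..., WriteTracked (10) -> 0xAAAA...
    }
    return result;
}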
public ManagedPageFlags(int addressSpaceBits)
{
int bits = Math.Max(0, addressSpaceBits - (PageBits + PageToPteShift));
_pageBitmap = new ulong[1 << bits];
}
/// <summary>
/// Computes the number of pages in a virtual address range.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range</param>
/// <param name="startVa">The virtual address of the beginning of the first page</param>
/// <remarks>This function does not differentiate between allocated and unallocated pages.</remarks>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static int GetPagesCount(ulong va, ulong size, out ulong startVa)
{
// WARNING: Always check if ulong does not overflow during the operations.
startVa = va & ~(ulong)PageMask;
ulong vaSpan = (va - startVa + size + PageMask) & ~(ulong)PageMask;
return (int)(vaSpan / PageSize);
}
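// Worked example of the rounding above (hypothetical values): a 2-byte access
// at va = 0x1FFF straddles a page boundary, so it spans 2 pages.
//   startVa = 0x1FFF & ~0xFFFul                = 0x1000
//   vaSpan  = (0xFFF + 0x2 + 0xFFF) & ~0xFFFul = 0x2000
//   pages   = 0x2000 / 0x1000                  = 2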
/// <summary>
/// Checks if the page at a given CPU virtual address is mapped.
/// </summary>
/// <param name="va">Virtual address to check</param>
/// <returns>True if the address is mapped, false otherwise</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public readonly bool IsMapped(ulong va)
{
ulong page = va >> PageBits;
int bit = (int)((page & 31) << 1);
int pageIndex = (int)(page >> PageToPteShift);
ref ulong pageRef = ref _pageBitmap[pageIndex];
ulong pte = Volatile.Read(ref pageRef);
return ((pte >> bit) & 3) != 0;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static void GetPageBlockRange(ulong pageStart, ulong pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex)
{
startMask = ulong.MaxValue << ((int)(pageStart & 31) << 1);
endMask = ulong.MaxValue >> (64 - ((int)(pageEnd & 31) << 1));
pageIndex = (int)(pageStart >> PageToPteShift);
pageEndIndex = (int)((pageEnd - 1) >> PageToPteShift);
}
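// Worked example (hypothetical values): pageStart = 3, pageEnd = 40 covers
// pages 3..39 inclusive.
//   startMask    = ulong.MaxValue << 6   (skips the 2-bit entries for pages 0..2)
//   endMask      = ulong.MaxValue >> 48  (keeps the entries for pages 32..39)
//   pageIndex    = 3 >> 5  = 0
//   pageEndIndex = 39 >> 5 = 1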
/// <summary>
/// Checks if a memory range is mapped.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range in bytes</param>
/// <returns>True if the entire range is mapped, false otherwise</returns>
public readonly bool IsRangeMapped(ulong va, ulong size)
{
int pages = GetPagesCount(va, size, out _);
if (pages == 1)
{
return IsMapped(va);
}
ulong pageStart = va >> PageBits;
ulong pageEnd = pageStart + (ulong)pages;
GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
// Check if either bit in each 2-bit page entry is set.
// OR the block with itself shifted down by 1, and check the first bit of each entry.
ulong mask = BlockMappedMask & startMask;
while (pageIndex <= pageEndIndex)
{
if (pageIndex == pageEndIndex)
{
mask &= endMask;
}
ref ulong pageRef = ref _pageBitmap[pageIndex++];
ulong pte = Volatile.Read(ref pageRef);
pte |= pte >> 1;
if ((pte & mask) != mask)
{
return false;
}
mask = BlockMappedMask;
}
return true;
}
/// <summary>
/// Reprotect a region of virtual memory for tracking.
/// </summary>
/// <param name="va">Virtual address base</param>
/// <param name="size">Size of the region to protect</param>
/// <param name="protection">Memory protection to set</param>
public readonly void TrackingReprotect(ulong va, ulong size, MemoryPermission protection)
{
// Protection is inverted on software pages, since the default value is 0.
protection = (~protection) & MemoryPermission.ReadAndWrite;
int pages = GetPagesCount(va, size, out va);
ulong pageStart = va >> PageBits;
if (pages == 1)
{
ulong protTag = protection switch
{
MemoryPermission.None => (ulong)ManagedPtBits.Mapped,
MemoryPermission.Write => (ulong)ManagedPtBits.WriteTracked,
_ => (ulong)ManagedPtBits.ReadWriteTracked,
};
int bit = (int)((pageStart & 31) << 1);
ulong tagMask = 3UL << bit;
ulong invTagMask = ~tagMask;
ulong tag = protTag << bit;
int pageIndex = (int)(pageStart >> PageToPteShift);
ref ulong pageRef = ref _pageBitmap[pageIndex];
ulong pte;
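// Retry the CAS until it lands; bail out without writing if the page is
// not mapped (tag == 0), since only mapped pages carry tracking state.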
do
{
pte = Volatile.Read(ref pageRef);
}
while ((pte & tagMask) != 0 && Interlocked.CompareExchange(ref pageRef, (pte & invTagMask) | tag, pte) != pte);
}
else
{
ulong pageEnd = pageStart + (ulong)pages;
GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
ulong mask = startMask;
ulong protTag = protection switch
{
MemoryPermission.None => (ulong)ManagedPtBits.MappedReplicated,
MemoryPermission.Write => (ulong)ManagedPtBits.WriteTrackedReplicated,
_ => (ulong)ManagedPtBits.ReadWriteTrackedReplicated,
};
while (pageIndex <= pageEndIndex)
{
if (pageIndex == pageEndIndex)
{
mask &= endMask;
}
ref ulong pageRef = ref _pageBitmap[pageIndex++];
ulong pte;
ulong mappedMask;
// Change the protection of all 2-bit entries that are mapped.
do
{
pte = Volatile.Read(ref pageRef);
mappedMask = pte | (pte >> 1);
mappedMask |= (mappedMask & BlockMappedMask) << 1;
mappedMask &= mask; // Only update mapped pages within the given range.
}
while (Interlocked.CompareExchange(ref pageRef, (pte & (~mappedMask)) | (protTag & mappedMask), pte) != pte);
mask = ulong.MaxValue;
}
}
}
/// <summary>
/// Alerts the memory tracking that a given region has been read from or written to.
/// This should be called before read/write is performed.
/// </summary>
/// <param name="tracking">Memory tracking structure to call when pages are protected</param>
/// <param name="va">Virtual address of the region</param>
/// <param name="size">Size of the region</param>
/// <param name="write">True if the region was written, false if read</param>
/// <param name="exemptId">Optional ID of the handles that should not be signalled</param>
/// <remarks>
/// This function also validates that the given range is both valid and mapped, and will throw if it is not.
/// </remarks>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public readonly void SignalMemoryTracking(MemoryTracking tracking, ulong va, ulong size, bool write, int? exemptId = null)
{
// Software table, used for managed memory tracking.
int pages = GetPagesCount(va, size, out _);
ulong pageStart = va >> PageBits;
if (pages == 1)
{
ulong tag = (ulong)(write ? ManagedPtBits.WriteTracked : ManagedPtBits.ReadWriteTracked);
int bit = (int)((pageStart & 31) << 1);
int pageIndex = (int)(pageStart >> PageToPteShift);
ref ulong pageRef = ref _pageBitmap[pageIndex];
ulong pte = Volatile.Read(ref pageRef);
ulong state = ((pte >> bit) & 3);
if (state >= tag)
{
tracking.VirtualMemoryEvent(va, size, write, precise: false, exemptId);
return;
}
else if (state == 0)
{
ThrowInvalidMemoryRegionException($"Not mapped: va=0x{va:X16}, size=0x{size:X16}");
}
}
else
{
ulong pageEnd = pageStart + (ulong)pages;
GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
ulong mask = startMask;
ulong anyTrackingTag = (ulong)ManagedPtBits.WriteTrackedReplicated;
while (pageIndex <= pageEndIndex)
{
if (pageIndex == pageEndIndex)
{
mask &= endMask;
}
ref ulong pageRef = ref _pageBitmap[pageIndex++];
ulong pte = Volatile.Read(ref pageRef);
ulong mappedMask = mask & BlockMappedMask;
ulong mappedPte = pte | (pte >> 1);
if ((mappedPte & mappedMask) != mappedMask)
{
ThrowInvalidMemoryRegionException($"Not mapped: va=0x{va:X16}, size=0x{size:X16}");
}
pte &= mask;
if ((pte & anyTrackingTag) != 0) // Search for any tracking.
{
// Writes trigger any tracking.
// Only trigger tracking from reads if both bits are set on any page.
if (write || (pte & (pte >> 1) & BlockMappedMask) != 0)
{
tracking.VirtualMemoryEvent(va, size, write, precise: false, exemptId);
break;
}
}
mask = ulong.MaxValue;
}
}
}
/// <summary>
/// Adds the given address mapping to the page table.
/// </summary>
/// <param name="va">Virtual memory address</param>
/// <param name="size">Size to be mapped</param>
public readonly void AddMapping(ulong va, ulong size)
{
int pages = GetPagesCount(va, size, out _);
ulong pageStart = va >> PageBits;
ulong pageEnd = pageStart + (ulong)pages;
GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
ulong mask = startMask;
while (pageIndex <= pageEndIndex)
{
if (pageIndex == pageEndIndex)
{
mask &= endMask;
}
ref ulong pageRef = ref _pageBitmap[pageIndex++];
ulong pte;
ulong mappedMask;
// Map all 2-bit entries that are unmapped.
do
{
pte = Volatile.Read(ref pageRef);
mappedMask = pte | (pte >> 1);
mappedMask |= (mappedMask & BlockMappedMask) << 1;
mappedMask |= ~mask; // Treat everything outside the range as mapped, thus unchanged.
}
while (Interlocked.CompareExchange(ref pageRef, (pte & mappedMask) | (BlockMappedMask & (~mappedMask)), pte) != pte);
mask = ulong.MaxValue;
}
}
/// <summary>
/// Removes the given address mapping from the page table.
/// </summary>
/// <param name="va">Virtual memory address</param>
/// <param name="size">Size to be unmapped</param>
public readonly void RemoveMapping(ulong va, ulong size)
{
int pages = GetPagesCount(va, size, out _);
ulong pageStart = va >> PageBits;
ulong pageEnd = pageStart + (ulong)pages;
GetPageBlockRange(pageStart, pageEnd, out ulong startMask, out ulong endMask, out int pageIndex, out int pageEndIndex);
startMask = ~startMask;
endMask = ~endMask;
ulong mask = startMask;
while (pageIndex <= pageEndIndex)
{
if (pageIndex == pageEndIndex)
{
mask |= endMask;
}
ref ulong pageRef = ref _pageBitmap[pageIndex++];
ulong pte;
do
{
pte = Volatile.Read(ref pageRef);
}
while (Interlocked.CompareExchange(ref pageRef, pte & mask, pte) != pte);
mask = 0;
}
}
private static void ThrowInvalidMemoryRegionException(string message) => throw new InvalidMemoryRegionException(message);
}
}
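Taken together, a minimal usage sketch for the new struct (ManagedPageFlags is internal to Ryujinx.Cpu, and the addresses here are hypothetical):

var flags = new ManagedPageFlags(addressSpaceBits: 39);

flags.AddMapping(0x8000_0000, 0x10000); // Map 16 pages.
bool mapped = flags.IsRangeMapped(0x8000_0000, 0x1000); // True.

// Protection is inverted internally, so passing Read leaves writes trapped
// (the pages become WriteTracked).
flags.TrackingReprotect(0x8000_0000, 0x10000, MemoryPermission.Read);

flags.RemoveMapping(0x8000_0000, 0x10000);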


@ -1,4 +1,5 @@
using ARMeilleure.Signal;
using Ryujinx.Common;
using Ryujinx.Cpu.Signal;
using Ryujinx.Memory;
using Ryujinx.Memory.Tracking;
using System;
@ -8,10 +9,13 @@ namespace Ryujinx.Cpu
{
public class MemoryEhMeilleure : IDisposable
{
public delegate bool TrackingEventDelegate(ulong address, ulong size, bool write);
public delegate ulong TrackingEventDelegate(ulong address, ulong size, bool write);
private readonly MemoryTracking _tracking;
private readonly TrackingEventDelegate _trackingEvent;
private readonly ulong _pageSize;
private readonly ulong _baseAddress;
private readonly ulong _mirrorAddress;
@ -21,7 +25,10 @@ namespace Ryujinx.Cpu
ulong endAddress = _baseAddress + addressSpace.Size;
_trackingEvent = trackingEvent ?? tracking.VirtualMemoryEvent;
_tracking = tracking;
_trackingEvent = trackingEvent ?? VirtualMemoryEvent;
_pageSize = MemoryBlock.GetPageSize();
bool added = NativeSignalHandler.AddTrackedRegion((nuint)_baseAddress, (nuint)endAddress, Marshal.GetFunctionPointerForDelegate(_trackingEvent));
@ -48,6 +55,21 @@ namespace Ryujinx.Cpu
}
}
private ulong VirtualMemoryEvent(ulong address, ulong size, bool write)
{
ulong pageSize = _pageSize;
ulong addressAligned = BitUtils.AlignDown(address, pageSize);
ulong endAddressAligned = BitUtils.AlignUp(address + size, pageSize);
ulong sizeAligned = endAddressAligned - addressAligned;
if (_tracking.VirtualMemoryEvent(addressAligned, sizeAligned, write))
{
return _baseAddress + address;
}
return 0;
}
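// Worked example of the widening above, assuming 16 KiB host pages (as on
// Apple silicon): address = 0x12345, size = 0x10, pageSize = 0x4000.
//   addressAligned    = AlignDown(0x12345, 0x4000) = 0x10000
//   endAddressAligned = AlignUp(0x12355, 0x4000)   = 0x14000
//   sizeAligned       = 0x14000 - 0x10000          = 0x4000 (one host page)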
public void Dispose()
{
GC.SuppressFinalize(this);


@ -0,0 +1,184 @@
using ARMeilleure.Signal;
using Ryujinx.Common;
using Ryujinx.Memory;
using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
namespace Ryujinx.Cpu.Signal
{
[StructLayout(LayoutKind.Sequential, Pack = 1)]
struct SignalHandlerRange
{
public int IsActive;
public nuint RangeAddress;
public nuint RangeEndAddress;
public IntPtr ActionPointer;
}
[InlineArray(NativeSignalHandlerGenerator.MaxTrackedRanges)]
struct SignalHandlerRangeArray
{
public SignalHandlerRange Range0;
}
[StructLayout(LayoutKind.Sequential, Pack = 1)]
struct SignalHandlerConfig
{
/// <summary>
/// The byte offset of the faulting address in the SigInfo or ExceptionRecord struct.
/// </summary>
public int StructAddressOffset;
/// <summary>
/// The byte offset of the write flag in the SigInfo or ExceptionRecord struct.
/// </summary>
public int StructWriteOffset;
/// <summary>
/// The sigaction handler that was registered before this one. (unix only)
/// </summary>
public nuint UnixOldSigaction;
/// <summary>
/// The type of the previous sigaction. True for the 3 argument variant. (unix only)
/// </summary>
public int UnixOldSigaction3Arg;
/// <summary>
/// Fixed size array of tracked ranges.
/// </summary>
public SignalHandlerRangeArray Ranges;
}
static class NativeSignalHandler
{
private static readonly IntPtr _handlerConfig;
private static IntPtr _signalHandlerPtr;
private static MemoryBlock _codeBlock;
private static readonly object _lock = new();
private static bool _initialized;
static NativeSignalHandler()
{
_handlerConfig = Marshal.AllocHGlobal(Unsafe.SizeOf<SignalHandlerConfig>());
ref SignalHandlerConfig config = ref GetConfigRef();
config = new SignalHandlerConfig();
}
public static void InitializeSignalHandler(Func<IntPtr, IntPtr, IntPtr> customSignalHandlerFactory = null)
{
if (_initialized)
{
return;
}
lock (_lock)
{
if (_initialized)
{
return;
}
int rangeStructSize = Unsafe.SizeOf<SignalHandlerRange>();
ref SignalHandlerConfig config = ref GetConfigRef();
if (OperatingSystem.IsLinux() || OperatingSystem.IsMacOS() || OperatingSystem.IsIOS())
{
_signalHandlerPtr = MapCode(NativeSignalHandlerGenerator.GenerateUnixSignalHandler(_handlerConfig, rangeStructSize));
if (customSignalHandlerFactory != null)
{
_signalHandlerPtr = customSignalHandlerFactory(UnixSignalHandlerRegistration.GetSegfaultExceptionHandler().sa_handler, _signalHandlerPtr);
}
var old = UnixSignalHandlerRegistration.RegisterExceptionHandler(_signalHandlerPtr);
config.UnixOldSigaction = (nuint)(ulong)old.sa_handler;
config.UnixOldSigaction3Arg = old.sa_flags & 4;
}
else
{
config.StructAddressOffset = 40; // ExceptionInformation1
config.StructWriteOffset = 32; // ExceptionInformation0
_signalHandlerPtr = MapCode(NativeSignalHandlerGenerator.GenerateWindowsSignalHandler(_handlerConfig, rangeStructSize));
if (customSignalHandlerFactory != null)
{
_signalHandlerPtr = customSignalHandlerFactory(IntPtr.Zero, _signalHandlerPtr);
}
WindowsSignalHandlerRegistration.RegisterExceptionHandler(_signalHandlerPtr);
}
_initialized = true;
}
}
private static IntPtr MapCode(ReadOnlySpan<byte> code)
{
Debug.Assert(_codeBlock == null);
ulong codeSizeAligned = BitUtils.AlignUp((ulong)code.Length, MemoryBlock.GetPageSize());
_codeBlock = new MemoryBlock(codeSizeAligned);
_codeBlock.Write(0, code);
_codeBlock.Reprotect(0, codeSizeAligned, MemoryPermission.ReadAndExecute);
return _codeBlock.Pointer;
}
private static unsafe ref SignalHandlerConfig GetConfigRef()
{
return ref Unsafe.AsRef<SignalHandlerConfig>((void*)_handlerConfig);
}
public static bool AddTrackedRegion(nuint address, nuint endAddress, IntPtr action)
{
Span<SignalHandlerRange> ranges = GetConfigRef().Ranges;
for (int i = 0; i < NativeSignalHandlerGenerator.MaxTrackedRanges; i++)
{
if (ranges[i].IsActive == 0)
{
ranges[i].RangeAddress = address;
ranges[i].RangeEndAddress = endAddress;
ranges[i].ActionPointer = action;
ranges[i].IsActive = 1;
return true;
}
}
return false;
}
public static bool RemoveTrackedRegion(nuint address)
{
Span<SignalHandlerRange> ranges = GetConfigRef().Ranges;
for (int i = 0; i < NativeSignalHandlerGenerator.MaxTrackedRanges; i++)
{
if (ranges[i].IsActive == 1 && ranges[i].RangeAddress == address)
{
ranges[i].IsActive = 0;
return true;
}
}
return false;
}
public static bool SupportsFaultAddressPatching()
{
return NativeSignalHandlerGenerator.SupportsFaultAddressPatchingForHost();
}
}
}
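A minimal wiring sketch for the handler, mirroring how MemoryEhMeilleure uses it earlier in this diff (baseAddress, size, and trackingEvent are hypothetical):

NativeSignalHandler.InitializeSignalHandler();

// Route faults inside [baseAddress, baseAddress + size) to trackingEvent;
// the delegate must be kept alive so the function pointer stays valid.
bool added = NativeSignalHandler.AddTrackedRegion(
    (nuint)baseAddress,
    (nuint)(baseAddress + size),
    Marshal.GetFunctionPointerForDelegate(trackingEvent));

// Later, when the region goes away:
NativeSignalHandler.RemoveTrackedRegion((nuint)baseAddress);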


@ -0,0 +1,83 @@
using System;
using System.Runtime.InteropServices;
namespace Ryujinx.Cpu.Signal
{
static partial class UnixSignalHandlerRegistration
{
[StructLayout(LayoutKind.Sequential, Pack = 1)]
public unsafe struct SigSet
{
fixed long sa_mask[16];
}
[StructLayout(LayoutKind.Sequential, Pack = 1)]
public struct SigAction
{
public IntPtr sa_handler;
public SigSet sa_mask;
public int sa_flags;
public IntPtr sa_restorer;
}
private const int SIGSEGV = 11;
private const int SIGBUS = 10; // macOS/iOS value; SIGBUS is only registered on those platforms (it is 7 on Linux).
private const int SA_SIGINFO = 0x00000004;
[LibraryImport("libc", SetLastError = true)]
private static partial int sigaction(int signum, ref SigAction sigAction, out SigAction oldAction);
[LibraryImport("libc", SetLastError = true)]
private static partial int sigaction(int signum, IntPtr sigAction, out SigAction oldAction);
[LibraryImport("libc", SetLastError = true)]
private static partial int sigemptyset(ref SigSet set);
public static SigAction GetSegfaultExceptionHandler()
{
int result = sigaction(SIGSEGV, IntPtr.Zero, out SigAction old);
if (result != 0)
{
throw new InvalidOperationException($"Could not get SIGSEGV sigaction. Error: {result}");
}
return old;
}
public static SigAction RegisterExceptionHandler(IntPtr action)
{
SigAction sig = new()
{
sa_handler = action,
sa_flags = SA_SIGINFO,
};
sigemptyset(ref sig.sa_mask);
int result = sigaction(SIGSEGV, ref sig, out SigAction old);
if (result != 0)
{
throw new InvalidOperationException($"Could not register SIGSEGV sigaction. Error: {result}");
}
if (OperatingSystem.IsMacOS() || OperatingSystem.IsIOS())
{
result = sigaction(SIGBUS, ref sig, out _);
if (result != 0)
{
throw new InvalidOperationException($"Could not register SIGBUS sigaction. Error: {result}");
}
}
return old;
}
public static bool RestoreExceptionHandler(SigAction oldAction)
{
return sigaction(SIGSEGV, ref oldAction, out SigAction _) == 0 && ((!OperatingSystem.IsMacOS() && !OperatingSystem.IsIOS()) || sigaction(SIGBUS, ref oldAction, out SigAction _) == 0);
}
}
}


@ -0,0 +1,24 @@
using System;
using System.Runtime.InteropServices;
namespace Ryujinx.Cpu.Signal
{
static partial class WindowsSignalHandlerRegistration
{
[LibraryImport("kernel32.dll")]
private static partial IntPtr AddVectoredExceptionHandler(uint first, IntPtr handler);
[LibraryImport("kernel32.dll")]
private static partial ulong RemoveVectoredExceptionHandler(IntPtr handle);
public static IntPtr RegisterExceptionHandler(IntPtr action)
{
return AddVectoredExceptionHandler(1, action); // first = 1: run ahead of other vectored handlers.
}
public static bool RemoveExceptionHandler(IntPtr handle)
{
return RemoveVectoredExceptionHandler(handle) != 0;
}
}
}


@ -4,7 +4,7 @@ using System.Threading;
namespace Ryujinx.Cpu
{
public abstract class MemoryManagerBase : IRefCounted
public abstract class VirtualMemoryManagerRefCountedBase : VirtualMemoryManagerBase, IRefCounted
{
private int _referenceCount;