Merge Latest Ryujinx (Unstable)

Stossy11 2025-03-08 10:13:40 +11:00
parent aaefc0a9e5
commit 12ab8bc3e2
1237 changed files with 48656 additions and 21399 deletions

View file

@ -3,7 +3,6 @@ using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
namespace Ryujinx.Memory
{
@ -11,25 +10,21 @@ namespace Ryujinx.Memory
/// Represents an address space manager.
/// Supports virtual memory region mapping, address translation and read/write access to mapped regions.
/// </summary>
public sealed class AddressSpaceManager : IVirtualMemoryManager, IWritableBlock
public sealed class AddressSpaceManager : VirtualMemoryManagerBase, IVirtualMemoryManager
{
public const int PageBits = PageTable<nuint>.PageBits;
public const int PageSize = PageTable<nuint>.PageSize;
public const int PageMask = PageTable<nuint>.PageMask;
/// <inheritdoc/>
public bool Supports4KBPages => true;
public bool UsesPrivateAllocations => false;
/// <summary>
/// Address space width in bits.
/// </summary>
public int AddressSpaceBits { get; }
private readonly ulong _addressSpaceSize;
private readonly MemoryBlock _backingMemory;
private readonly PageTable<nuint> _pageTable;
protected override ulong AddressSpaceSize { get; }
/// <summary>
/// Creates a new instance of the memory manager.
/// </summary>
@ -47,7 +42,7 @@ namespace Ryujinx.Memory
}
AddressSpaceBits = asBits;
_addressSpaceSize = asSize;
AddressSpaceSize = asSize;
_backingMemory = backingMemory;
_pageTable = new PageTable<nuint>();
}
@ -67,8 +62,7 @@ namespace Ryujinx.Memory
}
}
/// <inheritdoc/>
public void MapForeign(ulong va, nuint hostPointer, ulong size)
public override void MapForeign(ulong va, nuint hostPointer, ulong size)
{
AssertValidAddressAndSize(va, size);
@ -96,112 +90,6 @@ namespace Ryujinx.Memory
}
}
/// <inheritdoc/>
public T Read<T>(ulong va) where T : unmanaged
{
return MemoryMarshal.Cast<byte, T>(GetSpan(va, Unsafe.SizeOf<T>()))[0];
}
/// <inheritdoc/>
public void Read(ulong va, Span<byte> data)
{
ReadImpl(va, data);
}
/// <inheritdoc/>
public void Write<T>(ulong va, T value) where T : unmanaged
{
Write(va, MemoryMarshal.Cast<T, byte>(MemoryMarshal.CreateSpan(ref value, 1)));
}
/// <inheritdoc/>
public void Write(ulong va, ReadOnlySpan<byte> data)
{
if (data.Length == 0)
{
return;
}
AssertValidAddressAndSize(va, (ulong)data.Length);
if (IsContiguousAndMapped(va, data.Length))
{
data.CopyTo(GetHostSpanContiguous(va, data.Length));
}
else
{
int offset = 0, size;
if ((va & PageMask) != 0)
{
size = Math.Min(data.Length, PageSize - (int)(va & PageMask));
data[..size].CopyTo(GetHostSpanContiguous(va, size));
offset += size;
}
for (; offset < data.Length; offset += size)
{
size = Math.Min(data.Length - offset, PageSize);
data.Slice(offset, size).CopyTo(GetHostSpanContiguous(va + (ulong)offset, size));
}
}
}
/// <inheritdoc/>
public bool WriteWithRedundancyCheck(ulong va, ReadOnlySpan<byte> data)
{
Write(va, data);
return true;
}
/// <inheritdoc/>
public ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false)
{
if (size == 0)
{
return ReadOnlySpan<byte>.Empty;
}
if (IsContiguousAndMapped(va, size))
{
return GetHostSpanContiguous(va, size);
}
else
{
Span<byte> data = new byte[size];
ReadImpl(va, data);
return data;
}
}
/// <inheritdoc/>
public unsafe WritableRegion GetWritableRegion(ulong va, int size, bool tracked = false)
{
if (size == 0)
{
return new WritableRegion(null, va, Memory<byte>.Empty);
}
if (IsContiguousAndMapped(va, size))
{
return new WritableRegion(null, va, new NativeMemoryManager<byte>((byte*)GetHostAddress(va), size).Memory);
}
else
{
Memory<byte> memory = new byte[size];
GetSpan(va, size).CopyTo(memory.Span);
return new WritableRegion(this, va, memory);
}
}
/// <inheritdoc/>
public unsafe ref T GetRef<T>(ulong va) where T : unmanaged
{
@ -213,50 +101,6 @@ namespace Ryujinx.Memory
return ref *(T*)GetHostAddress(va);
}
/// <inheritdoc/>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static int GetPagesCount(ulong va, uint size, out ulong startVa)
{
// WARNING: Always check if ulong does not overflow during the operations.
startVa = va & ~(ulong)PageMask;
ulong vaSpan = (va - startVa + size + PageMask) & ~(ulong)PageMask;
return (int)(vaSpan / PageSize);
}
private static void ThrowMemoryNotContiguous() => throw new MemoryNotContiguousException();
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private bool IsContiguousAndMapped(ulong va, int size) => IsContiguous(va, size) && IsMapped(va);
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private bool IsContiguous(ulong va, int size)
{
if (!ValidateAddress(va) || !ValidateAddressAndSize(va, (ulong)size))
{
return false;
}
int pages = GetPagesCount(va, (uint)size, out va);
for (int page = 0; page < pages - 1; page++)
{
if (!ValidateAddress(va + PageSize))
{
return false;
}
if (GetHostAddress(va) + PageSize != GetHostAddress(va + PageSize))
{
return false;
}
va += PageSize;
}
return true;
}
/// <inheritdoc/>
public IEnumerable<HostMemoryRange> GetHostRegions(ulong va, ulong size)
{
@ -314,7 +158,7 @@ namespace Ryujinx.Memory
return null;
}
int pages = GetPagesCount(va, (uint)size, out va);
int pages = GetPagesCount(va, size, out va);
var regions = new List<HostMemoryRange>();
@ -346,37 +190,8 @@ namespace Ryujinx.Memory
return regions;
}
private void ReadImpl(ulong va, Span<byte> data)
{
if (data.Length == 0)
{
return;
}
AssertValidAddressAndSize(va, (ulong)data.Length);
int offset = 0, size;
if ((va & PageMask) != 0)
{
size = Math.Min(data.Length, PageSize - (int)(va & PageMask));
GetHostSpanContiguous(va, size).CopyTo(data[..size]);
offset += size;
}
for (; offset < data.Length; offset += size)
{
size = Math.Min(data.Length - offset, PageSize);
GetHostSpanContiguous(va + (ulong)offset, size).CopyTo(data.Slice(offset, size));
}
}
/// <inheritdoc/>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public bool IsMapped(ulong va)
public override bool IsMapped(ulong va)
{
if (!ValidateAddress(va))
{
@ -389,7 +204,7 @@ namespace Ryujinx.Memory
/// <inheritdoc/>
public bool IsRangeMapped(ulong va, ulong size)
{
if (size == 0UL)
if (size == 0)
{
return true;
}
@ -414,42 +229,6 @@ namespace Ryujinx.Memory
return true;
}
private bool ValidateAddress(ulong va)
{
return va < _addressSpaceSize;
}
/// <summary>
/// Checks if the combination of virtual address and size is part of the addressable space.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range in bytes</param>
/// <returns>True if the combination of virtual address and size is part of the addressable space</returns>
private bool ValidateAddressAndSize(ulong va, ulong size)
{
ulong endVa = va + size;
return endVa >= va && endVa >= size && endVa <= _addressSpaceSize;
}
/// <summary>
/// Ensures the combination of virtual address and size is part of the addressable space.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range in bytes</param>
/// <exception cref="InvalidMemoryRegionException">Throw when the memory region specified outside the addressable space</exception>
private void AssertValidAddressAndSize(ulong va, ulong size)
{
if (!ValidateAddressAndSize(va, size))
{
throw new InvalidMemoryRegionException($"va=0x{va:X16}, size=0x{size:X16}");
}
}
private unsafe Span<byte> GetHostSpanContiguous(ulong va, int size)
{
return new Span<byte>((void*)GetHostAddress(va), size);
}
private nuint GetHostAddress(ulong va)
{
return _pageTable.Read(va) + (nuint)(va & PageMask);
@ -461,15 +240,21 @@ namespace Ryujinx.Memory
}
/// <inheritdoc/>
public void TrackingReprotect(ulong va, ulong size, MemoryPermission protection)
public void TrackingReprotect(ulong va, ulong size, MemoryPermission protection, bool guest = false)
{
throw new NotImplementedException();
}
/// <inheritdoc/>
public void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null)
{
// Only the ARM Memory Manager has tracking for now.
}
protected unsafe override Memory<byte> GetPhysicalAddressMemory(nuint pa, int size)
=> new NativeMemoryManager<byte>((byte*)pa, size).Memory;
protected override unsafe Span<byte> GetPhysicalAddressSpan(nuint pa, int size)
=> new Span<byte>((void*)pa, size);
protected override nuint TranslateVirtualAddressChecked(ulong va)
=> GetHostAddress(va);
protected override nuint TranslateVirtualAddressUnchecked(ulong va)
=> GetHostAddress(va);
}
}
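
The change above collapses AddressSpaceManager onto the new VirtualMemoryManagerBase base class (added later in this commit): the page-walking Read/Write/GetSpan code is deleted here and inherited instead, and the derived class now only supplies the address-space size, the mapping state, and VA-to-host translation. A condensed, illustrative sketch of that contract follows; ExampleAddressSpaceManager is not part of the commit, and Map/Unmap and tracking methods are omitted for brevity.

using System;

namespace Ryujinx.Memory
{
    // Sketch of the surface a VirtualMemoryManagerBase subclass must provide,
    // mirroring what remains of AddressSpaceManager after this change.
    public sealed class ExampleAddressSpaceManager : VirtualMemoryManagerBase
    {
        private readonly PageTable<nuint> _pageTable = new();

        protected override ulong AddressSpaceSize { get; }

        public ExampleAddressSpaceManager(ulong addressSpaceSize)
        {
            AddressSpaceSize = addressSpaceSize;
        }

        public override bool IsMapped(ulong va)
            => ValidateAddress(va) && _pageTable.Read(va) != 0;

        protected override unsafe Memory<byte> GetPhysicalAddressMemory(nuint pa, int size)
            => new NativeMemoryManager<byte>((byte*)pa, size).Memory;

        protected override unsafe Span<byte> GetPhysicalAddressSpan(nuint pa, int size)
            => new((void*)pa, size);

        protected override nuint TranslateVirtualAddressChecked(ulong va)
            => _pageTable.Read(va) + (nuint)(va & PageMask);

        protected override nuint TranslateVirtualAddressUnchecked(ulong va)
            => _pageTable.Read(va) + (nuint)(va & PageMask);
    }
}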

View file

@ -0,0 +1,60 @@
using System;
using System.Buffers;
using System.Runtime.InteropServices;
namespace Ryujinx.Memory
{
/// <summary>
/// A concrete implementation of <seealso cref="ReadOnlySequence{Byte}"/>,
/// with methods to help build a full sequence.
/// </summary>
public sealed class BytesReadOnlySequenceSegment : ReadOnlySequenceSegment<byte>
{
public BytesReadOnlySequenceSegment(Memory<byte> memory) => Memory = memory;
public BytesReadOnlySequenceSegment Append(Memory<byte> memory)
{
var nextSegment = new BytesReadOnlySequenceSegment(memory)
{
RunningIndex = RunningIndex + Memory.Length
};
Next = nextSegment;
return nextSegment;
}
/// <summary>
/// Attempts to determine if the current <seealso cref="Memory{Byte}"/> and <paramref name="other"/> are contiguous.
/// Only works if both were created by a <seealso cref="NativeMemoryManager{Byte}"/>.
/// </summary>
/// <param name="other">The segment to check if continuous with the current one</param>
/// <param name="contiguousStart">The starting address of the contiguous segment</param>
/// <param name="contiguousSize">The size of the contiguous segment</param>
/// <returns>True if the segments are contiguous, otherwise false</returns>
public unsafe bool IsContiguousWith(Memory<byte> other, out nuint contiguousStart, out int contiguousSize)
{
if (MemoryMarshal.TryGetMemoryManager<byte, NativeMemoryManager<byte>>(Memory, out var thisMemoryManager) &&
MemoryMarshal.TryGetMemoryManager<byte, NativeMemoryManager<byte>>(other, out var otherMemoryManager) &&
thisMemoryManager.Pointer + thisMemoryManager.Length == otherMemoryManager.Pointer)
{
contiguousStart = (nuint)thisMemoryManager.Pointer;
contiguousSize = thisMemoryManager.Length + otherMemoryManager.Length;
return true;
}
else
{
contiguousStart = 0;
contiguousSize = 0;
return false;
}
}
/// <summary>
/// Replaces the current <seealso cref="Memory{Byte}"/> value with the one provided.
/// </summary>
/// <param name="memory">The new segment to hold in this <seealso cref="BytesReadOnlySequenceSegment"/></param>
public void Replace(Memory<byte> memory)
=> Memory = memory;
}
}
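
BytesReadOnlySequenceSegment exists so the memory manager can stitch individual guest pages into a single ReadOnlySequence<byte> without copying: each Append call links a new segment and advances RunningIndex, and IsContiguousWith lets adjacent native pages collapse into one segment. A small usage sketch, where the byte arrays stand in for the NativeMemoryManager-backed pages used by the real caller:

using System;
using System.Buffers;
using Ryujinx.Memory;

Memory<byte> pageA = new byte[0x1000];
Memory<byte> pageB = new byte[0x1000];

// Chain two segments; Append sets the new segment's RunningIndex to the
// running total of bytes that precede it.
var first = new BytesReadOnlySequenceSegment(pageA);
var last = first.Append(pageB);

// Wrap the chain in a ReadOnlySequence covering both segments.
var sequence = new ReadOnlySequence<byte>(first, 0, last, last.Memory.Length);
Console.WriteLine(sequence.Length); // 8192 (0x2000)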

View file

@ -8,10 +8,10 @@ namespace Ryujinx.Memory
public interface IVirtualMemoryManager
{
/// <summary>
/// Indicates whenever the memory manager supports aliasing pages at 4KB granularity.
/// Indicates whether the memory manager creates private allocations when the <see cref="MemoryMapFlags.Private"/> flag is set on map.
/// </summary>
/// <returns>True if 4KB pages are supported by the memory manager, false otherwise</returns>
bool Supports4KBPages { get; }
/// <returns>True if private mappings might be used, false otherwise</returns>
bool UsesPrivateAllocations { get; }
/// <summary>
/// Maps a virtual memory range into a physical memory range.
@ -124,6 +124,16 @@ namespace Ryujinx.Memory
}
}
/// <summary>
/// Gets a read-only sequence of read-only memory blocks from CPU mapped memory.
/// </summary>
/// <param name="va">Virtual address of the data</param>
/// <param name="size">Size of the data</param>
/// <param name="tracked">True if read tracking is triggered on the memory</param>
/// <returns>A read-only sequence of read-only memory of the data</returns>
/// <exception cref="InvalidMemoryRegionException">Throw for unhandled invalid or unmapped memory accesses</exception>
ReadOnlySequence<byte> GetReadOnlySequence(ulong va, int size, bool tracked = false);
/// <summary>
/// Gets a read-only span of data from CPU mapped memory.
/// </summary>
@ -214,6 +224,7 @@ namespace Ryujinx.Memory
/// <param name="va">Virtual address base</param>
/// <param name="size">Size of the region to protect</param>
/// <param name="protection">Memory protection to set</param>
void TrackingReprotect(ulong va, ulong size, MemoryPermission protection);
/// <param name="guest">True if the protection is for guest access, false otherwise</param>
void TrackingReprotect(ulong va, ulong size, MemoryPermission protection, bool guest);
}
}

View file

@ -1,9 +1,25 @@
using System;
using System.Buffers;
namespace Ryujinx.Memory
{
public interface IWritableBlock
{
/// <summary>
/// Writes data to CPU mapped memory, with write tracking.
/// </summary>
/// <param name="va">Virtual address to write the data into</param>
/// <param name="data">Data to be written</param>
/// <exception cref="InvalidMemoryRegionException">Throw for unhandled invalid or unmapped memory accesses</exception>
void Write(ulong va, ReadOnlySequence<byte> data)
{
foreach (ReadOnlyMemory<byte> segment in data)
{
Write(va, segment.Span);
va += (ulong)segment.Length;
}
}
void Write(ulong va, ReadOnlySpan<byte> data);
void WriteUntracked(ulong va, ReadOnlySpan<byte> data) => Write(va, data);
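
The new default interface method gives every IWritableBlock a ReadOnlySequence<byte> overload for free: it walks the sequence segment by segment, forwarding each one to the span overload and advancing the virtual address by the segment length. A toy block illustrating the mechanics; ArrayBlock is illustrative only and not part of the commit.

using System;
using System.Buffers;
using Ryujinx.Memory;

// Calling through the interface picks up the default sequence overload.
IWritableBlock block = new ArrayBlock();
block.Write(0x100, new ReadOnlySequence<byte>(new byte[] { 1, 2, 3, 4 }));

class ArrayBlock : IWritableBlock
{
    private readonly byte[] _backing = new byte[0x10000];

    // Only the span overload is implemented; the sequence overload and
    // WriteUntracked come from the interface's default implementations.
    public void Write(ulong va, ReadOnlySpan<byte> data)
        => data.CopyTo(_backing.AsSpan((int)va, data.Length));
}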

View file

@ -174,7 +174,7 @@ namespace Ryujinx.Memory
/// <param name="offset">Starting offset of the range being read</param>
/// <param name="data">Span where the bytes being read will be copied to</param>
/// <exception cref="ObjectDisposedException">Throw when the memory block has already been disposed</exception>
/// <exception cref="InvalidMemoryRegionException">Throw when the memory region specified for the the data is out of range</exception>
/// <exception cref="InvalidMemoryRegionException">Throw when the memory region specified for the data is out of range</exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Read(ulong offset, Span<byte> data)
{
@ -188,7 +188,7 @@ namespace Ryujinx.Memory
/// <param name="offset">Offset where the data is located</param>
/// <returns>Data at the specified address</returns>
/// <exception cref="ObjectDisposedException">Throw when the memory block has already been disposed</exception>
/// <exception cref="InvalidMemoryRegionException">Throw when the memory region specified for the the data is out of range</exception>
/// <exception cref="InvalidMemoryRegionException">Throw when the memory region specified for the data is out of range</exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public T Read<T>(ulong offset) where T : unmanaged
{
@ -201,7 +201,7 @@ namespace Ryujinx.Memory
/// <param name="offset">Starting offset of the range being written</param>
/// <param name="data">Span where the bytes being written will be copied from</param>
/// <exception cref="ObjectDisposedException">Throw when the memory block has already been disposed</exception>
/// <exception cref="InvalidMemoryRegionException">Throw when the memory region specified for the the data is out of range</exception>
/// <exception cref="InvalidMemoryRegionException">Throw when the memory region specified for the data is out of range</exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Write(ulong offset, ReadOnlySpan<byte> data)
{
@ -215,7 +215,7 @@ namespace Ryujinx.Memory
/// <param name="offset">Offset to write the data into</param>
/// <param name="data">Data to be written</param>
/// <exception cref="ObjectDisposedException">Throw when the memory block has already been disposed</exception>
/// <exception cref="InvalidMemoryRegionException">Throw when the memory region specified for the the data is out of range</exception>
/// <exception cref="InvalidMemoryRegionException">Throw when the memory region specified for the data is out of range</exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Write<T>(ulong offset, T data) where T : unmanaged
{

View file

@ -8,12 +8,21 @@ namespace Ryujinx.Memory
private readonly T* _pointer;
private readonly int _length;
public NativeMemoryManager(nuint pointer, int length)
: this((T*)pointer, length)
{
}
public NativeMemoryManager(T* pointer, int length)
{
_pointer = pointer;
_length = length;
}
public unsafe T* Pointer => _pointer;
public int Length => _length;
public override Span<T> GetSpan()
{
return new Span<T>((void*)_pointer, _length);

View file

@ -4,6 +4,22 @@ namespace Ryujinx.Memory.Range
{
MultiRange Range { get; }
ulong BaseAddress => Range.GetSubRange(0).Address;
ulong BaseAddress
{
get
{
for (int index = 0; index < Range.Count; index++)
{
MemoryRange subRange = Range.GetSubRange(index);
if (!MemoryRange.IsInvalid(ref subRange))
{
return subRange.Address;
}
}
return MemoryRange.InvalidAddress;
}
}
}
}

View file

@ -5,6 +5,11 @@ namespace Ryujinx.Memory.Range
/// </summary>
public readonly record struct MemoryRange
{
/// <summary>
/// Special address value used to indicate that an address is invalid.
/// </summary>
internal const ulong InvalidAddress = ulong.MaxValue;
/// <summary>
/// An empty memory range, with a null address and zero size.
/// </summary>
@ -58,13 +63,24 @@ namespace Ryujinx.Memory.Range
return thisAddress < otherEndAddress && otherAddress < thisEndAddress;
}
/// <summary>
/// Checks if a given sub-range of memory is invalid.
/// Those are used to represent unmapped memory regions (holes in the region mapping).
/// </summary>
/// <param name="subRange">Memory range to check</param>
/// <returns>True if the memory range is considered invalid, false otherwise</returns>
internal static bool IsInvalid(ref MemoryRange subRange)
{
return subRange.Address == InvalidAddress;
}
/// <summary>
/// Returns a string summary of the memory range.
/// </summary>
/// <returns>A string summary of the memory range</returns>
public override string ToString()
{
if (Address == ulong.MaxValue)
if (Address == InvalidAddress)
{
return $"[Unmapped 0x{Size:X}]";
}

View file

@ -30,7 +30,7 @@ namespace Ryujinx.Memory.Range
{
var subrange = range.GetSubRange(i);
if (IsInvalid(ref subrange))
if (MemoryRange.IsInvalid(ref subrange))
{
continue;
}
@ -56,7 +56,7 @@ namespace Ryujinx.Memory.Range
{
var subrange = range.GetSubRange(i);
if (IsInvalid(ref subrange))
if (MemoryRange.IsInvalid(ref subrange))
{
continue;
}
@ -99,7 +99,7 @@ namespace Ryujinx.Memory.Range
{
var subrange = range.GetSubRange(i);
if (IsInvalid(ref subrange))
if (MemoryRange.IsInvalid(ref subrange))
{
continue;
}
@ -142,17 +142,6 @@ namespace Ryujinx.Memory.Range
return overlapCount;
}
/// <summary>
/// Checks if a given sub-range of memory is invalid.
/// Those are used to represent unmapped memory regions (holes in the region mapping).
/// </summary>
/// <param name="subRange">Memory range to checl</param>
/// <returns>True if the memory range is considered invalid, false otherwise</returns>
private static bool IsInvalid(ref MemoryRange subRange)
{
return subRange.Address == ulong.MaxValue;
}
/// <summary>
/// Gets all items on the list starting at the specified memory address.
/// </summary>

View file

@ -14,9 +14,14 @@ namespace Ryujinx.Memory.Tracking
// Only use these from within the lock.
private readonly NonOverlappingRangeList<VirtualRegion> _virtualRegions;
// Guest virtual regions are a subset of the normal virtual regions, with potentially different protection
// and expanded area of effect on platforms that don't support misaligned page protection.
private readonly NonOverlappingRangeList<VirtualRegion> _guestVirtualRegions;
private readonly int _pageSize;
private readonly bool _singleByteGuestTracking;
/// <summary>
/// This lock must be obtained when traversing or updating the region-handle hierarchy.
/// It is not required when reading dirty flags.
@ -27,16 +32,27 @@ namespace Ryujinx.Memory.Tracking
/// Create a new tracking structure for the given "physical" memory block,
/// with a given "virtual" memory manager that will provide mappings and virtual memory protection.
/// </summary>
/// <remarks>
/// If <paramref name="singleByteGuestTracking" /> is true, the memory manager must also support protection on partially
/// unmapped regions without throwing exceptions or dropping protection on the mapped portion.
/// </remarks>
/// <param name="memoryManager">Virtual memory manager</param>
/// <param name="block">Physical memory block</param>
/// <param name="pageSize">Page size of the virtual memory space</param>
public MemoryTracking(IVirtualMemoryManager memoryManager, int pageSize, InvalidAccessHandler invalidAccessHandler = null)
/// <param name="invalidAccessHandler">Method to call for invalid memory accesses</param>
/// <param name="singleByteGuestTracking">True if the guest only signals writes for the first byte</param>
public MemoryTracking(
IVirtualMemoryManager memoryManager,
int pageSize,
InvalidAccessHandler invalidAccessHandler = null,
bool singleByteGuestTracking = false)
{
_memoryManager = memoryManager;
_pageSize = pageSize;
_invalidAccessHandler = invalidAccessHandler;
_singleByteGuestTracking = singleByteGuestTracking;
_virtualRegions = new NonOverlappingRangeList<VirtualRegion>();
_guestVirtualRegions = new NonOverlappingRangeList<VirtualRegion>();
}
private (ulong address, ulong size) PageAlign(ulong address, ulong size)
@ -62,20 +78,25 @@ namespace Ryujinx.Memory.Tracking
{
ref var overlaps = ref ThreadStaticArray<VirtualRegion>.Get();
int count = _virtualRegions.FindOverlapsNonOverlapping(va, size, ref overlaps);
for (int i = 0; i < count; i++)
for (int type = 0; type < 2; type++)
{
VirtualRegion region = overlaps[i];
NonOverlappingRangeList<VirtualRegion> regions = type == 0 ? _virtualRegions : _guestVirtualRegions;
// If the region has been fully remapped, signal that it has been mapped again.
bool remapped = _memoryManager.IsRangeMapped(region.Address, region.Size);
if (remapped)
int count = regions.FindOverlapsNonOverlapping(va, size, ref overlaps);
for (int i = 0; i < count; i++)
{
region.SignalMappingChanged(true);
}
VirtualRegion region = overlaps[i];
region.UpdateProtection();
// If the region has been fully remapped, signal that it has been mapped again.
bool remapped = _memoryManager.IsRangeMapped(region.Address, region.Size);
if (remapped)
{
region.SignalMappingChanged(true);
}
region.UpdateProtection();
}
}
}
}
@ -95,27 +116,58 @@ namespace Ryujinx.Memory.Tracking
{
ref var overlaps = ref ThreadStaticArray<VirtualRegion>.Get();
int count = _virtualRegions.FindOverlapsNonOverlapping(va, size, ref overlaps);
for (int i = 0; i < count; i++)
for (int type = 0; type < 2; type++)
{
VirtualRegion region = overlaps[i];
NonOverlappingRangeList<VirtualRegion> regions = type == 0 ? _virtualRegions : _guestVirtualRegions;
region.SignalMappingChanged(false);
int count = regions.FindOverlapsNonOverlapping(va, size, ref overlaps);
for (int i = 0; i < count; i++)
{
VirtualRegion region = overlaps[i];
region.SignalMappingChanged(false);
}
}
}
}
/// <summary>
/// Alter a tracked memory region to properly capture unaligned accesses.
/// For most memory manager modes, this does nothing.
/// </summary>
/// <param name="address">Original region address</param>
/// <param name="size">Original region size</param>
/// <returns>A new address and size for tracking unaligned accesses</returns>
internal (ulong newAddress, ulong newSize) GetUnalignedSafeRegion(ulong address, ulong size)
{
if (_singleByteGuestTracking)
{
// The guest only signals the first byte of each memory access with the current memory manager.
// To catch unaligned access properly, we need to also protect the page before the address.
// Assume that the address and size are already aligned.
return (address - (ulong)_pageSize, size + (ulong)_pageSize);
}
else
{
return (address, size);
}
}
/// <summary>
/// Get a list of virtual regions that a handle covers.
/// </summary>
/// <param name="va">Starting virtual memory address of the handle</param>
/// <param name="size">Size of the handle's memory region</param>
/// <param name="guest">True if getting handles for guest protection, false otherwise</param>
/// <returns>A list of virtual regions within the given range</returns>
internal List<VirtualRegion> GetVirtualRegionsForHandle(ulong va, ulong size)
internal List<VirtualRegion> GetVirtualRegionsForHandle(ulong va, ulong size, bool guest)
{
List<VirtualRegion> result = new();
_virtualRegions.GetOrAddRegions(result, va, size, (va, size) => new VirtualRegion(this, va, size));
NonOverlappingRangeList<VirtualRegion> regions = guest ? _guestVirtualRegions : _virtualRegions;
regions.GetOrAddRegions(result, va, size, (va, size) => new VirtualRegion(this, va, size, guest));
return result;
}
@ -126,7 +178,14 @@ namespace Ryujinx.Memory.Tracking
/// <param name="region">Region to remove</param>
internal void RemoveVirtual(VirtualRegion region)
{
_virtualRegions.Remove(region);
if (region.Guest)
{
_guestVirtualRegions.Remove(region);
}
else
{
_virtualRegions.Remove(region);
}
}
/// <summary>
@ -137,10 +196,11 @@ namespace Ryujinx.Memory.Tracking
/// <param name="handles">Handles to inherit state from or reuse. When none are present, provide null</param>
/// <param name="granularity">Desired granularity of write tracking</param>
/// <param name="id">Handle ID</param>
/// <param name="flags">Region flags</param>
/// <returns>The memory tracking handle</returns>
public MultiRegionHandle BeginGranularTracking(ulong address, ulong size, IEnumerable<IRegionHandle> handles, ulong granularity, int id)
public MultiRegionHandle BeginGranularTracking(ulong address, ulong size, IEnumerable<IRegionHandle> handles, ulong granularity, int id, RegionFlags flags = RegionFlags.None)
{
return new MultiRegionHandle(this, address, size, handles, granularity, id);
return new MultiRegionHandle(this, address, size, handles, granularity, id, flags);
}
/// <summary>
@ -164,15 +224,16 @@ namespace Ryujinx.Memory.Tracking
/// <param name="address">CPU virtual address of the region</param>
/// <param name="size">Size of the region</param>
/// <param name="id">Handle ID</param>
/// <param name="flags">Region flags</param>
/// <returns>The memory tracking handle</returns>
public RegionHandle BeginTracking(ulong address, ulong size, int id)
public RegionHandle BeginTracking(ulong address, ulong size, int id, RegionFlags flags = RegionFlags.None)
{
var (paAddress, paSize) = PageAlign(address, size);
lock (TrackingLock)
{
bool mapped = _memoryManager.IsRangeMapped(address, size);
RegionHandle handle = new(this, paAddress, paSize, address, size, id, mapped);
RegionHandle handle = new(this, paAddress, paSize, address, size, id, flags, mapped);
return handle;
}
@ -186,15 +247,16 @@ namespace Ryujinx.Memory.Tracking
/// <param name="bitmap">The bitmap owning the dirty flag for this handle</param>
/// <param name="bit">The bit of this handle within the dirty flag</param>
/// <param name="id">Handle ID</param>
/// <param name="flags">Region flags</param>
/// <returns>The memory tracking handle</returns>
internal RegionHandle BeginTrackingBitmap(ulong address, ulong size, ConcurrentBitmap bitmap, int bit, int id)
internal RegionHandle BeginTrackingBitmap(ulong address, ulong size, ConcurrentBitmap bitmap, int bit, int id, RegionFlags flags = RegionFlags.None)
{
var (paAddress, paSize) = PageAlign(address, size);
lock (TrackingLock)
{
bool mapped = _memoryManager.IsRangeMapped(address, size);
RegionHandle handle = new(this, paAddress, paSize, address, size, bitmap, bit, id, mapped);
RegionHandle handle = new(this, paAddress, paSize, address, size, bitmap, bit, id, flags, mapped);
return handle;
}
@ -202,6 +264,7 @@ namespace Ryujinx.Memory.Tracking
/// <summary>
/// Signal that a virtual memory event happened at the given location.
/// The memory event is assumed to be triggered by guest code.
/// </summary>
/// <param name="address">Virtual address accessed</param>
/// <param name="size">Size of the region affected in bytes</param>
@ -209,7 +272,7 @@ namespace Ryujinx.Memory.Tracking
/// <returns>True if the event triggered any tracking regions, false otherwise</returns>
public bool VirtualMemoryEvent(ulong address, ulong size, bool write)
{
return VirtualMemoryEvent(address, size, write, precise: false, null);
return VirtualMemoryEvent(address, size, write, precise: false, exemptId: null, guest: true);
}
/// <summary>
@ -222,8 +285,9 @@ namespace Ryujinx.Memory.Tracking
/// <param name="write">Whether the region was written to or read</param>
/// <param name="precise">True if the access is precise, false otherwise</param>
/// <param name="exemptId">Optional ID that of the handles that should not be signalled</param>
/// <param name="guest">True if the access is from the guest, false otherwise</param>
/// <returns>True if the event triggered any tracking regions, false otherwise</returns>
public bool VirtualMemoryEvent(ulong address, ulong size, bool write, bool precise, int? exemptId = null)
public bool VirtualMemoryEvent(ulong address, ulong size, bool write, bool precise, int? exemptId = null, bool guest = false)
{
// Look up the virtual region using the region list.
// Signal up the chain to relevant handles.
@ -234,7 +298,9 @@ namespace Ryujinx.Memory.Tracking
{
ref var overlaps = ref ThreadStaticArray<VirtualRegion>.Get();
int count = _virtualRegions.FindOverlapsNonOverlapping(address, size, ref overlaps);
NonOverlappingRangeList<VirtualRegion> regions = guest ? _guestVirtualRegions : _virtualRegions;
int count = regions.FindOverlapsNonOverlapping(address, size, ref overlaps);
if (count == 0 && !precise)
{
@ -242,7 +308,7 @@ namespace Ryujinx.Memory.Tracking
{
// TODO: There is currently the possibility that a page can be protected after its virtual region is removed.
// This code handles that case when it happens, but it would be better to find out how this happens.
_memoryManager.TrackingReprotect(address & ~(ulong)(_pageSize - 1), (ulong)_pageSize, MemoryPermission.ReadAndWrite);
_memoryManager.TrackingReprotect(address & ~(ulong)(_pageSize - 1), (ulong)_pageSize, MemoryPermission.ReadAndWrite, guest);
return true; // This memory _should_ be mapped, so we need to try again.
}
else
@ -252,6 +318,12 @@ namespace Ryujinx.Memory.Tracking
}
else
{
if (guest && _singleByteGuestTracking)
{
// Increase the access size to trigger handles with misaligned accesses.
size += (ulong)_pageSize;
}
for (int i = 0; i < count; i++)
{
VirtualRegion region = overlaps[i];
@ -285,9 +357,10 @@ namespace Ryujinx.Memory.Tracking
/// </summary>
/// <param name="region">Region to reprotect</param>
/// <param name="permission">Memory permission to protect with</param>
internal void ProtectVirtualRegion(VirtualRegion region, MemoryPermission permission)
/// <param name="guest">True if the protection is for guest access, false otherwise</param>
internal void ProtectVirtualRegion(VirtualRegion region, MemoryPermission permission, bool guest)
{
_memoryManager.TrackingReprotect(region.Address, region.Size, permission);
_memoryManager.TrackingReprotect(region.Address, region.Size, permission, guest);
}
/// <summary>

View file

@ -37,7 +37,8 @@ namespace Ryujinx.Memory.Tracking
ulong size,
IEnumerable<IRegionHandle> handles,
ulong granularity,
int id)
int id,
RegionFlags flags)
{
_handles = new RegionHandle[(size + granularity - 1) / granularity];
Granularity = granularity;
@ -62,7 +63,7 @@ namespace Ryujinx.Memory.Tracking
// Fill any gap left before this handle.
while (i < startIndex)
{
RegionHandle fillHandle = tracking.BeginTrackingBitmap(address + (ulong)i * granularity, granularity, _dirtyBitmap, i, id);
RegionHandle fillHandle = tracking.BeginTrackingBitmap(address + (ulong)i * granularity, granularity, _dirtyBitmap, i, id, flags);
fillHandle.Parent = this;
_handles[i++] = fillHandle;
}
@ -83,7 +84,7 @@ namespace Ryujinx.Memory.Tracking
while (i < endIndex)
{
RegionHandle splitHandle = tracking.BeginTrackingBitmap(address + (ulong)i * granularity, granularity, _dirtyBitmap, i, id);
RegionHandle splitHandle = tracking.BeginTrackingBitmap(address + (ulong)i * granularity, granularity, _dirtyBitmap, i, id, flags);
splitHandle.Parent = this;
splitHandle.Reprotect(handle.Dirty);
@ -106,7 +107,7 @@ namespace Ryujinx.Memory.Tracking
// Fill any remaining space with new handles.
while (i < _handles.Length)
{
RegionHandle handle = tracking.BeginTrackingBitmap(address + (ulong)i * granularity, granularity, _dirtyBitmap, i, id);
RegionHandle handle = tracking.BeginTrackingBitmap(address + (ulong)i * granularity, granularity, _dirtyBitmap, i, id, flags);
handle.Parent = this;
_handles[i++] = handle;
}

View file

@ -0,0 +1,21 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Ryujinx.Memory.Tracking
{
[Flags]
public enum RegionFlags
{
None = 0,
/// <summary>
/// Access to the resource is expected to occasionally be unaligned.
/// With some memory managers, guest protection must extend into the previous page to cover unaligned access.
/// If this is not expected, protection is not altered, which can avoid unintended resource dirty/flush.
/// </summary>
UnalignedAccess = 1,
}
}
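
RegionFlags.UnalignedAccess ties into MemoryTracking.GetUnalignedSafeRegion: when the tracking object was created with singleByteGuestTracking and a handle is flagged for unaligned access, its guest-protected range is widened backwards by one page so that a write starting in the preceding page still trips the handle. A hedged illustration of the call site; the addresses, page size, id value and the TrackingExample/TrackTexture names are assumed for illustration only.

using Ryujinx.Memory.Tracking;

static class TrackingExample
{
    // With pageSize = 0x1000 and singleByteGuestTracking enabled, a handle flagged
    // UnalignedAccess over (0x5000, 0x2000) has its guest protection widened to
    // (0x4000, 0x3000) by GetUnalignedSafeRegion.
    public static RegionHandle TrackTexture(MemoryTracking tracking)
    {
        return tracking.BeginTracking(
            address: 0x5000,
            size: 0x2000,
            id: 0,
            flags: RegionFlags.UnalignedAccess);
    }
}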

View file

@ -55,6 +55,8 @@ namespace Ryujinx.Memory.Tracking
private RegionSignal _preAction; // Action to perform before a read or write. This will block the memory access.
private PreciseRegionSignal _preciseAction; // Action to perform on a precise read or write.
private readonly List<VirtualRegion> _regions;
private readonly List<VirtualRegion> _guestRegions;
private readonly List<VirtualRegion> _allRegions;
private readonly MemoryTracking _tracking;
private bool _disposed;
@ -99,6 +101,7 @@ namespace Ryujinx.Memory.Tracking
/// <param name="bitmap">The bitmap the dirty flag for this handle is stored in</param>
/// <param name="bit">The bit index representing the dirty flag for this handle</param>
/// <param name="id">Handle ID</param>
/// <param name="flags">Region flags</param>
/// <param name="mapped">True if the region handle starts mapped</param>
internal RegionHandle(
MemoryTracking tracking,
@ -109,6 +112,7 @@ namespace Ryujinx.Memory.Tracking
ConcurrentBitmap bitmap,
int bit,
int id,
RegionFlags flags,
bool mapped = true)
{
Bitmap = bitmap;
@ -128,11 +132,12 @@ namespace Ryujinx.Memory.Tracking
RealEndAddress = realAddress + realSize;
_tracking = tracking;
_regions = tracking.GetVirtualRegionsForHandle(address, size);
foreach (var region in _regions)
{
region.Handles.Add(this);
}
_regions = tracking.GetVirtualRegionsForHandle(address, size, false);
_guestRegions = GetGuestRegions(tracking, address, size, flags);
_allRegions = new List<VirtualRegion>(_regions.Count + _guestRegions.Count);
InitializeRegions();
}
/// <summary>
@ -145,8 +150,9 @@ namespace Ryujinx.Memory.Tracking
/// <param name="realAddress">The real, unaligned address of the handle</param>
/// <param name="realSize">The real, unaligned size of the handle</param>
/// <param name="id">Handle ID</param>
/// <param name="flags">Region flags</param>
/// <param name="mapped">True if the region handle starts mapped</param>
internal RegionHandle(MemoryTracking tracking, ulong address, ulong size, ulong realAddress, ulong realSize, int id, bool mapped = true)
internal RegionHandle(MemoryTracking tracking, ulong address, ulong size, ulong realAddress, ulong realSize, int id, RegionFlags flags, bool mapped = true)
{
Bitmap = new ConcurrentBitmap(1, mapped);
@ -163,8 +169,37 @@ namespace Ryujinx.Memory.Tracking
RealEndAddress = realAddress + realSize;
_tracking = tracking;
_regions = tracking.GetVirtualRegionsForHandle(address, size);
foreach (var region in _regions)
_regions = tracking.GetVirtualRegionsForHandle(address, size, false);
_guestRegions = GetGuestRegions(tracking, address, size, flags);
_allRegions = new List<VirtualRegion>(_regions.Count + _guestRegions.Count);
InitializeRegions();
}
private List<VirtualRegion> GetGuestRegions(MemoryTracking tracking, ulong address, ulong size, RegionFlags flags)
{
ulong guestAddress;
ulong guestSize;
if (flags.HasFlag(RegionFlags.UnalignedAccess))
{
(guestAddress, guestSize) = tracking.GetUnalignedSafeRegion(address, size);
}
else
{
(guestAddress, guestSize) = (address, size);
}
return tracking.GetVirtualRegionsForHandle(guestAddress, guestSize, true);
}
private void InitializeRegions()
{
_allRegions.AddRange(_regions);
_allRegions.AddRange(_guestRegions);
foreach (var region in _allRegions)
{
region.Handles.Add(this);
}
@ -321,7 +356,7 @@ namespace Ryujinx.Memory.Tracking
lock (_tracking.TrackingLock)
{
foreach (VirtualRegion region in _regions)
foreach (VirtualRegion region in _allRegions)
{
protectionChanged |= region.UpdateProtection();
}
@ -379,7 +414,7 @@ namespace Ryujinx.Memory.Tracking
{
lock (_tracking.TrackingLock)
{
foreach (VirtualRegion region in _regions)
foreach (VirtualRegion region in _allRegions)
{
region.UpdateProtection();
}
@ -414,7 +449,16 @@ namespace Ryujinx.Memory.Tracking
/// <param name="region">Virtual region to add as a child</param>
internal void AddChild(VirtualRegion region)
{
_regions.Add(region);
if (region.Guest)
{
_guestRegions.Add(region);
}
else
{
_regions.Add(region);
}
_allRegions.Add(region);
}
/// <summary>
@ -469,7 +513,7 @@ namespace Ryujinx.Memory.Tracking
lock (_tracking.TrackingLock)
{
foreach (VirtualRegion region in _regions)
foreach (VirtualRegion region in _allRegions)
{
region.RemoveHandle(this);
}

View file

@ -13,10 +13,14 @@ namespace Ryujinx.Memory.Tracking
private readonly MemoryTracking _tracking;
private MemoryPermission _lastPermission;
public VirtualRegion(MemoryTracking tracking, ulong address, ulong size, MemoryPermission lastPermission = MemoryPermission.Invalid) : base(address, size)
public bool Guest { get; }
public VirtualRegion(MemoryTracking tracking, ulong address, ulong size, bool guest, MemoryPermission lastPermission = MemoryPermission.Invalid) : base(address, size)
{
_lastPermission = lastPermission;
_tracking = tracking;
Guest = guest;
}
/// <inheritdoc/>
@ -66,9 +70,12 @@ namespace Ryujinx.Memory.Tracking
{
_lastPermission = MemoryPermission.Invalid;
foreach (RegionHandle handle in Handles)
if (!Guest)
{
handle.SignalMappingChanged(mapped);
foreach (RegionHandle handle in Handles)
{
handle.SignalMappingChanged(mapped);
}
}
}
@ -103,7 +110,7 @@ namespace Ryujinx.Memory.Tracking
if (_lastPermission != permission)
{
_tracking.ProtectVirtualRegion(this, permission);
_tracking.ProtectVirtualRegion(this, permission, Guest);
_lastPermission = permission;
return true;
@ -131,7 +138,7 @@ namespace Ryujinx.Memory.Tracking
public override INonOverlappingRange Split(ulong splitAddress)
{
VirtualRegion newRegion = new(_tracking, splitAddress, EndAddress - splitAddress, _lastPermission);
VirtualRegion newRegion = new(_tracking, splitAddress, EndAddress - splitAddress, Guest, _lastPermission);
Size = splitAddress - Address;
// The new region inherits all of our parents.

View file

@ -0,0 +1,405 @@
using Ryujinx.Common.Memory;
using System;
using System.Buffers;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
namespace Ryujinx.Memory
{
public abstract class VirtualMemoryManagerBase : IWritableBlock
{
public const int PageBits = 12;
public const int PageSize = 1 << PageBits;
public const int PageMask = PageSize - 1;
protected abstract ulong AddressSpaceSize { get; }
public virtual ReadOnlySequence<byte> GetReadOnlySequence(ulong va, int size, bool tracked = false)
{
if (size == 0)
{
return ReadOnlySequence<byte>.Empty;
}
if (tracked)
{
SignalMemoryTracking(va, (ulong)size, false);
}
if (IsContiguousAndMapped(va, size))
{
nuint pa = TranslateVirtualAddressUnchecked(va);
return new ReadOnlySequence<byte>(GetPhysicalAddressMemory(pa, size));
}
else
{
AssertValidAddressAndSize(va, size);
int offset = 0, segmentSize;
BytesReadOnlySequenceSegment first = null, last = null;
if ((va & PageMask) != 0)
{
nuint pa = TranslateVirtualAddressChecked(va);
segmentSize = Math.Min(size, PageSize - (int)(va & PageMask));
Memory<byte> memory = GetPhysicalAddressMemory(pa, segmentSize);
first = last = new BytesReadOnlySequenceSegment(memory);
offset += segmentSize;
}
for (; offset < size; offset += segmentSize)
{
nuint pa = TranslateVirtualAddressChecked(va + (ulong)offset);
segmentSize = Math.Min(size - offset, PageSize);
Memory<byte> memory = GetPhysicalAddressMemory(pa, segmentSize);
if (first is null)
{
first = last = new BytesReadOnlySequenceSegment(memory);
}
else
{
if (last.IsContiguousWith(memory, out nuint contiguousStart, out int contiguousSize))
{
last.Replace(GetPhysicalAddressMemory(contiguousStart, contiguousSize));
}
else
{
last = last.Append(memory);
}
}
}
return new ReadOnlySequence<byte>(first, 0, last, (int)(size - last.RunningIndex));
}
}
public virtual ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false)
{
if (size == 0)
{
return ReadOnlySpan<byte>.Empty;
}
if (tracked)
{
SignalMemoryTracking(va, (ulong)size, false);
}
if (IsContiguousAndMapped(va, size))
{
nuint pa = TranslateVirtualAddressUnchecked(va);
return GetPhysicalAddressSpan(pa, size);
}
else
{
Span<byte> data = new byte[size];
Read(va, data);
return data;
}
}
public virtual WritableRegion GetWritableRegion(ulong va, int size, bool tracked = false)
{
if (size == 0)
{
return new WritableRegion(null, va, Memory<byte>.Empty);
}
if (tracked)
{
SignalMemoryTracking(va, (ulong)size, true);
}
if (IsContiguousAndMapped(va, size))
{
nuint pa = TranslateVirtualAddressUnchecked(va);
return new WritableRegion(null, va, GetPhysicalAddressMemory(pa, size));
}
else
{
MemoryOwner<byte> memoryOwner = MemoryOwner<byte>.Rent(size);
Read(va, memoryOwner.Span);
return new WritableRegion(this, va, memoryOwner);
}
}
public abstract bool IsMapped(ulong va);
public virtual void MapForeign(ulong va, nuint hostPointer, ulong size)
{
throw new NotSupportedException();
}
public virtual T Read<T>(ulong va) where T : unmanaged
{
return MemoryMarshal.Cast<byte, T>(GetSpan(va, Unsafe.SizeOf<T>()))[0];
}
public virtual void Read(ulong va, Span<byte> data)
{
if (data.Length == 0)
{
return;
}
AssertValidAddressAndSize(va, data.Length);
int offset = 0, size;
if ((va & PageMask) != 0)
{
nuint pa = TranslateVirtualAddressChecked(va);
size = Math.Min(data.Length, PageSize - (int)(va & PageMask));
GetPhysicalAddressSpan(pa, size).CopyTo(data[..size]);
offset += size;
}
for (; offset < data.Length; offset += size)
{
nuint pa = TranslateVirtualAddressChecked(va + (ulong)offset);
size = Math.Min(data.Length - offset, PageSize);
GetPhysicalAddressSpan(pa, size).CopyTo(data.Slice(offset, size));
}
}
public virtual T ReadTracked<T>(ulong va) where T : unmanaged
{
SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), false);
return Read<T>(va);
}
public virtual void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null)
{
// No default implementation
}
public virtual void Write(ulong va, ReadOnlySpan<byte> data)
{
if (data.Length == 0)
{
return;
}
SignalMemoryTracking(va, (ulong)data.Length, true);
WriteImpl(va, data);
}
public virtual void Write<T>(ulong va, T value) where T : unmanaged
{
Write(va, MemoryMarshal.Cast<T, byte>(MemoryMarshal.CreateSpan(ref value, 1)));
}
public virtual void WriteUntracked(ulong va, ReadOnlySpan<byte> data)
{
if (data.Length == 0)
{
return;
}
WriteImpl(va, data);
}
public virtual bool WriteWithRedundancyCheck(ulong va, ReadOnlySpan<byte> data)
{
if (data.Length == 0)
{
return false;
}
if (IsContiguousAndMapped(va, data.Length))
{
SignalMemoryTracking(va, (ulong)data.Length, false);
nuint pa = TranslateVirtualAddressChecked(va);
var target = GetPhysicalAddressSpan(pa, data.Length);
bool changed = !data.SequenceEqual(target);
if (changed)
{
data.CopyTo(target);
}
return changed;
}
else
{
Write(va, data);
return true;
}
}
/// <summary>
/// Ensures the combination of virtual address and size is part of the addressable space.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range in bytes</param>
/// <exception cref="InvalidMemoryRegionException">Throw when the memory region specified outside the addressable space</exception>
protected void AssertValidAddressAndSize(ulong va, ulong size)
{
if (!ValidateAddressAndSize(va, size))
{
throw new InvalidMemoryRegionException($"va=0x{va:X16}, size=0x{size:X16}");
}
}
/// <summary>
/// Ensures the combination of virtual address and size is part of the addressable space.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range in bytes</param>
/// <exception cref="InvalidMemoryRegionException">Throw when the memory region specified outside the addressable space</exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
protected void AssertValidAddressAndSize(ulong va, int size)
=> AssertValidAddressAndSize(va, (ulong)size);
/// <summary>
/// Computes the number of pages in a virtual address range.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range</param>
/// <param name="startVa">The virtual address of the beginning of the first page</param>
/// <remarks>This function does not differentiate between allocated and unallocated pages.</remarks>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
protected static int GetPagesCount(ulong va, ulong size, out ulong startVa)
{
// WARNING: Always check if ulong does not overflow during the operations.
startVa = va & ~(ulong)PageMask;
ulong vaSpan = (va - startVa + size + PageMask) & ~(ulong)PageMask;
return (int)(vaSpan / PageSize);
}
protected abstract Memory<byte> GetPhysicalAddressMemory(nuint pa, int size);
protected abstract Span<byte> GetPhysicalAddressSpan(nuint pa, int size);
[MethodImpl(MethodImplOptions.AggressiveInlining)]
protected bool IsContiguous(ulong va, int size) => IsContiguous(va, (ulong)size);
protected virtual bool IsContiguous(ulong va, ulong size)
{
if (!ValidateAddress(va) || !ValidateAddressAndSize(va, size))
{
return false;
}
int pages = GetPagesCount(va, size, out va);
for (int page = 0; page < pages - 1; page++)
{
if (!ValidateAddress(va + PageSize))
{
return false;
}
if (TranslateVirtualAddressUnchecked(va) + PageSize != TranslateVirtualAddressUnchecked(va + PageSize))
{
return false;
}
va += PageSize;
}
return true;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
protected bool IsContiguousAndMapped(ulong va, int size)
=> IsContiguous(va, size) && IsMapped(va);
protected abstract nuint TranslateVirtualAddressChecked(ulong va);
protected abstract nuint TranslateVirtualAddressUnchecked(ulong va);
/// <summary>
/// Checks if the virtual address is part of the addressable space.
/// </summary>
/// <param name="va">Virtual address</param>
/// <returns>True if the virtual address is part of the addressable space</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
protected bool ValidateAddress(ulong va)
{
return va < AddressSpaceSize;
}
/// <summary>
/// Checks if the combination of virtual address and size is part of the addressable space.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range in bytes</param>
/// <returns>True if the combination of virtual address and size is part of the addressable space</returns>
protected bool ValidateAddressAndSize(ulong va, ulong size)
{
ulong endVa = va + size;
return endVa >= va && endVa >= size && endVa <= AddressSpaceSize;
}
protected static void ThrowInvalidMemoryRegionException(string message)
=> throw new InvalidMemoryRegionException(message);
protected static void ThrowMemoryNotContiguous()
=> throw new MemoryNotContiguousException();
protected virtual void WriteImpl(ulong va, ReadOnlySpan<byte> data)
{
AssertValidAddressAndSize(va, data.Length);
if (IsContiguousAndMapped(va, data.Length))
{
nuint pa = TranslateVirtualAddressUnchecked(va);
data.CopyTo(GetPhysicalAddressSpan(pa, data.Length));
}
else
{
int offset = 0, size;
if ((va & PageMask) != 0)
{
nuint pa = TranslateVirtualAddressChecked(va);
size = Math.Min(data.Length, PageSize - (int)(va & PageMask));
data[..size].CopyTo(GetPhysicalAddressSpan(pa, size));
offset += size;
}
for (; offset < data.Length; offset += size)
{
nuint pa = TranslateVirtualAddressChecked(va + (ulong)offset);
size = Math.Min(data.Length - offset, PageSize);
data.Slice(offset, size).CopyTo(GetPhysicalAddressSpan(pa, size));
}
}
}
}
}
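
The most substantial new piece here is GetReadOnlySequence: instead of copying a scattered range into a temporary array, it translates each page, merges host-contiguous neighbours via IsContiguousWith, and exposes the result as a zero-copy ReadOnlySequence<byte>. A hedged usage sketch of the access pattern; ReadExample and SumBytes are illustrative names, not part of the commit.

using System;
using System.Buffers;
using Ryujinx.Memory;

static class ReadExample
{
    // Sums a guest range without copying it into a temporary buffer: each segment
    // of the sequence points directly at host memory for the mapped pages, and
    // host-contiguous pages arrive already merged into a single segment.
    public static ulong SumBytes(IVirtualMemoryManager memory, ulong va, int size)
    {
        ulong total = 0;

        foreach (ReadOnlyMemory<byte> segment in memory.GetReadOnlySequence(va, size))
        {
            foreach (byte b in segment.Span)
            {
                total += b;
            }
        }

        return total;
    }
}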

View file

@ -1,3 +1,4 @@
using Ryujinx.Common.Memory;
using System;
namespace Ryujinx.Memory
@ -6,6 +7,7 @@ namespace Ryujinx.Memory
{
private readonly IWritableBlock _block;
private readonly ulong _va;
private readonly MemoryOwner<byte> _memoryOwner;
private readonly bool _tracked;
private bool NeedsWriteback => _block != null;
@ -20,6 +22,12 @@ namespace Ryujinx.Memory
Memory = memory;
}
public WritableRegion(IWritableBlock block, ulong va, MemoryOwner<byte> memoryOwner, bool tracked = false)
: this(block, va, memoryOwner.Memory, tracked)
{
_memoryOwner = memoryOwner;
}
public void Dispose()
{
if (NeedsWriteback)
@ -33,6 +41,8 @@ namespace Ryujinx.Memory
_block.WriteUntracked(_va, Memory.Span);
}
}
_memoryOwner?.Dispose();
}
}
}
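
WritableRegion now keeps hold of the rented MemoryOwner<byte> that backs non-contiguous regions and returns it to the pool in Dispose, after the write-back. The calling pattern is unchanged; a hedged sketch follows, where WriteExample and ZeroRange are illustrative names only.

using Ryujinx.Memory;

static class WriteExample
{
    // Zeroes a guest range through a WritableRegion. For a non-contiguous range
    // the bytes are staged in a pooled MemoryOwner<byte> buffer, written back to
    // guest memory on Dispose, and the buffer is then returned to the pool.
    public static void ZeroRange(IVirtualMemoryManager memory, ulong va, int size)
    {
        using WritableRegion region = memory.GetWritableRegion(va, size, tracked: true);

        region.Memory.Span.Fill(0);
    }
}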