// MemoryManagerHostTracked.cs
using ARMeilleure.Memory;
using Ryujinx.Common.Memory;
using Ryujinx.Cpu.Jit.HostTracked;
using Ryujinx.Cpu.Signal;
using Ryujinx.Memory;
using Ryujinx.Memory.Range;
using Ryujinx.Memory.Tracking;
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;

namespace Ryujinx.Cpu.Jit
{
    /// <summary>
    /// Represents a CPU memory manager which maps guest virtual memory directly onto a host virtual region.
    /// </summary>
    public sealed class MemoryManagerHostTracked : VirtualMemoryManagerRefCountedBase, IMemoryManager, IVirtualMemoryManagerTracked
    {
        // Optional callback for invalid accesses; when it returns true the fault is
        // swallowed (read returns default/partial data), otherwise the exception propagates.
        private readonly InvalidAccessHandler _invalidAccessHandler;
        private readonly bool _unsafeMode;

        // Physical backing memory that non-private virtual ranges resolve into.
        private readonly MemoryBlock _backingMemory;

        public int AddressSpaceBits { get; }

        public MemoryTracking Tracking { get; }

        private readonly NativePageTable _nativePageTable;
        private readonly AddressSpacePartitioned _addressSpace;

        // Software page flags, used for managed-side mapping state and memory tracking.
        private readonly ManagedPageFlags _pages;

        protected override ulong AddressSpaceSize { get; }

        /// <inheritdoc/>
        public bool UsesPrivateAllocations => true;

        public IntPtr PageTablePointer => _nativePageTable.PageTablePointer;

        public MemoryManagerType Type => _unsafeMode ? MemoryManagerType.HostTrackedUnsafe : MemoryManagerType.HostTracked;

        /// <summary>
        /// Raised on <see cref="Unmap"/> with the virtual address and size of the unmapped range.
        /// </summary>
        public event Action<ulong, ulong> UnmapEvent;

        /// <summary>
        /// Creates a new instance of the host tracked memory manager.
        /// </summary>
        /// <param name="backingMemory">Physical backing memory where virtual memory will be mapped to</param>
        /// <param name="addressSpaceSize">Size of the address space</param>
        /// <param name="unsafeMode">True if unmanaged access should not be masked (unsafe), false otherwise.</param>
        /// <param name="invalidAccessHandler">Optional function to handle invalid memory accesses</param>
        public MemoryManagerHostTracked(MemoryBlock backingMemory, ulong addressSpaceSize, bool unsafeMode, InvalidAccessHandler invalidAccessHandler)
        {
            // Protection mirrors are required when the host page size is larger than the
            // guest page size, as the host can't protect memory at guest-page granularity.
            bool useProtectionMirrors = MemoryBlock.GetPageSize() > PageSize;

            Tracking = new MemoryTracking(this, PageSize, invalidAccessHandler, useProtectionMirrors);

            _backingMemory = backingMemory;
            _invalidAccessHandler = invalidAccessHandler;
            _unsafeMode = unsafeMode;
            AddressSpaceSize = addressSpaceSize;

            // Round the address space size up to the next power of two (in pages),
            // tracking the corresponding bit count.
            ulong asSize = PageSize;
            int asBits = PageBits;

            while (asSize < AddressSpaceSize)
            {
                asSize <<= 1;
                asBits++;
            }

            AddressSpaceBits = asBits;

            if (useProtectionMirrors && !NativeSignalHandler.SupportsFaultAddressPatching())
            {
                // Currently we require being able to change the fault address to something else
                // in order to "emulate" 4KB granularity protection on systems with larger page size.

                throw new PlatformNotSupportedException();
            }

            _pages = new ManagedPageFlags(asBits);
            _nativePageTable = new(asSize);
            _addressSpace = new(Tracking, backingMemory, _nativePageTable, useProtectionMirrors);
        }

        /// <summary>
        /// Gets a read-only sequence of host memory segments backing the given virtual range,
        /// coalescing segments that are contiguous in host memory.
        /// </summary>
        /// <param name="va">Virtual address of the range</param>
        /// <param name="size">Size of the range in bytes</param>
        /// <param name="tracked">True to signal a read on the memory tracking, false to only validate the range</param>
        /// <returns>
        /// Sequence over the backing memory, or an empty sequence when <paramref name="size"/> is zero
        /// or when an invalid access was handled by the invalid access handler.
        /// </returns>
        public override ReadOnlySequence<byte> GetReadOnlySequence(ulong va, int size, bool tracked = false)
        {
            if (size == 0)
            {
                return ReadOnlySequence<byte>.Empty;
            }

            try
            {
                if (tracked)
                {
                    SignalMemoryTracking(va, (ulong)size, false);
                }
                else
                {
                    AssertValidAddressAndSize(va, (ulong)size);
                }

                ulong endVa = va + (ulong)size;
                int offset = 0;

                BytesReadOnlySequenceSegment first = null, last = null;

                while (va < endVa)
                {
                    (MemoryBlock memory, ulong rangeOffset, ulong copySize) = GetMemoryOffsetAndSize(va, (ulong)(size - offset));

                    Memory<byte> physicalMemory = memory.GetMemory(rangeOffset, (int)copySize);

                    if (first is null)
                    {
                        first = last = new BytesReadOnlySequenceSegment(physicalMemory);
                    }
                    else
                    {
                        // If the new chunk is contiguous with the last segment in host memory,
                        // grow that segment in place instead of appending a new one.
                        if (last.IsContiguousWith(physicalMemory, out nuint contiguousStart, out int contiguousSize))
                        {
                            Memory<byte> contiguousPhysicalMemory = new NativeMemoryManager<byte>(contiguousStart, contiguousSize).Memory;

                            last.Replace(contiguousPhysicalMemory);
                        }
                        else
                        {
                            last = last.Append(physicalMemory);
                        }
                    }

                    va += copySize;
                    offset += (int)copySize;
                }

                return new ReadOnlySequence<byte>(first, 0, last, (int)(size - last.RunningIndex));
            }
            catch (InvalidMemoryRegionException)
            {
                if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
                {
                    throw;
                }

                return ReadOnlySequence<byte>.Empty;
            }
        }

        /// <inheritdoc/>
        public void Map(ulong va, ulong pa, ulong size, MemoryMapFlags flags)
        {
            AssertValidAddressAndSize(va, size);

            if (flags.HasFlag(MemoryMapFlags.Private))
            {
                _addressSpace.Map(va, pa, size);
            }

            _pages.AddMapping(va, size);
            _nativePageTable.Map(va, pa, size, _addressSpace, _backingMemory, flags.HasFlag(MemoryMapFlags.Private));

            Tracking.Map(va, size);
        }

        /// <inheritdoc/>
        public void Unmap(ulong va, ulong size)
        {
            AssertValidAddressAndSize(va, size);

            _addressSpace.Unmap(va, size);

            // Notify listeners and tracking before tearing down the page tables.
            UnmapEvent?.Invoke(va, size);
            Tracking.Unmap(va, size);

            _pages.RemoveMapping(va, size);
            _nativePageTable.Unmap(va, size);
        }

        /// <summary>
        /// Reads a value with memory tracking, routing invalid accesses through the
        /// invalid access handler; returns a default value when the handler swallows the fault.
        /// </summary>
        public override T ReadTracked<T>(ulong va)
        {
            try
            {
                return base.ReadTracked<T>(va);
            }
            catch (InvalidMemoryRegionException)
            {
                if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
                {
                    throw;
                }

                return default;
            }
        }

        /// <summary>
        /// Reads data from mapped memory, copying chunk by chunk across non-contiguous
        /// host ranges. A handled invalid access may leave <paramref name="data"/> partially filled.
        /// </summary>
        public override void Read(ulong va, Span<byte> data)
        {
            if (data.Length == 0)
            {
                return;
            }

            try
            {
                AssertValidAddressAndSize(va, (ulong)data.Length);

                ulong endVa = va + (ulong)data.Length;
                int offset = 0;

                while (va < endVa)
                {
                    (MemoryBlock memory, ulong rangeOffset, ulong copySize) = GetMemoryOffsetAndSize(va, (ulong)(data.Length - offset));

                    memory.GetSpan(rangeOffset, (int)copySize).CopyTo(data.Slice(offset, (int)copySize));

                    va += copySize;
                    offset += (int)copySize;
                }
            }
            catch (InvalidMemoryRegionException)
            {
                if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
                {
                    throw;
                }
            }
        }

        /// <summary>
        /// Writes data only if it differs from what is already in memory.
        /// </summary>
        /// <returns>True if a write was performed (or the range was not host-contiguous), false otherwise</returns>
        public override bool WriteWithRedundancyCheck(ulong va, ReadOnlySpan<byte> data)
        {
            if (data.Length == 0)
            {
                return false;
            }

            SignalMemoryTracking(va, (ulong)data.Length, false);

            if (TryGetVirtualContiguous(va, data.Length, out MemoryBlock memoryBlock, out ulong offset))
            {
                var target = memoryBlock.GetSpan(offset, data.Length);

                bool changed = !data.SequenceEqual(target);

                if (changed)
                {
                    data.CopyTo(target);
                }

                return changed;
            }
            else
            {
                // Non-contiguous range: always write, and conservatively report a change.
                WriteImpl(va, data);

                return true;
            }
        }

        /// <summary>
        /// Gets a span of the data at the given virtual address. Returns a view over host
        /// memory when the range is contiguous; otherwise copies into a new buffer.
        /// </summary>
        public override ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false)
        {
            if (size == 0)
            {
                return ReadOnlySpan<byte>.Empty;
            }

            if (tracked)
            {
                SignalMemoryTracking(va, (ulong)size, false);
            }

            if (TryGetVirtualContiguous(va, size, out MemoryBlock memoryBlock, out ulong offset))
            {
                return memoryBlock.GetSpan(offset, size);
            }
            else
            {
                Span<byte> data = new byte[size];

                Read(va, data);

                return data;
            }
        }

        /// <summary>
        /// Gets a writable region for the given virtual range. A contiguous range yields a
        /// direct view; otherwise a rented buffer is filled, with this manager passed as the
        /// region owner (presumably so changes are flushed back on dispose — confirm in WritableRegion).
        /// </summary>
        public override WritableRegion GetWritableRegion(ulong va, int size, bool tracked = false)
        {
            if (size == 0)
            {
                return new WritableRegion(null, va, Memory<byte>.Empty);
            }

            if (tracked)
            {
                SignalMemoryTracking(va, (ulong)size, true);
            }

            if (TryGetVirtualContiguous(va, size, out MemoryBlock memoryBlock, out ulong offset))
            {
                return new WritableRegion(null, va, memoryBlock.GetMemory(offset, size));
            }
            else
            {
                MemoryOwner<byte> memoryOwner = MemoryOwner<byte>.Rent(size);

                Read(va, memoryOwner.Span);

                return new WritableRegion(this, va, memoryOwner);
            }
        }

        /// <summary>
        /// Gets a direct reference to the value at the given virtual address.
        /// Throws if the value's bytes are not virtually contiguous; signals a write on tracking.
        /// </summary>
        public ref T GetRef<T>(ulong va) where T : unmanaged
        {
            if (!TryGetVirtualContiguous(va, Unsafe.SizeOf<T>(), out MemoryBlock memory, out ulong offset))
            {
                ThrowMemoryNotContiguous();
            }

            SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), true);

            return ref memory.GetRef<T>(offset);
        }

        /// <inheritdoc/>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public override bool IsMapped(ulong va)
        {
            return ValidateAddress(va) && _pages.IsMapped(va);
        }

        /// <summary>
        /// Checks if a memory range is fully mapped.
        /// </summary>
        public bool IsRangeMapped(ulong va, ulong size)
        {
            AssertValidAddressAndSize(va, size);

            return _pages.IsRangeMapped(va, size);
        }

        /// <summary>
        /// Attempts to resolve a virtual range to a single contiguous host memory block and offset.
        /// </summary>
        /// <returns>True if the whole range is contiguous in host memory, false otherwise</returns>
        private bool TryGetVirtualContiguous(ulong va, int size, out MemoryBlock memory, out ulong offset)
        {
            if (_addressSpace.HasAnyPrivateAllocation(va, (ulong)size, out PrivateRange range))
            {
                // If we have a private allocation overlapping the range,
                // then the access is only considered contiguous if it covers the entire range.

                if (range.Memory != null)
                {
                    memory = range.Memory;
                    offset = range.Offset;

                    return true;
                }

                memory = null;
                offset = 0;

                return false;
            }

            memory = _backingMemory;
            offset = GetPhysicalAddressInternal(va);

            return IsPhysicalContiguous(va, size);
        }

        /// <summary>
        /// Checks whether the pages covering the given virtual range map to consecutive
        /// physical addresses in the backing memory.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private bool IsPhysicalContiguous(ulong va, int size)
        {
            if (!ValidateAddress(va) || !ValidateAddressAndSize(va, (ulong)size))
            {
                return false;
            }

            int pages = GetPagesCount(va, (uint)size, out va);

            // Walk page by page, comparing each page's physical address with the next one's.
            for (int page = 0; page < pages - 1; page++)
            {
                if (!ValidateAddress(va + PageSize))
                {
                    return false;
                }

                if (GetPhysicalAddressInternal(va) + PageSize != GetPhysicalAddressInternal(va + PageSize))
                {
                    return false;
                }

                va += PageSize;
            }

            return true;
        }

        /// <summary>
        /// Gets the size of the physically contiguous run starting at the given virtual address,
        /// clamped to <paramref name="size"/>.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private ulong GetContiguousSize(ulong va, ulong size)
        {
            // Start with the bytes remaining in the first (possibly unaligned) page.
            ulong contiguousSize = PageSize - (va & PageMask);

            if (!ValidateAddress(va) || !ValidateAddressAndSize(va, size))
            {
                return contiguousSize;
            }

            int pages = GetPagesCount(va, size, out va);

            for (int page = 0; page < pages - 1; page++)
            {
                if (!ValidateAddress(va + PageSize))
                {
                    return contiguousSize;
                }

                if (GetPhysicalAddressInternal(va) + PageSize != GetPhysicalAddressInternal(va + PageSize))
                {
                    return contiguousSize;
                }

                va += PageSize;
                contiguousSize += PageSize;
            }

            return Math.Min(contiguousSize, size);
        }

        /// <summary>
        /// Gets the host memory block, offset and length of the first contiguous chunk at
        /// <paramref name="va"/>: either a private allocation, or a physically contiguous
        /// run of backing memory limited by the next private allocation.
        /// </summary>
        private (MemoryBlock, ulong, ulong) GetMemoryOffsetAndSize(ulong va, ulong size)
        {
            PrivateRange privateRange = _addressSpace.GetFirstPrivateAllocation(va, size, out ulong nextVa);

            if (privateRange.Memory != null)
            {
                return (privateRange.Memory, privateRange.Offset, privateRange.Size);
            }

            ulong physSize = GetContiguousSize(va, Math.Min(size, nextVa - va));

            return (_backingMemory, GetPhysicalAddressChecked(va), physSize);
        }

        /// <summary>
        /// Gets the host regions that make up the given virtual range,
        /// or null if the range is invalid or not fully mapped.
        /// </summary>
        public IEnumerable<HostMemoryRange> GetHostRegions(ulong va, ulong size)
        {
            if (!ValidateAddressAndSize(va, size))
            {
                return null;
            }

            var regions = new List<HostMemoryRange>();
            ulong endVa = va + size;

            try
            {
                while (va < endVa)
                {
                    (MemoryBlock memory, ulong rangeOffset, ulong rangeSize) = GetMemoryOffsetAndSize(va, endVa - va);

                    regions.Add(new((UIntPtr)memory.GetPointer(rangeOffset, rangeSize), rangeSize));

                    va += rangeSize;
                }
            }
            catch (InvalidMemoryRegionException)
            {
                return null;
            }

            return regions;
        }

        /// <summary>
        /// Gets the physical regions that make up the given virtual range.
        /// </summary>
        public IEnumerable<MemoryRange> GetPhysicalRegions(ulong va, ulong size)
        {
            if (size == 0)
            {
                return Enumerable.Empty<MemoryRange>();
            }

            return GetPhysicalRegionsImpl(va, size);
        }

        // Builds the physical region list by walking pages and splitting whenever the
        // physical addresses stop being consecutive. Returns null for invalid ranges.
        private List<MemoryRange> GetPhysicalRegionsImpl(ulong va, ulong size)
        {
            if (!ValidateAddress(va) || !ValidateAddressAndSize(va, size))
            {
                return null;
            }

            int pages = GetPagesCount(va, (uint)size, out va);

            var regions = new List<MemoryRange>();

            ulong regionStart = GetPhysicalAddressInternal(va);
            ulong regionSize = PageSize;

            for (int page = 0; page < pages - 1; page++)
            {
                if (!ValidateAddress(va + PageSize))
                {
                    return null;
                }

                ulong newPa = GetPhysicalAddressInternal(va + PageSize);

                if (GetPhysicalAddressInternal(va) + PageSize != newPa)
                {
                    regions.Add(new MemoryRange(regionStart, regionSize));
                    regionStart = newPa;
                    // Reset to zero; the += below then counts the new region's first page.
                    regionSize = 0;
                }

                va += PageSize;
                regionSize += PageSize;
            }

            regions.Add(new MemoryRange(regionStart, regionSize));

            return regions;
        }

        /// <inheritdoc/>
        /// <remarks>
        /// This function also validates that the given range is both valid and mapped, and will throw if it is not.
        /// </remarks>
        public override void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null)
        {
            AssertValidAddressAndSize(va, size);

            if (precise)
            {
                Tracking.VirtualMemoryEvent(va, size, write, precise: true, exemptId);
                return;
            }

            // Software table, used for managed memory tracking.

            _pages.SignalMemoryTracking(Tracking, va, size, write, exemptId);
        }

        /// <inheritdoc/>
        public RegionHandle BeginTracking(ulong address, ulong size, int id, RegionFlags flags = RegionFlags.None)
        {
            return Tracking.BeginTracking(address, size, id, flags);
        }

        /// <inheritdoc/>
        public MultiRegionHandle BeginGranularTracking(ulong address, ulong size, IEnumerable<IRegionHandle> handles, ulong granularity, int id, RegionFlags flags = RegionFlags.None)
        {
            return Tracking.BeginGranularTracking(address, size, handles, granularity, id, flags);
        }

        /// <inheritdoc/>
        public SmartMultiRegionHandle BeginSmartGranularTracking(ulong address, ulong size, ulong granularity, int id)
        {
            return Tracking.BeginSmartGranularTracking(address, size, granularity, id);
        }

        // Translates a virtual address, throwing InvalidMemoryRegionException if unmapped.
        private ulong GetPhysicalAddressChecked(ulong va)
        {
            if (!IsMapped(va))
            {
                ThrowInvalidMemoryRegionException($"Not mapped: va=0x{va:X16}");
            }

            return GetPhysicalAddressInternal(va);
        }

        // Translates a virtual address via the native page table without validation.
        private ulong GetPhysicalAddressInternal(ulong va)
        {
            return _nativePageTable.GetPhysicalAddress(va);
        }

        /// <inheritdoc/>
        public void Reprotect(ulong va, ulong size, MemoryPermission protection)
        {
            // TODO
        }

        /// <inheritdoc/>
        public void TrackingReprotect(ulong va, ulong size, MemoryPermission protection, bool guest)
        {
            // Guest-side protection changes go through the host address space;
            // host-side (managed) tracking only updates the software page flags.
            if (guest)
            {
                _addressSpace.Reprotect(va, size, protection);
            }
            else
            {
                _pages.TrackingReprotect(va, size, protection);
            }
        }

        /// <summary>
        /// Disposes of resources used by the memory manager.
        /// </summary>
        protected override void Destroy()
        {
            _addressSpace.Dispose();
            _nativePageTable.Dispose();
        }

        protected override Memory<byte> GetPhysicalAddressMemory(nuint pa, int size)
            => _backingMemory.GetMemory(pa, size);

        protected override Span<byte> GetPhysicalAddressSpan(nuint pa, int size)
            => _backingMemory.GetSpan(pa, size);

        /// <summary>
        /// Writes data to mapped memory, copying chunk by chunk across non-contiguous
        /// host ranges. A handled invalid access may leave the destination partially written.
        /// </summary>
        protected override void WriteImpl(ulong va, ReadOnlySpan<byte> data)
        {
            try
            {
                AssertValidAddressAndSize(va, (ulong)data.Length);

                ulong endVa = va + (ulong)data.Length;
                int offset = 0;

                while (va < endVa)
                {
                    (MemoryBlock memory, ulong rangeOffset, ulong copySize) = GetMemoryOffsetAndSize(va, (ulong)(data.Length - offset));

                    data.Slice(offset, (int)copySize).CopyTo(memory.GetSpan(rangeOffset, (int)copySize));

                    va += copySize;
                    offset += (int)copySize;
                }
            }
            catch (InvalidMemoryRegionException)
            {
                if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
                {
                    throw;
                }
            }
        }

        protected override nuint TranslateVirtualAddressChecked(ulong va)
            => (nuint)GetPhysicalAddressChecked(va);

        protected override nuint TranslateVirtualAddressUnchecked(ulong va)
            => (nuint)GetPhysicalAddressInternal(va);
    }
}