// LinearScanAllocator.cs
using ARMeilleure.Common;
using ARMeilleure.IntermediateRepresentation;
using ARMeilleure.Translation;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Numerics;

namespace ARMeilleure.CodeGen.RegisterAllocators
{
    // Based on:
    // "Linear Scan Register Allocation for the Java(tm) HotSpot Client Compiler".
    // http://www.christianwimmer.at/Publications/Wimmer04a/Wimmer04a.pdf
    class LinearScanAllocator : IRegisterAllocator
    {
        // Each instruction occupies InstructionGap positions on the linear timeline,
        // leaving room for split copies between instructions.
        private const int InstructionGap = 2;
        private const int InstructionGapMask = InstructionGap - 1;

        // Positions (on the linear timeline) where a block starts; used to avoid
        // inserting split copies on block boundaries (edges are handled separately).
        private HashSet<int> _blockEdges;
        private LiveRange[] _blockRanges;
        private BitMap[] _blockLiveIn;

        // All live intervals, sorted by start position. The first RegistersCount * 2
        // entries are the fixed (physical register) intervals.
        private List<LiveInterval> _intervals;
        private LiveInterval[] _parentIntervals;

        // Maps position / InstructionGap to the (list, node) pair of the operation
        // at that position, so copies can be inserted next to it.
        private List<(IntrusiveList<Operation>, Operation)> _operationNodes;
        private int _operationsCount;

        // Per-run mutable allocation state: active/inactive interval sets, used
        // register masks, and cached "free position" templates per register type.
        private class AllocationContext
        {
            public RegisterMasks Masks { get; }

            public StackAllocator StackAlloc { get; }

            public BitMap Active { get; }
            public BitMap Inactive { get; }

            public int IntUsedRegisters { get; set; }
            public int VecUsedRegisters { get; set; }

            private readonly int[] _intFreePositions;
            private readonly int[] _vecFreePositions;
            private readonly int _intFreePositionsCount;
            private readonly int _vecFreePositionsCount;

            public AllocationContext(StackAllocator stackAlloc, RegisterMasks masks, int intervalsCount)
            {
                StackAlloc = stackAlloc;
                Masks = masks;

                Active = new BitMap(Allocators.Default, intervalsCount);
                Inactive = new BitMap(Allocators.Default, intervalsCount);

                PopulateFreePositions(RegisterType.Integer, out _intFreePositions, out _intFreePositionsCount);
                PopulateFreePositions(RegisterType.Vector, out _vecFreePositions, out _vecFreePositionsCount);

                // Builds the template "free positions" array: available registers get
                // int.MaxValue (free forever), unavailable ones stay 0 (never free).
                void PopulateFreePositions(RegisterType type, out int[] positions, out int count)
                {
                    positions = new int[masks.RegistersCount];
                    count = BitOperations.PopCount((uint)masks.GetAvailableRegisters(type));

                    int mask = masks.GetAvailableRegisters(type);

                    for (int i = 0; i < positions.Length; i++)
                    {
                        if ((mask & (1 << i)) != 0)
                        {
                            positions[i] = int.MaxValue;
                        }
                    }
                }
            }

            // Copies the free-positions template for the given register type into the
            // caller-provided span, and reports how many registers are available.
            public void GetFreePositions(RegisterType type, in Span<int> positions, out int count)
            {
                if (type == RegisterType.Integer)
                {
                    _intFreePositions.CopyTo(positions);

                    count = _intFreePositionsCount;
                }
                else
                {
                    Debug.Assert(type == RegisterType.Vector);

                    _vecFreePositions.CopyTo(positions);

                    count = _vecFreePositionsCount;
                }
            }

            public void MoveActiveToInactive(int bit)
            {
                Move(Active, Inactive, bit);
            }

            public void MoveInactiveToActive(int bit)
            {
                Move(Inactive, Active, bit);
            }

            private static void Move(BitMap source, BitMap dest, int bit)
            {
                source.Clear(bit);

                dest.Set(bit);
            }
        }

        // Entry point: numbers locals, builds live intervals, walks the intervals in
        // start order allocating registers (fixed intervals are pre-activated), then
        // rewrites locals to registers and inserts split/edge resolution copies.
        public AllocationResult RunPass(
            ControlFlowGraph cfg,
            StackAllocator stackAlloc,
            RegisterMasks regMasks)
        {
            NumberLocals(cfg, regMasks.RegistersCount);

            var context = new AllocationContext(stackAlloc, regMasks, _intervals.Count);

            BuildIntervals(cfg, context);

            for (int index = 0; index < _intervals.Count; index++)
            {
                LiveInterval current = _intervals[index];

                if (current.IsEmpty)
                {
                    continue;
                }

                if (current.IsFixed)
                {
                    context.Active.Set(index);

                    if (current.IsFixedAndUsed)
                    {
                        if (current.Register.Type == RegisterType.Integer)
                        {
                            context.IntUsedRegisters |= 1 << current.Register.Index;
                        }
                        else /* if (interval.Register.Type == RegisterType.Vector) */
                        {
                            context.VecUsedRegisters |= 1 << current.Register.Index;
                        }
                    }

                    continue;
                }

                AllocateInterval(context, current, index, regMasks.RegistersCount);
            }

            // Skip the fixed intervals (first RegistersCount * 2 entries) and replace
            // each allocated local with its assigned register operand.
            for (int index = regMasks.RegistersCount * 2; index < _intervals.Count; index++)
            {
                if (!_intervals[index].IsSpilled)
                {
                    ReplaceLocalWithRegister(_intervals[index]);
                }
            }

            InsertSplitCopies();
            InsertSplitCopiesAtEdges(cfg);

            return new AllocationResult(context.IntUsedRegisters, context.VecUsedRegisters, context.StackAlloc.TotalSize);
        }

        // Refreshes the active/inactive sets relative to the start of the current
        // interval, then tries a spill-free allocation, falling back to spilling.
        private void AllocateInterval(AllocationContext context, LiveInterval current, int cIndex, int registersCount)
        {
            // Check active intervals that already ended.
            foreach (int iIndex in context.Active)
            {
                LiveInterval interval = _intervals[iIndex];

                interval.Forward(current.GetStart());

                if (interval.GetEnd() < current.GetStart())
                {
                    context.Active.Clear(iIndex);
                }
                else if (!interval.Overlaps(current.GetStart()))
                {
                    context.MoveActiveToInactive(iIndex);
                }
            }

            // Check inactive intervals that already ended or were reactivated.
            foreach (int iIndex in context.Inactive)
            {
                LiveInterval interval = _intervals[iIndex];

                interval.Forward(current.GetStart());

                if (interval.GetEnd() < current.GetStart())
                {
                    context.Inactive.Clear(iIndex);
                }
                else if (interval.Overlaps(current.GetStart()))
                {
                    context.MoveInactiveToActive(iIndex);
                }
            }

            if (!TryAllocateRegWithoutSpill(context, current, cIndex, registersCount))
            {
                AllocateRegWithSpill(context, current, cIndex, registersCount);
            }
        }

        // Tries to find a register that is free for (at least a prefix of) the current
        // interval's lifetime. If a register is only free for a prefix, the interval is
        // split and the remainder is re-inserted for later allocation. Returns false if
        // no register is free at the interval's start.
        private bool TryAllocateRegWithoutSpill(AllocationContext context, LiveInterval current, int cIndex, int registersCount)
        {
            RegisterType regType = current.Local.Type.ToRegisterType();

            Span<int> freePositions = stackalloc int[registersCount];

            context.GetFreePositions(regType, freePositions, out int freePositionsCount);

            // Registers held by active intervals are not free at all.
            foreach (int iIndex in context.Active)
            {
                LiveInterval interval = _intervals[iIndex];
                Register reg = interval.Register;

                if (reg.Type == regType)
                {
                    freePositions[reg.Index] = 0;
                    freePositionsCount--;
                }
            }

            // If all registers are already active, return early. No point in inspecting the inactive set to look for
            // holes.
            if (freePositionsCount == 0)
            {
                return false;
            }

            // Inactive intervals limit a register's free range to the point where they
            // next overlap with the current interval.
            foreach (int iIndex in context.Inactive)
            {
                LiveInterval interval = _intervals[iIndex];
                Register reg = interval.Register;

                ref int freePosition = ref freePositions[reg.Index];

                if (reg.Type == regType && freePosition != 0)
                {
                    int overlapPosition = interval.GetOverlapPosition(current);

                    if (overlapPosition != LiveInterval.NotFound && freePosition > overlapPosition)
                    {
                        freePosition = overlapPosition;
                    }
                }
            }

            // If this is a copy destination variable, we prefer the register used for the copy source.
            // If the register is available, then the copy can be eliminated later as both source
            // and destination will use the same register.
            int selectedReg;

            if (current.TryGetCopySourceRegister(out int preferredReg) && freePositions[preferredReg] >= current.GetEnd())
            {
                selectedReg = preferredReg;
            }
            else
            {
                selectedReg = GetHighestValueIndex(freePositions);
            }

            int selectedNextUse = freePositions[selectedReg];

            // Intervals starts and ends at odd positions, unless they span an entire
            // block, in this case they will have ranges at a even position.
            // When a interval is loaded from the stack to a register, we can only
            // do the split at a odd position, because otherwise the split interval
            // that is inserted on the list to be processed may clobber a register
            // used by the instruction at the same position as the split.
            // The problem only happens when a interval ends exactly at this instruction,
            // because otherwise they would interfere, and the register wouldn't be selected.
            // When the interval is aligned and the above happens, there's no problem as
            // the instruction that is actually with the last use is the one
            // before that position.
            selectedNextUse &= ~InstructionGapMask;

            if (selectedNextUse <= current.GetStart())
            {
                return false;
            }
            else if (selectedNextUse < current.GetEnd())
            {
                LiveInterval splitChild = current.Split(selectedNextUse);

                if (splitChild.UsesCount != 0)
                {
                    Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");

                    InsertInterval(splitChild, registersCount);
                }
                else
                {
                    Spill(context, splitChild);
                }
            }

            current.Register = new Register(selectedReg, regType);

            if (regType == RegisterType.Integer)
            {
                context.IntUsedRegisters |= 1 << selectedReg;
            }
            else /* if (regType == RegisterType.Vector) */
            {
                context.VecUsedRegisters |= 1 << selectedReg;
            }

            context.Active.Set(cIndex);

            return true;
        }

        // Allocates a register by spilling: either spills the current interval up to
        // its first use, or takes a register from other intervals (splitting and
        // spilling them), possibly also splitting current at a blocking fixed-register
        // position.
        private void AllocateRegWithSpill(AllocationContext context, LiveInterval current, int cIndex, int registersCount)
        {
            RegisterType regType = current.Local.Type.ToRegisterType();

            // usePositions: next use of each register by another interval.
            // blockedPositions: position where a fixed (unspillable) interval needs it.
            Span<int> usePositions = stackalloc int[registersCount];
            Span<int> blockedPositions = stackalloc int[registersCount];

            context.GetFreePositions(regType, usePositions, out _);
            context.GetFreePositions(regType, blockedPositions, out _);

            foreach (int iIndex in context.Active)
            {
                LiveInterval interval = _intervals[iIndex];
                Register reg = interval.Register;

                if (reg.Type == regType)
                {
                    ref int usePosition = ref usePositions[reg.Index];
                    ref int blockedPosition = ref blockedPositions[reg.Index];

                    if (interval.IsFixed)
                    {
                        // Fixed and active: the register is unavailable right now.
                        usePosition = 0;
                        blockedPosition = 0;
                    }
                    else
                    {
                        int nextUse = interval.NextUseAfter(current.GetStart());

                        if (nextUse != LiveInterval.NotFound && usePosition > nextUse)
                        {
                            usePosition = nextUse;
                        }
                    }
                }
            }

            foreach (int iIndex in context.Inactive)
            {
                LiveInterval interval = _intervals[iIndex];
                Register reg = interval.Register;

                if (reg.Type == regType)
                {
                    ref int usePosition = ref usePositions[reg.Index];
                    ref int blockedPosition = ref blockedPositions[reg.Index];

                    if (interval.IsFixed)
                    {
                        int overlapPosition = interval.GetOverlapPosition(current);

                        if (overlapPosition != LiveInterval.NotFound)
                        {
                            blockedPosition = Math.Min(blockedPosition, overlapPosition);
                            usePosition = Math.Min(usePosition, overlapPosition);
                        }
                    }
                    else if (interval.Overlaps(current))
                    {
                        int nextUse = interval.NextUseAfter(current.GetStart());

                        if (nextUse != LiveInterval.NotFound && usePosition > nextUse)
                        {
                            usePosition = nextUse;
                        }
                    }
                }
            }

            int selectedReg = GetHighestValueIndex(usePositions);
            int currentFirstUse = current.FirstUse();

            Debug.Assert(currentFirstUse >= 0, "Current interval has no uses.");

            if (usePositions[selectedReg] < currentFirstUse)
            {
                // All intervals on inactive and active are being used before current,
                // so spill the current interval.
                Debug.Assert(currentFirstUse > current.GetStart(), "Trying to spill a interval currently being used.");

                LiveInterval splitChild = current.Split(currentFirstUse);

                Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");

                InsertInterval(splitChild, registersCount);

                Spill(context, current);
            }
            else if (blockedPositions[selectedReg] > current.GetEnd())
            {
                // Spill made the register available for the entire current lifetime,
                // so we only need to split the intervals using the selected register.
                current.Register = new Register(selectedReg, regType);

                SplitAndSpillOverlappingIntervals(context, current, registersCount);

                context.Active.Set(cIndex);
            }
            else
            {
                // There are conflicts even after spill due to the use of fixed registers
                // that can't be spilled, so we need to also split current at the point of
                // the first fixed register use.
                current.Register = new Register(selectedReg, regType);

                int splitPosition = blockedPositions[selectedReg] & ~InstructionGapMask;

                Debug.Assert(splitPosition > current.GetStart(), "Trying to split a interval at a invalid position.");

                LiveInterval splitChild = current.Split(splitPosition);

                if (splitChild.UsesCount != 0)
                {
                    Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");

                    InsertInterval(splitChild, registersCount);
                }
                else
                {
                    Spill(context, splitChild);
                }

                SplitAndSpillOverlappingIntervals(context, current, registersCount);

                context.Active.Set(cIndex);
            }
        }

        // Returns the index of the highest value in the span; ties keep the first
        // occurrence, and the scan stops early on int.MaxValue (can't be beaten).
        private static int GetHighestValueIndex(ReadOnlySpan<int> span)
        {
            int highest = int.MinValue;

            int selected = 0;

            for (int index = 0; index < span.Length; index++)
            {
                int current = span[index];

                if (highest < current)
                {
                    highest = current;
                    selected = index;

                    if (current == int.MaxValue)
                    {
                        break;
                    }
                }
            }

            return selected;
        }

        // Splits and spills every non-fixed active/inactive interval that holds
        // current's register and conflicts with it, freeing the register for current.
        private void SplitAndSpillOverlappingIntervals(AllocationContext context, LiveInterval current, int registersCount)
        {
            foreach (int iIndex in context.Active)
            {
                LiveInterval interval = _intervals[iIndex];

                if (!interval.IsFixed && interval.Register == current.Register)
                {
                    SplitAndSpillOverlappingInterval(context, current, interval, registersCount);

                    context.Active.Clear(iIndex);
                }
            }

            foreach (int iIndex in context.Inactive)
            {
                LiveInterval interval = _intervals[iIndex];

                if (!interval.IsFixed && interval.Register == current.Register && interval.Overlaps(current))
                {
                    SplitAndSpillOverlappingInterval(context, current, interval, registersCount);

                    context.Inactive.Clear(iIndex);
                }
            }
        }

        private void SplitAndSpillOverlappingInterval(
            AllocationContext context,
            LiveInterval current,
            LiveInterval interval,
            int registersCount)
        {
            // If there's a next use after the start of the current interval,
            // we need to split the spilled interval twice, and re-insert it
            // on the "pending" list to ensure that it will get a new register
            // on that use position.
            int nextUse = interval.NextUseAfter(current.GetStart());

            LiveInterval splitChild;

            if (interval.GetStart() < current.GetStart())
            {
                splitChild = interval.Split(current.GetStart());
            }
            else
            {
                splitChild = interval;
            }

            // NOTE(review): -1 here presumably equals LiveInterval.NotFound, which the
            // rest of this file uses for the same check — confirm and unify.
            if (nextUse != -1)
            {
                Debug.Assert(nextUse > current.GetStart(), "Trying to spill a interval currently being used.");

                if (nextUse > splitChild.GetStart())
                {
                    LiveInterval right = splitChild.Split(nextUse);

                    Spill(context, splitChild);

                    splitChild = right;
                }

                InsertInterval(splitChild, registersCount);
            }
            else
            {
                Spill(context, splitChild);
            }
        }

        // Re-inserts a split interval into the sorted pending list, keeping start
        // order. The search starts past the fixed intervals (registersCount * 2).
        private void InsertInterval(LiveInterval interval, int registersCount)
        {
            Debug.Assert(interval.UsesCount != 0, "Trying to insert a interval without uses.");
            Debug.Assert(!interval.IsEmpty, "Trying to insert a empty interval.");
            Debug.Assert(!interval.IsSpilled, "Trying to insert a spilled interval.");

            int startIndex = registersCount * 2;

            int insertIndex = _intervals.BinarySearch(startIndex, _intervals.Count - startIndex, interval, null);

            if (insertIndex < 0)
            {
                insertIndex = ~insertIndex;
            }

            _intervals.Insert(insertIndex, interval);
        }

        private static void Spill(AllocationContext context, LiveInterval interval)
        {
            Debug.Assert(!interval.IsFixed, "Trying to spill a fixed interval.");
            Debug.Assert(interval.UsesCount == 0, "Trying to spill a interval with uses.");

            // We first check if any of the siblings were spilled, if so we can reuse
            // the stack offset. Otherwise, we allocate a new space on the stack.
            // This prevents stack-to-stack copies being necessary for a split interval.
            if (!interval.TrySpillWithSiblingOffset())
            {
                interval.Spill(context.StackAlloc.Allocate(interval.Local.Type));
            }
        }

        // Inserts register/stack move sequences between adjacent split children that
        // meet inside a block (block-edge positions are resolved separately).
        private void InsertSplitCopies()
        {
            Dictionary<int, CopyResolver> copyResolvers = new();

            CopyResolver GetCopyResolver(int position)
            {
                if (!copyResolvers.TryGetValue(position, out CopyResolver copyResolver))
                {
                    copyResolver = new CopyResolver();

                    copyResolvers.Add(position, copyResolver);
                }

                return copyResolver;
            }

            foreach (LiveInterval interval in _intervals.Where(x => x.IsSplit))
            {
                LiveInterval previous = interval;

                foreach (LiveInterval splitChild in interval.SplitChildren())
                {
                    int splitPosition = splitChild.GetStart();

                    // Only add a copy when the two children are truly adjacent and the
                    // split point is not a block edge.
                    if (!_blockEdges.Contains(splitPosition) && previous.GetEnd() == splitPosition)
                    {
                        GetCopyResolver(splitPosition).AddSplit(previous, splitChild);
                    }

                    previous = splitChild;
                }
            }

            foreach (KeyValuePair<int, CopyResolver> kv in copyResolvers)
            {
                CopyResolver copyResolver = kv.Value;

                if (!copyResolver.HasCopy)
                {
                    continue;
                }

                int splitPosition = kv.Key;

                (IntrusiveList<Operation> nodes, Operation node) = GetOperationNode(splitPosition);

                Operation[] sequence = copyResolver.Sequence();

                nodes.AddBefore(node, sequence[0]);

                node = sequence[0];

                for (int index = 1; index < sequence.Length; index++)
                {
                    nodes.AddAfter(node, sequence[index]);

                    node = sequence[index];
                }
            }
        }

        // Resolves location differences of split intervals across CFG edges: appends
        // copies at the end of the predecessor, the start of a sole-predecessor
        // successor, or on a newly split critical edge.
        private void InsertSplitCopiesAtEdges(ControlFlowGraph cfg)
        {
            int blocksCount = cfg.Blocks.Count;

            bool IsSplitEdgeBlock(BasicBlock block)
            {
                return block.Index >= blocksCount;
            }

            // Reset iterators to beginning because GetSplitChild depends on the state of the iterator.
            foreach (LiveInterval interval in _intervals)
            {
                interval.Reset();
            }

            for (BasicBlock block = cfg.Blocks.First; block != null; block = block.ListNext)
            {
                if (IsSplitEdgeBlock(block))
                {
                    continue;
                }

                bool hasSingleOrNoSuccessor = block.SuccessorsCount <= 1;

                for (int i = 0; i < block.SuccessorsCount; i++)
                {
                    BasicBlock successor = block.GetSuccessor(i);

                    int succIndex = successor.Index;

                    // If the current node is a split node, then the actual successor node
                    // (the successor before the split) should be right after it.
                    if (IsSplitEdgeBlock(successor))
                    {
                        succIndex = successor.GetSuccessor(0).Index;
                    }

                    CopyResolver copyResolver = null;

                    foreach (int iIndex in _blockLiveIn[succIndex])
                    {
                        LiveInterval interval = _parentIntervals[iIndex];

                        if (!interval.IsSplit)
                        {
                            continue;
                        }

                        int lEnd = _blockRanges[block.Index].End - 1;
                        int rStart = _blockRanges[succIndex].Start;

                        LiveInterval left = interval.GetSplitChild(lEnd);
                        LiveInterval right = interval.GetSplitChild(rStart);

                        if (left != default && right != default && left != right)
                        {
                            copyResolver ??= new CopyResolver();

                            copyResolver.AddSplit(left, right);
                        }
                    }

                    if (copyResolver == null || !copyResolver.HasCopy)
                    {
                        continue;
                    }

                    Operation[] sequence = copyResolver.Sequence();

                    if (hasSingleOrNoSuccessor)
                    {
                        foreach (Operation operation in sequence)
                        {
                            block.Append(operation);
                        }
                    }
                    else if (successor.Predecessors.Count == 1)
                    {
                        successor.Operations.AddFirst(sequence[0]);

                        Operation prependNode = sequence[0];

                        for (int index = 1; index < sequence.Length; index++)
                        {
                            Operation operation = sequence[index];

                            successor.Operations.AddAfter(prependNode, operation);

                            prependNode = operation;
                        }
                    }
                    else
                    {
                        // Split the critical edge.
                        BasicBlock splitBlock = cfg.SplitEdge(block, successor);

                        foreach (Operation operation in sequence)
                        {
                            splitBlock.Append(operation);
                        }
                    }
                }
            }
        }

        // Rewrites every use of the interval's local variable (sources, memory operand
        // base/index, and destinations) to its assigned register operand.
        private void ReplaceLocalWithRegister(LiveInterval current)
        {
            Operand register = GetRegister(current);

            foreach (int usePosition in current.UsePositions())
            {
                (_, Operation operation) = GetOperationNode(usePosition);

                for (int index = 0; index < operation.SourcesCount; index++)
                {
                    Operand source = operation.GetSource(index);

                    if (source == current.Local)
                    {
                        operation.SetSource(index, register);
                    }
                    else if (source.Kind == OperandKind.Memory)
                    {
                        MemoryOperand memOp = source.GetMemory();

                        if (memOp.BaseAddress == current.Local)
                        {
                            memOp.BaseAddress = register;
                        }

                        if (memOp.Index == current.Local)
                        {
                            memOp.Index = register;
                        }
                    }
                }

                for (int index = 0; index < operation.DestinationsCount; index++)
                {
                    Operand dest = operation.GetDestination(index);

                    if (dest == current.Local)
                    {
                        operation.SetDestination(index, register);
                    }
                }
            }
        }

        private static Operand GetRegister(LiveInterval interval)
        {
            Debug.Assert(!interval.IsSpilled, "Spilled intervals are not allowed.");

            return Operand.Factory.Register(
                interval.Register.Index,
                interval.Register.Type,
                interval.Local.Type);
        }

        private (IntrusiveList<Operation>, Operation) GetOperationNode(int position)
        {
            return _operationNodes[position / InstructionGap];
        }

        // Assigns a number to every local variable (stored in the operand itself) and
        // creates its live interval, after seeding the fixed register intervals.
        // Also records the operation node for each position and the total position count.
        private void NumberLocals(ControlFlowGraph cfg, int registersCount)
        {
            _operationNodes = new List<(IntrusiveList<Operation>, Operation)>();
            _intervals = new List<LiveInterval>();

            // Fixed intervals: interleaved integer/vector, matching GetRegisterId.
            for (int index = 0; index < registersCount; index++)
            {
                _intervals.Add(new LiveInterval(new Register(index, RegisterType.Integer)));
                _intervals.Add(new LiveInterval(new Register(index, RegisterType.Vector)));
            }

            // The "visited" state is stored in the MSB of the local's value.
            const ulong VisitedMask = 1ul << 63;

            static bool IsVisited(Operand local)
            {
                return (local.GetValueUnsafe() & VisitedMask) != 0;
            }

            static void SetVisited(Operand local)
            {
                local.GetValueUnsafe() |= VisitedMask;
            }

            _operationsCount = 0;

            for (int index = cfg.PostOrderBlocks.Length - 1; index >= 0; index--)
            {
                BasicBlock block = cfg.PostOrderBlocks[index];

                for (Operation node = block.Operations.First; node != default; node = node.ListNext)
                {
                    _operationNodes.Add((block.Operations, node));

                    for (int i = 0; i < node.DestinationsCount; i++)
                    {
                        Operand dest = node.GetDestination(i);

                        if (dest.Kind == OperandKind.LocalVariable && !IsVisited(dest))
                        {
                            dest.NumberLocal(_intervals.Count);

                            LiveInterval interval = new LiveInterval(dest);
                            _intervals.Add(interval);

                            SetVisited(dest);

                            // If this is a copy (or copy-like operation), set the copy source interval as well.
                            // This is used for register preferencing later on, which allows the copy to be eliminated
                            // in some cases.
                            if (node.Instruction == Instruction.Copy || node.Instruction == Instruction.ZeroExtend32)
                            {
                                Operand source = node.GetSource(0);

                                if (source.Kind == OperandKind.LocalVariable &&
                                    source.GetLocalNumber() > 0 &&
                                    (node.Instruction == Instruction.Copy || source.Type == OperandType.I32))
                                {
                                    interval.SetCopySource(_intervals[source.GetLocalNumber()]);
                                }
                            }
                        }
                    }
                }

                _operationsCount += block.Operations.Count * InstructionGap;

                if (block.Operations.Count == 0)
                {
                    // Pretend we have a dummy instruction on the empty block.
                    _operationNodes.Add((default, default));

                    _operationsCount += InstructionGap;
                }
            }

            _parentIntervals = _intervals.ToArray();
        }

        // Classic dataflow liveness: computes per-block gen/kill sets, iterates to a
        // fixed point for live-in/live-out, then walks blocks backwards building the
        // live ranges and use positions of every interval.
        private void BuildIntervals(ControlFlowGraph cfg, AllocationContext context)
        {
            _blockRanges = new LiveRange[cfg.Blocks.Count];

            int mapSize = _intervals.Count;

            BitMap[] blkLiveGen = new BitMap[cfg.Blocks.Count];
            BitMap[] blkLiveKill = new BitMap[cfg.Blocks.Count];

            // Compute local live sets.
            for (BasicBlock block = cfg.Blocks.First; block != null; block = block.ListNext)
            {
                BitMap liveGen = new(Allocators.Default, mapSize);
                BitMap liveKill = new(Allocators.Default, mapSize);

                for (Operation node = block.Operations.First; node != default; node = node.ListNext)
                {
                    for (int i = 0; i < node.SourcesCount; i++)
                    {
                        VisitSource(node.GetSource(i));
                    }

                    for (int i = 0; i < node.DestinationsCount; i++)
                    {
                        VisitDestination(node.GetDestination(i));
                    }

                    // A source is live-in unless it was defined earlier in this block.
                    void VisitSource(Operand source)
                    {
                        if (IsLocalOrRegister(source.Kind))
                        {
                            int id = GetOperandId(source);

                            if (!liveKill.IsSet(id))
                            {
                                liveGen.Set(id);
                            }
                        }
                        else if (source.Kind == OperandKind.Memory)
                        {
                            MemoryOperand memOp = source.GetMemory();

                            if (memOp.BaseAddress != default)
                            {
                                VisitSource(memOp.BaseAddress);
                            }

                            if (memOp.Index != default)
                            {
                                VisitSource(memOp.Index);
                            }
                        }
                    }

                    void VisitDestination(Operand dest)
                    {
                        liveKill.Set(GetOperandId(dest));
                    }
                }

                blkLiveGen[block.Index] = liveGen;
                blkLiveKill[block.Index] = liveKill;
            }

            // Compute global live sets.
            BitMap[] blkLiveIn = new BitMap[cfg.Blocks.Count];
            BitMap[] blkLiveOut = new BitMap[cfg.Blocks.Count];

            for (int index = 0; index < cfg.Blocks.Count; index++)
            {
                blkLiveIn[index] = new BitMap(Allocators.Default, mapSize);
                blkLiveOut[index] = new BitMap(Allocators.Default, mapSize);
            }

            bool modified;

            do
            {
                modified = false;

                for (int index = 0; index < cfg.PostOrderBlocks.Length; index++)
                {
                    BasicBlock block = cfg.PostOrderBlocks[index];

                    BitMap liveOut = blkLiveOut[block.Index];

                    for (int i = 0; i < block.SuccessorsCount; i++)
                    {
                        BasicBlock succ = block.GetSuccessor(i);

                        modified |= liveOut.Set(blkLiveIn[succ.Index]);
                    }

                    BitMap liveIn = blkLiveIn[block.Index];

                    // liveIn = (liveOut - kill) + gen.
                    liveIn.Set(liveOut);
                    liveIn.Clear(blkLiveKill[block.Index]);
                    liveIn.Set(blkLiveGen[block.Index]);
                }
            }
            while (modified);

            _blockLiveIn = blkLiveIn;

            _blockEdges = new HashSet<int>();

            // Compute lifetime intervals.
            int operationPos = _operationsCount;

            for (int index = 0; index < cfg.PostOrderBlocks.Length; index++)
            {
                BasicBlock block = cfg.PostOrderBlocks[index];

                // We handle empty blocks by pretending they have a dummy instruction,
                // because otherwise the block would have the same start and end position,
                // and this is not valid.
                int instCount = Math.Max(block.Operations.Count, 1);

                int blockStart = operationPos - instCount * InstructionGap;
                int blockEnd = operationPos;

                _blockRanges[block.Index] = new LiveRange(blockStart, blockEnd);

                _blockEdges.Add(blockStart);

                BitMap liveOut = blkLiveOut[block.Index];

                // Anything live-out spans the whole block; ranges for values defined
                // or last-used inside the block are trimmed below.
                foreach (int id in liveOut)
                {
                    _intervals[id].AddRange(blockStart, blockEnd);
                }

                if (block.Operations.Count == 0)
                {
                    operationPos -= InstructionGap;

                    continue;
                }

                for (Operation node = block.Operations.Last; node != default; node = node.ListPrevious)
                {
                    operationPos -= InstructionGap;

                    for (int i = 0; i < node.DestinationsCount; i++)
                    {
                        VisitDestination(node.GetDestination(i));
                    }

                    for (int i = 0; i < node.SourcesCount; i++)
                    {
                        VisitSource(node.GetSource(i));
                    }

                    // Calls clobber all caller saved registers, so give each of them a
                    // tiny fixed range overlapping the call.
                    if (node.Instruction == Instruction.Call)
                    {
                        AddIntervalCallerSavedReg(context.Masks.IntCallerSavedRegisters, operationPos, RegisterType.Integer);
                        AddIntervalCallerSavedReg(context.Masks.VecCallerSavedRegisters, operationPos, RegisterType.Vector);
                    }

                    void VisitSource(Operand source)
                    {
                        if (IsLocalOrRegister(source.Kind))
                        {
                            LiveInterval interval = _intervals[GetOperandId(source)];

                            interval.AddRange(blockStart, operationPos + 1);
                            interval.AddUsePosition(operationPos);
                        }
                        else if (source.Kind == OperandKind.Memory)
                        {
                            MemoryOperand memOp = source.GetMemory();

                            if (memOp.BaseAddress != default)
                            {
                                VisitSource(memOp.BaseAddress);
                            }

                            if (memOp.Index != default)
                            {
                                VisitSource(memOp.Index);
                            }
                        }
                    }

                    void VisitDestination(Operand dest)
                    {
                        LiveInterval interval = _intervals[GetOperandId(dest)];

                        if (interval.IsFixed)
                        {
                            interval.IsFixedAndUsed = true;
                        }

                        // A definition trims the interval to begin at this position.
                        interval.SetStart(operationPos + 1);
                        interval.AddUsePosition(operationPos + 1);
                    }
                }
            }

            foreach (LiveInterval interval in _parentIntervals)
            {
                interval.Reset();
            }
        }

        // Adds a short range right after operationPos to each caller saved register's
        // fixed interval, blocking those registers across a call.
        private void AddIntervalCallerSavedReg(int mask, int operationPos, RegisterType regType)
        {
            while (mask != 0)
            {
                int regIndex = BitOperations.TrailingZeroCount(mask);

                Register callerSavedReg = new(regIndex, regType);

                LiveInterval interval = _intervals[GetRegisterId(callerSavedReg)];

                interval.AddRange(operationPos + 1, operationPos + InstructionGap);

                mask &= ~(1 << regIndex);
            }
        }

        private static int GetOperandId(Operand operand)
        {
            if (operand.Kind == OperandKind.LocalVariable)
            {
                return operand.GetLocalNumber();
            }
            else if (operand.Kind == OperandKind.Register)
            {
                return GetRegisterId(operand.GetRegister());
            }
            else
            {
                throw new ArgumentException($"Invalid operand kind \"{operand.Kind}\".");
            }
        }

        // Interleaved id scheme: even ids are integer registers, odd ids are vector
        // registers (matches the seeding order in NumberLocals).
        private static int GetRegisterId(Register register)
        {
            return (register.Index << 1) | (register.Type == RegisterType.Vector ? 1 : 0);
        }

        private static bool IsLocalOrRegister(OperandKind kind)
        {
            return kind == OperandKind.LocalVariable ||
                   kind == OperandKind.Register;
        }
    }
}