AccessCase.cpp
/*
 * Copyright (C) 2017-2020 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "AccessCase.h"

#if ENABLE(JIT)

#include "CCallHelpers.h"
#include "CacheableIdentifierInlines.h"
#include "CallLinkInfo.h"
#include "DirectArguments.h"
#include "GetterSetter.h"
#include "GetterSetterAccessCase.h"
#include "InstanceOfAccessCase.h"
#include "IntrinsicGetterAccessCase.h"
#include "JSCInlines.h"
#include "JSModuleEnvironment.h"
#include "JSModuleNamespaceObject.h"
#include "LLIntThunks.h"
#include "LinkBuffer.h"
#include "ModuleNamespaceAccessCase.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "StructureStubInfo.h"
#include "SuperSampler.h"
#include "ThunkGenerators.h"

namespace JSC {

namespace AccessCaseInternal {
static constexpr bool verbose = false;
}

DEFINE_ALLOCATOR_WITH_HEAP_IDENTIFIER(AccessCase);

AccessCase::AccessCase(VM& vm, JSCell* owner, AccessType type, CacheableIdentifier identifier, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
    : m_type(type)
    , m_offset(offset)
    , m_polyProtoAccessChain(WTFMove(prototypeAccessChain))
    , m_identifier(identifier)
{
    m_structure.setMayBeNull(vm, owner, structure);
    m_conditionSet = conditionSet;
    RELEASE_ASSERT(m_conditionSet.isValid());
}

std::unique_ptr<AccessCase> AccessCase::create(VM& vm, JSCell* owner, AccessType type, CacheableIdentifier identifier, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
{
    switch (type) {
    case InHit:
    case InMiss:
    case DeleteNonConfigurable:
    case DeleteMiss:
        break;
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case Replace:
    case InstanceOfGeneric:
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        RELEASE_ASSERT(!prototypeAccessChain);
        break;
    default:
        RELEASE_ASSERT_NOT_REACHED();
    };

    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, type, identifier, offset, structure, conditionSet, WTFMove(prototypeAccessChain)));
}

std::unique_ptr<AccessCase> AccessCase::createTransition(
    VM& vm, JSCell* owner, CacheableIdentifier identifier, PropertyOffset offset, Structure* oldStructure, Structure* newStructure,
    const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
{
    RELEASE_ASSERT(oldStructure == newStructure->previousID());

    // Skip optimizing the case where we need a realloc, if we don't have
    // enough registers to make it happen.
    if (GPRInfo::numberOfRegisters < 6
        && oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity()
        && oldStructure->outOfLineCapacity()) {
        return nullptr;
    }

    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, Transition, identifier, offset, newStructure, conditionSet, WTFMove(prototypeAccessChain)));
}

std::unique_ptr<AccessCase> AccessCase::createDelete(
    VM& vm, JSCell* owner, CacheableIdentifier identifier, PropertyOffset offset, Structure* oldStructure, Structure* newStructure)
{
    RELEASE_ASSERT(oldStructure == newStructure->previousID());
    ASSERT(!newStructure->outOfLineCapacity() || oldStructure->outOfLineCapacity());
    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, Delete, identifier, offset, newStructure, { }, { }));
}

AccessCase::~AccessCase()
{
}

std::unique_ptr<AccessCase> AccessCase::fromStructureStubInfo(
    VM& vm, JSCell* owner, CacheableIdentifier identifier, StructureStubInfo& stubInfo)
{
    switch (stubInfo.cacheType()) {
    case CacheType::GetByIdSelf:
        RELEASE_ASSERT(stubInfo.hasConstantIdentifier);
        return ProxyableAccessCase::create(vm, owner, Load, identifier, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::PutByIdReplace:
        RELEASE_ASSERT(stubInfo.hasConstantIdentifier);
        return AccessCase::create(vm, owner, Replace, identifier, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::InByIdSelf:
        RELEASE_ASSERT(stubInfo.hasConstantIdentifier);
        return AccessCase::create(vm, owner, InHit, identifier, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::ArrayLength:
        RELEASE_ASSERT(stubInfo.hasConstantIdentifier);
        return AccessCase::create(vm, owner, AccessCase::ArrayLength, identifier);

    case CacheType::StringLength:
        RELEASE_ASSERT(stubInfo.hasConstantIdentifier);
        return AccessCase::create(vm, owner, AccessCase::StringLength, identifier);

    default:
        return nullptr;
    }
}

bool AccessCase::hasAlternateBase() const
{
    return !conditionSet().isEmpty();
}

JSObject* AccessCase::alternateBase() const
{
    return conditionSet().slotBaseCondition().object();
}

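// Note: resetState() returns the copy to its initial, not-yet-committed state, so the
// clone can be committed and generated independently of the original.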
std::unique_ptr<AccessCase> AccessCase::clone() const
{
    std::unique_ptr<AccessCase> result(new AccessCase(*this));
    result->resetState();
    return result;
}

Vector<WatchpointSet*, 2> AccessCase::commit(VM& vm)
{
    // It's fine to commit something that is already committed. That arises when we switch to using
    // newly allocated watchpoints. When it happens, it's not efficient - but we think that's OK
    // because most AccessCases have no extra watchpoints anyway.
    RELEASE_ASSERT(m_state == Primordial || m_state == Committed);

    Vector<WatchpointSet*, 2> result;
    Structure* structure = this->structure();

    if (m_identifier) {
        if ((structure && structure->needImpurePropertyWatchpoint())
            || m_conditionSet.needImpurePropertyWatchpoint()
            || (m_polyProtoAccessChain && m_polyProtoAccessChain->needImpurePropertyWatchpoint(vm)))
            result.append(vm.ensureWatchpointSetForImpureProperty(m_identifier.uid()));
    }

    if (additionalSet())
        result.append(additionalSet());

    if (structure
        && structure->hasRareData()
        && structure->rareData()->hasSharedPolyProtoWatchpoint()
        && structure->rareData()->sharedPolyProtoWatchpoint()->isStillValid()) {
        WatchpointSet* set = structure->rareData()->sharedPolyProtoWatchpoint()->inflate();
        result.append(set);
    }

    m_state = Committed;

    return result;
}

bool AccessCase::guardedByStructureCheck(const StructureStubInfo& stubInfo) const
{
    if (!stubInfo.hasConstantIdentifier)
        return false;
    return guardedByStructureCheckSkippingConstantIdentifierCheck();
}

bool AccessCase::guardedByStructureCheckSkippingConstantIdentifierCheck() const
{
    if (viaProxy())
        return false;

    if (m_polyProtoAccessChain)
        return false;

    switch (m_type) {
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfHit:
    case InstanceOfMiss:
    case InstanceOfGeneric:
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        return false;
    default:
        return true;
    }
}

bool AccessCase::requiresIdentifierNameMatch() const
{
    switch (m_type) {
    case Load:
    // We don't currently have a by_val for these puts, but we do care about the identifier.
    case Transition:
    case Delete:
    case DeleteNonConfigurable:
    case DeleteMiss:
    case Replace:
    case Miss:
    case GetGetter:
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
    case IntrinsicGetter:
    case InHit:
    case InMiss:
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
        return true;
    case InstanceOfHit:
    case InstanceOfMiss:
    case InstanceOfGeneric:
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        return false;
    }
    RELEASE_ASSERT_NOT_REACHED();
}

bool AccessCase::requiresInt32PropertyCheck() const
{
    switch (m_type) {
    case Load:
    case Transition:
    case Delete:
    case DeleteNonConfigurable:
    case DeleteMiss:
    case Replace:
    case Miss:
    case GetGetter:
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
    case IntrinsicGetter:
    case InHit:
    case InMiss:
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfHit:
    case InstanceOfMiss:
    case InstanceOfGeneric:
        return false;
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        return true;
    }
    RELEASE_ASSERT_NOT_REACHED();
}

bool AccessCase::needsScratchFPR() const
{
    switch (m_type) {
    case Load:
    case Transition:
    case Delete:
    case DeleteNonConfigurable:
    case DeleteMiss:
    case Replace:
    case Miss:
    case GetGetter:
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
    case IntrinsicGetter:
    case InHit:
    case InMiss:
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfHit:
    case InstanceOfMiss:
    case InstanceOfGeneric:
    case IndexedInt32Load:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedStringLoad:
        return false;
    case IndexedDoubleLoad:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedTypedArrayUint32Load:
        return true;
    }
    RELEASE_ASSERT_NOT_REACHED();
}

template<typename Functor>
void AccessCase::forEachDependentCell(VM& vm, const Functor& functor) const
{
    m_conditionSet.forEachDependentCell(functor);
    if (m_structure)
        functor(m_structure.get());
    if (m_polyProtoAccessChain) {
        for (StructureID structureID : m_polyProtoAccessChain->chain())
            functor(vm.getStructure(structureID));
    }

    switch (type()) {
    case Getter:
    case Setter: {
        auto& accessor = this->as<GetterSetterAccessCase>();
        if (accessor.callLinkInfo())
            accessor.callLinkInfo()->forEachDependentCell(functor);
        break;
    }
    case CustomValueGetter:
    case CustomValueSetter: {
        auto& accessor = this->as<GetterSetterAccessCase>();
        if (accessor.customSlotBase())
            functor(accessor.customSlotBase());
        break;
    }
    case IntrinsicGetter: {
        auto& intrinsic = this->as<IntrinsicGetterAccessCase>();
        if (intrinsic.intrinsicFunction())
            functor(intrinsic.intrinsicFunction());
        break;
    }
    case ModuleNamespaceLoad: {
        auto& accessCase = this->as<ModuleNamespaceAccessCase>();
        if (accessCase.moduleNamespaceObject())
            functor(accessCase.moduleNamespaceObject());
        if (accessCase.moduleEnvironment())
            functor(accessCase.moduleEnvironment());
        break;
    }
    case InstanceOfHit:
    case InstanceOfMiss:
        if (as<InstanceOfAccessCase>().prototype())
            functor(as<InstanceOfAccessCase>().prototype());
        break;
    case CustomAccessorGetter:
    case CustomAccessorSetter:
    case Load:
    case Transition:
    case Delete:
    case DeleteNonConfigurable:
    case DeleteMiss:
    case Replace:
    case Miss:
    case GetGetter:
    case InHit:
    case InMiss:
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case InstanceOfGeneric:
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        break;
    }
}

bool AccessCase::doesCalls(VM& vm, Vector<JSCell*>* cellsToMarkIfDoesCalls) const
{
    bool doesCalls = false;
    switch (type()) {
    case Transition:
        doesCalls = newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity() && structure()->couldHaveIndexingHeader();
        break;
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
        doesCalls = true;
        break;
    case Delete:
    case DeleteNonConfigurable:
    case DeleteMiss:
    case Load:
    case Miss:
    case GetGetter:
    case IntrinsicGetter:
    case InHit:
    case InMiss:
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfHit:
    case InstanceOfMiss:
    case InstanceOfGeneric:
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        doesCalls = false;
        break;
    case Replace:
        doesCalls = viaProxy();
        break;
    }

    if (doesCalls && cellsToMarkIfDoesCalls) {
        forEachDependentCell(vm, [&](JSCell* cell) {
            cellsToMarkIfDoesCalls->append(cell);
        });
    }
    return doesCalls;
}

bool AccessCase::couldStillSucceed() const
{
    for (const ObjectPropertyCondition& condition : m_conditionSet) {
        if (condition.condition().kind() == PropertyCondition::Equivalence) {
            if (!condition.isWatchableAssumingImpurePropertyWatchpoint(PropertyCondition::WatchabilityEffort::EnsureWatchability))
                return false;
        } else {
            if (!condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint())
                return false;
        }
    }
    return true;
}

bool AccessCase::canReplace(const AccessCase& other) const
{
    // This puts in a good effort to try to figure out if 'other' is made superfluous by '*this'.
    // It's fine for this to return false if it's in doubt.
    //
    // Note that if A->guardedByStructureCheck() && B->guardedByStructureCheck() then
    // A->canReplace(B) == B->canReplace(A).

    if (m_identifier != other.m_identifier)
        return false;

    if (viaProxy() != other.viaProxy())
        return false;

    auto checkPolyProtoAndStructure = [&] {
        if (m_polyProtoAccessChain) {
            if (!other.m_polyProtoAccessChain)
                return false;
            // This is the only check we need since PolyProtoAccessChain contains the base structure.
            // If we ever change it to contain only the prototype chain, we'll also need to change
            // this to check the base structure.
            return structure() == other.structure()
                && *m_polyProtoAccessChain == *other.m_polyProtoAccessChain;
        }

        if (!guardedByStructureCheckSkippingConstantIdentifierCheck() || !other.guardedByStructureCheckSkippingConstantIdentifierCheck())
            return false;

        return structure() == other.structure();
    };

    switch (type()) {
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        return other.type() == type();

    case ModuleNamespaceLoad: {
        if (other.type() != type())
            return false;
        auto& thisCase = this->as<ModuleNamespaceAccessCase>();
        auto& otherCase = other.as<ModuleNamespaceAccessCase>();
        return thisCase.moduleNamespaceObject() == otherCase.moduleNamespaceObject();
    }

    case InstanceOfHit:
    case InstanceOfMiss: {
        if (other.type() != type())
            return false;

        if (this->as<InstanceOfAccessCase>().prototype() != other.as<InstanceOfAccessCase>().prototype())
            return false;

        return structure() == other.structure();
    }

    case InstanceOfGeneric:
        switch (other.type()) {
        case InstanceOfGeneric:
        case InstanceOfHit:
        case InstanceOfMiss:
            return true;
        default:
            return false;
        }

    case Load:
    case Transition:
    case Delete:
    case DeleteNonConfigurable:
    case DeleteMiss:
    case Replace:
    case Miss:
    case GetGetter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
    case InHit:
    case InMiss:
        if (other.type() != type())
            return false;

        return checkPolyProtoAndStructure();

    case IntrinsicGetter:
    case Getter:
        if (other.type() != Getter && other.type() != IntrinsicGetter)
            return false;

        return checkPolyProtoAndStructure();
    }
    RELEASE_ASSERT_NOT_REACHED();
}

void AccessCase::dump(PrintStream& out) const
{
    out.print("\n", m_type, ":(");

    CommaPrinter comma;

    out.print(comma, m_state);

    out.print(comma, "ident = '", m_identifier, "'");
    if (isValidOffset(m_offset))
        out.print(comma, "offset = ", m_offset);

    if (m_polyProtoAccessChain) {
        out.print(comma, "prototype access chain = ");
        m_polyProtoAccessChain->dump(structure(), out);
    } else {
        if (m_type == Transition || m_type == Delete)
            out.print(comma, "structure = ", pointerDump(structure()), " -> ", pointerDump(newStructure()));
        else if (m_structure)
            out.print(comma, "structure = ", pointerDump(m_structure.get()));
    }

    if (!m_conditionSet.isEmpty())
        out.print(comma, "conditions = ", m_conditionSet);

    dumpImpl(out, comma);
    out.print(")");
}

bool AccessCase::visitWeak(VM& vm) const
{
    if (isAccessor()) {
        auto& accessor = this->as<GetterSetterAccessCase>();
        if (accessor.callLinkInfo())
            accessor.callLinkInfo()->visitWeak(vm);
    }
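
    // The case remains valid only if every cell it depends on is still marked.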
    bool isValid = true;
    forEachDependentCell(vm, [&](JSCell* cell) {
        isValid &= vm.heap.isMarked(cell);
    });
    return isValid;
}

bool AccessCase::propagateTransitions(SlotVisitor& visitor) const
{
    bool result = true;

    if (m_structure)
        result &= m_structure->markIfCheap(visitor);

    if (m_polyProtoAccessChain) {
        for (StructureID structureID : m_polyProtoAccessChain->chain())
            result &= visitor.vm().getStructure(structureID)->markIfCheap(visitor);
    }

    switch (m_type) {
    case Transition:
    case Delete:
        if (visitor.vm().heap.isMarked(m_structure->previousID()))
            visitor.appendUnbarriered(m_structure.get());
        else
            result = false;
        break;
    default:
        break;
    }

    return result;
}

void AccessCase::visitAggregate(SlotVisitor& visitor) const
{
    m_identifier.visitAggregate(visitor);
}

void AccessCase::generateWithGuard(
    AccessGenerationState& state, CCallHelpers::JumpList& fallThrough)
{
    SuperSamplerScope superSamplerScope(false);

    checkConsistency(*state.stubInfo);

    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    CCallHelpers& jit = *state.jit;
    StructureStubInfo& stubInfo = *state.stubInfo;
    VM& vm = state.m_vm;
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    if (requiresIdentifierNameMatch() && !stubInfo.hasConstantIdentifier) {
        RELEASE_ASSERT(m_identifier);
        GPRReg propertyGPR = state.u.propertyGPR;
        // non-rope string check done inside polymorphic access.

        if (uid()->isSymbol())
            jit.loadPtr(MacroAssembler::Address(propertyGPR, Symbol::offsetOfSymbolImpl()), scratchGPR);
        else
            jit.loadPtr(MacroAssembler::Address(propertyGPR, JSString::offsetOfValue()), scratchGPR);
        fallThrough.append(jit.branchPtr(CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImmPtr(uid())));
    }

    auto emitDefaultGuard = [&] () {
        if (m_polyProtoAccessChain) {
            ASSERT(!viaProxy());
            GPRReg baseForAccessGPR = state.scratchGPR;
            jit.move(state.baseGPR, baseForAccessGPR);
            m_polyProtoAccessChain->forEach(vm, structure(), [&] (Structure* structure, bool atEnd) {
                fallThrough.append(
                    jit.branchStructure(
                        CCallHelpers::NotEqual,
                        CCallHelpers::Address(baseForAccessGPR, JSCell::structureIDOffset()),
                        structure));
                if (atEnd) {
                    if ((m_type == Miss || m_type == InMiss || m_type == Transition) && structure->hasPolyProto()) {
                        // For a Miss/InMiss/Transition, we must ensure we're at the end when the last item is poly proto.
                        // Transitions must do this because they need to verify there isn't a setter in the chain.
                        // Miss/InMiss need to do this to ensure there isn't a new item at the end of the chain that
                        // has the property.
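                        // Load the prototype from the poly proto slot and require it to be null,
                        // i.e. the chain really does end here.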
#if USE(JSVALUE64)
                        jit.load64(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset)), baseForAccessGPR);
                        fallThrough.append(jit.branch64(CCallHelpers::NotEqual, baseForAccessGPR, CCallHelpers::TrustedImm64(JSValue::ValueNull)));
#else
                        jit.load32(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), baseForAccessGPR);
                        fallThrough.append(jit.branchTestPtr(CCallHelpers::NonZero, baseForAccessGPR));
#endif
                    }
                } else {
                    if (structure->hasMonoProto()) {
                        JSValue prototype = structure->prototypeForLookup(state.m_globalObject);
                        RELEASE_ASSERT(prototype.isObject());
                        jit.move(CCallHelpers::TrustedImmPtr(asObject(prototype)), baseForAccessGPR);
                    } else {
                        RELEASE_ASSERT(structure->isObject()); // Primitives must have a stored prototype. We use prototypeForLookup for them.
#if USE(JSVALUE64)
                        jit.load64(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset)), baseForAccessGPR);
                        fallThrough.append(jit.branch64(CCallHelpers::Equal, baseForAccessGPR, CCallHelpers::TrustedImm64(JSValue::ValueNull)));
#else
                        jit.load32(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), baseForAccessGPR);
                        fallThrough.append(jit.branchTestPtr(CCallHelpers::Zero, baseForAccessGPR));
#endif
                    }
                }
            });
            return;
        }

        if (viaProxy()) {
            fallThrough.append(
                jit.branchIfNotType(baseGPR, PureForwardingProxyType));

            jit.loadPtr(CCallHelpers::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);

            fallThrough.append(
                jit.branchStructure(
                    CCallHelpers::NotEqual,
                    CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                    structure()));
            return;
        }

        fallThrough.append(
            jit.branchStructure(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()),
                structure()));
    };

    switch (m_type) {
    case ArrayLength: {
        ASSERT(!viaProxy());
        jit.load8(CCallHelpers::Address(baseGPR, JSCell::indexingTypeAndMiscOffset()), scratchGPR);
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IsArray)));
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IndexingShapeMask)));
        break;
    }

    case StringLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotString(baseGPR));
        break;
    }

    case DirectArgumentsLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotType(baseGPR, DirectArgumentsType));

        fallThrough.append(
            jit.branchTestPtr(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, DirectArguments::offsetOfMappedArguments())));
        jit.load32(
            CCallHelpers::Address(baseGPR, DirectArguments::offsetOfLength()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ScopedArgumentsLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotType(baseGPR, ScopedArgumentsType));

        fallThrough.append(
            jit.branchTest8(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfOverrodeThings())));
        jit.load32(
            CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfTotalLength()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ModuleNamespaceLoad: {
        ASSERT(!viaProxy());
        this->as<ModuleNamespaceAccessCase>().emit(state, fallThrough);
        return;
    }

    case IndexedScopedArgumentsLoad: {
        ASSERT(!viaProxy());
        // This code is written such that the result could alias with the base or the property.
        GPRReg propertyGPR = state.u.propertyGPR;

        jit.load8(CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()), scratchGPR);
        fallThrough.append(jit.branch32(CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImm32(ScopedArgumentsType)));

        ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
        allocator.lock(stubInfo.baseRegs());
        allocator.lock(valueRegs);
        allocator.lock(stubInfo.propertyRegs());
        allocator.lock(scratchGPR);

        GPRReg scratch2GPR = allocator.allocateScratchGPR();
        GPRReg scratch3GPR = allocator.allocateScratchGPR();

        ScratchRegisterAllocator::PreservedState preservedState = allocator.preserveReusedRegistersByPushing(
            jit, ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);

        CCallHelpers::JumpList failAndIgnore;

        failAndIgnore.append(jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfTotalLength())));

        jit.loadPtr(CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfTable()), scratchGPR);
        jit.load32(CCallHelpers::Address(scratchGPR, ScopedArgumentsTable::offsetOfLength()), scratch2GPR);
        auto overflowCase = jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, scratch2GPR);

        jit.loadPtr(CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfScope()), scratch2GPR);
        jit.loadPtr(CCallHelpers::Address(scratchGPR, ScopedArgumentsTable::offsetOfArguments()), scratchGPR);
        jit.zeroExtend32ToWord(propertyGPR, scratch3GPR);
        jit.load32(CCallHelpers::BaseIndex(scratchGPR, scratch3GPR, CCallHelpers::TimesFour), scratchGPR);
        failAndIgnore.append(jit.branch32(CCallHelpers::Equal, scratchGPR, CCallHelpers::TrustedImm32(ScopeOffset::invalidOffset)));
        jit.loadValue(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesEight, JSLexicalEnvironment::offsetOfVariables()), valueRegs);
        auto done = jit.jump();

        overflowCase.link(&jit);
        jit.sub32(propertyGPR, scratch2GPR);
        jit.neg32(scratch2GPR);
        jit.loadPtr(CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfStorage()), scratch3GPR);
#if USE(JSVALUE64)
        jit.loadValue(CCallHelpers::BaseIndex(scratch3GPR, scratch2GPR, CCallHelpers::TimesEight), JSValueRegs(scratchGPR));
        failAndIgnore.append(jit.branchIfEmpty(scratchGPR));
        jit.move(scratchGPR, valueRegs.payloadGPR());
#else
        jit.loadValue(CCallHelpers::BaseIndex(scratch3GPR, scratch2GPR, CCallHelpers::TimesEight), JSValueRegs(scratch2GPR, scratchGPR));
        failAndIgnore.append(jit.branchIfEmpty(scratch2GPR));
        jit.move(scratchGPR, valueRegs.payloadGPR());
        jit.move(scratch2GPR, valueRegs.tagGPR());
#endif

        done.link(&jit);

        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        if (allocator.didReuseRegisters()) {
            failAndIgnore.link(&jit);
            allocator.restoreReusedRegistersByPopping(jit, preservedState);
            state.failAndIgnore.append(jit.jump());
        } else
            state.failAndIgnore.append(failAndIgnore);

        return;
    }

    case IndexedDirectArgumentsLoad: {
        ASSERT(!viaProxy());
        // This code is written such that the result could alias with the base or the property.
        GPRReg propertyGPR = state.u.propertyGPR;
        jit.load8(CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()), scratchGPR);
        fallThrough.append(jit.branch32(CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImm32(DirectArgumentsType)));

        jit.load32(CCallHelpers::Address(baseGPR, DirectArguments::offsetOfLength()), scratchGPR);
        state.failAndRepatch.append(jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, scratchGPR));
        state.failAndRepatch.append(jit.branchTestPtr(CCallHelpers::NonZero, CCallHelpers::Address(baseGPR, DirectArguments::offsetOfMappedArguments())));
        jit.zeroExtend32ToWord(propertyGPR, scratchGPR);
        jit.loadValue(CCallHelpers::BaseIndex(baseGPR, scratchGPR, CCallHelpers::TimesEight, DirectArguments::storageOffset()), valueRegs);
        state.succeed();
        return;
    }

    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load: {
        ASSERT(!viaProxy());
        // This code is written such that the result could alias with the base or the property.

        TypedArrayType type = toTypedArrayType(m_type);

        GPRReg propertyGPR = state.u.propertyGPR;

        jit.load8(CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()), scratchGPR);
        fallThrough.append(jit.branch32(CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImm32(typeForTypedArrayType(type))));

        jit.load32(CCallHelpers::Address(baseGPR, JSArrayBufferView::offsetOfLength()), scratchGPR);
        state.failAndRepatch.append(jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, scratchGPR));

        ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
        allocator.lock(stubInfo.baseRegs());
        allocator.lock(valueRegs);
        allocator.lock(stubInfo.propertyRegs());
        allocator.lock(scratchGPR);
        GPRReg scratch2GPR = allocator.allocateScratchGPR();

        ScratchRegisterAllocator::PreservedState preservedState = allocator.preserveReusedRegistersByPushing(
            jit, ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);

        jit.loadPtr(CCallHelpers::Address(baseGPR, JSArrayBufferView::offsetOfVector()), scratch2GPR);
        jit.cageConditionally(Gigacage::Primitive, scratch2GPR, scratchGPR, scratchGPR);

        jit.signExtend32ToPtr(propertyGPR, scratchGPR);
        if (isInt(type)) {
            switch (elementSize(type)) {
            case 1:
                if (JSC::isSigned(type))
                    jit.load8SignedExtendTo32(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesOne), valueRegs.payloadGPR());
                else
                    jit.load8(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesOne), valueRegs.payloadGPR());
                break;
            case 2:
                if (JSC::isSigned(type))
                    jit.load16SignedExtendTo32(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesTwo), valueRegs.payloadGPR());
                else
                    jit.load16(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesTwo), valueRegs.payloadGPR());
                break;
            case 4:
                jit.load32(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesFour), valueRegs.payloadGPR());
                break;
            default:
                CRASH();
            }

            CCallHelpers::Jump done;

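            // A Uint32 value whose high bit is set does not fit in an int32; in that case,
            // convert to double and add 2^32 to recover the unsigned value before boxing.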
            if (type == TypeUint32) {
                RELEASE_ASSERT(state.scratchFPR != InvalidFPRReg);
                auto canBeInt = jit.branch32(CCallHelpers::GreaterThanOrEqual, valueRegs.payloadGPR(), CCallHelpers::TrustedImm32(0));

                jit.convertInt32ToDouble(valueRegs.payloadGPR(), state.scratchFPR);
                jit.addDouble(CCallHelpers::AbsoluteAddress(&CCallHelpers::twoToThe32), state.scratchFPR);
                jit.boxDouble(state.scratchFPR, valueRegs);
                done = jit.jump();
                canBeInt.link(&jit);
            }

            jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
            if (done.isSet())
                done.link(&jit);
        } else {
            ASSERT(isFloat(type));
            RELEASE_ASSERT(state.scratchFPR != InvalidFPRReg);
            switch (elementSize(type)) {
            case 4:
                jit.loadFloat(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesFour), state.scratchFPR);
                jit.convertFloatToDouble(state.scratchFPR, state.scratchFPR);
                break;
            case 8: {
                jit.loadDouble(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesEight), state.scratchFPR);
                break;
            }
            default:
                CRASH();
            }

            jit.purifyNaN(state.scratchFPR);
            jit.boxDouble(state.scratchFPR, valueRegs);
        }

        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        return;
    }

    case IndexedStringLoad: {
        ASSERT(!viaProxy());
        // This code is written such that the result could alias with the base or the property.
        GPRReg propertyGPR = state.u.propertyGPR;

        fallThrough.append(jit.branchIfNotString(baseGPR));

        ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
        allocator.lock(stubInfo.baseRegs());
        allocator.lock(valueRegs);
        allocator.lock(stubInfo.propertyRegs());
        allocator.lock(scratchGPR);
        GPRReg scratch2GPR = allocator.allocateScratchGPR();

        CCallHelpers::JumpList failAndIgnore;

        ScratchRegisterAllocator::PreservedState preservedState = allocator.preserveReusedRegistersByPushing(
            jit, ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);

        jit.loadPtr(CCallHelpers::Address(baseGPR, JSString::offsetOfValue()), scratch2GPR);
        failAndIgnore.append(jit.branchIfRopeStringImpl(scratch2GPR));
        jit.load32(CCallHelpers::Address(scratch2GPR, StringImpl::lengthMemoryOffset()), scratchGPR);

        failAndIgnore.append(jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, scratchGPR));

        jit.load32(CCallHelpers::Address(scratch2GPR, StringImpl::flagsOffset()), scratchGPR);
        jit.loadPtr(CCallHelpers::Address(scratch2GPR, StringImpl::dataOffset()), scratch2GPR);
        auto is16Bit = jit.branchTest32(CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(StringImpl::flagIs8Bit()));
        jit.zeroExtend32ToWord(propertyGPR, scratchGPR);
        jit.load8(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesOne, 0), scratch2GPR);
        auto is8BitLoadDone = jit.jump();
        is16Bit.link(&jit);
        jit.zeroExtend32ToWord(propertyGPR, scratchGPR);
        jit.load16(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesTwo, 0), scratch2GPR);
        is8BitLoadDone.link(&jit);

        failAndIgnore.append(jit.branch32(CCallHelpers::Above, scratch2GPR, CCallHelpers::TrustedImm32(maxSingleCharacterString)));
        jit.move(CCallHelpers::TrustedImmPtr(vm.smallStrings.singleCharacterStrings()), scratchGPR);
        jit.loadPtr(CCallHelpers::BaseIndex(scratchGPR, scratch2GPR, CCallHelpers::ScalePtr, 0), valueRegs.payloadGPR());
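        // The single-character string cache yields an interned JSString cell for this character.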
        jit.boxCell(valueRegs.payloadGPR(), valueRegs);
        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        if (allocator.didReuseRegisters()) {
            failAndIgnore.link(&jit);
            allocator.restoreReusedRegistersByPopping(jit, preservedState);
            state.failAndIgnore.append(jit.jump());
        } else
            state.failAndIgnore.append(failAndIgnore);

        return;
    }

    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad: {
        ASSERT(!viaProxy());
        // This code is written such that the result could alias with the base or the property.
        GPRReg propertyGPR = state.u.propertyGPR;

        // int32 check done in polymorphic access.
        jit.load8(CCallHelpers::Address(baseGPR, JSCell::indexingTypeAndMiscOffset()), scratchGPR);
        jit.and32(CCallHelpers::TrustedImm32(IndexingShapeMask), scratchGPR);

        CCallHelpers::Jump isOutOfBounds;
        CCallHelpers::Jump isEmpty;

        ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
        allocator.lock(stubInfo.baseRegs());
        allocator.lock(valueRegs);
        allocator.lock(stubInfo.propertyRegs());
        allocator.lock(scratchGPR);
        GPRReg scratch2GPR = allocator.allocateScratchGPR();
#if USE(JSVALUE32_64)
        GPRReg scratch3GPR = allocator.allocateScratchGPR();
#endif
        ScratchRegisterAllocator::PreservedState preservedState;

        CCallHelpers::JumpList failAndIgnore;
        auto preserveReusedRegisters = [&] {
            preservedState = allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);
        };

        if (m_type == IndexedArrayStorageLoad) {
            jit.add32(CCallHelpers::TrustedImm32(-ArrayStorageShape), scratchGPR, scratchGPR);
            fallThrough.append(jit.branch32(CCallHelpers::Above, scratchGPR, CCallHelpers::TrustedImm32(SlowPutArrayStorageShape - ArrayStorageShape)));

            preserveReusedRegisters();

            jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            isOutOfBounds = jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, CCallHelpers::Address(scratchGPR, ArrayStorage::vectorLengthOffset()));

            jit.zeroExtend32ToWord(propertyGPR, scratch2GPR);
#if USE(JSVALUE64)
            jit.loadValue(CCallHelpers::BaseIndex(scratchGPR, scratch2GPR, CCallHelpers::TimesEight, ArrayStorage::vectorOffset()), JSValueRegs(scratchGPR));
            isEmpty = jit.branchIfEmpty(scratchGPR);
            jit.move(scratchGPR, valueRegs.payloadGPR());
#else
            jit.loadValue(CCallHelpers::BaseIndex(scratchGPR, scratch2GPR, CCallHelpers::TimesEight, ArrayStorage::vectorOffset()), JSValueRegs(scratch3GPR, scratchGPR));
            isEmpty = jit.branchIfEmpty(scratch3GPR);
            jit.move(scratchGPR, valueRegs.payloadGPR());
            jit.move(scratch3GPR, valueRegs.tagGPR());
#endif
        } else {
            IndexingType expectedShape;
            switch (m_type) {
            case IndexedInt32Load:
                expectedShape = Int32Shape;
                break;
            case IndexedDoubleLoad:
                expectedShape = DoubleShape;
                break;
            case IndexedContiguousLoad:
                expectedShape = ContiguousShape;
                break;
            default:
                RELEASE_ASSERT_NOT_REACHED();
                break;
            }

            fallThrough.append(jit.branch32(CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImm32(expectedShape)));

            preserveReusedRegisters();

            jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
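            // Bounds-check against the butterfly's public length; out of bounds goes to the slow path.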
            isOutOfBounds = jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, CCallHelpers::Address(scratchGPR, Butterfly::offsetOfPublicLength()));
            jit.zeroExtend32ToWord(propertyGPR, scratch2GPR);
            if (m_type == IndexedDoubleLoad) {
                RELEASE_ASSERT(state.scratchFPR != InvalidFPRReg);
                jit.loadDouble(CCallHelpers::BaseIndex(scratchGPR, scratch2GPR, CCallHelpers::TimesEight), state.scratchFPR);
                isEmpty = jit.branchIfNaN(state.scratchFPR);
                jit.boxDouble(state.scratchFPR, valueRegs);
            } else {
#if USE(JSVALUE64)
                jit.loadValue(CCallHelpers::BaseIndex(scratchGPR, scratch2GPR, CCallHelpers::TimesEight), JSValueRegs(scratchGPR));
                isEmpty = jit.branchIfEmpty(scratchGPR);
                jit.move(scratchGPR, valueRegs.payloadGPR());
#else
                jit.loadValue(CCallHelpers::BaseIndex(scratchGPR, scratch2GPR, CCallHelpers::TimesEight), JSValueRegs(scratch3GPR, scratchGPR));
                isEmpty = jit.branchIfEmpty(scratch3GPR);
                jit.move(scratchGPR, valueRegs.payloadGPR());
                jit.move(scratch3GPR, valueRegs.tagGPR());
#endif
            }
        }

        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        if (allocator.didReuseRegisters()) {
            isOutOfBounds.link(&jit);
            isEmpty.link(&jit);
            allocator.restoreReusedRegistersByPopping(jit, preservedState);
            state.failAndIgnore.append(jit.jump());
        } else {
            state.failAndIgnore.append(isOutOfBounds);
            state.failAndIgnore.append(isEmpty);
        }

        return;
    }

    case InstanceOfHit:
    case InstanceOfMiss:
        emitDefaultGuard();

        fallThrough.append(
            jit.branchPtr(
                CCallHelpers::NotEqual, state.u.prototypeGPR,
                CCallHelpers::TrustedImmPtr(as<InstanceOfAccessCase>().prototype())));
        break;

    case InstanceOfGeneric: {
        ASSERT(!viaProxy());
        GPRReg prototypeGPR = state.u.prototypeGPR;
        // Legend: value = `base instanceof prototypeGPR`.
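        // Walk the base's prototype chain, comparing each prototype against prototypeGPR:
        // a match answers true; falling off the end of the chain (a non-cell) answers false.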

        GPRReg valueGPR = valueRegs.payloadGPR();

        ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
        allocator.lock(stubInfo.baseRegs());
        allocator.lock(valueRegs);
        allocator.lock(stubInfo.propertyRegs());
        allocator.lock(scratchGPR);

        GPRReg scratch2GPR = allocator.allocateScratchGPR();

        if (!state.stubInfo->prototypeIsKnownObject)
            state.failAndIgnore.append(jit.branchIfNotObject(prototypeGPR));

        ScratchRegisterAllocator::PreservedState preservedState =
            allocator.preserveReusedRegistersByPushing(
                jit,
                ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);
        CCallHelpers::JumpList failAndIgnore;

        jit.move(baseGPR, valueGPR);

        CCallHelpers::Label loop(&jit);

#if USE(JSVALUE64)
        JSValueRegs resultRegs(scratch2GPR);
#else
        JSValueRegs resultRegs(scratchGPR, scratch2GPR);
#endif

        jit.emitLoadPrototype(vm, valueGPR, resultRegs, scratchGPR, failAndIgnore);
        jit.move(scratch2GPR, valueGPR);

        CCallHelpers::Jump isInstance = jit.branchPtr(CCallHelpers::Equal, valueGPR, prototypeGPR);

#if USE(JSVALUE64)
        jit.branchIfCell(JSValueRegs(valueGPR)).linkTo(loop, &jit);
#else
        jit.branchTestPtr(CCallHelpers::NonZero, valueGPR).linkTo(loop, &jit);
#endif

        jit.boxBooleanPayload(false, valueGPR);
        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        isInstance.link(&jit);
        jit.boxBooleanPayload(true, valueGPR);
        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        if (allocator.didReuseRegisters()) {
            failAndIgnore.link(&jit);
            allocator.restoreReusedRegistersByPopping(jit, preservedState);
            state.failAndIgnore.append(jit.jump());
        } else
            state.failAndIgnore.append(failAndIgnore);
        return;
    }

    default:
        emitDefaultGuard();
        break;
    }

    generateImpl(state);
}

void AccessCase::generate(AccessGenerationState& state)
{
    RELEASE_ASSERT(m_state == Committed);
    RELEASE_ASSERT(state.stubInfo->hasConstantIdentifier);
    m_state = Generated;

    checkConsistency(*state.stubInfo);

    generateImpl(state);
}

void AccessCase::generateImpl(AccessGenerationState& state)
{
    SuperSamplerScope superSamplerScope(false);
    if (AccessCaseInternal::verbose)
        dataLog("\n\nGenerating code for: ", *this, "\n");

    ASSERT(m_state == Generated); // We rely on the callers setting this for us.

    CCallHelpers& jit = *state.jit;
    VM& vm = state.m_vm;
    CodeBlock* codeBlock = jit.codeBlock();
    JSGlobalObject* globalObject = state.m_globalObject;
    ECMAMode ecmaMode = state.m_ecmaMode;
    StructureStubInfo& stubInfo = *state.stubInfo;
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
    GPRReg thisGPR = stubInfo.thisValueIsInThisGPR() ? state.u.thisGPR : baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    for (const ObjectPropertyCondition& condition : m_conditionSet) {
        RELEASE_ASSERT(!m_polyProtoAccessChain);

        if (condition.isWatchableAssumingImpurePropertyWatchpoint(PropertyCondition::WatchabilityEffort::EnsureWatchability)) {
            state.installWatchpoint(condition);
            continue;
        }

        // For now, we only allow equivalence when it's watchable.
        RELEASE_ASSERT(condition.condition().kind() != PropertyCondition::Equivalence);

        if (!condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint()) {
            // The reason why this cannot happen is that we require that PolymorphicAccess calls
            // AccessCase::generate() only after it has verified that
            // AccessCase::couldStillSucceed() returned true.

            dataLog("This condition is no longer met: ", condition, "\n");
            RELEASE_ASSERT_NOT_REACHED();
        }

        // We will emit code that has a weak reference that isn't otherwise listed anywhere.
        Structure* structure = condition.object()->structure(vm);
        state.weakReferences.append(WriteBarrier<JSCell>(vm, codeBlock, structure));

        jit.move(CCallHelpers::TrustedImmPtr(condition.object()), scratchGPR);
        state.failAndRepatch.append(
            jit.branchStructure(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                structure));
    }

    switch (m_type) {
    case InHit:
    case InMiss:
        jit.boxBoolean(m_type == InHit, valueRegs);
        state.succeed();
        return;

    case Miss:
        jit.moveTrustedValue(jsUndefined(), valueRegs);
        state.succeed();
        return;

    case InstanceOfHit:
    case InstanceOfMiss:
        jit.boxBooleanPayload(m_type == InstanceOfHit, valueRegs.payloadGPR());
        state.succeed();
        return;

    case Load:
    case GetGetter:
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter: {
        GPRReg valueRegsPayloadGPR = valueRegs.payloadGPR();

        if (isValidOffset(m_offset)) {
            Structure* currStructure;
            if (!hasAlternateBase())
                currStructure = structure();
            else
                currStructure = alternateBase()->structure(vm);
            currStructure->startWatchingPropertyForReplacements(vm, offset());
        }

        bool doesPropertyStorageLoads = m_type == Load
            || m_type == GetGetter
            || m_type == Getter
            || m_type == Setter;

        bool takesPropertyOwnerAsCFunctionArgument = m_type == CustomValueGetter || m_type == CustomValueSetter;

        GPRReg receiverGPR = baseGPR;
        GPRReg propertyOwnerGPR;

        if (m_polyProtoAccessChain) {
            // This isn't pretty, but we know we got here via generateWithGuard,
            // and it left the baseForAccess inside scratchGPR. We could re-derive the base,
            // but it'd require emitting the same code to load the base twice.
            propertyOwnerGPR = scratchGPR;
        } else if (hasAlternateBase()) {
            jit.move(
                CCallHelpers::TrustedImmPtr(alternateBase()), scratchGPR);
            propertyOwnerGPR = scratchGPR;
        } else if (viaProxy() && doesPropertyStorageLoads) {
            // We only need this when loading an inline or out of line property. For custom accessors,
            // we can invoke with a receiver value that is a JSProxy. For custom values, we unbox to the
            // JSProxy's target. For getters/setters, we'll also invoke them with the JSProxy as |this|,
            // but we need to load the actual GetterSetter cell from the JSProxy's target.

            if (m_type == Getter || m_type == Setter)
                propertyOwnerGPR = scratchGPR;
            else
                propertyOwnerGPR = valueRegsPayloadGPR;

            jit.loadPtr(
                CCallHelpers::Address(baseGPR, JSProxy::targetOffset()), propertyOwnerGPR);
        } else if (viaProxy() && takesPropertyOwnerAsCFunctionArgument) {
            propertyOwnerGPR = scratchGPR;
            jit.loadPtr(
                CCallHelpers::Address(baseGPR, JSProxy::targetOffset()), propertyOwnerGPR);
        } else
            propertyOwnerGPR = receiverGPR;

        GPRReg loadedValueGPR = InvalidGPRReg;
        if (doesPropertyStorageLoads) {
            if (m_type == Load || m_type == GetGetter)
                loadedValueGPR = valueRegsPayloadGPR;
            else
                loadedValueGPR = scratchGPR;

            ASSERT((m_type != Getter && m_type != Setter) || loadedValueGPR != baseGPR);
            ASSERT(m_type != Setter || loadedValueGPR != valueRegsPayloadGPR);

            GPRReg storageGPR;
            if (isInlineOffset(m_offset))
                storageGPR = propertyOwnerGPR;
            else {
                jit.loadPtr(
                    CCallHelpers::Address(propertyOwnerGPR, JSObject::butterflyOffset()),
                    loadedValueGPR);
                storageGPR = loadedValueGPR;
            }

#if USE(JSVALUE64)
            jit.load64(
                CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset)), loadedValueGPR);
#else
            if (m_type == Load || m_type == GetGetter) {
                jit.load32(
                    CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + TagOffset),
                    valueRegs.tagGPR());
            }
            jit.load32(
                CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + PayloadOffset),
                loadedValueGPR);
#endif
        }

        if (m_type == Load || m_type == GetGetter) {
            state.succeed();
            return;
        }

        if (m_type == CustomAccessorGetter && this->as<GetterSetterAccessCase>().domAttribute()) {
            auto& access = this->as<GetterSetterAccessCase>();
            // We do not need to emit a CheckDOM operation, since the structure check ensures
            // that the structure of the given base value is structure()! So all we need to do
            // is perform the CheckDOM check here, at IC compile time.
            if (!structure()->classInfo()->isSubClassOf(access.domAttribute()->classInfo)) {
                state.failAndIgnore.append(jit.jump());
                return;
            }

            if (Options::useDOMJIT() && access.domAttribute()->domJIT) {
                access.emitDOMJITGetter(state, access.domAttribute()->domJIT, receiverGPR);
                return;
            }
        }

        // Stuff for custom getters/setters.
        CCallHelpers::Call operationCall;

        // Stuff for JS getters/setters.
        CCallHelpers::DataLabelPtr addressOfLinkFunctionCheck;
        CCallHelpers::Call fastPathCall;
        CCallHelpers::Call slowPathCall;

        // This also does the necessary calculations of whether or not we're an
        // exception handling call site.
        AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall();

        auto restoreLiveRegistersFromStackForCall = [&](AccessGenerationState::SpillState& spillState, bool callHasReturnValue) {
            RegisterSet dontRestore;
            if (callHasReturnValue) {
                // This is the result value. We don't want to overwrite the result with what we stored to the stack.
                // We sometimes have to store it to the stack just in case we throw an exception and need the original value.
                dontRestore.set(valueRegs);
            }
            state.restoreLiveRegistersFromStackForCall(spillState, dontRestore);
        };

        jit.store32(
            CCallHelpers::TrustedImm32(state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
            CCallHelpers::tagFor(CallFrameSlot::argumentCountIncludingThis));

        if (m_type == Getter || m_type == Setter) {
            auto& access = this->as<GetterSetterAccessCase>();
            ASSERT(baseGPR != loadedValueGPR);
            ASSERT(m_type != Setter || valueRegsPayloadGPR != loadedValueGPR);

            // Create a JS call using a JS call inline cache. Assume that:
            //
            // - SP is aligned and represents the extent of the calling compiler's stack usage.
            //
            // - FP is set correctly (i.e. it points to the caller's call frame header).
            //
            // - SP - FP is an aligned difference.
            //
            // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
            //   code.
            //
            // Therefore, we temporarily grow the stack for the purpose of the call and then
            // shrink it after.

            state.setSpillStateForJSGetterSetter(spillState);

            RELEASE_ASSERT(!access.callLinkInfo());
            CallLinkInfo* callLinkInfo = state.m_callLinkInfos.add(stubInfo.codeOrigin);
            access.m_callLinkInfo = callLinkInfo;

            // FIXME: If we generated a polymorphic call stub that jumped back to the getter
            // stub, which then jumped back to the main code, then we'd have a reachability
            // situation that the GC doesn't know about. The GC would ensure that the polymorphic
            // call stub stayed alive, and it would ensure that the main code stayed alive, but
            // it wouldn't know that the getter stub was alive. Ideally JIT stub routines would
            // be GC objects, and then we'd be able to say that the polymorphic call stub has a
            // reference to the getter stub.
            // https://bugs.webkit.org/show_bug.cgi?id=148914
            callLinkInfo->disallowStubs();

            callLinkInfo->setUpCall(CallLinkInfo::Call, loadedValueGPR);

            CCallHelpers::JumpList done;

            // There is a "this" argument.
            unsigned numberOfParameters = 1;
            // ... and a value argument if we're calling a setter.
            if (m_type == Setter)
                numberOfParameters++;

            // Get the accessor; if there ain't one then the result is jsUndefined().
            // Note that GetterSetter always has cells for both. If one is not set (like, the getter exists, but the setter is not set), Null{Getter,Setter}Function is stored.
            Optional<CCallHelpers::Jump> returnUndefined;
            if (m_type == Setter) {
                jit.loadPtr(
                    CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
                    loadedValueGPR);
                if (ecmaMode.isStrict()) {
                    CCallHelpers::Jump shouldNotThrowError = jit.branchIfNotType(loadedValueGPR, NullSetterFunctionType);
                    // We replace the setter with this AccessCase's JSGlobalObject::nullSetterStrictFunction, which will throw an error with the right JSGlobalObject.
                    jit.move(CCallHelpers::TrustedImmPtr(globalObject->nullSetterStrictFunction()), loadedValueGPR);
                    shouldNotThrowError.link(&jit);
                }
            } else {
                jit.loadPtr(
                    CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
                    loadedValueGPR);
                returnUndefined = jit.branchIfType(loadedValueGPR, NullSetterFunctionType);
            }

            unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + roundArgumentCountToAlignFrame(numberOfParameters);
            ASSERT(!(numberOfRegsForCall % stackAlignmentRegisters()));
            unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);

            unsigned alignedNumberOfBytesForCall =
                WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);

            jit.subPtr(
                CCallHelpers::TrustedImm32(alignedNumberOfBytesForCall),
                CCallHelpers::stackPointerRegister);

            CCallHelpers::Address calleeFrame = CCallHelpers::Address(
                CCallHelpers::stackPointerRegister,
                -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));

            jit.store32(
                CCallHelpers::TrustedImm32(numberOfParameters),
                calleeFrame.withOffset(CallFrameSlot::argumentCountIncludingThis * sizeof(Register) + PayloadOffset));

            jit.storeCell(
                loadedValueGPR, calleeFrame.withOffset(CallFrameSlot::callee * sizeof(Register)));

            jit.storeCell(
                thisGPR,
                calleeFrame.withOffset(virtualRegisterForArgumentIncludingThis(0).offset() * sizeof(Register)));

            if (m_type == Setter) {
                jit.storeValue(
                    valueRegs,
                    calleeFrame.withOffset(
                        virtualRegisterForArgumentIncludingThis(1).offset() * sizeof(Register)));
            }

            CCallHelpers::Jump slowCase = jit.branchPtrWithPatch(
                CCallHelpers::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
                CCallHelpers::TrustedImmPtr(nullptr));

            fastPathCall = jit.nearCall();
            if (m_type == Getter)
                jit.setupResults(valueRegs);
            done.append(jit.jump());

            slowCase.link(&jit);
            jit.move(loadedValueGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
            // We *always* know that the getter/setter, if non-null, is a cell.
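            // (On 32-bit, a JSValue travels as a (tag, payload) pair, so the slow path expects
            // the CellTag in regT1 alongside the callee payload already in regT0.)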
            jit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
            jit.move(CCallHelpers::TrustedImmPtr(access.callLinkInfo()), GPRInfo::regT2);
            jit.move(CCallHelpers::TrustedImmPtr(globalObject), GPRInfo::regT3);
            slowPathCall = jit.nearCall();
            if (m_type == Getter)
                jit.setupResults(valueRegs);
            done.append(jit.jump());

            if (returnUndefined) {
                ASSERT(m_type == Getter);
                returnUndefined.value().link(&jit);
                jit.moveTrustedValue(jsUndefined(), valueRegs);
            }
            done.link(&jit);

            jit.addPtr(CCallHelpers::TrustedImm32((codeBlock->stackPointerOffset() * sizeof(Register)) - state.preservedReusedRegisterState.numberOfBytesPreserved - spillState.numberOfStackBytesUsedForRegisterPreservation),
                GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
            bool callHasReturnValue = isGetter();
            restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);

            jit.addLinkTask([=, &vm] (LinkBuffer& linkBuffer) {
                this->as<GetterSetterAccessCase>().callLinkInfo()->setCallLocations(
                    CodeLocationLabel<JSInternalPtrTag>(linkBuffer.locationOfNearCall<JSInternalPtrTag>(slowPathCall)),
                    CodeLocationLabel<JSInternalPtrTag>(linkBuffer.locationOf<JSInternalPtrTag>(addressOfLinkFunctionCheck)),
                    linkBuffer.locationOfNearCall<JSInternalPtrTag>(fastPathCall));

                linkBuffer.link(
                    slowPathCall,
                    CodeLocationLabel<JITThunkPtrTag>(vm.getCTIStub(linkCallThunkGenerator).code()));
            });
        } else {
            ASSERT(m_type == CustomValueGetter || m_type == CustomAccessorGetter || m_type == CustomValueSetter || m_type == CustomAccessorSetter);
            ASSERT(!doesPropertyStorageLoads); // Or we need an extra register. We rely on propertyOwnerGPR being correct here.

            // We need to make room for the C call so that any of our stack spillage isn't
            // overwritten. It's hard to track whether someone did spillage or not, so we just
            // assume that we always need to make some space here.
            jit.makeSpaceOnStackForCCall();

            // Check if it is a super access.
            GPRReg receiverForCustomGetGPR = baseGPR != thisGPR ? thisGPR : receiverGPR;

            // getter: EncodedJSValue (*GetValueFunc)(JSGlobalObject*, EncodedJSValue thisValue, PropertyName);
            // setter: bool (*PutValueFunc)(JSGlobalObject*, EncodedJSValue thisObject, EncodedJSValue value);
            // Custom values are passed the slotBase (the property holder); custom accessors are passed the thisValue (receiver).
            GPRReg baseForCustom = takesPropertyOwnerAsCFunctionArgument ? propertyOwnerGPR : receiverForCustomGetGPR;
            // We do not need to keep globalObject alive, since the owner CodeBlock (even if the
            // JSGlobalObject* belongs to a CodeBlock that is inlined into and held by a DFG
            // CodeBlock) must keep it alive.
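            // For illustration only, a hypothetical custom getter matching the GetValueFunc
            // signature above might look like this (exampleCustomGetter is not a real function
            // in this file):
            //
            //     static EncodedJSValue exampleCustomGetter(JSGlobalObject*, EncodedJSValue, PropertyName)
            //     {
            //         return JSValue::encode(jsNumber(42));
            //     }
            //
            // The IC calls such functions directly (or through the JIT cage), which is why we
            // reserved C-call stack space above.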
            if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
                RELEASE_ASSERT(m_identifier);
                if (Options::useJITCage()) {
                    jit.setupArguments<PropertySlot::GetValueFuncWithPtr>(
                        CCallHelpers::TrustedImmPtr(globalObject),
                        CCallHelpers::CellValue(baseForCustom),
                        CCallHelpers::TrustedImmPtr(uid()),
                        CCallHelpers::TrustedImmPtr(this->as<GetterSetterAccessCase>().m_customAccessor.executableAddress()));
                } else {
                    jit.setupArguments<PropertySlot::GetValueFunc>(
                        CCallHelpers::TrustedImmPtr(globalObject),
                        CCallHelpers::CellValue(baseForCustom),
                        CCallHelpers::TrustedImmPtr(uid()));
                }
            } else {
                if (Options::useJITCage()) {
                    jit.setupArguments<PutPropertySlot::PutValueFuncWithPtr>(
                        CCallHelpers::TrustedImmPtr(globalObject),
                        CCallHelpers::CellValue(baseForCustom),
                        valueRegs,
                        CCallHelpers::TrustedImmPtr(this->as<GetterSetterAccessCase>().m_customAccessor.executableAddress()));
                } else {
                    jit.setupArguments<PutPropertySlot::PutValueFunc>(
                        CCallHelpers::TrustedImmPtr(globalObject),
                        CCallHelpers::CellValue(baseForCustom),
                        valueRegs);
                }
            }
            jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);

            if (Options::useJITCage())
                operationCall = jit.call(OperationPtrTag);
            else
                operationCall = jit.call(CustomAccessorPtrTag);
            jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                if (Options::useJITCage())
                    linkBuffer.link(operationCall, FunctionPtr<OperationPtrTag>(vmEntryCustomAccessor));
                else
                    linkBuffer.link(operationCall, this->as<GetterSetterAccessCase>().m_customAccessor);
            });

            if (m_type == CustomValueGetter || m_type == CustomAccessorGetter)
                jit.setupResults(valueRegs);
            jit.reclaimSpaceOnStackForCCall();

            CCallHelpers::Jump noException =
                jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);

            state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
            state.emitExplicitExceptionHandler();

            noException.link(&jit);
            bool callHasReturnValue = isGetter();
            restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
        }
        state.succeed();
        return;
    }

    case Replace: {
        GPRReg base = baseGPR;
        if (viaProxy()) {
            // This isn't pretty, but the path that emits the structure checks loads the real
            // base into scratchGPR.
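            // (Note that the store below may clobber scratchGPR with the butterfly pointer for
            // out-of-line offsets, which is why the write-barrier path reloads the proxy target.)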
            base = scratchGPR;
        }

        if (isInlineOffset(m_offset)) {
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    base,
                    JSObject::offsetOfInlineStorage() +
                    offsetInInlineStorage(m_offset) * sizeof(JSValue)));
        } else {
            jit.loadPtr(CCallHelpers::Address(base, JSObject::butterflyOffset()), scratchGPR);
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
        }

        if (viaProxy()) {
            CCallHelpers::JumpList skipBarrier;
            skipBarrier.append(jit.branchIfNotCell(valueRegs));
            if (!isInlineOffset(m_offset))
                jit.loadPtr(CCallHelpers::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);
            skipBarrier.append(jit.barrierBranch(vm, scratchGPR, scratchGPR));

            jit.loadPtr(CCallHelpers::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);
            auto spillState = state.preserveLiveRegistersToStackForCallWithoutExceptions();
            jit.setupArguments<decltype(operationWriteBarrierSlowPath)>(CCallHelpers::TrustedImmPtr(&vm), scratchGPR);
            jit.prepareCallOperation(vm);
            auto operationCall = jit.call(OperationPtrTag);
            jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                linkBuffer.link(operationCall, FunctionPtr<OperationPtrTag>(operationWriteBarrierSlowPath));
            });
            state.restoreLiveRegistersFromStackForCall(spillState);

            skipBarrier.link(&jit);
        }

        state.succeed();
        return;
    }

    case Transition: {
        ASSERT(!viaProxy());
        // AccessCase::transition() should have returned null if this wasn't true.
        RELEASE_ASSERT(GPRInfo::numberOfRegisters >= 6 || !structure()->outOfLineCapacity() || structure()->outOfLineCapacity() == newStructure()->outOfLineCapacity());

        // NOTE: This logic is duplicated in AccessCase::doesCalls(). It's important that
        // doesCalls() knows exactly when this would make calls.
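        // An illustrative reading of the flags below: going from outOfLineCapacity() == 0 to 4
        // means allocating && !reallocating (fresh out-of-line storage); going from 4 to 8 means
        // allocating && reallocating (grow and copy). allocatingInline is only true when the
        // structure cannot have an indexing header; otherwise the butterfly may also carry
        // indexed storage, and we defer to a C++ operation below.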
        bool allocating = newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity();
        bool reallocating = allocating && structure()->outOfLineCapacity();
        bool allocatingInline = allocating && !structure()->couldHaveIndexingHeader();

        ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
        allocator.lock(stubInfo.baseRegs());
        allocator.lock(valueRegs);
        allocator.lock(scratchGPR);

        GPRReg scratchGPR2 = InvalidGPRReg;
        GPRReg scratchGPR3 = InvalidGPRReg;
        if (allocatingInline) {
            scratchGPR2 = allocator.allocateScratchGPR();
            scratchGPR3 = allocator.allocateScratchGPR();
        }

        ScratchRegisterAllocator::PreservedState preservedState =
            allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::SpaceForCCall);

        CCallHelpers::JumpList slowPath;

        ASSERT(structure()->transitionWatchpointSetHasBeenInvalidated());

        if (allocating) {
            size_t newSize = newStructure()->outOfLineCapacity() * sizeof(JSValue);

            if (allocatingInline) {
                Allocator allocator = vm.jsValueGigacageAuxiliarySpace.allocatorFor(newSize, AllocatorForMode::AllocatorIfExists);

                jit.emitAllocate(scratchGPR, JITAllocator::constant(allocator), scratchGPR2, scratchGPR3, slowPath);
                jit.addPtr(CCallHelpers::TrustedImm32(newSize + sizeof(IndexingHeader)), scratchGPR);

                size_t oldSize = structure()->outOfLineCapacity() * sizeof(JSValue);
                ASSERT(newSize > oldSize);

                if (reallocating) {
                    // Handle the case where we are reallocating (i.e. the old structure/butterfly
                    // already had out-of-line property storage).

                    jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);

                    // We have scratchGPR = new storage, scratchGPR3 = old storage,
                    // scratchGPR2 = available.
                    for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
                        jit.loadPtr(
                            CCallHelpers::Address(
                                scratchGPR3,
                                -static_cast<ptrdiff_t>(
                                    offset + sizeof(JSValue) + sizeof(void*))),
                            scratchGPR2);
                        jit.storePtr(
                            scratchGPR2,
                            CCallHelpers::Address(
                                scratchGPR,
                                -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
                    }
                }

                for (size_t offset = oldSize; offset < newSize; offset += sizeof(void*))
                    jit.storePtr(CCallHelpers::TrustedImmPtr(nullptr), CCallHelpers::Address(scratchGPR, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
            } else {
                // Handle the case where we are allocating out-of-line using an operation.
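                // (We take this path when the structure could have an indexing header: the
                // butterfly may then also carry indexed storage, so the reallocation is done by
                // a C++ operation that knows how to move everything.)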
                RegisterSet extraRegistersToPreserve;
                extraRegistersToPreserve.set(baseGPR);
                extraRegistersToPreserve.set(valueRegs);
                AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall(extraRegistersToPreserve);

                jit.store32(
                    CCallHelpers::TrustedImm32(
                        state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
                    CCallHelpers::tagFor(CallFrameSlot::argumentCountIncludingThis));

                jit.makeSpaceOnStackForCCall();

                if (!reallocating) {
                    jit.setupArguments<decltype(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity)>(CCallHelpers::TrustedImmPtr(&vm), baseGPR);
                    jit.prepareCallOperation(vm);

                    CCallHelpers::Call operationCall = jit.call(OperationPtrTag);
                    jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                        linkBuffer.link(
                            operationCall,
                            FunctionPtr<OperationPtrTag>(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity));
                    });
                } else {
                    // Handle the case where we are reallocating (i.e. the old structure/butterfly
                    // already had out-of-line property storage).
                    jit.setupArguments<decltype(operationReallocateButterflyToGrowPropertyStorage)>(CCallHelpers::TrustedImmPtr(&vm), baseGPR, CCallHelpers::TrustedImm32(newSize / sizeof(JSValue)));
                    jit.prepareCallOperation(vm);

                    CCallHelpers::Call operationCall = jit.call(OperationPtrTag);
                    jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                        linkBuffer.link(
                            operationCall,
                            FunctionPtr<OperationPtrTag>(operationReallocateButterflyToGrowPropertyStorage));
                    });
                }

                jit.reclaimSpaceOnStackForCCall();
                jit.move(GPRInfo::returnValueGPR, scratchGPR);

                CCallHelpers::Jump noException = jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);

                state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
                state.emitExplicitExceptionHandler();

                noException.link(&jit);
                RegisterSet resultRegisterToExclude;
                resultRegisterToExclude.set(scratchGPR);
                state.restoreLiveRegistersFromStackForCall(spillState, resultRegisterToExclude);
            }
        }

        if (isInlineOffset(m_offset)) {
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    baseGPR,
                    JSObject::offsetOfInlineStorage() +
                    offsetInInlineStorage(m_offset) * sizeof(JSValue)));
        } else {
            if (!allocating)
                jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
        }

        if (allocatingInline) {
            // If we were to have any indexed properties, then we would need to update the indexing mask on the base object.
            RELEASE_ASSERT(!newStructure()->couldHaveIndexingHeader());
            // We set the new butterfly and the structure last. Doing it this way ensures that
            // whatever we had done up to this point is forgotten if we choose to branch to the
            // slow path.
            jit.nukeStructureAndStoreButterfly(vm, scratchGPR, baseGPR);
        }

        uint32_t structureBits = bitwise_cast<uint32_t>(newStructure()->id());
        jit.store32(
            CCallHelpers::TrustedImm32(structureBits),
            CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()));

        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        // We will have a slow path if we were allocating without the help of an operation.
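        // (That slow path is the allocation fast path's failure branch from emitAllocate()
        // above; routing it to failAndIgnore lets this access fall back without committing any
        // part of the transition.)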
        if (allocatingInline) {
            if (allocator.didReuseRegisters()) {
                slowPath.link(&jit);
                allocator.restoreReusedRegistersByPopping(jit, preservedState);
                state.failAndIgnore.append(jit.jump());
            } else
                state.failAndIgnore.append(slowPath);
        } else
            RELEASE_ASSERT(slowPath.empty());
        return;
    }

    case Delete: {
        ScratchRegisterAllocator allocator(stubInfo.usedRegisters);
        allocator.lock(stubInfo.baseRegs());
        allocator.lock(valueRegs);
        allocator.lock(baseGPR);
        allocator.lock(scratchGPR);
        ASSERT(structure()->transitionWatchpointSetHasBeenInvalidated());
        ASSERT(newStructure()->transitionKind() == TransitionKind::PropertyDeletion);
        ASSERT(baseGPR != scratchGPR);
        ASSERT(!valueRegs.uses(baseGPR));
        ASSERT(!valueRegs.uses(scratchGPR));

        ScratchRegisterAllocator::PreservedState preservedState =
            allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);

        jit.moveValue(JSValue(), valueRegs);

        if (isInlineOffset(m_offset)) {
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    baseGPR,
                    JSObject::offsetOfInlineStorage() +
                    offsetInInlineStorage(m_offset) * sizeof(JSValue)));
        } else {
            jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
        }

        uint32_t structureBits = bitwise_cast<uint32_t>(newStructure()->id());
        jit.store32(
            CCallHelpers::TrustedImm32(structureBits),
            CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()));

        jit.move(MacroAssembler::TrustedImm32(true), valueRegs.payloadGPR());

        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();
        return;
    }

    case DeleteNonConfigurable: {
        jit.move(MacroAssembler::TrustedImm32(false), valueRegs.payloadGPR());
        state.succeed();
        return;
    }

    case DeleteMiss: {
        jit.move(MacroAssembler::TrustedImm32(true), valueRegs.payloadGPR());
        state.succeed();
        return;
    }

    case ArrayLength: {
        jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
        jit.load32(CCallHelpers::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
        state.failAndIgnore.append(
            jit.branch32(CCallHelpers::LessThan, scratchGPR, CCallHelpers::TrustedImm32(0)));
        jit.boxInt32(scratchGPR, valueRegs);
        state.succeed();
        return;
    }

    case StringLength: {
        jit.loadPtr(CCallHelpers::Address(baseGPR, JSString::offsetOfValue()), scratchGPR);
        auto isRope = jit.branchIfRopeStringImpl(scratchGPR);
        jit.load32(CCallHelpers::Address(scratchGPR, StringImpl::lengthMemoryOffset()), valueRegs.payloadGPR());
        auto done = jit.jump();

        isRope.link(&jit);
        jit.load32(CCallHelpers::Address(baseGPR, JSRopeString::offsetOfLength()), valueRegs.payloadGPR());

        done.link(&jit);
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case IntrinsicGetter: {
        RELEASE_ASSERT(isValidOffset(offset()));

        // We need to ensure the getter value does not move from under us. Note that GetterSetters
        // are immutable, so we just need to watch the property, not any value inside it.
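        // (Concretely, startWatchingPropertyForReplacements() below arranges for a watchpoint to
        // fire if the property is ever replaced, so code relying on this getter gets invalidated.)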
        Structure* currStructure;
        if (!hasAlternateBase())
            currStructure = structure();
        else
            currStructure = alternateBase()->structure(vm);
        currStructure->startWatchingPropertyForReplacements(vm, offset());

        this->as<IntrinsicGetterAccessCase>().emitIntrinsicGetter(state);
        return;
    }

    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfGeneric:
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        // These need to be handled by generateWithGuard(), since the guard is part of the
        // algorithm. We can be sure that nobody will call generate() directly for these since
        // they are not guarded by structure checks.
        RELEASE_ASSERT_NOT_REACHED();
    }

    RELEASE_ASSERT_NOT_REACHED();
}

TypedArrayType AccessCase::toTypedArrayType(AccessType accessType)
{
    switch (accessType) {
    case IndexedTypedArrayInt8Load:
        return TypeInt8;
    case IndexedTypedArrayUint8Load:
        return TypeUint8;
    case IndexedTypedArrayUint8ClampedLoad:
        return TypeUint8Clamped;
    case IndexedTypedArrayInt16Load:
        return TypeInt16;
    case IndexedTypedArrayUint16Load:
        return TypeUint16;
    case IndexedTypedArrayInt32Load:
        return TypeInt32;
    case IndexedTypedArrayUint32Load:
        return TypeUint32;
    case IndexedTypedArrayFloat32Load:
        return TypeFloat32;
    case IndexedTypedArrayFloat64Load:
        return TypeFloat64;
    default:
        RELEASE_ASSERT_NOT_REACHED();
    }
}

#if ASSERT_ENABLED
void AccessCase::checkConsistency(StructureStubInfo& stubInfo)
{
    RELEASE_ASSERT(!(requiresInt32PropertyCheck() && requiresIdentifierNameMatch()));

    if (stubInfo.hasConstantIdentifier) {
        RELEASE_ASSERT(!requiresInt32PropertyCheck());
        RELEASE_ASSERT(requiresIdentifierNameMatch());
    }
}
#endif // ASSERT_ENABLED

} // namespace JSC

#endif