// GetByStatus.cpp
1 /* 2 * Copyright (C) 2012-2020 Apple Inc. All rights reserved. 3 * 4 * Redistribution and use in source and binary forms, with or without 5 * modification, are permitted provided that the following conditions 6 * are met: 7 * 1. Redistributions of source code must retain the above copyright 8 * notice, this list of conditions and the following disclaimer. 9 * 2. Redistributions in binary form must reproduce the above copyright 10 * notice, this list of conditions and the following disclaimer in the 11 * documentation and/or other materials provided with the distribution. 12 * 13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY 14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR 17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR 20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY 21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24 */ 25 26 #include "config.h" 27 #include "GetByStatus.h" 28 29 #include "BytecodeStructs.h" 30 #include "CacheableIdentifierInlines.h" 31 #include "CodeBlock.h" 32 #include "ComplexGetStatus.h" 33 #include "GetterSetterAccessCase.h" 34 #include "ICStatusUtils.h" 35 #include "IntrinsicGetterAccessCase.h" 36 #include "ModuleNamespaceAccessCase.h" 37 #include "PolymorphicAccess.h" 38 #include "StructureStubInfo.h" 39 #include <wtf/ListDump.h> 40 41 namespace JSC { 42 43 bool GetByStatus::appendVariant(const GetByIdVariant& variant) 44 { 45 return appendICStatusVariant(m_variants, variant); 46 } 47 48 GetByStatus GetByStatus::computeFromLLInt(CodeBlock* profiledBlock, BytecodeIndex bytecodeIndex) 49 { 50 VM& vm = profiledBlock->vm(); 51 52 auto instruction = profiledBlock->instructions().at(bytecodeIndex.offset()); 53 54 StructureID structureID; 55 const Identifier* identifier = nullptr; 56 switch (instruction->opcodeID()) { 57 case op_get_by_id: { 58 auto& metadata = instruction->as<OpGetById>().metadata(profiledBlock); 59 // FIXME: We should not just bail if we see a get_by_id_proto_load. 60 // https://bugs.webkit.org/show_bug.cgi?id=158039 61 if (metadata.m_modeMetadata.mode != GetByIdMode::Default) 62 return GetByStatus(NoInformation, false); 63 structureID = metadata.m_modeMetadata.defaultMode.structureID; 64 65 identifier = &(profiledBlock->identifier(instruction->as<OpGetById>().m_property)); 66 break; 67 } 68 case op_get_by_id_direct: 69 structureID = instruction->as<OpGetByIdDirect>().metadata(profiledBlock).m_structureID; 70 identifier = &(profiledBlock->identifier(instruction->as<OpGetByIdDirect>().m_property)); 71 break; 72 case op_try_get_by_id: { 73 // FIXME: We should not just bail if we see a try_get_by_id. 
74 // https://bugs.webkit.org/show_bug.cgi?id=158039 75 return GetByStatus(NoInformation, false); 76 } 77 78 case op_get_by_val: 79 return GetByStatus(NoInformation, false); 80 81 case op_iterator_open: { 82 ASSERT(bytecodeIndex.checkpoint() == OpIteratorOpen::getNext); 83 auto& metadata = instruction->as<OpIteratorOpen>().metadata(profiledBlock); 84 85 // FIXME: We should not just bail if we see a get_by_id_proto_load. 86 // https://bugs.webkit.org/show_bug.cgi?id=158039 87 if (metadata.m_modeMetadata.mode != GetByIdMode::Default) 88 return GetByStatus(NoInformation, false); 89 structureID = metadata.m_modeMetadata.defaultMode.structureID; 90 identifier = &vm.propertyNames->next; 91 break; 92 } 93 94 case op_iterator_next: { 95 auto& metadata = instruction->as<OpIteratorNext>().metadata(profiledBlock); 96 if (bytecodeIndex.checkpoint() == OpIteratorNext::getDone) { 97 if (metadata.m_doneModeMetadata.mode != GetByIdMode::Default) 98 return GetByStatus(NoInformation, false); 99 structureID = metadata.m_doneModeMetadata.defaultMode.structureID; 100 identifier = &vm.propertyNames->done; 101 } else { 102 ASSERT(bytecodeIndex.checkpoint() == OpIteratorNext::getValue); 103 if (metadata.m_valueModeMetadata.mode != GetByIdMode::Default) 104 return GetByStatus(NoInformation, false); 105 structureID = metadata.m_valueModeMetadata.defaultMode.structureID; 106 identifier = &vm.propertyNames->value; 107 } 108 break; 109 } 110 111 case op_get_private_name: 112 // FIXME: Consider using LLInt caches or IC information to populate GetByStatus 113 // https://bugs.webkit.org/show_bug.cgi?id=217245 114 return GetByStatus(NoInformation, false); 115 116 default: { 117 ASSERT_NOT_REACHED(); 118 return GetByStatus(NoInformation, false); 119 } 120 } 121 122 if (!structureID) 123 return GetByStatus(NoInformation, false); 124 125 Structure* structure = vm.heap.structureIDTable().get(structureID); 126 127 if (structure->takesSlowPathInDFGForImpureProperty()) 128 return 
GetByStatus(NoInformation, false); 129 130 unsigned attributes; 131 PropertyOffset offset = structure->getConcurrently(identifier->impl(), attributes); 132 if (!isValidOffset(offset)) 133 return GetByStatus(NoInformation, false); 134 if (attributes & PropertyAttribute::CustomAccessorOrValue) 135 return GetByStatus(NoInformation, false); 136 137 GetByStatus result(Simple, false); 138 result.appendVariant(GetByIdVariant(nullptr, StructureSet(structure), offset)); 139 return result; 140 } 141 142 GetByStatus GetByStatus::computeFor(CodeBlock* profiledBlock, ICStatusMap& map, BytecodeIndex bytecodeIndex, ExitFlag didExit, CallLinkStatus::ExitSiteData callExitSiteData) 143 { 144 ConcurrentJSLocker locker(profiledBlock->m_lock); 145 146 GetByStatus result; 147 148 #if ENABLE(DFG_JIT) 149 result = computeForStubInfoWithoutExitSiteFeedback( 150 locker, profiledBlock, map.get(CodeOrigin(bytecodeIndex)).stubInfo, callExitSiteData); 151 152 if (didExit) 153 return result.slowVersion(); 154 #else 155 UNUSED_PARAM(map); 156 UNUSED_PARAM(didExit); 157 UNUSED_PARAM(callExitSiteData); 158 #endif 159 160 if (!result) 161 return computeFromLLInt(profiledBlock, bytecodeIndex); 162 163 return result; 164 } 165 166 #if ENABLE(JIT) 167 GetByStatus::GetByStatus(StubInfoSummary summary, StructureStubInfo& stubInfo) 168 : m_wasSeenInJIT(true) 169 { 170 switch (summary) { 171 case StubInfoSummary::NoInformation: 172 m_state = NoInformation; 173 return; 174 case StubInfoSummary::Simple: 175 case StubInfoSummary::MakesCalls: 176 RELEASE_ASSERT_NOT_REACHED(); 177 return; 178 case StubInfoSummary::TakesSlowPath: 179 m_state = stubInfo.tookSlowPath ? ObservedTakesSlowPath : LikelyTakesSlowPath; 180 return; 181 case StubInfoSummary::TakesSlowPathAndMakesCalls: 182 m_state = stubInfo.tookSlowPath ? 
ObservedSlowPathAndMakesCalls : MakesCalls; 183 return; 184 } 185 RELEASE_ASSERT_NOT_REACHED(); 186 } 187 188 GetByStatus::GetByStatus(const ModuleNamespaceAccessCase& accessCase) 189 : m_moduleNamespaceData(Box<ModuleNamespaceData>::create(ModuleNamespaceData { accessCase.moduleNamespaceObject(), accessCase.moduleEnvironment(), accessCase.scopeOffset(), accessCase.identifier() })) 190 , m_state(ModuleNamespace) 191 , m_wasSeenInJIT(true) 192 { 193 } 194 195 GetByStatus GetByStatus::computeForStubInfoWithoutExitSiteFeedback( 196 const ConcurrentJSLocker& locker, CodeBlock* profiledBlock, StructureStubInfo* stubInfo, CallLinkStatus::ExitSiteData callExitSiteData) 197 { 198 StubInfoSummary summary = StructureStubInfo::summary(profiledBlock->vm(), stubInfo); 199 if (!isInlineable(summary)) 200 return GetByStatus(summary, *stubInfo); 201 202 // Finally figure out if we can derive an access strategy. 203 GetByStatus result; 204 result.m_state = Simple; 205 result.m_wasSeenInJIT = true; // This is interesting for bytecode dumping only. 
206 switch (stubInfo->cacheType()) { 207 case CacheType::Unset: 208 return GetByStatus(NoInformation); 209 210 case CacheType::GetByIdSelf: { 211 Structure* structure = stubInfo->u.byIdSelf.baseObjectStructure.get(); 212 if (structure->takesSlowPathInDFGForImpureProperty()) 213 return GetByStatus(JSC::slowVersion(summary), *stubInfo); 214 CacheableIdentifier identifier = stubInfo->identifier(); 215 UniquedStringImpl* uid = identifier.uid(); 216 RELEASE_ASSERT(uid); 217 GetByIdVariant variant(WTFMove(identifier)); 218 unsigned attributes; 219 variant.m_offset = structure->getConcurrently(uid, attributes); 220 if (!isValidOffset(variant.m_offset)) 221 return GetByStatus(JSC::slowVersion(summary), *stubInfo); 222 if (attributes & PropertyAttribute::CustomAccessorOrValue) 223 return GetByStatus(JSC::slowVersion(summary), *stubInfo); 224 225 variant.m_structureSet.add(structure); 226 bool didAppend = result.appendVariant(variant); 227 ASSERT_UNUSED(didAppend, didAppend); 228 return result; 229 } 230 231 case CacheType::Stub: { 232 PolymorphicAccess* list = stubInfo->u.stub; 233 if (list->size() == 1) { 234 const AccessCase& access = list->at(0); 235 switch (access.type()) { 236 case AccessCase::ModuleNamespaceLoad: 237 return GetByStatus(access.as<ModuleNamespaceAccessCase>()); 238 default: 239 break; 240 } 241 } 242 243 for (unsigned listIndex = 0; listIndex < list->size(); ++listIndex) { 244 const AccessCase& access = list->at(listIndex); 245 if (access.viaProxy()) 246 return GetByStatus(JSC::slowVersion(summary), *stubInfo); 247 248 if (access.usesPolyProto()) 249 return GetByStatus(JSC::slowVersion(summary), *stubInfo); 250 251 if (!access.requiresIdentifierNameMatch()) { 252 // FIXME: We could use this for indexed loads in the future. This is pretty solid profiling 253 // information, and probably better than ArrayProfile when it's available. 
254 // https://bugs.webkit.org/show_bug.cgi?id=204215 255 return GetByStatus(JSC::slowVersion(summary), *stubInfo); 256 } 257 258 Structure* structure = access.structure(); 259 if (!structure) { 260 // The null structure cases arise due to array.length and string.length. We have no way 261 // of creating a GetByIdVariant for those, and we don't really have to since the DFG 262 // handles those cases in FixupPhase using value profiling. That's a bit awkward - we 263 // shouldn't have to use value profiling to discover something that the AccessCase 264 // could have told us. But, it works well enough. So, our only concern here is to not 265 // crash on null structure. 266 return GetByStatus(JSC::slowVersion(summary), *stubInfo); 267 } 268 269 ComplexGetStatus complexGetStatus = ComplexGetStatus::computeFor( 270 structure, access.conditionSet(), access.uid()); 271 272 switch (complexGetStatus.kind()) { 273 case ComplexGetStatus::ShouldSkip: 274 continue; 275 276 case ComplexGetStatus::TakesSlowPath: 277 return GetByStatus(JSC::slowVersion(summary), *stubInfo); 278 279 case ComplexGetStatus::Inlineable: { 280 std::unique_ptr<CallLinkStatus> callLinkStatus; 281 JSFunction* intrinsicFunction = nullptr; 282 FunctionPtr<CustomAccessorPtrTag> customAccessorGetter; 283 std::unique_ptr<DOMAttributeAnnotation> domAttribute; 284 bool haveDOMAttribute = false; 285 286 switch (access.type()) { 287 case AccessCase::Load: 288 case AccessCase::GetGetter: 289 case AccessCase::Miss: { 290 break; 291 } 292 case AccessCase::IntrinsicGetter: { 293 intrinsicFunction = access.as<IntrinsicGetterAccessCase>().intrinsicFunction(); 294 break; 295 } 296 case AccessCase::Getter: { 297 callLinkStatus = makeUnique<CallLinkStatus>(); 298 if (CallLinkInfo* callLinkInfo = access.as<GetterSetterAccessCase>().callLinkInfo()) { 299 *callLinkStatus = CallLinkStatus::computeFor( 300 locker, profiledBlock, *callLinkInfo, callExitSiteData); 301 } 302 break; 303 } 304 case AccessCase::CustomAccessorGetter: { 
305 customAccessorGetter = access.as<GetterSetterAccessCase>().customAccessor(); 306 if (!access.as<GetterSetterAccessCase>().domAttribute()) 307 return GetByStatus(JSC::slowVersion(summary), *stubInfo); 308 domAttribute = WTF::makeUnique<DOMAttributeAnnotation>(*access.as<GetterSetterAccessCase>().domAttribute()); 309 haveDOMAttribute = true; 310 result.m_state = Custom; 311 break; 312 } 313 default: { 314 // FIXME: It would be totally sweet to support more of these at some point in the 315 // future. https://bugs.webkit.org/show_bug.cgi?id=133052 316 return GetByStatus(JSC::slowVersion(summary), *stubInfo); 317 } } 318 319 ASSERT((AccessCase::Miss == access.type() || access.isCustom()) == (access.offset() == invalidOffset)); 320 GetByIdVariant variant(access.identifier(), StructureSet(structure), complexGetStatus.offset(), 321 complexGetStatus.conditionSet(), WTFMove(callLinkStatus), 322 intrinsicFunction, 323 customAccessorGetter, 324 WTFMove(domAttribute)); 325 326 if (!result.appendVariant(variant)) 327 return GetByStatus(JSC::slowVersion(summary), *stubInfo); 328 329 if (haveDOMAttribute) { 330 // Give up when custom accesses are not merged into one. 331 if (result.numVariants() != 1) 332 return GetByStatus(JSC::slowVersion(summary), *stubInfo); 333 } else { 334 // Give up when custom access and simple access are mixed. 
335 if (result.m_state == Custom) 336 return GetByStatus(JSC::slowVersion(summary), *stubInfo); 337 } 338 break; 339 } } 340 } 341 342 return result; 343 } 344 345 default: 346 return GetByStatus(JSC::slowVersion(summary), *stubInfo); 347 } 348 349 RELEASE_ASSERT_NOT_REACHED(); 350 return GetByStatus(); 351 } 352 353 GetByStatus GetByStatus::computeFor( 354 CodeBlock* profiledBlock, ICStatusMap& baselineMap, 355 ICStatusContextStack& icContextStack, CodeOrigin codeOrigin) 356 { 357 BytecodeIndex bytecodeIndex = codeOrigin.bytecodeIndex(); 358 CallLinkStatus::ExitSiteData callExitSiteData = CallLinkStatus::computeExitSiteData(profiledBlock, bytecodeIndex); 359 ExitFlag didExit = hasBadCacheExitSite(profiledBlock, bytecodeIndex); 360 361 for (ICStatusContext* context : icContextStack) { 362 ICStatus status = context->get(codeOrigin); 363 364 auto bless = [&] (const GetByStatus& result) -> GetByStatus { 365 if (!context->isInlined(codeOrigin)) { 366 // Merge with baseline result, which also happens to contain exit data for both 367 // inlined and not-inlined. 
368 GetByStatus baselineResult = computeFor( 369 profiledBlock, baselineMap, bytecodeIndex, didExit, 370 callExitSiteData); 371 baselineResult.merge(result); 372 return baselineResult; 373 } 374 if (didExit.isSet(ExitFromInlined)) 375 return result.slowVersion(); 376 return result; 377 }; 378 379 if (status.stubInfo) { 380 GetByStatus result; 381 { 382 ConcurrentJSLocker locker(context->optimizedCodeBlock->m_lock); 383 result = computeForStubInfoWithoutExitSiteFeedback( 384 locker, context->optimizedCodeBlock, status.stubInfo, callExitSiteData); 385 } 386 if (result.isSet()) 387 return bless(result); 388 } 389 390 if (status.getStatus) 391 return bless(*status.getStatus); 392 } 393 394 return computeFor(profiledBlock, baselineMap, bytecodeIndex, didExit, callExitSiteData); 395 } 396 397 GetByStatus GetByStatus::computeFor(const StructureSet& set, UniquedStringImpl* uid) 398 { 399 // For now we only handle the super simple self access case. We could handle the 400 // prototype case in the future. 401 // 402 // Note that this code is also used for GetByIdDirect since this function only looks 403 // into direct properties. When supporting prototype chains, we should split this for 404 // GetById and GetByIdDirect. 
405 406 if (set.isEmpty()) 407 return GetByStatus(); 408 409 if (parseIndex(*uid)) 410 return GetByStatus(LikelyTakesSlowPath); 411 412 GetByStatus result; 413 result.m_state = Simple; 414 result.m_wasSeenInJIT = false; 415 for (unsigned i = 0; i < set.size(); ++i) { 416 Structure* structure = set[i]; 417 if (structure->typeInfo().overridesGetOwnPropertySlot() && structure->typeInfo().type() != GlobalObjectType) 418 return GetByStatus(LikelyTakesSlowPath); 419 420 if (!structure->propertyAccessesAreCacheable()) 421 return GetByStatus(LikelyTakesSlowPath); 422 423 unsigned attributes; 424 PropertyOffset offset = structure->getConcurrently(uid, attributes); 425 if (!isValidOffset(offset)) 426 return GetByStatus(LikelyTakesSlowPath); // It's probably a prototype lookup. Give up on life for now, even though we could totally be way smarter about it. 427 if (attributes & PropertyAttribute::Accessor) 428 return GetByStatus(MakesCalls); // We could be smarter here, like strength-reducing this to a Call. 429 if (attributes & PropertyAttribute::CustomAccessorOrValue) 430 return GetByStatus(LikelyTakesSlowPath); 431 432 if (!result.appendVariant(GetByIdVariant(nullptr, structure, offset))) 433 return GetByStatus(LikelyTakesSlowPath); 434 } 435 436 return result; 437 } 438 #endif // ENABLE(JIT) 439 440 bool GetByStatus::makesCalls() const 441 { 442 switch (m_state) { 443 case NoInformation: 444 case LikelyTakesSlowPath: 445 case ObservedTakesSlowPath: 446 case Custom: 447 case ModuleNamespace: 448 return false; 449 case Simple: 450 for (unsigned i = m_variants.size(); i--;) { 451 if (m_variants[i].callLinkStatus()) 452 return true; 453 } 454 return false; 455 case MakesCalls: 456 case ObservedSlowPathAndMakesCalls: 457 return true; 458 } 459 RELEASE_ASSERT_NOT_REACHED(); 460 461 return false; 462 } 463 464 GetByStatus GetByStatus::slowVersion() const 465 { 466 if (observedStructureStubInfoSlowPath()) 467 return GetByStatus(makesCalls() ? 
ObservedSlowPathAndMakesCalls : ObservedTakesSlowPath, wasSeenInJIT()); 468 return GetByStatus(makesCalls() ? MakesCalls : LikelyTakesSlowPath, wasSeenInJIT()); 469 } 470 471 void GetByStatus::merge(const GetByStatus& other) 472 { 473 if (other.m_state == NoInformation) 474 return; 475 476 auto mergeSlow = [&] () { 477 if (observedStructureStubInfoSlowPath() || other.observedStructureStubInfoSlowPath()) 478 *this = GetByStatus((makesCalls() || other.makesCalls()) ? ObservedSlowPathAndMakesCalls : ObservedTakesSlowPath); 479 else 480 *this = GetByStatus((makesCalls() || other.makesCalls()) ? MakesCalls : LikelyTakesSlowPath); 481 }; 482 483 switch (m_state) { 484 case NoInformation: 485 *this = other; 486 return; 487 488 case Simple: 489 case Custom: 490 if (m_state != other.m_state) 491 return mergeSlow(); 492 493 for (const GetByIdVariant& otherVariant : other.m_variants) { 494 if (!appendVariant(otherVariant)) 495 return mergeSlow(); 496 } 497 return; 498 499 case ModuleNamespace: 500 if (other.m_state != ModuleNamespace) 501 return mergeSlow(); 502 503 if (m_moduleNamespaceData->m_moduleNamespaceObject != other.m_moduleNamespaceData->m_moduleNamespaceObject) 504 return mergeSlow(); 505 506 if (m_moduleNamespaceData->m_moduleEnvironment != other.m_moduleNamespaceData->m_moduleEnvironment) 507 return mergeSlow(); 508 509 if (m_moduleNamespaceData->m_scopeOffset != other.m_moduleNamespaceData->m_scopeOffset) 510 return mergeSlow(); 511 512 return; 513 514 case LikelyTakesSlowPath: 515 case ObservedTakesSlowPath: 516 case MakesCalls: 517 case ObservedSlowPathAndMakesCalls: 518 return mergeSlow(); 519 } 520 521 RELEASE_ASSERT_NOT_REACHED(); 522 } 523 524 void GetByStatus::filter(const StructureSet& set) 525 { 526 if (m_state != Simple) 527 return; 528 filterICStatusVariants(m_variants, set); 529 if (m_variants.isEmpty()) 530 m_state = NoInformation; 531 } 532 533 void GetByStatus::visitAggregate(SlotVisitor& visitor) 534 { 535 if (isModuleNamespace()) 536 
m_moduleNamespaceData->m_identifier.visitAggregate(visitor); 537 for (GetByIdVariant& variant : m_variants) 538 variant.visitAggregate(visitor); 539 } 540 541 void GetByStatus::markIfCheap(SlotVisitor& visitor) 542 { 543 for (GetByIdVariant& variant : m_variants) 544 variant.markIfCheap(visitor); 545 } 546 547 bool GetByStatus::finalize(VM& vm) 548 { 549 for (GetByIdVariant& variant : m_variants) { 550 if (!variant.finalize(vm)) 551 return false; 552 } 553 if (isModuleNamespace()) { 554 if (m_moduleNamespaceData->m_moduleNamespaceObject && !vm.heap.isMarked(m_moduleNamespaceData->m_moduleNamespaceObject)) 555 return false; 556 if (m_moduleNamespaceData->m_moduleEnvironment && !vm.heap.isMarked(m_moduleNamespaceData->m_moduleEnvironment)) 557 return false; 558 } 559 return true; 560 } 561 562 CacheableIdentifier GetByStatus::singleIdentifier() const 563 { 564 if (isModuleNamespace()) 565 return m_moduleNamespaceData->m_identifier; 566 567 if (m_variants.isEmpty()) 568 return nullptr; 569 570 CacheableIdentifier result = m_variants.first().identifier(); 571 if (!result) 572 return nullptr; 573 for (size_t i = 1; i < m_variants.size(); ++i) { 574 CacheableIdentifier identifier = m_variants[i].identifier(); 575 if (!identifier) 576 return nullptr; 577 if (identifier != result) 578 return nullptr; 579 } 580 return result; 581 } 582 583 void GetByStatus::dump(PrintStream& out) const 584 { 585 out.print("("); 586 switch (m_state) { 587 case NoInformation: 588 out.print("NoInformation"); 589 break; 590 case Simple: 591 out.print("Simple"); 592 break; 593 case Custom: 594 out.print("Custom"); 595 break; 596 case ModuleNamespace: 597 out.print("ModuleNamespace"); 598 break; 599 case LikelyTakesSlowPath: 600 out.print("LikelyTakesSlowPath"); 601 break; 602 case ObservedTakesSlowPath: 603 out.print("ObservedTakesSlowPath"); 604 break; 605 case MakesCalls: 606 out.print("MakesCalls"); 607 break; 608 case ObservedSlowPathAndMakesCalls: 609 
out.print("ObservedSlowPathAndMakesCalls"); 610 break; 611 } 612 out.print(", ", listDump(m_variants), ", seenInJIT = ", m_wasSeenInJIT, ")"); 613 } 614 615 } // namespace JSC 616