// Source/JavaScriptCore/runtime/VM.cpp
   1  /*
   2   * Copyright (C) 2008-2020 Apple Inc. All rights reserved.
   3   *
   4   * Redistribution and use in source and binary forms, with or without
   5   * modification, are permitted provided that the following conditions
   6   * are met:
   7   *
   8   * 1.  Redistributions of source code must retain the above copyright
   9   *     notice, this list of conditions and the following disclaimer. 
  10   * 2.  Redistributions in binary form must reproduce the above copyright
  11   *     notice, this list of conditions and the following disclaimer in the
  12   *     documentation and/or other materials provided with the distribution. 
  13   * 3.  Neither the name of Apple Inc. ("Apple") nor the names of
  14   *     its contributors may be used to endorse or promote products derived
  15   *     from this software without specific prior written permission. 
  16   *
  17   * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
  18   * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
  19   * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
  20   * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
  21   * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
  22   * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
  23   * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
  24   * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  25   * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
  26   * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  27   */
  28  
  29  #include "config.h"
  30  #include "VM.h"
  31  
  32  #include "AggregateError.h"
  33  #include "ArgList.h"
  34  #include "BigIntObject.h"
  35  #include "BooleanObject.h"
  36  #include "BuiltinExecutables.h"
  37  #include "BytecodeIntrinsicRegistry.h"
  38  #include "CheckpointOSRExitSideState.h"
  39  #include "ClonedArguments.h"
  40  #include "CodeBlock.h"
  41  #include "CodeCache.h"
  42  #include "CommonIdentifiers.h"
  43  #include "ControlFlowProfiler.h"
  44  #include "CustomGetterSetter.h"
  45  #include "DFGWorklist.h"
  46  #include "DOMAttributeGetterSetter.h"
  47  #include "DateInstance.h"
  48  #include "DebuggerScope.h"
  49  #include "DeferredWorkTimer.h"
  50  #include "Disassembler.h"
  51  #include "DoublePredictionFuzzerAgent.h"
  52  #include "ErrorInstance.h"
  53  #include "EvalCodeBlock.h"
  54  #include "Exception.h"
  55  #include "ExecutableToCodeBlockEdge.h"
  56  #include "FTLThunks.h"
  57  #include "FastMallocAlignedMemoryAllocator.h"
  58  #include "FileBasedFuzzerAgent.h"
  59  #include "FunctionCodeBlock.h"
  60  #include "FunctionExecutable.h"
  61  #include "GetterSetter.h"
  62  #include "GigacageAlignedMemoryAllocator.h"
  63  #include "HasOwnPropertyCache.h"
  64  #include "Heap.h"
  65  #include "HeapProfiler.h"
  66  #include "Interpreter.h"
  67  #include "IntlCache.h"
  68  #include "IntlCollator.h"
  69  #include "IntlDateTimeFormat.h"
  70  #include "IntlDisplayNames.h"
  71  #include "IntlListFormat.h"
  72  #include "IntlLocale.h"
  73  #include "IntlNumberFormat.h"
  74  #include "IntlPluralRules.h"
  75  #include "IntlRelativeTimeFormat.h"
  76  #include "IntlSegmentIterator.h"
  77  #include "IntlSegmenter.h"
  78  #include "IntlSegments.h"
  79  #include "IsoHeapCellType.h"
  80  #include "IsoInlinedHeapCellType.h"
  81  #include "JITCode.h"
  82  #include "JITOperationList.h"
  83  #include "JITThunks.h"
  84  #include "JITWorklist.h"
  85  #include "JSAPIGlobalObject.h"
  86  #include "JSAPIValueWrapper.h"
  87  #include "JSAPIWrapperObject.h"
  88  #include "JSArray.h"
  89  #include "JSArrayBuffer.h"
  90  #include "JSArrayIterator.h"
  91  #include "JSAsyncGenerator.h"
  92  #include "JSBigInt.h"
  93  #include "JSBoundFunction.h"
  94  #include "JSCallbackConstructor.h"
  95  #include "JSCallbackFunction.h"
  96  #include "JSCallbackObject.h"
  97  #include "JSCallee.h"
  98  #include "JSCustomGetterSetterFunction.h"
  99  #include "JSDestructibleObjectHeapCellType.h"
 100  #include "JSFinalizationRegistry.h"
 101  #include "JSFunction.h"
 102  #include "JSGlobalLexicalEnvironment.h"
 103  #include "JSGlobalObject.h"
 104  #include "JSImmutableButterfly.h"
 105  #include "JSInjectedScriptHost.h"
 106  #include "JSJavaScriptCallFrame.h"
 107  #include "JSLock.h"
 108  #include "JSMap.h"
 109  #include "JSMapIterator.h"
 110  #include "JSModuleNamespaceObject.h"
 111  #include "JSModuleRecord.h"
 112  #include "JSNativeStdFunction.h"
 113  #include "JSPromise.h"
 114  #include "JSPropertyNameEnumerator.h"
 115  #include "JSProxy.h"
 116  #include "JSScriptFetchParameters.h"
 117  #include "JSScriptFetcher.h"
 118  #include "JSSet.h"
 119  #include "JSSetIterator.h"
 120  #include "JSSourceCode.h"
 121  #include "JSStringIterator.h"
 122  #include "JSTemplateObjectDescriptor.h"
 123  #include "JSToWasmICCallee.h"
 124  #include "JSTypedArrays.h"
 125  #include "JSWeakMap.h"
 126  #include "JSWeakObjectRef.h"
 127  #include "JSWeakSet.h"
 128  #include "JSWebAssemblyCodeBlock.h"
 129  #include "JSWebAssemblyGlobal.h"
 130  #include "JSWebAssemblyInstance.h"
 131  #include "JSWebAssemblyMemory.h"
 132  #include "JSWebAssemblyModule.h"
 133  #include "JSWebAssemblyTable.h"
 134  #include "JSWithScope.h"
 135  #include "LLIntData.h"
 136  #include "MinimumReservedZoneSize.h"
 137  #include "ModuleProgramCodeBlock.h"
 138  #include "ModuleProgramExecutable.h"
 139  #include "NarrowingNumberPredictionFuzzerAgent.h"
 140  #include "NativeExecutable.h"
 141  #include "NumberObject.h"
 142  #include "PredictionFileCreatingFuzzerAgent.h"
 143  #include "ProfilerDatabase.h"
 144  #include "ProgramCodeBlock.h"
 145  #include "ProgramExecutable.h"
 146  #include "PropertyMapHashTable.h"
 147  #include "ProxyRevoke.h"
 148  #include "RandomizingFuzzerAgent.h"
 149  #include "RegExpCache.h"
 150  #include "RegExpObject.h"
 151  #include "SamplingProfiler.h"
 152  #include "ScopedArguments.h"
 153  #include "ShadowChicken.h"
 154  #include "SimpleTypedArrayController.h"
 155  #include "SourceProviderCache.h"
 156  #include "StrictEvalActivation.h"
 157  #include "StringObject.h"
 158  #include "StrongInlines.h"
 159  #include "StructureChain.h"
 160  #include "StructureInlines.h"
 161  #include "SymbolObject.h"
 162  #include "TestRunnerUtils.h"
 163  #include "ThunkGenerators.h"
 164  #include "TypeProfiler.h"
 165  #include "TypeProfilerLog.h"
 166  #include "VMEntryScope.h"
 167  #include "VMInlines.h"
 168  #include "VMInspector.h"
 169  #include "VariableEnvironment.h"
 170  #include "WasmWorklist.h"
 171  #include "Watchdog.h"
 172  #include "WeakGCMapInlines.h"
 173  #include "WebAssemblyFunction.h"
 174  #include "WebAssemblyModuleRecord.h"
 175  #include "WebAssemblyWrapperFunction.h"
 176  #include "WideningNumberPredictionFuzzerAgent.h"
 177  #include <wtf/ProcessID.h>
 178  #include <wtf/ReadWriteLock.h>
 179  #include <wtf/SimpleStats.h>
 180  #include <wtf/StringPrintStream.h>
 181  #include <wtf/Threading.h>
 182  #include <wtf/text/AtomStringTable.h>
 183  
 184  #if ENABLE(C_LOOP)
 185  #include "CLoopStack.h"
 186  #include "CLoopStackInlines.h"
 187  #endif
 188  
 189  #if ENABLE(DFG_JIT)
 190  #include "ConservativeRoots.h"
 191  #endif
 192  
 193  #if ENABLE(REGEXP_TRACING)
 194  #include "RegExp.h"
 195  #endif
 196  
 197  #if JSC_OBJC_API_ENABLED
 198  #include "ObjCCallbackFunction.h"
 199  #endif
 200  
 201  #ifdef JSC_GLIB_API_ENABLED
 202  #include "JSAPIWrapperGlobalObject.h"
 203  #include "JSCCallbackFunction.h"
 204  #endif
 205  
 206  namespace JSC {
 207  
 208  Atomic<unsigned> VM::s_numberOfIDs;
 209  
 210  DEFINE_ALLOCATOR_WITH_HEAP_IDENTIFIER(VM);
 211  
 212  // Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
 213  // ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
 214  // just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.
 215  
#if ENABLE(ASSEMBLER)
// Decides, once at startup, whether JIT (executable) memory may be used at all.
// Returns false when the useJIT option is off, when the JavaScriptCoreUseJIT
// environment variable vetoes it, or when the executable allocator cannot
// obtain valid JIT memory.
static bool enableAssembler()
{
    if (!Options::useJIT())
        return false;

    // The JavaScriptCoreUseJIT environment variable can disable the assembler:
    // any value that atoi() parses as 0 turns it off.
    char* canUseJITString = getenv("JavaScriptCoreUseJIT");
    if (canUseJITString && !atoi(canUseJITString))
        return false;

    ExecutableAllocator::initializeUnderlyingAllocator();
    if (!ExecutableAllocator::singleton().isValid()) {
        // Optionally treat failure to reserve JIT memory as fatal.
        if (Options::crashIfCantAllocateJITMemory())
            CRASH();
        return false;
    }

    return true;
}
#endif // ENABLE(ASSEMBLER)
 236  
 237  bool VM::canUseAssembler()
 238  {
 239  #if ENABLE(ASSEMBLER)
 240      static std::once_flag onceKey;
 241      static bool enabled = false;
 242      std::call_once(onceKey, [] {
 243          enabled = enableAssembler();
 244      });
 245      return enabled;
 246  #else
 247      return false; // interpreter only
 248  #endif
 249  }
 250  
// Computes and latches the process-global "can use JIT" flag into g_jscConfig.
// In ASSERT_ENABLED builds this also records that the flag has been set, so a
// second call (or a read before this runs) can be caught.
void VM::computeCanUseJIT()
{
#if ENABLE(JIT)
#if ASSERT_ENABLED
    // Enforce the at-most-once contract in debug builds.
    RELEASE_ASSERT(!g_jscConfig.vm.canUseJITIsSet);
    g_jscConfig.vm.canUseJITIsSet = true;
#endif
    // JIT requires both usable executable memory and the useJIT option.
    g_jscConfig.vm.canUseJIT = VM::canUseAssembler() && Options::useJIT();
#endif
}
 261  
 262  inline unsigned VM::nextID()
 263  {
 264      for (;;) {
 265          unsigned currentNumberOfIDs = s_numberOfIDs.load();
 266          unsigned newID = currentNumberOfIDs + 1;
 267          if (s_numberOfIDs.compareExchangeWeak(currentNumberOfIDs, newID))
 268              return newID;
 269      }
 270  }
 271  
 272  static bool vmCreationShouldCrash = false;
 273  
// The main VM constructor. Builds the entire per-VM object graph: the heap,
// its cell types and subspaces, the bootstrap Structures that every other cell
// depends on, and the optional subsystems (profilers, fuzzer agents, watchdog,
// JIT thunks) selected by Options.
//
// `runLoop` defaults to the current thread's run loop when null. `success`,
// when non-null, lets the caller observe a recoverable allocation failure
// (currently only heapBigIntConstantOne) instead of crashing.
VM::VM(VMType vmType, HeapType heapType, WTF::RunLoop* runLoop, bool* success)
    : m_id(nextID())
    , m_apiLock(adoptRef(new JSLock(this)))
    , m_runLoop(runLoop ? *runLoop : WTF::RunLoop::current())
    , m_random(Options::seedOfVMRandomForFuzzer() ? Options::seedOfVMRandomForFuzzer() : cryptographicallyRandomNumber())
    , m_integrityRandom(*this)
    , heap(*this, heapType)
    // Allocators backing the heap's subspaces.
    , fastMallocAllocator(makeUnique<FastMallocAlignedMemoryAllocator>())
    , primitiveGigacageAllocator(makeUnique<GigacageAlignedMemoryAllocator>(Gigacage::Primitive))
    , jsValueGigacageAllocator(makeUnique<GigacageAlignedMemoryAllocator>(Gigacage::JSValue))
    // Generic (shared) heap cell types.
    , auxiliaryHeapCellType(makeUnique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::Auxiliary)))
    , immutableButterflyHeapCellType(makeUnique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCellWithIndexingHeader)))
    , cellHeapCellType(makeUnique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCell)))
    , destructibleCellHeapCellType(makeUnique<HeapCellType>(CellAttributes(NeedsDestruction, HeapCell::JSCell)))
    // Iso heap cell types: one per class that gets its own isolated space.
    , apiGlobalObjectHeapCellType(IsoHeapCellType::create<JSAPIGlobalObject>())
    , callbackConstructorHeapCellType(IsoHeapCellType::create<JSCallbackConstructor>())
    , callbackGlobalObjectHeapCellType(IsoHeapCellType::create<JSCallbackObject<JSGlobalObject>>())
    , callbackObjectHeapCellType(IsoHeapCellType::create<JSCallbackObject<JSNonFinalObject>>())
    , dateInstanceHeapCellType(IsoHeapCellType::create<DateInstance>())
    , errorInstanceHeapCellType(IsoHeapCellType::create<ErrorInstance>())
    , finalizationRegistryCellType(IsoHeapCellType::create<JSFinalizationRegistry>())
    , globalLexicalEnvironmentHeapCellType(IsoHeapCellType::create<JSGlobalLexicalEnvironment>())
    , globalObjectHeapCellType(IsoHeapCellType::create<JSGlobalObject>())
    , injectedScriptHostSpaceHeapCellType(IsoHeapCellType::create<Inspector::JSInjectedScriptHost>())
    , javaScriptCallFrameHeapCellType(IsoHeapCellType::create<Inspector::JSJavaScriptCallFrame>())
    , jsModuleRecordHeapCellType(IsoHeapCellType::create<JSModuleRecord>())
    , moduleNamespaceObjectHeapCellType(IsoHeapCellType::create<JSModuleNamespaceObject>())
    , nativeStdFunctionHeapCellType(IsoHeapCellType::create<JSNativeStdFunction>())
    , stringHeapCellType(makeUnique<IsoInlinedHeapCellType<JSString>>())
    , weakMapHeapCellType(IsoHeapCellType::create<JSWeakMap>())
    , weakSetHeapCellType(IsoHeapCellType::create<JSWeakSet>())
    , destructibleObjectHeapCellType(makeUnique<JSDestructibleObjectHeapCellType>())
#if JSC_OBJC_API_ENABLED
    // Objective-C API wrapper cell types.
    , apiWrapperObjectHeapCellType(IsoHeapCellType::create<JSCallbackObject<JSAPIWrapperObject>>())
    , objCCallbackFunctionHeapCellType(IsoHeapCellType::create<ObjCCallbackFunction>())
#endif
#ifdef JSC_GLIB_API_ENABLED
    // GLib API wrapper cell types.
    , apiWrapperObjectHeapCellType(IsoHeapCellType::create<JSCallbackObject<JSAPIWrapperObject>>())
    , callbackAPIWrapperGlobalObjectHeapCellType(IsoHeapCellType::create<JSCallbackObject<JSAPIWrapperGlobalObject>>())
    , jscCallbackFunctionHeapCellType(IsoHeapCellType::create<JSCCallbackFunction>())
#endif
    // ECMA-402 (Intl) object cell types.
    , intlCollatorHeapCellType(IsoHeapCellType::create<IntlCollator>())
    , intlDateTimeFormatHeapCellType(IsoHeapCellType::create<IntlDateTimeFormat>())
    , intlDisplayNamesHeapCellType(IsoHeapCellType::create<IntlDisplayNames>())
    , intlListFormatHeapCellType(IsoHeapCellType::create<IntlListFormat>())
    , intlLocaleHeapCellType(IsoHeapCellType::create<IntlLocale>())
    , intlNumberFormatHeapCellType(IsoHeapCellType::create<IntlNumberFormat>())
    , intlPluralRulesHeapCellType(IsoHeapCellType::create<IntlPluralRules>())
    , intlRelativeTimeFormatHeapCellType(IsoHeapCellType::create<IntlRelativeTimeFormat>())
    , intlSegmentIteratorHeapCellType(IsoHeapCellType::create<IntlSegmentIterator>())
    , intlSegmenterHeapCellType(IsoHeapCellType::create<IntlSegmenter>())
    , intlSegmentsHeapCellType(IsoHeapCellType::create<IntlSegments>())
#if ENABLE(WEBASSEMBLY)
    // WebAssembly object cell types.
    , webAssemblyCodeBlockHeapCellType(IsoHeapCellType::create<JSWebAssemblyCodeBlock>())
    , webAssemblyFunctionHeapCellType(IsoHeapCellType::create<WebAssemblyFunction>())
    , webAssemblyGlobalHeapCellType(IsoHeapCellType::create<JSWebAssemblyGlobal>())
    , webAssemblyInstanceHeapCellType(IsoHeapCellType::create<JSWebAssemblyInstance>())
    , webAssemblyMemoryHeapCellType(IsoHeapCellType::create<JSWebAssemblyMemory>())
    , webAssemblyModuleHeapCellType(IsoHeapCellType::create<JSWebAssemblyModule>())
    , webAssemblyModuleRecordHeapCellType(IsoHeapCellType::create<WebAssemblyModuleRecord>())
    , webAssemblyTableHeapCellType(IsoHeapCellType::create<JSWebAssemblyTable>())
#endif
    // Auxiliary (butterfly/backing-store) spaces and catch-all cell spaces.
    , primitiveGigacageAuxiliarySpace("Primitive Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), primitiveGigacageAllocator.get()) // Hash:0x3e7cd762
    , jsValueGigacageAuxiliarySpace("JSValue Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), jsValueGigacageAllocator.get()) // Hash:0x241e946
    , immutableButterflyJSValueGigacageAuxiliarySpace("ImmutableButterfly Gigacage JSCellWithIndexingHeader", heap, immutableButterflyHeapCellType.get(), jsValueGigacageAllocator.get()) // Hash:0x7a945300
    , cellSpace("JSCell", heap, cellHeapCellType.get(), fastMallocAllocator.get()) // Hash:0xadfb5a79
    , variableSizedCellSpace("Variable Sized JSCell", heap, cellHeapCellType.get(), fastMallocAllocator.get()) // Hash:0xbcd769cc
    , destructibleObjectSpace("JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get()) // Hash:0x4f5ed7a9
    // Isolated subspaces, one per cell class.
    , arraySpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSArray)
    , bigIntSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSBigInt)
    , calleeSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSCallee)
    , clonedArgumentsSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), ClonedArguments)
    , customGetterSetterSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), CustomGetterSetter)
    , dateInstanceSpace ISO_SUBSPACE_INIT(heap, dateInstanceHeapCellType.get(), DateInstance)
    , domAttributeGetterSetterSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), DOMAttributeGetterSetter)
    , exceptionSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), Exception)
    , executableToCodeBlockEdgeSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), ExecutableToCodeBlockEdge) // Hash:0x7b730b20
    , functionSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSFunction) // Hash:0x800fca72
    , getterSetterSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), GetterSetter)
    , globalLexicalEnvironmentSpace ISO_SUBSPACE_INIT(heap, globalLexicalEnvironmentHeapCellType.get(), JSGlobalLexicalEnvironment)
    , internalFunctionSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), InternalFunction) // Hash:0xf845c464
    , jsProxySpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSProxy)
    , nativeExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), NativeExecutable) // Hash:0x67567f95
    , numberObjectSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), NumberObject)
    , plainObjectSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSNonFinalObject) // Mainly used for prototypes.
    , promiseSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSPromise)
    , propertyNameEnumeratorSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSPropertyNameEnumerator)
    , propertyTableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), PropertyTable) // Hash:0xc6bc9f12
    , regExpSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), RegExp)
    , regExpObjectSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), RegExpObject)
    , ropeStringSpace ISO_SUBSPACE_INIT(heap, stringHeapCellType.get(), JSRopeString)
    , scopedArgumentsSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), ScopedArguments)
    , sparseArrayValueMapSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), SparseArrayValueMap)
    , stringSpace ISO_SUBSPACE_INIT(heap, stringHeapCellType.get(), JSString) // Hash:0x90cf758f
    , stringObjectSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), StringObject)
    , structureChainSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), StructureChain)
    , structureRareDataSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), StructureRareData) // Hash:0xaca4e62d
    , structureSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), Structure) // Hash:0x1f1bcdca
    , symbolTableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), SymbolTable) // Hash:0xc5215afd
    // Subspace views over the edge space, partitioned by GC treatment.
    , executableToCodeBlockEdgesWithConstraints(executableToCodeBlockEdgeSpace)
    , executableToCodeBlockEdgesWithFinalizers(executableToCodeBlockEdgeSpace)
    , codeBlockSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), CodeBlock) // Hash:0x77e66ec9
    , functionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), FunctionExecutable) // Hash:0x5d158f3
    , programExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), ProgramExecutable) // Hash:0x527c77e7
    , unlinkedFunctionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), UnlinkedFunctionExecutable) // Hash:0xf6b828d9
    // Miscellaneous VM state.
    , vmType(vmType)
    , clientData(nullptr)
    , topEntryFrame(nullptr)
    , topCallFrame(CallFrame::noCaller())
    , deferredWorkTimer(DeferredWorkTimer::create(*this))
    // Default VMs share the thread's atom-string table; others get their own.
    , m_atomStringTable(vmType == Default ? Thread::current().atomStringTable() : new AtomStringTable)
    , m_privateSymbolRegistry(WTF::SymbolRegistry::Type::PrivateSymbol)
    , propertyNames(nullptr)
    , emptyList(new ArgList)
    , machineCodeBytesPerBytecodeWordForBaselineJIT(makeUnique<SimpleStats>())
    , customGetterSetterFunctionMap(*this)
    , stringCache(*this)
    , symbolImplToSymbolMap(*this)
    , structureCache(*this)
    , interpreter(nullptr)
    , entryScope(nullptr)
    , m_regExpCache(new RegExpCache(this))
    , m_compactVariableMap(adoptRef(*(new CompactTDZEnvironmentMap)))
#if ENABLE(REGEXP_TRACING)
    , m_rtTraceList(new RTTraceList())
#endif
#if ENABLE(GC_VALIDATION)
    , m_initializingObjectClass(0)
#endif
    , m_stackPointerAtVMEntry(nullptr)
    , m_codeCache(makeUnique<CodeCache>())
    , m_intlCache(makeUnique<IntlCache>())
    , m_builtinExecutables(makeUnique<BuiltinExecutables>(*this))
    , m_typeProfilerEnabledCount(0)
    , m_primitiveGigacageEnabled(IsWatched)
    , m_controlFlowProfilerEnabledCount(0)
{
    // Test hook: lets tooling force a crash at VM creation time.
    if (UNLIKELY(vmCreationShouldCrash))
        CRASH_WITH_INFO(0x4242424220202020, 0xbadbeef0badbeef, 0x1234123412341234, 0x1337133713371337);

    interpreter = new Interpreter(*this);
    StackBounds stack = Thread::current().stack();
    updateSoftReservedZoneSize(Options::softReservedZoneSize());
    setLastStackTop(stack.origin());

    JSRunLoopTimer::Manager::shared().registerVM(*this);

    // Need to be careful to keep everything consistent here.
    JSLockHolder lock(this);
    // Temporarily install this VM's atom-string table; restored below.
    AtomStringTable* existingEntryAtomStringTable = Thread::current().setCurrentAtomStringTable(m_atomStringTable);
    // Bootstrap: structureStructure must exist before any other Structure.
    structureStructure.set(*this, Structure::createStructure(*this));
    structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, nullptr, jsNull()));
    stringStructure.set(*this, JSString::createStructure(*this, nullptr, jsNull()));

    smallStrings.initializeCommonStrings(*this);

    propertyNames = new CommonIdentifiers(*this);
    // Structures for the remaining internal cell types.
    terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, nullptr, jsNull()));
    propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, nullptr, jsNull()));
    getterSetterStructure.set(*this, GetterSetter::createStructure(*this, nullptr, jsNull()));
    customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, nullptr, jsNull()));
    domAttributeGetterSetterStructure.set(*this, DOMAttributeGetterSetter::createStructure(*this, nullptr, jsNull()));
    scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, nullptr, jsNull()));
    apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, nullptr, jsNull()));
    nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, nullptr, jsNull()));
    evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, nullptr, jsNull()));
    programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, nullptr, jsNull()));
    functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, nullptr, jsNull()));
#if ENABLE(WEBASSEMBLY)
    webAssemblyCodeBlockStructure.set(*this, JSWebAssemblyCodeBlock::createStructure(*this, nullptr, jsNull()));
#endif
    moduleProgramExecutableStructure.set(*this, ModuleProgramExecutable::createStructure(*this, nullptr, jsNull()));
    regExpStructure.set(*this, RegExp::createStructure(*this, nullptr, jsNull()));
    symbolStructure.set(*this, Symbol::createStructure(*this, nullptr, jsNull()));
    symbolTableStructure.set(*this, SymbolTable::createStructure(*this, nullptr, jsNull()));

    // One immutable-butterfly structure per copy-on-write indexing shape.
    immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithInt32) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, nullptr, jsNull(), CopyOnWriteArrayWithInt32));
    immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithDouble) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, nullptr, jsNull(), CopyOnWriteArrayWithDouble));
    immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithContiguous) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, nullptr, jsNull(), CopyOnWriteArrayWithContiguous));

    sourceCodeStructure.set(*this, JSSourceCode::createStructure(*this, nullptr, jsNull()));
    scriptFetcherStructure.set(*this, JSScriptFetcher::createStructure(*this, nullptr, jsNull()));
    scriptFetchParametersStructure.set(*this, JSScriptFetchParameters::createStructure(*this, nullptr, jsNull()));
    structureChainStructure.set(*this, StructureChain::createStructure(*this, nullptr, jsNull()));
    sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, nullptr, jsNull()));
    templateObjectDescriptorStructure.set(*this, JSTemplateObjectDescriptor::createStructure(*this, nullptr, jsNull()));
    unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, nullptr, jsNull()));
    unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, nullptr, jsNull()));
    unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, nullptr, jsNull()));
    unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, nullptr, jsNull()));
    unlinkedModuleProgramCodeBlockStructure.set(*this, UnlinkedModuleProgramCodeBlock::createStructure(*this, nullptr, jsNull()));
    propertyTableStructure.set(*this, PropertyTable::createStructure(*this, nullptr, jsNull()));
    functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, nullptr, jsNull()));
    exceptionStructure.set(*this, Exception::createStructure(*this, nullptr, jsNull()));
    programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, nullptr, jsNull()));
    moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, nullptr, jsNull()));
    evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, nullptr, jsNull()));
    functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, nullptr, jsNull()));
    hashMapBucketSetStructure.set(*this, HashMapBucket<HashMapBucketDataKey>::createStructure(*this, nullptr, jsNull()));
    hashMapBucketMapStructure.set(*this, HashMapBucket<HashMapBucketDataKeyValue>::createStructure(*this, nullptr, jsNull()));
    bigIntStructure.set(*this, JSBigInt::createStructure(*this, nullptr, jsNull()));
    executableToCodeBlockEdgeStructure.set(*this, ExecutableToCodeBlockEdge::createStructure(*this, nullptr, jsNull()));

    // Eagerly initialize constant cells since the concurrent compiler can access them.
    if (Options::useJIT()) {
        sentinelMapBucket();
        sentinelSetBucket();
    }
    {
        // tryCreateFrom can fail under memory pressure; report via `success`
        // when the caller provided it, otherwise treat failure as fatal.
        auto* bigInt = JSBigInt::tryCreateFrom(*this, 1);
        if (bigInt)
            heapBigIntConstantOne.set(*this, bigInt);
        else {
            if (success)
                *success = false;
            else
                RELEASE_ASSERT(bigInt);
        }
    }

    // Restore the atom-string table that was active on entry.
    Thread::current().setCurrentAtomStringTable(existingEntryAtomStringTable);
    
    Gigacage::addPrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);

    heap.notifyIsSafeToCollect();
    
    LLInt::Data::performAssertions(*this);
    
    if (UNLIKELY(Options::useProfiler())) {
        m_perBytecodeProfiler = makeUnique<Profiler::Database>(*this);

        // Profile output path: $JSC_PROFILER_PATH/JSCProfile-<pid>-<dbid>.json
        StringPrintStream pathOut;
        const char* profilerPath = getenv("JSC_PROFILER_PATH");
        if (profilerPath)
            pathOut.print(profilerPath, "/");
        pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
        m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
    }

    callFrameForCatch = nullptr;

    // Initialize this last, as a free way of asserting that VM initialization itself
    // won't use this.
    m_typedArrayController = adoptRef(new SimpleTypedArrayController());

    m_bytecodeIntrinsicRegistry = makeUnique<BytecodeIntrinsicRegistry>(*this);

    if (Options::useTypeProfiler())
        enableTypeProfiler();
    if (Options::useControlFlowProfiler())
        enableControlFlowProfiler();
#if ENABLE(SAMPLING_PROFILER)
    if (Options::useSamplingProfiler()) {
        setShouldBuildPCToCodeOriginMapping();
        Ref<Stopwatch> stopwatch = Stopwatch::create();
        stopwatch->start();
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
        if (Options::samplingProfilerPath())
            m_samplingProfiler->registerForReportAtExit();
        m_samplingProfiler->start();
    }
#endif // ENABLE(SAMPLING_PROFILER)

    // Fuzzer agents: later options overwrite earlier ones if several are set.
    if (Options::useRandomizingFuzzerAgent())
        setFuzzerAgent(makeUnique<RandomizingFuzzerAgent>(*this));
    if (Options::useDoublePredictionFuzzerAgent())
        setFuzzerAgent(makeUnique<DoublePredictionFuzzerAgent>(*this));
    if (Options::useFileBasedFuzzerAgent())
        setFuzzerAgent(makeUnique<FileBasedFuzzerAgent>(*this));
    if (Options::usePredictionFileCreatingFuzzerAgent())
        setFuzzerAgent(makeUnique<PredictionFileCreatingFuzzerAgent>(*this));
    if (Options::useNarrowingNumberPredictionFuzzerAgent())
        setFuzzerAgent(makeUnique<NarrowingNumberPredictionFuzzerAgent>(*this));
    if (Options::useWideningNumberPredictionFuzzerAgent())
        setFuzzerAgent(makeUnique<WideningNumberPredictionFuzzerAgent>(*this));

    if (Options::alwaysGeneratePCToCodeOriginMap())
        setShouldBuildPCToCodeOriginMapping();

    // The watchdog() option value doubles as the time limit in milliseconds.
    if (Options::watchdog()) {
        Watchdog& watchdog = ensureWatchdog();
        watchdog.setTimeLimit(Seconds::fromMilliseconds(Options::watchdog()));
    }

#if ENABLE(JIT)
    // Make sure that any stubs that the JIT is going to use are initialized in non-compilation threads.
    if (Options::useJIT()) {
        jitStubs = makeUnique<JITThunks>();
#if ENABLE(FTL_JIT)
        ftlThunks = makeUnique<FTL::Thunks>();
#endif // ENABLE(FTL_JIT)
        getCTIInternalFunctionTrampolineFor(CodeForCall);
        getCTIInternalFunctionTrampolineFor(CodeForConstruct);
    }
#endif

    if (Options::forceDebuggerBytecodeGeneration() || Options::alwaysUseShadowChicken())
        ensureShadowChicken();

    VMInspector::instance().add(this);

    // Freeze the runtime Config once the first VM is fully constructed,
    // unless a test explicitly disabled freezing.
    if (!g_jscConfig.disabledFreezingForTesting)
        Config::permanentlyFreeze();
}
 578  
 579  static ReadWriteLock s_destructionLock;
 580  
// Blocks until any in-flight VM destruction has finished: VM::~VM holds
// s_destructionLock for reading, so acquiring (and immediately releasing)
// the write lock here cannot succeed until no destructor is running.
void waitForVMDestruction()
{
    auto locker = holdLock(s_destructionLock.write());
}
 585  
VM::~VM()
{
    // Held for the duration of teardown; see waitForVMDestruction().
    auto destructionLocker = holdLock(s_destructionLock.read());
    
    // Detach from global services first so no callbacks arrive mid-teardown.
    Gigacage::removePrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);
    deferredWorkTimer->stopRunningTasks();
#if ENABLE(WEBASSEMBLY)
    if (Wasm::Worklist* worklist = Wasm::existingWorklistOrNull())
        worklist->stopAllPlansForContext(wasmContext);
#endif
    if (UNLIKELY(m_watchdog))
        m_watchdog->willDestroyVM(this);
    m_traps.willDestroyVM();
    VMInspector::instance().remove(this);

    // Never GC, ever again.
    heap.incrementDeferralDepth();

#if ENABLE(SAMPLING_PROFILER)
    if (m_samplingProfiler) {
        m_samplingProfiler->reportDataToOptionFile();
        m_samplingProfiler->shutdown();
    }
#endif // ENABLE(SAMPLING_PROFILER)
    
#if ENABLE(JIT)
    if (JITWorklist* worklist = JITWorklist::existingGlobalWorklistOrNull())
        worklist->completeAllForVM(*this);
#endif // ENABLE(JIT)

#if ENABLE(DFG_JIT)
    // Make sure concurrent compilations are done, but don't install them, since there is
    // no point to doing so.
    for (unsigned i = DFG::numberOfWorklists(); i--;) {
        if (DFG::Worklist* worklist = DFG::existingWorklistForIndexOrNull(i)) {
            worklist->removeNonCompilingPlansForVM(*this);
            worklist->waitUntilAllPlansForVMAreReady(*this);
            worklist->removeAllReadyPlansForVM(*this);
        }
    }
#endif // ENABLE(DFG_JIT)
    
    waitForAsynchronousDisassembly();
    
    // Clear this first to ensure that nobody tries to remove themselves from it.
    m_perBytecodeProfiler = nullptr;

    ASSERT(currentThreadIsHoldingAPILock());
    m_apiLock->willDestroyVM(this);
    smallStrings.setIsInitialized(false);
    // Last chance for objects to clean up before the heap goes away.
    heap.lastChanceToFinalize();

    JSRunLoopTimer::Manager::shared().unregisterVM(*this);
    
    delete interpreter;
#ifndef NDEBUG
    // Poison the pointer in debug builds so use-after-destroy is obvious.
    interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
#endif

    delete emptyList;

    delete propertyNames;
    // Default VMs share the thread's atom string table; only API VMs own theirs.
    if (vmType != Default)
        delete m_atomStringTable;

    delete clientData;
    delete m_regExpCache;

#if ENABLE(REGEXP_TRACING)
    delete m_rtTraceList;
#endif

#if ENABLE(DFG_JIT)
    for (unsigned i = 0; i < m_scratchBuffers.size(); ++i)
        VMMalloc::free(m_scratchBuffers[i]);
#endif
}
 663  
 664  void VM::primitiveGigacageDisabledCallback(void* argument)
 665  {
 666      static_cast<VM*>(argument)->primitiveGigacageDisabled();
 667  }
 668  
 669  void VM::primitiveGigacageDisabled()
 670  {
 671      if (m_apiLock->currentThreadIsHoldingLock()) {
 672          m_primitiveGigacageEnabled.fireAll(*this, "Primitive gigacage disabled");
 673          return;
 674      }
 675   
 676      // This is totally racy, and that's OK. The point is, it's up to the user to ensure that they pass the
 677      // uncaged buffer in a nicely synchronized manner.
 678      m_needToFirePrimitiveGigacageEnabled = true;
 679  }
 680  
// Records the most recent stack-top value for this VM; consulted by the stack
// sanitization asserts (see sanitizeStackForVM).
void VM::setLastStackTop(void* lastStackTop)
{ 
    m_lastStackTop = lastStackTop;
}
 685  
 686  Ref<VM> VM::createContextGroup(HeapType heapType)
 687  {
 688      return adoptRef(*new VM(APIContextGroup, heapType));
 689  }
 690  
 691  Ref<VM> VM::create(HeapType heapType, WTF::RunLoop* runLoop)
 692  {
 693      return adoptRef(*new VM(Default, heapType, runLoop));
 694  }
 695  
 696  RefPtr<VM> VM::tryCreate(HeapType heapType, WTF::RunLoop* runLoop)
 697  {
 698      bool success = true;
 699      RefPtr<VM> vm = adoptRef(new VM(Default, heapType, runLoop, &success));
 700      if (!success) {
 701          // Here, we're destructing a partially constructed VM and we know that
 702          // no one else can be using it at the same time. So, acquiring the lock
 703          // is superflous. However, we don't want to change how VMs are destructed.
 704          // Just going through the motion of acquiring the lock here allows us to
 705          // use the standard destruction process.
 706  
 707          // VM expects us to be holding the VM lock when destructing it. Acquiring
 708          // the lock also puts the VM in a state (e.g. acquiring heap access) that
 709          // is needed for destruction. The lock will hold the last reference to
 710          // the VM after we nullify the refPtr below. The VM will actually be
 711          // destructed in JSLockHolder's destructor.
 712          JSLockHolder lock(vm.get());
 713          vm = nullptr;
 714      }
 715      return vm;
 716  }
 717  
 718  bool VM::sharedInstanceExists()
 719  {
 720      return sharedInstanceInternal();
 721  }
 722  
 723  VM& VM::sharedInstance()
 724  {
 725      GlobalJSLock globalLock;
 726      VM*& instance = sharedInstanceInternal();
 727      if (!instance)
 728          instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
 729      return *instance;
 730  }
 731  
 732  VM*& VM::sharedInstanceInternal()
 733  {
 734      static VM* sharedInstance;
 735      return sharedInstance;
 736  }
 737  
 738  Watchdog& VM::ensureWatchdog()
 739  {
 740      if (!m_watchdog)
 741          m_watchdog = adoptRef(new Watchdog(this));
 742      return *m_watchdog;
 743  }
 744  
 745  HeapProfiler& VM::ensureHeapProfiler()
 746  {
 747      if (!m_heapProfiler)
 748          m_heapProfiler = makeUnique<HeapProfiler>(*this);
 749      return *m_heapProfiler;
 750  }
 751  
 752  #if ENABLE(SAMPLING_PROFILER)
 753  SamplingProfiler& VM::ensureSamplingProfiler(Ref<Stopwatch>&& stopwatch)
 754  {
 755      if (!m_samplingProfiler)
 756          m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
 757      return *m_samplingProfiler;
 758  }
 759  #endif // ENABLE(SAMPLING_PROFILER)
 760  
 761  #if ENABLE(JIT)
 762  static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
 763  {
 764      switch (intrinsic) {
 765      case CharCodeAtIntrinsic:
 766          return charCodeAtThunkGenerator;
 767      case CharAtIntrinsic:
 768          return charAtThunkGenerator;
 769      case StringPrototypeCodePointAtIntrinsic:
 770          return stringPrototypeCodePointAtThunkGenerator;
 771      case Clz32Intrinsic:
 772          return clz32ThunkGenerator;
 773      case FromCharCodeIntrinsic:
 774          return fromCharCodeThunkGenerator;
 775      case SqrtIntrinsic:
 776          return sqrtThunkGenerator;
 777      case AbsIntrinsic:
 778          return absThunkGenerator;
 779      case FloorIntrinsic:
 780          return floorThunkGenerator;
 781      case CeilIntrinsic:
 782          return ceilThunkGenerator;
 783      case TruncIntrinsic:
 784          return truncThunkGenerator;
 785      case RoundIntrinsic:
 786          return roundThunkGenerator;
 787      case ExpIntrinsic:
 788          return expThunkGenerator;
 789      case LogIntrinsic:
 790          return logThunkGenerator;
 791      case IMulIntrinsic:
 792          return imulThunkGenerator;
 793      case RandomIntrinsic:
 794          return randomThunkGenerator;
 795      case BoundFunctionCallIntrinsic:
 796          return boundFunctionCallGenerator;
 797      default:
 798          return nullptr;
 799      }
 800  }
 801  
 802  MacroAssemblerCodeRef<JITThunkPtrTag> VM::getCTIStub(ThunkGenerator generator)
 803  {
 804      return jitStubs->ctiStub(*this, generator);
 805  }
 806  
 807  #endif // ENABLE(JIT)
 808  
// Convenience overload: no intrinsic and no DOMJIT signature.
NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
{
    return getHostFunction(function, NoIntrinsic, constructor, nullptr, name);
}
 813  
 814  static Ref<NativeJITCode> jitCodeForCallTrampoline()
 815  {
 816      static NativeJITCode* result;
 817      static std::once_flag onceKey;
 818      std::call_once(onceKey, [&] {
 819          result = new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_call_trampoline), JITType::HostCallThunk, NoIntrinsic);
 820      });
 821      return makeRef(*result);
 822  }
 823  
 824  static Ref<NativeJITCode> jitCodeForConstructTrampoline()
 825  {
 826      static NativeJITCode* result;
 827      static std::once_flag onceKey;
 828      std::call_once(onceKey, [&] {
 829          result = new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_construct_trampoline), JITType::HostCallThunk, NoIntrinsic);
 830      });
 831      return makeRef(*result);
 832  }
 833  
// Creates (or fetches a cached) NativeExecutable for a host function pair.
// With the JIT enabled, uses the per-VM host-function stub cache and a
// specialized thunk generator when the intrinsic provides one; otherwise falls
// back to the shared LLInt trampolines.
NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic, NativeFunction constructor, const DOMJIT::Signature* signature, const String& name)
{
#if ENABLE(JIT)
    if (Options::useJIT()) {
        return jitStubs->hostFunctionStub(
            *this, function, constructor,
            intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : nullptr,
            intrinsic, signature, name);
    }
#endif // ENABLE(JIT)
    // LLInt-only path: intrinsic and signature are not consulted here.
    UNUSED_PARAM(intrinsic);
    UNUSED_PARAM(signature);
    return NativeExecutable::create(*this, jitCodeForCallTrampoline(), function, jitCodeForConstructTrampoline(), constructor, name);
}
 848  
 849  NativeExecutable* VM::getBoundFunction(bool isJSFunction, bool canConstruct)
 850  {
 851      bool slowCase = !isJSFunction;
 852  
 853      auto getOrCreate = [&] (Weak<NativeExecutable>& slot) -> NativeExecutable* {
 854          if (auto* cached = slot.get())
 855              return cached;
 856          NativeExecutable* result = getHostFunction(
 857              slowCase ? boundFunctionCall : boundThisNoArgsFunctionCall,
 858              slowCase ? NoIntrinsic : BoundFunctionCallIntrinsic,
 859              canConstruct ? (slowCase ? boundFunctionConstruct : boundThisNoArgsFunctionConstruct) : callHostFunctionAsConstructor, nullptr, String());
 860          slot = Weak<NativeExecutable>(result);
 861          return result;
 862      };
 863  
 864      if (slowCase) {
 865          if (canConstruct)
 866              return getOrCreate(m_slowCanConstructBoundExecutable);
 867          return getOrCreate(m_slowBoundExecutable);
 868      }
 869      if (canConstruct)
 870          return getOrCreate(m_fastCanConstructBoundExecutable);
 871      return getOrCreate(m_fastBoundExecutable);
 872  }
 873  
// Returns the call or construct trampoline for InternalFunction objects:
// JIT-compiled stubs when the JIT is enabled, LLInt trampolines otherwise.
MacroAssemblerCodePtr<JSEntryPtrTag> VM::getCTIInternalFunctionTrampolineFor(CodeSpecializationKind kind)
{
#if ENABLE(JIT)
    if (Options::useJIT()) {
        if (kind == CodeForCall)
            return jitStubs->ctiInternalFunctionCall(*this).retagged<JSEntryPtrTag>();
        return jitStubs->ctiInternalFunctionConstruct(*this).retagged<JSEntryPtrTag>();
    }
#endif
    // LLInt fallback trampolines.
    if (kind == CodeForCall)
        return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_call_trampoline);
    return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_construct_trampoline);
}
 887  
// Intentionally empty; defined out-of-line in this translation unit.
VM::ClientData::~ClientData()
{
}
 891  
 892  void VM::whenIdle(Function<void()>&& callback)
 893  {
 894      if (!entryScope) {
 895          callback();
 896          return;
 897      }
 898  
 899      entryScope->addDidPopListener(WTFMove(callback));
 900  }
 901  
 902  void VM::deleteAllLinkedCode(DeleteAllCodeEffort effort)
 903  {
 904      whenIdle([=] () {
 905          heap.deleteAllCodeBlocks(effort);
 906      });
 907  }
 908  
 909  void VM::deleteAllCode(DeleteAllCodeEffort effort)
 910  {
 911      whenIdle([=] () {
 912          m_codeCache->clear();
 913          m_regExpCache->deleteAllCode();
 914          heap.deleteAllCodeBlocks(effort);
 915          heap.deleteAllUnlinkedCodeBlocks(effort);
 916          heap.reportAbandonedObjectGraph();
 917      });
 918  }
 919  
 920  void VM::shrinkFootprintWhenIdle()
 921  {
 922      whenIdle([=] () {
 923          sanitizeStackForVM(*this);
 924          deleteAllCode(DeleteAllCodeIfNotCollecting);
 925          heap.collectNow(Synchronousness::Sync, CollectionScope::Full);
 926          // FIXME: Consider stopping various automatic threads here.
 927          // https://bugs.webkit.org/show_bug.cgi?id=185447
 928          WTF::releaseFastMallocFreeMemory();
 929      });
 930  }
 931  
 932  SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
 933  {
 934      auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
 935      if (addResult.isNewEntry)
 936          addResult.iterator->value = adoptRef(new SourceProviderCache);
 937      return addResult.iterator->value.get();
 938  }
 939  
 940  void VM::clearSourceProviderCaches()
 941  {
 942      sourceProviderCacheMap.clear();
 943  }
 944  
// Core throw path: records the exception on the VM and notifies the debugger.
// Returns the exception so callers can use `return throwException(...)`.
Exception* VM::throwException(JSGlobalObject* globalObject, Exception* exception)
{
    // Identify the frame the throw originates from; fall back to the frame the
    // debugger associates with this global object when no JS frame is found.
    CallFrame* throwOriginFrame = topJSCallFrame();
    if (!throwOriginFrame)
        throwOriginFrame = globalObject->deprecatedCallFrameForDebugger();

    // Debugging aid: deliberately crash at the throw site when requested.
    if (UNLIKELY(Options::breakOnThrow())) {
        CodeBlock* codeBlock = throwOriginFrame ? throwOriginFrame->codeBlock() : nullptr;
        dataLog("Throwing exception in call frame ", RawPointer(throwOriginFrame), " for code block ", codeBlock, "\n");
        CRASH();
    }

    interpreter->notifyDebuggerOfExceptionToBeThrown(*this, globalObject, throwOriginFrame, exception);

    setException(exception);

#if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
    // Remember where (and on which thread) the throw happened so unchecked
    // exception diagnostics can point at the culprit.
    m_nativeStackTraceOfLastThrow = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());
    m_throwingThread = &Thread::current();
#endif
    return exception;
}
 967  
 968  Exception* VM::throwException(JSGlobalObject* globalObject, JSValue thrownValue)
 969  {
 970      VM& vm = *this;
 971      Exception* exception = jsDynamicCast<Exception*>(vm, thrownValue);
 972      if (!exception)
 973          exception = Exception::create(*this, thrownValue);
 974  
 975      return throwException(globalObject, exception);
 976  }
 977  
 978  Exception* VM::throwException(JSGlobalObject* globalObject, JSObject* error)
 979  {
 980      return throwException(globalObject, JSValue(error));
 981  }
 982  
// Stack limits are computed relative to the stack pointer at VM entry (see
// updateStackLimits), so they must be refreshed whenever it changes.
void VM::setStackPointerAtVMEntry(void* sp)
{
    m_stackPointerAtVMEntry = sp;
    updateStackLimits();
}
 988  
// Installs a new soft reserved zone size and recomputes the stack limits.
// Returns the previous size so callers can restore it.
size_t VM::updateSoftReservedZoneSize(size_t softReservedZoneSize)
{
    size_t oldSoftReservedZoneSize = m_currentSoftReservedZoneSize;
    m_currentSoftReservedZoneSize = softReservedZoneSize;
#if ENABLE(C_LOOP)
    // The C-loop interpreter maintains its own stack; keep its zone in sync.
    interpreter->cloopStack().setSoftReservedZoneSize(softReservedZoneSize);
#endif

    updateStackLimits();

    return oldSoftReservedZoneSize;
}
1001  
1002  #if OS(WINDOWS)
1003  // On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
1004  // where the guard page is a barrier between committed and uncommitted memory.
1005  // When data from the guard page is read or written, the guard page is moved, and memory is committed.
1006  // This is how the system grows the stack.
1007  // When using the C stack on Windows we need to precommit the needed stack space.
1008  // Otherwise we might crash later if we access uncommitted stack memory.
1009  // This can happen if we allocate stack space larger than the page guard size (4K).
1010  // The system does not get the chance to move the guard page, and commit more memory,
1011  // and we crash if uncommitted memory is accessed.
1012  // The MSVC compiler fixes this by inserting a call to the _chkstk() function,
1013  // when needed, see http://support.microsoft.com/kb/100775.
1014  // By touching every page up to the stack limit with a dummy operation,
1015  // we force the system to move the guard page, and commit memory.
1016  
static void preCommitStackMemory(void* stackLimit)
{
    const int pageSize = 4096;
    // Note: `&stackLimit` (the address of this parameter, which lives on the
    // current stack) is deliberately used as the starting point. We walk down
    // from there toward stackLimit one page at a time, doing a dummy volatile
    // read+write on each page to make the system move the guard page and
    // commit the memory (see the explanatory comment above this function).
    for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
        char ch = *p;
        *p = ch;
    }
}
1025  #endif
1026  
// Recomputes m_softStackLimit and m_stackLimit from the current thread's stack
// bounds, the entry stack pointer (when set), and the reserved zone sizes.
void VM::updateStackLimits()
{
#if OS(WINDOWS)
    // Remember the previous soft limit so we only precommit when it changes.
    void* lastSoftStackLimit = m_softStackLimit;
#endif

    const StackBounds& stack = Thread::current().stack();
    size_t reservedZoneSize = Options::reservedZoneSize();
    // We should have already ensured that Options::reservedZoneSize() >= minimumReserveZoneSize at
    // options initialization time, and the option value should not have been changed thereafter.
    // We don't have the ability to assert here that it hasn't changed, but we can at least assert
    // that the value is sane.
    RELEASE_ASSERT(reservedZoneSize >= minimumReservedZoneSize);

    if (m_stackPointerAtVMEntry) {
        // Limits are measured from where we entered the VM, not the thread's
        // stack origin.
        char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
        m_softStackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_currentSoftReservedZoneSize);
        m_stackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), reservedZoneSize);
    } else {
        m_softStackLimit = stack.recursionLimit(m_currentSoftReservedZoneSize);
        m_stackLimit = stack.recursionLimit(reservedZoneSize);
    }

#if OS(WINDOWS)
    // We only need to precommit stack memory dictated by the VM::m_softStackLimit limit.
    // This is because VM::m_softStackLimit applies to stack usage by LLINT asm or JIT
    // generated code which can allocate stack space that the C++ compiler does not know
    // about. As such, we have to precommit that stack memory manually.
    //
    // In contrast, we do not need to worry about VM::m_stackLimit because that limit is
    // used exclusively by C++ code, and the C++ compiler will automatically commit the
    // needed stack pages.
    if (lastSoftStackLimit != m_softStackLimit)
        preCommitStackMemory(m_softStackLimit);
#endif
}
1063  
1064  #if ENABLE(DFG_JIT)
1065  void VM::gatherScratchBufferRoots(ConservativeRoots& conservativeRoots)
1066  {
1067      auto lock = holdLock(m_scratchBufferLock);
1068      for (auto* scratchBuffer : m_scratchBuffers) {
1069          if (scratchBuffer->activeLength()) {
1070              void* bufferStart = scratchBuffer->dataBuffer();
1071              conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
1072          }
1073      }
1074  }
1075  
1076  void VM::scanSideState(ConservativeRoots& roots) const
1077  {
1078      ASSERT(heap.worldIsStopped());
1079      for (const auto& sideState : m_checkpointSideState) {
1080          static_assert(sizeof(sideState->tmps) / sizeof(JSValue) == maxNumCheckpointTmps);
1081          roots.add(sideState->tmps, sideState->tmps + maxNumCheckpointTmps);
1082      }
1083  }
1084  #endif
1085  
// Pushes checkpoint OSR exit side state for a call frame. Each frame may have
// at most one entry, and entries are kept in stack order.
void VM::pushCheckpointOSRSideState(std::unique_ptr<CheckpointOSRExitSideState>&& payload)
{
    ASSERT(currentThreadIsHoldingAPILock());
    ASSERT(payload->associatedCallFrame);
#if ASSERT_ENABLED
    // No duplicate entry for the same call frame.
    for (const auto& sideState : m_checkpointSideState)
        ASSERT(sideState->associatedCallFrame != payload->associatedCallFrame);
#endif
    m_checkpointSideState.append(WTFMove(payload));

#if ASSERT_ENABLED
    // Entries whose frames are on this thread's stack must be at strictly
    // increasing addresses when walking from the newest entry backwards.
    auto bounds = StackBounds::currentThreadStackBounds();
    void* previousCallFrame = bounds.end();
    for (size_t i = m_checkpointSideState.size(); i--;) {
        auto* callFrame = m_checkpointSideState[i]->associatedCallFrame;
        if (!bounds.contains(callFrame))
            break;
        ASSERT(previousCallFrame < callFrame);
        previousCallFrame = callFrame;
    }
#endif
}
1108  
1109  std::unique_ptr<CheckpointOSRExitSideState> VM::popCheckpointOSRSideState(CallFrame* expectedCallFrame)
1110  {
1111      ASSERT(currentThreadIsHoldingAPILock());
1112      auto sideState = m_checkpointSideState.takeLast();
1113      RELEASE_ASSERT(sideState->associatedCallFrame == expectedCallFrame);
1114      return sideState;
1115  }
1116  
1117  void VM::popAllCheckpointOSRSideStateUntil(CallFrame* target)
1118  {
1119      ASSERT(currentThreadIsHoldingAPILock());
1120      auto bounds = StackBounds::currentThreadStackBounds().withSoftOrigin(target);
1121      ASSERT(bounds.contains(target));
1122  
1123      // We have to worry about migrating from another thread since there may be no checkpoints in our thread but one in the other threads.
1124      while (m_checkpointSideState.size() && bounds.contains(m_checkpointSideState.last()->associatedCallFrame))
1125          m_checkpointSideState.takeLast();
1126  }
1127  
1128  void logSanitizeStack(VM& vm)
1129  {
1130      if (Options::verboseSanitizeStack() && vm.topCallFrame) {
1131          int dummy;
1132          auto& stackBounds = Thread::current().stack();
1133          dataLog(
1134              "Sanitizing stack for VM = ", RawPointer(&vm), " with top call frame at ", RawPointer(vm.topCallFrame),
1135              ", current stack pointer at ", RawPointer(&dummy), ", in ",
1136              pointerDump(vm.topCallFrame->codeBlock()), ", last code origin = ",
1137              vm.topCallFrame->codeOrigin(), ", last stack top = ", RawPointer(vm.lastStackTop()), ", in stack range [", RawPointer(stackBounds.origin()), ", ", RawPointer(stackBounds.end()), "]\n");
1138      }
1139  }
1140  
1141  #if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
1142  char* VM::acquireRegExpPatternContexBuffer()
1143  {
1144      m_regExpPatternContextLock.lock();
1145      ASSERT(m_regExpPatternContextLock.isLocked());
1146      if (!m_regExpPatternContexBuffer)
1147          m_regExpPatternContexBuffer = makeUniqueArray<char>(VM::patternContextBufferSize);
1148      return m_regExpPatternContexBuffer.get();
1149  }
1150  
// Releases the lock taken by acquireRegExpPatternContexBuffer(); the buffer
// itself is kept for reuse.
void VM::releaseRegExpPatternContexBuffer()
{
    ASSERT(m_regExpPatternContextLock.isLocked());

    m_regExpPatternContextLock.unlock();
}
1157  #endif
1158  
1159  #if ENABLE(REGEXP_TRACING)
// Records a RegExp for later trace dumping; GC-protected until
// dumpRegExpTrace() unprotects it.
void VM::addRegExpToTrace(RegExp* regExp)
{
    gcProtect(regExp);
    m_rtTraceList->add(regExp);
}
1165  
void VM::dumpRegExpTrace()
{
    // The first RegExp object is ignored. It is created by the RegExpPrototype ctor and not used.
    // (Incrementing begin() therefore assumes the list is non-empty.)
    RTTraceList::iterator iter = ++m_rtTraceList->begin();
    
    if (iter != m_rtTraceList->end()) {
        dataLogF("\nRegExp Tracing\n");
        dataLogF("Regular Expression                              8 Bit          16 Bit        match()    Matches    Average\n");
        dataLogF(" <Match only / Match>                         JIT Addr      JIT Address       calls      found   String len\n");
        dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");
    
        unsigned reCount = 0;
    
        // Print each entry and drop the GC protection added in addRegExpToTrace().
        for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
            (*iter)->printTraceData();
            gcUnprotect(*iter);
        }

        dataLogF("%d Regular Expressions\n", reCount);
    }
    
    m_rtTraceList->clear();
}
1189  #else
// No-op stub when REGEXP_TRACING is disabled.
void VM::dumpRegExpTrace()
{
}
1193  #endif
1194  
1195  WatchpointSet* VM::ensureWatchpointSetForImpureProperty(UniquedStringImpl* propertyName)
1196  {
1197      auto result = m_impurePropertyWatchpointSets.add(propertyName, nullptr);
1198      if (result.isNewEntry)
1199          result.iterator->value = WatchpointSet::create(IsWatched);
1200      return result.iterator->value.get();
1201  }
1202  
1203  void VM::addImpureProperty(UniquedStringImpl* propertyName)
1204  {
1205      if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
1206          watchpointSet->fireAll(*this, "Impure property added");
1207  }
1208  
// Reference-counted enable helper: only the 0 -> 1 transition performs the
// enable work, and only that transition requires recompilation (returns true).
template<typename Func>
static bool enableProfilerWithRespectToCount(unsigned& counter, const Func& doEnableWork)
{
    const bool firstEnable = !counter;
    if (firstEnable)
        doEnableWork();
    ++counter;
    return firstEnable;
}
1221  
1222  template<typename Func>
1223  static bool disableProfilerWithRespectToCount(unsigned& counter, const Func& doDisableWork)
1224  {
1225      RELEASE_ASSERT(counter > 0);
1226      bool needsToRecompile = false;
1227      counter--;
1228      if (!counter) {
1229          doDisableWork();
1230          needsToRecompile = true;
1231      }
1232  
1233      return needsToRecompile;
1234  }
1235  
1236  bool VM::enableTypeProfiler()
1237  {
1238      auto enableTypeProfiler = [this] () {
1239          this->m_typeProfiler = makeUnique<TypeProfiler>();
1240          this->m_typeProfilerLog = makeUnique<TypeProfilerLog>(*this);
1241      };
1242  
1243      return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
1244  }
1245  
1246  bool VM::disableTypeProfiler()
1247  {
1248      auto disableTypeProfiler = [this] () {
1249          this->m_typeProfiler.reset(nullptr);
1250          this->m_typeProfilerLog.reset(nullptr);
1251      };
1252  
1253      return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
1254  }
1255  
1256  bool VM::enableControlFlowProfiler()
1257  {
1258      auto enableControlFlowProfiler = [this] () {
1259          this->m_controlFlowProfiler = makeUnique<ControlFlowProfiler>();
1260      };
1261  
1262      return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
1263  }
1264  
1265  bool VM::disableControlFlowProfiler()
1266  {
1267      auto disableControlFlowProfiler = [this] () {
1268          this->m_controlFlowProfiler.reset(nullptr);
1269      };
1270  
1271      return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
1272  }
1273  
1274  void VM::dumpTypeProfilerData()
1275  {
1276      if (!typeProfiler())
1277          return;
1278  
1279      typeProfilerLog()->processLogEntries(*this, "VM Dump Types"_s);
1280      typeProfiler()->dumpTypeProfilerData(*this);
1281  }
1282  
1283  void VM::queueMicrotask(JSGlobalObject& globalObject, Ref<Microtask>&& task)
1284  {
1285      m_microtaskQueue.append(makeUnique<QueuedTask>(*this, &globalObject, WTFMove(task)));
1286  }
1287  
1288  void VM::callPromiseRejectionCallback(Strong<JSPromise>& promise)
1289  {
1290      JSObject* callback = promise->globalObject()->unhandledRejectionCallback();
1291      if (!callback)
1292          return;
1293  
1294      auto scope = DECLARE_CATCH_SCOPE(*this);
1295  
1296      auto callData = getCallData(*this, callback);
1297      ASSERT(callData.type != CallData::Type::None);
1298  
1299      MarkedArgumentBuffer args;
1300      args.append(promise.get());
1301      args.append(promise->result(*this));
1302      call(promise->globalObject(), callback, callData, jsNull(), args);
1303      scope.clearException();
1304  }
1305  
1306  void VM::didExhaustMicrotaskQueue()
1307  {
1308      auto unhandledRejections = WTFMove(m_aboutToBeNotifiedRejectedPromises);
1309      for (auto& promise : unhandledRejections) {
1310          if (promise->isHandled(*this))
1311              continue;
1312  
1313          callPromiseRejectionCallback(promise);
1314      }
1315  }
1316  
// Records a rejected promise; notification happens from
// didExhaustMicrotaskQueue() once the microtask queue drains.
void VM::promiseRejected(JSPromise* promise)
{
    m_aboutToBeNotifiedRejectedPromises.constructAndAppend(*this, promise);
}
1321  
1322  void VM::drainMicrotasks()
1323  {
1324      do {
1325          while (!m_microtaskQueue.isEmpty()) {
1326              m_microtaskQueue.takeFirst()->run();
1327              if (m_onEachMicrotaskTick)
1328                  m_onEachMicrotaskTick(*this);
1329          }
1330          didExhaustMicrotaskQueue();
1331      } while (!m_microtaskQueue.isEmpty());
1332      finalizeSynchronousJSExecution();
1333  }
1334  
// Executes the microtask against the global object it was queued with.
void QueuedTask::run()
{
    m_microtask->run(m_globalObject.get());
}
1339  
// Scrubs the unused portion of the stack for this VM (implementation is in
// the C-loop stack or sanitizeStackForVMImpl), logging first when verbose
// sanitize-stack logging is enabled.
void sanitizeStackForVM(VM& vm)
{
    logSanitizeStack(vm);
    if (vm.topCallFrame) {
        // With JS on the stack we must hold the API lock, and the recorded
        // last stack top must lie within this thread's stack bounds.
        auto& stackBounds = Thread::current().stack();
        ASSERT(vm.currentThreadIsHoldingAPILock());
        ASSERT_UNUSED(stackBounds, stackBounds.contains(vm.lastStackTop()));
    }
#if ENABLE(C_LOOP)
    vm.interpreter->cloopStack().sanitizeStack();
#else
    sanitizeStackForVMImpl(&vm);
#endif
}
1354  
// Returns (an estimate of) the number of stack bytes currently committed for
// this thread.
size_t VM::committedStackByteCount()
{
#if !ENABLE(C_LOOP)
    // When using the C stack, we don't know how many stack pages are actually
    // committed. So, we use the current stack usage as an estimate.
    uint8_t* current = bitwise_cast<uint8_t*>(currentStackPointer());
    uint8_t* high = bitwise_cast<uint8_t*>(Thread::current().stack().origin());
    return high - current;
#else
    return CLoopStack::committedByteCount();
#endif
}
1367  
1368  #if ENABLE(C_LOOP)
// Grows the C-loop stack if needed; returns false if capacity cannot be ensured.
bool VM::ensureStackCapacityForCLoop(Register* newTopOfStack)
{
    return interpreter->cloopStack().ensureCapacityFor(newTopOfStack);
}
1373  
// Soft recursion-limit check against the C-loop interpreter's own stack.
bool VM::isSafeToRecurseSoftCLoop() const
{
    return interpreter->cloopStack().isSafeToRecurse();
}
1378  
// Current stack pointer of the C-loop interpreter's stack.
void* VM::currentCLoopStackPointer() const
{
    return interpreter->cloopStack().currentStackPointer();
}
1383  #endif // ENABLE(C_LOOP)
1384  
1385  #if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
// Debug verification (Options::validateExceptionChecks) that every point which
// could have thrown a JS exception was followed by an exception check. On a
// missed check, dumps where the (simulated) throw happened and where the
// unchecked scope was detected, then crashes.
void VM::verifyExceptionCheckNeedIsSatisfied(unsigned recursionDepth, ExceptionEventLocation& location)
{
    if (!Options::validateExceptionChecks())
        return;

    if (UNLIKELY(m_needExceptionCheck)) {
        auto throwDepth = m_simulatedThrowPointRecursionDepth;
        auto& throwLocation = m_simulatedThrowPointLocation;

        dataLog(
            "ERROR: Unchecked JS exception:\n"
            "    This scope can throw a JS exception: ", throwLocation, "\n"
            "        (ExceptionScope::m_recursionDepth was ", throwDepth, ")\n"
            "    But the exception was unchecked as of this scope: ", location, "\n"
            "        (ExceptionScope::m_recursionDepth was ", recursionDepth, ")\n"
            "\n");

        StringPrintStream out;
        std::unique_ptr<StackTrace> currentTrace = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());

        if (Options::dumpSimulatedThrows()) {
            out.println("The simulated exception was thrown at:");
            m_nativeStackTraceOfLastSimulatedThrow->dump(out, "    ");
            out.println();
        }
        out.println("Unchecked exception detected at:");
        currentTrace->dump(out, "    ");
        out.println();

        dataLog(out.toCString());
        // Always fails here: m_needExceptionCheck is known true.
        RELEASE_ASSERT(!m_needExceptionCheck);
    }
}
1420  
1421  ScratchBuffer* VM::scratchBufferForSize(size_t size)
1422  {
1423      if (!size)
1424          return nullptr;
1425  
1426      auto locker = holdLock(m_scratchBufferLock);
1427  
1428      if (size > m_sizeOfLastScratchBuffer) {
1429          // Protect against a N^2 memory usage pathology by ensuring
1430          // that at worst, we get a geometric series, meaning that the
1431          // total memory usage is somewhere around
1432          // max(scratch buffer size) * 4.
1433          m_sizeOfLastScratchBuffer = size * 2;
1434  
1435          ScratchBuffer* newBuffer = ScratchBuffer::create(m_sizeOfLastScratchBuffer);
1436          RELEASE_ASSERT(newBuffer);
1437          m_scratchBuffers.append(newBuffer);
1438      }
1439  
1440      ScratchBuffer* result = m_scratchBuffers.last();
1441      return result;
1442  }
1443  
1444  void VM::clearScratchBuffers()
1445  {
1446      auto lock = holdLock(m_scratchBufferLock);
1447      for (auto* scratchBuffer : m_scratchBuffers)
1448          scratchBuffer->setActiveLength(0);
1449  }
1450  
1451  void VM::ensureShadowChicken()
1452  {
1453      if (m_shadowChicken)
1454          return;
1455      m_shadowChicken = makeUnique<ShadowChicken>();
1456  }
1457  
// Defines the out-of-line slow path that lazily constructs a dynamic
// IsoSubspace member m_<name> (the inline fast path presumably lives in
// VM.h — confirm there). The storeStoreFence between constructing the
// subspace and publishing the m_<name> pointer ensures a concurrent
// unlocked fast-path reader never observes a partially-built subspace.
#define DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(name, heapCellType, type) \
    IsoSubspace* VM::name##Slow() \
    { \
        ASSERT(!m_##name); \
        auto space = makeUnique<IsoSubspace> ISO_SUBSPACE_INIT(heap, heapCellType, type); \
        WTF::storeStoreFence(); \
        m_##name = WTFMove(space); \
        return m_##name.get(); \
    }
1467  
1468  
// Slow-path definitions for every lazily-created IsoSubspace on VM.
// Each line expands to one VM::<name>Slow() via the macro above. The
// "Hash:" trailers appear to be generated lookup hashes — do not edit
// them by hand.

// API wrappers and core JS object types.
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(apiGlobalObjectSpace, apiGlobalObjectHeapCellType.get(), JSAPIGlobalObject)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(apiValueWrapperSpace, cellHeapCellType.get(), JSAPIValueWrapper)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(arrayBufferSpace, cellHeapCellType.get(), JSArrayBuffer)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(arrayIteratorSpace, cellHeapCellType.get(), JSArrayIterator)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(asyncGeneratorSpace, cellHeapCellType.get(), JSAsyncGenerator)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(bigIntObjectSpace, cellHeapCellType.get(), BigIntObject)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(booleanObjectSpace, cellHeapCellType.get(), BooleanObject)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(boundFunctionSpace, cellHeapCellType.get(), JSBoundFunction) // Hash:0xd7916d41
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(callbackConstructorSpace, callbackConstructorHeapCellType.get(), JSCallbackConstructor)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(callbackGlobalObjectSpace, callbackGlobalObjectHeapCellType.get(), JSCallbackObject<JSGlobalObject>)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(callbackFunctionSpace, cellHeapCellType.get(), JSCallbackFunction) // Hash:0xe7648ebc
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(callbackObjectSpace, callbackObjectHeapCellType.get(), JSCallbackObject<JSNonFinalObject>)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(customGetterSetterFunctionSpace, cellHeapCellType.get(), JSCustomGetterSetterFunction) // Hash:0x18091000
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(dataViewSpace, cellHeapCellType.get(), JSDataView)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(debuggerScopeSpace, cellHeapCellType.get(), DebuggerScope)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(errorInstanceSpace, errorInstanceHeapCellType.get(), ErrorInstance) // Hash:0x3f40d4a
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(float32ArraySpace, cellHeapCellType.get(), JSFloat32Array)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(float64ArraySpace, cellHeapCellType.get(), JSFloat64Array)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(functionRareDataSpace, destructibleCellHeapCellType.get(), FunctionRareData)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(generatorSpace, cellHeapCellType.get(), JSGenerator)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(globalObjectSpace, globalObjectHeapCellType.get(), JSGlobalObject)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(injectedScriptHostSpace, injectedScriptHostSpaceHeapCellType.get(), Inspector::JSInjectedScriptHost)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(int8ArraySpace, cellHeapCellType.get(), JSInt8Array)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(int16ArraySpace, cellHeapCellType.get(), JSInt16Array)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(int32ArraySpace, cellHeapCellType.get(), JSInt32Array)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(javaScriptCallFrameSpace, javaScriptCallFrameHeapCellType.get(), Inspector::JSJavaScriptCallFrame)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(jsModuleRecordSpace, jsModuleRecordHeapCellType.get(), JSModuleRecord)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(mapBucketSpace, cellHeapCellType.get(), JSMap::BucketType)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(mapIteratorSpace, cellHeapCellType.get(), JSMapIterator)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(mapSpace, cellHeapCellType.get(), JSMap)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(moduleNamespaceObjectSpace, moduleNamespaceObjectHeapCellType.get(), JSModuleNamespaceObject)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(nativeStdFunctionSpace, nativeStdFunctionHeapCellType.get(), JSNativeStdFunction) // Hash:0x70ed61e4
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(proxyObjectSpace, cellHeapCellType.get(), ProxyObject)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(proxyRevokeSpace, cellHeapCellType.get(), ProxyRevoke) // Hash:0xb506a939
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(scopedArgumentsTableSpace, destructibleCellHeapCellType.get(), ScopedArgumentsTable)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(scriptFetchParametersSpace, destructibleCellHeapCellType.get(), JSScriptFetchParameters)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(scriptFetcherSpace, destructibleCellHeapCellType.get(), JSScriptFetcher)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(setBucketSpace, cellHeapCellType.get(), JSSet::BucketType)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(setIteratorSpace, cellHeapCellType.get(), JSSetIterator)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(setSpace, cellHeapCellType.get(), JSSet)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(strictEvalActivationSpace, cellHeapCellType.get(), StrictEvalActivation)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(stringIteratorSpace, cellHeapCellType.get(), JSStringIterator)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(sourceCodeSpace, destructibleCellHeapCellType.get(), JSSourceCode)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(symbolSpace, destructibleCellHeapCellType.get(), Symbol)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(symbolObjectSpace, cellHeapCellType.get(), SymbolObject)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(templateObjectDescriptorSpace, destructibleCellHeapCellType.get(), JSTemplateObjectDescriptor)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(uint8ArraySpace, cellHeapCellType.get(), JSUint8Array)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(uint8ClampedArraySpace, cellHeapCellType.get(), JSUint8ClampedArray)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(uint16ArraySpace, cellHeapCellType.get(), JSUint16Array)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(uint32ArraySpace, cellHeapCellType.get(), JSUint32Array)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(unlinkedEvalCodeBlockSpace, destructibleCellHeapCellType.get(), UnlinkedEvalCodeBlock)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(unlinkedFunctionCodeBlockSpace, destructibleCellHeapCellType.get(), UnlinkedFunctionCodeBlock)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(unlinkedModuleProgramCodeBlockSpace, destructibleCellHeapCellType.get(), UnlinkedModuleProgramCodeBlock)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(unlinkedProgramCodeBlockSpace, destructibleCellHeapCellType.get(), UnlinkedProgramCodeBlock)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakMapSpace, weakMapHeapCellType.get(), JSWeakMap) // Hash:0x662b12a3
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakSetSpace, weakSetHeapCellType.get(), JSWeakSet) // Hash:0x4c781b30
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakObjectRefSpace, cellHeapCellType.get(), JSWeakObjectRef) // Hash:0x8ec68f1f
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(withScopeSpace, cellHeapCellType.get(), JSWithScope)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(finalizationRegistrySpace, finalizationRegistryCellType.get(), JSFinalizationRegistry)
// Objective-C API bindings (Darwin ports only).
#if JSC_OBJC_API_ENABLED
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(apiWrapperObjectSpace, apiWrapperObjectHeapCellType.get(), JSCallbackObject<JSAPIWrapperObject>)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(objCCallbackFunctionSpace, objCCallbackFunctionHeapCellType.get(), ObjCCallbackFunction) // Hash:0x10f610b8
#endif
// GLib API bindings (GTK/WPE ports). NOTE(review): guarded with #ifdef
// while the ObjC block uses #if — presumably intentional (the GLib macro
// is only defined when enabled); confirm against the build system.
#ifdef JSC_GLIB_API_ENABLED
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(apiWrapperObjectSpace, apiWrapperObjectHeapCellType.get(), JSCallbackObject<JSAPIWrapperObject>)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(jscCallbackFunctionSpace, jscCallbackFunctionHeapCellType.get(), JSCCallbackFunction)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(callbackAPIWrapperGlobalObjectSpace, callbackAPIWrapperGlobalObjectHeapCellType.get(), JSCallbackObject<JSAPIWrapperGlobalObject>)
#endif
// ECMA-402 Intl objects.
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(intlCollatorSpace, intlCollatorHeapCellType.get(), IntlCollator)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(intlDateTimeFormatSpace, intlDateTimeFormatHeapCellType.get(), IntlDateTimeFormat)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(intlDisplayNamesSpace, intlDisplayNamesHeapCellType.get(), IntlDisplayNames)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(intlListFormatSpace, intlListFormatHeapCellType.get(), IntlListFormat)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(intlLocaleSpace, intlLocaleHeapCellType.get(), IntlLocale)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(intlNumberFormatSpace, intlNumberFormatHeapCellType.get(), IntlNumberFormat)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(intlPluralRulesSpace, intlPluralRulesHeapCellType.get(), IntlPluralRules)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(intlRelativeTimeFormatSpace, intlRelativeTimeFormatHeapCellType.get(), IntlRelativeTimeFormat)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(intlSegmentIteratorSpace, intlSegmentIteratorHeapCellType.get(), IntlSegmentIterator)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(intlSegmenterSpace, intlSegmenterHeapCellType.get(), IntlSegmenter)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(intlSegmentsSpace, intlSegmentsHeapCellType.get(), IntlSegments)
// WebAssembly objects.
#if ENABLE(WEBASSEMBLY)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(jsToWasmICCalleeSpace, cellHeapCellType.get(), JSToWasmICCallee)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyCodeBlockSpace, webAssemblyCodeBlockHeapCellType.get(), JSWebAssemblyCodeBlock) // Hash:0x9ad995cd
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyFunctionSpace, webAssemblyFunctionHeapCellType.get(), WebAssemblyFunction) // Hash:0x8b7c32db
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyGlobalSpace, webAssemblyGlobalHeapCellType.get(), JSWebAssemblyGlobal)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyInstanceSpace, webAssemblyInstanceHeapCellType.get(), JSWebAssemblyInstance)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyMemorySpace, webAssemblyMemoryHeapCellType.get(), JSWebAssemblyMemory)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyModuleSpace, webAssemblyModuleHeapCellType.get(), JSWebAssemblyModule)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyModuleRecordSpace, webAssemblyModuleRecordHeapCellType.get(), WebAssemblyModuleRecord)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyTableSpace, webAssemblyTableHeapCellType.get(), JSWebAssemblyTable)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyWrapperFunctionSpace, cellHeapCellType.get(), WebAssemblyWrapperFunction) // Hash:0xd4a5ff01
#endif

#undef DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW
1562  
// Same lazy, fence-published initialization pattern as
// DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW above, but for members held as
// a SpaceAndSet; returns the address of the embedded `space`.
#define DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(name, heapCellType, type) \
    IsoSubspace* VM::name##Slow() \
    { \
        ASSERT(!m_##name); \
        auto space = makeUnique<SpaceAndSet> ISO_SUBSPACE_INIT(heap, heapCellType, type); \
        WTF::storeStoreFence(); \
        m_##name = WTFMove(space); \
        return &m_##name->space; \
    }
1572  
// Lazily-created executable subspaces that carry an associated set.
DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(evalExecutableSpace, destructibleCellHeapCellType.get(), EvalExecutable) // Hash:0x958e3e9d
DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(moduleProgramExecutableSpace, destructibleCellHeapCellType.get(), ModuleProgramExecutable) // Hash:0x6506fa3c

#undef DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW
1577  
1578  JSCell* VM::sentinelSetBucketSlow()
1579  {
1580      ASSERT(!m_sentinelSetBucket);
1581      auto* sentinel = JSSet::BucketType::createSentinel(*this);
1582      m_sentinelSetBucket.set(*this, sentinel);
1583      return sentinel;
1584  }
1585  
1586  JSCell* VM::sentinelMapBucketSlow()
1587  {
1588      ASSERT(!m_sentinelMapBucket);
1589      auto* sentinel = JSMap::BucketType::createSentinel(*this);
1590      m_sentinelMapBucket.set(*this, sentinel);
1591      return sentinel;
1592  }
1593  
1594  JSPropertyNameEnumerator* VM::emptyPropertyNameEnumeratorSlow()
1595  {
1596      ASSERT(!m_emptyPropertyNameEnumerator);
1597      PropertyNameArray propertyNames(*this, PropertyNameMode::Strings, PrivateSymbolMode::Exclude);
1598      auto* enumerator = JSPropertyNameEnumerator::create(*this, nullptr, 0, 0, WTFMove(propertyNames));
1599      m_emptyPropertyNameEnumerator.set(*this, enumerator);
1600      return enumerator;
1601  }
1602  
1603  JSGlobalObject* VM::deprecatedVMEntryGlobalObject(JSGlobalObject* globalObject) const
1604  {
1605      if (entryScope)
1606          return entryScope->globalObject();
1607      return globalObject;
1608  }
1609  
// Sets the flag that requests a deliberate crash on VM creation —
// presumably checked by the VM constructor (not visible in this chunk);
// confirm there. Used as a test/debug hook.
void VM::setCrashOnVMCreation(bool shouldCrash)
{
    vmCreationShouldCrash = shouldCrash;
}
1614  
1615  void VM::addLoopHintExecutionCounter(const Instruction* instruction)
1616  {
1617      auto locker = holdLock(m_loopHintExecutionCountLock);
1618      auto addResult = m_loopHintExecutionCounts.add(instruction, std::pair<unsigned, std::unique_ptr<uint64_t>>(0, nullptr));
1619      if (addResult.isNewEntry) {
1620          auto ptr = WTF::makeUniqueWithoutFastMallocCheck<uint64_t>();
1621          *ptr = 0;
1622          addResult.iterator->value.second = WTFMove(ptr);
1623      }
1624      ++addResult.iterator->value.first;
1625  }
1626  
1627  uint64_t* VM::getLoopHintExecutionCounter(const Instruction* instruction)
1628  {
1629      auto locker = holdLock(m_loopHintExecutionCountLock);
1630      auto iter = m_loopHintExecutionCounts.find(instruction);
1631      return iter->value.second.get();
1632  }
1633  
1634  void VM::removeLoopHintExecutionCounter(const Instruction* instruction)
1635  {
1636      auto locker = holdLock(m_loopHintExecutionCountLock);
1637      auto iter = m_loopHintExecutionCounts.find(instruction);
1638      RELEASE_ASSERT(!!iter->value.first);
1639      --iter->value.first;
1640      if (!iter->value.first)
1641          m_loopHintExecutionCounts.remove(iter);
1642  }
1643  
1644  } // namespace JSC