/ runtime / SamplingProfiler.cpp
SamplingProfiler.cpp
   1  /*
   2   * Copyright (C) 2016-2020 Apple Inc. All rights reserved.
   3   *
   4   * Redistribution and use in source and binary forms, with or without
   5   * modification, are permitted provided that the following conditions
   6   * are met:
   7   * 1. Redistributions of source code must retain the above copyright
   8   *    notice, this list of conditions and the following disclaimer.
   9   * 2. Redistributions in binary form must reproduce the above copyright
  10   *    notice, this list of conditions and the following disclaimer in the
  11   *    documentation and/or other materials provided with the distribution.
  12   *
  13   * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
  14   * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
  15   * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
  16   * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
  17   * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
  18   * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
  19   * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
  20   * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
  21   * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  22   * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
  23   * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
  24   */
  25  
  26  #include "config.h"
  27  #include "SamplingProfiler.h"
  28  
  29  #if ENABLE(SAMPLING_PROFILER)
  30  
  31  #include "CodeBlock.h"
  32  #include "CodeBlockSet.h"
  33  #include "HeapIterationScope.h"
  34  #include "HeapUtil.h"
  35  #include "InlineCallFrame.h"
  36  #include "JSCInlines.h"
  37  #include "LLIntPCRanges.h"
  38  #include "MachineContext.h"
  39  #include "MarkedBlockInlines.h"
  40  #include "MarkedBlockSet.h"
  41  #include "NativeExecutable.h"
  42  #include "VM.h"
  43  #include "WasmCallee.h"
  44  #include "WasmCalleeRegistry.h"
  45  #include "WasmCapabilities.h"
  46  #include <wtf/FilePrintStream.h>
  47  #include <wtf/HashSet.h>
  48  #include <wtf/RefPtr.h>
  49  #include <wtf/StackTrace.h>
  50  #include <wtf/text/StringBuilder.h>
  51  
  52  namespace JSC {
  53  
// Debug-only counters for measuring how often suspended-thread stack walks
// fail validation. sReportStats is compile-time false, so in normal builds
// these are never read or reported.
static double sNumTotalStackTraces = 0;
static double sNumTotalWalks = 0;
static double sNumFailedWalks = 0;
// Report once every sNumWalkReportingFrequency walks; optionally only when the
// failure ratio exceeds sWalkErrorPercentage (5%).
static const uint32_t sNumWalkReportingFrequency = 50;
static const double sWalkErrorPercentage = .05;
static constexpr bool sReportStatsOnlyWhenTheyreAboveThreshold = false;
static constexpr bool sReportStats = false;

using FrameType = SamplingProfiler::FrameType;
using UnprocessedStackFrame = SamplingProfiler::UnprocessedStackFrame;
  64  
  65  ALWAYS_INLINE static void reportStats()
  66  {
  67      if (sReportStats && sNumTotalWalks && static_cast<uint64_t>(sNumTotalWalks) % sNumWalkReportingFrequency == 0) {
  68          if (!sReportStatsOnlyWhenTheyreAboveThreshold || (sNumFailedWalks / sNumTotalWalks > sWalkErrorPercentage)) {
  69              dataLogF("Num total walks: %llu. Failed walks percent: %lf\n",
  70                  static_cast<unsigned long long>(sNumTotalWalks), sNumFailedWalks / sNumTotalWalks);
  71          }
  72      }
  73  }
  74  
// Walks the JS call-frame chain of a *suspended* mutator thread. Because the
// target thread may have been stopped at an arbitrary instruction, every read
// of frame memory is "unsafe" (SUPPRESS_ASAN plus the unsafe* CallFrame
// accessors) and each candidate frame is validated against the VM's known
// machine-thread stacks and CodeBlock set before being trusted. The three
// lockers passed in must be held for the entire lifetime of the walker.
class FrameWalker {
public:
    FrameWalker(VM& vm, CallFrame* callFrame, const AbstractLocker& codeBlockSetLocker, const AbstractLocker& machineThreadsLocker, const Optional<LockHolder>& wasmCalleeLocker)
        : m_vm(vm)
        , m_callFrame(callFrame)
        , m_entryFrame(vm.topEntryFrame)
        , m_codeBlockSetLocker(codeBlockSetLocker)
        , m_machineThreadsLocker(machineThreadsLocker)
        , m_wasmCalleeLocker(wasmCalleeLocker)
    {
    }

    // Records up to stackTrace.size() frames into stackTrace and returns the
    // number recorded. didRunOutOfSpace is set when the walk stopped because
    // the buffer filled up rather than because it reached the top frame.
    SUPPRESS_ASAN
    size_t walk(Vector<UnprocessedStackFrame>& stackTrace, bool& didRunOutOfSpace)
    {
        if (sReportStats)
            sNumTotalWalks++;
        resetAtMachineFrame();
        size_t maxStackTraceSize = stackTrace.size();
        while (!isAtTop() && !m_bailingOut && m_depth < maxStackTraceSize) {
            recordJITFrame(stackTrace);
            advanceToParentFrame();
            resetAtMachineFrame();
        }
        didRunOutOfSpace = m_depth >= maxStackTraceSize && !isAtTop();
        reportStats();
        return m_depth;
    }

    // False when the walk bailed out because a frame pointer or CodeBlock
    // failed validation; such a (partial) trace should be discarded.
    bool wasValidWalk() const
    {
        return !m_bailingOut;
    }

protected:

    // Copies the current frame's (CodeBlock, callee, call-site) triple into
    // stackTrace[m_depth]. Nothing read here has been verified as a GC object
    // yet; that happens later in processUnverifiedStackTraces().
    SUPPRESS_ASAN
    void recordJITFrame(Vector<UnprocessedStackFrame>& stackTrace)
    {
        CallSiteIndex callSiteIndex;
        CalleeBits unsafeCallee = m_callFrame->unsafeCallee();
        CodeBlock* codeBlock = m_callFrame->unsafeCodeBlock();
        // Wasm frames don't have a meaningful CodeBlock; drop whatever value
        // happened to be in that slot.
        if (unsafeCallee.isWasm())
            codeBlock = nullptr;
        if (codeBlock) {
            ASSERT(isValidCodeBlock(codeBlock));
            callSiteIndex = m_callFrame->unsafeCallSiteIndex();
        }
        stackTrace[m_depth] = UnprocessedStackFrame(codeBlock, unsafeCallee, callSiteIndex);
#if ENABLE(WEBASSEMBLY)
        if (Wasm::isSupported() && unsafeCallee.isWasm()) {
            auto* wasmCallee = unsafeCallee.asWasmCallee();
            if (Wasm::CalleeRegistry::singleton().isValidCallee(*m_wasmCalleeLocker, wasmCallee)) {
                // At this point, Wasm::Callee would be dying (ref count is 0), but its fields are still live.
                // And we can safely copy Wasm::IndexOrName even when any lock is held by suspended threads.
                stackTrace[m_depth].wasmIndexOrName = wasmCallee->indexOrName();
                stackTrace[m_depth].wasmCompilationMode = wasmCallee->compilationMode();
            }
        }
#endif
        m_depth++;
    }

    SUPPRESS_ASAN
    void advanceToParentFrame()
    {
        m_callFrame = m_callFrame->unsafeCallerFrame(m_entryFrame);
    }

    bool isAtTop() const
    {
        return !m_callFrame;
    }

    // Validates the frame the cursor currently points at. Sets m_bailingOut
    // when m_callFrame does not look like a real frame: either its pointer is
    // outside every known thread stack, or its CodeBlock slot holds a value
    // not registered in the VM's CodeBlock set.
    SUPPRESS_ASAN
    void resetAtMachineFrame()
    {
        if (isAtTop())
            return;

        if (!isValidFramePointer(m_callFrame)) {
            // Guard against pausing the process at weird program points.
            m_bailingOut = true;
            if (sReportStats)
                sNumFailedWalks++;
            return;
        }

        CodeBlock* codeBlock = m_callFrame->unsafeCodeBlock();
        if (!codeBlock || m_callFrame->unsafeCallee().isWasm())
            return;

        if (!isValidCodeBlock(codeBlock)) {
            m_bailingOut = true;
            if (sReportStats)
                sNumFailedWalks++;
            return;
        }
    }

    // A frame pointer is accepted only if it lies within the stack bounds of
    // some thread registered with the VM's machine-thread list.
    bool isValidFramePointer(void* callFrame)
    {
        uint8_t* fpCast = bitwise_cast<uint8_t*>(callFrame);
        for (auto& thread : m_vm.heap.machineThreads().threads(m_machineThreadsLocker)) {
            uint8_t* stackBase = static_cast<uint8_t*>(thread->stack().origin());
            uint8_t* stackLimit = static_cast<uint8_t*>(thread->stack().end());
            RELEASE_ASSERT(stackBase);
            RELEASE_ASSERT(stackLimit);
            RELEASE_ASSERT(stackLimit <= stackBase);
            // Stacks grow down: origin() is the high address, end() the low one.
            if (fpCast < stackBase && fpCast >= stackLimit)
                return true;
        }
        return false;
    }

    bool isValidCodeBlock(CodeBlock* codeBlock)
    {
        if (!codeBlock)
            return false;
        bool result = m_vm.heap.codeBlockSet().contains(m_codeBlockSetLocker, codeBlock);
        return result;
    }

    VM& m_vm;
    CallFrame* m_callFrame;
    EntryFrame* m_entryFrame;
    const AbstractLocker& m_codeBlockSetLocker;
    const AbstractLocker& m_machineThreadsLocker;
    const Optional<LockHolder>& m_wasmCalleeLocker;
    bool m_bailingOut { false };
    size_t m_depth { 0 };
};
 207  
// Extends FrameWalker to also record native (C) frames. It walks the machine
// frame-pointer chain (m_machineFrame) in lock-step with the JS call-frame
// chain (m_callFrame): a machine frame whose caller is not the next JS frame
// is treated as a C frame and recorded by return PC only.
class CFrameWalker : public FrameWalker {
public:
    typedef FrameWalker Base;

    CFrameWalker(VM& vm, void* machineFrame, CallFrame* callFrame, const AbstractLocker& codeBlockSetLocker, const AbstractLocker& machineThreadsLocker, const Optional<LockHolder>& wasmCalleeLocker)
        : Base(vm, callFrame, codeBlockSetLocker, machineThreadsLocker, wasmCalleeLocker)
        , m_machineFrame(machineFrame)
    {
    }

    size_t walk(Vector<UnprocessedStackFrame>& stackTrace, bool& didRunOutOfSpace)
    {
        if (sReportStats)
            sNumTotalWalks++;
        resetAtMachineFrame();
        size_t maxStackTraceSize = stackTrace.size();
        // The way the C walker decides if a frame it is about to trace is C or JS is by
        // ensuring m_callFrame points to some frame above the machineFrame.
        if (!isAtTop() && !m_bailingOut && m_machineFrame == m_callFrame) {
            recordJITFrame(stackTrace);
            Base::advanceToParentFrame();
            resetAtMachineFrame();
        }

        while (!isAtTop() && !m_bailingOut && m_depth < maxStackTraceSize) {
            // The JS cursor must stay strictly above (at a higher address than)
            // the machine cursor; otherwise the two chains are inconsistent.
            if (m_machineFrame >= m_callFrame) {
                // If we get to this state we probably have an invalid trace.
                m_bailingOut = true;
                break;
            }

            if (isCFrame()) {
                RELEASE_ASSERT(!LLInt::isLLIntPC(frame()->callerFrame));
                // C frames are recorded by return PC only; symbolication
                // happens later via dladdr/demangling in displayName().
                stackTrace[m_depth] = UnprocessedStackFrame(frame()->returnPC);
                m_depth++;
            } else
                recordJITFrame(stackTrace);
            advanceToParentFrame();
            resetAtMachineFrame();
        }
        didRunOutOfSpace = m_depth >= maxStackTraceSize && !isAtTop();
        reportStats();
        return m_depth;
    }

private:

    // The current machine frame is a C frame iff its caller is not the JS
    // frame the JS cursor is waiting to record.
    bool isCFrame()
    {
        return frame()->callerFrame != m_callFrame;
    }

    void advanceToParentFrame()
    {
        // Only advance the JS cursor when the machine frame we just consumed
        // was the JS frame itself; the machine cursor always advances.
        if (!isCFrame())
            Base::advanceToParentFrame();
        m_machineFrame = frame()->callerFrame;
    }

    void resetAtMachineFrame()
    {
        if (!isValidFramePointer(m_machineFrame)) {
            // Guard against pausing the process at weird program points.
            m_bailingOut = true;
            if (sReportStats)
                sNumFailedWalks++;
            return;
        }
        Base::resetAtMachineFrame();
    }

    // Reinterpret the raw machine frame pointer as the {callerFrame, returnPC}
    // pair that lives at the base of every frame in JSC's calling convention.
    CallerFrameAndPC* frame()
    {
        return reinterpret_cast<CallerFrameAndPC*>(m_machineFrame);
    }

    void* m_machineFrame;
};
 286  
// Constructs the profiler for a VM. Sampling does not begin until start() /
// noticeVMEntry() spawns the timer thread.
SamplingProfiler::SamplingProfiler(VM& vm, Ref<Stopwatch>&& stopwatch)
    : m_isPaused(false)
    , m_isShutDown(false)
    , m_vm(vm)
    , m_weakRandom()
    , m_stopwatch(WTFMove(stopwatch))
    , m_timingInterval(Seconds::fromMicroseconds(Options::sampleInterval()))
{
    if (sReportStats) {
        sNumTotalWalks = 0;
        sNumFailedWalks = 0;
    }

    // Initial sample buffer; takeSample() grows it by 1.25x whenever a walk
    // runs out of room.
    m_currentFrames.grow(256);
    vm.heap.objectSpace().enablePreciseAllocationTracking();
}
 303  
 304  SamplingProfiler::~SamplingProfiler()
 305  {
 306  }
 307  
 308  void SamplingProfiler::createThreadIfNecessary(const AbstractLocker&)
 309  {
 310      ASSERT(m_lock.isLocked());
 311  
 312      if (m_thread)
 313          return;
 314  
 315      RefPtr<SamplingProfiler> profiler = this;
 316      m_thread = Thread::create("jsc.sampling-profiler.thread", [profiler] {
 317          profiler->timerLoop();
 318      });
 319  }
 320  
// Body of the sampling thread: sample (unless paused/shut down), then sleep
// roughly one sampling interval, jittered to avoid phase-locking with other
// periodic system activity.
void SamplingProfiler::timerLoop()
{
    while (true) {
        Seconds stackTraceProcessingTime = 0_s;
        {
            // m_lock is held only around the sample itself, not the sleep.
            LockHolder locker(m_lock);
            if (UNLIKELY(m_isShutDown))
                return;

            if (!m_isPaused && m_jscExecutionThread)
                takeSample(locker, stackTraceProcessingTime);

            m_lastTime = m_stopwatch->elapsedTime();
        }

        // Read section 6.2 of this paper for more elaboration of why we add a random
        // fluctuation here. The main idea is to prevent our timer from being in sync
        // with some system process such as a scheduled context switch.
        // http://plv.colorado.edu/papers/mytkowicz-pldi10.pdf
        double randomSignedNumber = (m_weakRandom.get() * 2.0) - 1.0; // A random number between [-1, 1).
        Seconds randomFluctuation = m_timingInterval * 0.2 * randomSignedNumber;
        // Credit the time spent post-processing the sample against the next
        // sleep so the effective sampling rate stays near the target.
        WTF::sleep(m_timingInterval - std::min(m_timingInterval, stackTraceProcessingTime) + randomFluctuation);
    }
}
 345  
// Takes one sample: acquires every lock the walk will need, suspends the JS
// execution thread, walks its stack into m_currentFrames, resumes it, and
// (only then) copies the frames into m_unprocessedStackTraces. The time spent
// after resume is reported via stackTraceProcessingTime so timerLoop() can
// subtract it from the next sleep. Caller must hold m_lock.
void SamplingProfiler::takeSample(const AbstractLocker&, Seconds& stackTraceProcessingTime)
{
    ASSERT(m_lock.isLocked());
    if (m_vm.entryScope) {
        Seconds nowTime = m_stopwatch->elapsedTime();

        // All locks the walker consults must be taken BEFORE suspending the
        // target thread; otherwise the suspended thread could be holding one
        // of them and we would deadlock.
        auto machineThreadsLocker = holdLock(m_vm.heap.machineThreads().getLock());
        auto codeBlockSetLocker = holdLock(m_vm.heap.codeBlockSet().getLock());
        auto executableAllocatorLocker = holdLock(ExecutableAllocator::singleton().getLock());
        Optional<LockHolder> wasmCalleesLocker;
#if ENABLE(WEBASSEMBLY)
        if (Wasm::isSupported())
            wasmCalleesLocker = holdLock(Wasm::CalleeRegistry::singleton().getLock());
#endif

        auto didSuspend = m_jscExecutionThread->suspend();
        if (didSuspend) {
            // While the JSC thread is suspended, we can't do things like malloc because the JSC thread
            // may be holding the malloc lock.
            void* machineFrame;
            CallFrame* callFrame;
            void* machinePC;
            bool topFrameIsLLInt = false;
            void* llintPC;
            {
                PlatformRegisters registers;
                m_jscExecutionThread->getRegisters(registers);
                machineFrame = MachineContext::framePointer(registers);
                callFrame = static_cast<CallFrame*>(machineFrame);
                auto instructionPointer = MachineContext::instructionPointer(registers);
                if (instructionPointer)
                    machinePC = instructionPointer->untaggedExecutableAddress();
                else
                    machinePC = nullptr;
                llintPC = removeCodePtrTag(MachineContext::llintInstructionPointer(registers));
                assertIsNotTagged(machinePC);
            }
            // FIXME: Lets have a way of detecting when we're parsing code.
            // https://bugs.webkit.org/show_bug.cgi?id=152761
            if (ExecutableAllocator::singleton().isValidExecutableMemory(executableAllocatorLocker, machinePC)) {
                if (m_vm.isExecutingInRegExpJIT) {
                    // FIXME: We're executing a regexp. Lets gather more intersting data.
                    // https://bugs.webkit.org/show_bug.cgi?id=152729
                    callFrame = m_vm.topCallFrame; // We need to do this or else we'd fail our backtrace validation b/c this isn't a JS frame.
                }
            } else if (LLInt::isLLIntPC(machinePC)) {
                topFrameIsLLInt = true;
                // We're okay to take a normal stack trace when the PC
                // is in LLInt code.
            } else {
                // We resort to topCallFrame to see if we can get anything
                // useful. We usually get here when we're executing C code.
                callFrame = m_vm.topCallFrame;
            }

            size_t walkSize;
            bool wasValidWalk;
            bool didRunOutOfVectorSpace;
            if (Options::sampleCCode()) {
                CFrameWalker walker(m_vm, machineFrame, callFrame, codeBlockSetLocker, machineThreadsLocker, wasmCalleesLocker);
                walkSize = walker.walk(m_currentFrames, didRunOutOfVectorSpace);
                wasValidWalk = walker.wasValidWalk();
            } else {
                FrameWalker walker(m_vm, callFrame, codeBlockSetLocker, machineThreadsLocker, wasmCalleesLocker);
                walkSize = walker.walk(m_currentFrames, didRunOutOfVectorSpace);
                wasValidWalk = walker.wasValidWalk();
            }

            m_jscExecutionThread->resume();

            auto startTime = MonotonicTime::now();
            // We can now use data structures that malloc, and do other interesting things, again.

            // FIXME: It'd be interesting to take data about the program's state when
            // we fail to take a stack trace: https://bugs.webkit.org/show_bug.cgi?id=152758
            if (wasValidWalk && walkSize) {
                if (sReportStats)
                    sNumTotalStackTraces++;
                Vector<UnprocessedStackFrame> stackTrace;
                stackTrace.reserveInitialCapacity(walkSize);
                for (size_t i = 0; i < walkSize; i++) {
                    UnprocessedStackFrame frame = m_currentFrames[i];
                    stackTrace.uncheckedAppend(frame);
                }

                m_unprocessedStackTraces.append(UnprocessedStackTrace { nowTime, machinePC, topFrameIsLLInt, llintPC, WTFMove(stackTrace) });

                if (didRunOutOfVectorSpace)
                    m_currentFrames.grow(m_currentFrames.size() * 1.25);
            }

            auto endTime = MonotonicTime::now();
            stackTraceProcessingTime = endTime - startTime;
        }
    }
}
 442  
 443  static ALWAYS_INLINE BytecodeIndex tryGetBytecodeIndex(unsigned llintPC, CodeBlock* codeBlock)
 444  {
 445  #if ENABLE(DFG_JIT)
 446      RELEASE_ASSERT(!codeBlock->hasCodeOrigins());
 447  #endif
 448  
 449      unsigned bytecodeOffset = llintPC;
 450      if (bytecodeOffset < codeBlock->instructionsSize())
 451          return BytecodeIndex(bytecodeOffset);
 452      return BytecodeIndex();
 453  }
 454  
// Converts the raw, unverified frames collected by takeSample() into verified
// StackTraces: callee/executable pointers are checked against the GC heap
// (via the TinyBloomFilter) before being dereferenced, inline (DFG) frames
// are expanded via their code origins, and every cell we retain is added to
// m_liveCellPointers so visit() keeps it alive.
void SamplingProfiler::processUnverifiedStackTraces(const AbstractLocker&)
{
    // This function needs to be called from the JSC execution thread.
    RELEASE_ASSERT(m_lock.isLocked());

    TinyBloomFilter filter = m_vm.heap.objectSpace().blocks().filter();

    for (UnprocessedStackTrace& unprocessedStackTrace : m_unprocessedStackTraces) {
        m_stackTraces.append(StackTrace());
        StackTrace& stackTrace = m_stackTraces.last();
        stackTrace.timestamp = unprocessedStackTrace.timestamp;

        // Fill in line/column/bytecode info for a (codeBlock, bytecodeIndex)
        // pair; hash and JIT type are only needed for jsc-shell reports.
        auto populateCodeLocation = [] (CodeBlock* codeBlock, BytecodeIndex bytecodeIndex, StackFrame::CodeLocation& location) {
            if (bytecodeIndex.offset() < codeBlock->instructionsSize()) {
                int divot;
                int startOffset;
                int endOffset;
                codeBlock->expressionRangeForBytecodeIndex(bytecodeIndex, divot, startOffset, endOffset,
                    location.lineNumber, location.columnNumber);
                location.bytecodeIndex = bytecodeIndex;
            }
            if (Options::collectSamplingProfilerDataForJSCShell()) {
                location.codeBlockHash = codeBlock->hash();
                location.jitType = codeBlock->jitType();
            }
        };

        auto appendCodeBlock = [&] (CodeBlock* codeBlock, BytecodeIndex bytecodeIndex) {
            stackTrace.frames.append(StackFrame(codeBlock->ownerExecutable()));
            m_liveCellPointers.add(codeBlock->ownerExecutable());
            populateCodeLocation(codeBlock, bytecodeIndex, stackTrace.frames.last().semanticLocation);
        };

        auto appendEmptyFrame = [&] {
            stackTrace.frames.append(StackFrame());
        };

        // Attaches callee information to the frame most recently appended to
        // stackTrace.frames, but only after verifying the raw callee bits
        // actually point at a live GC object.
        auto storeCalleeIntoLastFrame = [&] (UnprocessedStackFrame& unprocessedStackFrame) {
            // Set the callee if it's a valid GC object.
            CalleeBits calleeBits = unprocessedStackFrame.unverifiedCallee;
            StackFrame& stackFrame = stackTrace.frames.last();
            bool alreadyHasExecutable = !!stackFrame.executable;
#if ENABLE(WEBASSEMBLY)
            if (calleeBits.isWasm()) {
                stackFrame.frameType = FrameType::Wasm;
                stackFrame.wasmIndexOrName = unprocessedStackFrame.wasmIndexOrName;
                stackFrame.wasmCompilationMode = unprocessedStackFrame.wasmCompilationMode;
                return;
            }
#endif

            JSValue callee = calleeBits.asCell();
            if (!HeapUtil::isValueGCObject(m_vm.heap, filter, callee)) {
                if (!alreadyHasExecutable)
                    stackFrame.frameType = FrameType::Unknown;
                return;
            }

            JSCell* calleeCell = callee.asCell();
            auto setFallbackFrameType = [&] {
                ASSERT(!alreadyHasExecutable);
                FrameType result = FrameType::Unknown;
                auto callData = getCallData(m_vm, calleeCell);
                if (callData.type == CallData::Type::Native)
                    result = FrameType::Host;

                stackFrame.frameType = result;
            };

            auto addCallee = [&] (JSObject* callee) {
                stackFrame.callee = callee;
                m_liveCellPointers.add(callee);
            };

            if (calleeCell->type() != JSFunctionType) {
                if (JSObject* object = jsDynamicCast<JSObject*>(calleeCell->vm(), calleeCell))
                    addCallee(object);

                if (!alreadyHasExecutable)
                    setFallbackFrameType();

                return;
            }

            addCallee(jsCast<JSFunction*>(calleeCell));

            if (alreadyHasExecutable)
                return;

            ExecutableBase* executable = jsCast<JSFunction*>(calleeCell)->executable();
            if (!executable) {
                setFallbackFrameType();
                return;
            }

            RELEASE_ASSERT(HeapUtil::isPointerGCObjectJSCell(m_vm.heap, filter, executable));
            stackFrame.frameType = FrameType::Executable;
            stackFrame.executable = executable;
            m_liveCellPointers.add(executable);
        };

        // Expands a DFG/FTL code origin into one frame per inlined function,
        // innermost last.
        auto appendCodeOrigin = [&] (CodeBlock* machineCodeBlock, CodeOrigin origin) {
            size_t startIndex = stackTrace.frames.size(); // We want to change stack traces that we're about to append.

            CodeOrigin machineOrigin;
            origin.walkUpInlineStack([&] (const CodeOrigin& codeOrigin) {
                machineOrigin = codeOrigin;
                auto* inlineCallFrame = codeOrigin.inlineCallFrame();
                appendCodeBlock(inlineCallFrame ? inlineCallFrame->baselineCodeBlock.get() : machineCodeBlock, codeOrigin.bytecodeIndex());
            });

            if (Options::collectSamplingProfilerDataForJSCShell()) {
                RELEASE_ASSERT(machineOrigin.isSet());
                RELEASE_ASSERT(!machineOrigin.inlineCallFrame());

                StackFrame::CodeLocation machineLocation = stackTrace.frames.last().semanticLocation;

                // We want to tell each inlined frame about the machine frame
                // they were inlined into. Currently, we only use this for dumping
                // output on the command line, but we could extend it to the web
                // inspector in the future if we find a need for it there.
                RELEASE_ASSERT(stackTrace.frames.size());
                m_liveCellPointers.add(machineCodeBlock);
                for (size_t i = startIndex; i < stackTrace.frames.size() - 1; i++)
                    stackTrace.frames[i].machineLocation = std::make_pair(machineLocation, machineCodeBlock);
            }
        };

        // Prepend the top-most inlined frame if needed and gather
        // location information about where the top frame is executing.
        size_t startIndex = 0;
        if (unprocessedStackTrace.frames.size() && !!unprocessedStackTrace.frames[0].verifiedCodeBlock) {
            CodeBlock* topCodeBlock = unprocessedStackTrace.frames[0].verifiedCodeBlock;
            if (unprocessedStackTrace.topFrameIsLLInt) {
                // We reuse LLInt CodeBlocks for the baseline JIT, so we need to check for both jit types.
                // This might also be false for various reasons (known and unknown), even though
                // it's super unlikely. One reason that this can be false is when we throw from a DFG frame,
                // and we end up having to unwind past an EntryFrame, we will end up executing
                // inside the LLInt's llint_handle_ucaught_exception. So we just protect against this
                // by ignoring it.
                BytecodeIndex bytecodeIndex = BytecodeIndex(0);
                if (topCodeBlock->jitType() == JITType::InterpreterThunk || topCodeBlock->jitType() == JITType::BaselineJIT) {
                    unsigned bits = static_cast<unsigned>(bitwise_cast<uintptr_t>(unprocessedStackTrace.llintPC));
                    bytecodeIndex = tryGetBytecodeIndex(bits, topCodeBlock);

                    UNUSED_PARAM(bytecodeIndex); // FIXME: do something with this info for the web inspector: https://bugs.webkit.org/show_bug.cgi?id=153455

                    appendCodeBlock(topCodeBlock, bytecodeIndex);
                    storeCalleeIntoLastFrame(unprocessedStackTrace.frames[0]);
                    startIndex = 1;
                }
            } else {
#if ENABLE(JIT)
                if (Optional<CodeOrigin> codeOrigin = topCodeBlock->findPC(unprocessedStackTrace.topPC)) {
                    appendCodeOrigin(topCodeBlock, *codeOrigin);
                    storeCalleeIntoLastFrame(unprocessedStackTrace.frames[0]);
                    startIndex = 1;
                }
#endif
                UNUSED_PARAM(appendCodeOrigin);
            }
        }

        // Process the remaining frames: JS frames by CodeBlock (expanding
        // inline stacks where code origins exist), C frames by PC, and
        // anything else as an empty/unknown frame.
        for (size_t i = startIndex; i < unprocessedStackTrace.frames.size(); i++) {
            UnprocessedStackFrame& unprocessedStackFrame = unprocessedStackTrace.frames[i];
            if (CodeBlock* codeBlock = unprocessedStackFrame.verifiedCodeBlock) {
                CallSiteIndex callSiteIndex = unprocessedStackFrame.callSiteIndex;

                auto appendCodeBlockNoInlining = [&] {
                    appendCodeBlock(codeBlock, tryGetBytecodeIndex(callSiteIndex.bits(), codeBlock));
                };

#if ENABLE(DFG_JIT)
                if (codeBlock->hasCodeOrigins()) {
                    if (codeBlock->canGetCodeOrigin(callSiteIndex))
                        appendCodeOrigin(codeBlock, codeBlock->codeOrigin(callSiteIndex));
                    else
                        appendCodeBlock(codeBlock, BytecodeIndex());
                } else
                    appendCodeBlockNoInlining();
#else
                appendCodeBlockNoInlining();
#endif
            } else if (unprocessedStackFrame.cCodePC) {
                appendEmptyFrame();
                stackTrace.frames.last().cCodePC = unprocessedStackFrame.cCodePC;
                stackTrace.frames.last().frameType = FrameType::C;
            } else
                appendEmptyFrame();

            // Note that this is okay to do if we walked the inline stack because
            // the machine frame will be at the top of the processed stack trace.
            if (!unprocessedStackFrame.cCodePC)
                storeCalleeIntoLastFrame(unprocessedStackFrame);
        }
    }

    m_unprocessedStackTraces.clear();
}
 654  
 655  void SamplingProfiler::visit(SlotVisitor& slotVisitor)
 656  {
 657      RELEASE_ASSERT(m_lock.isLocked());
 658      for (JSCell* cell : m_liveCellPointers)
 659          slotVisitor.appendUnbarriered(cell);
 660  }
 661  
 662  void SamplingProfiler::shutdown()
 663  {
 664      LockHolder locker(m_lock);
 665      m_isShutDown = true;
 666  }
 667  
 668  void SamplingProfiler::start()
 669  {
 670      LockHolder locker(m_lock);
 671      start(locker);
 672  }
 673  
 674  void SamplingProfiler::start(const AbstractLocker& locker)
 675  {
 676      ASSERT(m_lock.isLocked());
 677      m_isPaused = false;
 678      createThreadIfNecessary(locker);
 679  }
 680  
 681  void SamplingProfiler::pause(const AbstractLocker&)
 682  {
 683      ASSERT(m_lock.isLocked());
 684      m_isPaused = true;
 685      reportStats();
 686  }
 687  
 688  void SamplingProfiler::noticeCurrentThreadAsJSCExecutionThread(const AbstractLocker&)
 689  {
 690      ASSERT(m_lock.isLocked());
 691      m_jscExecutionThread = &Thread::current();
 692  }
 693  
 694  void SamplingProfiler::noticeCurrentThreadAsJSCExecutionThread()
 695  {
 696      LockHolder locker(m_lock);
 697      noticeCurrentThreadAsJSCExecutionThread(locker);
 698  }
 699  
 700  void SamplingProfiler::noticeJSLockAcquisition()
 701  {
 702      LockHolder locker(m_lock);
 703      noticeCurrentThreadAsJSCExecutionThread(locker);
 704  }
 705  
 706  void SamplingProfiler::noticeVMEntry()
 707  {
 708      LockHolder locker(m_lock);
 709      ASSERT(m_vm.entryScope);
 710      noticeCurrentThreadAsJSCExecutionThread(locker);
 711      m_lastTime = m_stopwatch->elapsedTime();
 712      createThreadIfNecessary(locker);
 713  }
 714  
// Drops all collected data: processed traces, the cell pointers kept alive
// for them, and raw traces not yet processed. Caller must hold m_lock.
void SamplingProfiler::clearData(const AbstractLocker&)
{
    ASSERT(m_lock.isLocked());
    m_stackTraces.clear();
    m_liveCellPointers.clear();
    m_unprocessedStackTraces.clear();
}
 722  
// Derives a display name from the callee cell by reading its "displayName"
// or "name" property. Lookups use InternalMethodType::VMInquiry so the
// property access has no side effects and must not throw (asserted below).
// Returns a null String when no usable name is found.
String SamplingProfiler::StackFrame::nameFromCallee(VM& vm)
{
    if (!callee)
        return String();

    auto scope = DECLARE_CATCH_SCOPE(vm);
    JSGlobalObject* globalObject = callee->globalObject(vm);
    auto getPropertyIfPureOperation = [&] (const Identifier& ident) -> String {
        PropertySlot slot(callee, PropertySlot::InternalMethodType::VMInquiry, &vm);
        PropertyName propertyName(ident);
        bool hasProperty = callee->getPropertySlot(globalObject, propertyName, slot);
        // VMInquiry access is expected to be exception-free.
        scope.assertNoException();
        if (hasProperty) {
            if (slot.isValue()) {
                JSValue nameValue = slot.getValue(globalObject, propertyName);
                // Only accept actual string values; tryGetValue avoids
                // resolving ropes in a way that could fail.
                if (isJSString(nameValue))
                    return asString(nameValue)->tryGetValue();
            }
        }
        return String();
    };

    // Prefer an explicit "displayName"; fall back to the function's "name".
    String name = getPropertyIfPureOperation(vm.propertyNames->displayName);
    if (!name.isEmpty())
        return name;

    return getPropertyIfPureOperation(vm.propertyNames->name);
}
 751  
// Human-readable name for this frame. Prefers a name read off the callee
// object itself; otherwise falls back to a per-frame-type description.
String SamplingProfiler::StackFrame::displayName(VM& vm)
{
    {
        String name = nameFromCallee(vm);
        if (!name.isEmpty())
            return name;
    }

    switch (frameType) {
    case FrameType::Unknown:
    case FrameType::C:
#if HAVE(DLADDR)
        if (frameType == FrameType::C) {
            // Symbolicate the C frame's PC; prefer the demangled name, fall
            // back to the mangled one.
            auto demangled = WTF::StackTrace::demangle(const_cast<void*>(cCodePC));
            if (demangled)
                return String(demangled->demangledName() ? demangled->demangledName() : demangled->mangledName());
            // NOTE(review): unconditional dataLog on demangle failure (no
            // newline, no context) looks like leftover debug output — confirm
            // it is intended before relying on this path.
            WTF::dataLog("couldn't get a name");
        }
#endif
        return "(unknown)"_s;

    case FrameType::Host:
        return "(host)"_s;

    case FrameType::Wasm:
#if ENABLE(WEBASSEMBLY)
        if (wasmIndexOrName)
            return makeString(wasmIndexOrName.value());
#endif
        return "(wasm)"_s;

    case FrameType::Executable:
        if (executable->isHostFunction())
            return static_cast<NativeExecutable*>(executable)->name();

        if (executable->isFunctionExecutable())
            return static_cast<FunctionExecutable*>(executable)->ecmaName().string();
        // Note: eval code intentionally reports as "(program)" here; the
        // JSON-test variant distinguishes "(eval)".
        if (executable->isProgramExecutable() || executable->isEvalExecutable())
            return "(program)"_s;
        if (executable->isModuleProgramExecutable())
            return "(module)"_s;

        RELEASE_ASSERT_NOT_REACHED();
        return String();
    }
    RELEASE_ASSERT_NOT_REACHED();
    return String();
}
 800  
 801  String SamplingProfiler::StackFrame::displayNameForJSONTests(VM& vm)
 802  {
 803      {
 804          String name = nameFromCallee(vm);
 805          if (!name.isEmpty())
 806              return name;
 807      }
 808  
 809      switch (frameType) {
 810      case FrameType::Unknown:
 811      case FrameType::C:
 812          return "(unknown)"_s;
 813  
 814      case FrameType::Host:
 815          return "(host)"_s;
 816  
 817      case FrameType::Wasm: {
 818  #if ENABLE(WEBASSEMBLY)
 819          if (wasmIndexOrName)
 820              return makeString(wasmIndexOrName.value());
 821  #endif
 822          return "(wasm)"_s;
 823      }
 824  
 825      case FrameType::Executable:
 826          if (executable->isHostFunction())
 827              return static_cast<NativeExecutable*>(executable)->name();
 828  
 829          if (executable->isFunctionExecutable()) {
 830              String result = static_cast<FunctionExecutable*>(executable)->ecmaName().string();
 831              if (result.isEmpty())
 832                  return "(anonymous function)"_s;
 833              return result;
 834          }
 835          if (executable->isEvalExecutable())
 836              return "(eval)"_s;
 837          if (executable->isProgramExecutable())
 838              return "(program)"_s;
 839          if (executable->isModuleProgramExecutable())
 840              return "(module)"_s;
 841  
 842          RELEASE_ASSERT_NOT_REACHED();
 843          return String();
 844      }
 845      RELEASE_ASSERT_NOT_REACHED();
 846      return String();
 847  }
 848  
 849  int SamplingProfiler::StackFrame::functionStartLine()
 850  {
 851      switch (frameType) {
 852      case FrameType::Unknown:
 853      case FrameType::Host:
 854      case FrameType::C:
 855      case FrameType::Wasm:
 856          return -1;
 857  
 858      case FrameType::Executable:
 859          if (executable->isHostFunction())
 860              return -1;
 861          return static_cast<ScriptExecutable*>(executable)->firstLine();
 862      }
 863      RELEASE_ASSERT_NOT_REACHED();
 864      return -1;
 865  }
 866  
 867  unsigned SamplingProfiler::StackFrame::functionStartColumn()
 868  {
 869      switch (frameType) {
 870      case FrameType::Unknown:
 871      case FrameType::Host:
 872      case FrameType::C:
 873      case FrameType::Wasm:
 874          return std::numeric_limits<unsigned>::max();
 875  
 876      case FrameType::Executable:
 877          if (executable->isHostFunction())
 878              return std::numeric_limits<unsigned>::max();
 879  
 880          return static_cast<ScriptExecutable*>(executable)->startColumn();
 881      }
 882      RELEASE_ASSERT_NOT_REACHED();
 883      return std::numeric_limits<unsigned>::max();
 884  }
 885  
 886  intptr_t SamplingProfiler::StackFrame::sourceID()
 887  {
 888      switch (frameType) {
 889      case FrameType::Unknown:
 890      case FrameType::Host:
 891      case FrameType::C:
 892      case FrameType::Wasm:
 893          return -1;
 894  
 895      case FrameType::Executable:
 896          if (executable->isHostFunction())
 897              return -1;
 898  
 899          return static_cast<ScriptExecutable*>(executable)->sourceID();
 900      }
 901      RELEASE_ASSERT_NOT_REACHED();
 902      return -1;
 903  }
 904  
 905  String SamplingProfiler::StackFrame::url()
 906  {
 907      switch (frameType) {
 908      case FrameType::Unknown:
 909      case FrameType::Host:
 910      case FrameType::C:
 911      case FrameType::Wasm:
 912          return emptyString();
 913      case FrameType::Executable:
 914          if (executable->isHostFunction())
 915              return emptyString();
 916  
 917          String url = static_cast<ScriptExecutable*>(executable)->sourceURL();
 918          if (url.isEmpty())
 919              return static_cast<ScriptExecutable*>(executable)->source().provider()->sourceURLDirective(); // Fall back to sourceURL directive.
 920          return url;
 921      }
 922      RELEASE_ASSERT_NOT_REACHED();
 923      return String();
 924  }
 925  
// Processes any pending raw traces, then hands all accumulated traces to the
// caller, leaving the profiler's buffers empty. Caller must hold m_lock.
Vector<SamplingProfiler::StackTrace> SamplingProfiler::releaseStackTraces(const AbstractLocker& locker)
{
    ASSERT(m_lock.isLocked());
    {
        // Keep the heap stable while processUnverifiedStackTraces (defined
        // elsewhere) runs under this iteration scope.
        HeapIterationScope heapIterationScope(m_vm.heap);
        processUnverifiedStackTraces(locker);
    }

    // Move the traces out, then clear all remaining profiler data.
    Vector<StackTrace> result(WTFMove(m_stackTraces));
    clearData(locker);
    return result;
}
 938  
 939  String SamplingProfiler::stackTracesAsJSON()
 940  {
 941      DeferGC deferGC(m_vm.heap);
 942      auto locker = holdLock(m_lock);
 943  
 944      {
 945          HeapIterationScope heapIterationScope(m_vm.heap);
 946          processUnverifiedStackTraces(locker);
 947      }
 948  
 949      StringBuilder json;
 950      json.append('[');
 951  
 952      bool loopedOnce = false;
 953      auto comma = [&] {
 954          if (loopedOnce)
 955              json.append(',');
 956      };
 957      for (StackTrace& stackTrace : m_stackTraces) {
 958          comma();
 959          json.append('[');
 960          loopedOnce = false;
 961          for (StackFrame& stackFrame : stackTrace.frames) {
 962              comma();
 963              json.appendQuotedJSONString(stackFrame.displayNameForJSONTests(m_vm));
 964              loopedOnce = true;
 965          }
 966          json.append(']');
 967          loopedOnce = true;
 968      }
 969  
 970      json.append(']');
 971  
 972      clearData(locker);
 973  
 974      return json.toString();
 975  }
 976  
// Arranges for this profiler's data to be dumped when the process exits.
// Safe to call from multiple profilers/threads (guarded by registrationLock).
void SamplingProfiler::registerForReportAtExit()
{
    static Lock registrationLock;
    // Heap-allocated and intentionally never freed: it must outlive static
    // destruction so the atexit handler below can still iterate it.
    static HashSet<RefPtr<SamplingProfiler>>* profilesToReport;

    LockHolder holder(registrationLock);

    if (!profilesToReport) {
        profilesToReport = new HashSet<RefPtr<SamplingProfiler>>();
        atexit([]() {
            for (const auto& profile : *profilesToReport)
                profile->reportDataToOptionFile();
        });
    }

    // NOTE(review): adoptRef(this) moves an existing reference into the set
    // without a ref() — this assumes a caller-held reference is being donated
    // here; confirm against the ownership model at the call sites.
    profilesToReport->add(adoptRef(this));
    m_needsReportAtExit = true;
}
 995  
 996  void SamplingProfiler::reportDataToOptionFile()
 997  {
 998      if (m_needsReportAtExit) {
 999          m_needsReportAtExit = false;
1000          JSLockHolder holder(m_vm);
1001          const char* path = Options::samplingProfilerPath();
1002          StringPrintStream pathOut;
1003          pathOut.print(path, "/");
1004          pathOut.print("JSCSampilingProfile-", reinterpret_cast<uintptr_t>(this), ".txt");
1005          auto out = FilePrintStream::open(pathOut.toCString().data(), "w");
1006          reportTopFunctions(*out);
1007          reportTopBytecodes(*out);
1008      }
1009  }
1010  
// Convenience overload: report to the shared WTF data file stream.
void SamplingProfiler::reportTopFunctions()
{
    reportTopFunctions(WTF::dataFile());
}
1015  
1016  void SamplingProfiler::reportTopFunctions(PrintStream& out)
1017  {
1018      auto locker = holdLock(m_lock);
1019      DeferGCForAWhile deferGC(m_vm.heap);
1020  
1021      {
1022          HeapIterationScope heapIterationScope(m_vm.heap);
1023          processUnverifiedStackTraces(locker);
1024      }
1025  
1026      size_t totalSamples = 0;
1027      HashMap<String, size_t> functionCounts;
1028      for (StackTrace& stackTrace : m_stackTraces) {
1029          if (!stackTrace.frames.size())
1030              continue;
1031  
1032          StackFrame& frame = stackTrace.frames.first();
1033          String hash = ""_s;
1034          if (frame.semanticLocation.hasCodeBlockHash()) {
1035              StringPrintStream stream;
1036              frame.semanticLocation.codeBlockHash.dump(stream);
1037              hash = stream.toString();
1038          } else
1039              hash = "<nil>"_s;
1040          String frameDescription = makeString(frame.displayName(m_vm), '#', hash, ':', frame.sourceID());
1041          functionCounts.add(frameDescription, 0).iterator->value++;
1042          totalSamples++;
1043      }
1044  
1045      auto takeMax = [&] () -> std::pair<String, size_t> {
1046          String maxFrameDescription;
1047          size_t maxFrameCount = 0;
1048          for (const auto& entry : functionCounts) {
1049              if (entry.value > maxFrameCount) {
1050                  maxFrameCount = entry.value;
1051                  maxFrameDescription = entry.key;
1052              }
1053          }
1054          if (!maxFrameDescription.isEmpty())
1055              functionCounts.remove(maxFrameDescription);
1056          return std::make_pair(maxFrameDescription, maxFrameCount);
1057      };
1058  
1059      if (Options::samplingProfilerTopFunctionsCount()) {
1060          out.println("\n\nSampling rate: ", m_timingInterval.microseconds(), " microseconds. Total samples: ", totalSamples);
1061          out.println("Top functions as <numSamples  'functionName#hash:sourceID'>");
1062          for (size_t i = 0; i < Options::samplingProfilerTopFunctionsCount(); i++) {
1063              auto pair = takeMax();
1064              if (pair.first.isEmpty())
1065                  break;
1066              out.printf("%6zu ", pair.second);
1067              out.println("   '", pair.first, "'");
1068          }
1069      }
1070  }
1071  
// Convenience overload: report to the shared WTF data file stream.
void SamplingProfiler::reportTopBytecodes()
{
    reportTopBytecodes(WTF::dataFile());
}
1076  
1077  void SamplingProfiler::reportTopBytecodes(PrintStream& out)
1078  {
1079      auto locker = holdLock(m_lock);
1080      DeferGCForAWhile deferGC(m_vm.heap);
1081  
1082      {
1083          HeapIterationScope heapIterationScope(m_vm.heap);
1084          processUnverifiedStackTraces(locker);
1085      }
1086  
1087      size_t totalSamples = 0;
1088      HashMap<String, size_t> bytecodeCounts;
1089      for (StackTrace& stackTrace : m_stackTraces) {
1090          if (!stackTrace.frames.size())
1091              continue;
1092  
1093          auto descriptionForLocation = [&] (StackFrame::CodeLocation location, Optional<Wasm::CompilationMode> wasmCompilationMode) -> String {
1094              String bytecodeIndex;
1095              String codeBlockHash;
1096              String jitType;
1097              if (location.hasBytecodeIndex())
1098                  bytecodeIndex = toString(location.bytecodeIndex);
1099              else
1100                  bytecodeIndex = "<nil>";
1101  
1102              if (location.hasCodeBlockHash()) {
1103                  StringPrintStream stream;
1104                  location.codeBlockHash.dump(stream);
1105                  codeBlockHash = stream.toString();
1106              } else
1107                  codeBlockHash = "<nil>";
1108  
1109              if (wasmCompilationMode)
1110                  jitType = Wasm::makeString(wasmCompilationMode.value());
1111              else
1112                  jitType = JITCode::typeName(location.jitType);
1113  
1114              return makeString("#", codeBlockHash, ":", jitType, ":", bytecodeIndex);
1115          };
1116  
1117          StackFrame& frame = stackTrace.frames.first();
1118          String frameDescription = makeString(frame.displayName(m_vm), descriptionForLocation(frame.semanticLocation, frame.wasmCompilationMode));
1119          if (Optional<std::pair<StackFrame::CodeLocation, CodeBlock*>> machineLocation = frame.machineLocation) {
1120              frameDescription = makeString(frameDescription, " <-- ",
1121                  machineLocation->second->inferredName().data(), descriptionForLocation(machineLocation->first, WTF::nullopt));
1122          }
1123          bytecodeCounts.add(frameDescription, 0).iterator->value++;
1124          totalSamples++;
1125      }
1126  
1127      auto takeMax = [&] () -> std::pair<String, size_t> {
1128          String maxFrameDescription;
1129          size_t maxFrameCount = 0;
1130          for (const auto& entry : bytecodeCounts) {
1131              if (entry.value > maxFrameCount) {
1132                  maxFrameCount = entry.value;
1133                  maxFrameDescription = entry.key;
1134              }
1135          }
1136          if (!maxFrameDescription.isEmpty())
1137              bytecodeCounts.remove(maxFrameDescription);
1138          return std::make_pair(maxFrameDescription, maxFrameCount);
1139      };
1140  
1141      if (Options::samplingProfilerTopBytecodesCount()) {
1142          out.println("\n\nSampling rate: ", m_timingInterval.microseconds(), " microseconds. Total samples: ", totalSamples);
1143          out.println("Hottest bytecodes as <numSamples   'functionName#hash:JITType:bytecodeIndex'>");
1144          for (size_t i = 0; i < Options::samplingProfilerTopBytecodesCount(); i++) {
1145              auto pair = takeMax();
1146              if (pair.first.isEmpty())
1147                  break;
1148              out.printf("%6zu ", pair.second);
1149              out.println("   '", pair.first, "'");
1150          }
1151      }
1152  }
1153  
// Returns the sampling thread, or null if it has not been created yet.
Thread* SamplingProfiler::thread() const
{
    return m_thread.get();
}
1158  
1159  } // namespace JSC
1160  
1161  namespace WTF {
1162  
1163  using namespace JSC;
1164  
1165  void printInternal(PrintStream& out, SamplingProfiler::FrameType frameType)
1166  {
1167      switch (frameType) {
1168      case SamplingProfiler::FrameType::Executable:
1169          out.print("Executable");
1170          break;
1171      case SamplingProfiler::FrameType::Wasm:
1172          out.print("Wasm");
1173          break;
1174      case SamplingProfiler::FrameType::Host:
1175          out.print("Host");
1176          break;
1177      case SamplingProfiler::FrameType::C:
1178      case SamplingProfiler::FrameType::Unknown:
1179          out.print("Unknown");
1180          break;
1181      }
1182  }
1183  
1184  } // namespace WTF
1185  
1186  #endif // ENABLE(SAMPLING_PROFILER)