// bytecode/InlineCallFrame.h
  1  /*
  2   * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
  3   *
  4   * Redistribution and use in source and binary forms, with or without
  5   * modification, are permitted provided that the following conditions
  6   * are met:
  7   * 1. Redistributions of source code must retain the above copyright
  8   *    notice, this list of conditions and the following disclaimer.
  9   * 2. Redistributions in binary form must reproduce the above copyright
 10   *    notice, this list of conditions and the following disclaimer in the
 11   *    documentation and/or other materials provided with the distribution.
 12   *
 13   * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 14   * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 15   * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 16   * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 17   * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 18   * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 19   * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 20   * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 21   * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 22   * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 23   * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 24   */
 25  
 26  #pragma once
 27  
 28  #include "CodeBlock.h"
 29  #include "CodeBlockHash.h"
 30  #include "CodeOrigin.h"
 31  #include "ValueRecovery.h"
 32  #include "WriteBarrier.h"
 33  #include <wtf/PrintStream.h>
 34  #include <wtf/StdLibExtras.h>
 35  #include <wtf/Vector.h>
 36  
 37  namespace JSC {
 38  
 39  struct InlineCallFrame;
 40  class CallFrame;
 41  class JSFunction;
 42  
// Describes one call frame that an optimizing JIT inlined into its caller.
// Instances are reached through CodeOrigin; following directCaller walks up
// the logical stack of inlined frames.
struct InlineCallFrame {
    enum Kind {
        Call,
        Construct,
        TailCall,
        CallVarargs,
        ConstructVarargs,
        TailCallVarargs,
        
        // For these, the stackOffset incorporates the argument count plus the true return PC
        // slot.
        GetterCall,
        SetterCall
    };

    // Maps a Kind to the coarser CallMode triad (Regular/Tail/Construct).
    // Getter and setter calls count as regular calls.
    static CallMode callModeFor(Kind kind)
    {
        switch (kind) {
        case Call:
        case CallVarargs:
        case GetterCall:
        case SetterCall:
            return CallMode::Regular;
        case TailCall:
        case TailCallVarargs:
            return CallMode::Tail;
        case Construct:
        case ConstructVarargs:
            return CallMode::Construct;
        }
        RELEASE_ASSERT_NOT_REACHED();
    }

    // Inverse of callModeFor for the non-varargs kinds.
    static Kind kindFor(CallMode callMode)
    {
        switch (callMode) {
        case CallMode::Regular:
            return Call;
        case CallMode::Construct:
            return Construct;
        case CallMode::Tail:
            return TailCall;
        }
        RELEASE_ASSERT_NOT_REACHED();
    }
    
    // Inverse of callModeFor for the varargs kinds.
    static Kind varargsKindFor(CallMode callMode)
    {
        switch (callMode) {
        case CallMode::Regular:
            return CallVarargs;
        case CallMode::Construct:
            return ConstructVarargs;
        case CallMode::Tail:
            return TailCallVarargs;
        }
        RELEASE_ASSERT_NOT_REACHED();
    }
    
    // Whether frames of this kind run code specialized for call or for construct.
    static CodeSpecializationKind specializationKindFor(Kind kind)
    {
        switch (kind) {
        case Call:
        case CallVarargs:
        case TailCall:
        case TailCallVarargs:
        case GetterCall:
        case SetterCall:
            return CodeForCall;
        case Construct:
        case ConstructVarargs:
            return CodeForConstruct;
        }
        RELEASE_ASSERT_NOT_REACHED();
    }
    
    static bool isVarargs(Kind kind)
    {
        switch (kind) {
        case CallVarargs:
        case TailCallVarargs:
        case ConstructVarargs:
            return true;
        default:
            return false;
        }
    }

    static bool isTail(Kind kind)
    {
        switch (kind) {
        case TailCall:
        case TailCallVarargs:
            return true;
        default:
            return false;
        }
    }
    bool isTail() const
    {
        return isTail(static_cast<Kind>(kind));
    }

    // Walks up the directCaller chain, skipping over frames that are tail
    // calls, and returns the CodeOrigin of the first non-tail caller. The
    // kind of the last skipped-to frame is reported through callerCallKind
    // when requested. If we run out of inline frames while the innermost
    // remaining frame is itself a tail call, there is no recoverable caller
    // and nullptr is returned.
    static CodeOrigin* computeCallerSkippingTailCalls(InlineCallFrame* inlineCallFrame, Kind* callerCallKind = nullptr)
    {
        CodeOrigin* codeOrigin;
        bool tailCallee;
        int callKind;
        do {
            tailCallee = inlineCallFrame->isTail();
            callKind = inlineCallFrame->kind;
            codeOrigin = &inlineCallFrame->directCaller;
            inlineCallFrame = codeOrigin->inlineCallFrame();
        } while (inlineCallFrame && tailCallee);

        if (tailCallee)
            return nullptr;

        if (callerCallKind)
            *callerCallKind = static_cast<Kind>(callKind);

        return codeOrigin;
    }

    // Convenience wrapper around computeCallerSkippingTailCalls for this frame.
    CodeOrigin* getCallerSkippingTailCalls(Kind* callerCallKind = nullptr)
    {
        return computeCallerSkippingTailCalls(this, callerCallKind);
    }

    // Like getCallerSkippingTailCalls, but returns the caller's inline frame
    // (nullptr if the caller is the machine frame or was tail-call-elided).
    InlineCallFrame* getCallerInlineFrameSkippingTailCalls()
    {
        CodeOrigin* caller = getCallerSkippingTailCalls();
        return caller ? caller->inlineCallFrame() : nullptr;
    }
    
    Vector<ValueRecovery> argumentsWithFixup; // Includes 'this' and arity fixups.
    WriteBarrier<CodeBlock> baselineCodeBlock;
    CodeOrigin directCaller;

    // The following fields are packed into bitfields; widths are enforced at
    // runtime by setStackOffset()/setTmpOffset() below.
    unsigned argumentCountIncludingThis : 22; // Do not include fixups.
    unsigned tmpOffset : 10;
    signed stackOffset : 28;
    unsigned kind : 3; // real type is Kind
    bool isClosureCall : 1; // If false then we know that callee/scope are constants and the DFG won't treat them as variables, i.e. they have to be recovered manually.
    VirtualRegister argumentCountRegister; // Only set when we inline a varargs call.

    ValueRecovery calleeRecovery;
    
    // There is really no good notion of a "default" set of values for
    // InlineCallFrame's fields. This constructor is here just to reduce confusion if
    // we forgot to initialize explicitly.
    InlineCallFrame()
        : argumentCountIncludingThis(0)
        , tmpOffset(0)
        , stackOffset(0)
        , kind(Call)
        , isClosureCall(false)
    {
    }
    
    bool isVarargs() const
    {
        return isVarargs(static_cast<Kind>(kind));
    }

    CodeSpecializationKind specializationKind() const { return specializationKindFor(static_cast<Kind>(kind)); }

    JSFunction* calleeConstant() const;
    
    // Get the callee given a machine call frame to which this InlineCallFrame belongs.
    JSFunction* calleeForCallFrame(CallFrame*) const;
    
    CString inferredName() const;
    CodeBlockHash hash() const;
    CString hashAsStringIfPossible() const;
    
    // The RELEASE_ASSERT catches values that do not round-trip through the
    // 28-bit signed bitfield.
    void setStackOffset(signed offset)
    {
        stackOffset = offset;
        RELEASE_ASSERT(static_cast<signed>(stackOffset) == offset);
    }

    // Same idea for the 10-bit tmpOffset field.
    void setTmpOffset(unsigned offset)
    {
        tmpOffset = offset;
        RELEASE_ASSERT(static_cast<unsigned>(tmpOffset) == offset);
    }

    // Byte offsets computed from stackOffset (counted in Registers) plus the
    // corresponding slot offsets within a machine CallFrame.
    ptrdiff_t callerFrameOffset() const { return stackOffset * sizeof(Register) + CallFrame::callerFrameOffset(); }
    ptrdiff_t returnPCOffset() const { return stackOffset * sizeof(Register) + CallFrame::returnPCOffset(); }

    bool isInStrictContext() const { return baselineCodeBlock->ownerExecutable()->isInStrictContext(); }

    void dumpBriefFunctionInformation(PrintStream&) const;
    void dump(PrintStream&) const;
    void dumpInContext(PrintStream&, DumpContext*) const;

    MAKE_PRINT_METHOD(InlineCallFrame, dumpBriefFunctionInformation, briefFunctionInformation);

};
243  
244  inline CodeBlock* baselineCodeBlockForInlineCallFrame(InlineCallFrame* inlineCallFrame)
245  {
246      RELEASE_ASSERT(inlineCallFrame);
247      return inlineCallFrame->baselineCodeBlock.get();
248  }
249  
250  inline CodeBlock* baselineCodeBlockForOriginAndBaselineCodeBlock(const CodeOrigin& codeOrigin, CodeBlock* baselineCodeBlock)
251  {
252      ASSERT(JITCode::isBaselineCode(baselineCodeBlock->jitType()));
253      auto* inlineCallFrame = codeOrigin.inlineCallFrame();
254      if (inlineCallFrame)
255          return baselineCodeBlockForInlineCallFrame(inlineCallFrame);
256      return baselineCodeBlock;
257  }
258  
// These functions are defined here and not in CodeOrigin because they need access to the directCaller field in InlineCallFrame.
260  template <typename Function>
261  inline void CodeOrigin::walkUpInlineStack(const Function& function) const
262  {
263      CodeOrigin codeOrigin = *this;
264      while (true) {
265          function(codeOrigin);
266          auto* inlineCallFrame = codeOrigin.inlineCallFrame();
267          if (!inlineCallFrame)
268              break;
269          codeOrigin = inlineCallFrame->directCaller;
270      }
271  }
272  
273  inline bool CodeOrigin::inlineStackContainsActiveCheckpoint() const
274  {
275      bool result = false;
276      walkUpInlineStack([&] (CodeOrigin origin) {
277          if (origin.bytecodeIndex().checkpoint())
278              result = true;
279      });
280      return result;
281  }
282  
283  ALWAYS_INLINE Operand remapOperand(InlineCallFrame* inlineCallFrame, Operand operand)
284  {
285      if (inlineCallFrame)
286          return operand.isTmp() ? Operand::tmp(operand.value() + inlineCallFrame->tmpOffset) : operand.virtualRegister() + inlineCallFrame->stackOffset;
287      return operand;
288  }
289  
290  ALWAYS_INLINE Operand remapOperand(InlineCallFrame* inlineCallFrame, VirtualRegister reg)
291  {
292      return remapOperand(inlineCallFrame, Operand(reg));
293  }
294  
295  ALWAYS_INLINE Operand unmapOperand(InlineCallFrame* inlineCallFrame, Operand operand)
296  {
297      if (inlineCallFrame)
298          return operand.isTmp() ? Operand::tmp(operand.value() - inlineCallFrame->tmpOffset) : Operand(operand.virtualRegister() - inlineCallFrame->stackOffset);
299      return operand;
300  }
301  
302  ALWAYS_INLINE Operand unmapOperand(InlineCallFrame* inlineCallFrame, VirtualRegister reg)
303  {
304      return unmapOperand(inlineCallFrame, Operand(reg));
305  }
306  
307  } // namespace JSC
308  
namespace WTF {

// PrintStream support hook for InlineCallFrame::Kind (definition lives elsewhere).
void printInternal(PrintStream&, JSC::InlineCallFrame::Kind);

} // namespace WTF