// Source/JavaScriptCore/bytecode/PolymorphicAccess.h
  1  /*
  2   * Copyright (C) 2014-2020 Apple Inc. All rights reserved.
  3   *
  4   * Redistribution and use in source and binary forms, with or without
  5   * modification, are permitted provided that the following conditions
  6   * are met:
  7   * 1. Redistributions of source code must retain the above copyright
  8   *    notice, this list of conditions and the following disclaimer.
  9   * 2. Redistributions in binary form must reproduce the above copyright
 10   *    notice, this list of conditions and the following disclaimer in the
 11   *    documentation and/or other materials provided with the distribution.
 12   *
 13   * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 14   * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 15   * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 16   * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 17   * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 18   * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 19   * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 20   * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 21   * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 22   * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 23   * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 24   */
 25  
 26  #pragma once
 27  
 28  #if ENABLE(JIT)
 29  
 30  #include "AccessCase.h"
 31  #include "JITStubRoutine.h"
 32  #include "JSFunctionInlines.h"
 33  #include "MacroAssembler.h"
 34  #include "ScratchRegisterAllocator.h"
 35  #include <wtf/Vector.h>
 36  
 37  namespace JSC {
 38  namespace DOMJIT {
 39  class GetterSetter;
 40  }
 41  
 42  class CCallHelpers;
 43  class CodeBlock;
 44  class PolymorphicAccess;
 45  class StructureStubInfo;
 46  class WatchpointsOnStructureStubInfo;
 47  class ScratchRegisterAllocator;
 48  
 49  DECLARE_ALLOCATOR_WITH_HEAP_IDENTIFIER(PolymorphicAccess);
 50  
 51  class AccessGenerationResult {
 52  public:
 53      enum Kind {
 54          MadeNoChanges,
 55          GaveUp,
 56          Buffered,
 57          GeneratedNewCode,
 58          GeneratedFinalCode, // Generated so much code that we never want to generate code again.
 59          ResetStubAndFireWatchpoints // We found out some data that makes us want to start over fresh with this stub. Currently, this happens when we detect poly proto.
 60      };
 61      
 62  
 63      AccessGenerationResult() = default;
 64      AccessGenerationResult(AccessGenerationResult&&) = default;
 65      AccessGenerationResult& operator=(AccessGenerationResult&&) = default;
 66      
 67      AccessGenerationResult(Kind kind)
 68          : m_kind(kind)
 69      {
 70          RELEASE_ASSERT(kind != GeneratedNewCode);
 71          RELEASE_ASSERT(kind != GeneratedFinalCode);
 72      }
 73      
 74      AccessGenerationResult(Kind kind, MacroAssemblerCodePtr<JITStubRoutinePtrTag> code)
 75          : m_kind(kind)
 76          , m_code(code)
 77      {
 78          RELEASE_ASSERT(kind == GeneratedNewCode || kind == GeneratedFinalCode);
 79          RELEASE_ASSERT(code);
 80      }
 81      
 82      bool operator==(const AccessGenerationResult& other) const
 83      {
 84          return m_kind == other.m_kind && m_code == other.m_code;
 85      }
 86      
 87      bool operator!=(const AccessGenerationResult& other) const
 88      {
 89          return !(*this == other);
 90      }
 91      
 92      explicit operator bool() const
 93      {
 94          return *this != AccessGenerationResult();
 95      }
 96      
 97      Kind kind() const { return m_kind; }
 98      
 99      const MacroAssemblerCodePtr<JITStubRoutinePtrTag>& code() const { return m_code; }
100      
101      bool madeNoChanges() const { return m_kind == MadeNoChanges; }
102      bool gaveUp() const { return m_kind == GaveUp; }
103      bool buffered() const { return m_kind == Buffered; }
104      bool generatedNewCode() const { return m_kind == GeneratedNewCode; }
105      bool generatedFinalCode() const { return m_kind == GeneratedFinalCode; }
106      bool shouldResetStubAndFireWatchpoints() const { return m_kind == ResetStubAndFireWatchpoints; }
107      
108      // If we gave up on this attempt to generate code, or if we generated the "final" code, then we
109      // should give up after this.
110      bool shouldGiveUpNow() const { return gaveUp() || generatedFinalCode(); }
111      
112      bool generatedSomeCode() const { return generatedNewCode() || generatedFinalCode(); }
113      
114      void dump(PrintStream&) const;
115  
116      void addWatchpointToFire(InlineWatchpointSet& set, StringFireDetail detail)
117      {
118          m_watchpointsToFire.append(std::pair<InlineWatchpointSet&, StringFireDetail>(set, detail));
119      }
120      void fireWatchpoints(VM& vm)
121      {
122          ASSERT(m_kind == ResetStubAndFireWatchpoints);
123          for (auto& pair : m_watchpointsToFire)
124              pair.first.invalidate(vm, pair.second);
125      }
126      
127  private:
128      Kind m_kind;
129      MacroAssemblerCodePtr<JITStubRoutinePtrTag> m_code;
130      Vector<std::pair<InlineWatchpointSet&, StringFireDetail>> m_watchpointsToFire;
131  };
132  
133  class PolymorphicAccess {
134      WTF_MAKE_NONCOPYABLE(PolymorphicAccess);
135      WTF_MAKE_STRUCT_FAST_ALLOCATED_WITH_HEAP_IDENTIFIER(PolymorphicAccess);
136  public:
137      PolymorphicAccess();
138      ~PolymorphicAccess();
139  
140      // When this fails (returns GaveUp), this will leave the old stub intact but you should not try
141      // to call this method again for that PolymorphicAccess instance.
142      AccessGenerationResult addCases(
143          const GCSafeConcurrentJSLocker&, VM&, CodeBlock*, StructureStubInfo&, Vector<std::unique_ptr<AccessCase>, 2>);
144  
145      AccessGenerationResult addCase(
146          const GCSafeConcurrentJSLocker&, VM&, CodeBlock*, StructureStubInfo&, std::unique_ptr<AccessCase>);
147      
148      AccessGenerationResult regenerate(const GCSafeConcurrentJSLocker&, VM&, JSGlobalObject*, CodeBlock*, ECMAMode, StructureStubInfo&);
149      
150      bool isEmpty() const { return m_list.isEmpty(); }
151      unsigned size() const { return m_list.size(); }
152      const AccessCase& at(unsigned i) const { return *m_list[i]; }
153      const AccessCase& operator[](unsigned i) const { return *m_list[i]; }
154  
155      void visitAggregate(SlotVisitor&);
156  
157      // If this returns false then we are requesting a reset of the owning StructureStubInfo.
158      bool visitWeak(VM&) const;
159      
160      // This returns true if it has marked everything it will ever marked. This can be used as an
161      // optimization to then avoid calling this method again during the fixpoint.
162      bool propagateTransitions(SlotVisitor&) const;
163  
164      void aboutToDie();
165  
166      void dump(PrintStream& out) const;
167      bool containsPC(void* pc) const
168      { 
169          if (!m_stubRoutine)
170              return false;
171  
172          uintptr_t pcAsInt = bitwise_cast<uintptr_t>(pc);
173          return m_stubRoutine->startAddress() <= pcAsInt && pcAsInt <= m_stubRoutine->endAddress();
174      }
175  
176  private:
177      friend class AccessCase;
178      friend class CodeBlock;
179      friend struct AccessGenerationState;
180      
181      typedef Vector<std::unique_ptr<AccessCase>, 2> ListType;
182      
183      void commit(
184          const GCSafeConcurrentJSLocker&, VM&, std::unique_ptr<WatchpointsOnStructureStubInfo>&, CodeBlock*, StructureStubInfo&,
185          AccessCase&);
186  
187      ListType m_list;
188      RefPtr<JITStubRoutine> m_stubRoutine;
189      std::unique_ptr<WatchpointsOnStructureStubInfo> m_watchpoints;
190      std::unique_ptr<Vector<WriteBarrier<JSCell>>> m_weakReferences;
191  };
192  
193  struct AccessGenerationState {
194      AccessGenerationState(VM& vm, JSGlobalObject* globalObject, ECMAMode ecmaMode)
195          : m_vm(vm) 
196          , m_globalObject(globalObject)
197          , m_ecmaMode(ecmaMode)
198          , m_calculatedRegistersForCallAndExceptionHandling(false)
199          , m_needsToRestoreRegistersIfException(false)
200          , m_calculatedCallSiteIndex(false)
201      {
202          u.thisGPR = InvalidGPRReg;
203      }
204      VM& m_vm;
205      JSGlobalObject* m_globalObject;
206      CCallHelpers* jit { nullptr };
207      ScratchRegisterAllocator* allocator;
208      ScratchRegisterAllocator::PreservedState preservedReusedRegisterState;
209      PolymorphicAccess* access { nullptr };
210      StructureStubInfo* stubInfo { nullptr };
211      MacroAssembler::JumpList success;
212      MacroAssembler::JumpList failAndRepatch;
213      MacroAssembler::JumpList failAndIgnore;
214      GPRReg baseGPR { InvalidGPRReg };
215      union {
216          GPRReg thisGPR;
217          GPRReg prototypeGPR;
218          GPRReg propertyGPR;
219      } u;
220      JSValueRegs valueRegs;
221      GPRReg scratchGPR { InvalidGPRReg };
222      FPRReg scratchFPR { InvalidFPRReg };
223      ECMAMode m_ecmaMode { ECMAMode::sloppy() };
224      std::unique_ptr<WatchpointsOnStructureStubInfo> watchpoints;
225      Vector<WriteBarrier<JSCell>> weakReferences;
226      Bag<CallLinkInfo> m_callLinkInfos;
227  
228      void installWatchpoint(const ObjectPropertyCondition&);
229  
230      void restoreScratch();
231      void succeed();
232  
233      struct SpillState {
234          SpillState() = default;
235          SpillState(RegisterSet&& regs, unsigned usedStackBytes)
236              : spilledRegisters(WTFMove(regs))
237              , numberOfStackBytesUsedForRegisterPreservation(usedStackBytes)
238          {
239          }
240  
241          RegisterSet spilledRegisters { };
242          unsigned numberOfStackBytesUsedForRegisterPreservation { std::numeric_limits<unsigned>::max() };
243  
244          bool isEmpty() const { return numberOfStackBytesUsedForRegisterPreservation == std::numeric_limits<unsigned>::max(); }
245      };
246  
247      const RegisterSet& calculateLiveRegistersForCallAndExceptionHandling();
248  
249      SpillState preserveLiveRegistersToStackForCall(const RegisterSet& extra = { });
250      SpillState preserveLiveRegistersToStackForCallWithoutExceptions();
251  
252      void restoreLiveRegistersFromStackForCallWithThrownException(const SpillState&);
253      void restoreLiveRegistersFromStackForCall(const SpillState&, const RegisterSet& dontRestore = { });
254  
255      const RegisterSet& liveRegistersForCall();
256  
257      CallSiteIndex callSiteIndexForExceptionHandlingOrOriginal();
258      DisposableCallSiteIndex callSiteIndexForExceptionHandling();
259  
260      const HandlerInfo& originalExceptionHandler();
261  
262      bool needsToRestoreRegistersIfException() const { return m_needsToRestoreRegistersIfException; }
263      CallSiteIndex originalCallSiteIndex() const;
264      
265      void emitExplicitExceptionHandler();
266  
267      void setSpillStateForJSGetterSetter(SpillState& spillState)
268      {
269          if (!m_spillStateForJSGetterSetter.isEmpty()) {
270              ASSERT(m_spillStateForJSGetterSetter.numberOfStackBytesUsedForRegisterPreservation == spillState.numberOfStackBytesUsedForRegisterPreservation);
271              ASSERT(m_spillStateForJSGetterSetter.spilledRegisters == spillState.spilledRegisters);
272          }
273          m_spillStateForJSGetterSetter = spillState;
274      }
275      SpillState spillStateForJSGetterSetter() const { return m_spillStateForJSGetterSetter; }
276      
277  private:
278      const RegisterSet& liveRegistersToPreserveAtExceptionHandlingCallSite();
279      
280      RegisterSet m_liveRegistersToPreserveAtExceptionHandlingCallSite;
281      RegisterSet m_liveRegistersForCall;
282      CallSiteIndex m_callSiteIndex;
283      SpillState m_spillStateForJSGetterSetter;
284      bool m_calculatedRegistersForCallAndExceptionHandling : 1;
285      bool m_needsToRestoreRegistersIfException : 1;
286      bool m_calculatedCallSiteIndex : 1;
287  };
288  
289  } // namespace JSC
290  
namespace WTF {

// PrintStream adaptors so these JSC enums can be passed to dataLog()/dump().
void printInternal(PrintStream&, JSC::AccessGenerationResult::Kind);
void printInternal(PrintStream&, JSC::AccessCase::AccessType);
void printInternal(PrintStream&, JSC::AccessCase::State);

} // namespace WTF
298  
299  #endif // ENABLE(JIT)