// bytecode/StructureStubInfo.cpp
  1  /*
  2   * Copyright (C) 2008-2020 Apple Inc. All rights reserved.
  3   *
  4   * Redistribution and use in source and binary forms, with or without
  5   * modification, are permitted provided that the following conditions
  6   * are met:
  7   * 1. Redistributions of source code must retain the above copyright
  8   *    notice, this list of conditions and the following disclaimer.
  9   * 2. Redistributions in binary form must reproduce the above copyright
 10   *    notice, this list of conditions and the following disclaimer in the
 11   *    documentation and/or other materials provided with the distribution.
 12   *
 13   * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 14   * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 15   * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 16   * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 17   * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 18   * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 19   * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 20   * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 21   * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 22   * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 23   * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 24   */
 25  
 26  #include "config.h"
 27  #include "StructureStubInfo.h"
 28  
 29  #include "CacheableIdentifierInlines.h"
 30  #include "PolymorphicAccess.h"
 31  #include "Repatch.h"
 32  
 33  namespace JSC {
 34  
 35  #if ENABLE(JIT)
 36  
namespace StructureStubInfoInternal {
// Flip to true for verbose dataLog() tracing of access-case buffering and
// stub regeneration in addAccessCase().
static constexpr bool verbose = false;
}
 40  
// Constructs a stub in its initial (Unset) state. bufferingCountdown starts at
// the option-controlled threshold; while non-zero, addAccessCase() buffers new
// cases instead of regenerating code. hasConstantIdentifier defaults to true;
// presumably by-val-style accesses clear it elsewhere — TODO confirm against
// the header/callers.
StructureStubInfo::StructureStubInfo(AccessType accessType, CodeOrigin codeOrigin)
    : codeOrigin(codeOrigin)
    , accessType(accessType)
    , bufferingCountdown(Options::repatchBufferingCountdown())
    , resetByGC(false)
    , tookSlowPath(false)
    , everConsidered(false)
    , prototypeIsKnownObject(false)
    , sawNonCell(false)
    , hasConstantIdentifier(true)
    , propertyIsString(false)
    , propertyIsInt32(false)
    , propertyIsSymbol(false)
{
}
 56  
 57  StructureStubInfo::~StructureStubInfo()
 58  {
 59  }
 60  
 61  void StructureStubInfo::initGetByIdSelf(const ConcurrentJSLockerBase& locker, CodeBlock* codeBlock, Structure* baseObjectStructure, PropertyOffset offset, CacheableIdentifier identifier)
 62  {
 63      ASSERT(hasConstantIdentifier);
 64      setCacheType(locker, CacheType::GetByIdSelf);
 65      m_identifier = identifier;
 66      codeBlock->vm().heap.writeBarrier(codeBlock);
 67      
 68      u.byIdSelf.baseObjectStructure.set(
 69          codeBlock->vm(), codeBlock, baseObjectStructure);
 70      u.byIdSelf.offset = offset;
 71  }
 72  
// Marks this stub as caching the special-cased array length access (no
// per-structure data is stored for this state).
void StructureStubInfo::initArrayLength(const ConcurrentJSLockerBase& locker)
{
    setCacheType(locker, CacheType::ArrayLength);
}
 77  
// Marks this stub as caching the special-cased string length access (no
// per-structure data is stored for this state).
void StructureStubInfo::initStringLength(const ConcurrentJSLockerBase& locker)
{
    setCacheType(locker, CacheType::StringLength);
}
 82  
 83  void StructureStubInfo::initPutByIdReplace(const ConcurrentJSLockerBase& locker, CodeBlock* codeBlock, Structure* baseObjectStructure, PropertyOffset offset, CacheableIdentifier identifier)
 84  {
 85      setCacheType(locker, CacheType::PutByIdReplace);
 86      m_identifier = identifier;
 87      codeBlock->vm().heap.writeBarrier(codeBlock);
 88  
 89      u.byIdSelf.baseObjectStructure.set(
 90          codeBlock->vm(), codeBlock, baseObjectStructure);
 91      u.byIdSelf.offset = offset;
 92  }
 93  
 94  void StructureStubInfo::initInByIdSelf(const ConcurrentJSLockerBase& locker, CodeBlock* codeBlock, Structure* baseObjectStructure, PropertyOffset offset, CacheableIdentifier identifier)
 95  {
 96      setCacheType(locker, CacheType::InByIdSelf);
 97      m_identifier = identifier;
 98      codeBlock->vm().heap.writeBarrier(codeBlock);
 99  
100      u.byIdSelf.baseObjectStructure.set(
101          codeBlock->vm(), codeBlock, baseObjectStructure);
102      u.byIdSelf.offset = offset;
103  }
104  
105  void StructureStubInfo::deref()
106  {
107      switch (m_cacheType) {
108      case CacheType::Stub:
109          delete u.stub;
110          return;
111      case CacheType::Unset:
112      case CacheType::GetByIdSelf:
113      case CacheType::PutByIdReplace:
114      case CacheType::InByIdSelf:
115      case CacheType::ArrayLength:
116      case CacheType::StringLength:
117          return;
118      }
119  
120      RELEASE_ASSERT_NOT_REACHED();
121  }
122  
123  void StructureStubInfo::aboutToDie()
124  {
125      switch (m_cacheType) {
126      case CacheType::Stub:
127          u.stub->aboutToDie();
128          return;
129      case CacheType::Unset:
130      case CacheType::GetByIdSelf:
131      case CacheType::PutByIdReplace:
132      case CacheType::InByIdSelf:
133      case CacheType::ArrayLength:
134      case CacheType::StringLength:
135          return;
136      }
137  
138      RELEASE_ASSERT_NOT_REACHED();
139  }
140  
// Adds a newly observed access case to this stub.
//
// If the stub is already polymorphic (CacheType::Stub), the case is added to
// the existing PolymorphicAccess. Otherwise a fresh PolymorphicAccess is
// created, seeded with the previous monomorphic case (if any) plus the new
// one, and the stub transitions to CacheType::Stub.
//
// Cases are only *buffered* until bufferingCountdown reaches zero; then the
// stub's machine code is regenerated from all buffered cases at once. Returns
// the AccessGenerationResult describing what happened (gave up, buffered,
// generated code, or needs a reset + watchpoint fire).
AccessGenerationResult StructureStubInfo::addAccessCase(
    const GCSafeConcurrentJSLocker& locker, JSGlobalObject* globalObject, CodeBlock* codeBlock, ECMAMode ecmaMode, CacheableIdentifier ident, std::unique_ptr<AccessCase> accessCase)
{
    checkConsistency();

    VM& vm = codeBlock->vm();
    ASSERT(vm.heap.isDeferred());
    AccessGenerationResult result = ([&] () -> AccessGenerationResult {
        if (StructureStubInfoInternal::verbose)
            dataLog("Adding access case: ", accessCase, "\n");
        
        // A null case means the caller could not build one; give up.
        if (!accessCase)
            return AccessGenerationResult::GaveUp;
        
        AccessGenerationResult result;
        
        if (m_cacheType == CacheType::Stub) {
            // Already polymorphic: hand the case to the existing stub.
            result = u.stub->addCase(locker, vm, codeBlock, *this, WTFMove(accessCase));
            
            if (StructureStubInfoInternal::verbose)
                dataLog("Had stub, result: ", result, "\n");

            if (result.shouldResetStubAndFireWatchpoints())
                return result;

            // If the case wasn't buffered, stop tracking the structures we
            // were buffering for.
            if (!result.buffered()) {
                clearBufferedStructures();
                return result;
            }
        } else {
            // First transition to a polymorphic stub: build a new
            // PolymorphicAccess containing the previous monomorphic case (if
            // the current state yields one) and the new case.
            std::unique_ptr<PolymorphicAccess> access = makeUnique<PolymorphicAccess>();
            
            Vector<std::unique_ptr<AccessCase>, 2> accessCases;
            
            std::unique_ptr<AccessCase> previousCase = AccessCase::fromStructureStubInfo(vm, codeBlock, ident, *this);
            if (previousCase)
                accessCases.append(WTFMove(previousCase));
            
            accessCases.append(WTFMove(accessCase));
            
            result = access->addCases(locker, vm, codeBlock, *this, WTFMove(accessCases));
            
            if (StructureStubInfoInternal::verbose)
                dataLog("Created stub, result: ", result, "\n");

            if (result.shouldResetStubAndFireWatchpoints())
                return result;

            if (!result.buffered()) {
                clearBufferedStructures();
                return result;
            }
            
            // Commit the new PolymorphicAccess; the union now owns it.
            setCacheType(locker, CacheType::Stub);
            u.stub = access.release();
        }
        
        ASSERT(m_cacheType == CacheType::Stub);
        RELEASE_ASSERT(!result.generatedSomeCode());
        
        // If we didn't buffer any cases then bail. If this made no changes then we'll just try again
        // subject to cool-down.
        if (!result.buffered()) {
            if (StructureStubInfoInternal::verbose)
                dataLog("Didn't buffer anything, bailing.\n");
            clearBufferedStructures();
            return result;
        }
        
        // The buffering countdown tells us if we should be repatching now.
        if (bufferingCountdown) {
            if (StructureStubInfoInternal::verbose)
                dataLog("Countdown is too high: ", bufferingCountdown, ".\n");
            return result;
        }
        
        // Forget the buffered structures so that all future attempts to cache get fully handled by the
        // PolymorphicAccess.
        clearBufferedStructures();
        
        // Countdown expired: compile all buffered cases into machine code now.
        result = u.stub->regenerate(locker, vm, globalObject, codeBlock, ecmaMode, *this);
        
        if (StructureStubInfoInternal::verbose)
            dataLog("Regeneration result: ", result, "\n");
        
        RELEASE_ASSERT(!result.buffered());
        
        if (!result.generatedSomeCode())
            return result;
        
        // If we generated some code then we don't want to attempt to repatch in the future until we
        // gather enough cases.
        bufferingCountdown = Options::repatchBufferingCountdown();
        return result;
    })();
    // The stub may now reference freshly-added GC objects, so barrier the
    // owning CodeBlock.
    vm.heap.writeBarrier(codeBlock);
    return result;
}
239  
// Fully resets this stub back to the Unset state: drops buffered structures,
// repatches the inline cache's machine code back to its generic slow path
// (dispatching on accessType to the right repatch helper), frees any owned
// PolymorphicAccess, and clears the cache type.
void StructureStubInfo::reset(const ConcurrentJSLockerBase& locker, CodeBlock* codeBlock)
{
    clearBufferedStructures();

    // Nothing to undo if no cache was ever installed.
    if (m_cacheType == CacheType::Unset)
        return;

    if (Options::verboseOSR()) {
        // This can be called from GC destructor calls, so we don't try to do a full dump
        // of the CodeBlock.
        dataLog("Clearing structure cache (kind ", static_cast<int>(accessType), ") in ", RawPointer(codeBlock), ".\n");
    }

    switch (accessType) {
    case AccessType::TryGetById:
        resetGetBy(codeBlock, *this, GetByKind::Try);
        break;
    case AccessType::GetById:
        resetGetBy(codeBlock, *this, GetByKind::Normal);
        break;
    case AccessType::GetByIdWithThis:
        resetGetBy(codeBlock, *this, GetByKind::WithThis);
        break;
    case AccessType::GetByIdDirect:
        resetGetBy(codeBlock, *this, GetByKind::Direct);
        break;
    case AccessType::GetByVal:
        resetGetBy(codeBlock, *this, GetByKind::NormalByVal);
        break;
    case AccessType::GetPrivateName:
        resetGetBy(codeBlock, *this, GetByKind::PrivateName);
        break;
    case AccessType::Put:
        resetPutByID(codeBlock, *this);
        break;
    case AccessType::In:
        resetInByID(codeBlock, *this);
        break;
    case AccessType::InstanceOf:
        resetInstanceOf(*this);
        break;
    case AccessType::DeleteByID:
        resetDelBy(codeBlock, *this, DelByKind::Normal);
        break;
    case AccessType::DeleteByVal:
        resetDelBy(codeBlock, *this, DelByKind::NormalByVal);
        break;
    }
    
    // Free the PolymorphicAccess (if any) before forgetting the state; the
    // setCacheType call also drops the cached identifier.
    deref();
    setCacheType(locker, CacheType::Unset);
}
292  
293  void StructureStubInfo::visitAggregate(SlotVisitor& visitor)
294  {
295      {
296          auto locker = holdLock(m_bufferedStructuresLock);
297          for (auto& bufferedStructure : m_bufferedStructures)
298              bufferedStructure.byValId().visitAggregate(visitor);
299      }
300      switch (m_cacheType) {
301      case CacheType::Unset:
302      case CacheType::ArrayLength:
303      case CacheType::StringLength:
304          return;
305      case CacheType::PutByIdReplace:
306      case CacheType::InByIdSelf:
307      case CacheType::GetByIdSelf:
308          m_identifier.visitAggregate(visitor);
309          return;
310      case CacheType::Stub:
311          u.stub->visitAggregate(visitor);
312          return;
313      }
314      
315      RELEASE_ASSERT_NOT_REACHED();
316      return;
317  }
318  
// GC hook: prunes buffered structures that died in this collection, and if a
// structure the installed cache depends on is dead, resets the whole stub and
// records that the reset was GC-driven (resetByGC).
void StructureStubInfo::visitWeakReferences(const ConcurrentJSLockerBase& locker, CodeBlock* codeBlock)
{
    VM& vm = codeBlock->vm();
    {
        auto locker = holdLock(m_bufferedStructuresLock);
        // Drop buffered entries whose Structure was not marked.
        m_bufferedStructures.removeIf(
            [&] (auto& entry) -> bool {
                return !vm.heap.isMarked(entry.structure());
            });
    }

    switch (m_cacheType) {
    case CacheType::GetByIdSelf:
    case CacheType::PutByIdReplace:
    case CacheType::InByIdSelf:
        // Monomorphic cache survives only if its structure is still alive.
        if (vm.heap.isMarked(u.byIdSelf.baseObjectStructure.get()))
            return;
        break;
    case CacheType::Stub:
        // visitWeak() returning true means the stub is still fully alive.
        if (u.stub->visitWeak(vm))
            return;
        break;
    default:
        // Unset/ArrayLength/StringLength hold no weak references.
        return;
    }

    // Something this cache relied on died; throw the cache away.
    reset(locker, codeBlock);
    resetByGC = true;
}
348  
349  bool StructureStubInfo::propagateTransitions(SlotVisitor& visitor)
350  {
351      switch (m_cacheType) {
352      case CacheType::Unset:
353      case CacheType::ArrayLength:
354      case CacheType::StringLength:
355          return true;
356      case CacheType::GetByIdSelf:
357      case CacheType::PutByIdReplace:
358      case CacheType::InByIdSelf:
359          return u.byIdSelf.baseObjectStructure->markIfCheap(visitor);
360      case CacheType::Stub:
361          return u.stub->propagateTransitions(visitor);
362      }
363      
364      RELEASE_ASSERT_NOT_REACHED();
365      return true;
366  }
367  
368  StubInfoSummary StructureStubInfo::summary(VM& vm) const
369  {
370      StubInfoSummary takesSlowPath = StubInfoSummary::TakesSlowPath;
371      StubInfoSummary simple = StubInfoSummary::Simple;
372      if (m_cacheType == CacheType::Stub) {
373          PolymorphicAccess* list = u.stub;
374          for (unsigned i = 0; i < list->size(); ++i) {
375              const AccessCase& access = list->at(i);
376              if (access.doesCalls(vm)) {
377                  takesSlowPath = StubInfoSummary::TakesSlowPathAndMakesCalls;
378                  simple = StubInfoSummary::MakesCalls;
379                  break;
380              }
381          }
382      }
383      
384      if (tookSlowPath || sawNonCell)
385          return takesSlowPath;
386      
387      if (!everConsidered)
388          return StubInfoSummary::NoInformation;
389      
390      return simple;
391  }
392  
393  StubInfoSummary StructureStubInfo::summary(VM& vm, const StructureStubInfo* stubInfo)
394  {
395      if (!stubInfo)
396          return StubInfoSummary::NoInformation;
397      
398      return stubInfo->summary(vm);
399  }
400  
401  bool StructureStubInfo::containsPC(void* pc) const
402  {
403      if (m_cacheType != CacheType::Stub)
404          return false;
405      return u.stub->containsPC(pc);
406  }
407  
// Transitions m_cacheType to newCacheType. The unnamed locker parameter
// documents that callers hold the appropriate ConcurrentJS lock. When leaving
// one of the monomorphic by-id states, the cached identifier is dropped so
// this stub no longer keeps it alive (visitAggregate() only visits
// m_identifier in those states).
void StructureStubInfo::setCacheType(const ConcurrentJSLockerBase&, CacheType newCacheType)
{
    switch (m_cacheType) {
    case CacheType::Unset:
    case CacheType::ArrayLength:
    case CacheType::StringLength:
    case CacheType::Stub:
        break;
    case CacheType::PutByIdReplace:
    case CacheType::InByIdSelf:
    case CacheType::GetByIdSelf:
        // These states cached an identifier; release it on state change.
        m_identifier = nullptr;
        break;
    }
    m_cacheType = newCacheType;
}
424  
#if ASSERT_ENABLED
// Debug-only invariant check, run on every addAccessCase().
void StructureStubInfo::checkConsistency()
{
    if (thisValueIsInThisGPR()) {
        // We currently use a union for both "thisGPR" and "propertyGPR". If this were
        // not the case, we'd need to take one of them out of the union.
        RELEASE_ASSERT(hasConstantIdentifier);
    }
}
#endif // ASSERT_ENABLED
435  
436  #endif // ENABLE(JIT)
437  
438  } // namespace JSC