// WasmLLIntGenerator.cpp
/*
 * Copyright (C) 2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "WasmLLIntGenerator.h"

#if ENABLE(WEBASSEMBLY)

#include "BytecodeGeneratorBaseInlines.h"
#include "BytecodeStructs.h"
#include "InstructionStream.h"
#include "JSCJSValueInlines.h"
#include "Label.h"
#include "WasmCallingConvention.h"
#include "WasmContextInlines.h"
#include "WasmFunctionCodeBlock.h"
#include "WasmFunctionParser.h"
#include "WasmGeneratorTraits.h"
#include <wtf/CompletionHandler.h>
#include <wtf/RefPtr.h>
#include <wtf/Variant.h>

namespace JSC { namespace Wasm {

// Visitor driven by FunctionParser<LLIntGenerator>: for each parsed Wasm
// construct it emits LLInt bytecode (WasmMov, WasmJmp, WasmEnter, ...) into a
// FunctionCodeBlock. Wasm stack values live in virtual registers; as an
// optimization, get_local results and constants are left "delayed" on the
// expression stack and only materialized into their slots when required (see
// checkConsistency() for the exact rules).
class LLIntGenerator : public BytecodeGeneratorBase<GeneratorTraits> {
public:
    using ExpressionType = VirtualRegister;

    struct ControlLoop {
        Ref<Label> m_body; // Branches to a loop target its body label (back edge).
    };

    struct ControlTopLevel {
    };

    struct ControlBlock {
    };

    struct ControlIf {
        Ref<Label> m_alternate; // Jump target for the else/end when the condition is false.
    };

    // One entry of the Wasm control stack: a variant over the four control
    // kinds plus the block signature, the value-stack size at entry, and the
    // continuation label jumped to on exit/branch.
    struct ControlType : public Variant<ControlLoop, ControlTopLevel, ControlBlock, ControlIf> {
        using Base = Variant<ControlLoop, ControlTopLevel, ControlBlock, ControlIf>;

        ControlType()
            : Base(ControlBlock { })
        {
        }

        static ControlType topLevel(BlockSignature signature, unsigned stackSize, RefPtr<Label>&& continuation)
        {
            return ControlType(signature, stackSize, WTFMove(continuation), ControlTopLevel { });
        }

        // Note: loop/block/if record the stack size *minus* the block's
        // arguments, i.e. the stack height below the values the block consumes.
        static ControlType loop(BlockSignature signature, unsigned stackSize, Ref<Label>&& body, RefPtr<Label>&& continuation)
        {
            return ControlType(signature, stackSize - signature->argumentCount(), WTFMove(continuation), ControlLoop { WTFMove(body) });
        }

        static ControlType block(BlockSignature signature, unsigned stackSize, RefPtr<Label>&& continuation)
        {
            return ControlType(signature, stackSize - signature->argumentCount(), WTFMove(continuation), ControlBlock { });
        }

        static ControlType if_(BlockSignature signature, unsigned stackSize, Ref<Label>&& alternate, RefPtr<Label>&& continuation)
        {
            return ControlType(signature, stackSize - signature->argumentCount(), WTFMove(continuation), ControlIf { WTFMove(alternate) });
        }

        static bool isIf(const ControlType& control) { return WTF::holds_alternative<ControlIf>(control); }
        static bool isTopLevel(const ControlType& control) { return WTF::holds_alternative<ControlTopLevel>(control); }

        unsigned stackSize() const { return m_stackSize; }
        BlockSignature signature() const { return m_signature; }

        // Branches to a loop go to its body (back edge); all other control
        // kinds branch forward to their continuation.
        RefPtr<Label> targetLabelForBranch() const
        {
            if (WTF::holds_alternative<ControlLoop>(*this))
                return WTF::get<ControlLoop>(*this).m_body.ptr();
            return m_continuation;
        }

        // Number of values a branch to this target carries: a loop's back edge
        // takes the block arguments, every other target takes the results.
        SignatureArgCount branchTargetArity() const
        {
            if (WTF::holds_alternative<ControlLoop>(*this))
                return m_signature->argumentCount();
            return m_signature->returnCount();
        }

        Type branchTargetType(unsigned i) const
        {
            ASSERT(i < branchTargetArity());
            if (WTF::holds_alternative<ControlLoop>(*this))
                return m_signature->argument(i);
            return m_signature->returnType(i);
        }

        BlockSignature m_signature;
        unsigned m_stackSize;
        RefPtr<Label> m_continuation;

    private:
        template<typename T>
        ControlType(BlockSignature signature, unsigned stackSize, RefPtr<Label>&& continuation, T&& t)
            : Base(WTFMove(t))
            , m_signature(signature)
            , m_stackSize(stackSize)
            , m_continuation(WTFMove(continuation))
        {
        }
    };

    using ErrorType = String;
    using PartialResult = Expected<void, ErrorType>;
    using UnexpectedResult = Unexpected<ErrorType>;

    using ControlEntry = FunctionParser<LLIntGenerator>::ControlEntry;
    using ControlStack = FunctionParser<LLIntGenerator>::ControlStack;
    using ResultList = FunctionParser<LLIntGenerator>::ResultList;
    using Stack = FunctionParser<LLIntGenerator>::Stack;
    using TypedExpression = FunctionParser<LLIntGenerator>::TypedExpression;

    static ExpressionType emptyExpression() { return { }; };

    // Builds a compile-error result; never inlined to keep failure paths cold.
    template <typename ...Args>
    NEVER_INLINE UnexpectedResult WARN_UNUSED_RETURN fail(Args... args) const
    {
        using namespace FailureHelper; // See ADL comment in WasmParser.h.
        return UnexpectedResult(makeString("WebAssembly.Module failed compiling: "_s, makeString(args)...));
    }

    LLIntGenerator(const ModuleInformation&, unsigned functionIndex, const Signature&);

    std::unique_ptr<FunctionCodeBlock> finalize();

    // Moves `values` into `destinations` (tail-aligned: extra leading values
    // are ignored), used to funnel block results into their canonical slots.
    template<typename ExpressionListA, typename ExpressionListB>
    void unifyValuesWithBlock(const ExpressionListA& destinations, const ExpressionListB& values)
    {
        ASSERT(destinations.size() <= values.size());
        auto offset = values.size() - destinations.size();
        for (size_t i = 0; i < destinations.size(); ++i)
            WasmMov::emit(this, destinations[i], values[offset + i]);
    }

    enum NoConsistencyCheckTag { NoConsistencyCheck };

    // Grows the value stack by one and returns the virtual register for the
    // new top-of-stack slot; also tracks the high-water mark for finalize().
    ExpressionType push(NoConsistencyCheckTag)
    {
        m_maxStackSize = std::max(m_maxStackSize, ++m_stackSize);
        return virtualRegisterForLocal((m_stackSize - 1).unsafeGet());
    }

    ExpressionType push()
    {
        checkConsistency();
        return push(NoConsistencyCheck);
    }

    void didPopValueFromStack() { --m_stackSize; }

    PartialResult WARN_UNUSED_RETURN addArguments(const Signature&);
    PartialResult WARN_UNUSED_RETURN addLocal(Type, uint32_t);
    ExpressionType addConstant(Type, int64_t);

    // References
    PartialResult WARN_UNUSED_RETURN addRefIsNull(ExpressionType value, ExpressionType& result);
    PartialResult WARN_UNUSED_RETURN addRefFunc(uint32_t index, ExpressionType& result);

    // Tables
    PartialResult WARN_UNUSED_RETURN addTableGet(unsigned, ExpressionType index, ExpressionType& result);
    PartialResult WARN_UNUSED_RETURN addTableSet(unsigned, ExpressionType index, ExpressionType value);
    PartialResult WARN_UNUSED_RETURN addTableInit(unsigned, unsigned, ExpressionType dstOffset, ExpressionType srcOffset, ExpressionType length);
    PartialResult WARN_UNUSED_RETURN addElemDrop(unsigned);
    PartialResult WARN_UNUSED_RETURN addTableSize(unsigned, ExpressionType& result);
    PartialResult WARN_UNUSED_RETURN addTableGrow(unsigned, ExpressionType fill, ExpressionType delta, ExpressionType& result);
    PartialResult WARN_UNUSED_RETURN addTableFill(unsigned, ExpressionType offset, ExpressionType fill, ExpressionType count);
    PartialResult WARN_UNUSED_RETURN addTableCopy(unsigned, unsigned, ExpressionType dstOffset, ExpressionType srcOffset, ExpressionType length);

    // Locals
    PartialResult WARN_UNUSED_RETURN getLocal(uint32_t index, ExpressionType& result);
    PartialResult WARN_UNUSED_RETURN setLocal(uint32_t index, ExpressionType value);

    // Globals
    PartialResult WARN_UNUSED_RETURN getGlobal(uint32_t index, ExpressionType& result);
    PartialResult WARN_UNUSED_RETURN setGlobal(uint32_t index, ExpressionType value);

    // Memory
    PartialResult WARN_UNUSED_RETURN load(LoadOpType, ExpressionType pointer, ExpressionType& result, uint32_t offset);
    PartialResult WARN_UNUSED_RETURN store(StoreOpType, ExpressionType pointer, ExpressionType value, uint32_t offset);
    PartialResult WARN_UNUSED_RETURN addGrowMemory(ExpressionType delta, ExpressionType& result);
    PartialResult WARN_UNUSED_RETURN addCurrentMemory(ExpressionType& result);
    PartialResult WARN_UNUSED_RETURN addMemoryFill(ExpressionType dstAddress, ExpressionType targetValue, ExpressionType count);
    PartialResult WARN_UNUSED_RETURN addMemoryCopy(ExpressionType dstAddress, ExpressionType srcAddress, ExpressionType count);
    PartialResult WARN_UNUSED_RETURN addMemoryInit(unsigned, ExpressionType dstAddress, ExpressionType srcAddress, ExpressionType length);
    PartialResult WARN_UNUSED_RETURN addDataDrop(unsigned);

    // Atomics
    PartialResult WARN_UNUSED_RETURN atomicLoad(ExtAtomicOpType, Type, ExpressionType pointer, ExpressionType& result, uint32_t offset);
    PartialResult WARN_UNUSED_RETURN atomicStore(ExtAtomicOpType, Type, ExpressionType pointer, ExpressionType value, uint32_t offset);
    PartialResult WARN_UNUSED_RETURN atomicBinaryRMW(ExtAtomicOpType, Type, ExpressionType pointer, ExpressionType value, ExpressionType& result, uint32_t offset);
    PartialResult WARN_UNUSED_RETURN atomicCompareExchange(ExtAtomicOpType, Type, ExpressionType pointer, ExpressionType expected, ExpressionType value, ExpressionType& result, uint32_t offset);
    PartialResult WARN_UNUSED_RETURN atomicWait(ExtAtomicOpType, ExpressionType pointer, ExpressionType value, ExpressionType timeout, ExpressionType& result, uint32_t offset);
    PartialResult WARN_UNUSED_RETURN atomicNotify(ExtAtomicOpType, ExpressionType pointer, ExpressionType value, ExpressionType& result, uint32_t offset);
    PartialResult WARN_UNUSED_RETURN atomicFence(ExtAtomicOpType, uint8_t flags);

    // Basic operators
    template<OpType>
    PartialResult WARN_UNUSED_RETURN addOp(ExpressionType arg, ExpressionType& result);
    template<OpType>
    PartialResult WARN_UNUSED_RETURN addOp(ExpressionType left, ExpressionType right, ExpressionType& result);
    PartialResult WARN_UNUSED_RETURN addSelect(ExpressionType condition, ExpressionType nonZero, ExpressionType zero, ExpressionType& result);

    // Control flow
    ControlType WARN_UNUSED_RETURN addTopLevel(BlockSignature);
    PartialResult WARN_UNUSED_RETURN addBlock(BlockSignature, Stack& enclosingStack, ControlType& newBlock, Stack& newStack);
    PartialResult WARN_UNUSED_RETURN addLoop(BlockSignature, Stack& enclosingStack, ControlType& block, Stack& newStack, uint32_t loopIndex);
    PartialResult WARN_UNUSED_RETURN addIf(ExpressionType condition, BlockSignature, Stack& enclosingStack, ControlType& result, Stack& newStack);
    PartialResult WARN_UNUSED_RETURN addElse(ControlType&, Stack&);
    PartialResult WARN_UNUSED_RETURN addElseToUnreachable(ControlType&);

    PartialResult WARN_UNUSED_RETURN addReturn(const ControlType&, Stack& returnValues);
    PartialResult WARN_UNUSED_RETURN addBranch(ControlType&, ExpressionType condition, Stack& returnValues);
    PartialResult WARN_UNUSED_RETURN addSwitch(ExpressionType condition, const Vector<ControlType*>& targets, ControlType& defaultTargets, Stack& expressionStack);
    PartialResult WARN_UNUSED_RETURN endBlock(ControlEntry&, Stack& expressionStack);
    PartialResult WARN_UNUSED_RETURN addEndToUnreachable(ControlEntry&, const Stack& expressionStack = { }, bool unreachable = true);
    PartialResult WARN_UNUSED_RETURN endTopLevel(BlockSignature, const Stack&);

    // Calls
    PartialResult WARN_UNUSED_RETURN addCall(uint32_t calleeIndex, const Signature&, Vector<ExpressionType>& args, ResultList& results);
    PartialResult WARN_UNUSED_RETURN addCallIndirect(unsigned tableIndex, const Signature&, Vector<ExpressionType>& args, ResultList& results);
    PartialResult WARN_UNUSED_RETURN addUnreachable();

    void didFinishParsingLocals();

    void setParser(FunctionParser<LLIntGenerator>* parser) { m_parser = parser; };

    // We need this for autogenerated templates used by JS bytecodes.
    void setUsesCheckpoints() const { UNREACHABLE_FOR_PLATFORM(); }

    void dump(const ControlStack&, const Stack*) { }

private:
    friend GenericLabel<Wasm::GeneratorTraits>;

    // Caller-side call-site layout: where arguments go, how many spill to the
    // stack, and a callback that copies the callee's results onto our stack.
    struct LLIntCallInformation {
        unsigned stackOffset;
        unsigned numberOfStackArguments;
        ResultList arguments;
        CompletionHandler<void(ResultList&)> commitResults;
    };

    LLIntCallInformation callInformationForCaller(const Signature&);
    Vector<VirtualRegister, 2> callInformationForCallee(const Signature&);
    void linkSwitchTargets(Label&, unsigned location);

    // Maps a Wasm local index to its virtual register: arguments use their
    // normalized registers; other locals live after the register-argument and
    // callee-save area.
    VirtualRegister virtualRegisterForWasmLocal(uint32_t index)
    {
        if (index < m_codeBlock->m_numArguments)
            return m_normalizedArguments[index];

        const auto& callingConvention = wasmCallingConvention();
        const uint32_t gprCount = callingConvention.gprArgs.size();
        const uint32_t fprCount = callingConvention.fprArgs.size();
        return virtualRegisterForLocal(index - m_codeBlock->m_numArguments + gprCount + fprCount + numberOfLLIntCalleeSaveRegisters);
    }

    // Lazily-created shared constant register holding jsNull().
    ExpressionType jsNullConstant()
    {
        if (UNLIKELY(!m_jsNullConstant.isValid())) {
            m_jsNullConstant = VirtualRegister(FirstConstantRegisterIndex + m_codeBlock->m_constants.size());
            m_codeBlock->m_constants.append(JSValue::encode(jsNull()));
            if (UNLIKELY(Options::dumpGeneratedWasmBytecodes()))
                m_codeBlock->m_constantTypes.append(Type::Externref);
        }
        return m_jsNullConstant;
    }

    // Lazily-created shared constant register holding 0.
    ExpressionType zeroConstant()
    {
        if (UNLIKELY(!m_zeroConstant.isValid())) {
            m_zeroConstant = VirtualRegister(FirstConstantRegisterIndex + m_codeBlock->m_constants.size());
            m_codeBlock->m_constants.append(0);
            if (UNLIKELY(Options::dumpGeneratedWasmBytecodes()))
                m_codeBlock->m_constantTypes.append(Type::I32);
        }
        return m_zeroConstant;
    }

    // Computes the drop/keep parameters for a branch to `target`: keep the
    // branch's arity worth of values, drop everything between them and the
    // target's entry stack height.
    void getDropKeepCount(const ControlType& target, unsigned& startOffset, unsigned& drop, unsigned& keep)
    {
        startOffset = target.stackSize() + 1;
        keep = target.branchTargetArity();
        drop = (m_stackSize - target.stackSize() - target.branchTargetArity()).unsafeGet();
    }

    void dropKeep(Stack& values, const ControlType& target, bool dropValues)
    {
        unsigned startOffset;
        unsigned keep;
        unsigned drop;

        getDropKeepCount(target, startOffset, drop, keep);

        if (dropValues)
            values.shrink(keep);

        if (!drop)
            return;

        if (keep)
            WasmDropKeep::emit(this, startOffset, drop, keep);
    }

    // Visits each stack entry along with the canonical register slot it should
    // occupy, from the bottom of the given window to the top.
    template<typename Functor>
    void walkExpressionStack(Stack& expressionStack, unsigned stackSize, const Functor& functor)
    {
        for (unsigned i = expressionStack.size(); i > 0; --i) {
            VirtualRegister slot = virtualRegisterForLocal(stackSize - i);
            functor(expressionStack[expressionStack.size() - i], slot);
        }
    }

    template<typename Functor>
    void walkExpressionStack(Stack& expressionStack, const Functor& functor)
    {
        walkExpressionStack(expressionStack, m_stackSize.unsafeGet(), functor);
    }

    template<typename Functor>
    void walkExpressionStack(ControlEntry& entry, const Functor& functor)
    {
        walkExpressionStack(entry.enclosedExpressionStack, entry.controlData.stackSize(), functor);
    }

    // Debug-only invariant check for the delayed-materialization scheme.
    void checkConsistency()
    {
#if ASSERT_ENABLED
        // The rules for locals and constants in the stack are:
        // 1) Locals have to be materialized whenever a control entry is pushed to the control stack (i.e. every time we splitStack)
        //    NOTE: This is a trade-off so that set_local does not have to walk up the control stack looking for delayed get_locals
        // 2) If the control entry is a loop, we also need to materialize constants in the newStack, since those slots will be written
        //    to from loop back edges
        // 3) Both locals and constants have to be materialized before branches, since multiple branches might share the same target,
        //    we can't make any assumptions about the stack state at that point, so we materialize the stack.
        for (ControlEntry& controlEntry : m_parser->controlStack()) {
            walkExpressionStack(controlEntry, [&](VirtualRegister expression, VirtualRegister slot) {
                ASSERT(expression == slot || expression.isConstant());
            });
        }
        walkExpressionStack(m_parser->expressionStack(), [&](VirtualRegister expression, VirtualRegister slot) {
            ASSERT(expression == slot || expression.isConstant() || expression.isArgument() || static_cast<unsigned>(expression.toLocal()) < m_codeBlock->m_numVars);
        });
#endif // ASSERT_ENABLED
    }

    // Emits moves so that every delayed constant/local on the stack actually
    // resides in its canonical slot (needed before branches and loop entries).
    void materializeConstantsAndLocals(Stack& expressionStack)
    {
        if (expressionStack.isEmpty())
            return;

        checkConsistency();
        walkExpressionStack(expressionStack, [&](TypedExpression& expression, VirtualRegister slot) {
            ASSERT(expression.value() == slot || expression.value().isConstant() || expression.value().isArgument() || static_cast<unsigned>(expression.value().toLocal()) < m_codeBlock->m_numVars);
            if (expression.value() == slot)
                return;
            WasmMov::emit(this, slot, expression);
            expression = TypedExpression { expression.type(), slot };
        });
        checkConsistency();
    }

    // Splits off the new block's arguments from the enclosing stack and
    // materializes delayed locals (constants may stay delayed) in what remains.
    void splitStack(BlockSignature signature, Stack& enclosingStack, Stack& newStack)
    {
        JSC::Wasm::splitStack(signature, enclosingStack, newStack);

        m_stackSize -= newStack.size();
        checkConsistency();
        walkExpressionStack(enclosingStack, [&](TypedExpression& expression, VirtualRegister slot) {
            ASSERT(expression.value() == slot || expression.value().isConstant() || expression.value().isArgument() || static_cast<unsigned>(expression.value().toLocal()) < m_codeBlock->m_numVars);
            if (expression.value() == slot || expression.value().isConstant())
                return;
            WasmMov::emit(this, slot, expression);
            expression = TypedExpression { expression.type(), slot };
        });
        checkConsistency();
        m_stackSize += newStack.size();
    }

    struct SwitchEntry {
        InstructionStream::Offset offset;
        int* jumpTarget;
    };

    struct ConstantMapHashTraits : WTF::GenericHashTraits<EncodedJSValue> {
        static constexpr bool emptyValueIsZero = true;
        static void constructDeletedValue(EncodedJSValue& slot) { slot = JSValue::encode(jsNull()); }
        static bool isDeletedValue(EncodedJSValue value) { return value == JSValue::encode(jsNull()); }
    };

    FunctionParser<LLIntGenerator>* m_parser { nullptr };
    const ModuleInformation& m_info;
    const unsigned m_functionIndex { UINT_MAX };
    Vector<VirtualRegister> m_normalizedArguments; // Per-argument canonical registers (set up by addArguments).
    HashMap<Label*, Vector<SwitchEntry>> m_switches;
    ExpressionType m_jsNullConstant;
    ExpressionType m_zeroConstant;
    ResultList m_unitializedLocals; // Ref-typed locals awaiting jsNull() initialization.
    HashMap<EncodedJSValue, VirtualRegister, WTF::IntHash<EncodedJSValue>, ConstantMapHashTraits> m_constantMap;
    Vector<VirtualRegister, 2> m_results; // Cached callee-side result locations.
    Checked<unsigned> m_stackSize { 0 };
    Checked<unsigned> m_maxStackSize { 0 };
};

// Entry point: parses one Wasm function body and compiles it to LLInt bytecode.
Expected<std::unique_ptr<FunctionCodeBlock>, String> parseAndCompileBytecode(const uint8_t* functionStart, size_t functionLength, const Signature& signature, const ModuleInformation& info, uint32_t functionIndex)
{
    LLIntGenerator llintGenerator(info, functionIndex, signature);
    FunctionParser<LLIntGenerator> parser(llintGenerator, functionStart, functionLength, signature, info);
    WASM_FAIL_IF_HELPER_FAILS(parser.parse());

    return llintGenerator.finalize();
}


// Per-thread instruction buffer, recycled across compilations to avoid
// reallocating the writer's backing store for every function.
using Buffer = InstructionStream::InstructionBuffer;
static ThreadSpecific<Buffer>* threadSpecificBufferPtr;

static ThreadSpecific<Buffer>& threadSpecificBuffer()
{
    static std::once_flag flag;
    std::call_once(
        flag,
        [] () {
            threadSpecificBufferPtr = new ThreadSpecific<Buffer>();
        });
    return *threadSpecificBufferPtr;
}

LLIntGenerator::LLIntGenerator(const ModuleInformation& info, unsigned functionIndex, const Signature&)
    : BytecodeGeneratorBase(makeUnique<FunctionCodeBlock>(functionIndex), 0)
    , m_info(info)
    , m_functionIndex(functionIndex)
{
    // Adopt this thread's recycled instruction buffer.
    {
        auto& threadSpecific = threadSpecificBuffer();
        Buffer buffer = WTFMove(*threadSpecific);
        *threadSpecific = Buffer();
        m_writer.setInstructionBuffer(WTFMove(buffer));
    }

    // The callee-save area occupies the bottom of the local/stack space.
    m_codeBlock->m_numVars = numberOfLLIntCalleeSaveRegisters;
    m_stackSize = numberOfLLIntCalleeSaveRegisters;
    m_maxStackSize = numberOfLLIntCalleeSaveRegisters;

    WasmEnter::emit(this);
}

std::unique_ptr<FunctionCodeBlock> LLIntGenerator::finalize()
{
    RELEASE_ASSERT(m_codeBlock);
    size_t numCalleeLocals = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), m_maxStackSize.unsafeGet());
    m_codeBlock->m_numCalleeLocals = numCalleeLocals;
    RELEASE_ASSERT(numCalleeLocals == m_codeBlock->m_numCalleeLocals);

    // Return the (emptied) buffer to this thread's cache for reuse; resize(0)
    // must keep the capacity, which the RELEASE_ASSERT verifies.
    auto& threadSpecific = threadSpecificBuffer();
    Buffer usedBuffer;
    m_codeBlock->setInstructions(m_writer.finalize(usedBuffer));
    size_t oldCapacity = usedBuffer.capacity();
    usedBuffer.resize(0);
    RELEASE_ASSERT(usedBuffer.capacity() == oldCapacity);
    *threadSpecific = WTFMove(usedBuffer);

    return WTFMove(m_codeBlock);
}

// Generated from wasm.json
#include "WasmLLIntGeneratorInlines.h"

auto LLIntGenerator::callInformationForCaller(const Signature& signature) -> LLIntCallInformation
{
    // This function sets up the stack layout for calls. The desired stack layout is:

    // FPRn
    // ...
    // FPR1
    // FPR0
    // ---
    // GPRn
    // ...
    // GPR1
    // GPR0
    // ----
    // stackN
    // ...
    // stack1
    // stack0
    // ---
    // call frame header

    // We need to allocate at least space for all GPRs and FPRs.
    // Return values use the same allocation layout.

    const auto initialStackSize = m_stackSize;

    const auto& callingConvention = wasmCallingConvention();
    const uint32_t gprCount = callingConvention.gprArgs.size();
    const uint32_t fprCount = callingConvention.fprArgs.size();

    uint32_t stackCount = 0;
    uint32_t gprIndex = 0;
    uint32_t fprIndex = 0;
    uint32_t stackIndex = 0;

    // First pass: count how many values overflow the register sets onto the
    // stack (stackCount is the max over the argument and return passes).
    auto allocateStackRegister = [&](Type type) {
        switch (type) {
        case Type::I32:
        case Type::I64:
        case Type::Externref:
        case Type::Funcref:
            if (gprIndex < gprCount)
                ++gprIndex;
            else if (stackIndex++ >= stackCount)
                ++stackCount;
            break;
        case Type::F32:
        case Type::F64:
            if (fprIndex < fprCount)
                ++fprIndex;
            else if (stackIndex++ >= stackCount)
                ++stackCount;
            break;
        case Void:
        case Func:
            RELEASE_ASSERT_NOT_REACHED();
        }
    };


    for (uint32_t i = 0; i < signature.argumentCount(); i++)
        allocateStackRegister(signature.argument(i));

    gprIndex = 0;
    fprIndex = 0;
    stackIndex = 0;
    for (uint32_t i = 0; i < signature.returnCount(); i++)
        allocateStackRegister(signature.returnType(i));

    // FIXME: we are allocating the extra space for the argument/return count in order to avoid interference, but we could do better
    // NOTE: We increase arg count by 1 for the case of indirect calls
    m_stackSize += std::max(signature.argumentCount() + 1, signature.returnCount()) + gprCount + fprCount + stackCount + CallFrame::headerSizeInRegisters;
    if (m_stackSize.unsafeGet() % stackAlignmentRegisters())
        ++m_stackSize;
    if (m_maxStackSize < m_stackSize)
        m_maxStackSize = m_stackSize;


    ResultList arguments(signature.argumentCount());
    ResultList temporaryResults(signature.returnCount());

    const unsigned stackOffset = m_stackSize.unsafeGet();
    const unsigned base = stackOffset - CallFrame::headerSizeInRegisters;

    const uint32_t gprLimit = base - stackCount - gprCount;
    const uint32_t fprLimit = gprLimit - fprCount;

    // Second pass: assign each argument its concrete slot (GPR area, FPR
    // area, or stack area), walking downward from `base`.
    stackIndex = base;
    gprIndex = base - stackCount;
    fprIndex = gprIndex - gprCount;
    for (uint32_t i = 0; i < signature.argumentCount(); i++) {
        switch (signature.argument(i)) {
        case Type::I32:
        case Type::I64:
        case Type::Externref:
        case Type::Funcref:
            if (gprIndex > gprLimit)
                arguments[i] = virtualRegisterForLocal(--gprIndex);
            else
                arguments[i] = virtualRegisterForLocal(--stackIndex);
            break;
        case Type::F32:
        case Type::F64:
            if (fprIndex > fprLimit)
                arguments[i] = virtualRegisterForLocal(--fprIndex);
            else
                arguments[i] = virtualRegisterForLocal(--stackIndex);
            break;
        case Void:
        case Func:
            RELEASE_ASSERT_NOT_REACHED();
        }
    }

    // Third pass: results reuse the same layout as arguments.
    stackIndex = base;
    gprIndex = base - stackCount;
    fprIndex = gprIndex - gprCount;
    for (uint32_t i = 0; i < signature.returnCount(); i++) {
        switch (signature.returnType(i)) {
        case Type::I32:
        case Type::I64:
        case Type::Externref:
        case Type::Funcref:
            if (gprIndex > gprLimit)
                temporaryResults[i] = virtualRegisterForLocal(--gprIndex);
            else
                temporaryResults[i] = virtualRegisterForLocal(--stackIndex);
            break;
        case Type::F32:
        case Type::F64:
            if (fprIndex > fprLimit)
                temporaryResults[i] = virtualRegisterForLocal(--fprIndex);
            else
                temporaryResults[i] = virtualRegisterForLocal(--stackIndex);
            break;
        case Void:
        case Func:
            RELEASE_ASSERT_NOT_REACHED();
        }
    }

    // The call-site reservation is transient; restore the logical stack size.
    m_stackSize = initialStackSize;

    // Invoked after the call: pushes each temporary result onto the value
    // stack and reports the pushed registers to the parser.
    auto commitResults = [this, temporaryResults = WTFMove(temporaryResults)](ResultList& results) {
        checkConsistency();
        for (auto temporaryResult : temporaryResults) {
            ExpressionType result = push(NoConsistencyCheck);
            WasmMov::emit(this, result, temporaryResult);
            results.append(result);
        }
    };

    return LLIntCallInformation { stackOffset, stackCount, WTFMove(arguments), WTFMove(commitResults) };
}

// Computes (and caches in m_results) where this function's own return values
// go: register results in the callee-save-adjacent locals, overflow in the
// caller-provided argument slots.
auto LLIntGenerator::callInformationForCallee(const Signature& signature) -> Vector<VirtualRegister, 2>
{
    if (m_results.size())
        return m_results;

    m_results.reserveInitialCapacity(signature.returnCount());

    const auto& callingConvention = wasmCallingConvention();
    const uint32_t gprCount = callingConvention.gprArgs.size();
    const uint32_t fprCount = callingConvention.fprArgs.size();

    uint32_t gprIndex = 0;
    uint32_t fprIndex = gprCount;
    uint32_t stackIndex = 0;
    const uint32_t maxGPRIndex = gprCount;
    const uint32_t maxFPRIndex = maxGPRIndex + fprCount;

    for (uint32_t i = 0; i < signature.returnCount(); i++) {
        switch (signature.returnType(i)) {
        case Type::I32:
        case Type::I64:
        case Type::Externref:
        case Type::Funcref:
            if (gprIndex < maxGPRIndex)
                m_results.append(virtualRegisterForLocal(numberOfLLIntCalleeSaveRegisters + gprIndex++));
            else
                m_results.append(virtualRegisterForArgumentIncludingThis(stackIndex++));
            break;
        case Type::F32:
        case Type::F64:
            if (fprIndex < maxFPRIndex)
                m_results.append(virtualRegisterForLocal(numberOfLLIntCalleeSaveRegisters + fprIndex++));
            else
                m_results.append(virtualRegisterForArgumentIncludingThis(stackIndex++));
            break;
        case Void:
        case Func:
            RELEASE_ASSERT_NOT_REACHED();
        }
    }

    return m_results;
}

// Normalizes incoming arguments: register-passed arguments get local slots
// pushed for them, stack-passed arguments are referenced in place.
auto LLIntGenerator::addArguments(const Signature& signature) -> PartialResult
{
    checkConsistency();

    m_codeBlock->m_numArguments = signature.argumentCount();
    m_normalizedArguments.resize(m_codeBlock->m_numArguments);

    const auto& callingConvention = wasmCallingConvention();
    const uint32_t gprCount = callingConvention.gprArgs.size();
    const uint32_t fprCount = callingConvention.fprArgs.size();
    const uint32_t maxGPRIndex = gprCount;
    const uint32_t maxFPRIndex = gprCount + fprCount;
    uint32_t gprIndex = 0;
    uint32_t fprIndex = maxGPRIndex;
    uint32_t stackIndex = 0;

    // Reserve one local slot per (potential) register argument.
    Vector<VirtualRegister> registerArguments(gprCount + fprCount);
    for (uint32_t i = 0; i < gprCount + fprCount; i++)
        registerArguments[i] = push(NoConsistencyCheck);

    const auto addArgument = [&](uint32_t index, uint32_t& count, uint32_t max) {
        if (count < max)
            m_normalizedArguments[index] = registerArguments[count++];
        else
            m_normalizedArguments[index] = virtualRegisterForArgumentIncludingThis(stackIndex++);
    };

    for (uint32_t i = 0; i < signature.argumentCount(); i++) {
        switch (signature.argument(i)) {
        case Type::I32:
        case Type::I64:
        case Type::Externref:
        case Type::Funcref:
            addArgument(i, gprIndex, maxGPRIndex);
            break;
        case Type::F32:
        case Type::F64:
            addArgument(i, fprIndex, maxFPRIndex);
            break;
        case Void:
        case Func:
            RELEASE_ASSERT_NOT_REACHED();
        }
    }

    m_codeBlock->m_numVars += gprCount + fprCount;

    return { };
}

// Reserves stack slots for `count` locals of `type`. Ref-typed locals are
// remembered so didFinishParsingLocals() can initialize them to null.
auto LLIntGenerator::addLocal(Type type, uint32_t count) -> PartialResult
{
    checkConsistency();

    m_codeBlock->m_numVars += count;
    switch (type) {
    case Type::Externref:
    case Type::Funcref:
        while (count--)
            m_unitializedLocals.append(push(NoConsistencyCheck));
        break;
    default:
        m_stackSize += count;
        break;
    }
    return { };
}

void LLIntGenerator::didFinishParsingLocals()
{
    if (m_unitializedLocals.isEmpty())
        return;

    // Ref-typed locals must start as null, not garbage.
    auto null = jsNullConstant();
    for (auto local : m_unitializedLocals)
        WasmMov::emit(this, local, null);
    m_unitializedLocals.clear();
}

// Pushes a stack slot but returns the (possibly shared) constant register;
// materialization into the slot is deferred (see checkConsistency()).
auto LLIntGenerator::addConstant(Type type, int64_t value) -> ExpressionType
{
    auto constant = [&] {
        if (!value)
            return zeroConstant();

        if (value == JSValue::encode(jsNull()))
            return jsNullConstant();

        // Deduplicate constants through m_constantMap.
        VirtualRegister source(FirstConstantRegisterIndex + m_codeBlock->m_constants.size());
        auto result = m_constantMap.add(value, source);
        if (!result.isNewEntry)
            return result.iterator->value;
        m_codeBlock->m_constants.append(value);
        if (UNLIKELY(Options::dumpGeneratedWasmBytecodes()))
            m_codeBlock->m_constantTypes.append(type);
        return source;
    };
    // leave a hole if we need to materialize the constant
    push();
    return constant();
}

auto LLIntGenerator::getLocal(uint32_t index, ExpressionType& result) -> PartialResult
{
    // leave a hole if we need to materialize the local
    push();
    result = virtualRegisterForWasmLocal(index);
    return { };
}

auto LLIntGenerator::setLocal(uint32_t index, ExpressionType value) -> PartialResult
{
    VirtualRegister target = virtualRegisterForWasmLocal(index);

    // If this local is currently on the stack we need to materialize it, otherwise it'll see the new value instead of the old one
    walkExpressionStack(m_parser->expressionStack(), [&](TypedExpression& expression, VirtualRegister slot) {
        if (expression.value() != target)
            return;
        WasmMov::emit(this, slot, expression);
        expression = TypedExpression { expression.type(), slot };
    });

    WasmMov::emit(this, target, value);

    return { };
}

auto LLIntGenerator::getGlobal(uint32_t index, ExpressionType& result) -> PartialResult
{
    const Wasm::GlobalInformation& global = m_info.globals[index];
    result = push();
    switch (global.bindingMode) {
    case Wasm::GlobalInformation::BindingMode::EmbeddedInInstance:
        WasmGetGlobal::emit(this, result, index);
        break;
    case Wasm::GlobalInformation::BindingMode::Portable:
        WasmGetGlobalPortableBinding::emit(this, result, index);
        break;
    }
    return { };
}

auto LLIntGenerator::setGlobal(uint32_t index, ExpressionType value) -> PartialResult
{
    const Wasm::GlobalInformation& global = m_info.globals[index];
    Type type = global.type;
    // Ref-typed globals use the write-barriered opcode variants.
    switch (global.bindingMode) {
    case Wasm::GlobalInformation::BindingMode::EmbeddedInInstance:
        if (isRefType(type))
            WasmSetGlobalRef::emit(this, index, value);
        else
            WasmSetGlobal::emit(this, index, value);
        break;
    case Wasm::GlobalInformation::BindingMode::Portable:
        if (isRefType(type))
            WasmSetGlobalRefPortableBinding::emit(this, index, value);
        else
            WasmSetGlobalPortableBinding::emit(this, index, value);
        break;
    }
    return { };
}

auto LLIntGenerator::addLoop(BlockSignature signature, Stack& enclosingStack, ControlType& block, Stack& newStack, uint32_t loopIndex) -> PartialResult
{
    // Loop slots are written from back edges, so everything must be
    // materialized before the body label is emitted.
    splitStack(signature, enclosingStack, newStack);
    materializeConstantsAndLocals(newStack);

    Ref<Label> body = newEmittedLabel();
    Ref<Label> continuation = newLabel();

    block = ControlType::loop(signature, m_stackSize.unsafeGet(), WTFMove(body), WTFMove(continuation));

    // Record the live registers (arguments, locals, and every enclosing
    // expression stack) for OSR entry into this loop.
    Vector<VirtualRegister> osrEntryData;
    for (uint32_t i = 0; i < m_codeBlock->m_numArguments; i++)
        osrEntryData.append(m_normalizedArguments[i]);

    const auto& callingConvention = wasmCallingConvention();
    const uint32_t gprCount = callingConvention.gprArgs.size();
    const uint32_t fprCount = callingConvention.fprArgs.size();
    for (uint32_t i = gprCount + fprCount + numberOfLLIntCalleeSaveRegisters; i < m_codeBlock->m_numVars; i++)
        osrEntryData.append(virtualRegisterForLocal(i));
    for (unsigned controlIndex = 0; controlIndex < m_parser->controlStack().size(); ++controlIndex) {
        Stack& expressionStack = m_parser->controlStack()[controlIndex].enclosedExpressionStack;
        for (TypedExpression expression : expressionStack)
            osrEntryData.append(expression);
    }
    for (TypedExpression expression : enclosingStack)
        osrEntryData.append(expression);

    WasmLoopHint::emit(this);

    m_codeBlock->tierUpCounter().addOSREntryDataForLoop(m_lastInstruction.offset(), { loopIndex, WTFMove(osrEntryData) });

    return { };
}

auto LLIntGenerator::addTopLevel(BlockSignature signature) -> ControlType
{
    return ControlType::topLevel(signature, m_stackSize.unsafeGet(), newLabel());
}

auto LLIntGenerator::addBlock(BlockSignature signature, Stack& enclosingStack, ControlType& newBlock, Stack& newStack) -> PartialResult
{
    splitStack(signature, enclosingStack, newStack);
    newBlock = ControlType::block(signature, m_stackSize.unsafeGet(), newLabel());
    return { };
}

auto LLIntGenerator::addIf(ExpressionType condition, BlockSignature signature, Stack& enclosingStack, ControlType& result, Stack& newStack) -> PartialResult
{
    Ref<Label> alternate = newLabel();
    Ref<Label> continuation = newLabel();

    splitStack(signature, enclosingStack, newStack);

    // Fall through into the then-branch; jump to `alternate` when false.
    WasmJfalse::emit(this, condition, alternate->bind(this));

    result = ControlType::if_(signature, m_stackSize.unsafeGet(), WTFMove(alternate), WTFMove(continuation));
    return { };
}

auto LLIntGenerator::addElse(ControlType& data, Stack& expressionStack) -> PartialResult
{
    ASSERT(WTF::holds_alternative<ControlIf>(data));
    // End of the then-branch: flush its results and skip over the else-branch.
    materializeConstantsAndLocals(expressionStack);
    WasmJmp::emit(this, data.m_continuation->bind(this));
    return addElseToUnreachable(data);
}

auto LLIntGenerator::addElseToUnreachable(ControlType& data) -> PartialResult
{
    m_stackSize = data.stackSize() + data.m_signature->argumentCount();

    ControlIf& control = WTF::get<ControlIf>(data);
emitLabel(control.m_alternate.get()); 935 data = ControlType::block(data.m_signature, m_stackSize.unsafeGet(), WTFMove(data.m_continuation)); 936 return { }; 937 } 938 939 auto LLIntGenerator::addReturn(const ControlType& data, Stack& returnValues) -> PartialResult 940 { 941 if (!data.m_signature->returnCount()) { 942 WasmRetVoid::emit(this); 943 return { }; 944 } 945 946 // no need to drop keep here, since we have to move anyway 947 unifyValuesWithBlock(callInformationForCallee(*data.m_signature), returnValues); 948 WasmRet::emit(this); 949 950 return { }; 951 } 952 953 auto LLIntGenerator::addBranch(ControlType& data, ExpressionType condition, Stack& returnValues) -> PartialResult 954 { 955 RefPtr<Label> target = data.targetLabelForBranch(); 956 RefPtr<Label> skip = nullptr; 957 958 materializeConstantsAndLocals(returnValues); 959 960 if (condition.isValid()) { 961 skip = newLabel(); 962 WasmJfalse::emit(this, condition, skip->bind(this)); 963 } 964 965 dropKeep(returnValues, data, !skip); 966 WasmJmp::emit(this, target->bind(this)); 967 968 if (skip) 969 emitLabel(*skip); 970 971 return { }; 972 } 973 974 auto LLIntGenerator::addSwitch(ExpressionType condition, const Vector<ControlType*>& targets, ControlType& defaultTarget, Stack& expressionStack) -> PartialResult 975 { 976 materializeConstantsAndLocals(expressionStack); 977 978 unsigned tableIndex = m_codeBlock->numberOfJumpTables(); 979 FunctionCodeBlock::JumpTable& jumpTable = m_codeBlock->addJumpTable(targets.size() + 1); 980 981 WasmSwitch::emit(this, condition, tableIndex); 982 983 unsigned index = 0; 984 InstructionStream::Offset offset = m_lastInstruction.offset(); 985 986 auto addTarget = [&](ControlType& target) { 987 RefPtr<Label> targetLabel = target.targetLabelForBranch(); 988 989 getDropKeepCount(target, jumpTable[index].startOffset, jumpTable[index].dropCount, jumpTable[index].keepCount); 990 991 if (targetLabel->isForward()) { 992 auto result = m_switches.add(targetLabel.get(), 
Vector<SwitchEntry>()); 993 ASSERT(!jumpTable[index].target); 994 result.iterator->value.append(SwitchEntry { offset, &jumpTable[index++].target }); 995 } else { 996 int jumpTarget = targetLabel->location() - offset; 997 ASSERT(jumpTarget); 998 jumpTable[index++].target = jumpTarget; 999 } 1000 }; 1001 1002 for (const auto& target : targets) 1003 addTarget(*target); 1004 addTarget(defaultTarget); 1005 1006 return { }; 1007 } 1008 1009 auto LLIntGenerator::endBlock(ControlEntry& entry, Stack& expressionStack) -> PartialResult 1010 { 1011 // FIXME: We only need to materialize constants here if there exists a jump to this label 1012 // https://bugs.webkit.org/show_bug.cgi?id=203657 1013 materializeConstantsAndLocals(expressionStack); 1014 return addEndToUnreachable(entry, expressionStack, false); 1015 } 1016 1017 1018 auto LLIntGenerator::addEndToUnreachable(ControlEntry& entry, const Stack& expressionStack, bool unreachable) -> PartialResult 1019 { 1020 ControlType& data = entry.controlData; 1021 1022 RELEASE_ASSERT(unreachable || m_stackSize == data.stackSize() + data.m_signature->returnCount()); 1023 1024 m_stackSize = data.stackSize(); 1025 1026 for (unsigned i = 0; i < data.m_signature->returnCount(); ++i) { 1027 // We don't want to do a consistency check here because we just reset the stack size 1028 // are pushing new values, while we already have the same values in the stack. 1029 // The only reason we do things this way is so that it also works for unreachable blocks, 1030 // since they might not have the right number of values in the expression stack. 1031 // Instead, we do a stricter consistency check below. 
1032 auto tmp = push(NoConsistencyCheck); 1033 ASSERT(unreachable || tmp == expressionStack[i].value()); 1034 if (unreachable) 1035 entry.enclosedExpressionStack.constructAndAppend(data.m_signature->returnType(i), tmp); 1036 else 1037 entry.enclosedExpressionStack.append(expressionStack[i]); 1038 } 1039 1040 if (m_lastOpcodeID == wasm_jmp && data.m_continuation->unresolvedJumps().size() == 1 && data.m_continuation->unresolvedJumps()[0] == static_cast<int>(m_lastInstruction.offset())) { 1041 linkSwitchTargets(*data.m_continuation, m_lastInstruction.offset()); 1042 m_lastOpcodeID = wasm_unreachable; 1043 m_writer.rewind(m_lastInstruction); 1044 } else 1045 emitLabel(*data.m_continuation); 1046 1047 return { }; 1048 } 1049 1050 auto LLIntGenerator::endTopLevel(BlockSignature signature, const Stack& expressionStack) -> PartialResult 1051 { 1052 RELEASE_ASSERT(expressionStack.size() == signature->returnCount()); 1053 1054 if (!signature->returnCount()) { 1055 WasmRetVoid::emit(this); 1056 return { }; 1057 } 1058 1059 checkConsistency(); 1060 unifyValuesWithBlock(callInformationForCallee(*signature), expressionStack); 1061 WasmRet::emit(this); 1062 1063 return { }; 1064 } 1065 1066 auto LLIntGenerator::addCall(uint32_t functionIndex, const Signature& signature, Vector<ExpressionType>& args, ResultList& results) -> PartialResult 1067 { 1068 ASSERT(signature.argumentCount() == args.size()); 1069 LLIntCallInformation info = callInformationForCaller(signature); 1070 unifyValuesWithBlock(info.arguments, args); 1071 if (Context::useFastTLS()) 1072 WasmCall::emit(this, functionIndex, info.stackOffset, info.numberOfStackArguments); 1073 else 1074 WasmCallNoTls::emit(this, functionIndex, info.stackOffset, info.numberOfStackArguments); 1075 info.commitResults(results); 1076 1077 return { }; 1078 } 1079 1080 auto LLIntGenerator::addCallIndirect(unsigned tableIndex, const Signature& signature, Vector<ExpressionType>& args, ResultList& results) -> PartialResult 1081 { 1082 
ExpressionType calleeIndex = args.takeLast(); 1083 1084 ASSERT(signature.argumentCount() == args.size()); 1085 ASSERT(m_info.tableCount() > tableIndex); 1086 ASSERT(m_info.tables[tableIndex].type() == TableElementType::Funcref); 1087 1088 LLIntCallInformation info = callInformationForCaller(signature); 1089 unifyValuesWithBlock(info.arguments, args); 1090 if (Context::useFastTLS()) 1091 WasmCallIndirect::emit(this, calleeIndex, m_codeBlock->addSignature(signature), info.stackOffset, info.numberOfStackArguments, tableIndex); 1092 else 1093 WasmCallIndirectNoTls::emit(this, calleeIndex, m_codeBlock->addSignature(signature), info.stackOffset, info.numberOfStackArguments, tableIndex); 1094 info.commitResults(results); 1095 1096 return { }; 1097 } 1098 1099 auto LLIntGenerator::addRefIsNull(ExpressionType value, ExpressionType& result) -> PartialResult 1100 { 1101 result = push(); 1102 WasmRefIsNull::emit(this, result, value); 1103 1104 return { }; 1105 } 1106 1107 auto LLIntGenerator::addRefFunc(uint32_t index, ExpressionType& result) -> PartialResult 1108 { 1109 result = push(); 1110 WasmRefFunc::emit(this, result, index); 1111 1112 return { }; 1113 } 1114 1115 auto LLIntGenerator::addTableGet(unsigned tableIndex, ExpressionType index, ExpressionType& result) -> PartialResult 1116 { 1117 result = push(); 1118 WasmTableGet::emit(this, result, index, tableIndex); 1119 1120 return { }; 1121 } 1122 1123 auto LLIntGenerator::addTableSet(unsigned tableIndex, ExpressionType index, ExpressionType value) -> PartialResult 1124 { 1125 WasmTableSet::emit(this, index, value, tableIndex); 1126 1127 return { }; 1128 } 1129 1130 auto LLIntGenerator::addTableInit(unsigned elementIndex, unsigned tableIndex, ExpressionType dstOffset, ExpressionType srcOffset, ExpressionType length) -> PartialResult 1131 { 1132 WasmTableInit::emit(this, dstOffset, srcOffset, length, elementIndex, tableIndex); 1133 1134 return { }; 1135 } 1136 1137 auto LLIntGenerator::addElemDrop(unsigned elementIndex) 
-> PartialResult 1138 { 1139 WasmElemDrop::emit(this, elementIndex); 1140 1141 return { }; 1142 } 1143 1144 auto LLIntGenerator::addTableSize(unsigned tableIndex, ExpressionType& result) -> PartialResult 1145 { 1146 result = push(); 1147 WasmTableSize::emit(this, result, tableIndex); 1148 1149 return { }; 1150 } 1151 1152 auto LLIntGenerator::addTableGrow(unsigned tableIndex, ExpressionType fill, ExpressionType delta, ExpressionType& result) -> PartialResult 1153 { 1154 result = push(); 1155 WasmTableGrow::emit(this, result, fill, delta, tableIndex); 1156 1157 return { }; 1158 } 1159 1160 auto LLIntGenerator::addTableFill(unsigned tableIndex, ExpressionType offset, ExpressionType fill, ExpressionType count) -> PartialResult 1161 { 1162 WasmTableFill::emit(this, offset, fill, count, tableIndex); 1163 1164 return { }; 1165 } 1166 1167 auto LLIntGenerator::addTableCopy(unsigned dstTableIndex, unsigned srcTableIndex, ExpressionType dstOffset, ExpressionType srcOffset, ExpressionType length) -> PartialResult 1168 { 1169 WasmTableCopy::emit(this, dstOffset, srcOffset, length, dstTableIndex, srcTableIndex); 1170 return { }; 1171 } 1172 1173 auto LLIntGenerator::addUnreachable() -> PartialResult 1174 { 1175 WasmUnreachable::emit(this); 1176 1177 return { }; 1178 } 1179 1180 auto LLIntGenerator::addCurrentMemory(ExpressionType& result) -> PartialResult 1181 { 1182 result = push(); 1183 WasmCurrentMemory::emit(this, result); 1184 1185 return { }; 1186 } 1187 1188 auto LLIntGenerator::addMemoryInit(unsigned dataSegmentIndex, ExpressionType dstAddress, ExpressionType srcAddress, ExpressionType length) -> PartialResult 1189 { 1190 WasmMemoryInit::emit(this, dstAddress, srcAddress, length, dataSegmentIndex); 1191 1192 return { }; 1193 } 1194 1195 auto LLIntGenerator::addDataDrop(unsigned dataSegmentIndex) -> PartialResult 1196 { 1197 WasmDataDrop::emit(this, dataSegmentIndex); 1198 1199 return { }; 1200 } 1201 1202 auto LLIntGenerator::addGrowMemory(ExpressionType delta, 
ExpressionType& result) -> PartialResult 1203 { 1204 result = push(); 1205 WasmGrowMemory::emit(this, result, delta); 1206 1207 return { }; 1208 } 1209 1210 auto LLIntGenerator::addMemoryFill(ExpressionType dstAddress, ExpressionType targetValue, ExpressionType count) -> PartialResult 1211 { 1212 WasmMemoryFill::emit(this, dstAddress, targetValue, count); 1213 return { }; 1214 } 1215 1216 auto LLIntGenerator::addMemoryCopy(ExpressionType dstAddress, ExpressionType srcAddress, ExpressionType count) -> PartialResult 1217 { 1218 WasmMemoryCopy::emit(this, dstAddress, srcAddress, count); 1219 return { }; 1220 } 1221 1222 auto LLIntGenerator::addSelect(ExpressionType condition, ExpressionType nonZero, ExpressionType zero, ExpressionType& result) -> PartialResult 1223 { 1224 result = push(); 1225 WasmSelect::emit(this, result, condition, nonZero, zero); 1226 1227 return { }; 1228 } 1229 1230 auto LLIntGenerator::load(LoadOpType op, ExpressionType pointer, ExpressionType& result, uint32_t offset) -> PartialResult 1231 { 1232 result = push(); 1233 switch (op) { 1234 case LoadOpType::I32Load8S: 1235 WasmI32Load8S::emit(this, result, pointer, offset); 1236 break; 1237 1238 case LoadOpType::I64Load8S: 1239 WasmI64Load8S::emit(this, result, pointer, offset); 1240 break; 1241 1242 case LoadOpType::I32Load8U: 1243 case LoadOpType::I64Load8U: 1244 WasmLoad8U::emit(this, result, pointer, offset); 1245 break; 1246 1247 case LoadOpType::I32Load16S: 1248 WasmI32Load16S::emit(this, result, pointer, offset); 1249 break; 1250 1251 case LoadOpType::I64Load16S: 1252 WasmI64Load16S::emit(this, result, pointer, offset); 1253 break; 1254 1255 case LoadOpType::I32Load16U: 1256 case LoadOpType::I64Load16U: 1257 WasmLoad16U::emit(this, result, pointer, offset); 1258 break; 1259 1260 case LoadOpType::I32Load: 1261 case LoadOpType::F32Load: 1262 case LoadOpType::I64Load32U: 1263 WasmLoad32U::emit(this, result, pointer, offset); 1264 break; 1265 1266 case LoadOpType::I64Load32S: 1267 
WasmI64Load32S::emit(this, result, pointer, offset); 1268 break; 1269 1270 case LoadOpType::I64Load: 1271 case LoadOpType::F64Load: 1272 WasmLoad64U::emit(this, result, pointer, offset); 1273 break; 1274 } 1275 1276 return { }; 1277 } 1278 1279 auto LLIntGenerator::store(StoreOpType op, ExpressionType pointer, ExpressionType value, uint32_t offset) -> PartialResult 1280 { 1281 switch (op) { 1282 case StoreOpType::I64Store8: 1283 case StoreOpType::I32Store8: 1284 WasmStore8::emit(this, pointer, value, offset); 1285 break; 1286 1287 case StoreOpType::I64Store16: 1288 case StoreOpType::I32Store16: 1289 WasmStore16::emit(this, pointer, value, offset); 1290 break; 1291 1292 case StoreOpType::I64Store32: 1293 case StoreOpType::I32Store: 1294 case StoreOpType::F32Store: 1295 WasmStore32::emit(this, pointer, value, offset); 1296 break; 1297 1298 case StoreOpType::I64Store: 1299 case StoreOpType::F64Store: 1300 WasmStore64::emit(this, pointer, value, offset); 1301 break; 1302 } 1303 1304 return { }; 1305 } 1306 1307 auto LLIntGenerator::atomicLoad(ExtAtomicOpType op, Type, ExpressionType pointer, ExpressionType& result, uint32_t offset) -> PartialResult 1308 { 1309 result = push(); 1310 switch (op) { 1311 case ExtAtomicOpType::I32AtomicLoad8U: 1312 case ExtAtomicOpType::I64AtomicLoad8U: 1313 WasmI64AtomicRmw8AddU::emit(this, result, pointer, offset, zeroConstant()); 1314 break; 1315 case ExtAtomicOpType::I32AtomicLoad16U: 1316 case ExtAtomicOpType::I64AtomicLoad16U: 1317 WasmI64AtomicRmw16AddU::emit(this, result, pointer, offset, zeroConstant()); 1318 break; 1319 case ExtAtomicOpType::I32AtomicLoad: 1320 case ExtAtomicOpType::I64AtomicLoad32U: 1321 WasmI64AtomicRmw32AddU::emit(this, result, pointer, offset, zeroConstant()); 1322 break; 1323 case ExtAtomicOpType::I64AtomicLoad: 1324 WasmI64AtomicRmwAdd::emit(this, result, pointer, offset, zeroConstant()); 1325 break; 1326 default: 1327 RELEASE_ASSERT_NOT_REACHED(); 1328 } 1329 1330 return { }; 1331 } 1332 1333 auto 
LLIntGenerator::atomicStore(ExtAtomicOpType op, Type, ExpressionType pointer, ExpressionType value, uint32_t offset) -> PartialResult 1334 { 1335 auto result = push(); 1336 switch (op) { 1337 case ExtAtomicOpType::I32AtomicStore8U: 1338 case ExtAtomicOpType::I64AtomicStore8U: 1339 WasmI64AtomicRmw8XchgU::emit(this, result, pointer, offset, value); 1340 break; 1341 case ExtAtomicOpType::I32AtomicStore16U: 1342 case ExtAtomicOpType::I64AtomicStore16U: 1343 WasmI64AtomicRmw16XchgU::emit(this, result, pointer, offset, value); 1344 break; 1345 case ExtAtomicOpType::I32AtomicStore: 1346 case ExtAtomicOpType::I64AtomicStore32U: 1347 WasmI64AtomicRmw32XchgU::emit(this, result, pointer, offset, value); 1348 break; 1349 case ExtAtomicOpType::I64AtomicStore: 1350 WasmI64AtomicRmwXchg::emit(this, result, pointer, offset, value); 1351 break; 1352 default: 1353 RELEASE_ASSERT_NOT_REACHED(); 1354 } 1355 1356 didPopValueFromStack(); // Ignore the result. 1357 return { }; 1358 } 1359 1360 auto LLIntGenerator::atomicBinaryRMW(ExtAtomicOpType op, Type, ExpressionType pointer, ExpressionType value, ExpressionType& result, uint32_t offset) -> PartialResult 1361 { 1362 result = push(); 1363 switch (op) { 1364 case ExtAtomicOpType::I32AtomicRmw8AddU: 1365 case ExtAtomicOpType::I64AtomicRmw8AddU: 1366 WasmI64AtomicRmw8AddU::emit(this, result, pointer, offset, value); 1367 break; 1368 case ExtAtomicOpType::I32AtomicRmw16AddU: 1369 case ExtAtomicOpType::I64AtomicRmw16AddU: 1370 WasmI64AtomicRmw16AddU::emit(this, result, pointer, offset, value); 1371 break; 1372 case ExtAtomicOpType::I32AtomicRmwAdd: 1373 case ExtAtomicOpType::I64AtomicRmw32AddU: 1374 WasmI64AtomicRmw32AddU::emit(this, result, pointer, offset, value); 1375 break; 1376 case ExtAtomicOpType::I64AtomicRmwAdd: 1377 WasmI64AtomicRmwAdd::emit(this, result, pointer, offset, value); 1378 break; 1379 case ExtAtomicOpType::I32AtomicRmw8SubU: 1380 case ExtAtomicOpType::I64AtomicRmw8SubU: 1381 WasmI64AtomicRmw8SubU::emit(this, result, 
pointer, offset, value); 1382 break; 1383 case ExtAtomicOpType::I32AtomicRmw16SubU: 1384 case ExtAtomicOpType::I64AtomicRmw16SubU: 1385 WasmI64AtomicRmw16SubU::emit(this, result, pointer, offset, value); 1386 break; 1387 case ExtAtomicOpType::I32AtomicRmwSub: 1388 case ExtAtomicOpType::I64AtomicRmw32SubU: 1389 WasmI64AtomicRmw32SubU::emit(this, result, pointer, offset, value); 1390 break; 1391 case ExtAtomicOpType::I64AtomicRmwSub: 1392 WasmI64AtomicRmwSub::emit(this, result, pointer, offset, value); 1393 break; 1394 case ExtAtomicOpType::I32AtomicRmw8AndU: 1395 case ExtAtomicOpType::I64AtomicRmw8AndU: 1396 WasmI64AtomicRmw8AndU::emit(this, result, pointer, offset, value); 1397 break; 1398 case ExtAtomicOpType::I32AtomicRmw16AndU: 1399 case ExtAtomicOpType::I64AtomicRmw16AndU: 1400 WasmI64AtomicRmw16AndU::emit(this, result, pointer, offset, value); 1401 break; 1402 case ExtAtomicOpType::I32AtomicRmwAnd: 1403 case ExtAtomicOpType::I64AtomicRmw32AndU: 1404 WasmI64AtomicRmw32AndU::emit(this, result, pointer, offset, value); 1405 break; 1406 case ExtAtomicOpType::I64AtomicRmwAnd: 1407 WasmI64AtomicRmwAnd::emit(this, result, pointer, offset, value); 1408 break; 1409 case ExtAtomicOpType::I32AtomicRmw8OrU: 1410 case ExtAtomicOpType::I64AtomicRmw8OrU: 1411 WasmI64AtomicRmw8OrU::emit(this, result, pointer, offset, value); 1412 break; 1413 case ExtAtomicOpType::I32AtomicRmw16OrU: 1414 case ExtAtomicOpType::I64AtomicRmw16OrU: 1415 WasmI64AtomicRmw16OrU::emit(this, result, pointer, offset, value); 1416 break; 1417 case ExtAtomicOpType::I32AtomicRmwOr: 1418 case ExtAtomicOpType::I64AtomicRmw32OrU: 1419 WasmI64AtomicRmw32OrU::emit(this, result, pointer, offset, value); 1420 break; 1421 case ExtAtomicOpType::I64AtomicRmwOr: 1422 WasmI64AtomicRmwOr::emit(this, result, pointer, offset, value); 1423 break; 1424 case ExtAtomicOpType::I32AtomicRmw8XorU: 1425 case ExtAtomicOpType::I64AtomicRmw8XorU: 1426 WasmI64AtomicRmw8XorU::emit(this, result, pointer, offset, value); 1427 break; 
1428 case ExtAtomicOpType::I32AtomicRmw16XorU: 1429 case ExtAtomicOpType::I64AtomicRmw16XorU: 1430 WasmI64AtomicRmw16XorU::emit(this, result, pointer, offset, value); 1431 break; 1432 case ExtAtomicOpType::I32AtomicRmwXor: 1433 case ExtAtomicOpType::I64AtomicRmw32XorU: 1434 WasmI64AtomicRmw32XorU::emit(this, result, pointer, offset, value); 1435 break; 1436 case ExtAtomicOpType::I64AtomicRmwXor: 1437 WasmI64AtomicRmwXor::emit(this, result, pointer, offset, value); 1438 break; 1439 case ExtAtomicOpType::I32AtomicRmw8XchgU: 1440 case ExtAtomicOpType::I64AtomicRmw8XchgU: 1441 WasmI64AtomicRmw8XchgU::emit(this, result, pointer, offset, value); 1442 break; 1443 case ExtAtomicOpType::I32AtomicRmw16XchgU: 1444 case ExtAtomicOpType::I64AtomicRmw16XchgU: 1445 WasmI64AtomicRmw16XchgU::emit(this, result, pointer, offset, value); 1446 break; 1447 case ExtAtomicOpType::I32AtomicRmwXchg: 1448 case ExtAtomicOpType::I64AtomicRmw32XchgU: 1449 WasmI64AtomicRmw32XchgU::emit(this, result, pointer, offset, value); 1450 break; 1451 case ExtAtomicOpType::I64AtomicRmwXchg: 1452 WasmI64AtomicRmwXchg::emit(this, result, pointer, offset, value); 1453 break; 1454 default: 1455 RELEASE_ASSERT_NOT_REACHED(); 1456 break; 1457 } 1458 1459 return { }; 1460 } 1461 1462 auto LLIntGenerator::atomicCompareExchange(ExtAtomicOpType op, Type, ExpressionType pointer, ExpressionType expected, ExpressionType value, ExpressionType& result, uint32_t offset) -> PartialResult 1463 { 1464 result = push(); 1465 switch (op) { 1466 case ExtAtomicOpType::I32AtomicRmw8CmpxchgU: 1467 case ExtAtomicOpType::I64AtomicRmw8CmpxchgU: 1468 WasmI64AtomicRmw8CmpxchgU::emit(this, result, pointer, offset, expected, value); 1469 break; 1470 case ExtAtomicOpType::I32AtomicRmw16CmpxchgU: 1471 case ExtAtomicOpType::I64AtomicRmw16CmpxchgU: 1472 WasmI64AtomicRmw16CmpxchgU::emit(this, result, pointer, offset, expected, value); 1473 break; 1474 case ExtAtomicOpType::I32AtomicRmwCmpxchg: 1475 case ExtAtomicOpType::I64AtomicRmw32CmpxchgU: 
1476 WasmI64AtomicRmw32CmpxchgU::emit(this, result, pointer, offset, expected, value); 1477 break; 1478 case ExtAtomicOpType::I64AtomicRmwCmpxchg: 1479 WasmI64AtomicRmwCmpxchg::emit(this, result, pointer, offset, expected, value); 1480 break; 1481 default: 1482 RELEASE_ASSERT_NOT_REACHED(); 1483 break; 1484 } 1485 1486 return { }; 1487 } 1488 1489 auto LLIntGenerator::atomicWait(ExtAtomicOpType op, ExpressionType pointer, ExpressionType value, ExpressionType timeout, ExpressionType& result, uint32_t offset) -> PartialResult 1490 { 1491 result = push(); 1492 switch (op) { 1493 case ExtAtomicOpType::MemoryAtomicWait32: 1494 WasmMemoryAtomicWait32::emit(this, result, pointer, offset, value, timeout); 1495 break; 1496 case ExtAtomicOpType::MemoryAtomicWait64: 1497 WasmMemoryAtomicWait64::emit(this, result, pointer, offset, value, timeout); 1498 break; 1499 default: 1500 RELEASE_ASSERT_NOT_REACHED(); 1501 break; 1502 } 1503 return { }; 1504 } 1505 1506 auto LLIntGenerator::atomicNotify(ExtAtomicOpType op, ExpressionType pointer, ExpressionType count, ExpressionType& result, uint32_t offset) -> PartialResult 1507 { 1508 result = push(); 1509 RELEASE_ASSERT(op == ExtAtomicOpType::MemoryAtomicNotify); 1510 WasmMemoryAtomicNotify::emit(this, result, pointer, offset, count); 1511 return { }; 1512 } 1513 1514 auto LLIntGenerator::atomicFence(ExtAtomicOpType, uint8_t) -> PartialResult 1515 { 1516 WasmAtomicFence::emit(this); 1517 return { }; 1518 } 1519 1520 void LLIntGenerator::linkSwitchTargets(Label& label, unsigned location) 1521 { 1522 auto it = m_switches.find(&label); 1523 if (it != m_switches.end()) { 1524 for (const auto& entry : it->value) { 1525 ASSERT(!*entry.jumpTarget); 1526 *entry.jumpTarget = location - entry.offset; 1527 } 1528 m_switches.remove(it); 1529 } 1530 } 1531 1532 } 1533 1534 template<> 1535 void GenericLabel<Wasm::GeneratorTraits>::setLocation(BytecodeGeneratorBase<Wasm::GeneratorTraits>& generator, unsigned location) 1536 { 1537 
RELEASE_ASSERT(isForward()); 1538 1539 m_location = location; 1540 1541 Wasm::LLIntGenerator* llintGenerator = static_cast<Wasm::LLIntGenerator*>(&generator); 1542 llintGenerator->linkSwitchTargets(*this, m_location); 1543 1544 for (auto offset : m_unresolvedJumps) { 1545 auto instruction = generator.m_writer.ref(offset); 1546 int target = m_location - offset; 1547 1548 #define CASE(__op) \ 1549 case __op::opcodeID: \ 1550 instruction->cast<__op, WasmOpcodeTraits>()->setTargetLabel(BoundLabel(target), [&]() { \ 1551 generator.m_codeBlock->addOutOfLineJumpTarget(instruction.offset(), target); \ 1552 return BoundLabel(); \ 1553 }); \ 1554 break; 1555 1556 switch (instruction->opcodeID<WasmOpcodeTraits>()) { 1557 CASE(WasmJmp) 1558 CASE(WasmJtrue) 1559 CASE(WasmJfalse) 1560 default: 1561 RELEASE_ASSERT_NOT_REACHED(); 1562 } 1563 #undef CASE 1564 } 1565 } 1566 1567 } // namespace JSC::Wasm 1568 1569 #endif // ENABLE(WEBASSEMBLY)