/*
 * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "WebAssemblyFunction.h"

#if ENABLE(WEBASSEMBLY)

#include "B3Compilation.h"
#include "JSCInlines.h"
#include "JSFunctionInlines.h"
#include "JSObject.h"
#include "JSWebAssemblyInstance.h"
#include "JSWebAssemblyMemory.h"
#include "JSWebAssemblyRuntimeError.h"
#include "LLIntThunks.h"
#include "LinkBuffer.h"
#include "ProtoCallFrame.h"
#include "VM.h"
#include "WasmCallee.h"
#include "WasmCallingConvention.h"
#include "WasmContextInlines.h"
#include "WasmFormat.h"
#include "WasmMemory.h"
#include "WasmMemoryInformation.h"
#include "WasmModuleInformation.h"
#include "WasmSignatureInlines.h"
#include <wtf/FastTLS.h>
#include <wtf/StackPointer.h>
#include <wtf/SystemTracing.h>

namespace JSC {

const ClassInfo WebAssemblyFunction::s_info = { "WebAssemblyFunction", &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(WebAssemblyFunction) };

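// Generic JS -> wasm call path: coerce each JS argument to its wasm type, build a
// ProtoCallFrame, and enter the generated JS entrypoint through vmEntryToWasm.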
static EncodedJSValue JSC_HOST_CALL callWebAssemblyFunction(ExecState* exec)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    WebAssemblyFunction* wasmFunction = jsCast<WebAssemblyFunction*>(exec->jsCallee());
    Wasm::SignatureIndex signatureIndex = wasmFunction->signatureIndex();
    const Wasm::Signature& signature = Wasm::SignatureInformation::get(signatureIndex);

    // Make sure that the memory we think we are going to run with matches the one we expect.
    ASSERT(wasmFunction->instance()->instance().codeBlock()->isSafeToRun(wasmFunction->instance()->memory()->memory().mode()));

    Optional<TraceScope> traceScope;
    if (Options::useTracePoints())
        traceScope.emplace(WebAssemblyExecuteStart, WebAssemblyExecuteEnd);

    Vector<JSValue, MarkedArgumentBuffer::inlineCapacity> boxedArgs;
    JSWebAssemblyInstance* instance = wasmFunction->instance();
    Wasm::Instance* wasmInstance = &instance->instance();
    // When we don't use fast TLS to store the context, the JS
    // entry wrapper expects a JSWebAssemblyInstance as the first argument.
    if (!Wasm::Context::useFastTLS())
        boxedArgs.append(instance);

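    // Coerce each JS argument to its wasm type. The coerced raw bits are re-encoded as
    // EncodedJSValues so the entry wrapper can copy them into the wasm frame unchanged.
    // An i64 cannot be produced from a JS value on this path, so that slot is left empty.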
    for (unsigned argIndex = 0; argIndex < signature.argumentCount(); ++argIndex) {
        JSValue arg = exec->argument(argIndex);
        switch (signature.argument(argIndex)) {
        case Wasm::I32:
            arg = JSValue::decode(arg.toInt32(exec));
            break;
        case Wasm::Anyref:
            break;
        case Wasm::I64:
            arg = JSValue();
            break;
        case Wasm::F32:
            arg = JSValue::decode(bitwise_cast<uint32_t>(arg.toFloat(exec)));
            break;
        case Wasm::F64:
            arg = JSValue::decode(bitwise_cast<uint64_t>(arg.toNumber(exec)));
            break;
        case Wasm::Void:
        case Wasm::Func:
        case Wasm::Anyfunc:
            RELEASE_ASSERT_NOT_REACHED();
        }
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        boxedArgs.append(arg);
    }

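    // ProtoCallFrame::init() takes the first argument (the |this| slot) separately from the
    // rest, so peel the first boxed value off the vector here.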
    JSValue firstArgument = JSValue();
    int argCount = 1;
    JSValue* remainingArgs = nullptr;
    if (boxedArgs.size()) {
        remainingArgs = boxedArgs.data();
        firstArgument = *remainingArgs;
        remainingArgs++;
        argCount = boxedArgs.size();
    }

    // Note: we specifically use the WebAssemblyFunction as the callee to begin with in the ProtoCallFrame.
    // The reason for this is that calling into the llint may stack overflow, and the stack overflow
    // handler might read the global object from the callee.
    ProtoCallFrame protoCallFrame;
    protoCallFrame.init(nullptr, wasmFunction, firstArgument, argCount, remainingArgs);

    // FIXME Do away with this entire function, and only use the entrypoint generated by B3. https://bugs.webkit.org/show_bug.cgi?id=166486
    Wasm::Instance* prevWasmInstance = vm.wasmContext.load();
    {
        // We do the stack check here for the wrapper function because we don't
        // want to emit a stack check inside every wrapper function.
        const intptr_t sp = bitwise_cast<intptr_t>(currentStackPointer());
        const intptr_t frameSize = (boxedArgs.size() + CallFrame::headerSizeInRegisters) * sizeof(Register);
        const intptr_t stackSpaceUsed = 2 * frameSize; // We're making two calls. One to the wrapper, and one to the actual wasm code.
        if (UNLIKELY((sp < stackSpaceUsed) || ((sp - stackSpaceUsed) < bitwise_cast<intptr_t>(vm.softStackLimit()))))
            return JSValue::encode(throwException(exec, scope, createStackOverflowError(exec)));
    }
    vm.wasmContext.store(wasmInstance, vm.softStackLimit());
    ASSERT(wasmFunction->instance());
    ASSERT(&wasmFunction->instance()->instance() == vm.wasmContext.load());
    EncodedJSValue rawResult = vmEntryToWasm(wasmFunction->jsEntrypoint(MustCheckArity).executableAddress(), &vm, &protoCallFrame);
    // We need to make sure this is in a register or on the stack since it's stored in Vector<JSValue>.
    // This probably isn't strictly necessary, since the WebAssemblyFunction* should keep the instance
    // alive. But it's good hygiene.
    instance->use();
    if (prevWasmInstance != wasmInstance) {
        // This is just for some extra safety instead of leaving a cached
        // value in there. If we ever forget to set the value to be a real
        // bounds, this will force every stack overflow check to immediately
        // fire. The stack limit never changes while executing except when
        // WebAssembly is used through the JSC API: API users can ask the code
        // to migrate threads.
        wasmInstance->setCachedStackLimit(bitwise_cast<void*>(std::numeric_limits<uintptr_t>::max()));
    }
    vm.wasmContext.store(prevWasmInstance, vm.softStackLimit());
    RETURN_IF_EXCEPTION(scope, { });

    return rawResult;
}

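// The js->wasm IC only materializes the tag registers when it will actually need them,
// i.e. when it has to type-check incoming arguments or box a return value.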
bool WebAssemblyFunction::useTagRegisters() const
{
    const auto& signature = Wasm::SignatureInformation::get(signatureIndex());
    return signature.argumentCount() || signature.returnType() != Wasm::Void;
}

RegisterSet WebAssemblyFunction::calleeSaves() const
{
    RegisterSet toSave = Wasm::PinnedRegisterInfo::get().toSave(instance()->memoryMode());
    if (useTagRegisters()) {
        RegisterSet tagRegisters = RegisterSet::runtimeTagRegisters();
        // We rely on these being disjoint sets.
#if !ASSERT_DISABLED
        for (Reg reg : tagRegisters)
            ASSERT(!toSave.contains(reg));
#endif
        toSave.merge(tagRegisters);
    }
    return toSave;
}

RegisterAtOffsetList WebAssemblyFunction::usedCalleeSaveRegisters() const
{
    return RegisterAtOffsetList { calleeSaves(), RegisterAtOffsetList::OffsetBaseType::FramePointerBased };
}

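// jsCallEntrypointSlow() spills the previously active Wasm::Instance just below the
// callee-save area of its frame; this computes that slot's frame-pointer-relative offset.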
ptrdiff_t WebAssemblyFunction::previousInstanceOffset() const
{
    ptrdiff_t result = calleeSaves().numberOfSetRegisters() * sizeof(CPURegister);
    result = -result - sizeof(CPURegister);
#if !ASSERT_DISABLED
    ptrdiff_t minOffset = 1;
    for (const RegisterAtOffset& regAtOffset : usedCalleeSaveRegisters()) {
        ptrdiff_t offset = regAtOffset.offset();
        ASSERT(offset < 0);
        minOffset = std::min(offset, minOffset);
    }
    ASSERT(minOffset - static_cast<ptrdiff_t>(sizeof(CPURegister)) == result);
#endif
    return result;
}

Wasm::Instance* WebAssemblyFunction::previousInstance(CallFrame* callFrame)
{
    ASSERT(callFrame->callee().rawPtr() == m_jsToWasmICCallee.get());
    auto* result = *bitwise_cast<Wasm::Instance**>(bitwise_cast<char*>(callFrame) + previousInstanceOffset());
    return result;
}

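// Lazily compile the js->wasm IC stub: a small trampoline that checks arity and argument
// types, moves JS arguments straight into wasm calling-convention locations, calls the wasm
// entrypoint, and boxes the result. Any mismatch bails out to callWebAssemblyFunction above.
// Returning a null code pointer means "no fast path for this signature".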
MacroAssemblerCodePtr<JSEntryPtrTag> WebAssemblyFunction::jsCallEntrypointSlow()
{
    VM& vm = *this->vm();
    CCallHelpers jit;

    const auto& signature = Wasm::SignatureInformation::get(signatureIndex());
    const auto& pinnedRegs = Wasm::PinnedRegisterInfo::get();
    RegisterAtOffsetList registersToSpill = usedCalleeSaveRegisters();

    auto& moduleInformation = instance()->instance().module().moduleInformation();

    unsigned totalFrameSize = registersToSpill.size() * sizeof(CPURegister);
    totalFrameSize += sizeof(CPURegister); // Slot for the VM's previous wasm instance.
    totalFrameSize += Wasm::WasmCallingConvention::headerSizeInBytes();
    totalFrameSize -= sizeof(CallerFrameAndPC);

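    // Count how many arguments travel in registers; each argument that overflows the
    // register budget needs a stack slot in the outgoing wasm frame, so it adds to the
    // frame size.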
    unsigned numGPRs = 0;
    unsigned numFPRs = 0;
    bool argumentsIncludeI64 = false;
    for (unsigned i = 0; i < signature.argumentCount(); i++) {
        switch (signature.argument(i)) {
        case Wasm::I64:
            argumentsIncludeI64 = true;
            break;
        case Wasm::Anyref:
        case Wasm::I32:
            if (numGPRs >= Wasm::wasmCallingConvention().m_gprArgs.size())
                totalFrameSize += sizeof(CPURegister);
            ++numGPRs;
            break;
        case Wasm::F32:
        case Wasm::F64:
            if (numFPRs >= Wasm::wasmCallingConvention().m_fprArgs.size())
                totalFrameSize += sizeof(CPURegister);
            ++numFPRs;
            break;
        default:
            RELEASE_ASSERT_NOT_REACHED();
        }
    }

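    // Signatures with i64 arguments never get a fast IC, since a JS value cannot supply an
    // i64 here; the caller falls back to the generic entrypoint instead.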
    if (argumentsIncludeI64)
        return nullptr;

    totalFrameSize = WTF::roundUpToMultipleOf(stackAlignmentBytes(), totalFrameSize);

    jit.emitFunctionPrologue();
    jit.subPtr(MacroAssembler::TrustedImm32(totalFrameSize), MacroAssembler::stackPointerRegister);
    jit.store64(CCallHelpers::TrustedImm64(0), CCallHelpers::addressFor(CallFrameSlot::codeBlock));

    for (const RegisterAtOffset& regAtOffset : registersToSpill) {
        GPRReg reg = regAtOffset.reg().gpr();
        ptrdiff_t offset = regAtOffset.offset();
        jit.storePtr(reg, CCallHelpers::Address(GPRInfo::callFrameRegister, offset));
    }

    GPRReg scratchGPR = Wasm::wasmCallingConventionAir().prologueScratch(1);
    GPRReg scratch2GPR = Wasm::wasmCallingConventionAir().prologueScratch(0);
    jit.loadPtr(vm.addressOfSoftStackLimit(), scratch2GPR);

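    // Stack check: if the adjusted stack pointer ended up above the frame pointer, the
    // subtraction wrapped around; it must also stay above the VM's soft stack limit.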
    CCallHelpers::JumpList slowPath;
    slowPath.append(jit.branchPtr(CCallHelpers::Above, MacroAssembler::stackPointerRegister, GPRInfo::callFrameRegister));
    slowPath.append(jit.branchPtr(CCallHelpers::Below, MacroAssembler::stackPointerRegister, scratch2GPR));

    // Ensure:
    // argCountPlusThis - 1 >= signature.argumentCount()
    // argCountPlusThis >= signature.argumentCount() + 1
    // FIXME: We should handle mismatched arity
    // https://bugs.webkit.org/show_bug.cgi?id=196564
    slowPath.append(jit.branch32(CCallHelpers::Below,
        CCallHelpers::payloadFor(CallFrameSlot::argumentCount), CCallHelpers::TrustedImm32(signature.argumentCount() + 1)));

    if (useTagRegisters())
        jit.emitMaterializeTagCheckRegisters();

    // First we do stack slots for FPRs so we can use FPR argument registers as scratch.
    // After that, we handle FPR argument registers.
    // We also handle all GPR types here as we have GPR scratch registers.
    {
        CCallHelpers::Address calleeFrame = CCallHelpers::Address(MacroAssembler::stackPointerRegister, -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
        numGPRs = 0;
        numFPRs = 0;
        FPRReg scratchFPR = Wasm::wasmCallingConvention().m_fprArgs[0].fpr();

        ptrdiff_t jsOffset = CallFrameSlot::firstArgument * sizeof(EncodedJSValue);

        ptrdiff_t wasmOffset = CallFrame::headerSizeInRegisters * sizeof(CPURegister);
        for (unsigned i = 0; i < signature.argumentCount(); i++) {
            switch (signature.argument(i)) {
            case Wasm::I32:
                jit.load64(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), scratchGPR);
                slowPath.append(jit.branchIfNotInt32(scratchGPR));
                if (numGPRs >= Wasm::wasmCallingConvention().m_gprArgs.size()) {
                    jit.store32(scratchGPR, calleeFrame.withOffset(wasmOffset));
                    wasmOffset += sizeof(CPURegister);
                } else {
                    jit.zeroExtend32ToPtr(scratchGPR, Wasm::wasmCallingConvention().m_gprArgs[numGPRs].gpr());
                    ++numGPRs;
                }
                break;
            case Wasm::Anyref: {
                jit.load64(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), scratchGPR);

                if (numGPRs >= Wasm::wasmCallingConvention().m_gprArgs.size()) {
                    jit.store64(scratchGPR, calleeFrame.withOffset(wasmOffset));
                    wasmOffset += sizeof(CPURegister);
                } else {
                    jit.move(scratchGPR, Wasm::wasmCallingConvention().m_gprArgs[numGPRs].gpr());
                    ++numGPRs;
                }
                break;
            }
            case Wasm::F32:
            case Wasm::F64:
                if (numFPRs >= Wasm::wasmCallingConvention().m_fprArgs.size()) {
                    jit.load64(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), scratchGPR);
                    slowPath.append(jit.branchIfNotNumber(scratchGPR));
                    auto isInt32 = jit.branchIfInt32(scratchGPR);
                    if (signature.argument(i) == Wasm::F32) {
                        jit.unboxDouble(scratchGPR, scratchGPR, scratchFPR);
                        jit.convertDoubleToFloat(scratchFPR, scratchFPR);
                        jit.storeFloat(scratchFPR, calleeFrame.withOffset(wasmOffset));
                    } else {
                        jit.add64(GPRInfo::tagTypeNumberRegister, scratchGPR, scratchGPR);
                        jit.store64(scratchGPR, calleeFrame.withOffset(wasmOffset));
                    }
                    auto done = jit.jump();

                    isInt32.link(&jit);
                    if (signature.argument(i) == Wasm::F32) {
                        jit.convertInt32ToFloat(scratchGPR, scratchFPR);
                        jit.storeFloat(scratchFPR, calleeFrame.withOffset(wasmOffset));
                    } else {
                        jit.convertInt32ToDouble(scratchGPR, scratchFPR);
                        jit.storeDouble(scratchFPR, calleeFrame.withOffset(wasmOffset));
                    }
                    done.link(&jit);

                    wasmOffset += sizeof(CPURegister);
                } else
                    ++numFPRs;
                break;
            default:
                RELEASE_ASSERT_NOT_REACHED();
            }

            jsOffset += sizeof(EncodedJSValue);
        }
    }

    // Now handle FPR arguments in registers.
    {
        numFPRs = 0;
        ptrdiff_t jsOffset = CallFrameSlot::firstArgument * sizeof(EncodedJSValue);
        for (unsigned i = 0; i < signature.argumentCount(); i++) {
            switch (signature.argument(i)) {
            case Wasm::F32:
            case Wasm::F64:
                if (numFPRs < Wasm::wasmCallingConvention().m_fprArgs.size()) {
                    FPRReg argFPR = Wasm::wasmCallingConvention().m_fprArgs[numFPRs].fpr();
                    jit.load64(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), scratchGPR);
                    slowPath.append(jit.branchIfNotNumber(scratchGPR));
                    auto isInt32 = jit.branchIfInt32(scratchGPR);
                    jit.unboxDouble(scratchGPR, scratchGPR, argFPR);
                    if (signature.argument(i) == Wasm::F32)
                        jit.convertDoubleToFloat(argFPR, argFPR);
                    auto done = jit.jump();

                    isInt32.link(&jit);
                    if (signature.argument(i) == Wasm::F32)
                        jit.convertInt32ToFloat(scratchGPR, argFPR);
                    else
                        jit.convertInt32ToDouble(scratchGPR, argFPR);

                    done.link(&jit);
                    ++numFPRs;
                }
                break;
            default:
                break;
            }

            jsOffset += sizeof(EncodedJSValue);
        }
    }

    // At this point, we're committed to doing a fast call.

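    // Remember whichever Wasm::Instance was running before this call in the frame slot that
    // previousInstance() reads, then install this function's instance as the current one.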
    if (Wasm::Context::useFastTLS())
        jit.loadWasmContextInstance(scratchGPR);
    else
        jit.loadPtr(vm.wasmContext.pointerToInstance(), scratchGPR);
    ptrdiff_t previousInstanceOffset = this->previousInstanceOffset();
    jit.storePtr(scratchGPR, CCallHelpers::Address(GPRInfo::callFrameRegister, previousInstanceOffset));

    jit.move(CCallHelpers::TrustedImmPtr(&instance()->instance()), scratchGPR);
    if (Wasm::Context::useFastTLS())
        jit.storeWasmContextInstance(scratchGPR);
    else {
        jit.move(scratchGPR, pinnedRegs.wasmContextInstancePointer);
        jit.storePtr(scratchGPR, vm.wasmContext.pointerToInstance());
    }
    // This contains the cached stack limit still.
    jit.storePtr(scratch2GPR, CCallHelpers::Address(scratchGPR, Wasm::Instance::offsetOfCachedStackLimit()));

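    // If the module uses memory, materialize the pinned registers from the instance: the
    // cached memory base always, and the cached size except in Signaling mode (where bounds
    // checks rely on guard pages rather than the size register). The base pointer is then
    // conditionally Gigacage-caged.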
    if (!!moduleInformation.memory) {
        GPRReg baseMemory = pinnedRegs.baseMemoryPointer;
        GPRReg scratchOrSize = scratch2GPR;
        auto mode = instance()->memoryMode();

        if (isARM64E()) {
            if (mode != Wasm::MemoryMode::Signaling)
                scratchOrSize = pinnedRegs.sizeRegister;
            jit.loadPtr(CCallHelpers::Address(scratchGPR, Wasm::Instance::offsetOfCachedMemorySize()), scratchOrSize);
        } else {
            if (mode != Wasm::MemoryMode::Signaling)
                jit.loadPtr(CCallHelpers::Address(scratchGPR, Wasm::Instance::offsetOfCachedMemorySize()), pinnedRegs.sizeRegister);
        }

        jit.loadPtr(CCallHelpers::Address(scratchGPR, Wasm::Instance::offsetOfCachedMemory()), baseMemory);
        jit.cageConditionally(Gigacage::Primitive, baseMemory, scratchOrSize, scratchOrSize);
    }

    // We use this callee to indicate how to unwind past these types of frames:
    // 1. We need to know where to get callee saves.
    // 2. We need to know to restore the previous wasm context.
    if (!m_jsToWasmICCallee)
        m_jsToWasmICCallee.set(vm, this, JSToWasmICCallee::create(vm, globalObject(), this));
    jit.storePtr(CCallHelpers::TrustedImmPtr(m_jsToWasmICCallee.get()), CCallHelpers::addressFor(CallFrameSlot::callee));

    {
        // FIXME: Currently we just do an indirect jump. But we should teach the Module
        // how to repatch us:
        // https://bugs.webkit.org/show_bug.cgi?id=196570
        jit.loadPtr(entrypointLoadLocation(), scratchGPR);
        jit.call(scratchGPR, WasmEntryPtrTag);
    }

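    // The call is done: restore whichever Wasm::Instance was active before it, so nested
    // JS -> wasm -> JS -> wasm chains see the correct context on the way back out.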
    ASSERT(!RegisterSet::runtimeTagRegisters().contains(GPRInfo::nonPreservedNonReturnGPR));
    jit.loadPtr(CCallHelpers::Address(GPRInfo::callFrameRegister, previousInstanceOffset), GPRInfo::nonPreservedNonReturnGPR);
    if (Wasm::Context::useFastTLS())
        jit.storeWasmContextInstance(GPRInfo::nonPreservedNonReturnGPR);
    else
        jit.storePtr(GPRInfo::nonPreservedNonReturnGPR, vm.wasmContext.pointerToInstance());

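    // Box the wasm result as a JSValue in returnValueGPR. Doubles are purified first so an
    // arbitrary wasm NaN bit pattern can never be mistaken for a tagged JSValue.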
    switch (signature.returnType()) {
    case Wasm::Void:
        jit.moveTrustedValue(jsUndefined(), JSValueRegs { GPRInfo::returnValueGPR });
        break;
    case Wasm::I32:
        jit.zeroExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        jit.boxInt32(GPRInfo::returnValueGPR, JSValueRegs { GPRInfo::returnValueGPR });
        break;
    case Wasm::F32:
        jit.convertFloatToDouble(FPRInfo::returnValueFPR, FPRInfo::returnValueFPR);
        FALLTHROUGH;
    case Wasm::F64: {
        jit.moveTrustedValue(jsNumber(pureNaN()), JSValueRegs { GPRInfo::returnValueGPR });
        auto isNaN = jit.branchIfNaN(FPRInfo::returnValueFPR);
        jit.boxDouble(FPRInfo::returnValueFPR, JSValueRegs { GPRInfo::returnValueGPR });
        isNaN.link(&jit);
        break;
    }
    case Wasm::Anyref: {
        // FIXME: We need to box wasm Funcrefs once they are supported here.
        break;
    }
    case Wasm::I64:
    case Wasm::Func:
    case Wasm::Anyfunc:
        return nullptr;
    default:
        break;
    }

    auto emitRestoreCalleeSaves = [&] {
        for (const RegisterAtOffset& regAtOffset : registersToSpill) {
            GPRReg reg = regAtOffset.reg().gpr();
            ASSERT(reg != GPRInfo::returnValueGPR);
            ptrdiff_t offset = regAtOffset.offset();
            jit.loadPtr(CCallHelpers::Address(GPRInfo::callFrameRegister, offset), reg);
        }
    };

    emitRestoreCalleeSaves();

    jit.emitFunctionEpilogue();
    jit.ret();

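    // Slow path: restore the callee saves, tear down the frame, and tail-call the generic
    // host-call thunk so the call re-enters through callWebAssemblyFunction.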
    slowPath.link(&jit);
    emitRestoreCalleeSaves();
    jit.move(CCallHelpers::TrustedImmPtr(this), GPRInfo::regT0);
    jit.emitFunctionEpilogue();
#if CPU(ARM64E)
    jit.untagReturnAddress();
#endif
    auto jumpToHostCallThunk = jit.jump();

    LinkBuffer linkBuffer(jit, nullptr, JITCompilationCanFail);
    if (UNLIKELY(linkBuffer.didFailToAllocate()))
        return nullptr;

    linkBuffer.link(jumpToHostCallThunk, CodeLocationLabel<JSEntryPtrTag>(executable()->entrypointFor(CodeForCall, MustCheckArity).executableAddress()));
    m_jsCallEntrypoint = FINALIZE_CODE(linkBuffer, WasmEntryPtrTag, "JS->Wasm IC");
    return m_jsCallEntrypoint.code();
}

WebAssemblyFunction* WebAssemblyFunction::create(VM& vm, JSGlobalObject* globalObject, Structure* structure, unsigned length, const String& name, JSWebAssemblyInstance* instance, Wasm::Callee& jsEntrypoint, Wasm::WasmToWasmImportableFunction::LoadLocation wasmToWasmEntrypointLoadLocation, Wasm::SignatureIndex signatureIndex)
{
    NativeExecutable* executable = vm.getHostFunction(callWebAssemblyFunction, NoIntrinsic, callHostFunctionAsConstructor, nullptr, name);
    WebAssemblyFunction* function = new (NotNull, allocateCell<WebAssemblyFunction>(vm.heap)) WebAssemblyFunction(vm, globalObject, structure, jsEntrypoint, wasmToWasmEntrypointLoadLocation, signatureIndex);
    function->finishCreation(vm, executable, length, name, instance);
    ASSERT_WITH_MESSAGE(!function->isLargeAllocation(), "WebAssemblyFunction should not be allocated in a large allocation since it is a JSCallee.");
    return function;
}

Structure* WebAssemblyFunction::createStructure(VM& vm, JSGlobalObject* globalObject, JSValue prototype)
{
    ASSERT(globalObject);
    return Structure::create(vm, globalObject, prototype, TypeInfo(JSFunctionType, StructureFlags), info());
}

WebAssemblyFunction::WebAssemblyFunction(VM& vm, JSGlobalObject* globalObject, Structure* structure, Wasm::Callee& jsEntrypoint, Wasm::WasmToWasmImportableFunction::LoadLocation wasmToWasmEntrypointLoadLocation, Wasm::SignatureIndex signatureIndex)
    : Base { vm, globalObject, structure }
    , m_jsEntrypoint { jsEntrypoint.entrypoint() }
    , m_importableFunction { signatureIndex, wasmToWasmEntrypointLoadLocation }
{ }

void WebAssemblyFunction::visitChildren(JSCell* cell, SlotVisitor& visitor)
{
    WebAssemblyFunction* thisObject = jsCast<WebAssemblyFunction*>(cell);
    ASSERT_GC_OBJECT_INHERITS(thisObject, info());

    Base::visitChildren(thisObject, visitor);
    visitor.append(thisObject->m_jsToWasmICCallee);
}

void WebAssemblyFunction::destroy(JSCell* cell)
{
    static_cast<WebAssemblyFunction*>(cell)->WebAssemblyFunction::~WebAssemblyFunction();
}

} // namespace JSC

#endif // ENABLE(WEBASSEMBLY)