1/*
2 * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26#include "config.h"
27#include "WebAssemblyFunction.h"
28
29#if ENABLE(WEBASSEMBLY)
30
31#include "B3Compilation.h"
32#include "JSCInlines.h"
33#include "JSFunctionInlines.h"
34#include "JSObject.h"
35#include "JSWebAssemblyInstance.h"
36#include "JSWebAssemblyMemory.h"
37#include "JSWebAssemblyRuntimeError.h"
38#include "LLIntThunks.h"
39#include "LinkBuffer.h"
40#include "ProtoCallFrame.h"
41#include "VM.h"
42#include "WasmCallee.h"
43#include "WasmCallingConvention.h"
44#include "WasmContextInlines.h"
45#include "WasmFormat.h"
46#include "WasmMemory.h"
47#include "WasmMemoryInformation.h"
48#include "WasmModuleInformation.h"
49#include "WasmSignatureInlines.h"
50#include <wtf/FastTLS.h>
51#include <wtf/StackPointer.h>
52#include <wtf/SystemTracing.h>
53
54namespace JSC {
55
// Runtime class metadata for WebAssemblyFunction. No static/dynamic property
// hash tables are needed, hence the two nullptr slots.
const ClassInfo WebAssemblyFunction::s_info = { "WebAssemblyFunction", &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(WebAssemblyFunction) };
57
// Host-function trampoline used when JS calls a WebAssembly export.
// It coerces the JS arguments to wasm values per the function's signature,
// performs a stack check for both this wrapper and the wasm callee, swaps the
// callee's Wasm::Instance in as the active wasm context, calls the generated
// JS entrypoint, and restores the previous context before returning.
static EncodedJSValue JSC_HOST_CALL callWebAssemblyFunction(ExecState* exec)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    WebAssemblyFunction* wasmFunction = jsCast<WebAssemblyFunction*>(exec->jsCallee());
    Wasm::SignatureIndex signatureIndex = wasmFunction->signatureIndex();
    const Wasm::Signature& signature = Wasm::SignatureInformation::get(signatureIndex);

    // Make sure that the memory we think we are going to run with matches the one we expect.
    ASSERT(wasmFunction->instance()->instance().codeBlock()->isSafeToRun(wasmFunction->instance()->memory()->memory().mode()));

    Optional<TraceScope> traceScope;
    if (Options::useTracePoints())
        traceScope.emplace(WebAssemblyExecuteStart, WebAssemblyExecuteEnd);

    Vector<JSValue, MarkedArgumentBuffer::inlineCapacity> boxedArgs;
    JSWebAssemblyInstance* instance = wasmFunction->instance();
    Wasm::Instance* wasmInstance = &instance->instance();
    // When we don't use fast TLS to store the context, the JS
    // entry wrapper expects a JSWebAssemblyInstance as the first argument.
    if (!Wasm::Context::useFastTLS())
        boxedArgs.append(instance);

    for (unsigned argIndex = 0; argIndex < signature.argumentCount(); ++argIndex) {
        JSValue arg = exec->argument(argIndex);
        switch (signature.argument(argIndex)) {
        case Wasm::I32:
            // Smuggle the raw int32 bits through an EncodedJSValue; the wasm
            // entrypoint reads them back as an untagged machine word.
            arg = JSValue::decode(arg.toInt32(exec));
            break;
        case Wasm::I64:
            // I64 cannot be represented as a JS number; pass the empty value
            // (the wasm entrypoint handles/rejects I64 itself).
            arg = JSValue();
            break;
        case Wasm::F32:
            // Same raw-bits trick for floats: IEEE bit pattern, not a boxed double.
            arg = JSValue::decode(bitwise_cast<uint32_t>(arg.toFloat(exec)));
            break;
        case Wasm::F64:
            arg = JSValue::decode(bitwise_cast<uint64_t>(arg.toNumber(exec)));
            break;
        case Wasm::Void:
        case Wasm::Func:
        case Wasm::Anyfunc:
            RELEASE_ASSERT_NOT_REACHED();
        }
        // The coercions above (toInt32/toFloat/toNumber) can run arbitrary JS and throw.
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        boxedArgs.append(arg);
    }

    // Split the coerced arguments into the shape ProtoCallFrame::init expects:
    // the first argument by value plus a pointer to the remaining ones.
    JSValue firstArgument = JSValue();
    int argCount = 1;
    JSValue* remainingArgs = nullptr;
    if (boxedArgs.size()) {
        remainingArgs = boxedArgs.data();
        firstArgument = *remainingArgs;
        remainingArgs++;
        argCount = boxedArgs.size();
    }

    // Note: we specifically use the WebAssemblyFunction as the callee to begin with in the ProtoCallFrame.
    // The reason for this is that calling into the llint may stack overflow, and the stack overflow
    // handler might read the global object from the callee.
    ProtoCallFrame protoCallFrame;
    protoCallFrame.init(nullptr, wasmFunction, firstArgument, argCount, remainingArgs);

    // FIXME Do away with this entire function, and only use the entrypoint generated by B3. https://bugs.webkit.org/show_bug.cgi?id=166486
    Wasm::Instance* prevWasmInstance = vm.wasmContext.load();
    {
        // We do the stack check here for the wrapper function because we don't
        // want to emit a stack check inside every wrapper function.
        const intptr_t sp = bitwise_cast<intptr_t>(currentStackPointer());
        const intptr_t frameSize = (boxedArgs.size() + CallFrame::headerSizeInRegisters) * sizeof(Register);
        const intptr_t stackSpaceUsed = 2 * frameSize; // We're making two calls. One to the wrapper, and one to the actual wasm code.
        if (UNLIKELY((sp < stackSpaceUsed) || ((sp - stackSpaceUsed) < bitwise_cast<intptr_t>(vm.softStackLimit()))))
            return JSValue::encode(throwException(exec, scope, createStackOverflowError(exec)));
    }
    // Make the callee's instance the active wasm context for the duration of the call.
    vm.wasmContext.store(wasmInstance, vm.softStackLimit());
    ASSERT(wasmFunction->instance());
    ASSERT(&wasmFunction->instance()->instance() == vm.wasmContext.load());
    EncodedJSValue rawResult = vmEntryToWasm(wasmFunction->jsEntrypoint(MustCheckArity).executableAddress(), &vm, &protoCallFrame);
    // We need to make sure this is in a register or on the stack since it's stored in Vector<JSValue>.
    // This probably isn't strictly necessary, since the WebAssemblyFunction* should keep the instance
    // alive. But it's good hygiene.
    instance->use();
    if (prevWasmInstance != wasmInstance) {
        // This is just for some extra safety instead of leaving a cached
        // value in there. If we ever forget to set the value to be a real
        // bounds, this will force every stack overflow check to immediately
        // fire. The stack limit never changes while executing except when
        // WebAssembly is used through the JSC API: API users can ask the code
        // to migrate threads.
        wasmInstance->setCachedStackLimit(bitwise_cast<void*>(std::numeric_limits<uintptr_t>::max()));
    }
    // Restore the previous wasm context (may be null if we were the outermost call).
    vm.wasmContext.store(prevWasmInstance, vm.softStackLimit());
    RETURN_IF_EXCEPTION(scope, { });

    return rawResult;
}
154
155bool WebAssemblyFunction::useTagRegisters() const
156{
157 const auto& signature = Wasm::SignatureInformation::get(signatureIndex());
158 return signature.argumentCount() || signature.returnType() != Wasm::Void;
159}
160
161RegisterSet WebAssemblyFunction::calleeSaves() const
162{
163 RegisterSet toSave = Wasm::PinnedRegisterInfo::get().toSave(instance()->memoryMode());
164 if (useTagRegisters()) {
165 RegisterSet tagRegisters = RegisterSet::runtimeTagRegisters();
166 // We rely on these being disjoint sets.
167#if !ASSERT_DISABLED
168 for (Reg reg : tagRegisters)
169 ASSERT(!toSave.contains(reg));
170#endif
171 toSave.merge(tagRegisters);
172 }
173 return toSave;
174}
175
176RegisterAtOffsetList WebAssemblyFunction::usedCalleeSaveRegisters() const
177{
178 return RegisterAtOffsetList { calleeSaves(), RegisterAtOffsetList::OffsetBaseType::FramePointerBased };
179}
180
181ptrdiff_t WebAssemblyFunction::previousInstanceOffset() const
182{
183 ptrdiff_t result = calleeSaves().numberOfSetRegisters() * sizeof(CPURegister);
184 result = -result - sizeof(CPURegister);
185#if !ASSERT_DISABLED
186 ptrdiff_t minOffset = 1;
187 for (const RegisterAtOffset& regAtOffset : usedCalleeSaveRegisters()) {
188 ptrdiff_t offset = regAtOffset.offset();
189 ASSERT(offset < 0);
190 minOffset = std::min(offset, minOffset);
191 }
192 ASSERT(minOffset - static_cast<ptrdiff_t>(sizeof(CPURegister)) == result);
193#endif
194 return result;
195}
196
197Wasm::Instance* WebAssemblyFunction::previousInstance(CallFrame* callFrame)
198{
199 ASSERT(callFrame->callee().rawPtr() == m_jsToWasmICCallee.get());
200 auto* result = *bitwise_cast<Wasm::Instance**>(bitwise_cast<char*>(callFrame) + previousInstanceOffset());
201 return result;
202}
203
// Compiles (lazily) the JS->wasm "IC" thunk for this function: a specialized
// entry stub that unboxes JS arguments directly into wasm argument
// registers/stack slots, swaps in the wasm instance context, tail-calls the
// wasm entrypoint, and boxes the result back into a JSValue. Returns nullptr
// when the signature can't be handled (I64 anywhere) or allocation fails; the
// caller then falls back to the generic callWebAssemblyFunction path.
MacroAssemblerCodePtr<JSEntryPtrTag> WebAssemblyFunction::jsCallEntrypointSlow()
{
    VM& vm = *this->vm();
    CCallHelpers jit;

    const auto& signature = Wasm::SignatureInformation::get(signatureIndex());
    const auto& pinnedRegs = Wasm::PinnedRegisterInfo::get();
    RegisterAtOffsetList registersToSpill = usedCalleeSaveRegisters();

    auto& moduleInformation = instance()->instance().module().moduleInformation();

    // Frame layout: callee-save spills, one slot for the previous wasm
    // instance, then the wasm call header — minus the CallerFrameAndPC the
    // call itself pushes.
    unsigned totalFrameSize = registersToSpill.size() * sizeof(CPURegister);
    totalFrameSize += sizeof(CPURegister); // Slot for the VM's previous wasm instance.
    totalFrameSize += Wasm::WasmCallingConvention::headerSizeInBytes();
    totalFrameSize -= sizeof(CallerFrameAndPC);

    // Count how many arguments fit in registers; each overflow argument adds a
    // stack slot to the frame.
    unsigned numGPRs = 0;
    unsigned numFPRs = 0;
    bool argumentsIncludeI64 = false;
    for (unsigned i = 0; i < signature.argumentCount(); i++) {
        switch (signature.argument(i)) {
        case Wasm::I64:
            argumentsIncludeI64 = true;
            break;
        case Wasm::I32:
            if (numGPRs >= Wasm::wasmCallingConvention().m_gprArgs.size())
                totalFrameSize += sizeof(CPURegister);
            ++numGPRs;
            break;
        case Wasm::F32:
        case Wasm::F64:
            if (numFPRs >= Wasm::wasmCallingConvention().m_fprArgs.size())
                totalFrameSize += sizeof(CPURegister);
            ++numFPRs;
            break;
        default:
            RELEASE_ASSERT_NOT_REACHED();
        }
    }

    // I64 has no JS representation here, so we can't build a fast path.
    if (argumentsIncludeI64)
        return nullptr;

    totalFrameSize = WTF::roundUpToMultipleOf(stackAlignmentBytes(), totalFrameSize);

    jit.emitFunctionPrologue();
    jit.subPtr(MacroAssembler::TrustedImm32(totalFrameSize), MacroAssembler::stackPointerRegister);
    jit.store64(CCallHelpers::TrustedImm64(0), CCallHelpers::addressFor(CallFrameSlot::codeBlock));

    // Spill the callee saves into the frame, per the layout computed above.
    for (const RegisterAtOffset& regAtOffset : registersToSpill) {
        GPRReg reg = regAtOffset.reg().gpr();
        ptrdiff_t offset = regAtOffset.offset();
        jit.storePtr(reg, CCallHelpers::Address(GPRInfo::callFrameRegister, offset));
    }

    GPRReg scratchGPR = Wasm::wasmCallingConventionAir().prologueScratch(1);
    GPRReg scratch2GPR = Wasm::wasmCallingConventionAir().prologueScratch(0);
    jit.loadPtr(vm.addressOfSoftStackLimit(), scratch2GPR);

    // Stack check: bail to the slow path on overflow (or stack-pointer
    // corruption, where sp is above the frame pointer).
    CCallHelpers::JumpList slowPath;
    slowPath.append(jit.branchPtr(CCallHelpers::Above, MacroAssembler::stackPointerRegister, GPRInfo::callFrameRegister));
    slowPath.append(jit.branchPtr(CCallHelpers::Below, MacroAssembler::stackPointerRegister, scratch2GPR));

    // Ensure:
    // argCountPlusThis - 1 >= signature.argumentCount()
    // argCountPlusThis >= signature.argumentCount() + 1
    // FIXME: We should handle mismatched arity
    // https://bugs.webkit.org/show_bug.cgi?id=196564
    slowPath.append(jit.branch32(CCallHelpers::Below,
        CCallHelpers::payloadFor(CallFrameSlot::argumentCount), CCallHelpers::TrustedImm32(signature.argumentCount() + 1)));

    if (useTagRegisters())
        jit.emitMaterializeTagCheckRegisters();

    // First we do stack slots for FPRs so we can use FPR argument registers as scratch.
    // After that, we handle FPR argument registers.
    // We also handle all GPR types here as we have GPR scratch registers.
    {
        CCallHelpers::Address calleeFrame = CCallHelpers::Address(MacroAssembler::stackPointerRegister, -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
        numGPRs = 0;
        numFPRs = 0;
        FPRReg scratchFPR = Wasm::wasmCallingConvention().m_fprArgs[0].fpr();

        ptrdiff_t jsOffset = CallFrameSlot::firstArgument * sizeof(EncodedJSValue);

        ptrdiff_t wasmOffset = CallFrame::headerSizeInRegisters * sizeof(CPURegister);
        for (unsigned i = 0; i < signature.argumentCount(); i++) {
            switch (signature.argument(i)) {
            case Wasm::I32:
                jit.load64(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), scratchGPR);
                // Only unboxed int32 JSValues are accepted on the fast path;
                // anything else goes to the slow path (no ToNumber here).
                slowPath.append(jit.branchIfNotInt32(scratchGPR));
                if (numGPRs >= Wasm::wasmCallingConvention().m_gprArgs.size()) {
                    jit.store32(scratchGPR, calleeFrame.withOffset(wasmOffset));
                    wasmOffset += sizeof(CPURegister);
                } else {
                    jit.zeroExtend32ToPtr(scratchGPR, Wasm::wasmCallingConvention().m_gprArgs[numGPRs].gpr());
                    ++numGPRs;
                }
                break;
            case Wasm::F32:
            case Wasm::F64:
                // Stack-slot FPR arguments only here; register FPR arguments
                // are filled in the second pass below.
                if (numFPRs >= Wasm::wasmCallingConvention().m_fprArgs.size()) {
                    jit.load64(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), scratchGPR);
                    slowPath.append(jit.branchIfNotNumber(scratchGPR));
                    auto isInt32 = jit.branchIfInt32(scratchGPR);
                    if (signature.argument(i) == Wasm::F32) {
                        jit.unboxDouble(scratchGPR, scratchGPR, scratchFPR);
                        jit.convertDoubleToFloat(scratchFPR, scratchFPR);
                        jit.storeFloat(scratchFPR, calleeFrame.withOffset(wasmOffset));
                    } else {
                        // F64: strip the JSValue tag by re-adding TagTypeNumber
                        // and store the raw double bits.
                        jit.add64(GPRInfo::tagTypeNumberRegister, scratchGPR, scratchGPR);
                        jit.store64(scratchGPR, calleeFrame.withOffset(wasmOffset));
                    }
                    auto done = jit.jump();

                    isInt32.link(&jit);
                    if (signature.argument(i) == Wasm::F32) {
                        jit.convertInt32ToFloat(scratchGPR, scratchFPR);
                        jit.storeFloat(scratchFPR, calleeFrame.withOffset(wasmOffset));
                    } else {
                        jit.convertInt32ToDouble(scratchGPR, scratchFPR);
                        jit.storeDouble(scratchFPR, calleeFrame.withOffset(wasmOffset));
                    }
                    done.link(&jit);

                    wasmOffset += sizeof(CPURegister);
                } else
                    ++numFPRs;
                break;
            default:
                RELEASE_ASSERT_NOT_REACHED();
            }

            jsOffset += sizeof(EncodedJSValue);
        }
    }

    // Now handle FPR arguments in registers.
    {
        numFPRs = 0;
        ptrdiff_t jsOffset = CallFrameSlot::firstArgument * sizeof(EncodedJSValue);
        for (unsigned i = 0; i < signature.argumentCount(); i++) {
            switch (signature.argument(i)) {
            case Wasm::F32:
            case Wasm::F64:
                if (numFPRs < Wasm::wasmCallingConvention().m_fprArgs.size()) {
                    FPRReg argFPR = Wasm::wasmCallingConvention().m_fprArgs[numFPRs].fpr();
                    jit.load64(CCallHelpers::Address(GPRInfo::callFrameRegister, jsOffset), scratchGPR);
                    slowPath.append(jit.branchIfNotNumber(scratchGPR));
                    auto isInt32 = jit.branchIfInt32(scratchGPR);
                    jit.unboxDouble(scratchGPR, scratchGPR, argFPR);
                    if (signature.argument(i) == Wasm::F32)
                        jit.convertDoubleToFloat(argFPR, argFPR);
                    auto done = jit.jump();

                    isInt32.link(&jit);
                    if (signature.argument(i) == Wasm::F32)
                        jit.convertInt32ToFloat(scratchGPR, argFPR);
                    else
                        jit.convertInt32ToDouble(scratchGPR, argFPR);

                    done.link(&jit);
                    ++numFPRs;
                }
                break;
            default:
                break;
            }

            jsOffset += sizeof(EncodedJSValue);
        }
    }

    // At this point, we're committed to doing a fast call.

    // Save the current wasm context instance into our frame so it can be
    // restored after the call (and found by the unwinder).
    if (Wasm::Context::useFastTLS())
        jit.loadWasmContextInstance(scratchGPR);
    else
        jit.loadPtr(vm.wasmContext.pointerToInstance(), scratchGPR);
    ptrdiff_t previousInstanceOffset = this->previousInstanceOffset();
    jit.storePtr(scratchGPR, CCallHelpers::Address(GPRInfo::callFrameRegister, previousInstanceOffset));

    // Install our own instance as the active wasm context.
    jit.move(CCallHelpers::TrustedImmPtr(&instance()->instance()), scratchGPR);
    if (Wasm::Context::useFastTLS())
        jit.storeWasmContextInstance(scratchGPR);
    else {
        jit.move(scratchGPR, pinnedRegs.wasmContextInstancePointer);
        jit.storePtr(scratchGPR, vm.wasmContext.pointerToInstance());
    }
    // This contains the cached stack limit still.
    jit.storePtr(scratch2GPR, CCallHelpers::Address(scratchGPR, Wasm::Instance::offsetOfCachedStackLimit()));

    // Materialize the pinned memory registers (base, and size unless the
    // signaling memory mode makes bounds checks implicit).
    if (!!moduleInformation.memory) {
        GPRReg baseMemory = pinnedRegs.baseMemoryPointer;

        if (instance()->memoryMode() != Wasm::MemoryMode::Signaling) {
            ASSERT(pinnedRegs.sizeRegister != scratchGPR);
            jit.loadPtr(CCallHelpers::Address(scratchGPR, Wasm::Instance::offsetOfCachedMemorySize()), pinnedRegs.sizeRegister);
        }

        jit.loadPtr(CCallHelpers::Address(scratchGPR, Wasm::Instance::offsetOfCachedMemory()), baseMemory);
    }

    // We use this callee to indicate how to unwind past these types of frames:
    // 1. We need to know where to get callee saves.
    // 2. We need to know to restore the previous wasm context.
    if (!m_jsToWasmICCallee)
        m_jsToWasmICCallee.set(vm, this, JSToWasmICCallee::create(vm, globalObject(), this));
    jit.storePtr(CCallHelpers::TrustedImmPtr(m_jsToWasmICCallee.get()), CCallHelpers::addressFor(CallFrameSlot::callee));

    {
        // FIXME: Currently we just do an indirect jump. But we should teach the Module
        // how to repatch us:
        // https://bugs.webkit.org/show_bug.cgi?id=196570
        jit.loadPtr(entrypointLoadLocation(), scratchGPR);
        jit.call(scratchGPR, WasmEntryPtrTag);
    }

    // Restore the previous wasm context after the call returns.
    ASSERT(!RegisterSet::runtimeTagRegisters().contains(GPRInfo::nonPreservedNonReturnGPR));
    jit.loadPtr(CCallHelpers::Address(GPRInfo::callFrameRegister, previousInstanceOffset), GPRInfo::nonPreservedNonReturnGPR);
    if (Wasm::Context::useFastTLS())
        jit.storeWasmContextInstance(GPRInfo::nonPreservedNonReturnGPR);
    else
        jit.storePtr(GPRInfo::nonPreservedNonReturnGPR, vm.wasmContext.pointerToInstance());

    // Box the wasm return value back into a JSValue in returnValueGPR.
    switch (signature.returnType()) {
    case Wasm::Void:
        jit.moveTrustedValue(jsUndefined(), JSValueRegs { GPRInfo::returnValueGPR });
        break;
    case Wasm::I32:
        jit.zeroExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        jit.boxInt32(GPRInfo::returnValueGPR, JSValueRegs { GPRInfo::returnValueGPR });
        break;
    case Wasm::F32:
        jit.convertFloatToDouble(FPRInfo::returnValueFPR, FPRInfo::returnValueFPR);
        FALLTHROUGH;
    case Wasm::F64: {
        // NaNs must be purified before boxing so they can't spoof tagged values.
        jit.moveTrustedValue(jsNumber(pureNaN()), JSValueRegs { GPRInfo::returnValueGPR });
        auto isNaN = jit.branchIfNaN(FPRInfo::returnValueFPR);
        jit.boxDouble(FPRInfo::returnValueFPR, JSValueRegs { GPRInfo::returnValueGPR });
        isNaN.link(&jit);
        break;
    }
    case Wasm::I64:
    case Wasm::Func:
    case Wasm::Anyfunc:
        return nullptr;
    default:
        break;
    }

    auto emitRestoreCalleeSaves = [&] {
        for (const RegisterAtOffset& regAtOffset : registersToSpill) {
            GPRReg reg = regAtOffset.reg().gpr();
            ASSERT(reg != GPRInfo::returnValueGPR);
            ptrdiff_t offset = regAtOffset.offset();
            jit.loadPtr(CCallHelpers::Address(GPRInfo::callFrameRegister, offset), reg);
        }
    };

    emitRestoreCalleeSaves();

    jit.emitFunctionEpilogue();
    jit.ret();

    // Slow path: restore callee saves and tail into the generic host-call thunk
    // (regT0 carries the callee, matching the host call thunk's expectation).
    slowPath.link(&jit);
    emitRestoreCalleeSaves();
    jit.move(CCallHelpers::TrustedImmPtr(this), GPRInfo::regT0);
    jit.emitFunctionEpilogue();
    auto jumpToHostCallThunk = jit.jump();

    LinkBuffer linkBuffer(jit, nullptr, JITCompilationCanFail);
    if (UNLIKELY(linkBuffer.didFailToAllocate()))
        return nullptr;

    linkBuffer.link(jumpToHostCallThunk, CodeLocationLabel<JSEntryPtrTag>(executable()->entrypointFor(CodeForCall, MustCheckArity).executableAddress()));
    m_jsCallEntrypoint = FINALIZE_CODE(linkBuffer, WasmEntryPtrTag, "JS->Wasm IC");
    return m_jsCallEntrypoint.code();
}
483
484WebAssemblyFunction* WebAssemblyFunction::create(VM& vm, JSGlobalObject* globalObject, Structure* structure, unsigned length, const String& name, JSWebAssemblyInstance* instance, Wasm::Callee& jsEntrypoint, Wasm::WasmToWasmImportableFunction::LoadLocation wasmToWasmEntrypointLoadLocation, Wasm::SignatureIndex signatureIndex)
485{
486 NativeExecutable* executable = vm.getHostFunction(callWebAssemblyFunction, NoIntrinsic, callHostFunctionAsConstructor, nullptr, name);
487 WebAssemblyFunction* function = new (NotNull, allocateCell<WebAssemblyFunction>(vm.heap)) WebAssemblyFunction(vm, globalObject, structure, jsEntrypoint, wasmToWasmEntrypointLoadLocation, signatureIndex);
488 function->finishCreation(vm, executable, length, name, instance);
489 ASSERT_WITH_MESSAGE(!function->isLargeAllocation(), "WebAssemblyFunction should be allocated not in large allocation since it is JSCallee.");
490 return function;
491}
492
493Structure* WebAssemblyFunction::createStructure(VM& vm, JSGlobalObject* globalObject, JSValue prototype)
494{
495 ASSERT(globalObject);
496 return Structure::create(vm, globalObject, prototype, TypeInfo(JSFunctionType, StructureFlags), info());
497}
498
// Constructs the cell. The wasm-side JS entrypoint is cached eagerly from the
// callee; the (signature, load-location) pair identifies this function for
// wasm->wasm imports. The rest of initialization happens in finishCreation.
WebAssemblyFunction::WebAssemblyFunction(VM& vm, JSGlobalObject* globalObject, Structure* structure, Wasm::Callee& jsEntrypoint, Wasm::WasmToWasmImportableFunction::LoadLocation wasmToWasmEntrypointLoadLocation, Wasm::SignatureIndex signatureIndex)
    : Base { vm, globalObject, structure }
    , m_jsEntrypoint { jsEntrypoint.entrypoint() }
    , m_importableFunction { signatureIndex, wasmToWasmEntrypointLoadLocation }
{ }
504
505void WebAssemblyFunction::visitChildren(JSCell* cell, SlotVisitor& visitor)
506{
507 WebAssemblyFunction* thisObject = jsCast<WebAssemblyFunction*>(cell);
508 ASSERT_GC_OBJECT_INHERITS(thisObject, info());
509
510 Base::visitChildren(thisObject, visitor);
511 visitor.append(thisObject->m_jsToWasmICCallee);
512}
513
514void WebAssemblyFunction::destroy(JSCell* cell)
515{
516 static_cast<WebAssemblyFunction*>(cell)->WebAssemblyFunction::~WebAssemblyFunction();
517}
518
519} // namespace JSC
520
521#endif // ENABLE(WEBASSEMBLY)
522