/*
 * Copyright (C) 2016-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "WasmToJS.h"

#if ENABLE(WEBASSEMBLY)

#include "CCallHelpers.h"
#include "FrameTracers.h"
#include "JITExceptions.h"
#include "JSCInlines.h"
#include "JSWebAssemblyInstance.h"
#include "JSWebAssemblyRuntimeError.h"
#include "LinkBuffer.h"
#include "NativeErrorConstructor.h"
#include "ThunkGenerators.h"
#include "WasmCallingConvention.h"
#include "WasmContext.h"
#include "WasmExceptionType.h"
#include "WasmInstance.h"
#include "WasmSignatureInlines.h"

namespace JSC { namespace Wasm {

using JIT = CCallHelpers;

static void materializeImportJSCell(JIT& jit, unsigned importIndex, GPRReg result)
{
    // We're calling out of the current WebAssembly.Instance. That Instance has a list of all its import functions.
    jit.loadWasmContextInstance(result);
    jit.loadPtr(JIT::Address(result, Instance::offsetOfImportFunction(importIndex)), result);
}

static Expected<MacroAssemblerCodeRef<WasmEntryPtrTag>, BindingFailure> handleBadI64Use(VM* vm, JIT& jit, const Signature& signature, unsigned importIndex)
{
    unsigned argCount = signature.argumentCount();

    bool hasBadI64Use = false;
    hasBadI64Use |= signature.returnType() == I64;
    for (unsigned argNum = 0; argNum < argCount && !hasBadI64Use; ++argNum) {
        Type argType = signature.argument(argNum);
        switch (argType) {
        case Void:
        case Func:
        case Anyfunc:
            RELEASE_ASSERT_NOT_REACHED();

        case I64: {
            hasBadI64Use = true;
            break;
        }

        default:
            break;
        }
    }

    if (hasBadI64Use) {
        jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm->topEntryFrame);
        jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
        jit.loadWasmContextInstance(GPRInfo::argumentGPR1);

        // Store Callee.
        jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR1, Instance::offsetOfOwner()), GPRInfo::argumentGPR1);
        jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR1, JSWebAssemblyInstance::offsetOfCallee()), GPRInfo::argumentGPR2);
        jit.storePtr(GPRInfo::argumentGPR2, JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));

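        // The generic unwinder and the LLInt catch handler expect a JSCell in the callee slot
        // so they can recover the VM (see the note at the bottom of this file in
        // wasmToJSException), which is why we materialized the JSWebAssemblyInstance's callee
        // above before emitting the call.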
        auto call = jit.call(OperationPtrTag);
        jit.jumpToExceptionHandler(*vm);

        void (*throwBadI64)(ExecState*, JSWebAssemblyInstance*) = [] (ExecState* exec, JSWebAssemblyInstance* instance) -> void {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);

            {
                auto throwScope = DECLARE_THROW_SCOPE(*vm);
                JSGlobalObject* globalObject = instance->globalObject(*vm);
                auto* error = ErrorInstance::create(exec, *vm, globalObject->errorStructure(ErrorType::TypeError), "i64 not allowed as return type or argument to an imported function"_s);
                throwException(exec, throwScope, error);
            }

            genericUnwind(vm, exec);
            ASSERT(!!vm->callFrameForCatch);
        };

        LinkBuffer linkBuffer(jit, GLOBAL_THUNK_ID, JITCompilationCanFail);
        if (UNLIKELY(linkBuffer.didFailToAllocate()))
            return makeUnexpected(BindingFailure::OutOfMemory);

        linkBuffer.link(call, FunctionPtr<OperationPtrTag>(throwBadI64));
        return FINALIZE_CODE(linkBuffer, WasmEntryPtrTag, "WebAssembly->JavaScript invalid i64 use in import[%i]", importIndex);
    }

    return MacroAssemblerCodeRef<WasmEntryPtrTag>();
}

Expected<MacroAssemblerCodeRef<WasmEntryPtrTag>, BindingFailure> wasmToJS(VM* vm, Bag<CallLinkInfo>& callLinkInfos, SignatureIndex signatureIndex, unsigned importIndex)
{
    // FIXME: This function doesn't properly abstract away the calling convention.
    // It'd be super easy to do so: https://bugs.webkit.org/show_bug.cgi?id=169401
    const WasmCallingConvention& wasmCC = wasmCallingConvention();
    const JSCCallingConvention& jsCC = jscCallingConvention();
    const Signature& signature = SignatureInformation::get(signatureIndex);
    unsigned argCount = signature.argumentCount();
    JIT jit;

    // Note: WasmB3IRGenerator assumes that this stub treats SP as a callee save.
    // If we ever change this, we will also need to change WasmB3IRGenerator.

    // Below, we assume that the JS calling convention is always on the stack.
    ASSERT(!jsCC.m_gprArgs.size());
    ASSERT(!jsCC.m_fprArgs.size());

    jit.emitFunctionPrologue();
    jit.store64(JIT::TrustedImm64(0), JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::codeBlock * static_cast<int>(sizeof(Register)))); // FIXME: Stop using 0 as codeBlocks. https://bugs.webkit.org/show_bug.cgi?id=165321

    auto badI64 = handleBadI64Use(vm, jit, signature, importIndex);
    if (!badI64 || badI64.value())
        return badI64;

    // Here we assume that the JS calling convention saves at least all the wasm callee saves.
    // We therefore don't need to save and restore any more registers, since the wasm callee has
    // already taken care of this.
    RegisterSet missingCalleeSaves = wasmCC.m_calleeSaveRegisters;
    missingCalleeSaves.exclude(jsCC.m_calleeSaveRegisters);
    ASSERT(missingCalleeSaves.isEmpty());

    if (!Options::useCallICsForWebAssemblyToJSCalls()) {
        ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(argCount * sizeof(uint64_t));
        char* buffer = argCount ? static_cast<char*>(scratchBuffer->dataBuffer()) : nullptr;
        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned bufferOffset = 0;
        unsigned frOffset = CallFrame::headerSizeInRegisters * static_cast<int>(sizeof(Register));
        const GPRReg scratchGPR = GPRInfo::regCS0;
        jit.subPtr(MacroAssembler::TrustedImm32(WTF::roundUpToMultipleOf(stackAlignmentBytes(), sizeof(Register))), MacroAssembler::stackPointerRegister);
        jit.storePtr(scratchGPR, MacroAssembler::Address(MacroAssembler::stackPointerRegister));

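        // A sketch of this non-IC path: walk the signature in argument order, pull each argument
        // from the register the wasm calling convention assigned to it (or from the caller's
        // stack slots once registers run out), and spill everything as raw 64-bit values into a
        // flat scratch buffer. The C++ helper below (callFunc) then re-reads the buffer, boxes
        // each slot into a JSValue, and makes a fully generic JS call.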
        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature.argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
            case I64:
                RELEASE_ASSERT_NOT_REACHED();
            case Anyref:
            case I32: {
                GPRReg gprReg;
                if (marshalledGPRs < wasmCC.m_gprArgs.size())
                    gprReg = wasmCC.m_gprArgs[marshalledGPRs].gpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    gprReg = GPRInfo::argumentGPR0;
                    jit.load64(JIT::Address(GPRInfo::callFrameRegister, frOffset), gprReg);
                    frOffset += sizeof(Register);
                }
                if (argType == I32)
                    jit.zeroExtend32ToPtr(gprReg, gprReg);
                jit.store64(gprReg, buffer + bufferOffset);
                ++marshalledGPRs;
                break;
            }
            case F32: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadFloat(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.convertFloatToDouble(fprReg, fprReg);
                jit.moveDoubleTo64(fprReg, scratchGPR);
                jit.store64(scratchGPR, buffer + bufferOffset);
                ++marshalledFPRs;
                break;
            }
            case F64: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadDouble(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.moveDoubleTo64(fprReg, scratchGPR);
                jit.store64(scratchGPR, buffer + bufferOffset);
                ++marshalledFPRs;
                break;
            }
            }

            bufferOffset += sizeof(Register);
        }
        jit.loadPtr(MacroAssembler::Address(MacroAssembler::stackPointerRegister), scratchGPR);
        if (argCount) {
            // The GC should not look at this buffer at all, these aren't JSValues.
            jit.move(CCallHelpers::TrustedImmPtr(scratchBuffer->addressOfActiveLength()), GPRInfo::argumentGPR0);
            jit.storePtr(CCallHelpers::TrustedImmPtr(nullptr), GPRInfo::argumentGPR0);
        }

        uint64_t (*callFunc)(ExecState*, JSObject*, SignatureIndex, uint64_t*) =
            [] (ExecState* exec, JSObject* callee, SignatureIndex signatureIndex, uint64_t* buffer) -> uint64_t {
                VM* vm = &exec->vm();
                NativeCallFrameTracer tracer(vm, exec);
                auto throwScope = DECLARE_THROW_SCOPE(*vm);
                const Signature& signature = SignatureInformation::get(signatureIndex);
                MarkedArgumentBuffer args;
                for (unsigned argNum = 0; argNum < signature.argumentCount(); ++argNum) {
                    Type argType = signature.argument(argNum);
                    JSValue arg;
                    switch (argType) {
                    case Void:
                    case Func:
                    case Anyfunc:
                    case I64:
                        RELEASE_ASSERT_NOT_REACHED();
                    case I32:
                        arg = jsNumber(static_cast<int32_t>(buffer[argNum]));
                        break;
                    case Anyref:
                        // FIXME: We need to box wasm Funcrefs once they are supported here.
                        arg = JSValue::decode(buffer[argNum]);
                        break;
                    case F32:
                    case F64:
                        arg = jsNumber(purifyNaN(bitwise_cast<double>(buffer[argNum])));
                        break;
                    }
                    args.append(arg);
                }
                if (UNLIKELY(args.hasOverflowed())) {
                    throwOutOfMemoryError(exec, throwScope);
                    return 0;
                }

                CallData callData;
                CallType callType = callee->methodTable(*vm)->getCallData(callee, callData);
                RELEASE_ASSERT(callType != CallType::None);
                JSValue result = call(exec, callee, callType, callData, jsUndefined(), args);
                RETURN_IF_EXCEPTION(throwScope, 0);

                uint64_t realResult;
                switch (signature.returnType()) {
                case Func:
                case Anyfunc:
                case I64:
                    RELEASE_ASSERT_NOT_REACHED();
                    break;
                case Void:
                    break;
                case I32: {
                    realResult = static_cast<uint64_t>(static_cast<uint32_t>(result.toInt32(exec)));
                    break;
                }
                case Anyref: {
                    realResult = JSValue::encode(result);
                    break;
                }
                case F64:
                case F32: {
                    realResult = bitwise_cast<uint64_t>(result.toNumber(exec));
                    break;
                }
                }

                RETURN_IF_EXCEPTION(throwScope, 0);
                return realResult;
            };

        jit.loadWasmContextInstance(GPRInfo::argumentGPR0);
        jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR0, Instance::offsetOfOwner()), GPRInfo::argumentGPR0);
        jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR0, JSWebAssemblyInstance::offsetOfCallee()), GPRInfo::argumentGPR0);
        jit.storePtr(GPRInfo::argumentGPR0, JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));

        materializeImportJSCell(jit, importIndex, GPRInfo::argumentGPR1);

        static_assert(GPRInfo::numberOfArgumentRegisters >= 4, "We rely on this with the call below.");
        static_assert(sizeof(SignatureIndex) == sizeof(uint64_t), "Following code assumes SignatureIndex is 64bit.");
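        // Note (an assumption from reading CCallHelpers, not guaranteed by this file): the
        // templated setupArguments<> deduces the C signature from callFunc's type and supplies
        // the leading ExecState* itself from the call frame register, which is why only the
        // remaining three arguments are passed below.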
        jit.setupArguments<decltype(callFunc)>(GPRInfo::argumentGPR1, CCallHelpers::TrustedImm64(signatureIndex), CCallHelpers::TrustedImmPtr(buffer));
        auto call = jit.call(OperationPtrTag);
        auto noException = jit.emitExceptionCheck(*vm, AssemblyHelpers::InvertedExceptionCheck);

        // An exception was raised during the call to JS: copy the callee saves, unwind, and
        // jump to the exception handler.
        jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm->topEntryFrame);
        jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
        void (*doUnwinding)(ExecState*) = [] (ExecState* exec) -> void {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            genericUnwind(vm, exec);
            ASSERT(!!vm->callFrameForCatch);
        };
        auto exceptionCall = jit.call(OperationPtrTag);
        jit.jumpToExceptionHandler(*vm);

        noException.link(&jit);
        switch (signature.returnType()) {
        case F64: {
            jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
            break;
        }
        case F32: {
            jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
            jit.convertDoubleToFloat(FPRInfo::returnValueFPR, FPRInfo::returnValueFPR);
            break;
        }
        default:
            break;
        }

        jit.emitFunctionEpilogue();
        jit.ret();

        LinkBuffer linkBuffer(jit, GLOBAL_THUNK_ID, JITCompilationCanFail);
        if (UNLIKELY(linkBuffer.didFailToAllocate()))
            return makeUnexpected(BindingFailure::OutOfMemory);

        linkBuffer.link(call, FunctionPtr<OperationPtrTag>(callFunc));
        linkBuffer.link(exceptionCall, FunctionPtr<OperationPtrTag>(doUnwinding));

        return FINALIZE_CODE(linkBuffer, WasmEntryPtrTag, "WebAssembly->JavaScript import[%i] %s", importIndex, signature.toString().ascii().data());
    }

    // Note: We don't need to perform a stack check here since WasmB3IRGenerator
    // will do the stack check for us. Whenever it detects that it might make
    // a call to this thunk, it'll make sure its stack check includes space
    // for us here.

    const unsigned numberOfParameters = argCount + 1; // There is a "this" argument.
    const unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
    const unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
    const unsigned stackOffset = WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
    jit.subPtr(MacroAssembler::TrustedImm32(stackOffset), MacroAssembler::stackPointerRegister);
    JIT::Address calleeFrame = CCallHelpers::Address(MacroAssembler::stackPointerRegister, -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
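    // Layout note: the call sequence and the callee's prologue will write CallerFrameAndPC
    // immediately below the current stack pointer, so the callee's CallFrame* will be
    // sp - sizeof(CallerFrameAndPC). Addressing the outgoing slots as
    // calleeFrame.withOffset(slot * sizeof(Register)) therefore reproduces exactly the layout
    // the callee sees through its own frame pointer.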

    // FIXME: Make these loops switch on the Signature if there are many arguments on the stack.
    // The generated code is otherwise huge for huge signatures. https://bugs.webkit.org/show_bug.cgi?id=165547

    // First go through the integer parameters, freeing up their registers for use afterwards.
    {
        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned calleeFrameOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
        unsigned frOffset = CallFrame::headerSizeInRegisters * static_cast<int>(sizeof(Register));
        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature.argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
            case I64:
                RELEASE_ASSERT_NOT_REACHED(); // Handled above.
            case Anyref:
            case I32: {
                GPRReg gprReg;
                if (marshalledGPRs < wasmCC.m_gprArgs.size())
                    gprReg = wasmCC.m_gprArgs[marshalledGPRs].gpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    gprReg = GPRInfo::argumentGPR0;
                    jit.load64(JIT::Address(GPRInfo::callFrameRegister, frOffset), gprReg);
                    frOffset += sizeof(Register);
                }
                ++marshalledGPRs;
                if (argType == I32) {
                    jit.zeroExtend32ToPtr(gprReg, gprReg); // Clear the high 32 bits; boxInt32 then installs the Int32 tag.
                    jit.boxInt32(gprReg, JSValueRegs(gprReg), DoNotHaveTagRegisters);
                }
                // FIXME: We need to box wasm Funcrefs once they are supported here.
                jit.store64(gprReg, calleeFrame.withOffset(calleeFrameOffset));
                calleeFrameOffset += sizeof(Register);
                break;
            }
            case F32:
            case F64:
                // Skipped: handled below.
                if (marshalledFPRs >= wasmCC.m_fprArgs.size())
                    frOffset += sizeof(Register);
                ++marshalledFPRs;
                calleeFrameOffset += sizeof(Register);
                break;
            }
        }
    }

    {
        // Integer registers have already been spilled, these are now available.
        GPRReg doubleEncodeOffsetGPRReg = GPRInfo::argumentGPR0;
        GPRReg scratch = GPRInfo::argumentGPR1;
        bool hasMaterializedDoubleEncodeOffset = false;
        auto materializeDoubleEncodeOffset = [&hasMaterializedDoubleEncodeOffset, &jit] (GPRReg dest) {
            if (!hasMaterializedDoubleEncodeOffset) {
                static_assert(DoubleEncodeOffset == 1ll << 48, "codegen assumes this below");
                jit.move(JIT::TrustedImm32(1), dest);
                jit.lshift64(JIT::TrustedImm32(48), dest);
                hasMaterializedDoubleEncodeOffset = true;
            }
        };

        unsigned marshalledGPRs = 0;
        unsigned marshalledFPRs = 0;
        unsigned calleeFrameOffset = CallFrameSlot::firstArgument * static_cast<int>(sizeof(Register));
        unsigned frOffset = CallFrame::headerSizeInRegisters * static_cast<int>(sizeof(Register));

        auto marshallFPR = [&] (FPRReg fprReg) {
            jit.purifyNaN(fprReg);
            jit.moveDoubleTo64(fprReg, scratch);
            materializeDoubleEncodeOffset(doubleEncodeOffsetGPRReg);
            jit.add64(doubleEncodeOffsetGPRReg, scratch);
            jit.store64(scratch, calleeFrame.withOffset(calleeFrameOffset));
            calleeFrameOffset += sizeof(Register);
            ++marshalledFPRs;
        };
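        // A sketch of the 64-bit JSValue encoding this relies on (see JSCJSValue.h for the
        // authoritative description): a double is boxed by adding DoubleEncodeOffset (1 << 48)
        // to its raw bit pattern, keeping boxed doubles disjoint from pointers and from int32s
        // tagged with TagTypeNumber. For example, 1.0 (bits 0x3ff0000000000000) boxes to
        // 0x3ff0000000000000 + 0x0001000000000000 = 0x4000000000000000. purifyNaN runs first so
        // that an impure NaN payload cannot alias a tagged value once the offset is added.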

        for (unsigned argNum = 0; argNum < argCount; ++argNum) {
            Type argType = signature.argument(argNum);
            switch (argType) {
            case Void:
            case Func:
            case Anyfunc:
            case I64:
                RELEASE_ASSERT_NOT_REACHED(); // Handled above.
            case Anyref:
            case I32:
                // Skipped: handled above.
                if (marshalledGPRs >= wasmCC.m_gprArgs.size())
                    frOffset += sizeof(Register);
                ++marshalledGPRs;
                calleeFrameOffset += sizeof(Register);
                break;
            case F32: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadFloat(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                jit.convertFloatToDouble(fprReg, fprReg);
                marshallFPR(fprReg);
                break;
            }
            case F64: {
                FPRReg fprReg;
                if (marshalledFPRs < wasmCC.m_fprArgs.size())
                    fprReg = wasmCC.m_fprArgs[marshalledFPRs].fpr();
                else {
                    // We've already spilled all arguments, these registers are available as scratch.
                    fprReg = FPRInfo::argumentFPR0;
                    jit.loadDouble(JIT::Address(GPRInfo::callFrameRegister, frOffset), fprReg);
                    frOffset += sizeof(Register);
                }
                marshallFPR(fprReg);
                break;
            }
            }
        }
    }

    jit.loadWasmContextInstance(GPRInfo::argumentGPR0);
    jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR0, Instance::offsetOfOwner()), GPRInfo::argumentGPR0);
    jit.loadPtr(CCallHelpers::Address(GPRInfo::argumentGPR0, JSWebAssemblyInstance::offsetOfCallee()), GPRInfo::argumentGPR0);
    jit.storePtr(GPRInfo::argumentGPR0, JIT::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));

    GPRReg importJSCellGPRReg = GPRInfo::regT0; // Callee needs to be in regT0 for slow path below.

    ASSERT(!wasmCC.m_calleeSaveRegisters.get(importJSCellGPRReg));
    materializeImportJSCell(jit, importIndex, importJSCellGPRReg);

    jit.store64(importJSCellGPRReg, calleeFrame.withOffset(CallFrameSlot::callee * static_cast<int>(sizeof(Register))));
    jit.store32(JIT::TrustedImm32(numberOfParameters), calleeFrame.withOffset(CallFrameSlot::argumentCount * static_cast<int>(sizeof(Register)) + PayloadOffset));
    jit.store64(JIT::TrustedImm64(ValueUndefined), calleeFrame.withOffset(CallFrameSlot::thisArgument * static_cast<int>(sizeof(Register))));
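    // At this point the outgoing frame is fully populated. Roughly, one 8-byte Register per
    // slot, at offsets relative to the callee's eventual CallFrame*:
    //
    //     callerFrame / returnPC    <- written by the call sequence and prologue
    //     codeBlock                 <- left for the callee to fill in
    //     callee                    =  the import's JSObject, stored above
    //     argumentCount             =  argCount + 1 (the count includes |this|)
    //     this                      =  undefined
    //     argument 0..argCount-1    =  the boxed wasm arguments marshalled above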

    // FIXME: Tail call if the wasm return type is void and no registers were spilled. https://bugs.webkit.org/show_bug.cgi?id=165488

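    // What follows is the standard JSC call IC shape (a summary, not upstream commentary):
    // branchPtrWithPatch compares the callee against a patchable pointer that starts out null,
    // so the first call always takes the slow path. The slow path invokes the link-call thunk
    // (linked in below via linkCallThunkGenerator), which resolves the callee, patches the
    // comparison constant and the near-call target, and later calls with the same callee then
    // take the fast path.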
    CallLinkInfo* callLinkInfo = callLinkInfos.add();
    callLinkInfo->setUpCall(CallLinkInfo::Call, CodeOrigin(), importJSCellGPRReg);
    JIT::DataLabelPtr targetToCheck;
    JIT::TrustedImmPtr initialRightValue(nullptr);
    JIT::Jump slowPath = jit.branchPtrWithPatch(MacroAssembler::NotEqual, importJSCellGPRReg, targetToCheck, initialRightValue);
    JIT::Call fastCall = jit.nearCall();
    JIT::Jump done = jit.jump();
    slowPath.link(&jit);
    // Callee needs to be in regT0 here.
    jit.move(MacroAssembler::TrustedImmPtr(callLinkInfo), GPRInfo::regT2); // Link info needs to be in regT2.
    JIT::Call slowCall = jit.nearCall();
    done.link(&jit);

    CCallHelpers::JumpList exceptionChecks;

    switch (signature.returnType()) {
    case Void:
        // Discard.
        break;
    case Func:
    case Anyfunc:
        // For the JavaScript embedding, imports with these types in their signature return are a WebAssembly.Module validation error.
        RELEASE_ASSERT_NOT_REACHED();
        break;
    case I64: {
        RELEASE_ASSERT_NOT_REACHED(); // Handled above.
    }
    case I32: {
        CCallHelpers::JumpList done;
        CCallHelpers::JumpList slowPath;

        int32_t (*convertToI32)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> int32_t {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return v.toInt32(exec);
        };

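        // Fast path: if the returned JSValue is already a boxed int32, unbox it inline.
        // Everything else (doubles included) goes out of line to convertToI32, since ToInt32 on
        // a non-number can run arbitrary user code (e.g. valueOf) and can throw.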
        slowPath.append(jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters));
        slowPath.append(jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters));
        jit.zeroExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        done.append(jit.jump());

        slowPath.link(&jit);
        jit.setupArguments<decltype(convertToI32)>(GPRInfo::returnValueGPR);
        auto call = jit.call(OperationPtrTag);
        exceptionChecks.append(jit.emitJumpIfException(*vm));

        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, FunctionPtr<OperationPtrTag>(convertToI32));
        });

        done.link(&jit);
        break;
    }
    case Anyref:
        break;
    case F32: {
        CCallHelpers::JumpList done;

        float (*convertToF32)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> float {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return static_cast<float>(v.toNumber(exec));
        };

        auto notANumber = jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters);
        auto isDouble = jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters);
        // We're an int32
        jit.signExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        jit.convertInt64ToFloat(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        isDouble.link(&jit);
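        // Inline double unbox: TagTypeNumber is 0xffff000000000000, which is -(1 << 48) as a
        // two's-complement 64-bit value, so adding it undoes the DoubleEncodeOffset added when
        // the double was boxed and recovers the raw IEEE 754 bits.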
        jit.move(JIT::TrustedImm64(TagTypeNumber), GPRInfo::returnValueGPR2);
        jit.add64(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
        jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        jit.convertDoubleToFloat(FPRInfo::returnValueFPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        notANumber.link(&jit);
        jit.setupArguments<decltype(convertToF32)>(GPRInfo::returnValueGPR);
        auto call = jit.call(OperationPtrTag);
        exceptionChecks.append(jit.emitJumpIfException(*vm));

        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, FunctionPtr<OperationPtrTag>(convertToF32));
        });

        done.link(&jit);
        break;
    }
    case F64: {
        CCallHelpers::JumpList done;

        double (*convertToF64)(ExecState*, JSValue) = [] (ExecState* exec, JSValue v) -> double {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            return v.toNumber(exec);
        };

        auto notANumber = jit.branchIfNotNumber(GPRInfo::returnValueGPR, DoNotHaveTagRegisters);
        auto isDouble = jit.branchIfNotInt32(JSValueRegs(GPRInfo::returnValueGPR), DoNotHaveTagRegisters);
        // We're an int32
        jit.signExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
        jit.convertInt64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        isDouble.link(&jit);
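        // Same inline unboxing as the F32 case above: adding TagTypeNumber subtracts
        // DoubleEncodeOffset, recovering the raw double bits.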
        jit.move(JIT::TrustedImm64(TagTypeNumber), GPRInfo::returnValueGPR2);
        jit.add64(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
        jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
        done.append(jit.jump());

        notANumber.link(&jit);
        jit.setupArguments<decltype(convertToF64)>(GPRInfo::returnValueGPR);
        auto call = jit.call(OperationPtrTag);
        exceptionChecks.append(jit.emitJumpIfException(*vm));

        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, FunctionPtr<OperationPtrTag>(convertToF64));
        });

        done.link(&jit);
        break;
    }
    }

    jit.emitFunctionEpilogue();
    jit.ret();

    if (!exceptionChecks.empty()) {
        exceptionChecks.link(&jit);
        jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm->topEntryFrame);
        jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
        auto call = jit.call(OperationPtrTag);
        jit.jumpToExceptionHandler(*vm);

        void (*doUnwinding)(ExecState*) = [] (ExecState* exec) -> void {
            VM* vm = &exec->vm();
            NativeCallFrameTracer tracer(vm, exec);
            genericUnwind(vm, exec);
            ASSERT(!!vm->callFrameForCatch);
        };

        jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
            linkBuffer.link(call, FunctionPtr<OperationPtrTag>(doUnwinding));
        });
    }

    LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID, JITCompilationCanFail);
    if (UNLIKELY(patchBuffer.didFailToAllocate()))
        return makeUnexpected(BindingFailure::OutOfMemory);

    patchBuffer.link(slowCall, FunctionPtr<JITThunkPtrTag>(vm->getCTIStub(linkCallThunkGenerator).code()));
    CodeLocationLabel<JSInternalPtrTag> callReturnLocation(patchBuffer.locationOfNearCall<JSInternalPtrTag>(slowCall));
    CodeLocationLabel<JSInternalPtrTag> hotPathBegin(patchBuffer.locationOf<JSInternalPtrTag>(targetToCheck));
    CodeLocationNearCall<JSInternalPtrTag> hotPathOther = patchBuffer.locationOfNearCall<JSInternalPtrTag>(fastCall);
    callLinkInfo->setCallLocations(callReturnLocation, hotPathBegin, hotPathOther);

    return FINALIZE_CODE(patchBuffer, WasmEntryPtrTag, "WebAssembly->JavaScript import[%i] %s", importIndex, signature.toString().ascii().data());
}

void* wasmToJSException(ExecState* exec, Wasm::ExceptionType type, Instance* wasmInstance)
{
    wasmInstance->storeTopCallFrame(exec);
    JSWebAssemblyInstance* instance = wasmInstance->owner<JSWebAssemblyInstance>();
    JSGlobalObject* globalObject = instance->globalObject();

    // Do not retrieve VM& from ExecState since ExecState's callee is not a JSCell.
    VM& vm = globalObject->vm();

    {
        auto throwScope = DECLARE_THROW_SCOPE(vm);

        JSObject* error;
        if (type == ExceptionType::StackOverflow)
            error = createStackOverflowError(exec, globalObject);
        else
            error = JSWebAssemblyRuntimeError::create(exec, vm, globalObject->webAssemblyRuntimeErrorStructure(), Wasm::errorMessageForExceptionType(type));
        throwException(exec, throwScope, error);
    }

    genericUnwind(&vm, exec);
    ASSERT(!!vm.callFrameForCatch);
    ASSERT(!!vm.targetMachinePCForThrow);
    // FIXME: We could make this better:
    // This is a total hack, but the llint (both op_catch and handleUncaughtException)
    // requires a cell in the callee field to load the VM. (The baseline JIT does not require
    // this since it is compiled with a constant VM pointer.) We could make the calling convention
    // for exceptions first load callFrameForCatch into the call frame register before jumping
    // to the exception handler. If we did this, we could remove this terrible hack.
    // https://bugs.webkit.org/show_bug.cgi?id=170440
    bitwise_cast<uint64_t*>(exec)[CallFrameSlot::callee] = bitwise_cast<uint64_t>(instance->webAssemblyToJSCallee());
    return vm.targetMachinePCForThrow;
}

} } // namespace JSC::Wasm

#endif // ENABLE(WEBASSEMBLY)