/*
 * Copyright (C) 2008-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "CodeBlock.h"
#include "Interpreter.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "JSCInlines.h"
#include "LinkBuffer.h"
#include "OpcodeInlines.h"
#include "ResultType.h"
#include "SetupVarargsFrame.h"
#include "StackAlignment.h"
#include "ThunkGenerators.h"
#include <wtf/StringPrintStream.h>

namespace JSC {

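// Record the call result for value profiling, then store it (tag in regT1,
// payload in regT0) into the destination virtual register.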
template<typename Op>
void JIT::emitPutCallResult(const Op& bytecode)
{
    emitValueProfilingSite(bytecode.metadata(m_codeBlock));
    emitStore(bytecode.m_dst.offset(), regT1, regT0);
}

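// op_ret: load the return value into regT1:regT0 (tag:payload), restore the
// callee-saved registers, tear down the call frame, and return to the caller.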
void JIT::emit_op_ret(const Instruction* currentInstruction)
{
    auto bytecode = currentInstruction->as<OpRet>();
    int value = bytecode.m_value.offset();

    emitLoad(value, regT1, regT0);

    checkStackPointerAlignment();
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

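// The per-opcode entry points below dispatch to the shared templates, handing
// out a fresh CallLinkInfo index for each call site. op_call_eval is the
// exception: it does not allocate a CallLinkInfo, so its index is not bumped.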
void JIT::emitSlow_op_call(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase<OpCall>(currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_tail_call(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase<OpTailCall>(currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_call_eval(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase<OpCallEval>(currentInstruction, iter, m_callLinkInfoIndex);
}

void JIT::emitSlow_op_call_varargs(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase<OpCallVarargs>(currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_tail_call_varargs(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase<OpTailCallVarargs>(currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_tail_call_forward_arguments(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase<OpTailCallForwardArguments>(currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_construct_varargs(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase<OpConstructVarargs>(currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_construct(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase<OpConstruct>(currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emit_op_call(const Instruction* currentInstruction)
{
    compileOpCall<OpCall>(currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_tail_call(const Instruction* currentInstruction)
{
    compileOpCall<OpTailCall>(currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(const Instruction* currentInstruction)
{
    compileOpCall<OpCallEval>(currentInstruction, m_callLinkInfoIndex);
}

void JIT::emit_op_call_varargs(const Instruction* currentInstruction)
{
    compileOpCall<OpCallVarargs>(currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_tail_call_varargs(const Instruction* currentInstruction)
{
    compileOpCall<OpTailCallVarargs>(currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_tail_call_forward_arguments(const Instruction* currentInstruction)
{
    compileOpCall<OpTailCallForwardArguments>(currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_construct_varargs(const Instruction* currentInstruction)
{
    compileOpCall<OpConstructVarargs>(currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_construct(const Instruction* currentInstruction)
{
    compileOpCall<OpConstruct>(currentInstruction, m_callLinkInfoIndex++);
}

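// Frame set-up for calls whose argument count is known statically (i.e. every
// form except the varargs and forwarded-arguments calls).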
template <typename Op>
std::enable_if_t<
    Op::opcodeID != op_call_varargs && Op::opcodeID != op_construct_varargs
    && Op::opcodeID != op_tail_call_varargs && Op::opcodeID != op_tail_call_forward_arguments
, void>
JIT::compileSetupFrame(const Op& bytecode, CallLinkInfo*)
{
    auto& metadata = bytecode.metadata(m_codeBlock);
    int argCount = bytecode.m_argc;
    int registerOffset = -static_cast<int>(bytecode.m_argv);

    if (Op::opcodeID == op_call && shouldEmitProfiling()) {
        // Record the StructureID of the 'this' argument in the call's array
        // profile, but only if it is a cell.
        emitLoad(registerOffset + CallFrame::argumentOffsetIncludingThis(0), regT0, regT1);
        Jump done = branchIfNotCell(regT0);
        load32(Address(regT1, JSCell::structureIDOffset()), regT1);
        store32(regT1, metadata.m_callLinkInfo.m_arrayProfile.addressOfLastSeenStructureID());
        done.link(this);
    }

    // Point the stack pointer at the new frame and store the argument count
    // into its ArgumentCount slot.
    addPtr(TrustedImm32(registerOffset * sizeof(Register) + sizeof(CallerFrameAndPC)), callFrameRegister, stackPointerRegister);
    store32(TrustedImm32(argCount), Address(stackPointerRegister, CallFrameSlot::argumentCount * static_cast<int>(sizeof(Register)) + PayloadOffset - sizeof(CallerFrameAndPC)));
}

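// Frame set-up for the varargs and forwarded-arguments calls: the argument
// count is only known at runtime, so the frame is sized and populated through
// calls out to the runtime.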
template<typename Op>
std::enable_if_t<
    Op::opcodeID == op_call_varargs || Op::opcodeID == op_construct_varargs
    || Op::opcodeID == op_tail_call_varargs || Op::opcodeID == op_tail_call_forward_arguments
, void>
JIT::compileSetupFrame(const Op& bytecode, CallLinkInfo* info)
{
    OpcodeID opcodeID = Op::opcodeID;
    int thisValue = bytecode.m_thisValue.offset();
    int arguments = bytecode.m_arguments.offset();
    int firstFreeRegister = bytecode.m_firstFree.offset();
    int firstVarArgOffset = bytecode.m_firstVarArg;

    // Ask the runtime how large the callee frame needs to be.
    emitLoad(arguments, regT1, regT0);
    Z_JITOperation_EJZZ sizeOperation;
    if (Op::opcodeID == op_tail_call_forward_arguments)
        sizeOperation = operationSizeFrameForForwardArguments;
    else
        sizeOperation = operationSizeFrameForVarargs;
    callOperation(sizeOperation, JSValueRegs(regT1, regT0), -firstFreeRegister, firstVarArgOffset);
    move(TrustedImm32(-firstFreeRegister), regT1);
    emitSetVarargsFrame(*this, returnValueGPR, false, regT1, regT1);
    addPtr(TrustedImm32(-(sizeof(CallerFrameAndPC) + WTF::roundUpToMultipleOf(stackAlignmentBytes(), 6 * sizeof(void*)))), regT1, stackPointerRegister);
    // Copy the arguments into the new frame.
    emitLoad(arguments, regT2, regT4);
    F_JITOperation_EFJZZ setupOperation;
    if (opcodeID == op_tail_call_forward_arguments)
        setupOperation = operationSetupForwardArgumentsFrame;
    else
        setupOperation = operationSetupVarargsFrame;
    callOperation(setupOperation, regT1, JSValueRegs(regT2, regT4), firstVarArgOffset, regT0);
    move(returnValueGPR, regT1);

    // Profile the argument count.
    load32(Address(regT1, CallFrameSlot::argumentCount * static_cast<int>(sizeof(Register)) + PayloadOffset), regT2);
    load32(info->addressOfMaxNumArguments(), regT0);
    Jump notBiggest = branch32(Above, regT0, regT2);
    store32(regT2, info->addressOfMaxNumArguments());
    notBiggest.link(this);

    // Initialize 'this'.
    emitLoad(thisValue, regT2, regT0);
    store32(regT0, Address(regT1, PayloadOffset + (CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)))));
    store32(regT2, Address(regT1, TagOffset + (CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)))));

    addPtr(TrustedImm32(sizeof(CallerFrameAndPC)), regT1, stackPointerRegister);
}

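// Only op_call_eval takes the eval fast path; for every other opcode this
// generic overload reports that no eval-specific code was emitted.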
template<typename Op>
bool JIT::compileCallEval(const Op&)
{
    return false;
}

template<>
bool JIT::compileCallEval(const OpCallEval& bytecode)
{
    // Link the new frame to the caller and call out to the runtime, which
    // performs the eval if the callee really is the eval function.
    addPtr(TrustedImm32(-static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC))), stackPointerRegister, regT1);
    storePtr(callFrameRegister, Address(regT1, CallFrame::callerFrameOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    callOperation(operationCallEval, regT1);

    // An empty result means this was not a direct eval; fall through to the
    // slow case, which performs an ordinary call.
    addSlowCase(branchIfEmpty(regT1));

    sampleCodeBlock(m_codeBlock);

    emitPutCallResult(bytecode);

    return true;
}

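// Slow path for op_call_eval: operationCallEval returned the empty value, so
// the callee was not the eval function. Perform an ordinary virtual call instead.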
void JIT::compileCallEvalSlowCase(const Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    auto bytecode = instruction->as<OpCallEval>();
    CallLinkInfo* info = m_codeBlock->addCallLinkInfo();
    info->setUpCall(CallLinkInfo::Call, CodeOrigin(m_bytecodeOffset), regT0);

    int registerOffset = -bytecode.m_argv;
    int callee = bytecode.m_callee.offset();

    addPtr(TrustedImm32(registerOffset * sizeof(Register) + sizeof(CallerFrameAndPC)), callFrameRegister, stackPointerRegister);

    emitLoad(callee, regT1, regT0);
    emitDumbVirtualCall(*vm(), info);
    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();

    sampleCodeBlock(m_codeBlock);

    emitPutCallResult(bytecode);
}

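// Fast path shared by all call-like opcodes: set up the callee frame, store
// the callee, and emit the patchable check against the last linked callee.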
template <typename Op>
void JIT::compileOpCall(const Instruction* instruction, unsigned callLinkInfoIndex)
{
    OpcodeID opcodeID = Op::opcodeID;
    auto bytecode = instruction->as<Op>();
    int callee = bytecode.m_callee.offset();

    /* Caller always:
        - Updates callFrameRegister to callee callFrame.
        - Initializes ArgumentCount; CallerFrame; Callee.

       For a JS call:
        - Callee initializes ReturnPC; CodeBlock.
        - Callee restores callFrameRegister before return.

       For a non-JS call:
        - Caller initializes ReturnPC; CodeBlock.
        - Caller restores callFrameRegister after return.
    */
    CallLinkInfo* info = nullptr;
    if (opcodeID != op_call_eval)
        info = m_codeBlock->addCallLinkInfo();
    compileSetupFrame(bytecode, info);
    // SP holds newCallFrame + sizeof(CallerFrameAndPC), with ArgumentCount initialized.

    // Record the call site index in the caller's ArgumentCount tag slot.
    uint32_t locationBits = CallSiteIndex(instruction).bits();
    store32(TrustedImm32(locationBits), tagFor(CallFrameSlot::argumentCount));
    emitLoad(callee, regT1, regT0); // regT1, regT0 holds callee.

    store32(regT0, Address(stackPointerRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register)) + PayloadOffset - sizeof(CallerFrameAndPC)));
    store32(regT1, Address(stackPointerRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register)) + TagOffset - sizeof(CallerFrameAndPC)));

    if (compileCallEval(bytecode))
        return;

    if (opcodeID == op_tail_call || opcodeID == op_tail_call_varargs)
        emitRestoreCalleeSaves();

    addSlowCase(branchIfNotCell(regT1));

    // Patchable inline cache: compare the callee payload against the pointer
    // that is patched in when the call gets linked. The check starts out
    // comparing against nullptr, so the first execution always takes the slow
    // path, which links the call.
    DataLabelPtr addressOfLinkedFunctionCheck;
    Jump slowCase = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, TrustedImmPtr(nullptr));

    addSlowCase(slowCase);

    ASSERT(m_callCompilationInfo.size() == callLinkInfoIndex);
    info->setUpCall(CallLinkInfo::callTypeFor(opcodeID), CodeOrigin(m_bytecodeOffset), regT0);
    m_callCompilationInfo.append(CallCompilationInfo());
    m_callCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;
    m_callCompilationInfo[callLinkInfoIndex].callLinkInfo = info;

    checkStackPointerAlignment();
    if (opcodeID == op_tail_call || opcodeID == op_tail_call_varargs || opcodeID == op_tail_call_forward_arguments) {
        prepareForTailCallSlow();
        m_callCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedTailCall();
        return;
    }

    m_callCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();

    sampleCodeBlock(m_codeBlock);
    emitPutCallResult(bytecode);
}

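// Shared slow path: hand the CallLinkInfo (in regT2) to the link-call thunk so
// the call site can be linked.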
template <typename Op>
void JIT::compileOpCallSlowCase(const Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex)
{
    OpcodeID opcodeID = Op::opcodeID;

    if (opcodeID == op_call_eval) {
        compileCallEvalSlowCase(instruction, iter);
        return;
    }

    linkAllSlowCases(iter);

    move(TrustedImmPtr(m_callCompilationInfo[callLinkInfoIndex].callLinkInfo), regT2);

    if (opcodeID == op_tail_call || opcodeID == op_tail_call_varargs || opcodeID == op_tail_call_forward_arguments)
        emitRestoreCalleeSaves();

    m_callCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(m_vm->getCTIStub(linkCallThunkGenerator).retaggedCode<NoPtrTag>());

    if (opcodeID == op_tail_call || opcodeID == op_tail_call_varargs) {
        abortWithReason(JITDidReturnFromTailCall);
        return;
    }

    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();

    sampleCodeBlock(m_codeBlock);

    auto bytecode = instruction->as<Op>();
    emitPutCallResult(bytecode);
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)