/*
 * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "FTLLink.h"

#if ENABLE(FTL_JIT)

#include "CCallHelpers.h"
#include "CodeBlockWithJITType.h"
#include "DFGCommon.h"
#include "FTLJITCode.h"
#include "JITOperations.h"
#include "JSCInlines.h"
#include "LinkBuffer.h"
#include "ProfilerCompilation.h"
#include "ThunkGenerators.h"
#include "VirtualRegister.h"

namespace JSC { namespace FTL {

void link(State& state)
{
    using namespace DFG;
    Graph& graph = state.graph;
    CodeBlock* codeBlock = graph.m_codeBlock;
    VM& vm = graph.m_vm;

    // B3 will create its own jump tables as needed.
    codeBlock->clearSwitchJumpTables();

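    // Record how many virtual registers an OSR exit from this code could need,
    // so exit compilation can size the reconstructed frame.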
    state.jitCode->common.requiredRegisterCountForExit = graph.requiredRegisterCountForExit();

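    // If anything was inlined, keep the inline call frame metadata around: OSR
    // exit and stack walking use it to reconstruct the inlined frames.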
    if (!graph.m_plan.inlineCallFrames()->isEmpty())
        state.jitCode->common.inlineCallFrames = graph.m_plan.inlineCallFrames();

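    // Register the values the graph froze with the CodeBlock so the GC keeps
    // them alive for as long as this code is.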
    graph.registerFrozenValues();

    // Create the entrypoint. Note that we use this entrypoint totally differently
    // depending on whether we're doing OSR entry or not.
    CCallHelpers jit(codeBlock);

    std::unique_ptr<LinkBuffer> linkBuffer;

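    // At the entrypoint the prologue has not run yet, so call frame slots are
    // still addressed relative to the incoming stack pointer, adjusted by the
    // delta the prologue will introduce.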
    CCallHelpers::Address frame = CCallHelpers::Address(
        CCallHelpers::stackPointerRegister, -static_cast<int32_t>(AssemblyHelpers::prologueStackPointerDelta()));

    Profiler::Compilation* compilation = graph.compilation();
    if (UNLIKELY(compilation)) {
        compilation->addDescription(
            Profiler::OriginStack(),
            toCString("Generated FTL JIT code for ", CodeBlockWithJITType(codeBlock, JITType::FTLJIT), ", instructions size = ", graph.m_codeBlock->instructionsSize(), ":\n"));

        graph.ensureSSADominators();
        graph.ensureSSANaturalLoops();

        const char* prefix = "    ";

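        // Dump every block and node into the profiler database, tagged with its
        // origin, so the profiler can correlate generated code with the DFG IR.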
        DumpContext dumpContext;
        StringPrintStream out;
        Node* lastNode = 0;
        for (size_t blockIndex = 0; blockIndex < graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = graph.block(blockIndex);
            if (!block)
                continue;

            graph.dumpBlockHeader(out, prefix, block, Graph::DumpLivePhisOnly, &dumpContext);
            compilation->addDescription(Profiler::OriginStack(), out.toCString());
            out.reset();

            for (size_t nodeIndex = 0; nodeIndex < block->size(); ++nodeIndex) {
                Node* node = block->at(nodeIndex);

                Profiler::OriginStack stack;

                if (node->origin.semantic.isSet()) {
                    stack = Profiler::OriginStack(
                        *vm.m_perBytecodeProfiler, codeBlock, node->origin.semantic);
                }

                if (graph.dumpCodeOrigin(out, prefix, lastNode, node, &dumpContext)) {
                    compilation->addDescription(stack, out.toCString());
                    out.reset();
                }

                graph.dump(out, prefix, node, &dumpContext);
                compilation->addDescription(stack, out.toCString());
                out.reset();

                if (node->origin.semantic.isSet())
                    lastNode = node;
            }
        }

        dumpContext.dump(out, prefix);
        compilation->addDescription(Profiler::OriginStack(), out.toCString());
        out.reset();

        out.print("    Disassembly:\n");
        out.print("        <not implemented yet>\n");
        compilation->addDescription(Profiler::OriginStack(), out.toCString());
        out.reset();

        state.jitCode->common.compilation = compilation;
    }

    switch (graph.m_plan.mode()) {
    case FTLMode: {
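        // numParameters() counts |this|, so any value other than 1 means the
        // function declares formal parameters and a caller may pass too few.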
        bool requiresArityFixup = codeBlock->numParameters() != 1;
        if (codeBlock->codeType() == FunctionCode && requiresArityFixup) {
            CCallHelpers::JumpList mainPathJumps;

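            // Fast path: if the caller passed at least as many arguments as we
            // have parameters, enter the B3-generated code directly.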
            jit.load32(
                frame.withOffset(sizeof(Register) * CallFrameSlot::argumentCount),
                GPRInfo::regT1);
            mainPathJumps.append(jit.branch32(
                CCallHelpers::AboveOrEqual, GPRInfo::regT1,
                CCallHelpers::TrustedImm32(codeBlock->numParameters())));
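            // Slow path: set up a frame and call out to the arity check
            // operation. Judging by the branches below, it returns a negative
            // value if it threw an exception (e.g. stack overflow), zero if no
            // fixup is needed, and otherwise the fixup amount to hand to the
            // arity fixup thunk.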
            jit.emitFunctionPrologue();
            jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
            jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);
            CCallHelpers::Call callArityCheck = jit.call(OperationPtrTag);

            auto noException = jit.branch32(CCallHelpers::GreaterThanOrEqual, GPRInfo::returnValueGPR, CCallHelpers::TrustedImm32(0));
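            // Exception path: save our callee saves into the entry frame buffer
            // and unwind to the handler in the caller's frame.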
            jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm.topEntryFrame);
            jit.move(CCallHelpers::TrustedImmPtr(&vm), GPRInfo::argumentGPR0);
            jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR1);
            CCallHelpers::Call callLookupExceptionHandlerFromCallerFrame = jit.call(OperationPtrTag);
            jit.jumpToExceptionHandler(vm);
            noException.link(&jit);

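            // In debug builds, check that no exception is pending before taking
            // the non-throwing path.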
            if (!ASSERT_DISABLED) {
                jit.load64(vm.addressOfException(), GPRInfo::regT1);
                jit.jitAssertIsNull(GPRInfo::regT1);
            }

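            // Tear the frame back down and test the result: zero means no fixup
            // is needed, so enter the main path directly. Otherwise run the
            // arity fixup thunk under a fresh prologue so it sees a normal
            // frame, then enter the main path.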
            jit.move(GPRInfo::returnValueGPR, GPRInfo::argumentGPR0);
            jit.emitFunctionEpilogue();
            jit.untagReturnAddress();
            mainPathJumps.append(jit.branchTest32(CCallHelpers::Zero, GPRInfo::argumentGPR0));
            jit.emitFunctionPrologue();
            CCallHelpers::Call callArityFixup = jit.nearCall();
            jit.emitFunctionEpilogue();
            jit.untagReturnAddress();
            mainPathJumps.append(jit.jump());

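            // Linking can fail when executable memory is exhausted; surface
            // that as an allocation failure instead of crashing.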
            linkBuffer = std::make_unique<LinkBuffer>(jit, codeBlock, JITCompilationCanFail);
            if (linkBuffer->didFailToAllocate()) {
                state.allocationFailed = true;
                return;
            }
            linkBuffer->link(callArityCheck, FunctionPtr<OperationPtrTag>(codeBlock->isConstructor() ? operationConstructArityCheck : operationCallArityCheck));
            linkBuffer->link(callLookupExceptionHandlerFromCallerFrame, FunctionPtr<OperationPtrTag>(lookupExceptionHandlerFromCallerFrame));
            linkBuffer->link(callArityFixup, FunctionPtr<JITThunkPtrTag>(vm.getCTIStub(arityFixupGenerator).code()));
            linkBuffer->link(mainPathJumps, state.generatedFunction);
        }

        state.jitCode->initializeAddressForCall(state.generatedFunction);
        break;
    }

    case FTLForOSREntryMode: {
        // We jump to here straight from DFG code, after having boxed up all of the
        // values into the scratch buffer. Everything should be good to go - at this
        // point we've even done the stack check. Basically we just have to make the
        // call to the B3-generated code.
        CCallHelpers::Label start = jit.label();
        jit.emitFunctionEpilogue();
        jit.untagReturnAddress();
        CCallHelpers::Jump mainPathJump = jit.jump();

        linkBuffer = std::make_unique<LinkBuffer>(jit, codeBlock, JITCompilationCanFail);
        if (linkBuffer->didFailToAllocate()) {
            state.allocationFailed = true;
            return;
        }
        linkBuffer->link(mainPathJump, state.generatedFunction);

        state.jitCode->initializeAddressForCall(linkBuffer->locationOf<JSEntryPtrTag>(start));
        break;
    }

    default:
        RELEASE_ASSERT_NOT_REACHED();
        break;
    }

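    // Hand the link buffer and code over to the finalizer, which completes
    // installation on the main thread.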
    state.finalizer->entrypointLinkBuffer = WTFMove(linkBuffer);
    state.finalizer->function = state.generatedFunction;
    state.finalizer->jitCode = state.jitCode;
}

} } // namespace JSC::FTL

#endif // ENABLE(FTL_JIT)