1/*
2 * Copyright (C) 2015-2018 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26#include "config.h"
27#include "AirCode.h"
28
29#if ENABLE(B3_JIT)
30
31#include "AirAllocateRegistersAndStackAndGenerateCode.h"
32#include "AirCCallSpecial.h"
33#include "AirCFG.h"
34#include "AllowMacroScratchRegisterUsageIf.h"
35#include "B3BasicBlockUtils.h"
36#include "B3Procedure.h"
37#include "B3StackSlot.h"
38#include <wtf/ListDump.h>
39#include <wtf/MathExtras.h>
40
41namespace JSC { namespace B3 { namespace Air {
42
// Emits the standard Air prologue: push the frame, carve out the stack frame
// (optionally zeroing it), then save the callee-save registers.
static void defaultPrologueGenerator(CCallHelpers& jit, Code& code)
{
    jit.emitFunctionPrologue();
    if (code.frameSize()) {
        // NOTE(review): the macro scratch register is only permitted on ARM64,
        // presumably because addPtr with a large immediate may need it there.
        AllowMacroScratchRegisterUsageIf allowScratch(jit, isARM64());
        jit.addPtr(MacroAssembler::TrustedImm32(-code.frameSize()), MacroAssembler::framePointerRegister, MacroAssembler::stackPointerRegister);
        if (Options::zeroStackFrame())
            jit.clearStackFrame(MacroAssembler::framePointerRegister, MacroAssembler::stackPointerRegister, GPRInfo::nonArgGPR0, code.frameSize());
    }

    jit.emitSave(code.calleeSaveRegisterAtOffsetList());
}
55
// Builds the initial register priority orderings for each bank (volatile
// registers before callee-saves, optionally shuffled for testing) and pins
// the registers that the allocator must never hand out.
Code::Code(Procedure& proc)
    : m_proc(proc)
    , m_cfg(new CFG(*this))
    , m_lastPhaseName("initial")
    , m_defaultPrologueGenerator(createSharedTask<PrologueGeneratorFunction>(&defaultPrologueGenerator))
{
    // Come up with initial orderings of registers. The user may replace this with something else.
    forEachBank(
        [&] (Bank bank) {
            Vector<Reg> volatileRegs;
            Vector<Reg> calleeSaveRegs;
            // Start from every register in the bank, minus registers that can
            // never be allocated (stack registers and reserved hardware registers).
            RegisterSet all = bank == GP ? RegisterSet::allGPRs() : RegisterSet::allFPRs();
            all.exclude(RegisterSet::stackRegisters());
            all.exclude(RegisterSet::reservedHardwareRegisters());
            RegisterSet calleeSave = RegisterSet::calleeSaveRegisters();
            all.forEach(
                [&] (Reg reg) {
                    if (!calleeSave.get(reg))
                        volatileRegs.append(reg);
                });
            all.forEach(
                [&] (Reg reg) {
                    if (calleeSave.get(reg))
                        calleeSaveRegs.append(reg);
                });
            if (Options::airRandomizeRegs()) {
                // Randomized orderings help flush out allocator bugs that
                // depend on a particular register order.
                shuffleVector(volatileRegs, [&] (unsigned limit) { return m_weakRandom.getUint32(limit); });
                shuffleVector(calleeSaveRegs, [&] (unsigned limit) { return m_weakRandom.getUint32(limit); });
            }
            // Volatile registers come first in priority order, so they get used
            // before any callee-save needs to be saved.
            Vector<Reg> result;
            result.appendVector(volatileRegs);
            result.appendVector(calleeSaveRegs);
            setRegsInPriorityOrder(bank, result);
        });

    // Some platforms reserve a register for materializing large stack offsets.
    if (auto reg = pinnedExtendedOffsetAddrRegister())
        pinRegister(*reg);

    m_pinnedRegs.set(MacroAssembler::framePointerRegister);
}
96
97Code::~Code()
98{
99}
100
// Emits the default prologue directly, bypassing any custom per-entrypoint
// prologue generators that may have been installed.
void Code::emitDefaultPrologue(CCallHelpers& jit)
{
    defaultPrologueGenerator(jit, *this);
}
105
106void Code::setRegsInPriorityOrder(Bank bank, const Vector<Reg>& regs)
107{
108 regsInPriorityOrderImpl(bank) = regs;
109 m_mutableRegs = { };
110 forEachBank(
111 [&] (Bank bank) {
112 for (Reg reg : regsInPriorityOrder(bank))
113 m_mutableRegs.set(reg);
114 });
115}
116
// Removes `reg` from its bank's allocation order so the register allocator
// never hands it out, and records it as pinned.
void Code::pinRegister(Reg reg)
{
    // Route through Arg/Tmp to determine which bank (GP or FP) the register is in.
    Vector<Reg>& regs = regsInPriorityOrderImpl(Arg(Tmp(reg)).bank());
    ASSERT(regs.contains(reg));
    regs.removeFirst(reg);
    m_mutableRegs.clear(reg);
    // A register may appear at most once in the priority order, so one
    // removeFirst() must have removed every occurrence.
    ASSERT(!regs.contains(reg));
    m_pinnedRegs.set(reg);
}
126
127RegisterSet Code::mutableGPRs()
128{
129 RegisterSet result = m_mutableRegs;
130 result.filter(RegisterSet::allGPRs());
131 return result;
132}
133
134RegisterSet Code::mutableFPRs()
135{
136 RegisterSet result = m_mutableRegs;
137 result.filter(RegisterSet::allFPRs());
138 return result;
139}
140
// Delegates to the owning procedure; see B3::Procedure::needsUsedRegisters().
bool Code::needsUsedRegisters() const
{
    return m_proc.needsUsedRegisters();
}
145
146BasicBlock* Code::addBlock(double frequency)
147{
148 std::unique_ptr<BasicBlock> block(new BasicBlock(m_blocks.size(), frequency));
149 BasicBlock* result = block.get();
150 m_blocks.append(WTFMove(block));
151 return result;
152}
153
// Creates a new stack slot. If stack allocation has already run, the slot is
// also assigned an offset immediately and the frame grows to cover it.
StackSlot* Code::addStackSlot(unsigned byteSize, StackSlotKind kind, B3::StackSlot* b3Slot)
{
    StackSlot* result = m_stackSlots.addNew(byteSize, kind, b3Slot);
    if (m_stackIsAllocated) {
        // FIXME: This is unnecessarily awful. Fortunately, it doesn't run often.
        // Place the slot just past the current frame, respecting its alignment,
        // then round the frame size up to the stack alignment requirement.
        unsigned extent = WTF::roundUpToMultipleOf(result->alignment(), frameSize() + byteSize);
        result->setOffsetFromFP(-static_cast<ptrdiff_t>(extent));
        setFrameSize(WTF::roundUpToMultipleOf(stackAlignmentBytes(), extent));
    }
    return result;
}
165
// Creates the Air mirror of a B3 stack slot, keeping the B3 slot's size and
// using the Locked kind.
StackSlot* Code::addStackSlot(B3::StackSlot* b3Slot)
{
    return addStackSlot(b3Slot->byteSize(), StackSlotKind::Locked, b3Slot);
}
170
171Special* Code::addSpecial(std::unique_ptr<Special> special)
172{
173 special->m_code = this;
174 return m_specials.add(WTFMove(special));
175}
176
177CCallSpecial* Code::cCallSpecial()
178{
179 if (!m_cCallSpecial) {
180 m_cCallSpecial = static_cast<CCallSpecial*>(
181 addSpecial(std::make_unique<CCallSpecial>()));
182 }
183
184 return m_cCallSpecial;
185}
186
187bool Code::isEntrypoint(BasicBlock* block) const
188{
189 // Note: This function must work both before and after LowerEntrySwitch.
190
191 if (m_entrypoints.isEmpty())
192 return !block->index();
193
194 for (const FrequentedBlock& entrypoint : m_entrypoints) {
195 if (entrypoint.block() == block)
196 return true;
197 }
198 return false;
199}
200
201Optional<unsigned> Code::entrypointIndex(BasicBlock* block) const
202{
203 RELEASE_ASSERT(m_entrypoints.size());
204 for (unsigned i = 0; i < m_entrypoints.size(); ++i) {
205 if (m_entrypoints[i].block() == block)
206 return i;
207 }
208 return WTF::nullopt;
209}
210
// Records where the register allocator decided to save callee-save registers.
// The recorded offsets are uncorrected; calleeSaveRegisterAtOffsetList()
// rebases them by `slot`'s position when queried.
void Code::setCalleeSaveRegisterAtOffsetList(RegisterAtOffsetList&& registerAtOffsetList, StackSlot* slot)
{
    m_uncorrectedCalleeSaveRegisterAtOffsetList = WTFMove(registerAtOffsetList);
    // Also remember the set of saved registers for quick membership queries.
    for (const RegisterAtOffset& registerAtOffset : m_uncorrectedCalleeSaveRegisterAtOffsetList)
        m_calleeSaveRegisters.set(registerAtOffset.reg());
    m_calleeSaveStackSlot = slot;
}
218
219RegisterAtOffsetList Code::calleeSaveRegisterAtOffsetList() const
220{
221 RegisterAtOffsetList result = m_uncorrectedCalleeSaveRegisterAtOffsetList;
222 if (StackSlot* slot = m_calleeSaveStackSlot) {
223 ptrdiff_t offset = slot->byteSize() + slot->offsetFromFP();
224 for (size_t i = result.size(); i--;) {
225 result.at(i) = RegisterAtOffset(
226 result.at(i).reg(),
227 result.at(i).offset() + offset);
228 }
229 }
230 return result;
231}
232
// Recomputes predecessor lists from the entrypoints and deletes any block that
// became unreachable. Entrypoint blocks are always kept alive.
void Code::resetReachability()
{
    clearPredecessors(m_blocks);
    // Before LowerEntrySwitch the entrypoint list may be empty; block 0 is
    // then the sole entrypoint.
    if (m_entrypoints.isEmpty())
        updatePredecessorsAfter(m_blocks[0].get());
    else {
        for (const FrequentedBlock& entrypoint : m_entrypoints)
            updatePredecessorsAfter(entrypoint.block());
    }

    // Null out dead blocks rather than erasing them, so surviving block
    // indices stay stable (iteration elsewhere skips null entries).
    for (auto& block : m_blocks) {
        if (isBlockDead(block.get()) && !isEntrypoint(block.get()))
            block = nullptr;
    }
}
248
// Dumps the whole Air unit for debugging: blocks, stack slots, specials, and
// frame metadata.
void Code::dump(PrintStream& out) const
{
    if (!m_entrypoints.isEmpty())
        out.print("Entrypoints: ", listDump(m_entrypoints), "\n");
    for (BasicBlock* block : *this)
        out.print(deepDump(block));
    if (stackSlots().size()) {
        out.print("Stack slots:\n");
        for (StackSlot* slot : stackSlots())
            out.print("    ", pointerDump(slot), ": ", deepDump(slot), "\n");
    }
    if (specials().size()) {
        out.print("Specials:\n");
        for (Special* special : specials())
            out.print("    ", deepDump(special), "\n");
    }
    if (m_frameSize || m_stackIsAllocated)
        out.print("Frame size: ", m_frameSize, m_stackIsAllocated ? " (Allocated)" : "", "\n");
    if (m_callArgAreaSize)
        out.print("Call arg area size: ", m_callArgAreaSize, "\n");
    // Print the corrected (FP-relative) callee-save list, not the raw one.
    RegisterAtOffsetList calleeSaveRegisters = this->calleeSaveRegisterAtOffsetList();
    if (calleeSaveRegisters.size())
        out.print("Callee saves: ", calleeSaveRegisters, "\n");
}
273
274unsigned Code::findFirstBlockIndex(unsigned index) const
275{
276 while (index < size() && !at(index))
277 index++;
278 return index;
279}
280
// Returns the index of the first non-null block strictly after `index`, or
// size() if there is none.
unsigned Code::findNextBlockIndex(unsigned index) const
{
    return findFirstBlockIndex(index + 1);
}
285
286BasicBlock* Code::findNextBlock(BasicBlock* block) const
287{
288 unsigned index = findNextBlockIndex(block->index());
289 if (index < size())
290 return at(index);
291 return nullptr;
292}
293
// Adds `tmp` to the fast-tmps set. NOTE(review): the meaning of "fast" is
// defined by the consumers of m_fastTmps (presumably the register allocators
// give these tmps preferential treatment — confirm there).
void Code::addFastTmp(Tmp tmp)
{
    m_fastTmps.add(tmp);
}
298
// Forwards to the owning procedure, which owns all data sections.
void* Code::addDataSection(size_t size)
{
    return m_proc.addDataSection(size);
}
303
304unsigned Code::jsHash() const
305{
306 unsigned result = 0;
307
308 for (BasicBlock* block : *this) {
309 result *= 1000001;
310 for (Inst& inst : *block) {
311 result *= 97;
312 result += inst.jsHash();
313 }
314 for (BasicBlock* successor : block->successorBlocks()) {
315 result *= 7;
316 result += successor->index();
317 }
318 }
319 for (StackSlot* slot : stackSlots()) {
320 result *= 101;
321 result += slot->jsHash();
322 }
323
324 return result;
325}
326
327void Code::setNumEntrypoints(unsigned numEntrypoints)
328{
329 m_prologueGenerators.clear();
330 m_prologueGenerators.reserveCapacity(numEntrypoints);
331 for (unsigned i = 0; i < numEntrypoints; ++i)
332 m_prologueGenerators.uncheckedAppend(m_defaultPrologueGenerator.copyRef());
333}
334
335} } } // namespace JSC::B3::Air
336
337#endif // ENABLE(B3_JIT)
338