/*
 * Copyright (C) 2015-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#pragma once

#if ENABLE(B3_JIT)

#include "AirArg.h"
#include "AirBasicBlock.h"
#include "AirDisassembler.h"
#include "AirSpecial.h"
#include "AirStackSlot.h"
#include "AirTmp.h"
#include "B3SparseCollection.h"
#include "CCallHelpers.h"
#include "RegisterAtOffsetList.h"
#include "StackAlignment.h"
#include <wtf/IndexMap.h>
#include <wtf/WeakRandom.h>

namespace JSC { namespace B3 {

class Procedure;

#if ASSERT_DISABLED
IGNORE_RETURN_TYPE_WARNINGS_BEGIN
#endif

namespace Air {

class GenerateAndAllocateRegisters;
class BlockInsertionSet;
class CCallSpecial;
class CFG;
class Code;
class Disassembler;

typedef void WasmBoundsCheckGeneratorFunction(CCallHelpers&, GPRReg);
typedef SharedTask<WasmBoundsCheckGeneratorFunction> WasmBoundsCheckGenerator;

typedef void PrologueGeneratorFunction(CCallHelpers&, Code&);
typedef SharedTask<PrologueGeneratorFunction> PrologueGenerator;

// This is an IR that is very close to the bare metal. It requires about 40x more bytes than the
// generated machine code - for example, if you're generating 1MB of machine code, you need about
// 40MB of Air.

class Code {
    WTF_MAKE_NONCOPYABLE(Code);
    WTF_MAKE_FAST_ALLOCATED;
public:
    ~Code();

    Procedure& proc() { return m_proc; }

    const Vector<Reg>& regsInPriorityOrder(Bank bank) const
    {
        switch (bank) {
        case GP:
            return m_gpRegsInPriorityOrder;
        case FP:
            return m_fpRegsInPriorityOrder;
        }
        ASSERT_NOT_REACHED();
    }

    // This is the set of registers that Air is allowed to emit code to mutate. It's derived from
    // regsInPriorityOrder. Any registers not in this set are said to be "pinned".
    const RegisterSet& mutableRegs() const { return m_mutableRegs; }

    bool isPinned(Reg reg) const { return !mutableRegs().get(reg); }
    void pinRegister(Reg);
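
    // A minimal pinning sketch (hedged; assumes a constructed Code& named "code", and the choice
    // of GPRInfo::regCS1 is purely illustrative):
    //
    //     code.pinRegister(GPRInfo::regCS1); // drops it from mutableRegs() and the priority order
    //     ASSERT(code.isPinned(GPRInfo::regCS1));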

    void setOptLevel(unsigned optLevel) { m_optLevel = optLevel; }
    unsigned optLevel() const { return m_optLevel; }

    bool needsUsedRegisters() const;

    JS_EXPORT_PRIVATE BasicBlock* addBlock(double frequency = 1);
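
    // Block construction sketch (hedged; the frequency is a relative execution-weight hint, with
    // 1 being the default):
    //
    //     BasicBlock* fastPath = code.addBlock();       // frequency 1
    //     BasicBlock* slowPath = code.addBlock(0.0001); // hinted as rarely taken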

    // Note that you can rely on stack slots always getting indices that are larger than the index
    // of any prior stack slot. In fact, all stack slots you create in the future will have an index
    // that is >= stackSlots().size().
    JS_EXPORT_PRIVATE StackSlot* addStackSlot(
        unsigned byteSize, StackSlotKind, B3::StackSlot* = nullptr);
    StackSlot* addStackSlot(B3::StackSlot*);
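
    // Sketch (hedged; StackSlotKind::Locked is declared in AirStackSlot.h):
    //
    //     StackSlot* slot = code.addStackSlot(8, StackSlotKind::Locked);
    //     ASSERT(slot->index() < code.stackSlots().size()); // later slots get indices >= this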

    JS_EXPORT_PRIVATE Special* addSpecial(std::unique_ptr<Special>);

    // This is the special you need to make a C call!
    CCallSpecial* cCallSpecial();
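
    // Sketch of how lowering typically wraps this special in a Patch instruction (hedged; Inst,
    // Patch, and Arg live in other Air headers, and "origin" and "block" are hypothetical locals):
    //
    //     Inst inst(Patch, origin, Arg::special(code.cCallSpecial()));
    //     inst.args.append(Arg::immPtr(calleePtr)); // the callee follows the special
    //     block->append(WTFMove(inst));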

    Tmp newTmp(Bank bank)
    {
        switch (bank) {
        case GP:
            return Tmp::gpTmpForIndex(m_numGPTmps++);
        case FP:
            return Tmp::fpTmpForIndex(m_numFPTmps++);
        }
        ASSERT_NOT_REACHED();
    }
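
    // Sketch (hedged; GP and FP are the Bank enumerators used throughout Air):
    //
    //     Tmp scratch = code.newTmp(GP); // fresh general-purpose temporary
    //     Tmp accum = code.newTmp(FP);   // fresh floating-point temporary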

    unsigned numTmps(Bank bank)
    {
        switch (bank) {
        case GP:
            return m_numGPTmps;
        case FP:
            return m_numFPTmps;
        }
        ASSERT_NOT_REACHED();
    }

    template<typename Func>
    void forEachTmp(const Func& func)
    {
        for (unsigned bankIndex = 0; bankIndex < numBanks; ++bankIndex) {
            Bank bank = static_cast<Bank>(bankIndex);
            unsigned numTmps = this->numTmps(bank);
            for (unsigned i = 0; i < numTmps; ++i)
                func(Tmp::tmpForIndex(bank, i));
        }
    }
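
    // Usage sketch (hedged):
    //
    //     code.forEachTmp([&] (Tmp tmp) {
    //         dataLog("Air tmp: ", tmp, "\n");
    //     });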

    unsigned callArgAreaSizeInBytes() const { return m_callArgAreaSize; }

    // You can call this before code generation to force a minimum call arg area size.
    void requestCallArgAreaSizeInBytes(unsigned size)
    {
        m_callArgAreaSize = std::max(
            m_callArgAreaSize,
            static_cast<unsigned>(WTF::roundUpToMultipleOf(stackAlignmentBytes(), size)));
    }
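
    // For example (hedged; assumes the common case of 16-byte stack alignment):
    //
    //     code.requestCallArgAreaSizeInBytes(40);      // rounds up to 48
    //     ASSERT(code.callArgAreaSizeInBytes() >= 48);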

    unsigned frameSize() const { return m_frameSize; }

    // Only phases that do stack allocation are allowed to set this. Currently, only
    // Air::allocateStack() does this.
    void setFrameSize(unsigned frameSize)
    {
        m_frameSize = frameSize;
    }

    // Note that this is not the same thing as proc().numEntrypoints(). This value may be zero
    // until we lower EntrySwitch.
    unsigned numEntrypoints() const { return m_entrypoints.size(); }
    const Vector<FrequentedBlock>& entrypoints() const { return m_entrypoints; }
    const FrequentedBlock& entrypoint(unsigned index) const { return m_entrypoints[index]; }
    bool isEntrypoint(BasicBlock*) const;
    // Note: it is only valid to call this function after lowerEntrySwitch().
    Optional<unsigned> entrypointIndex(BasicBlock*) const;

    // Note: we allow this to be called even before we set m_entrypoints, purely as a convenience
    // for users of this API. However, if you call this before setNumEntrypoints, then
    // setNumEntrypoints will overwrite this value.
    void setPrologueForEntrypoint(unsigned entrypointIndex, Ref<PrologueGenerator>&& generator)
    {
        m_prologueGenerators[entrypointIndex] = WTFMove(generator);
    }
    const Ref<PrologueGenerator>& prologueGeneratorForEntrypoint(unsigned entrypointIndex)
    {
        return m_prologueGenerators[entrypointIndex];
    }

    void setNumEntrypoints(unsigned);

    // This is used by lowerEntrySwitch().
    template<typename Vector>
    void setEntrypoints(Vector&& vector)
    {
        m_entrypoints = std::forward<Vector>(vector);
        RELEASE_ASSERT(m_entrypoints.size() == m_prologueGenerators.size());
    }

    CCallHelpers::Label entrypointLabel(unsigned index) const
    {
        return m_entrypointLabels[index];
    }

    // This is used by generate().
    template<typename Vector>
    void setEntrypointLabels(Vector&& vector)
    {
        m_entrypointLabels = std::forward<Vector>(vector);
        RELEASE_ASSERT(m_entrypointLabels.size() == m_prologueGenerators.size());
    }

    void setStackIsAllocated(bool value)
    {
        m_stackIsAllocated = value;
    }

    bool stackIsAllocated() const { return m_stackIsAllocated; }

    // This sets the callee save registers.
    void setCalleeSaveRegisterAtOffsetList(RegisterAtOffsetList&&, StackSlot*);

    // This returns the correctly offset list of callee save registers.
    RegisterAtOffsetList calleeSaveRegisterAtOffsetList() const;

    // This just tells you what the callee saves are.
    RegisterSet calleeSaveRegisters() const { return m_calleeSaveRegisters; }

    // Recomputes predecessors and deletes unreachable blocks.
    JS_EXPORT_PRIVATE void resetReachability();

    JS_EXPORT_PRIVATE void dump(PrintStream&) const;

    unsigned size() const { return m_blocks.size(); }
    BasicBlock* at(unsigned index) const { return m_blocks[index].get(); }
    BasicBlock* operator[](unsigned index) const { return at(index); }

    // This is used by phases that optimize the block list. You shouldn't use this unless you
    // really know what you're doing.
    Vector<std::unique_ptr<BasicBlock>>& blockList() { return m_blocks; }

    // Finds the smallest index' such that at(index') != null and index' >= index.
    JS_EXPORT_PRIVATE unsigned findFirstBlockIndex(unsigned index) const;

    // Finds the smallest index' such that at(index') != null and index' > index.
    unsigned findNextBlockIndex(unsigned index) const;

    BasicBlock* findNextBlock(BasicBlock*) const;

    class iterator {
    public:
        iterator()
            : m_code(nullptr)
            , m_index(0)
        {
        }

        iterator(const Code& code, unsigned index)
            : m_code(&code)
            , m_index(m_code->findFirstBlockIndex(index))
        {
        }

        BasicBlock* operator*()
        {
            return m_code->at(m_index);
        }

        iterator& operator++()
        {
            m_index = m_code->findFirstBlockIndex(m_index + 1);
            return *this;
        }

        bool operator==(const iterator& other) const
        {
            return m_index == other.m_index;
        }

        bool operator!=(const iterator& other) const
        {
            return !(*this == other);
        }

    private:
        const Code* m_code;
        unsigned m_index;
    };

    iterator begin() const { return iterator(*this, 0); }
    iterator end() const { return iterator(*this, size()); }
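
    // Iteration sketch (hedged): the iterator skips null entries in the block list, so a
    // range-based for loop sees only live blocks:
    //
    //     for (BasicBlock* block : code)
    //         dataLog("block frequency: ", block->frequency(), "\n");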

    const SparseCollection<StackSlot>& stackSlots() const { return m_stackSlots; }
    SparseCollection<StackSlot>& stackSlots() { return m_stackSlots; }

    const SparseCollection<Special>& specials() const { return m_specials; }
    SparseCollection<Special>& specials() { return m_specials; }

    template<typename Callback>
    void forAllTmps(const Callback& callback) const
    {
        for (unsigned i = m_numGPTmps; i--;)
            callback(Tmp::gpTmpForIndex(i));
        for (unsigned i = m_numFPTmps; i--;)
            callback(Tmp::fpTmpForIndex(i));
    }

    void addFastTmp(Tmp);
    bool isFastTmp(Tmp tmp) const { return m_fastTmps.contains(tmp); }

    CFG& cfg() const { return *m_cfg; }

    void* addDataSection(size_t);
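
    // Sketch (hedged): the returned memory is kept alive as a byproduct of the Procedure, so it
    // outlives this Code:
    //
    //     double* constants = static_cast<double*>(code.addDataSection(2 * sizeof(double)));
    //     constants[0] = 3.14159;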

    // The name has to be a string literal, since we don't do any memory management for the string.
    void setLastPhaseName(const char* name)
    {
        m_lastPhaseName = name;
    }
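
    // For example (hedged): since only the pointer is stored, callers pass string literals:
    //
    //     code.setLastPhaseName("allocateStack");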

    const char* lastPhaseName() const { return m_lastPhaseName; }

    void setWasmBoundsCheckGenerator(RefPtr<WasmBoundsCheckGenerator> generator)
    {
        m_wasmBoundsCheckGenerator = generator;
    }

    RefPtr<WasmBoundsCheckGenerator> wasmBoundsCheckGenerator() const { return m_wasmBoundsCheckGenerator; }

    // This is a hash of the code. You can use this if you want to put code into a hashtable, but
    // it's mainly for validating the results from JSAir.
    unsigned jsHash() const;

    void setDisassembler(std::unique_ptr<Disassembler>&& disassembler) { m_disassembler = WTFMove(disassembler); }
    Disassembler* disassembler() { return m_disassembler.get(); }

    RegisterSet mutableGPRs();
    RegisterSet mutableFPRs();
    RegisterSet pinnedRegisters() const { return m_pinnedRegs; }

    WeakRandom& weakRandom() { return m_weakRandom; }

    void emitDefaultPrologue(CCallHelpers&);

    std::unique_ptr<GenerateAndAllocateRegisters> m_generateAndAllocateRegisters;

private:
    friend class ::JSC::B3::Procedure;
    friend class BlockInsertionSet;

    Code(Procedure&);

    void setRegsInPriorityOrder(Bank, const Vector<Reg>&);

    Vector<Reg>& regsInPriorityOrderImpl(Bank bank)
    {
        switch (bank) {
        case GP:
            return m_gpRegsInPriorityOrder;
        case FP:
            return m_fpRegsInPriorityOrder;
        }
        ASSERT_NOT_REACHED();
    }

    WeakRandom m_weakRandom;
    Procedure& m_proc; // Some meta-data, like byproducts, is stored in the Procedure.
    Vector<Reg> m_gpRegsInPriorityOrder;
    Vector<Reg> m_fpRegsInPriorityOrder;
    RegisterSet m_mutableRegs;
    RegisterSet m_pinnedRegs;
    SparseCollection<StackSlot> m_stackSlots;
    Vector<std::unique_ptr<BasicBlock>> m_blocks;
    SparseCollection<Special> m_specials;
    std::unique_ptr<CFG> m_cfg;
    HashSet<Tmp> m_fastTmps;
    CCallSpecial* m_cCallSpecial { nullptr };
    unsigned m_numGPTmps { 0 };
    unsigned m_numFPTmps { 0 };
    unsigned m_frameSize { 0 };
    unsigned m_callArgAreaSize { 0 };
    bool m_stackIsAllocated { false };
    RegisterAtOffsetList m_uncorrectedCalleeSaveRegisterAtOffsetList;
    RegisterSet m_calleeSaveRegisters;
    StackSlot* m_calleeSaveStackSlot { nullptr };
    Vector<FrequentedBlock> m_entrypoints; // This is empty until after lowerEntrySwitch().
    Vector<CCallHelpers::Label> m_entrypointLabels; // This is empty until code generation.
    Vector<Ref<PrologueGenerator>, 1> m_prologueGenerators;
    RefPtr<WasmBoundsCheckGenerator> m_wasmBoundsCheckGenerator;
    const char* m_lastPhaseName;
    std::unique_ptr<Disassembler> m_disassembler;
    unsigned m_optLevel { defaultOptLevel() };
    Ref<PrologueGenerator> m_defaultPrologueGenerator;
};

} } } // namespace JSC::B3::Air

#if ASSERT_DISABLED
IGNORE_RETURN_TYPE_WARNINGS_END
#endif

#endif // ENABLE(B3_JIT)
