1/*
2 * Copyright (C) 2015-2017 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26#pragma once
27
28#if ENABLE(B3_JIT)
29
30#include "B3OpaqueByproducts.h"
31#include "B3Origin.h"
32#include "B3PCToOriginMap.h"
33#include "B3SparseCollection.h"
34#include "B3Type.h"
35#include "B3ValueKey.h"
36#include "CCallHelpers.h"
37#include "PureNaN.h"
38#include "RegisterAtOffsetList.h"
39#include <wtf/Bag.h>
40#include <wtf/FastMalloc.h>
41#include <wtf/HashSet.h>
42#include <wtf/IndexedContainerIterator.h>
43#include <wtf/Noncopyable.h>
44#include <wtf/PrintStream.h>
45#include <wtf/SharedTask.h>
46#include <wtf/TriState.h>
47#include <wtf/Vector.h>
48
49namespace JSC { namespace B3 {
50
51class BackwardsCFG;
52class BackwardsDominators;
53class BasicBlock;
54class BlockInsertionSet;
55class CFG;
56class Dominators;
57class NaturalLoops;
58class StackSlot;
59class Value;
60class Variable;
61
62namespace Air { class Code; }
63
64typedef void WasmBoundsCheckGeneratorFunction(CCallHelpers&, GPRReg);
65typedef SharedTask<WasmBoundsCheckGeneratorFunction> WasmBoundsCheckGenerator;
66
67// This represents B3's view of a piece of code. Note that this object must exist in a 1:1
68// relationship with Air::Code. B3::Procedure and Air::Code are just different facades of the B3
69// compiler's knowledge about a piece of code. Some kinds of state aren't perfect fits for either
70// Procedure or Code, and are placed in one or the other based on convenience. Procedure always
// allocates a Code, and a Code cannot be allocated without an owning Procedure; the two always
// hold references to each other.
73
74class Procedure {
75 WTF_MAKE_NONCOPYABLE(Procedure);
76 WTF_MAKE_FAST_ALLOCATED;
77public:
78
79 JS_EXPORT_PRIVATE Procedure();
80 JS_EXPORT_PRIVATE ~Procedure();
81
82 template<typename Callback>
83 void setOriginPrinter(Callback&& callback)
84 {
85 m_originPrinter = createSharedTask<void(PrintStream&, Origin)>(
86 std::forward<Callback>(callback));
87 }
88
89 // Usually you use this via OriginDump, though it's cool to use it directly.
90 void printOrigin(PrintStream& out, Origin origin) const;
91
92 // This is a debugging hack. Sometimes while debugging B3 you need to break the abstraction
93 // and get at the DFG Graph, or whatever data structure the frontend used to describe the
94 // program. The FTL passes the DFG Graph.
95 void setFrontendData(const void* value) { m_frontendData = value; }
96 const void* frontendData() const { return m_frontendData; }
97
98 JS_EXPORT_PRIVATE BasicBlock* addBlock(double frequency = 1);
99
100 // Changes the order of basic blocks to be as in the supplied vector. The vector does not
101 // need to mention every block in the procedure. Blocks not mentioned will be placed after
102 // these blocks in the same order as they were in originally.
103 template<typename BlockIterable>
104 void setBlockOrder(const BlockIterable& iterable)
105 {
106 Vector<BasicBlock*> blocks;
107 for (BasicBlock* block : iterable)
108 blocks.append(block);
109 setBlockOrderImpl(blocks);
110 }
111
112 JS_EXPORT_PRIVATE StackSlot* addStackSlot(unsigned byteSize);
113 JS_EXPORT_PRIVATE Variable* addVariable(Type);
114
115 JS_EXPORT_PRIVATE Type addTuple(Vector<Type>&& types);
116 const Vector<Vector<Type>>& tuples() const { return m_tuples; };
117 bool isValidTuple(Type tuple) const;
118 Type extractFromTuple(Type tuple, unsigned index) const;
119 const Vector<Type>& tupleForType(Type tuple) const;
120
121 unsigned resultCount(Type type) const { return type.isTuple() ? tupleForType(type).size() : type.isNumeric(); }
122 Type typeAtOffset(Type type, unsigned index) const { ASSERT(index < resultCount(type)); return type.isTuple() ? extractFromTuple(type, index) : type; }
123
124 template<typename ValueType, typename... Arguments>
125 ValueType* add(Arguments...);
126
127 Value* clone(Value*);
128
129 Value* addIntConstant(Origin, Type, int64_t value);
130 Value* addIntConstant(Value*, int64_t value);
131
132 // bits is a bitwise_cast of the constant you want.
133 Value* addConstant(Origin, Type, uint64_t bits);
134
135 // You're guaranteed that bottom is zero.
136 Value* addBottom(Origin, Type);
137 Value* addBottom(Value*);
138
139 // Returns null for MixedTriState.
140 Value* addBoolConstant(Origin, TriState);
141
142 void resetValueOwners();
143 JS_EXPORT_PRIVATE void resetReachability();
144
145 // This destroys CFG analyses. If we ask for them again, we will recompute them. Usually you
146 // should call this anytime you call resetReachability().
147 void invalidateCFG();
148
149 JS_EXPORT_PRIVATE void dump(PrintStream&) const;
150
151 unsigned size() const { return m_blocks.size(); }
152 BasicBlock* at(unsigned index) const { return m_blocks[index].get(); }
153 BasicBlock* operator[](unsigned index) const { return at(index); }
154
155 typedef WTF::IndexedContainerIterator<Procedure> iterator;
156
157 iterator begin() const { return iterator(*this, 0); }
158 iterator end() const { return iterator(*this, size()); }
159
160 Vector<BasicBlock*> blocksInPreOrder();
161 Vector<BasicBlock*> blocksInPostOrder();
162
163 SparseCollection<StackSlot>& stackSlots() { return m_stackSlots; }
164 const SparseCollection<StackSlot>& stackSlots() const { return m_stackSlots; }
165
166 // Short for stackSlots().remove(). It's better to call this method since it's out of line.
167 void deleteStackSlot(StackSlot*);
168
169 SparseCollection<Variable>& variables() { return m_variables; }
170 const SparseCollection<Variable>& variables() const { return m_variables; }
171
172 // Short for variables().remove(). It's better to call this method since it's out of line.
173 void deleteVariable(Variable*);
174
175 SparseCollection<Value>& values() { return m_values; }
176 const SparseCollection<Value>& values() const { return m_values; }
177
178 // Short for values().remove(). It's better to call this method since it's out of line.
179 void deleteValue(Value*);
180
181 // A valid procedure cannot contain any orphan values. An orphan is a value that is not in
182 // any basic block. It is possible to create an orphan value during code generation or during
183 // transformation. If you know that you may have created some, you can call this method to
184 // delete them, making the procedure valid again.
185 void deleteOrphans();
186
187 CFG& cfg() const { return *m_cfg; }
188
189 Dominators& dominators();
190 NaturalLoops& naturalLoops();
191 BackwardsCFG& backwardsCFG();
192 BackwardsDominators& backwardsDominators();
193
194 void addFastConstant(const ValueKey&);
195 bool isFastConstant(const ValueKey&);
196
197 unsigned numEntrypoints() const { return m_numEntrypoints; }
198 JS_EXPORT_PRIVATE void setNumEntrypoints(unsigned);
199
200 // Only call this after code generation is complete. Note that the label for the 0th entrypoint
201 // should point to exactly where the code generation cursor was before you started generating
202 // code.
203 JS_EXPORT_PRIVATE CCallHelpers::Label entrypointLabel(unsigned entrypointIndex) const;
204
205 // The name has to be a string literal, since we don't do any memory management for the string.
206 void setLastPhaseName(const char* name)
207 {
208 m_lastPhaseName = name;
209 }
210
211 const char* lastPhaseName() const { return m_lastPhaseName; }
212
213 // Allocates a slab of memory that will be kept alive by anyone who keeps the resulting code
214 // alive. Great for compiler-generated data sections, like switch jump tables and constant pools.
215 // This returns memory that has been zero-initialized.
216 JS_EXPORT_PRIVATE void* addDataSection(size_t);
217
218 // Some operations are specified in B3 IR to behave one way but on this given CPU they behave a
219 // different way. When true, those B3 IR ops switch to behaving the CPU way, and the optimizer may
220 // start taking advantage of it.
221 //
222 // One way to think of it is like this. Imagine that you find that the cleanest way of lowering
223 // something in lowerMacros is to unconditionally replace one opcode with another. This is a shortcut
224 // where you instead keep the same opcode, but rely on the opcode's meaning changes once lowerMacros
225 // sets hasQuirks.
226 bool hasQuirks() const { return m_hasQuirks; }
227 void setHasQuirks(bool value) { m_hasQuirks = value; }
228
229 OpaqueByproducts& byproducts() { return *m_byproducts; }
230
231 // Below are methods that make sense to call after you have generated code for the procedure.
232
233 // You have to call this method after calling generate(). The code generated by B3::generate()
234 // will require you to keep this object alive for as long as that code is runnable. Usually, this
235 // just keeps alive things like the double constant pool and switch lookup tables. If this sounds
236 // confusing, you should probably be using the B3::Compilation API to compile code. If you use
237 // that API, then you don't have to worry about this.
238 std::unique_ptr<OpaqueByproducts> releaseByproducts() { return WTFMove(m_byproducts); }
239
240 // This gives you direct access to Code. However, the idea is that clients of B3 shouldn't have to
241 // call this. So, Procedure has some methods (below) that expose some Air::Code functionality.
242 const Air::Code& code() const { return *m_code; }
243 Air::Code& code() { return *m_code; }
244
245 unsigned callArgAreaSizeInBytes() const;
246 void requestCallArgAreaSizeInBytes(unsigned size);
247
248 // This tells the register allocators to stay away from this register.
249 JS_EXPORT_PRIVATE void pinRegister(Reg);
250
251 JS_EXPORT_PRIVATE void setOptLevel(unsigned value);
252 unsigned optLevel() const { return m_optLevel; }
253
254 // You can turn off used registers calculation. This may speed up compilation a bit. But if
255 // you turn it off then you cannot use StackmapGenerationParams::usedRegisters() or
256 // StackmapGenerationParams::unavailableRegisters().
257 void setNeedsUsedRegisters(bool value) { m_needsUsedRegisters = value; }
258 bool needsUsedRegisters() const { return m_needsUsedRegisters; }
259
260 JS_EXPORT_PRIVATE unsigned frameSize() const;
261 JS_EXPORT_PRIVATE RegisterAtOffsetList calleeSaveRegisterAtOffsetList() const;
262
263 PCToOriginMap& pcToOriginMap() { return m_pcToOriginMap; }
264 PCToOriginMap releasePCToOriginMap() { return WTFMove(m_pcToOriginMap); }
265
266 JS_EXPORT_PRIVATE void setWasmBoundsCheckGenerator(RefPtr<WasmBoundsCheckGenerator>);
267
268 template<typename Functor>
269 void setWasmBoundsCheckGenerator(const Functor& functor)
270 {
271 setWasmBoundsCheckGenerator(RefPtr<WasmBoundsCheckGenerator>(createSharedTask<WasmBoundsCheckGeneratorFunction>(functor)));
272 }
273
274 JS_EXPORT_PRIVATE RegisterSet mutableGPRs();
275 JS_EXPORT_PRIVATE RegisterSet mutableFPRs();
276
277private:
278 friend class BlockInsertionSet;
279
280 JS_EXPORT_PRIVATE Value* addValueImpl(Value*);
281 void setBlockOrderImpl(Vector<BasicBlock*>&);
282
283 SparseCollection<StackSlot> m_stackSlots;
284 SparseCollection<Variable> m_variables;
285 Vector<Vector<Type>> m_tuples;
286 Vector<std::unique_ptr<BasicBlock>> m_blocks;
287 SparseCollection<Value> m_values;
288 std::unique_ptr<CFG> m_cfg;
289 std::unique_ptr<Dominators> m_dominators;
290 std::unique_ptr<NaturalLoops> m_naturalLoops;
291 std::unique_ptr<BackwardsCFG> m_backwardsCFG;
292 std::unique_ptr<BackwardsDominators> m_backwardsDominators;
293 HashSet<ValueKey> m_fastConstants;
294 unsigned m_numEntrypoints { 1 };
295 const char* m_lastPhaseName;
296 std::unique_ptr<OpaqueByproducts> m_byproducts;
297 std::unique_ptr<Air::Code> m_code;
298 RefPtr<SharedTask<void(PrintStream&, Origin)>> m_originPrinter;
299 const void* m_frontendData;
300 PCToOriginMap m_pcToOriginMap;
301 unsigned m_optLevel { defaultOptLevel() };
302 bool m_needsUsedRegisters { true };
303 bool m_hasQuirks { false };
304};
305
306} } // namespace JSC::B3
307
308#endif // ENABLE(B3_JIT)
309