/*
 * Copyright (C) 2015-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#pragma once

#if ENABLE(B3_JIT)

#include "B3ConstrainedValue.h"
#include "B3Value.h"
#include "B3ValueRep.h"
#include "CCallHelpers.h"
#include "RegisterSet.h"
#include <wtf/SharedTask.h>

namespace JSC { namespace B3 {

class StackmapGenerationParams;

typedef void StackmapGeneratorFunction(CCallHelpers&, const StackmapGenerationParams&);
typedef SharedTask<StackmapGeneratorFunction> StackmapGenerator;

class JS_EXPORT_PRIVATE StackmapValue : public Value {
public:
    static bool accepts(Kind kind)
    {
        // This needs to include opcodes of all subclasses.
        switch (kind.opcode()) {
        case CheckAdd:
        case CheckSub:
        case CheckMul:
        case Check:
        case Patchpoint:
            return true;
        default:
            return false;
        }
    }

    ~StackmapValue();

    // Use this to add children.
    void append(const ConstrainedValue& value)
    {
        append(value.value(), value.rep());
    }

    void append(Value*, const ValueRep&);

    template<typename VectorType>
    void appendVector(const VectorType& vector)
    {
        for (const auto& value : vector)
            append(value);
    }

    // Helper for appending a bunch of values with some ValueRep.
    template<typename VectorType>
    void appendVectorWithRep(const VectorType& vector, const ValueRep& rep)
    {
        for (Value* value : vector)
            append(value, rep);
    }

    // Helpers for appending cold Anys. These are often used by clients to implement OSR.
    template<typename VectorType>
    void appendColdAnys(const VectorType& vector)
    {
        appendVectorWithRep(vector, ValueRep::ColdAny);
    }
    template<typename VectorType>
    void appendLateColdAnys(const VectorType& vector)
    {
        appendVectorWithRep(vector, ValueRep::LateColdAny);
    }

    // This is a helper for something you might do a lot of: append a value that should be constrained
    // to SomeRegister.
    void appendSomeRegister(Value*);
    void appendSomeRegisterWithClobber(Value*);
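    //
    // A rough usage sketch of the append helpers (illustrative only; "proc", "value", "origin", and
    // "osrValues" are assumed to exist in client code, and PatchpointValue is one of the StackmapValue
    // subclasses):
    //
    //     PatchpointValue* patchpoint = proc.add<PatchpointValue>(Int64, origin);
    //     patchpoint->appendSomeRegister(value); // The value must be in a register; we pick which one.
    //     patchpoint->append(value, ValueRep::ColdAny); // The value must be recoverable, anywhere.
    //     patchpoint->appendColdAnys(osrValues); // Same as above, for a whole Vector<Value*> of OSR state.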

    const Vector<ValueRep>& reps() const { return m_reps; }

    // Stackmaps allow you to specify that the operation may clobber some registers. Clobbering a register
    // means that the operation appears to store a value into the register, but the compiler doesn't
    // assume anything about what kind of value might have been stored. In B3's model of
    // execution, registers are read or written at instruction boundaries rather than inside the
    // instructions themselves. A register could be read or written immediately before the instruction
    // executes, or immediately after. Note that at a boundary between instruction A and instruction B we
    // simultaneously look at what A does after it executes and what B does before it executes. This is
    // because when the compiler considers what happens to registers, it views the boundary between two
    // instructions as a kind of atomic point where the late effects of A happen at the same time as the
    // early effects of B.
    //
    // The compiler views a stackmap as a single instruction, even though of course the stackmap may be
    // composed of any number of instructions (if it's a Patchpoint). You can claim that a stackmap value
    // clobbers a set of registers before the stackmap's instruction or after. Clobbering before is called
    // early clobber, while clobbering after is called late clobber.
    //
    // This is quite flexible but it has its limitations. Any register listed as an early clobber will
    // interfere with all uses of the stackmap. Any register listed as a late clobber will interfere with
    // all defs of the stackmap (i.e. the result). This means that it's currently not possible to claim
    // to clobber a register while still allowing that register to be used for both an input and an output
    // of the instruction. It just so happens that B3's sole client (the FTL) currently never wants to
    // convey such a constraint, but it will want it eventually (FIXME:
    // https://bugs.webkit.org/show_bug.cgi?id=151823).
    //
    // Note that a common use case of early clobber sets is to indicate the set of registers that shall
    // not be used for inputs to the value. But B3 supports two different ways of specifying
    // this, the other being LateUse in combination with late clobber (not yet available to stackmaps
    // directly, FIXME: https://bugs.webkit.org/show_bug.cgi?id=151335). A late use makes the use of that
    // value appear to happen after the instruction. This means that a late use cannot use the same
    // register as the result, and it cannot use the same register as either early- or late-clobbered
    // registers. Late uses are usually a better way of saying that a clobbered register cannot be used
    // for an input. Early clobber means that some register(s) interfere with *all* inputs, while LateUse
    // means that some value interferes with whatever is live after the instruction. Below is a list of
    // examples of how the FTL can handle its various kinds of scenarios using a combination of early
    // clobber, late clobber, and late use. These examples are for X86_64, w.l.o.g.
    //
    // Basic ById patchpoint: Early and late clobber of r11. Early clobber prevents any inputs from using
    // r11 since that would mess with the MacroAssembler's assumptions when we
    // AllowMacroScratchRegisterUsage. Late clobber tells B3 that the patchpoint may overwrite r11.
    //
    // ById patchpoint in a try block with some live state: This might throw an exception after already
    // assigning to the result. So, this should LateUse all stackmap values to ensure that the stackmap
    // values don't interfere with the result. Note that we do not LateUse the non-OSR inputs of the ById
    // since LateUse implies that the use is cold: the register allocator will assume that the use is not
    // important for the critical path. Also, early and late clobber of r11.
    //
    // Basic ByIdFlush patchpoint: We could do Flush the same way we did it with LLVM: ignore it and let
    // PolymorphicAccess figure it out. Or, we could add internal clobber support (FIXME:
    // https://bugs.webkit.org/show_bug.cgi?id=151823). Or, we could do it by early clobbering r11, late
    // clobbering all volatile registers, and constraining the result to some register. Or, we could do
    // that but leave the result constrained to SomeRegister, which will cause it to use a callee-save
    // register. Internal clobber support would allow us to use SomeRegister while getting the result into
    // a volatile register.
    //
    // ByIdFlush patchpoint in a try block with some live state: LateUse all for-OSR stackmap values,
    // early clobber of r11 to prevent the other inputs from using r11, and late clobber of all volatile
    // registers to make way for the call. To handle the result, we could do any of what is listed in the
    // previous paragraph.
    //
    // Basic JS call: Force all non-OSR inputs into specific locations (register, stack, whatever).
    // All volatile registers are late-clobbered. The output is constrained to a register as well.
    //
    // JS call in a try block with some live state: LateUse all for-OSR stackmap values, fully constrain
    // all non-OSR inputs and the result, and late clobber all volatile registers.
    //
    // JS tail call: Pass all inputs as a warm variant of Any (FIXME:
    // https://bugs.webkit.org/show_bug.cgi?id=151811).
    //
    // Note that we cannot yet do all of these things because although Air already supports all of these
    // various forms of uses (LateUse and warm unconstrained use), B3 doesn't yet expose all of it. The
    // bugs are:
    // https://bugs.webkit.org/show_bug.cgi?id=151335 (LateUse)
    // https://bugs.webkit.org/show_bug.cgi?id=151811 (warm Any)
    void clobberEarly(const RegisterSet& set)
    {
        m_earlyClobbered.merge(set);
    }

    void clobberLate(const RegisterSet& set)
    {
        m_lateClobbered.merge(set);
    }

    void clobber(const RegisterSet& set)
    {
        clobberEarly(set);
        clobberLate(set);
    }
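    //
    // A rough sketch of the "basic ById patchpoint" recipe described above (illustrative only;
    // "patchpoint" is assumed to be a PatchpointValue* created by the client, on X86_64):
    //
    //     RegisterSet scratch;
    //     scratch.set(X86Registers::r11);
    //     patchpoint->clobber(scratch); // Equivalent to clobberEarly(scratch) plus clobberLate(scratch).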

    RegisterSet& earlyClobbered() { return m_earlyClobbered; }
    RegisterSet& lateClobbered() { return m_lateClobbered; }
    const RegisterSet& earlyClobbered() const { return m_earlyClobbered; }
    const RegisterSet& lateClobbered() const { return m_lateClobbered; }

    void setGenerator(RefPtr<StackmapGenerator> generator)
    {
        m_generator = generator;
    }

    template<typename Functor>
    void setGenerator(const Functor& functor)
    {
        m_generator = createSharedTask<StackmapGeneratorFunction>(functor);
    }

    RefPtr<StackmapGenerator> generator() const { return m_generator; }
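    //
    // A minimal generator sketch (illustrative only; "patchpoint" is assumed to be a PatchpointValue*
    // with a non-Void type and one appended child, so params[0] is the result's rep and params[1] is
    // the child's rep):
    //
    //     patchpoint->setGenerator(
    //         [] (CCallHelpers& jit, const StackmapGenerationParams& params) {
    //             jit.move(params[1].gpr(), params[0].gpr());
    //         });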

    ConstrainedValue constrainedChild(unsigned index) const
    {
        return ConstrainedValue(child(index), index < m_reps.size() ? m_reps[index] : ValueRep::ColdAny);
    }

    void setConstrainedChild(unsigned index, const ConstrainedValue&);

    void setConstraint(unsigned index, const ValueRep&);

    class ConstrainedValueCollection {
    public:
        ConstrainedValueCollection(const StackmapValue& value)
            : m_value(value)
        {
        }

        unsigned size() const { return m_value.numChildren(); }

        ConstrainedValue at(unsigned index) const { return m_value.constrainedChild(index); }

        ConstrainedValue operator[](unsigned index) const { return at(index); }

        class iterator {
        public:
            iterator()
                : m_collection(nullptr)
                , m_index(0)
            {
            }

            iterator(const ConstrainedValueCollection& collection, unsigned index)
                : m_collection(&collection)
                , m_index(index)
            {
            }

            ConstrainedValue operator*() const
            {
                return m_collection->at(m_index);
            }

            iterator& operator++()
            {
                m_index++;
                return *this;
            }

            bool operator==(const iterator& other) const
            {
                ASSERT(m_collection == other.m_collection);
                return m_index == other.m_index;
            }

            bool operator!=(const iterator& other) const
            {
                return !(*this == other);
            }

        private:
            const ConstrainedValueCollection* m_collection;
            unsigned m_index;
        };

        iterator begin() const { return iterator(*this, 0); }
        iterator end() const { return iterator(*this, size()); }

    private:
        const StackmapValue& m_value;
    };

    ConstrainedValueCollection constrainedChildren() const
    {
        return ConstrainedValueCollection(*this);
    }
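    //
    // A minimal iteration sketch (illustrative only; "patchpoint" is assumed to be a StackmapValue*):
    //
    //     for (ConstrainedValue constrainedValue : patchpoint->constrainedChildren())
    //         dataLog(constrainedValue, "\n"); // Prints each child together with its ValueRep constraint.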

    B3_SPECIALIZE_VALUE_FOR_VARARGS_CHILDREN

protected:
    void dumpChildren(CommaPrinter&, PrintStream&) const override;
    void dumpMeta(CommaPrinter&, PrintStream&) const override;

    StackmapValue(CheckedOpcodeTag, Kind, Type, Origin);

private:
    friend class CheckSpecial;
    friend class PatchpointSpecial;
    friend class StackmapGenerationParams;
    friend class StackmapSpecial;

    Vector<ValueRep> m_reps;
    RefPtr<StackmapGenerator> m_generator;
    RegisterSet m_earlyClobbered;
    RegisterSet m_lateClobbered;
    RegisterSet m_usedRegisters; // Stackmaps could be further duplicated by Air, but that's unlikely, so we just merge the used-register sets if that were to happen.
};

} } // namespace JSC::B3

#endif // ENABLE(B3_JIT)