/*
 * Copyright (C) 2015-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#pragma once

#if ENABLE(B3_JIT)

#include "AirArg.h"
#include "AirKind.h"
#include "CCallHelpers.h"

namespace JSC {

class CCallHelpers;
class RegisterSet;

namespace B3 {

class Value;

namespace Air {

struct GenerationContext;

struct Inst {
    typedef Vector<Arg, 3> ArgList;

    Inst()
        : origin(nullptr)
    {
    }

    Inst(Kind kind, Value* origin)
        : origin(origin)
        , kind(kind)
    {
    }

    template<typename... Arguments>
    Inst(Kind kind, Value* origin, Arg arg, Arguments... arguments)
        : args{ arg, arguments... }
        , origin(origin)
        , kind(kind)
    {
    }

    Inst(Kind kind, Value* origin, const ArgList& arguments)
        : args(arguments)
        , origin(origin)
        , kind(kind)
    {
    }

    Inst(Kind kind, Value* origin, ArgList&& arguments)
        : args(WTFMove(arguments))
        , origin(origin)
        , kind(kind)
    {
    }

    explicit operator bool() const { return origin || kind || args.size(); }

    void append() { }

    template<typename... Arguments>
    void append(Arg arg, Arguments... arguments)
    {
        args.append(arg);
        append(arguments...);
    }
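
    // For example, an instruction can be built from a constructor call and then extended with
    // append(). This is only an illustrative sketch: it assumes the Add32 opcode from AirOpcode.h,
    // the Arg::tmp() factory from AirArg.h, an originating B3::Value* "value", and hypothetical
    // Tmps "src", "dst", and "scratch":
    //     Inst inst(Add32, value, Arg::tmp(src), Arg::tmp(dst));
    //     inst.append(Arg::tmp(scratch));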

    // Note that these functors all avoid using "const" because we want to use them for things that
    // edit IR. IR is meant to be edited; if you're carrying around a "const Inst&" then you're
    // probably doing it wrong.

    // This only walks those Tmps that are explicitly mentioned, and it doesn't tell you their role
    // or type.
    template<typename Functor>
    void forEachTmpFast(const Functor& functor)
    {
        for (Arg& arg : args)
            arg.forEachTmpFast(functor);
    }
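
    // For example, a pass that renames temporaries could be sketched like this, where "remap"
    // stands in for some hypothetical Tmp-to-Tmp mapping:
    //     inst.forEachTmpFast([&] (Tmp& tmp) {
    //         tmp = remap(tmp);
    //     });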

    typedef void EachArgCallback(Arg&, Arg::Role, Bank, Width);

    // Calls the functor with (arg, role, bank, width). This function is auto-generated by
    // opcode_generator.rb.
    template<typename Functor>
    void forEachArg(const Functor&);

    // Calls the functor with (tmp, role, bank, width).
    template<typename Functor>
    void forEachTmp(const Functor& functor)
    {
        forEachArg(
            [&] (Arg& arg, Arg::Role role, Bank bank, Width width) {
                arg.forEachTmp(role, bank, width, functor);
            });
    }
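
    // For example, a liveness-style walk could be sketched like this, where "markLive" stands in
    // for some hypothetical bookkeeping:
    //     inst.forEachTmp([&] (Tmp& tmp, Arg::Role role, Bank bank, Width width) {
    //         if (Arg::isAnyUse(role))
    //             markLive(tmp, bank, width);
    //     });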

    // Thing can be either Arg, Tmp, or StackSlot*.
    template<typename Thing, typename Functor>
    void forEach(const Functor&);
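
    // For example, a sketch of rewriting one stack slot to another, assuming the functor is called
    // with (Thing&, Arg::Role, Bank, Width) and "oldSlot"/"newSlot" are hypothetical StackSlot*s:
    //     inst.forEach<StackSlot*>([&] (StackSlot*& slot, Arg::Role, Bank, Width) {
    //         if (slot == oldSlot)
    //             slot = newSlot;
    //     });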

    // Reports any additional registers clobbered by this operation. Note that for efficiency,
    // extraClobberedRegs() only works for the Patch opcode.
    RegisterSet extraClobberedRegs();
    RegisterSet extraEarlyClobberedRegs();

    // Iterate over all Def's that happen at the end of an instruction. You supply a pair of
    // instructions. The instructions must appear next to each other, in that order, in some basic
    // block. You can pass null for the first instruction when analyzing what happens at the top of
    // a basic block. You can pass null for the second instruction when analyzing what happens at the
    // bottom of a basic block.
    template<typename Thing, typename Functor>
    static void forEachDef(Inst* prevInst, Inst* nextInst, const Functor&);
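
    // For example, a sketch of collecting every Tmp defined between two adjacent instructions "a"
    // and "b" in a block, where "defs" is a hypothetical Vector<Tmp>:
    //     Inst::forEachDef<Tmp>(&a, &b, [&] (Tmp& tmp, Arg::Role, Bank, Width) {
    //         defs.append(tmp);
    //     });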

    // Iterate over all Def's that happen at the end of this instruction, including extra clobbered
    // registers. Note that Thing can only be Arg or Tmp when you use this functor.
    template<typename Thing, typename Functor>
    static void forEachDefWithExtraClobberedRegs(Inst* prevInst, Inst* nextInst, const Functor&);

    // Some summaries about all arguments. These are useful for needsPadding().
    bool hasEarlyDef();
    bool hasLateUseOrDef();

    // Check if there needs to be a padding Nop between these two instructions.
    static bool needsPadding(Inst* prevInst, Inst* nextInst);

    // Use this to report which registers are live. This should be done just before codegen. Note
    // that for efficiency, reportUsedRegisters() only works for the Patch opcode.
    void reportUsedRegisters(const RegisterSet&);

    // Is this instruction in one of the valid forms right now? This function is auto-generated by
    // opcode_generator.rb.
    bool isValidForm();

    // Assuming this instruction is in a valid form right now, will it still be in one of the valid
    // forms if we put an Addr referencing the stack (or a StackSlot or CallArg, of course) in the
    // given index? Spilling uses this: it walks the args by index to find Tmps that need spilling;
    // if it finds one, it calls this to see if it can replace the Arg::Tmp with an Arg::Addr. If it
    // finds a non-Tmp Arg, then it calls that Arg's forEachTmp to do a replacement that way.
    //
    // This function is auto-generated by opcode_generator.rb.
    bool admitsStack(unsigned argIndex);
    bool admitsStack(Arg&);
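
    // For example, a spiller deciding whether argument "i" can live on the stack might be sketched
    // like this, assuming the Arg::stack() factory from AirArg.h and a hypothetical StackSlot* "slot":
    //     if (inst.admitsStack(i))
    //         inst.args[i] = Arg::stack(slot);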

    bool admitsExtendedOffsetAddr(unsigned argIndex);
    bool admitsExtendedOffsetAddr(Arg&);

    // Defined by opcode_generator.rb.
    bool isTerminal();

    // Returns true if this instruction can have any effects other than control flow or arguments.
    bool hasNonArgNonControlEffects();

    // Returns true if this instruction can have any effects other than what is implied by arguments.
    // For example, "Move $42, (%rax)" will return false because the effect of storing to (%rax) is
    // implied by the second argument.
    bool hasNonArgEffects();

    // Tells you if this operation has arg effects.
    bool hasArgEffects();

    // Tells you if this operation has non-control effects.
    bool hasNonControlEffects() { return hasNonArgNonControlEffects() || hasArgEffects(); }

    // Generate some code for this instruction. This is, like, literally our backend. If this is the
    // terminal, it returns the jump that needs to be linked for the "then" case, with the "else"
    // case being fall-through. This function is auto-generated by opcode_generator.rb.
    CCallHelpers::Jump generate(CCallHelpers&, GenerationContext&);
    // If the source arguments benefit from being aliased to a destination argument,
    // this returns the index of the destination argument.
    // The sources are assumed to be at (index - 1) and (index - 2).
    // For example,
    //   Add Tmp1, Tmp2, Tmp3
    // returns 2 if arguments 0 and 1 benefit from aliasing to Tmp3.
    Optional<unsigned> shouldTryAliasingDef();

    // This computes a hash for comparing this to JSAir's Inst.
    unsigned jsHash() const;

    void dump(PrintStream&) const;

    ArgList args;
    Value* origin; // The B3::Value that this originated from.
    Kind kind;

private:
    template<typename Func>
    void forEachArgSimple(const Func&);
    void forEachArgCustom(ScopedLambda<EachArgCallback>);
};

} } } // namespace JSC::B3::Air

#endif // ENABLE(B3_JIT)