/*
 * Copyright (C) 2015-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#pragma once

#if ENABLE(B3_JIT)

#include "AirInst.h"
#include "AirOpcodeUtils.h"
#include "AirSpecial.h"
#include "AirStackSlot.h"
#include "B3Value.h"

namespace JSC { namespace B3 { namespace Air {

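// Visits every Thing of the requested type (e.g. Tmp or Reg) mentioned by this
// instruction's arguments, passing along each argument's role, bank, and width.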
template<typename Thing, typename Functor>
void Inst::forEach(const Functor& functor)
{
    forEachArg(
        [&] (Arg& arg, Arg::Role role, Bank bank, Width width) {
            arg.forEach<Thing>(role, bank, width, functor);
        });
}

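// Patch instructions may clobber registers beyond those named in their argument list.
// Both queries delegate to the instruction's Special, which is always args[0] of a Patch.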
inline RegisterSet Inst::extraClobberedRegs()
{
    ASSERT(kind.opcode == Patch);
    return args[0].special()->extraClobberedRegs(*this);
}

inline RegisterSet Inst::extraEarlyClobberedRegs()
{
    ASSERT(kind.opcode == Patch);
    return args[0].special()->extraEarlyClobberedRegs(*this);
}

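// Visits the defs that happen at the boundary between prevInst and nextInst: the late
// defs of prevInst and the early defs of nextInst are both live at that point. Either
// instruction may be null.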
template<typename Thing, typename Functor>
inline void Inst::forEachDef(Inst* prevInst, Inst* nextInst, const Functor& functor)
{
    if (prevInst) {
        prevInst->forEach<Thing>(
            [&] (Thing& thing, Arg::Role role, Bank argBank, Width argWidth) {
                if (Arg::isLateDef(role))
                    functor(thing, role, argBank, argWidth);
            });
    }

    if (nextInst) {
        nextInst->forEach<Thing>(
            [&] (Thing& thing, Arg::Role role, Bank argBank, Width argWidth) {
                if (Arg::isEarlyDef(role))
                    functor(thing, role, argBank, argWidth);
            });
    }
}

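// Like forEachDef, but also reports registers clobbered by Patch instructions on either
// side of the boundary. Clobbers are reported at the bank's conservative width, since
// the exact width of a clobber is not known.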
template<typename Thing, typename Functor>
inline void Inst::forEachDefWithExtraClobberedRegs(
    Inst* prevInst, Inst* nextInst, const Functor& functor)
{
    forEachDef<Thing>(prevInst, nextInst, functor);

    Arg::Role regDefRole;

    auto reportReg = [&] (Reg reg) {
        Bank bank = reg.isGPR() ? GP : FP;
        functor(Thing(reg), regDefRole, bank, conservativeWidth(bank));
    };

    if (prevInst && prevInst->kind.opcode == Patch) {
        regDefRole = Arg::Def;
        prevInst->extraClobberedRegs().forEach(reportReg);
    }

    if (nextInst && nextInst->kind.opcode == Patch) {
        regDefRole = Arg::EarlyDef;
        nextInst->extraEarlyClobberedRegs().forEach(reportReg);
    }
}

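// Forwards the set of registers in use across this Patch to its Special, which can use
// that information, for example when choosing scratch registers.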
inline void Inst::reportUsedRegisters(const RegisterSet& usedRegisters)
{
    ASSERT(kind.opcode == Patch);
    args[0].special()->reportUsedRegisters(*this, usedRegisters);
}

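// Convenience overloads: recover the argument's index by pointer arithmetic (arg must
// be an element of this instruction's args vector), then defer to the index-based query.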
inline bool Inst::admitsStack(Arg& arg)
{
    return admitsStack(&arg - &args[0]);
}

inline bool Inst::admitsExtendedOffsetAddr(Arg& arg)
{
    return admitsExtendedOffsetAddr(&arg - &args[0]);
}

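// On x86, most arithmetic is two-operand: the destination register is also a source.
// When a three-operand Air form will be lowered to such an instruction, this returns
// the index of the def argument so the register allocator can try to alias a use to it.
// (With AVX, floating-point add/mul have true three-operand forms, hence the early out.)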
inline Optional<unsigned> Inst::shouldTryAliasingDef()
{
    if (!isX86())
        return WTF::nullopt;

    switch (kind.opcode) {
    case Add32:
    case Add64:
    case And32:
    case And64:
    case Mul32:
    case Mul64:
    case Or32:
    case Or64:
    case Xor32:
    case Xor64:
    case AndFloat:
    case AndDouble:
    case OrFloat:
    case OrDouble:
    case XorDouble:
    case XorFloat:
        if (args.size() == 3)
            return 2;
        break;
    case AddDouble:
    case AddFloat:
    case MulDouble:
    case MulFloat:
#if CPU(X86) || CPU(X86_64)
        if (MacroAssembler::supportsAVX())
            return WTF::nullopt;
#endif
        if (args.size() == 3)
            return 2;
        break;
    case BranchAdd32:
    case BranchAdd64:
        if (args.size() == 4)
            return 3;
        break;
    case MoveConditionally32:
    case MoveConditionally64:
    case MoveConditionallyTest32:
    case MoveConditionallyTest64:
    case MoveConditionallyDouble:
    case MoveConditionallyFloat:
    case MoveDoubleConditionally32:
    case MoveDoubleConditionally64:
    case MoveDoubleConditionallyTest32:
    case MoveDoubleConditionallyTest64:
    case MoveDoubleConditionallyDouble:
    case MoveDoubleConditionallyFloat:
        if (args.size() == 6)
            return 5;
        break;
    case Patch:
        return PatchCustom::shouldTryAliasingDef(*this);
    default:
        break;
    }
    return WTF::nullopt;
}

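// x86 shift and rotate instructions take a variable shift amount only in %cl, so the
// count argument must be pinned to ecx. Other architectures have no such restriction.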
inline bool isShiftValid(const Inst& inst)
{
#if CPU(X86) || CPU(X86_64)
    return inst.args[0] == Tmp(X86Registers::ecx);
#else
    UNUSED_PARAM(inst);
    return true;
#endif
}

inline bool isLshift32Valid(const Inst& inst)
{
    return isShiftValid(inst);
}

inline bool isLshift64Valid(const Inst& inst)
{
    return isShiftValid(inst);
}

inline bool isRshift32Valid(const Inst& inst)
{
    return isShiftValid(inst);
}

inline bool isRshift64Valid(const Inst& inst)
{
    return isShiftValid(inst);
}

inline bool isUrshift32Valid(const Inst& inst)
{
    return isShiftValid(inst);
}

inline bool isUrshift64Valid(const Inst& inst)
{
    return isShiftValid(inst);
}

inline bool isRotateRight32Valid(const Inst& inst)
{
    return isShiftValid(inst);
}

inline bool isRotateLeft32Valid(const Inst& inst)
{
    return isShiftValid(inst);
}

inline bool isRotateRight64Valid(const Inst& inst)
{
    return isShiftValid(inst);
}

inline bool isRotateLeft64Valid(const Inst& inst)
{
    return isShiftValid(inst);
}

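// x86 div/idiv (and the cdq/cqo sign-extension helpers) operate on the fixed register
// pair eax/edx, so those two arguments must be pinned accordingly.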
inline bool isX86DivHelperValid(const Inst& inst)
{
#if CPU(X86) || CPU(X86_64)
    return inst.args[0] == Tmp(X86Registers::eax)
        && inst.args[1] == Tmp(X86Registers::edx);
#else
    UNUSED_PARAM(inst);
    return false;
#endif
}

inline bool isX86ConvertToDoubleWord32Valid(const Inst& inst)
{
    return isX86DivHelperValid(inst);
}

inline bool isX86ConvertToQuadWord64Valid(const Inst& inst)
{
    return isX86DivHelperValid(inst);
}

inline bool isX86Div32Valid(const Inst& inst)
{
    return isX86DivHelperValid(inst);
}

inline bool isX86UDiv32Valid(const Inst& inst)
{
    return isX86DivHelperValid(inst);
}

inline bool isX86Div64Valid(const Inst& inst)
{
    return isX86DivHelperValid(inst);
}

inline bool isX86UDiv64Valid(const Inst& inst)
{
    return isX86DivHelperValid(inst);
}

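// x86 cmpxchg requires the expected value in eax. The position of that argument depends
// on the form of the instruction: args[0] in the 3-argument form, args[1] in the
// 5-argument form.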
inline bool isAtomicStrongCASValid(const Inst& inst)
{
#if CPU(X86) || CPU(X86_64)
    switch (inst.args.size()) {
    case 3:
        return inst.args[0] == Tmp(X86Registers::eax);
    case 5:
        return inst.args[1] == Tmp(X86Registers::eax);
    default:
        return false;
    }
#else // CPU(X86) || CPU(X86_64)
    UNUSED_PARAM(inst);
    return false;
#endif // CPU(X86) || CPU(X86_64)
}

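// The branching CAS forms carry their condition first, so the expected value sits at
// args[1]; on x86 it must likewise be in eax.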
inline bool isBranchAtomicStrongCASValid(const Inst& inst)
{
#if CPU(X86) || CPU(X86_64)
    return inst.args[1] == Tmp(X86Registers::eax);
#else // CPU(X86) || CPU(X86_64)
    UNUSED_PARAM(inst);
    return false;
#endif // CPU(X86) || CPU(X86_64)
}

inline bool isAtomicStrongCAS8Valid(const Inst& inst)
{
    return isAtomicStrongCASValid(inst);
}

inline bool isAtomicStrongCAS16Valid(const Inst& inst)
{
    return isAtomicStrongCASValid(inst);
}

inline bool isAtomicStrongCAS32Valid(const Inst& inst)
{
    return isAtomicStrongCASValid(inst);
}

inline bool isAtomicStrongCAS64Valid(const Inst& inst)
{
    return isAtomicStrongCASValid(inst);
}

inline bool isBranchAtomicStrongCAS8Valid(const Inst& inst)
{
    return isBranchAtomicStrongCASValid(inst);
}

inline bool isBranchAtomicStrongCAS16Valid(const Inst& inst)
{
    return isBranchAtomicStrongCASValid(inst);
}

inline bool isBranchAtomicStrongCAS32Valid(const Inst& inst)
{
    return isBranchAtomicStrongCASValid(inst);
}

inline bool isBranchAtomicStrongCAS64Valid(const Inst& inst)
{
    return isBranchAtomicStrongCASValid(inst);
}

} } } // namespace JSC::B3::Air

#endif // ENABLE(B3_JIT)