/*
 * Copyright (C) 2015-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#pragma once

#if ENABLE(B3_JIT)

#include "AirTmp.h"
#include "B3Bank.h"
#include "B3Common.h"
#include "B3Type.h"
#include "B3Value.h"
#include "B3Width.h"
#include <wtf/Optional.h>

#if ASSERT_DISABLED
IGNORE_RETURN_TYPE_WARNINGS_BEGIN
#endif

namespace JSC { namespace B3 {

class Value;

namespace Air {

class Special;
class StackSlot;

// This class name is also intentionally terse because we will say it a lot. You'll see code like
// Inst(..., Arg::imm(5), Arg::addr(thing, blah), ...)
class Arg {
public:
    // These enum members are intentionally terse because we have to mention them a lot.
    enum Kind : int8_t {
        Invalid,

        // This is either an unassigned temporary or a register. All unassigned temporaries
        // eventually become registers.
        Tmp,

        // This is an immediate that the instruction will materialize. Imm is the immediate that
        // can be inlined into most instructions, while BigImm indicates a constant
        // materialization and is usually only usable with Move. Specials may also admit it, for
        // example for stackmaps used for OSR exit and tail calls.
        // BitImm is an immediate for bitwise operations (And, Xor, etc.).
        Imm,
        BigImm,
        BitImm,
        BitImm64,
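
        // An illustrative sketch (hypothetical values), using the factory functions defined
        // below: Arg::imm(5) yields Imm (inlined directly into the instruction),
        // Arg::bigImm(0x123456789) yields BigImm (materialized separately, usually via a
        // Move), and Arg::bitImm(0xff) yields BitImm (an immediate for a bitwise operation).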

        // These are the addresses. Instructions may load from (Use), store to (Def), or evaluate
        // (UseAddr) addresses.
        SimpleAddr,
        Addr,
        ExtendedOffsetAddr,
        Stack,
        CallArg,
        Index,

        // Immediate operands that customize the behavior of an operation. You can think of them as
        // secondary opcodes. They are always "Use"'d.
        RelCond,
        ResCond,
        DoubleCond,
        StatusCond,
        Special,
        WidthArg
    };

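    // The temperature of a use: a Cold use is one that could be replaced with something on
    // the stack for free, while every other use is Warm (see the ColdUse role below).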
    enum Temperature : int8_t {
        Cold,
        Warm
    };

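    // Where in an Inst's execution an Arg is live: Early is before the Inst executes, Late is
    // after. A Role is active at one or both phases; a Timing summarizes which (see timing()
    // and activeAt() below).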
    enum Phase : int8_t {
        Early,
        Late
    };

    enum Timing : int8_t {
        OnlyEarly,
        OnlyLate,
        EarlyAndLate
    };

    enum Role : int8_t {
        // Use means that the Inst will read from this value before doing anything else.
        //
        // For Tmp: The Inst will read this Tmp.
        // For Arg::addr and friends: The Inst will load from this address.
        // For Arg::imm and friends: The Inst will materialize and use this immediate.
        // For RelCond/ResCond/Special: This is the only valid role for these kinds.
        //
        // Note that Use of an address does not mean escape. It only means that the instruction will
        // load from the address before doing anything else. This is a bit tricky; for example
        // Specials could theoretically squirrel away the address and effectively escape it. However,
        // this is not legal. On the other hand, any address other than Stack is presumed to be
        // always escaping, and Stack is presumed to be always escaping if it's Locked.
        Use,

        // Exactly like Use, except that it also implies that the use is cold: that is, replacing the
        // use with something on the stack is free.
        ColdUse,

        // LateUse means that the Inst will read from this value after doing its Def's. Note that LateUse
        // on an Addr or Index still means Use on the internal temporaries. Note that specifying the
        // same Tmp once as Def and once as LateUse has undefined behavior: the use may happen before
        // the def, or it may happen after it.
        LateUse,

        // Combination of LateUse and ColdUse.
        LateColdUse,

        // Def means that the Inst will write to this value after doing everything else.
        //
        // For Tmp: The Inst will write to this Tmp.
        // For Arg::addr and friends: The Inst will store to this address.
        // This isn't valid for any other kinds.
        //
        // Like Use of an address, Def of an address does not mean escape.
        Def,

        // This is a special variant of Def that implies that the upper bits of the target register are
        // zero-filled. Specifically, if the Width of a ZDef is less than the largest possible width of
        // the argument (for example, we're on a 64-bit machine and we have a Width32 ZDef of a GPR) then
        // this has different implications for the upper bits (i.e. the top 32 bits in our example)
        // depending on the kind of the argument:
        //
        // For register: the upper bits are zero-filled.
        // For anonymous stack slot: the upper bits are zero-filled.
        // For address: the upper bits are not touched (i.e. we do a 32-bit store in our example).
        // For tmp: either the upper bits are not touched or they are zero-filled, and we won't know
        // which until we lower the tmp to either a StackSlot or a Reg.
        //
        // The behavior of ZDef is consistent with what happens when you perform 32-bit operations on a
        // 64-bit GPR. It's not consistent with what happens with 8-bit or 16-bit Defs on x86 GPRs, or
        // what happens with float Defs in ARM NEON or X86 SSE. Hence we have both Def and ZDef.
        ZDef,

        // This is a combined Use and Def. It means that both things happen.
        UseDef,

        // This is a combined Use and ZDef. It means that both things happen.
        UseZDef,

        // This is like Def, but implies that the assignment occurs before the start of the Inst's
        // execution rather than after. Note that specifying the same Tmp once as EarlyDef and once
        // as Use has undefined behavior: the use may happen before the def, or it may happen after
        // it.
        EarlyDef,

        EarlyZDef,

        // Some instructions need a scratch register. We model this by saying that the temporary is
        // defined early and used late. This role implies that.
        Scratch,

        // This is a special kind of use that is only valid for addresses. It means that the
        // instruction will evaluate the address expression and consume the effective address, but it
        // will neither load nor store. This is an escaping use, because now the address may be
        // passed along to who-knows-where. Note that this isn't really a Use of the Arg, but it does
        // imply that we're Use'ing any registers that the Arg contains.
        UseAddr
    };
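
    // A sketch for orientation (not the authoritative opcode table, which lives in
    // AirOpcode.opcodes): a two-operand Add32 reads its first operand and both reads and
    // writes its second, so its roles are roughly (Use, UseDef), while a Move just writes
    // its destination, giving roughly (Use, Def).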

    enum Signedness : int8_t {
        Signed,
        Unsigned
    };

    // Returns true if the Role implies that the Inst will Use the Arg. It's deliberately false for
    // UseAddr, since isAnyUse() for an Arg::addr means that we are loading from the address.
    static bool isAnyUse(Role role)
    {
        switch (role) {
        case Use:
        case ColdUse:
        case UseDef:
        case UseZDef:
        case LateUse:
        case LateColdUse:
        case Scratch:
            return true;
        case Def:
        case ZDef:
        case UseAddr:
        case EarlyDef:
        case EarlyZDef:
            return false;
        }
        ASSERT_NOT_REACHED();
    }

    static bool isColdUse(Role role)
    {
        switch (role) {
        case ColdUse:
        case LateColdUse:
            return true;
        case Use:
        case UseDef:
        case UseZDef:
        case LateUse:
        case Def:
        case ZDef:
        case UseAddr:
        case Scratch:
        case EarlyDef:
        case EarlyZDef:
            return false;
        }
        ASSERT_NOT_REACHED();
    }

    static bool isWarmUse(Role role)
    {
        return isAnyUse(role) && !isColdUse(role);
    }

    static Role cooled(Role role)
    {
        switch (role) {
        case ColdUse:
        case LateColdUse:
        case UseDef:
        case UseZDef:
        case Def:
        case ZDef:
        case UseAddr:
        case Scratch:
        case EarlyDef:
        case EarlyZDef:
            return role;
        case Use:
            return ColdUse;
        case LateUse:
            return LateColdUse;
        }
        ASSERT_NOT_REACHED();
    }

    static Temperature temperature(Role role)
    {
        return isColdUse(role) ? Cold : Warm;
    }

    static bool activeAt(Role role, Phase phase)
    {
        switch (role) {
        case Use:
        case ColdUse:
        case EarlyDef:
        case EarlyZDef:
        case UseAddr:
            return phase == Early;
        case LateUse:
        case LateColdUse:
        case Def:
        case ZDef:
            return phase == Late;
        case UseDef:
        case UseZDef:
        case Scratch:
            return true;
        }
        ASSERT_NOT_REACHED();
    }

    static bool activeAt(Timing timing, Phase phase)
    {
        switch (timing) {
        case OnlyEarly:
            return phase == Early;
        case OnlyLate:
            return phase == Late;
        case EarlyAndLate:
            return true;
        }
        ASSERT_NOT_REACHED();
    }

    static Timing timing(Role role)
    {
        switch (role) {
        case Use:
        case ColdUse:
        case EarlyDef:
        case EarlyZDef:
        case UseAddr:
            return OnlyEarly;
        case LateUse:
        case LateColdUse:
        case Def:
        case ZDef:
            return OnlyLate;
        case UseDef:
        case UseZDef:
        case Scratch:
            return EarlyAndLate;
        }
        ASSERT_NOT_REACHED();
    }

    template<typename Func>
    static void forEachPhase(Timing timing, const Func& func)
    {
        if (activeAt(timing, Early))
            func(Early);
        if (activeAt(timing, Late))
            func(Late);
    }

    template<typename Func>
    static void forEachPhase(Role role, const Func& func)
    {
        if (activeAt(role, Early))
            func(Early);
        if (activeAt(role, Late))
            func(Late);
    }
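
    // Example usage (an illustrative sketch): counting the phases at which a Role is active.
    //
    //     unsigned phases = 0;
    //     Arg::forEachPhase(Arg::UseDef, [&] (Arg::Phase) { phases++; });
    //     // phases is now 2, since UseDef's timing is EarlyAndLate.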

    // Returns true if the Role implies that the Inst will Use the Arg before doing anything else.
    static bool isEarlyUse(Role role)
    {
        switch (role) {
        case Use:
        case ColdUse:
        case UseDef:
        case UseZDef:
            return true;
        case Def:
        case ZDef:
        case UseAddr:
        case LateUse:
        case LateColdUse:
        case Scratch:
        case EarlyDef:
        case EarlyZDef:
            return false;
        }
        ASSERT_NOT_REACHED();
    }

    // Returns true if the Role implies that the Inst will Use the Arg after doing everything else.
    static bool isLateUse(Role role)
    {
        switch (role) {
        case LateUse:
        case LateColdUse:
        case Scratch:
            return true;
        case ColdUse:
        case Use:
        case UseDef:
        case UseZDef:
        case Def:
        case ZDef:
        case UseAddr:
        case EarlyDef:
        case EarlyZDef:
            return false;
        }
        ASSERT_NOT_REACHED();
    }

    // Returns true if the Role implies that the Inst will Def the Arg.
    static bool isAnyDef(Role role)
    {
        switch (role) {
        case Use:
        case ColdUse:
        case UseAddr:
        case LateUse:
        case LateColdUse:
            return false;
        case Def:
        case UseDef:
        case ZDef:
        case UseZDef:
        case EarlyDef:
        case EarlyZDef:
        case Scratch:
            return true;
        }
        ASSERT_NOT_REACHED();
    }

    // Returns true if the Role implies that the Inst will Def the Arg before the start of execution.
    static bool isEarlyDef(Role role)
    {
        switch (role) {
        case Use:
        case ColdUse:
        case UseAddr:
        case LateUse:
        case Def:
        case UseDef:
        case ZDef:
        case UseZDef:
        case LateColdUse:
            return false;
        case EarlyDef:
        case EarlyZDef:
        case Scratch:
            return true;
        }
        ASSERT_NOT_REACHED();
    }

    // Returns true if the Role implies that the Inst will Def the Arg after the end of execution.
    static bool isLateDef(Role role)
    {
        switch (role) {
        case Use:
        case ColdUse:
        case UseAddr:
        case LateUse:
        case EarlyDef:
        case EarlyZDef:
        case Scratch:
        case LateColdUse:
            return false;
        case Def:
        case UseDef:
        case ZDef:
        case UseZDef:
            return true;
        }
        ASSERT_NOT_REACHED();
    }

    // Returns true if the Role implies that the Inst will ZDef the Arg.
    static bool isZDef(Role role)
    {
        switch (role) {
        case Use:
        case ColdUse:
        case UseAddr:
        case LateUse:
        case Def:
        case UseDef:
        case EarlyDef:
        case Scratch:
        case LateColdUse:
            return false;
        case ZDef:
        case UseZDef:
        case EarlyZDef:
            return true;
        }
        ASSERT_NOT_REACHED();
    }

    Arg()
        : m_kind(Invalid)
    {
    }

    Arg(Air::Tmp tmp)
        : m_kind(Tmp)
        , m_base(tmp)
    {
    }

    Arg(Reg reg)
        : Arg(Air::Tmp(reg))
    {
    }

    static Arg imm(int64_t value)
    {
        Arg result;
        result.m_kind = Imm;
        result.m_offset = value;
        return result;
    }

    static Arg bigImm(int64_t value)
    {
        Arg result;
        result.m_kind = BigImm;
        result.m_offset = value;
        return result;
    }

    static Arg bitImm(int64_t value)
    {
        Arg result;
        result.m_kind = BitImm;
        result.m_offset = value;
        return result;
    }

    static Arg bitImm64(int64_t value)
    {
        Arg result;
        result.m_kind = BitImm64;
        result.m_offset = value;
        return result;
    }

    static Arg immPtr(const void* address)
    {
        return bigImm(bitwise_cast<intptr_t>(address));
    }

    static Arg simpleAddr(Air::Tmp ptr)
    {
        ASSERT(ptr.isGP());
        Arg result;
        result.m_kind = SimpleAddr;
        result.m_base = ptr;
        return result;
    }

    template<typename Int, typename = Value::IsLegalOffset<Int>>
    static Arg addr(Air::Tmp base, Int offset)
    {
        ASSERT(base.isGP());
        Arg result;
        result.m_kind = Addr;
        result.m_base = base;
        result.m_offset = offset;
        return result;
    }

    template<typename Int, typename = Value::IsLegalOffset<Int>>
    static Arg extendedOffsetAddr(Int offsetFromFP)
    {
        Arg result;
        result.m_kind = ExtendedOffsetAddr;
        result.m_base = Air::Tmp(MacroAssembler::framePointerRegister);
        result.m_offset = offsetFromFP;
        return result;
    }

    static Arg addr(Air::Tmp base)
    {
        return addr(base, 0);
    }

    template<typename Int, typename = Value::IsLegalOffset<Int>>
    static Arg stack(StackSlot* value, Int offset)
    {
        Arg result;
        result.m_kind = Stack;
        result.m_offset = bitwise_cast<intptr_t>(value);
        result.m_scale = offset; // I know, yuck.
        return result;
    }

    static Arg stack(StackSlot* value)
    {
        return stack(value, 0);
    }

    template<typename Int, typename = Value::IsLegalOffset<Int>>
    static Arg callArg(Int offset)
    {
        Arg result;
        result.m_kind = CallArg;
        result.m_offset = offset;
        return result;
    }

    // If you don't pass a Width, this optimistically assumes that you're using the right width.
    static bool isValidScale(unsigned scale, Optional<Width> width = WTF::nullopt)
    {
        switch (scale) {
        case 1:
            if (isX86() || isARM64())
                return true;
            return false;
        case 2:
        case 4:
        case 8:
            if (isX86())
                return true;
            if (isARM64()) {
                if (!width)
                    return true;
                return scale == 1 || scale == bytes(*width);
            }
            return false;
        default:
            return false;
        }
    }

    static unsigned logScale(unsigned scale)
    {
        switch (scale) {
        case 1:
            return 0;
        case 2:
            return 1;
        case 4:
            return 2;
        case 8:
            return 3;
        default:
            ASSERT_NOT_REACHED();
            return 0;
        }
    }
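
    // For example, logScale(8) == 3. asBaseIndex() below relies on this to convert an Index
    // arg's byte scale into the MacroAssembler's shift-amount encoding.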

    template<typename Int, typename = Value::IsLegalOffset<Int>>
    static Arg index(Air::Tmp base, Air::Tmp index, unsigned scale, Int offset)
    {
        ASSERT(base.isGP());
        ASSERT(index.isGP());
        ASSERT(isValidScale(scale));
        Arg result;
        result.m_kind = Index;
        result.m_base = base;
        result.m_index = index;
        result.m_scale = static_cast<int32_t>(scale);
        result.m_offset = offset;
        return result;
    }

    static Arg index(Air::Tmp base, Air::Tmp index, unsigned scale = 1)
    {
        return Arg::index(base, index, scale, 0);
    }
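
    // An illustrative sketch: Arg::index(base, index, 8, 0) describes the address
    // base + index * 8, e.g. an element of an array of 8-byte values.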

    static Arg relCond(MacroAssembler::RelationalCondition condition)
    {
        Arg result;
        result.m_kind = RelCond;
        result.m_offset = condition;
        return result;
    }

    static Arg resCond(MacroAssembler::ResultCondition condition)
    {
        Arg result;
        result.m_kind = ResCond;
        result.m_offset = condition;
        return result;
    }

    static Arg doubleCond(MacroAssembler::DoubleCondition condition)
    {
        Arg result;
        result.m_kind = DoubleCond;
        result.m_offset = condition;
        return result;
    }

    static Arg statusCond(MacroAssembler::StatusCondition condition)
    {
        Arg result;
        result.m_kind = StatusCond;
        result.m_offset = condition;
        return result;
    }

    static Arg special(Air::Special* special)
    {
        Arg result;
        result.m_kind = Special;
        result.m_offset = bitwise_cast<intptr_t>(special);
        return result;
    }

    static Arg widthArg(Width width)
    {
        Arg result;
        result.m_kind = WidthArg;
        result.m_offset = width;
        return result;
    }

    bool operator==(const Arg& other) const
    {
        return m_offset == other.m_offset
            && m_kind == other.m_kind
            && m_base == other.m_base
            && m_index == other.m_index
            && m_scale == other.m_scale;
    }

    bool operator!=(const Arg& other) const
    {
        return !(*this == other);
    }

    explicit operator bool() const { return *this != Arg(); }

    Kind kind() const
    {
        return m_kind;
    }

    bool isTmp() const
    {
        return kind() == Tmp;
    }

    bool isImm() const
    {
        return kind() == Imm;
    }

    bool isBigImm() const
    {
        return kind() == BigImm;
    }

    bool isBitImm() const
    {
        return kind() == BitImm;
    }

    bool isBitImm64() const
    {
        return kind() == BitImm64;
    }

    bool isSomeImm() const
    {
        switch (kind()) {
        case Imm:
        case BigImm:
        case BitImm:
        case BitImm64:
            return true;
        default:
            return false;
        }
    }

    bool isSimpleAddr() const
    {
        return kind() == SimpleAddr;
    }

    bool isAddr() const
    {
        return kind() == Addr;
    }

    bool isExtendedOffsetAddr() const
    {
        return kind() == ExtendedOffsetAddr;
    }

    bool isStack() const
    {
        return kind() == Stack;
    }

    bool isCallArg() const
    {
        return kind() == CallArg;
    }

    bool isIndex() const
    {
        return kind() == Index;
    }

    bool isMemory() const
    {
        switch (kind()) {
        case SimpleAddr:
        case Addr:
        case ExtendedOffsetAddr:
        case Stack:
        case CallArg:
        case Index:
            return true;
        default:
            return false;
        }
    }

    // Returns true if this is an idiomatic stack reference. It may return false for some kinds of
    // stack references. The following idioms are recognized:
    // - the Stack kind
    // - the CallArg kind
    // - the ExtendedOffsetAddr kind
    // - the Addr kind with the base being either SP or FP
    // Callers of this function are allowed to expect that if it returns true, then it must be one of
    // these easy-to-recognize kinds. So, making this function recognize more kinds could break things.
    bool isStackMemory() const;

    bool isRelCond() const
    {
        return kind() == RelCond;
    }

    bool isResCond() const
    {
        return kind() == ResCond;
    }

    bool isDoubleCond() const
    {
        return kind() == DoubleCond;
    }

    bool isStatusCond() const
    {
        return kind() == StatusCond;
    }

    bool isCondition() const
    {
        switch (kind()) {
        case RelCond:
        case ResCond:
        case DoubleCond:
        case StatusCond:
            return true;
        default:
            return false;
        }
    }

    bool isSpecial() const
    {
        return kind() == Special;
    }

    bool isWidthArg() const
    {
        return kind() == WidthArg;
    }

    bool isAlive() const
    {
        return isTmp() || isStack();
    }

    Air::Tmp tmp() const
    {
        ASSERT(kind() == Tmp);
        return m_base;
    }

    int64_t value() const
    {
        ASSERT(isSomeImm());
        return m_offset;
    }

    template<typename T>
    bool isRepresentableAs() const
    {
        return B3::isRepresentableAs<T>(value());
    }

    static bool isRepresentableAs(Width width, Signedness signedness, int64_t value)
    {
        switch (signedness) {
        case Signed:
            switch (width) {
            case Width8:
                return B3::isRepresentableAs<int8_t>(value);
            case Width16:
                return B3::isRepresentableAs<int16_t>(value);
            case Width32:
                return B3::isRepresentableAs<int32_t>(value);
            case Width64:
                return B3::isRepresentableAs<int64_t>(value);
            }
            RELEASE_ASSERT_NOT_REACHED();
        case Unsigned:
            switch (width) {
            case Width8:
                return B3::isRepresentableAs<uint8_t>(value);
            case Width16:
                return B3::isRepresentableAs<uint16_t>(value);
            case Width32:
                return B3::isRepresentableAs<uint32_t>(value);
            case Width64:
                return B3::isRepresentableAs<uint64_t>(value);
            }
        }
        RELEASE_ASSERT_NOT_REACHED();
    }
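
    // For example, isRepresentableAs(Width8, Signed, 127) is true but
    // isRepresentableAs(Width8, Signed, 128) is false, since int8_t tops out at 127.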

    bool isRepresentableAs(Width, Signedness) const;

    static int64_t castToType(Width width, Signedness signedness, int64_t value)
    {
        switch (signedness) {
        case Signed:
            switch (width) {
            case Width8:
                return static_cast<int8_t>(value);
            case Width16:
                return static_cast<int16_t>(value);
            case Width32:
                return static_cast<int32_t>(value);
            case Width64:
                return static_cast<int64_t>(value);
            }
            RELEASE_ASSERT_NOT_REACHED();
        case Unsigned:
            switch (width) {
            case Width8:
                return static_cast<uint8_t>(value);
            case Width16:
                return static_cast<uint16_t>(value);
            case Width32:
                return static_cast<uint32_t>(value);
            case Width64:
                return static_cast<uint64_t>(value);
            }
        }
        RELEASE_ASSERT_NOT_REACHED();
    }
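
    // For example, castToType(Width8, Unsigned, 0x1ff) == 0xff: the value is truncated to
    // the given width and then widened back to int64_t according to the signedness.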

    template<typename T>
    T asNumber() const
    {
        return static_cast<T>(value());
    }

    void* pointerValue() const
    {
        ASSERT(kind() == BigImm);
        return bitwise_cast<void*>(static_cast<intptr_t>(m_offset));
    }

    Air::Tmp ptr() const
    {
        ASSERT(kind() == SimpleAddr);
        return m_base;
    }

    Air::Tmp base() const
    {
        ASSERT(kind() == SimpleAddr || kind() == Addr || kind() == ExtendedOffsetAddr || kind() == Index);
        return m_base;
    }

    bool hasOffset() const { return isMemory(); }

    Value::OffsetType offset() const
    {
        if (kind() == Stack)
            return static_cast<Value::OffsetType>(m_scale);
        ASSERT(kind() == Addr || kind() == ExtendedOffsetAddr || kind() == CallArg || kind() == Index);
        return static_cast<Value::OffsetType>(m_offset);
    }

    StackSlot* stackSlot() const
    {
        ASSERT(kind() == Stack);
        return bitwise_cast<StackSlot*>(static_cast<uintptr_t>(m_offset));
    }

    Air::Tmp index() const
    {
        ASSERT(kind() == Index);
        return m_index;
    }

    unsigned scale() const
    {
        ASSERT(kind() == Index);
        return m_scale;
    }

    unsigned logScale() const
    {
        return logScale(scale());
    }

    Air::Special* special() const
    {
        ASSERT(kind() == Special);
        return bitwise_cast<Air::Special*>(static_cast<uintptr_t>(m_offset));
    }

    Width width() const
    {
        ASSERT(kind() == WidthArg);
        return static_cast<Width>(m_offset);
    }

    bool isGPTmp() const
    {
        return isTmp() && tmp().isGP();
    }

    bool isFPTmp() const
    {
        return isTmp() && tmp().isFP();
    }

    // Tells us if this Arg can be used in a position that requires a GP value.
    bool isGP() const
    {
        switch (kind()) {
        case Imm:
        case BigImm:
        case BitImm:
        case BitImm64:
        case SimpleAddr:
        case Addr:
        case ExtendedOffsetAddr:
        case Index:
        case Stack:
        case CallArg:
        case RelCond:
        case ResCond:
        case DoubleCond:
        case StatusCond:
        case Special:
        case WidthArg:
            return true;
        case Tmp:
            return isGPTmp();
        case Invalid:
            return false;
        }
        ASSERT_NOT_REACHED();
    }

    // Tells us if this Arg can be used in a position that requires a FP value.
    bool isFP() const
    {
        switch (kind()) {
        case Imm:
        case BitImm:
        case BitImm64:
        case RelCond:
        case ResCond:
        case DoubleCond:
        case StatusCond:
        case Special:
        case WidthArg:
        case Invalid:
            return false;
        case SimpleAddr:
        case Addr:
        case ExtendedOffsetAddr:
        case Index:
        case Stack:
        case CallArg:
        case BigImm: // Yes, we allow BigImm as a double immediate. We use this for implementing stackmaps.
            return true;
        case Tmp:
            return isFPTmp();
        }
        ASSERT_NOT_REACHED();
    }

    bool hasBank() const
    {
        switch (kind()) {
        case Imm:
        case BitImm:
        case BitImm64:
        case Special:
        case Tmp:
            return true;
        default:
            return false;
        }
    }

    // The type is ambiguous for some arg kinds. Call with care.
    Bank bank() const
    {
        return isGP() ? GP : FP;
    }

    bool isBank(Bank bank) const
    {
        switch (bank) {
        case GP:
            return isGP();
        case FP:
            return isFP();
        }
        ASSERT_NOT_REACHED();
    }

    bool canRepresent(Value* value) const;

    bool isCompatibleBank(const Arg& other) const;

    bool isGPR() const
    {
        return isTmp() && tmp().isGPR();
    }

    GPRReg gpr() const
    {
        return tmp().gpr();
    }

    bool isFPR() const
    {
        return isTmp() && tmp().isFPR();
    }

    FPRReg fpr() const
    {
        return tmp().fpr();
    }

    bool isReg() const
    {
        return isTmp() && tmp().isReg();
    }

    Reg reg() const
    {
        return tmp().reg();
    }

    unsigned gpTmpIndex() const
    {
        return tmp().gpTmpIndex();
    }

    unsigned fpTmpIndex() const
    {
        return tmp().fpTmpIndex();
    }

    unsigned tmpIndex() const
    {
        return tmp().tmpIndex();
    }

    static bool isValidImmForm(int64_t value)
    {
        if (isX86())
            return B3::isRepresentableAs<int32_t>(value);
        if (isARM64())
            return isUInt12(value);
        return false;
    }

    static bool isValidBitImmForm(int64_t value)
    {
        if (isX86())
            return B3::isRepresentableAs<int32_t>(value);
        if (isARM64())
            return ARM64LogicalImmediate::create32(value).isValid();
        return false;
    }

    static bool isValidBitImm64Form(int64_t value)
    {
        if (isX86())
            return B3::isRepresentableAs<int32_t>(value);
        if (isARM64())
            return ARM64LogicalImmediate::create64(value).isValid();
        return false;
    }

    template<typename Int, typename = Value::IsLegalOffset<Int>>
    static bool isValidAddrForm(Int offset, Optional<Width> width = WTF::nullopt)
    {
        if (isX86())
            return true;
        if (isARM64()) {
            if (!width)
                return true;

            if (isValidSignedImm9(offset))
                return true;

            switch (*width) {
            case Width8:
                return isValidScaledUImm12<8>(offset);
            case Width16:
                return isValidScaledUImm12<16>(offset);
            case Width32:
                return isValidScaledUImm12<32>(offset);
            case Width64:
                return isValidScaledUImm12<64>(offset);
            }
        }
        return false;
    }

    template<typename Int, typename = Value::IsLegalOffset<Int>>
    static bool isValidIndexForm(unsigned scale, Int offset, Optional<Width> width = WTF::nullopt)
    {
        if (!isValidScale(scale, width))
            return false;
        if (isX86())
            return true;
        if (isARM64())
            return !offset;
        return false;
    }

    // If you don't pass a width then this optimistically assumes that you're using the right width. But
    // the width is relevant to validity, so passing a null width is only useful for assertions. Don't
    // pass null widths when cascading through Args in the instruction selector!
    bool isValidForm(Optional<Width> width = WTF::nullopt) const
    {
        switch (kind()) {
        case Invalid:
            return false;
        case Tmp:
            return true;
        case Imm:
            return isValidImmForm(value());
        case BigImm:
            return true;
        case BitImm:
            return isValidBitImmForm(value());
        case BitImm64:
            return isValidBitImm64Form(value());
        case SimpleAddr:
        case ExtendedOffsetAddr:
            return true;
        case Addr:
        case Stack:
        case CallArg:
            return isValidAddrForm(offset(), width);
        case Index:
            return isValidIndexForm(scale(), offset(), width);
        case RelCond:
        case ResCond:
        case DoubleCond:
        case StatusCond:
        case Special:
        case WidthArg:
            return true;
        }
        ASSERT_NOT_REACHED();
    }

    template<typename Functor>
    void forEachTmpFast(const Functor& functor)
    {
        switch (m_kind) {
        case Tmp:
        case SimpleAddr:
        case Addr:
        case ExtendedOffsetAddr:
            functor(m_base);
            break;
        case Index:
            functor(m_base);
            functor(m_index);
            break;
        default:
            break;
        }
    }
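
    // Example usage (an illustrative sketch): collecting every Tmp this Arg mentions.
    //
    //     Vector<Air::Tmp> tmps;
    //     arg.forEachTmpFast([&] (Air::Tmp& tmp) { tmps.append(tmp); });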

    bool usesTmp(Air::Tmp tmp) const;

    template<typename Thing>
    bool is() const;

    template<typename Thing>
    Thing as() const;

    template<typename Thing, typename Functor>
    void forEachFast(const Functor&);

    template<typename Thing, typename Functor>
    void forEach(Role, Bank, Width, const Functor&);

    // This is smart enough to know that an address arg in a Def or UseDef rule will use its
    // tmps and never def them. For example, this:
    //
    // mov %rax, (%rcx)
    //
    // This defs (%rcx) but uses %rcx.
    template<typename Functor>
    void forEachTmp(Role argRole, Bank argBank, Width argWidth, const Functor& functor)
    {
        switch (m_kind) {
        case Tmp:
            ASSERT(isAnyUse(argRole) || isAnyDef(argRole));
            functor(m_base, argRole, argBank, argWidth);
            break;
        case SimpleAddr:
        case Addr:
        case ExtendedOffsetAddr:
            functor(m_base, Use, GP, argRole == UseAddr ? argWidth : pointerWidth());
            break;
        case Index:
            functor(m_base, Use, GP, argRole == UseAddr ? argWidth : pointerWidth());
            functor(m_index, Use, GP, argRole == UseAddr ? argWidth : pointerWidth());
            break;
        default:
            break;
        }
    }

    MacroAssembler::TrustedImm32 asTrustedImm32() const
    {
        ASSERT(isImm() || isBitImm());
        return MacroAssembler::TrustedImm32(static_cast<Value::OffsetType>(m_offset));
    }

#if USE(JSVALUE64)
    MacroAssembler::TrustedImm64 asTrustedImm64() const
    {
        ASSERT(isBigImm() || isBitImm64());
        return MacroAssembler::TrustedImm64(value());
    }
#endif

    MacroAssembler::TrustedImmPtr asTrustedImmPtr() const
    {
        if (is64Bit())
            ASSERT(isBigImm());
        else
            ASSERT(isImm());
        return MacroAssembler::TrustedImmPtr(pointerValue());
    }

    MacroAssembler::Address asAddress() const
    {
        if (isSimpleAddr())
            return MacroAssembler::Address(m_base.gpr());
        ASSERT(isAddr() || isExtendedOffsetAddr());
        return MacroAssembler::Address(m_base.gpr(), static_cast<Value::OffsetType>(m_offset));
    }

    MacroAssembler::BaseIndex asBaseIndex() const
    {
        ASSERT(isIndex());
        return MacroAssembler::BaseIndex(
            m_base.gpr(), m_index.gpr(), static_cast<MacroAssembler::Scale>(logScale()),
            static_cast<Value::OffsetType>(m_offset));
    }

    MacroAssembler::RelationalCondition asRelationalCondition() const
    {
        ASSERT(isRelCond());
        return static_cast<MacroAssembler::RelationalCondition>(m_offset);
    }

    MacroAssembler::ResultCondition asResultCondition() const
    {
        ASSERT(isResCond());
        return static_cast<MacroAssembler::ResultCondition>(m_offset);
    }

    MacroAssembler::DoubleCondition asDoubleCondition() const
    {
        ASSERT(isDoubleCond());
        return static_cast<MacroAssembler::DoubleCondition>(m_offset);
    }

    MacroAssembler::StatusCondition asStatusCondition() const
    {
        ASSERT(isStatusCond());
        return static_cast<MacroAssembler::StatusCondition>(m_offset);
    }

    // Tells you if the Arg is invertible. Only condition arguments are invertible, and even for those, there
    // are a few exceptions - notably Overflow and Signed.
    bool isInvertible() const
    {
        switch (kind()) {
        case RelCond:
        case DoubleCond:
        case StatusCond:
            return true;
        case ResCond:
            return MacroAssembler::isInvertible(asResultCondition());
        default:
            return false;
        }
    }

    // This is valid for condition arguments. It will invert them.
    Arg inverted(bool inverted = true) const
    {
        if (!inverted)
            return *this;
        switch (kind()) {
        case RelCond:
            return relCond(MacroAssembler::invert(asRelationalCondition()));
        case ResCond:
            return resCond(MacroAssembler::invert(asResultCondition()));
        case DoubleCond:
            return doubleCond(MacroAssembler::invert(asDoubleCondition()));
        case StatusCond:
            return statusCond(MacroAssembler::invert(asStatusCondition()));
        default:
            RELEASE_ASSERT_NOT_REACHED();
            return Arg();
        }
    }

    Arg flipped(bool flipped = true) const
    {
        if (!flipped)
            return Arg();
        return relCond(MacroAssembler::flip(asRelationalCondition()));
    }

    bool isSignedCond() const
    {
        return isRelCond() && MacroAssembler::isSigned(asRelationalCondition());
    }

    bool isUnsignedCond() const
    {
        return isRelCond() && MacroAssembler::isUnsigned(asRelationalCondition());
    }

    // This computes a hash for comparing this to JSAir's Arg.
    unsigned jsHash() const;

    void dump(PrintStream&) const;

    Arg(WTF::HashTableDeletedValueType)
        : m_base(WTF::HashTableDeletedValue)
    {
    }

    bool isHashTableDeletedValue() const
    {
        return *this == Arg(WTF::HashTableDeletedValue);
    }

    unsigned hash() const
    {
        // This really doesn't have to be that great.
        return WTF::IntHash<int64_t>::hash(m_offset) + m_kind + m_scale + m_base.hash() +
            m_index.hash();
    }

private:
    int64_t m_offset { 0 };
    Kind m_kind { Invalid };
    int32_t m_scale { 1 };
    Air::Tmp m_base;
    Air::Tmp m_index;
};

struct ArgHash {
    static unsigned hash(const Arg& key) { return key.hash(); }
    static bool equal(const Arg& a, const Arg& b) { return a == b; }
    static const bool safeToCompareToEmptyOrDeleted = true;
};

} } } // namespace JSC::B3::Air

namespace WTF {

JS_EXPORT_PRIVATE void printInternal(PrintStream&, JSC::B3::Air::Arg::Kind);
JS_EXPORT_PRIVATE void printInternal(PrintStream&, JSC::B3::Air::Arg::Temperature);
JS_EXPORT_PRIVATE void printInternal(PrintStream&, JSC::B3::Air::Arg::Phase);
JS_EXPORT_PRIVATE void printInternal(PrintStream&, JSC::B3::Air::Arg::Timing);
JS_EXPORT_PRIVATE void printInternal(PrintStream&, JSC::B3::Air::Arg::Role);
JS_EXPORT_PRIVATE void printInternal(PrintStream&, JSC::B3::Air::Arg::Signedness);

template<typename T> struct DefaultHash;
template<> struct DefaultHash<JSC::B3::Air::Arg> {
    typedef JSC::B3::Air::ArgHash Hash;
};

template<typename T> struct HashTraits;
template<> struct HashTraits<JSC::B3::Air::Arg> : SimpleClassHashTraits<JSC::B3::Air::Arg> {
    // Because m_scale is 1 in the empty value.
    static const bool emptyValueIsZero = false;
};

} // namespace WTF

#if ASSERT_DISABLED
IGNORE_RETURN_TYPE_WARNINGS_END
#endif

#endif // ENABLE(B3_JIT)