1 | // Copyright 2018 the V8 project authors. All rights reserved. |
2 | // Use of this source code is governed by a BSD-style license that can be |
3 | // found in the LICENSE file. |
4 | |
5 | #ifndef V8_TURBO_ASSEMBLER_H_ |
6 | #define V8_TURBO_ASSEMBLER_H_ |
7 | |
8 | #include "src/assembler-arch.h" |
9 | #include "src/base/template-utils.h" |
10 | #include "src/builtins/builtins.h" |
11 | #include "src/roots.h" |
12 | |
13 | namespace v8 { |
14 | namespace internal { |
15 | |
16 | // Common base class for platform-specific TurboAssemblers containing |
17 | // platform-independent bits. |
18 | class V8_EXPORT_PRIVATE TurboAssemblerBase : public Assembler { |
19 | public: |
20 | // Constructors are declared public to inherit them in derived classes |
21 | // with `using` directive. |
22 | TurboAssemblerBase(const AssemblerOptions& options, |
23 | std::unique_ptr<AssemblerBuffer> buffer = {}) |
24 | : TurboAssemblerBase(nullptr, options.EnableV8AgnosticCode(), |
25 | CodeObjectRequired::kNo, std::move(buffer)) {} |
26 | |
27 | TurboAssemblerBase(Isolate* isolate, CodeObjectRequired create_code_object, |
28 | std::unique_ptr<AssemblerBuffer> buffer = {}) |
29 | : TurboAssemblerBase(isolate, AssemblerOptions::Default(isolate), |
30 | create_code_object, std::move(buffer)) {} |
31 | |
32 | TurboAssemblerBase(Isolate* isolate, const AssemblerOptions& options, |
33 | CodeObjectRequired create_code_object, |
34 | std::unique_ptr<AssemblerBuffer> buffer = {}); |
35 | |
36 | Isolate* isolate() const { |
37 | DCHECK(!options().v8_agnostic_code); |
38 | return isolate_; |
39 | } |
40 | |
41 | Handle<HeapObject> CodeObject() const { |
42 | DCHECK(!code_object_.is_null()); |
43 | return code_object_; |
44 | } |
45 | |
46 | bool root_array_available() const { return root_array_available_; } |
47 | void set_root_array_available(bool v) { root_array_available_ = v; } |
48 | |
49 | bool trap_on_abort() const { return trap_on_abort_; } |
50 | |
51 | bool should_abort_hard() const { return hard_abort_; } |
52 | void set_abort_hard(bool v) { hard_abort_ = v; } |
53 | |
54 | void set_builtin_index(int i) { maybe_builtin_index_ = i; } |
55 | |
56 | void set_has_frame(bool v) { has_frame_ = v; } |
57 | bool has_frame() const { return has_frame_; } |
58 | |
59 | // Calls the given builtin. If builtins are embedded, the trampoline Code |
60 | // object on the heap is not used. |
61 | virtual void CallBuiltinPointer(Register builtin_pointer) = 0; |
62 | |
63 | // Calls/jumps to the given Code object. If builtins are embedded, the |
64 | // trampoline Code object on the heap is not used. |
65 | virtual void CallCodeObject(Register code_object) = 0; |
66 | virtual void JumpCodeObject(Register code_object) = 0; |
67 | |
68 | // Loads the given Code object's entry point into the destination register. |
69 | virtual void LoadCodeObjectEntry(Register destination, |
70 | Register code_object) = 0; |
71 | |
72 | // Loads the given constant or external reference without embedding its direct |
73 | // pointer. The produced code is isolate-independent. |
74 | void IndirectLoadConstant(Register destination, Handle<HeapObject> object); |
75 | void IndirectLoadExternalReference(Register destination, |
76 | ExternalReference reference); |
77 | |
78 | virtual void LoadFromConstantsTable(Register destination, |
79 | int constant_index) = 0; |
80 | |
81 | // Corresponds to: destination = kRootRegister + offset. |
82 | virtual void LoadRootRegisterOffset(Register destination, |
83 | intptr_t offset) = 0; |
84 | |
85 | // Corresponds to: destination = [kRootRegister + offset]. |
86 | virtual void LoadRootRelative(Register destination, int32_t offset) = 0; |
87 | |
88 | virtual void LoadRoot(Register destination, RootIndex index) = 0; |
89 | |
90 | static int32_t RootRegisterOffsetForRootIndex(RootIndex root_index); |
91 | static int32_t RootRegisterOffsetForBuiltinIndex(int builtin_index); |
92 | |
93 | // Returns the root-relative offset to reference.address(). |
94 | static intptr_t RootRegisterOffsetForExternalReference( |
95 | Isolate* isolate, const ExternalReference& reference); |
96 | |
97 | // Returns the root-relative offset to the external reference table entry, |
98 | // which itself contains reference.address(). |
99 | static int32_t RootRegisterOffsetForExternalReferenceTableEntry( |
100 | Isolate* isolate, const ExternalReference& reference); |
101 | |
102 | // An address is addressable through kRootRegister if it is located within |
103 | // isolate->root_register_addressable_region(). |
104 | static bool IsAddressableThroughRootRegister( |
105 | Isolate* isolate, const ExternalReference& reference); |
106 | |
107 | protected: |
108 | void (int builtin_index); |
109 | |
110 | Isolate* const isolate_ = nullptr; |
111 | |
112 | // This handle will be patched with the code object on installation. |
113 | Handle<HeapObject> code_object_; |
114 | |
115 | // Whether kRootRegister has been initialized. |
116 | bool root_array_available_ = true; |
117 | |
118 | // Immediately trap instead of calling {Abort} when debug code fails. |
119 | bool trap_on_abort_ = FLAG_trap_on_abort; |
120 | |
121 | // Emit a C call to abort instead of a runtime call. |
122 | bool hard_abort_ = false; |
123 | |
124 | // May be set while generating builtins. |
125 | int maybe_builtin_index_ = Builtins::kNoBuiltinId; |
126 | |
127 | bool has_frame_ = false; |
128 | |
129 | DISALLOW_IMPLICIT_CONSTRUCTORS(TurboAssemblerBase); |
130 | }; |
131 | |
132 | // Avoids emitting calls to the {Builtins::kAbort} builtin when emitting debug |
133 | // code during the lifetime of this scope object. For disabling debug code |
134 | // entirely use the {DontEmitDebugCodeScope} instead. |
135 | class HardAbortScope { |
136 | public: |
137 | explicit HardAbortScope(TurboAssemblerBase* assembler) |
138 | : assembler_(assembler), old_value_(assembler->should_abort_hard()) { |
139 | assembler_->set_abort_hard(true); |
140 | } |
141 | ~HardAbortScope() { assembler_->set_abort_hard(old_value_); } |
142 | |
143 | private: |
144 | TurboAssemblerBase* assembler_; |
145 | bool old_value_; |
146 | }; |
147 | |
148 | #ifdef DEBUG |
// Fold functor: increments the accumulator once per valid register argument.
struct CountIfValidRegisterFunctor {
  template <typename RegType>
  constexpr int operator()(int acc, RegType reg) const {
    return reg.is_valid() ? acc + 1 : acc;
  }
};
155 | |
156 | template <typename RegType, typename... RegTypes, |
157 | // All arguments must be either Register or DoubleRegister. |
158 | typename = typename std::enable_if< |
159 | base::is_same<Register, RegType, RegTypes...>::value || |
160 | base::is_same<DoubleRegister, RegType, RegTypes...>::value>::type> |
161 | inline bool AreAliased(RegType first_reg, RegTypes... regs) { |
162 | int num_different_regs = NumRegs(RegType::ListOf(first_reg, regs...)); |
163 | int num_given_regs = |
164 | base::fold(CountIfValidRegisterFunctor{}, 0, first_reg, regs...); |
165 | return num_different_regs < num_given_regs; |
166 | } |
167 | #endif |
168 | |
169 | } // namespace internal |
170 | } // namespace v8 |
171 | |
172 | #endif // V8_TURBO_ASSEMBLER_H_ |
173 | |