// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_BACKEND_CODE_GENERATOR_H_
#define V8_COMPILER_BACKEND_CODE_GENERATOR_H_

#include "src/base/optional.h"
#include "src/compiler/backend/gap-resolver.h"
#include "src/compiler/backend/instruction.h"
#include "src/compiler/backend/unwinding-info-writer.h"
#include "src/compiler/osr.h"
#include "src/deoptimizer.h"
#include "src/macro-assembler.h"
#include "src/safepoint-table.h"
#include "src/source-position-table.h"
#include "src/trap-handler/trap-handler.h"

namespace v8 {
namespace internal {

class OptimizedCompilationInfo;

namespace compiler {

// Forward declarations.
class DeoptimizationExit;
class FrameAccessState;
class Linkage;
class OutOfLineCode;

struct BranchInfo {
  FlagsCondition condition;
  Label* true_label;
  Label* false_label;
  bool fallthru;
};

class InstructionOperandIterator {
 public:
  InstructionOperandIterator(Instruction* instr, size_t pos)
      : instr_(instr), pos_(pos) {}

  Instruction* instruction() const { return instr_; }
  InstructionOperand* Advance() { return instr_->InputAt(pos_++); }

 private:
  Instruction* instr_;
  size_t pos_;
};
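
// Illustrative usage sketch (not part of the interface; {instr} and the start
// position are assumptions made for the example): the deoptimization
// translation code walks an instruction's inputs with this iterator.
//
//   InstructionOperandIterator it(instr, 0);
//   InstructionOperand* first = it.Advance();   // input 0
//   InstructionOperand* second = it.Advance();  // input 1, and so on.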

enum class DeoptimizationLiteralKind { kObject, kNumber, kString };

// Either a non-null Handle<Object>, a double or a StringConstantBase.
class DeoptimizationLiteral {
 public:
  DeoptimizationLiteral() : object_(), number_(0), string_(nullptr) {}
  explicit DeoptimizationLiteral(Handle<Object> object)
      : kind_(DeoptimizationLiteralKind::kObject), object_(object) {
    DCHECK(!object_.is_null());
  }
  explicit DeoptimizationLiteral(double number)
      : kind_(DeoptimizationLiteralKind::kNumber), number_(number) {}
  explicit DeoptimizationLiteral(const StringConstantBase* string)
      : kind_(DeoptimizationLiteralKind::kString), string_(string) {}

  Handle<Object> object() const { return object_; }
  const StringConstantBase* string() const { return string_; }

  bool operator==(const DeoptimizationLiteral& other) const {
    return kind_ == other.kind_ && object_.equals(other.object_) &&
           bit_cast<uint64_t>(number_) == bit_cast<uint64_t>(other.number_) &&
           bit_cast<intptr_t>(string_) == bit_cast<intptr_t>(other.string_);
  }

  Handle<Object> Reify(Isolate* isolate) const;

  DeoptimizationLiteralKind kind() const { return kind_; }

 private:
  DeoptimizationLiteralKind kind_;

  Handle<Object> object_;
  double number_ = 0;
  const StringConstantBase* string_ = nullptr;
};
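
// Illustrative sketch (an {isolate} is assumed to be available): a number
// literal recorded while building translations is later reified into a heap
// object when the deoptimization data is generated.
//
//   DeoptimizationLiteral literal(0.5);             // kind() == kNumber
//   Handle<Object> boxed = literal.Reify(isolate);  // allocates a Number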

// Generates native code for a sequence of instructions.
class V8_EXPORT_PRIVATE CodeGenerator final : public GapResolver::Assembler {
 public:
  explicit CodeGenerator(Zone* codegen_zone, Frame* frame, Linkage* linkage,
                         InstructionSequence* instructions,
                         OptimizedCompilationInfo* info, Isolate* isolate,
                         base::Optional<OsrHelper> osr_helper,
                         int start_source_position,
                         JumpOptimizationInfo* jump_opt,
                         PoisoningMitigationLevel poisoning_level,
                         const AssemblerOptions& options, int32_t builtin_index,
                         std::unique_ptr<AssemblerBuffer> = {});

  // Generate native code. After calling AssembleCode, call FinalizeCode to
  // produce the actual code object. If an error occurs during either phase,
  // FinalizeCode returns an empty MaybeHandle.
  void AssembleCode();  // Does not need to run on main thread.
  MaybeHandle<Code> FinalizeCode();
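
  // Illustrative usage sketch (not the actual pipeline code; the constructor
  // arguments are assumed to come from the surrounding compilation pipeline):
  //
  //   CodeGenerator generator(/* arguments as declared above */);
  //   generator.AssembleCode();                           // any thread
  //   MaybeHandle<Code> code = generator.FinalizeCode();  // main thread
  //   if (code.is_null()) { /* assembly or finalization failed */ }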

  OwnedVector<byte> GetSourcePositionTable();
  OwnedVector<trap_handler::ProtectedInstructionData>
  GetProtectedInstructions();

  InstructionSequence* instructions() const { return instructions_; }
  FrameAccessState* frame_access_state() const { return frame_access_state_; }
  const Frame* frame() const { return frame_access_state_->frame(); }
  Isolate* isolate() const { return isolate_; }
  Linkage* linkage() const { return linkage_; }

  Label* GetLabel(RpoNumber rpo) { return &labels_[rpo.ToSize()]; }

  void AddProtectedInstructionLanding(uint32_t instr_offset,
                                      uint32_t landing_offset);

  bool wasm_runtime_exception_support() const;

  SourcePosition start_source_position() const {
    return start_source_position_;
  }

  void AssembleSourcePosition(Instruction* instr);
  void AssembleSourcePosition(SourcePosition source_position);

  // Record a safepoint with the given pointer map.
  void RecordSafepoint(ReferenceMap* references, Safepoint::Kind kind,
                       Safepoint::DeoptMode deopt_mode);

  Zone* zone() const { return zone_; }
  TurboAssembler* tasm() { return &tasm_; }
  SafepointTableBuilder* safepoint_table_builder() { return &safepoints_; }
  size_t GetSafepointTableOffset() const { return safepoints_.GetCodeOffset(); }
  size_t GetHandlerTableOffset() const { return handler_table_offset_; }

  const ZoneVector<int>& block_starts() const { return block_starts_; }
  const ZoneVector<int>& instr_starts() const { return instr_starts_; }

  static constexpr int kBinarySearchSwitchMinimalCases = 4;

 private:
  GapResolver* resolver() { return &resolver_; }
  SafepointTableBuilder* safepoints() { return &safepoints_; }
  OptimizedCompilationInfo* info() const { return info_; }
  OsrHelper* osr_helper() { return &(*osr_helper_); }

  // Create the FrameAccessState object. The Frame is immutable from here on.
  void CreateFrameAccessState(Frame* frame);

  // Architecture-specific frame finalization.
  void FinishFrame(Frame* frame);

  // Checks if {block} will appear directly after {current_block_} when
  // assembling code, in which case a fall-through can be used.
  bool IsNextInAssemblyOrder(RpoNumber block) const;

  // Check if a heap object can be materialized by loading from a heap root,
  // which is cheaper on some platforms than materializing the actual heap
  // object constant.
  bool IsMaterializableFromRoot(Handle<HeapObject> object,
                                RootIndex* index_return);

  enum CodeGenResult { kSuccess, kTooManyDeoptimizationBailouts };

  // Assemble instructions for the specified block.
  CodeGenResult AssembleBlock(const InstructionBlock* block);

  // Inserts a mask update at the beginning of an instruction block if the
  // predecessor block ends with a masking branch.
  void TryInsertBranchPoisoning(const InstructionBlock* block);

  // Initializes the masking register in the prologue of a function.
  void InitializeSpeculationPoison();
  // Resets the masking register during execution of a function.
  void ResetSpeculationPoison();
  // Generates a mask from the pc passed in {kJavaScriptCallCodeStartRegister}.
  void GenerateSpeculationPoisonFromCodeStartRegister();

  // Assemble code for the specified instruction.
  CodeGenResult AssembleInstruction(Instruction* instr,
                                    const InstructionBlock* block);
  void AssembleGaps(Instruction* instr);

  // Computes branch info from the given instruction. Returns a valid RPO
  // number if the branch is redundant; in that case the returned RPO number
  // points to the target basic block.
  RpoNumber ComputeBranchInfo(BranchInfo* branch, Instruction* instr);

  // Returns true if an instruction is a tail call that needs to adjust the
  // stack pointer before execution. The stack slot index to the empty slot
  // above the adjusted stack pointer is returned in |slot|.
  bool GetSlotAboveSPBeforeTailCall(Instruction* instr, int* slot);

  // Determines how to call helper stubs depending on the code kind.
  StubCallMode DetermineStubCallMode() const;

  CodeGenResult AssembleDeoptimizerCall(int deoptimization_id,
                                        SourcePosition pos);

  // ===========================================================================
  // ============= Architecture-specific code generation methods. =============
  // ===========================================================================

  CodeGenResult AssembleArchInstruction(Instruction* instr);
  void AssembleArchJump(RpoNumber target);
  void AssembleArchBranch(Instruction* instr, BranchInfo* branch);

  // Generates special branch for deoptimization condition.
  void AssembleArchDeoptBranch(Instruction* instr, BranchInfo* branch);

  void AssembleArchBoolean(Instruction* instr, FlagsCondition condition);
  void AssembleArchTrap(Instruction* instr, FlagsCondition condition);
  void AssembleArchBinarySearchSwitchRange(Register input, RpoNumber def_block,
                                           std::pair<int32_t, Label*>* begin,
                                           std::pair<int32_t, Label*>* end);
  void AssembleArchBinarySearchSwitch(Instruction* instr);
  void AssembleArchLookupSwitch(Instruction* instr);
  void AssembleArchTableSwitch(Instruction* instr);
  // Generates code that checks whether the {kJavaScriptCallCodeStartRegister}
  // contains the expected pointer to the start of the instruction stream.
  void AssembleCodeStartRegisterCheck();

  void AssembleBranchPoisoning(FlagsCondition condition, Instruction* instr);

  // When entering code that is marked for deoptimization, rather than
  // continuing with its execution, we jump to lazily compiled code. We need to
  // do this because the code has already been deoptimized and needs to be
  // unlinked from the JS functions referring to it.
  void BailoutIfDeoptimized();

  // Generates code to poison the stack pointer and implicit register arguments
  // like the context register and the function register.
  void AssembleRegisterArgumentPoisoning();

  // Generates an architecture-specific, descriptor-specific prologue
  // to set up a stack frame.
  void AssembleConstructFrame();

  // Generates an architecture-specific, descriptor-specific return sequence
  // to tear down a stack frame.
  void AssembleReturn(InstructionOperand* pop);

  void AssembleDeconstructFrame();

  // Generates code to manipulate the stack in preparation for a tail call.
  void AssemblePrepareTailCall();

  // Generates code to pop the current frame if it is an arguments adaptor
  // frame.
  void AssemblePopArgumentsAdaptorFrame(Register args_reg, Register scratch1,
                                        Register scratch2, Register scratch3);

  enum PushTypeFlag {
    kImmediatePush = 0x1,
    kRegisterPush = 0x2,
    kStackSlotPush = 0x4,
    kScalarPush = kRegisterPush | kStackSlotPush
  };

  using PushTypeFlags = base::Flags<PushTypeFlag>;

  static bool IsValidPush(InstructionOperand source, PushTypeFlags push_type);

  // Generates a list of moves from an instruction that are candidates to be
  // turned into push instructions on platforms that support them. In general,
  // the list of push candidates consists of moves to a set of contiguous
  // destination InstructionOperand locations on the stack that don't clobber
  // values that are needed to resolve the gap or use values generated by the
  // gap, i.e. moves that can be hoisted together before the actual gap and
  // assembled together.
  static void GetPushCompatibleMoves(Instruction* instr,
                                     PushTypeFlags push_type,
                                     ZoneVector<MoveOperands*>* pushes);
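
  // Illustrative example (hypothetical gap, assuming 8-byte slots): with
  // kScalarPush, a parallel move such as
  //   { r1 -> [sp + 0], r2 -> [sp + 8] }
  // targets contiguous stack slots and feeds no other gap moves, so both
  // moves are candidates to be emitted as pushes ahead of the gap instead of
  // being resolved as ordinary gap moves.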

  class MoveType {
   public:
    enum Type {
      kRegisterToRegister,
      kRegisterToStack,
      kStackToRegister,
      kStackToStack,
      kConstantToRegister,
      kConstantToStack
    };

    // Detect what type of move or swap needs to be performed. Note that these
    // functions do not take into account the representation (Tagged, FP,
    // etc.).

    static Type InferMove(InstructionOperand* source,
                          InstructionOperand* destination);
    static Type InferSwap(InstructionOperand* source,
                          InstructionOperand* destination);
  };
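
  // Illustrative sketch (hypothetical operands): for a register source and a
  // stack-slot destination, InferMove returns kRegisterToStack, and the
  // architecture-specific AssembleMove switches on that result to pick the
  // store sequence. The operands' representation is handled separately.
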
  // Called before a tail call |instr|'s gap moves are assembled and allows
  // gap-specific pre-processing, e.g. adjustment of the sp for tail calls that
  // need it before gap moves or conversion of certain gap moves into pushes.
  void AssembleTailCallBeforeGap(Instruction* instr,
                                 int first_unused_stack_slot);
  // Called after a tail call |instr|'s gap moves are assembled and allows
  // gap-specific post-processing, e.g. adjustment of the sp for tail calls
  // that need it after gap moves.
  void AssembleTailCallAfterGap(Instruction* instr,
                                int first_unused_stack_slot);

  void FinishCode();
  void MaybeEmitOutOfLineConstantPool();

  // ===========================================================================
  // ============== Architecture-specific gap resolver methods. ===============
  // ===========================================================================

  // Interface used by the gap resolver to emit moves and swaps.
  void AssembleMove(InstructionOperand* source,
                    InstructionOperand* destination) final;
  void AssembleSwap(InstructionOperand* source,
                    InstructionOperand* destination) final;

  // ===========================================================================
  // =================== Jump table construction methods. =====================
  // ===========================================================================

  class JumpTable;
  // Adds a jump table that is emitted after the actual code. Returns a label
  // pointing to the beginning of the table. {targets} is assumed to be static
  // or zone allocated.
  Label* AddJumpTable(Label** targets, size_t target_count);
  // Emits a jump table.
  void AssembleJumpTable(Label** targets, size_t target_count);
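
  // Illustrative sketch (assuming {cases} is a zone-allocated array of labels
  // built by the architecture-specific table-switch code):
  //
  //   Label* table = AddJumpTable(cases, case_count);
  //   // ... emit an indexed jump through {table} ...
  //
  // The registered tables are then emitted after the instruction stream, with
  // AssembleJumpTable called once per table.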

  // ===========================================================================
  // ================== Deoptimization table construction. ====================
  // ===========================================================================

  void RecordCallPosition(Instruction* instr);
  Handle<DeoptimizationData> GenerateDeoptimizationData();
  int DefineDeoptimizationLiteral(DeoptimizationLiteral literal);
  DeoptimizationEntry const& GetDeoptimizationEntry(Instruction* instr,
                                                    size_t frame_state_offset);
  DeoptimizeKind GetDeoptimizationKind(int deoptimization_id) const;
  DeoptimizeReason GetDeoptimizationReason(int deoptimization_id) const;
  int BuildTranslation(Instruction* instr, int pc_offset,
                       size_t frame_state_offset,
                       OutputFrameStateCombine state_combine);
  void BuildTranslationForFrameStateDescriptor(
      FrameStateDescriptor* descriptor, InstructionOperandIterator* iter,
      Translation* translation, OutputFrameStateCombine state_combine);
  void TranslateStateValueDescriptor(StateValueDescriptor* desc,
                                     StateValueList* nested,
                                     Translation* translation,
                                     InstructionOperandIterator* iter);
  void TranslateFrameStateDescriptorOperands(FrameStateDescriptor* desc,
                                             InstructionOperandIterator* iter,
                                             Translation* translation);
  void AddTranslationForOperand(Translation* translation, Instruction* instr,
                                InstructionOperand* op, MachineType type);
  void MarkLazyDeoptSite();

  DeoptimizationExit* AddDeoptimizationExit(Instruction* instr,
                                            size_t frame_state_offset);

  // ===========================================================================

  class DeoptimizationState final : public ZoneObject {
   public:
    DeoptimizationState(BailoutId bailout_id, int translation_id,
                        int pc_offset, DeoptimizeKind kind,
                        DeoptimizeReason reason)
        : bailout_id_(bailout_id),
          translation_id_(translation_id),
          pc_offset_(pc_offset),
          kind_(kind),
          reason_(reason) {}

    BailoutId bailout_id() const { return bailout_id_; }
    int translation_id() const { return translation_id_; }
    int pc_offset() const { return pc_offset_; }
    DeoptimizeKind kind() const { return kind_; }
    DeoptimizeReason reason() const { return reason_; }

   private:
    BailoutId bailout_id_;
    int translation_id_;
    int pc_offset_;
    DeoptimizeKind kind_;
    DeoptimizeReason reason_;
  };

  struct HandlerInfo {
    Label* handler;
    int pc_offset;
  };

  friend class OutOfLineCode;
  friend class CodeGeneratorTester;

  Zone* zone_;
  Isolate* isolate_;
  FrameAccessState* frame_access_state_;
  Linkage* const linkage_;
  InstructionSequence* const instructions_;
  UnwindingInfoWriter unwinding_info_writer_;
  OptimizedCompilationInfo* const info_;
  Label* const labels_;
  Label return_label_;
  RpoNumber current_block_;
  SourcePosition start_source_position_;
  SourcePosition current_source_position_;
  TurboAssembler tasm_;
  GapResolver resolver_;
  SafepointTableBuilder safepoints_;
  ZoneVector<HandlerInfo> handlers_;
  ZoneDeque<DeoptimizationExit*> deoptimization_exits_;
  ZoneDeque<DeoptimizationState*> deoptimization_states_;
  ZoneDeque<DeoptimizationLiteral> deoptimization_literals_;
  size_t inlined_function_count_ = 0;
  TranslationBuffer translations_;
  int handler_table_offset_ = 0;
  int last_lazy_deopt_pc_ = 0;

  // kArchCallCFunction could be reached either:
  //   kArchCallCFunction;
  // or:
  //   kArchSaveCallerRegisters;
  //   kArchCallCFunction;
  //   kArchRestoreCallerRegisters;
  // The boolean is used to distinguish the two cases. In the latter case, we
  // also need to decide if FP registers need to be saved, which is controlled
  // by fp_mode_.
  bool caller_registers_saved_;
  SaveFPRegsMode fp_mode_;

  JumpTable* jump_tables_;
  OutOfLineCode* ools_;
  base::Optional<OsrHelper> osr_helper_;
  int osr_pc_offset_;
  int optimized_out_literal_id_;
  SourcePositionTableBuilder source_position_table_builder_;
  ZoneVector<trap_handler::ProtectedInstructionData> protected_instructions_;
  CodeGenResult result_;
  PoisoningMitigationLevel poisoning_level_;
  ZoneVector<int> block_starts_;
  ZoneVector<int> instr_starts_;
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_BACKEND_CODE_GENERATOR_H_