1// Copyright 2017 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_OBJECTS_CODE_H_
6#define V8_OBJECTS_CODE_H_
7
8#include "src/contexts.h"
9#include "src/handler-table.h"
10#include "src/objects.h"
11#include "src/objects/fixed-array.h"
12#include "src/objects/heap-object.h"
13#include "src/objects/struct.h"
14
15// Has to be the last include (doesn't have include guards):
16#include "src/objects/object-macros.h"
17
18namespace v8 {
19namespace internal {
20
21class ByteArray;
22class BytecodeArray;
23class CodeDataContainer;
24class CodeDesc;
25class MaybeObject;
26
27namespace interpreter {
28class Register;
29}
30
31// Code describes objects with on-the-fly generated machine code.
class Code : public HeapObject {
 public:
  NEVER_READ_ONLY_SPACE
  // Opaque data type for encapsulating code flags like kind, inline
  // cache state, and arguments count.
  using Flags = uint32_t;

#define CODE_KIND_LIST(V)   \
  V(OPTIMIZED_FUNCTION)     \
  V(BYTECODE_HANDLER)       \
  V(STUB)                   \
  V(BUILTIN)                \
  V(REGEXP)                 \
  V(WASM_FUNCTION)          \
  V(WASM_TO_JS_FUNCTION)    \
  V(JS_TO_WASM_FUNCTION)    \
  V(WASM_INTERPRETER_ENTRY) \
  V(C_WASM_ENTRY)

  enum Kind {
#define DEFINE_CODE_KIND_ENUM(name) name,
    CODE_KIND_LIST(DEFINE_CODE_KIND_ENUM)
#undef DEFINE_CODE_KIND_ENUM
    NUMBER_OF_KINDS
  };

  // Returns a human-readable name for the given code kind (for logging and
  // debugging output).
  static const char* Kind2String(Kind kind);

#ifdef ENABLE_DISASSEMBLER
  const char* GetName(Isolate* isolate) const;
  V8_EXPORT_PRIVATE void Disassemble(const char* name, std::ostream& os,
                                     Address current_pc = kNullAddress);
#endif

  // [instruction_size]: Size of the native instructions, including embedded
  // data such as the safepoints table.
  inline int raw_instruction_size() const;
  inline void set_raw_instruction_size(int value);

  // Returns the size of the native instructions, including embedded
  // data such as the safepoints table. For off-heap code objects
  // this may differ from instruction_size in that this will return the size of
  // the off-heap instruction stream rather than the on-heap trampoline located
  // at instruction_start.
  inline int InstructionSize() const;
  V8_EXPORT_PRIVATE int OffHeapInstructionSize() const;

  // [relocation_info]: Code relocation information
  DECL_ACCESSORS(relocation_info, ByteArray)

  // This function should be called only from GC.
  void ClearEmbeddedObjects(Heap* heap);

  // [deoptimization_data]: Array containing data for deopt.
  DECL_ACCESSORS(deoptimization_data, FixedArray)

  // [source_position_table]: ByteArray for the source positions table or
  // SourcePositionTableWithFrameCache.
  DECL_ACCESSORS(source_position_table, Object)
  inline ByteArray SourcePositionTable() const;
  inline ByteArray SourcePositionTableIfCollected() const;

  // [code_data_container]: A container indirection for all mutable fields.
  DECL_ACCESSORS(code_data_container, CodeDataContainer)

  // [next_code_link]: Link for lists of optimized or deoptimized code.
  // Note that this field is stored in the {CodeDataContainer} to be mutable.
  inline Object next_code_link() const;
  inline void set_next_code_link(Object value);

  // Unchecked accessors to be used during GC.
  inline ByteArray unchecked_relocation_info() const;

  inline int relocation_size() const;

  // [kind]: Access to specific code kind.
  inline Kind kind() const;

  inline bool is_optimized_code() const;
  inline bool is_wasm_code() const;

  // Testers for interpreter builtins.
  inline bool is_interpreter_trampoline_builtin() const;

  // Tells whether the code checks the optimization marker in the function's
  // feedback vector.
  inline bool checks_optimization_marker() const;

  // Tells whether the outgoing parameters of this code are tagged pointers.
  inline bool has_tagged_params() const;

  // [is_turbofanned]: For kind STUB or OPTIMIZED_FUNCTION, tells whether the
  // code object was generated by the TurboFan optimizing compiler.
  inline bool is_turbofanned() const;

  // [can_have_weak_objects]: For kind OPTIMIZED_FUNCTION, tells whether the
  // embedded objects in code should be treated weakly.
  inline bool can_have_weak_objects() const;
  inline void set_can_have_weak_objects(bool value);

  // [builtin_index]: For builtins, tells which builtin index the code object
  // has. The builtin index is a non-negative integer for builtins, and -1
  // otherwise.
  inline int builtin_index() const;
  inline void set_builtin_index(int id);
  inline bool is_builtin() const;

  inline bool has_safepoint_info() const;

  // [stack_slots]: If {has_safepoint_info()}, the number of stack slots
  // reserved in the code prologue.
  inline int stack_slots() const;

  // [safepoint_table_offset]: If {has_safepoint_info()}, the offset in the
  // instruction stream where the safepoint table starts.
  inline int safepoint_table_offset() const;
  inline void set_safepoint_table_offset(int offset);
  int safepoint_table_size() const;
  bool has_safepoint_table() const;

  // [handler_table_offset]: The offset in the instruction stream where the
  // exception handler table starts.
  inline int handler_table_offset() const;
  inline void set_handler_table_offset(int offset);
  int handler_table_size() const;
  bool has_handler_table() const;

  // [constant_pool_offset]: Offset of the constant pool.
  // Valid for FLAG_enable_embedded_constant_pool only
  inline int constant_pool_offset() const;
  inline void set_constant_pool_offset(int offset);
  int constant_pool_size() const;
  bool has_constant_pool() const;

  // [code_comments_offset]: Offset of the code comment section.
  inline int code_comments_offset() const;
  inline void set_code_comments_offset(int offset);
  inline Address code_comments() const;
  V8_EXPORT_PRIVATE int code_comments_size() const;
  V8_EXPORT_PRIVATE bool has_code_comments() const;

  // The size of the executable instruction area, without embedded metadata.
  int ExecutableInstructionSize() const;

  // [marked_for_deoptimization]: For kind OPTIMIZED_FUNCTION tells whether
  // the code is going to be deoptimized.
  inline bool marked_for_deoptimization() const;
  inline void set_marked_for_deoptimization(bool flag);

  // [embedded_objects_cleared]: For kind OPTIMIZED_FUNCTION tells whether
  // the embedded objects in the code marked for deoptimization were cleared.
  // Note that embedded_objects_cleared() implies marked_for_deoptimization().
  inline bool embedded_objects_cleared() const;
  inline void set_embedded_objects_cleared(bool flag);

  // [deopt_already_counted]: For kind OPTIMIZED_FUNCTION tells whether
  // the code was already deoptimized.
  inline bool deopt_already_counted() const;
  inline void set_deopt_already_counted(bool flag);

  // [is_promise_rejection]: For kind BUILTIN tells whether the
  // exception thrown by the code will lead to promise rejection or
  // uncaught if both this and is_exception_caught is set.
  // Use GetBuiltinCatchPrediction to access this.
  inline void set_is_promise_rejection(bool flag);

  // [is_exception_caught]: For kind BUILTIN tells whether the
  // exception thrown by the code will be caught internally or
  // uncaught if both this and is_promise_rejection is set.
  // Use GetBuiltinCatchPrediction to access this.
  inline void set_is_exception_caught(bool flag);

  // [is_off_heap_trampoline]: For kind BUILTIN tells whether
  // this is a trampoline to an off-heap builtin.
  inline bool is_off_heap_trampoline() const;

  // [constant_pool]: The constant pool for this function.
  inline Address constant_pool() const;

  // Get the safepoint entry for the given pc.
  SafepointEntry GetSafepointEntry(Address pc);

  // The entire code object including its header is copied verbatim to the
  // snapshot so that it can be written in one, fast, memcpy during
  // deserialization. The deserializer will overwrite some pointers, rather
  // like a runtime linker, but the random allocation addresses used in the
  // mksnapshot process would still be present in the unlinked snapshot data,
  // which would make snapshot production non-reproducible. This method wipes
  // out the to-be-overwritten header data for reproducible snapshots.
  inline void WipeOutHeader();

  // Clear uninitialized padding space. This ensures that the snapshot content
  // is deterministic. Depending on the V8 build mode there could be no padding.
  inline void clear_padding();
  // Initialize the flags field. Similar to clear_padding above this ensures
  // that the snapshot content is deterministic.
  inline void initialize_flags(Kind kind, bool has_unwinding_info,
                               bool is_turbofanned, int stack_slots,
                               bool is_off_heap_trampoline);

  // Convert a target address into a code object.
  static inline Code GetCodeFromTargetAddress(Address address);

  // Convert an entry address into an object.
  static inline Code GetObjectFromEntryAddress(Address location_of_address);

  // Returns the address of the first instruction.
  inline Address raw_instruction_start() const;

  // Returns the address of the first instruction. For off-heap code objects
  // this differs from instruction_start (which would point to the off-heap
  // trampoline instead).
  inline Address InstructionStart() const;
  V8_EXPORT_PRIVATE Address OffHeapInstructionStart() const;

  // Returns the address right after the last instruction.
  inline Address raw_instruction_end() const;

  // Returns the address right after the last instruction. For off-heap code
  // objects this differs from instruction_end (which would point to the
  // off-heap trampoline instead).
  inline Address InstructionEnd() const;
  V8_EXPORT_PRIVATE Address OffHeapInstructionEnd() const;

  // Returns the size of the instructions, padding, relocation and unwinding
  // information.
  inline int body_size() const;

  // Returns the size of code and its metadata. This includes the size of code
  // relocation information, deoptimization data and handler table.
  inline int SizeIncludingMetadata() const;

  // Returns the address of the first relocation info (read backwards!).
  inline byte* relocation_start() const;

  // Returns the address right after the relocation info (read backwards!).
  inline byte* relocation_end() const;

  // [has_unwinding_info]: Whether this code object has unwinding information.
  // If it doesn't, unwinding_information_start() will point to invalid data.
  //
  // The body of all code objects has the following layout.
  //
  //  +--------------------------+  <-- raw_instruction_start()
  //  |       instructions       |
  //  |           ...            |
  //  +--------------------------+
  //  |     embedded metadata    |  <-- safepoint_table_offset()
  //  |           ...            |  <-- handler_table_offset()
  //  |                          |  <-- constant_pool_offset()
  //  |                          |  <-- code_comments_offset()
  //  |                          |
  //  +--------------------------+  <-- raw_instruction_end()
  //
  // If has_unwinding_info() is false, raw_instruction_end() points to the
  // first memory location after the end of the code object. Otherwise, the
  // body continues as follows:
  //
  //  +--------------------------+
  //  |    padding to the next   |
  //  |  8-byte aligned address  |
  //  +--------------------------+  <-- raw_instruction_end()
  //  |   [unwinding_info_size]  |
  //  |        as uint64_t       |
  //  +--------------------------+  <-- unwinding_info_start()
  //  |       unwinding info     |
  //  |           ...            |
  //  +--------------------------+  <-- unwinding_info_end()
  //
  // and unwinding_info_end() points to the first memory location after the end
  // of the code object.
  //
  inline bool has_unwinding_info() const;

  // [unwinding_info_size]: Size of the unwinding information.
  inline int unwinding_info_size() const;
  inline void set_unwinding_info_size(int value);

  // Returns the address of the unwinding information, if any.
  inline Address unwinding_info_start() const;

  // Returns the address right after the end of the unwinding information.
  inline Address unwinding_info_end() const;

  // Code entry point.
  inline Address entry() const;

  // Returns true if pc is inside this object's instructions.
  inline bool contains(Address pc);

  // Relocate the code by delta bytes. Called to signal that this code
  // object has been moved by delta bytes.
  void Relocate(intptr_t delta);

  // Migrate code from desc without flushing the instruction cache.
  void CopyFromNoFlush(Heap* heap, const CodeDesc& desc);

  // Copy the RelocInfo portion of |desc| to |dest|. The ByteArray must be
  // exactly the same size as the RelocInfo in |desc|.
  static inline void CopyRelocInfoToByteArray(ByteArray dest,
                                              const CodeDesc& desc);

  // Flushes the instruction cache for the executable instructions of this code
  // object. Make sure to call this while the code is still writable.
  void FlushICache() const;

  // Returns the object size for a given body (used for allocation).
  static int SizeFor(int body_size) {
    DCHECK_SIZE_TAG_ALIGNED(body_size);
    return RoundUp(kHeaderSize + body_size, kCodeAlignment);
  }

  // Calculate the size of the code object to report for log events. This takes
  // the layout of the code object into account.
  inline int ExecutableSize() const;

  DECL_CAST(Code)

  // Dispatched behavior.
  inline int CodeSize() const;

  DECL_PRINTER(Code)
  DECL_VERIFIER(Code)

  void PrintDeoptLocation(FILE* out, const char* str, Address pc);
  bool CanDeoptAt(Address pc);

  void SetMarkedForDeoptimization(const char* reason);

  inline HandlerTable::CatchPrediction GetBuiltinCatchPrediction();

  bool IsIsolateIndependent(Isolate* isolate);

  inline bool CanContainWeakObjects();

  inline bool IsWeakObject(HeapObject object);

  static inline bool IsWeakObjectInOptimizedCode(HeapObject object);

  // Return true if the function is inlined in the code.
  bool Inlines(SharedFunctionInfo sfi);

  class OptimizedCodeIterator;

  // Layout description.
#define CODE_FIELDS(V)                                                    \
  V(kRelocationInfoOffset, kTaggedSize)                                   \
  V(kDeoptimizationDataOffset, kTaggedSize)                               \
  V(kSourcePositionTableOffset, kTaggedSize)                              \
  V(kCodeDataContainerOffset, kTaggedSize)                                \
  /* Data or code not directly visited by GC starts here. */              \
  /* The serializer needs to copy bytes starting from here verbatim. */   \
  /* Objects embedded into code are visited via reloc info. */            \
  V(kDataStart, 0)                                                        \
  V(kInstructionSizeOffset, kIntSize)                                     \
  V(kFlagsOffset, kIntSize)                                               \
  V(kSafepointTableOffsetOffset, kIntSize)                                \
  V(kHandlerTableOffsetOffset, kIntSize)                                  \
  V(kConstantPoolOffsetOffset,                                            \
    FLAG_enable_embedded_constant_pool ? kIntSize : 0)                    \
  V(kCodeCommentsOffsetOffset, kIntSize)                                  \
  V(kBuiltinIndexOffset, kIntSize)                                        \
  V(kUnalignedHeaderSize, 0)                                              \
  /* Add padding to align the instruction start following right after */  \
  /* the Code object header. */                                           \
  V(kOptionalPaddingOffset, CODE_POINTER_PADDING(kOptionalPaddingOffset)) \
  V(kHeaderSize, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_FIELDS)
#undef CODE_FIELDS

  // This documents the amount of free space we have in each Code object header
  // due to padding for code alignment.
#if V8_TARGET_ARCH_ARM64
  static constexpr int kHeaderPaddingSize = COMPRESS_POINTERS_BOOL ? 20 : 0;
#elif V8_TARGET_ARCH_MIPS64
  static constexpr int kHeaderPaddingSize = 0;
#elif V8_TARGET_ARCH_X64
  static constexpr int kHeaderPaddingSize = COMPRESS_POINTERS_BOOL ? 20 : 0;
#elif V8_TARGET_ARCH_ARM
  static constexpr int kHeaderPaddingSize = 20;
#elif V8_TARGET_ARCH_IA32
  static constexpr int kHeaderPaddingSize = 20;
#elif V8_TARGET_ARCH_MIPS
  static constexpr int kHeaderPaddingSize = 20;
#elif V8_TARGET_ARCH_PPC64
  static constexpr int kHeaderPaddingSize =
      FLAG_enable_embedded_constant_pool ? 28 : 0;
#elif V8_TARGET_ARCH_S390X
  static constexpr int kHeaderPaddingSize = 0;
#else
#error Unknown architecture.
#endif
  STATIC_ASSERT(FIELD_SIZE(kOptionalPaddingOffset) == kHeaderPaddingSize);

  inline int GetUnwindingInfoSizeOffset() const;

  class BodyDescriptor;

  // Flags layout.  BitField<type, shift, size>.
#define CODE_FLAGS_BIT_FIELDS(V, _)    \
  V(HasUnwindingInfoField, bool, 1, _) \
  V(KindField, Kind, 5, _)             \
  V(IsTurbofannedField, bool, 1, _)    \
  V(StackSlotsField, int, 24, _)       \
  V(IsOffHeapTrampoline, bool, 1, _)
  DEFINE_BIT_FIELDS(CODE_FLAGS_BIT_FIELDS)
#undef CODE_FLAGS_BIT_FIELDS
  static_assert(NUMBER_OF_KINDS <= KindField::kMax, "Code::KindField size");
  static_assert(IsOffHeapTrampoline::kNext <= 32,
                "Code::flags field exhausted");

  // KindSpecificFlags layout (STUB, BUILTIN and OPTIMIZED_FUNCTION)
#define CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS(V, _) \
  V(MarkedForDeoptimizationField, bool, 1, _)     \
  V(EmbeddedObjectsClearedField, bool, 1, _)      \
  V(DeoptAlreadyCountedField, bool, 1, _)         \
  V(CanHaveWeakObjectsField, bool, 1, _)          \
  V(IsPromiseRejectionField, bool, 1, _)          \
  V(IsExceptionCaughtField, bool, 1, _)
  DEFINE_BIT_FIELDS(CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS)
#undef CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS
  static_assert(IsExceptionCaughtField::kNext <= 32, "KindSpecificFlags full");

  // The {marked_for_deoptimization} field is accessed from generated code.
  static const int kMarkedForDeoptimizationBit =
      MarkedForDeoptimizationField::kShift;

  static const int kArgumentsBits = 16;
  // Reserve one argument count value as the "don't adapt arguments" sentinel.
  static const int kMaxArguments = (1 << kArgumentsBits) - 2;

 private:
  friend class RelocIterator;

  bool is_promise_rejection() const;
  bool is_exception_caught() const;

  OBJECT_CONSTRUCTORS(Code, HeapObject);
};
472
// Iterator yielding optimized Code objects one at a time via Next().
// NOTE(review): iteration appears to walk per-context optimized-code lists
// (see the {next_context_} cursor) — confirm against the implementation.
class Code::OptimizedCodeIterator {
 public:
  explicit OptimizedCodeIterator(Isolate* isolate);
  // Returns the next optimized Code object, advancing the iterator.
  Code Next();

 private:
  Context next_context_;  // Cursor for the context currently being walked.
  Code current_code_;     // Most recently returned Code object.
  Isolate* isolate_;

  // Heap allocation (and thus GC) is disallowed while iterating, since a GC
  // could otherwise move the objects the iterator holds.
  DISALLOW_HEAP_ALLOCATION(no_gc)
  DISALLOW_COPY_AND_ASSIGN(OptimizedCodeIterator);
};
486
// CodeDataContainer is a container for all mutable fields associated with its
// referencing {Code} object. Since {Code} objects reside on write-protected
// pages within the heap, their header fields need to be immutable. There is
// always a 1-to-1 relation between {Code} and {CodeDataContainer}; the
// referencing field {Code::code_data_container} itself is immutable.
class CodeDataContainer : public HeapObject {
 public:
  NEVER_READ_ONLY_SPACE
  // [next_code_link]: Mutable link field for lists of optimized or
  // deoptimized code, stored here on behalf of the owning {Code} object.
  DECL_ACCESSORS(next_code_link, Object)
  // [kind_specific_flags]: Mutable backing store for the owning {Code}
  // object's kind-specific flag bits.
  DECL_INT_ACCESSORS(kind_specific_flags)

  // Clear uninitialized padding space. This ensures that the snapshot content
  // is deterministic.
  inline void clear_padding();

  DECL_CAST(CodeDataContainer)

  // Dispatched behavior.
  DECL_PRINTER(CodeDataContainer)
  DECL_VERIFIER(CodeDataContainer)

// Layout description.
#define CODE_DATA_FIELDS(V)                                 \
  /* Weak pointer fields. */                                \
  V(kPointerFieldsStrongEndOffset, 0)                       \
  V(kNextCodeLinkOffset, kTaggedSize)                       \
  V(kPointerFieldsWeakEndOffset, 0)                         \
  /* Raw data fields. */                                    \
  V(kKindSpecificFlagsOffset, kIntSize)                     \
  V(kUnalignedSize, OBJECT_POINTER_PADDING(kUnalignedSize)) \
  /* Total size. */                                         \
  V(kSize, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_DATA_FIELDS)
#undef CODE_DATA_FIELDS

  class BodyDescriptor;

  OBJECT_CONSTRUCTORS(CodeDataContainer, HeapObject);
};
527
// AbstractCode is a dispatching wrapper over either a Code object or a
// BytecodeArray (see GetCode/GetBytecodeArray below), providing a uniform
// view of instruction bounds, sizes, and source positions.
class AbstractCode : public HeapObject {
 public:
  NEVER_READ_ONLY_SPACE
  // All code kinds and INTERPRETED_FUNCTION.
  enum Kind {
#define DEFINE_CODE_KIND_ENUM(name) name,
    CODE_KIND_LIST(DEFINE_CODE_KIND_ENUM)
#undef DEFINE_CODE_KIND_ENUM
    INTERPRETED_FUNCTION,
    NUMBER_OF_KINDS
  };

  // Returns a human-readable name for the given code kind.
  static const char* Kind2String(Kind kind);

  int SourcePosition(int offset);
  int SourceStatementPosition(int offset);

  // Returns the address of the first instruction.
  inline Address raw_instruction_start();

  // Returns the address of the first instruction. For off-heap code objects
  // this differs from instruction_start (which would point to the off-heap
  // trampoline instead).
  inline Address InstructionStart();

  // Returns the address right after the last instruction.
  inline Address raw_instruction_end();

  // Returns the address right after the last instruction. For off-heap code
  // objects this differs from instruction_end (which would point to the
  // off-heap trampoline instead).
  inline Address InstructionEnd();

  // Returns the size of the code instructions.
  inline int raw_instruction_size();

  // Returns the size of the native instructions, including embedded
  // data such as the safepoints table. For off-heap code objects
  // this may differ from instruction_size in that this will return the size of
  // the off-heap instruction stream rather than the on-heap trampoline located
  // at instruction_start.
  inline int InstructionSize();

  // Return the source position table.
  inline ByteArray source_position_table();

  inline Object stack_frame_cache();
  static void SetStackFrameCache(Handle<AbstractCode> abstract_code,
                                 Handle<SimpleNumberDictionary> cache);
  void DropStackFrameCache();

  // Returns the size of instructions and the metadata.
  inline int SizeIncludingMetadata();

  // Returns true if pc is inside this object's instructions.
  inline bool contains(Address pc);

  // Returns the AbstractCode::Kind of the code.
  inline Kind kind();

  // Calculate the size of the code object to report for log events. This takes
  // the layout of the code object into account.
  inline int ExecutableSize();

  DECL_CAST(AbstractCode)
  inline Code GetCode();
  inline BytecodeArray GetBytecodeArray();

  // Max loop nesting marker used to postpone OSR. We don't take loop
  // nesting that is deeper than 5 levels into account.
  static const int kMaxLoopNestingMarker = 6;

  OBJECT_CONSTRUCTORS(AbstractCode, HeapObject);
};
602
603// Dependent code is a singly linked list of weak fixed arrays. Each array
604// contains weak pointers to code objects for one dependent group. The suffix of
605// the array can be filled with the undefined value if the number of codes is
606// less than the length of the array.
607//
608// +------+-----------------+--------+--------+-----+--------+-----------+-----+
609// | next | count & group 1 | code 1 | code 2 | ... | code n | undefined | ... |
610// +------+-----------------+--------+--------+-----+--------+-----------+-----+
611// |
612// V
613// +------+-----------------+--------+--------+-----+--------+-----------+-----+
614// | next | count & group 2 | code 1 | code 2 | ... | code m | undefined | ... |
615// +------+-----------------+--------+--------+-----+--------+-----------+-----+
616// |
617// V
618// empty_weak_fixed_array()
619//
620// The list of weak fixed arrays is ordered by dependency groups.
621
class DependentCode : public WeakFixedArray {
 public:
  DECL_CAST(DependentCode)

  enum DependencyGroup {
    // Group of code that embed a transition to this map, and depend on being
    // deoptimized when the transition is replaced by a new version.
    kTransitionGroup,
    // Group of code that omit run-time prototype checks for prototypes
    // described by this map. The group is deoptimized whenever an object
    // described by this map changes shape (and transitions to a new map),
    // possibly invalidating the assumptions embedded in the code.
    kPrototypeCheckGroup,
    // Group of code that depends on global property values in property cells
    // not being changed.
    kPropertyCellChangedGroup,
    // Group of code that omit run-time checks for field(s) introduced by
    // this map, i.e. for the field type.
    kFieldOwnerGroup,
    // Group of code that omit run-time type checks for initial maps of
    // constructors.
    kInitialMapChangedGroup,
    // Group of code that depends on tenuring information in AllocationSites
    // not being changed.
    kAllocationSiteTenuringChangedGroup,
    // Group of code that depends on element transition information in
    // AllocationSites not being changed.
    kAllocationSiteTransitionChangedGroup
  };

  // Register a code dependency of {code} on {object}.
  V8_EXPORT_PRIVATE static void InstallDependency(Isolate* isolate,
                                                  const MaybeObjectHandle& code,
                                                  Handle<HeapObject> object,
                                                  DependencyGroup group);

  void DeoptimizeDependentCodeGroup(Isolate* isolate, DependencyGroup group);

  bool MarkCodeForDeoptimization(Isolate* isolate, DependencyGroup group);

  // The following low-level accessors are exposed only for tests.
  inline DependencyGroup group();
  inline MaybeObject object_at(int i);
  inline int count();
  inline DependentCode next_link();

 private:
  // Returns a human-readable name for the given dependency group.
  static const char* DependencyGroupName(DependencyGroup group);

  // Get/Set {object}'s {DependentCode}.
  static DependentCode GetDependentCode(Handle<HeapObject> object);
  static void SetDependentCode(Handle<HeapObject> object,
                               Handle<DependentCode> dep);

  static Handle<DependentCode> New(Isolate* isolate, DependencyGroup group,
                                   const MaybeObjectHandle& object,
                                   Handle<DependentCode> next);
  static Handle<DependentCode> EnsureSpace(Isolate* isolate,
                                           Handle<DependentCode> entries);
  static Handle<DependentCode> InsertWeakCode(Isolate* isolate,
                                              Handle<DependentCode> entries,
                                              DependencyGroup group,
                                              const MaybeObjectHandle& code);

  // Compact by removing cleared weak cells and return true if there was
  // any cleared weak cell.
  bool Compact();

  // Growth policy: grow by one while small, then by 25%.
  static int Grow(int number_of_entries) {
    if (number_of_entries < 5) return number_of_entries + 1;
    return number_of_entries * 5 / 4;
  }

  static const int kGroupCount = kAllocationSiteTransitionChangedGroup + 1;
  static const int kNextLinkIndex = 0;
  static const int kFlagsIndex = 1;
  static const int kCodesStartIndex = 2;

  inline void set_next_link(DependentCode next);
  inline void set_count(int value);
  inline void set_object_at(int i, MaybeObject object);
  inline void clear_at(int i);
  inline void copy(int from, int to);

  inline int flags();
  inline void set_flags(int flags);
  class GroupField : public BitField<int, 0, 3> {};
  class CountField : public BitField<int, 3, 27> {};
  STATIC_ASSERT(kGroupCount <= GroupField::kMax + 1);

  OBJECT_CONSTRUCTORS(DependentCode, WeakFixedArray);
};
714
715// BytecodeArray represents a sequence of interpreter bytecodes.
// BytecodeArray represents a sequence of interpreter bytecodes.
class BytecodeArray : public FixedArrayBase {
 public:
  enum Age {
    kNoAgeBytecodeAge = 0,
    kQuadragenarianBytecodeAge,
    kQuinquagenarianBytecodeAge,
    kSexagenarianBytecodeAge,
    kSeptuagenarianBytecodeAge,
    kOctogenarianBytecodeAge,
    kAfterLastBytecodeAge,
    kFirstBytecodeAge = kNoAgeBytecodeAge,
    kLastBytecodeAge = kAfterLastBytecodeAge - 1,
    kBytecodeAgeCount = kAfterLastBytecodeAge - kFirstBytecodeAge - 1,
    kIsOldBytecodeAge = kSexagenarianBytecodeAge
  };

  // Returns the allocation size for a BytecodeArray with {length} bytes of
  // bytecode (header included, pointer-aligned).
  static constexpr int SizeFor(int length) {
    return OBJECT_POINTER_ALIGN(kHeaderSize + length);
  }

  // Setter and getter
  inline byte get(int index) const;
  inline void set(int index, byte value);

  // Returns data start address.
  inline Address GetFirstBytecodeAddress();

  // Accessors for frame size.
  inline int frame_size() const;
  inline void set_frame_size(int frame_size);

  // Accessor for register count (derived from frame_size).
  inline int register_count() const;

  // Accessors for parameter count (including implicit 'this' receiver).
  inline int parameter_count() const;
  inline void set_parameter_count(int number_of_parameters);

  // Register used to pass the incoming new.target or generator object from the
  // function call.
  inline interpreter::Register incoming_new_target_or_generator_register()
      const;
  inline void set_incoming_new_target_or_generator_register(
      interpreter::Register incoming_new_target_or_generator_register);

  // Accessors for OSR loop nesting level.
  inline int osr_loop_nesting_level() const;
  inline void set_osr_loop_nesting_level(int depth);

  // Accessors for bytecode's code age.
  inline Age bytecode_age() const;
  inline void set_bytecode_age(Age age);

  // Accessors for the constant pool.
  DECL_ACCESSORS(constant_pool, FixedArray)

  // Accessors for handler table containing offsets of exception handlers.
  DECL_ACCESSORS(handler_table, ByteArray)

  // Accessors for source position table. Can contain:
  // * undefined (initial value)
  // * empty_byte_array (for bytecode generated for functions that will never
  // have source positions, e.g. native functions).
  // * ByteArray (when source positions have been collected for the bytecode)
  // * SourcePositionTableWithFrameCache (as above but with a frame cache)
  // * exception (when an error occurred while explicitly collecting source
  // positions for pre-existing bytecode).
  DECL_ACCESSORS(source_position_table, Object)

  // This must only be called if source position collection has already been
  // attempted. (If it failed because of an exception then it will return
  // empty_byte_array).
  inline ByteArray SourcePositionTable() const;
  // If source positions have not been collected or an exception has been
  // thrown this will return empty_byte_array.
  inline ByteArray SourcePositionTableIfCollected() const;
  inline bool HasSourcePositionTable() const;
  inline bool DidSourcePositionGenerationFail() const;
  inline void ClearFrameCacheFromSourcePositionTable();

  // Indicates that an attempt was made to collect source positions, but that
  // it failed most likely due to stack exhaustion. When in this state
  // |SourcePositionTable| will return an empty byte array rather than crashing
  // as it would if no attempt was ever made to collect source positions.
  inline void SetSourcePositionsFailedToCollect();

  DECL_CAST(BytecodeArray)

  // Dispatched behavior.
  inline int BytecodeArraySize();

  inline int raw_instruction_size();

  // Returns the size of bytecode and its metadata. This includes the size of
  // bytecode, constant pool, source position table, and handler table.
  inline int SizeIncludingMetadata();

  DECL_PRINTER(BytecodeArray)
  DECL_VERIFIER(BytecodeArray)

  V8_EXPORT_PRIVATE void Disassemble(std::ostream& os);

  void CopyBytecodesTo(BytecodeArray to);

  // Bytecode aging
  V8_EXPORT_PRIVATE bool IsOld() const;
  V8_EXPORT_PRIVATE void MakeOlder();

  // Clear uninitialized padding space. This ensures that the snapshot content
  // is deterministic.
  inline void clear_padding();

  // Compares only the bytecode array but not any of the header fields.
  bool IsBytecodeEqual(const BytecodeArray other) const;

// Layout description.
#define BYTECODE_ARRAY_FIELDS(V)                           \
  /* Pointer fields. */                                    \
  V(kConstantPoolOffset, kTaggedSize)                      \
  V(kHandlerTableOffset, kTaggedSize)                      \
  V(kSourcePositionTableOffset, kTaggedSize)               \
  V(kFrameSizeOffset, kIntSize)                            \
  V(kParameterSizeOffset, kIntSize)                        \
  V(kIncomingNewTargetOrGeneratorRegisterOffset, kIntSize) \
  V(kOSRNestingLevelOffset, kCharSize)                     \
  V(kBytecodeAgeOffset, kCharSize)                         \
  /* Total size. */                                        \
  V(kHeaderSize, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(FixedArrayBase::kHeaderSize,
                                BYTECODE_ARRAY_FIELDS)
#undef BYTECODE_ARRAY_FIELDS

  // InterpreterEntryTrampoline expects these fields to be next to each other
  // and writes a 16-bit value to reset them.
  STATIC_ASSERT(BytecodeArray::kBytecodeAgeOffset ==
                kOSRNestingLevelOffset + kCharSize);

  // Maximal memory consumption for a single BytecodeArray.
  static const int kMaxSize = 512 * MB;
  // Maximal length of a single BytecodeArray.
  static const int kMaxLength = kMaxSize - kHeaderSize;

  class BodyDescriptor;

  OBJECT_CONSTRUCTORS(BytecodeArray, FixedArrayBase);
};
863
// DeoptimizationData is a fixed array used to hold the deoptimization data for
// optimized code. It also contains information about functions that were
// inlined. If N different functions were inlined then the first N elements of
// the literal array will contain these functions.
//
// It can be empty.
class DeoptimizationData : public FixedArray {
 public:
  // Layout description. Indices in the array.
  static const int kTranslationByteArrayIndex = 0;
  static const int kInlinedFunctionCountIndex = 1;
  static const int kLiteralArrayIndex = 2;
  static const int kOsrBytecodeOffsetIndex = 3;
  static const int kOsrPcOffsetIndex = 4;
  static const int kOptimizationIdIndex = 5;
  static const int kSharedFunctionInfoIndex = 6;
  static const int kInliningPositionsIndex = 7;
  // Deopt entries are appended after the fixed header slots above; each entry
  // occupies kDeoptEntrySize consecutive slots.
  static const int kFirstDeoptEntryIndex = 8;

  // Offsets of deopt entry elements relative to the start of the entry.
  static const int kBytecodeOffsetRawOffset = 0;
  static const int kTranslationIndexOffset = 1;
  static const int kPcOffset = 2;
  static const int kDeoptEntrySize = 3;

// Simple element accessors.
#define DECL_ELEMENT_ACCESSORS(name, type) \
  inline type name() const;                \
  inline void Set##name(type value);

  DECL_ELEMENT_ACCESSORS(TranslationByteArray, ByteArray)
  DECL_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
  DECL_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
  DECL_ELEMENT_ACCESSORS(OsrBytecodeOffset, Smi)
  DECL_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
  DECL_ELEMENT_ACCESSORS(OptimizationId, Smi)
  DECL_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)
  DECL_ELEMENT_ACCESSORS(InliningPositions, PodArray<InliningPosition>)

#undef DECL_ELEMENT_ACCESSORS

// Accessors for elements of the ith deoptimization entry.
#define DECL_ENTRY_ACCESSORS(name, type) \
  inline type name(int i) const;         \
  inline void Set##name(int i, type value);

  DECL_ENTRY_ACCESSORS(BytecodeOffsetRaw, Smi)
  DECL_ENTRY_ACCESSORS(TranslationIndex, Smi)
  DECL_ENTRY_ACCESSORS(Pc, Smi)

#undef DECL_ENTRY_ACCESSORS

  // Typed convenience wrappers that presumably convert between the raw Smi
  // stored by BytecodeOffsetRaw and a BailoutId -- confirm in the -inl.h.
  inline BailoutId BytecodeOffset(int i);

  inline void SetBytecodeOffset(int i, BailoutId value);

  // Number of deopt entries stored after the fixed header slots.
  inline int DeoptCount();

  static const int kNotInlinedIndex = -1;

  // Returns the inlined function at the given position in LiteralArray, or the
  // outer function if index == kNotInlinedIndex.
  // The elaborated-type-specifier ("class") is required here because the
  // accessor macro above also declares a member named SharedFunctionInfo,
  // which would otherwise shadow the type.
  class SharedFunctionInfo GetInlinedFunction(int index);

  // Allocates a DeoptimizationData.
  static Handle<DeoptimizationData> New(Isolate* isolate, int deopt_entry_count,
                                        AllocationType allocation);

  // Return an empty DeoptimizationData.
  V8_EXPORT_PRIVATE static Handle<DeoptimizationData> Empty(Isolate* isolate);

  DECL_CAST(DeoptimizationData)

#ifdef ENABLE_DISASSEMBLER
  void DeoptimizationDataPrint(std::ostream& os);  // NOLINT
#endif

 private:
  // Array index of the first slot of the i'th deopt entry.
  static int IndexForEntry(int i) {
    return kFirstDeoptEntryIndex + (i * kDeoptEntrySize);
  }

  // Total array length required to hold |entry_count| deopt entries.
  static int LengthFor(int entry_count) { return IndexForEntry(entry_count); }

  OBJECT_CONSTRUCTORS(DeoptimizationData, FixedArray);
};
950
// A Tuple2 pairing a source position table (ByteArray) with a
// SimpleNumberDictionary acting as a stack frame cache. This is one of the
// possible values of BytecodeArray's source_position_table slot.
class SourcePositionTableWithFrameCache : public Tuple2 {
 public:
  DECL_ACCESSORS(source_position_table, ByteArray)
  DECL_ACCESSORS(stack_frame_cache, SimpleNumberDictionary)

  DECL_CAST(SourcePositionTableWithFrameCache)

// Layout description.
#define SOURCE_POSITION_TABLE_WITH_FRAME_FIELDS(V) \
  V(kSourcePositionTableIndex, kTaggedSize)        \
  V(kStackFrameCacheIndex, kTaggedSize)            \
  /* Total size. */                                \
  V(kSize, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(Struct::kHeaderSize,
                                SOURCE_POSITION_TABLE_WITH_FRAME_FIELDS)
#undef SOURCE_POSITION_TABLE_WITH_FRAME_FIELDS

  OBJECT_CONSTRUCTORS(SourcePositionTableWithFrameCache, Tuple2);
};
971
972} // namespace internal
973} // namespace v8
974
975#include "src/objects/object-macros-undef.h"
976
977#endif // V8_OBJECTS_CODE_H_
978