1 | // Copyright 2018 the V8 project authors. All rights reserved. |
2 | // Use of this source code is governed by a BSD-style license that can be |
3 | // found in the LICENSE file. |
4 | |
5 | #ifndef V8_OBJECTS_HEAP_OBJECT_H_ |
6 | #define V8_OBJECTS_HEAP_OBJECT_H_ |
7 | |
8 | #include "src/globals.h" |
9 | #include "src/roots.h" |
10 | |
11 | #include "src/objects.h" |
12 | |
13 | // Has to be the last include (doesn't have include guards): |
14 | #include "src/objects/object-macros.h" |
15 | |
16 | namespace v8 { |
17 | namespace internal { |
18 | |
19 | class Heap; |
20 | |
21 | // HeapObject is the superclass for all classes describing heap allocated |
22 | // objects. |
23 | class HeapObject : public Object { |
24 | public: |
25 | bool is_null() const { return ptr() == kNullAddress; } |
26 | |
27 | // [map]: Contains a map which contains the object's reflective |
28 | // information. |
29 | inline Map map() const; |
30 | inline void set_map(Map value); |
31 | |
32 | inline MapWordSlot map_slot() const; |
33 | |
34 | // The no-write-barrier version. This is OK if the object is white and in |
35 | // new space, or if the value is an immortal immutable object, like the maps |
36 | // of primitive (non-JS) objects like strings, heap numbers etc. |
37 | inline void set_map_no_write_barrier(Map value); |
38 | |
39 | // Get the map using acquire load. |
40 | inline Map synchronized_map() const; |
41 | inline MapWord synchronized_map_word() const; |
42 | |
43 | // Set the map using release store |
44 | inline void synchronized_set_map(Map value); |
45 | inline void synchronized_set_map_word(MapWord map_word); |
46 | |
47 | // Initialize the map immediately after the object is allocated. |
48 | // Do not use this outside Heap. |
49 | inline void set_map_after_allocation( |
50 | Map value, WriteBarrierMode mode = UPDATE_WRITE_BARRIER); |
51 | |
52 | // During garbage collection, the map word of a heap object does not |
53 | // necessarily contain a map pointer. |
54 | inline MapWord map_word() const; |
55 | inline void set_map_word(MapWord map_word); |
56 | |
57 | // TODO(v8:7464): Once RO_SPACE is shared between isolates, this method can be |
58 | // removed as ReadOnlyRoots will be accessible from a global variable. For now |
59 | // this method exists to help remove GetIsolate/GetHeap from HeapObject, in a |
60 | // way that doesn't require passing Isolate/Heap down huge call chains or to |
61 | // places where it might not be safe to access it. |
62 | inline ReadOnlyRoots GetReadOnlyRoots() const; |
63 | |
64 | #define IS_TYPE_FUNCTION_DECL(Type) V8_INLINE bool Is##Type() const; |
65 | HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL) |
66 | #undef IS_TYPE_FUNCTION_DECL |
67 | |
68 | bool IsExternal(Isolate* isolate) const; |
69 | |
70 | // Oddball checks are faster when they are raw pointer comparisons, so the |
71 | // isolate/read-only roots overloads should be preferred where possible. |
72 | #define IS_TYPE_FUNCTION_DECL(Type, Value) \ |
73 | V8_INLINE bool Is##Type(Isolate* isolate) const; \ |
74 | V8_INLINE bool Is##Type(ReadOnlyRoots roots) const; \ |
75 | V8_INLINE bool Is##Type() const; |
76 | ODDBALL_LIST(IS_TYPE_FUNCTION_DECL) |
77 | #undef IS_TYPE_FUNCTION_DECL |
78 | |
79 | V8_INLINE bool IsNullOrUndefined(Isolate* isolate) const; |
80 | V8_INLINE bool IsNullOrUndefined(ReadOnlyRoots roots) const; |
81 | V8_INLINE bool IsNullOrUndefined() const; |
82 | |
83 | #define DECL_STRUCT_PREDICATE(NAME, Name, name) V8_INLINE bool Is##Name() const; |
84 | STRUCT_LIST(DECL_STRUCT_PREDICATE) |
85 | #undef DECL_STRUCT_PREDICATE |
86 | |
87 | // Converts an address to a HeapObject pointer. |
88 | static inline HeapObject FromAddress(Address address); |
89 | |
90 | // Returns the address of this HeapObject. |
91 | inline Address address() const { return ptr() - kHeapObjectTag; } |
92 | |
93 | // Iterates over pointers contained in the object (including the Map). |
94 | // If it's not performance critical iteration use the non-templatized |
95 | // version. |
96 | void Iterate(ObjectVisitor* v); |
97 | |
98 | template <typename ObjectVisitor> |
99 | inline void IterateFast(ObjectVisitor* v); |
100 | |
101 | // Iterates over all pointers contained in the object except the |
102 | // first map pointer. The object type is given in the first |
103 | // parameter. This function does not access the map pointer in the |
104 | // object, and so is safe to call while the map pointer is modified. |
105 | // If it's not performance critical iteration use the non-templatized |
106 | // version. |
107 | void IterateBody(ObjectVisitor* v); |
108 | void IterateBody(Map map, int object_size, ObjectVisitor* v); |
109 | |
110 | template <typename ObjectVisitor> |
111 | inline void IterateBodyFast(ObjectVisitor* v); |
112 | |
113 | template <typename ObjectVisitor> |
114 | inline void IterateBodyFast(Map map, int object_size, ObjectVisitor* v); |
115 | |
116 | // Returns true if the object contains a tagged value at given offset. |
117 | // It is used for invalid slots filtering. If the offset points outside |
118 | // of the object or to the map word, the result is UNDEFINED (!!!). |
119 | V8_EXPORT_PRIVATE bool IsValidSlot(Map map, int offset); |
120 | |
121 | // Returns the heap object's size in bytes |
122 | inline int Size() const; |
123 | |
124 | // Given a heap object's map pointer, returns the heap size in bytes |
125 | // Useful when the map pointer field is used for other purposes. |
126 | // GC internal. |
127 | V8_EXPORT_PRIVATE int SizeFromMap(Map map) const; |
128 | |
129 | // Returns the field at offset in obj, as a read/write Object reference. |
130 | // Does no checking, and is safe to use during GC, while maps are invalid. |
131 | // Does not invoke write barrier, so should only be assigned to |
132 | // during marking GC. |
133 | inline ObjectSlot RawField(int byte_offset) const; |
134 | inline MaybeObjectSlot RawMaybeWeakField(int byte_offset) const; |
135 | |
136 | DECL_CAST(HeapObject) |
137 | |
138 | // Return the write barrier mode for this. Callers of this function |
139 | // must be able to present a reference to an DisallowHeapAllocation |
140 | // object as a sign that they are not going to use this function |
141 | // from code that allocates and thus invalidates the returned write |
142 | // barrier mode. |
143 | inline WriteBarrierMode GetWriteBarrierMode( |
144 | const DisallowHeapAllocation& promise); |
145 | |
146 | // Dispatched behavior. |
147 | void HeapObjectShortPrint(std::ostream& os); // NOLINT |
148 | #ifdef OBJECT_PRINT |
149 | void (std::ostream& os, const char* id); // NOLINT |
150 | #endif |
151 | DECL_PRINTER(HeapObject) |
152 | EXPORT_DECL_VERIFIER(HeapObject) |
153 | #ifdef VERIFY_HEAP |
154 | inline void VerifyObjectField(Isolate* isolate, int offset); |
155 | inline void VerifySmiField(int offset); |
156 | inline void VerifyMaybeObjectField(Isolate* isolate, int offset); |
157 | |
158 | // Verify a pointer is a valid HeapObject pointer that points to object |
159 | // areas in the heap. |
160 | static void VerifyHeapPointer(Isolate* isolate, Object p); |
161 | #endif |
162 | |
163 | static inline AllocationAlignment RequiredAlignment(Map map); |
164 | |
165 | // Whether the object needs rehashing. That is the case if the object's |
166 | // content depends on FLAG_hash_seed. When the object is deserialized into |
167 | // a heap with a different hash seed, these objects need to adapt. |
168 | bool NeedsRehashing() const; |
169 | |
170 | // Rehashing support is not implemented for all objects that need rehashing. |
171 | // With objects that need rehashing but cannot be rehashed, rehashing has to |
172 | // be disabled. |
173 | bool CanBeRehashed() const; |
174 | |
175 | // Rehash the object based on the layout inferred from its map. |
176 | void RehashBasedOnMap(ReadOnlyRoots root); |
177 | |
178 | // Layout description. |
179 | #define HEAP_OBJECT_FIELDS(V) \ |
180 | V(kMapOffset, kTaggedSize) \ |
181 | /* Header size. */ \ |
182 | V(, 0) |
183 | |
184 | DEFINE_FIELD_OFFSET_CONSTANTS(Object::kHeaderSize, HEAP_OBJECT_FIELDS) |
185 | #undef HEAP_OBJECT_FIELDS |
186 | |
187 | STATIC_ASSERT(kMapOffset == Internals::kHeapObjectMapOffset); |
188 | |
189 | inline Address GetFieldAddress(int field_offset) const; |
190 | |
191 | protected: |
192 | // Special-purpose constructor for subclasses that have fast paths where |
193 | // their ptr() is a Smi. |
194 | enum class AllowInlineSmiStorage { kRequireHeapObjectTag, kAllowBeingASmi }; |
195 | inline HeapObject(Address ptr, AllowInlineSmiStorage allow_smi); |
196 | |
197 | OBJECT_CONSTRUCTORS(HeapObject, Object); |
198 | }; |
199 | |
// Helper class for objects that can never be in RO space.
// Provides Heap/Isolate accessors as static helpers taking the object as an
// argument; presumably this is because read-only-space objects have no
// usable owning Heap/Isolate — TODO confirm against the definitions in the
// corresponding .cc/inl file.
class NeverReadOnlySpaceObject {
 public:
  // The Heap the object was allocated in. Used also to access Isolate.
  static inline Heap* GetHeap(const HeapObject object);

  // Convenience method to get current isolate.
  static inline Isolate* GetIsolate(const HeapObject object);
};
209 | |
210 | } // namespace internal |
211 | } // namespace v8 |
212 | |
213 | #include "src/objects/object-macros-undef.h" |
214 | |
215 | #endif // V8_OBJECTS_HEAP_OBJECT_H_ |
216 | |