// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef INCLUDE_V8_INTERNAL_H_
#define INCLUDE_V8_INTERNAL_H_

#include <stddef.h>
#include <stdint.h>
#include <string.h>
#include <type_traits>

#include "v8-version.h"  // NOLINT(build/include)
#include "v8config.h"    // NOLINT(build/include)

16 | namespace v8 { |
17 | |
18 | class Context; |
19 | class Data; |
20 | class Isolate; |
21 | |
22 | namespace internal { |
23 | |
24 | class Isolate; |
25 | |
26 | typedef uintptr_t Address; |
27 | static const Address kNullAddress = 0; |
28 | |
29 | /** |
30 | * Configuration of tagging scheme. |
31 | */ |
32 | const int kApiSystemPointerSize = sizeof(void*); |
33 | const int kApiDoubleSize = sizeof(double); |
34 | const int kApiInt32Size = sizeof(int32_t); |
35 | const int kApiInt64Size = sizeof(int64_t); |
36 | |
37 | // Tag information for HeapObject. |
38 | const int kHeapObjectTag = 1; |
39 | const int kWeakHeapObjectTag = 3; |
40 | const int kHeapObjectTagSize = 2; |
41 | const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1; |
42 | |
43 | // Tag information for Smi. |
44 | const int kSmiTag = 0; |
45 | const int kSmiTagSize = 1; |
46 | const intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1; |
47 | |
48 | template <size_t tagged_ptr_size> |
49 | struct SmiTagging; |
50 | |
51 | // Smi constants for systems where tagged pointer is a 32-bit value. |
52 | template <> |
53 | struct SmiTagging<4> { |
54 | enum { kSmiShiftSize = 0, kSmiValueSize = 31 }; |
55 | V8_INLINE static int SmiToInt(const internal::Address value) { |
56 | int shift_bits = kSmiTagSize + kSmiShiftSize; |
57 | // Shift down (requires >> to be sign extending). |
58 | return static_cast<int>(static_cast<intptr_t>(value)) >> shift_bits; |
59 | } |
60 | V8_INLINE static constexpr bool IsValidSmi(intptr_t value) { |
61 | // To be representable as an tagged small integer, the two |
62 | // most-significant bits of 'value' must be either 00 or 11 due to |
63 | // sign-extension. To check this we add 01 to the two |
64 | // most-significant bits, and check if the most-significant bit is 0. |
65 | // |
66 | // CAUTION: The original code below: |
67 | // bool result = ((value + 0x40000000) & 0x80000000) == 0; |
68 | // may lead to incorrect results according to the C language spec, and |
69 | // in fact doesn't work correctly with gcc4.1.1 in some cases: The |
70 | // compiler may produce undefined results in case of signed integer |
71 | // overflow. The computation must be done w/ unsigned ints. |
72 | return static_cast<uintptr_t>(value) + 0x40000000U < 0x80000000U; |
73 | } |
74 | }; |
75 | |
76 | // Smi constants for systems where tagged pointer is a 64-bit value. |
77 | template <> |
78 | struct SmiTagging<8> { |
79 | enum { kSmiShiftSize = 31, kSmiValueSize = 32 }; |
80 | V8_INLINE static int SmiToInt(const internal::Address value) { |
81 | int shift_bits = kSmiTagSize + kSmiShiftSize; |
82 | // Shift down and throw away top 32 bits. |
83 | return static_cast<int>(static_cast<intptr_t>(value) >> shift_bits); |
84 | } |
85 | V8_INLINE static constexpr bool IsValidSmi(intptr_t value) { |
86 | // To be representable as a long smi, the value must be a 32-bit integer. |
87 | return (value == static_cast<int32_t>(value)); |
88 | } |
89 | }; |
90 | |
91 | #ifdef V8_COMPRESS_POINTERS |
92 | static_assert( |
93 | kApiSystemPointerSize == kApiInt64Size, |
94 | "Pointer compression can be enabled only for 64-bit architectures" ); |
95 | const int kApiTaggedSize = kApiInt32Size; |
96 | #else |
97 | const int kApiTaggedSize = kApiSystemPointerSize; |
98 | #endif |
99 | |
100 | #ifdef V8_31BIT_SMIS_ON_64BIT_ARCH |
101 | typedef SmiTagging<kApiInt32Size> PlatformSmiTagging; |
102 | #else |
103 | typedef SmiTagging<kApiTaggedSize> PlatformSmiTagging; |
104 | #endif |
105 | |
106 | const int kSmiShiftSize = PlatformSmiTagging::kSmiShiftSize; |
107 | const int kSmiValueSize = PlatformSmiTagging::kSmiValueSize; |
108 | const int kSmiMinValue = (static_cast<unsigned int>(-1)) << (kSmiValueSize - 1); |
109 | const int kSmiMaxValue = -(kSmiMinValue + 1); |
110 | constexpr bool SmiValuesAre31Bits() { return kSmiValueSize == 31; } |
111 | constexpr bool SmiValuesAre32Bits() { return kSmiValueSize == 32; } |
112 | |
113 | V8_INLINE static constexpr internal::Address IntToSmi(int value) { |
114 | return (static_cast<Address>(value) << (kSmiTagSize + kSmiShiftSize)) | |
115 | kSmiTag; |
116 | } |
117 | |
118 | /** |
119 | * This class exports constants and functionality from within v8 that |
120 | * is necessary to implement inline functions in the v8 api. Don't |
121 | * depend on functions and constants defined here. |
122 | */ |
123 | class Internals { |
124 | public: |
125 | // These values match non-compiler-dependent values defined within |
126 | // the implementation of v8. |
127 | static const int kHeapObjectMapOffset = 0; |
128 | static const int kMapInstanceTypeOffset = 1 * kApiTaggedSize + kApiInt32Size; |
129 | static const int kStringResourceOffset = |
130 | 1 * kApiTaggedSize + 2 * kApiInt32Size; |
131 | |
132 | static const int kOddballKindOffset = 4 * kApiTaggedSize + kApiDoubleSize; |
133 | static const int kForeignAddressOffset = kApiTaggedSize; |
134 | static const int = 3 * kApiTaggedSize; |
135 | static const int = 2 * kApiTaggedSize; |
136 | static const int = 2 * kApiTaggedSize; |
137 | static const int kEmbedderDataSlotSize = kApiSystemPointerSize; |
138 | static const int kNativeContextEmbedderDataOffset = 7 * kApiTaggedSize; |
139 | static const int kFullStringRepresentationMask = 0x0f; |
140 | static const int kStringEncodingMask = 0x8; |
141 | static const int kExternalTwoByteRepresentationTag = 0x02; |
142 | static const int kExternalOneByteRepresentationTag = 0x0a; |
143 | |
144 | static const uint32_t kNumIsolateDataSlots = 4; |
145 | |
146 | static const int kIsolateEmbedderDataOffset = 0; |
147 | static const int kExternalMemoryOffset = |
148 | kNumIsolateDataSlots * kApiSystemPointerSize; |
149 | static const int kExternalMemoryLimitOffset = |
150 | kExternalMemoryOffset + kApiInt64Size; |
151 | static const int kExternalMemoryAtLastMarkCompactOffset = |
152 | kExternalMemoryLimitOffset + kApiInt64Size; |
153 | static const int kIsolateRootsOffset = |
154 | kExternalMemoryAtLastMarkCompactOffset + kApiInt64Size; |
155 | |
156 | static const int kUndefinedValueRootIndex = 4; |
157 | static const int kTheHoleValueRootIndex = 5; |
158 | static const int kNullValueRootIndex = 6; |
159 | static const int kTrueValueRootIndex = 7; |
160 | static const int kFalseValueRootIndex = 8; |
161 | static const int kEmptyStringRootIndex = 9; |
162 | |
163 | static const int kNodeClassIdOffset = 1 * kApiSystemPointerSize; |
164 | static const int kNodeFlagsOffset = 1 * kApiSystemPointerSize + 3; |
165 | static const int kNodeStateMask = 0x7; |
166 | static const int kNodeStateIsWeakValue = 2; |
167 | static const int kNodeStateIsPendingValue = 3; |
168 | static const int kNodeIsIndependentShift = 3; |
169 | static const int kNodeIsActiveShift = 4; |
170 | |
171 | static const int kFirstNonstringType = 0x80; |
172 | static const int kOddballType = 0x83; |
173 | static const int kForeignType = 0x87; |
174 | static const int kJSSpecialApiObjectType = 0x410; |
175 | static const int kJSApiObjectType = 0x420; |
176 | static const int kJSObjectType = 0x421; |
177 | |
178 | static const int kUndefinedOddballKind = 5; |
179 | static const int kNullOddballKind = 3; |
180 | |
181 | // Constants used by PropertyCallbackInfo to check if we should throw when an |
182 | // error occurs. |
183 | static const int kThrowOnError = 0; |
184 | static const int kDontThrow = 1; |
185 | static const int kInferShouldThrowMode = 2; |
186 | |
187 | // Soft limit for AdjustAmountofExternalAllocatedMemory. Trigger an |
188 | // incremental GC once the external memory reaches this limit. |
189 | static constexpr int kExternalAllocationSoftLimit = 64 * 1024 * 1024; |
190 | |
191 | V8_EXPORT static void CheckInitializedImpl(v8::Isolate* isolate); |
192 | V8_INLINE static void CheckInitialized(v8::Isolate* isolate) { |
193 | #ifdef V8_ENABLE_CHECKS |
194 | CheckInitializedImpl(isolate); |
195 | #endif |
196 | } |
197 | |
198 | V8_INLINE static bool HasHeapObjectTag(const internal::Address value) { |
199 | return (value & kHeapObjectTagMask) == static_cast<Address>(kHeapObjectTag); |
200 | } |
201 | |
202 | V8_INLINE static int SmiValue(const internal::Address value) { |
203 | return PlatformSmiTagging::SmiToInt(value); |
204 | } |
205 | |
206 | V8_INLINE static constexpr internal::Address IntToSmi(int value) { |
207 | return internal::IntToSmi(value); |
208 | } |
209 | |
210 | V8_INLINE static constexpr bool IsValidSmi(intptr_t value) { |
211 | return PlatformSmiTagging::IsValidSmi(value); |
212 | } |
213 | |
214 | V8_INLINE static int GetInstanceType(const internal::Address obj) { |
215 | typedef internal::Address A; |
216 | A map = ReadTaggedPointerField(obj, kHeapObjectMapOffset); |
217 | return ReadRawField<uint16_t>(map, kMapInstanceTypeOffset); |
218 | } |
219 | |
220 | V8_INLINE static int GetOddballKind(const internal::Address obj) { |
221 | return SmiValue(ReadTaggedSignedField(obj, kOddballKindOffset)); |
222 | } |
223 | |
224 | V8_INLINE static bool IsExternalTwoByteString(int instance_type) { |
225 | int representation = (instance_type & kFullStringRepresentationMask); |
226 | return representation == kExternalTwoByteRepresentationTag; |
227 | } |
228 | |
229 | V8_INLINE static uint8_t GetNodeFlag(internal::Address* obj, int shift) { |
230 | uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset; |
231 | return *addr & static_cast<uint8_t>(1U << shift); |
232 | } |
233 | |
234 | V8_INLINE static void UpdateNodeFlag(internal::Address* obj, bool value, |
235 | int shift) { |
236 | uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset; |
237 | uint8_t mask = static_cast<uint8_t>(1U << shift); |
238 | *addr = static_cast<uint8_t>((*addr & ~mask) | (value << shift)); |
239 | } |
240 | |
241 | V8_INLINE static uint8_t GetNodeState(internal::Address* obj) { |
242 | uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset; |
243 | return *addr & kNodeStateMask; |
244 | } |
245 | |
246 | V8_INLINE static void UpdateNodeState(internal::Address* obj, uint8_t value) { |
247 | uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset; |
248 | *addr = static_cast<uint8_t>((*addr & ~kNodeStateMask) | value); |
249 | } |
250 | |
251 | V8_INLINE static void SetEmbedderData(v8::Isolate* isolate, uint32_t slot, |
252 | void* data) { |
253 | internal::Address addr = reinterpret_cast<internal::Address>(isolate) + |
254 | kIsolateEmbedderDataOffset + |
255 | slot * kApiSystemPointerSize; |
256 | *reinterpret_cast<void**>(addr) = data; |
257 | } |
258 | |
259 | V8_INLINE static void* GetEmbedderData(const v8::Isolate* isolate, |
260 | uint32_t slot) { |
261 | internal::Address addr = reinterpret_cast<internal::Address>(isolate) + |
262 | kIsolateEmbedderDataOffset + |
263 | slot * kApiSystemPointerSize; |
264 | return *reinterpret_cast<void* const*>(addr); |
265 | } |
266 | |
267 | V8_INLINE static internal::Address* GetRoot(v8::Isolate* isolate, int index) { |
268 | internal::Address addr = reinterpret_cast<internal::Address>(isolate) + |
269 | kIsolateRootsOffset + |
270 | index * kApiSystemPointerSize; |
271 | return reinterpret_cast<internal::Address*>(addr); |
272 | } |
273 | |
274 | template <typename T> |
275 | V8_INLINE static T ReadRawField(internal::Address heap_object_ptr, |
276 | int offset) { |
277 | internal::Address addr = heap_object_ptr + offset - kHeapObjectTag; |
278 | #ifdef V8_COMPRESS_POINTERS |
279 | if (sizeof(T) > kApiTaggedSize) { |
280 | // TODO(ishell, v8:8875): When pointer compression is enabled 8-byte size |
281 | // fields (external pointers, doubles and BigInt data) are only |
282 | // kTaggedSize aligned so we have to use unaligned pointer friendly way of |
283 | // accessing them in order to avoid undefined behavior in C++ code. |
284 | T r; |
285 | memcpy(&r, reinterpret_cast<void*>(addr), sizeof(T)); |
286 | return r; |
287 | } |
288 | #endif |
289 | return *reinterpret_cast<const T*>(addr); |
290 | } |
291 | |
292 | V8_INLINE static internal::Address ReadTaggedPointerField( |
293 | internal::Address heap_object_ptr, int offset) { |
294 | #ifdef V8_COMPRESS_POINTERS |
295 | int32_t value = ReadRawField<int32_t>(heap_object_ptr, offset); |
296 | internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr); |
297 | return root + static_cast<internal::Address>(static_cast<intptr_t>(value)); |
298 | #else |
299 | return ReadRawField<internal::Address>(heap_object_ptr, offset); |
300 | #endif |
301 | } |
302 | |
303 | V8_INLINE static internal::Address ReadTaggedSignedField( |
304 | internal::Address heap_object_ptr, int offset) { |
305 | #ifdef V8_COMPRESS_POINTERS |
306 | int32_t value = ReadRawField<int32_t>(heap_object_ptr, offset); |
307 | return static_cast<internal::Address>(static_cast<intptr_t>(value)); |
308 | #else |
309 | return ReadRawField<internal::Address>(heap_object_ptr, offset); |
310 | #endif |
311 | } |
312 | |
313 | #ifdef V8_COMPRESS_POINTERS |
314 | // See v8:7703 or src/ptr-compr.* for details about pointer compression. |
315 | static constexpr size_t kPtrComprHeapReservationSize = size_t{1} << 32; |
316 | static constexpr size_t kPtrComprIsolateRootBias = |
317 | kPtrComprHeapReservationSize / 2; |
318 | static constexpr size_t kPtrComprIsolateRootAlignment = size_t{1} << 32; |
319 | |
320 | V8_INLINE static internal::Address GetRootFromOnHeapAddress( |
321 | internal::Address addr) { |
322 | return (addr + kPtrComprIsolateRootBias) & |
323 | -static_cast<intptr_t>(kPtrComprIsolateRootAlignment); |
324 | } |
325 | |
326 | V8_INLINE static internal::Address DecompressTaggedAnyField( |
327 | internal::Address heap_object_ptr, int32_t value) { |
328 | internal::Address root_mask = static_cast<internal::Address>( |
329 | -static_cast<intptr_t>(value & kSmiTagMask)); |
330 | internal::Address root_or_zero = |
331 | root_mask & GetRootFromOnHeapAddress(heap_object_ptr); |
332 | return root_or_zero + |
333 | static_cast<internal::Address>(static_cast<intptr_t>(value)); |
334 | } |
335 | #endif // V8_COMPRESS_POINTERS |
336 | }; |
337 | |
338 | // Only perform cast check for types derived from v8::Data since |
339 | // other types do not implement the Cast method. |
340 | template <bool PerformCheck> |
341 | struct CastCheck { |
342 | template <class T> |
343 | static void Perform(T* data); |
344 | }; |
345 | |
346 | template <> |
347 | template <class T> |
348 | void CastCheck<true>::Perform(T* data) { |
349 | T::Cast(data); |
350 | } |
351 | |
352 | template <> |
353 | template <class T> |
354 | void CastCheck<false>::Perform(T* data) {} |
355 | |
356 | template <class T> |
357 | V8_INLINE void PerformCastCheck(T* data) { |
358 | CastCheck<std::is_base_of<Data, T>::value>::Perform(data); |
359 | } |
360 | |
361 | // {obj} must be the raw tagged pointer representation of a HeapObject |
362 | // that's guaranteed to never be in ReadOnlySpace. |
363 | V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj); |
364 | |
365 | // Returns if we need to throw when an error occurs. This infers the language |
366 | // mode based on the current context and the closure. This returns true if the |
367 | // language mode is strict. |
368 | V8_EXPORT bool ShouldThrowOnError(v8::internal::Isolate* isolate); |
369 | |
370 | } // namespace internal |
371 | } // namespace v8 |
372 | |
373 | #endif // INCLUDE_V8_INTERNAL_H_ |
374 | |