// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_MAP_INL_H_
#define V8_OBJECTS_MAP_INL_H_

#include "src/objects/map.h"

#include "src/field-type.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/layout-descriptor-inl.h"
#include "src/objects-inl.h"
#include "src/objects/api-callbacks-inl.h"
#include "src/objects/cell-inl.h"
#include "src/objects/descriptor-array-inl.h"
#include "src/objects/instance-type-inl.h"
#include "src/objects/prototype-info-inl.h"
#include "src/objects/shared-function-info.h"
#include "src/objects/templates-inl.h"
#include "src/property.h"
#include "src/transitions.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

OBJECT_CONSTRUCTORS_IMPL(Map, HeapObject)
CAST_ACCESSOR(Map)

DescriptorArray Map::instance_descriptors() const {
  return DescriptorArray::cast(READ_FIELD(*this, kDescriptorsOffset));
}

DescriptorArray Map::synchronized_instance_descriptors() const {
  return DescriptorArray::cast(ACQUIRE_READ_FIELD(*this, kDescriptorsOffset));
}

void Map::set_synchronized_instance_descriptors(DescriptorArray value,
                                                WriteBarrierMode mode) {
  RELEASE_WRITE_FIELD(*this, kDescriptorsOffset, value);
  CONDITIONAL_WRITE_BARRIER(*this, kDescriptorsOffset, value, mode);
}

// A freshly allocated layout descriptor can be set on an existing map.
// We need to use release-store and acquire-load accessor pairs to ensure
// that the concurrent marking thread observes initializing stores of the
// layout descriptor.
SYNCHRONIZED_ACCESSORS_CHECKED(Map, layout_descriptor, LayoutDescriptor,
                               kLayoutDescriptorOffset,
                               FLAG_unbox_double_fields)
WEAK_ACCESSORS(Map, raw_transitions, kTransitionsOrPrototypeInfoOffset)

// |bit_field| fields.
// Concurrent access to |has_prototype_slot| and |has_non_instance_prototype|
// is explicitly whitelisted here. The former is never modified after the map
// is set up, but it is read by the concurrent marker when pointer compression
// is enabled. The latter bit can be modified on live objects.
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field, has_non_instance_prototype,
                    Map::HasNonInstancePrototypeBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_callable, Map::IsCallableBit)
BIT_FIELD_ACCESSORS(Map, bit_field, has_named_interceptor,
                    Map::HasNamedInterceptorBit)
BIT_FIELD_ACCESSORS(Map, bit_field, has_indexed_interceptor,
                    Map::HasIndexedInterceptorBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_undetectable, Map::IsUndetectableBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_access_check_needed,
                    Map::IsAccessCheckNeededBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_constructor, Map::IsConstructorBit)
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field, has_prototype_slot,
                    Map::HasPrototypeSlotBit)

// |bit_field2| fields.
BIT_FIELD_ACCESSORS(Map, bit_field2, is_extensible, Map::IsExtensibleBit)
BIT_FIELD_ACCESSORS(Map, bit_field2, is_prototype_map, Map::IsPrototypeMapBit)
BIT_FIELD_ACCESSORS(Map, bit_field2, is_in_retained_map_list,
                    Map::IsInRetainedMapListBit)

// |bit_field3| fields.
BIT_FIELD_ACCESSORS(Map, bit_field3, owns_descriptors, Map::OwnsDescriptorsBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, has_hidden_prototype,
                    Map::HasHiddenPrototypeBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_deprecated, Map::IsDeprecatedBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_migration_target,
                    Map::IsMigrationTargetBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_immutable_proto,
                    Map::IsImmutablePrototypeBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, new_target_is_base,
                    Map::NewTargetIsBaseBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, may_have_interesting_symbols,
                    Map::MayHaveInterestingSymbolsBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, construction_counter,
                    Map::ConstructionCounterBits)

InterceptorInfo Map::GetNamedInterceptor() {
  DCHECK(has_named_interceptor());
  FunctionTemplateInfo info = GetFunctionTemplateInfo();
  return InterceptorInfo::cast(info->GetNamedPropertyHandler());
}

InterceptorInfo Map::GetIndexedInterceptor() {
  DCHECK(has_indexed_interceptor());
  FunctionTemplateInfo info = GetFunctionTemplateInfo();
  return InterceptorInfo::cast(info->GetIndexedPropertyHandler());
}

bool Map::IsMostGeneralFieldType(Representation representation,
                                 FieldType field_type) {
  return !representation.IsHeapObject() || field_type->IsAny();
}

bool Map::CanHaveFastTransitionableElementsKind(InstanceType instance_type) {
  return instance_type == JS_ARRAY_TYPE || instance_type == JS_VALUE_TYPE ||
         instance_type == JS_ARGUMENTS_TYPE;
}

bool Map::CanHaveFastTransitionableElementsKind() const {
  return CanHaveFastTransitionableElementsKind(instance_type());
}

// static
void Map::GeneralizeIfCanHaveTransitionableFastElementsKind(
    Isolate* isolate, InstanceType instance_type, PropertyConstness* constness,
    Representation* representation, Handle<FieldType>* field_type) {
  if (CanHaveFastTransitionableElementsKind(instance_type)) {
    // We don't support propagation of field generalization through elements
    // kind transitions because they are inserted into the transition tree
    // before field transitions. To avoid the complexity of handling such
    // cases, we ensure that all maps with transitionable elements kinds
    // have the most general field type.
    if (representation->IsHeapObject()) {
      // The field type is either already Any or should become Any if it was
      // something else.
      *field_type = FieldType::Any(isolate);
    }
  }
}

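// When FLAG_unbox_double_fields is enabled, in-object double fields are
// stored as raw 64-bit values rather than as pointers to HeapNumbers, and
// the layout descriptor's bitmap records which in-object fields are still
// tagged. Illustrative example: if the field at property index 1 holds a
// raw double, IsTagged(1) is false and this predicate returns true for it.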
bool Map::IsUnboxedDoubleField(FieldIndex index) const {
  if (!FLAG_unbox_double_fields) return false;
  if (index.is_hidden_field() || !index.is_inobject()) return false;
  return !layout_descriptor()->IsTagged(index.property_index());
}

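// Illustrative example (assuming the current limits, kFastPropertiesSoftLimit
// == 12 and kMaxFastProperties == 128): a named store on a map with 4
// in-object properties and 20 mutable fields in total gives
// external == 20 - 4 == 16 <= 128, so the object keeps fast properties.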
bool Map::TooManyFastProperties(StoreOrigin store_origin) const {
  if (UnusedPropertyFields() != 0) return false;
  if (is_prototype_map()) return false;
  if (store_origin == StoreOrigin::kNamed) {
    int limit = Max(kMaxFastProperties, GetInObjectProperties());
    FieldCounts counts = GetFieldCounts();
    // Only count mutable fields so that objects with large numbers of
    // constant functions do not go to dictionary mode. That would be bad
    // because such objects have often been used as modules.
    int external = counts.mutable_count() - GetInObjectProperties();
    return external > limit || counts.GetTotal() > kMaxNumberOfDescriptors;
  } else {
    int limit = Max(kFastPropertiesSoftLimit, GetInObjectProperties());
    int external = NumberOfFields() - GetInObjectProperties();
    return external > limit;
  }
}

PropertyDetails Map::GetLastDescriptorDetails() const {
  return instance_descriptors()->GetDetails(LastAdded());
}

int Map::LastAdded() const {
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK_GT(number_of_own_descriptors, 0);
  return number_of_own_descriptors - 1;
}

int Map::NumberOfOwnDescriptors() const {
  return NumberOfOwnDescriptorsBits::decode(bit_field3());
}

void Map::SetNumberOfOwnDescriptors(int number) {
  DCHECK_LE(number, instance_descriptors()->number_of_descriptors());
  CHECK_LE(static_cast<unsigned>(number),
           static_cast<unsigned>(kMaxNumberOfDescriptors));
  set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
}

int Map::EnumLength() const { return EnumLengthBits::decode(bit_field3()); }

void Map::SetEnumLength(int length) {
  if (length != kInvalidEnumCacheSentinel) {
    DCHECK_LE(length, NumberOfOwnDescriptors());
    CHECK_LE(static_cast<unsigned>(length),
             static_cast<unsigned>(kMaxNumberOfDescriptors));
  }
  set_bit_field3(EnumLengthBits::update(bit_field3(), length));
}

FixedArrayBase Map::GetInitialElements() const {
  FixedArrayBase result;
  if (has_fast_elements() || has_fast_string_wrapper_elements()) {
    result = GetReadOnlyRoots().empty_fixed_array();
  } else if (has_fast_sloppy_arguments_elements()) {
    result = GetReadOnlyRoots().empty_sloppy_arguments_elements();
  } else if (has_fixed_typed_array_elements()) {
    result =
        GetReadOnlyRoots().EmptyFixedTypedArrayForTypedArray(elements_kind());
  } else if (has_dictionary_elements()) {
    result = GetReadOnlyRoots().empty_slow_element_dictionary();
  } else {
    UNREACHABLE();
  }
  DCHECK(!ObjectInYoungGeneration(result));
  return result;
}

VisitorId Map::visitor_id() const {
  return static_cast<VisitorId>(
      RELAXED_READ_BYTE_FIELD(*this, kVisitorIdOffset));
}

void Map::set_visitor_id(VisitorId id) {
  CHECK_LT(static_cast<unsigned>(id), 256);
  RELAXED_WRITE_BYTE_FIELD(*this, kVisitorIdOffset, static_cast<byte>(id));
}

int Map::instance_size_in_words() const {
  return RELAXED_READ_BYTE_FIELD(*this, kInstanceSizeInWordsOffset);
}

void Map::set_instance_size_in_words(int value) {
  RELAXED_WRITE_BYTE_FIELD(*this, kInstanceSizeInWordsOffset,
                           static_cast<byte>(value));
}

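// The instance size is stored as a byte counting tagged-pointer-sized words,
// so with kTaggedSize == 8, for example, the largest representable instance
// is 255 * 8 == 2040 bytes.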
int Map::instance_size() const {
  return instance_size_in_words() << kTaggedSizeLog2;
}

void Map::set_instance_size(int value) {
  CHECK(IsAligned(value, kTaggedSize));
  value >>= kTaggedSizeLog2;
  CHECK_LT(static_cast<unsigned>(value), 256);
  set_instance_size_in_words(value);
}

int Map::inobject_properties_start_or_constructor_function_index() const {
  return RELAXED_READ_BYTE_FIELD(
      *this, kInObjectPropertiesStartOrConstructorFunctionIndexOffset);
}

void Map::set_inobject_properties_start_or_constructor_function_index(
    int value) {
  CHECK_LT(static_cast<unsigned>(value), 256);
  RELAXED_WRITE_BYTE_FIELD(
      *this, kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
      static_cast<byte>(value));
}

int Map::GetInObjectPropertiesStartInWords() const {
  DCHECK(IsJSObjectMap());
  return inobject_properties_start_or_constructor_function_index();
}

void Map::SetInObjectPropertiesStartInWords(int value) {
  CHECK(IsJSObjectMap());
  set_inobject_properties_start_or_constructor_function_index(value);
}

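// In-object property slots occupy the tail of the instance, so their count
// is the instance size minus the properties start, both in words.
// Illustrative example: instance_size_in_words() == 10 with a start of 7
// leaves 3 in-object property slots.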
int Map::GetInObjectProperties() const {
  DCHECK(IsJSObjectMap());
  return instance_size_in_words() - GetInObjectPropertiesStartInWords();
}

int Map::GetConstructorFunctionIndex() const {
  DCHECK(IsPrimitiveMap());
  return inobject_properties_start_or_constructor_function_index();
}

void Map::SetConstructorFunctionIndex(int value) {
  CHECK(IsPrimitiveMap());
  set_inobject_properties_start_or_constructor_function_index(value);
}

int Map::GetInObjectPropertyOffset(int index) const {
  return (GetInObjectPropertiesStartInWords() + index) * kTaggedSize;
}

Handle<Map> Map::AddMissingTransitionsForTesting(
    Isolate* isolate, Handle<Map> split_map,
    Handle<DescriptorArray> descriptors,
    Handle<LayoutDescriptor> full_layout_descriptor) {
  return AddMissingTransitions(isolate, split_map, descriptors,
                               full_layout_descriptor);
}

InstanceType Map::instance_type() const {
  return static_cast<InstanceType>(
      READ_UINT16_FIELD(*this, kInstanceTypeOffset));
}

void Map::set_instance_type(InstanceType value) {
  WRITE_UINT16_FIELD(*this, kInstanceTypeOffset, value);
}

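// The used_or_unused_instance_size_in_words byte has a dual encoding: a
// value >= JSObject::kFieldsAdded is the used instance size in words (an
// offset from the object's start), while a smaller value is the slack in
// the out-of-object property array. Illustrative example: with
// instance_size_in_words() == 10, a stored value of 8 means two unused
// in-object fields, whereas a stored value of 2 means two unused slots in
// the property backing store.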
int Map::UnusedPropertyFields() const {
  int value = used_or_unused_instance_size_in_words();
  DCHECK_IMPLIES(!IsJSObjectMap(), value == 0);
  int unused;
  if (value >= JSObject::kFieldsAdded) {
    unused = instance_size_in_words() - value;
  } else {
    // For out-of-object properties, the
    // "used_or_unused_instance_size_in_words" byte encodes the slack in the
    // property array.
    unused = value;
  }
  return unused;
}

int Map::UnusedInObjectProperties() const {
  // Like Map::UnusedPropertyFields(), but returns 0 for out-of-object
  // properties.
  int value = used_or_unused_instance_size_in_words();
  DCHECK_IMPLIES(!IsJSObjectMap(), value == 0);
  if (value >= JSObject::kFieldsAdded) {
    return instance_size_in_words() - value;
  }
  return 0;
}

int Map::used_or_unused_instance_size_in_words() const {
  return RELAXED_READ_BYTE_FIELD(*this, kUsedOrUnusedInstanceSizeInWordsOffset);
}

void Map::set_used_or_unused_instance_size_in_words(int value) {
  CHECK_LE(static_cast<unsigned>(value), 255);
  RELAXED_WRITE_BYTE_FIELD(*this, kUsedOrUnusedInstanceSizeInWordsOffset,
                           static_cast<byte>(value));
}

int Map::UsedInstanceSize() const {
  int words = used_or_unused_instance_size_in_words();
  if (words < JSObject::kFieldsAdded) {
    // All in-object properties are used, and the value tracks the slack in
    // the property array.
    return instance_size();
  }
  return words * kTaggedSize;
}

void Map::SetInObjectUnusedPropertyFields(int value) {
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kTaggedSize);
  if (!IsJSObjectMap()) {
    CHECK_EQ(0, value);
    set_used_or_unused_instance_size_in_words(0);
    DCHECK_EQ(0, UnusedPropertyFields());
    return;
  }
  CHECK_LE(0, value);
  DCHECK_LE(value, GetInObjectProperties());
  int used_inobject_properties = GetInObjectProperties() - value;
  set_used_or_unused_instance_size_in_words(
      GetInObjectPropertyOffset(used_inobject_properties) / kTaggedSize);
  DCHECK_EQ(value, UnusedPropertyFields());
}

void Map::SetOutOfObjectUnusedPropertyFields(int value) {
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kTaggedSize);
  CHECK_LT(static_cast<unsigned>(value), JSObject::kFieldsAdded);
  // For out-of-object properties, the "used_or_unused_instance_size_in_words"
  // byte encodes the slack in the property array.
  set_used_or_unused_instance_size_in_words(value);
  DCHECK_EQ(value, UnusedPropertyFields());
}

void Map::CopyUnusedPropertyFields(Map map) {
  set_used_or_unused_instance_size_in_words(
      map->used_or_unused_instance_size_in_words());
  DCHECK_EQ(UnusedPropertyFields(), map->UnusedPropertyFields());
}

void Map::CopyUnusedPropertyFieldsAdjustedForInstanceSize(Map map) {
  int value = map->used_or_unused_instance_size_in_words();
  if (value >= JSObject::kFieldsAdded) {
    // Unused in-object fields. Adjust the offset from the object's start
    // so it matches the distance to the object's end.
    value += instance_size_in_words() - map->instance_size_in_words();
  }
  set_used_or_unused_instance_size_in_words(value);
  DCHECK_EQ(UnusedPropertyFields(), map->UnusedPropertyFields());
}

void Map::AccountAddedPropertyField() {
  // Update the used instance size and the number of unused property fields.
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kTaggedSize);
#ifdef DEBUG
  int new_unused = UnusedPropertyFields() - 1;
  if (new_unused < 0) new_unused += JSObject::kFieldsAdded;
#endif
  int value = used_or_unused_instance_size_in_words();
  if (value >= JSObject::kFieldsAdded) {
    if (value == instance_size_in_words()) {
      AccountAddedOutOfObjectPropertyField(0);
    } else {
      // The property is added in-object, so simply increment the counter.
      set_used_or_unused_instance_size_in_words(value + 1);
    }
  } else {
    AccountAddedOutOfObjectPropertyField(value);
  }
  DCHECK_EQ(new_unused, UnusedPropertyFields());
}

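// The property array grows in chunks of JSObject::kFieldsAdded slots, so the
// recorded slack wraps modulo kFieldsAdded: consuming the last free slot
// implies a freshly allocated chunk with kFieldsAdded - 1 slots still free.
// Illustrative example: with kFieldsAdded == 3, a slack of 0 becomes 2 after
// adding a property.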
void Map::AccountAddedOutOfObjectPropertyField(int unused_in_property_array) {
  unused_in_property_array--;
  if (unused_in_property_array < 0) {
    unused_in_property_array += JSObject::kFieldsAdded;
  }
  CHECK_LT(static_cast<unsigned>(unused_in_property_array),
           JSObject::kFieldsAdded);
  set_used_or_unused_instance_size_in_words(unused_in_property_array);
  DCHECK_EQ(unused_in_property_array, UnusedPropertyFields());
}

byte Map::bit_field() const { return READ_BYTE_FIELD(*this, kBitFieldOffset); }

void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(*this, kBitFieldOffset, value);
}

byte Map::relaxed_bit_field() const {
  return RELAXED_READ_BYTE_FIELD(*this, kBitFieldOffset);
}

void Map::set_relaxed_bit_field(byte value) {
  RELAXED_WRITE_BYTE_FIELD(*this, kBitFieldOffset, value);
}

byte Map::bit_field2() const {
  return READ_BYTE_FIELD(*this, kBitField2Offset);
}

void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(*this, kBitField2Offset, value);
}

bool Map::is_abandoned_prototype_map() const {
  return is_prototype_map() && !owns_descriptors();
}

bool Map::should_be_fast_prototype_map() const {
  if (!prototype_info()->IsPrototypeInfo()) return false;
  return PrototypeInfo::cast(prototype_info())->should_be_fast_map();
}

void Map::set_elements_kind(ElementsKind elements_kind) {
  CHECK_LT(static_cast<int>(elements_kind), kElementsKindCount);
  set_bit_field2(Map::ElementsKindBits::update(bit_field2(), elements_kind));
}

ElementsKind Map::elements_kind() const {
  return Map::ElementsKindBits::decode(bit_field2());
}

bool Map::has_fast_smi_elements() const {
  return IsSmiElementsKind(elements_kind());
}

bool Map::has_fast_object_elements() const {
  return IsObjectElementsKind(elements_kind());
}

bool Map::has_fast_smi_or_object_elements() const {
  return IsSmiOrObjectElementsKind(elements_kind());
}

bool Map::has_fast_double_elements() const {
  return IsDoubleElementsKind(elements_kind());
}

bool Map::has_fast_elements() const {
  return IsFastElementsKind(elements_kind());
}

bool Map::has_sloppy_arguments_elements() const {
  return IsSloppyArgumentsElementsKind(elements_kind());
}

bool Map::has_fast_sloppy_arguments_elements() const {
  return elements_kind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
}

bool Map::has_fast_string_wrapper_elements() const {
  return elements_kind() == FAST_STRING_WRAPPER_ELEMENTS;
}

bool Map::has_fixed_typed_array_elements() const {
  return IsFixedTypedArrayElementsKind(elements_kind());
}

bool Map::has_dictionary_elements() const {
  return IsDictionaryElementsKind(elements_kind());
}

bool Map::is_frozen_or_sealed_elements() const {
  return IsFrozenOrSealedElementsKind(elements_kind());
}

void Map::set_is_dictionary_map(bool value) {
  uint32_t new_bit_field3 = IsDictionaryMapBit::update(bit_field3(), value);
  new_bit_field3 = IsUnstableBit::update(new_bit_field3, value);
  set_bit_field3(new_bit_field3);
}

bool Map::is_dictionary_map() const {
  return IsDictionaryMapBit::decode(bit_field3());
}

void Map::mark_unstable() {
  set_bit_field3(IsUnstableBit::update(bit_field3(), true));
}

bool Map::is_stable() const { return !IsUnstableBit::decode(bit_field3()); }

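// A map can be deprecated if any of its fields could still be generalized:
// None, Smi, Double and HeapObject representations can all be widened, and
// in-place (kDescriptor) data properties can be turned into field
// properties.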
bool Map::CanBeDeprecated() const {
  int descriptor = LastAdded();
  for (int i = 0; i <= descriptor; i++) {
    PropertyDetails details = instance_descriptors()->GetDetails(i);
    if (details.representation().IsNone()) return true;
    if (details.representation().IsSmi()) return true;
    if (details.representation().IsDouble()) return true;
    if (details.representation().IsHeapObject()) return true;
    if (details.kind() == kData && details.location() == kDescriptor) {
      return true;
    }
  }
  return false;
}

void Map::NotifyLeafMapLayoutChange(Isolate* isolate) {
  if (is_stable()) {
    mark_unstable();
    dependent_code()->DeoptimizeDependentCodeGroup(
        isolate, DependentCode::kPrototypeCheckGroup);
  }
}

bool Map::CanTransition() const {
  // Only JSObject and subtypes have map transitions and back pointers.
  return InstanceTypeChecker::IsJSObject(instance_type());
}

#define DEF_TESTER(Type, ...)                              \
  bool Map::Is##Type##Map() const {                        \
    return InstanceTypeChecker::Is##Type(instance_type()); \
  }
INSTANCE_TYPE_CHECKERS(DEF_TESTER)
#undef DEF_TESTER

bool Map::IsBooleanMap() const {
  return *this == GetReadOnlyRoots().boolean_map();
}

bool Map::IsNullOrUndefinedMap() const {
  return *this == GetReadOnlyRoots().null_map() ||
         *this == GetReadOnlyRoots().undefined_map();
}

bool Map::IsPrimitiveMap() const {
  return instance_type() <= LAST_PRIMITIVE_TYPE;
}

HeapObject Map::prototype() const {
  return HeapObject::cast(READ_FIELD(*this, kPrototypeOffset));
}

void Map::set_prototype(HeapObject value, WriteBarrierMode mode) {
  DCHECK(value->IsNull() || value->IsJSReceiver());
  WRITE_FIELD(*this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(*this, kPrototypeOffset, value, mode);
}

LayoutDescriptor Map::layout_descriptor_gc_safe() const {
  DCHECK(FLAG_unbox_double_fields);
  // The loaded value can be dereferenced on a background thread to load the
  // bitmap. We need an acquire load here to ensure that the bitmap
  // initializing stores are also visible to the background thread.
  Object layout_desc = ACQUIRE_READ_FIELD(*this, kLayoutDescriptorOffset);
  return LayoutDescriptor::cast_gc_safe(layout_desc);
}

bool Map::HasFastPointerLayout() const {
  DCHECK(FLAG_unbox_double_fields);
  // The loaded value is used only for a Smi check and is not dereferenced,
  // so a relaxed load is safe.
  Object layout_desc = RELAXED_READ_FIELD(*this, kLayoutDescriptorOffset);
  return LayoutDescriptor::IsFastPointerLayout(layout_desc);
}

void Map::UpdateDescriptors(Isolate* isolate, DescriptorArray descriptors,
                            LayoutDescriptor layout_desc,
                            int number_of_own_descriptors) {
  SetInstanceDescriptors(isolate, descriptors, number_of_own_descriptors);
  if (FLAG_unbox_double_fields) {
    if (layout_descriptor()->IsSlowLayout()) {
      set_layout_descriptor(layout_desc);
    }
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(*this));
      CHECK_EQ(Map::GetVisitorId(*this), visitor_id());
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(*this));
    DCHECK(visitor_id() == Map::GetVisitorId(*this));
#endif
  }
}

void Map::InitializeDescriptors(Isolate* isolate, DescriptorArray descriptors,
                                LayoutDescriptor layout_desc) {
  SetInstanceDescriptors(isolate, descriptors,
                         descriptors->number_of_descriptors());

  if (FLAG_unbox_double_fields) {
    set_layout_descriptor(layout_desc);
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(*this));
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(*this));
#endif
    set_visitor_id(Map::GetVisitorId(*this));
  }
}

void Map::set_bit_field3(uint32_t bits) {
  RELAXED_WRITE_UINT32_FIELD(*this, kBitField3Offset, bits);
}

uint32_t Map::bit_field3() const {
  return RELAXED_READ_UINT32_FIELD(*this, kBitField3Offset);
}

void Map::clear_padding() {
  if (FIELD_SIZE(kOptionalPaddingOffset) == 0) return;
  DCHECK_EQ(4, FIELD_SIZE(kOptionalPaddingOffset));
  memset(reinterpret_cast<void*>(address() + kOptionalPaddingOffset), 0,
         FIELD_SIZE(kOptionalPaddingOffset));
}

LayoutDescriptor Map::GetLayoutDescriptor() const {
  return FLAG_unbox_double_fields ? layout_descriptor()
                                  : LayoutDescriptor::FastPointerLayout();
}

void Map::AppendDescriptor(Isolate* isolate, Descriptor* desc) {
  DescriptorArray descriptors = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
  {
    // The following two operations need to happen before the marking write
    // barrier.
    descriptors->Append(desc);
    SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
    MarkingBarrierForDescriptorArray(isolate->heap(), *this, descriptors,
                                     number_of_own_descriptors + 1);
  }
  // Properly mark the map if the {desc} is an "interesting symbol".
  if (desc->GetKey()->IsInterestingSymbol()) {
    set_may_have_interesting_symbols(true);
  }
  PropertyDetails details = desc->GetDetails();
  if (details.location() == kField) {
    DCHECK_GT(UnusedPropertyFields(), 0);
    AccountAddedPropertyField();
  }

  // This function does not support appending double field descriptors and
  // it should never try to (otherwise, the layout descriptor must be updated
  // too).
  DCHECK(details.location() != kField || !details.representation().IsDouble());
}

HeapObject Map::GetBackPointer() const {
  Object object = constructor_or_backpointer();
  if (object->IsMap()) {
    return Map::cast(object);
  }
  return GetReadOnlyRoots().undefined_value();
}

Map Map::ElementsTransitionMap() {
  DisallowHeapAllocation no_gc;
  // TODO(delphick): While it's safe to pass nullptr for Isolate* here as
  // SearchSpecial doesn't need it, this is really ugly. Perhaps factor out a
  // base class for methods not requiring an Isolate?
  return TransitionsAccessor(nullptr, *this, &no_gc)
      .SearchSpecial(GetReadOnlyRoots().elements_transition_symbol());
}

Object Map::prototype_info() const {
  DCHECK(is_prototype_map());
  return READ_FIELD(*this, Map::kTransitionsOrPrototypeInfoOffset);
}

void Map::set_prototype_info(Object value, WriteBarrierMode mode) {
  CHECK(is_prototype_map());
  WRITE_FIELD(*this, Map::kTransitionsOrPrototypeInfoOffset, value);
  CONDITIONAL_WRITE_BARRIER(*this, Map::kTransitionsOrPrototypeInfoOffset,
                            value, mode);
}

void Map::SetBackPointer(Object value, WriteBarrierMode mode) {
  CHECK_GE(instance_type(), FIRST_JS_RECEIVER_TYPE);
  CHECK(value->IsMap());
  CHECK(GetBackPointer()->IsUndefined());
  CHECK_IMPLIES(value->IsMap(), Map::cast(value)->GetConstructor() ==
                                    constructor_or_backpointer());
  set_constructor_or_backpointer(value, mode);
}

ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, prototype_validity_cell, Object, kPrototypeValidityCellOffset)
ACCESSORS(Map, constructor_or_backpointer, Object,
          kConstructorOrBackPointerOffset)

bool Map::IsPrototypeValidityCellValid() const {
  Object validity_cell = prototype_validity_cell();
  Object value = validity_cell->IsSmi() ? Smi::cast(validity_cell)
                                        : Cell::cast(validity_cell)->value();
  return value == Smi::FromInt(Map::kPrototypeChainValid);
}

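// The constructor_or_backpointer slot holds either the constructor (on an
// initial map) or a back pointer to the parent map in the transition tree,
// so following back pointers always ends at an initial map, which stores the
// actual constructor. Illustrative chain: map_c -> map_b -> map_a ->
// JSFunction.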
Object Map::GetConstructor() const {
  Object maybe_constructor = constructor_or_backpointer();
  // Follow any back pointers.
  while (maybe_constructor->IsMap()) {
    maybe_constructor =
        Map::cast(maybe_constructor)->constructor_or_backpointer();
  }
  return maybe_constructor;
}

FunctionTemplateInfo Map::GetFunctionTemplateInfo() const {
  Object constructor = GetConstructor();
  if (constructor->IsJSFunction()) {
    DCHECK(JSFunction::cast(constructor)->shared()->IsApiFunction());
    return JSFunction::cast(constructor)->shared()->get_api_func_data();
  }
  DCHECK(constructor->IsFunctionTemplateInfo());
  return FunctionTemplateInfo::cast(constructor);
}

void Map::SetConstructor(Object constructor, WriteBarrierMode mode) {
  // Never overwrite a back pointer with a constructor.
  CHECK(!constructor_or_backpointer()->IsMap());
  set_constructor_or_backpointer(constructor, mode);
}

Handle<Map> Map::CopyInitialMap(Isolate* isolate, Handle<Map> map) {
  return CopyInitialMap(isolate, map, map->instance_size(),
                        map->GetInObjectProperties(),
                        map->UnusedPropertyFields());
}

bool Map::IsInobjectSlackTrackingInProgress() const {
  return construction_counter() != Map::kNoSlackTracking;
}

void Map::InobjectSlackTrackingStep(Isolate* isolate) {
  // Slack tracking should only be performed on an initial map.
  DCHECK(GetBackPointer()->IsUndefined());
  if (!IsInobjectSlackTrackingInProgress()) return;
  int counter = construction_counter();
  set_construction_counter(counter - 1);
  if (counter == kSlackTrackingCounterEnd) {
    CompleteInobjectSlackTracking(isolate);
  }
}

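// Reserves slack proportional to the old size (a quarter of it), capped by
// the distance to size_limit. Illustrative example: old_size == 8 and
// size_limit == 20 gives Min(12, 2) == 2 slack slots; sizes below 4 always
// get exactly one.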
int Map::SlackForArraySize(int old_size, int size_limit) {
  const int max_slack = size_limit - old_size;
  CHECK_LE(0, max_slack);
  if (old_size < 4) {
    DCHECK_LE(1, max_slack);
    return 1;
  }
  return Min(max_slack, old_size / 4);
}

int Map::InstanceSizeFromSlack(int slack) const {
  return instance_size() - slack * kTaggedSize;
}

OBJECT_CONSTRUCTORS_IMPL(NormalizedMapCache, WeakFixedArray)
CAST_ACCESSOR(NormalizedMapCache)
NEVER_READ_ONLY_SPACE_IMPL(NormalizedMapCache)

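// The cache is direct-mapped: the map's hash modulo kEntries selects the
// single slot a given map can occupy, so colliding entries simply overwrite
// each other on insertion.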
int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;
}

bool HeapObject::IsNormalizedMapCache() const {
  if (!IsWeakFixedArray()) return false;
  if (WeakFixedArray::cast(*this)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
  return true;
}

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_MAP_INL_H_