// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_MAP_INL_H_
#define V8_OBJECTS_MAP_INL_H_

#include "src/objects/map.h"

#include "src/field-type.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/layout-descriptor-inl.h"
#include "src/objects-inl.h"
#include "src/objects/api-callbacks-inl.h"
#include "src/objects/cell-inl.h"
#include "src/objects/descriptor-array-inl.h"
#include "src/objects/instance-type-inl.h"
#include "src/objects/prototype-info-inl.h"
#include "src/objects/shared-function-info.h"
#include "src/objects/templates-inl.h"
#include "src/property.h"
#include "src/transitions.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

OBJECT_CONSTRUCTORS_IMPL(Map, HeapObject)
CAST_ACCESSOR(Map)

DescriptorArray Map::instance_descriptors() const {
  return DescriptorArray::cast(READ_FIELD(*this, kDescriptorsOffset));
}

DescriptorArray Map::synchronized_instance_descriptors() const {
  return DescriptorArray::cast(ACQUIRE_READ_FIELD(*this, kDescriptorsOffset));
}

void Map::set_synchronized_instance_descriptors(DescriptorArray value,
                                                WriteBarrierMode mode) {
  RELEASE_WRITE_FIELD(*this, kDescriptorsOffset, value);
  CONDITIONAL_WRITE_BARRIER(*this, kDescriptorsOffset, value, mode);
}

// A freshly allocated layout descriptor can be set on an existing map.
// We need to use release-store and acquire-load accessor pairs to ensure
// that the concurrent marking thread observes initializing stores of the
// layout descriptor.
SYNCHRONIZED_ACCESSORS_CHECKED(Map, layout_descriptor, LayoutDescriptor,
                               kLayoutDescriptorOffset,
                               FLAG_unbox_double_fields)
WEAK_ACCESSORS(Map, raw_transitions, kTransitionsOrPrototypeInfoOffset)

// |bit_field| fields.
// Concurrent access to |has_prototype_slot| and |has_non_instance_prototype|
// is explicitly whitelisted here. The former is never modified after the map
// is set up, but it is read by the concurrent marker when pointer compression
// is enabled. The latter bit can be modified on live objects.
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field, has_non_instance_prototype,
                    Map::HasNonInstancePrototypeBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_callable, Map::IsCallableBit)
BIT_FIELD_ACCESSORS(Map, bit_field, has_named_interceptor,
                    Map::HasNamedInterceptorBit)
BIT_FIELD_ACCESSORS(Map, bit_field, has_indexed_interceptor,
                    Map::HasIndexedInterceptorBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_undetectable, Map::IsUndetectableBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_access_check_needed,
                    Map::IsAccessCheckNeededBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_constructor, Map::IsConstructorBit)
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field, has_prototype_slot,
                    Map::HasPrototypeSlotBit)

// |bit_field2| fields.
BIT_FIELD_ACCESSORS(Map, bit_field2, is_extensible, Map::IsExtensibleBit)
BIT_FIELD_ACCESSORS(Map, bit_field2, is_prototype_map, Map::IsPrototypeMapBit)
BIT_FIELD_ACCESSORS(Map, bit_field2, is_in_retained_map_list,
                    Map::IsInRetainedMapListBit)

// |bit_field3| fields.
BIT_FIELD_ACCESSORS(Map, bit_field3, owns_descriptors, Map::OwnsDescriptorsBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, has_hidden_prototype,
                    Map::HasHiddenPrototypeBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_deprecated, Map::IsDeprecatedBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_migration_target,
                    Map::IsMigrationTargetBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_immutable_proto,
                    Map::IsImmutablePrototypeBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, new_target_is_base,
                    Map::NewTargetIsBaseBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, may_have_interesting_symbols,
                    Map::MayHaveInterestingSymbolsBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, construction_counter,
                    Map::ConstructionCounterBits)

InterceptorInfo Map::GetNamedInterceptor() {
  DCHECK(has_named_interceptor());
  FunctionTemplateInfo info = GetFunctionTemplateInfo();
  return InterceptorInfo::cast(info->GetNamedPropertyHandler());
}

InterceptorInfo Map::GetIndexedInterceptor() {
  DCHECK(has_indexed_interceptor());
  FunctionTemplateInfo info = GetFunctionTemplateInfo();
  return InterceptorInfo::cast(info->GetIndexedPropertyHandler());
}

bool Map::IsMostGeneralFieldType(Representation representation,
                                 FieldType field_type) {
  return !representation.IsHeapObject() || field_type->IsAny();
}

bool Map::CanHaveFastTransitionableElementsKind(InstanceType instance_type) {
  return instance_type == JS_ARRAY_TYPE || instance_type == JS_VALUE_TYPE ||
         instance_type == JS_ARGUMENTS_TYPE;
}

bool Map::CanHaveFastTransitionableElementsKind() const {
  return CanHaveFastTransitionableElementsKind(instance_type());
}

// static
void Map::GeneralizeIfCanHaveTransitionableFastElementsKind(
    Isolate* isolate, InstanceType instance_type, PropertyConstness* constness,
    Representation* representation, Handle<FieldType>* field_type) {
  if (CanHaveFastTransitionableElementsKind(instance_type)) {
    // We don't support propagation of field generalization through elements
    // kind transitions because they are inserted into the transition tree
    // before field transitions. To avoid the complexity of handling such
    // cases, we ensure that all maps with transitionable elements kinds
    // have the most general field type.
    if (representation->IsHeapObject()) {
      // The field type is either already Any or should become Any if it was
      // something else.
      *field_type = FieldType::Any(isolate);
    }
  }
}

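// Returns true when the in-object field at |index| stores a raw (unboxed)
// double rather than a tagged pointer. This is only possible when double
// field unboxing is enabled and the field lives in-object.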
bool Map::IsUnboxedDoubleField(FieldIndex index) const {
  if (!FLAG_unbox_double_fields) return false;
  if (index.is_hidden_field() || !index.is_inobject()) return false;
  return !layout_descriptor()->IsTagged(index.property_index());
}

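// Heuristic used to decide whether adding yet another fast property should
// push the object into dictionary (slow) property mode, based on how many
// fields would have to live outside the object.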
bool Map::TooManyFastProperties(StoreOrigin store_origin) const {
  if (UnusedPropertyFields() != 0) return false;
  if (is_prototype_map()) return false;
  if (store_origin == StoreOrigin::kNamed) {
    int limit = Max(kMaxFastProperties, GetInObjectProperties());
    FieldCounts counts = GetFieldCounts();
    // Only count mutable fields so that objects with large numbers of
    // constant functions do not go to dictionary mode. That would be bad
    // because such objects have often been used as modules.
    int external = counts.mutable_count() - GetInObjectProperties();
    return external > limit || counts.GetTotal() > kMaxNumberOfDescriptors;
  } else {
    int limit = Max(kFastPropertiesSoftLimit, GetInObjectProperties());
    int external = NumberOfFields() - GetInObjectProperties();
    return external > limit;
  }
}

PropertyDetails Map::GetLastDescriptorDetails() const {
  return instance_descriptors()->GetDetails(LastAdded());
}

int Map::LastAdded() const {
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK_GT(number_of_own_descriptors, 0);
  return number_of_own_descriptors - 1;
}

int Map::NumberOfOwnDescriptors() const {
  return NumberOfOwnDescriptorsBits::decode(bit_field3());
}

void Map::SetNumberOfOwnDescriptors(int number) {
  DCHECK_LE(number, instance_descriptors()->number_of_descriptors());
  CHECK_LE(static_cast<unsigned>(number),
           static_cast<unsigned>(kMaxNumberOfDescriptors));
  set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
}

int Map::EnumLength() const { return EnumLengthBits::decode(bit_field3()); }

void Map::SetEnumLength(int length) {
  if (length != kInvalidEnumCacheSentinel) {
    DCHECK_LE(length, NumberOfOwnDescriptors());
    CHECK_LE(static_cast<unsigned>(length),
             static_cast<unsigned>(kMaxNumberOfDescriptors));
  }
  set_bit_field3(EnumLengthBits::update(bit_field3(), length));
}

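// Returns the canonical empty backing store to use for objects with this
// map's elements kind.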
FixedArrayBase Map::GetInitialElements() const {
  FixedArrayBase result;
  if (has_fast_elements() || has_fast_string_wrapper_elements()) {
    result = GetReadOnlyRoots().empty_fixed_array();
  } else if (has_fast_sloppy_arguments_elements()) {
    result = GetReadOnlyRoots().empty_sloppy_arguments_elements();
  } else if (has_fixed_typed_array_elements()) {
    result =
        GetReadOnlyRoots().EmptyFixedTypedArrayForTypedArray(elements_kind());
  } else if (has_dictionary_elements()) {
    result = GetReadOnlyRoots().empty_slow_element_dictionary();
  } else {
    UNREACHABLE();
  }
  DCHECK(!ObjectInYoungGeneration(result));
  return result;
}

VisitorId Map::visitor_id() const {
  return static_cast<VisitorId>(
      RELAXED_READ_BYTE_FIELD(*this, kVisitorIdOffset));
}

void Map::set_visitor_id(VisitorId id) {
  CHECK_LT(static_cast<unsigned>(id), 256);
  RELAXED_WRITE_BYTE_FIELD(*this, kVisitorIdOffset, static_cast<byte>(id));
}

int Map::instance_size_in_words() const {
  return RELAXED_READ_BYTE_FIELD(*this, kInstanceSizeInWordsOffset);
}

void Map::set_instance_size_in_words(int value) {
  RELAXED_WRITE_BYTE_FIELD(*this, kInstanceSizeInWordsOffset,
                           static_cast<byte>(value));
}

int Map::instance_size() const {
  return instance_size_in_words() << kTaggedSizeLog2;
}

void Map::set_instance_size(int value) {
  CHECK(IsAligned(value, kTaggedSize));
  value >>= kTaggedSizeLog2;
  CHECK_LT(static_cast<unsigned>(value), 256);
  set_instance_size_in_words(value);
}

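// This byte is overloaded: for JSObject maps it holds the word offset at
// which in-object properties start, and for primitive maps it holds the
// constructor function index.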
int Map::inobject_properties_start_or_constructor_function_index() const {
  return RELAXED_READ_BYTE_FIELD(
      *this, kInObjectPropertiesStartOrConstructorFunctionIndexOffset);
}

void Map::set_inobject_properties_start_or_constructor_function_index(
    int value) {
  CHECK_LT(static_cast<unsigned>(value), 256);
  RELAXED_WRITE_BYTE_FIELD(
      *this, kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
      static_cast<byte>(value));
}

int Map::GetInObjectPropertiesStartInWords() const {
  DCHECK(IsJSObjectMap());
  return inobject_properties_start_or_constructor_function_index();
}

void Map::SetInObjectPropertiesStartInWords(int value) {
  CHECK(IsJSObjectMap());
  set_inobject_properties_start_or_constructor_function_index(value);
}

int Map::GetInObjectProperties() const {
  DCHECK(IsJSObjectMap());
  return instance_size_in_words() - GetInObjectPropertiesStartInWords();
}

int Map::GetConstructorFunctionIndex() const {
  DCHECK(IsPrimitiveMap());
  return inobject_properties_start_or_constructor_function_index();
}

void Map::SetConstructorFunctionIndex(int value) {
  CHECK(IsPrimitiveMap());
  set_inobject_properties_start_or_constructor_function_index(value);
}

int Map::GetInObjectPropertyOffset(int index) const {
  return (GetInObjectPropertiesStartInWords() + index) * kTaggedSize;
}

Handle<Map> Map::AddMissingTransitionsForTesting(
    Isolate* isolate, Handle<Map> split_map,
    Handle<DescriptorArray> descriptors,
    Handle<LayoutDescriptor> full_layout_descriptor) {
  return AddMissingTransitions(isolate, split_map, descriptors,
                               full_layout_descriptor);
}

InstanceType Map::instance_type() const {
  return static_cast<InstanceType>(
      READ_UINT16_FIELD(*this, kInstanceTypeOffset));
}

void Map::set_instance_type(InstanceType value) {
  WRITE_UINT16_FIELD(*this, kInstanceTypeOffset, value);
}

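// The used_or_unused_instance_size_in_words byte has two interpretations:
// values >= JSObject::kFieldsAdded give the used instance size in words,
// while smaller values give the slack remaining in the property array.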
int Map::UnusedPropertyFields() const {
  int value = used_or_unused_instance_size_in_words();
  DCHECK_IMPLIES(!IsJSObjectMap(), value == 0);
  int unused;
  if (value >= JSObject::kFieldsAdded) {
    unused = instance_size_in_words() - value;
  } else {
    // For out-of-object properties, the
    // "used_or_unused_instance_size_in_words" byte encodes the slack in the
    // property array.
    unused = value;
  }
  return unused;
}

int Map::UnusedInObjectProperties() const {
  // Like Map::UnusedPropertyFields(), but returns 0 for out-of-object
  // properties.
  int value = used_or_unused_instance_size_in_words();
  DCHECK_IMPLIES(!IsJSObjectMap(), value == 0);
  if (value >= JSObject::kFieldsAdded) {
    return instance_size_in_words() - value;
  }
  return 0;
}

int Map::used_or_unused_instance_size_in_words() const {
  return RELAXED_READ_BYTE_FIELD(*this, kUsedOrUnusedInstanceSizeInWordsOffset);
}

void Map::set_used_or_unused_instance_size_in_words(int value) {
  CHECK_LE(static_cast<unsigned>(value), 255);
  RELAXED_WRITE_BYTE_FIELD(*this, kUsedOrUnusedInstanceSizeInWordsOffset,
                           static_cast<byte>(value));
}

int Map::UsedInstanceSize() const {
  int words = used_or_unused_instance_size_in_words();
  if (words < JSObject::kFieldsAdded) {
    // All in-object properties are used, and this value tracks the slack
    // in the property array.
    return instance_size();
  }
  return words * kTaggedSize;
}

void Map::SetInObjectUnusedPropertyFields(int value) {
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kTaggedSize);
  if (!IsJSObjectMap()) {
    CHECK_EQ(0, value);
    set_used_or_unused_instance_size_in_words(0);
    DCHECK_EQ(0, UnusedPropertyFields());
    return;
  }
  CHECK_LE(0, value);
  DCHECK_LE(value, GetInObjectProperties());
  int used_inobject_properties = GetInObjectProperties() - value;
  set_used_or_unused_instance_size_in_words(
      GetInObjectPropertyOffset(used_inobject_properties) / kTaggedSize);
  DCHECK_EQ(value, UnusedPropertyFields());
}

void Map::SetOutOfObjectUnusedPropertyFields(int value) {
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kTaggedSize);
  CHECK_LT(static_cast<unsigned>(value), JSObject::kFieldsAdded);
  // For out-of-object properties, the
  // "used_or_unused_instance_size_in_words" byte encodes the slack in the
  // property array.
  set_used_or_unused_instance_size_in_words(value);
  DCHECK_EQ(value, UnusedPropertyFields());
}

void Map::CopyUnusedPropertyFields(Map map) {
  set_used_or_unused_instance_size_in_words(
      map->used_or_unused_instance_size_in_words());
  DCHECK_EQ(UnusedPropertyFields(), map->UnusedPropertyFields());
}

void Map::CopyUnusedPropertyFieldsAdjustedForInstanceSize(Map map) {
  int value = map->used_or_unused_instance_size_in_words();
  if (value >= JSValue::kFieldsAdded) {
    // Unused in-object fields. Adjust the offset from the object's start
    // so it matches the distance to the object's end.
    value += instance_size_in_words() - map->instance_size_in_words();
  }
  set_used_or_unused_instance_size_in_words(value);
  DCHECK_EQ(UnusedPropertyFields(), map->UnusedPropertyFields());
}

void Map::AccountAddedPropertyField() {
  // Update used instance size and unused property fields number.
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kTaggedSize);
#ifdef DEBUG
  int new_unused = UnusedPropertyFields() - 1;
  if (new_unused < 0) new_unused += JSObject::kFieldsAdded;
#endif
  int value = used_or_unused_instance_size_in_words();
  if (value >= JSObject::kFieldsAdded) {
    if (value == instance_size_in_words()) {
      AccountAddedOutOfObjectPropertyField(0);
    } else {
      // The property is added in-object, so simply increment the counter.
      set_used_or_unused_instance_size_in_words(value + 1);
    }
  } else {
    AccountAddedOutOfObjectPropertyField(value);
  }
  DCHECK_EQ(new_unused, UnusedPropertyFields());
}

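// Records that a property was added to the property array; the remaining
// slack is tracked modulo JSObject::kFieldsAdded, the granularity by which
// the property backing store is extended.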
void Map::AccountAddedOutOfObjectPropertyField(int unused_in_property_array) {
  unused_in_property_array--;
  if (unused_in_property_array < 0) {
    unused_in_property_array += JSObject::kFieldsAdded;
  }
  CHECK_LT(static_cast<unsigned>(unused_in_property_array),
           JSObject::kFieldsAdded);
  set_used_or_unused_instance_size_in_words(unused_in_property_array);
  DCHECK_EQ(unused_in_property_array, UnusedPropertyFields());
}

byte Map::bit_field() const { return READ_BYTE_FIELD(*this, kBitFieldOffset); }

void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(*this, kBitFieldOffset, value);
}

byte Map::relaxed_bit_field() const {
  return RELAXED_READ_BYTE_FIELD(*this, kBitFieldOffset);
}

void Map::set_relaxed_bit_field(byte value) {
  RELAXED_WRITE_BYTE_FIELD(*this, kBitFieldOffset, value);
}

byte Map::bit_field2() const {
  return READ_BYTE_FIELD(*this, kBitField2Offset);
}

void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(*this, kBitField2Offset, value);
}

bool Map::is_abandoned_prototype_map() const {
  return is_prototype_map() && !owns_descriptors();
}

bool Map::should_be_fast_prototype_map() const {
  if (!prototype_info()->IsPrototypeInfo()) return false;
  return PrototypeInfo::cast(prototype_info())->should_be_fast_map();
}

void Map::set_elements_kind(ElementsKind elements_kind) {
  CHECK_LT(static_cast<int>(elements_kind), kElementsKindCount);
  set_bit_field2(Map::ElementsKindBits::update(bit_field2(), elements_kind));
}

ElementsKind Map::elements_kind() const {
  return Map::ElementsKindBits::decode(bit_field2());
}

bool Map::has_fast_smi_elements() const {
  return IsSmiElementsKind(elements_kind());
}

bool Map::has_fast_object_elements() const {
  return IsObjectElementsKind(elements_kind());
}

bool Map::has_fast_smi_or_object_elements() const {
  return IsSmiOrObjectElementsKind(elements_kind());
}

bool Map::has_fast_double_elements() const {
  return IsDoubleElementsKind(elements_kind());
}

bool Map::has_fast_elements() const {
  return IsFastElementsKind(elements_kind());
}

bool Map::has_sloppy_arguments_elements() const {
  return IsSloppyArgumentsElementsKind(elements_kind());
}

bool Map::has_fast_sloppy_arguments_elements() const {
  return elements_kind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
}

bool Map::has_fast_string_wrapper_elements() const {
  return elements_kind() == FAST_STRING_WRAPPER_ELEMENTS;
}

bool Map::has_fixed_typed_array_elements() const {
  return IsFixedTypedArrayElementsKind(elements_kind());
}

bool Map::has_dictionary_elements() const {
  return IsDictionaryElementsKind(elements_kind());
}

bool Map::is_frozen_or_sealed_elements() const {
  return IsFrozenOrSealedElementsKind(elements_kind());
}

void Map::set_is_dictionary_map(bool value) {
  uint32_t new_bit_field3 = IsDictionaryMapBit::update(bit_field3(), value);
  new_bit_field3 = IsUnstableBit::update(new_bit_field3, value);
  set_bit_field3(new_bit_field3);
}

bool Map::is_dictionary_map() const {
  return IsDictionaryMapBit::decode(bit_field3());
}

void Map::mark_unstable() {
  set_bit_field3(IsUnstableBit::update(bit_field3(), true));
}

bool Map::is_stable() const { return !IsUnstableBit::decode(bit_field3()); }

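// Returns true if any descriptor of this map could still be generalized
// (its representation widened, or a kData descriptor turned into a field),
// in which case a later map update could deprecate this map.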
bool Map::CanBeDeprecated() const {
  int descriptor = LastAdded();
  for (int i = 0; i <= descriptor; i++) {
    PropertyDetails details = instance_descriptors()->GetDetails(i);
    if (details.representation().IsNone()) return true;
    if (details.representation().IsSmi()) return true;
    if (details.representation().IsDouble()) return true;
    if (details.representation().IsHeapObject()) return true;
    if (details.kind() == kData && details.location() == kDescriptor) {
      return true;
    }
  }
  return false;
}

void Map::NotifyLeafMapLayoutChange(Isolate* isolate) {
  if (is_stable()) {
    mark_unstable();
    dependent_code()->DeoptimizeDependentCodeGroup(
        isolate, DependentCode::kPrototypeCheckGroup);
  }
}

bool Map::CanTransition() const {
  // Only JSObject and subtypes have map transitions and back pointers.
  return InstanceTypeChecker::IsJSObject(instance_type());
}

#define DEF_TESTER(Type, ...)                              \
  bool Map::Is##Type##Map() const {                        \
    return InstanceTypeChecker::Is##Type(instance_type()); \
  }
INSTANCE_TYPE_CHECKERS(DEF_TESTER)
#undef DEF_TESTER

bool Map::IsBooleanMap() const {
  return *this == GetReadOnlyRoots().boolean_map();
}

bool Map::IsNullOrUndefinedMap() const {
  return *this == GetReadOnlyRoots().null_map() ||
         *this == GetReadOnlyRoots().undefined_map();
}

bool Map::IsPrimitiveMap() const {
  return instance_type() <= LAST_PRIMITIVE_TYPE;
}

HeapObject Map::prototype() const {
  return HeapObject::cast(READ_FIELD(*this, kPrototypeOffset));
}

void Map::set_prototype(HeapObject value, WriteBarrierMode mode) {
  DCHECK(value->IsNull() || value->IsJSReceiver());
  WRITE_FIELD(*this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(*this, kPrototypeOffset, value, mode);
}

LayoutDescriptor Map::layout_descriptor_gc_safe() const {
  DCHECK(FLAG_unbox_double_fields);
  // The loaded value can be dereferenced on a background thread to load the
  // bitmap. We need an acquire load to ensure that the bitmap-initializing
  // stores are also visible to the background thread.
  Object layout_desc = ACQUIRE_READ_FIELD(*this, kLayoutDescriptorOffset);
  return LayoutDescriptor::cast_gc_safe(layout_desc);
}

bool Map::HasFastPointerLayout() const {
  DCHECK(FLAG_unbox_double_fields);
  // The loaded value is only used for a Smi check and is not dereferenced,
  // so a relaxed load is safe.
  Object layout_desc = RELAXED_READ_FIELD(*this, kLayoutDescriptorOffset);
  return LayoutDescriptor::IsFastPointerLayout(layout_desc);
}

void Map::UpdateDescriptors(Isolate* isolate, DescriptorArray descriptors,
                            LayoutDescriptor layout_desc,
                            int number_of_own_descriptors) {
  SetInstanceDescriptors(isolate, descriptors, number_of_own_descriptors);
  if (FLAG_unbox_double_fields) {
    if (layout_descriptor()->IsSlowLayout()) {
      set_layout_descriptor(layout_desc);
    }
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(*this));
      CHECK_EQ(Map::GetVisitorId(*this), visitor_id());
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(*this));
    DCHECK(visitor_id() == Map::GetVisitorId(*this));
#endif
  }
}

void Map::InitializeDescriptors(Isolate* isolate, DescriptorArray descriptors,
                                LayoutDescriptor layout_desc) {
  SetInstanceDescriptors(isolate, descriptors,
                         descriptors->number_of_descriptors());

  if (FLAG_unbox_double_fields) {
    set_layout_descriptor(layout_desc);
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(*this));
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(*this));
#endif
    set_visitor_id(Map::GetVisitorId(*this));
  }
}

void Map::set_bit_field3(uint32_t bits) {
  RELAXED_WRITE_UINT32_FIELD(*this, kBitField3Offset, bits);
}

uint32_t Map::bit_field3() const {
  return RELAXED_READ_UINT32_FIELD(*this, kBitField3Offset);
}

void Map::clear_padding() {
  if (FIELD_SIZE(kOptionalPaddingOffset) == 0) return;
  DCHECK_EQ(4, FIELD_SIZE(kOptionalPaddingOffset));
  memset(reinterpret_cast<void*>(address() + kOptionalPaddingOffset), 0,
         FIELD_SIZE(kOptionalPaddingOffset));
}

LayoutDescriptor Map::GetLayoutDescriptor() const {
  return FLAG_unbox_double_fields ? layout_descriptor()
                                  : LayoutDescriptor::FastPointerLayout();
}

void Map::AppendDescriptor(Isolate* isolate, Descriptor* desc) {
  DescriptorArray descriptors = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
  {
    // The following two operations need to happen before the marking write
    // barrier.
    descriptors->Append(desc);
    SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
    MarkingBarrierForDescriptorArray(isolate->heap(), *this, descriptors,
                                     number_of_own_descriptors + 1);
  }
  // Properly mark the map if the {desc} is an "interesting symbol".
  if (desc->GetKey()->IsInterestingSymbol()) {
    set_may_have_interesting_symbols(true);
  }
  PropertyDetails details = desc->GetDetails();
  if (details.location() == kField) {
    DCHECK_GT(UnusedPropertyFields(), 0);
    AccountAddedPropertyField();
  }

// This function does not support appending double field descriptors and
// it should never try to (otherwise, the layout descriptor would have to be
// updated too).
#ifdef DEBUG
  DCHECK(details.location() != kField || !details.representation().IsDouble());
#endif
}

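// The constructor-or-backpointer slot holds a Map (the back pointer) for
// transitioned maps; otherwise it holds the constructor. Returns undefined
// when there is no back pointer.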
HeapObject Map::GetBackPointer() const {
  Object object = constructor_or_backpointer();
  if (object->IsMap()) {
    return Map::cast(object);
  }
  return GetReadOnlyRoots().undefined_value();
}

Map Map::ElementsTransitionMap() {
  DisallowHeapAllocation no_gc;
  // TODO(delphick): While it's safe to pass nullptr for Isolate* here as
  // SearchSpecial doesn't need it, this is really ugly. Perhaps factor out a
  // base class for methods not requiring an Isolate?
  return TransitionsAccessor(nullptr, *this, &no_gc)
      .SearchSpecial(GetReadOnlyRoots().elements_transition_symbol());
}

Object Map::prototype_info() const {
  DCHECK(is_prototype_map());
  return READ_FIELD(*this, Map::kTransitionsOrPrototypeInfoOffset);
}

void Map::set_prototype_info(Object value, WriteBarrierMode mode) {
  CHECK(is_prototype_map());
  WRITE_FIELD(*this, Map::kTransitionsOrPrototypeInfoOffset, value);
  CONDITIONAL_WRITE_BARRIER(*this, Map::kTransitionsOrPrototypeInfoOffset,
                            value, mode);
}

void Map::SetBackPointer(Object value, WriteBarrierMode mode) {
  CHECK_GE(instance_type(), FIRST_JS_RECEIVER_TYPE);
  CHECK(value->IsMap());
  CHECK(GetBackPointer()->IsUndefined());
  CHECK_IMPLIES(value->IsMap(), Map::cast(value)->GetConstructor() ==
                                    constructor_or_backpointer());
  set_constructor_or_backpointer(value, mode);
}

ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, prototype_validity_cell, Object, kPrototypeValidityCellOffset)
ACCESSORS(Map, constructor_or_backpointer, Object,
          kConstructorOrBackPointerOffset)

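// The validity cell is either a Smi or a Cell holding a Smi; the prototype
// chain is considered valid while that value is kPrototypeChainValid.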
bool Map::IsPrototypeValidityCellValid() const {
  Object validity_cell = prototype_validity_cell();
  Object value = validity_cell->IsSmi() ? Smi::cast(validity_cell)
                                        : Cell::cast(validity_cell)->value();
  return value == Smi::FromInt(Map::kPrototypeChainValid);
}

Object Map::GetConstructor() const {
  Object maybe_constructor = constructor_or_backpointer();
  // Follow any back pointers.
  while (maybe_constructor->IsMap()) {
    maybe_constructor =
        Map::cast(maybe_constructor)->constructor_or_backpointer();
  }
  return maybe_constructor;
}

FunctionTemplateInfo Map::GetFunctionTemplateInfo() const {
  Object constructor = GetConstructor();
  if (constructor->IsJSFunction()) {
    DCHECK(JSFunction::cast(constructor)->shared()->IsApiFunction());
    return JSFunction::cast(constructor)->shared()->get_api_func_data();
  }
  DCHECK(constructor->IsFunctionTemplateInfo());
  return FunctionTemplateInfo::cast(constructor);
}

void Map::SetConstructor(Object constructor, WriteBarrierMode mode) {
  // Never overwrite a back pointer with a constructor.
  CHECK(!constructor_or_backpointer()->IsMap());
  set_constructor_or_backpointer(constructor, mode);
}

Handle<Map> Map::CopyInitialMap(Isolate* isolate, Handle<Map> map) {
  return CopyInitialMap(isolate, map, map->instance_size(),
                        map->GetInObjectProperties(),
                        map->UnusedPropertyFields());
}

bool Map::IsInobjectSlackTrackingInProgress() const {
  return construction_counter() != Map::kNoSlackTracking;
}

void Map::InobjectSlackTrackingStep(Isolate* isolate) {
  // Slack tracking should only be performed on an initial map.
  DCHECK(GetBackPointer()->IsUndefined());
  if (!IsInobjectSlackTrackingInProgress()) return;
  int counter = construction_counter();
  set_construction_counter(counter - 1);
  if (counter == kSlackTrackingCounterEnd) {
    CompleteInobjectSlackTracking(isolate);
  }
}

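// Chooses how much slack to reserve when growing a backing store: roughly a
// quarter of the old size (at least 1), capped so the result stays within
// |size_limit|.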
int Map::SlackForArraySize(int old_size, int size_limit) {
  const int max_slack = size_limit - old_size;
  CHECK_LE(0, max_slack);
  if (old_size < 4) {
    DCHECK_LE(1, max_slack);
    return 1;
  }
  return Min(max_slack, old_size / 4);
}

int Map::InstanceSizeFromSlack(int slack) const {
  return instance_size() - slack * kTaggedSize;
}

OBJECT_CONSTRUCTORS_IMPL(NormalizedMapCache, WeakFixedArray)
CAST_ACCESSOR(NormalizedMapCache)
NEVER_READ_ONLY_SPACE_IMPL(NormalizedMapCache)

int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;
}

bool HeapObject::IsNormalizedMapCache() const {
  if (!IsWeakFixedArray()) return false;
  if (WeakFixedArray::cast(*this)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
  return true;
}

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_MAP_INL_H_