// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Review notes:
//
// - The use of macros in these inline functions may seem superfluous
// but it is absolutely needed to make sure gcc generates optimal
// code. gcc is not happy when attempting to inline too deep.
//

#ifndef V8_OBJECTS_INL_H_
#define V8_OBJECTS_INL_H_

#include "src/objects.h"

#include "src/base/bits.h"
#include "src/base/tsan.h"
#include "src/builtins/builtins.h"
#include "src/conversions.h"
#include "src/double.h"
#include "src/handles-inl.h"
#include "src/heap/factory.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/keys.h"
#include "src/lookup-inl.h"  // TODO(jkummerow): Drop.
#include "src/objects/bigint.h"
#include "src/objects/heap-number-inl.h"
#include "src/objects/heap-object.h"
#include "src/objects/js-proxy-inl.h"  // TODO(jkummerow): Drop.
#include "src/objects/literal-objects.h"
#include "src/objects/oddball.h"
#include "src/objects/regexp-match-info.h"
#include "src/objects/scope-info.h"
#include "src/objects/shared-function-info.h"
#include "src/objects/slots-inl.h"
#include "src/objects/smi-inl.h"
#include "src/objects/templates.h"
#include "src/property-details.h"
#include "src/property.h"
#include "src/v8memory.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

PropertyDetails::PropertyDetails(Smi smi) { value_ = smi->value(); }

Smi PropertyDetails::AsSmi() const {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}

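// Width, in tagged machine words, of an in-object field with this
// representation: when double fields are unboxed and a double is wider than a
// tagged slot, a double field spans kDoubleSize / kTaggedSize words; every
// other case takes a single word.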
int PropertyDetails::field_width_in_words() const {
  DCHECK_EQ(location(), kField);
  if (!FLAG_unbox_double_fields) return 1;
  if (kDoubleSize == kTaggedSize) return 1;
  return representation().IsDouble() ? kDoubleSize / kTaggedSize : 1;
}

bool HeapObject::IsSloppyArgumentsElements() const {
  return IsFixedArrayExact();
}

bool HeapObject::IsJSSloppyArgumentsObject() const {
  return IsJSArgumentsObject();
}

bool HeapObject::IsJSGeneratorObject() const {
  return map()->instance_type() == JS_GENERATOR_OBJECT_TYPE ||
         IsJSAsyncFunctionObject() || IsJSAsyncGeneratorObject();
}

bool HeapObject::IsDataHandler() const {
  return IsLoadHandler() || IsStoreHandler();
}

bool HeapObject::IsClassBoilerplate() const { return IsFixedArrayExact(); }

#define IS_TYPE_FUNCTION_DEF(type_)                                 \
  bool Object::Is##type_() const {                                  \
    return IsHeapObject() && HeapObject::cast(*this)->Is##type_();  \
  }
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DEF)
#undef IS_TYPE_FUNCTION_DEF

#define IS_TYPE_FUNCTION_DEF(Type, Value)                          \
  bool Object::Is##Type(Isolate* isolate) const {                  \
    return Is##Type(ReadOnlyRoots(isolate->heap()));               \
  }                                                                \
  bool Object::Is##Type(ReadOnlyRoots roots) const {               \
    return *this == roots.Value();                                 \
  }                                                                \
  bool Object::Is##Type() const {                                  \
    return IsHeapObject() && HeapObject::cast(*this)->Is##Type();  \
  }                                                                \
  bool HeapObject::Is##Type(Isolate* isolate) const {              \
    return Object::Is##Type(isolate);                              \
  }                                                                \
  bool HeapObject::Is##Type(ReadOnlyRoots roots) const {           \
    return Object::Is##Type(roots);                                \
  }                                                                \
  bool HeapObject::Is##Type() const { return Is##Type(GetReadOnlyRoots()); }
ODDBALL_LIST(IS_TYPE_FUNCTION_DEF)
#undef IS_TYPE_FUNCTION_DEF

bool Object::IsNullOrUndefined(Isolate* isolate) const {
  return IsNullOrUndefined(ReadOnlyRoots(isolate));
}

bool Object::IsNullOrUndefined(ReadOnlyRoots roots) const {
  return IsNull(roots) || IsUndefined(roots);
}

bool Object::IsNullOrUndefined() const {
  return IsHeapObject() && HeapObject::cast(*this)->IsNullOrUndefined();
}

bool HeapObject::IsNullOrUndefined(Isolate* isolate) const {
  return Object::IsNullOrUndefined(isolate);
}

bool HeapObject::IsNullOrUndefined(ReadOnlyRoots roots) const {
  return Object::IsNullOrUndefined(roots);
}

bool HeapObject::IsNullOrUndefined() const {
  return IsNullOrUndefined(GetReadOnlyRoots());
}

bool HeapObject::IsUniqueName() const {
  return IsInternalizedString() || IsSymbol();
}

bool HeapObject::IsFunction() const {
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  return map()->instance_type() >= FIRST_FUNCTION_TYPE;
}

bool HeapObject::IsCallable() const { return map()->is_callable(); }

bool HeapObject::IsConstructor() const { return map()->is_constructor(); }

bool HeapObject::IsModuleInfo() const {
  return map() == GetReadOnlyRoots().module_info_map();
}

bool HeapObject::IsTemplateInfo() const {
  return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
}

bool HeapObject::IsConsString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(*this)).IsCons();
}

bool HeapObject::IsThinString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(*this)).IsThin();
}

bool HeapObject::IsSlicedString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(*this)).IsSliced();
}

bool HeapObject::IsSeqString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(*this)).IsSequential();
}

bool HeapObject::IsSeqOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(*this)).IsSequential() &&
         String::cast(*this)->IsOneByteRepresentation();
}

bool HeapObject::IsSeqTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(*this)).IsSequential() &&
         String::cast(*this)->IsTwoByteRepresentation();
}

bool HeapObject::IsExternalString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(*this)).IsExternal();
}

bool HeapObject::IsExternalOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(*this)).IsExternal() &&
         String::cast(*this)->IsOneByteRepresentation();
}

bool HeapObject::IsExternalTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(*this)).IsExternal() &&
         String::cast(*this)->IsTwoByteRepresentation();
}

bool Object::IsNumber() const { return IsSmi() || IsHeapNumber(); }

bool Object::IsNumeric() const { return IsNumber() || IsBigInt(); }

bool HeapObject::IsFiller() const {
  InstanceType instance_type = map()->instance_type();
  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}

bool HeapObject::IsJSWeakCollection() const {
  return IsJSWeakMap() || IsJSWeakSet();
}

bool HeapObject::IsJSCollection() const { return IsJSMap() || IsJSSet(); }

bool HeapObject::IsPromiseReactionJobTask() const {
  return IsPromiseFulfillReactionJobTask() || IsPromiseRejectReactionJobTask();
}

bool HeapObject::IsEnumCache() const { return IsTuple2(); }

bool HeapObject::IsFrameArray() const { return IsFixedArrayExact(); }

bool HeapObject::IsArrayList() const {
  return map() == GetReadOnlyRoots().array_list_map() ||
         *this == GetReadOnlyRoots().empty_fixed_array();
}

bool HeapObject::IsRegExpMatchInfo() const { return IsFixedArrayExact(); }

bool Object::IsLayoutDescriptor() const { return IsSmi() || IsByteArray(); }

bool HeapObject::IsDeoptimizationData() const {
  // Must be a fixed array.
  if (!IsFixedArrayExact()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(*this)->length();
  if (length == 0) return true;

  length -= DeoptimizationData::kFirstDeoptEntryIndex;
  return length >= 0 && length % DeoptimizationData::kDeoptEntrySize == 0;
}

bool HeapObject::IsHandlerTable() const {
  if (!IsFixedArrayExact()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a handler table array.
  return true;
}

bool HeapObject::IsTemplateList() const {
  if (!IsFixedArrayExact()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a template list.
  if (FixedArray::cast(*this)->length() < 1) return false;
  return true;
}

bool HeapObject::IsDependentCode() const {
  if (!IsWeakFixedArray()) return false;
  // There's actually no way to see the difference between a weak fixed array
  // and a dependent codes array.
  return true;
}

bool HeapObject::IsAbstractCode() const {
  return IsBytecodeArray() || IsCode();
}

bool HeapObject::IsStringWrapper() const {
  return IsJSValue() && JSValue::cast(*this)->value()->IsString();
}

bool HeapObject::IsBooleanWrapper() const {
  return IsJSValue() && JSValue::cast(*this)->value()->IsBoolean();
}

bool HeapObject::IsScriptWrapper() const {
  return IsJSValue() && JSValue::cast(*this)->value()->IsScript();
}

bool HeapObject::IsNumberWrapper() const {
  return IsJSValue() && JSValue::cast(*this)->value()->IsNumber();
}

bool HeapObject::IsBigIntWrapper() const {
  return IsJSValue() && JSValue::cast(*this)->value()->IsBigInt();
}

bool HeapObject::IsSymbolWrapper() const {
  return IsJSValue() && JSValue::cast(*this)->value()->IsSymbol();
}

bool HeapObject::IsJSArrayBufferView() const {
  return IsJSDataView() || IsJSTypedArray();
}

bool HeapObject::IsStringSet() const { return IsHashTable(); }

bool HeapObject::IsObjectHashSet() const { return IsHashTable(); }

bool HeapObject::IsCompilationCacheTable() const { return IsHashTable(); }

bool HeapObject::IsMapCache() const { return IsHashTable(); }

bool HeapObject::IsObjectHashTable() const { return IsHashTable(); }

bool Object::IsHashTableBase() const { return IsHashTable(); }

bool Object::IsSmallOrderedHashTable() const {
  return IsSmallOrderedHashSet() || IsSmallOrderedHashMap() ||
         IsSmallOrderedNameDictionary();
}

bool Object::IsPrimitive() const {
  return IsSmi() || HeapObject::cast(*this)->map()->IsPrimitiveMap();
}

// static
Maybe<bool> Object::IsArray(Handle<Object> object) {
  if (object->IsSmi()) return Just(false);
  Handle<HeapObject> heap_object = Handle<HeapObject>::cast(object);
  if (heap_object->IsJSArray()) return Just(true);
  if (!heap_object->IsJSProxy()) return Just(false);
  return JSProxy::IsArray(Handle<JSProxy>::cast(object));
}

bool HeapObject::IsUndetectable() const { return map()->is_undetectable(); }

bool HeapObject::IsAccessCheckNeeded() const {
  if (IsJSGlobalProxy()) {
    const JSGlobalProxy proxy = JSGlobalProxy::cast(*this);
    JSGlobalObject global = proxy->GetIsolate()->context()->global_object();
    return proxy->IsDetachedFrom(global);
  }
  return map()->is_access_check_needed();
}

bool HeapObject::IsStruct() const {
  switch (map()->instance_type()) {
#define MAKE_STRUCT_CASE(TYPE, Name, name) \
  case TYPE:                               \
    return true;
    STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    // It is hard to include ALLOCATION_SITE_TYPE in STRUCT_LIST because
    // that macro is used for many things and AllocationSite needs a few
    // special cases.
    case ALLOCATION_SITE_TYPE:
      return true;
    case LOAD_HANDLER_TYPE:
    case STORE_HANDLER_TYPE:
      return true;
    case FEEDBACK_CELL_TYPE:
      return true;
    case CALL_HANDLER_INFO_TYPE:
      return true;
    default:
      return false;
  }
}

#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                    \
  bool Object::Is##Name() const {                                  \
    return IsHeapObject() && HeapObject::cast(*this)->Is##Name();  \
  }                                                                \
  TYPE_CHECKER(Name)
STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE

double Object::Number() const {
  DCHECK(IsNumber());
  return IsSmi() ? static_cast<double>(Smi(this->ptr())->value())
                 : HeapNumber::unchecked_cast(*this)->value();
}

// static
bool Object::SameNumberValue(double value1, double value2) {
  // SameNumberValue(NaN, NaN) is true.
  if (value1 != value2) {
    return std::isnan(value1) && std::isnan(value2);
  }
  // SameNumberValue(0.0, -0.0) is false.
  return (std::signbit(value1) == std::signbit(value2));
}

bool Object::IsNaN() const {
  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(*this)->value());
}

bool Object::IsMinusZero() const {
  return this->IsHeapNumber() &&
         i::IsMinusZero(HeapNumber::cast(*this)->value());
}

OBJECT_CONSTRUCTORS_IMPL(RegExpMatchInfo, FixedArray)
OBJECT_CONSTRUCTORS_IMPL(ScopeInfo, FixedArray)
OBJECT_CONSTRUCTORS_IMPL(BigIntBase, HeapObject)
OBJECT_CONSTRUCTORS_IMPL(BigInt, BigIntBase)
OBJECT_CONSTRUCTORS_IMPL(FreshlyAllocatedBigInt, BigIntBase)

// ------------------------------------
// Cast operations

CAST_ACCESSOR(BigInt)
CAST_ACCESSOR(RegExpMatchInfo)
CAST_ACCESSOR(ScopeInfo)

bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray.
  return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
}

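// Returns true if this key should be skipped when enumerating keys with the
// given filter: SKIP_SYMBOLS and SKIP_STRINGS suppress the respective key
// kinds, and private symbols are suppressed except under PRIVATE_NAMES_ONLY.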
bool Object::FilterKey(PropertyFilter filter) {
  DCHECK(!IsPropertyCell());
  if (filter == PRIVATE_NAMES_ONLY) {
    if (!IsSymbol()) return true;
    return !Symbol::cast(*this)->is_private_name();
  } else if (IsSymbol()) {
    if (filter & SKIP_SYMBOLS) return true;

    if (Symbol::cast(*this)->is_private()) return true;
  } else {
    if (filter & SKIP_STRINGS) return true;
  }
  return false;
}

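// Picks the most specific field representation that can describe this value
// (Smi, Double, HeapObject or None), subject to the FLAG_track_*_fields
// flags; falls back to Tagged when the relevant tracking is disabled.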
Representation Object::OptimalRepresentation() {
  if (!FLAG_track_fields) return Representation::Tagged();
  if (IsSmi()) {
    return Representation::Smi();
  } else if (FLAG_track_double_fields && IsHeapNumber()) {
    return Representation::Double();
  } else if (FLAG_track_computed_fields && IsUninitialized()) {
    return Representation::None();
  } else if (FLAG_track_heap_object_fields) {
    DCHECK(IsHeapObject());
    return Representation::HeapObject();
  } else {
    return Representation::Tagged();
  }
}

ElementsKind Object::OptimalElementsKind() {
  if (IsSmi()) return PACKED_SMI_ELEMENTS;
  if (IsNumber()) return PACKED_DOUBLE_ELEMENTS;
  return PACKED_ELEMENTS;
}

bool Object::FitsRepresentation(Representation representation) {
  if (FLAG_track_fields && representation.IsSmi()) {
    return IsSmi();
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    return IsMutableHeapNumber() || IsNumber();
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    return IsHeapObject();
  } else if (FLAG_track_fields && representation.IsNone()) {
    return false;
  }
  return true;
}

bool Object::ToUint32(uint32_t* value) const {
  if (IsSmi()) {
    int num = Smi::ToInt(*this);
    if (num < 0) return false;
    *value = static_cast<uint32_t>(num);
    return true;
  }
  if (IsHeapNumber()) {
    double num = HeapNumber::cast(*this)->value();
    return DoubleToUint32IfEqualToSelf(num, value);
  }
  return false;
}

// static
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
                                         Handle<Object> object,
                                         const char* method_name) {
  if (object->IsJSReceiver()) return Handle<JSReceiver>::cast(object);
  return ToObjectImpl(isolate, object, method_name);
}

// static
MaybeHandle<Name> Object::ToName(Isolate* isolate, Handle<Object> input) {
  if (input->IsName()) return Handle<Name>::cast(input);
  return ConvertToName(isolate, input);
}

// static
MaybeHandle<Object> Object::ToPropertyKey(Isolate* isolate,
                                          Handle<Object> value) {
  if (value->IsSmi() || HeapObject::cast(*value)->IsName()) return value;
  return ConvertToPropertyKey(isolate, value);
}

// static
MaybeHandle<Object> Object::ToPrimitive(Handle<Object> input,
                                        ToPrimitiveHint hint) {
  if (input->IsPrimitive()) return input;
  return JSReceiver::ToPrimitive(Handle<JSReceiver>::cast(input), hint);
}

// static
MaybeHandle<Object> Object::ToNumber(Isolate* isolate, Handle<Object> input) {
  if (input->IsNumber()) return input;  // Shortcut.
  return ConvertToNumberOrNumeric(isolate, input, Conversion::kToNumber);
}

// static
MaybeHandle<Object> Object::ToNumeric(Isolate* isolate, Handle<Object> input) {
  if (input->IsNumber() || input->IsBigInt()) return input;  // Shortcut.
  return ConvertToNumberOrNumeric(isolate, input, Conversion::kToNumeric);
}

// static
MaybeHandle<Object> Object::ToInteger(Isolate* isolate, Handle<Object> input) {
  if (input->IsSmi()) return input;
  return ConvertToInteger(isolate, input);
}

// static
MaybeHandle<Object> Object::ToInt32(Isolate* isolate, Handle<Object> input) {
  if (input->IsSmi()) return input;
  return ConvertToInt32(isolate, input);
}

// static
MaybeHandle<Object> Object::ToUint32(Isolate* isolate, Handle<Object> input) {
  if (input->IsSmi()) return handle(Smi::cast(*input)->ToUint32Smi(), isolate);
  return ConvertToUint32(isolate, input);
}

// static
MaybeHandle<String> Object::ToString(Isolate* isolate, Handle<Object> input) {
  if (input->IsString()) return Handle<String>::cast(input);
  return ConvertToString(isolate, input);
}

// static
MaybeHandle<Object> Object::ToLength(Isolate* isolate, Handle<Object> input) {
  if (input->IsSmi()) {
    int value = std::max(Smi::ToInt(*input), 0);
    return handle(Smi::FromInt(value), isolate);
  }
  return ConvertToLength(isolate, input);
}

// static
MaybeHandle<Object> Object::ToIndex(Isolate* isolate, Handle<Object> input,
                                    MessageTemplate error_index) {
  if (input->IsSmi() && Smi::ToInt(*input) >= 0) return input;
  return ConvertToIndex(isolate, input, error_index);
}

MaybeHandle<Object> Object::GetProperty(Isolate* isolate, Handle<Object> object,
                                        Handle<Name> name) {
  LookupIterator it(isolate, object, name);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return GetProperty(&it);
}

MaybeHandle<Object> Object::GetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index) {
  LookupIterator it(isolate, object, index);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return GetProperty(&it);
}

MaybeHandle<Object> Object::SetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index, Handle<Object> value,
                                       ShouldThrow should_throw) {
  LookupIterator it(isolate, object, index);
  MAYBE_RETURN_NULL(
      SetProperty(&it, value, StoreOrigin::kMaybeKeyed, Just(should_throw)));
  return value;
}

ObjectSlot HeapObject::RawField(int byte_offset) const {
  return ObjectSlot(FIELD_ADDR(*this, byte_offset));
}

MaybeObjectSlot HeapObject::RawMaybeWeakField(int byte_offset) const {
  return MaybeObjectSlot(FIELD_ADDR(*this, byte_offset));
}

MapWord MapWord::FromMap(const Map map) { return MapWord(map.ptr()); }

Map MapWord::ToMap() const { return Map::unchecked_cast(Object(value_)); }

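// During garbage collection the map slot of an already-moved object holds a
// forwarding pointer rather than a map. FromForwardingAddress() below strips
// kHeapObjectTag, so a forwarding map word carries a Smi tag, which is what
// IsForwardingAddress() tests for.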
bool MapWord::IsForwardingAddress() const { return HAS_SMI_TAG(value_); }

MapWord MapWord::FromForwardingAddress(HeapObject object) {
  return MapWord(object->ptr() - kHeapObjectTag);
}

HeapObject MapWord::ToForwardingAddress() {
  DCHECK(IsForwardingAddress());
  return HeapObject::FromAddress(value_);
}

#ifdef VERIFY_HEAP
void HeapObject::VerifyObjectField(Isolate* isolate, int offset) {
  VerifyPointer(isolate, READ_FIELD(*this, offset));
  STATIC_ASSERT(!COMPRESS_POINTERS_BOOL || kTaggedSize == kInt32Size);
}

void HeapObject::VerifyMaybeObjectField(Isolate* isolate, int offset) {
  MaybeObject::VerifyMaybeObjectPointer(isolate,
                                        READ_WEAK_FIELD(*this, offset));
  STATIC_ASSERT(!COMPRESS_POINTERS_BOOL || kTaggedSize == kInt32Size);
}

void HeapObject::VerifySmiField(int offset) {
  CHECK(READ_FIELD(*this, offset)->IsSmi());
  STATIC_ASSERT(!COMPRESS_POINTERS_BOOL || kTaggedSize == kInt32Size);
}

#endif

ReadOnlyRoots HeapObject::GetReadOnlyRoots() const {
  // TODO(v8:7464): When RO_SPACE is embedded, this will access a global
  // variable instead.
  return ReadOnlyRoots(GetHeapFromWritableObject(*this));
}

Map HeapObject::map() const { return map_word().ToMap(); }

void HeapObject::set_map(Map value) {
  if (!value.is_null()) {
#ifdef VERIFY_HEAP
    GetHeapFromWritableObject(*this)->VerifyObjectLayoutChange(*this, value);
#endif
  }
  set_map_word(MapWord::FromMap(value));
  if (!value.is_null()) {
    // TODO(1600) We are passing kNullAddress as a slot because maps can never
    // be on an evacuation candidate.
    MarkingBarrier(*this, ObjectSlot(kNullAddress), value);
  }
}

Map HeapObject::synchronized_map() const {
  return synchronized_map_word().ToMap();
}

void HeapObject::synchronized_set_map(Map value) {
  if (!value.is_null()) {
#ifdef VERIFY_HEAP
    GetHeapFromWritableObject(*this)->VerifyObjectLayoutChange(*this, value);
#endif
  }
  synchronized_set_map_word(MapWord::FromMap(value));
  if (!value.is_null()) {
    // TODO(1600) We are passing kNullAddress as a slot because maps can never
    // be on an evacuation candidate.
    MarkingBarrier(*this, ObjectSlot(kNullAddress), value);
  }
}

// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map value) {
  if (!value.is_null()) {
#ifdef VERIFY_HEAP
    GetHeapFromWritableObject(*this)->VerifyObjectLayoutChange(*this, value);
#endif
  }
  set_map_word(MapWord::FromMap(value));
}

void HeapObject::set_map_after_allocation(Map value, WriteBarrierMode mode) {
  set_map_word(MapWord::FromMap(value));
  if (mode != SKIP_WRITE_BARRIER) {
    DCHECK(!value.is_null());
    // TODO(1600) We are passing kNullAddress as a slot because maps can never
    // be on an evacuation candidate.
    MarkingBarrier(*this, ObjectSlot(kNullAddress), value);
  }
}

MapWordSlot HeapObject::map_slot() const {
  return MapWordSlot(FIELD_ADDR(*this, kMapOffset));
}

MapWord HeapObject::map_word() const {
  return MapWord(map_slot().Relaxed_Load().ptr());
}

void HeapObject::set_map_word(MapWord map_word) {
  map_slot().Relaxed_Store(Object(map_word.value_));
}

MapWord HeapObject::synchronized_map_word() const {
  return MapWord(map_slot().Acquire_Load().ptr());
}

void HeapObject::synchronized_set_map_word(MapWord map_word) {
  map_slot().Release_Store(Object(map_word.value_));
}

int HeapObject::Size() const { return SizeFromMap(map()); }

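// "Special receivers" are receiver types that need extra care during property
// access (e.g. interceptors or access checks); their instance types are
// ordered at or below LAST_SPECIAL_RECEIVER_TYPE, which the DCHECK in
// IsSpecialReceiverMap() below relies on.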
inline bool IsSpecialReceiverInstanceType(InstanceType instance_type) {
  return instance_type <= LAST_SPECIAL_RECEIVER_TYPE;
}

// This should be in objects/map-inl.h, but can't, because of a cyclic
// dependency.
bool Map::IsSpecialReceiverMap() const {
  bool result = IsSpecialReceiverInstanceType(instance_type());
  DCHECK_IMPLIES(!result,
                 !has_named_interceptor() && !is_access_check_needed());
  return result;
}

inline bool IsCustomElementsReceiverInstanceType(InstanceType instance_type) {
  return instance_type <= LAST_CUSTOM_ELEMENTS_RECEIVER;
}

// This should be in objects/map-inl.h, but can't, because of a cyclic
// dependency.
bool Map::IsCustomElementsReceiverMap() const {
  return IsCustomElementsReceiverInstanceType(instance_type());
}

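// An array length may be any uint32 value, whereas a valid array index must
// additionally differ from kMaxUInt32 (2^32 - 1).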
bool Object::ToArrayLength(uint32_t* index) const {
  return Object::ToUint32(index);
}

bool Object::ToArrayIndex(uint32_t* index) const {
  return Object::ToUint32(index) && *index != kMaxUInt32;
}

bool Object::GetHeapObjectIfStrong(HeapObject* result) const {
  return GetHeapObject(result);
}

bool Object::GetHeapObject(HeapObject* result) const {
  if (!IsHeapObject()) return false;
  *result = HeapObject::cast(*this);
  return true;
}

HeapObject Object::GetHeapObject() const {
  DCHECK(IsHeapObject());
  return HeapObject::cast(*this);
}

int RegExpMatchInfo::NumberOfCaptureRegisters() {
  DCHECK_GE(length(), kLastMatchOverhead);
  Object obj = get(kNumberOfCapturesIndex);
  return Smi::ToInt(obj);
}

void RegExpMatchInfo::SetNumberOfCaptureRegisters(int value) {
  DCHECK_GE(length(), kLastMatchOverhead);
  set(kNumberOfCapturesIndex, Smi::FromInt(value));
}

String RegExpMatchInfo::LastSubject() {
  DCHECK_GE(length(), kLastMatchOverhead);
  return String::cast(get(kLastSubjectIndex));
}

void RegExpMatchInfo::SetLastSubject(String value) {
  DCHECK_GE(length(), kLastMatchOverhead);
  set(kLastSubjectIndex, value);
}

Object RegExpMatchInfo::LastInput() {
  DCHECK_GE(length(), kLastMatchOverhead);
  return get(kLastInputIndex);
}

void RegExpMatchInfo::SetLastInput(Object value) {
  DCHECK_GE(length(), kLastMatchOverhead);
  set(kLastInputIndex, value);
}

int RegExpMatchInfo::Capture(int i) {
  DCHECK_LT(i, NumberOfCaptureRegisters());
  Object obj = get(kFirstCaptureIndex + i);
  return Smi::ToInt(obj);
}

void RegExpMatchInfo::SetCapture(int i, int value) {
  DCHECK_LT(i, NumberOfCaptureRegisters());
  set(kFirstCaptureIndex + i, Smi::FromInt(value));
}

WriteBarrierMode HeapObject::GetWriteBarrierMode(
    const DisallowHeapAllocation& promise) {
  return GetWriteBarrierModeForObject(*this, &promise);
}

// static
AllocationAlignment HeapObject::RequiredAlignment(Map map) {
#ifdef V8_COMPRESS_POINTERS
  // TODO(ishell, v8:8875): Consider using aligned allocations once the
  // allocation alignment inconsistency is fixed. For now we keep using
  // unaligned access since both x64 and arm64 architectures (where pointer
  // compression is supported) allow unaligned access to doubles and full
  // words.
#endif  // V8_COMPRESS_POINTERS
#ifdef V8_HOST_ARCH_32_BIT
  int instance_type = map->instance_type();
  if (instance_type == FIXED_FLOAT64_ARRAY_TYPE ||
      instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
    return kDoubleAligned;
  }
  if (instance_type == HEAP_NUMBER_TYPE) return kDoubleUnaligned;
#endif  // V8_HOST_ARCH_32_BIT
  return kWordAligned;
}

Address HeapObject::GetFieldAddress(int field_offset) const {
  return FIELD_ADDR(*this, field_offset);
}

// static
Maybe<bool> Object::GreaterThan(Isolate* isolate, Handle<Object> x,
                                Handle<Object> y) {
  Maybe<ComparisonResult> result = Compare(isolate, x, y);
  if (result.IsJust()) {
    switch (result.FromJust()) {
      case ComparisonResult::kGreaterThan:
        return Just(true);
      case ComparisonResult::kLessThan:
      case ComparisonResult::kEqual:
      case ComparisonResult::kUndefined:
        return Just(false);
    }
  }
  return Nothing<bool>();
}

// static
Maybe<bool> Object::GreaterThanOrEqual(Isolate* isolate, Handle<Object> x,
                                       Handle<Object> y) {
  Maybe<ComparisonResult> result = Compare(isolate, x, y);
  if (result.IsJust()) {
    switch (result.FromJust()) {
      case ComparisonResult::kEqual:
      case ComparisonResult::kGreaterThan:
        return Just(true);
      case ComparisonResult::kLessThan:
      case ComparisonResult::kUndefined:
        return Just(false);
    }
  }
  return Nothing<bool>();
}

// static
Maybe<bool> Object::LessThan(Isolate* isolate, Handle<Object> x,
                             Handle<Object> y) {
  Maybe<ComparisonResult> result = Compare(isolate, x, y);
  if (result.IsJust()) {
    switch (result.FromJust()) {
      case ComparisonResult::kLessThan:
        return Just(true);
      case ComparisonResult::kEqual:
      case ComparisonResult::kGreaterThan:
      case ComparisonResult::kUndefined:
        return Just(false);
    }
  }
  return Nothing<bool>();
}

// static
Maybe<bool> Object::LessThanOrEqual(Isolate* isolate, Handle<Object> x,
                                    Handle<Object> y) {
  Maybe<ComparisonResult> result = Compare(isolate, x, y);
  if (result.IsJust()) {
    switch (result.FromJust()) {
      case ComparisonResult::kEqual:
      case ComparisonResult::kLessThan:
        return Just(true);
      case ComparisonResult::kGreaterThan:
      case ComparisonResult::kUndefined:
        return Just(false);
    }
  }
  return Nothing<bool>();
}

MaybeHandle<Object> Object::GetPropertyOrElement(Isolate* isolate,
                                                 Handle<Object> object,
                                                 Handle<Name> name) {
  LookupIterator it = LookupIterator::PropertyOrElement(isolate, object, name);
  return GetProperty(&it);
}

MaybeHandle<Object> Object::SetPropertyOrElement(
    Isolate* isolate, Handle<Object> object, Handle<Name> name,
    Handle<Object> value, Maybe<ShouldThrow> should_throw,
    StoreOrigin store_origin) {
  LookupIterator it = LookupIterator::PropertyOrElement(isolate, object, name);
  MAYBE_RETURN_NULL(SetProperty(&it, value, store_origin, should_throw));
  return value;
}

MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> receiver,
                                                 Handle<Name> name,
                                                 Handle<JSReceiver> holder) {
  LookupIterator it = LookupIterator::PropertyOrElement(holder->GetIsolate(),
                                                        receiver, name, holder);
  return GetProperty(&it);
}

// static
Object Object::GetSimpleHash(Object object) {
  DisallowHeapAllocation no_gc;
  if (object->IsSmi()) {
    uint32_t hash = ComputeUnseededHash(Smi::ToInt(object));
    return Smi::FromInt(hash & Smi::kMaxValue);
  }
  if (object->IsHeapNumber()) {
    double num = HeapNumber::cast(object)->value();
    if (std::isnan(num)) return Smi::FromInt(Smi::kMaxValue);
    // Use ComputeUnseededHash for all values in Signed32 range, including -0,
    // which is considered equal to 0 because collections use SameValueZero.
    uint32_t hash;
    // Check range before conversion to avoid undefined behavior.
    if (num >= kMinInt && num <= kMaxInt && FastI2D(FastD2I(num)) == num) {
      hash = ComputeUnseededHash(FastD2I(num));
    } else {
      hash = ComputeLongHash(double_to_uint64(num));
    }
    return Smi::FromInt(hash & Smi::kMaxValue);
  }
  if (object->IsName()) {
    uint32_t hash = Name::cast(object)->Hash();
    return Smi::FromInt(hash);
  }
  if (object->IsOddball()) {
    uint32_t hash = Oddball::cast(object)->to_string()->Hash();
    return Smi::FromInt(hash);
  }
  if (object->IsBigInt()) {
    uint32_t hash = BigInt::cast(object)->Hash();
    return Smi::FromInt(hash & Smi::kMaxValue);
  }
  if (object->IsSharedFunctionInfo()) {
    uint32_t hash = SharedFunctionInfo::cast(object)->Hash();
    return Smi::FromInt(hash & Smi::kMaxValue);
  }
  DCHECK(object->IsJSReceiver());
  return object;
}

Object Object::GetHash() {
  DisallowHeapAllocation no_gc;
  Object hash = GetSimpleHash(*this);
  if (hash->IsSmi()) return hash;

  DCHECK(IsJSReceiver());
  JSReceiver receiver = JSReceiver::cast(*this);
  return receiver->GetIdentityHash();
}

Handle<Object> ObjectHashTableShape::AsHandle(Handle<Object> key) {
  return key;
}

Relocatable::Relocatable(Isolate* isolate) {
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}

Relocatable::~Relocatable() {
  DCHECK_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}

// Predictably converts HeapObject or Address to uint32 by calculating
// offset of the address in respective MemoryChunk.
static inline uint32_t ObjectAddressForHashing(Address object) {
  uint32_t value = static_cast<uint32_t>(object);
  return value & kPageAlignmentMask;
}

static inline Handle<Object> MakeEntryPair(Isolate* isolate, uint32_t index,
                                           Handle<Object> value) {
  Handle<Object> key = isolate->factory()->Uint32ToString(index);
  Handle<FixedArray> entry_storage =
      isolate->factory()->NewUninitializedFixedArray(2);
  {
    entry_storage->set(0, *key, SKIP_WRITE_BARRIER);
    entry_storage->set(1, *value, SKIP_WRITE_BARRIER);
  }
  return isolate->factory()->NewJSArrayWithElements(entry_storage,
                                                    PACKED_ELEMENTS, 2);
}

static inline Handle<Object> MakeEntryPair(Isolate* isolate, Handle<Object> key,
                                           Handle<Object> value) {
  Handle<FixedArray> entry_storage =
      isolate->factory()->NewUninitializedFixedArray(2);
  {
    entry_storage->set(0, *key, SKIP_WRITE_BARRIER);
    entry_storage->set(1, *value, SKIP_WRITE_BARRIER);
  }
  return isolate->factory()->NewJSArrayWithElements(entry_storage,
                                                    PACKED_ELEMENTS, 2);
}

bool ScopeInfo::IsAsmModule() const {
  return IsAsmModuleField::decode(Flags());
}

bool ScopeInfo::HasSimpleParameters() const {
  return HasSimpleParametersField::decode(Flags());
}

#define FIELD_ACCESSORS(name)                                                 \
  void ScopeInfo::Set##name(int value) { set(k##name, Smi::FromInt(value)); } \
  int ScopeInfo::name() const {                                               \
    if (length() > 0) {                                                       \
      return Smi::ToInt(get(k##name));                                        \
    } else {                                                                  \
      return 0;                                                               \
    }                                                                         \
  }
FOR_EACH_SCOPE_INFO_NUMERIC_FIELD(FIELD_ACCESSORS)
#undef FIELD_ACCESSORS

FreshlyAllocatedBigInt FreshlyAllocatedBigInt::cast(Object object) {
  SLOW_DCHECK(object->IsBigInt());
  return FreshlyAllocatedBigInt(object->ptr());
}

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_INL_H_