// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_FIXED_ARRAY_INL_H_
#define V8_OBJECTS_FIXED_ARRAY_INL_H_

#include "src/objects/fixed-array.h"

#include "src/base/tsan.h"
#include "src/conversions.h"
#include "src/handles-inl.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/objects-inl.h"
#include "src/objects/bigint.h"
#include "src/objects/compressed-slots.h"
#include "src/objects/heap-number-inl.h"
#include "src/objects/map.h"
#include "src/objects/maybe-object-inl.h"
#include "src/objects/oddball.h"
#include "src/objects/slots.h"
#include "src/roots-inl.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

OBJECT_CONSTRUCTORS_IMPL(FixedArrayBase, HeapObject)
OBJECT_CONSTRUCTORS_IMPL(FixedArray, FixedArrayBase)
OBJECT_CONSTRUCTORS_IMPL(FixedDoubleArray, FixedArrayBase)
OBJECT_CONSTRUCTORS_IMPL(FixedTypedArrayBase, FixedArrayBase)
OBJECT_CONSTRUCTORS_IMPL(ArrayList, FixedArray)
OBJECT_CONSTRUCTORS_IMPL(ByteArray, FixedArrayBase)
OBJECT_CONSTRUCTORS_IMPL(TemplateList, FixedArray)
OBJECT_CONSTRUCTORS_IMPL(WeakFixedArray, HeapObject)
OBJECT_CONSTRUCTORS_IMPL(WeakArrayList, HeapObject)

FixedArrayBase::FixedArrayBase(Address ptr, AllowInlineSmiStorage allow_smi)
    : HeapObject(ptr, allow_smi) {
  SLOW_DCHECK(
      (allow_smi == AllowInlineSmiStorage::kAllowBeingASmi && IsSmi()) ||
      IsFixedArrayBase());
}

ByteArray::ByteArray(Address ptr, AllowInlineSmiStorage allow_smi)
    : FixedArrayBase(ptr, allow_smi) {
  SLOW_DCHECK(
      (allow_smi == AllowInlineSmiStorage::kAllowBeingASmi && IsSmi()) ||
      IsByteArray());
}

NEVER_READ_ONLY_SPACE_IMPL(WeakArrayList)

CAST_ACCESSOR(ArrayList)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(FixedArray)
CAST_ACCESSOR(FixedArrayBase)
CAST_ACCESSOR(FixedDoubleArray)
CAST_ACCESSOR(FixedTypedArrayBase)
CAST_ACCESSOR(TemplateList)
CAST_ACCESSOR(WeakFixedArray)
CAST_ACCESSOR(WeakArrayList)

SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SMI_ACCESSORS(WeakFixedArray, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(WeakFixedArray, length, kLengthOffset)

SMI_ACCESSORS(WeakArrayList, capacity, kCapacityOffset)
SYNCHRONIZED_SMI_ACCESSORS(WeakArrayList, capacity, kCapacityOffset)
SMI_ACCESSORS(WeakArrayList, length, kLengthOffset)

Object FixedArrayBase::unchecked_synchronized_length() const {
  return ACQUIRE_READ_FIELD(*this, kLengthOffset);
}

ACCESSORS(FixedTypedArrayBase, base_pointer, Object, kBasePointerOffset)

ObjectSlot FixedArray::GetFirstElementAddress() {
  return RawField(OffsetOfElementAt(0));
}

bool FixedArray::ContainsOnlySmisOrHoles() {
  Object the_hole = GetReadOnlyRoots().the_hole_value();
  ObjectSlot current = GetFirstElementAddress();
  for (int i = 0; i < length(); ++i, ++current) {
    Object candidate = *current;
    if (!candidate->IsSmi() && candidate != the_hole) return false;
  }
  return true;
}

Object FixedArray::get(int index) const {
  DCHECK(index >= 0 && index < this->length());
  return RELAXED_READ_FIELD(*this, kHeaderSize + index * kTaggedSize);
}

Handle<Object> FixedArray::get(FixedArray array, int index, Isolate* isolate) {
  return handle(array->get(index), isolate);
}

template <class T>
MaybeHandle<T> FixedArray::GetValue(Isolate* isolate, int index) const {
  Object obj = get(index);
  if (obj->IsUndefined(isolate)) return MaybeHandle<T>();
  return Handle<T>(T::cast(obj), isolate);
}

template <class T>
Handle<T> FixedArray::GetValueChecked(Isolate* isolate, int index) const {
  Object obj = get(index);
  CHECK(!obj->IsUndefined(isolate));
  return Handle<T>(T::cast(obj), isolate);
}

bool FixedArray::is_the_hole(Isolate* isolate, int index) {
  return get(index)->IsTheHole(isolate);
}

void FixedArray::set(int index, Smi value) {
  DCHECK_NE(map(), GetReadOnlyRoots().fixed_cow_array_map());
  DCHECK_LT(index, this->length());
  DCHECK(Object(value).IsSmi());
  int offset = kHeaderSize + index * kTaggedSize;
  RELAXED_WRITE_FIELD(*this, offset, value);
}

void FixedArray::set(int index, Object value) {
  DCHECK_NE(GetReadOnlyRoots().fixed_cow_array_map(), map());
  DCHECK(IsFixedArray());
  DCHECK_GE(index, 0);
  DCHECK_LT(index, this->length());
  int offset = kHeaderSize + index * kTaggedSize;
  RELAXED_WRITE_FIELD(*this, offset, value);
  WRITE_BARRIER(*this, offset, value);
}

void FixedArray::set(int index, Object value, WriteBarrierMode mode) {
  DCHECK_NE(map(), GetReadOnlyRoots().fixed_cow_array_map());
  DCHECK_GE(index, 0);
  DCHECK_LT(index, this->length());
  int offset = kHeaderSize + index * kTaggedSize;
  RELAXED_WRITE_FIELD(*this, offset, value);
  CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode);
}

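// Skips the write barrier entirely; the DCHECK below asserts that the value
// is not in the young generation. The callers further down in this file pass
// read-only roots (undefined/null/the_hole), for which no barrier is needed.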
void FixedArray::NoWriteBarrierSet(FixedArray array, int index, Object value) {
  DCHECK_NE(array->map(), array->GetReadOnlyRoots().fixed_cow_array_map());
  DCHECK_GE(index, 0);
  DCHECK_LT(index, array->length());
  DCHECK(!ObjectInYoungGeneration(value));
  RELAXED_WRITE_FIELD(array, kHeaderSize + index * kTaggedSize, value);
}

void FixedArray::set_undefined(int index) {
  set_undefined(GetReadOnlyRoots(), index);
}

void FixedArray::set_undefined(Isolate* isolate, int index) {
  set_undefined(ReadOnlyRoots(isolate), index);
}

void FixedArray::set_undefined(ReadOnlyRoots ro_roots, int index) {
  FixedArray::NoWriteBarrierSet(*this, index, ro_roots.undefined_value());
}

void FixedArray::set_null(int index) { set_null(GetReadOnlyRoots(), index); }

void FixedArray::set_null(Isolate* isolate, int index) {
  set_null(ReadOnlyRoots(isolate), index);
}

void FixedArray::set_null(ReadOnlyRoots ro_roots, int index) {
  FixedArray::NoWriteBarrierSet(*this, index, ro_roots.null_value());
}

void FixedArray::set_the_hole(int index) {
  set_the_hole(GetReadOnlyRoots(), index);
}

void FixedArray::set_the_hole(Isolate* isolate, int index) {
  set_the_hole(ReadOnlyRoots(isolate), index);
}

void FixedArray::set_the_hole(ReadOnlyRoots ro_roots, int index) {
  FixedArray::NoWriteBarrierSet(*this, index, ro_roots.the_hole_value());
}

void FixedArray::FillWithHoles(int from, int to) {
  for (int i = from; i < to; i++) {
    set_the_hole(i);
  }
}

ObjectSlot FixedArray::data_start() {
  return RawField(OffsetOfElementAt(0));
}

ObjectSlot FixedArray::RawFieldOfElementAt(int index) {
  return RawField(OffsetOfElementAt(index));
}

void FixedArray::MoveElements(Heap* heap, int dst_index, int src_index,
                              int len, WriteBarrierMode mode) {
  DisallowHeapAllocation no_gc;
  heap->MoveElements(*this, dst_index, src_index, len, mode);
}

void FixedArray::CopyElements(Heap* heap, int dst_index, FixedArray src,
                              int src_index, int len, WriteBarrierMode mode) {
  DisallowHeapAllocation no_gc;
  heap->CopyElements(*this, src, dst_index, src_index, len, mode);
}

// Perform a binary search in a fixed array.
template <SearchMode search_mode, typename T>
int BinarySearch(T* array, Name name, int valid_entries,
                 int* out_insertion_index) {
  DCHECK(search_mode == ALL_ENTRIES || out_insertion_index == nullptr);
  int low = 0;
  int high = array->number_of_entries() - 1;
  uint32_t hash = name->hash_field();
  int limit = high;

  DCHECK(low <= high);

  while (low != high) {
    int mid = low + (high - low) / 2;
    Name mid_name = array->GetSortedKey(mid);
    uint32_t mid_hash = mid_name->hash_field();

    if (mid_hash >= hash) {
      high = mid;
    } else {
      low = mid + 1;
    }
  }

  for (; low <= limit; ++low) {
    int sort_index = array->GetSortedKeyIndex(low);
    Name entry = array->GetKey(sort_index);
    uint32_t current_hash = entry->hash_field();
    if (current_hash != hash) {
      if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
        *out_insertion_index = sort_index + (current_hash > hash ? 0 : 1);
      }
      return T::kNotFound;
    }
    if (entry == name) {
      if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
        return sort_index;
      }
      return T::kNotFound;
    }
  }

  if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
    *out_insertion_index = limit + 1;
  }
  return T::kNotFound;
}
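// Worked example (illustrative only): with sorted-key hashes {3, 7, 7, 9} and
// a lookup hash of 7, the loop above converges on low == 1 (the first entry
// whose hash is >= 7), and the trailing scan then compares the name against
// entries 1 and 2, which share that hash, before giving up at the entry with
// hash 9.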

// Perform a linear search in this fixed array. valid_entries is the number
// of entries that are valid.
template <SearchMode search_mode, typename T>
int LinearSearch(T* array, Name name, int valid_entries,
                 int* out_insertion_index) {
  if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
    uint32_t hash = name->hash_field();
    int len = array->number_of_entries();
    for (int number = 0; number < len; number++) {
      int sorted_index = array->GetSortedKeyIndex(number);
      Name entry = array->GetKey(sorted_index);
      uint32_t current_hash = entry->hash_field();
      if (current_hash > hash) {
        *out_insertion_index = sorted_index;
        return T::kNotFound;
      }
      if (entry == name) return sorted_index;
    }
    *out_insertion_index = len;
    return T::kNotFound;
  } else {
    DCHECK_LE(valid_entries, array->number_of_entries());
    DCHECK_NULL(out_insertion_index);  // Not supported here.
    for (int number = 0; number < valid_entries; number++) {
      if (array->GetKey(number) == name) return number;
    }
    return T::kNotFound;
  }
}

template <SearchMode search_mode, typename T>
int Search(T* array, Name name, int valid_entries, int* out_insertion_index) {
  SLOW_DCHECK(array->IsSortedNoDuplicates());

  if (valid_entries == 0) {
    if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
      *out_insertion_index = 0;
    }
    return T::kNotFound;
  }

  // Fast case: do linear search for small arrays.
  const int kMaxElementsForLinearSearch = 8;
  if (valid_entries <= kMaxElementsForLinearSearch) {
    return LinearSearch<search_mode>(array, name, valid_entries,
                                     out_insertion_index);
  }

  // Slow case: perform binary search.
  return BinarySearch<search_mode>(array, name, valid_entries,
                                   out_insertion_index);
}
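// Illustrative note (not a specific call site): these templates only require
// the container type T to expose number_of_entries(), GetSortedKeyIndex(),
// GetSortedKey(), GetKey(), IsSortedNoDuplicates() and a T::kNotFound
// constant, so a lookup reads roughly as
//   int entry = Search<ALL_ENTRIES>(container, name, nof, nullptr);
// with kNotFound signalling a miss.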

double FixedDoubleArray::get_scalar(int index) {
  DCHECK(map() != GetReadOnlyRoots().fixed_cow_array_map() &&
         map() != GetReadOnlyRoots().fixed_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!is_the_hole(index));
  return READ_DOUBLE_FIELD(*this, kHeaderSize + index * kDoubleSize);
}

uint64_t FixedDoubleArray::get_representation(int index) {
  DCHECK(map() != GetReadOnlyRoots().fixed_cow_array_map() &&
         map() != GetReadOnlyRoots().fixed_array_map());
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kDoubleSize;
  return READ_UINT64_FIELD(*this, offset);
}

Handle<Object> FixedDoubleArray::get(FixedDoubleArray array, int index,
                                     Isolate* isolate) {
  if (array->is_the_hole(index)) {
    return ReadOnlyRoots(isolate).the_hole_value_handle();
  } else {
    return isolate->factory()->NewNumber(array->get_scalar(index));
  }
}

void FixedDoubleArray::set(int index, double value) {
  DCHECK(map() != GetReadOnlyRoots().fixed_cow_array_map() &&
         map() != GetReadOnlyRoots().fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
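  // Note (inferred from is_the_hole() below): the hole is encoded as a
  // specific NaN bit pattern (kHoleNanInt64), so arbitrary NaN payloads are
  // canonicalized to a quiet NaN here to keep stored doubles from ever
  // aliasing the hole, as the trailing DCHECK asserts.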
  if (std::isnan(value)) {
    WRITE_DOUBLE_FIELD(*this, offset, std::numeric_limits<double>::quiet_NaN());
  } else {
    WRITE_DOUBLE_FIELD(*this, offset, value);
  }
  DCHECK(!is_the_hole(index));
}

void FixedDoubleArray::set_the_hole(Isolate* isolate, int index) {
  set_the_hole(index);
}

void FixedDoubleArray::set_the_hole(int index) {
  DCHECK(map() != GetReadOnlyRoots().fixed_cow_array_map() &&
         map() != GetReadOnlyRoots().fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  WRITE_UINT64_FIELD(*this, offset, kHoleNanInt64);
}

bool FixedDoubleArray::is_the_hole(Isolate* isolate, int index) {
  return is_the_hole(index);
}

bool FixedDoubleArray::is_the_hole(int index) {
  return get_representation(index) == kHoleNanInt64;
}

void FixedDoubleArray::MoveElements(Heap* heap, int dst_index, int src_index,
                                    int len, WriteBarrierMode mode) {
  DCHECK_EQ(SKIP_WRITE_BARRIER, mode);
  double* data_start =
      reinterpret_cast<double*>(FIELD_ADDR(*this, kHeaderSize));
  MemMove(data_start + dst_index, data_start + src_index, len * kDoubleSize);
}

void FixedDoubleArray::FillWithHoles(int from, int to) {
  for (int i = from; i < to; i++) {
    set_the_hole(i);
  }
}

MaybeObject WeakFixedArray::Get(int index) const {
  DCHECK(index >= 0 && index < this->length());
  return RELAXED_READ_WEAK_FIELD(*this, OffsetOfElementAt(index));
}

void WeakFixedArray::Set(int index, MaybeObject value) {
  DCHECK_GE(index, 0);
  DCHECK_LT(index, length());
  int offset = OffsetOfElementAt(index);
  RELAXED_WRITE_WEAK_FIELD(*this, offset, value);
  WEAK_WRITE_BARRIER(*this, offset, value);
}

void WeakFixedArray::Set(int index, MaybeObject value, WriteBarrierMode mode) {
  DCHECK_GE(index, 0);
  DCHECK_LT(index, length());
  int offset = OffsetOfElementAt(index);
  RELAXED_WRITE_WEAK_FIELD(*this, offset, value);
  CONDITIONAL_WEAK_WRITE_BARRIER(*this, offset, value, mode);
}

MaybeObjectSlot WeakFixedArray::data_start() {
  return RawMaybeWeakField(kHeaderSize);
}

MaybeObjectSlot WeakFixedArray::RawFieldOfElementAt(int index) {
  return RawMaybeWeakField(OffsetOfElementAt(index));
}

MaybeObject WeakArrayList::Get(int index) const {
  DCHECK(index >= 0 && index < this->capacity());
  return RELAXED_READ_WEAK_FIELD(*this, OffsetOfElementAt(index));
}

void WeakArrayList::Set(int index, MaybeObject value, WriteBarrierMode mode) {
  DCHECK_GE(index, 0);
  DCHECK_LT(index, this->capacity());
  int offset = OffsetOfElementAt(index);
  RELAXED_WRITE_WEAK_FIELD(*this, offset, value);
  CONDITIONAL_WEAK_WRITE_BARRIER(*this, offset, value, mode);
}

MaybeObjectSlot WeakArrayList::data_start() {
  return RawMaybeWeakField(kHeaderSize);
}

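// Walks the list, skipping cleared weak slots; returns a null HeapObject once
// the end is reached and drops the array reference so that later calls return
// immediately.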
HeapObject WeakArrayList::Iterator::Next() {
  if (!array_.is_null()) {
    while (index_ < array_->length()) {
      MaybeObject item = array_->Get(index_++);
      DCHECK(item->IsWeakOrCleared());
      if (!item->IsCleared()) return item->GetHeapObjectAssumeWeak();
    }
    array_ = WeakArrayList();
  }
  return HeapObject();
}

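// ArrayList layout (as read by the accessors below): the slot at kLengthIndex
// holds the number of used entries as a Smi, and the payload starts at
// kFirstIndex; the backing FixedArray's length() therefore covers the header
// slot(s) plus the usable capacity.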
int ArrayList::Length() const {
  if (FixedArray::cast(*this)->length() == 0) return 0;
  return Smi::ToInt(FixedArray::cast(*this)->get(kLengthIndex));
}

void ArrayList::SetLength(int length) {
  return FixedArray::cast(*this)->set(kLengthIndex, Smi::FromInt(length));
}

Object ArrayList::Get(int index) const {
  return FixedArray::cast(*this)->get(kFirstIndex + index);
}

ObjectSlot ArrayList::Slot(int index) {
  return RawField(OffsetOfElementAt(kFirstIndex + index));
}

void ArrayList::Set(int index, Object obj, WriteBarrierMode mode) {
  FixedArray::cast(*this)->set(kFirstIndex + index, obj, mode);
}

void ArrayList::Clear(int index, Object undefined) {
  DCHECK(undefined->IsUndefined());
  FixedArray::cast(*this)->set(kFirstIndex + index, undefined,
                               SKIP_WRITE_BARRIER);
}

int ByteArray::Size() { return RoundUp(length() + kHeaderSize, kTaggedSize); }

byte ByteArray::get(int index) const {
  DCHECK(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(*this, kHeaderSize + index * kCharSize);
}

void ByteArray::set(int index, byte value) {
  DCHECK(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(*this, kHeaderSize + index * kCharSize, value);
}

void ByteArray::copy_in(int index, const byte* buffer, int length) {
  DCHECK(index >= 0 && length >= 0 && length <= kMaxInt - index &&
         index + length <= this->length());
  Address dst_addr = FIELD_ADDR(*this, kHeaderSize + index * kCharSize);
  memcpy(reinterpret_cast<void*>(dst_addr), buffer, length);
}

void ByteArray::copy_out(int index, byte* buffer, int length) {
  DCHECK(index >= 0 && length >= 0 && length <= kMaxInt - index &&
         index + length <= this->length());
  Address src_addr = FIELD_ADDR(*this, kHeaderSize + index * kCharSize);
  memcpy(buffer, reinterpret_cast<void*>(src_addr), length);
}

int ByteArray::get_int(int index) const {
  DCHECK(index >= 0 && index < this->length() / kIntSize);
  return READ_INT_FIELD(*this, kHeaderSize + index * kIntSize);
}

void ByteArray::set_int(int index, int value) {
  DCHECK(index >= 0 && index < this->length() / kIntSize);
  WRITE_INT_FIELD(*this, kHeaderSize + index * kIntSize, value);
}

uint32_t ByteArray::get_uint32(int index) const {
  DCHECK(index >= 0 && index < this->length() / kUInt32Size);
  return READ_UINT32_FIELD(*this, kHeaderSize + index * kUInt32Size);
}

void ByteArray::set_uint32(int index, uint32_t value) {
  DCHECK(index >= 0 && index < this->length() / kUInt32Size);
  WRITE_UINT32_FIELD(*this, kHeaderSize + index * kUInt32Size, value);
}

void ByteArray::clear_padding() {
  int data_size = length() + kHeaderSize;
  memset(reinterpret_cast<void*>(address() + data_size), 0, Size() - data_size);
}
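// Size() rounds the object size up to kTaggedSize, so the bytes between the
// last valid data byte and Size() are padding that regular stores never touch;
// zeroing them keeps the padding deterministic (an assumption: this is useful
// for heap verification and snapshot reproducibility, which this header does
// not itself state).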

ByteArray ByteArray::FromDataStartAddress(Address address) {
  DCHECK_TAG_ALIGNED(address);
  return ByteArray::cast(Object(address - kHeaderSize + kHeapObjectTag));
}

int ByteArray::DataSize() const { return RoundUp(length(), kTaggedSize); }

int ByteArray::ByteArraySize() { return SizeFor(this->length()); }

byte* ByteArray::GetDataStartAddress() {
  return reinterpret_cast<byte*>(address() + kHeaderSize);
}

byte* ByteArray::GetDataEndAddress() {
  return GetDataStartAddress() + length();
}

template <class T>
PodArray<T>::PodArray(Address ptr) : ByteArray(ptr) {}

template <class T>
PodArray<T> PodArray<T>::cast(Object object) {
  return PodArray<T>(object.ptr());
}

// static
template <class T>
Handle<PodArray<T>> PodArray<T>::New(Isolate* isolate, int length,
                                     AllocationType allocation) {
  return Handle<PodArray<T>>::cast(
      isolate->factory()->NewByteArray(length * sizeof(T), allocation));
}

template <class T>
int PodArray<T>::length() const {
  return ByteArray::length() / sizeof(T);
}

void* FixedTypedArrayBase::external_pointer() const {
  intptr_t ptr = READ_INTPTR_FIELD(*this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);
}

void FixedTypedArrayBase::set_external_pointer(void* value) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(*this, kExternalPointerOffset, ptr);
}

void* FixedTypedArrayBase::DataPtr() {
  return reinterpret_cast<void*>(
      base_pointer()->ptr() + reinterpret_cast<intptr_t>(external_pointer()));
}
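// Reading DataPtr() together with DataSize() below: when the data lives
// off-heap, base_pointer() is Smi::kZero and external_pointer() holds the
// absolute address of the backing store; for on-heap data the two combine
// into an interior pointer into this object. This split is inferred from the
// arithmetic above rather than spelled out in this header.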

int FixedTypedArrayBase::ElementSize(InstanceType type) {
  int element_size;
  switch (type) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
  case FIXED_##TYPE##_ARRAY_TYPE:                 \
    element_size = sizeof(ctype);                 \
    break;

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
    default:
      UNREACHABLE();
  }
  return element_size;
}

int FixedTypedArrayBase::DataSize(InstanceType type) const {
  if (base_pointer() == Smi::kZero) return 0;
  return length() * ElementSize(type);
}

int FixedTypedArrayBase::DataSize() const {
  return DataSize(map()->instance_type());
}

size_t FixedTypedArrayBase::ByteLength() const {
  return static_cast<size_t>(length()) *
         static_cast<size_t>(ElementSize(map()->instance_type()));
}

int FixedTypedArrayBase::size() const {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
}

int FixedTypedArrayBase::TypedArraySize(InstanceType type) const {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
}

// static
int FixedTypedArrayBase::TypedArraySize(InstanceType type, int length) {
  return OBJECT_POINTER_ALIGN(kDataOffset + length * ElementSize(type));
}

uint8_t Uint8ArrayTraits::defaultValue() { return 0; }

uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }

int8_t Int8ArrayTraits::defaultValue() { return 0; }

uint16_t Uint16ArrayTraits::defaultValue() { return 0; }

int16_t Int16ArrayTraits::defaultValue() { return 0; }

uint32_t Uint32ArrayTraits::defaultValue() { return 0; }

int32_t Int32ArrayTraits::defaultValue() { return 0; }

float Float32ArrayTraits::defaultValue() {
  return std::numeric_limits<float>::quiet_NaN();
}

double Float64ArrayTraits::defaultValue() {
  return std::numeric_limits<double>::quiet_NaN();
}

template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
  // TODO(bmeurer, v8:4153): Solve this differently.
  // DCHECK((index < this->length()));
  CHECK_GE(index, 0);
  return FixedTypedArray<Traits>::get_scalar_from_data_ptr(DataPtr(), index);
}

// static
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::get_scalar_from_data_ptr(
    void* data_ptr, int index) {
  typename Traits::ElementType* ptr = reinterpret_cast<ElementType*>(data_ptr);
  // The JavaScript memory model allows for racy reads and writes to a
  // SharedArrayBuffer's backing store, which will always be a FixedTypedArray.
  // ThreadSanitizer will catch these racy accesses and warn about them, so we
  // disable TSAN for these reads and writes using annotations.
  //
  // We don't use relaxed atomics here, as it is not a requirement of the
  // JavaScript memory model to have tear-free reads of overlapping accesses,
  // and using relaxed atomics may introduce overhead.
  TSAN_ANNOTATE_IGNORE_READS_BEGIN;
  ElementType result;
  if (COMPRESS_POINTERS_BOOL && alignof(ElementType) > kTaggedSize) {
    // TODO(ishell, v8:8875): When pointer compression is enabled 8-byte size
    // fields (external pointers, doubles and BigInt data) are only kTaggedSize
    // aligned so we have to use unaligned pointer friendly way of accessing
    // them in order to avoid undefined behavior in C++ code.
    result = ReadUnalignedValue<ElementType>(reinterpret_cast<Address>(ptr) +
                                             index * sizeof(ElementType));
  } else {
    result = ptr[index];
  }
  TSAN_ANNOTATE_IGNORE_READS_END;
  return result;
}

template <class Traits>
void FixedTypedArray<Traits>::set(int index, ElementType value) {
  // TODO(bmeurer, v8:4153): Solve this differently.
  // CHECK((index < this->length()));
  CHECK_GE(index, 0);
  // See the memory-model comment in
  // FixedTypedArray<Traits>::get_scalar_from_data_ptr above.
  auto* ptr = reinterpret_cast<ElementType*>(DataPtr());
  TSAN_ANNOTATE_IGNORE_WRITES_BEGIN;
  if (COMPRESS_POINTERS_BOOL && alignof(ElementType) > kTaggedSize) {
    // TODO(ishell, v8:8875): When pointer compression is enabled 8-byte size
    // fields (external pointers, doubles and BigInt data) are only kTaggedSize
    // aligned so we have to use unaligned pointer friendly way of accessing
    // them in order to avoid undefined behavior in C++ code.
    WriteUnalignedValue<ElementType>(
        reinterpret_cast<Address>(ptr) + index * sizeof(ElementType), value);
  } else {
    ptr[index] = value;
  }
  TSAN_ANNOTATE_IGNORE_WRITES_END;
}

template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from(int value) {
  return static_cast<ElementType>(value);
}

template <>
inline uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from(int value) {
  if (value < 0) return 0;
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(value);
}

template <>
inline int64_t FixedTypedArray<BigInt64ArrayTraits>::from(int value) {
  UNREACHABLE();
}

template <>
inline uint64_t FixedTypedArray<BigUint64ArrayTraits>::from(int value) {
  UNREACHABLE();
}

template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from(uint32_t value) {
  return static_cast<ElementType>(value);
}

template <>
inline uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from(uint32_t value) {
  // We need this special case for Uint32 -> Uint8Clamped, because the highest
  // Uint32 values will be negative as an int, clamping to 0, rather than 255.
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(value);
}
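// Example of the distinction: 4000000000u stays on this unsigned path and
// clamps to 0xFF, whereas the same bit pattern interpreted as an int would be
// negative and the int overload above would clamp it to 0.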

template <>
inline int64_t FixedTypedArray<BigInt64ArrayTraits>::from(uint32_t value) {
  UNREACHABLE();
}

template <>
inline uint64_t FixedTypedArray<BigUint64ArrayTraits>::from(uint32_t value) {
  UNREACHABLE();
}

template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from(double value) {
  return static_cast<ElementType>(DoubleToInt32(value));
}

template <>
inline uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from(double value) {
  // Handle NaNs and less than zero values which clamp to zero.
  if (!(value > 0)) return 0;
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(lrint(value));
}
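// lrint() rounds using the current floating-point rounding mode, which is
// round-to-nearest-even by default, so (assuming the default FP environment)
// from(2.5) yields 2 and from(3.5) yields 4, i.e. ties go to even.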

template <>
inline int64_t FixedTypedArray<BigInt64ArrayTraits>::from(double value) {
  UNREACHABLE();
}

template <>
inline uint64_t FixedTypedArray<BigUint64ArrayTraits>::from(double value) {
  UNREACHABLE();
}

template <>
inline float FixedTypedArray<Float32ArrayTraits>::from(double value) {
  using limits = std::numeric_limits<float>;
  if (value > limits::max()) return limits::infinity();
  if (value < limits::lowest()) return -limits::infinity();
  return static_cast<float>(value);
}

template <>
inline double FixedTypedArray<Float64ArrayTraits>::from(double value) {
  return value;
}

template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from(int64_t value) {
  UNREACHABLE();
}

template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from(uint64_t value) {
  UNREACHABLE();
}

template <>
inline int64_t FixedTypedArray<BigInt64ArrayTraits>::from(int64_t value) {
  return value;
}

template <>
inline uint64_t FixedTypedArray<BigUint64ArrayTraits>::from(uint64_t value) {
  return value;
}

template <>
inline uint64_t FixedTypedArray<BigUint64ArrayTraits>::from(int64_t value) {
  return static_cast<uint64_t>(value);
}

template <>
inline int64_t FixedTypedArray<BigInt64ArrayTraits>::from(uint64_t value) {
  return static_cast<int64_t>(value);
}

template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::FromHandle(
    Handle<Object> value, bool* lossless) {
  if (value->IsSmi()) {
    return from(Smi::ToInt(*value));
  }
  DCHECK(value->IsHeapNumber());
  return from(HeapNumber::cast(*value)->value());
}

template <>
inline int64_t FixedTypedArray<BigInt64ArrayTraits>::FromHandle(
    Handle<Object> value, bool* lossless) {
  DCHECK(value->IsBigInt());
  return BigInt::cast(*value)->AsInt64(lossless);
}

template <>
inline uint64_t FixedTypedArray<BigUint64ArrayTraits>::FromHandle(
    Handle<Object> value, bool* lossless) {
  DCHECK(value->IsBigInt());
  return BigInt::cast(*value)->AsUint64(lossless);
}

template <class Traits>
Handle<Object> FixedTypedArray<Traits>::get(Isolate* isolate,
                                            FixedTypedArray<Traits> array,
                                            int index) {
  return Traits::ToHandle(isolate, array->get_scalar(index));
}

template <class Traits>
void FixedTypedArray<Traits>::SetValue(uint32_t index, Object value) {
  ElementType cast_value = Traits::defaultValue();
  if (value->IsSmi()) {
    int int_value = Smi::ToInt(value);
    cast_value = from(int_value);
  } else if (value->IsHeapNumber()) {
    double double_value = HeapNumber::cast(value)->value();
    cast_value = from(double_value);
  } else {
    // Clamp undefined to the default value. All other types have been
    // converted to a number type further up in the call chain.
    DCHECK(value->IsUndefined());
  }
  set(index, cast_value);
}

template <>
inline void FixedTypedArray<BigInt64ArrayTraits>::SetValue(uint32_t index,
                                                           Object value) {
  DCHECK(value->IsBigInt());
  set(index, BigInt::cast(value)->AsInt64());
}

template <>
inline void FixedTypedArray<BigUint64ArrayTraits>::SetValue(uint32_t index,
                                                            Object value) {
  DCHECK(value->IsBigInt());
  set(index, BigInt::cast(value)->AsUint64());
}

Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}

Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
                                                 uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}

Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}

Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}

Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}

Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
  return isolate->factory()->NewNumberFromUint(scalar);
}

Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
  return isolate->factory()->NewNumberFromInt(scalar);
}

Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
  return isolate->factory()->NewNumber(scalar);
}

Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
  return isolate->factory()->NewNumber(scalar);
}

Handle<Object> BigInt64ArrayTraits::ToHandle(Isolate* isolate, int64_t scalar) {
  return BigInt::FromInt64(isolate, scalar);
}

Handle<Object> BigUint64ArrayTraits::ToHandle(Isolate* isolate,
                                              uint64_t scalar) {
  return BigInt::FromUint64(isolate, scalar);
}

// static
template <class Traits>
STATIC_CONST_MEMBER_DEFINITION const InstanceType
    FixedTypedArray<Traits>::kInstanceType;

template <class Traits>
FixedTypedArray<Traits>::FixedTypedArray(Address ptr)
    : FixedTypedArrayBase(ptr) {
  DCHECK(IsHeapObject() && map()->instance_type() == Traits::kInstanceType);
}

template <class Traits>
FixedTypedArray<Traits> FixedTypedArray<Traits>::cast(Object object) {
  return FixedTypedArray<Traits>(object.ptr());
}

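// TemplateList mirrors the ArrayList pattern above: the backing FixedArray
// keeps its used length as a Smi at kLengthIndex and stores elements from
// kFirstElementIndex onwards.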
int TemplateList::length() const {
  return Smi::ToInt(FixedArray::cast(*this)->get(kLengthIndex));
}

Object TemplateList::get(int index) const {
  return FixedArray::cast(*this)->get(kFirstElementIndex + index);
}

void TemplateList::set(int index, Object value) {
  FixedArray::cast(*this)->set(kFirstElementIndex + index, value);
}

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_FIXED_ARRAY_INL_H_