1 | // Copyright 2012 the V8 project authors. All rights reserved. |
2 | // Use of this source code is governed by a BSD-style license that can be |
3 | // found in the LICENSE file. |
4 | |
5 | #include "src/elements.h" |
6 | |
7 | #include "src/arguments.h" |
8 | #include "src/conversions.h" |
9 | #include "src/frames.h" |
10 | #include "src/heap/factory.h" |
11 | #include "src/heap/heap-inl.h" // For MaxNumberToStringCacheSize. |
12 | #include "src/heap/heap-write-barrier-inl.h" |
13 | #include "src/isolate-inl.h" |
14 | #include "src/keys.h" |
15 | #include "src/message-template.h" |
16 | #include "src/objects-inl.h" |
17 | #include "src/objects/arguments-inl.h" |
18 | #include "src/objects/hash-table-inl.h" |
19 | #include "src/objects/js-array-buffer-inl.h" |
20 | #include "src/objects/js-array-inl.h" |
21 | #include "src/objects/slots-atomic-inl.h" |
22 | #include "src/objects/slots.h" |
23 | #include "src/utils.h" |
24 | |
25 | // Each concrete ElementsAccessor can handle exactly one ElementsKind, |
26 | // several abstract ElementsAccessor classes are used to allow sharing |
27 | // common code. |
28 | // |
29 | // Inheritance hierarchy: |
30 | // - ElementsAccessorBase (abstract) |
31 | // - FastElementsAccessor (abstract) |
32 | // - FastSmiOrObjectElementsAccessor |
33 | // - FastPackedSmiElementsAccessor |
34 | // - FastHoleySmiElementsAccessor |
35 | // - FastPackedObjectElementsAccessor |
36 | // - FastPackedFrozenObjectElementsAccessor |
37 | // - FastPackedSealedObjectElementsAccessor |
38 | // - FastHoleyObjectElementsAccessor |
39 | // - FastDoubleElementsAccessor |
40 | // - FastPackedDoubleElementsAccessor |
41 | // - FastHoleyDoubleElementsAccessor |
42 | // - TypedElementsAccessor: template, with instantiations: |
43 | // - FixedUint8ElementsAccessor |
44 | // - FixedInt8ElementsAccessor |
45 | // - FixedUint16ElementsAccessor |
46 | // - FixedInt16ElementsAccessor |
47 | // - FixedUint32ElementsAccessor |
48 | // - FixedInt32ElementsAccessor |
49 | // - FixedFloat32ElementsAccessor |
50 | // - FixedFloat64ElementsAccessor |
51 | // - FixedUint8ClampedElementsAccessor |
52 | // - FixedBigUint64ElementsAccessor |
53 | // - FixedBigInt64ElementsAccessor |
54 | // - DictionaryElementsAccessor |
55 | // - SloppyArgumentsElementsAccessor |
56 | // - FastSloppyArgumentsElementsAccessor |
57 | // - SlowSloppyArgumentsElementsAccessor |
58 | // - StringWrapperElementsAccessor |
59 | // - FastStringWrapperElementsAccessor |
60 | // - SlowStringWrapperElementsAccessor |
61 | |
62 | namespace v8 { |
63 | namespace internal { |
64 | |
65 | |
66 | namespace { |
67 | |
68 | |
// Sentinel passed to the element-copy helpers when the number of packed
// (hole-free) leading elements is not known.
static const int kPackedSizeNotKnown = -1;

// Distinguishes whether an operation applies at the start or at the end of a
// backing store.
enum Where { AT_START, AT_END };
72 | |
73 | |
// Registry of all concrete elements accessors. First argument in list is the
// accessor class, the second argument is the accessor ElementsKind, and the
// third is the backing store class. Use the fast element handler for
// smi-only arrays; the implementation is currently identical. Note that the
// order must match that of the ElementsKind enum for the |accessor_array[]|
// below to work.
#define ELEMENTS_LIST(V)                                                      \
  V(FastPackedSmiElementsAccessor, PACKED_SMI_ELEMENTS, FixedArray)           \
  V(FastHoleySmiElementsAccessor, HOLEY_SMI_ELEMENTS, FixedArray)             \
  V(FastPackedObjectElementsAccessor, PACKED_ELEMENTS, FixedArray)            \
  V(FastHoleyObjectElementsAccessor, HOLEY_ELEMENTS, FixedArray)              \
  V(FastPackedDoubleElementsAccessor, PACKED_DOUBLE_ELEMENTS,                 \
    FixedDoubleArray)                                                         \
  V(FastHoleyDoubleElementsAccessor, HOLEY_DOUBLE_ELEMENTS, FixedDoubleArray) \
  V(FastPackedSealedObjectElementsAccessor, PACKED_SEALED_ELEMENTS,           \
    FixedArray)                                                               \
  V(FastPackedFrozenObjectElementsAccessor, PACKED_FROZEN_ELEMENTS,           \
    FixedArray)                                                               \
  V(DictionaryElementsAccessor, DICTIONARY_ELEMENTS, NumberDictionary)        \
  V(FastSloppyArgumentsElementsAccessor, FAST_SLOPPY_ARGUMENTS_ELEMENTS,      \
    FixedArray)                                                               \
  V(SlowSloppyArgumentsElementsAccessor, SLOW_SLOPPY_ARGUMENTS_ELEMENTS,      \
    FixedArray)                                                               \
  V(FastStringWrapperElementsAccessor, FAST_STRING_WRAPPER_ELEMENTS,          \
    FixedArray)                                                               \
  V(SlowStringWrapperElementsAccessor, SLOW_STRING_WRAPPER_ELEMENTS,          \
    FixedArray)                                                               \
  V(FixedUint8ElementsAccessor, UINT8_ELEMENTS, FixedUint8Array)              \
  V(FixedInt8ElementsAccessor, INT8_ELEMENTS, FixedInt8Array)                 \
  V(FixedUint16ElementsAccessor, UINT16_ELEMENTS, FixedUint16Array)           \
  V(FixedInt16ElementsAccessor, INT16_ELEMENTS, FixedInt16Array)              \
  V(FixedUint32ElementsAccessor, UINT32_ELEMENTS, FixedUint32Array)           \
  V(FixedInt32ElementsAccessor, INT32_ELEMENTS, FixedInt32Array)              \
  V(FixedFloat32ElementsAccessor, FLOAT32_ELEMENTS, FixedFloat32Array)        \
  V(FixedFloat64ElementsAccessor, FLOAT64_ELEMENTS, FixedFloat64Array)        \
  V(FixedUint8ClampedElementsAccessor, UINT8_CLAMPED_ELEMENTS,                \
    FixedUint8ClampedArray)                                                   \
  V(FixedBigUint64ElementsAccessor, BIGUINT64_ELEMENTS, FixedBigUint64Array)  \
  V(FixedBigInt64ElementsAccessor, BIGINT64_ELEMENTS, FixedBigInt64Array)
112 | |
113 | template<ElementsKind Kind> class ElementsKindTraits { |
114 | public: |
115 | typedef FixedArrayBase BackingStore; |
116 | }; |
117 | |
// Generates a full specialization of ElementsKindTraits for every entry in
// ELEMENTS_LIST, binding the kind constant and its concrete backing store.
// The trailing out-of-class definition of |Kind| provides a definition for
// odr-uses of the constant.
#define ELEMENTS_TRAITS(Class, KindParam, Store)    \
  template <>                                       \
  class ElementsKindTraits<KindParam> {             \
   public: /* NOLINT */                             \
    static constexpr ElementsKind Kind = KindParam; \
    typedef Store BackingStore;                     \
  };                                                \
  constexpr ElementsKind ElementsKindTraits<KindParam>::Kind;
ELEMENTS_LIST(ELEMENTS_TRAITS)
#undef ELEMENTS_TRAITS
128 | |
// Schedules a RangeError ("invalid array length") on the isolate and returns
// an empty MaybeHandle so callers can propagate the pending exception.
V8_WARN_UNUSED_RESULT
MaybeHandle<Object> ThrowArrayLengthRangeError(Isolate* isolate) {
  THROW_NEW_ERROR(isolate, NewRangeError(MessageTemplate::kInvalidArrayLength),
                  Object);
}
134 | |
135 | WriteBarrierMode GetWriteBarrierMode(ElementsKind kind) { |
136 | if (IsSmiElementsKind(kind)) return SKIP_WRITE_BARRIER; |
137 | if (IsDoubleElementsKind(kind)) return SKIP_WRITE_BARRIER; |
138 | return UPDATE_WRITE_BARRIER; |
139 | } |
140 | |
// Copies tagged elements between two FixedArray backing stores with
// Smi-or-object elements kinds. A negative |raw_copy_size| must be one of
// the ElementsAccessor::kCopyToEnd* markers and means "copy up to the end";
// kCopyToEndAndInitializeToHole additionally fills the destination's tail
// with the hole.
void CopyObjectToObjectElements(Isolate* isolate, FixedArrayBase from_base,
                                ElementsKind from_kind, uint32_t from_start,
                                FixedArrayBase to_base, ElementsKind to_kind,
                                uint32_t to_start, int raw_copy_size) {
  ReadOnlyRoots roots(isolate);
  // Copy-on-write arrays must never be written into directly.
  DCHECK(to_base->map() != roots.fixed_cow_array_map());
  DisallowHeapAllocation no_allocation;
  int copy_size = raw_copy_size;
  if (raw_copy_size < 0) {
    DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd ||
           raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
    copy_size = Min(from_base->length() - from_start,
                    to_base->length() - to_start);
    if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
      // Fill the destination past the copied region with holes.
      int start = to_start + copy_size;
      int length = to_base->length() - start;
      if (length > 0) {
        MemsetTagged(FixedArray::cast(to_base)->RawFieldOfElementAt(start),
                     roots.the_hole_value(), length);
      }
    }
  }
  DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() &&
         (copy_size + static_cast<int>(from_start)) <= from_base->length());
  if (copy_size == 0) return;
  FixedArray from = FixedArray::cast(from_base);
  FixedArray to = FixedArray::cast(to_base);
  DCHECK(IsSmiOrObjectElementsKind(from_kind));
  DCHECK(IsSmiOrObjectElementsKind(to_kind));

  // Only an object-to-object copy can move heap pointers; if either side is
  // a Smi kind, all copied values are Smis and the barrier can be skipped.
  WriteBarrierMode write_barrier_mode =
      (IsObjectElementsKind(from_kind) && IsObjectElementsKind(to_kind))
          ? UPDATE_WRITE_BARRIER
          : SKIP_WRITE_BARRIER;
  to->CopyElements(isolate->heap(), to_start, from, from_start, copy_size,
                   write_barrier_mode);
}
178 | |
// Copies elements out of a NumberDictionary backing store into a FixedArray
// with a Smi-or-object elements kind. Indices missing from the dictionary
// become holes in the destination.
static void CopyDictionaryToObjectElements(
    Isolate* isolate, FixedArrayBase from_base, uint32_t from_start,
    FixedArrayBase to_base, ElementsKind to_kind, uint32_t to_start,
    int raw_copy_size) {
  DisallowHeapAllocation no_allocation;
  NumberDictionary from = NumberDictionary::cast(from_base);
  int copy_size = raw_copy_size;
  if (raw_copy_size < 0) {
    DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd ||
           raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
    // Copy up to (and including) the largest key present in the dictionary.
    copy_size = from->max_number_key() + 1 - from_start;
    if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
      int start = to_start + copy_size;
      int length = to_base->length() - start;
      if (length > 0) {
        MemsetTagged(FixedArray::cast(to_base)->RawFieldOfElementAt(start),
                     ReadOnlyRoots(isolate).the_hole_value(), length);
      }
    }
  }
  DCHECK(to_base != from_base);
  DCHECK(IsSmiOrObjectElementsKind(to_kind));
  if (copy_size == 0) return;
  FixedArray to = FixedArray::cast(to_base);
  uint32_t to_length = to->length();
  // Clamp the copy to the destination's capacity.
  if (to_start + copy_size > to_length) {
    copy_size = to_length - to_start;
  }
  WriteBarrierMode write_barrier_mode = GetWriteBarrierMode(to_kind);
  for (int i = 0; i < copy_size; i++) {
    int entry = from->FindEntry(isolate, i + from_start);
    if (entry != NumberDictionary::kNotFound) {
      Object value = from->ValueAt(entry);
      DCHECK(!value->IsTheHole(isolate));
      to->set(i + to_start, value, write_barrier_mode);
    } else {
      to->set_the_hole(isolate, i + to_start);
    }
  }
}
219 | |
// Copies unboxed doubles from a FixedDoubleArray into a FixedArray of tagged
// values. Boxing a double may allocate a HeapNumber, so after the initial
// non-allocating setup the raw arguments are re-wrapped in handles.
//
// NOTE: this method violates the handlified function signature convention:
// raw pointer parameters in the function that allocates.
// See ElementsAccessorBase::CopyElements() for details.
static void CopyDoubleToObjectElements(Isolate* isolate,
                                       FixedArrayBase from_base,
                                       uint32_t from_start,
                                       FixedArrayBase to_base,
                                       uint32_t to_start, int raw_copy_size) {
  int copy_size = raw_copy_size;
  if (raw_copy_size < 0) {
    DisallowHeapAllocation no_allocation;
    DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd ||
           raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
    copy_size = Min(from_base->length() - from_start,
                    to_base->length() - to_start);
    if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
      // Also initialize the area that will be copied over since HeapNumber
      // allocation below can cause an incremental marking step, requiring all
      // existing heap objects to be properly initialized.
      int start = to_start;
      int length = to_base->length() - start;
      if (length > 0) {
        MemsetTagged(FixedArray::cast(to_base)->RawFieldOfElementAt(start),
                     ReadOnlyRoots(isolate).the_hole_value(), length);
      }
    }
  }

  DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() &&
         (copy_size + static_cast<int>(from_start)) <= from_base->length());
  if (copy_size == 0) return;

  // From here on, the code below could actually allocate. Therefore the raw
  // values are wrapped into handles.
  Handle<FixedDoubleArray> from(FixedDoubleArray::cast(from_base), isolate);
  Handle<FixedArray> to(FixedArray::cast(to_base), isolate);

  // Use an outer loop to not waste too much time on creating HandleScopes.
  // On the other hand we might overflow a single handle scope depending on
  // the copy_size.
  int offset = 0;
  while (offset < copy_size) {
    HandleScope scope(isolate);
    offset += 100;
    for (int i = offset - 100; i < offset && i < copy_size; ++i) {
      Handle<Object> value =
          FixedDoubleArray::get(*from, i + from_start, isolate);
      to->set(i + to_start, *value, UPDATE_WRITE_BARRIER);
    }
  }
}
271 | |
// Copies raw double values between two FixedDoubleArray backing stores with
// a bulk word/tagged-slot copy; no boxing or allocation occurs.
static void CopyDoubleToDoubleElements(FixedArrayBase from_base,
                                       uint32_t from_start,
                                       FixedArrayBase to_base,
                                       uint32_t to_start, int raw_copy_size) {
  DisallowHeapAllocation no_allocation;
  int copy_size = raw_copy_size;
  if (raw_copy_size < 0) {
    DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd ||
           raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
    copy_size = Min(from_base->length() - from_start,
                    to_base->length() - to_start);
    if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
      // Fill the destination past the copied region with holes.
      for (int i = to_start + copy_size; i < to_base->length(); ++i) {
        FixedDoubleArray::cast(to_base)->set_the_hole(i);
      }
    }
  }
  DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() &&
         (copy_size + static_cast<int>(from_start)) <= from_base->length());
  if (copy_size == 0) return;
  FixedDoubleArray from = FixedDoubleArray::cast(from_base);
  FixedDoubleArray to = FixedDoubleArray::cast(to_base);
  Address to_address = to->address() + FixedDoubleArray::kHeaderSize;
  Address from_address = from->address() + FixedDoubleArray::kHeaderSize;
  to_address += kDoubleSize * to_start;
  from_address += kDoubleSize * from_start;
#ifdef V8_COMPRESS_POINTERS
  // TODO(ishell, v8:8875): we use CopyTagged() in order to avoid unaligned
  // access to double values in the arrays. This will no longer be necessary
  // once the allocations alignment issue is fixed.
  int words_per_double = (kDoubleSize / kTaggedSize);
  CopyTagged(to_address, from_address,
             static_cast<size_t>(words_per_double * copy_size));
#else
  int words_per_double = (kDoubleSize / kSystemPointerSize);
  CopyWords(to_address, from_address,
            static_cast<size_t>(words_per_double * copy_size));
#endif
}
311 | |
// Copies Smi (or hole) elements from a FixedArray into a FixedDoubleArray,
// unboxing each Smi into its double representation. Holes in the source are
// preserved as holes in the destination.
static void CopySmiToDoubleElements(FixedArrayBase from_base,
                                    uint32_t from_start, FixedArrayBase to_base,
                                    uint32_t to_start, int raw_copy_size) {
  DisallowHeapAllocation no_allocation;
  int copy_size = raw_copy_size;
  if (raw_copy_size < 0) {
    DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd ||
           raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
    copy_size = from_base->length() - from_start;
    if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
      // Fill the destination past the copied region with holes.
      for (int i = to_start + copy_size; i < to_base->length(); ++i) {
        FixedDoubleArray::cast(to_base)->set_the_hole(i);
      }
    }
  }
  DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() &&
         (copy_size + static_cast<int>(from_start)) <= from_base->length());
  if (copy_size == 0) return;
  FixedArray from = FixedArray::cast(from_base);
  FixedDoubleArray to = FixedDoubleArray::cast(to_base);
  Object the_hole = from->GetReadOnlyRoots().the_hole_value();
  for (uint32_t from_end = from_start + static_cast<uint32_t>(copy_size);
       from_start < from_end; from_start++, to_start++) {
    Object hole_or_smi = from->get(from_start);
    if (hole_or_smi == the_hole) {
      to->set_the_hole(to_start);
    } else {
      to->set(to_start, Smi::ToInt(hole_or_smi));
    }
  }
}
343 | |
// Like CopySmiToDoubleElements, but for sources known to be packed (no
// holes) up to |packed_size|: the copy loop can therefore unbox
// unconditionally (the DCHECK below asserts no hole is encountered).
static void CopyPackedSmiToDoubleElements(FixedArrayBase from_base,
                                          uint32_t from_start,
                                          FixedArrayBase to_base,
                                          uint32_t to_start, int packed_size,
                                          int raw_copy_size) {
  DisallowHeapAllocation no_allocation;
  int copy_size = raw_copy_size;
  uint32_t to_end;
  if (raw_copy_size < 0) {
    DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd ||
           raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
    copy_size = packed_size - from_start;
    if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
      // Fill the destination up to its full length with holes.
      to_end = to_base->length();
      for (uint32_t i = to_start + copy_size; i < to_end; ++i) {
        FixedDoubleArray::cast(to_base)->set_the_hole(i);
      }
    } else {
      to_end = to_start + static_cast<uint32_t>(copy_size);
    }
  } else {
    to_end = to_start + static_cast<uint32_t>(copy_size);
  }
  DCHECK(static_cast<int>(to_end) <= to_base->length());
  DCHECK(packed_size >= 0 && packed_size <= copy_size);
  DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() &&
         (copy_size + static_cast<int>(from_start)) <= from_base->length());
  if (copy_size == 0) return;
  FixedArray from = FixedArray::cast(from_base);
  FixedDoubleArray to = FixedDoubleArray::cast(to_base);
  for (uint32_t from_end = from_start + static_cast<uint32_t>(packed_size);
       from_start < from_end; from_start++, to_start++) {
    Object smi = from->get(from_start);
    DCHECK(!smi->IsTheHole());
    to->set(to_start, Smi::ToInt(smi));
  }
}
381 | |
382 | static void CopyObjectToDoubleElements(FixedArrayBase from_base, |
383 | uint32_t from_start, |
384 | FixedArrayBase to_base, |
385 | uint32_t to_start, int raw_copy_size) { |
386 | DisallowHeapAllocation no_allocation; |
387 | int copy_size = raw_copy_size; |
388 | if (raw_copy_size < 0) { |
389 | DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd || |
390 | raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); |
391 | copy_size = from_base->length() - from_start; |
392 | if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { |
393 | for (int i = to_start + copy_size; i < to_base->length(); ++i) { |
394 | FixedDoubleArray::cast(to_base)->set_the_hole(i); |
395 | } |
396 | } |
397 | } |
398 | DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() && |
399 | (copy_size + static_cast<int>(from_start)) <= from_base->length()); |
400 | if (copy_size == 0) return; |
401 | FixedArray from = FixedArray::cast(from_base); |
402 | FixedDoubleArray to = FixedDoubleArray::cast(to_base); |
403 | Object the_hole = from->GetReadOnlyRoots().the_hole_value(); |
404 | for (uint32_t from_end = from_start + copy_size; |
405 | from_start < from_end; from_start++, to_start++) { |
406 | Object hole_or_object = from->get(from_start); |
407 | if (hole_or_object == the_hole) { |
408 | to->set_the_hole(to_start); |
409 | } else { |
410 | to->set(to_start, hole_or_object->Number()); |
411 | } |
412 | } |
413 | } |
414 | |
415 | static void CopyDictionaryToDoubleElements( |
416 | Isolate* isolate, FixedArrayBase from_base, uint32_t from_start, |
417 | FixedArrayBase to_base, uint32_t to_start, int raw_copy_size) { |
418 | DisallowHeapAllocation no_allocation; |
419 | NumberDictionary from = NumberDictionary::cast(from_base); |
420 | int copy_size = raw_copy_size; |
421 | if (copy_size < 0) { |
422 | DCHECK(copy_size == ElementsAccessor::kCopyToEnd || |
423 | copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); |
424 | copy_size = from->max_number_key() + 1 - from_start; |
425 | if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { |
426 | for (int i = to_start + copy_size; i < to_base->length(); ++i) { |
427 | FixedDoubleArray::cast(to_base)->set_the_hole(i); |
428 | } |
429 | } |
430 | } |
431 | if (copy_size == 0) return; |
432 | FixedDoubleArray to = FixedDoubleArray::cast(to_base); |
433 | uint32_t to_length = to->length(); |
434 | if (to_start + copy_size > to_length) { |
435 | copy_size = to_length - to_start; |
436 | } |
437 | for (int i = 0; i < copy_size; i++) { |
438 | int entry = from->FindEntry(isolate, i + from_start); |
439 | if (entry != NumberDictionary::kNotFound) { |
440 | to->set(i + to_start, from->ValueAt(entry)->Number()); |
441 | } else { |
442 | to->set_the_hole(i + to_start); |
443 | } |
444 | } |
445 | } |
446 | |
// Prints the location of the topmost JavaScript frame to stdout (for
// tracing). If the top frame is an internal frame whose code is the
// Function.prototype.apply builtin, the call is attributed to the frame
// below it ("apply from ...").
static void TraceTopFrame(Isolate* isolate) {
  StackFrameIterator it(isolate);
  if (it.done()) {
    PrintF("unknown location (no JavaScript frames present)" );
    return;
  }
  StackFrame* raw_frame = it.frame();
  if (raw_frame->is_internal()) {
    Code current_code_object =
        isolate->heap()->GcSafeFindCodeForInnerPointer(raw_frame->pc());
    if (current_code_object->builtin_index() ==
        Builtins::kFunctionPrototypeApply) {
      PrintF("apply from " );
      it.Advance();
      raw_frame = it.frame();
    }
  }
  JavaScriptFrame::PrintTop(isolate, stdout, false, true);
}
466 | |
467 | static void SortIndices( |
468 | Isolate* isolate, Handle<FixedArray> indices, uint32_t sort_size, |
469 | WriteBarrierMode write_barrier_mode = UPDATE_WRITE_BARRIER) { |
470 | // Use AtomicSlot wrapper to ensure that std::sort uses atomic load and |
471 | // store operations that are safe for concurrent marking. |
472 | AtomicSlot start(indices->GetFirstElementAddress()); |
473 | std::sort(start, start + sort_size, |
474 | [isolate](Tagged_t elementA, Tagged_t elementB) { |
475 | #ifdef V8_COMPRESS_POINTERS |
476 | Object a(DecompressTaggedAny(isolate->isolate_root(), elementA)); |
477 | Object b(DecompressTaggedAny(isolate->isolate_root(), elementB)); |
478 | #else |
479 | Object a(elementA); |
480 | Object b(elementB); |
481 | #endif |
482 | if (a->IsSmi() || !a->IsUndefined(isolate)) { |
483 | if (!b->IsSmi() && b->IsUndefined(isolate)) { |
484 | return true; |
485 | } |
486 | return a->Number() < b->Number(); |
487 | } |
488 | return !b->IsSmi() && b->IsUndefined(isolate); |
489 | }); |
490 | if (write_barrier_mode != SKIP_WRITE_BARRIER) { |
491 | FIXED_ARRAY_ELEMENTS_WRITE_BARRIER(isolate->heap(), *indices, 0, sort_size); |
492 | } |
493 | } |
494 | |
495 | static Maybe<bool> IncludesValueSlowPath(Isolate* isolate, |
496 | Handle<JSObject> receiver, |
497 | Handle<Object> value, |
498 | uint32_t start_from, uint32_t length) { |
499 | bool search_for_hole = value->IsUndefined(isolate); |
500 | for (uint32_t k = start_from; k < length; ++k) { |
501 | LookupIterator it(isolate, receiver, k); |
502 | if (!it.IsFound()) { |
503 | if (search_for_hole) return Just(true); |
504 | continue; |
505 | } |
506 | Handle<Object> element_k; |
507 | ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, element_k, |
508 | Object::GetProperty(&it), Nothing<bool>()); |
509 | |
510 | if (value->SameValueZero(*element_k)) return Just(true); |
511 | } |
512 | |
513 | return Just(false); |
514 | } |
515 | |
516 | static Maybe<int64_t> IndexOfValueSlowPath(Isolate* isolate, |
517 | Handle<JSObject> receiver, |
518 | Handle<Object> value, |
519 | uint32_t start_from, |
520 | uint32_t length) { |
521 | for (uint32_t k = start_from; k < length; ++k) { |
522 | LookupIterator it(isolate, receiver, k); |
523 | if (!it.IsFound()) { |
524 | continue; |
525 | } |
526 | Handle<Object> element_k; |
527 | ASSIGN_RETURN_ON_EXCEPTION_VALUE( |
528 | isolate, element_k, Object::GetProperty(&it), Nothing<int64_t>()); |
529 | |
530 | if (value->StrictEquals(*element_k)) return Just<int64_t>(k); |
531 | } |
532 | |
533 | return Just<int64_t>(-1); |
534 | } |
535 | |
// The InternalElementsAccessor is a helper class to expose otherwise
// protected methods to its subclasses. Namely, we don't want to publicly
// expose methods that take an entry (instead of an index) as an argument.
// Both methods are redeclared pure virtual so every concrete accessor must
// implement them.
class InternalElementsAccessor : public ElementsAccessor {
 public:
  explicit InternalElementsAccessor(const char* name)
      : ElementsAccessor(name) {}

  // Maps an index in the backing store to its entry, or kMaxUInt32 if absent.
  uint32_t GetEntryForIndex(Isolate* isolate, JSObject holder,
                            FixedArrayBase backing_store,
                            uint32_t index) override = 0;

  PropertyDetails GetDetails(JSObject holder, uint32_t entry) override = 0;
};
550 | |
// Base class for element handler implementations. Contains the common logic
// for objects with different ElementsKinds. Subclasses must specialize the
// methods for which the element implementation differs from the base class
// implementation.
555 | // |
556 | // This class is intended to be used in the following way: |
557 | // |
558 | // class SomeElementsAccessor : |
559 | // public ElementsAccessorBase<SomeElementsAccessor, |
560 | // BackingStoreClass> { |
561 | // ... |
562 | // } |
563 | // |
564 | // This is an example of the Curiously Recurring Template Pattern (see |
565 | // http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern). We use |
566 | // CRTP to guarantee aggressive compile time optimizations (i.e. inlining and |
567 | // specialization of SomeElementsAccessor methods). |
568 | template <typename Subclass, typename ElementsTraitsParam> |
569 | class ElementsAccessorBase : public InternalElementsAccessor { |
570 | public: |
571 | explicit ElementsAccessorBase(const char* name) |
572 | : InternalElementsAccessor(name) {} |
573 | |
574 | typedef ElementsTraitsParam ElementsTraits; |
575 | typedef typename ElementsTraitsParam::BackingStore BackingStore; |
576 | |
577 | static ElementsKind kind() { return ElementsTraits::Kind; } |
578 | |
579 | static void ValidateContents(JSObject holder, int length) {} |
580 | |
581 | static void ValidateImpl(JSObject holder) { |
582 | FixedArrayBase fixed_array_base = holder->elements(); |
583 | if (!fixed_array_base->IsHeapObject()) return; |
584 | // Arrays that have been shifted in place can't be verified. |
585 | if (fixed_array_base->IsFiller()) return; |
586 | int length = 0; |
587 | if (holder->IsJSArray()) { |
588 | Object length_obj = JSArray::cast(holder)->length(); |
589 | if (length_obj->IsSmi()) { |
590 | length = Smi::ToInt(length_obj); |
591 | } |
592 | } else { |
593 | length = fixed_array_base->length(); |
594 | } |
595 | Subclass::ValidateContents(holder, length); |
596 | } |
597 | |
598 | void Validate(JSObject holder) final { |
599 | DisallowHeapAllocation no_gc; |
600 | Subclass::ValidateImpl(holder); |
601 | } |
602 | |
603 | static bool IsPackedImpl(JSObject holder, FixedArrayBase backing_store, |
604 | uint32_t start, uint32_t end) { |
605 | DisallowHeapAllocation no_gc; |
606 | if (IsFastPackedElementsKind(kind())) return true; |
607 | Isolate* isolate = holder->GetIsolate(); |
608 | for (uint32_t i = start; i < end; i++) { |
609 | if (!Subclass::HasElementImpl(isolate, holder, i, backing_store, |
610 | ALL_PROPERTIES)) { |
611 | return false; |
612 | } |
613 | } |
614 | return true; |
615 | } |
616 | |
617 | static void TryTransitionResultArrayToPacked(Handle<JSArray> array) { |
618 | if (!IsHoleyElementsKind(kind())) return; |
619 | Handle<FixedArrayBase> backing_store(array->elements(), |
620 | array->GetIsolate()); |
621 | int length = Smi::ToInt(array->length()); |
622 | if (!Subclass::IsPackedImpl(*array, *backing_store, 0, length)) return; |
623 | |
624 | ElementsKind packed_kind = GetPackedElementsKind(kind()); |
625 | Handle<Map> new_map = |
626 | JSObject::GetElementsTransitionMap(array, packed_kind); |
627 | JSObject::MigrateToMap(array, new_map); |
628 | if (FLAG_trace_elements_transitions) { |
629 | JSObject::PrintElementsTransition(stdout, array, kind(), backing_store, |
630 | packed_kind, backing_store); |
631 | } |
632 | } |
633 | |
634 | bool HasElement(JSObject holder, uint32_t index, FixedArrayBase backing_store, |
635 | PropertyFilter filter) final { |
636 | return Subclass::HasElementImpl(holder->GetIsolate(), holder, index, |
637 | backing_store, filter); |
638 | } |
639 | |
640 | static bool HasElementImpl(Isolate* isolate, JSObject holder, uint32_t index, |
641 | FixedArrayBase backing_store, |
642 | PropertyFilter filter = ALL_PROPERTIES) { |
643 | return Subclass::GetEntryForIndexImpl(isolate, holder, backing_store, index, |
644 | filter) != kMaxUInt32; |
645 | } |
646 | |
647 | bool HasEntry(JSObject holder, uint32_t entry) final { |
648 | return Subclass::HasEntryImpl(holder->GetIsolate(), holder->elements(), |
649 | entry); |
650 | } |
651 | |
652 | static bool HasEntryImpl(Isolate* isolate, FixedArrayBase backing_store, |
653 | uint32_t entry) { |
654 | UNIMPLEMENTED(); |
655 | } |
656 | |
657 | bool HasAccessors(JSObject holder) final { |
658 | return Subclass::HasAccessorsImpl(holder, holder->elements()); |
659 | } |
660 | |
661 | static bool HasAccessorsImpl(JSObject holder, FixedArrayBase backing_store) { |
662 | return false; |
663 | } |
664 | |
665 | Handle<Object> Get(Handle<JSObject> holder, uint32_t entry) final { |
666 | return Subclass::GetInternalImpl(holder, entry); |
667 | } |
668 | |
669 | static Handle<Object> GetInternalImpl(Handle<JSObject> holder, |
670 | uint32_t entry) { |
671 | return Subclass::GetImpl(holder->GetIsolate(), holder->elements(), entry); |
672 | } |
673 | |
674 | static Handle<Object> GetImpl(Isolate* isolate, FixedArrayBase backing_store, |
675 | uint32_t entry) { |
676 | uint32_t index = GetIndexForEntryImpl(backing_store, entry); |
677 | return handle(BackingStore::cast(backing_store)->get(index), isolate); |
678 | } |
679 | |
680 | void Set(Handle<JSObject> holder, uint32_t entry, Object value) final { |
681 | Subclass::SetImpl(holder, entry, value); |
682 | } |
683 | |
684 | void Reconfigure(Handle<JSObject> object, Handle<FixedArrayBase> store, |
685 | uint32_t entry, Handle<Object> value, |
686 | PropertyAttributes attributes) final { |
687 | Subclass::ReconfigureImpl(object, store, entry, value, attributes); |
688 | } |
689 | |
690 | static void ReconfigureImpl(Handle<JSObject> object, |
691 | Handle<FixedArrayBase> store, uint32_t entry, |
692 | Handle<Object> value, |
693 | PropertyAttributes attributes) { |
694 | UNREACHABLE(); |
695 | } |
696 | |
697 | void Add(Handle<JSObject> object, uint32_t index, Handle<Object> value, |
698 | PropertyAttributes attributes, uint32_t new_capacity) final { |
699 | Subclass::AddImpl(object, index, value, attributes, new_capacity); |
700 | } |
701 | |
702 | static void AddImpl(Handle<JSObject> object, uint32_t index, |
703 | Handle<Object> value, PropertyAttributes attributes, |
704 | uint32_t new_capacity) { |
705 | UNREACHABLE(); |
706 | } |
707 | |
708 | uint32_t Push(Handle<JSArray> receiver, Arguments* args, |
709 | uint32_t push_size) final { |
710 | return Subclass::PushImpl(receiver, args, push_size); |
711 | } |
712 | |
713 | static uint32_t PushImpl(Handle<JSArray> receiver, Arguments* args, |
714 | uint32_t push_sized) { |
715 | UNREACHABLE(); |
716 | } |
717 | |
718 | uint32_t Unshift(Handle<JSArray> receiver, Arguments* args, |
719 | uint32_t unshift_size) final { |
720 | return Subclass::UnshiftImpl(receiver, args, unshift_size); |
721 | } |
722 | |
723 | static uint32_t UnshiftImpl(Handle<JSArray> receiver, Arguments* args, |
724 | uint32_t unshift_size) { |
725 | UNREACHABLE(); |
726 | } |
727 | |
728 | Handle<JSObject> Slice(Handle<JSObject> receiver, uint32_t start, |
729 | uint32_t end) final { |
730 | return Subclass::SliceImpl(receiver, start, end); |
731 | } |
732 | |
733 | static Handle<JSObject> SliceImpl(Handle<JSObject> receiver, uint32_t start, |
734 | uint32_t end) { |
735 | UNREACHABLE(); |
736 | } |
737 | |
738 | Handle<Object> Pop(Handle<JSArray> receiver) final { |
739 | return Subclass::PopImpl(receiver); |
740 | } |
741 | |
742 | static Handle<Object> PopImpl(Handle<JSArray> receiver) { |
743 | UNREACHABLE(); |
744 | } |
745 | |
746 | Handle<Object> Shift(Handle<JSArray> receiver) final { |
747 | return Subclass::ShiftImpl(receiver); |
748 | } |
749 | |
750 | static Handle<Object> ShiftImpl(Handle<JSArray> receiver) { |
751 | UNREACHABLE(); |
752 | } |
753 | |
  // Sets the array's length, passing the current backing store along to the
  // kind-specific implementation.
  void SetLength(Handle<JSArray> array, uint32_t length) final {
    Subclass::SetLengthImpl(array->GetIsolate(), array, length,
                            handle(array->elements(), array->GetIsolate()));
  }
758 | |
  // Sets the length of a fast-mode JSArray. Growing transitions the array to
  // a holey kind (new slots are holes); shrinking may right-trim the backing
  // store or fill the unused tail with holes; growing past capacity
  // reallocates via GrowCapacityAndConvertImpl.
  static void SetLengthImpl(Isolate* isolate, Handle<JSArray> array,
                            uint32_t length,
                            Handle<FixedArrayBase> backing_store) {
    DCHECK(!array->SetLengthWouldNormalize(length));
    DCHECK(IsFastElementsKind(array->GetElementsKind()));
    uint32_t old_length = 0;
    CHECK(array->length()->ToArrayIndex(&old_length));

    if (old_length < length) {
      // Growing introduces holes in [old_length, length), so the array must
      // use a holey elements kind.
      ElementsKind kind = array->GetElementsKind();
      if (!IsHoleyElementsKind(kind)) {
        kind = GetHoleyElementsKind(kind);
        JSObject::TransitionElementsKind(array, kind);
      }
    }

    // Check whether the backing store should be shrunk.
    uint32_t capacity = backing_store->length();
    old_length = Min(old_length, capacity);
    if (length == 0) {
      array->initialize_elements();
    } else if (length <= capacity) {
      if (IsSmiOrObjectElementsKind(kind())) {
        // A copy-on-write store must be copied before it can be mutated.
        JSObject::EnsureWritableFastElements(array);
        if (array->elements() != *backing_store) {
          backing_store = handle(array->elements(), isolate);
        }
      }
      if (2 * length + JSObject::kMinAddedElementsCapacity <= capacity) {
        // If more than half the elements won't be used, trim the array.
        // Do not trim from short arrays to prevent frequent trimming on
        // repeated pop operations.
        // Leave some space to allow for subsequent push operations.
        int elements_to_trim = length + 1 == old_length
                                   ? (capacity - length) / 2
                                   : capacity - length;
        isolate->heap()->RightTrimFixedArray(*backing_store, elements_to_trim);
        // Fill the non-trimmed elements with holes.
        BackingStore::cast(*backing_store)
            ->FillWithHoles(length,
                            std::min(old_length, capacity - elements_to_trim));
      } else {
        // Otherwise, fill the unused tail with holes.
        BackingStore::cast(*backing_store)->FillWithHoles(length, old_length);
      }
    } else {
      // Check whether the backing store should be expanded.
      capacity = Max(length, JSObject::NewElementsCapacity(capacity));
      Subclass::GrowCapacityAndConvertImpl(array, capacity);
    }

    array->set_length(Smi::FromInt(length));
    JSObject::ValidateElements(*array);
  }
813 | |
  // Returns the number of elements actually present (holes excluded).
  uint32_t NumberOfElements(JSObject receiver) final {
    return Subclass::NumberOfElementsImpl(receiver, receiver->elements());
  }

  // Must be provided by each concrete accessor; there is no generic way to
  // count elements across storage representations.
  static uint32_t NumberOfElementsImpl(JSObject receiver,
                                       FixedArrayBase backing_store) {
    UNREACHABLE();
  }
822 | |
  // Returns one past the highest index that may hold an element: the array
  // length for JSArrays (always a Smi for fast kinds), otherwise the
  // backing-store capacity.
  static uint32_t GetMaxIndex(JSObject receiver, FixedArrayBase elements) {
    if (receiver->IsJSArray()) {
      DCHECK(JSArray::cast(receiver)->length()->IsSmi());
      return static_cast<uint32_t>(
          Smi::ToInt(JSArray::cast(receiver)->length()));
    }
    return Subclass::GetCapacityImpl(receiver, elements);
  }

  // Upper bound on the number of entries; sparse subclasses (e.g. the
  // dictionary accessor) override this with an exact count.
  static uint32_t GetMaxNumberOfEntries(JSObject receiver,
                                        FixedArrayBase elements) {
    return Subclass::GetMaxIndex(receiver, elements);
  }
836 | |
  // Allocates a new backing store of |capacity| and copies all elements over,
  // initializing the remainder to holes.
  static Handle<FixedArrayBase> ConvertElementsWithCapacity(
      Handle<JSObject> object, Handle<FixedArrayBase> old_elements,
      ElementsKind from_kind, uint32_t capacity) {
    return ConvertElementsWithCapacity(
        object, old_elements, from_kind, capacity, 0, 0,
        ElementsAccessor::kCopyToEndAndInitializeToHole);
  }

  // As above, but copies only |copy_size| elements.
  static Handle<FixedArrayBase> ConvertElementsWithCapacity(
      Handle<JSObject> object, Handle<FixedArrayBase> old_elements,
      ElementsKind from_kind, uint32_t capacity, int copy_size) {
    return ConvertElementsWithCapacity(object, old_elements, from_kind,
                                       capacity, 0, 0, copy_size);
  }

  // Workhorse overload: allocates a FixedDoubleArray or FixedArray depending
  // on this accessor's kind and copies |copy_size| elements starting at
  // |src_index| in the old store to |dst_index| in the new one.
  static Handle<FixedArrayBase> ConvertElementsWithCapacity(
      Handle<JSObject> object, Handle<FixedArrayBase> old_elements,
      ElementsKind from_kind, uint32_t capacity, uint32_t src_index,
      uint32_t dst_index, int copy_size) {
    Isolate* isolate = object->GetIsolate();
    Handle<FixedArrayBase> new_elements;
    if (IsDoubleElementsKind(kind())) {
      new_elements = isolate->factory()->NewFixedDoubleArray(capacity);
    } else {
      new_elements = isolate->factory()->NewUninitializedFixedArray(capacity);
    }

    // Knowing the packed prefix length lets the copy skip hole checks.
    int packed_size = kPackedSizeNotKnown;
    if (IsFastPackedElementsKind(from_kind) && object->IsJSArray()) {
      packed_size = Smi::ToInt(JSArray::cast(*object)->length());
    }

    Subclass::CopyElementsImpl(isolate, *old_elements, src_index, *new_elements,
                               from_kind, dst_index, packed_size, copy_size);

    return new_elements;
  }
874 | |
  // Transitions |object| to |to_map|'s elements kind. Only the map changes
  // unless the transition crosses the Smi/object <-> double representation
  // boundary, in which case the backing store is converted as well.
  static void TransitionElementsKindImpl(Handle<JSObject> object,
                                         Handle<Map> to_map) {
    Handle<Map> from_map = handle(object->map(), object->GetIsolate());
    ElementsKind from_kind = from_map->elements_kind();
    ElementsKind to_kind = to_map->elements_kind();
    // A holey source must remain holey after the transition.
    if (IsHoleyElementsKind(from_kind)) {
      to_kind = GetHoleyElementsKind(to_kind);
    }
    if (from_kind != to_kind) {
      // This method should never be called for any other case.
      DCHECK(IsFastElementsKind(from_kind));
      DCHECK(IsFastElementsKind(to_kind));
      DCHECK_NE(TERMINAL_FAST_ELEMENTS_KIND, from_kind);

      Handle<FixedArrayBase> from_elements(object->elements(),
                                           object->GetIsolate());
      if (object->elements() ==
          object->GetReadOnlyRoots().empty_fixed_array() ||
          IsDoubleElementsKind(from_kind) == IsDoubleElementsKind(to_kind)) {
        // No change is needed to the elements() buffer, the transition
        // only requires a map change.
        JSObject::MigrateToMap(object, to_map);
      } else {
        DCHECK(
            (IsSmiElementsKind(from_kind) && IsDoubleElementsKind(to_kind)) ||
            (IsDoubleElementsKind(from_kind) && IsObjectElementsKind(to_kind)));
        uint32_t capacity = static_cast<uint32_t>(object->elements()->length());
        Handle<FixedArrayBase> elements = ConvertElementsWithCapacity(
            object, from_elements, from_kind, capacity);
        JSObject::SetMapAndElements(object, to_map, elements);
      }
      if (FLAG_trace_elements_transitions) {
        JSObject::PrintElementsTransition(
            stdout, object, from_kind, from_elements, to_kind,
            handle(object->elements(), object->GetIsolate()));
      }
    }
  }
913 | |
  // Grows the backing store to |capacity| and converts it to this accessor's
  // elements kind.
  static void GrowCapacityAndConvertImpl(Handle<JSObject> object,
                                         uint32_t capacity) {
    ElementsKind from_kind = object->GetElementsKind();
    if (IsSmiOrObjectElementsKind(from_kind)) {
      // Array optimizations rely on the prototype lookups of Array objects
      // always returning undefined. If there is a store to the initial
      // prototype object, make sure all of these optimizations are invalidated.
      object->GetIsolate()->UpdateNoElementsProtectorOnSetLength(object);
    }
    Handle<FixedArrayBase> old_elements(object->elements(),
                                        object->GetIsolate());
    // This method should only be called if there's a reason to update the
    // elements.
    DCHECK(IsDoubleElementsKind(from_kind) != IsDoubleElementsKind(kind()) ||
           IsDictionaryElementsKind(from_kind) ||
           static_cast<uint32_t>(old_elements->length()) < capacity);
    Subclass::BasicGrowCapacityAndConvertImpl(object, old_elements, from_kind,
                                              kind(), capacity);
  }
933 | |
  // Shared helper: copies the elements into a new store of |capacity|,
  // installs the transitioned map, and records the transition on any
  // allocation site.
  static void BasicGrowCapacityAndConvertImpl(
      Handle<JSObject> object, Handle<FixedArrayBase> old_elements,
      ElementsKind from_kind, ElementsKind to_kind, uint32_t capacity) {
    Handle<FixedArrayBase> elements =
        ConvertElementsWithCapacity(object, old_elements, from_kind, capacity);

    // A holey source must remain holey after the conversion.
    if (IsHoleyElementsKind(from_kind)) {
      to_kind = GetHoleyElementsKind(to_kind);
    }
    Handle<Map> new_map = JSObject::GetElementsTransitionMap(object, to_kind);
    JSObject::SetMapAndElements(object, new_map, elements);

    // Transition through the allocation site as well if present.
    JSObject::UpdateAllocationSite(object, to_kind);

    if (FLAG_trace_elements_transitions) {
      JSObject::PrintElementsTransition(stdout, object, from_kind, old_elements,
                                        to_kind, elements);
    }
  }
954 | |
  // Virtual dispatcher for the kind-transition implementation above.
  void TransitionElementsKind(Handle<JSObject> object, Handle<Map> map) final {
    Subclass::TransitionElementsKindImpl(object, map);
  }

  // Virtual dispatcher for the growth/conversion implementation above.
  void GrowCapacityAndConvert(Handle<JSObject> object,
                              uint32_t capacity) final {
    Subclass::GrowCapacityAndConvertImpl(object, capacity);
  }
963 | |
  // Grows the backing store so that |index| fits, without changing the
  // elements kind. Returns false when the caller must fall back to the
  // runtime (slow path).
  bool GrowCapacity(Handle<JSObject> object, uint32_t index) final {
    // This function is intended to be called from optimized code. We don't
    // want to trigger lazy deopts there, so refuse to handle cases that would.
    if (object->map()->is_prototype_map() ||
        object->WouldConvertToSlowElements(index)) {
      return false;
    }
    Handle<FixedArrayBase> old_elements(object->elements(),
                                        object->GetIsolate());
    uint32_t new_capacity = JSObject::NewElementsCapacity(index + 1);
    DCHECK(static_cast<uint32_t>(old_elements->length()) < new_capacity);
    Handle<FixedArrayBase> elements =
        ConvertElementsWithCapacity(object, old_elements, kind(), new_capacity);

    DCHECK_EQ(object->GetElementsKind(), kind());
    // Transition through the allocation site as well if present.
    // kCheckOnly: if a site transition would be required, bail out instead of
    // performing it here.
    if (JSObject::UpdateAllocationSite<AllocationSiteUpdateMode::kCheckOnly>(
            object, kind())) {
      return false;
    }

    object->set_elements(*elements);
    return true;
  }
988 | |
  // Deletes the element at |entry|.
  void Delete(Handle<JSObject> obj, uint32_t entry) final {
    Subclass::DeleteImpl(obj, entry);
  }

  // Must be provided by subclasses that support elements copying.
  static void CopyElementsImpl(Isolate* isolate, FixedArrayBase from,
                               uint32_t from_start, FixedArrayBase to,
                               ElementsKind from_kind, uint32_t to_start,
                               int packed_size, int copy_size) {
    UNREACHABLE();
  }
999 | |
  // Copies |copy_size| elements from |from_holder| into |to|, computing the
  // packed prefix length up front when the source is a packed JSArray so the
  // copy can skip hole checks.
  void CopyElements(JSObject from_holder, uint32_t from_start,
                    ElementsKind from_kind, Handle<FixedArrayBase> to,
                    uint32_t to_start, int copy_size) final {
    int packed_size = kPackedSizeNotKnown;
    bool is_packed = IsFastPackedElementsKind(from_kind) &&
        from_holder->IsJSArray();
    if (is_packed) {
      packed_size = Smi::ToInt(JSArray::cast(from_holder)->length());
      if (copy_size >= 0 && packed_size > copy_size) {
        packed_size = copy_size;
      }
    }
    FixedArrayBase from = from_holder->elements();
    // NOTE: the Subclass::CopyElementsImpl() methods
    // violate the handlified function signature convention:
    // raw pointer parameters in the function that allocates. This is done
    // intentionally to avoid ArrayConcat() builtin performance degradation.
    //
    // Details: The idea is that allocations actually happen only in case of
    // copying from object with fast double elements to object with object
    // elements. In all the other cases there are no allocations performed and
    // handle creation causes noticeable performance degradation of the builtin.
    Subclass::CopyElementsImpl(from_holder->GetIsolate(), from, from_start, *to,
                               from_kind, to_start, packed_size, copy_size);
  }

  // Copies |size| elements between two bare backing stores.
  void CopyElements(Isolate* isolate, Handle<FixedArrayBase> source,
                    ElementsKind source_kind,
                    Handle<FixedArrayBase> destination, int size) override {
    Subclass::CopyElementsImpl(isolate, *source, 0, *destination, source_kind,
                               0, kPackedSizeNotKnown, size);
  }
1032 | |
  // Copies the [start, end) slice of one typed array into another.
  void CopyTypedArrayElementsSlice(JSTypedArray source,
                                   JSTypedArray destination, size_t start,
                                   size_t end) override {
    Subclass::CopyTypedArrayElementsSliceImpl(source, destination, start, end);
  }

  // Only meaningful for the typed-array accessors.
  static void CopyTypedArrayElementsSliceImpl(JSTypedArray source,
                                              JSTypedArray destination,
                                              size_t start, size_t end) {
    UNREACHABLE();
  }
1044 | |
  // Copies |length| elements from |source|, starting at |offset|, into
  // |destination|.
  Object CopyElements(Handle<Object> source, Handle<JSObject> destination,
                      size_t length, uint32_t offset) final {
    return Subclass::CopyElementsHandleImpl(source, destination, length,
                                            offset);
  }

  // Only meaningful for the typed-array accessors.
  static Object CopyElementsHandleImpl(Handle<Object> source,
                                       Handle<JSObject> destination,
                                       size_t length, uint32_t offset) {
    UNREACHABLE();
  }
1056 | |
  // Converts the object's elements to dictionary (slow) mode.
  Handle<NumberDictionary> Normalize(Handle<JSObject> object) final {
    return Subclass::NormalizeImpl(
        object, handle(object->elements(), object->GetIsolate()));
  }

  // Must be provided by subclasses that can be normalized.
  static Handle<NumberDictionary> NormalizeImpl(
      Handle<JSObject> object, Handle<FixedArrayBase> elements) {
    UNREACHABLE();
  }
1066 | |
  // Collects the object's own element values (or [index, value] entry pairs
  // when |get_entries|) into |values_or_entries|; *nof_items receives the
  // number written. Returns Nothing on exception.
  Maybe<bool> CollectValuesOrEntries(Isolate* isolate, Handle<JSObject> object,
                                     Handle<FixedArray> values_or_entries,
                                     bool get_entries, int* nof_items,
                                     PropertyFilter filter) override {
    return Subclass::CollectValuesOrEntriesImpl(
        isolate, object, values_or_entries, get_entries, nof_items, filter);
  }

  static Maybe<bool> CollectValuesOrEntriesImpl(
      Isolate* isolate, Handle<JSObject> object,
      Handle<FixedArray> values_or_entries, bool get_entries, int* nof_items,
      PropertyFilter filter) {
    DCHECK_EQ(*nof_items, 0);
    KeyAccumulator accumulator(isolate, KeyCollectionMode::kOwnOnly,
                               ALL_PROPERTIES);
    Subclass::CollectElementIndicesImpl(
        object, handle(object->elements(), isolate), &accumulator);
    Handle<FixedArray> keys = accumulator.GetKeys();

    int count = 0;
    int i = 0;
    ElementsKind original_elements_kind = object->GetElementsKind();

    // Fast path: valid only while getters leave the elements kind unchanged;
    // we break out to the slow path below as soon as it changes.
    for (; i < keys->length(); ++i) {
      Handle<Object> key(keys->get(i), isolate);
      uint32_t index;
      if (!key->ToUint32(&index)) continue;

      DCHECK_EQ(object->GetElementsKind(), original_elements_kind);
      uint32_t entry = Subclass::GetEntryForIndexImpl(
          isolate, *object, object->elements(), index, filter);
      if (entry == kMaxUInt32) continue;
      PropertyDetails details = Subclass::GetDetailsImpl(*object, entry);

      Handle<Object> value;
      if (details.kind() == kData) {
        value = Subclass::GetImpl(isolate, object->elements(), entry);
      } else {
        // This might modify the elements and/or change the elements kind.
        LookupIterator it(isolate, object, index, LookupIterator::OWN);
        ASSIGN_RETURN_ON_EXCEPTION_VALUE(
            isolate, value, Object::GetProperty(&it), Nothing<bool>());
      }
      if (get_entries) value = MakeEntryPair(isolate, index, value);
      values_or_entries->set(count++, *value);
      if (object->GetElementsKind() != original_elements_kind) break;
    }

    // Slow path caused by changes in elements kind during iteration.
    for (; i < keys->length(); i++) {
      Handle<Object> key(keys->get(i), isolate);
      uint32_t index;
      if (!key->ToUint32(&index)) continue;

      if (filter & ONLY_ENUMERABLE) {
        // Re-fetch the accessor: the elements kind (and hence the accessor)
        // may have changed.
        InternalElementsAccessor* accessor =
            reinterpret_cast<InternalElementsAccessor*>(
                object->GetElementsAccessor());
        uint32_t entry = accessor->GetEntryForIndex(isolate, *object,
                                                    object->elements(), index);
        if (entry == kMaxUInt32) continue;
        PropertyDetails details = accessor->GetDetails(*object, entry);
        if (!details.IsEnumerable()) continue;
      }

      Handle<Object> value;
      LookupIterator it(isolate, object, index, LookupIterator::OWN);
      ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, value, Object::GetProperty(&it),
                                       Nothing<bool>());

      if (get_entries) value = MakeEntryPair(isolate, index, value);
      values_or_entries->set(count++, *value);
    }

    *nof_items = count;
    return Just(true);
  }
1144 | |
  // Adds this object's element indices to |keys|.
  void CollectElementIndices(Handle<JSObject> object,
                             Handle<FixedArrayBase> backing_store,
                             KeyAccumulator* keys) final {
    if (keys->filter() & ONLY_ALL_CAN_READ) return;
    Subclass::CollectElementIndicesImpl(object, backing_store, keys);
  }

  // Default implementation for dense backing stores: scan every index up to
  // the maximum and add the ones that hold elements passing the filter.
  static void CollectElementIndicesImpl(Handle<JSObject> object,
                                        Handle<FixedArrayBase> backing_store,
                                        KeyAccumulator* keys) {
    DCHECK_NE(DICTIONARY_ELEMENTS, kind());
    // Non-dictionary elements can't have all-can-read accessors.
    uint32_t length = Subclass::GetMaxIndex(*object, *backing_store);
    PropertyFilter filter = keys->filter();
    Isolate* isolate = keys->isolate();
    Factory* factory = isolate->factory();
    for (uint32_t i = 0; i < length; i++) {
      if (Subclass::HasElementImpl(isolate, *object, i, *backing_store,
                                   filter)) {
        keys->AddKey(factory->NewNumberFromUint(i));
      }
    }
  }
1168 | |
1169 | static Handle<FixedArray> DirectCollectElementIndicesImpl( |
1170 | Isolate* isolate, Handle<JSObject> object, |
1171 | Handle<FixedArrayBase> backing_store, GetKeysConversion convert, |
1172 | PropertyFilter filter, Handle<FixedArray> list, uint32_t* nof_indices, |
1173 | uint32_t insertion_index = 0) { |
1174 | uint32_t length = Subclass::GetMaxIndex(*object, *backing_store); |
1175 | uint32_t const kMaxStringTableEntries = |
1176 | isolate->heap()->MaxNumberToStringCacheSize(); |
1177 | for (uint32_t i = 0; i < length; i++) { |
1178 | if (Subclass::HasElementImpl(isolate, *object, i, *backing_store, |
1179 | filter)) { |
1180 | if (convert == GetKeysConversion::kConvertToString) { |
1181 | bool use_cache = i < kMaxStringTableEntries; |
1182 | Handle<String> index_string = |
1183 | isolate->factory()->Uint32ToString(i, use_cache); |
1184 | list->set(insertion_index, *index_string); |
1185 | } else { |
1186 | list->set(insertion_index, Smi::FromInt(i)); |
1187 | } |
1188 | insertion_index++; |
1189 | } |
1190 | } |
1191 | *nof_indices = insertion_index; |
1192 | return list; |
1193 | } |
1194 | |
  // Builds a FixedArray containing this object's element indices followed by
  // the given property |keys|.
  MaybeHandle<FixedArray> PrependElementIndices(
      Handle<JSObject> object, Handle<FixedArrayBase> backing_store,
      Handle<FixedArray> keys, GetKeysConversion convert,
      PropertyFilter filter) final {
    return Subclass::PrependElementIndicesImpl(object, backing_store, keys,
                                               convert, filter);
  }

  static MaybeHandle<FixedArray> PrependElementIndicesImpl(
      Handle<JSObject> object, Handle<FixedArrayBase> backing_store,
      Handle<FixedArray> keys, GetKeysConversion convert,
      PropertyFilter filter) {
    Isolate* isolate = object->GetIsolate();
    uint32_t nof_property_keys = keys->length();
    uint32_t initial_list_length =
        Subclass::GetMaxNumberOfEntries(*object, *backing_store);

    initial_list_length += nof_property_keys;
    // Guard against exceeding FixedArray limits and against uint32 overflow
    // of the addition above.
    if (initial_list_length > FixedArray::kMaxLength ||
        initial_list_length < nof_property_keys) {
      return isolate->Throw<FixedArray>(isolate->factory()->NewRangeError(
          MessageTemplate::kInvalidArrayLength));
    }

    // Collect the element indices into a new list.
    MaybeHandle<FixedArray> raw_array =
        isolate->factory()->TryNewFixedArray(initial_list_length);
    Handle<FixedArray> combined_keys;

    // If we have a holey backing store try to precisely estimate the backing
    // store size as a last emergency measure if we cannot allocate the big
    // array.
    if (!raw_array.ToHandle(&combined_keys)) {
      if (IsHoleyOrDictionaryElementsKind(kind())) {
        // If we overestimate the result list size we might end up in the
        // large-object space which doesn't free memory on shrinking the list.
        // Hence we try to estimate the final size for holey backing stores more
        // precisely here.
        initial_list_length =
            Subclass::NumberOfElementsImpl(*object, *backing_store);
        initial_list_length += nof_property_keys;
      }
      combined_keys = isolate->factory()->NewFixedArray(initial_list_length);
    }

    uint32_t nof_indices = 0;
    bool needs_sorting = IsDictionaryElementsKind(kind()) ||
                         IsSloppyArgumentsElementsKind(kind());
    combined_keys = Subclass::DirectCollectElementIndicesImpl(
        isolate, object, backing_store,
        needs_sorting ? GetKeysConversion::kKeepNumbers : convert, filter,
        combined_keys, &nof_indices);

    if (needs_sorting) {
      SortIndices(isolate, combined_keys, nof_indices);
      // Indices from dictionary elements should only be converted after
      // sorting.
      if (convert == GetKeysConversion::kConvertToString) {
        for (uint32_t i = 0; i < nof_indices; i++) {
          Handle<Object> index_string = isolate->factory()->Uint32ToString(
              combined_keys->get(i)->Number());
          combined_keys->set(i, *index_string);
        }
      }
    }

    // Copy over the passed-in property keys.
    CopyObjectToObjectElements(isolate, *keys, PACKED_ELEMENTS, 0,
                               *combined_keys, PACKED_ELEMENTS, nof_indices,
                               nof_property_keys);

    // For holey elements and arguments we might have to shrink the collected
    // keys since the estimates might be off.
    if (IsHoleyOrDictionaryElementsKind(kind()) ||
        IsSloppyArgumentsElementsKind(kind())) {
      // Shrink combined_keys to the final size.
      int final_size = nof_indices + nof_property_keys;
      DCHECK_LE(final_size, combined_keys->length());
      return FixedArray::ShrinkOrEmpty(isolate, combined_keys, final_size);
    }

    return combined_keys;
  }
1278 | |
  // Feeds all element values of |receiver| into |accumulator|.
  void AddElementsToKeyAccumulator(Handle<JSObject> receiver,
                                   KeyAccumulator* accumulator,
                                   AddKeyConversion convert) final {
    Subclass::AddElementsToKeyAccumulatorImpl(receiver, accumulator, convert);
  }

  // Default capacity: the raw length of the backing store.
  static uint32_t GetCapacityImpl(JSObject holder,
                                  FixedArrayBase backing_store) {
    return backing_store->length();
  }

  uint32_t GetCapacity(JSObject holder, FixedArrayBase backing_store) final {
    return Subclass::GetCapacityImpl(holder, backing_store);
  }
1293 | |
  // Array.prototype.fill: must be provided by subclasses that support it.
  static Object FillImpl(Handle<JSObject> receiver, Handle<Object> obj_value,
                         uint32_t start, uint32_t end) {
    UNREACHABLE();
  }

  Object Fill(Handle<JSObject> receiver, Handle<Object> obj_value,
              uint32_t start, uint32_t end) override {
    return Subclass::FillImpl(receiver, obj_value, start, end);
  }

  // Array.prototype.includes: defaults to the generic slow path.
  static Maybe<bool> IncludesValueImpl(Isolate* isolate,
                                       Handle<JSObject> receiver,
                                       Handle<Object> value,
                                       uint32_t start_from, uint32_t length) {
    return IncludesValueSlowPath(isolate, receiver, value, start_from, length);
  }

  Maybe<bool> IncludesValue(Isolate* isolate, Handle<JSObject> receiver,
                            Handle<Object> value, uint32_t start_from,
                            uint32_t length) final {
    return Subclass::IncludesValueImpl(isolate, receiver, value, start_from,
                                       length);
  }
1317 | |
  // Array.prototype.indexOf: defaults to the generic slow path.
  static Maybe<int64_t> IndexOfValueImpl(Isolate* isolate,
                                         Handle<JSObject> receiver,
                                         Handle<Object> value,
                                         uint32_t start_from, uint32_t length) {
    return IndexOfValueSlowPath(isolate, receiver, value, start_from, length);
  }

  Maybe<int64_t> IndexOfValue(Isolate* isolate, Handle<JSObject> receiver,
                              Handle<Object> value, uint32_t start_from,
                              uint32_t length) final {
    return Subclass::IndexOfValueImpl(isolate, receiver, value, start_from,
                                      length);
  }

  // Array.prototype.lastIndexOf: only supported by some subclasses.
  static Maybe<int64_t> LastIndexOfValueImpl(Handle<JSObject> receiver,
                                             Handle<Object> value,
                                             uint32_t start_from) {
    UNREACHABLE();
  }

  Maybe<int64_t> LastIndexOfValue(Handle<JSObject> receiver,
                                  Handle<Object> value,
                                  uint32_t start_from) final {
    return Subclass::LastIndexOfValueImpl(receiver, value, start_from);
  }

  // In-place reversal: only supported by some subclasses.
  static void ReverseImpl(JSObject receiver) { UNREACHABLE(); }

  void Reverse(JSObject receiver) final { Subclass::ReverseImpl(receiver); }
1347 | |
  // For fast element kinds, entries and indices coincide.
  static uint32_t GetIndexForEntryImpl(FixedArrayBase backing_store,
                                       uint32_t entry) {
    return entry;
  }
1352 | |
1353 | static uint32_t GetEntryForIndexImpl(Isolate* isolate, JSObject holder, |
1354 | FixedArrayBase backing_store, |
1355 | uint32_t index, PropertyFilter filter) { |
1356 | DCHECK(IsFastElementsKind(kind()) || IsFrozenOrSealedElementsKind(kind())); |
1357 | uint32_t length = Subclass::GetMaxIndex(holder, backing_store); |
1358 | if (IsHoleyElementsKind(kind())) { |
1359 | return index < length && |
1360 | !BackingStore::cast(backing_store) |
1361 | ->is_the_hole(isolate, index) |
1362 | ? index |
1363 | : kMaxUInt32; |
1364 | } else { |
1365 | return index < length ? index : kMaxUInt32; |
1366 | } |
1367 | } |
1368 | |
  // Public entry lookup: ignores property filters.
  uint32_t GetEntryForIndex(Isolate* isolate, JSObject holder,
                            FixedArrayBase backing_store,
                            uint32_t index) final {
    return Subclass::GetEntryForIndexImpl(isolate, holder, backing_store, index,
                                          ALL_PROPERTIES);
  }
1375 | |
  // Fast elements are plain writable data properties with no property cell.
  static PropertyDetails GetDetailsImpl(FixedArrayBase backing_store,
                                        uint32_t entry) {
    return PropertyDetails(kData, NONE, PropertyCellType::kNoCell);
  }

  static PropertyDetails GetDetailsImpl(JSObject holder, uint32_t entry) {
    return PropertyDetails(kData, NONE, PropertyCellType::kNoCell);
  }

  PropertyDetails GetDetails(JSObject holder, uint32_t entry) final {
    return Subclass::GetDetailsImpl(holder, entry);
  }

  // CreateListFromArrayLike support; only some subclasses provide it.
  Handle<FixedArray> CreateListFromArrayLike(Isolate* isolate,
                                             Handle<JSObject> object,
                                             uint32_t length) final {
    return Subclass::CreateListFromArrayLikeImpl(isolate, object, length);
  }

  static Handle<FixedArray> CreateListFromArrayLikeImpl(Isolate* isolate,
                                                        Handle<JSObject> object,
                                                        uint32_t length) {
    UNREACHABLE();
  }
1400 | |
1401 | private: |
1402 | DISALLOW_COPY_AND_ASSIGN(ElementsAccessorBase); |
1403 | }; |
1404 | |
1405 | |
1406 | class DictionaryElementsAccessor |
1407 | : public ElementsAccessorBase<DictionaryElementsAccessor, |
1408 | ElementsKindTraits<DICTIONARY_ELEMENTS> > { |
1409 | public: |
  // Accessor for objects whose elements live in a NumberDictionary.
  explicit DictionaryElementsAccessor(const char* name)
      : ElementsAccessorBase<DictionaryElementsAccessor,
                             ElementsKindTraits<DICTIONARY_ELEMENTS> >(name) {}
1413 | |
  static uint32_t GetMaxIndex(JSObject receiver, FixedArrayBase elements) {
    // We cannot properly estimate this for dictionaries.
    UNREACHABLE();
  }

  // For dictionaries the number of entries is exact, not an estimate.
  static uint32_t GetMaxNumberOfEntries(JSObject receiver,
                                        FixedArrayBase backing_store) {
    return NumberOfElementsImpl(receiver, backing_store);
  }

  static uint32_t NumberOfElementsImpl(JSObject receiver,
                                       FixedArrayBase backing_store) {
    NumberDictionary dict = NumberDictionary::cast(backing_store);
    return dict->NumberOfElements();
  }
1429 | |
  // Sets the length of a dictionary-mode array. When shrinking, the new
  // length is first clamped so no non-configurable element is deleted; then
  // all entries at indices >= length are removed.
  static void SetLengthImpl(Isolate* isolate, Handle<JSArray> array,
                            uint32_t length,
                            Handle<FixedArrayBase> backing_store) {
    Handle<NumberDictionary> dict =
        Handle<NumberDictionary>::cast(backing_store);
    int capacity = dict->Capacity();
    uint32_t old_length = 0;
    CHECK(array->length()->ToArrayLength(&old_length));
    {
      DisallowHeapAllocation no_gc;
      ReadOnlyRoots roots(isolate);
      if (length < old_length) {
        if (dict->requires_slow_elements()) {
          // Find last non-deletable element in range of elements to be
          // deleted and adjust range accordingly.
          for (int entry = 0; entry < capacity; entry++) {
            Object index = dict->KeyAt(entry);
            if (dict->IsKey(roots, index)) {
              uint32_t number = static_cast<uint32_t>(index->Number());
              if (length <= number && number < old_length) {
                PropertyDetails details = dict->DetailsAt(entry);
                if (!details.IsConfigurable()) length = number + 1;
              }
            }
          }
        }

        if (length == 0) {
          // Flush the backing store.
          array->initialize_elements();
        } else {
          // Remove elements that should be deleted.
          int removed_entries = 0;
          for (int entry = 0; entry < capacity; entry++) {
            Object index = dict->KeyAt(entry);
            if (dict->IsKey(roots, index)) {
              uint32_t number = static_cast<uint32_t>(index->Number());
              if (length <= number && number < old_length) {
                dict->ClearEntry(isolate, entry);
                removed_entries++;
              }
            }
          }

          if (removed_entries > 0) {
            // Update the number of elements.
            dict->ElementsRemoved(removed_entries);
          }
        }
      }
    }

    // The (possibly clamped) length may exceed Smi range; NewNumberFromUint
    // allocates a heap number when necessary.
    Handle<Object> length_obj = isolate->factory()->NewNumberFromUint(length);
    array->set_length(*length_obj);
  }
1485 | |
  // Raw element copying into/out of a dictionary store is not supported by
  // this accessor.
  static void CopyElementsImpl(Isolate* isolate, FixedArrayBase from,
                               uint32_t from_start, FixedArrayBase to,
                               ElementsKind from_kind, uint32_t to_start,
                               int packed_size, int copy_size) {
    UNREACHABLE();
  }
1492 | |
1493 | static Handle<JSObject> SliceImpl(Handle<JSObject> receiver, uint32_t start, |
1494 | uint32_t end) { |
1495 | Isolate* isolate = receiver->GetIsolate(); |
1496 | uint32_t result_length = end < start ? 0u : end - start; |
1497 | |
1498 | // Result must also be a dictionary. |
1499 | Handle<JSArray> result_array = |
1500 | isolate->factory()->NewJSArray(0, HOLEY_ELEMENTS); |
1501 | JSObject::NormalizeElements(result_array); |
1502 | result_array->set_length(Smi::FromInt(result_length)); |
1503 | Handle<NumberDictionary> source_dict( |
1504 | NumberDictionary::cast(receiver->elements()), isolate); |
1505 | int entry_count = source_dict->Capacity(); |
1506 | ReadOnlyRoots roots(isolate); |
1507 | for (int i = 0; i < entry_count; i++) { |
1508 | Object key = source_dict->KeyAt(i); |
1509 | if (!source_dict->ToKey(roots, i, &key)) continue; |
1510 | uint64_t key_value = NumberToInt64(key); |
1511 | if (key_value >= start && key_value < end) { |
1512 | Handle<NumberDictionary> dest_dict( |
1513 | NumberDictionary::cast(result_array->elements()), isolate); |
1514 | Handle<Object> value(source_dict->ValueAt(i), isolate); |
1515 | PropertyDetails details = source_dict->DetailsAt(i); |
1516 | PropertyAttributes attr = details.attributes(); |
1517 | AddImpl(result_array, static_cast<uint32_t>(key_value) - start, value, |
1518 | attr, 0); |
1519 | } |
1520 | } |
1521 | |
1522 | return result_array; |
1523 | } |
1524 | |
1525 | static void DeleteImpl(Handle<JSObject> obj, uint32_t entry) { |
1526 | Handle<NumberDictionary> dict(NumberDictionary::cast(obj->elements()), |
1527 | obj->GetIsolate()); |
1528 | dict = NumberDictionary::DeleteEntry(obj->GetIsolate(), dict, entry); |
1529 | obj->set_elements(*dict); |
1530 | } |
1531 | |
1532 | static bool HasAccessorsImpl(JSObject holder, FixedArrayBase backing_store) { |
1533 | DisallowHeapAllocation no_gc; |
1534 | NumberDictionary dict = NumberDictionary::cast(backing_store); |
1535 | if (!dict->requires_slow_elements()) return false; |
1536 | int capacity = dict->Capacity(); |
1537 | ReadOnlyRoots roots = holder->GetReadOnlyRoots(); |
1538 | for (int i = 0; i < capacity; i++) { |
1539 | Object key = dict->KeyAt(i); |
1540 | if (!dict->IsKey(roots, key)) continue; |
1541 | PropertyDetails details = dict->DetailsAt(i); |
1542 | if (details.kind() == kAccessor) return true; |
1543 | } |
1544 | return false; |
1545 | } |
1546 | |
1547 | static Object GetRaw(FixedArrayBase store, uint32_t entry) { |
1548 | NumberDictionary backing_store = NumberDictionary::cast(store); |
1549 | return backing_store->ValueAt(entry); |
1550 | } |
1551 | |
1552 | static Handle<Object> GetImpl(Isolate* isolate, FixedArrayBase backing_store, |
1553 | uint32_t entry) { |
1554 | return handle(GetRaw(backing_store, entry), isolate); |
1555 | } |
1556 | |
1557 | static inline void SetImpl(Handle<JSObject> holder, uint32_t entry, |
1558 | Object value) { |
1559 | SetImpl(holder->elements(), entry, value); |
1560 | } |
1561 | |
1562 | static inline void SetImpl(FixedArrayBase backing_store, uint32_t entry, |
1563 | Object value) { |
1564 | NumberDictionary::cast(backing_store)->ValueAtPut(entry, value); |
1565 | } |
1566 | |
1567 | static void ReconfigureImpl(Handle<JSObject> object, |
1568 | Handle<FixedArrayBase> store, uint32_t entry, |
1569 | Handle<Object> value, |
1570 | PropertyAttributes attributes) { |
1571 | NumberDictionary dictionary = NumberDictionary::cast(*store); |
1572 | if (attributes != NONE) object->RequireSlowElements(dictionary); |
1573 | dictionary->ValueAtPut(entry, *value); |
1574 | PropertyDetails details = dictionary->DetailsAt(entry); |
1575 | details = PropertyDetails(kData, attributes, PropertyCellType::kNoCell, |
1576 | details.dictionary_index()); |
1577 | |
1578 | dictionary->DetailsAtPut(object->GetIsolate(), entry, details); |
1579 | } |
1580 | |
1581 | static void AddImpl(Handle<JSObject> object, uint32_t index, |
1582 | Handle<Object> value, PropertyAttributes attributes, |
1583 | uint32_t new_capacity) { |
1584 | PropertyDetails details(kData, attributes, PropertyCellType::kNoCell); |
1585 | Handle<NumberDictionary> dictionary = |
1586 | object->HasFastElements() || object->HasFastStringWrapperElements() |
1587 | ? JSObject::NormalizeElements(object) |
1588 | : handle(NumberDictionary::cast(object->elements()), |
1589 | object->GetIsolate()); |
1590 | Handle<NumberDictionary> new_dictionary = NumberDictionary::Add( |
1591 | object->GetIsolate(), dictionary, index, value, details); |
1592 | new_dictionary->UpdateMaxNumberKey(index, object); |
1593 | if (attributes != NONE) object->RequireSlowElements(*new_dictionary); |
1594 | if (dictionary.is_identical_to(new_dictionary)) return; |
1595 | object->set_elements(*new_dictionary); |
1596 | } |
1597 | |
1598 | static bool HasEntryImpl(Isolate* isolate, FixedArrayBase store, |
1599 | uint32_t entry) { |
1600 | DisallowHeapAllocation no_gc; |
1601 | NumberDictionary dict = NumberDictionary::cast(store); |
1602 | Object index = dict->KeyAt(entry); |
1603 | return !index->IsTheHole(isolate); |
1604 | } |
1605 | |
1606 | static uint32_t GetIndexForEntryImpl(FixedArrayBase store, uint32_t entry) { |
1607 | DisallowHeapAllocation no_gc; |
1608 | NumberDictionary dict = NumberDictionary::cast(store); |
1609 | uint32_t result = 0; |
1610 | CHECK(dict->KeyAt(entry)->ToArrayIndex(&result)); |
1611 | return result; |
1612 | } |
1613 | |
1614 | static uint32_t GetEntryForIndexImpl(Isolate* isolate, JSObject holder, |
1615 | FixedArrayBase store, uint32_t index, |
1616 | PropertyFilter filter) { |
1617 | DisallowHeapAllocation no_gc; |
1618 | NumberDictionary dictionary = NumberDictionary::cast(store); |
1619 | int entry = dictionary->FindEntry(isolate, index); |
1620 | if (entry == NumberDictionary::kNotFound) return kMaxUInt32; |
1621 | if (filter != ALL_PROPERTIES) { |
1622 | PropertyDetails details = dictionary->DetailsAt(entry); |
1623 | PropertyAttributes attr = details.attributes(); |
1624 | if ((attr & filter) != 0) return kMaxUInt32; |
1625 | } |
1626 | return static_cast<uint32_t>(entry); |
1627 | } |
1628 | |
1629 | static PropertyDetails GetDetailsImpl(JSObject holder, uint32_t entry) { |
1630 | return GetDetailsImpl(holder->elements(), entry); |
1631 | } |
1632 | |
1633 | static PropertyDetails GetDetailsImpl(FixedArrayBase backing_store, |
1634 | uint32_t entry) { |
1635 | return NumberDictionary::cast(backing_store)->DetailsAt(entry); |
1636 | } |
1637 | |
1638 | static uint32_t FilterKey(Handle<NumberDictionary> dictionary, int entry, |
1639 | Object raw_key, PropertyFilter filter) { |
1640 | DCHECK(raw_key->IsNumber()); |
1641 | DCHECK_LE(raw_key->Number(), kMaxUInt32); |
1642 | PropertyDetails details = dictionary->DetailsAt(entry); |
1643 | PropertyAttributes attr = details.attributes(); |
1644 | if ((attr & filter) != 0) return kMaxUInt32; |
1645 | return static_cast<uint32_t>(raw_key->Number()); |
1646 | } |
1647 | |
1648 | static uint32_t GetKeyForEntryImpl(Isolate* isolate, |
1649 | Handle<NumberDictionary> dictionary, |
1650 | int entry, PropertyFilter filter) { |
1651 | DisallowHeapAllocation no_gc; |
1652 | Object raw_key = dictionary->KeyAt(entry); |
1653 | if (!dictionary->IsKey(ReadOnlyRoots(isolate), raw_key)) return kMaxUInt32; |
1654 | return FilterKey(dictionary, entry, raw_key, filter); |
1655 | } |
1656 | |
  // Collects the element indices of |object| into |keys| in ascending
  // numeric order. Indices rejected by the filter are still reported via
  // AddShadowingKey (presumably so they can shadow same-named keys further
  // up the prototype chain — confirm against KeyAccumulator).
  static void CollectElementIndicesImpl(Handle<JSObject> object,
                                        Handle<FixedArrayBase> backing_store,
                                        KeyAccumulator* keys) {
    if (keys->filter() & SKIP_STRINGS) return;
    Isolate* isolate = keys->isolate();
    Handle<NumberDictionary> dictionary =
        Handle<NumberDictionary>::cast(backing_store);
    int capacity = dictionary->Capacity();
    // Scratch array sized for the worst case where every entry is kept.
    Handle<FixedArray> elements = isolate->factory()->NewFixedArray(
        GetMaxNumberOfEntries(*object, *backing_store));
    int insertion_index = 0;
    PropertyFilter filter = keys->filter();
    ReadOnlyRoots roots(isolate);
    for (int i = 0; i < capacity; i++) {
      Object raw_key = dictionary->KeyAt(i);
      if (!dictionary->IsKey(roots, raw_key)) continue;
      uint32_t key = FilterKey(dictionary, i, raw_key, filter);
      if (key == kMaxUInt32) {
        keys->AddShadowingKey(raw_key);
        continue;
      }
      elements->set(insertion_index, raw_key);
      insertion_index++;
    }
    // Dictionary iteration order is hash order; sort so the accumulator
    // sees indices in ascending order.
    SortIndices(isolate, elements, insertion_index);
    for (int i = 0; i < insertion_index; i++) {
      keys->AddKey(elements->get(i));
    }
  }
1686 | |
1687 | static Handle<FixedArray> DirectCollectElementIndicesImpl( |
1688 | Isolate* isolate, Handle<JSObject> object, |
1689 | Handle<FixedArrayBase> backing_store, GetKeysConversion convert, |
1690 | PropertyFilter filter, Handle<FixedArray> list, uint32_t* nof_indices, |
1691 | uint32_t insertion_index = 0) { |
1692 | if (filter & SKIP_STRINGS) return list; |
1693 | if (filter & ONLY_ALL_CAN_READ) return list; |
1694 | |
1695 | Handle<NumberDictionary> dictionary = |
1696 | Handle<NumberDictionary>::cast(backing_store); |
1697 | uint32_t capacity = dictionary->Capacity(); |
1698 | for (uint32_t i = 0; i < capacity; i++) { |
1699 | uint32_t key = GetKeyForEntryImpl(isolate, dictionary, i, filter); |
1700 | if (key == kMaxUInt32) continue; |
1701 | Handle<Object> index = isolate->factory()->NewNumberFromUint(key); |
1702 | list->set(insertion_index, *index); |
1703 | insertion_index++; |
1704 | } |
1705 | *nof_indices = insertion_index; |
1706 | return list; |
1707 | } |
1708 | |
1709 | static void AddElementsToKeyAccumulatorImpl(Handle<JSObject> receiver, |
1710 | KeyAccumulator* accumulator, |
1711 | AddKeyConversion convert) { |
1712 | Isolate* isolate = accumulator->isolate(); |
1713 | Handle<NumberDictionary> dictionary( |
1714 | NumberDictionary::cast(receiver->elements()), isolate); |
1715 | int capacity = dictionary->Capacity(); |
1716 | ReadOnlyRoots roots(isolate); |
1717 | for (int i = 0; i < capacity; i++) { |
1718 | Object k = dictionary->KeyAt(i); |
1719 | if (!dictionary->IsKey(roots, k)) continue; |
1720 | Object value = dictionary->ValueAt(i); |
1721 | DCHECK(!value->IsTheHole(isolate)); |
1722 | DCHECK(!value->IsAccessorPair()); |
1723 | DCHECK(!value->IsAccessorInfo()); |
1724 | accumulator->AddKey(value, convert); |
1725 | } |
1726 | } |
1727 | |
  // Attempts a single-pass includes() scan over the dictionary without
  // allocating. Returns false (without setting |result|) if an accessor
  // property is found in range, because getters must then be invoked in
  // index order on the slow path. Otherwise sets |result| and returns true.
  static bool IncludesValueFastPath(Isolate* isolate, Handle<JSObject> receiver,
                                    Handle<Object> value, uint32_t start_from,
                                    uint32_t length, Maybe<bool>* result) {
    DisallowHeapAllocation no_gc;
    NumberDictionary dictionary = NumberDictionary::cast(receiver->elements());
    int capacity = dictionary->Capacity();
    Object the_hole = ReadOnlyRoots(isolate).the_hole_value();
    Object undefined = ReadOnlyRoots(isolate).undefined_value();

    // Scan for accessor properties. If accessors are present, then elements
    // must be accessed in order via the slow path.
    bool found = false;
    for (int i = 0; i < capacity; ++i) {
      Object k = dictionary->KeyAt(i);
      // Both sentinels mark dead entries (deleted entries use undefined).
      if (k == the_hole) continue;
      if (k == undefined) continue;

      uint32_t index;
      // Skip keys outside [start_from, length).
      if (!k->ToArrayIndex(&index) || index < start_from || index >= length) {
        continue;
      }

      if (dictionary->DetailsAt(i).kind() == kAccessor) {
        // Restart from beginning in slow path, otherwise we may observably
        // access getters out of order
        return false;
      } else if (!found) {
        // Keep scanning even after a match: a later accessor would still
        // force the slow path.
        Object element_k = dictionary->ValueAt(i);
        if (value->SameValueZero(element_k)) found = true;
      }
    }

    *result = Just(found);
    return true;
  }
1763 | |
  // Implements Array.prototype.includes (SameValueZero semantics) for
  // dictionary-mode receivers. Searching for undefined also matches holes,
  // so the fast path is only tried otherwise. Getter invocations can
  // arbitrarily mutate the receiver, so the elements store is re-validated
  // after every accessor call.
  static Maybe<bool> IncludesValueImpl(Isolate* isolate,
                                       Handle<JSObject> receiver,
                                       Handle<Object> value,
                                       uint32_t start_from, uint32_t length) {
    DCHECK(JSObject::PrototypeHasNoElements(isolate, *receiver));
    // Per SameValueZero, undefined matches missing entries (holes) too.
    bool search_for_hole = value->IsUndefined(isolate);

    if (!search_for_hole) {
      Maybe<bool> result = Nothing<bool>();
      if (DictionaryElementsAccessor::IncludesValueFastPath(
              isolate, receiver, value, start_from, length, &result)) {
        return result;
      }
    }
    // Debug-only tracking that the elements kind stays stable across
    // iterations (data-property iterations cannot change it).
    ElementsKind original_elements_kind = receiver->GetElementsKind();
    USE(original_elements_kind);
    Handle<NumberDictionary> dictionary(
        NumberDictionary::cast(receiver->elements()), isolate);
    // Iterate through entire range, as accessing elements out of order is
    // observable
    for (uint32_t k = start_from; k < length; ++k) {
      DCHECK_EQ(receiver->GetElementsKind(), original_elements_kind);
      int entry = dictionary->FindEntry(isolate, k);
      if (entry == NumberDictionary::kNotFound) {
        if (search_for_hole) return Just(true);
        continue;
      }

      PropertyDetails details = GetDetailsImpl(*dictionary, entry);
      switch (details.kind()) {
        case kData: {
          Object element_k = dictionary->ValueAt(entry);
          if (value->SameValueZero(element_k)) return Just(true);
          break;
        }
        case kAccessor: {
          // Invoke the getter; this may run arbitrary JS and throw.
          LookupIterator it(isolate, receiver, k,
                            LookupIterator::OWN_SKIP_INTERCEPTOR);
          DCHECK(it.IsFound());
          DCHECK_EQ(it.state(), LookupIterator::ACCESSOR);
          Handle<Object> element_k;

          ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, element_k,
                                           Object::GetPropertyWithAccessor(&it),
                                           Nothing<bool>());

          if (value->SameValueZero(*element_k)) return Just(true);

          // Bailout to slow path if elements on prototype changed
          if (!JSObject::PrototypeHasNoElements(isolate, *receiver)) {
            return IncludesValueSlowPath(isolate, receiver, value, k + 1,
                                         length);
          }

          // Continue if elements unchanged
          if (*dictionary == receiver->elements()) continue;

          // Otherwise, bailout or update elements

          // If switched to initial elements, return true if searching for
          // undefined, and false otherwise.
          if (receiver->map()->GetInitialElements() == receiver->elements()) {
            return Just(search_for_hole);
          }

          // If switched to fast elements, continue with the correct accessor.
          if (receiver->GetElementsKind() != DICTIONARY_ELEMENTS) {
            ElementsAccessor* accessor = receiver->GetElementsAccessor();
            return accessor->IncludesValue(isolate, receiver, value, k + 1,
                                           length);
          }
          // Still dictionary-mode but with a replaced store: re-handlify.
          dictionary =
              handle(NumberDictionary::cast(receiver->elements()), isolate);
          break;
        }
      }
    }
    return Just(false);
  }
1843 | |
  // Implements Array.prototype.indexOf (StrictEquals semantics) for
  // dictionary-mode receivers. Returns the first matching index in
  // [start_from, length), or -1. As with includes(), getters may mutate the
  // receiver, so the store is re-validated after every accessor call; unlike
  // includes(), holes never match (StrictEquals against a missing entry has
  // no hole case), so kNotFound simply continues.
  static Maybe<int64_t> IndexOfValueImpl(Isolate* isolate,
                                         Handle<JSObject> receiver,
                                         Handle<Object> value,
                                         uint32_t start_from, uint32_t length) {
    DCHECK(JSObject::PrototypeHasNoElements(isolate, *receiver));

    // Debug-only tracking that the elements kind stays stable across
    // data-property iterations.
    ElementsKind original_elements_kind = receiver->GetElementsKind();
    USE(original_elements_kind);
    Handle<NumberDictionary> dictionary(
        NumberDictionary::cast(receiver->elements()), isolate);
    // Iterate through entire range, as accessing elements out of order is
    // observable.
    for (uint32_t k = start_from; k < length; ++k) {
      DCHECK_EQ(receiver->GetElementsKind(), original_elements_kind);
      int entry = dictionary->FindEntry(isolate, k);
      if (entry == NumberDictionary::kNotFound) continue;

      PropertyDetails details = GetDetailsImpl(*dictionary, entry);
      switch (details.kind()) {
        case kData: {
          Object element_k = dictionary->ValueAt(entry);
          if (value->StrictEquals(element_k)) {
            return Just<int64_t>(k);
          }
          break;
        }
        case kAccessor: {
          // Invoke the getter; this may run arbitrary JS and throw.
          LookupIterator it(isolate, receiver, k,
                            LookupIterator::OWN_SKIP_INTERCEPTOR);
          DCHECK(it.IsFound());
          DCHECK_EQ(it.state(), LookupIterator::ACCESSOR);
          Handle<Object> element_k;

          ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, element_k,
                                           Object::GetPropertyWithAccessor(&it),
                                           Nothing<int64_t>());

          if (value->StrictEquals(*element_k)) return Just<int64_t>(k);

          // Bailout to slow path if elements on prototype changed.
          if (!JSObject::PrototypeHasNoElements(isolate, *receiver)) {
            return IndexOfValueSlowPath(isolate, receiver, value, k + 1,
                                        length);
          }

          // Continue if elements unchanged.
          if (*dictionary == receiver->elements()) continue;

          // Otherwise, bailout or update elements.
          if (receiver->GetElementsKind() != DICTIONARY_ELEMENTS) {
            // Otherwise, switch to slow path.
            return IndexOfValueSlowPath(isolate, receiver, value, k + 1,
                                        length);
          }
          // Still dictionary-mode but with a replaced store: re-handlify.
          dictionary =
              handle(NumberDictionary::cast(receiver->elements()), isolate);
          break;
        }
      }
    }
    return Just<int64_t>(-1);
  }
1906 | |
  // Debug-only consistency check for dictionary elements: verifies the
  // requires_slow_elements flag and max_number_key bookkeeping against the
  // actual dictionary contents. No-op in release builds or when slow
  // asserts are disabled.
  static void ValidateContents(JSObject holder, int length) {
    DisallowHeapAllocation no_gc;
#if DEBUG
    DCHECK_EQ(holder->map()->elements_kind(), DICTIONARY_ELEMENTS);
    if (!FLAG_enable_slow_asserts) return;
    ReadOnlyRoots roots = holder->GetReadOnlyRoots();
    NumberDictionary dictionary = NumberDictionary::cast(holder->elements());
    // Validate the requires_slow_elements and max_number_key values.
    int capacity = dictionary->Capacity();
    bool requires_slow_elements = false;
    int max_key = 0;
    for (int i = 0; i < capacity; ++i) {
      Object k;
      if (!dictionary->ToKey(roots, i, &k)) continue;
      DCHECK_LE(0.0, k->Number());
      if (k->Number() > NumberDictionary::kRequiresSlowElementsLimit) {
        // Keys above the limit must have set the slow-elements flag.
        requires_slow_elements = true;
      } else {
        // Below the limit the key fits in a Smi; track the maximum.
        max_key = Max(max_key, Smi::ToInt(k));
      }
    }
    if (requires_slow_elements) {
      DCHECK(dictionary->requires_slow_elements());
    } else if (!dictionary->requires_slow_elements()) {
      // max_number_key is only meaningful while the flag is unset.
      DCHECK_LE(max_key, dictionary->max_number_key());
    }
#endif
  }
1935 | }; |
1936 | |
1937 | |
1938 | // Super class for all fast element arrays. |
1939 | template <typename Subclass, typename KindTraits> |
1940 | class FastElementsAccessor : public ElementsAccessorBase<Subclass, KindTraits> { |
1941 | public: |
1942 | explicit FastElementsAccessor(const char* name) |
1943 | : ElementsAccessorBase<Subclass, KindTraits>(name) {} |
1944 | |
1945 | typedef typename KindTraits::BackingStore BackingStore; |
1946 | |
  // Converts a fast backing store into a NumberDictionary containing all
  // non-hole elements of |store|. Note the dual-index loop below: |i| scans
  // store slots while |j| counts copied elements; the loop runs until all
  // |capacity| used elements (GetFastElementsUsage) have been copied.
  static Handle<NumberDictionary> NormalizeImpl(Handle<JSObject> object,
                                                Handle<FixedArrayBase> store) {
    Isolate* isolate = object->GetIsolate();
    ElementsKind kind = Subclass::kind();

    // Ensure that notifications fire if the array or object prototypes are
    // normalizing.
    if (IsSmiOrObjectElementsKind(kind) ||
        kind == FAST_STRING_WRAPPER_ELEMENTS) {
      isolate->UpdateNoElementsProtectorOnNormalizeElements(object);
    }

    // Capacity here is the number of *used* elements, not the store length.
    int capacity = object->GetFastElementsUsage();
    Handle<NumberDictionary> dictionary =
        NumberDictionary::New(isolate, capacity);

    PropertyDetails details = PropertyDetails::Empty();
    int j = 0;
    int max_number_key = -1;
    for (int i = 0; j < capacity; i++) {
      if (IsHoleyElementsKind(kind)) {
        // Holes are skipped entirely; they do not get dictionary entries.
        if (BackingStore::cast(*store)->is_the_hole(isolate, i)) continue;
      }
      max_number_key = i;
      Handle<Object> value = Subclass::GetImpl(isolate, *store, i);
      dictionary =
          NumberDictionary::Add(isolate, dictionary, i, value, details);
      j++;
    }

    if (max_number_key > 0) {
      dictionary->UpdateMaxNumberKey(static_cast<uint32_t>(max_number_key),
                                     object);
    }
    return dictionary;
  }
1983 | |
  // Shrinks the backing store after a delete at (or near) its end: walks
  // backwards past trailing holes, then either swaps in the canonical empty
  // FixedArray (if nothing remains) or right-trims the store to the last
  // live element.
  static void DeleteAtEnd(Handle<JSObject> obj,
                          Handle<BackingStore> backing_store, uint32_t entry) {
    uint32_t length = static_cast<uint32_t>(backing_store->length());
    Isolate* isolate = obj->GetIsolate();
    // Skip trailing holes below |entry|; afterwards |entry| is the count of
    // remaining (non-hole-suffixed) elements.
    for (; entry > 0; entry--) {
      if (!backing_store->is_the_hole(isolate, entry - 1)) break;
    }
    if (entry == 0) {
      FixedArray empty = ReadOnlyRoots(isolate).empty_fixed_array();
      // Dynamically ask for the elements kind here since we manually redirect
      // the operations for argument backing stores.
      if (obj->GetElementsKind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS) {
        SloppyArgumentsElements::cast(obj->elements())->set_arguments(empty);
      } else {
        obj->set_elements(empty);
      }
      return;
    }

    isolate->heap()->RightTrimFixedArray(*backing_store, length - entry);
  }
2005 | |
2006 | static void DeleteCommon(Handle<JSObject> obj, uint32_t entry, |
2007 | Handle<FixedArrayBase> store) { |
2008 | DCHECK(obj->HasSmiOrObjectElements() || obj->HasDoubleElements() || |
2009 | obj->HasFastArgumentsElements() || |
2010 | obj->HasFastStringWrapperElements()); |
2011 | Handle<BackingStore> backing_store = Handle<BackingStore>::cast(store); |
2012 | if (!obj->IsJSArray() && |
2013 | entry == static_cast<uint32_t>(store->length()) - 1) { |
2014 | DeleteAtEnd(obj, backing_store, entry); |
2015 | return; |
2016 | } |
2017 | |
2018 | Isolate* isolate = obj->GetIsolate(); |
2019 | backing_store->set_the_hole(isolate, entry); |
2020 | |
2021 | // TODO(verwaest): Move this out of elements.cc. |
2022 | // If an old space backing store is larger than a certain size and |
2023 | // has too few used values, normalize it. |
2024 | const int kMinLengthForSparsenessCheck = 64; |
2025 | if (backing_store->length() < kMinLengthForSparsenessCheck) return; |
2026 | // TODO(ulan): Check if it works with young large objects. |
2027 | if (ObjectInYoungGeneration(*backing_store)) return; |
2028 | uint32_t length = 0; |
2029 | if (obj->IsJSArray()) { |
2030 | JSArray::cast(*obj)->length()->ToArrayLength(&length); |
2031 | } else { |
2032 | length = static_cast<uint32_t>(store->length()); |
2033 | } |
2034 | |
2035 | // To avoid doing the check on every delete, use a counter-based heuristic. |
2036 | const int kLengthFraction = 16; |
2037 | // The above constant must be large enough to ensure that we check for |
2038 | // normalization frequently enough. At a minimum, it should be large |
2039 | // enough to reliably hit the "window" of remaining elements count where |
2040 | // normalization would be beneficial. |
2041 | STATIC_ASSERT(kLengthFraction >= |
2042 | NumberDictionary::kEntrySize * |
2043 | NumberDictionary::kPreferFastElementsSizeFactor); |
2044 | size_t current_counter = isolate->elements_deletion_counter(); |
2045 | if (current_counter < length / kLengthFraction) { |
2046 | isolate->set_elements_deletion_counter(current_counter + 1); |
2047 | return; |
2048 | } |
2049 | // Reset the counter whenever the full check is performed. |
2050 | isolate->set_elements_deletion_counter(0); |
2051 | |
2052 | if (!obj->IsJSArray()) { |
2053 | uint32_t i; |
2054 | for (i = entry + 1; i < length; i++) { |
2055 | if (!backing_store->is_the_hole(isolate, i)) break; |
2056 | } |
2057 | if (i == length) { |
2058 | DeleteAtEnd(obj, backing_store, entry); |
2059 | return; |
2060 | } |
2061 | } |
2062 | int num_used = 0; |
2063 | for (int i = 0; i < backing_store->length(); ++i) { |
2064 | if (!backing_store->is_the_hole(isolate, i)) { |
2065 | ++num_used; |
2066 | // Bail out if a number dictionary wouldn't be able to save much space. |
2067 | if (NumberDictionary::kPreferFastElementsSizeFactor * |
2068 | NumberDictionary::ComputeCapacity(num_used) * |
2069 | NumberDictionary::kEntrySize > |
2070 | static_cast< |
---|