1 | // Copyright 2018 the V8 project authors. All rights reserved. |
2 | // Use of this source code is governed by a BSD-style license that can be |
3 | // found in the LICENSE file. |
4 | |
5 | #ifndef V8_OBJECTS_JS_OBJECTS_INL_H_ |
6 | #define V8_OBJECTS_JS_OBJECTS_INL_H_ |
7 | |
8 | #include "src/objects/js-objects.h" |
9 | |
10 | #include "src/feedback-vector.h" |
11 | #include "src/field-index-inl.h" |
12 | #include "src/heap/heap-write-barrier.h" |
13 | #include "src/keys.h" |
14 | #include "src/lookup-inl.h" |
15 | #include "src/objects/embedder-data-slot-inl.h" |
16 | #include "src/objects/feedback-cell-inl.h" |
17 | #include "src/objects/hash-table-inl.h" |
18 | #include "src/objects/heap-number-inl.h" |
19 | #include "src/objects/property-array-inl.h" |
20 | #include "src/objects/shared-function-info.h" |
21 | #include "src/objects/slots.h" |
22 | #include "src/objects/smi-inl.h" |
23 | #include "src/prototype-inl.h" |
24 | |
25 | // Has to be the last include (doesn't have include guards): |
26 | #include "src/objects/object-macros.h" |
27 | |
28 | namespace v8 { |
29 | namespace internal { |
30 | |
31 | OBJECT_CONSTRUCTORS_IMPL(JSReceiver, HeapObject) |
32 | OBJECT_CONSTRUCTORS_IMPL(JSObject, JSReceiver) |
33 | OBJECT_CONSTRUCTORS_IMPL(JSAsyncFromSyncIterator, JSObject) |
34 | OBJECT_CONSTRUCTORS_IMPL(JSBoundFunction, JSObject) |
35 | OBJECT_CONSTRUCTORS_IMPL(JSDate, JSObject) |
36 | OBJECT_CONSTRUCTORS_IMPL(JSFunction, JSObject) |
37 | OBJECT_CONSTRUCTORS_IMPL(JSGlobalObject, JSObject) |
38 | OBJECT_CONSTRUCTORS_IMPL(JSGlobalProxy, JSObject) |
OBJECT_CONSTRUCTORS_IMPL(JSIteratorResult, JSObject)
40 | OBJECT_CONSTRUCTORS_IMPL(JSMessageObject, JSObject) |
41 | OBJECT_CONSTRUCTORS_IMPL(JSStringIterator, JSObject) |
42 | OBJECT_CONSTRUCTORS_IMPL(JSValue, JSObject) |
43 | |
44 | NEVER_READ_ONLY_SPACE_IMPL(JSReceiver) |
45 | |
46 | CAST_ACCESSOR(JSAsyncFromSyncIterator) |
47 | CAST_ACCESSOR(JSBoundFunction) |
48 | CAST_ACCESSOR(JSDate) |
49 | CAST_ACCESSOR(JSFunction) |
50 | CAST_ACCESSOR(JSGlobalObject) |
51 | CAST_ACCESSOR(JSGlobalProxy) |
52 | CAST_ACCESSOR(JSIteratorResult) |
53 | CAST_ACCESSOR(JSMessageObject) |
54 | CAST_ACCESSOR(JSObject) |
55 | CAST_ACCESSOR(JSReceiver) |
56 | CAST_ACCESSOR(JSStringIterator) |
57 | CAST_ACCESSOR(JSValue) |
58 | |
59 | MaybeHandle<Object> JSReceiver::GetProperty(Isolate* isolate, |
60 | Handle<JSReceiver> receiver, |
61 | Handle<Name> name) { |
62 | LookupIterator it(isolate, receiver, name, receiver); |
63 | if (!it.IsFound()) return it.factory()->undefined_value(); |
64 | return Object::GetProperty(&it); |
65 | } |
66 | |
67 | MaybeHandle<Object> JSReceiver::GetElement(Isolate* isolate, |
68 | Handle<JSReceiver> receiver, |
69 | uint32_t index) { |
70 | LookupIterator it(isolate, receiver, index, receiver); |
71 | if (!it.IsFound()) return it.factory()->undefined_value(); |
72 | return Object::GetProperty(&it); |
73 | } |
74 | |
75 | Handle<Object> JSReceiver::GetDataProperty(Handle<JSReceiver> object, |
76 | Handle<Name> name) { |
77 | LookupIterator it(object, name, object, |
78 | LookupIterator::PROTOTYPE_CHAIN_SKIP_INTERCEPTOR); |
79 | if (!it.IsFound()) return it.factory()->undefined_value(); |
80 | return GetDataProperty(&it); |
81 | } |
82 | |
83 | MaybeHandle<HeapObject> JSReceiver::GetPrototype(Isolate* isolate, |
84 | Handle<JSReceiver> receiver) { |
85 | // We don't expect access checks to be needed on JSProxy objects. |
86 | DCHECK(!receiver->IsAccessCheckNeeded() || receiver->IsJSObject()); |
87 | PrototypeIterator iter(isolate, receiver, kStartAtReceiver, |
88 | PrototypeIterator::END_AT_NON_HIDDEN); |
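  // Note (explanatory, not from the original source): AdvanceFollowingProxies()
  // returns false when it cannot advance, e.g. when a proxy trap throws or the
  // proxy is revoked; in that case an empty MaybeHandle propagates the
  // exception to the caller.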
89 | do { |
90 | if (!iter.AdvanceFollowingProxies()) return MaybeHandle<HeapObject>(); |
91 | } while (!iter.IsAtEnd()); |
92 | return PrototypeIterator::GetCurrent(iter); |
93 | } |
94 | |
95 | MaybeHandle<Object> JSReceiver::GetProperty(Isolate* isolate, |
96 | Handle<JSReceiver> receiver, |
97 | const char* name) { |
98 | Handle<String> str = isolate->factory()->InternalizeUtf8String(name); |
99 | return GetProperty(isolate, receiver, str); |
100 | } |
101 | |
102 | // static |
103 | V8_WARN_UNUSED_RESULT MaybeHandle<FixedArray> JSReceiver::OwnPropertyKeys( |
104 | Handle<JSReceiver> object) { |
105 | return KeyAccumulator::GetKeys(object, KeyCollectionMode::kOwnOnly, |
106 | ALL_PROPERTIES, |
107 | GetKeysConversion::kConvertToString); |
108 | } |
109 | |
110 | bool JSObject::PrototypeHasNoElements(Isolate* isolate, JSObject object) { |
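  // Explanatory note: this predicate returns true only if nothing on the
  // prototype chain can contribute elements, i.e. every prototype up to null
  // has empty elements and a non-custom-elements map. Fast paths can then
  // treat holes in the receiver's backing store as genuinely absent without
  // consulting the chain.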
111 | DisallowHeapAllocation no_gc; |
112 | HeapObject prototype = HeapObject::cast(object->map()->prototype()); |
113 | ReadOnlyRoots roots(isolate); |
114 | HeapObject null = roots.null_value(); |
115 | FixedArrayBase empty_fixed_array = roots.empty_fixed_array(); |
116 | FixedArrayBase empty_slow_element_dictionary = |
117 | roots.empty_slow_element_dictionary(); |
118 | while (prototype != null) { |
119 | Map map = prototype->map(); |
120 | if (map->IsCustomElementsReceiverMap()) return false; |
121 | FixedArrayBase elements = JSObject::cast(prototype)->elements(); |
122 | if (elements != empty_fixed_array && |
123 | elements != empty_slow_element_dictionary) { |
124 | return false; |
125 | } |
126 | prototype = HeapObject::cast(map->prototype()); |
127 | } |
128 | return true; |
129 | } |
130 | |
131 | ACCESSORS(JSReceiver, raw_properties_or_hash, Object, kPropertiesOrHashOffset) |
132 | |
133 | FixedArrayBase JSObject::elements() const { |
134 | Object array = READ_FIELD(*this, kElementsOffset); |
135 | return FixedArrayBase::cast(array); |
136 | } |
137 | |
138 | void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) { |
139 | JSObject::ValidateElements(*object); |
140 | ElementsKind elements_kind = object->map()->elements_kind(); |
141 | if (!IsObjectElementsKind(elements_kind)) { |
142 | if (IsHoleyElementsKind(elements_kind)) { |
143 | TransitionElementsKind(object, HOLEY_ELEMENTS); |
144 | } else { |
145 | TransitionElementsKind(object, PACKED_ELEMENTS); |
146 | } |
147 | } |
148 | } |
149 | |
150 | template <typename TSlot> |
151 | void JSObject::EnsureCanContainElements(Handle<JSObject> object, TSlot objects, |
152 | uint32_t count, |
153 | EnsureElementsMode mode) { |
154 | static_assert(std::is_same<TSlot, FullObjectSlot>::value || |
155 | std::is_same<TSlot, ObjectSlot>::value, |
156 | "Only ObjectSlot and FullObjectSlot are expected here" ); |
157 | ElementsKind current_kind = object->GetElementsKind(); |
158 | ElementsKind target_kind = current_kind; |
159 | { |
160 | DisallowHeapAllocation no_allocation; |
161 | DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS); |
162 | bool is_holey = IsHoleyElementsKind(current_kind); |
163 | if (current_kind == HOLEY_ELEMENTS) return; |
164 | Object the_hole = object->GetReadOnlyRoots().the_hole_value(); |
165 | for (uint32_t i = 0; i < count; ++i, ++objects) { |
166 | Object current = *objects; |
167 | if (current == the_hole) { |
168 | is_holey = true; |
169 | target_kind = GetHoleyElementsKind(target_kind); |
170 | } else if (!current->IsSmi()) { |
171 | if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) { |
172 | if (IsSmiElementsKind(target_kind)) { |
173 | if (is_holey) { |
174 | target_kind = HOLEY_DOUBLE_ELEMENTS; |
175 | } else { |
176 | target_kind = PACKED_DOUBLE_ELEMENTS; |
177 | } |
178 | } |
179 | } else if (is_holey) { |
180 | target_kind = HOLEY_ELEMENTS; |
181 | break; |
182 | } else { |
183 | target_kind = PACKED_ELEMENTS; |
184 | } |
185 | } |
186 | } |
187 | } |
188 | if (target_kind != current_kind) { |
189 | TransitionElementsKind(object, target_kind); |
190 | } |
191 | } |
192 | |
193 | void JSObject::EnsureCanContainElements(Handle<JSObject> object, |
194 | Handle<FixedArrayBase> elements, |
195 | uint32_t length, |
196 | EnsureElementsMode mode) { |
197 | ReadOnlyRoots roots = object->GetReadOnlyRoots(); |
198 | if (elements->map() != roots.fixed_double_array_map()) { |
199 | DCHECK(elements->map() == roots.fixed_array_map() || |
200 | elements->map() == roots.fixed_cow_array_map()); |
201 | if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) { |
202 | mode = DONT_ALLOW_DOUBLE_ELEMENTS; |
203 | } |
204 | ObjectSlot objects = |
205 | Handle<FixedArray>::cast(elements)->GetFirstElementAddress(); |
206 | EnsureCanContainElements(object, objects, length, mode); |
207 | return; |
208 | } |
209 | |
210 | DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS); |
211 | if (object->GetElementsKind() == HOLEY_SMI_ELEMENTS) { |
212 | TransitionElementsKind(object, HOLEY_DOUBLE_ELEMENTS); |
213 | } else if (object->GetElementsKind() == PACKED_SMI_ELEMENTS) { |
214 | Handle<FixedDoubleArray> double_array = |
215 | Handle<FixedDoubleArray>::cast(elements); |
216 | for (uint32_t i = 0; i < length; ++i) { |
217 | if (double_array->is_the_hole(i)) { |
218 | TransitionElementsKind(object, HOLEY_DOUBLE_ELEMENTS); |
219 | return; |
220 | } |
221 | } |
222 | TransitionElementsKind(object, PACKED_DOUBLE_ELEMENTS); |
223 | } |
224 | } |
225 | |
226 | void JSObject::SetMapAndElements(Handle<JSObject> object, Handle<Map> new_map, |
227 | Handle<FixedArrayBase> value) { |
228 | JSObject::MigrateToMap(object, new_map); |
229 | DCHECK((object->map()->has_fast_smi_or_object_elements() || |
230 | (*value == object->GetReadOnlyRoots().empty_fixed_array()) || |
231 | object->map()->has_fast_string_wrapper_elements()) == |
232 | (value->map() == object->GetReadOnlyRoots().fixed_array_map() || |
233 | value->map() == object->GetReadOnlyRoots().fixed_cow_array_map())); |
234 | DCHECK((*value == object->GetReadOnlyRoots().empty_fixed_array()) || |
235 | (object->map()->has_fast_double_elements() == |
236 | value->IsFixedDoubleArray())); |
237 | object->set_elements(*value); |
238 | } |
239 | |
240 | void JSObject::set_elements(FixedArrayBase value, WriteBarrierMode mode) { |
241 | WRITE_FIELD(*this, kElementsOffset, value); |
242 | CONDITIONAL_WRITE_BARRIER(*this, kElementsOffset, value, mode); |
243 | } |
244 | |
245 | void JSObject::initialize_elements() { |
246 | FixedArrayBase elements = map()->GetInitialElements(); |
247 | WRITE_FIELD(*this, kElementsOffset, elements); |
248 | } |
249 | |
250 | InterceptorInfo JSObject::GetIndexedInterceptor() { |
251 | return map()->GetIndexedInterceptor(); |
252 | } |
253 | |
254 | InterceptorInfo JSObject::GetNamedInterceptor() { |
255 | return map()->GetNamedInterceptor(); |
256 | } |
257 | |
int JSObject::GetHeaderSize() const { return GetHeaderSize(map()); }
259 | |
int JSObject::GetHeaderSize(const Map map) {
261 | // Check for the most common kind of JavaScript object before |
262 | // falling into the generic switch. This speeds up the internal |
263 | // field operations considerably on average. |
264 | InstanceType instance_type = map->instance_type(); |
265 | return instance_type == JS_OBJECT_TYPE |
266 | ? JSObject::kHeaderSize |
267 | : GetHeaderSize(instance_type, map->has_prototype_slot()); |
268 | } |
269 | |
270 | // static |
271 | int JSObject::GetEmbedderFieldsStartOffset(const Map map) { |
272 | // Embedder fields are located after the object header. |
273 | return GetHeaderSize(map); |
274 | } |
275 | |
276 | int JSObject::GetEmbedderFieldsStartOffset() { |
277 | return GetEmbedderFieldsStartOffset(map()); |
278 | } |
279 | |
280 | // static |
281 | int JSObject::GetEmbedderFieldCount(const Map map) { |
282 | int instance_size = map->instance_size(); |
283 | if (instance_size == kVariableSizeSentinel) return 0; |
284 | // Embedder fields are located after the object header, whereas in-object |
285 | // properties are located at the end of the object. We don't have to round up |
286 | // the header size here because division by kEmbedderDataSlotSizeInTaggedSlots |
287 | // will swallow potential padding in case of (kTaggedSize != |
288 | // kSystemPointerSize) anyway. |
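  // Illustrative example (hypothetical numbers, not taken from this file):
  // with kTaggedSize == 8, kEmbedderDataSlotSizeInTaggedSlots == 1, an
  // instance_size of 80, a 24-byte header and 4 in-object properties, this
  // evaluates to ((80 - 24) >> 3) - 4 == 3 embedder fields.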
289 | return (((instance_size - GetEmbedderFieldsStartOffset(map)) >> |
290 | kTaggedSizeLog2) - |
291 | map->GetInObjectProperties()) / |
292 | kEmbedderDataSlotSizeInTaggedSlots; |
293 | } |
294 | |
295 | int JSObject::GetEmbedderFieldCount() const { |
296 | return GetEmbedderFieldCount(map()); |
297 | } |
298 | |
299 | int JSObject::GetEmbedderFieldOffset(int index) { |
300 | DCHECK_LT(static_cast<unsigned>(index), |
301 | static_cast<unsigned>(GetEmbedderFieldCount())); |
302 | return GetEmbedderFieldsStartOffset() + (kEmbedderDataSlotSize * index); |
303 | } |
304 | |
305 | Object JSObject::GetEmbedderField(int index) { |
306 | return EmbedderDataSlot(*this, index).load_tagged(); |
307 | } |
308 | |
309 | void JSObject::SetEmbedderField(int index, Object value) { |
310 | EmbedderDataSlot::store_tagged(*this, index, value); |
311 | } |
312 | |
313 | void JSObject::SetEmbedderField(int index, Smi value) { |
314 | EmbedderDataSlot(*this, index).store_smi(value); |
315 | } |
316 | |
317 | bool JSObject::IsUnboxedDoubleField(FieldIndex index) { |
318 | if (!FLAG_unbox_double_fields) return false; |
319 | return map()->IsUnboxedDoubleField(index); |
320 | } |
321 | |
// Access fast-case object properties at index. These routines are needed to
// correctly distinguish between properties stored in-object and properties
// stored in the properties array.
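// Explanatory note: when FLAG_unbox_double_fields is enabled (64-bit targets
// without pointer compression), fields with double representation are stored
// as raw 64-bit values directly in the object instead of via a
// MutableHeapNumber box; RawFastDoublePropertyAt and the *AsBits* accessors
// below read and write that raw representation.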
325 | Object JSObject::RawFastPropertyAt(FieldIndex index) { |
326 | DCHECK(!IsUnboxedDoubleField(index)); |
327 | if (index.is_inobject()) { |
328 | return READ_FIELD(*this, index.offset()); |
329 | } else { |
330 | return property_array()->get(index.outobject_array_index()); |
331 | } |
332 | } |
333 | |
334 | double JSObject::RawFastDoublePropertyAt(FieldIndex index) { |
335 | DCHECK(IsUnboxedDoubleField(index)); |
336 | return READ_DOUBLE_FIELD(*this, index.offset()); |
337 | } |
338 | |
339 | uint64_t JSObject::RawFastDoublePropertyAsBitsAt(FieldIndex index) { |
340 | DCHECK(IsUnboxedDoubleField(index)); |
341 | return READ_UINT64_FIELD(*this, index.offset()); |
342 | } |
343 | |
344 | void JSObject::RawFastPropertyAtPut(FieldIndex index, Object value) { |
345 | if (index.is_inobject()) { |
346 | int offset = index.offset(); |
347 | WRITE_FIELD(*this, offset, value); |
348 | WRITE_BARRIER(*this, offset, value); |
349 | } else { |
350 | property_array()->set(index.outobject_array_index(), value); |
351 | } |
352 | } |
353 | |
354 | void JSObject::RawFastDoublePropertyAsBitsAtPut(FieldIndex index, |
355 | uint64_t bits) { |
356 | // Double unboxing is enabled only on 64-bit platforms without pointer |
357 | // compression. |
358 | DCHECK_EQ(kDoubleSize, kTaggedSize); |
359 | Address field_addr = FIELD_ADDR(*this, index.offset()); |
360 | base::Relaxed_Store(reinterpret_cast<base::AtomicWord*>(field_addr), |
361 | static_cast<base::AtomicWord>(bits)); |
362 | } |
363 | |
364 | void JSObject::FastPropertyAtPut(FieldIndex index, Object value) { |
365 | if (IsUnboxedDoubleField(index)) { |
366 | DCHECK(value->IsMutableHeapNumber()); |
367 | // Ensure that all bits of the double value are preserved. |
368 | RawFastDoublePropertyAsBitsAtPut( |
369 | index, MutableHeapNumber::cast(value)->value_as_bits()); |
370 | } else { |
371 | RawFastPropertyAtPut(index, value); |
372 | } |
373 | } |
374 | |
375 | void JSObject::WriteToField(int descriptor, PropertyDetails details, |
376 | Object value) { |
377 | DCHECK_EQ(kField, details.location()); |
378 | DCHECK_EQ(kData, details.kind()); |
379 | DisallowHeapAllocation no_gc; |
380 | FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor); |
381 | if (details.representation().IsDouble()) { |
382 | // Nothing more to be done. |
383 | if (value->IsUninitialized()) { |
384 | return; |
385 | } |
// Manipulating the signaling NaN used for the hole and uninitialized
// double field sentinel in C++, e.g. with bit_cast or value()/set_value(),
// will change its value on ia32 (the x87 stack is used to return values
// and stores to the stack silently clear the signaling bit).
390 | uint64_t bits; |
391 | if (value->IsSmi()) { |
392 | bits = bit_cast<uint64_t>(static_cast<double>(Smi::ToInt(value))); |
393 | } else { |
394 | DCHECK(value->IsHeapNumber()); |
395 | bits = HeapNumber::cast(value)->value_as_bits(); |
396 | } |
397 | if (IsUnboxedDoubleField(index)) { |
398 | RawFastDoublePropertyAsBitsAtPut(index, bits); |
399 | } else { |
400 | auto box = MutableHeapNumber::cast(RawFastPropertyAt(index)); |
401 | box->set_value_as_bits(bits); |
402 | } |
403 | } else { |
404 | RawFastPropertyAtPut(index, value); |
405 | } |
406 | } |
407 | |
408 | int JSObject::GetInObjectPropertyOffset(int index) { |
409 | return map()->GetInObjectPropertyOffset(index); |
410 | } |
411 | |
412 | Object JSObject::InObjectPropertyAt(int index) { |
413 | int offset = GetInObjectPropertyOffset(index); |
414 | return READ_FIELD(*this, offset); |
415 | } |
416 | |
417 | Object JSObject::InObjectPropertyAtPut(int index, Object value, |
418 | WriteBarrierMode mode) { |
419 | // Adjust for the number of properties stored in the object. |
420 | int offset = GetInObjectPropertyOffset(index); |
421 | WRITE_FIELD(*this, offset, value); |
422 | CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode); |
423 | return value; |
424 | } |
425 | |
426 | void JSObject::InitializeBody(Map map, int start_offset, |
427 | Object pre_allocated_value, Object filler_value) { |
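  // Explanatory note: the body is filled in two runs. Slots reserved for
  // pre-allocated in-object properties (everything before the unused-property
  // slack) receive pre_allocated_value; the remaining slack receives
  // filler_value. When both values are equal, the first loop is skipped and a
  // single pass suffices.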
428 | DCHECK_IMPLIES(filler_value->IsHeapObject(), |
429 | !ObjectInYoungGeneration(filler_value)); |
430 | DCHECK_IMPLIES(pre_allocated_value->IsHeapObject(), |
431 | !ObjectInYoungGeneration(pre_allocated_value)); |
432 | int size = map->instance_size(); |
433 | int offset = start_offset; |
434 | if (filler_value != pre_allocated_value) { |
435 | int end_of_pre_allocated_offset = |
436 | size - (map->UnusedPropertyFields() * kTaggedSize); |
437 | DCHECK_LE(kHeaderSize, end_of_pre_allocated_offset); |
438 | while (offset < end_of_pre_allocated_offset) { |
439 | WRITE_FIELD(*this, offset, pre_allocated_value); |
440 | offset += kTaggedSize; |
441 | } |
442 | } |
443 | while (offset < size) { |
444 | WRITE_FIELD(*this, offset, filler_value); |
445 | offset += kTaggedSize; |
446 | } |
447 | } |
448 | |
449 | Object JSBoundFunction::raw_bound_target_function() const { |
450 | return READ_FIELD(*this, kBoundTargetFunctionOffset); |
451 | } |
452 | |
453 | ACCESSORS(JSBoundFunction, bound_target_function, JSReceiver, |
454 | kBoundTargetFunctionOffset) |
455 | ACCESSORS(JSBoundFunction, bound_this, Object, kBoundThisOffset) |
456 | ACCESSORS(JSBoundFunction, bound_arguments, FixedArray, kBoundArgumentsOffset) |
457 | |
458 | ACCESSORS(JSFunction, raw_feedback_cell, FeedbackCell, kFeedbackCellOffset) |
459 | |
460 | ACCESSORS(JSGlobalObject, native_context, NativeContext, kNativeContextOffset) |
461 | ACCESSORS(JSGlobalObject, global_proxy, JSGlobalProxy, kGlobalProxyOffset) |
462 | |
463 | ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset) |
464 | |
465 | FeedbackVector JSFunction::feedback_vector() const { |
466 | DCHECK(has_feedback_vector()); |
467 | return FeedbackVector::cast(raw_feedback_cell()->value()); |
468 | } |
469 | |
470 | ClosureFeedbackCellArray JSFunction::closure_feedback_cell_array() const { |
471 | DCHECK(has_closure_feedback_cell_array()); |
472 | return ClosureFeedbackCellArray::cast(raw_feedback_cell()->value()); |
473 | } |
474 | |
475 | // Code objects that are marked for deoptimization are not considered to be |
476 | // optimized. This is because the JSFunction might have been already |
477 | // deoptimized but its code() still needs to be unlinked, which will happen on |
478 | // its next activation. |
479 | // TODO(jupvfranco): rename this function. Maybe RunOptimizedCode, |
480 | // or IsValidOptimizedCode. |
481 | bool JSFunction::IsOptimized() { |
482 | return is_compiled() && code()->kind() == Code::OPTIMIZED_FUNCTION && |
483 | !code()->marked_for_deoptimization(); |
484 | } |
485 | |
486 | bool JSFunction::HasOptimizedCode() { |
487 | return IsOptimized() || |
488 | (has_feedback_vector() && feedback_vector()->has_optimized_code() && |
489 | !feedback_vector()->optimized_code()->marked_for_deoptimization()); |
490 | } |
491 | |
492 | bool JSFunction::HasOptimizationMarker() { |
493 | return has_feedback_vector() && feedback_vector()->has_optimization_marker(); |
494 | } |
495 | |
496 | void JSFunction::ClearOptimizationMarker() { |
497 | DCHECK(has_feedback_vector()); |
498 | feedback_vector()->ClearOptimizationMarker(); |
499 | } |
500 | |
// Optimized code that has been marked for deoptimization will tier back down
// to interpreted execution on its next activation; such code already does not
// count as IsOptimized.
503 | bool JSFunction::IsInterpreted() { |
504 | return is_compiled() && (code()->is_interpreter_trampoline_builtin() || |
505 | (code()->kind() == Code::OPTIMIZED_FUNCTION && |
506 | code()->marked_for_deoptimization())); |
507 | } |
508 | |
509 | bool JSFunction::ChecksOptimizationMarker() { |
510 | return code()->checks_optimization_marker(); |
511 | } |
512 | |
513 | bool JSFunction::IsMarkedForOptimization() { |
514 | return has_feedback_vector() && feedback_vector()->optimization_marker() == |
515 | OptimizationMarker::kCompileOptimized; |
516 | } |
517 | |
518 | bool JSFunction::IsMarkedForConcurrentOptimization() { |
519 | return has_feedback_vector() && |
520 | feedback_vector()->optimization_marker() == |
521 | OptimizationMarker::kCompileOptimizedConcurrent; |
522 | } |
523 | |
524 | bool JSFunction::IsInOptimizationQueue() { |
525 | return has_feedback_vector() && feedback_vector()->optimization_marker() == |
526 | OptimizationMarker::kInOptimizationQueue; |
527 | } |
528 | |
529 | void JSFunction::CompleteInobjectSlackTrackingIfActive() { |
530 | if (!has_prototype_slot()) return; |
531 | if (has_initial_map() && initial_map()->IsInobjectSlackTrackingInProgress()) { |
532 | initial_map()->CompleteInobjectSlackTracking(GetIsolate()); |
533 | } |
534 | } |
535 | |
536 | AbstractCode JSFunction::abstract_code() { |
537 | if (IsInterpreted()) { |
538 | return AbstractCode::cast(shared()->GetBytecodeArray()); |
539 | } else { |
540 | return AbstractCode::cast(code()); |
541 | } |
542 | } |
543 | |
544 | int JSFunction::length() { return shared()->length(); } |
545 | |
546 | Code JSFunction::code() const { |
547 | return Code::cast(RELAXED_READ_FIELD(*this, kCodeOffset)); |
548 | } |
549 | |
550 | void JSFunction::set_code(Code value) { |
551 | DCHECK(!ObjectInYoungGeneration(value)); |
552 | RELAXED_WRITE_FIELD(*this, kCodeOffset, value); |
553 | MarkingBarrier(*this, RawField(kCodeOffset), value); |
554 | } |
555 | |
556 | void JSFunction::set_code_no_write_barrier(Code value) { |
557 | DCHECK(!ObjectInYoungGeneration(value)); |
558 | RELAXED_WRITE_FIELD(*this, kCodeOffset, value); |
559 | } |
560 | |
561 | SharedFunctionInfo JSFunction::shared() const { |
562 | return SharedFunctionInfo::cast( |
563 | RELAXED_READ_FIELD(*this, kSharedFunctionInfoOffset)); |
564 | } |
565 | |
566 | void JSFunction::set_shared(SharedFunctionInfo value, WriteBarrierMode mode) { |
// Release semantics to support acquire read in NeedsResetDueToFlushedBytecode.
568 | RELEASE_WRITE_FIELD(*this, kSharedFunctionInfoOffset, value); |
569 | CONDITIONAL_WRITE_BARRIER(*this, kSharedFunctionInfoOffset, value, mode); |
570 | } |
571 | |
572 | void JSFunction::ClearOptimizedCodeSlot(const char* reason) { |
573 | if (has_feedback_vector() && feedback_vector()->has_optimized_code()) { |
574 | if (FLAG_trace_opt) { |
575 | PrintF("[evicting entry from optimizing code feedback slot (%s) for " , |
576 | reason); |
577 | ShortPrint(); |
578 | PrintF("]\n" ); |
579 | } |
580 | feedback_vector()->ClearOptimizedCode(); |
581 | } |
582 | } |
583 | |
584 | void JSFunction::SetOptimizationMarker(OptimizationMarker marker) { |
585 | DCHECK(has_feedback_vector()); |
586 | DCHECK(ChecksOptimizationMarker()); |
587 | DCHECK(!HasOptimizedCode()); |
588 | |
589 | feedback_vector()->SetOptimizationMarker(marker); |
590 | } |
591 | |
592 | bool JSFunction::has_feedback_vector() const { |
593 | return shared()->is_compiled() && |
594 | raw_feedback_cell()->value()->IsFeedbackVector(); |
595 | } |
596 | |
597 | bool JSFunction::has_closure_feedback_cell_array() const { |
598 | return shared()->is_compiled() && |
599 | raw_feedback_cell()->value()->IsClosureFeedbackCellArray(); |
600 | } |
601 | |
602 | Context JSFunction::context() { |
603 | return Context::cast(READ_FIELD(*this, kContextOffset)); |
604 | } |
605 | |
606 | bool JSFunction::has_context() const { |
607 | return READ_FIELD(*this, kContextOffset)->IsContext(); |
608 | } |
609 | |
610 | JSGlobalProxy JSFunction::global_proxy() { return context()->global_proxy(); } |
611 | |
612 | NativeContext JSFunction::native_context() { |
613 | return context()->native_context(); |
614 | } |
615 | |
616 | void JSFunction::set_context(Object value) { |
617 | DCHECK(value->IsUndefined() || value->IsContext()); |
618 | WRITE_FIELD(*this, kContextOffset, value); |
619 | WRITE_BARRIER(*this, kContextOffset, value); |
620 | } |
621 | |
622 | ACCESSORS_CHECKED(JSFunction, prototype_or_initial_map, Object, |
623 | kPrototypeOrInitialMapOffset, map()->has_prototype_slot()) |
624 | |
625 | bool JSFunction::has_prototype_slot() const { |
626 | return map()->has_prototype_slot(); |
627 | } |
628 | |
629 | Map JSFunction::initial_map() { return Map::cast(prototype_or_initial_map()); } |
630 | |
631 | bool JSFunction::has_initial_map() { |
632 | DCHECK(has_prototype_slot()); |
633 | return prototype_or_initial_map()->IsMap(); |
634 | } |
635 | |
636 | bool JSFunction::has_instance_prototype() { |
637 | DCHECK(has_prototype_slot()); |
638 | return has_initial_map() || !prototype_or_initial_map()->IsTheHole(); |
639 | } |
640 | |
641 | bool JSFunction::has_prototype() { |
642 | DCHECK(has_prototype_slot()); |
643 | return map()->has_non_instance_prototype() || has_instance_prototype(); |
644 | } |
645 | |
646 | bool JSFunction::has_prototype_property() { |
647 | return (has_prototype_slot() && IsConstructor()) || |
648 | IsGeneratorFunction(shared()->kind()); |
649 | } |
650 | |
651 | bool JSFunction::PrototypeRequiresRuntimeLookup() { |
652 | return !has_prototype_property() || map()->has_non_instance_prototype(); |
653 | } |
654 | |
655 | HeapObject JSFunction::instance_prototype() { |
656 | DCHECK(has_instance_prototype()); |
657 | if (has_initial_map()) return initial_map()->prototype(); |
658 | // When there is no initial map and the prototype is a JSReceiver, the |
659 | // initial map field is used for the prototype field. |
660 | return HeapObject::cast(prototype_or_initial_map()); |
661 | } |
662 | |
663 | Object JSFunction::prototype() { |
664 | DCHECK(has_prototype()); |
665 | // If the function's prototype property has been set to a non-JSReceiver |
666 | // value, that value is stored in the constructor field of the map. |
667 | if (map()->has_non_instance_prototype()) { |
668 | Object prototype = map()->GetConstructor(); |
669 | // The map must have a prototype in that field, not a back pointer. |
670 | DCHECK(!prototype->IsMap()); |
671 | DCHECK(!prototype->IsFunctionTemplateInfo()); |
672 | return prototype; |
673 | } |
674 | return instance_prototype(); |
675 | } |
676 | |
677 | bool JSFunction::is_compiled() const { |
678 | return code()->builtin_index() != Builtins::kCompileLazy && |
679 | shared()->is_compiled(); |
680 | } |
681 | |
682 | bool JSFunction::NeedsResetDueToFlushedBytecode() { |
683 | // Do a raw read for shared and code fields here since this function may be |
684 | // called on a concurrent thread and the JSFunction might not be fully |
685 | // initialized yet. |
686 | Object maybe_shared = ACQUIRE_READ_FIELD(*this, kSharedFunctionInfoOffset); |
687 | Object maybe_code = RELAXED_READ_FIELD(*this, kCodeOffset); |
688 | |
689 | if (!maybe_shared->IsSharedFunctionInfo() || !maybe_code->IsCode()) { |
690 | return false; |
691 | } |
692 | |
693 | SharedFunctionInfo shared = SharedFunctionInfo::cast(maybe_shared); |
694 | Code code = Code::cast(maybe_code); |
695 | return !shared->is_compiled() && |
696 | code->builtin_index() != Builtins::kCompileLazy; |
697 | } |
698 | |
699 | void JSFunction::ResetIfBytecodeFlushed() { |
700 | if (FLAG_flush_bytecode && NeedsResetDueToFlushedBytecode()) { |
701 | // Bytecode was flushed and function is now uncompiled, reset JSFunction |
702 | // by setting code to CompileLazy and clearing the feedback vector. |
703 | set_code(GetIsolate()->builtins()->builtin(i::Builtins::kCompileLazy)); |
704 | raw_feedback_cell()->set_value( |
705 | ReadOnlyRoots(GetIsolate()).undefined_value()); |
706 | } |
707 | } |
708 | |
709 | ACCESSORS(JSValue, value, Object, kValueOffset) |
710 | |
711 | ACCESSORS(JSDate, value, Object, kValueOffset) |
712 | ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset) |
713 | ACCESSORS(JSDate, year, Object, kYearOffset) |
714 | ACCESSORS(JSDate, month, Object, kMonthOffset) |
715 | ACCESSORS(JSDate, day, Object, kDayOffset) |
716 | ACCESSORS(JSDate, weekday, Object, kWeekdayOffset) |
717 | ACCESSORS(JSDate, hour, Object, kHourOffset) |
718 | ACCESSORS(JSDate, min, Object, kMinOffset) |
719 | ACCESSORS(JSDate, sec, Object, kSecOffset) |
720 | |
721 | MessageTemplate JSMessageObject::type() const { |
722 | Object value = READ_FIELD(*this, kMessageTypeOffset); |
723 | return MessageTemplateFromInt(Smi::ToInt(value)); |
724 | } |
725 | void JSMessageObject::set_type(MessageTemplate value) { |
726 | WRITE_FIELD(*this, kMessageTypeOffset, Smi::FromInt(static_cast<int>(value))); |
727 | } |
728 | ACCESSORS(JSMessageObject, argument, Object, kArgumentsOffset) |
729 | ACCESSORS(JSMessageObject, script, Script, kScriptOffset) |
730 | ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset) |
731 | SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset) |
732 | SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset) |
733 | SMI_ACCESSORS(JSMessageObject, error_level, kErrorLevelOffset) |
734 | |
735 | ElementsKind JSObject::GetElementsKind() const { |
736 | ElementsKind kind = map()->elements_kind(); |
737 | #if VERIFY_HEAP && DEBUG |
738 | FixedArrayBase fixed_array = |
739 | FixedArrayBase::unchecked_cast(READ_FIELD(*this, kElementsOffset)); |
740 | |
741 | // If a GC was caused while constructing this object, the elements |
742 | // pointer may point to a one pointer filler map. |
743 | if (ElementsAreSafeToExamine()) { |
744 | Map map = fixed_array->map(); |
745 | if (IsSmiOrObjectElementsKind(kind)) { |
746 | DCHECK(map == GetReadOnlyRoots().fixed_array_map() || |
747 | map == GetReadOnlyRoots().fixed_cow_array_map()); |
748 | } else if (IsDoubleElementsKind(kind)) { |
749 | DCHECK(fixed_array->IsFixedDoubleArray() || |
750 | fixed_array == GetReadOnlyRoots().empty_fixed_array()); |
751 | } else if (kind == DICTIONARY_ELEMENTS) { |
752 | DCHECK(fixed_array->IsFixedArray()); |
753 | DCHECK(fixed_array->IsDictionary()); |
754 | } else { |
755 | DCHECK(kind > DICTIONARY_ELEMENTS || IsFrozenOrSealedElementsKind(kind)); |
756 | } |
757 | DCHECK(!IsSloppyArgumentsElementsKind(kind) || |
758 | (elements()->IsFixedArray() && elements()->length() >= 2)); |
759 | } |
760 | #endif |
761 | return kind; |
762 | } |
763 | |
764 | bool JSObject::HasObjectElements() { |
765 | return IsObjectElementsKind(GetElementsKind()); |
766 | } |
767 | |
768 | bool JSObject::HasSmiElements() { return IsSmiElementsKind(GetElementsKind()); } |
769 | |
770 | bool JSObject::HasSmiOrObjectElements() { |
771 | return IsSmiOrObjectElementsKind(GetElementsKind()); |
772 | } |
773 | |
774 | bool JSObject::HasDoubleElements() { |
775 | return IsDoubleElementsKind(GetElementsKind()); |
776 | } |
777 | |
778 | bool JSObject::HasHoleyElements() { |
779 | return IsHoleyElementsKind(GetElementsKind()); |
780 | } |
781 | |
782 | bool JSObject::HasFastElements() { |
783 | return IsFastElementsKind(GetElementsKind()); |
784 | } |
785 | |
786 | bool JSObject::HasFastPackedElements() { |
787 | return IsFastPackedElementsKind(GetElementsKind()); |
788 | } |
789 | |
790 | bool JSObject::HasDictionaryElements() { |
791 | return GetElementsKind() == DICTIONARY_ELEMENTS; |
792 | } |
793 | |
794 | bool JSObject::HasPackedElements() { |
795 | return GetElementsKind() == PACKED_ELEMENTS; |
796 | } |
797 | |
798 | bool JSObject::HasFrozenOrSealedElements() { |
799 | return IsFrozenOrSealedElementsKind(GetElementsKind()); |
800 | } |
801 | |
802 | bool JSObject::HasFastArgumentsElements() { |
803 | return GetElementsKind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS; |
804 | } |
805 | |
806 | bool JSObject::HasSlowArgumentsElements() { |
807 | return GetElementsKind() == SLOW_SLOPPY_ARGUMENTS_ELEMENTS; |
808 | } |
809 | |
810 | bool JSObject::HasSloppyArgumentsElements() { |
811 | return IsSloppyArgumentsElementsKind(GetElementsKind()); |
812 | } |
813 | |
814 | bool JSObject::HasStringWrapperElements() { |
815 | return IsStringWrapperElementsKind(GetElementsKind()); |
816 | } |
817 | |
818 | bool JSObject::HasFastStringWrapperElements() { |
819 | return GetElementsKind() == FAST_STRING_WRAPPER_ELEMENTS; |
820 | } |
821 | |
822 | bool JSObject::HasSlowStringWrapperElements() { |
823 | return GetElementsKind() == SLOW_STRING_WRAPPER_ELEMENTS; |
824 | } |
825 | |
826 | bool JSObject::HasFixedTypedArrayElements() { |
827 | DCHECK(!elements().is_null()); |
828 | return map()->has_fixed_typed_array_elements(); |
829 | } |
830 | |
831 | #define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype) \ |
832 | bool JSObject::HasFixed##Type##Elements() { \ |
833 | FixedArrayBase array = elements(); \ |
834 | return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \ |
835 | } |
836 | |
837 | TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK) |
838 | |
839 | #undef FIXED_TYPED_ELEMENTS_CHECK |
840 | |
841 | bool JSObject::HasNamedInterceptor() { return map()->has_named_interceptor(); } |
842 | |
843 | bool JSObject::HasIndexedInterceptor() { |
844 | return map()->has_indexed_interceptor(); |
845 | } |
846 | |
847 | void JSGlobalObject::set_global_dictionary(GlobalDictionary dictionary) { |
848 | DCHECK(IsJSGlobalObject()); |
849 | set_raw_properties_or_hash(dictionary); |
850 | } |
851 | |
852 | GlobalDictionary JSGlobalObject::global_dictionary() { |
853 | DCHECK(!HasFastProperties()); |
854 | DCHECK(IsJSGlobalObject()); |
855 | return GlobalDictionary::cast(raw_properties_or_hash()); |
856 | } |
857 | |
858 | NumberDictionary JSObject::element_dictionary() { |
859 | DCHECK(HasDictionaryElements() || HasSlowStringWrapperElements()); |
860 | return NumberDictionary::cast(elements()); |
861 | } |
862 | |
863 | void JSReceiver::initialize_properties() { |
864 | ReadOnlyRoots roots = GetReadOnlyRoots(); |
865 | DCHECK(!ObjectInYoungGeneration(roots.empty_fixed_array())); |
866 | DCHECK(!ObjectInYoungGeneration(roots.empty_property_dictionary())); |
867 | if (map()->is_dictionary_map()) { |
868 | WRITE_FIELD(*this, kPropertiesOrHashOffset, |
869 | roots.empty_property_dictionary()); |
870 | } else { |
871 | WRITE_FIELD(*this, kPropertiesOrHashOffset, roots.empty_fixed_array()); |
872 | } |
873 | } |
874 | |
875 | bool JSReceiver::HasFastProperties() const { |
876 | DCHECK( |
877 | raw_properties_or_hash()->IsSmi() || |
878 | (raw_properties_or_hash()->IsDictionary() == map()->is_dictionary_map())); |
879 | return !map()->is_dictionary_map(); |
880 | } |
881 | |
882 | NameDictionary JSReceiver::property_dictionary() const { |
883 | DCHECK(!IsJSGlobalObject()); |
884 | DCHECK(!HasFastProperties()); |
885 | |
886 | Object prop = raw_properties_or_hash(); |
887 | if (prop->IsSmi()) { |
888 | return GetReadOnlyRoots().empty_property_dictionary(); |
889 | } |
890 | |
891 | return NameDictionary::cast(prop); |
892 | } |
893 | |
894 | // TODO(gsathya): Pass isolate directly to this function and access |
895 | // the heap from this. |
896 | PropertyArray JSReceiver::property_array() const { |
897 | DCHECK(HasFastProperties()); |
898 | |
899 | Object prop = raw_properties_or_hash(); |
900 | if (prop->IsSmi() || prop == GetReadOnlyRoots().empty_fixed_array()) { |
901 | return GetReadOnlyRoots().empty_property_array(); |
902 | } |
903 | |
904 | return PropertyArray::cast(prop); |
905 | } |
906 | |
907 | Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object, |
908 | Handle<Name> name) { |
909 | LookupIterator it = LookupIterator::PropertyOrElement(object->GetIsolate(), |
910 | object, name, object); |
911 | return HasProperty(&it); |
912 | } |
913 | |
914 | Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object, |
915 | uint32_t index) { |
916 | if (object->IsJSModuleNamespace()) return Just(false); |
917 | |
918 | if (object->IsJSObject()) { // Shortcut. |
919 | LookupIterator it(object->GetIsolate(), object, index, object, |
920 | LookupIterator::OWN); |
921 | return HasProperty(&it); |
922 | } |
923 | |
924 | Maybe<PropertyAttributes> attributes = |
925 | JSReceiver::GetOwnPropertyAttributes(object, index); |
926 | MAYBE_RETURN(attributes, Nothing<bool>()); |
927 | return Just(attributes.FromJust() != ABSENT); |
928 | } |
929 | |
930 | Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes( |
931 | Handle<JSReceiver> object, Handle<Name> name) { |
932 | LookupIterator it = LookupIterator::PropertyOrElement(object->GetIsolate(), |
933 | object, name, object); |
934 | return GetPropertyAttributes(&it); |
935 | } |
936 | |
937 | Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes( |
938 | Handle<JSReceiver> object, Handle<Name> name) { |
939 | LookupIterator it = LookupIterator::PropertyOrElement( |
940 | object->GetIsolate(), object, name, object, LookupIterator::OWN); |
941 | return GetPropertyAttributes(&it); |
942 | } |
943 | |
944 | Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes( |
945 | Handle<JSReceiver> object, uint32_t index) { |
946 | LookupIterator it(object->GetIsolate(), object, index, object, |
947 | LookupIterator::OWN); |
948 | return GetPropertyAttributes(&it); |
949 | } |
950 | |
951 | Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) { |
952 | LookupIterator it(object->GetIsolate(), object, index, object); |
953 | return HasProperty(&it); |
954 | } |
955 | |
956 | Maybe<PropertyAttributes> JSReceiver::GetElementAttributes( |
957 | Handle<JSReceiver> object, uint32_t index) { |
958 | Isolate* isolate = object->GetIsolate(); |
959 | LookupIterator it(isolate, object, index, object); |
960 | return GetPropertyAttributes(&it); |
961 | } |
962 | |
963 | Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttributes( |
964 | Handle<JSReceiver> object, uint32_t index) { |
965 | Isolate* isolate = object->GetIsolate(); |
966 | LookupIterator it(isolate, object, index, object, LookupIterator::OWN); |
967 | return GetPropertyAttributes(&it); |
968 | } |
969 | |
970 | bool JSGlobalObject::IsDetached() { |
971 | return global_proxy()->IsDetachedFrom(*this); |
972 | } |
973 | |
974 | bool JSGlobalProxy::IsDetachedFrom(JSGlobalObject global) const { |
975 | const PrototypeIterator iter(this->GetIsolate(), *this); |
976 | return iter.GetCurrent() != global; |
977 | } |
978 | |
979 | inline int JSGlobalProxy::SizeWithEmbedderFields(int embedder_field_count) { |
980 | DCHECK_GE(embedder_field_count, 0); |
981 | return kSize + embedder_field_count * kEmbedderDataSlotSize; |
982 | } |
983 | |
984 | ACCESSORS(JSIteratorResult, value, Object, kValueOffset) |
985 | ACCESSORS(JSIteratorResult, done, Object, kDoneOffset) |
986 | |
987 | ACCESSORS(JSAsyncFromSyncIterator, sync_iterator, JSReceiver, |
988 | kSyncIteratorOffset) |
989 | ACCESSORS(JSAsyncFromSyncIterator, next, Object, kNextOffset) |
990 | |
991 | ACCESSORS(JSStringIterator, string, String, kStringOffset) |
992 | SMI_ACCESSORS(JSStringIterator, index, kNextIndexOffset) |
993 | |
994 | static inline bool ShouldConvertToSlowElements(JSObject object, |
995 | uint32_t capacity, |
996 | uint32_t index, |
997 | uint32_t* new_capacity) { |
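  // Explanatory note: this helper decides whether a store at `index` should
  // push the object into dictionary (slow) elements. It returns false,
  // reporting a suitable fast capacity via *new_capacity, while the index
  // fits, the gap past the current capacity is small enough, and the grown
  // fast backing store would not be much larger than an equivalent
  // dictionary; otherwise it returns true.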
998 | STATIC_ASSERT(JSObject::kMaxUncheckedOldFastElementsLength <= |
999 | JSObject::kMaxUncheckedFastElementsLength); |
1000 | if (index < capacity) { |
1001 | *new_capacity = capacity; |
1002 | return false; |
1003 | } |
1004 | if (index - capacity >= JSObject::kMaxGap) return true; |
1005 | *new_capacity = JSObject::NewElementsCapacity(index + 1); |
1006 | DCHECK_LT(index, *new_capacity); |
1007 | // TODO(ulan): Check if it works with young large objects. |
1008 | if (*new_capacity <= JSObject::kMaxUncheckedOldFastElementsLength || |
1009 | (*new_capacity <= JSObject::kMaxUncheckedFastElementsLength && |
1010 | ObjectInYoungGeneration(object))) { |
1011 | return false; |
1012 | } |
1013 | // If the fast-case backing storage takes up much more memory than a |
1014 | // dictionary backing storage would, the object should have slow elements. |
1015 | int used_elements = object->GetFastElementsUsage(); |
1016 | uint32_t size_threshold = NumberDictionary::kPreferFastElementsSizeFactor * |
1017 | NumberDictionary::ComputeCapacity(used_elements) * |
1018 | NumberDictionary::kEntrySize; |
1019 | return size_threshold <= *new_capacity; |
1020 | } |
1021 | |
1022 | } // namespace internal |
1023 | } // namespace v8 |
1024 | |
1025 | #include "src/objects/object-macros-undef.h" |
1026 | |
1027 | #endif // V8_OBJECTS_JS_OBJECTS_INL_H_ |
1028 | |