1 | // Copyright 2009 the V8 project authors. All rights reserved. |
2 | // Use of this source code is governed by a BSD-style license that can be |
3 | // found in the LICENSE file. |
4 | |
5 | #include "src/global-handles.h" |
6 | |
7 | #include "src/api-inl.h" |
8 | #include "src/base/compiler-specific.h" |
9 | #include "src/cancelable-task.h" |
10 | #include "src/heap/embedder-tracing.h" |
11 | #include "src/heap/heap-write-barrier-inl.h" |
12 | #include "src/objects-inl.h" |
13 | #include "src/objects/slots.h" |
14 | #include "src/task-utils.h" |
15 | #include "src/v8.h" |
16 | #include "src/visitors.h" |
17 | #include "src/vm-state-inl.h" |
18 | |
19 | namespace v8 { |
20 | namespace internal { |
21 | |
22 | namespace { |
23 | |
24 | constexpr size_t kBlockSize = 256; |
25 | |
26 | } // namespace |
27 | |
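// A fixed-size block of kBlockSize nodes. Blocks are chained into two lists:
// the list of all blocks owned by a space (via next_) and a doubly-linked
// list of blocks that contain at least one used node (via next_used_ and
// prev_used_), which is what iteration walks.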
28 | template <class _NodeType> |
29 | class GlobalHandles::NodeBlock final { |
30 | public: |
31 | using BlockType = NodeBlock<_NodeType>; |
32 | using NodeType = _NodeType; |
33 | |
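// Recovers the block that owns |node| from the node pointer alone, using the
// node's index and the fact that nodes_ is the first field of the block.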
34 | V8_INLINE static NodeBlock* From(NodeType* node); |
35 | |
36 | NodeBlock(GlobalHandles* global_handles, |
37 | GlobalHandles::NodeSpace<NodeType>* space, |
38 | NodeBlock* next) V8_NOEXCEPT : next_(next), |
39 | global_handles_(global_handles), |
40 | space_(space) {} |
41 | |
42 | NodeType* at(size_t index) { return &nodes_[index]; } |
43 | const NodeType* at(size_t index) const { return &nodes_[index]; } |
44 | GlobalHandles::NodeSpace<NodeType>* space() const { return space_; } |
45 | GlobalHandles* global_handles() const { return global_handles_; } |
46 | |
47 | V8_INLINE bool IncreaseUsage(); |
48 | V8_INLINE bool DecreaseUsage(); |
49 | |
50 | V8_INLINE void ListAdd(NodeBlock** top); |
51 | V8_INLINE void ListRemove(NodeBlock** top); |
52 | |
53 | NodeBlock* next() const { return next_; } |
54 | NodeBlock* next_used() const { return next_used_; } |
55 | |
56 | private: |
57 | NodeType nodes_[kBlockSize]; |
58 | NodeBlock* const next_; |
59 | GlobalHandles* const global_handles_; |
60 | GlobalHandles::NodeSpace<NodeType>* const space_; |
61 | NodeBlock* next_used_ = nullptr; |
62 | NodeBlock* prev_used_ = nullptr; |
63 | uint32_t used_nodes_ = 0; |
64 | |
65 | DISALLOW_COPY_AND_ASSIGN(NodeBlock); |
66 | }; |
67 | |
68 | template <class NodeType> |
69 | GlobalHandles::NodeBlock<NodeType>* GlobalHandles::NodeBlock<NodeType>::From( |
70 | NodeType* node) { |
71 | uintptr_t ptr = |
72 | reinterpret_cast<uintptr_t>(node) - sizeof(NodeType) * node->index(); |
73 | BlockType* block = reinterpret_cast<BlockType*>(ptr); |
74 | DCHECK_EQ(node, block->at(node->index())); |
75 | return block; |
76 | } |
77 | |
78 | template <class NodeType> |
79 | bool GlobalHandles::NodeBlock<NodeType>::IncreaseUsage() { |
80 | DCHECK_LT(used_nodes_, kBlockSize); |
81 | return used_nodes_++ == 0; |
82 | } |
83 | |
84 | template <class NodeType> |
85 | void GlobalHandles::NodeBlock<NodeType>::ListAdd(BlockType** top) { |
86 | BlockType* old_top = *top; |
87 | *top = this; |
88 | next_used_ = old_top; |
89 | prev_used_ = nullptr; |
90 | if (old_top != nullptr) { |
91 | old_top->prev_used_ = this; |
92 | } |
93 | } |
94 | |
95 | template <class NodeType> |
96 | bool GlobalHandles::NodeBlock<NodeType>::DecreaseUsage() { |
97 | DCHECK_GT(used_nodes_, 0); |
98 | return --used_nodes_ == 0; |
99 | } |
100 | |
101 | template <class NodeType> |
102 | void GlobalHandles::NodeBlock<NodeType>::ListRemove(BlockType** top) { |
103 | if (next_used_ != nullptr) next_used_->prev_used_ = prev_used_; |
104 | if (prev_used_ != nullptr) prev_used_->next_used_ = next_used_; |
105 | if (this == *top) { |
106 | *top = next_used_; |
107 | } |
108 | } |
109 | |
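// Forward iterator over all nodes in the blocks of a space's used list. Note
// that operator== only compares the block pointer; this suffices here because
// iterators are only ever compared against end(), whose block is nullptr.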
110 | template <class BlockType> |
111 | class GlobalHandles::NodeIterator final { |
112 | public: |
113 | using NodeType = typename BlockType::NodeType; |
114 | |
115 | // Iterator traits. |
116 | using iterator_category = std::forward_iterator_tag; |
117 | using difference_type = std::ptrdiff_t; |
118 | using value_type = NodeType*; |
119 | using reference = value_type; |
120 | using pointer = value_type*; |
121 | |
122 | explicit NodeIterator(BlockType* block) V8_NOEXCEPT : block_(block) {} |
123 | NodeIterator(NodeIterator&& other) V8_NOEXCEPT : block_(other.block_), |
124 | index_(other.index_) {} |
125 | |
126 | bool operator==(const NodeIterator& other) const { |
127 | return block_ == other.block_; |
128 | } |
129 | bool operator!=(const NodeIterator& other) const { |
130 | return block_ != other.block_; |
131 | } |
132 | |
133 | NodeIterator& operator++() { |
134 | if (++index_ < kBlockSize) return *this; |
135 | index_ = 0; |
136 | block_ = block_->next_used(); |
137 | return *this; |
138 | } |
139 | |
140 | NodeType* operator*() { return block_->at(index_); } |
141 | NodeType* operator->() { return block_->at(index_); } |
142 | |
143 | private: |
144 | BlockType* block_ = nullptr; |
145 | size_t index_ = 0; |
146 | |
147 | DISALLOW_COPY_AND_ASSIGN(NodeIterator); |
148 | }; |
149 | |
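// Allocation space for nodes of one node type. Nodes are carved out of
// NodeBlocks allocated on demand; released nodes are kept on a per-space free
// list, and blocks with at least one used node are linked into
// first_used_block_ so that iteration only touches populated blocks.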
150 | template <class NodeType> |
151 | class GlobalHandles::NodeSpace final { |
152 | public: |
153 | using BlockType = NodeBlock<NodeType>; |
154 | using iterator = NodeIterator<BlockType>; |
155 | |
156 | static NodeSpace* From(NodeType* node); |
157 | static void Release(NodeType* node); |
158 | |
159 | explicit NodeSpace(GlobalHandles* global_handles) V8_NOEXCEPT |
160 | : global_handles_(global_handles) {} |
161 | ~NodeSpace(); |
162 | |
163 | V8_INLINE NodeType* Acquire(Object object); |
164 | |
165 | iterator begin() { return iterator(first_used_block_); } |
166 | iterator end() { return iterator(nullptr); } |
167 | |
168 | private: |
169 | void PutNodesOnFreeList(BlockType* block); |
170 | V8_INLINE void Free(NodeType* node); |
171 | |
172 | GlobalHandles* const global_handles_; |
173 | BlockType* first_block_ = nullptr; |
174 | BlockType* first_used_block_ = nullptr; |
175 | NodeType* first_free_ = nullptr; |
176 | }; |
177 | |
178 | template <class NodeType> |
179 | GlobalHandles::NodeSpace<NodeType>::~NodeSpace() { |
180 | auto* block = first_block_; |
181 | while (block != nullptr) { |
182 | auto* tmp = block->next(); |
183 | delete block; |
184 | block = tmp; |
185 | } |
186 | } |
187 | |
188 | template <class NodeType> |
189 | NodeType* GlobalHandles::NodeSpace<NodeType>::Acquire(Object object) { |
190 | if (first_free_ == nullptr) { |
191 | first_block_ = new BlockType(global_handles_, this, first_block_); |
192 | PutNodesOnFreeList(first_block_); |
193 | } |
194 | DCHECK_NOT_NULL(first_free_); |
195 | NodeType* node = first_free_; |
196 | first_free_ = first_free_->next_free(); |
197 | node->Acquire(object); |
198 | BlockType* block = BlockType::From(node); |
199 | if (block->IncreaseUsage()) { |
200 | block->ListAdd(&first_used_block_); |
201 | } |
202 | global_handles_->isolate()->counters()->global_handles()->Increment(); |
203 | global_handles_->handles_count_++; |
204 | DCHECK(node->IsInUse()); |
205 | return node; |
206 | } |
207 | |
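// Threads every node of a freshly allocated block onto the free list. Nodes
// are pushed in reverse index order so that they are later handed out in
// index order, and each node records its index within the block.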
208 | template <class NodeType> |
209 | void GlobalHandles::NodeSpace<NodeType>::PutNodesOnFreeList(BlockType* block) { |
210 | for (int32_t i = kBlockSize - 1; i >= 0; --i) { |
211 | NodeType* node = block->at(i); |
212 | const uint8_t index = static_cast<uint8_t>(i); |
213 | DCHECK_EQ(i, index); |
214 | node->set_index(index); |
215 | node->Free(first_free_); |
216 | first_free_ = node; |
217 | } |
218 | } |
219 | |
220 | template <class NodeType> |
221 | void GlobalHandles::NodeSpace<NodeType>::Release(NodeType* node) { |
222 | BlockType* block = BlockType::From(node); |
223 | block->space()->Free(node); |
224 | } |
225 | |
226 | template <class NodeType> |
227 | void GlobalHandles::NodeSpace<NodeType>::Free(NodeType* node) { |
228 | node->Release(first_free_); |
229 | first_free_ = node; |
230 | BlockType* block = BlockType::From(node); |
231 | if (block->DecreaseUsage()) { |
232 | block->ListRemove(&first_used_block_); |
233 | } |
234 | global_handles_->isolate()->counters()->global_handles()->Decrement(); |
235 | global_handles_->handles_count_--; |
236 | } |
237 | |
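// CRTP base class for both node types. The leading field layout (object_,
// class_id_, flags_) is shared with the public API via the offsets in
// v8::internal::Internals, which is why the constructor verifies them.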
238 | template <class Child> |
239 | class NodeBase { |
240 | public: |
241 | static Child* FromLocation(Address* location) { |
242 | return reinterpret_cast<Child*>(location); |
243 | } |
244 | |
245 | NodeBase() { |
246 | DCHECK_EQ(offsetof(NodeBase, object_), 0); |
247 | DCHECK_EQ(offsetof(NodeBase, class_id_), Internals::kNodeClassIdOffset); |
248 | DCHECK_EQ(offsetof(NodeBase, flags_), Internals::kNodeFlagsOffset); |
249 | } |
250 | |
251 | #ifdef ENABLE_HANDLE_ZAPPING |
252 | ~NodeBase() { |
253 | ClearFields(); |
254 | data_.next_free = nullptr; |
255 | index_ = 0; |
256 | } |
257 | #endif |
258 | |
259 | void Free(Child* free_list) { |
260 | ClearFields(); |
261 | AsChild()->MarkAsFree(); |
262 | data_.next_free = free_list; |
263 | } |
264 | |
265 | void Acquire(Object object) { |
266 | DCHECK(!AsChild()->IsInUse()); |
267 | CheckFieldsAreCleared(); |
268 | object_ = object.ptr(); |
269 | AsChild()->MarkAsUsed(); |
270 | data_.parameter = nullptr; |
271 | DCHECK(AsChild()->IsInUse()); |
272 | } |
273 | |
274 | void Release(Child* free_list) { |
275 | DCHECK(AsChild()->IsInUse()); |
276 | Free(free_list); |
277 | DCHECK(!AsChild()->IsInUse()); |
278 | } |
279 | |
280 | Object object() const { return Object(object_); } |
281 | FullObjectSlot location() { return FullObjectSlot(&object_); } |
282 | Handle<Object> handle() { return Handle<Object>(&object_); } |
283 | |
284 | uint8_t index() const { return index_; } |
285 | void set_index(uint8_t value) { index_ = value; } |
286 | |
287 | uint16_t wrapper_class_id() const { return class_id_; } |
288 | bool has_wrapper_class_id() const { |
289 | return class_id_ != v8::HeapProfiler::kPersistentHandleNoClassId; |
290 | } |
291 | |
292 | // Accessors for next free node in the free list. |
293 | Child* next_free() { |
294 | DCHECK(!AsChild()->IsInUse()); |
295 | return data_.next_free; |
296 | } |
297 | |
298 | void set_parameter(void* parameter) { |
299 | DCHECK(AsChild()->IsInUse()); |
300 | data_.parameter = parameter; |
301 | } |
302 | void* parameter() const { |
303 | DCHECK(AsChild()->IsInUse()); |
304 | return data_.parameter; |
305 | } |
306 | |
307 | protected: |
308 | Child* AsChild() { return reinterpret_cast<Child*>(this); } |
309 | const Child* AsChild() const { return reinterpret_cast<const Child*>(this); } |
310 | |
311 | void ClearFields() { |
312 | // Zap the values for eager trapping. |
313 | object_ = kGlobalHandleZapValue; |
314 | class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId; |
315 | AsChild()->ClearImplFields(); |
316 | } |
317 | |
318 | void CheckFieldsAreCleared() { |
319 | DCHECK_EQ(kGlobalHandleZapValue, object_); |
320 | DCHECK_EQ(v8::HeapProfiler::kPersistentHandleNoClassId, class_id_); |
321 | AsChild()->CheckImplFieldsAreCleared(); |
322 | } |
323 | |
324 | // Storage for object pointer. |
325 | // |
326 | // Placed first to avoid offset computation. The stored data is equivalent to |
327 | // an Object. It is stored as a plain Address for convenience (smallest number |
328 | // of casts), and because it is a private implementation detail: the public |
329 | // interface provides type safety. |
330 | Address object_; |
331 | |
332 | // Class id set by the embedder. |
333 | uint16_t class_id_; |
334 | |
335 | // Index in the containing handle block. |
336 | uint8_t index_; |
337 | |
338 | uint8_t flags_; |
339 | |
340 | // The meaning of this field depends on node state: |
341 | // - Node in free list: Stores next free node pointer. |
342 | // - Otherwise, specific to the node implementation. |
343 | union { |
344 | Child* next_free; |
345 | void* parameter; |
346 | } data_; |
347 | }; |
348 | |
349 | namespace { |
350 | |
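// Copies up to |len| embedder field values of |jsobject| into
// |embedder_fields|. Only fields that actually store an aligned pointer are
// copied; other entries are left untouched.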
351 | void ExtractInternalFields(JSObject jsobject, void** embedder_fields, int len) { |
352 | int field_count = jsobject->GetEmbedderFieldCount(); |
353 | for (int i = 0; i < len; ++i) { |
354 | if (field_count == i) break; |
355 | void* pointer; |
356 | if (EmbedderDataSlot(jsobject, i).ToAlignedPointer(&pointer)) { |
357 | embedder_fields[i] = pointer; |
358 | } |
359 | } |
360 | } |
361 | |
362 | } // namespace |
363 | |
364 | class GlobalHandles::Node final : public NodeBase<GlobalHandles::Node> { |
365 | public: |
366 | // State transition diagram: |
367 | // FREE -> NORMAL <-> WEAK -> PENDING -> NEAR_DEATH -> { NORMAL, WEAK, FREE } |
368 | enum State { |
369 | FREE = 0, |
370 | NORMAL, // Normal global handle. |
371 | WEAK, // Flagged as weak but not yet finalized. |
372 | PENDING, // Has been recognized as only reachable by weak handles. |
373 | NEAR_DEATH, // Callback has informed the handle is near death. |
374 | NUMBER_OF_NODE_STATES |
375 | }; |
376 | |
377 | Node() { |
378 | STATIC_ASSERT(static_cast<int>(NodeState::kMask) == |
379 | Internals::kNodeStateMask); |
380 | STATIC_ASSERT(WEAK == Internals::kNodeStateIsWeakValue); |
381 | STATIC_ASSERT(PENDING == Internals::kNodeStateIsPendingValue); |
382 | STATIC_ASSERT(static_cast<int>(IsIndependent::kShift) == |
383 | Internals::kNodeIsIndependentShift); |
384 | STATIC_ASSERT(static_cast<int>(IsActive::kShift) == |
385 | Internals::kNodeIsActiveShift); |
386 | set_in_young_list(false); |
387 | } |
388 | |
389 | void Zap() { |
390 | DCHECK(IsInUse()); |
391 | // Zap the values for eager trapping. |
392 | object_ = kGlobalHandleZapValue; |
393 | } |
394 | |
395 | const char* label() const { |
396 | return state() == NORMAL ? reinterpret_cast<char*>(data_.parameter) |
397 | : nullptr; |
398 | } |
399 | |
400 | // State and flag accessors. |
401 | |
402 | State state() const { |
403 | return NodeState::decode(flags_); |
404 | } |
405 | void set_state(State state) { |
406 | flags_ = NodeState::update(flags_, state); |
407 | } |
408 | |
409 | bool is_independent() { return IsIndependent::decode(flags_); } |
410 | void set_independent(bool v) { flags_ = IsIndependent::update(flags_, v); } |
411 | |
412 | bool is_active() { |
413 | return IsActive::decode(flags_); |
414 | } |
415 | void set_active(bool v) { |
416 | flags_ = IsActive::update(flags_, v); |
417 | } |
418 | |
419 | bool is_in_young_list() const { return IsInYoungList::decode(flags_); } |
420 | void set_in_young_list(bool v) { flags_ = IsInYoungList::update(flags_, v); } |
421 | |
422 | WeaknessType weakness_type() const { |
423 | return NodeWeaknessType::decode(flags_); |
424 | } |
425 | void set_weakness_type(WeaknessType weakness_type) { |
426 | flags_ = NodeWeaknessType::update(flags_, weakness_type); |
427 | } |
428 | |
429 | bool IsWeak() const { return state() == WEAK; } |
430 | |
431 | bool IsInUse() const { return state() != FREE; } |
432 | |
433 | bool IsPhantomCallback() const { |
434 | return weakness_type() == PHANTOM_WEAK || |
435 | weakness_type() == PHANTOM_WEAK_2_EMBEDDER_FIELDS; |
436 | } |
437 | |
438 | bool IsPhantomResetHandle() const { |
439 | return weakness_type() == PHANTOM_WEAK_RESET_HANDLE; |
440 | } |
441 | |
442 | bool IsFinalizerHandle() const { return weakness_type() == FINALIZER_WEAK; } |
443 | |
444 | bool IsPendingPhantomCallback() const { |
445 | return state() == PENDING && IsPhantomCallback(); |
446 | } |
447 | |
448 | bool IsPendingPhantomResetHandle() const { |
449 | return state() == PENDING && IsPhantomResetHandle(); |
450 | } |
451 | |
452 | bool IsPendingFinalizer() const { |
453 | return state() == PENDING && weakness_type() == FINALIZER_WEAK; |
454 | } |
455 | |
456 | bool IsPending() const { return state() == PENDING; } |
457 | |
458 | bool IsRetainer() const { |
459 | return state() != FREE && |
460 | !(state() == NEAR_DEATH && weakness_type() != FINALIZER_WEAK); |
461 | } |
462 | |
463 | bool IsStrongRetainer() const { return state() == NORMAL; } |
464 | |
465 | bool IsWeakRetainer() const { |
466 | return state() == WEAK || state() == PENDING || |
467 | (state() == NEAR_DEATH && weakness_type() == FINALIZER_WEAK); |
468 | } |
469 | |
470 | void MarkPending() { |
471 | DCHECK(state() == WEAK); |
472 | set_state(PENDING); |
473 | } |
474 | |
475 | bool has_callback() const { return weak_callback_ != nullptr; } |
476 | |
477 | // Accessors for next free node in the free list. |
478 | Node* next_free() { |
479 | DCHECK_EQ(FREE, state()); |
480 | return data_.next_free; |
481 | } |
482 | |
483 | void MakeWeak(void* parameter, |
484 | WeakCallbackInfo<void>::Callback phantom_callback, |
485 | v8::WeakCallbackType type) { |
486 | DCHECK_NOT_NULL(phantom_callback); |
487 | DCHECK(IsInUse()); |
488 | CHECK_NE(object_, kGlobalHandleZapValue); |
489 | set_state(WEAK); |
490 | switch (type) { |
491 | case v8::WeakCallbackType::kParameter: |
492 | set_weakness_type(PHANTOM_WEAK); |
493 | break; |
494 | case v8::WeakCallbackType::kInternalFields: |
495 | set_weakness_type(PHANTOM_WEAK_2_EMBEDDER_FIELDS); |
496 | break; |
497 | case v8::WeakCallbackType::kFinalizer: |
498 | set_weakness_type(FINALIZER_WEAK); |
499 | break; |
500 | } |
501 | set_parameter(parameter); |
502 | weak_callback_ = phantom_callback; |
503 | } |
504 | |
505 | void MakeWeak(Address** location_addr) { |
506 | DCHECK(IsInUse()); |
507 | CHECK_NE(object_, kGlobalHandleZapValue); |
508 | set_state(WEAK); |
509 | set_weakness_type(PHANTOM_WEAK_RESET_HANDLE); |
510 | set_parameter(location_addr); |
511 | weak_callback_ = nullptr; |
512 | } |
513 | |
514 | void* ClearWeakness() { |
515 | DCHECK(IsInUse()); |
516 | void* p = parameter(); |
517 | set_state(NORMAL); |
518 | set_parameter(nullptr); |
519 | return p; |
520 | } |
521 | |
522 | void AnnotateStrongRetainer(const char* label) { |
523 | DCHECK_EQ(state(), NORMAL); |
524 | data_.parameter = const_cast<char*>(label); |
525 | } |
526 | |
527 | void CollectPhantomCallbackData( |
528 | std::vector<std::pair<Node*, PendingPhantomCallback>>* |
529 | pending_phantom_callbacks) { |
530 | DCHECK(weakness_type() == PHANTOM_WEAK || |
531 | weakness_type() == PHANTOM_WEAK_2_EMBEDDER_FIELDS); |
532 | DCHECK(state() == PENDING); |
533 | DCHECK_NOT_NULL(weak_callback_); |
534 | |
535 | void* embedder_fields[v8::kEmbedderFieldsInWeakCallback] = {nullptr, |
536 | nullptr}; |
537 | if (weakness_type() != PHANTOM_WEAK && object()->IsJSObject()) { |
538 | ExtractInternalFields(JSObject::cast(object()), embedder_fields, |
539 | v8::kEmbedderFieldsInWeakCallback); |
540 | } |
541 | |
542 | // Zap with something dangerous. |
543 | location().store(Object(0xCA11)); |
544 | |
545 | pending_phantom_callbacks->push_back(std::make_pair( |
546 | this, |
547 | PendingPhantomCallback(weak_callback_, parameter(), embedder_fields))); |
548 | DCHECK(IsInUse()); |
549 | set_state(NEAR_DEATH); |
550 | } |
551 | |
552 | void ResetPhantomHandle() { |
553 | DCHECK_EQ(PHANTOM_WEAK_RESET_HANDLE, weakness_type()); |
554 | DCHECK_EQ(PENDING, state()); |
555 | DCHECK_NULL(weak_callback_); |
556 | Address** handle = reinterpret_cast<Address**>(parameter()); |
557 | *handle = nullptr; |
558 | NodeSpace<Node>::Release(this); |
559 | } |
560 | |
561 | void PostGarbageCollectionProcessing(Isolate* isolate) { |
562 | // This method invokes a finalizer. Updating the method name would require |
563 | // adjusting CFI blacklist as weak_callback_ is invoked on the wrong type. |
564 | CHECK(IsPendingFinalizer()); |
565 | CHECK(!is_active()); |
566 | set_state(NEAR_DEATH); |
567 | // Check that we are not passing a finalized external string to |
568 | // the callback. |
569 | DCHECK(!object()->IsExternalOneByteString() || |
570 | ExternalOneByteString::cast(object())->resource() != nullptr); |
571 | DCHECK(!object()->IsExternalTwoByteString() || |
572 | ExternalTwoByteString::cast(object())->resource() != nullptr); |
573 | // Leaving V8. |
574 | VMState<EXTERNAL> vmstate(isolate); |
575 | HandleScope handle_scope(isolate); |
576 | void* embedder_fields[v8::kEmbedderFieldsInWeakCallback] = {nullptr, |
577 | nullptr}; |
578 | v8::WeakCallbackInfo<void> data(reinterpret_cast<v8::Isolate*>(isolate), |
579 | parameter(), embedder_fields, nullptr); |
580 | weak_callback_(data); |
581 | // For finalizers the handle must have either been reset or made strong. |
582 | // Both cases reset the state. |
583 | CHECK_NE(NEAR_DEATH, state()); |
584 | } |
585 | |
586 | void MarkAsFree() { set_state(FREE); } |
587 | void MarkAsUsed() { set_state(NORMAL); } |
588 | |
589 | GlobalHandles* global_handles() { |
590 | return NodeBlock<Node>::From(this)->global_handles(); |
591 | } |
592 | |
593 | private: |
594 | // Fields that are not used for managing node memory. |
595 | void ClearImplFields() { |
596 | set_independent(false); |
597 | set_active(false); |
598 | weak_callback_ = nullptr; |
599 | } |
600 | |
601 | void CheckImplFieldsAreCleared() { |
602 | DCHECK(!is_independent()); |
603 | DCHECK(!is_active()); |
604 | DCHECK_EQ(nullptr, weak_callback_); |
605 | } |
606 | |
607 | // This stores three flags (independent, active and in_young_list), the |
608 | // node State, and the node's WeaknessType. |
609 | class NodeState : public BitField8<State, 0, 3> {}; |
610 | class IsIndependent : public BitField8<bool, NodeState::kNext, 1> {}; |
611 | // The following two fields are mutually exclusive |
612 | class IsActive : public BitField8<bool, IsIndependent::kNext, 1> {}; |
613 | class IsInYoungList : public BitField8<bool, IsActive::kNext, 1> {}; |
614 | class NodeWeaknessType |
615 | : public BitField8<WeaknessType, IsInYoungList::kNext, 2> {}; |
616 | |
617 | // Handle specific callback - might be a weak reference in disguise. |
618 | WeakCallbackInfo<void>::Callback weak_callback_; |
619 | |
620 | friend class NodeBase<Node>; |
621 | |
622 | DISALLOW_COPY_AND_ASSIGN(Node); |
623 | }; |
624 | |
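// Node type backing traced handles (v8::TracedGlobal). A traced node either
// acts as a root (is_root()) or is left to the embedder's heap tracer; when
// it is dropped, the handle is reset in place if no finalization callback is
// attached, otherwise the callback is collected as a pending phantom
// callback.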
625 | class GlobalHandles::TracedNode final |
626 | : public NodeBase<GlobalHandles::TracedNode> { |
627 | public: |
628 | TracedNode() { set_in_young_list(false); } |
629 | |
630 | enum State { FREE = 0, NORMAL, NEAR_DEATH }; |
631 | |
632 | State state() const { return NodeState::decode(flags_); } |
633 | void set_state(State state) { flags_ = NodeState::update(flags_, state); } |
634 | |
635 | void MarkAsFree() { set_state(FREE); } |
636 | void MarkAsUsed() { set_state(NORMAL); } |
637 | bool IsInUse() const { return state() != FREE; } |
638 | bool IsRetainer() const { return state() == NORMAL; } |
639 | bool IsPhantomResetHandle() const { return callback_ == nullptr; } |
640 | |
641 | bool is_in_young_list() const { return IsInYoungList::decode(flags_); } |
642 | void set_in_young_list(bool v) { flags_ = IsInYoungList::update(flags_, v); } |
643 | |
644 | bool is_root() const { return IsRoot::decode(flags_); } |
645 | void set_root(bool v) { flags_ = IsRoot::update(flags_, v); } |
646 | |
647 | void SetFinalizationCallback(void* parameter, |
648 | WeakCallbackInfo<void>::Callback callback) { |
649 | set_parameter(parameter); |
650 | callback_ = callback; |
651 | } |
652 | bool HasFinalizationCallback() const { return callback_ != nullptr; } |
653 | |
654 | void CollectPhantomCallbackData( |
655 | std::vector<std::pair<TracedNode*, PendingPhantomCallback>>* |
656 | pending_phantom_callbacks) { |
657 | DCHECK(IsInUse()); |
658 | DCHECK_NOT_NULL(callback_); |
659 | |
660 | void* embedder_fields[v8::kEmbedderFieldsInWeakCallback] = {nullptr, |
661 | nullptr}; |
662 | ExtractInternalFields(JSObject::cast(object()), embedder_fields, |
663 | v8::kEmbedderFieldsInWeakCallback); |
664 | |
665 | // Zap with something dangerous. |
666 | location().store(Object(0xCA11)); |
667 | |
668 | pending_phantom_callbacks->push_back(std::make_pair( |
669 | this, PendingPhantomCallback(callback_, parameter(), embedder_fields))); |
670 | set_state(NEAR_DEATH); |
671 | } |
672 | |
673 | void ResetPhantomHandle() { |
674 | DCHECK(IsInUse()); |
675 | Address** handle = reinterpret_cast<Address**>(data_.parameter); |
676 | *handle = nullptr; |
677 | NodeSpace<TracedNode>::Release(this); |
678 | DCHECK(!IsInUse()); |
679 | } |
680 | |
681 | protected: |
682 | class NodeState : public BitField8<State, 0, 2> {}; |
683 | class IsInYoungList : public BitField8<bool, NodeState::kNext, 1> {}; |
684 | class IsRoot : public BitField8<bool, IsInYoungList::kNext, 1> {}; |
685 | |
686 | void ClearImplFields() { |
687 | set_root(true); |
688 | callback_ = nullptr; |
689 | } |
690 | |
691 | void CheckImplFieldsAreCleared() const { |
692 | DCHECK(is_root()); |
693 | DCHECK_NULL(callback_); |
694 | } |
695 | |
696 | WeakCallbackInfo<void>::Callback callback_; |
697 | |
698 | friend class NodeBase<GlobalHandles::TracedNode>; |
699 | |
700 | DISALLOW_COPY_AND_ASSIGN(TracedNode); |
701 | }; |
702 | |
703 | GlobalHandles::GlobalHandles(Isolate* isolate) |
704 | : isolate_(isolate), |
705 | regular_nodes_(new NodeSpace<GlobalHandles::Node>(this)), |
706 | traced_nodes_(new NodeSpace<GlobalHandles::TracedNode>(this)) {} |
707 | |
708 | GlobalHandles::~GlobalHandles() { regular_nodes_.reset(nullptr); } |
709 | |
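// Creating a handle hands out a node from the regular node space. Handles to
// young-generation objects are additionally recorded in young_nodes_ so that
// scavenges only need to visit this subset. Roughly, the API layer uses this
// as follows (illustrative sketch only; the exact plumbing lives in api.cc):
//
//   i::Handle<i::Object> handle = isolate->global_handles()->Create(*object);
//   ...
//   GlobalHandles::Destroy(handle.location());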
710 | Handle<Object> GlobalHandles::Create(Object value) { |
711 | GlobalHandles::Node* result = regular_nodes_->Acquire(value); |
712 | if (ObjectInYoungGeneration(value) && !result->is_in_young_list()) { |
713 | young_nodes_.push_back(result); |
714 | result->set_in_young_list(true); |
715 | } |
716 | return result->handle(); |
717 | } |
718 | |
719 | Handle<Object> GlobalHandles::Create(Address value) { |
720 | return Create(Object(value)); |
721 | } |
722 | |
723 | Handle<Object> GlobalHandles::CreateTraced(Object value, Address* slot) { |
724 | GlobalHandles::TracedNode* result = traced_nodes_->Acquire(value); |
725 | if (ObjectInYoungGeneration(value) && !result->is_in_young_list()) { |
726 | traced_young_nodes_.push_back(result); |
727 | result->set_in_young_list(true); |
728 | } |
729 | result->set_parameter(slot); |
730 | return result->handle(); |
731 | } |
732 | |
733 | Handle<Object> GlobalHandles::CreateTraced(Address value, Address* slot) { |
734 | return CreateTraced(Object(value), slot); |
735 | } |
736 | |
737 | Handle<Object> GlobalHandles::CopyGlobal(Address* location) { |
738 | DCHECK_NOT_NULL(location); |
739 | GlobalHandles* global_handles = |
740 | Node::FromLocation(location)->global_handles(); |
741 | #ifdef VERIFY_HEAP |
742 | if (i::FLAG_verify_heap) { |
743 | Object(*location)->ObjectVerify(global_handles->isolate()); |
744 | } |
745 | #endif // VERIFY_HEAP |
746 | return global_handles->Create(*location); |
747 | } |
748 | |
749 | void GlobalHandles::MoveGlobal(Address** from, Address** to) { |
750 | DCHECK_NOT_NULL(*from); |
751 | DCHECK_NOT_NULL(*to); |
752 | DCHECK_EQ(*from, *to); |
753 | Node* node = Node::FromLocation(*from); |
754 | if (node->IsWeak() && node->IsPhantomResetHandle()) { |
755 | node->set_parameter(to); |
756 | } |
757 | |
758 | // - Strong handles do not require fixups. |
759 | // - Weak handles with finalizers and callbacks are too general to fix up. For |
760 | // those the callers need to ensure consistency. |
761 | } |
762 | |
763 | void GlobalHandles::MoveTracedGlobal(Address** from, Address** to) { |
764 | DCHECK_NOT_NULL(*from); |
765 | DCHECK_NOT_NULL(*to); |
766 | DCHECK_EQ(*from, *to); |
767 | TracedNode* node = TracedNode::FromLocation(*from); |
768 | // Only set the backpointer for clearing a phantom handle when there is no |
769 | // finalization callback attached. As soon as a callback is attached to a node |
770 | // the embedder is on its own when resetting a handle. |
771 | if (!node->HasFinalizationCallback()) { |
772 | node->set_parameter(to); |
773 | } |
774 | } |
775 | |
776 | void GlobalHandles::Destroy(Address* location) { |
777 | if (location != nullptr) { |
778 | NodeSpace<Node>::Release(Node::FromLocation(location)); |
779 | } |
780 | } |
781 | |
782 | void GlobalHandles::DestroyTraced(Address* location) { |
783 | if (location != nullptr) { |
784 | NodeSpace<TracedNode>::Release(TracedNode::FromLocation(location)); |
785 | } |
786 | } |
787 | |
788 | void GlobalHandles::SetFinalizationCallbackForTraced( |
789 | Address* location, void* parameter, |
790 | WeakCallbackInfo<void>::Callback callback) { |
791 | TracedNode::FromLocation(location)->SetFinalizationCallback(parameter, |
792 | callback); |
793 | } |
794 | |
795 | typedef v8::WeakCallbackInfo<void>::Callback GenericCallback; |
796 | |
797 | void GlobalHandles::MakeWeak(Address* location, void* parameter, |
798 | GenericCallback phantom_callback, |
799 | v8::WeakCallbackType type) { |
800 | Node::FromLocation(location)->MakeWeak(parameter, phantom_callback, type); |
801 | } |
802 | |
803 | void GlobalHandles::MakeWeak(Address** location_addr) { |
804 | Node::FromLocation(*location_addr)->MakeWeak(location_addr); |
805 | } |
806 | |
807 | void* GlobalHandles::ClearWeakness(Address* location) { |
808 | return Node::FromLocation(location)->ClearWeakness(); |
809 | } |
810 | |
811 | void GlobalHandles::AnnotateStrongRetainer(Address* location, |
812 | const char* label) { |
813 | Node::FromLocation(location)->AnnotateStrongRetainer(label); |
814 | } |
815 | |
816 | bool GlobalHandles::IsWeak(Address* location) { |
817 | return Node::FromLocation(location)->IsWeak(); |
818 | } |
819 | |
820 | DISABLE_CFI_PERF |
821 | void GlobalHandles::IterateWeakRootsForFinalizers(RootVisitor* v) { |
822 | for (Node* node : *regular_nodes_) { |
823 | if (node->IsWeakRetainer() && node->state() == Node::PENDING) { |
824 | DCHECK(!node->IsPhantomCallback()); |
825 | DCHECK(!node->IsPhantomResetHandle()); |
826 | // Finalizers need to survive. |
827 | v->VisitRootPointer(Root::kGlobalHandles, node->label(), |
828 | node->location()); |
829 | } |
830 | } |
831 | } |
832 | |
833 | DISABLE_CFI_PERF |
834 | void GlobalHandles::IterateWeakRootsForPhantomHandles( |
835 | WeakSlotCallbackWithHeap should_reset_handle) { |
836 | for (Node* node : *regular_nodes_) { |
837 | if (node->IsWeakRetainer() && |
838 | should_reset_handle(isolate()->heap(), node->location())) { |
839 | if (node->IsPhantomResetHandle()) { |
840 | node->MarkPending(); |
841 | node->ResetPhantomHandle(); |
842 | ++number_of_phantom_handle_resets_; |
843 | } else if (node->IsPhantomCallback()) { |
844 | node->MarkPending(); |
845 | node->CollectPhantomCallbackData(®ular_pending_phantom_callbacks_); |
846 | } |
847 | } |
848 | } |
849 | for (TracedNode* node : *traced_nodes_) { |
850 | if (node->IsInUse() && |
851 | should_reset_handle(isolate()->heap(), node->location())) { |
852 | if (node->IsPhantomResetHandle()) { |
853 | node->ResetPhantomHandle(); |
854 | ++number_of_phantom_handle_resets_; |
855 | } else { |
856 | node->CollectPhantomCallbackData(&traced_pending_phantom_callbacks_); |
857 | } |
858 | } |
859 | } |
860 | } |
861 | |
862 | void GlobalHandles::IterateWeakRootsIdentifyFinalizers( |
863 | WeakSlotCallbackWithHeap should_reset_handle) { |
864 | for (Node* node : *regular_nodes_) { |
865 | if (node->IsWeak() && |
866 | should_reset_handle(isolate()->heap(), node->location())) { |
867 | if (node->IsFinalizerHandle()) { |
868 | node->MarkPending(); |
869 | } |
870 | } |
871 | } |
872 | } |
873 | |
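// Marks young weak nodes whose wrapped objects have been modified by the
// embedder as active, so the scavenger treats them as strong roots. For
// unmodified traced nodes, the embedder's heap tracer decides whether they
// should still be treated as roots for a non-tracing GC.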
874 | void GlobalHandles::IdentifyWeakUnmodifiedObjects( |
875 | WeakSlotCallback is_unmodified) { |
876 | for (Node* node : young_nodes_) { |
877 | if (node->IsWeak() && !is_unmodified(node->location())) { |
878 | node->set_active(true); |
879 | } |
880 | } |
881 | |
882 | LocalEmbedderHeapTracer* const tracer = |
883 | isolate()->heap()->local_embedder_heap_tracer(); |
884 | for (TracedNode* node : traced_young_nodes_) { |
885 | if (node->IsInUse()) { |
886 | DCHECK(node->is_root()); |
887 | if (is_unmodified(node->location())) { |
888 | v8::Value* value = ToApi<v8::Value>(node->handle()); |
889 | node->set_root(tracer->IsRootForNonTracingGC( |
890 | *reinterpret_cast<v8::TracedGlobal<v8::Value>*>(&value))); |
891 | } |
892 | } |
893 | } |
894 | } |
895 | |
896 | void GlobalHandles::IterateYoungStrongAndDependentRoots(RootVisitor* v) { |
897 | for (Node* node : young_nodes_) { |
898 | if (node->IsStrongRetainer() || |
899 | (node->IsWeakRetainer() && !node->is_independent() && |
900 | node->is_active())) { |
901 | v->VisitRootPointer(Root::kGlobalHandles, node->label(), |
902 | node->location()); |
903 | } |
904 | } |
905 | for (TracedNode* node : traced_young_nodes_) { |
906 | if (node->IsInUse() && node->is_root()) { |
907 | v->VisitRootPointer(Root::kGlobalHandles, nullptr, node->location()); |
908 | } |
909 | } |
910 | } |
911 | |
912 | void GlobalHandles::MarkYoungWeakUnmodifiedObjectsPending( |
913 | WeakSlotCallbackWithHeap is_dead) { |
914 | for (Node* node : young_nodes_) { |
915 | DCHECK(node->is_in_young_list()); |
916 | if ((node->is_independent() || !node->is_active()) && node->IsWeak() && |
917 | is_dead(isolate_->heap(), node->location())) { |
918 | if (!node->IsPhantomCallback() && !node->IsPhantomResetHandle()) { |
919 | node->MarkPending(); |
920 | } |
921 | } |
922 | } |
923 | } |
924 | |
925 | void GlobalHandles::IterateYoungWeakUnmodifiedRootsForFinalizers( |
926 | RootVisitor* v) { |
927 | for (Node* node : young_nodes_) { |
928 | DCHECK(node->is_in_young_list()); |
929 | if ((node->is_independent() || !node->is_active()) && |
930 | node->IsWeakRetainer() && (node->state() == Node::PENDING)) { |
931 | DCHECK(!node->IsPhantomCallback()); |
932 | DCHECK(!node->IsPhantomResetHandle()); |
933 | // Finalizers need to survive. |
934 | v->VisitRootPointer(Root::kGlobalHandles, node->label(), |
935 | node->location()); |
936 | } |
937 | } |
938 | } |
939 | |
940 | void GlobalHandles::IterateYoungWeakUnmodifiedRootsForPhantomHandles( |
941 | RootVisitor* v, WeakSlotCallbackWithHeap should_reset_handle) { |
942 | for (Node* node : young_nodes_) { |
943 | DCHECK(node->is_in_young_list()); |
944 | if ((node->is_independent() || !node->is_active()) && |
945 | node->IsWeakRetainer() && (node->state() != Node::PENDING)) { |
946 | if (should_reset_handle(isolate_->heap(), node->location())) { |
947 | DCHECK(node->IsPhantomResetHandle() || node->IsPhantomCallback()); |
948 | if (node->IsPhantomResetHandle()) { |
949 | node->MarkPending(); |
950 | node->ResetPhantomHandle(); |
951 | ++number_of_phantom_handle_resets_; |
952 | } else if (node->IsPhantomCallback()) { |
953 | node->MarkPending(); |
954 | node->CollectPhantomCallbackData(®ular_pending_phantom_callbacks_); |
955 | } else { |
956 | UNREACHABLE(); |
957 | } |
958 | } else { |
959 | // Node survived and needs to be visited. |
960 | v->VisitRootPointer(Root::kGlobalHandles, node->label(), |
961 | node->location()); |
962 | } |
963 | } |
964 | } |
965 | for (TracedNode* node : traced_young_nodes_) { |
966 | if (!node->IsInUse()) continue; |
967 | |
968 | DCHECK_IMPLIES(node->is_root(), |
969 | !should_reset_handle(isolate_->heap(), node->location())); |
970 | if (should_reset_handle(isolate_->heap(), node->location())) { |
971 | if (node->IsPhantomResetHandle()) { |
972 | node->ResetPhantomHandle(); |
973 | ++number_of_phantom_handle_resets_; |
974 | } else { |
975 | node->CollectPhantomCallbackData(&traced_pending_phantom_callbacks_); |
976 | } |
977 | } else { |
978 | if (!node->is_root()) { |
979 | node->set_root(true); |
980 | v->VisitRootPointer(Root::kGlobalHandles, nullptr, node->location()); |
981 | } |
982 | } |
983 | } |
984 | } |
985 | |
986 | void GlobalHandles::InvokeSecondPassPhantomCallbacksFromTask() { |
987 | DCHECK(second_pass_callbacks_task_posted_); |
988 | second_pass_callbacks_task_posted_ = false; |
989 | TRACE_EVENT0("v8", "V8.GCPhantomHandleProcessingCallback"); |
990 | isolate()->heap()->CallGCPrologueCallbacks( |
991 | GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags); |
992 | InvokeSecondPassPhantomCallbacks(); |
993 | isolate()->heap()->CallGCEpilogueCallbacks( |
994 | GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags); |
995 | } |
996 | |
997 | void GlobalHandles::InvokeSecondPassPhantomCallbacks() { |
998 | while (!second_pass_callbacks_.empty()) { |
999 | auto callback = second_pass_callbacks_.back(); |
1000 | second_pass_callbacks_.pop_back(); |
1001 | callback.Invoke(isolate(), PendingPhantomCallback::kSecondPass); |
1002 | } |
1003 | } |
1004 | |
1005 | size_t GlobalHandles::PostScavengeProcessing(unsigned post_processing_count) { |
1006 | size_t freed_nodes = 0; |
1007 | for (Node* node : young_nodes_) { |
1008 | // Filter free nodes. |
1009 | if (!node->IsRetainer()) continue; |
1010 | |
1011 | // Reset active state for all affected nodes. |
1012 | node->set_active(false); |
1013 | |
1014 | if (node->IsPending()) { |
1015 | DCHECK(node->has_callback()); |
1016 | DCHECK(node->IsPendingFinalizer()); |
1017 | node->PostGarbageCollectionProcessing(isolate_); |
1018 | } |
1019 | if (InRecursiveGC(post_processing_count)) return freed_nodes; |
1020 | |
1021 | if (!node->IsRetainer()) freed_nodes++; |
1022 | } |
1023 | return freed_nodes; |
1024 | } |
1025 | |
1026 | size_t GlobalHandles::PostMarkSweepProcessing(unsigned post_processing_count) { |
1027 | size_t freed_nodes = 0; |
1028 | for (Node* node : *regular_nodes_) { |
1029 | // Filter free nodes. |
1030 | if (!node->IsRetainer()) continue; |
1031 | |
1032 | // Reset active state for all affected nodes. |
1033 | node->set_active(false); |
1034 | |
1035 | if (node->IsPending()) { |
1036 | DCHECK(node->has_callback()); |
1037 | DCHECK(node->IsPendingFinalizer()); |
1038 | node->PostGarbageCollectionProcessing(isolate_); |
1039 | } |
1040 | if (InRecursiveGC(post_processing_count)) return freed_nodes; |
1041 | |
1042 | if (!node->IsRetainer()) freed_nodes++; |
1043 | } |
1044 | return freed_nodes; |
1045 | } |
1046 | |
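// Compacts |node_list| in place after a GC: nodes that are still in use and
// still refer to young objects are kept; nodes whose objects died or were
// promoted are removed from the young list and the corresponding heap
// counters are updated.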
1047 | template <typename T> |
1048 | void GlobalHandles::UpdateAndCompactListOfYoungNode( |
1049 | std::vector<T*>* node_list) { |
1050 | size_t last = 0; |
1051 | for (T* node : *node_list) { |
1052 | DCHECK(node->is_in_young_list()); |
1053 | if (node->IsInUse()) { |
1054 | if (ObjectInYoungGeneration(node->object())) { |
1055 | (*node_list)[last++] = node; |
1056 | isolate_->heap()->IncrementNodesCopiedInNewSpace(); |
1057 | } else { |
1058 | node->set_in_young_list(false); |
1059 | isolate_->heap()->IncrementNodesPromoted(); |
1060 | } |
1061 | } else { |
1062 | node->set_in_young_list(false); |
1063 | isolate_->heap()->IncrementNodesDiedInNewSpace(); |
1064 | } |
1065 | } |
1066 | DCHECK_LE(last, node_list->size()); |
1067 | node_list->resize(last); |
1068 | node_list->shrink_to_fit(); |
1069 | } |
1070 | |
1071 | void GlobalHandles::UpdateListOfYoungNodes() { |
1072 | UpdateAndCompactListOfYoungNode(&young_nodes_); |
1073 | UpdateAndCompactListOfYoungNode(&traced_young_nodes_); |
1074 | } |
1075 | |
1076 | template <typename T> |
1077 | size_t GlobalHandles::InvokeFirstPassWeakCallbacks( |
1078 | std::vector<std::pair<T*, PendingPhantomCallback>>* pending) { |
1079 | size_t freed_nodes = 0; |
1080 | std::vector<std::pair<T*, PendingPhantomCallback>> pending_phantom_callbacks; |
1081 | pending_phantom_callbacks.swap(*pending); |
1082 | { |
1083 | // The initial pass callbacks must simply clear the nodes. |
1084 | for (auto& pair : pending_phantom_callbacks) { |
1085 | T* node = pair.first; |
1086 | DCHECK_EQ(T::NEAR_DEATH, node->state()); |
1087 | pair.second.Invoke(isolate(), PendingPhantomCallback::kFirstPass); |
1088 | |
1089 | // Transition to second pass. It is required that the first pass callback |
1090 | // resets the handle using |v8::PersistentBase::Reset|. Also see comments |
1091 | // on |v8::WeakCallbackInfo|. |
1092 | CHECK_WITH_MSG(T::FREE == node->state(), |
1093 | "Handle not reset in first callback. See comments on " |
1094 | "|v8::WeakCallbackInfo|."); |
1095 | |
1096 | if (pair.second.callback()) second_pass_callbacks_.push_back(pair.second); |
1097 | freed_nodes++; |
1098 | } |
1099 | } |
1100 | return freed_nodes; |
1101 | } |
1102 | |
1103 | size_t GlobalHandles::InvokeFirstPassWeakCallbacks() { |
1104 | return InvokeFirstPassWeakCallbacks(®ular_pending_phantom_callbacks_) + |
1105 | InvokeFirstPassWeakCallbacks(&traced_pending_phantom_callbacks_); |
1106 | } |
1107 | |
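// Second-pass phantom callbacks either run synchronously (when requested by
// the caller, or with --optimize-for-size or --predictable) or are posted as
// a cancelable foreground task that runs them later.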
1108 | void GlobalHandles::InvokeOrScheduleSecondPassPhantomCallbacks( |
1109 | bool synchronous_second_pass) { |
1110 | if (!second_pass_callbacks_.empty()) { |
1111 | if (FLAG_optimize_for_size || FLAG_predictable || synchronous_second_pass) { |
1112 | isolate()->heap()->CallGCPrologueCallbacks( |
1113 | GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags); |
1114 | InvokeSecondPassPhantomCallbacks(); |
1115 | isolate()->heap()->CallGCEpilogueCallbacks( |
1116 | GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags); |
1117 | } else if (!second_pass_callbacks_task_posted_) { |
1118 | second_pass_callbacks_task_posted_ = true; |
1119 | auto taskrunner = V8::GetCurrentPlatform()->GetForegroundTaskRunner( |
1120 | reinterpret_cast<v8::Isolate*>(isolate())); |
1121 | taskrunner->PostTask(MakeCancelableTask( |
1122 | isolate(), [this] { InvokeSecondPassPhantomCallbacksFromTask(); })); |
1123 | } |
1124 | } |
1125 | } |
1126 | |
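// For first-pass invocations the address of callback_ is handed to the
// embedder through the WeakCallbackInfo, which is how SetSecondPassCallback()
// registers a second pass; for second-pass invocations no callback slot is
// passed, so no further pass can be scheduled.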
1127 | void GlobalHandles::PendingPhantomCallback::Invoke(Isolate* isolate, |
1128 | InvocationType type) { |
1129 | Data::Callback* callback_addr = nullptr; |
1130 | if (type == kFirstPass) { |
1131 | callback_addr = &callback_; |
1132 | } |
1133 | Data data(reinterpret_cast<v8::Isolate*>(isolate), parameter_, |
1134 | embedder_fields_, callback_addr); |
1135 | Data::Callback callback = callback_; |
1136 | callback_ = nullptr; |
1137 | callback(data); |
1138 | } |
1139 | |
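// A weak callback may itself trigger a GC, which bumps
// post_gc_processing_count_. Comparing against the counter captured at the
// start of processing detects such recursion so the outer invocation can stop
// early.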
1140 | bool GlobalHandles::InRecursiveGC(unsigned gc_processing_counter) { |
1141 | return gc_processing_counter != post_gc_processing_count_; |
1142 | } |
1143 | |
1144 | size_t GlobalHandles::PostGarbageCollectionProcessing( |
1145 | GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) { |
1146 | // Process weak global handle callbacks. This must be done after the |
1147 | // GC is completely done, because the callbacks may invoke arbitrary |
1148 | // API functions. |
1149 | DCHECK_EQ(Heap::NOT_IN_GC, isolate_->heap()->gc_state()); |
1150 | const unsigned post_processing_count = ++post_gc_processing_count_; |
1151 | size_t freed_nodes = 0; |
1152 | bool synchronous_second_pass = |
1153 | isolate_->heap()->IsTearingDown() || |
1154 | (gc_callback_flags & |
1155 | (kGCCallbackFlagForced | kGCCallbackFlagCollectAllAvailableGarbage | |
1156 | kGCCallbackFlagSynchronousPhantomCallbackProcessing)) != 0; |
1157 | InvokeOrScheduleSecondPassPhantomCallbacks(synchronous_second_pass); |
1158 | if (InRecursiveGC(post_processing_count)) return freed_nodes; |
1159 | |
1160 | freed_nodes += Heap::IsYoungGenerationCollector(collector) |
1161 | ? PostScavengeProcessing(post_processing_count) |
1162 | : PostMarkSweepProcessing(post_processing_count); |
1163 | if (InRecursiveGC(post_processing_count)) return freed_nodes; |
1164 | |
1165 | UpdateListOfYoungNodes(); |
1166 | return freed_nodes; |
1167 | } |
1168 | |
1169 | void GlobalHandles::IterateStrongRoots(RootVisitor* v) { |
1170 | for (Node* node : *regular_nodes_) { |
1171 | if (node->IsStrongRetainer()) { |
1172 | v->VisitRootPointer(Root::kGlobalHandles, node->label(), |
1173 | node->location()); |
1174 | } |
1175 | } |
1176 | } |
1177 | |
1178 | void GlobalHandles::IterateWeakRoots(RootVisitor* v) { |
1179 | for (Node* node : *regular_nodes_) { |
1180 | if (node->IsWeak()) { |
1181 | v->VisitRootPointer(Root::kGlobalHandles, node->label(), |
1182 | node->location()); |
1183 | } |
1184 | } |
1185 | for (TracedNode* node : *traced_nodes_) { |
1186 | if (node->IsInUse()) { |
1187 | v->VisitRootPointer(Root::kGlobalHandles, nullptr, node->location()); |
1188 | } |
1189 | } |
1190 | } |
1191 | |
1192 | DISABLE_CFI_PERF |
1193 | void GlobalHandles::IterateAllRoots(RootVisitor* v) { |
1194 | for (Node* node : *regular_nodes_) { |
1195 | if (node->IsRetainer()) { |
1196 | v->VisitRootPointer(Root::kGlobalHandles, node->label(), |
1197 | node->location()); |
1198 | } |
1199 | } |
1200 | for (TracedNode* node : *traced_nodes_) { |
1201 | if (node->IsRetainer()) { |
1202 | v->VisitRootPointer(Root::kGlobalHandles, nullptr, node->location()); |
1203 | } |
1204 | } |
1205 | } |
1206 | |
1207 | DISABLE_CFI_PERF |
1208 | void GlobalHandles::IterateAllYoungRoots(RootVisitor* v) { |
1209 | for (Node* node : young_nodes_) { |
1210 | if (node->IsRetainer()) { |
1211 | v->VisitRootPointer(Root::kGlobalHandles, node->label(), |
1212 | node->location()); |
1213 | } |
1214 | } |
1215 | for (TracedNode* node : traced_young_nodes_) { |
1216 | if (node->IsRetainer()) { |
1217 | v->VisitRootPointer(Root::kGlobalHandles, nullptr, node->location()); |
1218 | } |
1219 | } |
1220 | } |
1221 | |
1222 | DISABLE_CFI_PERF |
1223 | void GlobalHandles::ApplyPersistentHandleVisitor( |
1224 | v8::PersistentHandleVisitor* visitor, GlobalHandles::Node* node) { |
1225 | v8::Value* value = ToApi<v8::Value>(node->handle()); |
1226 | visitor->VisitPersistentHandle( |
1227 | reinterpret_cast<v8::Persistent<v8::Value>*>(&value), |
1228 | node->wrapper_class_id()); |
1229 | } |
1230 | |
1231 | DISABLE_CFI_PERF |
1232 | void GlobalHandles::IterateAllRootsWithClassIds( |
1233 | v8::PersistentHandleVisitor* visitor) { |
1234 | for (Node* node : *regular_nodes_) { |
1235 | if (node->IsRetainer() && node->has_wrapper_class_id()) { |
1236 | ApplyPersistentHandleVisitor(visitor, node); |
1237 | } |
1238 | } |
1239 | } |
1240 | |
1241 | DISABLE_CFI_PERF |
1242 | void GlobalHandles::IterateTracedNodes( |
1243 | v8::EmbedderHeapTracer::TracedGlobalHandleVisitor* visitor) { |
1244 | for (TracedNode* node : *traced_nodes_) { |
1245 | if (node->IsInUse()) { |
1246 | v8::Value* value = ToApi<v8::Value>(node->handle()); |
1247 | visitor->VisitTracedGlobalHandle( |
1248 | *reinterpret_cast<v8::TracedGlobal<v8::Value>*>(&value)); |
1249 | } |
1250 | } |
1251 | } |
1252 | |
1253 | DISABLE_CFI_PERF |
1254 | void GlobalHandles::IterateAllYoungRootsWithClassIds( |
1255 | v8::PersistentHandleVisitor* visitor) { |
1256 | for (Node* node : young_nodes_) { |
1257 | if (node->IsRetainer() && node->has_wrapper_class_id()) { |
1258 | ApplyPersistentHandleVisitor(visitor, node); |
1259 | } |
1260 | } |
1261 | } |
1262 | |
1263 | DISABLE_CFI_PERF |
1264 | void GlobalHandles::IterateYoungWeakRootsWithClassIds( |
1265 | v8::PersistentHandleVisitor* visitor) { |
1266 | for (Node* node : young_nodes_) { |
1267 | if (node->has_wrapper_class_id() && node->IsWeak()) { |
1268 | ApplyPersistentHandleVisitor(visitor, node); |
1269 | } |
1270 | } |
1271 | } |
1272 | |
1273 | void GlobalHandles::RecordStats(HeapStats* stats) { |
1274 | *stats->global_handle_count = 0; |
1275 | *stats->weak_global_handle_count = 0; |
1276 | *stats->pending_global_handle_count = 0; |
1277 | *stats->near_death_global_handle_count = 0; |
1278 | *stats->free_global_handle_count = 0; |
1279 | for (Node* node : *regular_nodes_) { |
1280 | *stats->global_handle_count += 1; |
1281 | if (node->state() == Node::WEAK) { |
1282 | *stats->weak_global_handle_count += 1; |
1283 | } else if (node->state() == Node::PENDING) { |
1284 | *stats->pending_global_handle_count += 1; |
1285 | } else if (node->state() == Node::NEAR_DEATH) { |
1286 | *stats->near_death_global_handle_count += 1; |
1287 | } else if (node->state() == Node::FREE) { |
1288 | *stats->free_global_handle_count += 1; |
1289 | } |
1290 | } |
1291 | } |
1292 | |
1293 | #ifdef DEBUG |
1294 | |
1295 | void GlobalHandles::PrintStats() { |
1296 | int total = 0; |
1297 | int weak = 0; |
1298 | int pending = 0; |
1299 | int near_death = 0; |
1300 | int destroyed = 0; |
1301 | |
1302 | for (Node* node : *regular_nodes_) { |
1303 | total++; |
1304 | if (node->state() == Node::WEAK) weak++; |
1305 | if (node->state() == Node::PENDING) pending++; |
1306 | if (node->state() == Node::NEAR_DEATH) near_death++; |
1307 | if (node->state() == Node::FREE) destroyed++; |
1308 | } |
1309 | |
1310 | PrintF("Global Handle Statistics:\n"); |
1311 | PrintF("  allocated memory = %" PRIuS "B\n", total * sizeof(Node)); |
1312 | PrintF("  # weak = %d\n", weak); |
1313 | PrintF("  # pending = %d\n", pending); |
1314 | PrintF("  # near_death = %d\n", near_death); |
1315 | PrintF("  # free = %d\n", destroyed); |
1316 | PrintF("  # total = %d\n", total); |
1317 | } |
1318 | |
1319 | |
1320 | void GlobalHandles::Print() { |
1321 | PrintF("Global handles:\n"); |
1322 | for (Node* node : *regular_nodes_) { |
1323 | PrintF("  handle %p to %p%s\n", node->location().ToVoidPtr(), |
1324 | reinterpret_cast<void*>(node->object()->ptr()), |
1325 | node->IsWeak() ? " (weak)" : ""); |
1326 | } |
1327 | } |
1328 | |
1329 | #endif |
1330 | |
1331 | EternalHandles::~EternalHandles() { |
1332 | for (Address* block : blocks_) delete[] block; |
1333 | } |
1334 | |
1335 | void EternalHandles::IterateAllRoots(RootVisitor* visitor) { |
1336 | int limit = size_; |
1337 | for (Address* block : blocks_) { |
1338 | DCHECK_GT(limit, 0); |
1339 | visitor->VisitRootPointers(Root::kEternalHandles, nullptr, |
1340 | FullObjectSlot(block), |
1341 | FullObjectSlot(block + Min(limit, kSize))); |
1342 | limit -= kSize; |
1343 | } |
1344 | } |
1345 | |
1346 | void EternalHandles::IterateYoungRoots(RootVisitor* visitor) { |
1347 | for (int index : young_node_indices_) { |
1348 | visitor->VisitRootPointer(Root::kEternalHandles, nullptr, |
1349 | FullObjectSlot(GetLocation(index))); |
1350 | } |
1351 | } |
1352 | |
1353 | void EternalHandles::PostGarbageCollectionProcessing() { |
1354 | size_t last = 0; |
1355 | for (int index : young_node_indices_) { |
1356 | if (ObjectInYoungGeneration(Object(*GetLocation(index)))) { |
1357 | young_node_indices_[last++] = index; |
1358 | } |
1359 | } |
1360 | DCHECK_LE(last, young_node_indices_.size()); |
1361 | young_node_indices_.resize(last); |
1362 | } |
1363 | |
1364 | void EternalHandles::Create(Isolate* isolate, Object object, int* index) { |
1365 | DCHECK_EQ(kInvalidIndex, *index); |
1366 | if (object == Object()) return; |
1367 | Object the_hole = ReadOnlyRoots(isolate).the_hole_value(); |
1368 | DCHECK_NE(the_hole, object); |
1369 | int block = size_ >> kShift; |
1370 | int offset = size_ & kMask; |
1371 | // Need to resize. |
1372 | if (offset == 0) { |
1373 | Address* next_block = new Address[kSize]; |
1374 | MemsetPointer(FullObjectSlot(next_block), the_hole, kSize); |
1375 | blocks_.push_back(next_block); |
1376 | } |
1377 | DCHECK_EQ(the_hole->ptr(), blocks_[block][offset]); |
1378 | blocks_[block][offset] = object->ptr(); |
1379 | if (ObjectInYoungGeneration(object)) { |
1380 | young_node_indices_.push_back(size_); |
1381 | } |
1382 | *index = size_++; |
1383 | } |
1384 | |
1385 | } // namespace internal |
1386 | } // namespace v8 |
1387 | |