1 | // Copyright 2011 the V8 project authors. All rights reserved. |
2 | // Use of this source code is governed by a BSD-style license that can be |
3 | // found in the LICENSE file. |
4 | |
5 | #include "src/heap/objects-visiting.h" |
6 | |
7 | #include "src/heap/heap-inl.h" |
8 | #include "src/heap/mark-compact-inl.h" |
9 | #include "src/heap/objects-visiting-inl.h" |
10 | |
11 | namespace v8 { |
12 | namespace internal { |
13 | |
14 | // We don't record weak slots during marking or scavenges. Instead we do it |
15 | // once when we complete mark-compact cycle. Note that write barrier has no |
16 | // effect if we are already in the middle of compacting mark-sweep cycle and we |
17 | // have to record slots manually. |
18 | static bool MustRecordSlots(Heap* heap) { |
19 | return heap->gc_state() == Heap::MARK_COMPACT && |
20 | heap->mark_compact_collector()->is_compacting(); |
21 | } |
22 | |
23 | |
// Traits template describing how to traverse the weak list threaded through
// objects of type T: how to read/write the weak-next link, which object
// holds that slot, and what to do with live and dead (phantom) elements.
// Specialized below for Code, Context and AllocationSite.
template <class T>
struct WeakListVisitor;
26 | |
// Walks the weak list rooted at |list|, asking |retainer| which elements to
// keep. Dead elements are unlinked (and visited as phantoms); live elements
// are re-threaded and visited via WeakListVisitor<T>. Returns the new list
// head, or undefined if the list became empty. When a compacting
// mark-sweep is in progress, updated weak-next slots are recorded with the
// collector explicitly, since the write barrier is ineffective then.
template <class T>
Object VisitWeakList(Heap* heap, Object list, WeakObjectRetainer* retainer) {
  Object undefined = ReadOnlyRoots(heap).undefined_value();
  Object head = undefined;
  T tail;  // Last retained element; null until the first retained element.
  bool record_slots = MustRecordSlots(heap);

  while (list != undefined) {
    // Check whether to keep the candidate in the list.
    T candidate = T::cast(list);

    Object retained = retainer->RetainAs(list);

    // Move to the next element before the WeakNext is cleared.
    list = WeakListVisitor<T>::WeakNext(candidate);

    if (retained != Object()) {
      if (head == undefined) {
        // First element in the list.
        head = retained;
      } else {
        // Subsequent elements in the list: link the previous tail to the
        // newly retained element.
        DCHECK(!tail.is_null());
        WeakListVisitor<T>::SetWeakNext(tail, retained);
        if (record_slots) {
          // Inform the collector about the updated weak-next slot, since
          // SetWeakNext uses a weak write barrier that does not record it.
          HeapObject slot_holder = WeakListVisitor<T>::WeakNextHolder(tail);
          int slot_offset = WeakListVisitor<T>::WeakNextOffset();
          ObjectSlot slot = slot_holder.RawField(slot_offset);
          MarkCompactCollector::RecordSlot(slot_holder, slot,
                                           HeapObject::cast(retained));
        }
      }
      // Retained object is new tail.
      DCHECK(!retained->IsUndefined(heap->isolate()));
      candidate = T::cast(retained);
      tail = candidate;

      // tail is a live object, visit it.
      WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);

    } else {
      WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
    }
  }

  // Terminate the list if there is one or more elements.
  if (!tail.is_null()) WeakListVisitor<T>::SetWeakNext(tail, undefined);
  return head;
}
76 | |
77 | template <class T> |
78 | static void ClearWeakList(Heap* heap, Object list) { |
79 | Object undefined = ReadOnlyRoots(heap).undefined_value(); |
80 | while (list != undefined) { |
81 | T candidate = T::cast(list); |
82 | list = WeakListVisitor<T>::WeakNext(candidate); |
83 | WeakListVisitor<T>::SetWeakNext(candidate, undefined); |
84 | } |
85 | } |
86 | |
// Weak-list traits for Code objects. The weak-next link is stored on the
// Code object's CodeDataContainer, not on the Code object itself.
template <>
struct WeakListVisitor<Code> {
  // Writes the next-code link with a weak write barrier (no slot recording).
  static void SetWeakNext(Code code, Object next) {
    code->code_data_container()->set_next_code_link(next,
                                                    UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object WeakNext(Code code) {
    return code->code_data_container()->next_code_link();
  }

  // The object that physically holds the weak-next slot.
  static HeapObject WeakNextHolder(Code code) {
    return code->code_data_container();
  }

  static int WeakNextOffset() { return CodeDataContainer::kNextCodeLinkOffset; }

  // Live Code objects need no extra processing.
  static void VisitLiveObject(Heap*, Code, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap* heap, Code code) {
    // Even though the code is dying, its code_data_container can still be
    // alive. Clear the next_code_link slot to avoid a dangling pointer.
    SetWeakNext(code, ReadOnlyRoots(heap).undefined_value());
  }
};
112 | |
113 | |
// Weak-list traits for (native) Context objects. Contexts are linked through
// the NEXT_CONTEXT_LINK slot and additionally own two nested weak lists of
// Code objects (optimized and deoptimized code).
template <>
struct WeakListVisitor<Context> {
  static void SetWeakNext(Context context, Object next) {
    context->set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object WeakNext(Context context) {
    return context->next_context_link();
  }

  // Contexts hold their own weak-next slot.
  static HeapObject WeakNextHolder(Context context) { return context; }

  static int WeakNextOffset() {
    // SizeFor(n) is the size of an n-element array, which equals the byte
    // offset of element n — here, the NEXT_CONTEXT_LINK slot.
    return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
  }

  static void VisitLiveObject(Heap* heap, Context context,
                              WeakObjectRetainer* retainer) {
    if (heap->gc_state() == Heap::MARK_COMPACT) {
      // Record the slots of the weak entries in the native context.
      for (int idx = Context::FIRST_WEAK_SLOT;
           idx < Context::NATIVE_CONTEXT_SLOTS; ++idx) {
        ObjectSlot slot = context->RawField(Context::OffsetOfElementAt(idx));
        MarkCompactCollector::RecordSlot(context, slot,
                                         HeapObject::cast(*slot));
      }
      // Code objects are always allocated in Code space, we do not have to
      // visit them during scavenges.
      DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
      DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST);
    }
  }

  // Prunes the nested weak list stored in the context slot |index| and
  // writes back (and, if compacting, records) the new list head.
  template <class T>
  static void DoWeakList(Heap* heap, Context context,
                         WeakObjectRetainer* retainer, int index) {
    // Visit the weak list, removing dead intermediate elements.
    Object list_head = VisitWeakList<T>(heap, context->get(index), retainer);

    // Update the list head.
    context->set(index, list_head, UPDATE_WRITE_BARRIER);

    if (MustRecordSlots(heap)) {
      // Record the updated slot if necessary.
      ObjectSlot head_slot = context->RawField(FixedArray::SizeFor(index));
      heap->mark_compact_collector()->RecordSlot(context, head_slot,
                                                 HeapObject::cast(list_head));
    }
  }

  static void VisitPhantomObject(Heap* heap, Context context) {
    // The context is dead: unlink its nested code lists so they do not keep
    // dangling next pointers.
    ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
    ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
  }
};
169 | |
170 | |
// Weak-list traits for AllocationSite objects, which store their weak-next
// link directly in their own weak_next field.
template <>
struct WeakListVisitor<AllocationSite> {
  static void SetWeakNext(AllocationSite obj, Object next) {
    obj->set_weak_next(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object WeakNext(AllocationSite obj) { return obj->weak_next(); }

  // AllocationSites hold their own weak-next slot.
  static HeapObject WeakNextHolder(AllocationSite obj) { return obj; }

  static int WeakNextOffset() { return AllocationSite::kWeakNextOffset; }

  // No extra processing is needed for live or dead allocation sites.
  static void VisitLiveObject(Heap*, AllocationSite, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, AllocationSite) {}
};
187 | |
// Explicit instantiations so the template definitions above are emitted in
// this translation unit; presumably called from other translation units
// (the Code instantiation is used internally via DoWeakList<Code>).
template Object VisitWeakList<Context>(Heap* heap, Object list,
                                       WeakObjectRetainer* retainer);

template Object VisitWeakList<AllocationSite>(Heap* heap, Object list,
                                              WeakObjectRetainer* retainer);
193 | } // namespace internal |
194 | } // namespace v8 |
195 | |