1 | // Copyright 2013 the V8 project authors. All rights reserved. |
2 | // Use of this source code is governed by a BSD-style license that can be |
3 | // found in the LICENSE file. |
4 | |
5 | #include "src/profiler/heap-snapshot-generator.h" |
6 | |
7 | #include <utility> |
8 | |
9 | #include "src/api-inl.h" |
10 | #include "src/assembler-inl.h" |
11 | #include "src/conversions.h" |
12 | #include "src/debug/debug.h" |
13 | #include "src/global-handles.h" |
14 | #include "src/layout-descriptor.h" |
15 | #include "src/objects-body-descriptors.h" |
16 | #include "src/objects-inl.h" |
17 | #include "src/objects/allocation-site-inl.h" |
18 | #include "src/objects/api-callbacks.h" |
19 | #include "src/objects/cell-inl.h" |
20 | #include "src/objects/feedback-cell-inl.h" |
21 | #include "src/objects/hash-table-inl.h" |
22 | #include "src/objects/js-array-buffer-inl.h" |
23 | #include "src/objects/js-array-inl.h" |
24 | #include "src/objects/js-collection-inl.h" |
25 | #include "src/objects/js-generator-inl.h" |
26 | #include "src/objects/js-promise-inl.h" |
27 | #include "src/objects/js-regexp-inl.h" |
28 | #include "src/objects/literal-objects-inl.h" |
29 | #include "src/objects/slots-inl.h" |
30 | #include "src/objects/struct-inl.h" |
31 | #include "src/profiler/allocation-tracker.h" |
32 | #include "src/profiler/heap-profiler.h" |
33 | #include "src/profiler/heap-snapshot-generator-inl.h" |
34 | #include "src/prototype.h" |
35 | #include "src/transitions-inl.h" |
36 | #include "src/vector.h" |
37 | #include "src/visitors.h" |
38 | |
39 | namespace v8 { |
40 | namespace internal { |
41 | |
// Constructs a named edge (a context variable, property, internal field,
// shortcut or weak reference). The source entry is not stored as a pointer:
// only its index is packed into bit_field_ next to the edge type, which
// keeps HeapGraphEdge small (see the size STATIC_ASSERTs in HeapSnapshot).
HeapGraphEdge::HeapGraphEdge(Type type, const char* name, HeapEntry* from,
                             HeapEntry* to)
    : bit_field_(TypeField::encode(type) |
                 FromIndexField::encode(from->index())),
      to_entry_(to),
      name_(name) {
  // Only the edge kinds that carry a string name may use this constructor;
  // indexed kinds (kElement, kHidden) must use the int-index overload below.
  DCHECK(type == kContextVariable
      || type == kProperty
      || type == kInternal
      || type == kShortcut
      || type == kWeak);
}
54 | |
// Constructs an indexed edge (an array element or a hidden reference).
// Shares the name_/index_ storage with the named constructor via a union,
// so the type recorded in bit_field_ determines which accessor is valid.
HeapGraphEdge::HeapGraphEdge(Type type, int index, HeapEntry* from,
                             HeapEntry* to)
    : bit_field_(TypeField::encode(type) |
                 FromIndexField::encode(from->index())),
      to_entry_(to),
      index_(index) {
  // Only the numerically-indexed edge kinds may use this constructor.
  DCHECK(type == kElement || type == kHidden);
}
63 | |
// Constructs a snapshot node. |index| is the entry's position inside the
// snapshot's entries vector (used by edges to refer back to their source),
// |id| is the stable SnapshotObjectId assigned by HeapObjectsMap, and
// |trace_node_id| links the entry to an allocation-tracker trace node
// (0 when allocation tracking is off).
HeapEntry::HeapEntry(HeapSnapshot* snapshot, int index, Type type,
                     const char* name, SnapshotObjectId id, size_t self_size,
                     unsigned trace_node_id)
    : type_(type),
      index_(index),
      children_count_(0),
      self_size_(self_size),
      snapshot_(snapshot),
      name_(name),
      id_(id),
      trace_node_id_(trace_node_id) {
  DCHECK_GE(index, 0);
}
77 | |
78 | void HeapEntry::SetNamedReference(HeapGraphEdge::Type type, |
79 | const char* name, |
80 | HeapEntry* entry) { |
81 | ++children_count_; |
82 | snapshot_->edges().emplace_back(type, name, this, entry); |
83 | } |
84 | |
85 | void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type, |
86 | int index, |
87 | HeapEntry* entry) { |
88 | ++children_count_; |
89 | snapshot_->edges().emplace_back(type, index, this, entry); |
90 | } |
91 | |
92 | void HeapEntry::SetNamedAutoIndexReference(HeapGraphEdge::Type type, |
93 | const char* description, |
94 | HeapEntry* child, |
95 | StringsStorage* names) { |
96 | int index = children_count_ + 1; |
97 | const char* name = description |
98 | ? names->GetFormatted("%d / %s" , index, description) |
99 | : names->GetName(index); |
100 | SetNamedReference(type, name, child); |
101 | } |
102 | |
// Debug helper: prints this entry and, recursively, its children up to
// |max_depth| levels, indenting each level by two columns. |prefix| and
// |edge_name| describe the edge through which this entry was reached.
void HeapEntry::Print(
    const char* prefix, const char* edge_name, int max_depth, int indent) {
  STATIC_ASSERT(sizeof(unsigned) == sizeof(id()));
  base::OS::Print("%6" PRIuS " @%6u %*c %s%s: " , self_size(), id(), indent, ' ',
                  prefix, edge_name);
  if (type() != kString) {
    base::OS::Print("%s %.40s\n" , TypeAsString(), name_);
  } else {
    // Strings are quoted, truncated to ~40 chars, with embedded newlines
    // escaped so the output stays one line per entry.
    base::OS::Print("\"" );
    const char* c = name_;
    while (*c && (c - name_) <= 40) {
      if (*c != '\n')
        base::OS::Print("%c" , *c);
      else
        base::OS::Print("\\n" );
      ++c;
    }
    base::OS::Print("\"\n" );
  }
  if (--max_depth == 0) return;
  for (auto i = children_begin(); i != children_end(); ++i) {
    HeapGraphEdge& edge = **i;
    const char* edge_prefix = "" ;
    EmbeddedVector<char, 64> index;
    const char* edge_name = index.start();
    // Choose a one-character prefix per edge kind and either the edge's
    // name or its numeric index as the label for the child line.
    switch (edge.type()) {
      case HeapGraphEdge::kContextVariable:
        edge_prefix = "#" ;
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kElement:
        SNPrintF(index, "%d" , edge.index());
        break;
      case HeapGraphEdge::kInternal:
        edge_prefix = "$" ;
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kProperty:
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kHidden:
        edge_prefix = "$" ;
        SNPrintF(index, "%d" , edge.index());
        break;
      case HeapGraphEdge::kShortcut:
        edge_prefix = "^" ;
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kWeak:
        edge_prefix = "w" ;
        edge_name = edge.name();
        break;
      default:
        SNPrintF(index, "!!! unknown edge type: %d " , edge.type());
    }
    edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
  }
}
161 | |
162 | const char* HeapEntry::TypeAsString() { |
163 | switch (type()) { |
164 | case kHidden: return "/hidden/" ; |
165 | case kObject: return "/object/" ; |
166 | case kClosure: return "/closure/" ; |
167 | case kString: return "/string/" ; |
168 | case kCode: return "/code/" ; |
169 | case kArray: return "/array/" ; |
170 | case kRegExp: return "/regexp/" ; |
171 | case kHeapNumber: return "/number/" ; |
172 | case kNative: return "/native/" ; |
173 | case kSynthetic: return "/synthetic/" ; |
174 | case kConsString: return "/concatenated string/" ; |
175 | case kSlicedString: return "/sliced string/" ; |
176 | case kSymbol: return "/symbol/" ; |
177 | case kBigInt: |
178 | return "/bigint/" ; |
179 | default: return "???" ; |
180 | } |
181 | } |
182 | |
HeapSnapshot::HeapSnapshot(HeapProfiler* profiler) : profiler_(profiler) {
  // It is very important to keep objects that form a heap snapshot
  // as small as possible. Check assumptions about data structure sizes.
  STATIC_ASSERT((kSystemPointerSize == 4 && sizeof(HeapGraphEdge) == 12) ||
                (kSystemPointerSize == 8 && sizeof(HeapGraphEdge) == 24));
  STATIC_ASSERT((kSystemPointerSize == 4 && sizeof(HeapEntry) == 28) ||
                (kSystemPointerSize == 8 && sizeof(HeapEntry) == 40));
  // Subroot entries are created lazily in AddGcSubrootEntry(); start with
  // all slots null so DCHECK_NULL there can catch double initialization.
  memset(&gc_subroot_entries_, 0, sizeof(gc_subroot_entries_));
}
192 | |
// Unregisters this snapshot from its profiler, which owns and destroys it.
// Callers must not touch the snapshot after this returns.
void HeapSnapshot::Delete() {
  profiler_->RemoveSnapshot(this);
}
196 | |
// Records the highest object id assigned so far, so later consumers can
// tell which objects existed when this snapshot was taken.
void HeapSnapshot::RememberLastJSObjectId() {
  max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id();
}
200 | |
201 | void HeapSnapshot::AddSyntheticRootEntries() { |
202 | AddRootEntry(); |
203 | AddGcRootsEntry(); |
204 | SnapshotObjectId id = HeapObjectsMap::kGcRootsFirstSubrootId; |
205 | for (int root = 0; root < static_cast<int>(Root::kNumberOfRoots); root++) { |
206 | AddGcSubrootEntry(static_cast<Root>(root), id); |
207 | id += HeapObjectsMap::kObjectIdStep; |
208 | } |
209 | DCHECK_EQ(HeapObjectsMap::kFirstAvailableObjectId, id); |
210 | } |
211 | |
// Creates the synthetic root entry. It must be created first so that it
// occupies index 0 in entries_, which the serializer relies on.
void HeapSnapshot::AddRootEntry() {
  DCHECK_NULL(root_entry_);
  DCHECK(entries_.empty());  // Root entry must be the first one.
  root_entry_ = AddEntry(HeapEntry::kSynthetic, "" ,
                         HeapObjectsMap::kInternalRootObjectId, 0, 0);
  DCHECK_EQ(1u, entries_.size());
  DCHECK_EQ(root_entry_, &entries_.front());
}
220 | |
// Creates the synthetic "(GC roots)" entry that groups all GC subroots.
void HeapSnapshot::AddGcRootsEntry() {
  DCHECK_NULL(gc_roots_entry_);
  gc_roots_entry_ = AddEntry(HeapEntry::kSynthetic, "(GC roots)" ,
                             HeapObjectsMap::kGcRootsObjectId, 0, 0);
}
226 | |
227 | void HeapSnapshot::AddGcSubrootEntry(Root root, SnapshotObjectId id) { |
228 | DCHECK_NULL(gc_subroot_entries_[static_cast<int>(root)]); |
229 | gc_subroot_entries_[static_cast<int>(root)] = |
230 | AddEntry(HeapEntry::kSynthetic, RootVisitor::RootName(root), id, 0, 0); |
231 | } |
232 | |
// Associates a script position (script id, line, column) with |entry|.
// Locations are stored by entry index, not by pointer, so they stay valid
// if the entries vector reallocates.
void HeapSnapshot::AddLocation(HeapEntry* entry, int scriptId, int line,
                               int col) {
  locations_.emplace_back(entry->index(), scriptId, line, col);
}
237 | |
// Appends a new entry to the snapshot and returns a pointer to it. The
// entry's index is its position in entries_. The returned pointer is only
// stable until the next AddEntry call (the vector may reallocate), which is
// why edges record indices rather than pointers.
HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
                                  const char* name,
                                  SnapshotObjectId id,
                                  size_t size,
                                  unsigned trace_node_id) {
  DCHECK(!is_complete());
  entries_.emplace_back(this, static_cast<int>(entries_.size()), type, name, id,
                        size, trace_node_id);
  return &entries_.back();
}
248 | |
249 | void HeapSnapshot::FillChildren() { |
250 | DCHECK(children().empty()); |
251 | int children_index = 0; |
252 | for (HeapEntry& entry : entries()) { |
253 | children_index = entry.set_children_index(children_index); |
254 | } |
255 | DCHECK_EQ(edges().size(), static_cast<size_t>(children_index)); |
256 | children().resize(edges().size()); |
257 | for (HeapGraphEdge& edge : edges()) { |
258 | edge.from()->add_child(&edge); |
259 | } |
260 | } |
261 | |
262 | HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) { |
263 | if (entries_by_id_cache_.empty()) { |
264 | CHECK(is_complete()); |
265 | entries_by_id_cache_.reserve(entries_.size()); |
266 | for (HeapEntry& entry : entries_) { |
267 | entries_by_id_cache_.emplace(entry.id(), &entry); |
268 | } |
269 | } |
270 | auto it = entries_by_id_cache_.find(id); |
271 | return it != entries_by_id_cache_.end() ? it->second : nullptr; |
272 | } |
273 | |
// Debug helper: dumps the whole snapshot graph starting at the root entry.
void HeapSnapshot::Print(int max_depth) {
  root()->Print("" , "" , max_depth, 0);
}
277 | |
// We split IDs on evens for embedder objects (see
// HeapObjectsMap::GenerateId) and odds for native objects.
// The first few ids are reserved for the synthetic entries created by
// HeapSnapshot::AddSyntheticRootEntries(): the root, the GC-roots node,
// and one id per GC root category. Real objects start at
// kFirstAvailableObjectId.
const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
    HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
    HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
    HeapObjectsMap::kGcRootsFirstSubrootId +
    static_cast<int>(Root::kNumberOfRoots) * HeapObjectsMap::kObjectIdStep;
288 | |
HeapObjectsMap::HeapObjectsMap(Heap* heap)
    : next_id_(kFirstAvailableObjectId), heap_(heap) {
  // The dummy element at zero index is needed as entries_map_ cannot hold
  // an entry with zero value. Otherwise it's impossible to tell if
  // LookupOrInsert has added a new item or just returned an existing one
  // having the value of zero.
  entries_.emplace_back(0, kNullAddress, 0, true);
}
297 | |
// Updates the address->entry mapping when the GC moves an object from
// |from| to |to|. Returns true iff |from| was a tracked object. The
// entries_ vector itself is never reordered here; only the addr fields and
// the hash-map values (indices into entries_) are patched.
bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
  DCHECK_NE(kNullAddress, to);
  DCHECK_NE(kNullAddress, from);
  if (from == to) return false;
  void* from_value = entries_map_.Remove(reinterpret_cast<void*>(from),
                                         ComputeAddressHash(from));
  if (from_value == nullptr) {
    // It may occur that some untracked object moves to an address X and there
    // is a tracked object at that address. In this case we should remove the
    // entry as we know that the object has died.
    void* to_value = entries_map_.Remove(reinterpret_cast<void*>(to),
                                         ComputeAddressHash(to));
    if (to_value != nullptr) {
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_value));
      entries_.at(to_entry_info_index).addr = kNullAddress;
    }
  } else {
    base::HashMap::Entry* to_entry = entries_map_.LookupOrInsert(
        reinterpret_cast<void*>(to), ComputeAddressHash(to));
    if (to_entry->value != nullptr) {
      // We found the existing entry with to address for an old object.
      // Without this operation we will have two EntryInfo's with the same
      // value in the addr field. It is bad because later at RemoveDeadEntries
      // one of these entries will be removed with the corresponding
      // entries_map_ entry.
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
      entries_.at(to_entry_info_index).addr = kNullAddress;
    }
    int from_entry_info_index =
        static_cast<int>(reinterpret_cast<intptr_t>(from_value));
    entries_.at(from_entry_info_index).addr = to;
    // Size of an object can change during its life, so to keep information
    // about the object in entries_ consistent, we have to adjust size when the
    // object is migrated.
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Move object from %p to %p old size %6d new size %6d\n" ,
             reinterpret_cast<void*>(from), reinterpret_cast<void*>(to),
             entries_.at(from_entry_info_index).size, object_size);
    }
    entries_.at(from_entry_info_index).size = object_size;
    to_entry->value = from_value;
  }
  return from_value != nullptr;
}
344 | |
345 | |
// Refreshes the recorded size of the object at |addr| without marking it
// as accessed (accessed=false), so a size update alone does not keep the
// entry alive across RemoveDeadEntries.
void HeapObjectsMap::UpdateObjectSize(Address addr, int size) {
  FindOrAddEntry(addr, size, false);
}
349 | |
350 | |
351 | SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) { |
352 | base::HashMap::Entry* entry = entries_map_.Lookup( |
353 | reinterpret_cast<void*>(addr), ComputeAddressHash(addr)); |
354 | if (entry == nullptr) return 0; |
355 | int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value)); |
356 | EntryInfo& entry_info = entries_.at(entry_index); |
357 | DCHECK(static_cast<uint32_t>(entries_.size()) > entries_map_.occupancy()); |
358 | return entry_info.id; |
359 | } |
360 | |
361 | |
// Returns the id for the object at |addr|, assigning a fresh id (advancing
// next_id_ by kObjectIdStep) if the address was not tracked yet. For an
// existing entry the size is refreshed and the accessed flag overwritten
// with |accessed| (default true), which keeps it alive in
// RemoveDeadEntries.
SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
                                                unsigned int size,
                                                bool accessed) {
  DCHECK(static_cast<uint32_t>(entries_.size()) > entries_map_.occupancy());
  base::HashMap::Entry* entry = entries_map_.LookupOrInsert(
      reinterpret_cast<void*>(addr), ComputeAddressHash(addr));
  if (entry->value != nullptr) {
    int entry_index =
        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
    EntryInfo& entry_info = entries_.at(entry_index);
    entry_info.accessed = accessed;
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object size : %p with old size %d and new size %d\n" ,
             reinterpret_cast<void*>(addr), entry_info.size, size);
    }
    entry_info.size = size;
    return entry_info.id;
  }
  // New object: the hash-map value is the index of the EntryInfo we are
  // about to push (never 0, thanks to the dummy entry at index 0).
  entry->value = reinterpret_cast<void*>(entries_.size());
  SnapshotObjectId id = next_id_;
  next_id_ += kObjectIdStep;
  entries_.push_back(EntryInfo(id, addr, size, accessed));
  DCHECK(static_cast<uint32_t>(entries_.size()) > entries_map_.occupancy());
  return id;
}
387 | |
// Ends stats tracking by discarding the accumulated time intervals.
void HeapObjectsMap::StopHeapObjectsTracking() { time_intervals_.clear(); }
389 | |
// Re-synchronizes the map with the live heap: forces a full precise GC,
// walks every surviving object to refresh (or create) its entry, then drops
// entries whose objects did not show up in the walk.
void HeapObjectsMap::UpdateHeapObjectsMap() {
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n" ,
           entries_map_.occupancy());
  }
  heap_->PreciseCollectAllGarbage(Heap::kNoGCFlags,
                                  GarbageCollectionReason::kHeapProfiler);
  HeapIterator iterator(heap_);
  for (HeapObject obj = iterator.next(); !obj.is_null();
       obj = iterator.next()) {
    // FindOrAddEntry marks the entry as accessed, protecting it from
    // RemoveDeadEntries below.
    FindOrAddEntry(obj->address(), obj->Size());
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object      : %p %6d. Next address is %p\n" ,
             reinterpret_cast<void*>(obj->address()), obj->Size(),
             reinterpret_cast<void*>(obj->address() + obj->Size()));
    }
  }
  RemoveDeadEntries();
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n" ,
           entries_map_.occupancy());
  }
}
413 | |
// Streams per-time-interval heap statistics to |stream|. Each interval
// covers the objects whose ids were assigned before the interval was
// closed; entries_ is ordered by id, so a single forward scan partitions it
// across all intervals. Only intervals whose count or size changed since
// the last push are emitted. Returns the last assigned object id; if
// |timestamp_us| is non-null it receives the total tracked time span.
SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream,
                                                      int64_t* timestamp_us) {
  UpdateHeapObjectsMap();
  time_intervals_.emplace_back(next_id_);
  int prefered_chunk_size = stream->GetChunkSize();
  std::vector<v8::HeapStatsUpdate> stats_buffer;
  DCHECK(!entries_.empty());
  EntryInfo* entry_info = &entries_.front();
  EntryInfo* end_entry_info = &entries_.back() + 1;
  for (size_t time_interval_index = 0;
       time_interval_index < time_intervals_.size(); ++time_interval_index) {
    TimeInterval& time_interval = time_intervals_[time_interval_index];
    SnapshotObjectId time_interval_id = time_interval.id;
    uint32_t entries_size = 0;
    EntryInfo* start_entry_info = entry_info;
    // Consume all entries whose ids fall before this interval's boundary.
    while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
      entries_size += entry_info->size;
      ++entry_info;
    }
    uint32_t entries_count =
        static_cast<uint32_t>(entry_info - start_entry_info);
    if (time_interval.count != entries_count ||
        time_interval.size != entries_size) {
      // Cache the new totals on the interval while emitting the update
      // (the assignments below are intentionally expressions).
      stats_buffer.emplace_back(static_cast<uint32_t>(time_interval_index),
                                time_interval.count = entries_count,
                                time_interval.size = entries_size);
      if (static_cast<int>(stats_buffer.size()) >= prefered_chunk_size) {
        OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
            &stats_buffer.front(), static_cast<int>(stats_buffer.size()));
        if (result == OutputStream::kAbort) return last_assigned_id();
        stats_buffer.clear();
      }
    }
  }
  DCHECK(entry_info == end_entry_info);
  // Flush whatever is left in the buffer.
  if (!stats_buffer.empty()) {
    OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
        &stats_buffer.front(), static_cast<int>(stats_buffer.size()));
    if (result == OutputStream::kAbort) return last_assigned_id();
  }
  stream->EndOfStream();
  if (timestamp_us) {
    *timestamp_us =
        (time_intervals_.back().timestamp - time_intervals_.front().timestamp)
            .InMicroseconds();
  }
  return last_assigned_id();
}
462 | |
463 | |
// Compacts entries_ in place, keeping only entries marked accessed (seen
// during the last heap walk) and clearing the mark for the next round.
// Hash-map values are re-pointed at the entries' new indices; unaccessed
// entries are dropped from both entries_ and entries_map_.
void HeapObjectsMap::RemoveDeadEntries() {
  // Index 0 is the permanent dummy entry; it is never moved or removed.
  DCHECK(entries_.size() > 0 && entries_.at(0).id == 0 &&
         entries_.at(0).addr == kNullAddress);
  size_t first_free_entry = 1;
  for (size_t i = 1; i < entries_.size(); ++i) {
    EntryInfo& entry_info = entries_.at(i);
    if (entry_info.accessed) {
      if (first_free_entry != i) {
        entries_.at(first_free_entry) = entry_info;
      }
      entries_.at(first_free_entry).accessed = false;
      base::HashMap::Entry* entry =
          entries_map_.Lookup(reinterpret_cast<void*>(entry_info.addr),
                              ComputeAddressHash(entry_info.addr));
      DCHECK(entry);
      entry->value = reinterpret_cast<void*>(first_free_entry);
      ++first_free_entry;
    } else {
      // A null addr means the object died and its map entry was already
      // removed (see MoveObject); only live addresses need removal here.
      if (entry_info.addr) {
        entries_map_.Remove(reinterpret_cast<void*>(entry_info.addr),
                            ComputeAddressHash(entry_info.addr));
      }
    }
  }
  entries_.erase(entries_.begin() + first_free_entry, entries_.end());

  DCHECK(static_cast<uint32_t>(entries_.size()) - 1 ==
         entries_map_.occupancy());
}
493 | |
// Explorer that walks V8 heap objects and fills |snapshot| with entries and
// edges. |progress| reports progress to the embedder; |resolver| lets the
// embedder provide names for global objects. generator_ is injected later
// via the snapshot generator before extraction starts.
V8HeapExplorer::V8HeapExplorer(HeapSnapshot* snapshot,
                               SnapshottingProgressReportingInterface* progress,
                               v8::HeapProfiler::ObjectNameResolver* resolver)
    : heap_(snapshot->profiler()->heap_object_map()->heap()),
      snapshot_(snapshot),
      names_(snapshot_->profiler()->names()),
      heap_object_map_(snapshot_->profiler()->heap_object_map()),
      progress_(progress),
      generator_(nullptr),
      global_object_name_resolver_(resolver) {}
504 | |
// HeapEntriesAllocator implementation: |ptr| is a raw HeapObject address
// smuggled through the generic HeapThing interface.
HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
  return AddEntry(HeapObject::cast(Object(reinterpret_cast<Address>(ptr))));
}
508 | |
509 | void V8HeapExplorer::(HeapEntry* entry, HeapObject object) { |
510 | if (object->IsJSFunction()) { |
511 | JSFunction func = JSFunction::cast(object); |
512 | ExtractLocationForJSFunction(entry, func); |
513 | |
514 | } else if (object->IsJSGeneratorObject()) { |
515 | JSGeneratorObject gen = JSGeneratorObject::cast(object); |
516 | ExtractLocationForJSFunction(entry, gen->function()); |
517 | |
518 | } else if (object->IsJSObject()) { |
519 | JSObject obj = JSObject::cast(object); |
520 | JSFunction maybe_constructor = GetConstructor(obj); |
521 | |
522 | if (!maybe_constructor.is_null()) { |
523 | ExtractLocationForJSFunction(entry, maybe_constructor); |
524 | } |
525 | } |
526 | } |
527 | |
528 | void V8HeapExplorer::(HeapEntry* entry, |
529 | JSFunction func) { |
530 | if (!func->shared()->script()->IsScript()) return; |
531 | Script script = Script::cast(func->shared()->script()); |
532 | int scriptId = script->id(); |
533 | int start = func->shared()->StartPosition(); |
534 | int line = script->GetLineNumber(start); |
535 | int col = script->GetColumnNumber(start); |
536 | snapshot_->AddLocation(entry, scriptId, line, col); |
537 | } |
538 | |
// Classifies |object| into a HeapEntry type and picks a display name for
// it, then creates the entry. Branch order matters: more specific checks
// (e.g. IsNativeContext, IsJSGlobalObject) must precede their supertypes
// (IsContext, IsJSObject). Anything unmatched becomes a kHidden system
// entry named by GetSystemEntryName.
HeapEntry* V8HeapExplorer::AddEntry(HeapObject object) {
  if (object->IsJSFunction()) {
    JSFunction func = JSFunction::cast(object);
    SharedFunctionInfo shared = func->shared();
    const char* name = names_->GetName(shared->Name());
    return AddEntry(object, HeapEntry::kClosure, name);
  } else if (object->IsJSBoundFunction()) {
    return AddEntry(object, HeapEntry::kClosure, "native_bind" );
  } else if (object->IsJSRegExp()) {
    JSRegExp re = JSRegExp::cast(object);
    return AddEntry(object,
                    HeapEntry::kRegExp,
                    names_->GetName(re->Pattern()));
  } else if (object->IsJSObject()) {
    const char* name = names_->GetName(
        GetConstructorName(JSObject::cast(object)));
    if (object->IsJSGlobalObject()) {
      // Embedder-provided tags (see TagGlobalObjects) are appended to the
      // constructor name for global objects.
      auto it = objects_tags_.find(JSGlobalObject::cast(object));
      if (it != objects_tags_.end()) {
        name = names_->GetFormatted("%s / %s" , name, it->second);
      }
    }
    return AddEntry(object, HeapEntry::kObject, name);
  } else if (object->IsString()) {
    String string = String::cast(object);
    // Cons and sliced strings get their own entry types so the front-end
    // can display them distinctly from flat strings.
    if (string->IsConsString()) {
      return AddEntry(object, HeapEntry::kConsString, "(concatenated string)" );
    } else if (string->IsSlicedString()) {
      return AddEntry(object, HeapEntry::kSlicedString, "(sliced string)" );
    } else {
      return AddEntry(object, HeapEntry::kString,
                      names_->GetName(String::cast(object)));
    }
  } else if (object->IsSymbol()) {
    if (Symbol::cast(object)->is_private())
      return AddEntry(object, HeapEntry::kHidden, "private symbol" );
    else
      return AddEntry(object, HeapEntry::kSymbol, "symbol" );
  } else if (object->IsBigInt()) {
    return AddEntry(object, HeapEntry::kBigInt, "bigint" );
  } else if (object->IsCode()) {
    return AddEntry(object, HeapEntry::kCode, "" );
  } else if (object->IsSharedFunctionInfo()) {
    String name = SharedFunctionInfo::cast(object)->Name();
    return AddEntry(object, HeapEntry::kCode, names_->GetName(name));
  } else if (object->IsScript()) {
    Object name = Script::cast(object)->name();
    return AddEntry(
        object, HeapEntry::kCode,
        name->IsString() ? names_->GetName(String::cast(name)) : "" );
  } else if (object->IsNativeContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / NativeContext" );
  } else if (object->IsContext()) {
    return AddEntry(object, HeapEntry::kObject, "system / Context" );
  } else if (object->IsFixedArray() || object->IsFixedDoubleArray() ||
             object->IsByteArray()) {
    return AddEntry(object, HeapEntry::kArray, "" );
  } else if (object->IsHeapNumber()) {
    return AddEntry(object, HeapEntry::kHeapNumber, "number" );
  }
  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
}
601 | |
// Convenience overload: derives the address and size from |object| and
// forwards to the Address-based AddEntry below.
HeapEntry* V8HeapExplorer::AddEntry(HeapObject object, HeapEntry::Type type,
                                    const char* name) {
  return AddEntry(object->address(), type, name, object->Size());
}
606 | |
// Creates the snapshot entry for the object at |address|: obtains (or
// assigns) its stable object id, looks up its allocation trace node when
// allocation tracking is active, and registers everything on the snapshot.
HeapEntry* V8HeapExplorer::AddEntry(Address address,
                                    HeapEntry::Type type,
                                    const char* name,
                                    size_t size) {
  SnapshotObjectId object_id = heap_object_map_->FindOrAddEntry(
      address, static_cast<unsigned int>(size));
  unsigned trace_node_id = 0;
  if (AllocationTracker* allocation_tracker =
      snapshot_->profiler()->allocation_tracker()) {
    trace_node_id =
        allocation_tracker->address_to_trace()->GetTraceNodeId(address);
  }
  return snapshot_->AddEntry(type, name, object_id, size, trace_node_id);
}
621 | |
// Produces a "system / ..." display name for internal (non-JS-visible)
// objects, keyed on the instance type. Maps get an extra level of detail:
// maps of string types are labelled with the concrete string kind.
const char* V8HeapExplorer::GetSystemEntryName(HeapObject object) {
  switch (object->map()->instance_type()) {
    case MAP_TYPE:
      switch (Map::cast(object)->instance_type()) {
#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
        case instance_type: return "system / Map (" #Name ")";
      STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
#undef MAKE_STRING_MAP_CASE
        default: return "system / Map" ;
      }
    case CELL_TYPE: return "system / Cell" ;
    case PROPERTY_CELL_TYPE: return "system / PropertyCell" ;
    case FOREIGN_TYPE: return "system / Foreign" ;
    case ODDBALL_TYPE: return "system / Oddball" ;
    case ALLOCATION_SITE_TYPE:
      return "system / AllocationSite" ;
#define MAKE_STRUCT_CASE(TYPE, Name, name) \
  case TYPE:                               \
    return "system / " #Name;
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return "system" ;
  }
}
646 | |
647 | int V8HeapExplorer::EstimateObjectsCount() { |
648 | HeapIterator it(heap_, HeapIterator::kFilterUnreachable); |
649 | int objects_count = 0; |
650 | while (!it.next().is_null()) ++objects_count; |
651 | return objects_count; |
652 | } |
653 | |
654 | class : public ObjectVisitor { |
655 | public: |
656 | (V8HeapExplorer* generator, HeapObject parent_obj, |
657 | HeapEntry* parent) |
658 | : generator_(generator), |
659 | parent_obj_(parent_obj), |
660 | parent_start_(parent_obj_.RawMaybeWeakField(0)), |
661 | parent_end_(parent_obj_.RawMaybeWeakField(parent_obj_->Size())), |
662 | parent_(parent), |
663 | next_index_(0) {} |
664 | void (HeapObject host, ObjectSlot start, |
665 | ObjectSlot end) override { |
666 | VisitPointers(host, MaybeObjectSlot(start), MaybeObjectSlot(end)); |
667 | } |
668 | void (HeapObject host, MaybeObjectSlot start, |
669 | MaybeObjectSlot end) override { |
670 | // [start,end) must be a sub-region of [parent_start_, parent_end), i.e. |
671 | // all the slots must point inside the object. |
672 | CHECK_LE(parent_start_, start); |
673 | CHECK_LE(end, parent_end_); |
674 | for (MaybeObjectSlot p = start; p < end; ++p) { |
675 | int field_index = static_cast<int>(p - parent_start_); |
676 | if (generator_->visited_fields_[field_index]) { |
677 | generator_->visited_fields_[field_index] = false; |
678 | continue; |
679 | } |
680 | HeapObject heap_object; |
681 | if ((*p)->GetHeapObject(&heap_object)) { |
682 | VisitHeapObjectImpl(heap_object, field_index); |
683 | } |
684 | } |
685 | } |
686 | |
687 | void (Code host, RelocInfo* rinfo) override { |
688 | Code target = Code::GetCodeFromTargetAddress(rinfo->target_address()); |
689 | VisitHeapObjectImpl(target, -1); |
690 | } |
691 | |
692 | void (Code host, RelocInfo* rinfo) override { |
693 | VisitHeapObjectImpl(rinfo->target_object(), -1); |
694 | } |
695 | |
696 | private: |
697 | V8_INLINE void (HeapObject heap_object, int field_index) { |
698 | DCHECK_LE(-1, field_index); |
699 | // The last parameter {field_offset} is only used to check some well-known |
700 | // skipped references, so passing -1 * kTaggedSize for objects embedded |
701 | // into code is fine. |
702 | generator_->SetHiddenReference(parent_obj_, parent_, next_index_++, |
703 | heap_object, field_index * kTaggedSize); |
704 | } |
705 | |
706 | V8HeapExplorer* ; |
707 | HeapObject ; |
708 | MaybeObjectSlot ; |
709 | MaybeObjectSlot ; |
710 | HeapEntry* ; |
711 | int ; |
712 | }; |
713 | |
714 | void V8HeapExplorer::(HeapEntry* entry, HeapObject obj) { |
715 | if (obj->IsJSGlobalProxy()) { |
716 | ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj)); |
717 | } else if (obj->IsJSArrayBuffer()) { |
718 | ExtractJSArrayBufferReferences(entry, JSArrayBuffer::cast(obj)); |
719 | } else if (obj->IsJSObject()) { |
720 | if (obj->IsJSWeakSet()) { |
721 | ExtractJSWeakCollectionReferences(entry, JSWeakSet::cast(obj)); |
722 | } else if (obj->IsJSWeakMap()) { |
723 | ExtractJSWeakCollectionReferences(entry, JSWeakMap::cast(obj)); |
724 | } else if (obj->IsJSSet()) { |
725 | ExtractJSCollectionReferences(entry, JSSet::cast(obj)); |
726 | } else if (obj->IsJSMap()) { |
727 | ExtractJSCollectionReferences(entry, JSMap::cast(obj)); |
728 | } else if (obj->IsJSPromise()) { |
729 | ExtractJSPromiseReferences(entry, JSPromise::cast(obj)); |
730 | } else if (obj->IsJSGeneratorObject()) { |
731 | ExtractJSGeneratorObjectReferences(entry, JSGeneratorObject::cast(obj)); |
732 | } |
733 | ExtractJSObjectReferences(entry, JSObject::cast(obj)); |
734 | } else if (obj->IsString()) { |
735 | ExtractStringReferences(entry, String::cast(obj)); |
736 | } else if (obj->IsSymbol()) { |
737 | ExtractSymbolReferences(entry, Symbol::cast(obj)); |
738 | } else if (obj->IsMap()) { |
739 | ExtractMapReferences(entry, Map::cast(obj)); |
740 | } else if (obj->IsSharedFunctionInfo()) { |
741 | ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj)); |
742 | } else if (obj->IsScript()) { |
743 | ExtractScriptReferences(entry, Script::cast(obj)); |
744 | } else if (obj->IsAccessorInfo()) { |
745 | ExtractAccessorInfoReferences(entry, AccessorInfo::cast(obj)); |
746 | } else if (obj->IsAccessorPair()) { |
747 | ExtractAccessorPairReferences(entry, AccessorPair::cast(obj)); |
748 | } else if (obj->IsCode()) { |
749 | ExtractCodeReferences(entry, Code::cast(obj)); |
750 | } else if (obj->IsCell()) { |
751 | ExtractCellReferences(entry, Cell::cast(obj)); |
752 | } else if (obj->IsFeedbackCell()) { |
753 | ExtractFeedbackCellReferences(entry, FeedbackCell::cast(obj)); |
754 | } else if (obj->IsPropertyCell()) { |
755 | ExtractPropertyCellReferences(entry, PropertyCell::cast(obj)); |
756 | } else if (obj->IsAllocationSite()) { |
757 | ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj)); |
758 | } else if (obj->IsArrayBoilerplateDescription()) { |
759 | ExtractArrayBoilerplateDescriptionReferences( |
760 | entry, ArrayBoilerplateDescription::cast(obj)); |
761 | } else if (obj->IsFeedbackVector()) { |
762 | ExtractFeedbackVectorReferences(entry, FeedbackVector::cast(obj)); |
763 | } else if (obj->IsDescriptorArray()) { |
764 | ExtractDescriptorArrayReferences(entry, DescriptorArray::cast(obj)); |
765 | } else if (obj->IsWeakFixedArray()) { |
766 | ExtractWeakArrayReferences(WeakFixedArray::kHeaderSize, entry, |
767 | WeakFixedArray::cast(obj)); |
768 | } else if (obj->IsWeakArrayList()) { |
769 | ExtractWeakArrayReferences(WeakArrayList::kHeaderSize, entry, |
770 | WeakArrayList::cast(obj)); |
771 | } else if (obj->IsContext()) { |
772 | ExtractContextReferences(entry, Context::cast(obj)); |
773 | } else if (obj->IsEphemeronHashTable()) { |
774 | ExtractEphemeronHashTableReferences(entry, EphemeronHashTable::cast(obj)); |
775 | } else if (obj->IsFixedArray()) { |
776 | ExtractFixedArrayReferences(entry, FixedArray::cast(obj)); |
777 | } |
778 | } |
779 | |
780 | void V8HeapExplorer::(HeapEntry* entry, |
781 | JSGlobalProxy proxy) { |
782 | SetInternalReference(entry, "native_context" , proxy->native_context(), |
783 | JSGlobalProxy::kNativeContextOffset); |
784 | } |
785 | |
786 | void V8HeapExplorer::(HeapEntry* entry, |
787 | JSObject js_obj) { |
788 | HeapObject obj = js_obj; |
789 | ExtractPropertyReferences(js_obj, entry); |
790 | ExtractElementReferences(js_obj, entry); |
791 | ExtractInternalReferences(js_obj, entry); |
792 | Isolate* isolate = Isolate::FromHeap(heap_); |
793 | PrototypeIterator iter(isolate, js_obj); |
794 | ReadOnlyRoots roots(isolate); |
795 | SetPropertyReference(entry, roots.proto_string(), iter.GetCurrent()); |
796 | if (obj->IsJSBoundFunction()) { |
797 | JSBoundFunction js_fun = JSBoundFunction::cast(obj); |
798 | TagObject(js_fun->bound_arguments(), "(bound arguments)" ); |
799 | SetInternalReference(entry, "bindings" , js_fun->bound_arguments(), |
800 | JSBoundFunction::kBoundArgumentsOffset); |
801 | SetInternalReference(entry, "bound_this" , js_fun->bound_this(), |
802 | JSBoundFunction::kBoundThisOffset); |
803 | SetInternalReference(entry, "bound_function" , |
804 | js_fun->bound_target_function(), |
805 | JSBoundFunction::kBoundTargetFunctionOffset); |
806 | FixedArray bindings = js_fun->bound_arguments(); |
807 | for (int i = 0; i < bindings->length(); i++) { |
808 | const char* reference_name = names_->GetFormatted("bound_argument_%d" , i); |
809 | SetNativeBindReference(entry, reference_name, bindings->get(i)); |
810 | } |
811 | } else if (obj->IsJSFunction()) { |
812 | JSFunction js_fun = JSFunction::cast(js_obj); |
813 | if (js_fun->has_prototype_slot()) { |
814 | Object proto_or_map = js_fun->prototype_or_initial_map(); |
815 | if (!proto_or_map->IsTheHole(isolate)) { |
816 | if (!proto_or_map->IsMap()) { |
817 | SetPropertyReference(entry, roots.prototype_string(), proto_or_map, |
818 | nullptr, |
819 | JSFunction::kPrototypeOrInitialMapOffset); |
820 | } else { |
821 | SetPropertyReference(entry, roots.prototype_string(), |
822 | js_fun->prototype()); |
823 | SetInternalReference(entry, "initial_map" , proto_or_map, |
824 | JSFunction::kPrototypeOrInitialMapOffset); |
825 | } |
826 | } |
827 | } |
828 | SharedFunctionInfo shared_info = js_fun->shared(); |
829 | TagObject(js_fun->raw_feedback_cell(), "(function feedback cell)" ); |
830 | SetInternalReference(entry, "feedback_cell" , js_fun->raw_feedback_cell(), |
831 | JSFunction::kFeedbackCellOffset); |
832 | TagObject(shared_info, "(shared function info)" ); |
833 | SetInternalReference(entry, "shared" , shared_info, |
834 | JSFunction::kSharedFunctionInfoOffset); |
835 | TagObject(js_fun->context(), "(context)" ); |
836 | SetInternalReference(entry, "context" , js_fun->context(), |
837 | JSFunction::kContextOffset); |
838 | SetInternalReference(entry, "code" , js_fun->code(), |
839 | JSFunction::kCodeOffset); |
840 | } else if (obj->IsJSGlobalObject()) { |
841 | JSGlobalObject global_obj = JSGlobalObject::cast(obj); |
842 | SetInternalReference(entry, "native_context" , global_obj->native_context(), |
843 | JSGlobalObject::kNativeContextOffset); |
844 | SetInternalReference(entry, "global_proxy" , global_obj->global_proxy(), |
845 | JSGlobalObject::kGlobalProxyOffset); |
846 | STATIC_ASSERT(JSGlobalObject::kSize - JSObject::kHeaderSize == |
847 | 2 * kTaggedSize); |
848 | } else if (obj->IsJSArrayBufferView()) { |
849 | JSArrayBufferView view = JSArrayBufferView::cast(obj); |
850 | SetInternalReference(entry, "buffer" , view->buffer(), |
851 | JSArrayBufferView::kBufferOffset); |
852 | } |
853 | |
854 | TagObject(js_obj->raw_properties_or_hash(), "(object properties)" ); |
855 | SetInternalReference(entry, "properties" , js_obj->raw_properties_or_hash(), |
856 | JSObject::kPropertiesOrHashOffset); |
857 | |
858 | TagObject(js_obj->elements(), "(object elements)" ); |
859 | SetInternalReference(entry, "elements" , js_obj->elements(), |
860 | JSObject::kElementsOffset); |
861 | } |
862 | |
863 | void V8HeapExplorer::(HeapEntry* entry, String string) { |
864 | if (string->IsConsString()) { |
865 | ConsString cs = ConsString::cast(string); |
866 | SetInternalReference(entry, "first" , cs->first(), ConsString::kFirstOffset); |
867 | SetInternalReference(entry, "second" , cs->second(), |
868 | ConsString::kSecondOffset); |
869 | } else if (string->IsSlicedString()) { |
870 | SlicedString ss = SlicedString::cast(string); |
871 | SetInternalReference(entry, "parent" , ss->parent(), |
872 | SlicedString::kParentOffset); |
873 | } else if (string->IsThinString()) { |
874 | ThinString ts = ThinString::cast(string); |
875 | SetInternalReference(entry, "actual" , ts->actual(), |
876 | ThinString::kActualOffset); |
877 | } |
878 | } |
879 | |
880 | void V8HeapExplorer::(HeapEntry* entry, Symbol symbol) { |
881 | SetInternalReference(entry, "name" , symbol->name(), Symbol::kNameOffset); |
882 | } |
883 | |
884 | void V8HeapExplorer::(HeapEntry* entry, |
885 | JSCollection collection) { |
886 | SetInternalReference(entry, "table" , collection->table(), |
887 | JSCollection::kTableOffset); |
888 | } |
889 | |
890 | void V8HeapExplorer::(HeapEntry* entry, |
891 | JSWeakCollection obj) { |
892 | SetInternalReference(entry, "table" , obj->table(), |
893 | JSWeakCollection::kTableOffset); |
894 | } |
895 | |
896 | void V8HeapExplorer::( |
897 | HeapEntry* entry, EphemeronHashTable table) { |
898 | for (int i = 0, capacity = table->Capacity(); i < capacity; ++i) { |
899 | int key_index = EphemeronHashTable::EntryToIndex(i) + |
900 | EphemeronHashTable::kEntryKeyIndex; |
901 | int value_index = EphemeronHashTable::EntryToValueIndex(i); |
902 | Object key = table->get(key_index); |
903 | Object value = table->get(value_index); |
904 | SetWeakReference(entry, key_index, key, |
905 | table->OffsetOfElementAt(key_index)); |
906 | SetWeakReference(entry, value_index, value, |
907 | table->OffsetOfElementAt(value_index)); |
908 | HeapEntry* key_entry = GetEntry(key); |
909 | HeapEntry* value_entry = GetEntry(value); |
910 | if (key_entry && value_entry) { |
911 | const char* edge_name = |
912 | names_->GetFormatted("key %s in WeakMap" , key_entry->name()); |
913 | key_entry->SetNamedAutoIndexReference(HeapGraphEdge::kInternal, edge_name, |
914 | value_entry, names_); |
915 | } |
916 | } |
917 | } |
918 | |
// These static arrays are used to prevent excessive code-size in
// ExtractContextReferences below, which would happen if we called
// SetInternalReference for every native context field in a macro.
static const struct {
  int index;         // slot index within the native context
  const char* name;  // edge name emitted in the snapshot
} native_context_names[] = {
#define CONTEXT_FIELD_INDEX_NAME(index, _, name) {Context::index, #name},
    NATIVE_CONTEXT_FIELDS(CONTEXT_FIELD_INDEX_NAME)
#undef CONTEXT_FIELD_INDEX_NAME
};
930 | |
931 | void V8HeapExplorer::(HeapEntry* entry, |
932 | Context context) { |
933 | if (!context->IsNativeContext() && context->is_declaration_context()) { |
934 | ScopeInfo scope_info = context->scope_info(); |
935 | // Add context allocated locals. |
936 | int context_locals = scope_info->ContextLocalCount(); |
937 | for (int i = 0; i < context_locals; ++i) { |
938 | String local_name = scope_info->ContextLocalName(i); |
939 | int idx = Context::MIN_CONTEXT_SLOTS + i; |
940 | SetContextReference(entry, local_name, context->get(idx), |
941 | Context::OffsetOfElementAt(idx)); |
942 | } |
943 | if (scope_info->HasFunctionName()) { |
944 | String name = String::cast(scope_info->FunctionName()); |
945 | int idx = scope_info->FunctionContextSlotIndex(name); |
946 | if (idx >= 0) { |
947 | SetContextReference(entry, name, context->get(idx), |
948 | Context::OffsetOfElementAt(idx)); |
949 | } |
950 | } |
951 | } |
952 | |
953 | SetInternalReference( |
954 | entry, "scope_info" , context->get(Context::SCOPE_INFO_INDEX), |
955 | FixedArray::OffsetOfElementAt(Context::SCOPE_INFO_INDEX)); |
956 | SetInternalReference(entry, "previous" , context->get(Context::PREVIOUS_INDEX), |
957 | FixedArray::OffsetOfElementAt(Context::PREVIOUS_INDEX)); |
958 | SetInternalReference(entry, "extension" , |
959 | context->get(Context::EXTENSION_INDEX), |
960 | FixedArray::OffsetOfElementAt(Context::EXTENSION_INDEX)); |
961 | SetInternalReference( |
962 | entry, "native_context" , context->get(Context::NATIVE_CONTEXT_INDEX), |
963 | FixedArray::OffsetOfElementAt(Context::NATIVE_CONTEXT_INDEX)); |
964 | |
965 | if (context->IsNativeContext()) { |
966 | TagObject(context->normalized_map_cache(), "(context norm. map cache)" ); |
967 | TagObject(context->embedder_data(), "(context data)" ); |
968 | for (size_t i = 0; i < arraysize(native_context_names); i++) { |
969 | int index = native_context_names[i].index; |
970 | const char* name = native_context_names[i].name; |
971 | SetInternalReference(entry, name, context->get(index), |
972 | FixedArray::OffsetOfElementAt(index)); |
973 | } |
974 | |
975 | SetWeakReference( |
976 | entry, "optimized_code_list" , |
977 | context->get(Context::OPTIMIZED_CODE_LIST), |
978 | FixedArray::OffsetOfElementAt(Context::OPTIMIZED_CODE_LIST)); |
979 | SetWeakReference( |
980 | entry, "deoptimized_code_list" , |
981 | context->get(Context::DEOPTIMIZED_CODE_LIST), |
982 | FixedArray::OffsetOfElementAt(Context::DEOPTIMIZED_CODE_LIST)); |
983 | STATIC_ASSERT(Context::OPTIMIZED_CODE_LIST == Context::FIRST_WEAK_SLOT); |
984 | STATIC_ASSERT(Context::NEXT_CONTEXT_LINK + 1 == |
985 | Context::NATIVE_CONTEXT_SLOTS); |
986 | STATIC_ASSERT(Context::FIRST_WEAK_SLOT + 3 == |
987 | Context::NATIVE_CONTEXT_SLOTS); |
988 | } |
989 | } |
990 | |
991 | void V8HeapExplorer::(HeapEntry* entry, Map map) { |
992 | MaybeObject maybe_raw_transitions_or_prototype_info = map->raw_transitions(); |
993 | HeapObject raw_transitions_or_prototype_info; |
994 | if (maybe_raw_transitions_or_prototype_info->GetHeapObjectIfWeak( |
995 | &raw_transitions_or_prototype_info)) { |
996 | DCHECK(raw_transitions_or_prototype_info->IsMap()); |
997 | SetWeakReference(entry, "transition" , raw_transitions_or_prototype_info, |
998 | Map::kTransitionsOrPrototypeInfoOffset); |
999 | } else if (maybe_raw_transitions_or_prototype_info->GetHeapObjectIfStrong( |
1000 | &raw_transitions_or_prototype_info)) { |
1001 | if (raw_transitions_or_prototype_info->IsTransitionArray()) { |
1002 | TransitionArray transitions = |
1003 | TransitionArray::cast(raw_transitions_or_prototype_info); |
1004 | if (map->CanTransition() && transitions->HasPrototypeTransitions()) { |
1005 | TagObject(transitions->GetPrototypeTransitions(), |
1006 | "(prototype transitions)" ); |
1007 | } |
1008 | TagObject(transitions, "(transition array)" ); |
1009 | SetInternalReference(entry, "transitions" , transitions, |
1010 | Map::kTransitionsOrPrototypeInfoOffset); |
1011 | } else if (raw_transitions_or_prototype_info->IsTuple3() || |
1012 | raw_transitions_or_prototype_info->IsFixedArray()) { |
1013 | TagObject(raw_transitions_or_prototype_info, "(transition)" ); |
1014 | SetInternalReference(entry, "transition" , |
1015 | raw_transitions_or_prototype_info, |
1016 | Map::kTransitionsOrPrototypeInfoOffset); |
1017 | } else if (map->is_prototype_map()) { |
1018 | TagObject(raw_transitions_or_prototype_info, "prototype_info" ); |
1019 | SetInternalReference(entry, "prototype_info" , |
1020 | raw_transitions_or_prototype_info, |
1021 | Map::kTransitionsOrPrototypeInfoOffset); |
1022 | } |
1023 | } |
1024 | DescriptorArray descriptors = map->instance_descriptors(); |
1025 | TagObject(descriptors, "(map descriptors)" ); |
1026 | SetInternalReference(entry, "descriptors" , descriptors, |
1027 | Map::kDescriptorsOffset); |
1028 | SetInternalReference(entry, "prototype" , map->prototype(), |
1029 | Map::kPrototypeOffset); |
1030 | if (FLAG_unbox_double_fields) { |
1031 | SetInternalReference(entry, "layout_descriptor" , map->layout_descriptor(), |
1032 | Map::kLayoutDescriptorOffset); |
1033 | } |
1034 | Object constructor_or_backpointer = map->constructor_or_backpointer(); |
1035 | if (constructor_or_backpointer->IsMap()) { |
1036 | TagObject(constructor_or_backpointer, "(back pointer)" ); |
1037 | SetInternalReference(entry, "back_pointer" , constructor_or_backpointer, |
1038 | Map::kConstructorOrBackPointerOffset); |
1039 | } else if (constructor_or_backpointer->IsFunctionTemplateInfo()) { |
1040 | TagObject(constructor_or_backpointer, "(constructor function data)" ); |
1041 | SetInternalReference(entry, "constructor_function_data" , |
1042 | constructor_or_backpointer, |
1043 | Map::kConstructorOrBackPointerOffset); |
1044 | } else { |
1045 | SetInternalReference(entry, "constructor" , constructor_or_backpointer, |
1046 | Map::kConstructorOrBackPointerOffset); |
1047 | } |
1048 | TagObject(map->dependent_code(), "(dependent code)" ); |
1049 | SetInternalReference(entry, "dependent_code" , map->dependent_code(), |
1050 | Map::kDependentCodeOffset); |
1051 | } |
1052 | |
1053 | void V8HeapExplorer::( |
1054 | HeapEntry* entry, SharedFunctionInfo shared) { |
1055 | String shared_name = shared->DebugName(); |
1056 | const char* name = nullptr; |
1057 | if (shared_name != ReadOnlyRoots(heap_).empty_string()) { |
1058 | name = names_->GetName(shared_name); |
1059 | TagObject(shared->GetCode(), names_->GetFormatted("(code for %s)" , name)); |
1060 | } else { |
1061 | TagObject(shared->GetCode(), |
1062 | names_->GetFormatted( |
1063 | "(%s code)" , Code::Kind2String(shared->GetCode()->kind()))); |
1064 | } |
1065 | |
1066 | if (shared->name_or_scope_info()->IsScopeInfo()) { |
1067 | TagObject(shared->name_or_scope_info(), "(function scope info)" ); |
1068 | } |
1069 | SetInternalReference(entry, "name_or_scope_info" , |
1070 | shared->name_or_scope_info(), |
1071 | SharedFunctionInfo::kNameOrScopeInfoOffset); |
1072 | SetInternalReference(entry, "script_or_debug_info" , |
1073 | shared->script_or_debug_info(), |
1074 | SharedFunctionInfo::kScriptOrDebugInfoOffset); |
1075 | SetInternalReference(entry, "function_data" , shared->function_data(), |
1076 | SharedFunctionInfo::kFunctionDataOffset); |
1077 | SetInternalReference( |
1078 | entry, "raw_outer_scope_info_or_feedback_metadata" , |
1079 | shared->raw_outer_scope_info_or_feedback_metadata(), |
1080 | SharedFunctionInfo::kOuterScopeInfoOrFeedbackMetadataOffset); |
1081 | } |
1082 | |
1083 | void V8HeapExplorer::(HeapEntry* entry, Script script) { |
1084 | SetInternalReference(entry, "source" , script->source(), |
1085 | Script::kSourceOffset); |
1086 | SetInternalReference(entry, "name" , script->name(), Script::kNameOffset); |
1087 | SetInternalReference(entry, "context_data" , script->context_data(), |
1088 | Script::kContextOffset); |
1089 | TagObject(script->line_ends(), "(script line ends)" ); |
1090 | SetInternalReference(entry, "line_ends" , script->line_ends(), |
1091 | Script::kLineEndsOffset); |
1092 | } |
1093 | |
1094 | void V8HeapExplorer::(HeapEntry* entry, |
1095 | AccessorInfo accessor_info) { |
1096 | SetInternalReference(entry, "name" , accessor_info->name(), |
1097 | AccessorInfo::kNameOffset); |
1098 | SetInternalReference(entry, "expected_receiver_type" , |
1099 | accessor_info->expected_receiver_type(), |
1100 | AccessorInfo::kExpectedReceiverTypeOffset); |
1101 | SetInternalReference(entry, "getter" , accessor_info->getter(), |
1102 | AccessorInfo::kGetterOffset); |
1103 | SetInternalReference(entry, "setter" , accessor_info->setter(), |
1104 | AccessorInfo::kSetterOffset); |
1105 | SetInternalReference(entry, "data" , accessor_info->data(), |
1106 | AccessorInfo::kDataOffset); |
1107 | } |
1108 | |
1109 | void V8HeapExplorer::(HeapEntry* entry, |
1110 | AccessorPair accessors) { |
1111 | SetInternalReference(entry, "getter" , accessors->getter(), |
1112 | AccessorPair::kGetterOffset); |
1113 | SetInternalReference(entry, "setter" , accessors->setter(), |
1114 | AccessorPair::kSetterOffset); |
1115 | } |
1116 | |
// Tags a builtin Code object with a "(<name> builtin)" label so it is
// identifiable in the snapshot before any JSFunction claims it.
void V8HeapExplorer::TagBuiltinCodeObject(Code code, const char* name) {
  TagObject(code, names_->GetFormatted("(%s builtin)" , name));
}
1120 | |
1121 | void V8HeapExplorer::(HeapEntry* entry, Code code) { |
1122 | TagObject(code->relocation_info(), "(code relocation info)" ); |
1123 | SetInternalReference(entry, "relocation_info" , code->relocation_info(), |
1124 | Code::kRelocationInfoOffset); |
1125 | TagObject(code->deoptimization_data(), "(code deopt data)" ); |
1126 | SetInternalReference(entry, "deoptimization_data" , |
1127 | code->deoptimization_data(), |
1128 | Code::kDeoptimizationDataOffset); |
1129 | TagObject(code->source_position_table(), "(source position table)" ); |
1130 | SetInternalReference(entry, "source_position_table" , |
1131 | code->source_position_table(), |
1132 | Code::kSourcePositionTableOffset); |
1133 | } |
1134 | |
1135 | void V8HeapExplorer::(HeapEntry* entry, Cell cell) { |
1136 | SetInternalReference(entry, "value" , cell->value(), Cell::kValueOffset); |
1137 | } |
1138 | |
1139 | void V8HeapExplorer::(HeapEntry* entry, |
1140 | FeedbackCell feedback_cell) { |
1141 | TagObject(feedback_cell, "(feedback cell)" ); |
1142 | SetInternalReference(entry, "value" , feedback_cell->value(), |
1143 | FeedbackCell::kValueOffset); |
1144 | } |
1145 | |
1146 | void V8HeapExplorer::(HeapEntry* entry, |
1147 | PropertyCell cell) { |
1148 | SetInternalReference(entry, "value" , cell->value(), |
1149 | PropertyCell::kValueOffset); |
1150 | TagObject(cell->dependent_code(), "(dependent code)" ); |
1151 | SetInternalReference(entry, "dependent_code" , cell->dependent_code(), |
1152 | PropertyCell::kDependentCodeOffset); |
1153 | } |
1154 | |
1155 | void V8HeapExplorer::(HeapEntry* entry, |
1156 | AllocationSite site) { |
1157 | SetInternalReference(entry, "transition_info" , |
1158 | site->transition_info_or_boilerplate(), |
1159 | AllocationSite::kTransitionInfoOrBoilerplateOffset); |
1160 | SetInternalReference(entry, "nested_site" , site->nested_site(), |
1161 | AllocationSite::kNestedSiteOffset); |
1162 | TagObject(site->dependent_code(), "(dependent code)" ); |
1163 | SetInternalReference(entry, "dependent_code" , site->dependent_code(), |
1164 | AllocationSite::kDependentCodeOffset); |
1165 | } |
1166 | |
1167 | void V8HeapExplorer::( |
1168 | HeapEntry* entry, ArrayBoilerplateDescription value) { |
1169 | SetInternalReference(entry, "constant_elements" , value->constant_elements(), |
1170 | ArrayBoilerplateDescription::kConstantElementsOffset); |
1171 | } |
1172 | |
1173 | class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator { |
1174 | public: |
1175 | JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer* explorer) |
1176 | : size_(size) |
1177 | , explorer_(explorer) { |
1178 | } |
1179 | HeapEntry* AllocateEntry(HeapThing ptr) override { |
1180 | return explorer_->AddEntry(reinterpret_cast<Address>(ptr), |
1181 | HeapEntry::kNative, "system / JSArrayBufferData" , |
1182 | size_); |
1183 | } |
1184 | private: |
1185 | size_t size_; |
1186 | V8HeapExplorer* explorer_; |
1187 | }; |
1188 | |
1189 | void V8HeapExplorer::(HeapEntry* entry, |
1190 | JSArrayBuffer buffer) { |
1191 | // Setup a reference to a native memory backing_store object. |
1192 | if (!buffer->backing_store()) return; |
1193 | size_t data_size = buffer->byte_length(); |
1194 | JSArrayBufferDataEntryAllocator allocator(data_size, this); |
1195 | HeapEntry* data_entry = |
1196 | generator_->FindOrAddEntry(buffer->backing_store(), &allocator); |
1197 | entry->SetNamedReference(HeapGraphEdge::kInternal, "backing_store" , |
1198 | data_entry); |
1199 | } |
1200 | |
1201 | void V8HeapExplorer::(HeapEntry* entry, |
1202 | JSPromise promise) { |
1203 | SetInternalReference(entry, "reactions_or_result" , |
1204 | promise->reactions_or_result(), |
1205 | JSPromise::kReactionsOrResultOffset); |
1206 | } |
1207 | |
1208 | void V8HeapExplorer::( |
1209 | HeapEntry* entry, JSGeneratorObject generator) { |
1210 | SetInternalReference(entry, "function" , generator->function(), |
1211 | JSGeneratorObject::kFunctionOffset); |
1212 | SetInternalReference(entry, "context" , generator->context(), |
1213 | JSGeneratorObject::kContextOffset); |
1214 | SetInternalReference(entry, "receiver" , generator->receiver(), |
1215 | JSGeneratorObject::kReceiverOffset); |
1216 | SetInternalReference(entry, "parameters_and_registers" , |
1217 | generator->parameters_and_registers(), |
1218 | JSGeneratorObject::kParametersAndRegistersOffset); |
1219 | } |
1220 | |
1221 | void V8HeapExplorer::(HeapEntry* entry, |
1222 | FixedArray array) { |
1223 | for (int i = 0, l = array->length(); i < l; ++i) { |
1224 | DCHECK(!HasWeakHeapObjectTag(array->get(i))); |
1225 | SetInternalReference(entry, i, array->get(i), array->OffsetOfElementAt(i)); |
1226 | } |
1227 | } |
1228 | |
1229 | void V8HeapExplorer::( |
1230 | HeapEntry* entry, FeedbackVector feedback_vector) { |
1231 | MaybeObject code = feedback_vector->optimized_code_weak_or_smi(); |
1232 | HeapObject code_heap_object; |
1233 | if (code->GetHeapObjectIfWeak(&code_heap_object)) { |
1234 | SetWeakReference(entry, "optimized code" , code_heap_object, |
1235 | FeedbackVector::kOptimizedCodeOffset); |
1236 | } |
1237 | } |
1238 | |
1239 | void V8HeapExplorer::(HeapEntry* entry, |
1240 | DescriptorArray array) { |
1241 | SetInternalReference(entry, "enum_cache" , array->enum_cache(), |
1242 | DescriptorArray::kEnumCacheOffset); |
1243 | MaybeObjectSlot start = MaybeObjectSlot(array->GetDescriptorSlot(0)); |
1244 | MaybeObjectSlot end = MaybeObjectSlot( |
1245 | array->GetDescriptorSlot(array->number_of_all_descriptors())); |
1246 | for (int i = 0; start + i < end; ++i) { |
1247 | MaybeObjectSlot slot = start + i; |
1248 | int offset = static_cast<int>(slot.address() - array->address()); |
1249 | MaybeObject object = *slot; |
1250 | HeapObject heap_object; |
1251 | if (object->GetHeapObjectIfWeak(&heap_object)) { |
1252 | SetWeakReference(entry, i, heap_object, offset); |
1253 | } else if (object->GetHeapObjectIfStrong(&heap_object)) { |
1254 | SetInternalReference(entry, i, heap_object, offset); |
1255 | } |
1256 | } |
1257 | } |
1258 | |
1259 | template <typename T> |
1260 | void V8HeapExplorer::(int , |
1261 | HeapEntry* entry, T array) { |
1262 | for (int i = 0; i < array->length(); ++i) { |
1263 | MaybeObject object = array->Get(i); |
1264 | HeapObject heap_object; |
1265 | if (object->GetHeapObjectIfWeak(&heap_object)) { |
1266 | SetWeakReference(entry, i, heap_object, header_size + i * kTaggedSize); |
1267 | } else if (object->GetHeapObjectIfStrong(&heap_object)) { |
1268 | SetInternalReference(entry, i, heap_object, |
1269 | header_size + i * kTaggedSize); |
1270 | } |
1271 | } |
1272 | } |
1273 | |
1274 | void V8HeapExplorer::(JSObject js_obj, |
1275 | HeapEntry* entry) { |
1276 | Isolate* isolate = js_obj->GetIsolate(); |
1277 | if (js_obj->HasFastProperties()) { |
1278 | DescriptorArray descs = js_obj->map()->instance_descriptors(); |
1279 | int real_size = js_obj->map()->NumberOfOwnDescriptors(); |
1280 | for (int i = 0; i < real_size; i++) { |
1281 | PropertyDetails details = descs->GetDetails(i); |
1282 | switch (details.location()) { |
1283 | case kField: { |
1284 | Representation r = details.representation(); |
1285 | if (r.IsSmi() || r.IsDouble()) break; |
1286 | |
1287 | Name k = descs->GetKey(i); |
1288 | FieldIndex field_index = FieldIndex::ForDescriptor(js_obj->map(), i); |
1289 | Object value = js_obj->RawFastPropertyAt(field_index); |
1290 | int field_offset = |
1291 | field_index.is_inobject() ? field_index.offset() : -1; |
1292 | |
1293 | SetDataOrAccessorPropertyReference(details.kind(), entry, k, value, |
1294 | nullptr, field_offset); |
1295 | break; |
1296 | } |
1297 | case kDescriptor: |
1298 | SetDataOrAccessorPropertyReference(details.kind(), entry, |
1299 | descs->GetKey(i), |
1300 | descs->GetStrongValue(i)); |
1301 | break; |
1302 | } |
1303 | } |
1304 | } else if (js_obj->IsJSGlobalObject()) { |
1305 | // We assume that global objects can only have slow properties. |
1306 | GlobalDictionary dictionary = |
1307 | JSGlobalObject::cast(js_obj)->global_dictionary(); |
1308 | int length = dictionary->Capacity(); |
1309 | ReadOnlyRoots roots(isolate); |
1310 | for (int i = 0; i < length; ++i) { |
1311 | if (!dictionary->IsKey(roots, dictionary->KeyAt(i))) continue; |
1312 | PropertyCell cell = dictionary->CellAt(i); |
1313 | Name name = cell->name(); |
1314 | Object value = cell->value(); |
1315 | PropertyDetails details = cell->property_details(); |
1316 | SetDataOrAccessorPropertyReference(details.kind(), entry, name, value); |
1317 | } |
1318 | } else { |
1319 | NameDictionary dictionary = js_obj->property_dictionary(); |
1320 | int length = dictionary->Capacity(); |
1321 | ReadOnlyRoots roots(isolate); |
1322 | for (int i = 0; i < length; ++i) { |
1323 | Object k = dictionary->KeyAt(i); |
1324 | if (!dictionary->IsKey(roots, k)) continue; |
1325 | Object value = dictionary->ValueAt(i); |
1326 | PropertyDetails details = dictionary->DetailsAt(i); |
1327 | SetDataOrAccessorPropertyReference(details.kind(), entry, Name::cast(k), |
1328 | value); |
1329 | } |
1330 | } |
1331 | } |
1332 | |
1333 | void V8HeapExplorer::(HeapEntry* entry, Name key, |
1334 | Object callback_obj, |
1335 | int field_offset) { |
1336 | if (!callback_obj->IsAccessorPair()) return; |
1337 | AccessorPair accessors = AccessorPair::cast(callback_obj); |
1338 | SetPropertyReference(entry, key, accessors, nullptr, field_offset); |
1339 | Object getter = accessors->getter(); |
1340 | if (!getter->IsOddball()) { |
1341 | SetPropertyReference(entry, key, getter, "get %s" ); |
1342 | } |
1343 | Object setter = accessors->setter(); |
1344 | if (!setter->IsOddball()) { |
1345 | SetPropertyReference(entry, key, setter, "set %s" ); |
1346 | } |
1347 | } |
1348 | |
1349 | void V8HeapExplorer::(JSObject js_obj, |
1350 | HeapEntry* entry) { |
1351 | ReadOnlyRoots roots = js_obj->GetReadOnlyRoots(); |
1352 | if (js_obj->HasObjectElements()) { |
1353 | FixedArray elements = FixedArray::cast(js_obj->elements()); |
1354 | int length = js_obj->IsJSArray() |
1355 | ? Smi::ToInt(JSArray::cast(js_obj)->length()) |
1356 | : elements->length(); |
1357 | for (int i = 0; i < length; ++i) { |
1358 | if (!elements->get(i)->IsTheHole(roots)) { |
1359 | SetElementReference(entry, i, elements->get(i)); |
1360 | } |
1361 | } |
1362 | } else if (js_obj->HasDictionaryElements()) { |
1363 | NumberDictionary dictionary = js_obj->element_dictionary(); |
1364 | int length = dictionary->Capacity(); |
1365 | for (int i = 0; i < length; ++i) { |
1366 | Object k = dictionary->KeyAt(i); |
1367 | if (!dictionary->IsKey(roots, k)) continue; |
1368 | DCHECK(k->IsNumber()); |
1369 | uint32_t index = static_cast<uint32_t>(k->Number()); |
1370 | SetElementReference(entry, index, dictionary->ValueAt(i)); |
1371 | } |
1372 | } |
1373 | } |
1374 | |
1375 | void V8HeapExplorer::(JSObject js_obj, |
1376 | HeapEntry* entry) { |
1377 | int length = js_obj->GetEmbedderFieldCount(); |
1378 | for (int i = 0; i < length; ++i) { |
1379 | Object o = js_obj->GetEmbedderField(i); |
1380 | SetInternalReference(entry, i, o, js_obj->GetEmbedderFieldOffset(i)); |
1381 | } |
1382 | } |
1383 | |
// Returns the constructor of |receiver|, or a null JSFunction when no
// constructor can be determined. Uses a HandleScope only for the lookup;
// the raw result is extracted before the scope closes.
JSFunction V8HeapExplorer::GetConstructor(JSReceiver receiver) {
  Isolate* isolate = receiver->GetIsolate();
  DisallowHeapAllocation no_gc;
  HandleScope scope(isolate);
  MaybeHandle<JSFunction> maybe_constructor =
      JSReceiver::GetConstructor(handle(receiver, isolate));

  if (maybe_constructor.is_null()) return JSFunction();

  return *maybe_constructor.ToHandleChecked();
}
1395 | |
// Returns the constructor name used to label |object| in the snapshot.
// Functions are uniformly labelled with the "closure" string.
String V8HeapExplorer::GetConstructorName(JSObject object) {
  Isolate* isolate = object->GetIsolate();
  if (object->IsJSFunction()) return ReadOnlyRoots(isolate).closure_string();
  DisallowHeapAllocation no_gc;
  HandleScope scope(isolate);
  return *JSReceiver::GetConstructorName(handle(object, isolate));
}
1403 | |
1404 | HeapEntry* V8HeapExplorer::GetEntry(Object obj) { |
1405 | return obj->IsHeapObject() ? generator_->FindOrAddEntry( |
1406 | reinterpret_cast<void*>(obj.ptr()), this) |
1407 | : nullptr; |
1408 | } |
1409 | |
1410 | class : public RootVisitor { |
1411 | public: |
1412 | explicit (V8HeapExplorer* explorer) |
1413 | : explorer_(explorer), visiting_weak_roots_(false) {} |
1414 | |
1415 | void () { visiting_weak_roots_ = true; } |
1416 | |
1417 | void (Root root, const char* description, |
1418 | FullObjectSlot object) override { |
1419 | if (root == Root::kBuiltins) { |
1420 | explorer_->TagBuiltinCodeObject(Code::cast(*object), description); |
1421 | } |
1422 | explorer_->SetGcSubrootReference(root, description, visiting_weak_roots_, |
1423 | *object); |
1424 | } |
1425 | |
1426 | void (Root root, const char* description, |
1427 | FullObjectSlot start, FullObjectSlot end) override { |
1428 | for (FullObjectSlot p = start; p < end; ++p) { |
1429 | VisitRootPointer(root, description, p); |
1430 | } |
1431 | } |
1432 | |
1433 | private: |
1434 | V8HeapExplorer* ; |
1435 | bool ; |
1436 | }; |
1437 | |
1438 | bool V8HeapExplorer::IterateAndExtractReferences( |
1439 | HeapSnapshotGenerator* generator) { |
1440 | generator_ = generator; |
1441 | |
1442 | // Create references to the synthetic roots. |
1443 | SetRootGcRootsReference(); |
1444 | for (int root = 0; root < static_cast<int>(Root::kNumberOfRoots); root++) { |
1445 | SetGcRootsReference(static_cast<Root>(root)); |
1446 | } |
1447 | |
1448 | // Make sure builtin code objects get their builtin tags |
1449 | // first. Otherwise a particular JSFunction object could set |
1450 | // its custom name to a generic builtin. |
1451 | RootsReferencesExtractor (this); |
1452 | ReadOnlyRoots(heap_).Iterate(&extractor); |
1453 | heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG); |
1454 | extractor.SetVisitingWeakRoots(); |
1455 | heap_->IterateWeakGlobalHandles(&extractor); |
1456 | |
1457 | bool interrupted = false; |
1458 | |
1459 | HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable); |
1460 | // Heap iteration with filtering must be finished in any case. |
1461 | for (HeapObject obj = iterator.next(); !obj.is_null(); |
1462 | obj = iterator.next(), progress_->ProgressStep()) { |
1463 | if (interrupted) continue; |
1464 | |
1465 | size_t max_pointer = obj->Size() / kTaggedSize; |
1466 | if (max_pointer > visited_fields_.size()) { |
1467 | // Clear the current bits. |
1468 | std::vector<bool>().swap(visited_fields_); |
1469 | // Reallocate to right size. |
1470 | visited_fields_.resize(max_pointer, false); |
1471 | } |
1472 | |
1473 | HeapEntry* entry = GetEntry(obj); |
1474 | ExtractReferences(entry, obj); |
1475 | SetInternalReference(entry, "map" , obj->map(), HeapObject::kMapOffset); |
1476 | // Extract unvisited fields as hidden references and restore tags |
1477 | // of visited fields. |
1478 | IndexedReferencesExtractor (this, obj, entry); |
1479 | obj->Iterate(&refs_extractor); |
1480 | |
1481 | // Ensure visited_fields_ doesn't leak to the next object. |
1482 | for (size_t i = 0; i < max_pointer; ++i) { |
1483 | DCHECK(!visited_fields_[i]); |
1484 | } |
1485 | |
1486 | // Extract location for specific object types |
1487 | ExtractLocation(entry, obj); |
1488 | |
1489 | if (!progress_->ProgressReport(false)) interrupted = true; |
1490 | } |
1491 | |
1492 | generator_ = nullptr; |
1493 | return interrupted ? false : progress_->ProgressReport(true); |
1494 | } |
1495 | |
1496 | bool V8HeapExplorer::IsEssentialObject(Object object) { |
1497 | ReadOnlyRoots roots(heap_); |
1498 | return object->IsHeapObject() && !object->IsOddball() && |
1499 | object != roots.empty_byte_array() && |
1500 | object != roots.empty_fixed_array() && |
1501 | object != roots.empty_weak_fixed_array() && |
1502 | object != roots.empty_descriptor_array() && |
1503 | object != roots.fixed_array_map() && object != roots.cell_map() && |
1504 | object != roots.global_property_cell_map() && |
1505 | object != roots.shared_function_info_map() && |
1506 | object != roots.free_space_map() && |
1507 | object != roots.one_pointer_filler_map() && |
1508 | object != roots.two_pointer_filler_map(); |
1509 | } |
1510 | |
1511 | bool V8HeapExplorer::IsEssentialHiddenReference(Object parent, |
1512 | int field_offset) { |
1513 | if (parent->IsAllocationSite() && |
1514 | field_offset == AllocationSite::kWeakNextOffset) |
1515 | return false; |
1516 | if (parent->IsCodeDataContainer() && |
1517 | field_offset == CodeDataContainer::kNextCodeLinkOffset) |
1518 | return false; |
1519 | if (parent->IsContext() && |
1520 | field_offset == Context::OffsetOfElementAt(Context::NEXT_CONTEXT_LINK)) |
1521 | return false; |
1522 | return true; |
1523 | } |
1524 | |
1525 | void V8HeapExplorer::SetContextReference(HeapEntry* parent_entry, |
1526 | String reference_name, |
1527 | Object child_obj, int field_offset) { |
1528 | HeapEntry* child_entry = GetEntry(child_obj); |
1529 | if (child_entry == nullptr) return; |
1530 | parent_entry->SetNamedReference(HeapGraphEdge::kContextVariable, |
1531 | names_->GetName(reference_name), child_entry); |
1532 | MarkVisitedField(field_offset); |
1533 | } |
1534 | |
1535 | void V8HeapExplorer::MarkVisitedField(int offset) { |
1536 | if (offset < 0) return; |
1537 | int index = offset / kTaggedSize; |
1538 | DCHECK(!visited_fields_[index]); |
1539 | visited_fields_[index] = true; |
1540 | } |
1541 | |
1542 | void V8HeapExplorer::SetNativeBindReference(HeapEntry* parent_entry, |
1543 | const char* reference_name, |
1544 | Object child_obj) { |
1545 | HeapEntry* child_entry = GetEntry(child_obj); |
1546 | if (child_entry == nullptr) return; |
1547 | parent_entry->SetNamedReference(HeapGraphEdge::kShortcut, reference_name, |
1548 | child_entry); |
1549 | } |
1550 | |
1551 | void V8HeapExplorer::SetElementReference(HeapEntry* parent_entry, int index, |
1552 | Object child_obj) { |
1553 | HeapEntry* child_entry = GetEntry(child_obj); |
1554 | if (child_entry == nullptr) return; |
1555 | parent_entry->SetIndexedReference(HeapGraphEdge::kElement, index, |
1556 | child_entry); |
1557 | } |
1558 | |
1559 | void V8HeapExplorer::SetInternalReference(HeapEntry* parent_entry, |
1560 | const char* reference_name, |
1561 | Object child_obj, int field_offset) { |
1562 | HeapEntry* child_entry = GetEntry(child_obj); |
1563 | if (child_entry == nullptr) return; |
1564 | if (IsEssentialObject(child_obj)) { |
1565 | parent_entry->SetNamedReference(HeapGraphEdge::kInternal, reference_name, |
1566 | child_entry); |
1567 | } |
1568 | MarkVisitedField(field_offset); |
1569 | } |
1570 | |
1571 | void V8HeapExplorer::SetInternalReference(HeapEntry* parent_entry, int index, |
1572 | Object child_obj, int field_offset) { |
1573 | HeapEntry* child_entry = GetEntry(child_obj); |
1574 | if (child_entry == nullptr) return; |
1575 | if (IsEssentialObject(child_obj)) { |
1576 | parent_entry->SetNamedReference(HeapGraphEdge::kInternal, |
1577 | names_->GetName(index), child_entry); |
1578 | } |
1579 | MarkVisitedField(field_offset); |
1580 | } |
1581 | |
1582 | void V8HeapExplorer::SetHiddenReference(HeapObject parent_obj, |
1583 | HeapEntry* parent_entry, int index, |
1584 | Object child_obj, int field_offset) { |
1585 | DCHECK_EQ(parent_entry, GetEntry(parent_obj)); |
1586 | HeapEntry* child_entry = GetEntry(child_obj); |
1587 | if (child_entry != nullptr && IsEssentialObject(child_obj) && |
1588 | IsEssentialHiddenReference(parent_obj, field_offset)) { |
1589 | parent_entry->SetIndexedReference(HeapGraphEdge::kHidden, index, |
1590 | child_entry); |
1591 | } |
1592 | } |
1593 | |
1594 | void V8HeapExplorer::SetWeakReference(HeapEntry* parent_entry, |
1595 | const char* reference_name, |
1596 | Object child_obj, int field_offset) { |
1597 | HeapEntry* child_entry = GetEntry(child_obj); |
1598 | if (child_entry == nullptr) return; |
1599 | if (IsEssentialObject(child_obj)) { |
1600 | parent_entry->SetNamedReference(HeapGraphEdge::kWeak, reference_name, |
1601 | child_entry); |
1602 | } |
1603 | MarkVisitedField(field_offset); |
1604 | } |
1605 | |
1606 | void V8HeapExplorer::SetWeakReference(HeapEntry* parent_entry, int index, |
1607 | Object child_obj, int field_offset) { |
1608 | HeapEntry* child_entry = GetEntry(child_obj); |
1609 | if (child_entry == nullptr) return; |
1610 | if (IsEssentialObject(child_obj)) { |
1611 | parent_entry->SetNamedReference( |
1612 | HeapGraphEdge::kWeak, names_->GetFormatted("%d" , index), child_entry); |
1613 | } |
1614 | MarkVisitedField(field_offset); |
1615 | } |
1616 | |
1617 | void V8HeapExplorer::SetDataOrAccessorPropertyReference( |
1618 | PropertyKind kind, HeapEntry* parent_entry, Name reference_name, |
1619 | Object child_obj, const char* name_format_string, int field_offset) { |
1620 | if (kind == kAccessor) { |
1621 | ExtractAccessorPairProperty(parent_entry, reference_name, child_obj, |
1622 | field_offset); |
1623 | } else { |
1624 | SetPropertyReference(parent_entry, reference_name, child_obj, |
1625 | name_format_string, field_offset); |
1626 | } |
1627 | } |
1628 | |
1629 | void V8HeapExplorer::SetPropertyReference(HeapEntry* parent_entry, |
1630 | Name reference_name, Object child_obj, |
1631 | const char* name_format_string, |
1632 | int field_offset) { |
1633 | HeapEntry* child_entry = GetEntry(child_obj); |
1634 | if (child_entry == nullptr) return; |
1635 | HeapGraphEdge::Type type = |
1636 | reference_name->IsSymbol() || String::cast(reference_name)->length() > 0 |
1637 | ? HeapGraphEdge::kProperty |
1638 | : HeapGraphEdge::kInternal; |
1639 | const char* name = |
1640 | name_format_string != nullptr && reference_name->IsString() |
1641 | ? names_->GetFormatted( |
1642 | name_format_string, |
1643 | String::cast(reference_name) |
1644 | ->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL) |
1645 | .get()) |
1646 | : names_->GetName(reference_name); |
1647 | |
1648 | parent_entry->SetNamedReference(type, name, child_entry); |
1649 | MarkVisitedField(field_offset); |
1650 | } |
1651 | |
1652 | void V8HeapExplorer::SetRootGcRootsReference() { |
1653 | snapshot_->root()->SetIndexedAutoIndexReference(HeapGraphEdge::kElement, |
1654 | snapshot_->gc_roots()); |
1655 | } |
1656 | |
1657 | void V8HeapExplorer::SetUserGlobalReference(Object child_obj) { |
1658 | HeapEntry* child_entry = GetEntry(child_obj); |
1659 | DCHECK_NOT_NULL(child_entry); |
1660 | snapshot_->root()->SetNamedAutoIndexReference(HeapGraphEdge::kShortcut, |
1661 | nullptr, child_entry, names_); |
1662 | } |
1663 | |
1664 | void V8HeapExplorer::SetGcRootsReference(Root root) { |
1665 | snapshot_->gc_roots()->SetIndexedAutoIndexReference( |
1666 | HeapGraphEdge::kElement, snapshot_->gc_subroot(root)); |
1667 | } |
1668 | |
// Attaches |child_obj| to the GC subroot node for |root|. For strong native
// contexts this additionally registers a user-root shortcut from the
// snapshot root to the context's global object.
void V8HeapExplorer::SetGcSubrootReference(Root root, const char* description,
                                           bool is_weak, Object child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == nullptr) return;
  // Well-known strong roots are named after their roots-table entry;
  // everything else gets an auto-indexed edge labelled with |description|.
  const char* name = GetStrongGcSubrootName(child_obj);
  HeapGraphEdge::Type edge_type =
      is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kInternal;
  if (name != nullptr) {
    snapshot_->gc_subroot(root)->SetNamedReference(edge_type, name,
                                                   child_entry);
  } else {
    snapshot_->gc_subroot(root)->SetNamedAutoIndexReference(
        edge_type, description, child_entry, names_);
  }

  // For full heap snapshots we do not emit user roots but rather rely on
  // regular GC roots to retain objects.
  if (FLAG_raw_heap_snapshots) return;

  // Add a shortcut to JS global object reference at snapshot root.
  // That allows the user to easily find global objects. They are
  // also used as starting points in distance calculations.
  if (is_weak || !child_obj->IsNativeContext()) return;

  JSGlobalObject global = Context::cast(child_obj)->global_object();
  if (!global->IsJSGlobalObject()) return;

  // Emit the shortcut only once per distinct global object.
  if (!user_roots_.insert(global).second) return;

  SetUserGlobalReference(global);
}
1700 | |
// Returns the roots-table name for |object| if it is one of the strong or
// read-only roots, or nullptr otherwise. The lookup table is built lazily on
// first use and reused for the lifetime of this explorer.
const char* V8HeapExplorer::GetStrongGcSubrootName(Object object) {
  if (strong_gc_subroot_names_.empty()) {
    Isolate* isolate = Isolate::FromHeap(heap_);
    for (RootIndex root_index = RootIndex::kFirstStrongOrReadOnlyRoot;
         root_index <= RootIndex::kLastStrongOrReadOnlyRoot; ++root_index) {
      const char* name = RootsTable::name(root_index);
      // Later duplicates of the same root value keep the first name seen.
      strong_gc_subroot_names_.emplace(isolate->root(root_index), name);
    }
    CHECK(!strong_gc_subroot_names_.empty());
  }
  auto it = strong_gc_subroot_names_.find(object);
  return it != strong_gc_subroot_names_.end() ? it->second : nullptr;
}
1714 | |
1715 | void V8HeapExplorer::TagObject(Object obj, const char* tag) { |
1716 | if (IsEssentialObject(obj)) { |
1717 | HeapEntry* entry = GetEntry(obj); |
1718 | if (entry->name()[0] == '\0') { |
1719 | entry->set_name(tag); |
1720 | } |
1721 | } |
1722 | } |
1723 | |
1724 | class GlobalObjectsEnumerator : public RootVisitor { |
1725 | public: |
1726 | void VisitRootPointers(Root root, const char* description, |
1727 | FullObjectSlot start, FullObjectSlot end) override { |
1728 | for (FullObjectSlot p = start; p < end; ++p) { |
1729 | if (!(*p)->IsNativeContext()) continue; |
1730 | JSObject proxy = Context::cast(*p)->global_proxy(); |
1731 | if (!proxy->IsJSGlobalProxy()) continue; |
1732 | Object global = proxy->map()->prototype(); |
1733 | if (!global->IsJSGlobalObject()) continue; |
1734 | objects_.push_back(Handle<JSGlobalObject>(JSGlobalObject::cast(global), |
1735 | proxy->GetIsolate())); |
1736 | } |
1737 | } |
1738 | int count() const { return static_cast<int>(objects_.size()); } |
1739 | Handle<JSGlobalObject>& at(int i) { return objects_[i]; } |
1740 | |
1741 | private: |
1742 | std::vector<Handle<JSGlobalObject>> objects_; |
1743 | }; |
1744 | |
1745 | |
1746 | // Modifies heap. Must not be run during heap traversal. |
1747 | void V8HeapExplorer::TagGlobalObjects() { |
1748 | Isolate* isolate = Isolate::FromHeap(heap_); |
1749 | HandleScope scope(isolate); |
1750 | GlobalObjectsEnumerator enumerator; |
1751 | isolate->global_handles()->IterateAllRoots(&enumerator); |
1752 | std::vector<const char*> urls(enumerator.count()); |
1753 | for (int i = 0, l = enumerator.count(); i < l; ++i) { |
1754 | urls[i] = global_object_name_resolver_ |
1755 | ? global_object_name_resolver_->GetName(Utils::ToLocal( |
1756 | Handle<JSObject>::cast(enumerator.at(i)))) |
1757 | : nullptr; |
1758 | } |
1759 | |
1760 | DisallowHeapAllocation no_allocation; |
1761 | for (int i = 0, l = enumerator.count(); i < l; ++i) { |
1762 | if (urls[i]) objects_tags_.emplace(*enumerator.at(i), urls[i]); |
1763 | } |
1764 | } |
1765 | |
// Concrete EmbedderGraph handed to the embedder's BuildEmbedderGraph
// callback; simply accumulates the nodes and edges the embedder reports.
class EmbedderGraphImpl : public EmbedderGraph {
 public:
  // A directed, optionally named edge between two graph nodes.
  struct Edge {
    Node* from;
    Node* to;
    const char* name;
  };

  // Graph node that stands in for a V8 heap object; its name/size come from
  // the underlying object, so the Node accessors must not be used.
  class V8NodeImpl : public Node {
   public:
    explicit V8NodeImpl(Object object) : object_(object) {}
    Object GetObject() { return object_; }

    // Node overrides.
    bool IsEmbedderNode() override { return false; }
    const char* Name() override {
      // The name should be retrieved via GetObject().
      UNREACHABLE();
      return "" ;
    }
    size_t SizeInBytes() override {
      // The size should be retrieved via GetObject().
      UNREACHABLE();
      return 0;
    }

   private:
    Object object_;
  };

  // Wraps a V8 value in a V8NodeImpl owned by this graph.
  Node* V8Node(const v8::Local<v8::Value>& value) final {
    Handle<Object> object = v8::Utils::OpenHandle(*value);
    DCHECK(!object.is_null());
    return AddNode(std::unique_ptr<Node>(new V8NodeImpl(*object)));
  }

  // Takes ownership of |node|; the returned pointer stays valid for the
  // lifetime of the graph.
  Node* AddNode(std::unique_ptr<Node> node) final {
    Node* result = node.get();
    nodes_.push_back(std::move(node));
    return result;
  }

  void AddEdge(Node* from, Node* to, const char* name) final {
    edges_.push_back({from, to, name});
  }

  const std::vector<std::unique_ptr<Node>>& nodes() { return nodes_; }
  const std::vector<Edge>& edges() { return edges_; }

 private:
  std::vector<std::unique_ptr<Node>> nodes_;
  std::vector<Edge> edges_;
};
1819 | |
// Allocates snapshot entries for nodes that originate from the embedder
// graph (see EmbedderGraphImpl); V8 heap objects are handled by
// V8HeapExplorer's allocator instead.
class EmbedderGraphEntriesAllocator : public HeapEntriesAllocator {
 public:
  explicit EmbedderGraphEntriesAllocator(HeapSnapshot* snapshot)
      : snapshot_(snapshot),
        names_(snapshot_->profiler()->names()),
        heap_object_map_(snapshot_->profiler()->heap_object_map()) {}
  HeapEntry* AllocateEntry(HeapThing ptr) override;

 private:
  HeapSnapshot* snapshot_;
  StringsStorage* names_;
  HeapObjectsMap* heap_object_map_;
};
1833 | |
1834 | namespace { |
1835 | |
1836 | const char* EmbedderGraphNodeName(StringsStorage* names, |
1837 | EmbedderGraphImpl::Node* node) { |
1838 | const char* prefix = node->NamePrefix(); |
1839 | return prefix ? names->GetFormatted("%s %s" , prefix, node->Name()) |
1840 | : names->GetCopy(node->Name()); |
1841 | } |
1842 | |
1843 | HeapEntry::Type EmbedderGraphNodeType(EmbedderGraphImpl::Node* node) { |
1844 | return node->IsRootNode() ? HeapEntry::kSynthetic : HeapEntry::kNative; |
1845 | } |
1846 | |
1847 | // Merges the names of an embedder node and its wrapper node. |
1848 | // If the wrapper node name contains a tag suffix (part after '/') then the |
1849 | // result is the embedder node name concatenated with the tag suffix. |
1850 | // Otherwise, the result is the embedder node name. |
1851 | const char* MergeNames(StringsStorage* names, const char* embedder_name, |
1852 | const char* wrapper_name) { |
1853 | const char* suffix = strchr(wrapper_name, '/'); |
1854 | return suffix ? names->GetFormatted("%s %s" , embedder_name, suffix) |
1855 | : embedder_name; |
1856 | } |
1857 | |
1858 | } // anonymous namespace |
1859 | |
// Creates a snapshot entry for an embedder graph node; the entry's type and
// name are derived from the node itself.
HeapEntry* EmbedderGraphEntriesAllocator::AllocateEntry(HeapThing ptr) {
  EmbedderGraphImpl::Node* node =
      reinterpret_cast<EmbedderGraphImpl::Node*>(ptr);
  // Only embedder nodes are allocated here; V8 nodes resolve to existing
  // entries in EntryForEmbedderGraphNode().
  DCHECK(node->IsEmbedderNode());
  size_t size = node->SizeInBytes();
  // The id is the node's address shifted left by one. NOTE(review):
  // presumably this keeps embedder ids disjoint from the ids HeapObjectsMap
  // assigns to V8 objects — confirm against heap-profiler id allocation.
  return snapshot_->AddEntry(
      EmbedderGraphNodeType(node), EmbedderGraphNodeName(names_, node),
      static_cast<SnapshotObjectId>(reinterpret_cast<uintptr_t>(node) << 1),
      static_cast<int>(size), 0);
}
1870 | |
// Sets up the explorer for the embedder half of the snapshot; the actual
// work happens in IterateAndExtractReferences().
NativeObjectsExplorer::NativeObjectsExplorer(
    HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
    : isolate_(
          Isolate::FromHeap(snapshot->profiler()->heap_object_map()->heap())),
      snapshot_(snapshot),
      names_(snapshot_->profiler()->names()),
      embedder_graph_entries_allocator_(
          new EmbedderGraphEntriesAllocator(snapshot)) {}
1879 | |
1880 | HeapEntry* NativeObjectsExplorer::EntryForEmbedderGraphNode( |
1881 | EmbedderGraphImpl::Node* node) { |
1882 | EmbedderGraphImpl::Node* wrapper = node->WrapperNode(); |
1883 | if (wrapper) { |
1884 | node = wrapper; |
1885 | } |
1886 | if (node->IsEmbedderNode()) { |
1887 | return generator_->FindOrAddEntry(node, |
1888 | embedder_graph_entries_allocator_.get()); |
1889 | } else { |
1890 | EmbedderGraphImpl::V8NodeImpl* v8_node = |
1891 | static_cast<EmbedderGraphImpl::V8NodeImpl*>(node); |
1892 | Object object = v8_node->GetObject(); |
1893 | if (object->IsSmi()) return nullptr; |
1894 | return generator_->FindEntry( |
1895 | reinterpret_cast<void*>(Object::cast(object).ptr())); |
1896 | } |
1897 | } |
1898 | |
// Mirrors the embedder-provided graph into the snapshot: asks the embedder
// for its nodes/edges via the BuildEmbedderGraph callback, then creates the
// corresponding entries and references. Always returns true (no abort path).
bool NativeObjectsExplorer::IterateAndExtractReferences(
    HeapSnapshotGenerator* generator) {
  generator_ = generator;

  if (FLAG_heap_profiler_use_embedder_graph &&
      snapshot_->profiler()->HasBuildEmbedderGraphCallback()) {
    v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate_));
    DisallowHeapAllocation no_allocation;
    EmbedderGraphImpl graph;
    snapshot_->profiler()->BuildEmbedderGraph(isolate_, &graph);
    for (const auto& node : graph.nodes()) {
      // Root nodes hang directly off the snapshot root.
      if (node->IsRootNode()) {
        snapshot_->root()->SetIndexedAutoIndexReference(
            HeapGraphEdge::kElement, EntryForEmbedderGraphNode(node.get()));
      }
      // Adjust the name and the type of the V8 wrapper node.
      auto wrapper = node->WrapperNode();
      if (wrapper) {
        HeapEntry* wrapper_entry = EntryForEmbedderGraphNode(wrapper);
        wrapper_entry->set_name(
            MergeNames(names_, EmbedderGraphNodeName(names_, node.get()),
                       wrapper_entry->name()));
        wrapper_entry->set_type(EmbedderGraphNodeType(node.get()));
      }
    }
    // Fill edges of the graph.
    for (const auto& edge : graph.edges()) {
      HeapEntry* from = EntryForEmbedderGraphNode(edge.from);
      // |from| and |to| can be nullptr if the corresponding node is a V8 node
      // pointing to a Smi.
      if (!from) continue;
      HeapEntry* to = EntryForEmbedderGraphNode(edge.to);
      if (!to) continue;
      // Unnamed edges become elements; named edges internal references.
      if (edge.name == nullptr) {
        from->SetIndexedAutoIndexReference(HeapGraphEdge::kElement, to);
      } else {
        from->SetNamedReference(HeapGraphEdge::kInternal,
                                names_->GetCopy(edge.name), to);
      }
    }
  }
  generator_ = nullptr;
  return true;
}
1943 | |
// Wires up the two sub-explorers (V8 heap and embedder graph) that together
// fill in the snapshot's entries and references.
HeapSnapshotGenerator::HeapSnapshotGenerator(
    HeapSnapshot* snapshot,
    v8::ActivityControl* control,
    v8::HeapProfiler::ObjectNameResolver* resolver,
    Heap* heap)
    : snapshot_(snapshot),
      control_(control),
      v8_heap_explorer_(snapshot_, this, resolver),
      dom_explorer_(snapshot_, this),
      heap_(heap) {
}
1955 | |
namespace {
// RAII scope that clears the isolate's current context for its duration and
// restores the previous context on destruction.
class NullContextScope {
 public:
  explicit NullContextScope(Isolate* isolate)
      : isolate_(isolate), prev_(isolate->context()) {
    isolate_->set_context(Context());
  }
  ~NullContextScope() { isolate_->set_context(prev_); }

 private:
  Isolate* isolate_;
  Context prev_;
};
}  // namespace
1970 | |
// Produces the snapshot: tags global objects, garbage-collects to drop
// weakly-held objects, then fills in all entries and references. Returns
// false if the embedder aborted via ActivityControl.
bool HeapSnapshotGenerator::GenerateSnapshot() {
  v8_heap_explorer_.TagGlobalObjects();

  // TODO(1562) Profiler assumes that any object that is in the heap after
  // full GC is reachable from the root when computing dominators.
  // This is not true for weakly reachable objects.
  // As a temporary solution we call GC twice.
  heap_->PreciseCollectAllGarbage(Heap::kNoGCFlags,
                                  GarbageCollectionReason::kHeapProfiler);
  heap_->PreciseCollectAllGarbage(Heap::kNoGCFlags,
                                  GarbageCollectionReason::kHeapProfiler);

  // Detach the current context while snapshotting.
  NullContextScope null_context_scope(Isolate::FromHeap(heap_));

#ifdef VERIFY_HEAP
  Heap* debug_heap = heap_;
  if (FLAG_verify_heap) {
    debug_heap->Verify();
  }
#endif

  InitProgressCounter();

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    debug_heap->Verify();
  }
#endif

  // Root / "(GC roots)" / subroot entries must exist before FillReferences
  // attaches edges to them.
  snapshot_->AddSyntheticRootEntries();

  if (!FillReferences()) return false;

  snapshot_->FillChildren();
  snapshot_->RememberLastJSObjectId();

  // Force a final 100% progress report.
  progress_counter_ = progress_total_;
  if (!ProgressReport(true)) return false;
  return true;
}
2011 | |
2012 | void HeapSnapshotGenerator::ProgressStep() { |
2013 | ++progress_counter_; |
2014 | } |
2015 | |
2016 | bool HeapSnapshotGenerator::ProgressReport(bool force) { |
2017 | const int kProgressReportGranularity = 10000; |
2018 | if (control_ != nullptr && |
2019 | (force || progress_counter_ % kProgressReportGranularity == 0)) { |
2020 | return control_->ReportProgressValue(progress_counter_, progress_total_) == |
2021 | v8::ActivityControl::kContinue; |
2022 | } |
2023 | return true; |
2024 | } |
2025 | |
// Estimates the total amount of work so intermediate reports are meaningful.
void HeapSnapshotGenerator::InitProgressCounter() {
  if (control_ == nullptr) return;
  // The +1 ensures that intermediate ProgressReport calls will never signal
  // that the work is finished (i.e. progress_counter_ == progress_total_).
  // Only the forced ProgressReport() at the end of GenerateSnapshot()
  // should signal that the work is finished because signalling finished twice
  // breaks the DevTools frontend.
  progress_total_ = v8_heap_explorer_.EstimateObjectsCount() + 1;
  progress_counter_ = 0;
}
2036 | |
2037 | bool HeapSnapshotGenerator::FillReferences() { |
2038 | return v8_heap_explorer_.IterateAndExtractReferences(this) && |
2039 | dom_explorer_.IterateAndExtractReferences(this); |
2040 | } |
2041 | |
// Maximum number of decimal characters (including a possible minus sign)
// needed to print an integer of the given byte width.
template<int bytes> struct MaxDecimalDigitsIn;
template<> struct MaxDecimalDigitsIn<4> {
  static const int kSigned = 11;    // "-2147483648"
  static const int kUnsigned = 10;  // "4294967295"
};
template<> struct MaxDecimalDigitsIn<8> {
  static const int kSigned = 20;    // "-9223372036854775808"
  static const int kUnsigned = 20;  // "18446744073709551615"
};
2051 | |
// Buffers serializer output and forwards it to a v8::OutputStream in chunks
// of the stream's preferred size. Once the stream requests an abort, all
// further output is silently dropped.
class OutputStreamWriter {
 public:
  explicit OutputStreamWriter(v8::OutputStream* stream)
      : stream_(stream),
        chunk_size_(stream->GetChunkSize()),
        chunk_(chunk_size_),
        chunk_pos_(0),
        aborted_(false) {
    DCHECK_GT(chunk_size_, 0);
  }
  bool aborted() { return aborted_; }
  void AddCharacter(char c) {
    DCHECK_NE(c, '\0');
    DCHECK(chunk_pos_ < chunk_size_);
    chunk_[chunk_pos_++] = c;
    MaybeWriteChunk();
  }
  void AddString(const char* s) {
    AddSubstring(s, StrLength(s));
  }
  // Appends the first |n| bytes of |s|, flushing full chunks as it goes.
  void AddSubstring(const char* s, int n) {
    if (n <= 0) return;
    DCHECK(static_cast<size_t>(n) <= strlen(s));
    const char* s_end = s + n;
    while (s < s_end) {
      // Copy as much as fits in the current chunk.
      int s_chunk_size =
          Min(chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
      DCHECK_GT(s_chunk_size, 0);
      MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size);
      s += s_chunk_size;
      chunk_pos_ += s_chunk_size;
      MaybeWriteChunk();
    }
  }
  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u" ); }
  // Flushes any buffered output and signals end-of-stream (unless aborted).
  void Finalize() {
    if (aborted_) return;
    DCHECK(chunk_pos_ < chunk_size_);
    if (chunk_pos_ != 0) {
      WriteChunk();
    }
    stream_->EndOfStream();
  }

 private:
  template<typename T>
  void AddNumberImpl(T n, const char* format) {
    // Buffer for the longest value plus trailing \0
    static const int kMaxNumberSize =
        MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
    if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
      // Fast path: print straight into the chunk buffer.
      int result = SNPrintF(
          chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
      DCHECK_NE(result, -1);
      chunk_pos_ += result;
      MaybeWriteChunk();
    } else {
      // Slow path: print into a stack buffer, then copy (may span chunks).
      EmbeddedVector<char, kMaxNumberSize> buffer;
      int result = SNPrintF(buffer, format, n);
      USE(result);
      DCHECK_NE(result, -1);
      AddString(buffer.start());
    }
  }
  void MaybeWriteChunk() {
    DCHECK(chunk_pos_ <= chunk_size_);
    if (chunk_pos_ == chunk_size_) {
      WriteChunk();
    }
  }
  void WriteChunk() {
    if (aborted_) return;
    // A kAbort response disables all subsequent writes.
    if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
        v8::OutputStream::kAbort) aborted_ = true;
    chunk_pos_ = 0;
  }

  v8::OutputStream* stream_;
  int chunk_size_;
  ScopedVector<char> chunk_;
  int chunk_pos_;
  bool aborted_;
};
2135 | |
2136 | |
// Number of array slots one serialized edge occupies:
// type, name|index, to_node.
const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
// Number of array slots one serialized node occupies:
// type, name, id, self_size, edge_count, trace_node_id.
const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 6;
2141 | |
// Entry point: writes the whole snapshot as JSON to |stream|. The |writer_|
// member is only non-null for the duration of this call.
void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
  if (AllocationTracker* allocation_tracker =
      snapshot_->profiler()->allocation_tracker()) {
    // Let the allocation tracker flush pending data before we serialize it.
    allocation_tracker->PrepareForSerialization();
  }
  DCHECK_NULL(writer_);
  writer_ = new OutputStreamWriter(stream);
  SerializeImpl();
  delete writer_;
  writer_ = nullptr;
}
2153 | |
2154 | |
2155 | void HeapSnapshotJSONSerializer::SerializeImpl() { |
2156 | DCHECK_EQ(0, snapshot_->root()->index()); |
2157 | writer_->AddCharacter('{'); |
2158 | writer_->AddString("\"snapshot\":{" ); |
2159 | SerializeSnapshot(); |
2160 | if (writer_->aborted()) return; |
2161 | writer_->AddString("},\n" ); |
2162 | writer_->AddString("\"nodes\":[" ); |
2163 | SerializeNodes(); |
2164 | if (writer_->aborted()) return; |
2165 | writer_->AddString("],\n" ); |
2166 | writer_->AddString("\"edges\":[" ); |
2167 | SerializeEdges(); |
2168 | if (writer_->aborted()) return; |
2169 | writer_->AddString("],\n" ); |
2170 | |
2171 | writer_->AddString("\"trace_function_infos\":[" ); |
2172 | SerializeTraceNodeInfos(); |
2173 | if (writer_->aborted()) return; |
2174 | writer_->AddString("],\n" ); |
2175 | writer_->AddString("\"trace_tree\":[" ); |
2176 | SerializeTraceTree(); |
2177 | if (writer_->aborted()) return; |
2178 | writer_->AddString("],\n" ); |
2179 | |
2180 | writer_->AddString("\"samples\":[" ); |
2181 | SerializeSamples(); |
2182 | if (writer_->aborted()) return; |
2183 | writer_->AddString("],\n" ); |
2184 | |
2185 | writer_->AddString("\"locations\":[" ); |
2186 | SerializeLocations(); |
2187 | if (writer_->aborted()) return; |
2188 | writer_->AddString("],\n" ); |
2189 | |
2190 | writer_->AddString("\"strings\":[" ); |
2191 | SerializeStrings(); |
2192 | if (writer_->aborted()) return; |
2193 | writer_->AddCharacter(']'); |
2194 | writer_->AddCharacter('}'); |
2195 | writer_->Finalize(); |
2196 | } |
2197 | |
2198 | |
2199 | int HeapSnapshotJSONSerializer::GetStringId(const char* s) { |
2200 | base::HashMap::Entry* cache_entry = |
2201 | strings_.LookupOrInsert(const_cast<char*>(s), StringHash(s)); |
2202 | if (cache_entry->value == nullptr) { |
2203 | cache_entry->value = reinterpret_cast<void*>(next_string_id_++); |
2204 | } |
2205 | return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value)); |
2206 | } |
2207 | |
2208 | |
namespace {

// Maps a byte width to the unsigned integer type of that width; used by
// utoa() to reinterpret values for unsigned decimal printing.
template<size_t size> struct ToUnsigned;

template<> struct ToUnsigned<4> {
  typedef uint32_t Type;
};

template<> struct ToUnsigned<8> {
  typedef uint64_t Type;
};

}  // namespace
2222 | |
2223 | |
2224 | template<typename T> |
2225 | static int utoa_impl(T value, const Vector<char>& buffer, int buffer_pos) { |
2226 | STATIC_ASSERT(static_cast<T>(-1) > 0); // Check that T is unsigned |
2227 | int number_of_digits = 0; |
2228 | T t = value; |
2229 | do { |
2230 | ++number_of_digits; |
2231 | } while (t /= 10); |
2232 | |
2233 | buffer_pos += number_of_digits; |
2234 | int result = buffer_pos; |
2235 | do { |
2236 | int last_digit = static_cast<int>(value % 10); |
2237 | buffer[--buffer_pos] = '0' + last_digit; |
2238 | value /= 10; |
2239 | } while (value); |
2240 | return result; |
2241 | } |
2242 | |
2243 | |
// Prints |value| as unsigned decimal (reinterpreting its bit pattern as the
// same-width unsigned type) and returns the position past the last digit.
template<typename T>
static int utoa(T value, const Vector<char>& buffer, int buffer_pos) {
  typename ToUnsigned<sizeof(value)>::Type unsigned_value = value;
  STATIC_ASSERT(sizeof(value) == sizeof(unsigned_value));
  return utoa_impl(unsigned_value, buffer, buffer_pos);
}
2250 | |
2251 | |
// Writes one edge as "type,name_or_index,to_node\n", with a leading comma
// between consecutive edges.
void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
                                               bool first_edge) {
  // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
  static const int kBufferSize =
      MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2;  // NOLINT
  EmbeddedVector<char, kBufferSize> buffer;
  // Element and hidden edges are keyed by numeric index; all other edge
  // kinds reference an interned string id.
  int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
      || edge->type() == HeapGraphEdge::kHidden
      ? edge->index() : GetStringId(edge->name());
  int buffer_pos = 0;
  if (!first_edge) {
    buffer[buffer_pos++] = ',';
  }
  buffer_pos = utoa(edge->type(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(to_node_index(edge->to()), buffer, buffer_pos);
  buffer[buffer_pos++] = '\n';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());
}
2274 | |
2275 | void HeapSnapshotJSONSerializer::SerializeEdges() { |
2276 | std::vector<HeapGraphEdge*>& edges = snapshot_->children(); |
2277 | for (size_t i = 0; i < edges.size(); ++i) { |
2278 | DCHECK(i == 0 || |
2279 | edges[i - 1]->from()->index() <= edges[i]->from()->index()); |
2280 | SerializeEdge(edges[i], i == 0); |
2281 | if (writer_->aborted()) return; |
2282 | } |
2283 | } |
2284 | |
// Writes one node as "type,name,id,self_size,edge_count,trace_node_id\n",
// with a leading comma between consecutive nodes.
void HeapSnapshotJSONSerializer::SerializeNode(const HeapEntry* entry) {
  // The buffer needs space for 4 unsigned ints, 1 size_t, 5 commas, \n and \0
  static const int kBufferSize =
      5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
      + MaxDecimalDigitsIn<sizeof(size_t)>::kUnsigned      // NOLINT
      + 6 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  int buffer_pos = 0;
  // Only the very first node (index 0, the root) omits the leading comma.
  if (to_node_index(entry) != 0) {
    buffer[buffer_pos++] = ',';
  }
  buffer_pos = utoa(entry->type(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->id(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->trace_node_id(), buffer, buffer_pos);
  buffer[buffer_pos++] = '\n';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());
}
2311 | |
2312 | void HeapSnapshotJSONSerializer::SerializeNodes() { |
2313 | const std::deque<HeapEntry>& entries = snapshot_->entries(); |
2314 | for (const HeapEntry& entry : entries) { |
2315 | SerializeNode(&entry); |
2316 | if (writer_->aborted()) return; |
2317 | } |
2318 | } |
2319 | |
// Emits the snapshot's "meta" section — a self-describing schema for the
// flat numeric arrays ("nodes", "edges", ...) produced by the other
// Serialize* methods — followed by the top-level element counts.
void HeapSnapshotJSONSerializer::SerializeSnapshot() {
  writer_->AddString("\"meta\":" );
  // The object describing node serialization layout.
  // We use a set of macros to improve readability.

  // clang-format off
#define JSON_A(s) "[" s "]"
#define JSON_O(s) "{" s "}"
#define JSON_S(s) "\"" s "\""
  writer_->AddString(JSON_O(
    JSON_S("node_fields" ) ":" JSON_A(
        JSON_S("type" ) ","
        JSON_S("name" ) ","
        JSON_S("id" ) ","
        JSON_S("self_size" ) ","
        JSON_S("edge_count" ) ","
        JSON_S("trace_node_id" )) ","
    JSON_S("node_types" ) ":" JSON_A(
        JSON_A(
            JSON_S("hidden" ) ","
            JSON_S("array" ) ","
            JSON_S("string" ) ","
            JSON_S("object" ) ","
            JSON_S("code" ) ","
            JSON_S("closure" ) ","
            JSON_S("regexp" ) ","
            JSON_S("number" ) ","
            JSON_S("native" ) ","
            JSON_S("synthetic" ) ","
            JSON_S("concatenated string" ) ","
            JSON_S("sliced string" ) ","
            JSON_S("symbol" ) ","
            JSON_S("bigint" )) ","
        JSON_S("string" ) ","
        JSON_S("number" ) ","
        JSON_S("number" ) ","
        JSON_S("number" ) ","
        JSON_S("number" ) ","
        JSON_S("number" )) ","
    JSON_S("edge_fields" ) ":" JSON_A(
        JSON_S("type" ) ","
        JSON_S("name_or_index" ) ","
        JSON_S("to_node" )) ","
    JSON_S("edge_types" ) ":" JSON_A(
        JSON_A(
            JSON_S("context" ) ","
            JSON_S("element" ) ","
            JSON_S("property" ) ","
            JSON_S("internal" ) ","
            JSON_S("hidden" ) ","
            JSON_S("shortcut" ) ","
            JSON_S("weak" )) ","
        JSON_S("string_or_number" ) ","
        JSON_S("node" )) ","
    JSON_S("trace_function_info_fields" ) ":" JSON_A(
        JSON_S("function_id" ) ","
        JSON_S("name" ) ","
        JSON_S("script_name" ) ","
        JSON_S("script_id" ) ","
        JSON_S("line" ) ","
        JSON_S("column" )) ","
    JSON_S("trace_node_fields" ) ":" JSON_A(
        JSON_S("id" ) ","
        JSON_S("function_info_index" ) ","
        JSON_S("count" ) ","
        JSON_S("size" ) ","
        JSON_S("children" )) ","
    JSON_S("sample_fields" ) ":" JSON_A(
        JSON_S("timestamp_us" ) ","
        JSON_S("last_assigned_id" )) ","
    JSON_S("location_fields" ) ":" JSON_A(
        JSON_S("object_index" ) ","
        JSON_S("script_id" ) ","
        JSON_S("line" ) ","
        JSON_S("column" ))));
  // clang-format on
#undef JSON_S
#undef JSON_O
#undef JSON_A
  writer_->AddString(",\"node_count\":" );
  writer_->AddNumber(static_cast<unsigned>(snapshot_->entries().size()));
  writer_->AddString(",\"edge_count\":" );
  // NOTE(review): the edge count goes through the double overload of
  // AddNumber while the node count uses unsigned — presumably intentional
  // (edge counts can exceed the node count); confirm before unifying.
  writer_->AddNumber(static_cast<double>(snapshot_->edges().size()));
  writer_->AddString(",\"trace_function_count\":" );
  // Allocation tracking is optional; report zero trace functions when the
  // tracker is not enabled.
  uint32_t count = 0;
  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
  if (tracker) {
    count = static_cast<uint32_t>(tracker->function_info_list().size());
  }
  writer_->AddNumber(count);
}
2411 | |
2412 | |
2413 | static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) { |
2414 | static const char hex_chars[] = "0123456789ABCDEF" ; |
2415 | w->AddString("\\u" ); |
2416 | w->AddCharacter(hex_chars[(u >> 12) & 0xF]); |
2417 | w->AddCharacter(hex_chars[(u >> 8) & 0xF]); |
2418 | w->AddCharacter(hex_chars[(u >> 4) & 0xF]); |
2419 | w->AddCharacter(hex_chars[u & 0xF]); |
2420 | } |
2421 | |
2422 | |
2423 | void HeapSnapshotJSONSerializer::SerializeTraceTree() { |
2424 | AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker(); |
2425 | if (!tracker) return; |
2426 | AllocationTraceTree* traces = tracker->trace_tree(); |
2427 | SerializeTraceNode(traces->root()); |
2428 | } |
2429 | |
2430 | |
2431 | void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) { |
2432 | // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0 |
2433 | const int kBufferSize = |
2434 | 4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT |
2435 | + 4 + 1 + 1; |
2436 | EmbeddedVector<char, kBufferSize> buffer; |
2437 | int buffer_pos = 0; |
2438 | buffer_pos = utoa(node->id(), buffer, buffer_pos); |
2439 | buffer[buffer_pos++] = ','; |
2440 | buffer_pos = utoa(node->function_info_index(), buffer, buffer_pos); |
2441 | buffer[buffer_pos++] = ','; |
2442 | buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos); |
2443 | buffer[buffer_pos++] = ','; |
2444 | buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos); |
2445 | buffer[buffer_pos++] = ','; |
2446 | buffer[buffer_pos++] = '['; |
2447 | buffer[buffer_pos++] = '\0'; |
2448 | writer_->AddString(buffer.start()); |
2449 | |
2450 | int i = 0; |
2451 | for (AllocationTraceNode* child : node->children()) { |
2452 | if (i++ > 0) { |
2453 | writer_->AddCharacter(','); |
2454 | } |
2455 | SerializeTraceNode(child); |
2456 | } |
2457 | writer_->AddCharacter(']'); |
2458 | } |
2459 | |
2460 | |
2461 | // 0-based position is converted to 1-based during the serialization. |
2462 | static int SerializePosition(int position, const Vector<char>& buffer, |
2463 | int buffer_pos) { |
2464 | if (position == -1) { |
2465 | buffer[buffer_pos++] = '0'; |
2466 | } else { |
2467 | DCHECK_GE(position, 0); |
2468 | buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos); |
2469 | } |
2470 | return buffer_pos; |
2471 | } |
2472 | |
2473 | |
2474 | void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() { |
2475 | AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker(); |
2476 | if (!tracker) return; |
2477 | // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0 |
2478 | const int kBufferSize = |
2479 | 6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT |
2480 | + 6 + 1 + 1; |
2481 | EmbeddedVector<char, kBufferSize> buffer; |
2482 | int i = 0; |
2483 | for (AllocationTracker::FunctionInfo* info : tracker->function_info_list()) { |
2484 | int buffer_pos = 0; |
2485 | if (i++ > 0) { |
2486 | buffer[buffer_pos++] = ','; |
2487 | } |
2488 | buffer_pos = utoa(info->function_id, buffer, buffer_pos); |
2489 | buffer[buffer_pos++] = ','; |
2490 | buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos); |
2491 | buffer[buffer_pos++] = ','; |
2492 | buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos); |
2493 | buffer[buffer_pos++] = ','; |
2494 | // The cast is safe because script id is a non-negative Smi. |
2495 | buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer, |
2496 | buffer_pos); |
2497 | buffer[buffer_pos++] = ','; |
2498 | buffer_pos = SerializePosition(info->line, buffer, buffer_pos); |
2499 | buffer[buffer_pos++] = ','; |
2500 | buffer_pos = SerializePosition(info->column, buffer, buffer_pos); |
2501 | buffer[buffer_pos++] = '\n'; |
2502 | buffer[buffer_pos++] = '\0'; |
2503 | writer_->AddString(buffer.start()); |
2504 | } |
2505 | } |
2506 | |
2507 | |
2508 | void HeapSnapshotJSONSerializer::SerializeSamples() { |
2509 | const std::vector<HeapObjectsMap::TimeInterval>& samples = |
2510 | snapshot_->profiler()->heap_object_map()->samples(); |
2511 | if (samples.empty()) return; |
2512 | base::TimeTicks start_time = samples[0].timestamp; |
2513 | // The buffer needs space for 2 unsigned ints, 2 commas, \n and \0 |
2514 | const int kBufferSize = MaxDecimalDigitsIn<sizeof( |
2515 | base::TimeDelta().InMicroseconds())>::kUnsigned + |
2516 | MaxDecimalDigitsIn<sizeof(samples[0].id)>::kUnsigned + |
2517 | 2 + 1 + 1; |
2518 | EmbeddedVector<char, kBufferSize> buffer; |
2519 | int i = 0; |
2520 | for (const HeapObjectsMap::TimeInterval& sample : samples) { |
2521 | int buffer_pos = 0; |
2522 | if (i++ > 0) { |
2523 | buffer[buffer_pos++] = ','; |
2524 | } |
2525 | base::TimeDelta time_delta = sample.timestamp - start_time; |
2526 | buffer_pos = utoa(time_delta.InMicroseconds(), buffer, buffer_pos); |
2527 | buffer[buffer_pos++] = ','; |
2528 | buffer_pos = utoa(sample.last_assigned_id(), buffer, buffer_pos); |
2529 | buffer[buffer_pos++] = '\n'; |
2530 | buffer[buffer_pos++] = '\0'; |
2531 | writer_->AddString(buffer.start()); |
2532 | } |
2533 | } |
2534 | |
2535 | |
// Writes |s| as a double-quoted JSON string literal: dedicated escapes for
// the common control characters, \uXXXX escapes for the rest, and UTF-8
// multi-byte sequences transcoded into \uXXXX escapes.
void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
  writer_->AddCharacter('\n');
  writer_->AddCharacter('\"');
  for ( ; *s != '\0'; ++s) {
    switch (*s) {
      case '\b':
        writer_->AddString("\\b" );
        continue;
      case '\f':
        writer_->AddString("\\f" );
        continue;
      case '\n':
        writer_->AddString("\\n" );
        continue;
      case '\r':
        writer_->AddString("\\r" );
        continue;
      case '\t':
        writer_->AddString("\\t" );
        continue;
      case '\"':
      case '\\':
        // Quote and backslash only need a preceding backslash.
        writer_->AddCharacter('\\');
        writer_->AddCharacter(*s);
        continue;
      default:
        if (*s > 31 && *s < 128) {
          // Printable ASCII is emitted verbatim.
          writer_->AddCharacter(*s);
        } else if (*s <= 31) {
          // Special character with no dedicated literal.
          WriteUChar(writer_, *s);
        } else {
          // Convert UTF-8 into \u UTF-16 literal.
          // Determine how many bytes (at most 4, bounded by the string end)
          // are available for the multi-byte sequence starting at s.
          size_t length = 1, cursor = 0;
          for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
          unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
          if (c != unibrow::Utf8::kBadChar) {
            // NOTE(review): WriteUChar emits only four hex digits, so a code
            // point above U+FFFF would be truncated rather than written as a
            // surrogate pair — presumably inputs stay within the BMP; confirm.
            WriteUChar(writer_, c);
            DCHECK_NE(cursor, 0);
            // cursor is the number of bytes consumed; the loop's ++s
            // accounts for one of them.
            s += cursor - 1;
          } else {
            // Invalid UTF-8; substitute a placeholder character.
            writer_->AddCharacter('?');
          }
        }
    }
  }
  writer_->AddCharacter('\"');
}
2584 | |
2585 | |
2586 | void HeapSnapshotJSONSerializer::SerializeStrings() { |
2587 | ScopedVector<const unsigned char*> sorted_strings( |
2588 | strings_.occupancy() + 1); |
2589 | for (base::HashMap::Entry* entry = strings_.Start(); entry != nullptr; |
2590 | entry = strings_.Next(entry)) { |
2591 | int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value)); |
2592 | sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key); |
2593 | } |
2594 | writer_->AddString("\"<dummy>\"" ); |
2595 | for (int i = 1; i < sorted_strings.length(); ++i) { |
2596 | writer_->AddCharacter(','); |
2597 | SerializeString(sorted_strings[i]); |
2598 | if (writer_->aborted()) return; |
2599 | } |
2600 | } |
2601 | |
2602 | void HeapSnapshotJSONSerializer::SerializeLocation( |
2603 | const SourceLocation& location) { |
2604 | // The buffer needs space for 4 unsigned ints, 3 commas, \n and \0 |
2605 | static const int kBufferSize = |
2606 | MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 4 + 3 + 2; |
2607 | EmbeddedVector<char, kBufferSize> buffer; |
2608 | int buffer_pos = 0; |
2609 | buffer_pos = utoa(to_node_index(location.entry_index), buffer, buffer_pos); |
2610 | buffer[buffer_pos++] = ','; |
2611 | buffer_pos = utoa(location.scriptId, buffer, buffer_pos); |
2612 | buffer[buffer_pos++] = ','; |
2613 | buffer_pos = utoa(location.line, buffer, buffer_pos); |
2614 | buffer[buffer_pos++] = ','; |
2615 | buffer_pos = utoa(location.col, buffer, buffer_pos); |
2616 | buffer[buffer_pos++] = '\n'; |
2617 | buffer[buffer_pos++] = '\0'; |
2618 | writer_->AddString(buffer.start()); |
2619 | } |
2620 | |
2621 | void HeapSnapshotJSONSerializer::SerializeLocations() { |
2622 | const std::vector<SourceLocation>& locations = snapshot_->locations(); |
2623 | for (size_t i = 0; i < locations.size(); i++) { |
2624 | if (i > 0) writer_->AddCharacter(','); |
2625 | SerializeLocation(locations[i]); |
2626 | if (writer_->aborted()) return; |
2627 | } |
2628 | } |
2629 | |
2630 | } // namespace internal |
2631 | } // namespace v8 |
2632 | |