// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/debug/debug-coverage.h"

#include "src/ast/ast.h"
#include "src/base/hashmap.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/frames-inl.h"
#include "src/isolate.h"
#include "src/objects.h"
#include "src/objects/debug-objects-inl.h"

namespace v8 {
namespace internal {

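// Maps SharedFunctionInfo instances to accumulated invocation counts. Add()
// saturates at UINT32_MAX instead of wrapping, and no heap allocation may
// happen while an instance is alive (see the DisallowHeapAllocation member).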
class SharedToCounterMap
    : public base::TemplateHashMapImpl<SharedFunctionInfo, uint32_t,
                                       base::KeyEqualityMatcher<Object>,
                                       base::DefaultAllocationPolicy> {
 public:
  using Entry = base::TemplateHashMapEntry<SharedFunctionInfo, uint32_t>;
  inline void Add(SharedFunctionInfo key, uint32_t count) {
    Entry* entry = LookupOrInsert(key, Hash(key), []() { return 0; });
    uint32_t old_count = entry->value;
    if (UINT32_MAX - count < old_count) {
      entry->value = UINT32_MAX;
    } else {
      entry->value = old_count + count;
    }
  }

  inline uint32_t Get(SharedFunctionInfo key) {
    Entry* entry = Lookup(key, Hash(key));
    if (entry == nullptr) return 0;
    return entry->value;
  }

 private:
  static uint32_t Hash(SharedFunctionInfo key) {
    return static_cast<uint32_t>(key.ptr());
  }

  DisallowHeapAllocation no_gc;
};

namespace {
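// Returns the function token position if available, falling back to the
// shared function info's start position otherwise.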
int StartPosition(SharedFunctionInfo info) {
  int start = info->function_token_position();
  if (start == kNoSourcePosition) start = info->StartPosition();
  return start;
}

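// Sorts by ascending start position; for identical start positions the
// enclosing (larger) range comes first, so outer functions precede inner ones.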
bool CompareSharedFunctionInfo(SharedFunctionInfo a, SharedFunctionInfo b) {
  int a_start = StartPosition(a);
  int b_start = StartPosition(b);
  if (a_start == b_start) return a->EndPosition() > b->EndPosition();
  return a_start < b_start;
}

bool CompareCoverageBlock(const CoverageBlock& a, const CoverageBlock& b) {
  DCHECK_NE(kNoSourcePosition, a.start);
  DCHECK_NE(kNoSourcePosition, b.start);
  if (a.start == b.start) return a.end > b.end;
  return a.start < b.start;
}

void SortBlockData(std::vector<CoverageBlock>& v) {
  // Sort according to the block nesting structure.
  std::sort(v.begin(), v.end(), CompareCoverageBlock);
}

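// Reads the raw (start, end, count) slots from the function's CoverageInfo
// and returns them sorted into nesting order (outer blocks first).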
std::vector<CoverageBlock> GetSortedBlockData(SharedFunctionInfo shared) {
  DCHECK(shared->HasCoverageInfo());

  CoverageInfo coverage_info =
      CoverageInfo::cast(shared->GetDebugInfo()->coverage_info());

  std::vector<CoverageBlock> result;
  if (coverage_info->SlotCount() == 0) return result;

  for (int i = 0; i < coverage_info->SlotCount(); i++) {
    const int start_pos = coverage_info->StartSourcePosition(i);
    const int until_pos = coverage_info->EndSourcePosition(i);
    const int count = coverage_info->BlockCount(i);

    DCHECK_NE(kNoSourcePosition, start_pos);
    result.emplace_back(start_pos, until_pos, count);
  }

  SortBlockData(result);

  return result;
}

// A utility class to simplify logic for performing passes over block coverage
// ranges. Provides access to the implicit tree structure of ranges (i.e.
// access to parent and sibling blocks), and supports efficient in-place
// editing and deletion. The underlying backing store is the array of
// CoverageBlocks stored on the CoverageFunction.
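//
// A typical filtering pass looks roughly like this (ShouldDrop is a
// hypothetical predicate, not part of this file):
//
//   CoverageBlockIterator iter(function);
//   while (iter.Next()) {
//     CoverageBlock& block = iter.GetBlock();
//     if (ShouldDrop(block, iter.GetParent())) iter.DeleteBlock();
//   }
//
// Deleted blocks are compacted out of the backing vector when the iterator
// is finalized (see Finalize()).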
class CoverageBlockIterator final {
 public:
  explicit CoverageBlockIterator(CoverageFunction* function)
      : function_(function),
        ended_(false),
        delete_current_(false),
        read_index_(-1),
        write_index_(-1) {
    DCHECK(std::is_sorted(function_->blocks.begin(), function_->blocks.end(),
                          CompareCoverageBlock));
  }

  ~CoverageBlockIterator() {
    Finalize();
    DCHECK(std::is_sorted(function_->blocks.begin(), function_->blocks.end(),
                          CompareCoverageBlock));
  }

  bool HasNext() const {
    return read_index_ + 1 < static_cast<int>(function_->blocks.size());
  }

  bool Next() {
    if (!HasNext()) {
      if (!ended_) MaybeWriteCurrent();
      ended_ = true;
      return false;
    }

    // If a block has been deleted, subsequent iteration moves trailing blocks
    // to their updated position within the array.
    MaybeWriteCurrent();

    if (read_index_ == -1) {
      // Initialize the nesting stack with the function range.
      nesting_stack_.emplace_back(function_->start, function_->end,
                                  function_->count);
    } else if (!delete_current_) {
      nesting_stack_.emplace_back(GetBlock());
    }

    delete_current_ = false;
    read_index_++;

    DCHECK(IsActive());

    CoverageBlock& block = GetBlock();
    while (nesting_stack_.size() > 1 &&
           nesting_stack_.back().end <= block.start) {
      nesting_stack_.pop_back();
    }

    DCHECK_IMPLIES(block.start >= function_->end,
                   block.end == kNoSourcePosition);
    DCHECK_NE(block.start, kNoSourcePosition);
    DCHECK_LE(block.end, GetParent().end);

    return true;
  }

  CoverageBlock& GetBlock() {
    DCHECK(IsActive());
    return function_->blocks[read_index_];
  }

  CoverageBlock& GetNextBlock() {
    DCHECK(IsActive());
    DCHECK(HasNext());
    return function_->blocks[read_index_ + 1];
  }

  CoverageBlock& GetPreviousBlock() {
    DCHECK(IsActive());
    DCHECK_GT(read_index_, 0);
    return function_->blocks[read_index_ - 1];
  }

  CoverageBlock& GetParent() {
    DCHECK(IsActive());
    return nesting_stack_.back();
  }

  bool HasSiblingOrChild() {
    DCHECK(IsActive());
    return HasNext() && GetNextBlock().start < GetParent().end;
  }

  CoverageBlock& GetSiblingOrChild() {
    DCHECK(HasSiblingOrChild());
    DCHECK(IsActive());
    return GetNextBlock();
  }

  // A range is considered to be at top level if its parent range is the
  // function range.
  bool IsTopLevel() const { return nesting_stack_.size() == 1; }

  void DeleteBlock() {
    DCHECK(!delete_current_);
    DCHECK(IsActive());
    delete_current_ = true;
  }

 private:
  void MaybeWriteCurrent() {
    if (delete_current_) return;
    if (read_index_ >= 0 && write_index_ != read_index_) {
      function_->blocks[write_index_] = function_->blocks[read_index_];
    }
    write_index_++;
  }

  void Finalize() {
    while (Next()) {
      // Just iterate to the end.
    }
    function_->blocks.resize(write_index_);
  }

  bool IsActive() const { return read_index_ >= 0 && !ended_; }

  CoverageFunction* function_;
  std::vector<CoverageBlock> nesting_stack_;
  bool ended_;
  bool delete_current_;
  int read_index_;
  int write_index_;
};

bool HaveSameSourceRange(const CoverageBlock& lhs, const CoverageBlock& rhs) {
  return lhs.start == rhs.start && lhs.end == rhs.end;
}

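// Merges blocks that cover exactly the same source range into one block,
// keeping the maximum of their counts.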
void MergeDuplicateRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next() && iter.HasNext()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& next_block = iter.GetNextBlock();

    if (!HaveSameSourceRange(block, next_block)) continue;

    DCHECK_NE(kNoSourcePosition, block.end);  // Non-singleton range.
    next_block.count = std::max(block.count, next_block.count);
    iter.DeleteBlock();
  }
}

// Rewrite position singletons (produced by unconditional control flow
// like return statements, and by continuation counters) into source
// ranges that end at the next sibling range or the end of the parent
// range, whichever comes first.
void RewritePositionSingletonsToRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& parent = iter.GetParent();

    if (block.start >= function->end) {
      DCHECK_EQ(block.end, kNoSourcePosition);
      iter.DeleteBlock();
    } else if (block.end == kNoSourcePosition) {
      // The current block ends at the next sibling block (if it exists) or the
      // end of the parent block otherwise.
      if (iter.HasSiblingOrChild()) {
        block.end = iter.GetSiblingOrChild().start;
      } else if (iter.IsTopLevel()) {
        // See https://crbug.com/v8/6661. Functions are special-cased because
        // we never want the closing brace to be uncovered. This is mainly to
        // avoid a noisy UI.
        block.end = parent.end - 1;
      } else {
        block.end = parent.end;
      }
    }
  }
}

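// Merges adjacent blocks with identical counts: if the next block starts
// exactly where the current one ends and has the same count, the two are
// folded into a single range.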
void MergeConsecutiveRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();

    if (iter.HasSiblingOrChild()) {
      CoverageBlock& sibling = iter.GetSiblingOrChild();
      if (sibling.start == block.end && sibling.count == block.count) {
        // Best-effort: this pass may miss mergeable siblings in the presence
        // of child blocks.
        sibling.start = block.start;
        iter.DeleteBlock();
      }
    }
  }
}

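// Removes a nested block whose count equals its parent's count, since it
// carries no additional information.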
void MergeNestedRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& parent = iter.GetParent();

    if (parent.count == block.count) {
      // Transformation may not be valid if sibling blocks exist with a
      // differing count.
      iter.DeleteBlock();
    }
  }
}

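// Removes singleton blocks (no end position) that share their start position
// with the preceding full range; their counts are discarded. See the comment
// at the call site in CollectBlockCoverage for why this is necessary.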
void FilterAliasedSingletons(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  iter.Next();  // Advance once since we reference the previous block later.

  while (iter.Next()) {
    CoverageBlock& previous_block = iter.GetPreviousBlock();
    CoverageBlock& block = iter.GetBlock();

    bool is_singleton = block.end == kNoSourcePosition;
    bool aliases_start = block.start == previous_block.start;

    if (is_singleton && aliases_start) {
      // The previous block must have a full range since duplicate singletons
      // have already been merged.
      DCHECK_NE(previous_block.end, kNoSourcePosition);
      // Likewise, the next block must have another start position since
      // singletons are sorted to the end.
      DCHECK_IMPLIES(iter.HasNext(), iter.GetNextBlock().start != block.start);
      iter.DeleteBlock();
    }
  }
}

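// Drops uncovered blocks (count == 0) whose parent is also uncovered; such
// blocks add no information. Uncovered blocks inside covered parents are kept
// because they mark the uncovered sub-ranges.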
void FilterUncoveredRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& parent = iter.GetParent();
    if (block.count == 0 && parent.count == 0) iter.DeleteBlock();
  }
}

void FilterEmptyRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    if (block.start == block.end) iter.DeleteBlock();
  }
}

void ClampToBinary(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    if (block.count > 0) block.count = 1;
  }
}

void ResetAllBlockCounts(SharedFunctionInfo shared) {
  DCHECK(shared->HasCoverageInfo());

  CoverageInfo coverage_info =
      CoverageInfo::cast(shared->GetDebugInfo()->coverage_info());

  for (int i = 0; i < coverage_info->SlotCount(); i++) {
    coverage_info->ResetBlockCount(i);
  }
}

bool IsBlockMode(debug::CoverageMode mode) {
  switch (mode) {
    case debug::CoverageMode::kBlockBinary:
    case debug::CoverageMode::kBlockCount:
      return true;
    default:
      return false;
  }
}

bool IsBinaryMode(debug::CoverageMode mode) {
  switch (mode) {
    case debug::CoverageMode::kBlockBinary:
    case debug::CoverageMode::kPreciseBinary:
      return true;
    default:
      return false;
  }
}

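// Collects and normalizes the block coverage data for a single function:
// reads the raw counters from the CoverageInfo, rewrites and merges the
// ranges through the passes below, and finally resets the on-heap counters.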
void CollectBlockCoverage(CoverageFunction* function, SharedFunctionInfo info,
                          debug::CoverageMode mode) {
  DCHECK(IsBlockMode(mode));

  function->has_block_coverage = true;
  function->blocks = GetSortedBlockData(info);

  // If in binary mode, only report counts of 0/1.
  if (mode == debug::CoverageMode::kBlockBinary) ClampToBinary(function);

  // Remove singleton ranges with the same start position as a full range and
  // throw away their counts.
  // Singleton ranges are only intended to split existing full ranges and
  // should never expand into a full range. Consider 'if (cond) { ... } else
  // { ... }' as a problematic example; if the then-block produces a
  // continuation singleton, it would incorrectly expand into the else range.
  // For more context, see https://crbug.com/v8/8237.
  FilterAliasedSingletons(function);

  // Rewrite all singletons (created e.g. by continuations and unconditional
  // control flow) to ranges.
  RewritePositionSingletonsToRanges(function);

  // Merge nested and consecutive ranges with identical counts.
  // Note that it's necessary to merge duplicate ranges prior to merging nested
  // ranges in order to avoid invalid transformations. See crbug.com/827530.
  MergeConsecutiveRanges(function);

  SortBlockData(function->blocks);
  MergeDuplicateRanges(function);
  MergeNestedRanges(function);

  MergeConsecutiveRanges(function);

  // Filter out ranges with count == 0 unless the immediate parent range has
  // a count != 0.
  FilterUncoveredRanges(function);

  // Filter out ranges of zero length.
  FilterEmptyRanges(function);

  // Reset all counters on the DebugInfo to zero.
  ResetAllBlockCounts(info);
}
}  // anonymous namespace

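// Collects coverage using the isolate's currently selected (precise or block)
// coverage mode. In binary modes the rooted feedback vector list can be
// cleared afterwards, since invocations have already been reported (unless
// type profiling still needs the vectors).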
std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
  DCHECK(!isolate->is_best_effort_code_coverage());
  std::unique_ptr<Coverage> result =
      Collect(isolate, isolate->code_coverage_mode());
  if (!isolate->is_collecting_type_profile() &&
      (isolate->is_precise_binary_code_coverage() ||
       isolate->is_block_binary_code_coverage())) {
    // We do not have to hold onto feedback vectors for invocations we already
    // reported. So we can reset the list.
    isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
  }
  return result;
}

std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
  return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
}

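// Gathers invocation counts from feedback vectors (either the rooted list in
// precise/block modes, or a full heap walk in best-effort mode), then walks
// all user scripts and builds per-function coverage entries, reconstructing
// function nesting from the sorted start positions.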
std::unique_ptr<Coverage> Coverage::Collect(
    Isolate* isolate, v8::debug::CoverageMode collectionMode) {
  SharedToCounterMap counter_map;

  const bool reset_count =
      collectionMode != v8::debug::CoverageMode::kBestEffort;

  switch (isolate->code_coverage_mode()) {
    case v8::debug::CoverageMode::kBlockBinary:
    case v8::debug::CoverageMode::kBlockCount:
    case v8::debug::CoverageMode::kPreciseBinary:
    case v8::debug::CoverageMode::kPreciseCount: {
      // Feedback vectors are already listed to prevent losing them to GC.
      DCHECK(isolate->factory()
                 ->feedback_vectors_for_profiling_tools()
                 ->IsArrayList());
      Handle<ArrayList> list = Handle<ArrayList>::cast(
          isolate->factory()->feedback_vectors_for_profiling_tools());
      for (int i = 0; i < list->Length(); i++) {
        FeedbackVector vector = FeedbackVector::cast(list->Get(i));
        SharedFunctionInfo shared = vector->shared_function_info();
        DCHECK(shared->IsSubjectToDebugging());
        uint32_t count = static_cast<uint32_t>(vector->invocation_count());
        if (reset_count) vector->clear_invocation_count();
        counter_map.Add(shared, count);
      }
      break;
    }
    case v8::debug::CoverageMode::kBestEffort: {
      DCHECK(!isolate->factory()
                  ->feedback_vectors_for_profiling_tools()
                  ->IsArrayList());
      DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, collectionMode);
      HeapIterator heap_iterator(isolate->heap());
      for (HeapObject current_obj = heap_iterator.next();
           !current_obj.is_null(); current_obj = heap_iterator.next()) {
        if (!current_obj->IsFeedbackVector()) continue;
        FeedbackVector vector = FeedbackVector::cast(current_obj);
        SharedFunctionInfo shared = vector->shared_function_info();
        if (!shared->IsSubjectToDebugging()) continue;
        uint32_t count = static_cast<uint32_t>(vector->invocation_count());
        counter_map.Add(shared, count);
      }
      break;
    }
  }

  // Iterate shared function infos of every script and build a mapping
  // between source ranges and invocation counts.
  std::unique_ptr<Coverage> result(new Coverage());
  Script::Iterator scripts(isolate);
  for (Script script = scripts.Next(); !script.is_null();
       script = scripts.Next()) {
    if (!script->IsUserJavaScript()) continue;

    // Create and add new script data.
    Handle<Script> script_handle(script, isolate);
    result->emplace_back(script_handle);
    std::vector<CoverageFunction>* functions = &result->back().functions;

    std::vector<SharedFunctionInfo> sorted;

    {
      // Sort functions by start position, from outer to inner functions.
      SharedFunctionInfo::ScriptIterator infos(isolate, *script_handle);
      for (SharedFunctionInfo info = infos.Next(); !info.is_null();
           info = infos.Next()) {
        sorted.push_back(info);
      }
      std::sort(sorted.begin(), sorted.end(), CompareSharedFunctionInfo);
    }

    // Stack to track nested functions, referring function by index.
    std::vector<size_t> nesting;

    // Use sorted list to reconstruct function nesting.
    for (SharedFunctionInfo info : sorted) {
      int start = StartPosition(info);
      int end = info->EndPosition();
      uint32_t count = counter_map.Get(info);
      // Find the correct outer function based on start position.
      while (!nesting.empty() && functions->at(nesting.back()).end <= start) {
        nesting.pop_back();
      }
      if (count != 0) {
        switch (collectionMode) {
          case v8::debug::CoverageMode::kBlockCount:
          case v8::debug::CoverageMode::kPreciseCount:
            break;
          case v8::debug::CoverageMode::kBlockBinary:
          case v8::debug::CoverageMode::kPreciseBinary:
            count = info->has_reported_binary_coverage() ? 0 : 1;
            info->set_has_reported_binary_coverage(true);
            break;
          case v8::debug::CoverageMode::kBestEffort:
            count = 1;
            break;
        }
      }

      Handle<String> name(info->DebugName(), isolate);
      CoverageFunction function(start, end, count, name);

      if (IsBlockMode(collectionMode) && info->HasCoverageInfo()) {
        CollectBlockCoverage(&function, info, collectionMode);
      }

      // Only include a function range if it or its parent function is
      // covered, or if it contains non-trivial block coverage.
      bool is_covered = (count != 0);
      bool parent_is_covered =
          (!nesting.empty() && functions->at(nesting.back()).count != 0);
      bool has_block_coverage = !function.blocks.empty();
      if (is_covered || parent_is_covered || has_block_coverage) {
        nesting.push_back(functions->size());
        functions->emplace_back(function);
      }
    }

    // Remove entries for scripts that have no coverage.
    if (functions->empty()) result->pop_back();
  }
  return result;
}

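// Switches the isolate to the given coverage mode. Entering a precise or
// block mode deoptimizes all code (optimized code does not bump invocation
// counts), roots all feedback vectors, and clears previously collected
// counts; switching back to best-effort releases the coverage infos again.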
void Coverage::SelectMode(Isolate* isolate, debug::CoverageMode mode) {
  switch (mode) {
    case debug::CoverageMode::kBestEffort:
      // Note that DevTools switches back to best-effort coverage once the
      // recording is stopped. Since we delete coverage infos at that point,
      // any following coverage recording (without reloads) will be at
      // function granularity.
      isolate->debug()->RemoveAllCoverageInfos();
      if (!isolate->is_collecting_type_profile()) {
        isolate->SetFeedbackVectorsForProfilingTools(
            ReadOnlyRoots(isolate).undefined_value());
      }
      break;
    case debug::CoverageMode::kBlockBinary:
    case debug::CoverageMode::kBlockCount:
    case debug::CoverageMode::kPreciseBinary:
    case debug::CoverageMode::kPreciseCount: {
      HandleScope scope(isolate);

      // Remove all optimized functions. Optimized and inlined functions do
      // not increment the invocation count.
      Deoptimizer::DeoptimizeAll(isolate);

      // Root all feedback vectors to avoid early collection.
      isolate->MaybeInitializeVectorListFromHeap();

      HeapIterator heap_iterator(isolate->heap());
      for (HeapObject o = heap_iterator.next(); !o.is_null();
           o = heap_iterator.next()) {
        if (IsBinaryMode(mode) && o->IsSharedFunctionInfo()) {
          // If collecting binary coverage, reset
          // SFI::has_reported_binary_coverage to avoid optimizing / inlining
          // functions before they have reported coverage.
          SharedFunctionInfo shared = SharedFunctionInfo::cast(o);
          shared->set_has_reported_binary_coverage(false);
        } else if (o->IsFeedbackVector()) {
          // In any case, clear any collected invocation counts.
          FeedbackVector::cast(o)->clear_invocation_count();
        }
      }

      break;
    }
  }
  isolate->set_code_coverage_mode(mode);
}

}  // namespace internal
}  // namespace v8