Diffstat (limited to 'cli/tools/coverage')
-rw-r--r--  cli/tools/coverage/json_types.rs    58
-rw-r--r--  cli/tools/coverage/merge.rs        840
-rw-r--r--  cli/tools/coverage/mod.rs          652
-rw-r--r--  cli/tools/coverage/range_tree.rs   207
4 files changed, 1757 insertions, 0 deletions
diff --git a/cli/tools/coverage/json_types.rs b/cli/tools/coverage/json_types.rs
new file mode 100644
index 000000000..1e46cc7fd
--- /dev/null
+++ b/cli/tools/coverage/json_types.rs
@@ -0,0 +1,58 @@
+// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
+
+use serde::Deserialize;
+use serde::Serialize;
+
+#[derive(Debug, Eq, PartialEq, Serialize, Deserialize, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct CoverageRange {
+ /// Start byte index.
+ pub start_offset: usize,
+ /// End byte index.
+ pub end_offset: usize,
+ pub count: i64,
+}
+
+#[derive(Debug, Eq, PartialEq, Serialize, Deserialize, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct FunctionCoverage {
+ pub function_name: String,
+ pub ranges: Vec<CoverageRange>,
+ pub is_block_coverage: bool,
+}
+
+#[derive(Debug, Eq, PartialEq, Serialize, Deserialize, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct ScriptCoverage {
+ pub script_id: String,
+ pub url: String,
+ pub functions: Vec<FunctionCoverage>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct StartPreciseCoverageParameters {
+ pub call_count: bool,
+ pub detailed: bool,
+ pub allow_triggered_updates: bool,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct StartPreciseCoverageReturnObject {
+ pub timestamp: f64,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TakePreciseCoverageReturnObject {
+ pub result: Vec<ScriptCoverage>,
+ pub timestamp: f64,
+}
+
+// TODO(bartlomieju): remove me
+#[derive(Eq, PartialEq, Clone, Debug, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ProcessCoverage {
+ pub result: Vec<ScriptCoverage>,
+}
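
These structs mirror the JSON payloads of the Chrome DevTools Protocol `Profiler` domain (see `Profiler.takePreciseCoverage` in mod.rs), with `rename_all = "camelCase"` matching the wire format. As a minimal sketch that is not part of the change (it assumes placement in this module's test code, and the values are made up), a `ScriptCoverage` deserializes like this:

    #[cfg(test)]
    mod cdp_round_trip {
      use super::*;

      #[test]
      fn script_coverage_deserializes_from_camel_case_json() {
        // JSON shape follows the serde attributes above; values are invented.
        let json = r#"{
          "scriptId": "0",
          "url": "file:///lib.js",
          "functions": [{
            "functionName": "lib",
            "isBlockCoverage": true,
            "ranges": [{ "startOffset": 0, "endOffset": 9, "count": 1 }]
          }]
        }"#;
        let cov: ScriptCoverage = serde_json::from_str(json).unwrap();
        assert_eq!(cov.url, "file:///lib.js");
        assert_eq!(cov.functions[0].ranges[0].count, 1);
      }
    }
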
diff --git a/cli/tools/coverage/merge.rs b/cli/tools/coverage/merge.rs
new file mode 100644
index 000000000..70e60edc2
--- /dev/null
+++ b/cli/tools/coverage/merge.rs
@@ -0,0 +1,840 @@
+// Forked from https://github.com/demurgos/v8-coverage/tree/d0ca18da8740198681e0bc68971b0a6cdb11db3e/rust
+// Copyright 2021 Charles Samborski. All rights reserved. MIT license.
+// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
+
+use super::json_types::CoverageRange;
+use super::json_types::FunctionCoverage;
+use super::json_types::ProcessCoverage;
+use super::json_types::ScriptCoverage;
+use super::range_tree::RangeTree;
+use super::range_tree::RangeTreeArena;
+use std::collections::BTreeMap;
+use std::collections::BTreeSet;
+use std::collections::HashMap;
+use std::iter::Peekable;
+
+pub fn merge_processes(
+ mut processes: Vec<ProcessCoverage>,
+) -> Option<ProcessCoverage> {
+ if processes.len() <= 1 {
+ return processes.pop();
+ }
+ let mut url_to_scripts: BTreeMap<String, Vec<ScriptCoverage>> =
+ BTreeMap::new();
+ for process_cov in processes {
+ for script_cov in process_cov.result {
+ url_to_scripts
+ .entry(script_cov.url.clone())
+ .or_insert_with(Vec::new)
+ .push(script_cov);
+ }
+ }
+
+ let result: Vec<ScriptCoverage> = url_to_scripts
+ .into_iter()
+ .enumerate()
+ .map(|(script_id, (_, scripts))| (script_id, scripts))
+ .map(|(script_id, scripts)| {
+ let mut merged: ScriptCoverage = merge_scripts(scripts.to_vec()).unwrap();
+ merged.script_id = script_id.to_string();
+ merged
+ })
+ .collect();
+
+ Some(ProcessCoverage { result })
+}
+
+pub fn merge_scripts(
+ mut scripts: Vec<ScriptCoverage>,
+) -> Option<ScriptCoverage> {
+ if scripts.len() <= 1 {
+ return scripts.pop();
+ }
+ let (script_id, url) = {
+ let first: &ScriptCoverage = &scripts[0];
+ (first.script_id.clone(), first.url.clone())
+ };
+ let mut range_to_funcs: BTreeMap<Range, Vec<FunctionCoverage>> =
+ BTreeMap::new();
+ for script_cov in scripts {
+ for func_cov in script_cov.functions {
+ let root_range = {
+ let root_range_cov: &CoverageRange = &func_cov.ranges[0];
+ Range {
+ start: root_range_cov.start_offset,
+ end: root_range_cov.end_offset,
+ }
+ };
+ range_to_funcs
+ .entry(root_range)
+ .or_insert_with(Vec::new)
+ .push(func_cov);
+ }
+ }
+
+ let functions: Vec<FunctionCoverage> = range_to_funcs
+ .into_iter()
+ .map(|(_, funcs)| merge_functions(funcs).unwrap())
+ .collect();
+
+ Some(ScriptCoverage {
+ script_id,
+ url,
+ functions,
+ })
+}
+
+#[derive(Eq, PartialEq, Hash, Copy, Clone, Debug)]
+struct Range {
+ start: usize,
+ end: usize,
+}
+
+impl Ord for Range {
+ fn cmp(&self, other: &Self) -> ::std::cmp::Ordering {
+ if self.start != other.start {
+ self.start.cmp(&other.start)
+ } else {
+ other.end.cmp(&self.end)
+ }
+ }
+}
+
+impl PartialOrd for Range {
+ fn partial_cmp(&self, other: &Self) -> Option<::std::cmp::Ordering> {
+ if self.start != other.start {
+ self.start.partial_cmp(&other.start)
+ } else {
+ other.end.partial_cmp(&self.end)
+ }
+ }
+}
+
+pub fn merge_functions(
+ mut funcs: Vec<FunctionCoverage>,
+) -> Option<FunctionCoverage> {
+ if funcs.len() <= 1 {
+ return funcs.pop();
+ }
+ let function_name = funcs[0].function_name.clone();
+ let rta_capacity: usize =
+ funcs.iter().fold(0, |acc, func| acc + func.ranges.len());
+ let rta = RangeTreeArena::with_capacity(rta_capacity);
+ let mut trees: Vec<&mut RangeTree> = Vec::new();
+ for func in funcs {
+ if let Some(tree) = RangeTree::from_sorted_ranges(&rta, &func.ranges) {
+ trees.push(tree);
+ }
+ }
+ let merged =
+ RangeTree::normalize(&rta, merge_range_trees(&rta, trees).unwrap());
+ let ranges = merged.to_ranges();
+ let is_block_coverage: bool = !(ranges.len() == 1 && ranges[0].count == 0);
+
+ Some(FunctionCoverage {
+ function_name,
+ ranges,
+ is_block_coverage,
+ })
+}
+
+fn merge_range_trees<'a>(
+ rta: &'a RangeTreeArena<'a>,
+ mut trees: Vec<&'a mut RangeTree<'a>>,
+) -> Option<&'a mut RangeTree<'a>> {
+ if trees.len() <= 1 {
+ return trees.pop();
+ }
+ let (start, end) = {
+ let first = &trees[0];
+ (first.start, first.end)
+ };
+ let delta: i64 = trees.iter().fold(0, |acc, tree| acc + tree.delta);
+ let children = merge_range_tree_children(rta, trees);
+
+ Some(rta.alloc(RangeTree::new(start, end, delta, children)))
+}
+
+struct StartEvent<'a> {
+ offset: usize,
+ trees: Vec<(usize, &'a mut RangeTree<'a>)>,
+}
+
+fn into_start_events<'a>(trees: Vec<&'a mut RangeTree<'a>>) -> Vec<StartEvent> {
+ let mut result: BTreeMap<usize, Vec<(usize, &'a mut RangeTree<'a>)>> =
+ BTreeMap::new();
+ for (parent_index, tree) in trees.into_iter().enumerate() {
+ for child in tree.children.drain(..) {
+ result
+ .entry(child.start)
+ .or_insert_with(Vec::new)
+ .push((parent_index, child));
+ }
+ }
+ result
+ .into_iter()
+ .map(|(offset, trees)| StartEvent { offset, trees })
+ .collect()
+}
+
+struct StartEventQueue<'a> {
+ pending: Option<StartEvent<'a>>,
+ queue: Peekable<::std::vec::IntoIter<StartEvent<'a>>>,
+}
+
+impl<'a> StartEventQueue<'a> {
+ pub fn new(queue: Vec<StartEvent<'a>>) -> StartEventQueue<'a> {
+ StartEventQueue {
+ pending: None,
+ queue: queue.into_iter().peekable(),
+ }
+ }
+
+ pub(crate) fn set_pending_offset(&mut self, offset: usize) {
+ self.pending = Some(StartEvent {
+ offset,
+ trees: Vec::new(),
+ });
+ }
+
+ pub(crate) fn push_pending_tree(
+ &mut self,
+ tree: (usize, &'a mut RangeTree<'a>),
+ ) {
+ self.pending = self.pending.take().map(|mut start_event| {
+ start_event.trees.push(tree);
+ start_event
+ });
+ }
+}
+
+impl<'a> Iterator for StartEventQueue<'a> {
+ type Item = StartEvent<'a>;
+
+ fn next(&mut self) -> Option<<Self as Iterator>::Item> {
+ let pending_offset: Option<usize> = match &self.pending {
+ Some(ref start_event) if !start_event.trees.is_empty() => {
+ Some(start_event.offset)
+ }
+ _ => None,
+ };
+
+ match pending_offset {
+ Some(pending_offset) => {
+ let queue_offset =
+ self.queue.peek().map(|start_event| start_event.offset);
+ match queue_offset {
+ None => self.pending.take(),
+ Some(queue_offset) => {
+ if pending_offset < queue_offset {
+ self.pending.take()
+ } else {
+ let mut result = self.queue.next().unwrap();
+ if pending_offset == queue_offset {
+ let pending_trees = self.pending.take().unwrap().trees;
+ result.trees.extend(pending_trees.into_iter())
+ }
+ Some(result)
+ }
+ }
+ }
+ }
+ None => self.queue.next(),
+ }
+ }
+}
+
+fn merge_range_tree_children<'a>(
+ rta: &'a RangeTreeArena<'a>,
+ parent_trees: Vec<&'a mut RangeTree<'a>>,
+) -> Vec<&'a mut RangeTree<'a>> {
+ let mut flat_children: Vec<Vec<&'a mut RangeTree<'a>>> =
+ Vec::with_capacity(parent_trees.len());
+ let mut wrapped_children: Vec<Vec<&'a mut RangeTree<'a>>> =
+ Vec::with_capacity(parent_trees.len());
+ let mut open_range: Option<Range> = None;
+
+ for _parent_tree in parent_trees.iter() {
+ flat_children.push(Vec::new());
+ wrapped_children.push(Vec::new());
+ }
+
+ let mut start_event_queue =
+ StartEventQueue::new(into_start_events(parent_trees));
+
+ let mut parent_to_nested: HashMap<usize, Vec<&'a mut RangeTree<'a>>> =
+ HashMap::new();
+
+ while let Some(event) = start_event_queue.next() {
+ open_range = if let Some(open_range) = open_range {
+ if open_range.end <= event.offset {
+ for (parent_index, nested) in parent_to_nested {
+ wrapped_children[parent_index].push(rta.alloc(RangeTree::new(
+ open_range.start,
+ open_range.end,
+ 0,
+ nested,
+ )));
+ }
+ parent_to_nested = HashMap::new();
+ None
+ } else {
+ Some(open_range)
+ }
+ } else {
+ None
+ };
+
+ match open_range {
+ Some(open_range) => {
+ for (parent_index, tree) in event.trees {
+ let child = if tree.end > open_range.end {
+ let (left, right) = RangeTree::split(rta, tree, open_range.end);
+ start_event_queue.push_pending_tree((parent_index, right));
+ left
+ } else {
+ tree
+ };
+ parent_to_nested
+ .entry(parent_index)
+ .or_insert_with(Vec::new)
+ .push(child);
+ }
+ }
+ None => {
+ let mut open_range_end: usize = event.offset + 1;
+ for (_, ref tree) in &event.trees {
+ open_range_end = if tree.end > open_range_end {
+ tree.end
+ } else {
+ open_range_end
+ };
+ }
+ for (parent_index, tree) in event.trees {
+ if tree.end == open_range_end {
+ flat_children[parent_index].push(tree);
+ continue;
+ }
+ parent_to_nested
+ .entry(parent_index)
+ .or_insert_with(Vec::new)
+ .push(tree);
+ }
+ start_event_queue.set_pending_offset(open_range_end);
+ open_range = Some(Range {
+ start: event.offset,
+ end: open_range_end,
+ });
+ }
+ }
+ }
+ if let Some(open_range) = open_range {
+ for (parent_index, nested) in parent_to_nested {
+ wrapped_children[parent_index].push(rta.alloc(RangeTree::new(
+ open_range.start,
+ open_range.end,
+ 0,
+ nested,
+ )));
+ }
+ }
+
+ let child_forests: Vec<Vec<&'a mut RangeTree<'a>>> = flat_children
+ .into_iter()
+ .zip(wrapped_children.into_iter())
+ .map(|(flat, wrapped)| merge_children_lists(flat, wrapped))
+ .collect();
+
+ let events = get_child_events_from_forests(&child_forests);
+
+ let mut child_forests: Vec<
+ Peekable<::std::vec::IntoIter<&'a mut RangeTree<'a>>>,
+ > = child_forests
+ .into_iter()
+ .map(|forest| forest.into_iter().peekable())
+ .collect();
+
+ let mut result: Vec<&'a mut RangeTree<'a>> = Vec::new();
+ for event in events.iter() {
+ let mut matching_trees: Vec<&'a mut RangeTree<'a>> = Vec::new();
+ for (_parent_index, children) in child_forests.iter_mut().enumerate() {
+ let next_tree: Option<&'a mut RangeTree<'a>> = {
+ if children.peek().map_or(false, |tree| tree.start == *event) {
+ children.next()
+ } else {
+ None
+ }
+ };
+ if let Some(next_tree) = next_tree {
+ matching_trees.push(next_tree);
+ }
+ }
+ if let Some(merged) = merge_range_trees(rta, matching_trees) {
+ result.push(merged);
+ }
+ }
+
+ result
+}
+
+fn get_child_events_from_forests<'a>(
+ forests: &[Vec<&'a mut RangeTree<'a>>],
+) -> BTreeSet<usize> {
+ let mut event_set: BTreeSet<usize> = BTreeSet::new();
+ for forest in forests {
+ for tree in forest {
+ event_set.insert(tree.start);
+ event_set.insert(tree.end);
+ }
+ }
+ event_set
+}
+
+// TODO: itertools?
+// https://play.integer32.com/?gist=ad2cd20d628e647a5dbdd82e68a15cb6&version=stable&mode=debug&edition=2015
+fn merge_children_lists<'a>(
+ a: Vec<&'a mut RangeTree<'a>>,
+ b: Vec<&'a mut RangeTree<'a>>,
+) -> Vec<&'a mut RangeTree<'a>> {
+ let mut merged: Vec<&'a mut RangeTree<'a>> = Vec::new();
+ let mut a = a.into_iter();
+ let mut b = b.into_iter();
+ let mut next_a = a.next();
+ let mut next_b = b.next();
+ loop {
+ match (next_a, next_b) {
+ (Some(tree_a), Some(tree_b)) => {
+ if tree_a.start < tree_b.start {
+ merged.push(tree_a);
+ next_a = a.next();
+ next_b = Some(tree_b);
+ } else {
+ merged.push(tree_b);
+ next_a = Some(tree_a);
+ next_b = b.next();
+ }
+ }
+ (Some(tree_a), None) => {
+ merged.push(tree_a);
+ merged.extend(a);
+ break;
+ }
+ (None, Some(tree_b)) => {
+ merged.push(tree_b);
+ merged.extend(b);
+ break;
+ }
+ (None, None) => break,
+ }
+ }
+
+ merged
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ // use test_generator::test_resources;
+
+ #[test]
+ fn empty() {
+ let inputs: Vec<ProcessCoverage> = Vec::new();
+ let expected: Option<ProcessCoverage> = None;
+
+ assert_eq!(merge_processes(inputs), expected);
+ }
+
+ #[test]
+ fn two_flat_trees() {
+ let inputs: Vec<ProcessCoverage> = vec![
+ ProcessCoverage {
+ result: vec![ScriptCoverage {
+ script_id: String::from("0"),
+ url: String::from("/lib.js"),
+ functions: vec![FunctionCoverage {
+ function_name: String::from("lib"),
+ is_block_coverage: true,
+ ranges: vec![CoverageRange {
+ start_offset: 0,
+ end_offset: 9,
+ count: 1,
+ }],
+ }],
+ }],
+ },
+ ProcessCoverage {
+ result: vec![ScriptCoverage {
+ script_id: String::from("0"),
+ url: String::from("/lib.js"),
+ functions: vec![FunctionCoverage {
+ function_name: String::from("lib"),
+ is_block_coverage: true,
+ ranges: vec![CoverageRange {
+ start_offset: 0,
+ end_offset: 9,
+ count: 2,
+ }],
+ }],
+ }],
+ },
+ ];
+ let expected: Option<ProcessCoverage> = Some(ProcessCoverage {
+ result: vec![ScriptCoverage {
+ script_id: String::from("0"),
+ url: String::from("/lib.js"),
+ functions: vec![FunctionCoverage {
+ function_name: String::from("lib"),
+ is_block_coverage: true,
+ ranges: vec![CoverageRange {
+ start_offset: 0,
+ end_offset: 9,
+ count: 3,
+ }],
+ }],
+ }],
+ });
+
+ assert_eq!(merge_processes(inputs), expected);
+ }
+
+ #[test]
+ fn two_trees_with_matching_children() {
+ let inputs: Vec<ProcessCoverage> = vec![
+ ProcessCoverage {
+ result: vec![ScriptCoverage {
+ script_id: String::from("0"),
+ url: String::from("/lib.js"),
+ functions: vec![FunctionCoverage {
+ function_name: String::from("lib"),
+ is_block_coverage: true,
+ ranges: vec![
+ CoverageRange {
+ start_offset: 0,
+ end_offset: 9,
+ count: 10,
+ },
+ CoverageRange {
+ start_offset: 3,
+ end_offset: 6,
+ count: 1,
+ },
+ ],
+ }],
+ }],
+ },
+ ProcessCoverage {
+ result: vec![ScriptCoverage {
+ script_id: String::from("0"),
+ url: String::from("/lib.js"),
+ functions: vec![FunctionCoverage {
+ function_name: String::from("lib"),
+ is_block_coverage: true,
+ ranges: vec![
+ CoverageRange {
+ start_offset: 0,
+ end_offset: 9,
+ count: 20,
+ },
+ CoverageRange {
+ start_offset: 3,
+ end_offset: 6,
+ count: 2,
+ },
+ ],
+ }],
+ }],
+ },
+ ];
+ let expected: Option<ProcessCoverage> = Some(ProcessCoverage {
+ result: vec![ScriptCoverage {
+ script_id: String::from("0"),
+ url: String::from("/lib.js"),
+ functions: vec![FunctionCoverage {
+ function_name: String::from("lib"),
+ is_block_coverage: true,
+ ranges: vec![
+ CoverageRange {
+ start_offset: 0,
+ end_offset: 9,
+ count: 30,
+ },
+ CoverageRange {
+ start_offset: 3,
+ end_offset: 6,
+ count: 3,
+ },
+ ],
+ }],
+ }],
+ });
+
+ assert_eq!(merge_processes(inputs), expected);
+ }
+
+ #[test]
+ fn two_trees_with_partially_overlapping_children() {
+ let inputs: Vec<ProcessCoverage> = vec![
+ ProcessCoverage {
+ result: vec![ScriptCoverage {
+ script_id: String::from("0"),
+ url: String::from("/lib.js"),
+ functions: vec![FunctionCoverage {
+ function_name: String::from("lib"),
+ is_block_coverage: true,
+ ranges: vec![
+ CoverageRange {
+ start_offset: 0,
+ end_offset: 9,
+ count: 10,
+ },
+ CoverageRange {
+ start_offset: 2,
+ end_offset: 5,
+ count: 1,
+ },
+ ],
+ }],
+ }],
+ },
+ ProcessCoverage {
+ result: vec![ScriptCoverage {
+ script_id: String::from("0"),
+ url: String::from("/lib.js"),
+ functions: vec![FunctionCoverage {
+ function_name: String::from("lib"),
+ is_block_coverage: true,
+ ranges: vec![
+ CoverageRange {
+ start_offset: 0,
+ end_offset: 9,
+ count: 20,
+ },
+ CoverageRange {
+ start_offset: 4,
+ end_offset: 7,
+ count: 2,
+ },
+ ],
+ }],
+ }],
+ },
+ ];
+ let expected: Option<ProcessCoverage> = Some(ProcessCoverage {
+ result: vec![ScriptCoverage {
+ script_id: String::from("0"),
+ url: String::from("/lib.js"),
+ functions: vec![FunctionCoverage {
+ function_name: String::from("lib"),
+ is_block_coverage: true,
+ ranges: vec![
+ CoverageRange {
+ start_offset: 0,
+ end_offset: 9,
+ count: 30,
+ },
+ CoverageRange {
+ start_offset: 2,
+ end_offset: 5,
+ count: 21,
+ },
+ CoverageRange {
+ start_offset: 4,
+ end_offset: 5,
+ count: 3,
+ },
+ CoverageRange {
+ start_offset: 5,
+ end_offset: 7,
+ count: 12,
+ },
+ ],
+ }],
+ }],
+ });
+
+ assert_eq!(merge_processes(inputs), expected);
+ }
+
+ #[test]
+ fn two_trees_with_with_complementary_children_summing_to_the_same_count() {
+ let inputs: Vec<ProcessCoverage> = vec![
+ ProcessCoverage {
+ result: vec![ScriptCoverage {
+ script_id: String::from("0"),
+ url: String::from("/lib.js"),
+ functions: vec![FunctionCoverage {
+ function_name: String::from("lib"),
+ is_block_coverage: true,
+ ranges: vec![
+ CoverageRange {
+ start_offset: 0,
+ end_offset: 9,
+ count: 1,
+ },
+ CoverageRange {
+ start_offset: 1,
+ end_offset: 8,
+ count: 6,
+ },
+ CoverageRange {
+ start_offset: 1,
+ end_offset: 5,
+ count: 5,
+ },
+ CoverageRange {
+ start_offset: 5,
+ end_offset: 8,
+ count: 7,
+ },
+ ],
+ }],
+ }],
+ },
+ ProcessCoverage {
+ result: vec![ScriptCoverage {
+ script_id: String::from("0"),
+ url: String::from("/lib.js"),
+ functions: vec![FunctionCoverage {
+ function_name: String::from("lib"),
+ is_block_coverage: true,
+ ranges: vec![
+ CoverageRange {
+ start_offset: 0,
+ end_offset: 9,
+ count: 4,
+ },
+ CoverageRange {
+ start_offset: 1,
+ end_offset: 8,
+ count: 8,
+ },
+ CoverageRange {
+ start_offset: 1,
+ end_offset: 5,
+ count: 9,
+ },
+ CoverageRange {
+ start_offset: 5,
+ end_offset: 8,
+ count: 7,
+ },
+ ],
+ }],
+ }],
+ },
+ ];
+ let expected: Option<ProcessCoverage> = Some(ProcessCoverage {
+ result: vec![ScriptCoverage {
+ script_id: String::from("0"),
+ url: String::from("/lib.js"),
+ functions: vec![FunctionCoverage {
+ function_name: String::from("lib"),
+ is_block_coverage: true,
+ ranges: vec![
+ CoverageRange {
+ start_offset: 0,
+ end_offset: 9,
+ count: 5,
+ },
+ CoverageRange {
+ start_offset: 1,
+ end_offset: 8,
+ count: 14,
+ },
+ ],
+ }],
+ }],
+ });
+
+ assert_eq!(merge_processes(inputs), expected);
+ }
+
+ #[test]
+ fn merges_a_similar_sliding_chain_a_bc() {
+ let inputs: Vec<ProcessCoverage> = vec![
+ ProcessCoverage {
+ result: vec![ScriptCoverage {
+ script_id: String::from("0"),
+ url: String::from("/lib.js"),
+ functions: vec![FunctionCoverage {
+ function_name: String::from("lib"),
+ is_block_coverage: true,
+ ranges: vec![
+ CoverageRange {
+ start_offset: 0,
+ end_offset: 7,
+ count: 10,
+ },
+ CoverageRange {
+ start_offset: 0,
+ end_offset: 4,
+ count: 1,
+ },
+ ],
+ }],
+ }],
+ },
+ ProcessCoverage {
+ result: vec![ScriptCoverage {
+ script_id: String::from("0"),
+ url: String::from("/lib.js"),
+ functions: vec![FunctionCoverage {
+ function_name: String::from("lib"),
+ is_block_coverage: true,
+ ranges: vec![
+ CoverageRange {
+ start_offset: 0,
+ end_offset: 7,
+ count: 20,
+ },
+ CoverageRange {
+ start_offset: 1,
+ end_offset: 6,
+ count: 11,
+ },
+ CoverageRange {
+ start_offset: 2,
+ end_offset: 5,
+ count: 2,
+ },
+ ],
+ }],
+ }],
+ },
+ ];
+ let expected: Option<ProcessCoverage> = Some(ProcessCoverage {
+ result: vec![ScriptCoverage {
+ script_id: String::from("0"),
+ url: String::from("/lib.js"),
+ functions: vec![FunctionCoverage {
+ function_name: String::from("lib"),
+ is_block_coverage: true,
+ ranges: vec![
+ CoverageRange {
+ start_offset: 0,
+ end_offset: 7,
+ count: 30,
+ },
+ CoverageRange {
+ start_offset: 0,
+ end_offset: 6,
+ count: 21,
+ },
+ CoverageRange {
+ start_offset: 1,
+ end_offset: 5,
+ count: 12,
+ },
+ CoverageRange {
+ start_offset: 2,
+ end_offset: 4,
+ count: 3,
+ },
+ ],
+ }],
+ }],
+ });
+
+ assert_eq!(merge_processes(inputs), expected);
+ }
+}
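
The `Ord` implementation on the private `Range` type above sorts by `start` ascending and then by `end` descending, so an enclosing range always precedes the ranges nested inside it when used as a `BTreeMap` key. A small sketch of that ordering, assuming it runs inside this module (the type is not public and the values are invented):

    let mut ranges = vec![
      Range { start: 3, end: 6 },
      Range { start: 0, end: 4 },
      Range { start: 0, end: 9 },
    ];
    ranges.sort();
    // The enclosing (0, 9) sorts first, then (0, 4), then (3, 6).
    assert_eq!(
      ranges,
      vec![
        Range { start: 0, end: 9 },
        Range { start: 0, end: 4 },
        Range { start: 3, end: 6 },
      ]
    );
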
diff --git a/cli/tools/coverage/mod.rs b/cli/tools/coverage/mod.rs
new file mode 100644
index 000000000..1c14859ea
--- /dev/null
+++ b/cli/tools/coverage/mod.rs
@@ -0,0 +1,652 @@
+// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
+
+use crate::colors;
+use crate::flags::CoverageFlags;
+use crate::flags::Flags;
+use crate::fs_util::collect_files;
+use crate::proc_state::ProcState;
+use crate::source_maps::SourceMapGetter;
+use crate::tools::fmt::format_json;
+
+use deno_ast::MediaType;
+use deno_ast::ModuleSpecifier;
+use deno_core::anyhow::anyhow;
+use deno_core::anyhow::Context;
+use deno_core::error::AnyError;
+use deno_core::serde_json;
+use deno_core::url::Url;
+use deno_core::LocalInspectorSession;
+use regex::Regex;
+use sourcemap::SourceMap;
+use std::fs;
+use std::fs::File;
+use std::io::BufWriter;
+use std::io::Write;
+use std::path::PathBuf;
+use text_lines::TextLines;
+use uuid::Uuid;
+
+mod json_types;
+mod merge;
+mod range_tree;
+
+use json_types::*;
+
+pub struct CoverageCollector {
+ pub dir: PathBuf,
+ session: LocalInspectorSession,
+}
+
+impl CoverageCollector {
+ pub fn new(dir: PathBuf, session: LocalInspectorSession) -> Self {
+ Self { dir, session }
+ }
+
+ async fn enable_debugger(&mut self) -> Result<(), AnyError> {
+ self.session.post_message("Debugger.enable", None).await?;
+ Ok(())
+ }
+
+ async fn enable_profiler(&mut self) -> Result<(), AnyError> {
+ self.session.post_message("Profiler.enable", None).await?;
+ Ok(())
+ }
+
+ async fn disable_debugger(&mut self) -> Result<(), AnyError> {
+ self.session.post_message("Debugger.disable", None).await?;
+ Ok(())
+ }
+
+ async fn disable_profiler(&mut self) -> Result<(), AnyError> {
+ self.session.post_message("Profiler.disable", None).await?;
+ Ok(())
+ }
+
+ async fn start_precise_coverage(
+ &mut self,
+ parameters: StartPreciseCoverageParameters,
+ ) -> Result<StartPreciseCoverageReturnObject, AnyError> {
+ let parameters_value = serde_json::to_value(parameters)?;
+ let return_value = self
+ .session
+ .post_message("Profiler.startPreciseCoverage", Some(parameters_value))
+ .await?;
+
+ let return_object = serde_json::from_value(return_value)?;
+
+ Ok(return_object)
+ }
+
+ async fn take_precise_coverage(
+ &mut self,
+ ) -> Result<TakePreciseCoverageReturnObject, AnyError> {
+ let return_value = self
+ .session
+ .post_message("Profiler.takePreciseCoverage", None)
+ .await?;
+
+ let return_object = serde_json::from_value(return_value)?;
+
+ Ok(return_object)
+ }
+
+ pub async fn start_collecting(&mut self) -> Result<(), AnyError> {
+ self.enable_debugger().await?;
+ self.enable_profiler().await?;
+ self
+ .start_precise_coverage(StartPreciseCoverageParameters {
+ call_count: true,
+ detailed: true,
+ allow_triggered_updates: false,
+ })
+ .await?;
+
+ Ok(())
+ }
+
+ pub async fn stop_collecting(&mut self) -> Result<(), AnyError> {
+ fs::create_dir_all(&self.dir)?;
+
+ let script_coverages = self.take_precise_coverage().await?.result;
+ for script_coverage in script_coverages {
+ let filename = format!("{}.json", Uuid::new_v4());
+ let filepath = self.dir.join(filename);
+
+ let mut out = BufWriter::new(File::create(filepath)?);
+ let coverage = serde_json::to_string(&script_coverage)?;
+ let formated_coverage =
+ format_json(&coverage, &Default::default()).unwrap_or(coverage);
+
+ out.write_all(formated_coverage.as_bytes())?;
+ out.flush()?;
+ }
+
+ self.disable_debugger().await?;
+ self.disable_profiler().await?;
+
+ Ok(())
+ }
+}
+
+struct BranchCoverageItem {
+ line_index: usize,
+ block_number: usize,
+ branch_number: usize,
+ taken: Option<i64>,
+ is_hit: bool,
+}
+
+struct FunctionCoverageItem {
+ name: String,
+ line_index: usize,
+ execution_count: i64,
+}
+
+struct CoverageReport {
+ url: ModuleSpecifier,
+ named_functions: Vec<FunctionCoverageItem>,
+ branches: Vec<BranchCoverageItem>,
+ found_lines: Vec<(usize, i64)>,
+}
+
+fn generate_coverage_report(
+ script_coverage: &ScriptCoverage,
+ script_source: &str,
+ maybe_source_map: &Option<Vec<u8>>,
+) -> CoverageReport {
+ let maybe_source_map = maybe_source_map
+ .as_ref()
+ .map(|source_map| SourceMap::from_slice(source_map).unwrap());
+ let text_lines = TextLines::new(script_source);
+
+ let comment_spans = deno_ast::lex(script_source, MediaType::JavaScript)
+ .into_iter()
+ .filter(|item| {
+ matches!(item.inner, deno_ast::TokenOrComment::Comment { .. })
+ })
+ .map(|item| item.span)
+ .collect::<Vec<_>>();
+
+ let url = Url::parse(&script_coverage.url).unwrap();
+ let mut coverage_report = CoverageReport {
+ url,
+ named_functions: Vec::with_capacity(
+ script_coverage
+ .functions
+ .iter()
+ .filter(|f| !f.function_name.is_empty())
+ .count(),
+ ),
+ branches: Vec::new(),
+ found_lines: Vec::new(),
+ };
+
+ for function in &script_coverage.functions {
+ if function.function_name.is_empty() {
+ continue;
+ }
+
+ let source_line_index =
+ text_lines.line_index(function.ranges[0].start_offset);
+ let line_index = if let Some(source_map) = maybe_source_map.as_ref() {
+ source_map
+ .tokens()
+ .find(|token| token.get_dst_line() as usize == source_line_index)
+ .map(|token| token.get_src_line() as usize)
+ .unwrap_or(0)
+ } else {
+ source_line_index
+ };
+
+ coverage_report.named_functions.push(FunctionCoverageItem {
+ name: function.function_name.clone(),
+ line_index,
+ execution_count: function.ranges[0].count,
+ });
+ }
+
+ for (block_number, function) in script_coverage.functions.iter().enumerate() {
+ let block_hits = function.ranges[0].count;
+ for (branch_number, range) in function.ranges[1..].iter().enumerate() {
+ let source_line_index = text_lines.line_index(range.start_offset);
+ let line_index = if let Some(source_map) = maybe_source_map.as_ref() {
+ source_map
+ .tokens()
+ .find(|token| token.get_dst_line() as usize == source_line_index)
+ .map(|token| token.get_src_line() as usize)
+ .unwrap_or(0)
+ } else {
+ source_line_index
+ };
+
+ // From https://manpages.debian.org/unstable/lcov/geninfo.1.en.html:
+ //
+ // Block number and branch number are gcc internal IDs for the branch. Taken is either '-'
+ // if the basic block containing the branch was never executed or a number indicating how
+ // often that branch was taken.
+ //
+ // However with the data we get from v8 coverage profiles it seems we can't actually hit
+ // this as appears it won't consider any nested branches it hasn't seen but its here for
+ // the sake of accuracy.
+ let taken = if block_hits > 0 {
+ Some(range.count)
+ } else {
+ None
+ };
+
+ coverage_report.branches.push(BranchCoverageItem {
+ line_index,
+ block_number,
+ branch_number,
+ taken,
+ is_hit: range.count > 0,
+ })
+ }
+ }
+
+ // TODO(caspervonb): collect uncovered ranges on the lines so that we can highlight specific
+ // parts of a line in color (word diff style) instead of the entire line.
+ let mut line_counts = Vec::with_capacity(text_lines.lines_count());
+ for line_index in 0..text_lines.lines_count() {
+ let line_start_offset = text_lines.line_start(line_index);
+ let line_end_offset = text_lines.line_end(line_index);
+ let ignore = comment_spans.iter().any(|span| {
+ (span.lo.0 as usize) <= line_start_offset
+ && (span.hi.0 as usize) >= line_end_offset
+ }) || script_source[line_start_offset..line_end_offset]
+ .trim()
+ .is_empty();
+ let mut count = 0;
+
+ if ignore {
+ count = 1;
+ } else {
+ // Count the hits of ranges that include the entire line which will always be at-least one
+ // as long as the code has been evaluated.
+ for function in &script_coverage.functions {
+ for range in &function.ranges {
+ if range.start_offset <= line_start_offset
+ && range.end_offset >= line_end_offset
+ {
+ count += range.count;
+ }
+ }
+ }
+
+ // We reset the count if any block with a zero count overlaps with the line range.
+ for function in &script_coverage.functions {
+ for range in &function.ranges {
+ if range.count > 0 {
+ continue;
+ }
+
+ let overlaps = range.start_offset < line_end_offset
+ && range.end_offset > line_start_offset;
+ if overlaps {
+ count = 0;
+ }
+ }
+ }
+ }
+
+ line_counts.push(count);
+ }
+
+ coverage_report.found_lines =
+ if let Some(source_map) = maybe_source_map.as_ref() {
+ let mut found_lines = line_counts
+ .iter()
+ .enumerate()
+ .map(|(index, count)| {
+ // get all the mappings from this destination line to a different src line
+ let mut results = source_map
+ .tokens()
+ .filter(move |token| token.get_dst_line() as usize == index)
+ .map(move |token| (token.get_src_line() as usize, *count))
+ .collect::<Vec<_>>();
+ // only keep the results that point at different src lines
+ results.sort_unstable_by_key(|(index, _)| *index);
+ results.dedup_by_key(|(index, _)| *index);
+ results.into_iter()
+ })
+ .flatten()
+ .collect::<Vec<(usize, i64)>>();
+
+ found_lines.sort_unstable_by_key(|(index, _)| *index);
+ // combine duplicated lines
+ for i in (1..found_lines.len()).rev() {
+ if found_lines[i].0 == found_lines[i - 1].0 {
+ found_lines[i - 1].1 += found_lines[i].1;
+ found_lines.remove(i);
+ }
+ }
+ found_lines
+ } else {
+ line_counts
+ .into_iter()
+ .enumerate()
+ .map(|(index, count)| (index, count))
+ .collect::<Vec<(usize, i64)>>()
+ };
+
+ coverage_report
+}
+
+enum CoverageReporterKind {
+ Pretty,
+ Lcov,
+}
+
+fn create_reporter(
+ kind: CoverageReporterKind,
+) -> Box<dyn CoverageReporter + Send> {
+ match kind {
+ CoverageReporterKind::Lcov => Box::new(LcovCoverageReporter::new()),
+ CoverageReporterKind::Pretty => Box::new(PrettyCoverageReporter::new()),
+ }
+}
+
+trait CoverageReporter {
+ fn report(&mut self, coverage_report: &CoverageReport, file_text: &str);
+
+ fn done(&mut self);
+}
+
+struct LcovCoverageReporter {}
+
+impl LcovCoverageReporter {
+ pub fn new() -> LcovCoverageReporter {
+ LcovCoverageReporter {}
+ }
+}
+
+impl CoverageReporter for LcovCoverageReporter {
+ fn report(&mut self, coverage_report: &CoverageReport, _file_text: &str) {
+ let file_path = coverage_report
+ .url
+ .to_file_path()
+ .ok()
+ .map(|p| p.to_str().map(|p| p.to_string()))
+ .flatten()
+ .unwrap_or_else(|| coverage_report.url.to_string());
+ println!("SF:{}", file_path);
+
+ for function in &coverage_report.named_functions {
+ println!("FN:{},{}", function.line_index + 1, function.name);
+ }
+
+ for function in &coverage_report.named_functions {
+ println!("FNDA:{},{}", function.execution_count, function.name);
+ }
+
+ let functions_found = coverage_report.named_functions.len();
+ println!("FNF:{}", functions_found);
+ let functions_hit = coverage_report
+ .named_functions
+ .iter()
+ .filter(|f| f.execution_count > 0)
+ .count();
+ println!("FNH:{}", functions_hit);
+
+ for branch in &coverage_report.branches {
+ let taken = if let Some(taken) = &branch.taken {
+ taken.to_string()
+ } else {
+ "-".to_string()
+ };
+
+ println!(
+ "BRDA:{},{},{},{}",
+ branch.line_index + 1,
+ branch.block_number,
+ branch.branch_number,
+ taken
+ );
+ }
+
+ let branches_found = coverage_report.branches.len();
+ println!("BRF:{}", branches_found);
+ let branches_hit =
+ coverage_report.branches.iter().filter(|b| b.is_hit).count();
+ println!("BRH:{}", branches_hit);
+
+ for (index, count) in &coverage_report.found_lines {
+ println!("DA:{},{}", index + 1, count);
+ }
+
+ let lines_hit = coverage_report
+ .found_lines
+ .iter()
+ .filter(|(_, count)| *count != 0)
+ .count();
+ println!("LH:{}", lines_hit);
+
+ let lines_found = coverage_report.found_lines.len();
+ println!("LF:{}", lines_found);
+
+ println!("end_of_record");
+ }
+
+ fn done(&mut self) {}
+}
+
+struct PrettyCoverageReporter {}
+
+impl PrettyCoverageReporter {
+ pub fn new() -> PrettyCoverageReporter {
+ PrettyCoverageReporter {}
+ }
+}
+
+impl CoverageReporter for PrettyCoverageReporter {
+ fn report(&mut self, coverage_report: &CoverageReport, file_text: &str) {
+ let lines = file_text.split('\n').collect::<Vec<_>>();
+ print!("cover {} ... ", coverage_report.url);
+
+ let hit_lines = coverage_report
+ .found_lines
+ .iter()
+ .filter(|(_, count)| *count > 0)
+ .map(|(index, _)| *index);
+
+ let missed_lines = coverage_report
+ .found_lines
+ .iter()
+ .filter(|(_, count)| *count == 0)
+ .map(|(index, _)| *index);
+
+ let lines_found = coverage_report.found_lines.len();
+ let lines_hit = hit_lines.count();
+ let line_ratio = lines_hit as f32 / lines_found as f32;
+
+ let line_coverage =
+ format!("{:.3}% ({}/{})", line_ratio * 100.0, lines_hit, lines_found);
+
+ if line_ratio >= 0.9 {
+ println!("{}", colors::green(&line_coverage));
+ } else if line_ratio >= 0.75 {
+ println!("{}", colors::yellow(&line_coverage));
+ } else {
+ println!("{}", colors::red(&line_coverage));
+ }
+
+ let mut last_line = None;
+ for line_index in missed_lines {
+ const WIDTH: usize = 4;
+ const SEPERATOR: &str = "|";
+
+ // Put a horizontal separator between disjoint runs of lines
+ if let Some(last_line) = last_line {
+ if last_line + 1 != line_index {
+ let dash = colors::gray("-".repeat(WIDTH + 1));
+ println!("{}{}{}", dash, colors::gray(SEPERATOR), dash);
+ }
+ }
+
+ println!(
+ "{:width$} {} {}",
+ line_index + 1,
+ colors::gray(SEPERATOR),
+ colors::red(&lines[line_index]),
+ width = WIDTH
+ );
+
+ last_line = Some(line_index);
+ }
+ }
+
+ fn done(&mut self) {}
+}
+
+fn collect_coverages(
+ files: Vec<PathBuf>,
+ ignore: Vec<PathBuf>,
+) -> Result<Vec<ScriptCoverage>, AnyError> {
+ let mut coverages: Vec<ScriptCoverage> = Vec::new();
+ let file_paths = collect_files(&files, &ignore, |file_path| {
+ file_path.extension().map_or(false, |ext| ext == "json")
+ })?;
+
+ for file_path in file_paths {
+ let json = fs::read_to_string(file_path.as_path())?;
+ let new_coverage: ScriptCoverage = serde_json::from_str(&json)?;
+ coverages.push(new_coverage);
+ }
+
+ coverages.sort_by_key(|k| k.url.clone());
+
+ Ok(coverages)
+}
+
+fn filter_coverages(
+ coverages: Vec<ScriptCoverage>,
+ include: Vec<String>,
+ exclude: Vec<String>,
+) -> Vec<ScriptCoverage> {
+ let include: Vec<Regex> =
+ include.iter().map(|e| Regex::new(e).unwrap()).collect();
+
+ let exclude: Vec<Regex> =
+ exclude.iter().map(|e| Regex::new(e).unwrap()).collect();
+
+ coverages
+ .into_iter()
+ .filter(|e| {
+ let is_internal = e.url.starts_with("deno:")
+ || e.url.ends_with("__anonymous__")
+ || e.url.ends_with("$deno$test.js");
+
+ let is_included = include.iter().any(|p| p.is_match(&e.url));
+ let is_excluded = exclude.iter().any(|p| p.is_match(&e.url));
+
+ (include.is_empty() || is_included) && !is_excluded && !is_internal
+ })
+ .collect::<Vec<ScriptCoverage>>()
+}
+
+pub async fn cover_files(
+ flags: Flags,
+ coverage_flags: CoverageFlags,
+) -> Result<(), AnyError> {
+ let ps = ProcState::build(flags).await?;
+
+ let script_coverages =
+ collect_coverages(coverage_flags.files, coverage_flags.ignore)?;
+ let script_coverages = filter_coverages(
+ script_coverages,
+ coverage_flags.include,
+ coverage_flags.exclude,
+ );
+
+ let proc_coverages: Vec<_> = script_coverages
+ .into_iter()
+ .map(|cov| ProcessCoverage { result: vec![cov] })
+ .collect();
+
+ let script_coverages = if let Some(c) = merge::merge_processes(proc_coverages)
+ {
+ c.result
+ } else {
+ vec![]
+ };
+
+ let reporter_kind = if coverage_flags.lcov {
+ CoverageReporterKind::Lcov
+ } else {
+ CoverageReporterKind::Pretty
+ };
+
+ let mut reporter = create_reporter(reporter_kind);
+
+ for script_coverage in script_coverages {
+ let module_specifier =
+ deno_core::resolve_url_or_path(&script_coverage.url)?;
+
+ let maybe_file = if module_specifier.scheme() == "file" {
+ ps.file_fetcher.get_source(&module_specifier)
+ } else {
+ ps.file_fetcher
+ .fetch_cached(&module_specifier, 10)
+ .with_context(|| {
+ format!("Failed to fetch \"{}\" from cache.", module_specifier)
+ })?
+ };
+ let file = maybe_file.ok_or_else(|| {
+ anyhow!("Failed to fetch \"{}\" from cache.
+ Before generating coverage report, run `deno test --coverage` to ensure consistent state.",
+ module_specifier
+ )
+ })?;
+
+ // Check if file was transpiled
+ let transpiled_source = match file.media_type {
+ MediaType::JavaScript
+ | MediaType::Unknown
+ | MediaType::Cjs
+ | MediaType::Mjs
+ | MediaType::Json => file.source.as_ref().clone(),
+ MediaType::Dts | MediaType::Dmts | MediaType::Dcts => "".to_string(),
+ MediaType::TypeScript
+ | MediaType::Jsx
+ | MediaType::Mts
+ | MediaType::Cts
+ | MediaType::Tsx => {
+ let emit_path = ps
+ .dir
+ .gen_cache
+ .get_cache_filename_with_extension(&file.specifier, "js")
+ .unwrap_or_else(|| {
+ unreachable!("Unable to get cache filename: {}", &file.specifier)
+ });
+ match ps.dir.gen_cache.get(&emit_path) {
+ Ok(b) => String::from_utf8(b).unwrap(),
+ Err(_) => {
+ return Err(anyhow!(
+ "Missing transpiled source code for: \"{}\".
+ Before generating coverage report, run `deno test --coverage` to ensure consistent state.",
+ file.specifier,
+ ))
+ }
+ }
+ }
+ MediaType::Wasm | MediaType::TsBuildInfo | MediaType::SourceMap => {
+ unreachable!()
+ }
+ };
+
+ let original_source = &file.source;
+ let maybe_source_map = ps.get_source_map(&script_coverage.url);
+
+ let coverage_report = generate_coverage_report(
+ &script_coverage,
+ &transpiled_source,
+ &maybe_source_map,
+ );
+
+ reporter.report(&coverage_report, original_source);
+ }
+
+ reporter.done();
+
+ Ok(())
+}
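
For reference, `LcovCoverageReporter::report` above prints one LCOV record per file in the order SF, FN, FNDA, FNF, FNH, BRDA, BRF, BRH, DA, LH, LF, end_of_record. A hypothetical record for a small file (paths, line numbers, and counts are made up) would look like:

    SF:/home/user/lib.js
    FN:1,lib
    FNDA:3,lib
    FNF:1
    FNH:1
    BRDA:2,0,0,3
    BRF:1
    BRH:1
    DA:1,3
    DA:2,3
    DA:3,0
    LH:2
    LF:3
    end_of_record
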
diff --git a/cli/tools/coverage/range_tree.rs b/cli/tools/coverage/range_tree.rs
new file mode 100644
index 000000000..24d0a9ffc
--- /dev/null
+++ b/cli/tools/coverage/range_tree.rs
@@ -0,0 +1,207 @@
+// Forked from https://github.com/demurgos/v8-coverage/tree/d0ca18da8740198681e0bc68971b0a6cdb11db3e/rust
+// Copyright 2021 Charles Samborski. All rights reserved. MIT license.
+// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
+
+use super::json_types::CoverageRange;
+use std::iter::Peekable;
+use typed_arena::Arena;
+
+pub struct RangeTreeArena<'a>(Arena<RangeTree<'a>>);
+
+impl<'a> RangeTreeArena<'a> {
+ #[cfg(test)]
+ pub fn new() -> Self {
+ RangeTreeArena(Arena::new())
+ }
+
+ pub fn with_capacity(n: usize) -> Self {
+ RangeTreeArena(Arena::with_capacity(n))
+ }
+
+ #[allow(clippy::mut_from_ref)]
+ pub fn alloc(&'a self, value: RangeTree<'a>) -> &'a mut RangeTree<'a> {
+ self.0.alloc(value)
+ }
+}
+
+#[derive(Eq, PartialEq, Debug)]
+pub struct RangeTree<'a> {
+ pub start: usize,
+ pub end: usize,
+ pub delta: i64,
+ pub children: Vec<&'a mut RangeTree<'a>>,
+}
+
+impl<'rt> RangeTree<'rt> {
+ pub fn new<'a>(
+ start: usize,
+ end: usize,
+ delta: i64,
+ children: Vec<&'a mut RangeTree<'a>>,
+ ) -> RangeTree<'a> {
+ RangeTree {
+ start,
+ end,
+ delta,
+ children,
+ }
+ }
+
+ pub fn split<'a>(
+ rta: &'a RangeTreeArena<'a>,
+ tree: &'a mut RangeTree<'a>,
+ value: usize,
+ ) -> (&'a mut RangeTree<'a>, &'a mut RangeTree<'a>) {
+ let mut left_children: Vec<&'a mut RangeTree<'a>> = Vec::new();
+ let mut right_children: Vec<&'a mut RangeTree<'a>> = Vec::new();
+ for child in tree.children.iter_mut() {
+ if child.end <= value {
+ left_children.push(child);
+ } else if value <= child.start {
+ right_children.push(child);
+ } else {
+ let (left_child, right_child) = Self::split(rta, child, value);
+ left_children.push(left_child);
+ right_children.push(right_child);
+ }
+ }
+
+ let left = RangeTree::new(tree.start, value, tree.delta, left_children);
+ let right = RangeTree::new(value, tree.end, tree.delta, right_children);
+ (rta.alloc(left), rta.alloc(right))
+ }
+
+ pub fn normalize<'a>(
+ rta: &'a RangeTreeArena<'a>,
+ tree: &'a mut RangeTree<'a>,
+ ) -> &'a mut RangeTree<'a> {
+ tree.children = {
+ let mut children: Vec<&'a mut RangeTree<'a>> = Vec::new();
+ let mut chain: Vec<&'a mut RangeTree<'a>> = Vec::new();
+ for child in tree.children.drain(..) {
+ let is_chain_end: bool =
+ match chain.last().map(|tree| (tree.delta, tree.end)) {
+ Some((delta, chain_end)) => {
+ (delta, chain_end) != (child.delta, child.start)
+ }
+ None => false,
+ };
+ if is_chain_end {
+ let mut chain_iter = chain.drain(..);
+ let mut head: &'a mut RangeTree<'a> = chain_iter.next().unwrap();
+ for tree in chain_iter {
+ head.end = tree.end;
+ for sub_child in tree.children.drain(..) {
+ sub_child.delta += tree.delta - head.delta;
+ head.children.push(sub_child);
+ }
+ }
+ children.push(RangeTree::normalize(rta, head));
+ }
+ chain.push(child)
+ }
+ if !chain.is_empty() {
+ let mut chain_iter = chain.drain(..);
+ let mut head: &'a mut RangeTree<'a> = chain_iter.next().unwrap();
+ for tree in chain_iter {
+ head.end = tree.end;
+ for sub_child in tree.children.drain(..) {
+ sub_child.delta += tree.delta - head.delta;
+ head.children.push(sub_child);
+ }
+ }
+ children.push(RangeTree::normalize(rta, head));
+ }
+
+ if children.len() == 1
+ && children[0].start == tree.start
+ && children[0].end == tree.end
+ {
+ let normalized = children.remove(0);
+ normalized.delta += tree.delta;
+ return normalized;
+ }
+
+ children
+ };
+
+ tree
+ }
+
+ pub fn to_ranges(&self) -> Vec<CoverageRange> {
+ let mut ranges: Vec<CoverageRange> = Vec::new();
+ let mut stack: Vec<(&RangeTree, i64)> = vec![(self, 0)];
+ while let Some((cur, parent_count)) = stack.pop() {
+ let count: i64 = parent_count + cur.delta;
+ ranges.push(CoverageRange {
+ start_offset: cur.start,
+ end_offset: cur.end,
+ count,
+ });
+ for child in cur.children.iter().rev() {
+ stack.push((child, count))
+ }
+ }
+ ranges
+ }
+
+ pub fn from_sorted_ranges<'a>(
+ rta: &'a RangeTreeArena<'a>,
+ ranges: &[CoverageRange],
+ ) -> Option<&'a mut RangeTree<'a>> {
+ Self::from_sorted_ranges_inner(
+ rta,
+ &mut ranges.iter().peekable(),
+ ::std::usize::MAX,
+ 0,
+ )
+ }
+
+ fn from_sorted_ranges_inner<'a, 'b, 'c: 'b>(
+ rta: &'a RangeTreeArena<'a>,
+ ranges: &'b mut Peekable<impl Iterator<Item = &'c CoverageRange>>,
+ parent_end: usize,
+ parent_count: i64,
+ ) -> Option<&'a mut RangeTree<'a>> {
+ let has_range: bool = match ranges.peek() {
+ None => false,
+ Some(range) => range.start_offset < parent_end,
+ };
+ if !has_range {
+ return None;
+ }
+ let range = ranges.next().unwrap();
+ let start: usize = range.start_offset;
+ let end: usize = range.end_offset;
+ let count: i64 = range.count;
+ let delta: i64 = count - parent_count;
+ let mut children: Vec<&mut RangeTree> = Vec::new();
+ while let Some(child) =
+ Self::from_sorted_ranges_inner(rta, ranges, end, count)
+ {
+ children.push(child);
+ }
+ Some(rta.alloc(RangeTree::new(start, end, delta, children)))
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn from_sorted_ranges_empty() {
+ let rta = RangeTreeArena::new();
+ let inputs: Vec<CoverageRange> = vec![CoverageRange {
+ start_offset: 0,
+ end_offset: 9,
+ count: 1,
+ }];
+ let actual: Option<&mut RangeTree> =
+ RangeTree::from_sorted_ranges(&rta, &inputs);
+ let expected: Option<&mut RangeTree> =
+ Some(rta.alloc(RangeTree::new(0, 9, 1, Vec::new())));
+
+ assert_eq!(actual, expected);
+ }
+}
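
The `delta` stored in each node is relative to the parent's count: `from_sorted_ranges` subtracts the parent count on the way down, and `to_ranges` adds it back during the pre-order walk. A second test-style sketch in the same vein as the existing test (assuming it lives in this module; the values are invented), showing that nested sorted ranges round-trip unchanged:

    #[test]
    fn from_sorted_ranges_round_trips_nested_ranges() {
      let rta = RangeTreeArena::new();
      let ranges = vec![
        CoverageRange {
          start_offset: 0,
          end_offset: 9,
          count: 2,
        },
        CoverageRange {
          start_offset: 3,
          end_offset: 6,
          count: 5,
        },
      ];
      // The nested range is stored as delta = 5 - 2 = 3 and reconstructed
      // as count = 2 + 3 = 5 by to_ranges().
      let tree = RangeTree::from_sorted_ranges(&rta, &ranges).unwrap();
      assert_eq!(tree.to_ranges(), ranges);
    }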