Diffstat (limited to 'cli/tools')
-rw-r--r--  cli/tools/test/fmt.rs               74
-rw-r--r--  cli/tools/test/mod.rs              103
-rw-r--r--  cli/tools/test/reporters/common.rs 210
-rw-r--r--  cli/tools/test/reporters/dot.rs    207
-rw-r--r--  cli/tools/test/reporters/mod.rs      3
-rw-r--r--  cli/tools/test/reporters/pretty.rs 229
6 files changed, 533 insertions(+), 293 deletions(-)
diff --git a/cli/tools/test/fmt.rs b/cli/tools/test/fmt.rs
new file mode 100644
index 000000000..d7b357a4b
--- /dev/null
+++ b/cli/tools/test/fmt.rs
@@ -0,0 +1,74 @@
+// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+
+use super::*;
+
+pub fn to_relative_path_or_remote_url(cwd: &Url, path_or_url: &str) -> String {
+ let url = Url::parse(path_or_url).unwrap();
+ if url.scheme() == "file" {
+ if let Some(mut r) = cwd.make_relative(&url) {
+ if !r.starts_with("../") {
+ r = format!("./{r}");
+ }
+ return r;
+ }
+ }
+ path_or_url.to_string()
+}
+
+fn abbreviate_test_error(js_error: &JsError) -> JsError {
+ let mut js_error = js_error.clone();
+ let frames = std::mem::take(&mut js_error.frames);
+
+ // check if there are any stack frames coming from user code
+ let should_filter = frames.iter().any(|f| {
+ if let Some(file_name) = &f.file_name {
+ !(file_name.starts_with("[ext:") || file_name.starts_with("ext:"))
+ } else {
+ true
+ }
+ });
+
+ if should_filter {
+ let mut frames = frames
+ .into_iter()
+ .rev()
+ .skip_while(|f| {
+ if let Some(file_name) = &f.file_name {
+ file_name.starts_with("[ext:") || file_name.starts_with("ext:")
+ } else {
+ false
+ }
+ })
+ .collect::<Vec<_>>();
+ frames.reverse();
+ js_error.frames = frames;
+ } else {
+ js_error.frames = frames;
+ }
+
+ js_error.cause = js_error
+ .cause
+ .as_ref()
+ .map(|e| Box::new(abbreviate_test_error(e)));
+ js_error.aggregated = js_error
+ .aggregated
+ .as_ref()
+ .map(|es| es.iter().map(abbreviate_test_error).collect());
+ js_error
+}
+
+// This function prettifies `JsError` and applies some changes specifically for
+// test runner purposes:
+//
+// - filter out stack frames:
+// - if the stack trace consists of mixed user and internal code, the frames
+// below the first user code frame are filtered out
+// - if the stack trace consists only of internal code, it is preserved as is
+pub fn format_test_error(js_error: &JsError) -> String {
+ let mut js_error = abbreviate_test_error(js_error);
+ js_error.exception_message = js_error
+ .exception_message
+ .trim_start_matches("Uncaught ")
+ .to_string();
+ format_js_error(&js_error)
+}
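
The path-shortening helper in the new fmt.rs is easy to exercise on its own. Below is a minimal standalone sketch, assuming only the `url` crate (2.2+, which provides `Url::make_relative`); the paths in `main` are hypothetical and only illustrate the `./` prefix for files under the cwd and the pass-through for remote specifiers.

```rust
// Standalone sketch of to_relative_path_or_remote_url from fmt.rs above.
// Requires the `url` crate; the example paths are made up.
use url::Url;

fn to_relative_path_or_remote_url(cwd: &Url, path_or_url: &str) -> String {
  let url = Url::parse(path_or_url).unwrap();
  if url.scheme() == "file" {
    if let Some(mut r) = cwd.make_relative(&url) {
      if !r.starts_with("../") {
        r = format!("./{r}");
      }
      return r;
    }
  }
  path_or_url.to_string()
}

fn main() {
  let cwd = Url::from_directory_path("/home/user/project").unwrap();
  // A file under the cwd is reported as a ./-prefixed relative path.
  assert_eq!(
    to_relative_path_or_remote_url(&cwd, "file:///home/user/project/test.ts"),
    "./test.ts"
  );
  // Remote specifiers are returned unchanged.
  assert_eq!(
    to_relative_path_or_remote_url(&cwd, "https://deno.land/std/testing/mod.ts"),
    "https://deno.land/std/testing/mod.ts"
  );
}
```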
diff --git a/cli/tools/test/mod.rs b/cli/tools/test/mod.rs
index a2abc2a32..c3fb6f772 100644
--- a/cli/tools/test/mod.rs
+++ b/cli/tools/test/mod.rs
@@ -4,6 +4,7 @@ use crate::args::CliOptions;
use crate::args::FilesConfig;
use crate::args::Flags;
use crate::args::TestFlags;
+use crate::args::TestReporterConfig;
use crate::colors;
use crate::display;
use crate::factory::CliFactory;
@@ -80,8 +81,12 @@ use tokio::sync::mpsc::unbounded_channel;
use tokio::sync::mpsc::UnboundedSender;
use tokio::sync::mpsc::WeakUnboundedSender;
+pub mod fmt;
mod reporters;
+
+pub use fmt::format_test_error;
use reporters::CompoundTestReporter;
+use reporters::DotTestReporter;
use reporters::JunitTestReporter;
use reporters::PrettyTestReporter;
use reporters::TestReporter;
@@ -355,7 +360,7 @@ struct TestSpecifiersOptions {
fail_fast: Option<NonZeroUsize>,
log_level: Option<log::Level>,
specifier: TestSpecifierOptions,
- junit_path: Option<String>,
+ reporter: TestReporterConfig,
}
#[derive(Debug, Clone)]
@@ -388,79 +393,29 @@ impl TestSummary {
}
fn get_test_reporter(options: &TestSpecifiersOptions) -> Box<dyn TestReporter> {
- let pretty = Box::new(PrettyTestReporter::new(
- options.concurrent_jobs.get() > 1,
- options.log_level != Some(Level::Error),
- ));
- if let Some(junit_path) = &options.junit_path {
- let junit = Box::new(JunitTestReporter::new(junit_path.clone()));
- // If junit is writing to stdout, only enable the junit reporter
- if junit_path == "-" {
- junit
- } else {
- Box::new(CompoundTestReporter::new(vec![pretty, junit]))
- }
- } else {
- pretty
- }
-}
-
-fn abbreviate_test_error(js_error: &JsError) -> JsError {
- let mut js_error = js_error.clone();
- let frames = std::mem::take(&mut js_error.frames);
-
- // check if there are any stack frames coming from user code
- let should_filter = frames.iter().any(|f| {
- if let Some(file_name) = &f.file_name {
- !(file_name.starts_with("[ext:") || file_name.starts_with("ext:"))
- } else {
- true
+ let parallel = options.concurrent_jobs.get() > 1;
+ match &options.reporter {
+ TestReporterConfig::Dot => Box::new(DotTestReporter::new()),
+ TestReporterConfig::Pretty => Box::new(PrettyTestReporter::new(
+ parallel,
+ options.log_level != Some(Level::Error),
+ )),
+ TestReporterConfig::Junit(path) => {
+ let junit = Box::new(JunitTestReporter::new(path.clone()));
+ // If junit is writing to stdout, only enable the junit reporter
+ if path == "-" {
+ junit
+ } else {
+ Box::new(CompoundTestReporter::new(vec![
+ Box::new(PrettyTestReporter::new(
+ parallel,
+ options.log_level != Some(Level::Error),
+ )),
+ junit,
+ ]))
+ }
}
- });
-
- if should_filter {
- let mut frames = frames
- .into_iter()
- .rev()
- .skip_while(|f| {
- if let Some(file_name) = &f.file_name {
- file_name.starts_with("[ext:") || file_name.starts_with("ext:")
- } else {
- false
- }
- })
- .collect::<Vec<_>>();
- frames.reverse();
- js_error.frames = frames;
- } else {
- js_error.frames = frames;
}
-
- js_error.cause = js_error
- .cause
- .as_ref()
- .map(|e| Box::new(abbreviate_test_error(e)));
- js_error.aggregated = js_error
- .aggregated
- .as_ref()
- .map(|es| es.iter().map(abbreviate_test_error).collect());
- js_error
-}
-
-// This function prettifies `JsError` and applies some changes specifically for
-// test runner purposes:
-//
-// - filter out stack frames:
-// - if stack trace consists of mixed user and internal code, the frames
-// below the first user code frame are filtered out
-// - if stack trace consists only of internal code it is preserved as is
-pub fn format_test_error(js_error: &JsError) -> String {
- let mut js_error = abbreviate_test_error(js_error);
- js_error.exception_message = js_error
- .exception_message
- .trim_start_matches("Uncaught ")
- .to_string();
- format_js_error(&js_error)
}
/// Test a single specifier as documentation containing test programs, an executable test module or
@@ -1206,7 +1161,7 @@ pub async fn run_tests(
concurrent_jobs: test_options.concurrent_jobs,
fail_fast: test_options.fail_fast,
log_level,
- junit_path: test_options.junit_path,
+ reporter: test_options.reporter,
specifier: TestSpecifierOptions {
filter: TestFilter::from_flag(&test_options.filter),
shuffle: test_options.shuffle,
@@ -1337,7 +1292,7 @@ pub async fn run_tests_with_watch(
concurrent_jobs: test_options.concurrent_jobs,
fail_fast: test_options.fail_fast,
log_level,
- junit_path: test_options.junit_path,
+ reporter: test_options.reporter,
specifier: TestSpecifierOptions {
filter: TestFilter::from_flag(&test_options.filter),
shuffle: test_options.shuffle,
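
The core of the mod.rs change is that `get_test_reporter` now matches on `TestReporterConfig` instead of checking only for a JUnit path: `Dot` selects the new `DotTestReporter`, `Pretty` keeps the previous behaviour, and `Junit` either runs alone (when writing to stdout via `-`) or is paired with a pretty reporter through `CompoundTestReporter`. A minimal sketch of that enum-to-trait-object dispatch is below; all the types in it are stand-ins, not the real Deno types, and the real config presumably arrives from a reporter option parsed in `cli/args` (the `TestReporterConfig` import).

```rust
// Sketch of the enum -> Box<dyn Trait> dispatch used by get_test_reporter.
// Reporter, PrettyReporter, DotReporter and ReporterConfig are stand-ins.
trait Reporter {
  fn report_result(&mut self, name: &str, ok: bool);
}

struct PrettyReporter;
impl Reporter for PrettyReporter {
  fn report_result(&mut self, name: &str, ok: bool) {
    println!("{name} ... {}", if ok { "ok" } else { "FAILED" });
  }
}

struct DotReporter;
impl Reporter for DotReporter {
  fn report_result(&mut self, _name: &str, ok: bool) {
    // One character per test, mirroring the dot reporter's output style.
    print!("{}", if ok { "." } else { "!" });
  }
}

enum ReporterConfig {
  Pretty,
  Dot,
}

fn get_reporter(config: &ReporterConfig) -> Box<dyn Reporter> {
  match config {
    ReporterConfig::Pretty => Box::new(PrettyReporter),
    ReporterConfig::Dot => Box::new(DotReporter),
  }
}

fn main() {
  let mut reporter = get_reporter(&ReporterConfig::Dot);
  reporter.report_result("parses config", true);
  reporter.report_result("rejects bad input", false);
  println!();
}
```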
diff --git a/cli/tools/test/reporters/common.rs b/cli/tools/test/reporters/common.rs
new file mode 100644
index 000000000..ce1aad602
--- /dev/null
+++ b/cli/tools/test/reporters/common.rs
@@ -0,0 +1,210 @@
+// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+
+use super::fmt::format_test_error;
+use super::fmt::to_relative_path_or_remote_url;
+use super::*;
+
+pub(super) fn format_test_step_ancestry(
+ desc: &TestStepDescription,
+ tests: &IndexMap<usize, TestDescription>,
+ test_steps: &IndexMap<usize, TestStepDescription>,
+) -> String {
+ let root;
+ let mut ancestor_names = vec![];
+ let mut current_desc = desc;
+ loop {
+ if let Some(step_desc) = test_steps.get(&current_desc.parent_id) {
+ ancestor_names.push(&step_desc.name);
+ current_desc = step_desc;
+ } else {
+ root = tests.get(&current_desc.parent_id).unwrap();
+ break;
+ }
+ }
+ ancestor_names.reverse();
+ let mut result = String::new();
+ result.push_str(&root.name);
+ result.push_str(" ... ");
+ for name in ancestor_names {
+ result.push_str(name);
+ result.push_str(" ... ");
+ }
+ result.push_str(&desc.name);
+ result
+}
+
+pub fn format_test_for_summary(cwd: &Url, desc: &TestDescription) -> String {
+ format!(
+ "{} {}",
+ &desc.name,
+ colors::gray(format!(
+ "=> {}:{}:{}",
+ to_relative_path_or_remote_url(cwd, &desc.location.file_name),
+ desc.location.line_number,
+ desc.location.column_number
+ ))
+ )
+}
+
+pub fn format_test_step_for_summary(
+ cwd: &Url,
+ desc: &TestStepDescription,
+ tests: &IndexMap<usize, TestDescription>,
+ test_steps: &IndexMap<usize, TestStepDescription>,
+) -> String {
+ let long_name = format_test_step_ancestry(desc, tests, test_steps);
+ format!(
+ "{} {}",
+ long_name,
+ colors::gray(format!(
+ "=> {}:{}:{}",
+ to_relative_path_or_remote_url(cwd, &desc.location.file_name),
+ desc.location.line_number,
+ desc.location.column_number
+ ))
+ )
+}
+
+pub(super) fn report_sigint(
+ cwd: &Url,
+ tests_pending: &HashSet<usize>,
+ tests: &IndexMap<usize, TestDescription>,
+ test_steps: &IndexMap<usize, TestStepDescription>,
+) {
+ if tests_pending.is_empty() {
+ return;
+ }
+ let mut formatted_pending = BTreeSet::new();
+ for id in tests_pending {
+ if let Some(desc) = tests.get(id) {
+ formatted_pending.insert(format_test_for_summary(cwd, desc));
+ }
+ if let Some(desc) = test_steps.get(id) {
+ formatted_pending
+ .insert(format_test_step_for_summary(cwd, desc, tests, test_steps));
+ }
+ }
+ println!(
+ "\n{} The following tests were pending:\n",
+ colors::intense_blue("SIGINT")
+ );
+ for entry in formatted_pending {
+ println!("{}", entry);
+ }
+ println!();
+}
+
+pub(super) fn report_summary(
+ cwd: &Url,
+ summary: &TestSummary,
+ elapsed: &Duration,
+) {
+ if !summary.failures.is_empty() || !summary.uncaught_errors.is_empty() {
+ #[allow(clippy::type_complexity)] // Type alias doesn't look better here
+ let mut failures_by_origin: BTreeMap<
+ String,
+ (Vec<(&TestDescription, &TestFailure)>, Option<&JsError>),
+ > = BTreeMap::default();
+ let mut failure_titles = vec![];
+ for (description, failure) in &summary.failures {
+ let (failures, _) = failures_by_origin
+ .entry(description.origin.clone())
+ .or_default();
+ failures.push((description, failure));
+ }
+
+ for (origin, js_error) in &summary.uncaught_errors {
+ let (_, uncaught_error) =
+ failures_by_origin.entry(origin.clone()).or_default();
+ let _ = uncaught_error.insert(js_error.as_ref());
+ }
+
+ // note: the trailing whitespace is intentional to get a red background
+ println!("\n{}\n", colors::white_bold_on_red(" ERRORS "));
+ for (origin, (failures, uncaught_error)) in failures_by_origin {
+ for (description, failure) in failures {
+ if !failure.hide_in_summary() {
+ let failure_title = format_test_for_summary(cwd, description);
+ println!("{}", &failure_title);
+ println!("{}: {}", colors::red_bold("error"), failure.to_string());
+ println!();
+ failure_titles.push(failure_title);
+ }
+ }
+ if let Some(js_error) = uncaught_error {
+ let failure_title = format!(
+ "{} (uncaught error)",
+ to_relative_path_or_remote_url(cwd, &origin)
+ );
+ println!("{}", &failure_title);
+ println!(
+ "{}: {}",
+ colors::red_bold("error"),
+ format_test_error(js_error)
+ );
+ println!("This error was not caught from a test and caused the test runner to fail on the referenced module.");
+ println!("It most likely originated from a dangling promise, event/timeout handler or top-level code.");
+ println!();
+ failure_titles.push(failure_title);
+ }
+ }
+ // note: the trailing whitespace is intentional to get a red background
+ println!("{}\n", colors::white_bold_on_red(" FAILURES "));
+ for failure_title in failure_titles {
+ println!("{failure_title}");
+ }
+ }
+
+ let status = if summary.has_failed() {
+ colors::red("FAILED").to_string()
+ } else {
+ colors::green("ok").to_string()
+ };
+
+ let get_steps_text = |count: usize| -> String {
+ if count == 0 {
+ String::new()
+ } else if count == 1 {
+ " (1 step)".to_string()
+ } else {
+ format!(" ({count} steps)")
+ }
+ };
+
+ let mut summary_result = String::new();
+
+ write!(
+ summary_result,
+ "{} passed{} | {} failed{}",
+ summary.passed,
+ get_steps_text(summary.passed_steps),
+ summary.failed,
+ get_steps_text(summary.failed_steps),
+ )
+ .unwrap();
+
+ let ignored_steps = get_steps_text(summary.ignored_steps);
+ if summary.ignored > 0 || !ignored_steps.is_empty() {
+ write!(
+ summary_result,
+ " | {} ignored{}",
+ summary.ignored, ignored_steps
+ )
+ .unwrap()
+ }
+
+ if summary.measured > 0 {
+ write!(summary_result, " | {} measured", summary.measured,).unwrap();
+ }
+
+ if summary.filtered_out > 0 {
+ write!(summary_result, " | {} filtered out", summary.filtered_out).unwrap()
+ };
+
+ println!(
+ "\n{} | {} {}\n",
+ status,
+ summary_result,
+ colors::gray(format!("({})", display::human_elapsed(elapsed.as_millis()))),
+ );
+}
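
The shared `report_summary` above builds its one-line result by appending optional segments to a string. A simplified sketch of that assembly, with a plain struct standing in for `TestSummary` and without the colors, the `measured` count, or the step-aware ignored check:

```rust
// Sketch of the summary-line assembly in report_summary.
use std::fmt::Write;

struct Summary {
  passed: usize,
  passed_steps: usize,
  failed: usize,
  failed_steps: usize,
  ignored: usize,
  filtered_out: usize,
}

fn steps_text(count: usize) -> String {
  match count {
    0 => String::new(),
    1 => " (1 step)".to_string(),
    n => format!(" ({n} steps)"),
  }
}

fn format_summary_line(s: &Summary) -> String {
  let mut out = String::new();
  write!(
    out,
    "{} passed{} | {} failed{}",
    s.passed,
    steps_text(s.passed_steps),
    s.failed,
    steps_text(s.failed_steps)
  )
  .unwrap();
  if s.ignored > 0 {
    write!(out, " | {} ignored", s.ignored).unwrap();
  }
  if s.filtered_out > 0 {
    write!(out, " | {} filtered out", s.filtered_out).unwrap();
  }
  out
}

fn main() {
  let summary = Summary {
    passed: 12,
    passed_steps: 3,
    failed: 1,
    failed_steps: 0,
    ignored: 2,
    filtered_out: 0,
  };
  // Prints: 12 passed (3 steps) | 1 failed | 2 ignored
  println!("{}", format_summary_line(&summary));
}
```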
diff --git a/cli/tools/test/reporters/dot.rs b/cli/tools/test/reporters/dot.rs
new file mode 100644
index 000000000..4aa3fd89e
--- /dev/null
+++ b/cli/tools/test/reporters/dot.rs
@@ -0,0 +1,207 @@
+// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+
+use super::common;
+use super::fmt::to_relative_path_or_remote_url;
+use super::*;
+
+pub struct DotTestReporter {
+ n: usize,
+ width: usize,
+ cwd: Url,
+ summary: TestSummary,
+}
+
+impl DotTestReporter {
+ pub fn new() -> DotTestReporter {
+ let console_width = if let Some(size) = crate::util::console::console_size()
+ {
+ size.cols as usize
+ } else {
+ 0
+ };
+ let console_width = (console_width as f32 * 0.8) as usize;
+ DotTestReporter {
+ n: 0,
+ width: console_width,
+ cwd: Url::from_directory_path(std::env::current_dir().unwrap()).unwrap(),
+ summary: TestSummary::new(),
+ }
+ }
+
+ fn print_status(&mut self, status: String) {
+ // Non-TTY console prints every result on a separate line.
+ if self.width == 0 {
+ println!("{}", status);
+ return;
+ }
+
+ if self.n != 0 && self.n % self.width == 0 {
+ println!();
+ }
+ self.n += 1;
+
+ print!("{}", status);
+ }
+
+ fn print_test_step_result(&mut self, result: &TestStepResult) {
+ let status = match result {
+ TestStepResult::Ok => fmt_ok(),
+ TestStepResult::Ignored => fmt_ignored(),
+ TestStepResult::Failed(_failure) => fmt_failed(),
+ };
+ self.print_status(status);
+ }
+
+ fn print_test_result(&mut self, result: &TestResult) {
+ let status = match result {
+ TestResult::Ok => fmt_ok(),
+ TestResult::Ignored => fmt_ignored(),
+ TestResult::Failed(_failure) => fmt_failed(),
+ TestResult::Cancelled => fmt_cancelled(),
+ };
+
+ self.print_status(status);
+ }
+}
+
+fn fmt_ok() -> String {
+ colors::gray(".").to_string()
+}
+
+fn fmt_ignored() -> String {
+ colors::cyan(",").to_string()
+}
+
+fn fmt_failed() -> String {
+ colors::red_bold("!").to_string()
+}
+
+fn fmt_cancelled() -> String {
+ colors::gray("!").to_string()
+}
+
+impl TestReporter for DotTestReporter {
+ fn report_register(&mut self, _description: &TestDescription) {}
+
+ fn report_plan(&mut self, plan: &TestPlan) {
+ self.summary.total += plan.total;
+ self.summary.filtered_out += plan.filtered_out;
+ }
+
+ fn report_wait(&mut self, _description: &TestDescription) {
+ // flush for faster feedback when line buffered
+ std::io::stdout().flush().unwrap();
+ }
+
+ fn report_output(&mut self, _output: &[u8]) {}
+
+ fn report_result(
+ &mut self,
+ description: &TestDescription,
+ result: &TestResult,
+ _elapsed: u64,
+ ) {
+ match &result {
+ TestResult::Ok => {
+ self.summary.passed += 1;
+ }
+ TestResult::Ignored => {
+ self.summary.ignored += 1;
+ }
+ TestResult::Failed(failure) => {
+ self.summary.failed += 1;
+ self
+ .summary
+ .failures
+ .push((description.clone(), failure.clone()));
+ }
+ TestResult::Cancelled => {
+ self.summary.failed += 1;
+ }
+ }
+
+ self.print_test_result(result);
+ }
+
+ fn report_uncaught_error(&mut self, origin: &str, error: Box<JsError>) {
+ self.summary.failed += 1;
+ self
+ .summary
+ .uncaught_errors
+ .push((origin.to_string(), error));
+
+ println!(
+ "Uncaught error from {} {}",
+ to_relative_path_or_remote_url(&self.cwd, origin),
+ colors::red("FAILED")
+ );
+ }
+
+ fn report_step_register(&mut self, _description: &TestStepDescription) {}
+
+ fn report_step_wait(&mut self, _description: &TestStepDescription) {
+ // flush for faster feedback when line buffered
+ std::io::stdout().flush().unwrap();
+ }
+
+ fn report_step_result(
+ &mut self,
+ desc: &TestStepDescription,
+ result: &TestStepResult,
+ _elapsed: u64,
+ tests: &IndexMap<usize, TestDescription>,
+ test_steps: &IndexMap<usize, TestStepDescription>,
+ ) {
+ match &result {
+ TestStepResult::Ok => {
+ self.summary.passed_steps += 1;
+ }
+ TestStepResult::Ignored => {
+ self.summary.ignored_steps += 1;
+ }
+ TestStepResult::Failed(failure) => {
+ self.summary.failed_steps += 1;
+ self.summary.failures.push((
+ TestDescription {
+ id: desc.id,
+ name: common::format_test_step_ancestry(desc, tests, test_steps),
+ ignore: false,
+ only: false,
+ origin: desc.origin.clone(),
+ location: desc.location.clone(),
+ },
+ failure.clone(),
+ ))
+ }
+ }
+
+ self.print_test_step_result(result);
+ }
+
+ fn report_summary(
+ &mut self,
+ elapsed: &Duration,
+ _tests: &IndexMap<usize, TestDescription>,
+ _test_steps: &IndexMap<usize, TestStepDescription>,
+ ) {
+ common::report_summary(&self.cwd, &self.summary, elapsed);
+ }
+
+ fn report_sigint(
+ &mut self,
+ tests_pending: &HashSet<usize>,
+ tests: &IndexMap<usize, TestDescription>,
+ test_steps: &IndexMap<usize, TestStepDescription>,
+ ) {
+ common::report_sigint(&self.cwd, tests_pending, tests, test_steps);
+ }
+
+ fn flush_report(
+ &mut self,
+ _elapsed: &Duration,
+ _tests: &IndexMap<usize, TestDescription>,
+ _test_steps: &IndexMap<usize, TestStepDescription>,
+ ) -> anyhow::Result<()> {
+ Ok(())
+ }
+}
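
`DotTestReporter` prints one character per result (`.` pass, `,` ignored, `!` failed or cancelled) and wraps the row once it reaches roughly 80% of the console width reported by `crate::util::console::console_size()`, falling back to one status per line when no TTY is detected. A standalone sketch of that wrapping logic, with the width hard-coded instead of queried:

```rust
// Sketch of the dot reporter's line-wrapping logic; the real reporter
// derives `width` from the detected console size (0 when not a TTY).
struct DotPrinter {
  n: usize,
  width: usize,
}

impl DotPrinter {
  fn print_status(&mut self, status: &str) {
    // Non-TTY console: print every result on its own line.
    if self.width == 0 {
      println!("{status}");
      return;
    }
    // Start a new row once the current one is full.
    if self.n != 0 && self.n % self.width == 0 {
      println!();
    }
    self.n += 1;
    print!("{status}");
  }
}

fn main() {
  let mut printer = DotPrinter { n: 0, width: 10 };
  for i in 0..25 {
    // '.' = passed, '!' = failed, ',' = ignored in the real reporter.
    printer.print_status(if i % 7 == 0 { "!" } else { "." });
  }
  println!();
}
```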
diff --git a/cli/tools/test/reporters/mod.rs b/cli/tools/test/reporters/mod.rs
index a3270ad3e..a95c729dd 100644
--- a/cli/tools/test/reporters/mod.rs
+++ b/cli/tools/test/reporters/mod.rs
@@ -2,11 +2,14 @@
use super::*;
+mod common;
mod compound;
+mod dot;
mod junit;
mod pretty;
pub use compound::CompoundTestReporter;
+pub use dot::DotTestReporter;
pub use junit::JunitTestReporter;
pub use pretty::PrettyTestReporter;
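
The reporters module now exposes `DotTestReporter` alongside the existing compound, JUnit and pretty reporters. The compound reporter's job is to fan each event out to a list of boxed reporters, which is how mod.rs combines pretty and JUnit output; a minimal sketch of that pattern with stand-in types:

```rust
// Sketch of the fan-out behind CompoundTestReporter. The single-method
// trait is a stand-in; the real TestReporter trait has many more hooks.
trait Reporter {
  fn report_result(&mut self, name: &str, ok: bool);
}

struct LineReporter;
impl Reporter for LineReporter {
  fn report_result(&mut self, name: &str, ok: bool) {
    println!("{name}: {}", if ok { "ok" } else { "FAILED" });
  }
}

struct FailuresOnlyReporter;
impl Reporter for FailuresOnlyReporter {
  fn report_result(&mut self, name: &str, ok: bool) {
    if !ok {
      eprintln!("failure recorded: {name}");
    }
  }
}

struct CompoundReporter {
  inner: Vec<Box<dyn Reporter>>,
}

impl Reporter for CompoundReporter {
  fn report_result(&mut self, name: &str, ok: bool) {
    // Forward every event to each wrapped reporter.
    for reporter in &mut self.inner {
      reporter.report_result(name, ok);
    }
  }
}

fn main() {
  let reporters: Vec<Box<dyn Reporter>> =
    vec![Box::new(LineReporter), Box::new(FailuresOnlyReporter)];
  let mut compound = CompoundReporter { inner: reporters };
  // Both wrapped reporters observe every result.
  compound.report_result("step one", true);
  compound.report_result("step two", false);
}
```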
diff --git a/cli/tools/test/reporters/pretty.rs b/cli/tools/test/reporters/pretty.rs
index e184d870c..394a7c490 100644
--- a/cli/tools/test/reporters/pretty.rs
+++ b/cli/tools/test/reporters/pretty.rs
@@ -1,5 +1,7 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+use super::common;
+use super::fmt::to_relative_path_or_remote_url;
use super::*;
pub struct PrettyTestReporter {
@@ -39,7 +41,7 @@ impl PrettyTestReporter {
"{}",
colors::gray(format!(
"{} => ",
- self.to_relative_path_or_remote_url(&description.origin)
+ to_relative_path_or_remote_url(&self.cwd, &description.origin)
))
);
}
@@ -50,19 +52,6 @@ impl PrettyTestReporter {
self.scope_test_id = Some(description.id);
}
- fn to_relative_path_or_remote_url(&self, path_or_url: &str) -> String {
- let url = Url::parse(path_or_url).unwrap();
- if url.scheme() == "file" {
- if let Some(mut r) = self.cwd.make_relative(&url) {
- if !r.starts_with("../") {
- r = format!("./{r}");
- }
- return r;
- }
- }
- path_or_url.to_string()
- }
-
fn force_report_step_wait(&mut self, description: &TestStepDescription) {
self.write_output_end();
if !self.in_new_line {
@@ -137,68 +126,6 @@ impl PrettyTestReporter {
self.did_have_user_output = false;
}
}
-
- fn format_test_step_ancestry(
- &self,
- desc: &TestStepDescription,
- tests: &IndexMap<usize, TestDescription>,
- test_steps: &IndexMap<usize, TestStepDescription>,
- ) -> String {
- let root;
- let mut ancestor_names = vec![];
- let mut current_desc = desc;
- loop {
- if let Some(step_desc) = test_steps.get(&current_desc.parent_id) {
- ancestor_names.push(&step_desc.name);
- current_desc = step_desc;
- } else {
- root = tests.get(&current_desc.parent_id).unwrap();
- break;
- }
- }
- ancestor_names.reverse();
- let mut result = String::new();
- result.push_str(&root.name);
- result.push_str(" ... ");
- for name in ancestor_names {
- result.push_str(name);
- result.push_str(" ... ");
- }
- result.push_str(&desc.name);
- result
- }
-
- fn format_test_for_summary(&self, desc: &TestDescription) -> String {
- format!(
- "{} {}",
- &desc.name,
- colors::gray(format!(
- "=> {}:{}:{}",
- self.to_relative_path_or_remote_url(&desc.location.file_name),
- desc.location.line_number,
- desc.location.column_number
- ))
- )
- }
-
- fn format_test_step_for_summary(
- &self,
- desc: &TestStepDescription,
- tests: &IndexMap<usize, TestDescription>,
- test_steps: &IndexMap<usize, TestStepDescription>,
- ) -> String {
- let long_name = self.format_test_step_ancestry(desc, tests, test_steps);
- format!(
- "{} {}",
- long_name,
- colors::gray(format!(
- "=> {}:{}:{}",
- self.to_relative_path_or_remote_url(&desc.location.file_name),
- desc.location.line_number,
- desc.location.column_number
- ))
- )
- }
}
impl TestReporter for PrettyTestReporter {
@@ -216,7 +143,7 @@ impl TestReporter for PrettyTestReporter {
"running {} {} from {}",
plan.total,
inflection,
- self.to_relative_path_or_remote_url(&plan.origin)
+ to_relative_path_or_remote_url(&self.cwd, &plan.origin)
))
);
self.in_new_line = true;
@@ -314,7 +241,7 @@ impl TestReporter for PrettyTestReporter {
}
println!(
"Uncaught error from {} {}",
- self.to_relative_path_or_remote_url(origin),
+ to_relative_path_or_remote_url(&self.cwd, origin),
colors::red("FAILED")
);
self.in_new_line = true;
@@ -349,7 +276,7 @@ impl TestReporter for PrettyTestReporter {
self.summary.failures.push((
TestDescription {
id: desc.id,
- name: self.format_test_step_ancestry(desc, tests, test_steps),
+ name: common::format_test_step_ancestry(desc, tests, test_steps),
ignore: false,
only: false,
origin: desc.origin.clone(),
@@ -366,9 +293,9 @@ impl TestReporter for PrettyTestReporter {
"{} {} ...",
colors::gray(format!(
"{} =>",
- self.to_relative_path_or_remote_url(&desc.origin)
+ to_relative_path_or_remote_url(&self.cwd, &desc.origin)
)),
- self.format_test_step_ancestry(desc, tests, test_steps)
+ common::format_test_step_ancestry(desc, tests, test_steps)
);
self.in_new_line = false;
self.scope_test_id = Some(desc.id);
@@ -398,123 +325,7 @@ impl TestReporter for PrettyTestReporter {
_tests: &IndexMap<usize, TestDescription>,
_test_steps: &IndexMap<usize, TestStepDescription>,
) {
- if !self.summary.failures.is_empty()
- || !self.summary.uncaught_errors.is_empty()
- {
- #[allow(clippy::type_complexity)] // Type alias doesn't look better here
- let mut failures_by_origin: BTreeMap<
- String,
- (Vec<(&TestDescription, &TestFailure)>, Option<&JsError>),
- > = BTreeMap::default();
- let mut failure_titles = vec![];
- for (description, failure) in &self.summary.failures {
- let (failures, _) = failures_by_origin
- .entry(description.origin.clone())
- .or_default();
- failures.push((description, failure));
- }
-
- for (origin, js_error) in &self.summary.uncaught_errors {
- let (_, uncaught_error) =
- failures_by_origin.entry(origin.clone()).or_default();
- let _ = uncaught_error.insert(js_error.as_ref());
- }
- // note: the trailing whitespace is intentional to get a red background
- println!("\n{}\n", colors::white_bold_on_red(" ERRORS "));
- for (origin, (failures, uncaught_error)) in failures_by_origin {
- for (description, failure) in failures {
- if !failure.hide_in_summary() {
- let failure_title = self.format_test_for_summary(description);
- println!("{}", &failure_title);
- println!("{}: {}", colors::red_bold("error"), failure.to_string());
- println!();
- failure_titles.push(failure_title);
- }
- }
- if let Some(js_error) = uncaught_error {
- let failure_title = format!(
- "{} (uncaught error)",
- self.to_relative_path_or_remote_url(&origin)
- );
- println!("{}", &failure_title);
- println!(
- "{}: {}",
- colors::red_bold("error"),
- format_test_error(js_error)
- );
- println!("This error was not caught from a test and caused the test runner to fail on the referenced module.");
- println!("It most likely originated from a dangling promise, event/timeout handler or top-level code.");
- println!();
- failure_titles.push(failure_title);
- }
- }
- // note: the trailing whitespace is intentional to get a red background
- println!("{}\n", colors::white_bold_on_red(" FAILURES "));
- for failure_title in failure_titles {
- println!("{failure_title}");
- }
- }
-
- let status = if self.summary.has_failed() {
- colors::red("FAILED").to_string()
- } else {
- colors::green("ok").to_string()
- };
-
- let get_steps_text = |count: usize| -> String {
- if count == 0 {
- String::new()
- } else if count == 1 {
- " (1 step)".to_string()
- } else {
- format!(" ({count} steps)")
- }
- };
-
- let mut summary_result = String::new();
-
- write!(
- summary_result,
- "{} passed{} | {} failed{}",
- self.summary.passed,
- get_steps_text(self.summary.passed_steps),
- self.summary.failed,
- get_steps_text(self.summary.failed_steps),
- )
- .unwrap();
-
- let ignored_steps = get_steps_text(self.summary.ignored_steps);
- if self.summary.ignored > 0 || !ignored_steps.is_empty() {
- write!(
- summary_result,
- " | {} ignored{}",
- self.summary.ignored, ignored_steps
- )
- .unwrap()
- }
-
- if self.summary.measured > 0 {
- write!(summary_result, " | {} measured", self.summary.measured,).unwrap();
- }
-
- if self.summary.filtered_out > 0 {
- write!(
- summary_result,
- " | {} filtered out",
- self.summary.filtered_out
- )
- .unwrap()
- };
-
- println!(
- "\n{} | {} {}\n",
- status,
- summary_result,
- colors::gray(format!(
- "({})",
- display::human_elapsed(elapsed.as_millis())
- )),
- );
+ common::report_summary(&self.cwd, &self.summary, elapsed);
self.in_new_line = true;
}
@@ -524,27 +335,7 @@ impl TestReporter for PrettyTestReporter {
tests: &IndexMap<usize, TestDescription>,
test_steps: &IndexMap<usize, TestStepDescription>,
) {
- if tests_pending.is_empty() {
- return;
- }
- let mut formatted_pending = BTreeSet::new();
- for id in tests_pending {
- if let Some(desc) = tests.get(id) {
- formatted_pending.insert(self.format_test_for_summary(desc));
- }
- if let Some(desc) = test_steps.get(id) {
- formatted_pending
- .insert(self.format_test_step_for_summary(desc, tests, test_steps));
- }
- }
- println!(
- "\n{} The following tests were pending:\n",
- colors::intense_blue("SIGINT")
- );
- for entry in formatted_pending {
- println!("{}", entry);
- }
- println!();
+ common::report_sigint(&self.cwd, tests_pending, tests, test_steps);
self.in_new_line = true;
}