author     haturau <135221985+haturatu@users.noreply.github.com>  2024-10-13 10:44:12 +0900
committer  GitHub <noreply@github.com>                            2024-10-13 10:44:12 +0900
commit     cedfd657a6a2f04d841db2b3fc3d7694de95eada (patch)
tree       0d5d1c95cfd35bc46c230ea71f4dbf23aa0aaeea
parent     4da77059dfd5b8d7591aa8e3b9f04386fbdce221 (diff)
parent     64c304a45265705832ebb4ab4e9ef19f899ac911 (diff)
Merge branch 'denoland:main' into main
42 files changed, 593 insertions, 259 deletions
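Much of this merge converts the ext/* crates (broadcast_channel, cache, canvas, cron, tls) from returning deno_core::error::AnyError to dedicated thiserror-derived error enums, which runtime/errors.rs then maps to JavaScript error class names; the rest threads a new --allow-scripts flag through the runtime args and captures npm lifecycle-script output. The snippet below is a minimal, self-contained sketch of that error-enum pattern only (it needs the thiserror crate); DemoError, its variants, and get_demo_error_class are illustrative stand-ins, not code from the patch.

```rust
// Sketch of the thiserror pattern applied across ext/* in this merge.
// All names here are hypothetical, not part of the patch.
#[derive(Debug, thiserror::Error)]
pub enum DemoError {
  #[error("Name cannot exceed 64 characters: current length {0}")]
  NameExceeded(usize),
  #[error(transparent)]
  Io(#[from] std::io::Error),
}

fn validate_name(name: &str) -> Result<(), DemoError> {
  if name.len() > 64 {
    return Err(DemoError::NameExceeded(name.len()));
  }
  Ok(())
}

// Analogue of the per-crate mappers added in runtime/errors.rs: pick the
// JavaScript error class that a given error variant should surface as.
fn get_demo_error_class(e: &DemoError) -> &'static str {
  match e {
    DemoError::NameExceeded(_) => "TypeError",
    DemoError::Io(_) => "Error",
  }
}

fn main() {
  let err = validate_name(&"x".repeat(65)).unwrap_err();
  println!("{}: {}", get_demo_error_class(&err), err);
}
```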
diff --git a/Cargo.lock b/Cargo.lock index 0e6b4fb6a..c6297d0b5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1340,6 +1340,7 @@ version = "0.165.0" dependencies = [ "async-trait", "deno_core", + "thiserror", "tokio", "uuid", ] @@ -1353,6 +1354,7 @@ dependencies = [ "rusqlite", "serde", "sha2", + "thiserror", "tokio", ] @@ -1384,6 +1386,7 @@ dependencies = [ "deno_webgpu", "image", "serde", + "thiserror", ] [[package]] @@ -1464,6 +1467,7 @@ dependencies = [ "chrono", "deno_core", "saffron", + "thiserror", "tokio", ] @@ -2113,6 +2117,7 @@ dependencies = [ "rustls-tokio-stream", "rustls-webpki", "serde", + "thiserror", "tokio", "webpki-roots", ] @@ -7153,18 +7158,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.61" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" +checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.61" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" +checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" dependencies = [ "proc-macro2", "quote", diff --git a/cli/args/flags.rs b/cli/args/flags.rs index 258712ca9..d59e5ac1a 100644 --- a/cli/args/flags.rs +++ b/cli/args/flags.rs @@ -1342,7 +1342,7 @@ pub fn flags_from_vec(args: Vec<OsString>) -> clap::error::Result<Flags> { } match subcommand.as_str() { - "add" => add_parse(&mut flags, &mut m), + "add" => add_parse(&mut flags, &mut m)?, "remove" => remove_parse(&mut flags, &mut m), "bench" => bench_parse(&mut flags, &mut m)?, "bundle" => bundle_parse(&mut flags, &mut m), @@ -1675,6 +1675,7 @@ You can add multiple dependencies at once: .action(ArgAction::Append), ) .arg(add_dev_arg()) + .arg(allow_scripts_arg()) }) } @@ -1717,7 +1718,7 @@ If you specify a directory instead of a file, the path is expanded to all contai UnstableArgsConfig::ResolutionAndRuntime, ) .defer(|cmd| { - runtime_args(cmd, true, false) + runtime_args(cmd, true, false, true) .arg(check_arg(true)) .arg( Arg::new("json") @@ -1881,7 +1882,7 @@ On the first invocation with deno will download the proper binary and cache it i UnstableArgsConfig::ResolutionAndRuntime, ) .defer(|cmd| { - runtime_args(cmd, true, false) + runtime_args(cmd, true, false, true) .arg(check_arg(true)) .arg( Arg::new("include") @@ -2202,7 +2203,7 @@ This command has implicit access to all permissions. UnstableArgsConfig::ResolutionAndRuntime, ) .defer(|cmd| { - runtime_args(cmd, false, true) + runtime_args(cmd, false, true, true) .arg(check_arg(false)) .arg(executable_ext_arg()) .arg( @@ -2501,7 +2502,7 @@ The installation root is determined, in order of precedence: These must be added to the path manually if required."), UnstableArgsConfig::ResolutionAndRuntime) .visible_alias("i") .defer(|cmd| { - permission_args(runtime_args(cmd, false, true), Some("global")) + permission_args(runtime_args(cmd, false, true, false), Some("global")) .arg(check_arg(true)) .arg(allow_scripts_arg()) .arg( @@ -2767,7 +2768,7 @@ It is especially useful for quick prototyping and checking snippets of code. TypeScript is supported, however it is not type-checked, only transpiled." 
), UnstableArgsConfig::ResolutionAndRuntime) - .defer(|cmd| runtime_args(cmd, true, true) + .defer(|cmd| runtime_args(cmd, true, true, true) .arg(check_arg(false)) .arg( Arg::new("eval-file") @@ -2799,7 +2800,7 @@ TypeScript is supported, however it is not type-checked, only transpiled." } fn run_args(command: Command, top_level: bool) -> Command { - runtime_args(command, true, true) + runtime_args(command, true, true, true) .arg(check_arg(false)) .arg(watch_arg(true)) .arg(hmr_arg(true)) @@ -2855,7 +2856,7 @@ Start a server defined in server.ts: Start a server defined in server.ts, watching for changes and running on port 5050: <p(245)>deno serve --watch --port 5050 server.ts</> -<y>Read more:</> <c>https://docs.deno.com/go/serve</>"), UnstableArgsConfig::ResolutionAndRuntime), true, true) +<y>Read more:</> <c>https://docs.deno.com/go/serve</>"), UnstableArgsConfig::ResolutionAndRuntime), true, true, true) .arg( Arg::new("port") .long("port") @@ -2929,7 +2930,7 @@ or <c>**/__tests__/**</>: UnstableArgsConfig::ResolutionAndRuntime ) .defer(|cmd| - runtime_args(cmd, true, true) + runtime_args(cmd, true, true, true) .arg(check_arg(true)) .arg( Arg::new("ignore") @@ -3642,6 +3643,7 @@ fn runtime_args( app: Command, include_perms: bool, include_inspector: bool, + include_allow_scripts: bool, ) -> Command { let app = compile_args(app); let app = if include_perms { @@ -3654,6 +3656,11 @@ fn runtime_args( } else { app }; + let app = if include_allow_scripts { + app.arg(allow_scripts_arg()) + } else { + app + }; app .arg(frozen_lockfile_arg()) .arg(cached_only_arg()) @@ -4235,8 +4242,13 @@ fn allow_scripts_arg_parse( Ok(()) } -fn add_parse(flags: &mut Flags, matches: &mut ArgMatches) { +fn add_parse( + flags: &mut Flags, + matches: &mut ArgMatches, +) -> clap::error::Result<()> { + allow_scripts_arg_parse(flags, matches)?; flags.subcommand = DenoSubcommand::Add(add_parse_inner(matches, None)); + Ok(()) } fn add_parse_inner( @@ -4262,7 +4274,7 @@ fn bench_parse( ) -> clap::error::Result<()> { flags.type_check_mode = TypeCheckMode::Local; - runtime_args_parse(flags, matches, true, false)?; + runtime_args_parse(flags, matches, true, false, true)?; ext_arg_parse(flags, matches); // NOTE: `deno bench` always uses `--no-prompt`, tests shouldn't ever do @@ -4352,7 +4364,7 @@ fn compile_parse( matches: &mut ArgMatches, ) -> clap::error::Result<()> { flags.type_check_mode = TypeCheckMode::Local; - runtime_args_parse(flags, matches, true, false)?; + runtime_args_parse(flags, matches, true, false, true)?; let mut script = matches.remove_many::<String>("script_arg").unwrap(); let source_file = script.next().unwrap(); @@ -4527,7 +4539,7 @@ fn eval_parse( flags: &mut Flags, matches: &mut ArgMatches, ) -> clap::error::Result<()> { - runtime_args_parse(flags, matches, false, true)?; + runtime_args_parse(flags, matches, false, true, false)?; unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionAndRuntime); flags.allow_all(); @@ -4620,7 +4632,7 @@ fn install_parse( flags: &mut Flags, matches: &mut ArgMatches, ) -> clap::error::Result<()> { - runtime_args_parse(flags, matches, true, true)?; + runtime_args_parse(flags, matches, true, true, false)?; let global = matches.get_flag("global"); if global { @@ -4846,7 +4858,7 @@ fn repl_parse( flags: &mut Flags, matches: &mut ArgMatches, ) -> clap::error::Result<()> { - runtime_args_parse(flags, matches, true, true)?; + runtime_args_parse(flags, matches, true, true, true)?; unsafely_ignore_certificate_errors_parse(flags, matches); let eval_files = matches @@ 
-4879,7 +4891,7 @@ fn run_parse( mut app: Command, bare: bool, ) -> clap::error::Result<()> { - runtime_args_parse(flags, matches, true, true)?; + runtime_args_parse(flags, matches, true, true, true)?; ext_arg_parse(flags, matches); flags.code_cache_enabled = !matches.get_flag("no-code-cache"); @@ -4920,7 +4932,7 @@ fn serve_parse( let worker_count = parallel_arg_parse(matches).map(|v| v.get()); - runtime_args_parse(flags, matches, true, true)?; + runtime_args_parse(flags, matches, true, true, true)?; // If the user didn't pass --allow-net, add this port to the network // allowlist. If the host is 0.0.0.0, we add :{port} and allow the same network perms // as if it was passed to --allow-net directly. @@ -5015,7 +5027,7 @@ fn test_parse( matches: &mut ArgMatches, ) -> clap::error::Result<()> { flags.type_check_mode = TypeCheckMode::Local; - runtime_args_parse(flags, matches, true, true)?; + runtime_args_parse(flags, matches, true, true, true)?; ext_arg_parse(flags, matches); // NOTE: `deno test` always uses `--no-prompt`, tests shouldn't ever do @@ -5380,6 +5392,7 @@ fn runtime_args_parse( matches: &mut ArgMatches, include_perms: bool, include_inspector: bool, + include_allow_scripts: bool, ) -> clap::error::Result<()> { unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionAndRuntime); compile_args_parse(flags, matches)?; @@ -5391,6 +5404,9 @@ fn runtime_args_parse( if include_inspector { inspect_arg_parse(flags, matches); } + if include_allow_scripts { + allow_scripts_arg_parse(flags, matches)?; + } location_arg_parse(flags, matches); v8_flags_arg_parse(flags, matches); seed_arg_parse(flags, matches); @@ -8862,8 +8878,12 @@ mod tests { #[test] fn test_no_colon_in_value_name() { - let app = - runtime_args(Command::new("test_inspect_completion_value"), true, true); + let app = runtime_args( + Command::new("test_inspect_completion_value"), + true, + true, + false, + ); let inspect_args = app .get_arguments() .filter(|arg| arg.get_id() == "inspect") diff --git a/cli/npm/managed/resolvers/common/lifecycle_scripts.rs b/cli/npm/managed/resolvers/common/lifecycle_scripts.rs index b358c3585..5735f5248 100644 --- a/cli/npm/managed/resolvers/common/lifecycle_scripts.rs +++ b/cli/npm/managed/resolvers/common/lifecycle_scripts.rs @@ -2,6 +2,8 @@ use super::bin_entries::BinEntries; use crate::args::LifecycleScriptsConfig; +use crate::task_runner::TaskStdio; +use crate::util::progress_bar::ProgressBar; use deno_core::anyhow::Context; use deno_npm::resolution::NpmResolutionSnapshot; use deno_runtime::deno_io::FromRawIoHandle; @@ -148,6 +150,7 @@ impl<'a> LifecycleScripts<'a> { snapshot: &NpmResolutionSnapshot, packages: &[NpmResolutionPackage], root_node_modules_dir_path: Option<&Path>, + progress_bar: &ProgressBar, ) -> Result<(), AnyError> { self.warn_not_run_scripts()?; let get_package_path = @@ -201,7 +204,15 @@ impl<'a> LifecycleScripts<'a> { { continue; } - let exit_code = crate::task_runner::run_task( + let _guard = progress_bar.update_with_prompt( + crate::util::progress_bar::ProgressMessagePrompt::Initialize, + &format!("{}: running '{script_name}' script", package.id.nv), + ); + let crate::task_runner::TaskResult { + exit_code, + stderr, + stdout, + } = crate::task_runner::run_task( crate::task_runner::RunTaskOptions { task_name: script_name, script, @@ -211,15 +222,37 @@ impl<'a> LifecycleScripts<'a> { init_cwd, argv: &[], root_node_modules_dir: root_node_modules_dir_path, + stdio: Some(crate::task_runner::TaskIo { + stderr: TaskStdio::piped(), + stdout: TaskStdio::piped(), + }), 
}, ) .await?; + let stdout = stdout.unwrap(); + let stderr = stderr.unwrap(); if exit_code != 0 { log::warn!( - "error: script '{}' in '{}' failed with exit code {}", + "error: script '{}' in '{}' failed with exit code {}{}{}", script_name, package.id.nv, exit_code, + if !stdout.trim_ascii().is_empty() { + format!( + "\nstdout:\n{}\n", + String::from_utf8_lossy(&stdout).trim() + ) + } else { + String::new() + }, + if !stderr.trim_ascii().is_empty() { + format!( + "\nstderr:\n{}\n", + String::from_utf8_lossy(&stderr).trim() + ) + } else { + String::new() + }, ); failed_packages.push(&package.id.nv); // assume if earlier script fails, later ones will fail too diff --git a/cli/npm/managed/resolvers/local.rs b/cli/npm/managed/resolvers/local.rs index 63a972a43..54f7576ad 100644 --- a/cli/npm/managed/resolvers/local.rs +++ b/cli/npm/managed/resolvers/local.rs @@ -713,6 +713,7 @@ async fn sync_resolution_with_fs( snapshot, &package_partitions.packages, Some(root_node_modules_dir_path), + progress_bar, ) .await?; diff --git a/cli/task_runner.rs b/cli/task_runner.rs index ab7163bc9..418043b23 100644 --- a/cli/task_runner.rs +++ b/cli/task_runner.rs @@ -16,8 +16,11 @@ use deno_task_shell::ExecutableCommand; use deno_task_shell::ExecuteResult; use deno_task_shell::ShellCommand; use deno_task_shell::ShellCommandContext; +use deno_task_shell::ShellPipeReader; +use deno_task_shell::ShellPipeWriter; use lazy_regex::Lazy; use regex::Regex; +use tokio::task::JoinHandle; use tokio::task::LocalSet; use crate::npm::CliNpmResolver; @@ -36,6 +39,35 @@ pub fn get_script_with_args(script: &str, argv: &[String]) -> String { script.trim().to_owned() } +pub struct TaskStdio(Option<ShellPipeReader>, ShellPipeWriter); + +impl TaskStdio { + pub fn stdout() -> Self { + Self(None, ShellPipeWriter::stdout()) + } + pub fn stderr() -> Self { + Self(None, ShellPipeWriter::stderr()) + } + pub fn piped() -> Self { + let (r, w) = deno_task_shell::pipe(); + Self(Some(r), w) + } +} + +pub struct TaskIo { + pub stdout: TaskStdio, + pub stderr: TaskStdio, +} + +impl Default for TaskIo { + fn default() -> Self { + Self { + stderr: TaskStdio::stderr(), + stdout: TaskStdio::stdout(), + } + } +} + pub struct RunTaskOptions<'a> { pub task_name: &'a str, pub script: &'a str, @@ -45,24 +77,69 @@ pub struct RunTaskOptions<'a> { pub argv: &'a [String], pub custom_commands: HashMap<String, Rc<dyn ShellCommand>>, pub root_node_modules_dir: Option<&'a Path>, + pub stdio: Option<TaskIo>, } pub type TaskCustomCommands = HashMap<String, Rc<dyn ShellCommand>>; -pub async fn run_task(opts: RunTaskOptions<'_>) -> Result<i32, AnyError> { +pub struct TaskResult { + pub exit_code: i32, + pub stdout: Option<Vec<u8>>, + pub stderr: Option<Vec<u8>>, +} + +pub async fn run_task( + opts: RunTaskOptions<'_>, +) -> Result<TaskResult, AnyError> { let script = get_script_with_args(opts.script, opts.argv); let seq_list = deno_task_shell::parser::parse(&script) .with_context(|| format!("Error parsing script '{}'.", opts.task_name))?; let env_vars = prepare_env_vars(opts.env_vars, opts.init_cwd, opts.root_node_modules_dir); + let state = + deno_task_shell::ShellState::new(env_vars, opts.cwd, opts.custom_commands); + let stdio = opts.stdio.unwrap_or_default(); + let ( + TaskStdio(stdout_read, stdout_write), + TaskStdio(stderr_read, stderr_write), + ) = (stdio.stdout, stdio.stderr); + + fn read(reader: ShellPipeReader) -> JoinHandle<Result<Vec<u8>, AnyError>> { + tokio::task::spawn_blocking(move || { + let mut buf = Vec::new(); + reader.pipe_to(&mut buf)?; + 
Ok(buf) + }) + } + + let stdout = stdout_read.map(read); + let stderr = stderr_read.map(read); + let local = LocalSet::new(); - let future = deno_task_shell::execute( - seq_list, - env_vars, - opts.cwd, - opts.custom_commands, - ); - Ok(local.run_until(future).await) + let future = async move { + let exit_code = deno_task_shell::execute_with_pipes( + seq_list, + state, + ShellPipeReader::stdin(), + stdout_write, + stderr_write, + ) + .await; + Ok::<_, AnyError>(TaskResult { + exit_code, + stdout: if let Some(stdout) = stdout { + Some(stdout.await??) + } else { + None + }, + stderr: if let Some(stderr) = stderr { + Some(stderr.await??) + } else { + None + }, + }) + }; + local.run_until(future).await } fn prepare_env_vars( diff --git a/cli/tools/task.rs b/cli/tools/task.rs index 464b65d98..502b09d2c 100644 --- a/cli/tools/task.rs +++ b/cli/tools/task.rs @@ -182,17 +182,21 @@ async fn run_task(opts: RunTaskOptions<'_>) -> Result<i32, AnyError> { &task_runner::get_script_with_args(script, cli_options.argv()), ); - task_runner::run_task(task_runner::RunTaskOptions { - task_name, - script, - cwd, - env_vars, - custom_commands, - init_cwd: opts.cli_options.initial_cwd(), - argv: cli_options.argv(), - root_node_modules_dir: npm_resolver.root_node_modules_path(), - }) - .await + Ok( + task_runner::run_task(task_runner::RunTaskOptions { + task_name, + script, + cwd, + env_vars, + custom_commands, + init_cwd: opts.cli_options.initial_cwd(), + argv: cli_options.argv(), + root_node_modules_dir: npm_resolver.root_node_modules_path(), + stdio: None, + }) + .await? + .exit_code, + ) } fn output_task(task_name: &str, script: &str) { diff --git a/ext/broadcast_channel/Cargo.toml b/ext/broadcast_channel/Cargo.toml index 3caf7b9ef..b19c4ce15 100644 --- a/ext/broadcast_channel/Cargo.toml +++ b/ext/broadcast_channel/Cargo.toml @@ -16,5 +16,6 @@ path = "lib.rs" [dependencies] async-trait.workspace = true deno_core.workspace = true +thiserror.workspace = true tokio.workspace = true uuid.workspace = true diff --git a/ext/broadcast_channel/in_memory_broadcast_channel.rs b/ext/broadcast_channel/in_memory_broadcast_channel.rs index 00b52a9d6..61dc68e17 100644 --- a/ext/broadcast_channel/in_memory_broadcast_channel.rs +++ b/ext/broadcast_channel/in_memory_broadcast_channel.rs @@ -3,13 +3,13 @@ use std::sync::Arc; use async_trait::async_trait; -use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; use tokio::sync::broadcast; use tokio::sync::mpsc; use uuid::Uuid; use crate::BroadcastChannel; +use crate::BroadcastChannelError; #[derive(Clone)] pub struct InMemoryBroadcastChannel(Arc<Mutex<broadcast::Sender<Message>>>); @@ -41,7 +41,7 @@ impl Default for InMemoryBroadcastChannel { impl BroadcastChannel for InMemoryBroadcastChannel { type Resource = InMemoryBroadcastChannelResource; - fn subscribe(&self) -> Result<Self::Resource, AnyError> { + fn subscribe(&self) -> Result<Self::Resource, BroadcastChannelError> { let (cancel_tx, cancel_rx) = mpsc::unbounded_channel(); let broadcast_rx = self.0.lock().subscribe(); let rx = tokio::sync::Mutex::new((broadcast_rx, cancel_rx)); @@ -53,7 +53,10 @@ impl BroadcastChannel for InMemoryBroadcastChannel { }) } - fn unsubscribe(&self, resource: &Self::Resource) -> Result<(), AnyError> { + fn unsubscribe( + &self, + resource: &Self::Resource, + ) -> Result<(), BroadcastChannelError> { Ok(resource.cancel_tx.send(())?) 
} @@ -62,7 +65,7 @@ impl BroadcastChannel for InMemoryBroadcastChannel { resource: &Self::Resource, name: String, data: Vec<u8>, - ) -> Result<(), AnyError> { + ) -> Result<(), BroadcastChannelError> { let name = Arc::new(name); let data = Arc::new(data); let uuid = resource.uuid; @@ -73,7 +76,7 @@ impl BroadcastChannel for InMemoryBroadcastChannel { async fn recv( &self, resource: &Self::Resource, - ) -> Result<Option<crate::Message>, AnyError> { + ) -> Result<Option<crate::Message>, BroadcastChannelError> { let mut g = resource.rx.lock().await; let (broadcast_rx, cancel_rx) = &mut *g; loop { diff --git a/ext/broadcast_channel/lib.rs b/ext/broadcast_channel/lib.rs index 47c48656d..c1de118a3 100644 --- a/ext/broadcast_channel/lib.rs +++ b/ext/broadcast_channel/lib.rs @@ -10,34 +10,69 @@ use std::path::PathBuf; use std::rc::Rc; use async_trait::async_trait; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::JsBuffer; use deno_core::OpState; use deno_core::Resource; use deno_core::ResourceId; +use tokio::sync::broadcast::error::SendError as BroadcastSendError; +use tokio::sync::mpsc::error::SendError as MpscSendError; pub const UNSTABLE_FEATURE_NAME: &str = "broadcast-channel"; +#[derive(Debug, thiserror::Error)] +pub enum BroadcastChannelError { + #[error(transparent)] + Resource(deno_core::error::AnyError), + #[error(transparent)] + MPSCSendError(MpscSendError<Box<dyn std::fmt::Debug + Send + Sync>>), + #[error(transparent)] + BroadcastSendError( + BroadcastSendError<Box<dyn std::fmt::Debug + Send + Sync>>, + ), + #[error(transparent)] + Other(deno_core::error::AnyError), +} + +impl<T: std::fmt::Debug + Send + Sync + 'static> From<MpscSendError<T>> + for BroadcastChannelError +{ + fn from(value: MpscSendError<T>) -> Self { + BroadcastChannelError::MPSCSendError(MpscSendError(Box::new(value.0))) + } +} +impl<T: std::fmt::Debug + Send + Sync + 'static> From<BroadcastSendError<T>> + for BroadcastChannelError +{ + fn from(value: BroadcastSendError<T>) -> Self { + BroadcastChannelError::BroadcastSendError(BroadcastSendError(Box::new( + value.0, + ))) + } +} + #[async_trait] pub trait BroadcastChannel: Clone { type Resource: Resource; - fn subscribe(&self) -> Result<Self::Resource, AnyError>; + fn subscribe(&self) -> Result<Self::Resource, BroadcastChannelError>; - fn unsubscribe(&self, resource: &Self::Resource) -> Result<(), AnyError>; + fn unsubscribe( + &self, + resource: &Self::Resource, + ) -> Result<(), BroadcastChannelError>; async fn send( &self, resource: &Self::Resource, name: String, data: Vec<u8>, - ) -> Result<(), AnyError>; + ) -> Result<(), BroadcastChannelError>; async fn recv( &self, resource: &Self::Resource, - ) -> Result<Option<Message>, AnyError>; + ) -> Result<Option<Message>, BroadcastChannelError>; } pub type Message = (String, Vec<u8>); @@ -46,7 +81,7 @@ pub type Message = (String, Vec<u8>); #[smi] pub fn op_broadcast_subscribe<BC>( state: &mut OpState, -) -> Result<ResourceId, AnyError> +) -> Result<ResourceId, BroadcastChannelError> where BC: BroadcastChannel + 'static, { @@ -62,11 +97,14 @@ where pub fn op_broadcast_unsubscribe<BC>( state: &mut OpState, #[smi] rid: ResourceId, -) -> Result<(), AnyError> +) -> Result<(), BroadcastChannelError> where BC: BroadcastChannel + 'static, { - let resource = state.resource_table.get::<BC::Resource>(rid)?; + let resource = state + .resource_table + .get::<BC::Resource>(rid) + .map_err(BroadcastChannelError::Resource)?; let bc = state.borrow::<BC>(); bc.unsubscribe(&resource) } @@ -77,11 +115,15 @@ pub async fn 
op_broadcast_send<BC>( #[smi] rid: ResourceId, #[string] name: String, #[buffer] buf: JsBuffer, -) -> Result<(), AnyError> +) -> Result<(), BroadcastChannelError> where BC: BroadcastChannel + 'static, { - let resource = state.borrow().resource_table.get::<BC::Resource>(rid)?; + let resource = state + .borrow() + .resource_table + .get::<BC::Resource>(rid) + .map_err(BroadcastChannelError::Resource)?; let bc = state.borrow().borrow::<BC>().clone(); bc.send(&resource, name, buf.to_vec()).await } @@ -91,11 +133,15 @@ where pub async fn op_broadcast_recv<BC>( state: Rc<RefCell<OpState>>, #[smi] rid: ResourceId, -) -> Result<Option<Message>, AnyError> +) -> Result<Option<Message>, BroadcastChannelError> where BC: BroadcastChannel + 'static, { - let resource = state.borrow().resource_table.get::<BC::Resource>(rid)?; + let resource = state + .borrow() + .resource_table + .get::<BC::Resource>(rid) + .map_err(BroadcastChannelError::Resource)?; let bc = state.borrow().borrow::<BC>().clone(); bc.recv(&resource).await } diff --git a/ext/cache/Cargo.toml b/ext/cache/Cargo.toml index 71ca34380..9d876fcb7 100644 --- a/ext/cache/Cargo.toml +++ b/ext/cache/Cargo.toml @@ -19,4 +19,5 @@ deno_core.workspace = true rusqlite.workspace = true serde.workspace = true sha2.workspace = true +thiserror.workspace = true tokio.workspace = true diff --git a/ext/cache/lib.rs b/ext/cache/lib.rs index f6d758b95..08661c349 100644 --- a/ext/cache/lib.rs +++ b/ext/cache/lib.rs @@ -7,7 +7,6 @@ use std::sync::Arc; use async_trait::async_trait; use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::serde::Deserialize; use deno_core::serde::Serialize; @@ -19,6 +18,20 @@ use deno_core::ResourceId; mod sqlite; pub use sqlite::SqliteBackedCache; +#[derive(Debug, thiserror::Error)] +pub enum CacheError { + #[error(transparent)] + Sqlite(#[from] rusqlite::Error), + #[error(transparent)] + JoinError(#[from] tokio::task::JoinError), + #[error(transparent)] + Resource(deno_core::error::AnyError), + #[error(transparent)] + Other(deno_core::error::AnyError), + #[error(transparent)] + Io(#[from] std::io::Error), +} + #[derive(Clone)] pub struct CreateCache<C: Cache + 'static>(pub Arc<dyn Fn() -> C>); @@ -92,26 +105,31 @@ pub struct CacheDeleteRequest { pub trait Cache: Clone + 'static { type CacheMatchResourceType: Resource; - async fn storage_open(&self, cache_name: String) -> Result<i64, AnyError>; - async fn storage_has(&self, cache_name: String) -> Result<bool, AnyError>; - async fn storage_delete(&self, cache_name: String) -> Result<bool, AnyError>; + async fn storage_open(&self, cache_name: String) -> Result<i64, CacheError>; + async fn storage_has(&self, cache_name: String) -> Result<bool, CacheError>; + async fn storage_delete( + &self, + cache_name: String, + ) -> Result<bool, CacheError>; /// Put a resource into the cache. 
async fn put( &self, request_response: CachePutRequest, resource: Option<Rc<dyn Resource>>, - ) -> Result<(), AnyError>; + ) -> Result<(), CacheError>; async fn r#match( &self, request: CacheMatchRequest, ) -> Result< Option<(CacheMatchResponseMeta, Option<Self::CacheMatchResourceType>)>, - AnyError, + CacheError, >; - async fn delete(&self, request: CacheDeleteRequest) - -> Result<bool, AnyError>; + async fn delete( + &self, + request: CacheDeleteRequest, + ) -> Result<bool, CacheError>; } #[op2(async)] @@ -119,7 +137,7 @@ pub trait Cache: Clone + 'static { pub async fn op_cache_storage_open<CA>( state: Rc<RefCell<OpState>>, #[string] cache_name: String, -) -> Result<i64, AnyError> +) -> Result<i64, CacheError> where CA: Cache, { @@ -131,7 +149,7 @@ where pub async fn op_cache_storage_has<CA>( state: Rc<RefCell<OpState>>, #[string] cache_name: String, -) -> Result<bool, AnyError> +) -> Result<bool, CacheError> where CA: Cache, { @@ -143,7 +161,7 @@ where pub async fn op_cache_storage_delete<CA>( state: Rc<RefCell<OpState>>, #[string] cache_name: String, -) -> Result<bool, AnyError> +) -> Result<bool, CacheError> where CA: Cache, { @@ -155,13 +173,19 @@ where pub async fn op_cache_put<CA>( state: Rc<RefCell<OpState>>, #[serde] request_response: CachePutRequest, -) -> Result<(), AnyError> +) -> Result<(), CacheError> where CA: Cache, { let cache = get_cache::<CA>(&state)?; let resource = match request_response.response_rid { - Some(rid) => Some(state.borrow_mut().resource_table.take_any(rid)?), + Some(rid) => Some( + state + .borrow_mut() + .resource_table + .take_any(rid) + .map_err(CacheError::Resource)?, + ), None => None, }; cache.put(request_response, resource).await @@ -172,7 +196,7 @@ where pub async fn op_cache_match<CA>( state: Rc<RefCell<OpState>>, #[serde] request: CacheMatchRequest, -) -> Result<Option<CacheMatchResponse>, AnyError> +) -> Result<Option<CacheMatchResponse>, CacheError> where CA: Cache, { @@ -191,7 +215,7 @@ where pub async fn op_cache_delete<CA>( state: Rc<RefCell<OpState>>, #[serde] request: CacheDeleteRequest, -) -> Result<bool, AnyError> +) -> Result<bool, CacheError> where CA: Cache, { @@ -199,7 +223,7 @@ where cache.delete(request).await } -pub fn get_cache<CA>(state: &Rc<RefCell<OpState>>) -> Result<CA, AnyError> +pub fn get_cache<CA>(state: &Rc<RefCell<OpState>>) -> Result<CA, CacheError> where CA: Cache, { @@ -211,7 +235,9 @@ where state.put(cache); Ok(state.borrow::<CA>().clone()) } else { - Err(type_error("CacheStorage is not available in this context")) + Err(CacheError::Other(type_error( + "CacheStorage is not available in this context", + ))) } } diff --git a/ext/cache/sqlite.rs b/ext/cache/sqlite.rs index c3c55dd5e..e4991c32f 100644 --- a/ext/cache/sqlite.rs +++ b/ext/cache/sqlite.rs @@ -30,6 +30,7 @@ use crate::serialize_headers; use crate::vary_header_matches; use crate::Cache; use crate::CacheDeleteRequest; +use crate::CacheError; use crate::CacheMatchRequest; use crate::CacheMatchResponseMeta; use crate::CachePutRequest; @@ -102,7 +103,7 @@ impl Cache for SqliteBackedCache { /// Open a cache storage. Internally, this creates a row in the /// sqlite db if the cache doesn't exist and returns the internal id /// of the cache. 
- async fn storage_open(&self, cache_name: String) -> Result<i64, AnyError> { + async fn storage_open(&self, cache_name: String) -> Result<i64, CacheError> { let db = self.connection.clone(); let cache_storage_dir = self.cache_storage_dir.clone(); spawn_blocking(move || { @@ -121,14 +122,14 @@ impl Cache for SqliteBackedCache { )?; let responses_dir = get_responses_dir(cache_storage_dir, cache_id); std::fs::create_dir_all(responses_dir)?; - Ok::<i64, AnyError>(cache_id) + Ok::<i64, CacheError>(cache_id) }) .await? } /// Check if a cache with the provided name exists. /// Note: this doesn't check the disk, it only checks the sqlite db. - async fn storage_has(&self, cache_name: String) -> Result<bool, AnyError> { + async fn storage_has(&self, cache_name: String) -> Result<bool, CacheError> { let db = self.connection.clone(); spawn_blocking(move || { let db = db.lock(); @@ -140,13 +141,16 @@ impl Cache for SqliteBackedCache { Ok(count > 0) }, )?; - Ok::<bool, AnyError>(cache_exists) + Ok::<bool, CacheError>(cache_exists) }) .await? } /// Delete a cache storage. Internally, this deletes the row in the sqlite db. - async fn storage_delete(&self, cache_name: String) -> Result<bool, AnyError> { + async fn storage_delete( + &self, + cache_name: String, + ) -> Result<bool, CacheError> { let db = self.connection.clone(); let cache_storage_dir = self.cache_storage_dir.clone(); spawn_blocking(move || { @@ -167,7 +171,7 @@ impl Cache for SqliteBackedCache { std::fs::remove_dir_all(cache_dir)?; } } - Ok::<bool, AnyError>(maybe_cache_id.is_some()) + Ok::<bool, CacheError>(maybe_cache_id.is_some()) }) .await? } @@ -176,10 +180,12 @@ impl Cache for SqliteBackedCache { &self, request_response: CachePutRequest, resource: Option<Rc<dyn Resource>>, - ) -> Result<(), AnyError> { + ) -> Result<(), CacheError> { let db = self.connection.clone(); let cache_storage_dir = self.cache_storage_dir.clone(); - let now = SystemTime::now().duration_since(UNIX_EPOCH)?; + let now = SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("SystemTime is before unix epoch"); if let Some(resource) = resource { let body_key = hash(&format!( @@ -193,7 +199,11 @@ impl Cache for SqliteBackedCache { let mut file = tokio::fs::File::create(response_path).await?; let mut buf = BufMutView::new(64 * 1024); loop { - let (size, buf2) = resource.clone().read_byob(buf).await?; + let (size, buf2) = resource + .clone() + .read_byob(buf) + .await + .map_err(CacheError::Other)?; if size == 0 { break; } @@ -224,7 +234,7 @@ impl Cache for SqliteBackedCache { request: CacheMatchRequest, ) -> Result< Option<(CacheMatchResponseMeta, Option<CacheResponseResource>)>, - AnyError, + CacheError, > { let db = self.connection.clone(); let cache_storage_dir = self.cache_storage_dir.clone(); @@ -290,19 +300,17 @@ impl Cache for SqliteBackedCache { } Err(err) => return Err(err.into()), }; - return Ok(Some((cache_meta, Some(CacheResponseResource::new(file))))); - } - Some((cache_meta, None)) => { - return Ok(Some((cache_meta, None))); + Ok(Some((cache_meta, Some(CacheResponseResource::new(file))))) } - None => return Ok(None), + Some((cache_meta, None)) => Ok(Some((cache_meta, None))), + None => Ok(None), } } async fn delete( &self, request: CacheDeleteRequest, - ) -> Result<bool, AnyError> { + ) -> Result<bool, CacheError> { let db = self.connection.clone(); spawn_blocking(move || { // TODO(@satyarohith): remove the response body from disk if one exists @@ -311,17 +319,17 @@ impl Cache for SqliteBackedCache { "DELETE FROM request_response_list WHERE cache_id 
= ?1 AND request_url = ?2", (request.cache_id, &request.request_url), )?; - Ok::<bool, AnyError>(rows_effected > 0) + Ok::<bool, CacheError>(rows_effected > 0) }) .await? } } async fn insert_cache_asset( - db: Arc<Mutex<rusqlite::Connection>>, + db: Arc<Mutex<Connection>>, put: CachePutRequest, response_body_key: Option<String>, -) -> Result<Option<String>, deno_core::anyhow::Error> { +) -> Result<Option<String>, CacheError> { spawn_blocking(move || { let maybe_response_body = { let db = db.lock(); @@ -339,7 +347,7 @@ async fn insert_cache_asset( response_body_key, put.response_status, put.response_status_text, - SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs(), + SystemTime::now().duration_since(UNIX_EPOCH).expect("SystemTime is before unix epoch").as_secs(), ), |row| { let response_body_key: Option<String> = row.get(0)?; @@ -347,7 +355,7 @@ async fn insert_cache_asset( }, )? }; - Ok::<Option<String>, AnyError>(maybe_response_body) + Ok::<Option<String>, CacheError>(maybe_response_body) }).await? } diff --git a/ext/canvas/Cargo.toml b/ext/canvas/Cargo.toml index 47c37560e..78c674348 100644 --- a/ext/canvas/Cargo.toml +++ b/ext/canvas/Cargo.toml @@ -18,3 +18,4 @@ deno_core.workspace = true deno_webgpu.workspace = true image = { version = "0.24.7", default-features = false, features = ["png"] } serde = { workspace = true, features = ["derive"] } +thiserror.workspace = true diff --git a/ext/canvas/lib.rs b/ext/canvas/lib.rs index 72173f133..defb288ac 100644 --- a/ext/canvas/lib.rs +++ b/ext/canvas/lib.rs @@ -1,7 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::ToJsBuffer; use image::imageops::FilterType; @@ -13,6 +11,14 @@ use serde::Deserialize; use serde::Serialize; use std::path::PathBuf; +#[derive(Debug, thiserror::Error)] +pub enum CanvasError { + #[error("Color type '{0:?}' not supported")] + UnsupportedColorType(ColorType), + #[error(transparent)] + Image(#[from] image::ImageError), +} + #[derive(Debug, Deserialize)] #[serde(rename_all = "snake_case")] enum ImageResizeQuality { @@ -43,7 +49,7 @@ struct ImageProcessArgs { fn op_image_process( #[buffer] buf: &[u8], #[serde] args: ImageProcessArgs, -) -> Result<ToJsBuffer, AnyError> { +) -> ToJsBuffer { let view = RgbaImage::from_vec(args.width, args.height, buf.to_vec()).unwrap(); @@ -105,7 +111,7 @@ fn op_image_process( } } - Ok(image_out.to_vec().into()) + image_out.to_vec().into() } #[derive(Debug, Serialize)] @@ -117,17 +123,16 @@ struct DecodedPng { #[op2] #[serde] -fn op_image_decode_png(#[buffer] buf: &[u8]) -> Result<DecodedPng, AnyError> { +fn op_image_decode_png( + #[buffer] buf: &[u8], +) -> Result<DecodedPng, CanvasError> { let png = image::codecs::png::PngDecoder::new(buf)?; let (width, height) = png.dimensions(); // TODO(@crowlKats): maybe use DynamicImage https://docs.rs/image/0.24.7/image/enum.DynamicImage.html ? 
if png.color_type() != ColorType::Rgba8 { - return Err(type_error(format!( - "Color type '{:?}' not supported", - png.color_type() - ))); + return Err(CanvasError::UnsupportedColorType(png.color_type())); } // read_image will assert that the buffer is the correct size, so we need to fill it with zeros diff --git a/ext/cron/Cargo.toml b/ext/cron/Cargo.toml index a773521f5..10f09b57c 100644 --- a/ext/cron/Cargo.toml +++ b/ext/cron/Cargo.toml @@ -19,4 +19,5 @@ async-trait.workspace = true chrono = { workspace = true, features = ["now"] } deno_core.workspace = true saffron.workspace = true +thiserror.workspace = true tokio.workspace = true diff --git a/ext/cron/interface.rs b/ext/cron/interface.rs index 01b1d1789..a19525cc4 100644 --- a/ext/cron/interface.rs +++ b/ext/cron/interface.rs @@ -1,17 +1,17 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. +use crate::CronError; use async_trait::async_trait; -use deno_core::error::AnyError; pub trait CronHandler { type EH: CronHandle + 'static; - fn create(&self, spec: CronSpec) -> Result<Self::EH, AnyError>; + fn create(&self, spec: CronSpec) -> Result<Self::EH, CronError>; } #[async_trait(?Send)] pub trait CronHandle { - async fn next(&self, prev_success: bool) -> Result<bool, AnyError>; + async fn next(&self, prev_success: bool) -> Result<bool, CronError>; fn close(&self); } diff --git a/ext/cron/lib.rs b/ext/cron/lib.rs index e350e4d69..feffb5e51 100644 --- a/ext/cron/lib.rs +++ b/ext/cron/lib.rs @@ -7,16 +7,13 @@ use std::borrow::Cow; use std::cell::RefCell; use std::rc::Rc; +pub use crate::interface::*; use deno_core::error::get_custom_error_class; -use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::OpState; use deno_core::Resource; use deno_core::ResourceId; -pub use crate::interface::*; - pub const UNSTABLE_FEATURE_NAME: &str = "cron"; deno_core::extension!(deno_cron, @@ -49,6 +46,28 @@ impl<EH: CronHandle + 'static> Resource for CronResource<EH> { } } +#[derive(Debug, thiserror::Error)] +pub enum CronError { + #[error(transparent)] + Resource(deno_core::error::AnyError), + #[error("Cron name cannot exceed 64 characters: current length {0}")] + NameExceeded(usize), + #[error("Invalid cron name: only alphanumeric characters, whitespace, hyphens, and underscores are allowed")] + NameInvalid, + #[error("Cron with this name already exists")] + AlreadyExists, + #[error("Too many crons")] + TooManyCrons, + #[error("Invalid cron schedule")] + InvalidCron, + #[error("Invalid backoff schedule")] + InvalidBackoff, + #[error(transparent)] + AcquireError(#[from] tokio::sync::AcquireError), + #[error(transparent)] + Other(deno_core::error::AnyError), +} + #[op2] #[smi] fn op_cron_create<C>( @@ -56,7 +75,7 @@ fn op_cron_create<C>( #[string] name: String, #[string] cron_schedule: String, #[serde] backoff_schedule: Option<Vec<u32>>, -) -> Result<ResourceId, AnyError> +) -> Result<ResourceId, CronError> where C: CronHandler + 'static, { @@ -90,7 +109,7 @@ async fn op_cron_next<C>( state: Rc<RefCell<OpState>>, #[smi] rid: ResourceId, prev_success: bool, -) -> Result<bool, AnyError> +) -> Result<bool, CronError> where C: CronHandler + 'static, { @@ -102,7 +121,7 @@ where if get_custom_error_class(&err) == Some("BadResource") { return Ok(false); } else { - return Err(err); + return Err(CronError::Resource(err)); } } }; @@ -112,17 +131,14 @@ where cron_handler.next(prev_success).await } -fn validate_cron_name(name: &str) -> Result<(), AnyError> { +fn validate_cron_name(name: &str) -> 
Result<(), CronError> { if name.len() > 64 { - return Err(type_error(format!( - "Cron name cannot exceed 64 characters: current length {}", - name.len() - ))); + return Err(CronError::NameExceeded(name.len())); } if !name.chars().all(|c| { c.is_ascii_whitespace() || c.is_ascii_alphanumeric() || c == '_' || c == '-' }) { - return Err(type_error("Invalid cron name: only alphanumeric characters, whitespace, hyphens, and underscores are allowed")); + return Err(CronError::NameInvalid); } Ok(()) } diff --git a/ext/cron/local.rs b/ext/cron/local.rs index dd60e750a..1110baadb 100644 --- a/ext/cron/local.rs +++ b/ext/cron/local.rs @@ -10,8 +10,6 @@ use std::rc::Weak; use std::sync::Arc; use async_trait::async_trait; -use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::futures; use deno_core::futures::FutureExt; use deno_core::unsync::spawn; @@ -21,6 +19,7 @@ use tokio::sync::mpsc::WeakSender; use tokio::sync::OwnedSemaphorePermit; use tokio::sync::Semaphore; +use crate::CronError; use crate::CronHandle; use crate::CronHandler; use crate::CronSpec; @@ -81,7 +80,7 @@ impl LocalCronHandler { async fn cron_loop( runtime_state: Rc<RefCell<RuntimeState>>, mut cron_schedule_rx: mpsc::Receiver<(String, bool)>, - ) -> Result<(), AnyError> { + ) -> Result<(), CronError> { loop { let earliest_deadline = runtime_state .borrow() @@ -154,7 +153,7 @@ impl LocalCronHandler { impl RuntimeState { fn get_ready_crons( &mut self, - ) -> Result<Vec<(String, WeakSender<()>)>, AnyError> { + ) -> Result<Vec<(String, WeakSender<()>)>, CronError> { let now = chrono::Utc::now().timestamp_millis() as u64; let ready = { @@ -191,7 +190,7 @@ impl RuntimeState { impl CronHandler for LocalCronHandler { type EH = CronExecutionHandle; - fn create(&self, spec: CronSpec) -> Result<Self::EH, AnyError> { + fn create(&self, spec: CronSpec) -> Result<Self::EH, CronError> { // Ensure that the cron loop is started. self.cron_loop_join_handle.get_or_init(|| { let (cron_schedule_tx, cron_schedule_rx) = @@ -208,17 +207,17 @@ impl CronHandler for LocalCronHandler { let mut runtime_state = self.runtime_state.borrow_mut(); if runtime_state.crons.len() > MAX_CRONS { - return Err(type_error("Too many crons")); + return Err(CronError::TooManyCrons); } if runtime_state.crons.contains_key(&spec.name) { - return Err(type_error("Cron with this name already exists")); + return Err(CronError::AlreadyExists); } // Validate schedule expression. spec .cron_schedule .parse::<saffron::Cron>() - .map_err(|_| type_error("Invalid cron schedule"))?; + .map_err(|_| CronError::InvalidCron)?; // Validate backoff_schedule. 
if let Some(backoff_schedule) = &spec.backoff_schedule { @@ -263,7 +262,7 @@ struct Inner { #[async_trait(?Send)] impl CronHandle for CronExecutionHandle { - async fn next(&self, prev_success: bool) -> Result<bool, AnyError> { + async fn next(&self, prev_success: bool) -> Result<bool, CronError> { self.inner.borrow_mut().permit.take(); if self @@ -300,7 +299,7 @@ impl CronHandle for CronExecutionHandle { } } -fn compute_next_deadline(cron_expression: &str) -> Result<u64, AnyError> { +fn compute_next_deadline(cron_expression: &str) -> Result<u64, CronError> { let now = chrono::Utc::now(); if let Ok(test_schedule) = env::var("DENO_CRON_TEST_SCHEDULE_OFFSET") { @@ -311,19 +310,21 @@ fn compute_next_deadline(cron_expression: &str) -> Result<u64, AnyError> { let cron = cron_expression .parse::<saffron::Cron>() - .map_err(|_| anyhow::anyhow!("invalid cron expression"))?; + .map_err(|_| CronError::InvalidCron)?; let Some(next_deadline) = cron.next_after(now) else { - return Err(anyhow::anyhow!("invalid cron expression")); + return Err(CronError::InvalidCron); }; Ok(next_deadline.timestamp_millis() as u64) } -fn validate_backoff_schedule(backoff_schedule: &[u32]) -> Result<(), AnyError> { +fn validate_backoff_schedule( + backoff_schedule: &[u32], +) -> Result<(), CronError> { if backoff_schedule.len() > MAX_BACKOFF_COUNT { - return Err(type_error("Invalid backoff schedule")); + return Err(CronError::InvalidBackoff); } if backoff_schedule.iter().any(|s| *s > MAX_BACKOFF_MS) { - return Err(type_error("Invalid backoff schedule")); + return Err(CronError::InvalidBackoff); } Ok(()) } diff --git a/ext/net/ops_tls.rs b/ext/net/ops_tls.rs index 5bc04ceb5..a68d144b5 100644 --- a/ext/net/ops_tls.rs +++ b/ext/net/ops_tls.rs @@ -250,7 +250,7 @@ pub fn op_tls_cert_resolver_resolve_error( #[string] sni: String, #[string] error: String, ) { - lookup.resolve(sni, Err(anyhow!(error))) + lookup.resolve(sni, Err(error)) } #[op2] diff --git a/ext/node/polyfills/util.ts b/ext/node/polyfills/util.ts index 586fae17e..d82b288b0 100644 --- a/ext/node/polyfills/util.ts +++ b/ext/node/polyfills/util.ts @@ -39,6 +39,7 @@ import { formatWithOptions, inspect, stripVTControlCharacters, + styleText, } from "ext:deno_node/internal/util/inspect.mjs"; import { codes } from "ext:deno_node/internal/error_codes.ts"; import types from "node:util/types"; @@ -63,6 +64,7 @@ export { parseArgs, promisify, stripVTControlCharacters, + styleText, types, }; @@ -354,4 +356,5 @@ export default { debuglog, debug: debuglog, isDeepStrictEqual, + styleText, }; diff --git a/ext/tls/Cargo.toml b/ext/tls/Cargo.toml index 6c1554241..9f7bffe67 100644 --- a/ext/tls/Cargo.toml +++ b/ext/tls/Cargo.toml @@ -21,5 +21,6 @@ rustls-pemfile.workspace = true rustls-tokio-stream.workspace = true rustls-webpki.workspace = true serde.workspace = true +thiserror.workspace = true tokio.workspace = true webpki-roots.workspace = true diff --git a/ext/tls/lib.rs b/ext/tls/lib.rs index c4d548ccf..883d2995e 100644 --- a/ext/tls/lib.rs +++ b/ext/tls/lib.rs @@ -9,17 +9,12 @@ pub use rustls_tokio_stream::*; pub use webpki; pub use webpki_roots; -use deno_core::anyhow::anyhow; -use deno_core::error::custom_error; -use deno_core::error::AnyError; - use rustls::client::danger::HandshakeSignatureValid; use rustls::client::danger::ServerCertVerified; use rustls::client::danger::ServerCertVerifier; use rustls::client::WebPkiServerVerifier; use rustls::ClientConfig; use rustls::DigitallySignedStruct; -use rustls::Error; use rustls::RootCertStore; use rustls_pemfile::certs; use 
rustls_pemfile::ec_private_keys; @@ -35,12 +30,30 @@ use std::sync::Arc; mod tls_key; pub use tls_key::*; +#[derive(Debug, thiserror::Error)] +pub enum TlsError { + #[error(transparent)] + Rustls(#[from] rustls::Error), + #[error("Unable to add pem file to certificate store: {0}")] + UnableAddPemFileToCert(std::io::Error), + #[error("Unable to decode certificate")] + CertInvalid, + #[error("No certificates found in certificate data")] + CertsNotFound, + #[error("No keys found in key data")] + KeysNotFound, + #[error("Unable to decode key")] + KeyDecode, +} + /// Lazily resolves the root cert store. /// /// This was done because the root cert store is not needed in all cases /// and takes a bit of time to initialize. pub trait RootCertStoreProvider: Send + Sync { - fn get_or_try_init(&self) -> Result<&RootCertStore, AnyError>; + fn get_or_try_init( + &self, + ) -> Result<&RootCertStore, deno_core::error::AnyError>; } // This extension has no runtime apis, it only exports some shared native functions. @@ -77,7 +90,7 @@ impl ServerCertVerifier for NoCertificateVerification { server_name: &rustls::pki_types::ServerName<'_>, ocsp_response: &[u8], now: rustls::pki_types::UnixTime, - ) -> Result<ServerCertVerified, Error> { + ) -> Result<ServerCertVerified, rustls::Error> { if self.ic_allowlist.is_empty() { return Ok(ServerCertVerified::assertion()); } @@ -89,7 +102,9 @@ impl ServerCertVerifier for NoCertificateVerification { _ => { // NOTE(bartlomieju): `ServerName` is a non-exhaustive enum // so we have this catch all errors here. - return Err(Error::General("Unknown `ServerName` variant".to_string())); + return Err(rustls::Error::General( + "Unknown `ServerName` variant".to_string(), + )); } }; if self.ic_allowlist.contains(&dns_name_or_ip_address) { @@ -110,7 +125,7 @@ impl ServerCertVerifier for NoCertificateVerification { message: &[u8], cert: &rustls::pki_types::CertificateDer, dss: &DigitallySignedStruct, - ) -> Result<HandshakeSignatureValid, Error> { + ) -> Result<HandshakeSignatureValid, rustls::Error> { if self.ic_allowlist.is_empty() { return Ok(HandshakeSignatureValid::assertion()); } @@ -126,7 +141,7 @@ impl ServerCertVerifier for NoCertificateVerification { message: &[u8], cert: &rustls::pki_types::CertificateDer, dss: &DigitallySignedStruct, - ) -> Result<HandshakeSignatureValid, Error> { + ) -> Result<HandshakeSignatureValid, rustls::Error> { if self.ic_allowlist.is_empty() { return Ok(HandshakeSignatureValid::assertion()); } @@ -178,7 +193,7 @@ pub fn create_client_config( unsafely_ignore_certificate_errors: Option<Vec<String>>, maybe_cert_chain_and_key: TlsKeys, socket_use: SocketUse, -) -> Result<ClientConfig, AnyError> { +) -> Result<ClientConfig, TlsError> { if let Some(ic_allowlist) = unsafely_ignore_certificate_errors { let client_config = ClientConfig::builder() .dangerous() @@ -214,10 +229,7 @@ pub fn create_client_config( root_cert_store.add(cert)?; } Err(e) => { - return Err(anyhow!( - "Unable to add pem file to certificate store: {}", - e - )); + return Err(TlsError::UnableAddPemFileToCert(e)); } } } @@ -255,74 +267,61 @@ fn add_alpn(client: &mut ClientConfig, socket_use: SocketUse) { pub fn load_certs( reader: &mut dyn BufRead, -) -> Result<Vec<CertificateDer<'static>>, AnyError> { +) -> Result<Vec<CertificateDer<'static>>, TlsError> { let certs: Result<Vec<_>, _> = certs(reader).collect(); - let certs = certs - .map_err(|_| custom_error("InvalidData", "Unable to decode certificate"))?; + let certs = certs.map_err(|_| TlsError::CertInvalid)?; if certs.is_empty() { - 
return Err(cert_not_found_err()); + return Err(TlsError::CertsNotFound); } Ok(certs) } -fn key_decode_err() -> AnyError { - custom_error("InvalidData", "Unable to decode key") -} - -fn key_not_found_err() -> AnyError { - custom_error("InvalidData", "No keys found in key data") -} - -fn cert_not_found_err() -> AnyError { - custom_error("InvalidData", "No certificates found in certificate data") -} - /// Starts with -----BEGIN RSA PRIVATE KEY----- fn load_rsa_keys( mut bytes: &[u8], -) -> Result<Vec<PrivateKeyDer<'static>>, AnyError> { +) -> Result<Vec<PrivateKeyDer<'static>>, TlsError> { let keys: Result<Vec<_>, _> = rsa_private_keys(&mut bytes).collect(); - let keys = keys.map_err(|_| key_decode_err())?; + let keys = keys.map_err(|_| TlsError::KeyDecode)?; Ok(keys.into_iter().map(PrivateKeyDer::Pkcs1).collect()) } /// Starts with -----BEGIN EC PRIVATE KEY----- fn load_ec_keys( mut bytes: &[u8], -) -> Result<Vec<PrivateKeyDer<'static>>, AnyError> { +) -> Result<Vec<PrivateKeyDer<'static>>, TlsError> { let keys: Result<Vec<_>, std::io::Error> = ec_private_keys(&mut bytes).collect(); - let keys2 = keys.map_err(|_| key_decode_err())?; + let keys2 = keys.map_err(|_| TlsError::KeyDecode)?; Ok(keys2.into_iter().map(PrivateKeyDer::Sec1).collect()) } /// Starts with -----BEGIN PRIVATE KEY----- fn load_pkcs8_keys( mut bytes: &[u8], -) -> Result<Vec<PrivateKeyDer<'static>>, AnyError> { +) -> Result<Vec<PrivateKeyDer<'static>>, TlsError> { let keys: Result<Vec<_>, std::io::Error> = pkcs8_private_keys(&mut bytes).collect(); - let keys2 = keys.map_err(|_| key_decode_err())?; + let keys2 = keys.map_err(|_| TlsError::KeyDecode)?; Ok(keys2.into_iter().map(PrivateKeyDer::Pkcs8).collect()) } fn filter_invalid_encoding_err( - to_be_filtered: Result<HandshakeSignatureValid, Error>, -) -> Result<HandshakeSignatureValid, Error> { + to_be_filtered: Result<HandshakeSignatureValid, rustls::Error>, +) -> Result<HandshakeSignatureValid, rustls::Error> { match to_be_filtered { - Err(Error::InvalidCertificate(rustls::CertificateError::BadEncoding)) => { - Ok(HandshakeSignatureValid::assertion()) - } + Err(rustls::Error::InvalidCertificate( + rustls::CertificateError::BadEncoding, + )) => Ok(HandshakeSignatureValid::assertion()), res => res, } } pub fn load_private_keys( bytes: &[u8], -) -> Result<Vec<PrivateKeyDer<'static>>, AnyError> { +) -> Result<Vec<PrivateKeyDer<'static>>, TlsError> { let mut keys = load_rsa_keys(bytes)?; if keys.is_empty() { @@ -334,7 +333,7 @@ pub fn load_private_keys( } if keys.is_empty() { - return Err(key_not_found_err()); + return Err(TlsError::KeysNotFound); } Ok(keys) diff --git a/ext/tls/tls_key.rs b/ext/tls/tls_key.rs index 66fac86f8..b7baa604b 100644 --- a/ext/tls/tls_key.rs +++ b/ext/tls/tls_key.rs @@ -11,8 +11,6 @@ //! key lookup can handle closing one end of the pair, in which case they will just //! attempt to clean up the associated resources. 
-use deno_core::anyhow::anyhow; -use deno_core::error::AnyError; use deno_core::futures::future::poll_fn; use deno_core::futures::future::Either; use deno_core::futures::FutureExt; @@ -33,7 +31,19 @@ use tokio::sync::oneshot; use webpki::types::CertificateDer; use webpki::types::PrivateKeyDer; -type ErrorType = Rc<AnyError>; +#[derive(Debug, thiserror::Error)] +pub enum TlsKeyError { + #[error(transparent)] + Rustls(#[from] rustls::Error), + #[error("Failed: {0}")] + Failed(ErrorType), + #[error(transparent)] + JoinError(#[from] tokio::task::JoinError), + #[error(transparent)] + RecvError(#[from] tokio::sync::broadcast::error::RecvError), +} + +type ErrorType = Arc<Box<str>>; /// A TLS certificate/private key pair. /// see https://docs.rs/rustls-pki-types/latest/rustls_pki_types/#cloning-private-keys @@ -114,7 +124,7 @@ impl TlsKeyResolver { &self, sni: String, alpn: Vec<Vec<u8>>, - ) -> Result<Arc<ServerConfig>, AnyError> { + ) -> Result<Arc<ServerConfig>, TlsKeyError> { let key = self.resolve(sni).await?; let mut tls_config = ServerConfig::builder() @@ -183,7 +193,7 @@ impl TlsKeyResolver { pub fn resolve( &self, sni: String, - ) -> impl Future<Output = Result<TlsKey, AnyError>> { + ) -> impl Future<Output = Result<TlsKey, TlsKeyError>> { let mut cache = self.inner.cache.borrow_mut(); let mut recv = match cache.get(&sni) { None => { @@ -194,7 +204,7 @@ impl TlsKeyResolver { } Some(TlsKeyState::Resolving(recv)) => recv.resubscribe(), Some(TlsKeyState::Resolved(res)) => { - return Either::Left(ready(res.clone().map_err(|_| anyhow!("Failed")))); + return Either::Left(ready(res.clone().map_err(TlsKeyError::Failed))); } }; drop(cache); @@ -212,7 +222,7 @@ impl TlsKeyResolver { // Someone beat us to it } } - res.map_err(|_| anyhow!("Failed")) + res.map_err(TlsKeyError::Failed) }); Either::Right(async move { handle.await? }) } @@ -247,13 +257,13 @@ impl TlsKeyLookup { } /// Resolve a previously polled item. - pub fn resolve(&self, sni: String, res: Result<TlsKey, AnyError>) { + pub fn resolve(&self, sni: String, res: Result<TlsKey, String>) { _ = self .pending .borrow_mut() .remove(&sni) .unwrap() - .send(res.map_err(Rc::new)); + .send(res.map_err(|e| Arc::new(e.into_boxed_str()))); } } diff --git a/ext/websocket/lib.rs b/ext/websocket/lib.rs index 9e320040b..b8043516b 100644 --- a/ext/websocket/lib.rs +++ b/ext/websocket/lib.rs @@ -349,6 +349,7 @@ pub fn create_ws_client_config( TlsKeys::Null, socket_use, ) + .map_err(|e| e.into()) } /// Headers common to both http/1.1 and h2 requests. diff --git a/runtime/errors.rs b/runtime/errors.rs index 694402773..4c6aeab98 100644 --- a/runtime/errors.rs +++ b/runtime/errors.rs @@ -9,10 +9,15 @@ //! Diagnostics are compile-time type errors, whereas JsErrors are runtime //! exceptions. 
+use deno_broadcast_channel::BroadcastChannelError; +use deno_cache::CacheError; +use deno_canvas::CanvasError; use deno_core::error::AnyError; use deno_core::serde_json; use deno_core::url; use deno_core::ModuleResolutionError; +use deno_cron::CronError; +use deno_tls::TlsError; use std::env; use std::error::Error; use std::io; @@ -153,12 +158,79 @@ pub fn get_nix_error_class(error: &nix::Error) -> &'static str { } } +fn get_tls_error_class(e: &TlsError) -> &'static str { + match e { + TlsError::Rustls(_) => "Error", + TlsError::UnableAddPemFileToCert(e) => get_io_error_class(e), + TlsError::CertInvalid + | TlsError::CertsNotFound + | TlsError::KeysNotFound + | TlsError::KeyDecode => "InvalidData", + } +} + +pub fn get_cron_error_class(e: &CronError) -> &'static str { + match e { + CronError::Resource(e) => { + deno_core::error::get_custom_error_class(e).unwrap_or("Error") + } + CronError::NameExceeded(_) => "TypeError", + CronError::NameInvalid => "TypeError", + CronError::AlreadyExists => "TypeError", + CronError::TooManyCrons => "TypeError", + CronError::InvalidCron => "TypeError", + CronError::InvalidBackoff => "TypeError", + CronError::AcquireError(_) => "Error", + CronError::Other(e) => get_error_class_name(e).unwrap_or("Error"), + } +} + +fn get_canvas_error(e: &CanvasError) -> &'static str { + match e { + CanvasError::UnsupportedColorType(_) => "TypeError", + CanvasError::Image(_) => "Error", + } +} + +pub fn get_cache_error(error: &CacheError) -> &'static str { + match error { + CacheError::Sqlite(_) => "Error", + CacheError::JoinError(_) => "Error", + CacheError::Resource(err) => { + deno_core::error::get_custom_error_class(err).unwrap_or("Error") + } + CacheError::Other(e) => get_error_class_name(e).unwrap_or("Error"), + CacheError::Io(err) => get_io_error_class(err), + } +} + +fn get_broadcast_channel_error(error: &BroadcastChannelError) -> &'static str { + match error { + BroadcastChannelError::Resource(err) => { + deno_core::error::get_custom_error_class(err).unwrap() + } + BroadcastChannelError::MPSCSendError(_) => "Error", + BroadcastChannelError::BroadcastSendError(_) => "Error", + BroadcastChannelError::Other(err) => { + get_error_class_name(err).unwrap_or("Error") + } + } +} + pub fn get_error_class_name(e: &AnyError) -> Option<&'static str> { deno_core::error::get_custom_error_class(e) .or_else(|| deno_webgpu::error::get_error_class_name(e)) .or_else(|| deno_web::get_error_class_name(e)) .or_else(|| deno_webstorage::get_not_supported_error_class_name(e)) .or_else(|| deno_websocket::get_network_error_class_name(e)) + .or_else(|| e.downcast_ref::<TlsError>().map(get_tls_error_class)) + .or_else(|| e.downcast_ref::<CronError>().map(get_cron_error_class)) + .or_else(|| e.downcast_ref::<CanvasError>().map(get_canvas_error)) + .or_else(|| e.downcast_ref::<CacheError>().map(get_cache_error)) + .or_else(|| { + e.downcast_ref::<BroadcastChannelError>() + .map(get_broadcast_channel_error) + }) .or_else(|| { e.downcast_ref::<dlopen2::Error>() .map(get_dlopen_error_class) diff --git a/tests/registry/npm/@denotest/better-say-hello/1.0.0/say-hello.js b/tests/registry/npm/@denotest/better-say-hello/1.0.0/say-hello.js index 0b8d63cf4..5243d4902 100644 --- a/tests/registry/npm/@denotest/better-say-hello/1.0.0/say-hello.js +++ b/tests/registry/npm/@denotest/better-say-hello/1.0.0/say-hello.js @@ -1,2 +1,2 @@ import { sayBetterHello } from "./index.js"; -sayBetterHello();
\ No newline at end of file +sayBetterHello(); diff --git a/tests/registry/npm/@denotest/node-lifecycle-scripts/1.0.0/index.js b/tests/registry/npm/@denotest/node-lifecycle-scripts/1.0.0/index.js index 4eb9b107a..242fe92f5 100644 --- a/tests/registry/npm/@denotest/node-lifecycle-scripts/1.0.0/index.js +++ b/tests/registry/npm/@denotest/node-lifecycle-scripts/1.0.0/index.js @@ -1,5 +1,3 @@ -modules.export = { +module.exports = { value: 42 }; - -console.log('index.js', modules.export.value);
\ No newline at end of file diff --git a/tests/registry/npm/@denotest/node-lifecycle-scripts/1.0.0/package.json b/tests/registry/npm/@denotest/node-lifecycle-scripts/1.0.0/package.json index 3c6fa005f..085ab6414 100644 --- a/tests/registry/npm/@denotest/node-lifecycle-scripts/1.0.0/package.json +++ b/tests/registry/npm/@denotest/node-lifecycle-scripts/1.0.0/package.json @@ -6,6 +6,7 @@ "install": "echo install && cli-esm 'hello from install script'", "postinstall": "echo postinstall && npx cowsay postinstall" }, + "exports": "./index.js", "dependencies": { "@denotest/bin": "1.0.0" } diff --git a/tests/registry/npm/@denotest/say-hello/1.0.0/say-hello.js b/tests/registry/npm/@denotest/say-hello/1.0.0/say-hello.js index 8751b8e47..66d02cde5 100644 --- a/tests/registry/npm/@denotest/say-hello/1.0.0/say-hello.js +++ b/tests/registry/npm/@denotest/say-hello/1.0.0/say-hello.js @@ -1,2 +1,5 @@ import { sayHello } from "./index.js"; -console.log(sayHello());
\ No newline at end of file +console.log(sayHello()); +import path from "node:path"; +import fs from "node:fs"; +fs.writeSync(fs.openSync(path.join(process.env.INIT_CWD, "say-hello-output.txt"), "w"), sayHello()); diff --git a/tests/specs/install/install_entrypoint/lifecycle.out b/tests/specs/install/install_entrypoint/lifecycle.out index 8eae8ee12..2fd279875 100644 --- a/tests/specs/install/install_entrypoint/lifecycle.out +++ b/tests/specs/install/install_entrypoint/lifecycle.out @@ -5,18 +5,7 @@ Download http://localhost:4260/@denotest/node-lifecycle-scripts/1.0.0.tgz Download http://localhost:4260/@denotest/bin/1.0.0.tgz Initialize @denotest/node-lifecycle-scripts@1.0.0 Initialize @denotest/bin@1.0.0 +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'preinstall' script +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'install' script +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'postinstall' script [UNORDERED_END] -preinstall -deno preinstall.js -node preinstall.js -install -hello from install script -postinstall[WILDCARD] - _____________ -< postinstall > - ------------- - \ ^__^ - \ (oo)\_______ - (__)\ )\/\ - ||----w | - || || diff --git a/tests/specs/npm/lifecycle_scripts/__test__.jsonc b/tests/specs/npm/lifecycle_scripts/__test__.jsonc index b798b968f..589c131dd 100644 --- a/tests/specs/npm/lifecycle_scripts/__test__.jsonc +++ b/tests/specs/npm/lifecycle_scripts/__test__.jsonc @@ -5,7 +5,7 @@ "steps": [ { "args": "cache --allow-scripts=npm:@denotest/node-addon main.js", - "output": "[WILDCARD]gyp info ok \n" + "output": "[WILDCARD]Initialize @denotest/node-addon@1.0.0: running 'install' script\n" }, { "args": "run -A main.js", @@ -38,7 +38,7 @@ "steps": [ { // without running scripts (should warn) - "args": "cache all_lifecycles.js", + "args": "run all_lifecycles.js", "output": "all_lifecycles_not_run.out" }, { @@ -51,6 +51,23 @@ } ] }, + "deno_run_lifecycle_scripts": { + "steps": [ + { + // without running scripts (should warn) + "args": "run all_lifecycles.js", + "output": "all_lifecycles_not_run.out" + }, + { + // now run scripts + "args": "run --allow-scripts all_lifecycles.js", + // this test package covers running preinstall, install, and postinstall scripts + // it also exercises running bin packages (esbuild in this case), using `node` in a script + // (with and without node-only CLI flags), and using `npx` in a script + "output": "all_lifecycles_deno_run.out" + } + ] + }, "global_lifecycle_scripts": { "steps": [ { @@ -79,14 +96,12 @@ { // without running scripts (should warn) "args": "run all_lifecycles.js", - "output": "only_warns_first1.out", - "exitCode": 1 + "output": "only_warns_first1.out" }, { // without running scripts (does not warn) "args": "run all_lifecycles.js", - "output": "only_warns_first2.out", - "exitCode": 1 + "output": "only_warns_first2.out" }, { // should warn because this is an explicit install @@ -128,6 +143,13 @@ // we run the install script, we should use the correct binary (relative to the package) "args": "cache --allow-scripts conflicting_bin.js", "output": "conflicting_bin.out" + }, + { + "args": [ + "eval", + "console.log(Deno.readTextFileSync('./say-hello-output.txt'))" + ], + "output": "conflicting_bin2.out" } ] }, diff --git a/tests/specs/npm/lifecycle_scripts/all_lifecycles.out b/tests/specs/npm/lifecycle_scripts/all_lifecycles.out index 956c006dd..bdbb2b08e 100644 --- a/tests/specs/npm/lifecycle_scripts/all_lifecycles.out +++ b/tests/specs/npm/lifecycle_scripts/all_lifecycles.out @@ -1,14 +1,3 @@ 
-preinstall -deno preinstall.js -node preinstall.js -install -hello from install script -postinstall[WILDCARD] - _____________ -< postinstall > - ------------- - \ ^__^ - \ (oo)\_______ - (__)\ )\/\ - ||----w | - || || +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'preinstall' script +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'install' script +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'postinstall' script diff --git a/tests/specs/npm/lifecycle_scripts/all_lifecycles_deno_run.out b/tests/specs/npm/lifecycle_scripts/all_lifecycles_deno_run.out new file mode 100644 index 000000000..5f06f6214 --- /dev/null +++ b/tests/specs/npm/lifecycle_scripts/all_lifecycles_deno_run.out @@ -0,0 +1,4 @@ +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'preinstall' script +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'install' script +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'postinstall' script +value is 42 diff --git a/tests/specs/npm/lifecycle_scripts/all_lifecycles_not_run.out b/tests/specs/npm/lifecycle_scripts/all_lifecycles_not_run.out index 09324c845..cdfdeabe2 100644 --- a/tests/specs/npm/lifecycle_scripts/all_lifecycles_not_run.out +++ b/tests/specs/npm/lifecycle_scripts/all_lifecycles_not_run.out @@ -12,3 +12,4 @@ Warning The following packages contained npm lifecycle scripts (preinstall/insta ┠─ This may cause the packages to not work correctly. ┖─ To run lifecycle scripts, use the `--allow-scripts` flag with `deno install`: deno install --allow-scripts=npm:@denotest/node-lifecycle-scripts@1.0.0 +value is 42 diff --git a/tests/specs/npm/lifecycle_scripts/conflicting_bin.out b/tests/specs/npm/lifecycle_scripts/conflicting_bin.out index c70aafdd5..bae5275dc 100644 --- a/tests/specs/npm/lifecycle_scripts/conflicting_bin.out +++ b/tests/specs/npm/lifecycle_scripts/conflicting_bin.out @@ -8,6 +8,5 @@ Download http://localhost:4260/@denotest/say-hello/1.0.0.tgz Initialize @denotest/better-say-hello@1.0.0 Initialize @denotest/say-hello-on-install@1.0.0 Initialize @denotest/say-hello@1.0.0 +Initialize @denotest/say-hello-on-install@1.0.0: running 'install' script [UNORDERED_END] -install script -@denotest/say-hello says hello! diff --git a/tests/specs/npm/lifecycle_scripts/conflicting_bin2.out b/tests/specs/npm/lifecycle_scripts/conflicting_bin2.out new file mode 100644 index 000000000..b4640b02c --- /dev/null +++ b/tests/specs/npm/lifecycle_scripts/conflicting_bin2.out @@ -0,0 +1 @@ +@denotest/say-hello says hello! 
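A note on the errors.rs hunk earlier in this diff: each extension error handled there (TlsError, CronError, CanvasError, CacheError, BroadcastChannelError) is a thiserror-derived enum, and the runtime resolves the JavaScript error class by downcasting the boxed AnyError and matching on the variant. Below is a minimal sketch of that pattern, assuming the thiserror and anyhow crates and a hypothetical DemoError enum; it is not the actual Deno code.

    use thiserror::Error;

    // Hypothetical extension error, analogous to the enums handled in errors.rs.
    #[derive(Debug, Error)]
    enum DemoError {
        #[error("name exceeded maximum length")]
        NameExceeded,
        #[error(transparent)]
        Io(#[from] std::io::Error),
    }

    // Map each variant to the class name of the JS error to construct.
    fn get_demo_error_class(e: &DemoError) -> &'static str {
        match e {
            DemoError::NameExceeded => "TypeError",
            DemoError::Io(_) => "Error",
        }
    }

    // Runtime-level resolver: try each known error type via downcast_ref,
    // mirroring the chain added to get_error_class_name in the errors.rs hunk.
    fn get_error_class_name(e: &anyhow::Error) -> Option<&'static str> {
        e.downcast_ref::<DemoError>().map(get_demo_error_class)
    }

    fn main() {
        let err: anyhow::Error = DemoError::NameExceeded.into();
        assert_eq!(get_error_class_name(&err), Some("TypeError"));
    }

Keeping the per-variant match next to each error definition makes the JS-visible class ("TypeError", "Error", ...) explicit for every failure mode, rather than routing everything through a single generic error string.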
diff --git a/tests/specs/npm/lifecycle_scripts/future_install_all_lifecycles.out b/tests/specs/npm/lifecycle_scripts/future_install_all_lifecycles.out index f62079d3f..bdbb2b08e 100644 --- a/tests/specs/npm/lifecycle_scripts/future_install_all_lifecycles.out +++ b/tests/specs/npm/lifecycle_scripts/future_install_all_lifecycles.out @@ -1,15 +1,3 @@ -preinstall -deno preinstall.js -node preinstall.js -install -hello from install script -postinstall -[WILDCARD] - _____________ -< postinstall > - ------------- - \ ^__^ - \ (oo)\_______ - (__)\ )\/\ - ||----w | - || || +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'preinstall' script +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'install' script +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'postinstall' script diff --git a/tests/specs/npm/lifecycle_scripts/no_deno_json.out b/tests/specs/npm/lifecycle_scripts/no_deno_json.out index 38a461449..8509d8f9f 100644 --- a/tests/specs/npm/lifecycle_scripts/no_deno_json.out +++ b/tests/specs/npm/lifecycle_scripts/no_deno_json.out @@ -5,7 +5,6 @@ Download http://localhost:4260/@denotest/lifecycle-scripts-cjs/1.0.0.tgz Download http://localhost:4260/@denotest/bin/1.0.0.tgz Initialize @denotest/lifecycle-scripts-cjs@1.0.0 Initialize @denotest/bin@1.0.0 +Initialize @denotest/lifecycle-scripts-cjs@1.0.0: running 'preinstall' script +Initialize @denotest/lifecycle-scripts-cjs@1.0.0: running 'install' script [UNORDERED_END] -preinstall -install -hello from install script diff --git a/tests/specs/npm/lifecycle_scripts/node_gyp_not_found.out b/tests/specs/npm/lifecycle_scripts/node_gyp_not_found.out index 06c856cd9..81577e6ba 100644 --- a/tests/specs/npm/lifecycle_scripts/node_gyp_not_found.out +++ b/tests/specs/npm/lifecycle_scripts/node_gyp_not_found.out @@ -2,8 +2,11 @@ Download http://localhost:4260/@denotest/node-addon-implicit-node-gyp Download http://localhost:4260/@denotest/node-addon-implicit-node-gyp/1.0.0.tgz Initialize @denotest/node-addon-implicit-node-gyp@1.0.0 +Initialize @denotest/node-addon-implicit-node-gyp@1.0.0: running 'install' script [UNORDERED_END] Warning node-gyp was used in a script, but was not listed as a dependency. Either add it as a dependency or install it globally (e.g. `npm install -g node-gyp`) -[WILDCARD] error: script 'install' in '@denotest/node-addon-implicit-node-gyp@1.0.0' failed with exit code 1 +stderr: +Error launching 'node-gyp': [WILDCARD] + error: failed to run scripts for packages: @denotest/node-addon-implicit-node-gyp@1.0.0 diff --git a/tests/specs/npm/lifecycle_scripts/only_warns_first1.out b/tests/specs/npm/lifecycle_scripts/only_warns_first1.out index 947777b5b..cdfdeabe2 100644 --- a/tests/specs/npm/lifecycle_scripts/only_warns_first1.out +++ b/tests/specs/npm/lifecycle_scripts/only_warns_first1.out @@ -12,5 +12,4 @@ Warning The following packages contained npm lifecycle scripts (preinstall/insta ┠─ This may cause the packages to not work correctly. 
┖─ To run lifecycle scripts, use the `--allow-scripts` flag with `deno install`: deno install --allow-scripts=npm:@denotest/node-lifecycle-scripts@1.0.0 -error: Uncaught SyntaxError: The requested module 'npm:@denotest/node-lifecycle-scripts' does not provide an export named 'value' -[WILDCARD] +value is 42 diff --git a/tests/specs/npm/lifecycle_scripts/only_warns_first2.out b/tests/specs/npm/lifecycle_scripts/only_warns_first2.out index f6a02c727..9e7806159 100644 --- a/tests/specs/npm/lifecycle_scripts/only_warns_first2.out +++ b/tests/specs/npm/lifecycle_scripts/only_warns_first2.out @@ -1,3 +1 @@ -[# note no warning] -error: Uncaught SyntaxError: The requested module 'npm:@denotest/node-lifecycle-scripts' does not provide an export named 'value' -[WILDCARD] +value is 42 diff --git a/tests/unit_node/util_test.ts b/tests/unit_node/util_test.ts index edd500262..a47c231a1 100644 --- a/tests/unit_node/util_test.ts +++ b/tests/unit_node/util_test.ts @@ -348,3 +348,8 @@ Deno.test("[util] aborted()", async () => { await promise; assertEquals(done, true); }); + +Deno.test("[util] styleText()", () => { + const redText = util.styleText("red", "error"); + assertEquals(redText, "\x1B[31merror\x1B[39m"); +}); |
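The new util_test.ts case pins util.styleText("red", "error") to the exact ANSI-wrapped string "\x1B[31merror\x1B[39m": SGR parameter 31 switches the foreground to red and 39 restores the default foreground. For illustration only (not Deno's node:util implementation), the same framing in Rust looks like this:

    // Illustrative sketch: reproduce the ANSI framing the styleText() test expects.
    // SGR 31 = red foreground, SGR 39 = default foreground.
    fn style_red(text: &str) -> String {
        format!("\x1b[31m{text}\x1b[39m")
    }

    fn main() {
        assert_eq!(style_red("error"), "\x1b[31merror\x1b[39m");
        println!("{}", style_red("error"));
    }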