diff options
Diffstat (limited to 'cli')
141 files changed, 7381 insertions, 9823 deletions
diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 8d1d1d124..374f3dae5 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno" -version = "2.0.0" +version = "2.0.6" authors.workspace = true default-run = "deno" edition.workspace = true @@ -38,6 +38,11 @@ path = "./bench/lsp_bench_standalone.rs" [features] default = ["upgrade", "__vendored_zlib_ng"] +# A feature that enables heap profiling with dhat on Linux. +# 1. Compile with `cargo build --profile=release-with-debug --features=dhat-heap` +# 2. Run the executable. It will output a dhat-heap.json file. +# 3. Open the json file in https://nnethercote.github.io/dh_view/dh_view.html +dhat-heap = ["dhat"] # A feature that enables the upgrade subcommand and the background check for # available updates (of deno binary). This is typically disabled for (Linux) # distribution packages. @@ -64,12 +69,12 @@ winres.workspace = true [dependencies] deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] } -deno_cache_dir = { workspace = true } -deno_config = { version = "=0.37.1", features = ["workspace", "sync"] } +deno_cache_dir.workspace = true +deno_config.workspace = true deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] } -deno_doc = { version = "0.153.0", features = ["html"] } -deno_graph = { version = "=0.83.3" } -deno_lint = { version = "=0.67.0", features = ["docs"] } +deno_doc = { version = "0.156.0", default-features = false, features = ["rust", "html", "syntect"] } +deno_graph = { version = "=0.84.1" } +deno_lint = { version = "=0.68.0", features = ["docs"] } deno_lockfile.workspace = true deno_npm.workspace = true deno_package_json.workspace = true @@ -79,9 +84,7 @@ deno_runtime = { workspace = true, features = ["include_js_files_for_snapshottin deno_semver.workspace = true deno_task_shell = "=0.18.1" deno_terminal.workspace = true -eszip = "=0.79.1" -libsui 
= "0.4.0" -napi_sym.workspace = true +libsui = "0.5.0" node_resolver.workspace = true anstream = "0.6.14" @@ -94,16 +97,17 @@ chrono = { workspace = true, features = ["now"] } clap = { version = "=4.5.16", features = ["env", "string", "wrap_help", "error-context"] } clap_complete = "=4.5.24" clap_complete_fig = "=4.5.2" -color-print = "0.3.5" +color-print.workspace = true console_static_text.workspace = true dashmap.workspace = true data-encoding.workspace = true +dhat = { version = "0.3.3", optional = true } dissimilar = "=1.0.4" dotenvy = "0.15.7" -dprint-plugin-json = "=0.19.3" -dprint-plugin-jupyter = "=0.1.3" +dprint-plugin-json = "=0.19.4" +dprint-plugin-jupyter = "=0.1.5" dprint-plugin-markdown = "=0.17.8" -dprint-plugin-typescript = "=0.93.0" +dprint-plugin-typescript = "=0.93.2" env_logger = "=0.10.0" fancy-regex = "=0.10.0" faster-hex.workspace = true @@ -117,15 +121,15 @@ http-body-util.workspace = true hyper-util.workspace = true import_map = { version = "=0.20.1", features = ["ext"] } indexmap.workspace = true -jsonc-parser.workspace = true -jupyter_runtime = { package = "runtimelib", version = "=0.14.0" } +jsonc-parser = { workspace = true, features = ["cst", "serde"] } +jupyter_runtime = { package = "runtimelib", version = "=0.19.0", features = ["tokio-runtime"] } lazy-regex.workspace = true libc.workspace = true libz-sys.workspace = true log = { workspace = true, features = ["serde"] } lsp-types.workspace = true -malva = "=0.10.1" -markup_fmt = "=0.13.1" +malva = "=0.11.0" +markup_fmt = "=0.15.0" memmem.workspace = true monch.workspace = true notify.workspace = true @@ -162,7 +166,6 @@ typed-arena = "=2.0.2" uuid = { workspace = true, features = ["serde"] } walkdir = "=2.3.2" which.workspace = true -yoke.workspace = true zeromq.workspace = true zip = { version = "2.1.6", default-features = false, features = ["deflate-flate2"] } zstd.workspace = true @@ -170,14 +173,12 @@ zstd.workspace = true [target.'cfg(windows)'.dependencies] junction.workspace = 
true winapi = { workspace = true, features = ["knownfolders", "mswsock", "objbase", "shlobj", "tlhelp32", "winbase", "winerror", "winsock2"] } -windows-sys.workspace = true [target.'cfg(unix)'.dependencies] nix.workspace = true [dev-dependencies] deno_bench_util.workspace = true -libuv-sys-lite = "=1.48.2" pretty_assertions.workspace = true test_util.workspace = true diff --git a/cli/args/flags.rs b/cli/args/flags.rs index d59e5ac1a..39db12b5f 100644 --- a/cli/args/flags.rs +++ b/cli/args/flags.rs @@ -36,6 +36,7 @@ use deno_path_util::normalize_path; use deno_path_util::url_to_file_path; use deno_runtime::deno_permissions::PermissionsOptions; use deno_runtime::deno_permissions::SysDescriptor; +use deno_runtime::ops::otel::OtelConfig; use log::debug; use log::Level; use serde::Deserialize; @@ -575,7 +576,7 @@ fn parse_packages_allowed_scripts(s: &str) -> Result<String, AnyError> { pub struct UnstableConfig { // TODO(bartlomieju): remove in Deno 2.5 pub legacy_flag_enabled: bool, // --unstable - pub bare_node_builtins: bool, // --unstable-bare-node-builts + pub bare_node_builtins: bool, pub sloppy_imports: bool, pub features: Vec<String>, // --unstabe-kv --unstable-cron } @@ -612,7 +613,7 @@ pub struct Flags { pub internal: InternalFlags, pub ignore: Vec<String>, pub import_map_path: Option<String>, - pub env_file: Option<String>, + pub env_file: Option<Vec<String>>, pub inspect_brk: Option<SocketAddr>, pub inspect_wait: Option<SocketAddr>, pub inspect: Option<SocketAddr>, @@ -967,6 +968,24 @@ impl Flags { args } + pub fn otel_config(&self) -> Option<OtelConfig> { + if self + .unstable_config + .features + .contains(&String::from("otel")) + { + Some(OtelConfig { + runtime_name: Cow::Borrowed("deno"), + runtime_version: Cow::Borrowed(crate::version::DENO_VERSION_INFO.deno), + deterministic: std::env::var("DENO_UNSTABLE_OTEL_DETERMINISTIC") + .is_ok(), + ..Default::default() + }) + } else { + None + } + } + /// Extract the paths the config file should be discovered 
from. /// /// Returns `None` if the config file should not be auto-discovered. @@ -1177,9 +1196,9 @@ static DENO_HELP: &str = cstr!( <y>Dependency management:</> <g>add</> Add dependencies - <p(245)>deno add @std/assert | deno add npm:express</> - <g>install</> Install script as an executable - <g>uninstall</> Uninstall a script previously installed with deno install + <p(245)>deno add jsr:@std/assert | deno add npm:express</> + <g>install</> Installs dependencies either in the local project or globally to a bin directory + <g>uninstall</> Uninstalls a dependency or an executable script in the installation root's bin directory <g>remove</> Remove dependencies from the configuration file <y>Tooling:</> @@ -1528,7 +1547,7 @@ pub fn clap_root() -> Command { ); run_args(Command::new("deno"), true) - .args(unstable_args(UnstableArgsConfig::ResolutionAndRuntime)) + .with_unstable_args(UnstableArgsConfig::ResolutionAndRuntime) .next_line_help(false) .bin_name("deno") .styles( @@ -1630,7 +1649,7 @@ fn command( ) -> Command { Command::new(name) .about(about) - .args(unstable_args(unstable_args_config)) + .with_unstable_args(unstable_args_config) } fn help_subcommand(app: &Command) -> Command { @@ -1658,10 +1677,10 @@ fn add_subcommand() -> Command { "add", cstr!( "Add dependencies to your configuration file. 
- <p(245)>deno add @std/path</> + <p(245)>deno add jsr:@std/path</> You can add multiple dependencies at once: - <p(245)>deno add @std/path @std/assert</>" + <p(245)>deno add jsr:@std/path jsr:@std/assert</>" ), UnstableArgsConfig::None, ) @@ -1855,6 +1874,7 @@ Unless --reload is specified, this command will not re-download already cached d .required_unless_present("help") .value_hint(ValueHint::FilePath), ) + .arg(frozen_lockfile_arg()) .arg(allow_import_arg()) } ) @@ -1919,6 +1939,7 @@ On the first invocation with deno will download the proper binary and cache it i ]) .help_heading(COMPILE_HEADING), ) + .arg(no_code_cache_arg()) .arg( Arg::new("no-terminal") .long("no-terminal") @@ -2272,7 +2293,7 @@ Ignore formatting a file by adding an ignore comment at the top of the file: "sass", "less", "html", "svelte", "vue", "astro", "yml", "yaml", "ipynb", ]) - .help_heading(FMT_HEADING), + .help_heading(FMT_HEADING).requires("files"), ) .arg( Arg::new("ignore") @@ -2469,7 +2490,7 @@ in the package cache. If no dependency is specified, installs all dependencies l If the <p(245)>--entrypoint</> flag is passed, installs the dependencies of the specified entrypoint(s). <p(245)>deno install</> - <p(245)>deno install @std/bytes</> + <p(245)>deno install jsr:@std/bytes</> <p(245)>deno install npm:chalk</> <p(245)>deno install --entrypoint entry1.ts entry2.ts</> @@ -2768,8 +2789,13 @@ It is especially useful for quick prototyping and checking snippets of code. TypeScript is supported, however it is not type-checked, only transpiled." ), UnstableArgsConfig::ResolutionAndRuntime) - .defer(|cmd| runtime_args(cmd, true, true, true) - .arg(check_arg(false)) + .defer(|cmd| { + let cmd = compile_args_without_check_args(cmd); + let cmd = inspect_args(cmd); + let cmd = permission_args(cmd, None); + let cmd = runtime_misc_args(cmd); + + cmd .arg( Arg::new("eval-file") .long("eval-file") @@ -2788,7 +2814,7 @@ TypeScript is supported, however it is not type-checked, only transpiled." 
.after_help(cstr!("<y>Environment variables:</> <g>DENO_REPL_HISTORY</> Set REPL history file path. History file is disabled when the value is empty. <p(245)>[default: $DENO_DIR/deno_history.txt]</>")) - ) + }) .arg(env_file_arg()) .arg( Arg::new("args") @@ -3381,8 +3407,7 @@ fn permission_args(app: Command, requires: Option<&'static str>) -> Command { .value_name("IP_OR_HOSTNAME") .help("Allow network access. Optionally specify allowed IP addresses and host names, with ports as necessary") .value_parser(flags_net::validator) - .hide(true) - ; + .hide(true); if let Some(requires) = requires { arg = arg.requires(requires) } @@ -3661,6 +3686,10 @@ fn runtime_args( } else { app }; + runtime_misc_args(app) +} + +fn runtime_misc_args(app: Command) -> Command { app .arg(frozen_lockfile_arg()) .arg(cached_only_arg()) @@ -3747,12 +3776,14 @@ fn env_file_arg() -> Arg { .help(cstr!( "Load environment variables from local file <p(245)>Only the first environment variable with a given key is used. - Existing process environment variables are not overwritten.</>" + Existing process environment variables are not overwritten, so if variables with the same names already exist in the environment, their values will be preserved. + Where multiple declarations for the same environment variable exist in your .env file, the first one encountered is applied. 
This is determined by the order of the files you pass as arguments.</>" )) .value_hint(ValueHint::FilePath) .default_missing_value(".env") .require_equals(true) .num_args(0..=1) + .action(ArgAction::Append) } fn reload_arg() -> Arg { @@ -4142,23 +4173,29 @@ enum UnstableArgsConfig { ResolutionAndRuntime, } -struct UnstableArgsIter { - idx: usize, - cfg: UnstableArgsConfig, +trait CommandExt { + fn with_unstable_args(self, cfg: UnstableArgsConfig) -> Self; } -impl Iterator for UnstableArgsIter { - type Item = Arg; +impl CommandExt for Command { + fn with_unstable_args(self, cfg: UnstableArgsConfig) -> Self { + let mut next_display_order = { + let mut value = 1000; + move || { + value += 1; + value + } + }; - fn next(&mut self) -> Option<Self::Item> { - let arg = if self.idx == 0 { + let mut cmd = self.arg( Arg::new("unstable") - .long("unstable") - .help(cstr!("Enable all unstable features and APIs. Instead of using this flag, consider enabling individual unstable features + .long("unstable") + .help(cstr!("Enable all unstable features and APIs. 
Instead of using this flag, consider enabling individual unstable features <p(245)>To view the list of individual unstable feature flags, run this command again with --help=unstable</>")) - .action(ArgAction::SetTrue) - .hide(matches!(self.cfg, UnstableArgsConfig::None)) - } else if self.idx == 1 { + .action(ArgAction::SetTrue) + .hide(matches!(cfg, UnstableArgsConfig::None)) + .display_order(next_display_order()) + ).arg( Arg::new("unstable-bare-node-builtins") .long("unstable-bare-node-builtins") .help("Enable unstable bare node builtins feature") @@ -4166,20 +4203,36 @@ impl Iterator for UnstableArgsIter { .value_parser(FalseyValueParser::new()) .action(ArgAction::SetTrue) .hide(true) - .long_help(match self.cfg { + .long_help(match cfg { UnstableArgsConfig::None => None, UnstableArgsConfig::ResolutionOnly | UnstableArgsConfig::ResolutionAndRuntime => Some("true"), }) .help_heading(UNSTABLE_HEADING) - } else if self.idx == 2 { + .display_order(next_display_order()), + ).arg( + Arg::new("unstable-detect-cjs") + .long("unstable-detect-cjs") + .help("Reads the package.json type field in a project to treat .js files as .cjs") + .value_parser(FalseyValueParser::new()) + .action(ArgAction::SetTrue) + .hide(true) + .long_help(match cfg { + UnstableArgsConfig::None => None, + UnstableArgsConfig::ResolutionOnly + | UnstableArgsConfig::ResolutionAndRuntime => Some("true"), + }) + .help_heading(UNSTABLE_HEADING) + .display_order(next_display_order()) + ).arg( Arg::new("unstable-byonm") .long("unstable-byonm") .value_parser(FalseyValueParser::new()) .action(ArgAction::SetTrue) .hide(true) .help_heading(UNSTABLE_HEADING) - } else if self.idx == 3 { + .display_order(next_display_order()), + ).arg( Arg::new("unstable-sloppy-imports") .long("unstable-sloppy-imports") .help("Enable unstable resolving of specifiers by extension probing, .js to .ts, and directory probing") @@ -4187,40 +4240,39 @@ impl Iterator for UnstableArgsIter { .value_parser(FalseyValueParser::new()) 
.action(ArgAction::SetTrue) .hide(true) - .long_help(match self.cfg { + .long_help(match cfg { UnstableArgsConfig::None => None, UnstableArgsConfig::ResolutionOnly | UnstableArgsConfig::ResolutionAndRuntime => Some("true") }) .help_heading(UNSTABLE_HEADING) - } else if self.idx > 3 { - let granular_flag = crate::UNSTABLE_GRANULAR_FLAGS.get(self.idx - 4)?; - Arg::new(format!("unstable-{}", granular_flag.name)) - .long(format!("unstable-{}", granular_flag.name)) - .help(granular_flag.help_text) - .action(ArgAction::SetTrue) - .hide(true) - .help_heading(UNSTABLE_HEADING) - // we don't render long help, so using it here as a sort of metadata - .long_help(if granular_flag.show_in_help { - match self.cfg { - UnstableArgsConfig::None | UnstableArgsConfig::ResolutionOnly => { - None + .display_order(next_display_order()) + ); + + for granular_flag in crate::UNSTABLE_GRANULAR_FLAGS.iter() { + cmd = cmd.arg( + Arg::new(format!("unstable-{}", granular_flag.name)) + .long(format!("unstable-{}", granular_flag.name)) + .help(granular_flag.help_text) + .action(ArgAction::SetTrue) + .hide(true) + .help_heading(UNSTABLE_HEADING) + // we don't render long help, so using it here as a sort of metadata + .long_help(if granular_flag.show_in_help { + match cfg { + UnstableArgsConfig::None | UnstableArgsConfig::ResolutionOnly => { + None + } + UnstableArgsConfig::ResolutionAndRuntime => Some("true"), } - UnstableArgsConfig::ResolutionAndRuntime => Some("true"), - } - } else { - None - }) - } else { - return None; - }; - self.idx += 1; - Some(arg.display_order(self.idx + 1000)) - } -} + } else { + None + }) + .display_order(next_display_order()), + ); + } -fn unstable_args(cfg: UnstableArgsConfig) -> impl IntoIterator<Item = Arg> { - UnstableArgsIter { idx: 0, cfg } + cmd + } } fn allow_scripts_arg_parse( @@ -4342,6 +4394,7 @@ fn check_parse( flags.type_check_mode = TypeCheckMode::Local; compile_args_without_check_parse(flags, matches)?; unstable_args_parse(flags, matches, 
UnstableArgsConfig::ResolutionAndRuntime); + frozen_lockfile_arg_parse(flags, matches); let files = matches.remove_many::<String>("file").unwrap().collect(); if matches.get_flag("all") || matches.get_flag("remote") { flags.type_check_mode = TypeCheckMode::All; @@ -4379,6 +4432,8 @@ fn compile_parse( }; ext_arg_parse(flags, matches); + flags.code_cache_enabled = !matches.get_flag("no-code-cache"); + flags.subcommand = DenoSubcommand::Compile(CompileFlags { source_file, output, @@ -4858,8 +4913,18 @@ fn repl_parse( flags: &mut Flags, matches: &mut ArgMatches, ) -> clap::error::Result<()> { - runtime_args_parse(flags, matches, true, true, true)?; - unsafely_ignore_certificate_errors_parse(flags, matches); + unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionAndRuntime); + compile_args_without_check_parse(flags, matches)?; + cached_only_arg_parse(flags, matches); + frozen_lockfile_arg_parse(flags, matches); + permission_args_parse(flags, matches)?; + inspect_arg_parse(flags, matches); + location_arg_parse(flags, matches); + v8_flags_arg_parse(flags, matches); + seed_arg_parse(flags, matches); + enable_testing_features_arg_parse(flags, matches); + env_file_arg_parse(flags, matches); + strace_ops_parse(flags, matches); let eval_files = matches .remove_many::<String>("eval-file") @@ -5427,7 +5492,9 @@ fn import_map_arg_parse(flags: &mut Flags, matches: &mut ArgMatches) { } fn env_file_arg_parse(flags: &mut Flags, matches: &mut ArgMatches) { - flags.env_file = matches.remove_one::<String>("env-file"); + flags.env_file = matches + .get_many::<String>("env-file") + .map(|values| values.cloned().collect()); } fn reload_arg_parse( @@ -6758,6 +6825,32 @@ mod tests { ..Flags::default() } ); + + let r = flags_from_vec(svec!["deno", "fmt", "--ext", "html"]); + assert!(r.is_err()); + let r = flags_from_vec(svec!["deno", "fmt", "--ext", "html", "./**"]); + assert_eq!( + r.unwrap(), + Flags { + subcommand: DenoSubcommand::Fmt(FmtFlags { + check: false, + files: 
FileFlags { + include: vec!["./**".to_string()], + ignore: vec![], + }, + use_tabs: None, + line_width: None, + indent_width: None, + single_quote: None, + prose_wrap: None, + no_semicolons: None, + unstable_component: false, + watch: Default::default(), + }), + ext: Some("html".to_string()), + ..Flags::default() + } + ); } #[test] @@ -7337,7 +7430,7 @@ mod tests { allow_all: true, ..Default::default() }, - env_file: Some(".example.env".to_owned()), + env_file: Some(vec![".example.env".to_owned()]), ..Flags::default() } ); @@ -7406,7 +7499,7 @@ mod tests { #[test] fn repl_with_flags() { #[rustfmt::skip] - let r = flags_from_vec(svec!["deno", "repl", "-A", "--import-map", "import_map.json", "--no-remote", "--config", "tsconfig.json", "--no-check", "--reload", "--lock", "lock.json", "--cert", "example.crt", "--cached-only", "--location", "https:foo", "--v8-flags=--help", "--seed", "1", "--inspect=127.0.0.1:9229", "--unsafely-ignore-certificate-errors", "--env=.example.env"]); + let r = flags_from_vec(svec!["deno", "repl", "-A", "--import-map", "import_map.json", "--no-remote", "--config", "tsconfig.json", "--reload", "--lock", "lock.json", "--cert", "example.crt", "--cached-only", "--location", "https:foo", "--v8-flags=--help", "--seed", "1", "--inspect=127.0.0.1:9229", "--unsafely-ignore-certificate-errors", "--env=.example.env"]); assert_eq!( r.unwrap(), Flags { @@ -7431,7 +7524,7 @@ mod tests { allow_all: true, ..Default::default() }, - env_file: Some(".example.env".to_owned()), + env_file: Some(vec![".example.env".to_owned()]), unsafely_ignore_certificate_errors: Some(vec![]), ..Flags::default() } @@ -7454,7 +7547,6 @@ mod tests { allow_write: Some(vec![]), ..Default::default() }, - type_check_mode: TypeCheckMode::None, ..Flags::default() } ); @@ -7476,7 +7568,6 @@ mod tests { eval: None, is_default_command: false, }), - type_check_mode: TypeCheckMode::None, ..Flags::default() } ); @@ -8081,7 +8172,7 @@ mod tests { subcommand: 
DenoSubcommand::Run(RunFlags::new_default( "script.ts".to_string(), )), - env_file: Some(".env".to_owned()), + env_file: Some(vec![".env".to_owned()]), code_cache_enabled: true, ..Flags::default() } @@ -8097,7 +8188,7 @@ mod tests { subcommand: DenoSubcommand::Run(RunFlags::new_default( "script.ts".to_string(), )), - env_file: Some(".env".to_owned()), + env_file: Some(vec![".env".to_owned()]), code_cache_enabled: true, ..Flags::default() } @@ -8130,7 +8221,7 @@ mod tests { subcommand: DenoSubcommand::Run(RunFlags::new_default( "script.ts".to_string(), )), - env_file: Some(".another_env".to_owned()), + env_file: Some(vec![".another_env".to_owned()]), code_cache_enabled: true, ..Flags::default() } @@ -8151,7 +8242,29 @@ mod tests { subcommand: DenoSubcommand::Run(RunFlags::new_default( "script.ts".to_string(), )), - env_file: Some(".another_env".to_owned()), + env_file: Some(vec![".another_env".to_owned()]), + code_cache_enabled: true, + ..Flags::default() + } + ); + } + + #[test] + fn run_multiple_env_file_defined() { + let r = flags_from_vec(svec![ + "deno", + "run", + "--env-file", + "--env-file=.two_env", + "script.ts" + ]); + assert_eq!( + r.unwrap(), + Flags { + subcommand: DenoSubcommand::Run(RunFlags::new_default( + "script.ts".to_string(), + )), + env_file: Some(vec![".env".to_owned(), ".two_env".to_owned()]), code_cache_enabled: true, ..Flags::default() } @@ -8294,7 +8407,7 @@ mod tests { allow_read: Some(vec![]), ..Default::default() }, - env_file: Some(".example.env".to_owned()), + env_file: Some(vec![".example.env".to_owned()]), ..Flags::default() } ); @@ -9930,6 +10043,7 @@ mod tests { include: vec![] }), type_check_mode: TypeCheckMode::Local, + code_cache_enabled: true, ..Flags::default() } ); @@ -9938,7 +10052,7 @@ mod tests { #[test] fn compile_with_flags() { #[rustfmt::skip] - let r = flags_from_vec(svec!["deno", "compile", "--import-map", "import_map.json", "--no-remote", "--config", "tsconfig.json", "--no-check", 
"--unsafely-ignore-certificate-errors", "--reload", "--lock", "lock.json", "--cert", "example.crt", "--cached-only", "--location", "https:foo", "--allow-read", "--allow-net", "--v8-flags=--help", "--seed", "1", "--no-terminal", "--icon", "favicon.ico", "--output", "colors", "--env=.example.env", "https://examples.deno.land/color-logging.ts", "foo", "bar", "-p", "8080"]); + let r = flags_from_vec(svec!["deno", "compile", "--import-map", "import_map.json", "--no-code-cache", "--no-remote", "--config", "tsconfig.json", "--no-check", "--unsafely-ignore-certificate-errors", "--reload", "--lock", "lock.json", "--cert", "example.crt", "--cached-only", "--location", "https:foo", "--allow-read", "--allow-net", "--v8-flags=--help", "--seed", "1", "--no-terminal", "--icon", "favicon.ico", "--output", "colors", "--env=.example.env", "https://examples.deno.land/color-logging.ts", "foo", "bar", "-p", "8080"]); assert_eq!( r.unwrap(), Flags { @@ -9954,6 +10068,7 @@ mod tests { }), import_map_path: Some("import_map.json".to_string()), no_remote: true, + code_cache_enabled: false, config_flag: ConfigFlag::Path("tsconfig.json".to_owned()), type_check_mode: TypeCheckMode::None, reload: true, @@ -9969,7 +10084,7 @@ mod tests { unsafely_ignore_certificate_errors: Some(vec![]), v8_flags: svec!["--help", "--random-seed=1"], seed: Some(1), - env_file: Some(".example.env".to_owned()), + env_file: Some(vec![".example.env".to_owned()]), ..Flags::default() } ); diff --git a/cli/args/flags_net.rs b/cli/args/flags_net.rs index 88ffcf0e4..abfcf2838 100644 --- a/cli/args/flags_net.rs +++ b/cli/args/flags_net.rs @@ -51,7 +51,7 @@ pub fn parse(paths: Vec<String>) -> clap::error::Result<Vec<String>> { } } else { NetDescriptor::parse(&host_and_port).map_err(|e| { - clap::Error::raw(clap::error::ErrorKind::InvalidValue, format!("{e:?}")) + clap::Error::raw(clap::error::ErrorKind::InvalidValue, e.to_string()) })?; out.push(host_and_port) } diff --git a/cli/args/mod.rs b/cli/args/mod.rs index 
07906a86a..ec75d7a10 100644 --- a/cli/args/mod.rs +++ b/cli/args/mod.rs @@ -7,6 +7,7 @@ mod import_map; mod lockfile; mod package_json; +use deno_ast::MediaType; use deno_ast::SourceMapOption; use deno_config::deno_json::NodeModulesDirMode; use deno_config::workspace::CreateResolverOptions; @@ -27,13 +28,13 @@ use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot; use deno_npm::NpmSystemInfo; use deno_path_util::normalize_path; +use deno_runtime::ops::otel::OtelConfig; use deno_semver::npm::NpmPackageReqReference; use import_map::resolve_import_map_value_from_specifier; pub use deno_config::deno_json::BenchConfig; pub use deno_config::deno_json::ConfigFile; pub use deno_config::deno_json::FmtOptionsConfig; -pub use deno_config::deno_json::JsxImportSourceConfig; pub use deno_config::deno_json::LintRulesConfig; pub use deno_config::deno_json::ProseWrap; pub use deno_config::deno_json::TsConfig; @@ -46,6 +47,7 @@ pub use flags::*; pub use lockfile::CliLockfile; pub use lockfile::CliLockfileReadFromPathOptions; pub use package_json::NpmInstallDepsProvider; +pub use package_json::PackageJsonDepValueParseWithLocationError; use deno_ast::ModuleSpecifier; use deno_core::anyhow::bail; @@ -200,6 +202,8 @@ pub fn ts_config_to_transpile_and_emit_options( precompile_jsx_dynamic_props: None, transform_jsx, var_decl_imports: false, + // todo(dsherret): support verbatim_module_syntax here properly + verbatim_module_syntax: false, }, deno_ast::EmitOptions { inline_sources: options.inline_sources, @@ -578,6 +582,7 @@ fn discover_npmrc( let resolved = npmrc .as_resolved(npm_registry_url()) .context("Failed to resolve .npmrc options")?; + log::debug!(".npmrc found at: '{}'", path.display()); Ok(Arc::new(resolved)) } @@ -818,10 +823,8 @@ impl CliOptions { }; let msg = format!("DANGER: TLS certificate validation is disabled {}", domains); - #[allow(clippy::print_stderr)] { - // use eprintln instead of log::warn so this always gets shown - 
eprintln!("{}", colors::yellow(msg)); + log::error!("{}", colors::yellow(msg)); } } @@ -865,12 +868,8 @@ impl CliOptions { } else { &[] }; - let config_parse_options = deno_config::deno_json::ConfigParseOptions { - include_task_comments: matches!( - flags.subcommand, - DenoSubcommand::Task(..) - ), - }; + let config_parse_options = + deno_config::deno_json::ConfigParseOptions::default(); let discover_pkg_json = flags.config_flag != ConfigFlag::Disabled && !flags.no_npm && !has_flag_env_var("DENO_NO_PACKAGE_JSON"); @@ -963,6 +962,9 @@ impl CliOptions { match self.sub_command() { DenoSubcommand::Cache(_) => GraphKind::All, DenoSubcommand::Check(_) => GraphKind::TypesOnly, + DenoSubcommand::Install(InstallFlags { + kind: InstallKind::Local(_), + }) => GraphKind::All, _ => self.type_check_mode().as_graph_kind(), } } @@ -1122,7 +1124,11 @@ impl CliOptions { } } - pub fn env_file_name(&self) -> Option<&String> { + pub fn otel_config(&self) -> Option<OtelConfig> { + self.flags.otel_config() + } + + pub fn env_file_name(&self) -> Option<&Vec<String>> { self.flags.env_file.as_ref() } @@ -1130,21 +1136,34 @@ impl CliOptions { self .main_module_cell .get_or_init(|| { - let main_module = match &self.flags.subcommand { + Ok(match &self.flags.subcommand { DenoSubcommand::Compile(compile_flags) => { resolve_url_or_path(&compile_flags.source_file, self.initial_cwd())? } DenoSubcommand::Eval(_) => { - resolve_url_or_path("./$deno$eval.ts", self.initial_cwd())? + resolve_url_or_path("./$deno$eval.mts", self.initial_cwd())? } DenoSubcommand::Repl(_) => { - resolve_url_or_path("./$deno$repl.ts", self.initial_cwd())? + resolve_url_or_path("./$deno$repl.mts", self.initial_cwd())? } DenoSubcommand::Run(run_flags) => { if run_flags.is_stdin() { - resolve_url_or_path("./$deno$stdin.ts", self.initial_cwd())? + resolve_url_or_path("./$deno$stdin.mts", self.initial_cwd())? } else { - resolve_url_or_path(&run_flags.script, self.initial_cwd())? 
+ let url = + resolve_url_or_path(&run_flags.script, self.initial_cwd())?; + if self.is_node_main() + && url.scheme() == "file" + && MediaType::from_specifier(&url) == MediaType::Unknown + { + try_resolve_node_binary_main_entrypoint( + &run_flags.script, + self.initial_cwd(), + )? + .unwrap_or(url) + } else { + url + } } } DenoSubcommand::Serve(run_flags) => { @@ -1153,9 +1172,7 @@ impl CliOptions { _ => { bail!("No main module.") } - }; - - Ok(main_module) + }) }) .as_ref() .map_err(|err| deno_core::anyhow::anyhow!("{}", err)) @@ -1204,7 +1221,7 @@ impl CliOptions { // This is triggered via a secret environment variable which is used // for functionality like child_process.fork. Users should NOT depend // on this functionality. - pub fn is_npm_main(&self) -> bool { + pub fn is_node_main(&self) -> bool { NPM_PROCESS_STATE.is_some() } @@ -1450,6 +1467,12 @@ impl CliOptions { }) = &self.flags.subcommand { *hmr + } else if let DenoSubcommand::Serve(ServeFlags { + watch: Some(WatchFlagsWithPaths { hmr, .. }), + .. + }) = &self.flags.subcommand + { + *hmr } else { false } @@ -1576,6 +1599,13 @@ impl CliOptions { || self.workspace().has_unstable("bare-node-builtins") } + pub fn detect_cjs(&self) -> bool { + // only enabled when there's a package.json in order to not have a + // perf penalty for non-npm Deno projects of searching for the closest + // package.json beside each module + self.workspace().package_jsons().next().is_some() || self.is_node_main() + } + fn byonm_enabled(&self) -> bool { // check if enabled via unstable self.node_modules_dir().ok().flatten() == Some(NodeModulesDirMode::Manual) @@ -1586,6 +1616,15 @@ impl CliOptions { } pub fn use_byonm(&self) -> bool { + if matches!( + self.sub_command(), + DenoSubcommand::Install(_) + | DenoSubcommand::Add(_) + | DenoSubcommand::Remove(_) + ) { + // For `deno install/add/remove` we want to force the managed resolver so it can set up `node_modules/` directory. 
+ return false; + } if self.node_modules_dir().ok().flatten().is_none() && self.maybe_node_modules_folder.is_some() && self @@ -1620,21 +1659,16 @@ impl CliOptions { }); if !from_config_file.is_empty() { - // collect unstable granular flags - let mut all_valid_unstable_flags: Vec<&str> = - crate::UNSTABLE_GRANULAR_FLAGS - .iter() - .map(|granular_flag| granular_flag.name) - .collect(); - - let mut another_unstable_flags = Vec::from([ - "sloppy-imports", - "byonm", - "bare-node-builtins", - "fmt-component", - ]); - // add more unstable flags to the same vector holding granular flags - all_valid_unstable_flags.append(&mut another_unstable_flags); + let all_valid_unstable_flags: Vec<&str> = crate::UNSTABLE_GRANULAR_FLAGS + .iter() + .map(|granular_flag| granular_flag.name) + .chain([ + "sloppy-imports", + "byonm", + "bare-node-builtins", + "fmt-component", + ]) + .collect(); // check and warn if the unstable flag of config file isn't supported, by // iterating through the vector holding the unstable flags @@ -1667,6 +1701,10 @@ impl CliOptions { if let DenoSubcommand::Run(RunFlags { watch: Some(WatchFlagsWithPaths { paths, .. }), .. + }) + | DenoSubcommand::Serve(ServeFlags { + watch: Some(WatchFlagsWithPaths { paths, .. }), + .. }) = &self.flags.subcommand { full_paths.extend(paths.iter().map(|path| self.initial_cwd.join(path))); @@ -1766,6 +1804,36 @@ fn resolve_node_modules_folder( Ok(Some(canonicalize_path_maybe_not_exists(&path)?)) } +fn try_resolve_node_binary_main_entrypoint( + specifier: &str, + initial_cwd: &Path, +) -> Result<Option<Url>, AnyError> { + // node allows running files at paths without a `.js` extension + // or at directories with an index.js file + let path = deno_core::normalize_path(initial_cwd.join(specifier)); + if path.is_dir() { + let index_file = path.join("index.js"); + Ok(if index_file.is_file() { + Some(deno_path_util::url_from_file_path(&index_file)?) 
+ } else { + None + }) + } else { + let path = path.with_extension( + path + .extension() + .and_then(|s| s.to_str()) + .map(|s| format!("{}.js", s)) + .unwrap_or("js".to_string()), + ); + if path.is_file() { + Ok(Some(deno_path_util::url_from_file_path(&path)?)) + } else { + Ok(None) + } + } +} + fn resolve_import_map_specifier( maybe_import_map_path: Option<&str>, maybe_config_file: Option<&ConfigFile>, @@ -1867,19 +1935,22 @@ pub fn config_to_deno_graph_workspace_member( }) } -fn load_env_variables_from_env_file(filename: Option<&String>) { - let Some(env_file_name) = filename else { +fn load_env_variables_from_env_file(filename: Option<&Vec<String>>) { + let Some(env_file_names) = filename else { return; }; - match from_filename(env_file_name) { - Ok(_) => (), - Err(error) => { - match error { + + for env_file_name in env_file_names.iter().rev() { + match from_filename(env_file_name) { + Ok(_) => (), + Err(error) => { + match error { dotenvy::Error::LineParse(line, index)=> log::info!("{} Parsing failed within the specified environment file: {} at index: {} of the value: {}",colors::yellow("Warning"), env_file_name, index, line), dotenvy::Error::Io(_)=> log::info!("{} The `--env-file` flag was used, but the environment file specified '{}' was not found.",colors::yellow("Warning"),env_file_name), dotenvy::Error::EnvVar(_)=> log::info!("{} One or more of the environment variables isn't present or not unicode within the specified environment file: {}",colors::yellow("Warning"),env_file_name), _ => log::info!("{} Unknown failure occurred with the specified environment file: {}", colors::yellow("Warning"), env_file_name), } + } } } } diff --git a/cli/args/package_json.rs b/cli/args/package_json.rs index 2ef39a30d..7dc75550c 100644 --- a/cli/args/package_json.rs +++ b/cli/args/package_json.rs @@ -5,10 +5,12 @@ use std::sync::Arc; use deno_config::workspace::Workspace; use deno_core::serde_json; +use deno_core::url::Url; use deno_package_json::PackageJsonDepValue; use 
deno_package_json::PackageJsonDepValueParseError; use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageReq; +use thiserror::Error; #[derive(Debug)] pub struct InstallNpmRemotePkg { @@ -23,11 +25,20 @@ pub struct InstallNpmWorkspacePkg { pub target_dir: PathBuf, } +#[derive(Debug, Error, Clone)] +#[error("Failed to install '{}'\n at {}", alias, location)] +pub struct PackageJsonDepValueParseWithLocationError { + pub location: Url, + pub alias: String, + #[source] + pub source: PackageJsonDepValueParseError, +} + #[derive(Debug, Default)] pub struct NpmInstallDepsProvider { remote_pkgs: Vec<InstallNpmRemotePkg>, workspace_pkgs: Vec<InstallNpmWorkspacePkg>, - pkg_json_dep_errors: Vec<PackageJsonDepValueParseError>, + pkg_json_dep_errors: Vec<PackageJsonDepValueParseWithLocationError>, } impl NpmInstallDepsProvider { @@ -89,7 +100,13 @@ impl NpmInstallDepsProvider { let dep = match dep { Ok(dep) => dep, Err(err) => { - pkg_json_dep_errors.push(err); + pkg_json_dep_errors.push( + PackageJsonDepValueParseWithLocationError { + location: pkg_json.specifier(), + alias, + source: err, + }, + ); continue; } }; @@ -150,7 +167,9 @@ impl NpmInstallDepsProvider { &self.workspace_pkgs } - pub fn pkg_json_dep_errors(&self) -> &[PackageJsonDepValueParseError] { + pub fn pkg_json_dep_errors( + &self, + ) -> &[PackageJsonDepValueParseWithLocationError] { &self.pkg_json_dep_errors } } diff --git a/cli/bench/encode_into.js b/cli/bench/encode_into.js index 11f5a56d9..ab5e11b04 100644 --- a/cli/bench/encode_into.js +++ b/cli/bench/encode_into.js @@ -1,6 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -// deno-lint-ignore-file no-console +// deno-lint-ignore-file no-console no-process-globals let [total, count] = typeof Deno !== "undefined" ? 
Deno.args diff --git a/cli/bench/getrandom.js b/cli/bench/getrandom.js index 3c3ec4aa1..fe99bbcbd 100644 --- a/cli/bench/getrandom.js +++ b/cli/bench/getrandom.js @@ -1,6 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -// deno-lint-ignore-file no-console +// deno-lint-ignore-file no-console no-process-globals let [total, count] = typeof Deno !== "undefined" ? Deno.args diff --git a/cli/bench/http.rs b/cli/bench/http.rs deleted file mode 100644 index f739b76ba..000000000 --- a/cli/bench/http.rs +++ /dev/null @@ -1,167 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -use std::collections::HashMap; -use std::net::TcpStream; -use std::path::Path; -use std::process::Command; -use std::sync::atomic::AtomicU16; -use std::sync::atomic::Ordering; -use std::time::Duration; -use std::time::Instant; - -use super::Result; - -pub use test_util::parse_wrk_output; -pub use test_util::WrkOutput as HttpBenchmarkResult; -// Some of the benchmarks in this file have been renamed. In case the history -// somehow gets messed up: -// "node_http" was once called "node" -// "deno_tcp" was once called "deno" -// "deno_http" was once called "deno_net_http" - -const DURATION: &str = "10s"; - -pub fn benchmark( - target_path: &Path, -) -> Result<HashMap<String, HttpBenchmarkResult>> { - let deno_exe = test_util::deno_exe_path(); - let deno_exe = deno_exe.to_string(); - - let hyper_hello_exe = target_path.join("test_server"); - let hyper_hello_exe = hyper_hello_exe.to_str().unwrap(); - - let mut res = HashMap::new(); - let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR")); - let http_dir = manifest_dir.join("bench").join("http"); - for entry in std::fs::read_dir(&http_dir)? 
{ - let entry = entry?; - let pathbuf = entry.path(); - let path = pathbuf.to_str().unwrap(); - if path.ends_with(".lua") { - continue; - } - let file_stem = pathbuf.file_stem().unwrap().to_str().unwrap(); - - let lua_script = http_dir.join(format!("{file_stem}.lua")); - let mut maybe_lua = None; - if lua_script.exists() { - maybe_lua = Some(lua_script.to_str().unwrap()); - } - - let port = get_port(); - // deno run -A --unstable-net <path> <addr> - res.insert( - file_stem.to_string(), - run( - &[ - deno_exe.as_str(), - "run", - "--allow-all", - "--unstable-net", - "--enable-testing-features-do-not-use", - path, - &server_addr(port), - ], - port, - None, - None, - maybe_lua, - )?, - ); - } - - res.insert("hyper".to_string(), hyper_http(hyper_hello_exe)?); - - Ok(res) -} - -fn run( - server_cmd: &[&str], - port: u16, - env: Option<Vec<(String, String)>>, - origin_cmd: Option<&[&str]>, - lua_script: Option<&str>, -) -> Result<HttpBenchmarkResult> { - // Wait for port 4544 to become available. - // TODO Need to use SO_REUSEPORT with tokio::net::TcpListener. - std::thread::sleep(Duration::from_secs(5)); - - let mut origin = None; - if let Some(cmd) = origin_cmd { - let mut com = Command::new(cmd[0]); - com.args(&cmd[1..]); - if let Some(env) = env.clone() { - com.envs(env); - } - origin = Some(com.spawn()?); - }; - - println!("{}", server_cmd.join(" ")); - let mut server = { - let mut com = Command::new(server_cmd[0]); - com.args(&server_cmd[1..]); - if let Some(env) = env { - com.envs(env); - } - com.spawn()? - }; - - // Wait for server to wake up. 
- let now = Instant::now(); - let addr = format!("127.0.0.1:{port}"); - while now.elapsed().as_secs() < 30 { - if TcpStream::connect(&addr).is_ok() { - break; - } - std::thread::sleep(Duration::from_millis(10)); - } - TcpStream::connect(&addr).expect("Failed to connect to server in time"); - println!("Server took {} ms to start", now.elapsed().as_millis()); - - let wrk = test_util::prebuilt_tool_path("wrk"); - assert!(wrk.is_file()); - - let addr = format!("http://{addr}/"); - let wrk = wrk.to_string(); - let mut wrk_cmd = vec![wrk.as_str(), "-d", DURATION, "--latency", &addr]; - - if let Some(lua_script) = lua_script { - wrk_cmd.push("-s"); - wrk_cmd.push(lua_script); - } - - println!("{}", wrk_cmd.join(" ")); - let output = test_util::run_collect(&wrk_cmd, None, None, None, true).0; - - std::thread::sleep(Duration::from_secs(1)); // wait to capture failure. TODO racy. - - println!("{output}"); - assert!( - server.try_wait()?.map(|s| s.success()).unwrap_or(true), - "server ended with error" - ); - - server.kill()?; - if let Some(mut origin) = origin { - origin.kill()?; - } - - Ok(parse_wrk_output(&output)) -} - -static NEXT_PORT: AtomicU16 = AtomicU16::new(4544); -pub(crate) fn get_port() -> u16 { - let p = NEXT_PORT.load(Ordering::SeqCst); - NEXT_PORT.store(p.wrapping_add(1), Ordering::SeqCst); - p -} - -fn server_addr(port: u16) -> String { - format!("0.0.0.0:{port}") -} - -fn hyper_http(exe: &str) -> Result<HttpBenchmarkResult> { - let port = get_port(); - println!("http_benchmark testing RUST hyper"); - run(&[exe, &port.to_string()], port, None, None, None) -} diff --git a/cli/bench/http/deno_flash_hono_router.js b/cli/bench/http/deno_flash_hono_router.js deleted file mode 100644 index baced0cec..000000000 --- a/cli/bench/http/deno_flash_hono_router.js +++ /dev/null @@ -1,10 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
-import { Hono } from "https://deno.land/x/hono@v2.0.9/mod.ts"; - -const addr = Deno.args[0] || "127.0.0.1:4500"; -const [hostname, port] = addr.split(":"); - -const app = new Hono(); -app.get("/", (c) => c.text("Hello, World!")); - -Deno.serve({ port: Number(port), hostname }, app.fetch); diff --git a/cli/bench/http/deno_flash_send_file.js b/cli/bench/http/deno_flash_send_file.js deleted file mode 100644 index bf8541f8b..000000000 --- a/cli/bench/http/deno_flash_send_file.js +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -const addr = Deno.args[0] || "127.0.0.1:4500"; -const [hostname, port] = addr.split(":"); -const { serve } = Deno; - -const path = new URL("../testdata/128k.bin", import.meta.url).pathname; - -function handler() { - const file = Deno.openSync(path); - return new Response(file.readable); -} - -serve({ hostname, port: Number(port) }, handler); diff --git a/cli/bench/http/deno_http_read_headers.lua b/cli/bench/http/deno_http_read_headers.lua deleted file mode 100644 index 64f1923ff..000000000 --- a/cli/bench/http/deno_http_read_headers.lua +++ /dev/null @@ -1,5 +0,0 @@ -wrk.headers["foo"] = "bar" -wrk.headers["User-Agent"] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36" -wrk.headers["Viewport-Width"] = "1920" -wrk.headers["Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9" -wrk.headers["Accept-Language"] = "en,la;q=0.9"
\ No newline at end of file diff --git a/cli/bench/http/deno_http_serve.js b/cli/bench/http/deno_http_serve.js deleted file mode 100644 index 639982ce6..000000000 --- a/cli/bench/http/deno_http_serve.js +++ /dev/null @@ -1,11 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -const addr = Deno.args[0] ?? "127.0.0.1:4500"; -const [hostname, port] = addr.split(":"); -const { serve } = Deno; - -function handler() { - return new Response("Hello World"); -} - -serve({ hostname, port: Number(port), reusePort: true }, handler); diff --git a/cli/bench/http/deno_post_bin.lua b/cli/bench/http/deno_post_bin.lua deleted file mode 100644 index c8f5d3e3f..000000000 --- a/cli/bench/http/deno_post_bin.lua +++ /dev/null @@ -1,5 +0,0 @@ -wrk.method = "POST" -wrk.headers["Content-Type"] = "application/octet-stream" - -file = io.open("./cli/bench/testdata/128k.bin", "rb") -wrk.body = file:read("*a")
\ No newline at end of file diff --git a/cli/bench/http/deno_post_json.lua b/cli/bench/http/deno_post_json.lua deleted file mode 100644 index cc6c4e226..000000000 --- a/cli/bench/http/deno_post_json.lua +++ /dev/null @@ -1,3 +0,0 @@ -wrk.method = "POST" -wrk.headers["Content-Type"] = "application/json" -wrk.body = '{"hello":"deno"}'
\ No newline at end of file diff --git a/cli/bench/http/deno_reactdom_ssr_flash.jsx b/cli/bench/http/deno_reactdom_ssr_flash.jsx deleted file mode 100644 index eaabf8912..000000000 --- a/cli/bench/http/deno_reactdom_ssr_flash.jsx +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -import { renderToReadableStream } from "https://esm.run/react-dom/server"; -import * as React from "https://esm.run/react"; -const { serve } = Deno; -const addr = Deno.args[0] || "127.0.0.1:4500"; -const [hostname, port] = addr.split(":"); - -const App = () => ( - <html> - <body> - <h1>Hello World</h1> - </body> - </html> -); - -const headers = { - headers: { - "Content-Type": "text/html", - }, -}; - -serve({ hostname, port: Number(port) }, async () => { - return new Response(await renderToReadableStream(<App />), headers); -}); diff --git a/cli/bench/http/deno_tcp.ts b/cli/bench/http/deno_tcp.ts deleted file mode 100644 index b79591073..000000000 --- a/cli/bench/http/deno_tcp.ts +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -// Used for benchmarking Deno's networking. -// TODO(bartlomieju): Replace this with a real HTTP server once -// https://github.com/denoland/deno/issues/726 is completed. -// Note: this is a keep-alive server. 
-// deno-lint-ignore-file no-console -const addr = Deno.args[0] || "127.0.0.1:4500"; -const [hostname, port] = addr.split(":"); -const listener = Deno.listen({ hostname, port: Number(port) }); -const response = new TextEncoder().encode( - "HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World\n", -); -async function handle(conn: Deno.Conn): Promise<void> { - const buffer = new Uint8Array(1024); - try { - while (true) { - await conn.read(buffer); - await conn.write(response); - } - } catch (e) { - if ( - !(e instanceof Deno.errors.BrokenPipe) && - !(e instanceof Deno.errors.ConnectionReset) - ) { - throw e; - } - } - conn.close(); -} - -console.log("Listening on", addr); -for await (const conn of listener) { - handle(conn); -} diff --git a/cli/bench/lsp.rs b/cli/bench/lsp.rs index b088865c6..7baaffca7 100644 --- a/cli/bench/lsp.rs +++ b/cli/bench/lsp.rs @@ -150,7 +150,11 @@ fn bench_big_file_edits(deno_exe: &Path) -> Duration { .deno_exe(deno_exe) .build(); client.initialize_default(); + let (method, _): (String, Option<Value>) = client.read_notification(); + assert_eq!(method, "deno/didRefreshDenoConfigurationTree"); client.change_configuration(json!({ "deno": { "enable": true } })); + let (method, _): (String, Option<Value>) = client.read_notification(); + assert_eq!(method, "deno/didRefreshDenoConfigurationTree"); client.write_notification( "textDocument/didOpen", @@ -206,6 +210,8 @@ fn bench_code_lens(deno_exe: &Path) -> Duration { .deno_exe(deno_exe) .build(); client.initialize_default(); + let (method, _): (String, Option<Value>) = client.read_notification(); + assert_eq!(method, "deno/didRefreshDenoConfigurationTree"); client.change_configuration(json!({ "deno": { "enable": true, "codeLens": { @@ -214,6 +220,8 @@ fn bench_code_lens(deno_exe: &Path) -> Duration { "test": true, }, } })); + let (method, _): (String, Option<Value>) = client.read_notification(); + assert_eq!(method, "deno/didRefreshDenoConfigurationTree"); client.write_notification( 
"textDocument/didOpen", @@ -257,7 +265,11 @@ fn bench_find_replace(deno_exe: &Path) -> Duration { .deno_exe(deno_exe) .build(); client.initialize_default(); + let (method, _): (String, Option<Value>) = client.read_notification(); + assert_eq!(method, "deno/didRefreshDenoConfigurationTree"); client.change_configuration(json!({ "deno": { "enable": true } })); + let (method, _): (String, Option<Value>) = client.read_notification(); + assert_eq!(method, "deno/didRefreshDenoConfigurationTree"); for i in 0..10 { client.write_notification( @@ -341,7 +353,11 @@ fn bench_startup_shutdown(deno_exe: &Path) -> Duration { .deno_exe(deno_exe) .build(); client.initialize_default(); + let (method, _): (String, Option<Value>) = client.read_notification(); + assert_eq!(method, "deno/didRefreshDenoConfigurationTree"); client.change_configuration(json!({ "deno": { "enable": true } })); + let (method, _): (String, Option<Value>) = client.read_notification(); + assert_eq!(method, "deno/didRefreshDenoConfigurationTree"); client.write_notification( "textDocument/didOpen", diff --git a/cli/bench/lsp_bench_standalone.rs b/cli/bench/lsp_bench_standalone.rs index 9c4f264ec..3c946cfbe 100644 --- a/cli/bench/lsp_bench_standalone.rs +++ b/cli/bench/lsp_bench_standalone.rs @@ -13,7 +13,11 @@ use test_util::lsp::LspClientBuilder; fn incremental_change_wait(bench: &mut Bencher) { let mut client = LspClientBuilder::new().use_diagnostic_sync(false).build(); client.initialize_default(); + let (method, _): (String, Option<Value>) = client.read_notification(); + assert_eq!(method, "deno/didRefreshDenoConfigurationTree"); client.change_configuration(json!({ "deno": { "enable": true } })); + let (method, _): (String, Option<Value>) = client.read_notification(); + assert_eq!(method, "deno/didRefreshDenoConfigurationTree"); client.write_notification( "textDocument/didOpen", diff --git a/cli/bench/main.rs b/cli/bench/main.rs index 72fa7e963..c3c42d248 100644 --- a/cli/bench/main.rs +++ b/cli/bench/main.rs @@ 
-17,7 +17,6 @@ use std::process::Stdio; use std::time::SystemTime; use test_util::PathRef; -mod http; mod lsp; fn read_json(filename: &Path) -> Result<Value> { @@ -345,9 +344,11 @@ struct BenchResult { binary_size: HashMap<String, i64>, bundle_size: HashMap<String, i64>, cargo_deps: usize, + // TODO(bartlomieju): remove max_latency: HashMap<String, f64>, max_memory: HashMap<String, i64>, lsp_exec_time: HashMap<String, i64>, + // TODO(bartlomieju): remove req_per_sec: HashMap<String, i64>, syscall_count: HashMap<String, i64>, thread_count: HashMap<String, i64>, @@ -362,7 +363,6 @@ async fn main() -> Result<()> { "binary_size", "cargo_deps", "lsp", - "http", "strace", "mem_usage", ]; @@ -427,21 +427,6 @@ async fn main() -> Result<()> { new_data.lsp_exec_time = lsp_exec_times; } - if benchmarks.contains(&"http") && cfg!(not(target_os = "windows")) { - let stats = http::benchmark(target_dir.as_path())?; - let req_per_sec = stats - .iter() - .map(|(name, result)| (name.clone(), result.requests as i64)) - .collect(); - new_data.req_per_sec = req_per_sec; - let max_latency = stats - .iter() - .map(|(name, result)| (name.clone(), result.latency)) - .collect(); - - new_data.max_latency = max_latency; - } - if cfg!(target_os = "linux") && benchmarks.contains(&"strace") { use std::io::Read; diff --git a/cli/bench/op_now.js b/cli/bench/op_now.js index bcc3ea3c5..7c1427c80 100644 --- a/cli/bench/op_now.js +++ b/cli/bench/op_now.js @@ -1,6 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -// deno-lint-ignore-file no-console +// deno-lint-ignore-file no-console no-process-globals const queueMicrotask = globalThis.queueMicrotask || process.nextTick; let [total, count] = typeof Deno !== "undefined" diff --git a/cli/bench/secure_curves.js b/cli/bench/secure_curves.js index 02d248b23..912b75ccc 100644 --- a/cli/bench/secure_curves.js +++ b/cli/bench/secure_curves.js @@ -1,6 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. 
MIT license. - -// deno-lint-ignore-file no-console +// deno-lint-ignore-file no-console no-process-globals let [total, count] = typeof Deno !== "undefined" ? Deno.args diff --git a/cli/bench/tty.js b/cli/bench/tty.js index 248a90113..e494e76af 100644 --- a/cli/bench/tty.js +++ b/cli/bench/tty.js @@ -1,6 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -// deno-lint-ignore-file no-console +// deno-lint-ignore-file no-console no-process-globals const queueMicrotask = globalThis.queueMicrotask || process.nextTick; let [total, count] = typeof Deno !== "undefined" diff --git a/cli/bench/url_parse.js b/cli/bench/url_parse.js index 367cf73f4..9cb0045f6 100644 --- a/cli/bench/url_parse.js +++ b/cli/bench/url_parse.js @@ -1,6 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -// deno-lint-ignore-file no-console +// deno-lint-ignore-file no-console no-process-globals const queueMicrotask = globalThis.queueMicrotask || process.nextTick; let [total, count] = typeof Deno !== "undefined" diff --git a/cli/bench/write_file.js b/cli/bench/write_file.js index 104a23a8d..747503ce2 100644 --- a/cli/bench/write_file.js +++ b/cli/bench/write_file.js @@ -1,6 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -// deno-lint-ignore-file no-console +// deno-lint-ignore-file no-console no-process-globals const queueMicrotask = globalThis.queueMicrotask || process.nextTick; let [total, count] = typeof Deno !== "undefined" diff --git a/cli/build.rs b/cli/build.rs index aa5d3d18c..3d9866128 100644 --- a/cli/build.rs +++ b/cli/build.rs @@ -365,6 +365,9 @@ fn main() { return; } + deno_napi::print_linker_flags("deno"); + deno_napi::print_linker_flags("denort"); + // Host snapshots won't work when cross compiling. 
let target = env::var("TARGET").unwrap(); let host = env::var("HOST").unwrap(); @@ -374,58 +377,6 @@ fn main() { panic!("Cross compiling with snapshot is not supported."); } - let symbols_file_name = match env::consts::OS { - "android" | "freebsd" | "openbsd" => { - "generated_symbol_exports_list_linux.def".to_string() - } - os => format!("generated_symbol_exports_list_{}.def", os), - }; - let symbols_path = std::path::Path::new("napi") - .join(symbols_file_name) - .canonicalize() - .expect( - "Missing symbols list! Generate using tools/napi/generate_symbols_lists.js", - ); - - println!("cargo:rustc-rerun-if-changed={}", symbols_path.display()); - - #[cfg(target_os = "windows")] - println!( - "cargo:rustc-link-arg-bin=deno=/DEF:{}", - symbols_path.display() - ); - - #[cfg(target_os = "macos")] - println!( - "cargo:rustc-link-arg-bin=deno=-Wl,-exported_symbols_list,{}", - symbols_path.display() - ); - - #[cfg(target_os = "linux")] - { - // If a custom compiler is set, the glibc version is not reliable. - // Here, we assume that if a custom compiler is used, that it will be modern enough to support a dynamic symbol list. - if env::var("CC").is_err() - && glibc_version::get_version() - .map(|ver| ver.major <= 2 && ver.minor < 35) - .unwrap_or(false) - { - println!("cargo:warning=Compiling with all symbols exported, this will result in a larger binary. 
Please use glibc 2.35 or later for an optimised build."); - println!("cargo:rustc-link-arg-bin=deno=-rdynamic"); - } else { - println!( - "cargo:rustc-link-arg-bin=deno=-Wl,--export-dynamic-symbol-list={}", - symbols_path.display() - ); - } - } - - #[cfg(target_os = "android")] - println!( - "cargo:rustc-link-arg-bin=deno=-Wl,--export-dynamic-symbol-list={}", - symbols_path.display() - ); - // To debug snapshot issues uncomment: // op_fetch_asset::trace_serializer(); @@ -449,6 +400,24 @@ fn main() { println!("cargo:rustc-env=TARGET={}", env::var("TARGET").unwrap()); println!("cargo:rustc-env=PROFILE={}", env::var("PROFILE").unwrap()); + if cfg!(windows) { + // these dls load slowly, so delay loading them + let dlls = [ + // webgpu + "d3dcompiler_47", + "OPENGL32", + // network related functions + "iphlpapi", + ]; + for dll in dlls { + println!("cargo:rustc-link-arg-bin=deno=/delayload:{dll}.dll"); + println!("cargo:rustc-link-arg-bin=denort=/delayload:{dll}.dll"); + } + // enable delay loading + println!("cargo:rustc-link-arg-bin=deno=delayimp.lib"); + println!("cargo:rustc-link-arg-bin=denort=delayimp.lib"); + } + let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap()); let o = PathBuf::from(env::var_os("OUT_DIR").unwrap()); diff --git a/cli/cache/cache_db.rs b/cli/cache/cache_db.rs index b24078f29..329ed2d97 100644 --- a/cli/cache/cache_db.rs +++ b/cli/cache/cache_db.rs @@ -57,7 +57,7 @@ impl rusqlite::types::FromSql for CacheDBHash { } /// What should the cache should do on failure? -#[derive(Default)] +#[derive(Debug, Default)] pub enum CacheFailure { /// Return errors if failure mode otherwise unspecified. #[default] @@ -69,6 +69,7 @@ pub enum CacheFailure { } /// Configuration SQL and other parameters for a [`CacheDB`]. +#[derive(Debug)] pub struct CacheDBConfiguration { /// SQL to run for a new database. 
pub table_initializer: &'static str, @@ -98,6 +99,7 @@ impl CacheDBConfiguration { } } +#[derive(Debug)] enum ConnectionState { Connected(Connection), Blackhole, @@ -106,7 +108,7 @@ enum ConnectionState { /// A cache database that eagerly initializes itself off-thread, preventing initialization operations /// from blocking the main thread. -#[derive(Clone)] +#[derive(Debug, Clone)] pub struct CacheDB { // TODO(mmastrac): We can probably simplify our thread-safe implementation here conn: Arc<Mutex<OnceCell<ConnectionState>>>, diff --git a/cli/cache/code_cache.rs b/cli/cache/code_cache.rs index abcd0d46a..b1d9ae757 100644 --- a/cli/cache/code_cache.rs +++ b/cli/cache/code_cache.rs @@ -1,10 +1,14 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. +use std::sync::Arc; + use deno_ast::ModuleSpecifier; use deno_core::error::AnyError; use deno_runtime::code_cache; use deno_runtime::deno_webstorage::rusqlite::params; +use crate::worker::CliCodeCache; + use super::cache_db::CacheDB; use super::cache_db::CacheDBConfiguration; use super::cache_db::CacheDBHash; @@ -82,6 +86,12 @@ impl CodeCache { } } +impl CliCodeCache for CodeCache { + fn as_code_cache(self: Arc<Self>) -> Arc<dyn code_cache::CodeCache> { + self + } +} + impl code_cache::CodeCache for CodeCache { fn get_sync( &self, diff --git a/cli/cache/emit.rs b/cli/cache/emit.rs index 6807f06c1..3c9eecfcb 100644 --- a/cli/cache/emit.rs +++ b/cli/cache/emit.rs @@ -10,6 +10,7 @@ use deno_core::unsync::sync::AtomicFlag; use super::DiskCache; /// The cache that stores previously emitted files. 
+#[derive(Debug)] pub struct EmitCache { disk_cache: DiskCache, emit_failed_flag: AtomicFlag, @@ -39,7 +40,7 @@ impl EmitCache { &self, specifier: &ModuleSpecifier, expected_source_hash: u64, - ) -> Option<Vec<u8>> { + ) -> Option<String> { let emit_filename = self.get_emit_filename(specifier)?; let bytes = self.disk_cache.get(&emit_filename).ok()?; self @@ -91,6 +92,7 @@ impl EmitCache { const LAST_LINE_PREFIX: &str = "\n// denoCacheMetadata="; +#[derive(Debug)] struct EmitFileSerializer { cli_version: &'static str, } @@ -100,7 +102,7 @@ impl EmitFileSerializer { &self, mut bytes: Vec<u8>, expected_source_hash: u64, - ) -> Option<Vec<u8>> { + ) -> Option<String> { let last_newline_index = bytes.iter().rposition(|&b| b == b'\n')?; let (content, last_line) = bytes.split_at(last_newline_index); let hashes = last_line.strip_prefix(LAST_LINE_PREFIX.as_bytes())?; @@ -120,7 +122,7 @@ impl EmitFileSerializer { // everything looks good, truncate and return it bytes.truncate(content.len()); - Some(bytes) + String::from_utf8(bytes).ok() } pub fn serialize(&self, code: &[u8], source_hash: u64) -> Vec<u8> { @@ -170,8 +172,6 @@ mod test { }, emit_failed_flag: Default::default(), }; - let to_string = - |bytes: Vec<u8>| -> String { String::from_utf8(bytes).unwrap() }; let specifier1 = ModuleSpecifier::from_file_path(temp_dir.path().join("file1.ts")) @@ -188,13 +188,10 @@ mod test { assert_eq!(cache.get_emit_code(&specifier1, 5), None); // providing the correct source hash assert_eq!( - cache.get_emit_code(&specifier1, 10).map(to_string), + cache.get_emit_code(&specifier1, 10), Some(emit_code1.clone()), ); - assert_eq!( - cache.get_emit_code(&specifier2, 2).map(to_string), - Some(emit_code2) - ); + assert_eq!(cache.get_emit_code(&specifier2, 2), Some(emit_code2)); // try changing the cli version (should not load previous ones) let cache = EmitCache { @@ -215,18 +212,12 @@ mod test { }, emit_failed_flag: Default::default(), }; - assert_eq!( - cache.get_emit_code(&specifier1, 
5).map(to_string), - Some(emit_code1) - ); + assert_eq!(cache.get_emit_code(&specifier1, 5), Some(emit_code1)); // adding when already exists should not cause issue let emit_code3 = "asdf".to_string(); cache.set_emit_code(&specifier1, 20, emit_code3.as_bytes()); assert_eq!(cache.get_emit_code(&specifier1, 5), None); - assert_eq!( - cache.get_emit_code(&specifier1, 20).map(to_string), - Some(emit_code3) - ); + assert_eq!(cache.get_emit_code(&specifier1, 20), Some(emit_code3)); } } diff --git a/cli/cache/mod.rs b/cli/cache/mod.rs index 628502c50..50fc135dd 100644 --- a/cli/cache/mod.rs +++ b/cli/cache/mod.rs @@ -8,11 +8,9 @@ use crate::file_fetcher::FetchOptions; use crate::file_fetcher::FetchPermissionsOptionRef; use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileOrRedirect; -use crate::npm::CliNpmResolver; use crate::util::fs::atomic_write_file_with_retries; use crate::util::fs::atomic_write_file_with_retries_and_fs; use crate::util::fs::AtomicWriteFileFsAdapter; -use crate::util::path::specifier_has_extension; use deno_ast::MediaType; use deno_core::futures; @@ -22,7 +20,9 @@ use deno_graph::source::CacheInfo; use deno_graph::source::LoadFuture; use deno_graph::source::LoadResponse; use deno_graph::source::Loader; +use deno_runtime::deno_fs; use deno_runtime::deno_permissions::PermissionsContainer; +use node_resolver::InNpmPackageChecker; use std::collections::HashMap; use std::path::Path; use std::path::PathBuf; @@ -182,28 +182,31 @@ pub struct FetchCacherOptions { /// A "wrapper" for the FileFetcher and DiskCache for the Deno CLI that provides /// a concise interface to the DENO_DIR when building module graphs. 
pub struct FetchCacher { - file_fetcher: Arc<FileFetcher>, pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>, + file_fetcher: Arc<FileFetcher>, + fs: Arc<dyn deno_fs::FileSystem>, global_http_cache: Arc<GlobalHttpCache>, - npm_resolver: Arc<dyn CliNpmResolver>, + in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>, module_info_cache: Arc<ModuleInfoCache>, permissions: PermissionsContainer, - cache_info_enabled: bool, is_deno_publish: bool, + cache_info_enabled: bool, } impl FetchCacher { pub fn new( file_fetcher: Arc<FileFetcher>, + fs: Arc<dyn deno_fs::FileSystem>, global_http_cache: Arc<GlobalHttpCache>, - npm_resolver: Arc<dyn CliNpmResolver>, + in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>, module_info_cache: Arc<ModuleInfoCache>, options: FetchCacherOptions, ) -> Self { Self { file_fetcher, + fs, global_http_cache, - npm_resolver, + in_npm_pkg_checker, module_info_cache, file_header_overrides: options.file_header_overrides, permissions: options.permissions, @@ -258,28 +261,21 @@ impl Loader for FetchCacher { ) -> LoadFuture { use deno_graph::source::CacheSetting as LoaderCacheSetting; - if specifier.scheme() == "file" { - if specifier.path().contains("/node_modules/") { - // The specifier might be in a completely different symlinked tree than - // what the node_modules url is in (ex. `/my-project-1/node_modules` - // symlinked to `/my-project-2/node_modules`), so first we checked if the path - // is in a node_modules dir to avoid needlessly canonicalizing, then now compare - // against the canonicalized specifier. 
- let specifier = - crate::node::resolve_specifier_into_node_modules(specifier); - if self.npm_resolver.in_npm_package(&specifier) { - return Box::pin(futures::future::ready(Ok(Some( - LoadResponse::External { specifier }, - )))); - } - } - - // make local CJS modules external to the graph - if specifier_has_extension(specifier, "cjs") { + if specifier.scheme() == "file" + && specifier.path().contains("/node_modules/") + { + // The specifier might be in a completely different symlinked tree than + // what the node_modules url is in (ex. `/my-project-1/node_modules` + // symlinked to `/my-project-2/node_modules`), so first we checked if the path + // is in a node_modules dir to avoid needlessly canonicalizing, then now compare + // against the canonicalized specifier. + let specifier = crate::node::resolve_specifier_into_node_modules( + specifier, + self.fs.as_ref(), + ); + if self.in_npm_pkg_checker.in_npm_package(&specifier) { return Box::pin(futures::future::ready(Ok(Some( - LoadResponse::External { - specifier: specifier.clone(), - }, + LoadResponse::External { specifier }, )))); } } @@ -325,6 +321,7 @@ impl Loader for FetchCacher { } else { FetchPermissionsOptionRef::DynamicContainer(&permissions) }, + maybe_auth: None, maybe_accept: None, maybe_cache_setting: maybe_cache_setting.as_ref(), }, diff --git a/cli/cache/module_info.rs b/cli/cache/module_info.rs index 4dbb01c37..060a6f4f0 100644 --- a/cli/cache/module_info.rs +++ b/cli/cache/module_info.rs @@ -44,18 +44,32 @@ pub static MODULE_INFO_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration { /// A cache of `deno_graph::ModuleInfo` objects. Using this leads to a considerable /// performance improvement because when it exists we can skip parsing a module for /// deno_graph. 
+#[derive(Debug)] pub struct ModuleInfoCache { conn: CacheDB, + parsed_source_cache: Arc<ParsedSourceCache>, } impl ModuleInfoCache { #[cfg(test)] - pub fn new_in_memory(version: &'static str) -> Self { - Self::new(CacheDB::in_memory(&MODULE_INFO_CACHE_DB, version)) + pub fn new_in_memory( + version: &'static str, + parsed_source_cache: Arc<ParsedSourceCache>, + ) -> Self { + Self::new( + CacheDB::in_memory(&MODULE_INFO_CACHE_DB, version), + parsed_source_cache, + ) } - pub fn new(conn: CacheDB) -> Self { - Self { conn } + pub fn new( + conn: CacheDB, + parsed_source_cache: Arc<ParsedSourceCache>, + ) -> Self { + Self { + conn, + parsed_source_cache, + } } /// Useful for testing: re-create this cache DB with a different current version. @@ -63,6 +77,7 @@ impl ModuleInfoCache { pub(crate) fn recreate_with_version(self, version: &'static str) -> Self { Self { conn: self.conn.recreate_with_version(version), + parsed_source_cache: self.parsed_source_cache, } } @@ -113,13 +128,10 @@ impl ModuleInfoCache { Ok(()) } - pub fn as_module_analyzer<'a>( - &'a self, - parsed_source_cache: &'a Arc<ParsedSourceCache>, - ) -> ModuleInfoCacheModuleAnalyzer<'a> { + pub fn as_module_analyzer(&self) -> ModuleInfoCacheModuleAnalyzer { ModuleInfoCacheModuleAnalyzer { module_info_cache: self, - parsed_source_cache, + parsed_source_cache: &self.parsed_source_cache, } } } @@ -129,31 +141,99 @@ pub struct ModuleInfoCacheModuleAnalyzer<'a> { parsed_source_cache: &'a Arc<ParsedSourceCache>, } -#[async_trait::async_trait(?Send)] -impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> { - async fn analyze( +impl<'a> ModuleInfoCacheModuleAnalyzer<'a> { + fn load_cached_module_info( &self, specifier: &ModuleSpecifier, - source: Arc<str>, media_type: MediaType, - ) -> Result<ModuleInfo, deno_ast::ParseDiagnostic> { - // attempt to load from the cache - let source_hash = CacheDBHash::from_source(&source); + source_hash: CacheDBHash, + ) -> Option<ModuleInfo> { match 
self.module_info_cache.get_module_info( specifier, media_type, source_hash, ) { - Ok(Some(info)) => return Ok(info), - Ok(None) => {} + Ok(Some(info)) => Some(info), + Ok(None) => None, Err(err) => { log::debug!( "Error loading module cache info for {}. {:#}", specifier, err ); + None } } + } + + fn save_module_info_to_cache( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + source_hash: CacheDBHash, + module_info: &ModuleInfo, + ) { + if let Err(err) = self.module_info_cache.set_module_info( + specifier, + media_type, + source_hash, + module_info, + ) { + log::debug!( + "Error saving module cache info for {}. {:#}", + specifier, + err + ); + } + } + + pub fn analyze_sync( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + source: &Arc<str>, + ) -> Result<ModuleInfo, deno_ast::ParseDiagnostic> { + // attempt to load from the cache + let source_hash = CacheDBHash::from_source(source); + if let Some(info) = + self.load_cached_module_info(specifier, media_type, source_hash) + { + return Ok(info); + } + + // otherwise, get the module info from the parsed source cache + let parser = self.parsed_source_cache.as_capturing_parser(); + let analyzer = ParserModuleAnalyzer::new(&parser); + let module_info = + analyzer.analyze_sync(specifier, source.clone(), media_type)?; + + // then attempt to cache it + self.save_module_info_to_cache( + specifier, + media_type, + source_hash, + &module_info, + ); + + Ok(module_info) + } +} + +#[async_trait::async_trait(?Send)] +impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> { + async fn analyze( + &self, + specifier: &ModuleSpecifier, + source: Arc<str>, + media_type: MediaType, + ) -> Result<ModuleInfo, deno_ast::ParseDiagnostic> { + // attempt to load from the cache + let source_hash = CacheDBHash::from_source(&source); + if let Some(info) = + self.load_cached_module_info(specifier, media_type, source_hash) + { + return Ok(info); + } // otherwise, get the module info from the 
parsed source cache let module_info = deno_core::unsync::spawn_blocking({ @@ -169,18 +249,12 @@ impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> { .unwrap()?; // then attempt to cache it - if let Err(err) = self.module_info_cache.set_module_info( + self.save_module_info_to_cache( specifier, media_type, source_hash, &module_info, - ) { - log::debug!( - "Error saving module cache info for {}. {:#}", - specifier, - err - ); - } + ); Ok(module_info) } @@ -202,7 +276,7 @@ fn serialize_media_type(media_type: MediaType) -> i64 { Tsx => 11, Json => 12, Wasm => 13, - TsBuildInfo => 14, + Css => 14, SourceMap => 15, Unknown => 16, } @@ -217,7 +291,7 @@ mod test { #[test] pub fn module_info_cache_general_use() { - let cache = ModuleInfoCache::new_in_memory("1.0.0"); + let cache = ModuleInfoCache::new_in_memory("1.0.0", Default::default()); let specifier1 = ModuleSpecifier::parse("https://localhost/mod.ts").unwrap(); let specifier2 = diff --git a/cli/cache/parsed_source.rs b/cli/cache/parsed_source.rs index e956361f4..7e819ae99 100644 --- a/cli/cache/parsed_source.rs +++ b/cli/cache/parsed_source.rs @@ -7,9 +7,9 @@ use deno_ast::MediaType; use deno_ast::ModuleSpecifier; use deno_ast::ParsedSource; use deno_core::parking_lot::Mutex; -use deno_graph::CapturingModuleParser; -use deno_graph::DefaultModuleParser; -use deno_graph::ModuleParser; +use deno_graph::CapturingEsParser; +use deno_graph::DefaultEsParser; +use deno_graph::EsParser; use deno_graph::ParseOptions; use deno_graph::ParsedSourceStore; @@ -46,7 +46,7 @@ impl<'a> LazyGraphSourceParser<'a> { } } -#[derive(Default)] +#[derive(Debug, Default)] pub struct ParsedSourceCache { sources: Mutex<HashMap<ModuleSpecifier, ParsedSource>>, } @@ -57,12 +57,11 @@ impl ParsedSourceCache { module: &deno_graph::JsModule, ) -> Result<ParsedSource, deno_ast::ParseDiagnostic> { let parser = self.as_capturing_parser(); - // this will conditionally parse because it's using a CapturingModuleParser - 
parser.parse_module(ParseOptions { + // this will conditionally parse because it's using a CapturingEsParser + parser.parse_program(ParseOptions { specifier: &module.specifier, source: module.source.clone(), media_type: module.media_type, - // don't bother enabling because this method is currently only used for vendoring scope_analysis: false, }) } @@ -86,10 +85,9 @@ impl ParsedSourceCache { specifier, source, media_type, - // don't bother enabling because this method is currently only used for emitting scope_analysis: false, }; - DefaultModuleParser.parse_module(options) + DefaultEsParser.parse_program(options) } /// Frees the parsed source from memory. @@ -99,8 +97,8 @@ impl ParsedSourceCache { /// Creates a parser that will reuse a ParsedSource from the store /// if it exists, or else parse. - pub fn as_capturing_parser(&self) -> CapturingModuleParser { - CapturingModuleParser::new(None, self) + pub fn as_capturing_parser(&self) -> CapturingEsParser { + CapturingEsParser::new(None, self) } } diff --git a/cli/clippy.toml b/cli/clippy.toml index e20c56c47..f1c25acfb 100644 --- a/cli/clippy.toml +++ b/cli/clippy.toml @@ -1,5 +1,6 @@ disallowed-methods = [ { path = "reqwest::Client::new", reason = "create an HttpClient via an HttpClientProvider instead" }, + { path = "std::process::exit", reason = "use deno_runtime::exit instead" }, ] disallowed-types = [ { path = "reqwest::Client", reason = "use crate::http_util::HttpClient instead" }, diff --git a/cli/emit.rs b/cli/emit.rs index b3f4a4477..3cd23b7ab 100644 --- a/cli/emit.rs +++ b/cli/emit.rs @@ -3,24 +3,28 @@ use crate::cache::EmitCache; use crate::cache::FastInsecureHasher; use crate::cache::ParsedSourceCache; +use crate::resolver::CjsTracker; +use deno_ast::ModuleKind; use deno_ast::SourceMapOption; use deno_ast::SourceRange; use deno_ast::SourceRanged; use deno_ast::SourceRangedForSpanned; +use deno_ast::TranspileModuleOptions; use deno_ast::TranspileResult; use deno_core::error::AnyError; use 
deno_core::futures::stream::FuturesUnordered; use deno_core::futures::FutureExt; use deno_core::futures::StreamExt; -use deno_core::ModuleCodeBytes; use deno_core::ModuleSpecifier; use deno_graph::MediaType; use deno_graph::Module; use deno_graph::ModuleGraph; use std::sync::Arc; +#[derive(Debug)] pub struct Emitter { + cjs_tracker: Arc<CjsTracker>, emit_cache: Arc<EmitCache>, parsed_source_cache: Arc<ParsedSourceCache>, transpile_and_emit_options: @@ -31,6 +35,7 @@ pub struct Emitter { impl Emitter { pub fn new( + cjs_tracker: Arc<CjsTracker>, emit_cache: Arc<EmitCache>, parsed_source_cache: Arc<ParsedSourceCache>, transpile_options: deno_ast::TranspileOptions, @@ -43,6 +48,7 @@ impl Emitter { hasher.finish() }; Self { + cjs_tracker, emit_cache, parsed_source_cache, transpile_and_emit_options: Arc::new((transpile_options, emit_options)), @@ -60,20 +66,19 @@ impl Emitter { continue; }; - let is_emittable = matches!( - module.media_type, - MediaType::TypeScript - | MediaType::Mts - | MediaType::Cts - | MediaType::Jsx - | MediaType::Tsx - ); - if is_emittable { + if module.media_type.is_emittable() { futures.push( self .emit_parsed_source( &module.specifier, module.media_type, + ModuleKind::from_is_cjs( + self.cjs_tracker.is_cjs_with_known_is_script( + &module.specifier, + module.media_type, + module.is_script, + )?, + ), &module.source, ) .boxed_local(), @@ -92,9 +97,10 @@ impl Emitter { pub fn maybe_cached_emit( &self, specifier: &ModuleSpecifier, + module_kind: deno_ast::ModuleKind, source: &str, - ) -> Option<Vec<u8>> { - let source_hash = self.get_source_hash(source); + ) -> Option<String> { + let source_hash = self.get_source_hash(module_kind, source); self.emit_cache.get_emit_code(specifier, source_hash) } @@ -102,25 +108,27 @@ impl Emitter { &self, specifier: &ModuleSpecifier, media_type: MediaType, + module_kind: deno_ast::ModuleKind, source: &Arc<str>, - ) -> Result<ModuleCodeBytes, AnyError> { + ) -> Result<String, AnyError> { // Note: keep this in sync 
with the sync version below let helper = EmitParsedSourceHelper(self); - match helper.pre_emit_parsed_source(specifier, source) { + match helper.pre_emit_parsed_source(specifier, module_kind, source) { PreEmitResult::Cached(emitted_text) => Ok(emitted_text), PreEmitResult::NotCached { source_hash } => { let parsed_source_cache = self.parsed_source_cache.clone(); let transpile_and_emit_options = self.transpile_and_emit_options.clone(); - let transpile_result = deno_core::unsync::spawn_blocking({ + let transpiled_source = deno_core::unsync::spawn_blocking({ let specifier = specifier.clone(); let source = source.clone(); move || -> Result<_, AnyError> { EmitParsedSourceHelper::transpile( &parsed_source_cache, &specifier, - source.clone(), media_type, + module_kind, + source.clone(), &transpile_and_emit_options.0, &transpile_and_emit_options.1, ) @@ -128,11 +136,12 @@ impl Emitter { }) .await .unwrap()?; - Ok(helper.post_emit_parsed_source( + helper.post_emit_parsed_source( specifier, - transpile_result, + &transpiled_source, source_hash, - )) + ); + Ok(transpiled_source) } } } @@ -141,26 +150,29 @@ impl Emitter { &self, specifier: &ModuleSpecifier, media_type: MediaType, + module_kind: deno_ast::ModuleKind, source: &Arc<str>, - ) -> Result<ModuleCodeBytes, AnyError> { + ) -> Result<String, AnyError> { // Note: keep this in sync with the async version above let helper = EmitParsedSourceHelper(self); - match helper.pre_emit_parsed_source(specifier, source) { + match helper.pre_emit_parsed_source(specifier, module_kind, source) { PreEmitResult::Cached(emitted_text) => Ok(emitted_text), PreEmitResult::NotCached { source_hash } => { - let transpile_result = EmitParsedSourceHelper::transpile( + let transpiled_source = EmitParsedSourceHelper::transpile( &self.parsed_source_cache, specifier, - source.clone(), media_type, + module_kind, + source.clone(), &self.transpile_and_emit_options.0, &self.transpile_and_emit_options.1, )?; - Ok(helper.post_emit_parsed_source( + 
helper.post_emit_parsed_source( specifier, - transpile_result, + &transpiled_source, source_hash, - )) + ); + Ok(transpiled_source) } } } @@ -190,10 +202,20 @@ impl Emitter { // this statement is probably wrong) let mut options = self.transpile_and_emit_options.1.clone(); options.source_map = SourceMapOption::None; + let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script( + specifier, + media_type, + parsed_source.compute_is_script(), + )?; let transpiled_source = parsed_source - .transpile(&self.transpile_and_emit_options.0, &options)? - .into_source() - .into_string()?; + .transpile( + &self.transpile_and_emit_options.0, + &deno_ast::TranspileModuleOptions { + module_kind: Some(ModuleKind::from_is_cjs(is_cjs)), + }, + &options, + )? + .into_source(); Ok(transpiled_source.text) } MediaType::JavaScript @@ -204,7 +226,7 @@ impl Emitter { | MediaType::Dcts | MediaType::Json | MediaType::Wasm - | MediaType::TsBuildInfo + | MediaType::Css | MediaType::SourceMap | MediaType::Unknown => { // clear this specifier from the parsed source cache as it's now out of date @@ -217,16 +239,17 @@ impl Emitter { /// A hashing function that takes the source code and uses the global emit /// options then generates a string hash which can be stored to /// determine if the cached emit is valid or not. 
- fn get_source_hash(&self, source_text: &str) -> u64 { + fn get_source_hash(&self, module_kind: ModuleKind, source_text: &str) -> u64 { FastInsecureHasher::new_without_deno_version() // stored in the transpile_and_emit_options_hash .write_str(source_text) .write_u64(self.transpile_and_emit_options_hash) + .write_hashable(module_kind) .finish() } } enum PreEmitResult { - Cached(ModuleCodeBytes), + Cached(String), NotCached { source_hash: u64 }, } @@ -237,14 +260,15 @@ impl<'a> EmitParsedSourceHelper<'a> { pub fn pre_emit_parsed_source( &self, specifier: &ModuleSpecifier, + module_kind: deno_ast::ModuleKind, source: &Arc<str>, ) -> PreEmitResult { - let source_hash = self.0.get_source_hash(source); + let source_hash = self.0.get_source_hash(module_kind, source); if let Some(emit_code) = self.0.emit_cache.get_emit_code(specifier, source_hash) { - PreEmitResult::Cached(emit_code.into_boxed_slice().into()) + PreEmitResult::Cached(emit_code) } else { PreEmitResult::NotCached { source_hash } } @@ -253,25 +277,24 @@ impl<'a> EmitParsedSourceHelper<'a> { pub fn transpile( parsed_source_cache: &ParsedSourceCache, specifier: &ModuleSpecifier, - source: Arc<str>, media_type: MediaType, + module_kind: deno_ast::ModuleKind, + source: Arc<str>, transpile_options: &deno_ast::TranspileOptions, emit_options: &deno_ast::EmitOptions, - ) -> Result<TranspileResult, AnyError> { + ) -> Result<String, AnyError> { // nothing else needs the parsed source at this point, so remove from // the cache in order to not transpile owned let parsed_source = parsed_source_cache .remove_or_parse_module(specifier, source, media_type)?; ensure_no_import_assertion(&parsed_source)?; - Ok(parsed_source.transpile(transpile_options, emit_options)?) 
- } - - pub fn post_emit_parsed_source( - &self, - specifier: &ModuleSpecifier, - transpile_result: TranspileResult, - source_hash: u64, - ) -> ModuleCodeBytes { + let transpile_result = parsed_source.transpile( + transpile_options, + &TranspileModuleOptions { + module_kind: Some(module_kind), + }, + emit_options, + )?; let transpiled_source = match transpile_result { TranspileResult::Owned(source) => source, TranspileResult::Cloned(source) => { @@ -280,12 +303,20 @@ impl<'a> EmitParsedSourceHelper<'a> { } }; debug_assert!(transpiled_source.source_map.is_none()); + Ok(transpiled_source.text) + } + + pub fn post_emit_parsed_source( + &self, + specifier: &ModuleSpecifier, + transpiled_source: &str, + source_hash: u64, + ) { self.0.emit_cache.set_emit_code( specifier, source_hash, - &transpiled_source.source, + transpiled_source.as_bytes(), ); - transpiled_source.source.into_boxed_slice().into() } } @@ -317,7 +348,7 @@ fn ensure_no_import_assertion( deno_core::anyhow::anyhow!("{}", msg) } - let Some(module) = parsed_source.program_ref().as_module() else { + let deno_ast::ProgramRef::Module(module) = parsed_source.program_ref() else { return Ok(()); }; diff --git a/cli/errors.rs b/cli/errors.rs index 25b3fc332..b1808f733 100644 --- a/cli/errors.rs +++ b/cli/errors.rs @@ -88,6 +88,10 @@ fn get_resolution_error_class(err: &ResolutionError) -> &'static str { } } +fn get_try_from_int_error_class(_: &std::num::TryFromIntError) -> &'static str { + "TypeError" +} + pub fn get_error_class_name(e: &AnyError) -> &'static str { deno_runtime::errors::get_error_class_name(e) .or_else(|| { @@ -106,5 +110,9 @@ pub fn get_error_class_name(e: &AnyError) -> &'static str { e.downcast_ref::<ResolutionError>() .map(get_resolution_error_class) }) + .or_else(|| { + e.downcast_ref::<std::num::TryFromIntError>() + .map(get_try_from_int_error_class) + }) .unwrap_or("Error") } diff --git a/cli/factory.rs b/cli/factory.rs index b96a133e9..7949a83a5 100644 --- a/cli/factory.rs +++ b/cli/factory.rs 
@@ -11,6 +11,7 @@ use crate::args::StorageKeyResolver; use crate::args::TsConfigType; use crate::cache::Caches; use crate::cache::CodeCache; +use crate::cache::DenoCacheEnvFsAdapter; use crate::cache::DenoDir; use crate::cache::DenoDirProvider; use crate::cache::EmitCache; @@ -32,17 +33,22 @@ use crate::module_loader::ModuleLoadPreparer; use crate::node::CliCjsCodeAnalyzer; use crate::node::CliNodeCodeTranslator; use crate::npm::create_cli_npm_resolver; +use crate::npm::create_in_npm_pkg_checker; use crate::npm::CliByonmNpmResolverCreateOptions; +use crate::npm::CliManagedInNpmPkgCheckerCreateOptions; +use crate::npm::CliManagedNpmResolverCreateOptions; use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolverCreateOptions; -use crate::npm::CliNpmResolverManagedCreateOptions; use crate::npm::CliNpmResolverManagedSnapshotOption; -use crate::resolver::CjsResolutionStore; +use crate::npm::CreateInNpmPkgCheckerOptions; +use crate::resolver::CjsTracker; +use crate::resolver::CliDenoResolver; use crate::resolver::CliDenoResolverFs; -use crate::resolver::CliGraphResolver; -use crate::resolver::CliGraphResolverOptions; -use crate::resolver::CliNodeResolver; +use crate::resolver::CliNpmReqResolver; +use crate::resolver::CliResolver; +use crate::resolver::CliResolverOptions; use crate::resolver::CliSloppyImportsResolver; +use crate::resolver::IsCjsResolverOptions; use crate::resolver::NpmModuleLoader; use crate::resolver::SloppyImportsCachedFs; use crate::standalone::DenoCompileBinaryWriter; @@ -50,6 +56,7 @@ use crate::tools::check::TypeChecker; use crate::tools::coverage::CoverageCollector; use crate::tools::lint::LintRuleProvider; use crate::tools::run::hmr::HmrRunner; +use crate::tsc::TypeCheckingCjsTracker; use crate::util::file_watcher::WatcherCommunicator; use crate::util::fs::canonicalize_path_maybe_not_exists; use crate::util::progress_bar::ProgressBar; @@ -58,15 +65,20 @@ use crate::worker::CliMainWorkerFactory; use crate::worker::CliMainWorkerOptions; use 
std::path::PathBuf; +use deno_cache_dir::npm::NpmCacheDir; use deno_config::workspace::PackageJsonDepResolution; use deno_config::workspace::WorkspaceResolver; use deno_core::error::AnyError; use deno_core::futures::FutureExt; use deno_core::FeatureChecker; +use deno_resolver::npm::NpmReqResolverOptions; +use deno_resolver::DenoResolverOptions; +use deno_resolver::NodeAndNpmReqResolver; use deno_runtime::deno_fs; use deno_runtime::deno_node::DenoFsNodeResolverEnv; use deno_runtime::deno_node::NodeResolver; +use deno_runtime::deno_node::PackageJsonResolver; use deno_runtime::deno_permissions::Permissions; use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_tls::rustls::RootCertStore; @@ -76,6 +88,7 @@ use deno_runtime::inspector_server::InspectorServer; use deno_runtime::permissions::RuntimePermissionDescriptorParser; use log::warn; use node_resolver::analyze::NodeCodeTranslator; +use node_resolver::InNpmPackageChecker; use once_cell::sync::OnceCell; use std::future::Future; use std::sync::Arc; @@ -117,7 +130,7 @@ impl RootCertStoreProvider for CliRootCertStoreProvider { } } -struct Deferred<T>(once_cell::unsync::OnceCell<T>); +pub struct Deferred<T>(once_cell::unsync::OnceCell<T>); impl<T> Default for Deferred<T> { fn default() -> Self { @@ -163,38 +176,42 @@ impl<T> Deferred<T> { #[derive(Default)] struct CliFactoryServices { - cli_options: Deferred<Arc<CliOptions>>, + blob_store: Deferred<Arc<BlobStore>>, caches: Deferred<Arc<Caches>>, + cjs_tracker: Deferred<Arc<CjsTracker>>, + cli_options: Deferred<Arc<CliOptions>>, + code_cache: Deferred<Arc<CodeCache>>, + deno_resolver: Deferred<Arc<CliDenoResolver>>, + emit_cache: Deferred<Arc<EmitCache>>, + emitter: Deferred<Arc<Emitter>>, + feature_checker: Deferred<Arc<FeatureChecker>>, file_fetcher: Deferred<Arc<FileFetcher>>, + fs: Deferred<Arc<dyn deno_fs::FileSystem>>, global_http_cache: Deferred<Arc<GlobalHttpCache>>, http_cache: Deferred<Arc<dyn HttpCache>>, http_client_provider: 
Deferred<Arc<HttpClientProvider>>, - emit_cache: Deferred<Arc<EmitCache>>, - emitter: Deferred<Arc<Emitter>>, - fs: Deferred<Arc<dyn deno_fs::FileSystem>>, + in_npm_pkg_checker: Deferred<Arc<dyn InNpmPackageChecker>>, main_graph_container: Deferred<Arc<MainModuleGraphContainer>>, - maybe_inspector_server: Deferred<Option<Arc<InspectorServer>>>, - root_cert_store_provider: Deferred<Arc<dyn RootCertStoreProvider>>, - blob_store: Deferred<Arc<BlobStore>>, - module_info_cache: Deferred<Arc<ModuleInfoCache>>, - parsed_source_cache: Deferred<Arc<ParsedSourceCache>>, - resolver: Deferred<Arc<CliGraphResolver>>, maybe_file_watcher_reporter: Deferred<Option<FileWatcherReporter>>, + maybe_inspector_server: Deferred<Option<Arc<InspectorServer>>>, module_graph_builder: Deferred<Arc<ModuleGraphBuilder>>, module_graph_creator: Deferred<Arc<ModuleGraphCreator>>, + module_info_cache: Deferred<Arc<ModuleInfoCache>>, module_load_preparer: Deferred<Arc<ModuleLoadPreparer>>, node_code_translator: Deferred<Arc<CliNodeCodeTranslator>>, node_resolver: Deferred<Arc<NodeResolver>>, + npm_cache_dir: Deferred<Arc<NpmCacheDir>>, + npm_req_resolver: Deferred<Arc<CliNpmReqResolver>>, npm_resolver: Deferred<Arc<dyn CliNpmResolver>>, + parsed_source_cache: Deferred<Arc<ParsedSourceCache>>, permission_desc_parser: Deferred<Arc<RuntimePermissionDescriptorParser>>, + pkg_json_resolver: Deferred<Arc<PackageJsonResolver>>, + resolver: Deferred<Arc<CliResolver>>, + root_cert_store_provider: Deferred<Arc<dyn RootCertStoreProvider>>, root_permissions_container: Deferred<PermissionsContainer>, sloppy_imports_resolver: Deferred<Option<Arc<CliSloppyImportsResolver>>>, text_only_progress_bar: Deferred<ProgressBar>, type_checker: Deferred<Arc<TypeChecker>>, - cjs_resolutions: Deferred<Arc<CjsResolutionStore>>, - cli_node_resolver: Deferred<Arc<CliNodeResolver>>, - feature_checker: Deferred<Arc<FeatureChecker>>, - code_cache: Deferred<Arc<CodeCache>>, workspace_resolver: Deferred<Arc<WorkspaceResolver>>, } @@ 
-351,56 +368,112 @@ impl CliFactory { self.services.fs.get_or_init(|| Arc::new(deno_fs::RealFs)) } + pub fn in_npm_pkg_checker( + &self, + ) -> Result<&Arc<dyn InNpmPackageChecker>, AnyError> { + self.services.in_npm_pkg_checker.get_or_try_init(|| { + let cli_options = self.cli_options()?; + let options = if cli_options.use_byonm() { + CreateInNpmPkgCheckerOptions::Byonm + } else { + CreateInNpmPkgCheckerOptions::Managed( + CliManagedInNpmPkgCheckerCreateOptions { + root_cache_dir_url: self.npm_cache_dir()?.root_dir_url(), + maybe_node_modules_path: cli_options + .node_modules_dir_path() + .map(|p| p.as_path()), + }, + ) + }; + Ok(create_in_npm_pkg_checker(options)) + }) + } + + pub fn npm_cache_dir(&self) -> Result<&Arc<NpmCacheDir>, AnyError> { + self.services.npm_cache_dir.get_or_try_init(|| { + let fs = self.fs(); + let global_path = self.deno_dir()?.npm_folder_path(); + let cli_options = self.cli_options()?; + Ok(Arc::new(NpmCacheDir::new( + &DenoCacheEnvFsAdapter(fs.as_ref()), + global_path, + cli_options.npmrc().get_all_known_registries_urls(), + ))) + }) + } + pub async fn npm_resolver( &self, ) -> Result<&Arc<dyn CliNpmResolver>, AnyError> { self .services .npm_resolver - .get_or_try_init_async(async { - let fs = self.fs(); - let cli_options = self.cli_options()?; - // For `deno install` we want to force the managed resolver so it can set up `node_modules/` directory. 
- create_cli_npm_resolver(if cli_options.use_byonm() && !matches!(cli_options.sub_command(), DenoSubcommand::Install(_) | DenoSubcommand::Add(_) | DenoSubcommand::Remove(_)) { - CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions { - fs: CliDenoResolverFs(fs.clone()), - root_node_modules_dir: Some(match cli_options.node_modules_dir_path() { - Some(node_modules_path) => node_modules_path.to_path_buf(), - // path needs to be canonicalized for node resolution - // (node_modules_dir_path above is already canonicalized) - None => canonicalize_path_maybe_not_exists(cli_options.initial_cwd())? - .join("node_modules"), - }), - }) - } else { - CliNpmResolverCreateOptions::Managed(CliNpmResolverManagedCreateOptions { - snapshot: match cli_options.resolve_npm_resolution_snapshot()? { - Some(snapshot) => { - CliNpmResolverManagedSnapshotOption::Specified(Some(snapshot)) - } - None => match cli_options.maybe_lockfile() { - Some(lockfile) => { - CliNpmResolverManagedSnapshotOption::ResolveFromLockfile( - lockfile.clone(), - ) - } - None => CliNpmResolverManagedSnapshotOption::Specified(None), + .get_or_try_init_async( + async { + let fs = self.fs(); + let cli_options = self.cli_options()?; + create_cli_npm_resolver(if cli_options.use_byonm() { + CliNpmResolverCreateOptions::Byonm( + CliByonmNpmResolverCreateOptions { + fs: CliDenoResolverFs(fs.clone()), + pkg_json_resolver: self.pkg_json_resolver().clone(), + root_node_modules_dir: Some( + match cli_options.node_modules_dir_path() { + Some(node_modules_path) => node_modules_path.to_path_buf(), + // path needs to be canonicalized for node resolution + // (node_modules_dir_path above is already canonicalized) + None => canonicalize_path_maybe_not_exists( + cli_options.initial_cwd(), + )? 
+ .join("node_modules"), + }, + ), }, - }, - maybe_lockfile: cli_options.maybe_lockfile().cloned(), - fs: fs.clone(), - http_client_provider: self.http_client_provider().clone(), - npm_global_cache_dir: self.deno_dir()?.npm_folder_path(), - cache_setting: cli_options.cache_setting(), - text_only_progress_bar: self.text_only_progress_bar().clone(), - maybe_node_modules_path: cli_options.node_modules_dir_path().cloned(), - npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::from_workspace(cli_options.workspace())), - npm_system_info: cli_options.npm_system_info(), - npmrc: cli_options.npmrc().clone(), - lifecycle_scripts: cli_options.lifecycle_scripts_config(), + ) + } else { + CliNpmResolverCreateOptions::Managed( + CliManagedNpmResolverCreateOptions { + snapshot: match cli_options.resolve_npm_resolution_snapshot()? { + Some(snapshot) => { + CliNpmResolverManagedSnapshotOption::Specified(Some( + snapshot, + )) + } + None => match cli_options.maybe_lockfile() { + Some(lockfile) => { + CliNpmResolverManagedSnapshotOption::ResolveFromLockfile( + lockfile.clone(), + ) + } + None => { + CliNpmResolverManagedSnapshotOption::Specified(None) + } + }, + }, + maybe_lockfile: cli_options.maybe_lockfile().cloned(), + fs: fs.clone(), + http_client_provider: self.http_client_provider().clone(), + npm_cache_dir: self.npm_cache_dir()?.clone(), + cache_setting: cli_options.cache_setting(), + text_only_progress_bar: self.text_only_progress_bar().clone(), + maybe_node_modules_path: cli_options + .node_modules_dir_path() + .cloned(), + npm_install_deps_provider: Arc::new( + NpmInstallDepsProvider::from_workspace( + cli_options.workspace(), + ), + ), + npm_system_info: cli_options.npm_system_info(), + npmrc: cli_options.npmrc().clone(), + lifecycle_scripts: cli_options.lifecycle_scripts_config(), + }, + ) }) - }).await - }.boxed_local()) + .await + } + .boxed_local(), + ) .await } @@ -455,28 +528,47 @@ impl CliFactory { .await } - pub async fn resolver(&self) -> 
Result<&Arc<CliGraphResolver>, AnyError> { + pub async fn deno_resolver(&self) -> Result<&Arc<CliDenoResolver>, AnyError> { + self + .services + .deno_resolver + .get_or_try_init_async(async { + let cli_options = self.cli_options()?; + Ok(Arc::new(CliDenoResolver::new(DenoResolverOptions { + in_npm_pkg_checker: self.in_npm_pkg_checker()?.clone(), + node_and_req_resolver: if cli_options.no_npm() { + None + } else { + Some(NodeAndNpmReqResolver { + node_resolver: self.node_resolver().await?.clone(), + npm_req_resolver: self.npm_req_resolver().await?.clone(), + }) + }, + sloppy_imports_resolver: self.sloppy_imports_resolver()?.cloned(), + workspace_resolver: self.workspace_resolver().await?.clone(), + is_byonm: cli_options.use_byonm(), + maybe_vendor_dir: cli_options.vendor_dir_path(), + }))) + }) + .await + } + + pub async fn resolver(&self) -> Result<&Arc<CliResolver>, AnyError> { self .services .resolver .get_or_try_init_async( async { let cli_options = self.cli_options()?; - Ok(Arc::new(CliGraphResolver::new(CliGraphResolverOptions { - sloppy_imports_resolver: self.sloppy_imports_resolver()?.cloned(), - node_resolver: Some(self.cli_node_resolver().await?.clone()), + Ok(Arc::new(CliResolver::new(CliResolverOptions { npm_resolver: if cli_options.no_npm() { None } else { Some(self.npm_resolver().await?.clone()) }, - workspace_resolver: self.workspace_resolver().await?.clone(), bare_node_builtins_enabled: cli_options .unstable_bare_node_builtins(), - maybe_jsx_import_source_config: cli_options - .workspace() - .to_maybe_jsx_import_source_config()?, - maybe_vendor_dir: cli_options.vendor_dir_path(), + deno_resolver: self.deno_resolver().await?.clone(), }))) } .boxed_local(), @@ -505,6 +597,7 @@ impl CliFactory { self.services.module_info_cache.get_or_try_init(|| { Ok(Arc::new(ModuleInfoCache::new( self.caches()?.dep_analysis_db(), + self.parsed_source_cache().clone(), ))) }) } @@ -533,6 +626,7 @@ impl CliFactory { ts_config_result.ts_config, )?; 
Ok(Arc::new(Emitter::new( + self.cjs_tracker()?.clone(), self.emit_cache()?.clone(), self.parsed_source_cache().clone(), transpile_options, @@ -556,7 +650,13 @@ impl CliFactory { async { Ok(Arc::new(NodeResolver::new( DenoFsNodeResolverEnv::new(self.fs().clone()), - self.npm_resolver().await?.clone().into_npm_resolver(), + self.in_npm_pkg_checker()?.clone(), + self + .npm_resolver() + .await? + .clone() + .into_npm_pkg_folder_resolver(), + self.pkg_json_resolver().clone(), ))) } .boxed_local(), @@ -574,23 +674,57 @@ impl CliFactory { let caches = self.caches()?; let node_analysis_cache = NodeAnalysisCache::new(caches.node_analysis_db()); - let node_resolver = self.cli_node_resolver().await?.clone(); + let node_resolver = self.node_resolver().await?.clone(); let cjs_esm_analyzer = CliCjsCodeAnalyzer::new( node_analysis_cache, + self.cjs_tracker()?.clone(), self.fs().clone(), - node_resolver, + Some(self.parsed_source_cache().clone()), ); Ok(Arc::new(NodeCodeTranslator::new( cjs_esm_analyzer, DenoFsNodeResolverEnv::new(self.fs().clone()), - self.node_resolver().await?.clone(), - self.npm_resolver().await?.clone().into_npm_resolver(), + self.in_npm_pkg_checker()?.clone(), + node_resolver, + self + .npm_resolver() + .await? 
+ .clone() + .into_npm_pkg_folder_resolver(), + self.pkg_json_resolver().clone(), ))) }) .await } + pub async fn npm_req_resolver( + &self, + ) -> Result<&Arc<CliNpmReqResolver>, AnyError> { + self + .services + .npm_req_resolver + .get_or_try_init_async(async { + let npm_resolver = self.npm_resolver().await?; + Ok(Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions { + byonm_resolver: (npm_resolver.clone()).into_maybe_byonm(), + fs: CliDenoResolverFs(self.fs().clone()), + in_npm_pkg_checker: self.in_npm_pkg_checker()?.clone(), + node_resolver: self.node_resolver().await?.clone(), + npm_req_resolver: npm_resolver.clone().into_npm_req_resolver(), + }))) + }) + .await + } + + pub fn pkg_json_resolver(&self) -> &Arc<PackageJsonResolver> { + self.services.pkg_json_resolver.get_or_init(|| { + Arc::new(PackageJsonResolver::new(DenoFsNodeResolverEnv::new( + self.fs().clone(), + ))) + }) + } + pub async fn type_checker(&self) -> Result<&Arc<TypeChecker>, AnyError> { self .services @@ -599,6 +733,10 @@ impl CliFactory { let cli_options = self.cli_options()?; Ok(Arc::new(TypeChecker::new( self.caches()?.clone(), + Arc::new(TypeCheckingCjsTracker::new( + self.cjs_tracker()?.clone(), + self.module_info_cache()?.clone(), + )), cli_options.clone(), self.module_graph_builder().await?.clone(), self.node_resolver().await?.clone(), @@ -617,17 +755,19 @@ impl CliFactory { .get_or_try_init_async(async { let cli_options = self.cli_options()?; Ok(Arc::new(ModuleGraphBuilder::new( - cli_options.clone(), self.caches()?.clone(), + self.cjs_tracker()?.clone(), + cli_options.clone(), + self.file_fetcher()?.clone(), self.fs().clone(), - self.resolver().await?.clone(), - self.npm_resolver().await?.clone(), - self.module_info_cache()?.clone(), - self.parsed_source_cache().clone(), + self.global_http_cache()?.clone(), + self.in_npm_pkg_checker()?.clone(), cli_options.maybe_lockfile().cloned(), self.maybe_file_watcher_reporter().clone(), - self.file_fetcher()?.clone(), - 
self.global_http_cache()?.clone(), + self.module_info_cache()?.clone(), + self.npm_resolver().await?.clone(), + self.parsed_source_cache().clone(), + self.resolver().await?.clone(), self.root_permissions_container()?.clone(), ))) }) @@ -699,25 +839,18 @@ impl CliFactory { .await } - pub fn cjs_resolutions(&self) -> &Arc<CjsResolutionStore> { - self.services.cjs_resolutions.get_or_init(Default::default) - } - - pub async fn cli_node_resolver( - &self, - ) -> Result<&Arc<CliNodeResolver>, AnyError> { - self - .services - .cli_node_resolver - .get_or_try_init_async(async { - Ok(Arc::new(CliNodeResolver::new( - self.cjs_resolutions().clone(), - self.fs().clone(), - self.node_resolver().await?.clone(), - self.npm_resolver().await?.clone(), - ))) - }) - .await + pub fn cjs_tracker(&self) -> Result<&Arc<CjsTracker>, AnyError> { + self.services.cjs_tracker.get_or_try_init(|| { + let options = self.cli_options()?; + Ok(Arc::new(CjsTracker::new( + self.in_npm_pkg_checker()?.clone(), + self.pkg_json_resolver().clone(), + IsCjsResolverOptions { + detect_cjs: options.detect_cjs(), + is_node_main: options.is_node_main(), + }, + ))) + }) } pub fn permission_desc_parser( @@ -750,7 +883,9 @@ impl CliFactory { ) -> Result<DenoCompileBinaryWriter, AnyError> { let cli_options = self.cli_options()?; Ok(DenoCompileBinaryWriter::new( + self.cjs_tracker()?, self.deno_dir()?, + self.emitter()?, self.file_fetcher()?, self.http_client_provider(), self.npm_resolver().await?.as_ref(), @@ -779,16 +914,20 @@ impl CliFactory { &self, ) -> Result<CliMainWorkerFactory, AnyError> { let cli_options = self.cli_options()?; + let fs = self.fs(); let node_resolver = self.node_resolver().await?; let npm_resolver = self.npm_resolver().await?; - let fs = self.fs(); - let cli_node_resolver = self.cli_node_resolver().await?; let cli_npm_resolver = self.npm_resolver().await?.clone(); + let in_npm_pkg_checker = self.in_npm_pkg_checker()?; let maybe_file_watcher_communicator = if cli_options.has_hmr() { 
Some(self.watcher_communicator.clone().unwrap()) } else { None }; + let node_code_translator = self.node_code_translator().await?; + let cjs_tracker = self.cjs_tracker()?.clone(); + let pkg_json_resolver = self.pkg_json_resolver().clone(); + let npm_req_resolver = self.npm_req_resolver().await?; Ok(CliMainWorkerFactory::new( self.blob_store().clone(), @@ -798,38 +937,44 @@ impl CliFactory { None }, self.feature_checker()?.clone(), - self.fs().clone(), + fs.clone(), maybe_file_watcher_communicator, self.maybe_inspector_server()?.clone(), cli_options.maybe_lockfile().cloned(), Box::new(CliModuleLoaderFactory::new( cli_options, + cjs_tracker, if cli_options.code_cache_enabled() { Some(self.code_cache()?.clone()) } else { None }, self.emitter()?.clone(), + fs.clone(), + in_npm_pkg_checker.clone(), self.main_module_graph_container().await?.clone(), self.module_load_preparer().await?.clone(), - cli_node_resolver.clone(), + node_code_translator.clone(), + node_resolver.clone(), + npm_req_resolver.clone(), cli_npm_resolver.clone(), NpmModuleLoader::new( - self.cjs_resolutions().clone(), - self.node_code_translator().await?.clone(), + self.cjs_tracker()?.clone(), fs.clone(), - cli_node_resolver.clone(), + node_code_translator.clone(), ), self.parsed_source_cache().clone(), self.resolver().await?.clone(), )), node_resolver.clone(), npm_resolver.clone(), + pkg_json_resolver, self.root_cert_store_provider().clone(), self.root_permissions_container()?.clone(), StorageKeyResolver::from_options(cli_options), cli_options.sub_command().clone(), self.create_cli_main_worker_options()?, + self.cli_options()?.otel_config(), )) } @@ -878,7 +1023,6 @@ impl CliFactory { inspect_wait: cli_options.inspect_wait().is_some(), strace_ops: cli_options.strace_ops().clone(), is_inspecting: cli_options.is_inspecting(), - is_npm_main: cli_options.is_npm_main(), location: cli_options.location_flag().clone(), // if the user ran a binary command, we'll need to set process.argv[0] // to be the name of 
the binary command instead of deno diff --git a/cli/file_fetcher.rs b/cli/file_fetcher.rs index 69daf1495..640f83c35 100644 --- a/cli/file_fetcher.rs +++ b/cli/file_fetcher.rs @@ -24,6 +24,7 @@ use deno_graph::source::LoaderChecksum; use deno_path_util::url_to_file_path; use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_web::BlobStore; +use http::header; use log::debug; use std::borrow::Cow; use std::collections::HashMap; @@ -163,8 +164,19 @@ fn get_validated_scheme( ) -> Result<String, AnyError> { let scheme = specifier.scheme(); if !SUPPORTED_SCHEMES.contains(&scheme) { + // NOTE(bartlomieju): this message lists additional `npm` and `jsr` schemes, but they should actually be handled + // before `file_fetcher.rs` APIs are even hit. + let mut all_supported_schemes = SUPPORTED_SCHEMES.to_vec(); + all_supported_schemes.extend_from_slice(&["npm", "jsr"]); + all_supported_schemes.sort(); + let scheme_list = all_supported_schemes + .iter() + .map(|scheme| format!(" - \"{}\"", scheme)) + .collect::<Vec<_>>() + .join("\n"); Err(generic_error(format!( - "Unsupported scheme \"{scheme}\" for module \"{specifier}\". Supported schemes: {SUPPORTED_SCHEMES:#?}" + "Unsupported scheme \"{scheme}\" for module \"{specifier}\". 
Supported schemes:\n{}", + scheme_list ))) } else { Ok(scheme.to_string()) @@ -181,6 +193,7 @@ pub enum FetchPermissionsOptionRef<'a> { pub struct FetchOptions<'a> { pub specifier: &'a ModuleSpecifier, pub permissions: FetchPermissionsOptionRef<'a>, + pub maybe_auth: Option<(header::HeaderName, header::HeaderValue)>, pub maybe_accept: Option<&'a str>, pub maybe_cache_setting: Option<&'a CacheSetting>, } @@ -333,7 +346,7 @@ impl FileFetcher { ) })?; - let bytes = blob.read_all().await?; + let bytes = blob.read_all().await; let headers = HashMap::from([("content-type".to_string(), blob.media_type.clone())]); @@ -350,6 +363,7 @@ impl FileFetcher { maybe_accept: Option<&str>, cache_setting: &CacheSetting, maybe_checksum: Option<&LoaderChecksum>, + maybe_auth: Option<(header::HeaderName, header::HeaderValue)>, ) -> Result<FileOrRedirect, AnyError> { debug!( "FileFetcher::fetch_remote_no_follow - specifier: {}", @@ -442,6 +456,7 @@ impl FileFetcher { .as_ref() .map(|(_, etag)| etag.clone()), maybe_auth_token: maybe_auth_token.clone(), + maybe_auth: maybe_auth.clone(), maybe_progress_guard: maybe_progress_guard.as_ref(), }) .await? 
@@ -538,7 +553,18 @@ impl FileFetcher { specifier: &ModuleSpecifier, ) -> Result<File, AnyError> { self - .fetch_inner(specifier, FetchPermissionsOptionRef::AllowAll) + .fetch_inner(specifier, None, FetchPermissionsOptionRef::AllowAll) + .await + } + + #[inline(always)] + pub async fn fetch_bypass_permissions_with_maybe_auth( + &self, + specifier: &ModuleSpecifier, + maybe_auth: Option<(header::HeaderName, header::HeaderValue)>, + ) -> Result<File, AnyError> { + self + .fetch_inner(specifier, maybe_auth, FetchPermissionsOptionRef::AllowAll) .await } @@ -552,6 +578,7 @@ impl FileFetcher { self .fetch_inner( specifier, + None, FetchPermissionsOptionRef::StaticContainer(permissions), ) .await @@ -560,12 +587,14 @@ impl FileFetcher { async fn fetch_inner( &self, specifier: &ModuleSpecifier, + maybe_auth: Option<(header::HeaderName, header::HeaderValue)>, permissions: FetchPermissionsOptionRef<'_>, ) -> Result<File, AnyError> { self .fetch_with_options(FetchOptions { specifier, permissions, + maybe_auth, maybe_accept: None, maybe_cache_setting: None, }) @@ -585,12 +614,14 @@ impl FileFetcher { max_redirect: usize, ) -> Result<File, AnyError> { let mut specifier = Cow::Borrowed(options.specifier); + let mut maybe_auth = options.maybe_auth.clone(); for _ in 0..=max_redirect { match self .fetch_no_follow_with_options(FetchNoFollowOptions { fetch_options: FetchOptions { specifier: &specifier, permissions: options.permissions, + maybe_auth: maybe_auth.clone(), maybe_accept: options.maybe_accept, maybe_cache_setting: options.maybe_cache_setting, }, @@ -602,6 +633,10 @@ impl FileFetcher { return Ok(file); } FileOrRedirect::Redirect(redirect_specifier) => { + // If we were redirected to another origin, don't send the auth header anymore. 
+ if redirect_specifier.origin() != specifier.origin() { + maybe_auth = None; + } specifier = Cow::Owned(redirect_specifier); } } @@ -666,6 +701,7 @@ impl FileFetcher { options.maybe_accept, options.maybe_cache_setting.unwrap_or(&self.cache_setting), maybe_checksum, + options.maybe_auth, ) .await } @@ -756,6 +792,7 @@ mod tests { FetchOptions { specifier, permissions: FetchPermissionsOptionRef::AllowAll, + maybe_auth: None, maybe_accept: None, maybe_cache_setting: Some(&file_fetcher.cache_setting), }, @@ -1255,6 +1292,7 @@ mod tests { FetchOptions { specifier: &specifier, permissions: FetchPermissionsOptionRef::AllowAll, + maybe_auth: None, maybe_accept: None, maybe_cache_setting: Some(&file_fetcher.cache_setting), }, @@ -1268,6 +1306,7 @@ mod tests { FetchOptions { specifier: &specifier, permissions: FetchPermissionsOptionRef::AllowAll, + maybe_auth: None, maybe_accept: None, maybe_cache_setting: Some(&file_fetcher.cache_setting), }, diff --git a/cli/graph_util.rs b/cli/graph_util.rs index e2f6246e7..6ed0506dd 100644 --- a/cli/graph_util.rs +++ b/cli/graph_util.rs @@ -13,16 +13,19 @@ use crate::colors; use crate::errors::get_error_class_name; use crate::file_fetcher::FileFetcher; use crate::npm::CliNpmResolver; -use crate::resolver::CliGraphResolver; +use crate::resolver::CjsTracker; +use crate::resolver::CliResolver; use crate::resolver::CliSloppyImportsResolver; use crate::resolver::SloppyImportsCachedFs; use crate::tools::check; use crate::tools::check::TypeChecker; use crate::util::file_watcher::WatcherCommunicator; use crate::util::fs::canonicalize_path; +use deno_config::deno_json::JsxImportSourceConfig; use deno_config::workspace::JsrPackageConfig; use deno_core::anyhow::bail; use deno_graph::source::LoaderChecksum; +use deno_graph::source::ResolutionMode; use deno_graph::FillFromLockfileOptions; use deno_graph::JsrLoadError; use deno_graph::ModuleLoadError; @@ -48,6 +51,7 @@ use deno_runtime::deno_permissions::PermissionsContainer; use 
deno_semver::jsr::JsrDepPackageReq; use deno_semver::package::PackageNv; use import_map::ImportMapError; +use node_resolver::InNpmPackageChecker; use std::collections::HashSet; use std::error::Error; use std::ops::Deref; @@ -184,7 +188,7 @@ pub fn graph_exit_integrity_errors(graph: &ModuleGraph) { fn exit_for_integrity_error(err: &ModuleError) { if let Some(err_message) = enhanced_integrity_error_message(err) { log::error!("{} {}", colors::red("error:"), err_message); - std::process::exit(10); + deno_runtime::exit(10); } } @@ -377,48 +381,54 @@ pub struct BuildFastCheckGraphOptions<'a> { } pub struct ModuleGraphBuilder { - options: Arc<CliOptions>, caches: Arc<cache::Caches>, + cjs_tracker: Arc<CjsTracker>, + cli_options: Arc<CliOptions>, + file_fetcher: Arc<FileFetcher>, fs: Arc<dyn FileSystem>, - resolver: Arc<CliGraphResolver>, - npm_resolver: Arc<dyn CliNpmResolver>, - module_info_cache: Arc<ModuleInfoCache>, - parsed_source_cache: Arc<ParsedSourceCache>, + global_http_cache: Arc<GlobalHttpCache>, + in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>, lockfile: Option<Arc<CliLockfile>>, maybe_file_watcher_reporter: Option<FileWatcherReporter>, - file_fetcher: Arc<FileFetcher>, - global_http_cache: Arc<GlobalHttpCache>, + module_info_cache: Arc<ModuleInfoCache>, + npm_resolver: Arc<dyn CliNpmResolver>, + parsed_source_cache: Arc<ParsedSourceCache>, + resolver: Arc<CliResolver>, root_permissions_container: PermissionsContainer, } impl ModuleGraphBuilder { #[allow(clippy::too_many_arguments)] pub fn new( - options: Arc<CliOptions>, caches: Arc<cache::Caches>, + cjs_tracker: Arc<CjsTracker>, + cli_options: Arc<CliOptions>, + file_fetcher: Arc<FileFetcher>, fs: Arc<dyn FileSystem>, - resolver: Arc<CliGraphResolver>, - npm_resolver: Arc<dyn CliNpmResolver>, - module_info_cache: Arc<ModuleInfoCache>, - parsed_source_cache: Arc<ParsedSourceCache>, + global_http_cache: Arc<GlobalHttpCache>, + in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>, lockfile: 
Option<Arc<CliLockfile>>, maybe_file_watcher_reporter: Option<FileWatcherReporter>, - file_fetcher: Arc<FileFetcher>, - global_http_cache: Arc<GlobalHttpCache>, + module_info_cache: Arc<ModuleInfoCache>, + npm_resolver: Arc<dyn CliNpmResolver>, + parsed_source_cache: Arc<ParsedSourceCache>, + resolver: Arc<CliResolver>, root_permissions_container: PermissionsContainer, ) -> Self { Self { - options, caches, + cjs_tracker, + cli_options, + file_fetcher, fs, - resolver, - npm_resolver, - module_info_cache, - parsed_source_cache, + global_http_cache, + in_npm_pkg_checker, lockfile, maybe_file_watcher_reporter, - file_fetcher, - global_http_cache, + module_info_cache, + npm_resolver, + parsed_source_cache, + resolver, root_permissions_container, } } @@ -504,19 +514,17 @@ impl ModuleGraphBuilder { } let maybe_imports = if options.graph_kind.include_types() { - self.options.to_compiler_option_types()? + self.cli_options.to_compiler_option_types()? } else { Vec::new() }; - let analyzer = self - .module_info_cache - .as_module_analyzer(&self.parsed_source_cache); + let analyzer = self.module_info_cache.as_module_analyzer(); let mut loader = match options.loader { Some(loader) => MutLoaderRef::Borrowed(loader), None => MutLoaderRef::Owned(self.create_graph_loader()), }; let cli_resolver = &self.resolver; - let graph_resolver = cli_resolver.as_graph_resolver(); + let graph_resolver = self.create_graph_resolver()?; let graph_npm_resolver = cli_resolver.create_graph_npm_resolver(); let maybe_file_watcher_reporter = self .maybe_file_watcher_reporter @@ -541,7 +549,7 @@ impl ModuleGraphBuilder { npm_resolver: Some(&graph_npm_resolver), module_analyzer: &analyzer, reporter: maybe_file_watcher_reporter, - resolver: Some(graph_resolver), + resolver: Some(&graph_resolver), locker: locker.as_mut().map(|l| l as _), }, ) @@ -558,7 +566,7 @@ impl ModuleGraphBuilder { // ensure an "npm install" is done if the user has explicitly // opted into using a node_modules directory if self - 
.options + .cli_options .node_modules_dir()? .map(|m| m.uses_node_modules_dir()) .unwrap_or(false) @@ -664,16 +672,16 @@ impl ModuleGraphBuilder { }; let parser = self.parsed_source_cache.as_capturing_parser(); let cli_resolver = &self.resolver; - let graph_resolver = cli_resolver.as_graph_resolver(); + let graph_resolver = self.create_graph_resolver()?; let graph_npm_resolver = cli_resolver.create_graph_npm_resolver(); graph.build_fast_check_type_graph( deno_graph::BuildFastCheckTypeGraphOptions { - jsr_url_provider: &CliJsrUrlProvider, + es_parser: Some(&parser), fast_check_cache: fast_check_cache.as_ref().map(|c| c as _), fast_check_dts: false, - module_parser: Some(&parser), - resolver: Some(graph_resolver), + jsr_url_provider: &CliJsrUrlProvider, + resolver: Some(&graph_resolver), npm_resolver: Some(&graph_npm_resolver), workspace_fast_check: options.workspace_fast_check, }, @@ -692,14 +700,15 @@ impl ModuleGraphBuilder { ) -> cache::FetchCacher { cache::FetchCacher::new( self.file_fetcher.clone(), + self.fs.clone(), self.global_http_cache.clone(), - self.npm_resolver.clone(), + self.in_npm_pkg_checker.clone(), self.module_info_cache.clone(), cache::FetchCacherOptions { - file_header_overrides: self.options.resolve_file_header_overrides(), + file_header_overrides: self.cli_options.resolve_file_header_overrides(), permissions, is_deno_publish: matches!( - self.options.sub_command(), + self.cli_options.sub_command(), crate::args::DenoSubcommand::Publish { .. 
} ), }, @@ -726,16 +735,28 @@ impl ModuleGraphBuilder { &self.fs, roots, GraphValidOptions { - kind: if self.options.type_check_mode().is_true() { + kind: if self.cli_options.type_check_mode().is_true() { GraphKind::All } else { GraphKind::CodeOnly }, - check_js: self.options.check_js(), + check_js: self.cli_options.check_js(), exit_integrity_errors: true, }, ) } + + fn create_graph_resolver(&self) -> Result<CliGraphResolver, AnyError> { + let jsx_import_source_config = self + .cli_options + .workspace() + .to_maybe_jsx_import_source_config()?; + Ok(CliGraphResolver { + cjs_tracker: &self.cjs_tracker, + resolver: &self.resolver, + jsx_import_source_config, + }) + } } /// Adds more explanatory information to a resolution error. @@ -998,7 +1019,11 @@ impl deno_graph::source::Reporter for FileWatcherReporter { ) { let mut file_paths = self.file_paths.lock(); if specifier.scheme() == "file" { - file_paths.push(specifier.to_file_path().unwrap()); + // Don't trust that the path is a valid path at this point: + // https://github.com/denoland/deno/issues/26209. 
+ if let Ok(file_path) = specifier.to_file_path() { + file_paths.push(file_path); + } } if modules_done == modules_total { @@ -1136,6 +1161,53 @@ fn format_deno_graph_error(err: &dyn Error) -> String { message } +#[derive(Debug)] +struct CliGraphResolver<'a> { + cjs_tracker: &'a CjsTracker, + resolver: &'a CliResolver, + jsx_import_source_config: Option<JsxImportSourceConfig>, +} + +impl<'a> deno_graph::source::Resolver for CliGraphResolver<'a> { + fn default_jsx_import_source(&self) -> Option<String> { + self + .jsx_import_source_config + .as_ref() + .and_then(|c| c.default_specifier.clone()) + } + + fn default_jsx_import_source_types(&self) -> Option<String> { + self + .jsx_import_source_config + .as_ref() + .and_then(|c| c.default_types_specifier.clone()) + } + + fn jsx_import_source_module(&self) -> &str { + self + .jsx_import_source_config + .as_ref() + .map(|c| c.module.as_str()) + .unwrap_or(deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE) + } + + fn resolve( + &self, + raw_specifier: &str, + referrer_range: &deno_graph::Range, + mode: ResolutionMode, + ) -> Result<ModuleSpecifier, ResolveError> { + self.resolver.resolve( + raw_specifier, + referrer_range, + self + .cjs_tracker + .get_referrer_kind(&referrer_range.specifier), + mode, + ) + } +} + #[cfg(test)] mod test { use std::sync::Arc; diff --git a/cli/http_util.rs b/cli/http_util.rs index cf244c525..4b17936d6 100644 --- a/cli/http_util.rs +++ b/cli/http_util.rs @@ -19,6 +19,7 @@ use deno_runtime::deno_fetch; use deno_runtime::deno_fetch::create_http_client; use deno_runtime::deno_fetch::CreateHttpClientOptions; use deno_runtime::deno_tls::RootCertStoreProvider; +use http::header; use http::header::HeaderName; use http::header::HeaderValue; use http::header::ACCEPT; @@ -204,6 +205,7 @@ pub struct FetchOnceArgs<'a> { pub maybe_accept: Option<String>, pub maybe_etag: Option<String>, pub maybe_auth_token: Option<AuthToken>, + pub maybe_auth: Option<(header::HeaderName, header::HeaderValue)>, pub 
maybe_progress_guard: Option<&'a UpdateGuard>, } @@ -382,6 +384,8 @@ impl HttpClient { request .headers_mut() .insert(AUTHORIZATION, authorization_val); + } else if let Some((header, value)) = args.maybe_auth { + request.headers_mut().insert(header, value); } if let Some(accept) = args.maybe_accept { let accepts_val = HeaderValue::from_str(&accept)?; @@ -470,15 +474,23 @@ impl HttpClient { } } - pub async fn download_with_progress( + pub async fn download_with_progress_and_retries( &self, url: Url, maybe_header: Option<(HeaderName, HeaderValue)>, progress_guard: &UpdateGuard, ) -> Result<Option<Vec<u8>>, DownloadError> { - self - .download_inner(url, maybe_header, Some(progress_guard)) - .await + crate::util::retry::retry( + || { + self.download_inner( + url.clone(), + maybe_header.clone(), + Some(progress_guard), + ) + }, + |e| matches!(e, DownloadError::BadResponse(_) | DownloadError::Fetch(_)), + ) + .await } pub async fn get_redirected_url( @@ -784,6 +796,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; if let Ok(FetchOnceResult::Code(body, headers)) = result { @@ -810,6 +823,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; if let Ok(FetchOnceResult::Code(body, headers)) = result { @@ -837,6 +851,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; if let Ok(FetchOnceResult::Code(body, headers)) = result { @@ -858,6 +873,7 @@ mod test { maybe_etag: Some("33a64df551425fcc55e".to_string()), maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; assert_eq!(res.unwrap(), FetchOnceResult::NotModified); @@ -877,6 +893,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; if let Ok(FetchOnceResult::Code(body, headers)) = result { @@ -906,6 +923,7 @@ mod test { maybe_etag: None, 
maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; if let Ok(FetchOnceResult::Code(body, _)) = result { @@ -931,6 +949,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; if let Ok(FetchOnceResult::Redirect(url, _)) = result { @@ -966,6 +985,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; if let Ok(FetchOnceResult::Code(body, headers)) = result { @@ -1013,6 +1033,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; @@ -1075,6 +1096,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; @@ -1128,6 +1150,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; if let Ok(FetchOnceResult::Code(body, headers)) = result { @@ -1169,6 +1192,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; if let Ok(FetchOnceResult::Code(body, headers)) = result { @@ -1191,6 +1215,7 @@ mod test { maybe_etag: Some("33a64df551425fcc55e".to_string()), maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; assert_eq!(res.unwrap(), FetchOnceResult::NotModified); @@ -1225,6 +1250,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; if let Ok(FetchOnceResult::Code(body, headers)) = result { @@ -1254,6 +1280,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; assert!(result.is_err()); @@ -1275,6 +1302,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; @@ -1298,6 +1326,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: 
None, }) .await; diff --git a/cli/js/40_jupyter.js b/cli/js/40_jupyter.js index ace50d6dc..198b6a350 100644 --- a/cli/js/40_jupyter.js +++ b/cli/js/40_jupyter.js @@ -177,6 +177,52 @@ function isCanvasLike(obj) { return obj !== null && typeof obj === "object" && "toDataURL" in obj; } +function isJpg(obj) { + // Check if obj is a Uint8Array + if (!(obj instanceof Uint8Array)) { + return false; + } + + // JPG files start with the magic bytes FF D8 + if (obj.length < 2 || obj[0] !== 0xFF || obj[1] !== 0xD8) { + return false; + } + + // JPG files end with the magic bytes FF D9 + if ( + obj.length < 2 || obj[obj.length - 2] !== 0xFF || + obj[obj.length - 1] !== 0xD9 + ) { + return false; + } + + return true; +} + +function isPng(obj) { + // Check if obj is a Uint8Array + if (!(obj instanceof Uint8Array)) { + return false; + } + + // PNG files start with a specific 8-byte signature + const pngSignature = [137, 80, 78, 71, 13, 10, 26, 10]; + + // Check if the array is at least as long as the signature + if (obj.length < pngSignature.length) { + return false; + } + + // Check each byte of the signature + for (let i = 0; i < pngSignature.length; i++) { + if (obj[i] !== pngSignature[i]) { + return false; + } + } + + return true; +} + /** Possible HTML and SVG Elements */ function isSVGElementLike(obj) { return obj !== null && typeof obj === "object" && "outerHTML" in obj && @@ -233,6 +279,16 @@ async function format(obj) { if (isDataFrameLike(obj)) { return extractDataFrame(obj); } + if (isJpg(obj)) { + return { + "image/jpeg": core.ops.op_base64_encode(obj), + }; + } + if (isPng(obj)) { + return { + "image/png": core.ops.op_base64_encode(obj), + }; + } if (isSVGElementLike(obj)) { return { "image/svg+xml": obj.outerHTML, @@ -314,6 +370,28 @@ const html = createTaggedTemplateDisplayable("text/html"); */ const svg = createTaggedTemplateDisplayable("image/svg+xml"); +function image(obj) { + if (typeof obj === "string") { + try { + obj = Deno.readFileSync(obj); + } catch { + // 
pass + } + } + + if (isJpg(obj)) { + return makeDisplayable({ "image/jpeg": core.ops.op_base64_encode(obj) }); + } + + if (isPng(obj)) { + return makeDisplayable({ "image/png": core.ops.op_base64_encode(obj) }); + } + + throw new TypeError( + "Object is not a valid image or a path to an image. `Deno.jupyter.image` supports displaying JPG or PNG images.", + ); +} + function isMediaBundle(obj) { if (obj == null || typeof obj !== "object" || Array.isArray(obj)) { return false; @@ -465,6 +543,7 @@ function enableJupyter() { md, html, svg, + image, $display, }; } diff --git a/cli/lsp/analysis.rs b/cli/lsp/analysis.rs index 8c2e8bb1d..044b1573b 100644 --- a/cli/lsp/analysis.rs +++ b/cli/lsp/analysis.rs @@ -10,15 +10,17 @@ use super::tsc; use super::urls::url_to_uri; use crate::args::jsr_url; +use crate::lsp::logging::lsp_warn; use crate::lsp::search::PackageSearchApi; use crate::tools::lint::CliLinter; +use crate::util::path::relative_specifier; use deno_config::workspace::MappedResolution; +use deno_graph::source::ResolutionMode; use deno_lint::diagnostic::LintDiagnosticRange; use deno_ast::SourceRange; use deno_ast::SourceRangedForSpanned; use deno_ast::SourceTextInfo; -use deno_core::anyhow::anyhow; use deno_core::error::custom_error; use deno_core::error::AnyError; use deno_core::serde::Deserialize; @@ -37,9 +39,10 @@ use deno_semver::package::PackageReq; use deno_semver::package::PackageReqReference; use deno_semver::Version; use import_map::ImportMap; -use node_resolver::NpmResolver; +use node_resolver::NodeModuleKind; use once_cell::sync::Lazy; use regex::Regex; +use std::borrow::Cow; use std::cmp::Ordering; use std::collections::HashMap; use std::collections::HashSet; @@ -229,6 +232,7 @@ pub struct TsResponseImportMapper<'a> { documents: &'a Documents, maybe_import_map: Option<&'a ImportMap>, resolver: &'a LspResolver, + tsc_specifier_map: &'a tsc::TscSpecifierMap, file_referrer: ModuleSpecifier, } @@ -237,12 +241,14 @@ impl<'a> TsResponseImportMapper<'a> { 
documents: &'a Documents, maybe_import_map: Option<&'a ImportMap>, resolver: &'a LspResolver, + tsc_specifier_map: &'a tsc::TscSpecifierMap, file_referrer: &ModuleSpecifier, ) -> Self { Self { documents, maybe_import_map, resolver, + tsc_specifier_map, file_referrer: file_referrer.clone(), } } @@ -336,7 +342,11 @@ impl<'a> TsResponseImportMapper<'a> { .resolver .maybe_managed_npm_resolver(Some(&self.file_referrer)) { - if npm_resolver.in_npm_package(specifier) { + let in_npm_pkg = self + .resolver + .in_npm_pkg_checker(Some(&self.file_referrer)) + .in_npm_package(specifier); + if in_npm_pkg { if let Ok(Some(pkg_id)) = npm_resolver.resolve_pkg_id_from_specifier(specifier) { @@ -383,6 +393,11 @@ impl<'a> TsResponseImportMapper<'a> { } } } + } else if let Some(dep_name) = self + .resolver + .file_url_to_package_json_dep(specifier, Some(&self.file_referrer)) + { + return Some(dep_name); } // check if the import map has this specifier @@ -452,20 +467,39 @@ impl<'a> TsResponseImportMapper<'a> { &self, specifier: &str, referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, ) -> Option<String> { - if let Ok(specifier) = referrer.join(specifier) { - if let Some(specifier) = self.check_specifier(&specifier, referrer) { - return Some(specifier); - } - } - let specifier = specifier.strip_suffix(".js").unwrap_or(specifier); - for ext in SUPPORTED_EXTENSIONS { - let specifier_with_ext = format!("{specifier}{ext}"); - if self - .documents - .contains_import(&specifier_with_ext, referrer) + let specifier_stem = specifier.strip_suffix(".js").unwrap_or(specifier); + let specifiers = std::iter::once(Cow::Borrowed(specifier)).chain( + SUPPORTED_EXTENSIONS + .iter() + .map(|ext| Cow::Owned(format!("{specifier_stem}{ext}"))), + ); + for specifier in specifiers { + if let Some(specifier) = self + .resolver + .as_cli_resolver(Some(&self.file_referrer)) + .resolve( + &specifier, + &deno_graph::Range { + specifier: referrer.clone(), + start: deno_graph::Position::zeroed(), + end: 
deno_graph::Position::zeroed(), + }, + referrer_kind, + ResolutionMode::Types, + ) + .ok() + .and_then(|s| self.tsc_specifier_map.normalize(s.as_str()).ok()) + .filter(|s| self.documents.exists(s, Some(&self.file_referrer))) { - return Some(specifier_with_ext); + if let Some(specifier) = self + .check_specifier(&specifier, referrer) + .or_else(|| relative_specifier(referrer, &specifier)) + .filter(|s| !s.contains("/node_modules/")) + { + return Some(specifier); + } } } None @@ -475,10 +509,11 @@ impl<'a> TsResponseImportMapper<'a> { &self, specifier_text: &str, referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, ) -> bool { self .resolver - .as_graph_resolver(Some(&self.file_referrer)) + .as_cli_resolver(Some(&self.file_referrer)) .resolve( specifier_text, &deno_graph::Range { @@ -486,6 +521,7 @@ impl<'a> TsResponseImportMapper<'a> { start: deno_graph::Position::zeroed(), end: deno_graph::Position::zeroed(), }, + referrer_kind, deno_graph::source::ResolutionMode::Types, ) .is_ok() @@ -554,9 +590,11 @@ fn try_reverse_map_package_json_exports( /// like an import and rewrite the import specifier to include the extension pub fn fix_ts_import_changes( referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, changes: &[tsc::FileTextChanges], - import_mapper: &TsResponseImportMapper, + language_server: &language_server::Inner, ) -> Result<Vec<tsc::FileTextChanges>, AnyError> { + let import_mapper = language_server.get_ts_response_import_mapper(referrer); let mut r = Vec::new(); for change in changes { let mut text_changes = Vec::new(); @@ -569,8 +607,8 @@ pub fn fix_ts_import_changes( if let Some(captures) = IMPORT_SPECIFIER_RE.captures(line) { let specifier = captures.iter().skip(1).find_map(|s| s).unwrap().as_str(); - if let Some(new_specifier) = - import_mapper.check_unresolved_specifier(specifier, referrer) + if let Some(new_specifier) = import_mapper + .check_unresolved_specifier(specifier, referrer, referrer_kind) { line.replace(specifier, 
&new_specifier) } else { @@ -598,68 +636,64 @@ pub fn fix_ts_import_changes( /// Fix tsc import code actions so that the module specifier is correct for /// resolution by Deno (includes the extension). -fn fix_ts_import_action( +fn fix_ts_import_action<'a>( referrer: &ModuleSpecifier, - action: &tsc::CodeFixAction, - import_mapper: &TsResponseImportMapper, -) -> Result<Option<tsc::CodeFixAction>, AnyError> { - if matches!( + referrer_kind: NodeModuleKind, + action: &'a tsc::CodeFixAction, + language_server: &language_server::Inner, +) -> Option<Cow<'a, tsc::CodeFixAction>> { + if !matches!( action.fix_name.as_str(), "import" | "fixMissingFunctionDeclaration" ) { - let change = action + return Some(Cow::Borrowed(action)); + } + let specifier = (|| { + let text_change = action.changes.first()?.text_changes.first()?; + let captures = IMPORT_SPECIFIER_RE.captures(&text_change.new_text)?; + Some(captures.get(1)?.as_str()) + })(); + let Some(specifier) = specifier else { + return Some(Cow::Borrowed(action)); + }; + let import_mapper = language_server.get_ts_response_import_mapper(referrer); + if let Some(new_specifier) = + import_mapper.check_unresolved_specifier(specifier, referrer, referrer_kind) + { + let description = action.description.replace(specifier, &new_specifier); + let changes = action .changes - .first() - .ok_or_else(|| anyhow!("Unexpected action changes."))?; - let text_change = change - .text_changes - .first() - .ok_or_else(|| anyhow!("Missing text change."))?; - if let Some(captures) = IMPORT_SPECIFIER_RE.captures(&text_change.new_text) - { - let specifier = captures - .get(1) - .ok_or_else(|| anyhow!("Missing capture."))? 
- .as_str(); - if let Some(new_specifier) = - import_mapper.check_unresolved_specifier(specifier, referrer) - { - let description = action.description.replace(specifier, &new_specifier); - let changes = action - .changes + .iter() + .map(|c| { + let text_changes = c + .text_changes .iter() - .map(|c| { - let text_changes = c - .text_changes - .iter() - .map(|tc| tsc::TextChange { - span: tc.span.clone(), - new_text: tc.new_text.replace(specifier, &new_specifier), - }) - .collect(); - tsc::FileTextChanges { - file_name: c.file_name.clone(), - text_changes, - is_new_file: c.is_new_file, - } + .map(|tc| tsc::TextChange { + span: tc.span.clone(), + new_text: tc.new_text.replace(specifier, &new_specifier), }) .collect(); - - return Ok(Some(tsc::CodeFixAction { - description, - changes, - commands: None, - fix_name: action.fix_name.clone(), - fix_id: None, - fix_all_description: None, - })); - } else if !import_mapper.is_valid_import(specifier, referrer) { - return Ok(None); - } - } + tsc::FileTextChanges { + file_name: c.file_name.clone(), + text_changes, + is_new_file: c.is_new_file, + } + }) + .collect(); + + Some(Cow::Owned(tsc::CodeFixAction { + description, + changes, + commands: None, + fix_name: action.fix_name.clone(), + fix_id: None, + fix_all_description: None, + })) + } else if !import_mapper.is_valid_import(specifier, referrer, referrer_kind) { + None + } else { + Some(Cow::Borrowed(action)) } - - Ok(Some(action.clone())) } /// Determines if two TypeScript diagnostic codes are effectively equivalent. 
@@ -720,8 +754,14 @@ pub fn ts_changes_to_edit( ) -> Result<Option<lsp::WorkspaceEdit>, AnyError> { let mut text_document_edits = Vec::new(); for change in changes { - let text_document_edit = change.to_text_document_edit(language_server)?; - text_document_edits.push(text_document_edit); + let edit = match change.to_text_document_edit(language_server) { + Ok(e) => e, + Err(err) => { + lsp_warn!("Couldn't convert text document edit: {:#}", err); + continue; + } + }; + text_document_edits.push(edit); } Ok(Some(lsp::WorkspaceEdit { changes: None, @@ -730,7 +770,7 @@ pub fn ts_changes_to_edit( })) } -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CodeActionData { pub specifier: ModuleSpecifier, @@ -983,6 +1023,7 @@ impl CodeActionCollection { pub fn add_ts_fix_action( &mut self, specifier: &ModuleSpecifier, + specifier_kind: NodeModuleKind, action: &tsc::CodeFixAction, diagnostic: &lsp::Diagnostic, language_server: &language_server::Inner, @@ -1000,11 +1041,8 @@ impl CodeActionCollection { "The action returned from TypeScript is unsupported.", )); } - let Some(action) = fix_ts_import_action( - specifier, - action, - &language_server.get_ts_response_import_mapper(specifier), - )? 
+ let Some(action) = + fix_ts_import_action(specifier, specifier_kind, action, language_server) else { return Ok(()); }; @@ -1027,7 +1065,7 @@ impl CodeActionCollection { }); self .actions - .push(CodeActionKind::Tsc(code_action, action.clone())); + .push(CodeActionKind::Tsc(code_action, action.as_ref().clone())); if let Some(fix_id) = &action.fix_id { if let Some(CodeActionKind::Tsc(existing_fix_all, existing_action)) = @@ -1054,10 +1092,12 @@ impl CodeActionCollection { specifier: &ModuleSpecifier, diagnostic: &lsp::Diagnostic, ) { - let data = Some(json!({ - "specifier": specifier, - "fixId": action.fix_id, - })); + let data = action.fix_id.as_ref().map(|fix_id| { + json!(CodeActionData { + specifier: specifier.clone(), + fix_id: fix_id.clone(), + }) + }); let title = if let Some(description) = &action.fix_all_description { description.clone() } else { @@ -1206,14 +1246,11 @@ impl CodeActionCollection { }), ); - match parsed_source.program_ref() { - deno_ast::swc::ast::Program::Module(module) => module - .body - .iter() - .find(|i| i.range().contains(&specifier_range)) - .map(|i| text_info.line_and_column_index(i.range().start)), - deno_ast::swc::ast::Program::Script(_) => None, - } + parsed_source + .program_ref() + .body() + .find(|i| i.range().contains(&specifier_range)) + .map(|i| text_info.line_and_column_index(i.range().start)) } async fn deno_types_for_npm_action( @@ -1247,6 +1284,9 @@ impl CodeActionCollection { import_start_from_specifier(document, i) })?; let referrer = document.specifier(); + let referrer_kind = language_server + .is_cjs_resolver + .get_doc_module_kind(document); let file_referrer = document.file_referrer(); let config_data = language_server .config @@ -1269,10 +1309,11 @@ impl CodeActionCollection { if !config_data.byonm { return None; } - if !language_server - .resolver - .is_bare_package_json_dep(&dep_key, referrer) - { + if !language_server.resolver.is_bare_package_json_dep( + &dep_key, + referrer, + referrer_kind, + ) { return 
None; } NpmPackageReqReference::from_str(&format!("npm:{}", &dep_key)).ok()? @@ -1291,7 +1332,7 @@ impl CodeActionCollection { } if language_server .resolver - .npm_to_file_url(&npm_ref, document.specifier(), file_referrer) + .npm_to_file_url(&npm_ref, referrer, referrer_kind, file_referrer) .is_some() { // The package import has types. diff --git a/cli/lsp/capabilities.rs b/cli/lsp/capabilities.rs index e93d3b7c2..5cdb1224d 100644 --- a/cli/lsp/capabilities.rs +++ b/cli/lsp/capabilities.rs @@ -147,11 +147,11 @@ pub fn server_capabilities( moniker_provider: None, experimental: Some(json!({ "denoConfigTasks": true, - "testingApi":true, + "testingApi": true, + "didRefreshDenoConfigurationTreeNotifications": true, })), inlay_hint_provider: Some(OneOf::Left(true)), position_encoding: None, - // TODO(nayeemrmn): Support pull-based diagnostics. diagnostic_provider: None, inline_value_provider: None, inline_completion_provider: None, diff --git a/cli/lsp/client.rs b/cli/lsp/client.rs index b3f0d64fa..65865d5b3 100644 --- a/cli/lsp/client.rs +++ b/cli/lsp/client.rs @@ -92,6 +92,19 @@ impl Client { }); } + pub fn send_did_refresh_deno_configuration_tree_notification( + &self, + params: lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams, + ) { + // do on a task in case the caller currently is in the lsp lock + let client = self.0.clone(); + spawn(async move { + client + .send_did_refresh_deno_configuration_tree_notification(params) + .await; + }); + } + pub fn send_did_change_deno_configuration_notification( &self, params: lsp_custom::DidChangeDenoConfigurationNotificationParams, @@ -169,6 +182,10 @@ trait ClientTrait: Send + Sync { params: lsp_custom::DiagnosticBatchNotificationParams, ); async fn send_test_notification(&self, params: TestingNotification); + async fn send_did_refresh_deno_configuration_tree_notification( + &self, + params: lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams, + ); async fn send_did_change_deno_configuration_notification( 
&self, params: lsp_custom::DidChangeDenoConfigurationNotificationParams, @@ -249,6 +266,18 @@ impl ClientTrait for TowerClient { } } + async fn send_did_refresh_deno_configuration_tree_notification( + &self, + params: lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams, + ) { + self + .0 + .send_notification::<lsp_custom::DidRefreshDenoConfigurationTreeNotification>( + params, + ) + .await + } + async fn send_did_change_deno_configuration_notification( &self, params: lsp_custom::DidChangeDenoConfigurationNotificationParams, @@ -366,6 +395,12 @@ impl ClientTrait for ReplClient { async fn send_test_notification(&self, _params: TestingNotification) {} + async fn send_did_refresh_deno_configuration_tree_notification( + &self, + _params: lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams, + ) { + } + async fn send_did_change_deno_configuration_notification( &self, _params: lsp_custom::DidChangeDenoConfigurationNotificationParams, diff --git a/cli/lsp/code_lens.rs b/cli/lsp/code_lens.rs index e117888fb..a57ca3ac9 100644 --- a/cli/lsp/code_lens.rs +++ b/cli/lsp/code_lens.rs @@ -421,7 +421,7 @@ pub fn collect_test( ) -> Result<Vec<lsp::CodeLens>, AnyError> { let mut collector = DenoTestCollector::new(specifier.clone(), parsed_source.clone()); - parsed_source.module().visit_with(&mut collector); + parsed_source.program().visit_with(&mut collector); Ok(collector.take()) } @@ -581,7 +581,7 @@ mod tests { .unwrap(); let mut collector = DenoTestCollector::new(specifier, parsed_module.clone()); - parsed_module.module().visit_with(&mut collector); + parsed_module.program().visit_with(&mut collector); assert_eq!( collector.take(), vec![ diff --git a/cli/lsp/completions.rs b/cli/lsp/completions.rs index 1590743b2..3ee8ae93e 100644 --- a/cli/lsp/completions.rs +++ b/cli/lsp/completions.rs @@ -9,6 +9,7 @@ use super::jsr::CliJsrSearchApi; use super::lsp_custom; use super::npm::CliNpmSearchApi; use super::registries::ModuleRegistry; +use 
super::resolver::LspIsCjsResolver; use super::resolver::LspResolver; use super::search::PackageSearchApi; use super::tsc; @@ -35,6 +36,7 @@ use deno_semver::package::PackageNv; use import_map::ImportMap; use indexmap::IndexSet; use lsp_types::CompletionList; +use node_resolver::NodeModuleKind; use once_cell::sync::Lazy; use regex::Regex; use tower_lsp::lsp_types as lsp; @@ -159,15 +161,17 @@ pub async fn get_import_completions( jsr_search_api: &CliJsrSearchApi, npm_search_api: &CliNpmSearchApi, documents: &Documents, + is_cjs_resolver: &LspIsCjsResolver, resolver: &LspResolver, maybe_import_map: Option<&ImportMap>, ) -> Option<lsp::CompletionResponse> { let document = documents.get(specifier)?; + let specifier_kind = is_cjs_resolver.get_doc_module_kind(&document); let file_referrer = document.file_referrer(); let (text, _, range) = document.get_maybe_dependency(position)?; let range = to_narrow_lsp_range(document.text_info(), &range); let resolved = resolver - .as_graph_resolver(file_referrer) + .as_cli_resolver(file_referrer) .resolve( &text, &Range { @@ -175,6 +179,7 @@ pub async fn get_import_completions( start: deno_graph::Position::zeroed(), end: deno_graph::Position::zeroed(), }, + specifier_kind, ResolutionMode::Execution, ) .ok(); @@ -201,7 +206,7 @@ pub async fn get_import_completions( // completions for import map specifiers Some(lsp::CompletionResponse::List(completion_list)) } else if let Some(completion_list) = - get_local_completions(specifier, &text, &range, resolver) + get_local_completions(specifier, specifier_kind, &text, &range, resolver) { // completions for local relative modules Some(lsp::CompletionResponse::List(completion_list)) @@ -355,24 +360,26 @@ fn get_import_map_completions( /// Return local completions that are relative to the base specifier. 
fn get_local_completions( - base: &ModuleSpecifier, + referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, text: &str, range: &lsp::Range, resolver: &LspResolver, ) -> Option<CompletionList> { - if base.scheme() != "file" { + if referrer.scheme() != "file" { return None; } let parent = &text[..text.char_indices().rfind(|(_, c)| *c == '/')?.0 + 1]; let resolved_parent = resolver - .as_graph_resolver(Some(base)) + .as_cli_resolver(Some(referrer)) .resolve( parent, &Range { - specifier: base.clone(), + specifier: referrer.clone(), start: deno_graph::Position::zeroed(), end: deno_graph::Position::zeroed(), }, + referrer_kind, ResolutionMode::Execution, ) .ok()?; @@ -385,7 +392,7 @@ fn get_local_completions( let de = de.ok()?; let label = de.path().file_name()?.to_string_lossy().to_string(); let entry_specifier = resolve_path(de.path().to_str()?, &cwd).ok()?; - if entry_specifier == *base { + if entry_specifier == *referrer { return None; } let full_text = format!("{parent}{label}"); @@ -905,6 +912,7 @@ mod tests { ModuleSpecifier::from_file_path(file_c).expect("could not create"); let actual = get_local_completions( &specifier, + NodeModuleKind::Esm, "./", &lsp::Range { start: lsp::Position { diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs index 07fdd3c65..ea77e36bc 100644 --- a/cli/lsp/config.rs +++ b/cli/lsp/config.rs @@ -4,6 +4,7 @@ use deno_ast::MediaType; use deno_config::deno_json::DenoJsonCache; use deno_config::deno_json::FmtConfig; use deno_config::deno_json::FmtOptionsConfig; +use deno_config::deno_json::JsxImportSourceConfig; use deno_config::deno_json::LintConfig; use deno_config::deno_json::NodeModulesDirMode; use deno_config::deno_json::TestConfig; @@ -41,6 +42,7 @@ use deno_runtime::deno_node::PackageJson; use indexmap::IndexSet; use lsp_types::ClientCapabilities; use std::collections::BTreeMap; +use std::collections::BTreeSet; use std::collections::HashMap; use std::ops::Deref; use std::ops::DerefMut; @@ -50,6 +52,8 @@ use std::sync::Arc; use 
tower_lsp::lsp_types as lsp; use super::logging::lsp_log; +use super::lsp_custom; +use super::urls::url_to_uri; use crate::args::discover_npmrc_from_workspace; use crate::args::has_flag_env_var; use crate::args::CliLockfile; @@ -437,6 +441,8 @@ pub struct LanguagePreferences { pub use_aliases_for_renames: bool, #[serde(default)] pub quote_style: QuoteStyle, + #[serde(default)] + pub prefer_type_only_auto_imports: bool, } impl Default for LanguagePreferences { @@ -447,6 +453,7 @@ impl Default for LanguagePreferences { auto_import_file_exclude_patterns: vec![], use_aliases_for_renames: true, quote_style: Default::default(), + prefer_type_only_auto_imports: false, } } } @@ -979,7 +986,7 @@ impl Config { | MediaType::Tsx => Some(&workspace_settings.typescript), MediaType::Json | MediaType::Wasm - | MediaType::TsBuildInfo + | MediaType::Css | MediaType::SourceMap | MediaType::Unknown => None, } @@ -1185,6 +1192,7 @@ pub struct ConfigData { pub resolver: Arc<WorkspaceResolver>, pub sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>, pub import_map_from_settings: Option<ModuleSpecifier>, + pub unstable: BTreeSet<String>, watched_files: HashMap<ModuleSpecifier, ConfigWatchedFileType>, } @@ -1582,9 +1590,16 @@ impl ConfigData { .join("\n") ); } + let unstable = member_dir + .workspace + .unstable_features() + .iter() + .chain(settings.unstable.as_deref()) + .cloned() + .collect::<BTreeSet<_>>(); let unstable_sloppy_imports = std::env::var("DENO_UNSTABLE_SLOPPY_IMPORTS") .is_ok() - || member_dir.workspace.has_unstable("sloppy-imports"); + || unstable.contains("sloppy-imports"); let sloppy_imports_resolver = unstable_sloppy_imports.then(|| { Arc::new(CliSloppyImportsResolver::new( SloppyImportsCachedFs::new_without_stat_cache(Arc::new( @@ -1625,6 +1640,7 @@ impl ConfigData { lockfile, npmrc, import_map_from_settings, + unstable, watched_files, } } @@ -1639,6 +1655,17 @@ impl ConfigData { self.member_dir.maybe_pkg_json() } + pub fn maybe_jsx_import_source_config( + 
&self, + ) -> Option<JsxImportSourceConfig> { + self + .member_dir + .workspace + .to_maybe_jsx_import_source_config() + .ok() + .flatten() + } + pub fn scope_contains_specifier(&self, specifier: &ModuleSpecifier) -> bool { specifier.as_str().starts_with(self.scope.as_str()) || self @@ -1716,14 +1743,14 @@ impl ConfigTree { .unwrap_or_else(|| Arc::new(FmtConfig::new_with_base(PathBuf::from("/")))) } - /// Returns (scope_uri, type). + /// Returns (scope_url, type). pub fn watched_file_type( &self, specifier: &ModuleSpecifier, ) -> Option<(&ModuleSpecifier, ConfigWatchedFileType)> { - for (scope_uri, data) in self.scopes.iter() { + for (scope_url, data) in self.scopes.iter() { if let Some(typ) = data.watched_files.get(specifier) { - return Some((scope_uri, *typ)); + return Some((scope_url, *typ)); } } None @@ -1747,6 +1774,46 @@ impl ConfigTree { .any(|data| data.watched_files.contains_key(specifier)) } + pub fn to_did_refresh_params( + &self, + ) -> lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams { + let data = self + .scopes + .values() + .filter_map(|data| { + let workspace_root_scope_uri = + Some(data.member_dir.workspace.root_dir()) + .filter(|s| *s != data.member_dir.dir_url()) + .and_then(|s| url_to_uri(s).ok()); + Some(lsp_custom::DenoConfigurationData { + scope_uri: url_to_uri(&data.scope).ok()?, + deno_json: data.maybe_deno_json().and_then(|c| { + if workspace_root_scope_uri.is_some() + && Some(&c.specifier) + == data + .member_dir + .workspace + .root_deno_json() + .map(|c| &c.specifier) + { + return None; + } + Some(lsp::TextDocumentIdentifier { + uri: url_to_uri(&c.specifier).ok()?, + }) + }), + package_json: data.maybe_pkg_json().and_then(|p| { + Some(lsp::TextDocumentIdentifier { + uri: url_to_uri(&p.specifier()).ok()?, + }) + }), + workspace_root_scope_uri, + }) + }) + .collect(); + lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams { data } + } + pub async fn refresh( &mut self, settings: &Settings, @@ -2209,6 +2276,7 @@ mod 
tests { auto_import_file_exclude_patterns: vec![], use_aliases_for_renames: true, quote_style: QuoteStyle::Auto, + prefer_type_only_auto_imports: false, }, suggest: CompletionSettings { complete_function_calls: false, @@ -2254,6 +2322,7 @@ mod tests { auto_import_file_exclude_patterns: vec![], use_aliases_for_renames: true, quote_style: QuoteStyle::Auto, + prefer_type_only_auto_imports: false, }, suggest: CompletionSettings { complete_function_calls: false, diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs index caabd3f04..e4fb82e58 100644 --- a/cli/lsp/diagnostics.rs +++ b/cli/lsp/diagnostics.rs @@ -1499,7 +1499,11 @@ fn diagnose_dependency( .data_for_specifier(referrer_doc.file_referrer().unwrap_or(referrer)) .and_then(|d| d.resolver.maybe_import_map()); if let Some(import_map) = import_map { - if let Resolution::Ok(resolved) = &dependency.maybe_code { + let resolved = dependency + .maybe_code + .ok() + .or_else(|| dependency.maybe_type.ok()); + if let Some(resolved) = resolved { if let Some(to) = import_map.lookup(&resolved.specifier, referrer) { if dependency_key != to { diagnostics.push( @@ -1703,6 +1707,7 @@ mod tests { documents: Arc::new(documents), assets: Default::default(), config: Arc::new(config), + is_cjs_resolver: Default::default(), resolver, }, ) diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs index 7d1ca6810..b01544ddf 100644 --- a/cli/lsp/documents.rs +++ b/cli/lsp/documents.rs @@ -3,7 +3,10 @@ use super::cache::calculate_fs_version; use super::cache::LspCache; use super::config::Config; +use super::resolver::LspIsCjsResolver; use super::resolver::LspResolver; +use super::resolver::ScopeDepInfo; +use super::resolver::SingleReferrerGraphResolver; use super::testing::TestCollector; use super::testing::TestModule; use super::text::LineIndex; @@ -33,9 +36,9 @@ use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageReq; use indexmap::IndexMap; use indexmap::IndexSet; +use node_resolver::NodeModuleKind; use 
std::borrow::Cow; use std::collections::BTreeMap; -use std::collections::BTreeSet; use std::collections::HashMap; use std::collections::HashSet; use std::fs; @@ -272,7 +275,7 @@ fn get_maybe_test_module_fut( parsed_source.specifier().clone(), parsed_source.text_info_lazy().clone(), ); - parsed_source.module().visit_with(&mut collector); + parsed_source.program().visit_with(&mut collector); Arc::new(collector.take()) }) .map(Result::ok) @@ -293,6 +296,8 @@ pub struct Document { /// Contains the last-known-good set of dependencies from parsing the module. config: Arc<Config>, dependencies: Arc<IndexMap<String, deno_graph::Dependency>>, + /// If this is maybe a CJS script and maybe not an ES module. + is_script: Option<bool>, // TODO(nayeemrmn): This is unused, use it for scope attribution for remote // modules. file_referrer: Option<ModuleSpecifier>, @@ -323,6 +328,7 @@ impl Document { maybe_lsp_version: Option<i32>, maybe_language_id: Option<LanguageId>, maybe_headers: Option<HashMap<String, String>>, + is_cjs_resolver: &LspIsCjsResolver, resolver: Arc<LspResolver>, config: Arc<Config>, cache: &Arc<LspCache>, @@ -332,12 +338,8 @@ impl Document { .filter(|s| cache.is_valid_file_referrer(s)) .cloned() .or(file_referrer); - let media_type = resolve_media_type( - &specifier, - maybe_headers.as_ref(), - maybe_language_id, - &resolver, - ); + let media_type = + resolve_media_type(&specifier, maybe_headers.as_ref(), maybe_language_id); let (maybe_parsed_source, maybe_module) = if media_type_is_diagnosable(media_type) { parse_and_analyze_module( @@ -346,6 +348,7 @@ impl Document { maybe_headers.as_ref(), media_type, file_referrer.as_ref(), + is_cjs_resolver, &resolver, ) } else { @@ -371,6 +374,7 @@ impl Document { file_referrer.as_ref(), ), file_referrer, + is_script: maybe_module.as_ref().map(|m| m.is_script), maybe_types_dependency, line_index, maybe_language_id, @@ -392,6 +396,7 @@ impl Document { fn with_new_config( &self, + is_cjs_resolver: &LspIsCjsResolver, 
resolver: Arc<LspResolver>, config: Arc<Config>, ) -> Arc<Self> { @@ -399,11 +404,11 @@ impl Document { &self.specifier, self.maybe_headers.as_ref(), self.maybe_language_id, - &resolver, ); let dependencies; let maybe_types_dependency; let maybe_parsed_source; + let is_script; let maybe_test_module_fut; if media_type != self.media_type { let parsed_source_result = @@ -413,6 +418,7 @@ impl Document { &parsed_source_result, self.maybe_headers.as_ref(), self.file_referrer.as_ref(), + is_cjs_resolver, &resolver, ) .ok(); @@ -420,6 +426,7 @@ impl Document { .as_ref() .map(|m| Arc::new(m.dependencies.clone())) .unwrap_or_default(); + is_script = maybe_module.as_ref().map(|m| m.is_script); maybe_types_dependency = maybe_module .as_ref() .and_then(|m| Some(Arc::new(m.maybe_types_dependency.clone()?))); @@ -427,10 +434,19 @@ impl Document { maybe_test_module_fut = get_maybe_test_module_fut(maybe_parsed_source.as_ref(), &config); } else { - let graph_resolver = - resolver.as_graph_resolver(self.file_referrer.as_ref()); + let cli_resolver = resolver.as_cli_resolver(self.file_referrer.as_ref()); let npm_resolver = resolver.create_graph_npm_resolver(self.file_referrer.as_ref()); + let config_data = resolver.as_config_data(self.file_referrer.as_ref()); + let jsx_import_source_config = + config_data.and_then(|d| d.maybe_jsx_import_source_config()); + let resolver = SingleReferrerGraphResolver { + valid_referrer: &self.specifier, + referrer_kind: is_cjs_resolver + .get_lsp_referrer_kind(&self.specifier, self.is_script), + cli_resolver, + jsx_import_source_config: jsx_import_source_config.as_ref(), + }; dependencies = Arc::new( self .dependencies @@ -441,7 +457,7 @@ impl Document { d.with_new_resolver( s, &CliJsrUrlProvider, - Some(graph_resolver), + Some(&resolver), Some(&npm_resolver), ), ) @@ -451,10 +467,11 @@ impl Document { maybe_types_dependency = self.maybe_types_dependency.as_ref().map(|d| { Arc::new(d.with_new_resolver( &CliJsrUrlProvider, - Some(graph_resolver), + 
Some(&resolver), Some(&npm_resolver), )) }); + is_script = self.is_script; maybe_parsed_source = self.maybe_parsed_source().cloned(); maybe_test_module_fut = self .maybe_test_module_fut @@ -466,6 +483,7 @@ impl Document { // updated properties dependencies, file_referrer: self.file_referrer.clone(), + is_script, maybe_types_dependency, maybe_navigation_tree: Mutex::new(None), // maintain - this should all be copies/clones @@ -490,6 +508,7 @@ impl Document { fn with_change( &self, + is_cjs_resolver: &LspIsCjsResolver, version: i32, changes: Vec<lsp::TextDocumentContentChangeEvent>, ) -> Result<Arc<Self>, AnyError> { @@ -523,6 +542,7 @@ impl Document { self.maybe_headers.as_ref(), media_type, self.file_referrer.as_ref(), + is_cjs_resolver, self.resolver.as_ref(), ) } else { @@ -546,6 +566,7 @@ impl Document { get_maybe_test_module_fut(maybe_parsed_source.as_ref(), &self.config); Ok(Arc::new(Self { config: self.config.clone(), + is_script: maybe_module.as_ref().map(|m| m.is_script), specifier: self.specifier.clone(), file_referrer: self.file_referrer.clone(), maybe_fs_version: self.maybe_fs_version.clone(), @@ -580,6 +601,7 @@ impl Document { ), maybe_language_id: self.maybe_language_id, dependencies: self.dependencies.clone(), + is_script: self.is_script, maybe_types_dependency: self.maybe_types_dependency.clone(), text: self.text.clone(), text_info_cell: once_cell::sync::OnceCell::new(), @@ -607,6 +629,7 @@ impl Document { ), maybe_language_id: self.maybe_language_id, dependencies: self.dependencies.clone(), + is_script: self.is_script, maybe_types_dependency: self.maybe_types_dependency.clone(), text: self.text.clone(), text_info_cell: once_cell::sync::OnceCell::new(), @@ -655,6 +678,13 @@ impl Document { }) } + /// If this is maybe a CJS script and maybe not an ES module. + /// + /// Use `LspIsCjsResolver` to determine for sure. 
+ pub fn is_script(&self) -> Option<bool> { + self.is_script + } + pub fn line_index(&self) -> Arc<LineIndex> { self.line_index.clone() } @@ -764,14 +794,7 @@ fn resolve_media_type( specifier: &ModuleSpecifier, maybe_headers: Option<&HashMap<String, String>>, maybe_language_id: Option<LanguageId>, - resolver: &LspResolver, ) -> MediaType { - if resolver.in_node_modules(specifier) { - if let Some(media_type) = resolver.node_media_type(specifier) { - return media_type; - } - } - if let Some(language_id) = maybe_language_id { return MediaType::from_specifier_and_content_type( specifier, @@ -809,6 +832,7 @@ impl FileSystemDocuments { pub fn get( &self, specifier: &ModuleSpecifier, + is_cjs_resolver: &LspIsCjsResolver, resolver: &Arc<LspResolver>, config: &Arc<Config>, cache: &Arc<LspCache>, @@ -832,7 +856,14 @@ impl FileSystemDocuments { }; if dirty { // attempt to update the file on the file system - self.refresh_document(specifier, resolver, config, cache, file_referrer) + self.refresh_document( + specifier, + is_cjs_resolver, + resolver, + config, + cache, + file_referrer, + ) } else { old_doc } @@ -843,6 +874,7 @@ impl FileSystemDocuments { fn refresh_document( &self, specifier: &ModuleSpecifier, + is_cjs_resolver: &LspIsCjsResolver, resolver: &Arc<LspResolver>, config: &Arc<Config>, cache: &Arc<LspCache>, @@ -859,6 +891,7 @@ impl FileSystemDocuments { None, None, None, + is_cjs_resolver, resolver.clone(), config.clone(), cache, @@ -875,6 +908,7 @@ impl FileSystemDocuments { None, None, None, + is_cjs_resolver, resolver.clone(), config.clone(), cache, @@ -902,6 +936,7 @@ impl FileSystemDocuments { None, None, maybe_headers, + is_cjs_resolver, resolver.clone(), config.clone(), cache, @@ -942,6 +977,11 @@ pub struct Documents { /// The DENO_DIR that the documents looks for non-file based modules. cache: Arc<LspCache>, config: Arc<Config>, + /// Resolver for detecting if a document is CJS or ESM. 
+ is_cjs_resolver: Arc<LspIsCjsResolver>, + /// A resolver that takes into account currently loaded import map and JSX + /// settings. + resolver: Arc<LspResolver>, /// A flag that indicates that stated data is potentially invalid and needs to /// be recalculated before being considered valid. dirty: bool, @@ -949,15 +989,7 @@ pub struct Documents { open_docs: HashMap<ModuleSpecifier, Arc<Document>>, /// Documents stored on the file system. file_system_docs: Arc<FileSystemDocuments>, - /// A resolver that takes into account currently loaded import map and JSX - /// settings. - resolver: Arc<LspResolver>, - /// The npm package requirements found in npm specifiers. - npm_reqs_by_scope: - Arc<BTreeMap<Option<ModuleSpecifier>, BTreeSet<PackageReq>>>, - /// Config scopes that contain a node: specifier such that a @types/node - /// package should be injected. - scopes_with_node_specifier: Arc<HashSet<Option<ModuleSpecifier>>>, + dep_info_by_scope: Arc<BTreeMap<Option<ModuleSpecifier>, Arc<ScopeDepInfo>>>, } impl Documents { @@ -982,6 +1014,7 @@ impl Documents { // the cache for remote modules here in order to get the // x-typescript-types? None, + &self.is_cjs_resolver, self.resolver.clone(), self.config.clone(), &self.cache, @@ -1016,7 +1049,7 @@ impl Documents { )) })?; self.dirty = true; - let doc = doc.with_change(version, changes)?; + let doc = doc.with_change(&self.is_cjs_resolver, version, changes)?; self.open_docs.insert(doc.specifier().clone(), doc.clone()); Ok(doc) } @@ -1071,34 +1104,6 @@ impl Documents { self.cache.is_valid_file_referrer(specifier) } - /// Return `true` if the provided specifier can be resolved to a document, - /// otherwise `false`. 
- pub fn contains_import( - &self, - specifier: &str, - referrer: &ModuleSpecifier, - ) -> bool { - let file_referrer = self.get_file_referrer(referrer); - let maybe_specifier = self - .resolver - .as_graph_resolver(file_referrer.as_deref()) - .resolve( - specifier, - &deno_graph::Range { - specifier: referrer.clone(), - start: deno_graph::Position::zeroed(), - end: deno_graph::Position::zeroed(), - }, - ResolutionMode::Types, - ) - .ok(); - if let Some(import_specifier) = maybe_specifier { - self.exists(&import_specifier, file_referrer.as_deref()) - } else { - false - } - } - pub fn resolve_document_specifier( &self, specifier: &ModuleSpecifier, @@ -1147,17 +1152,20 @@ impl Documents { false } - pub fn npm_reqs_by_scope( + pub fn dep_info_by_scope( &mut self, - ) -> Arc<BTreeMap<Option<ModuleSpecifier>, BTreeSet<PackageReq>>> { - self.calculate_npm_reqs_if_dirty(); - self.npm_reqs_by_scope.clone() + ) -> Arc<BTreeMap<Option<ModuleSpecifier>, Arc<ScopeDepInfo>>> { + self.calculate_dep_info_if_dirty(); + self.dep_info_by_scope.clone() } - pub fn scopes_with_node_specifier( - &self, - ) -> &Arc<HashSet<Option<ModuleSpecifier>>> { - &self.scopes_with_node_specifier + pub fn scopes_with_node_specifier(&self) -> HashSet<Option<ModuleSpecifier>> { + self + .dep_info_by_scope + .iter() + .filter(|(_, i)| i.has_node_specifier) + .map(|(s, _)| s.clone()) + .collect::<HashSet<_>>() } /// Return a document for the specifier. 
@@ -1173,6 +1181,7 @@ impl Documents { if let Some(old_doc) = old_doc { self.file_system_docs.get( specifier, + &self.is_cjs_resolver, &self.resolver, &self.config, &self.cache, @@ -1197,6 +1206,7 @@ impl Documents { } else { self.file_system_docs.get( &specifier, + &self.is_cjs_resolver, &self.resolver, &self.config, &self.cache, @@ -1255,12 +1265,15 @@ impl Documents { referrer: &ModuleSpecifier, file_referrer: Option<&ModuleSpecifier>, ) -> Vec<Option<(ModuleSpecifier, MediaType)>> { - let document = self.get(referrer); - let file_referrer = document + let referrer_doc = self.get(referrer); + let file_referrer = referrer_doc .as_ref() .and_then(|d| d.file_referrer()) .or(file_referrer); - let dependencies = document.as_ref().map(|d| d.dependencies()); + let dependencies = referrer_doc.as_ref().map(|d| d.dependencies()); + let referrer_kind = self + .is_cjs_resolver + .get_maybe_doc_module_kind(referrer, referrer_doc.as_deref()); let mut results = Vec::new(); for raw_specifier in raw_specifiers { if raw_specifier.starts_with("asset:") { @@ -1277,31 +1290,35 @@ impl Documents { results.push(self.resolve_dependency( specifier, referrer, + referrer_kind, file_referrer, )); } else if let Some(specifier) = dep.maybe_code.maybe_specifier() { results.push(self.resolve_dependency( specifier, referrer, + referrer_kind, file_referrer, )); } else { results.push(None); } } else if let Ok(specifier) = - self.resolver.as_graph_resolver(file_referrer).resolve( + self.resolver.as_cli_resolver(file_referrer).resolve( raw_specifier, &deno_graph::Range { specifier: referrer.clone(), start: deno_graph::Position::zeroed(), end: deno_graph::Position::zeroed(), }, + referrer_kind, ResolutionMode::Types, ) { results.push(self.resolve_dependency( &specifier, referrer, + referrer_kind, file_referrer, )); } else { @@ -1320,7 +1337,11 @@ impl Documents { ) { self.config = Arc::new(config.clone()); self.cache = Arc::new(cache.clone()); + self.is_cjs_resolver = 
Arc::new(LspIsCjsResolver::new(cache)); self.resolver = resolver.clone(); + + node_resolver::PackageJsonThreadLocalCache::clear(); + { let fs_docs = &self.file_system_docs; // Clean up non-existent documents. @@ -1340,14 +1361,21 @@ impl Documents { if !config.specifier_enabled(doc.specifier()) { continue; } - *doc = doc.with_new_config(self.resolver.clone(), self.config.clone()); + *doc = doc.with_new_config( + &self.is_cjs_resolver, + self.resolver.clone(), + self.config.clone(), + ); } for mut doc in self.file_system_docs.docs.iter_mut() { if !config.specifier_enabled(doc.specifier()) { continue; } - *doc.value_mut() = - doc.with_new_config(self.resolver.clone(), self.config.clone()); + *doc.value_mut() = doc.with_new_config( + &self.is_cjs_resolver, + self.resolver.clone(), + self.config.clone(), + ); } self.open_docs = open_docs; let mut preload_count = 0; @@ -1364,6 +1392,7 @@ impl Documents { { fs_docs.refresh_document( specifier, + &self.is_cjs_resolver, &self.resolver, &self.config, &self.cache, @@ -1379,34 +1408,46 @@ impl Documents { /// Iterate through the documents, building a map where the key is a unique /// document and the value is a set of specifiers that depend on that /// document. 
- fn calculate_npm_reqs_if_dirty(&mut self) { - let mut npm_reqs_by_scope: BTreeMap<_, BTreeSet<_>> = Default::default(); - let mut scopes_with_specifier = HashSet::new(); + fn calculate_dep_info_if_dirty(&mut self) { + let mut dep_info_by_scope: BTreeMap<_, ScopeDepInfo> = Default::default(); let is_fs_docs_dirty = self.file_system_docs.set_dirty(false); if !is_fs_docs_dirty && !self.dirty { return; } let mut visit_doc = |doc: &Arc<Document>| { let scope = doc.scope(); - let reqs = npm_reqs_by_scope.entry(scope.cloned()).or_default(); + let dep_info = dep_info_by_scope.entry(scope.cloned()).or_default(); for dependency in doc.dependencies().values() { - if let Some(dep) = dependency.get_code() { + let code_specifier = dependency.get_code(); + let type_specifier = dependency.get_type(); + if let Some(dep) = code_specifier { if dep.scheme() == "node" { - scopes_with_specifier.insert(scope.cloned()); + dep_info.has_node_specifier = true; } if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) { - reqs.insert(reference.into_inner().req); + dep_info.npm_reqs.insert(reference.into_inner().req); } } - if let Some(dep) = dependency.get_type() { + if let Some(dep) = type_specifier { if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) { - reqs.insert(reference.into_inner().req); + dep_info.npm_reqs.insert(reference.into_inner().req); + } + } + if dependency.maybe_deno_types_specifier.is_some() { + if let (Some(code_specifier), Some(type_specifier)) = + (code_specifier, type_specifier) + { + if MediaType::from_specifier(type_specifier).is_declaration() { + dep_info + .deno_types_to_code_resolutions + .insert(type_specifier.clone(), code_specifier.clone()); + } } } } if let Some(dep) = doc.maybe_types_dependency().maybe_specifier() { if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) { - reqs.insert(reference.into_inner().req); + dep_info.npm_reqs.insert(reference.into_inner().req); } } }; @@ -1417,14 +1458,49 @@ impl Documents { 
visit_doc(doc); } - // fill the reqs from the lockfile for (scope, config_data) in self.config.tree.data_by_scope().as_ref() { + let dep_info = dep_info_by_scope.entry(Some(scope.clone())).or_default(); + (|| { + let config_file = config_data.maybe_deno_json()?; + let jsx_config = + config_file.to_maybe_jsx_import_source_config().ok()??; + let type_specifier = jsx_config.default_types_specifier.as_ref()?; + let code_specifier = jsx_config.default_specifier.as_ref()?; + let cli_resolver = self.resolver.as_cli_resolver(Some(scope)); + let range = deno_graph::Range { + specifier: jsx_config.base_url.clone(), + start: deno_graph::Position::zeroed(), + end: deno_graph::Position::zeroed(), + }; + let type_specifier = cli_resolver + .resolve( + type_specifier, + &range, + // todo(dsherret): this is wrong because it doesn't consider CJS referrers + deno_package_json::NodeModuleKind::Esm, + ResolutionMode::Types, + ) + .ok()?; + let code_specifier = cli_resolver + .resolve( + code_specifier, + &range, + // todo(dsherret): this is wrong because it doesn't consider CJS referrers + deno_package_json::NodeModuleKind::Esm, + ResolutionMode::Execution, + ) + .ok()?; + dep_info + .deno_types_to_code_resolutions + .insert(type_specifier, code_specifier); + Some(()) + })(); + // fill the reqs from the lockfile if let Some(lockfile) = config_data.lockfile.as_ref() { - let reqs = npm_reqs_by_scope.entry(Some(scope.clone())).or_default(); let lockfile = lockfile.lock(); for dep_req in lockfile.content.packages.specifiers.keys() { if dep_req.kind == deno_semver::package::PackageKind::Npm { - reqs.insert(dep_req.req.clone()); + dep_info.npm_reqs.insert(dep_req.req.clone()); } } } @@ -1433,15 +1509,22 @@ impl Documents { // Ensure a @types/node package exists when any module uses a node: specifier. // Unlike on the command line, here we just add @types/node to the npm package // requirements since this won't end up in the lockfile. 
- for scope in &scopes_with_specifier { - let reqs = npm_reqs_by_scope.entry(scope.clone()).or_default(); - if !reqs.iter().any(|r| r.name == "@types/node") { - reqs.insert(PackageReq::from_str("@types/node").unwrap()); + for dep_info in dep_info_by_scope.values_mut() { + if dep_info.has_node_specifier + && !dep_info.npm_reqs.iter().any(|r| r.name == "@types/node") + { + dep_info + .npm_reqs + .insert(PackageReq::from_str("@types/node").unwrap()); } } - self.npm_reqs_by_scope = Arc::new(npm_reqs_by_scope); - self.scopes_with_node_specifier = Arc::new(scopes_with_specifier); + self.dep_info_by_scope = Arc::new( + dep_info_by_scope + .into_iter() + .map(|(s, i)| (s, Arc::new(i))) + .collect(), + ); self.dirty = false; } @@ -1449,6 +1532,7 @@ impl Documents { &self, specifier: &ModuleSpecifier, referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, file_referrer: Option<&ModuleSpecifier>, ) -> Option<(ModuleSpecifier, MediaType)> { if let Some(module_name) = specifier.as_str().strip_prefix("node:") { @@ -1462,10 +1546,12 @@ impl Documents { let mut specifier = specifier.clone(); let mut media_type = None; if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(&specifier) { - let (s, mt) = - self - .resolver - .npm_to_file_url(&npm_ref, referrer, file_referrer)?; + let (s, mt) = self.resolver.npm_to_file_url( + &npm_ref, + referrer, + referrer_kind, + file_referrer, + )?; specifier = s; media_type = Some(mt); } @@ -1475,7 +1561,8 @@ impl Documents { return Some((specifier, media_type)); }; if let Some(types) = doc.maybe_types_dependency().maybe_specifier() { - self.resolve_dependency(types, &specifier, file_referrer) + let specifier_kind = self.is_cjs_resolver.get_doc_module_kind(&doc); + self.resolve_dependency(types, &specifier, specifier_kind, file_referrer) } else { Some((doc.specifier().clone(), doc.media_type())) } @@ -1543,6 +1630,7 @@ fn parse_and_analyze_module( maybe_headers: Option<&HashMap<String, String>>, media_type: MediaType, file_referrer: 
Option<&ModuleSpecifier>, + is_cjs_resolver: &LspIsCjsResolver, resolver: &LspResolver, ) -> (Option<ParsedSourceResult>, Option<ModuleResult>) { let parsed_source_result = parse_source(specifier.clone(), text, media_type); @@ -1551,6 +1639,7 @@ fn parse_and_analyze_module( &parsed_source_result, maybe_headers, file_referrer, + is_cjs_resolver, resolver, ); (Some(parsed_source_result), Some(module_result)) @@ -1561,7 +1650,7 @@ fn parse_source( text: Arc<str>, media_type: MediaType, ) -> ParsedSourceResult { - deno_ast::parse_module(deno_ast::ParseParams { + deno_ast::parse_program(deno_ast::ParseParams { specifier, text, media_type, @@ -1576,11 +1665,26 @@ fn analyze_module( parsed_source_result: &ParsedSourceResult, maybe_headers: Option<&HashMap<String, String>>, file_referrer: Option<&ModuleSpecifier>, + is_cjs_resolver: &LspIsCjsResolver, resolver: &LspResolver, ) -> ModuleResult { match parsed_source_result { Ok(parsed_source) => { let npm_resolver = resolver.create_graph_npm_resolver(file_referrer); + let cli_resolver = resolver.as_cli_resolver(file_referrer); + let config_data = resolver.as_config_data(file_referrer); + let valid_referrer = specifier.clone(); + let jsx_import_source_config = + config_data.and_then(|d| d.maybe_jsx_import_source_config()); + let resolver = SingleReferrerGraphResolver { + valid_referrer: &valid_referrer, + referrer_kind: is_cjs_resolver.get_lsp_referrer_kind( + &specifier, + Some(parsed_source.compute_is_script()), + ), + cli_resolver, + jsx_import_source_config: jsx_import_source_config.as_ref(), + }; Ok(deno_graph::parse_module_from_ast( deno_graph::ParseModuleFromAstOptions { graph_kind: deno_graph::GraphKind::TypesOnly, @@ -1591,7 +1695,7 @@ fn analyze_module( // dynamic imports like import(`./dir/${something}`) in the LSP file_system: &deno_graph::source::NullFileSystem, jsr_url_provider: &CliJsrUrlProvider, - maybe_resolver: Some(resolver.as_graph_resolver(file_referrer)), + maybe_resolver: Some(&resolver), 
maybe_npm_resolver: Some(&npm_resolver), }, )) diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index 8269dc851..2ce26c1f2 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -22,6 +22,7 @@ use deno_semver::jsr::JsrPackageReqReference; use indexmap::Equivalent; use indexmap::IndexSet; use log::error; +use node_resolver::NodeModuleKind; use serde::Deserialize; use serde_json::from_value; use std::collections::BTreeMap; @@ -77,6 +78,7 @@ use super::parent_process_checker; use super::performance::Performance; use super::refactor; use super::registries::ModuleRegistry; +use super::resolver::LspIsCjsResolver; use super::resolver::LspResolver; use super::testing; use super::text; @@ -144,6 +146,7 @@ pub struct StateSnapshot { pub project_version: usize, pub assets: AssetsSnapshot, pub config: Arc<Config>, + pub is_cjs_resolver: Arc<LspIsCjsResolver>, pub documents: Arc<Documents>, pub resolver: Arc<LspResolver>, } @@ -203,6 +206,7 @@ pub struct Inner { pub documents: Documents, http_client_provider: Arc<HttpClientProvider>, initial_cwd: PathBuf, + pub is_cjs_resolver: Arc<LspIsCjsResolver>, jsr_search_api: CliJsrSearchApi, /// Handles module registries, which allow discovery of modules module_registry: ModuleRegistry, @@ -480,6 +484,7 @@ impl Inner { let initial_cwd = std::env::current_dir().unwrap_or_else(|_| { panic!("Could not resolve current working directory") }); + let is_cjs_resolver = Arc::new(LspIsCjsResolver::new(&cache)); Self { assets, @@ -491,6 +496,7 @@ impl Inner { documents, http_client_provider, initial_cwd: initial_cwd.clone(), + is_cjs_resolver, jsr_search_api, project_version: 0, task_queue: Default::default(), @@ -601,6 +607,7 @@ impl Inner { project_version: self.project_version, assets: self.assets.snapshot(), config: Arc::new(self.config.clone()), + is_cjs_resolver: self.is_cjs_resolver.clone(), documents: Arc::new(self.documents.clone()), resolver: self.resolver.snapshot(), }) @@ -622,6 +629,7 @@ impl 
Inner { } }); self.cache = LspCache::new(global_cache_url); + self.is_cjs_resolver = Arc::new(LspIsCjsResolver::new(&self.cache)); let deno_dir = self.cache.deno_dir(); let workspace_settings = self.config.workspace_settings(); let maybe_root_path = self @@ -863,7 +871,10 @@ impl Inner { // We ignore these directories by default because there is a // high likelihood they aren't relevant. Someone can opt-into // them by specifying one of them as an enabled path. - if matches!(dir_name.as_str(), "vendor" | "node_modules" | ".git") { + if matches!( + dir_name.as_str(), + "vendor" | "coverage" | "node_modules" | ".git" + ) { continue; } // ignore cargo target directories for anyone using Deno with Rust @@ -904,7 +915,7 @@ impl Inner { | MediaType::Tsx => {} MediaType::Wasm | MediaType::SourceMap - | MediaType::TsBuildInfo + | MediaType::Css | MediaType::Unknown => { if path.extension().and_then(|s| s.to_str()) != Some("jsonc") { continue; @@ -963,6 +974,11 @@ impl Inner { .tree .refresh(&self.config.settings, &self.workspace_files, &file_fetcher) .await; + self + .client + .send_did_refresh_deno_configuration_tree_notification( + self.config.tree.to_did_refresh_params(), + ); for config_file in self.config.tree.config_files() { (|| { let compiler_options = config_file.to_compiler_options().ok()?.options; @@ -974,7 +990,7 @@ impl Inner { spawn(async move { let specifier = { let inner = ls.inner.read().await; - let resolver = inner.resolver.as_graph_resolver(Some(&referrer)); + let resolver = inner.resolver.as_cli_resolver(Some(&referrer)); let Ok(specifier) = resolver.resolve( &specifier, &deno_graph::Range { @@ -982,6 +998,7 @@ impl Inner { start: deno_graph::Position::zeroed(), end: deno_graph::Position::zeroed(), }, + NodeModuleKind::Esm, deno_graph::source::ResolutionMode::Types, ) else { return; @@ -1019,7 +1036,7 @@ impl Inner { // refresh the npm specifiers because it might have discovered // a @types/node package and now's a good time to do that anyway - 
self.refresh_npm_specifiers().await; + self.refresh_dep_info().await; self.project_changed([], true); } @@ -1065,7 +1082,7 @@ impl Inner { ); if document.is_diagnosable() { self.project_changed([(document.specifier(), ChangeKind::Opened)], false); - self.refresh_npm_specifiers().await; + self.refresh_dep_info().await; self.diagnostics_server.invalidate(&[specifier]); self.send_diagnostics_update(); self.send_testing_update(); @@ -1086,8 +1103,8 @@ impl Inner { Ok(document) => { if document.is_diagnosable() { let old_scopes_with_node_specifier = - self.documents.scopes_with_node_specifier().clone(); - self.refresh_npm_specifiers().await; + self.documents.scopes_with_node_specifier(); + self.refresh_dep_info().await; let mut config_changed = false; if !self .documents @@ -1138,13 +1155,15 @@ impl Inner { })); } - async fn refresh_npm_specifiers(&mut self) { - let package_reqs = self.documents.npm_reqs_by_scope(); + async fn refresh_dep_info(&mut self) { + let dep_info_by_scope = self.documents.dep_info_by_scope(); let resolver = self.resolver.clone(); // spawn due to the lsp's `Send` requirement - spawn(async move { resolver.set_npm_reqs(&package_reqs).await }) - .await - .ok(); + spawn( + async move { resolver.set_dep_info_by_scope(&dep_info_by_scope).await }, + ) + .await + .ok(); } async fn did_close(&mut self, params: DidCloseTextDocumentParams) { @@ -1163,7 +1182,7 @@ impl Inner { .uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File); self.diagnostics_state.clear(&specifier); if self.is_diagnosable(&specifier) { - self.refresh_npm_specifiers().await; + self.refresh_dep_info().await; self.diagnostics_server.invalidate(&[specifier.clone()]); self.send_diagnostics_update(); self.send_testing_update(); @@ -1377,16 +1396,14 @@ impl Inner { .fmt_config_for_specifier(&specifier) .options .clone(); - fmt_options.use_tabs = Some(!params.options.insert_spaces); - fmt_options.indent_width = Some(params.options.tab_size as u8); - let maybe_workspace = self - .config - 
.tree - .data_for_specifier(&specifier) - .map(|d| &d.member_dir.workspace); + let config_data = self.config.tree.data_for_specifier(&specifier); + if !config_data.is_some_and(|d| d.maybe_deno_json().is_some()) { + fmt_options.use_tabs = Some(!params.options.insert_spaces); + fmt_options.indent_width = Some(params.options.tab_size as u8); + } let unstable_options = UnstableFmtOptions { - component: maybe_workspace - .map(|w| w.has_unstable("fmt-component")) + component: config_data + .map(|d| d.unstable.contains("fmt-component")) .unwrap_or(false), }; let document = document.clone(); @@ -1618,6 +1635,10 @@ impl Inner { let file_diagnostics = self .diagnostics_server .get_ts_diagnostics(&specifier, asset_or_doc.document_lsp_version()); + let specifier_kind = asset_or_doc + .document() + .map(|d| self.is_cjs_resolver.get_doc_module_kind(d)) + .unwrap_or(NodeModuleKind::Esm); let mut includes_no_cache = false; for diagnostic in &fixable_diagnostics { match diagnostic.source.as_deref() { @@ -1656,7 +1677,13 @@ impl Inner { .await; for action in actions { code_actions - .add_ts_fix_action(&specifier, &action, diagnostic, self) + .add_ts_fix_action( + &specifier, + specifier_kind, + &action, + diagnostic, + self, + ) .map_err(|err| { error!("Unable to convert fix: {:#}", err); LspError::internal_error() @@ -1802,10 +1829,9 @@ impl Inner { error!("Unable to decode code action data: {:#}", err); LspError::invalid_params("The CodeAction's data is invalid.") })?; - let scope = self - .get_asset_or_document(&code_action_data.specifier) - .ok() - .and_then(|d| d.scope().cloned()); + let maybe_asset_or_doc = + self.get_asset_or_document(&code_action_data.specifier).ok(); + let scope = maybe_asset_or_doc.as_ref().and_then(|d| d.scope().cloned()); let combined_code_actions = self .ts_server .get_combined_code_fix( @@ -1832,8 +1858,13 @@ impl Inner { let changes = if code_action_data.fix_id == "fixMissingImport" { fix_ts_import_changes( &code_action_data.specifier, + 
maybe_asset_or_doc + .as_ref() + .and_then(|d| d.document()) + .map(|d| self.is_cjs_resolver.get_doc_module_kind(d)) + .unwrap_or(NodeModuleKind::Esm), &combined_code_actions.changes, - &self.get_ts_response_import_mapper(&code_action_data.specifier), + self, ) .map_err(|err| { error!("Unable to remap changes: {:#}", err); @@ -1885,8 +1916,12 @@ impl Inner { if kind_suffix == ".rewrite.function.returnType" { refactor_edit_info.edits = fix_ts_import_changes( &action_data.specifier, + asset_or_doc + .document() + .map(|d| self.is_cjs_resolver.get_doc_module_kind(d)) + .unwrap_or(NodeModuleKind::Esm), &refactor_edit_info.edits, - &self.get_ts_response_import_mapper(&action_data.specifier), + self, ) .map_err(|err| { error!("Unable to remap changes: {:#}", err); @@ -1917,7 +1952,8 @@ impl Inner { // todo(dsherret): this should probably just take the resolver itself // as the import map is an implementation detail .and_then(|d| d.resolver.maybe_import_map()), - self.resolver.as_ref(), + &self.resolver, + &self.ts_server.specifier_map, file_referrer, ) } @@ -2233,6 +2269,7 @@ impl Inner { &self.jsr_search_api, &self.npm_search_api, &self.documents, + &self.is_cjs_resolver, self.resolver.as_ref(), self .config @@ -2280,7 +2317,11 @@ impl Inner { .into(), scope.cloned(), ) - .await; + .await + .unwrap_or_else(|err| { + error!("Unable to get completion info from TypeScript: {:#}", err); + None + }); if let Some(completions) = maybe_completion_info { response = Some( @@ -3563,15 +3604,16 @@ impl Inner { if byonm { roots.retain(|s| s.scheme() != "npm"); - } else if let Some(npm_reqs) = self + } else if let Some(dep_info) = self .documents - .npm_reqs_by_scope() + .dep_info_by_scope() .get(&config_data.map(|d| d.scope.as_ref().clone())) { // always include the npm packages since resolution of one npm package // might affect the resolution of other npm packages roots.extend( - npm_reqs + dep_info + .npm_reqs .iter() .map(|req| ModuleSpecifier::parse(&format!("npm:{}", 
req)).unwrap()), ); @@ -3592,9 +3634,8 @@ impl Inner { deno_json_cache: None, pkg_json_cache: None, workspace_cache: None, - config_parse_options: deno_config::deno_json::ConfigParseOptions { - include_task_comments: false, - }, + config_parse_options: + deno_config::deno_json::ConfigParseOptions::default(), additional_config_file_names: &[], discover_pkg_json: !has_flag_env_var("DENO_NO_PACKAGE_JSON"), maybe_vendor_override: if force_global_cache { @@ -3649,7 +3690,7 @@ impl Inner { async fn post_cache(&mut self) { self.resolver.did_cache(); - self.refresh_npm_specifiers().await; + self.refresh_dep_info().await; self.diagnostics_server.invalidate_all(); self.project_changed([], true); self.ts_server.cleanup_semantic_cache(self.snapshot()).await; @@ -3807,7 +3848,7 @@ impl Inner { let maybe_inlay_hints = maybe_inlay_hints.map(|hints| { hints .iter() - .map(|hint| hint.to_lsp(line_index.clone())) + .map(|hint| hint.to_lsp(line_index.clone(), self)) .collect() }); self.performance.measure(mark); @@ -3943,7 +3984,9 @@ mod tests { fn test_walk_workspace() { let temp_dir = TempDir::new(); temp_dir.create_dir_all("root1/vendor/"); + temp_dir.create_dir_all("root1/coverage/"); temp_dir.write("root1/vendor/mod.ts", ""); // no, vendor + temp_dir.write("root1/coverage/mod.ts", ""); // no, coverage temp_dir.create_dir_all("root1/node_modules/"); temp_dir.write("root1/node_modules/mod.ts", ""); // no, node_modules diff --git a/cli/lsp/lsp_custom.rs b/cli/lsp/lsp_custom.rs index 5f485db7a..b570b6d0e 100644 --- a/cli/lsp/lsp_custom.rs +++ b/cli/lsp/lsp_custom.rs @@ -46,6 +46,30 @@ pub struct DiagnosticBatchNotificationParams { pub messages_len: usize, } +#[derive(Debug, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct DenoConfigurationData { + pub scope_uri: lsp::Uri, + pub workspace_root_scope_uri: Option<lsp::Uri>, + pub deno_json: Option<lsp::TextDocumentIdentifier>, + pub package_json: Option<lsp::TextDocumentIdentifier>, +} + +#[derive(Debug, 
Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct DidRefreshDenoConfigurationTreeNotificationParams { + pub data: Vec<DenoConfigurationData>, +} + +pub enum DidRefreshDenoConfigurationTreeNotification {} + +impl lsp::notification::Notification + for DidRefreshDenoConfigurationTreeNotification +{ + type Params = DidRefreshDenoConfigurationTreeNotificationParams; + const METHOD: &'static str = "deno/didRefreshDenoConfigurationTree"; +} + #[derive(Debug, Eq, Hash, PartialEq, Copy, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub enum DenoConfigurationChangeType { @@ -88,13 +112,15 @@ pub struct DidChangeDenoConfigurationNotificationParams { pub changes: Vec<DenoConfigurationChangeEvent>, } +// TODO(nayeemrmn): This is being replaced by +// `DidRefreshDenoConfigurationTreeNotification` for Deno > v2.0.0. Remove it +// soon. pub enum DidChangeDenoConfigurationNotification {} impl lsp::notification::Notification for DidChangeDenoConfigurationNotification { type Params = DidChangeDenoConfigurationNotificationParams; - const METHOD: &'static str = "deno/didChangeDenoConfiguration"; } @@ -102,7 +128,6 @@ pub enum DidUpgradeCheckNotification {} impl lsp::notification::Notification for DidUpgradeCheckNotification { type Params = DidUpgradeCheckNotificationParams; - const METHOD: &'static str = "deno/didUpgradeCheck"; } @@ -125,6 +150,5 @@ pub enum DiagnosticBatchNotification {} impl lsp::notification::Notification for DiagnosticBatchNotification { type Params = DiagnosticBatchNotificationParams; - const METHOD: &'static str = "deno/internalTestDiagnosticBatch"; } diff --git a/cli/lsp/npm.rs b/cli/lsp/npm.rs index 8bdeb7e7d..2decfc342 100644 --- a/cli/lsp/npm.rs +++ b/cli/lsp/npm.rs @@ -4,6 +4,7 @@ use dashmap::DashMap; use deno_core::anyhow::anyhow; use deno_core::error::AnyError; use deno_core::serde_json; +use deno_npm::npm_rc::NpmRc; use deno_semver::package::PackageNv; use deno_semver::Version; use serde::Deserialize; @@ -25,7 
+26,10 @@ pub struct CliNpmSearchApi { impl CliNpmSearchApi { pub fn new(file_fetcher: Arc<FileFetcher>) -> Self { - let resolver = NpmFetchResolver::new(file_fetcher.clone()); + let resolver = NpmFetchResolver::new( + file_fetcher.clone(), + Arc::new(NpmRc::default().as_resolved(npm_registry_url()).unwrap()), + ); Self { file_fetcher, resolver, diff --git a/cli/lsp/parent_process_checker.rs b/cli/lsp/parent_process_checker.rs index e5b2b2f23..b8a42cd1a 100644 --- a/cli/lsp/parent_process_checker.rs +++ b/cli/lsp/parent_process_checker.rs @@ -11,7 +11,7 @@ pub fn start(parent_process_id: u32) { std::thread::sleep(Duration::from_secs(10)); if !is_process_active(parent_process_id) { - std::process::exit(1); + deno_runtime::exit(1); } }); } diff --git a/cli/lsp/registries.rs b/cli/lsp/registries.rs index 5f7ce0082..ade353e68 100644 --- a/cli/lsp/registries.rs +++ b/cli/lsp/registries.rs @@ -482,6 +482,7 @@ impl ModuleRegistry { .fetch_with_options(FetchOptions { specifier: &specifier, permissions: FetchPermissionsOptionRef::AllowAll, + maybe_auth: None, maybe_accept: Some("application/vnd.deno.reg.v2+json, application/vnd.deno.reg.v1+json;q=0.9, application/json;q=0.8"), maybe_cache_setting: None, }) diff --git a/cli/lsp/repl.rs b/cli/lsp/repl.rs index fa5809045..b4aaa8cd0 100644 --- a/cli/lsp/repl.rs +++ b/cli/lsp/repl.rs @@ -263,7 +263,7 @@ impl ReplLanguageServer { } fn get_document_uri(&self) -> Uri { - uri_parse_unencoded(self.cwd_uri.join("$deno$repl.ts").unwrap().as_str()) + uri_parse_unencoded(self.cwd_uri.join("$deno$repl.mts").unwrap().as_str()) .unwrap() } } diff --git a/cli/lsp/resolver.rs b/cli/lsp/resolver.rs index c89273147..4ede79922 100644 --- a/cli/lsp/resolver.rs +++ b/cli/lsp/resolver.rs @@ -2,27 +2,36 @@ use dashmap::DashMap; use deno_ast::MediaType; +use deno_cache_dir::npm::NpmCacheDir; use deno_cache_dir::HttpCache; +use deno_config::deno_json::JsxImportSourceConfig; use deno_config::workspace::PackageJsonDepResolution; use 
deno_config::workspace::WorkspaceResolver; +use deno_core::parking_lot::Mutex; use deno_core::url::Url; -use deno_graph::source::Resolver; +use deno_graph::source::ResolutionMode; use deno_graph::GraphImport; use deno_graph::ModuleSpecifier; +use deno_graph::Range; use deno_npm::NpmSystemInfo; +use deno_path_util::url_from_directory_path; use deno_path_util::url_to_file_path; +use deno_resolver::npm::NpmReqResolverOptions; +use deno_resolver::DenoResolverOptions; +use deno_resolver::NodeAndNpmReqResolver; use deno_runtime::deno_fs; use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_node::PackageJson; +use deno_runtime::deno_node::PackageJsonResolver; use deno_semver::jsr::JsrPackageReqReference; use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; use indexmap::IndexMap; use node_resolver::errors::ClosestPkgJsonError; -use node_resolver::NodeResolution; +use node_resolver::InNpmPackageChecker; +use node_resolver::NodeModuleKind; use node_resolver::NodeResolutionMode; -use node_resolver::NpmResolver; use std::borrow::Cow; use std::collections::BTreeMap; use std::collections::BTreeSet; @@ -31,11 +40,14 @@ use std::collections::HashSet; use std::sync::Arc; use super::cache::LspCache; +use super::documents::Document; use super::jsr::JsrCacheResolver; use crate::args::create_default_npmrc; use crate::args::CacheSetting; use crate::args::CliLockfile; use crate::args::NpmInstallDepsProvider; +use crate::cache::DenoCacheEnvFsAdapter; +use crate::factory::Deferred; use crate::graph_util::CliJsrUrlProvider; use crate::http_util::HttpClientProvider; use crate::lsp::config::Config; @@ -43,40 +55,56 @@ use crate::lsp::config::ConfigData; use crate::lsp::logging::lsp_warn; use crate::npm::create_cli_npm_resolver_for_lsp; use crate::npm::CliByonmNpmResolverCreateOptions; +use crate::npm::CliManagedInNpmPkgCheckerCreateOptions; +use crate::npm::CliManagedNpmResolverCreateOptions; use 
crate::npm::CliNpmResolver; use crate::npm::CliNpmResolverCreateOptions; -use crate::npm::CliNpmResolverManagedCreateOptions; use crate::npm::CliNpmResolverManagedSnapshotOption; +use crate::npm::CreateInNpmPkgCheckerOptions; use crate::npm::ManagedCliNpmResolver; -use crate::resolver::CjsResolutionStore; +use crate::resolver::CliDenoResolver; use crate::resolver::CliDenoResolverFs; -use crate::resolver::CliGraphResolver; -use crate::resolver::CliGraphResolverOptions; -use crate::resolver::CliNodeResolver; +use crate::resolver::CliNpmReqResolver; +use crate::resolver::CliResolver; +use crate::resolver::CliResolverOptions; +use crate::resolver::IsCjsResolver; use crate::resolver::WorkerCliNpmGraphResolver; +use crate::tsc::into_specifier_and_media_type; +use crate::util::fs::canonicalize_path_maybe_not_exists; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; #[derive(Debug, Clone)] struct LspScopeResolver { - graph_resolver: Arc<CliGraphResolver>, + resolver: Arc<CliResolver>, + in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>, jsr_resolver: Option<Arc<JsrCacheResolver>>, npm_resolver: Option<Arc<dyn CliNpmResolver>>, - node_resolver: Option<Arc<CliNodeResolver>>, + node_resolver: Option<Arc<NodeResolver>>, + npm_pkg_req_resolver: Option<Arc<CliNpmReqResolver>>, + pkg_json_resolver: Arc<PackageJsonResolver>, redirect_resolver: Option<Arc<RedirectResolver>>, graph_imports: Arc<IndexMap<ModuleSpecifier, GraphImport>>, + dep_info: Arc<Mutex<Arc<ScopeDepInfo>>>, + package_json_deps_by_resolution: Arc<IndexMap<ModuleSpecifier, String>>, config_data: Option<Arc<ConfigData>>, } impl Default for LspScopeResolver { fn default() -> Self { + let factory = ResolverFactory::new(None); Self { - graph_resolver: create_graph_resolver(None, None, None), + resolver: factory.cli_resolver().clone(), + in_npm_pkg_checker: factory.in_npm_pkg_checker().clone(), jsr_resolver: None, npm_resolver: None, node_resolver: None, + npm_pkg_req_resolver: 
None, + pkg_json_resolver: factory.pkg_json_resolver().clone(), redirect_resolver: None, graph_imports: Default::default(), + dep_info: Default::default(), + package_json_deps_by_resolution: Default::default(), config_data: None, } } @@ -88,22 +116,16 @@ impl LspScopeResolver { cache: &LspCache, http_client_provider: Option<&Arc<HttpClientProvider>>, ) -> Self { - let mut npm_resolver = None; - let mut node_resolver = None; - if let Some(http_client) = http_client_provider { - npm_resolver = create_npm_resolver( - config_data.map(|d| d.as_ref()), - cache, - http_client, - ) - .await; - node_resolver = create_node_resolver(npm_resolver.as_ref()); + let mut factory = ResolverFactory::new(config_data); + if let Some(http_client_provider) = http_client_provider { + factory.init_npm_resolver(http_client_provider, cache).await; } - let graph_resolver = create_graph_resolver( - config_data.map(|d| d.as_ref()), - npm_resolver.as_ref(), - node_resolver.as_ref(), - ); + let in_npm_pkg_checker = factory.in_npm_pkg_checker().clone(); + let npm_resolver = factory.npm_resolver().cloned(); + let node_resolver = factory.node_resolver().cloned(); + let npm_pkg_req_resolver = factory.npm_pkg_req_resolver().cloned(); + let cli_resolver = factory.cli_resolver().clone(); + let pkg_json_resolver = factory.pkg_json_resolver().clone(); let jsr_resolver = Some(Arc::new(JsrCacheResolver::new( cache.for_specifier(config_data.map(|d| d.scope.as_ref())), config_data.map(|d| d.as_ref()), @@ -112,7 +134,9 @@ impl LspScopeResolver { cache.for_specifier(config_data.map(|d| d.scope.as_ref())), config_data.and_then(|d| d.lockfile.clone()), ))); - let npm_graph_resolver = graph_resolver.create_graph_npm_resolver(); + let npm_graph_resolver = cli_resolver.create_graph_npm_resolver(); + let maybe_jsx_import_source_config = + config_data.and_then(|d| d.maybe_jsx_import_source_config()); let graph_imports = config_data .and_then(|d| d.member_dir.workspace.to_compiler_option_types().ok()) .map(|imports| { 
@@ -120,11 +144,18 @@ impl LspScopeResolver { imports .into_iter() .map(|(referrer, imports)| { + let resolver = SingleReferrerGraphResolver { + valid_referrer: &referrer, + referrer_kind: NodeModuleKind::Esm, + cli_resolver: &cli_resolver, + jsx_import_source_config: maybe_jsx_import_source_config + .as_ref(), + }; let graph_import = GraphImport::new( &referrer, imports, &CliJsrUrlProvider, - Some(graph_resolver.as_ref()), + Some(&resolver), Some(&npm_graph_resolver), ); (referrer, graph_import) @@ -133,33 +164,81 @@ impl LspScopeResolver { ) }) .unwrap_or_default(); + let package_json_deps_by_resolution = (|| { + let npm_pkg_req_resolver = npm_pkg_req_resolver.as_ref()?; + let package_json = config_data?.maybe_pkg_json()?; + let referrer = package_json.specifier(); + let dependencies = package_json.dependencies.as_ref()?; + let result = dependencies + .iter() + .flat_map(|(name, _)| { + let req_ref = + NpmPackageReqReference::from_str(&format!("npm:{name}")).ok()?; + let specifier = into_specifier_and_media_type(Some( + npm_pkg_req_resolver + .resolve_req_reference( + &req_ref, + &referrer, + // todo(dsherret): this is wrong because it doesn't consider CJS referrers + NodeModuleKind::Esm, + NodeResolutionMode::Types, + ) + .or_else(|_| { + npm_pkg_req_resolver.resolve_req_reference( + &req_ref, + &referrer, + // todo(dsherret): this is wrong because it doesn't consider CJS referrers + NodeModuleKind::Esm, + NodeResolutionMode::Execution, + ) + }) + .ok()?, + )) + .0; + Some((specifier, name.clone())) + }) + .collect(); + Some(result) + })(); + let package_json_deps_by_resolution = + Arc::new(package_json_deps_by_resolution.unwrap_or_default()); Self { - graph_resolver, + resolver: cli_resolver, + in_npm_pkg_checker, jsr_resolver, + npm_pkg_req_resolver, npm_resolver, node_resolver, + pkg_json_resolver, redirect_resolver, graph_imports, + dep_info: Default::default(), + package_json_deps_by_resolution, config_data: config_data.cloned(), } } fn snapshot(&self) -> 
Arc<Self> { + let mut factory = ResolverFactory::new(self.config_data.as_ref()); let npm_resolver = self.npm_resolver.as_ref().map(|r| r.clone_snapshotted()); - let node_resolver = create_node_resolver(npm_resolver.as_ref()); - let graph_resolver = create_graph_resolver( - self.config_data.as_deref(), - npm_resolver.as_ref(), - node_resolver.as_ref(), - ); + if let Some(npm_resolver) = &npm_resolver { + factory.set_npm_resolver(npm_resolver.clone()); + } Arc::new(Self { - graph_resolver, + resolver: factory.cli_resolver().clone(), + in_npm_pkg_checker: factory.in_npm_pkg_checker().clone(), jsr_resolver: self.jsr_resolver.clone(), - npm_resolver, - node_resolver, + npm_pkg_req_resolver: factory.npm_pkg_req_resolver().cloned(), + npm_resolver: factory.npm_resolver().cloned(), + node_resolver: factory.node_resolver().cloned(), redirect_resolver: self.redirect_resolver.clone(), + pkg_json_resolver: factory.pkg_json_resolver().clone(), graph_imports: self.graph_imports.clone(), + dep_info: self.dep_info.clone(), + package_json_deps_by_resolution: self + .package_json_deps_by_resolution + .clone(), config_data: self.config_data.clone(), }) } @@ -223,19 +302,24 @@ impl LspResolver { } } - pub async fn set_npm_reqs( + pub async fn set_dep_info_by_scope( &self, - reqs: &BTreeMap<Option<ModuleSpecifier>, BTreeSet<PackageReq>>, + dep_info_by_scope: &Arc< + BTreeMap<Option<ModuleSpecifier>, Arc<ScopeDepInfo>>, + >, ) { for (scope, resolver) in [(None, &self.unscoped)] .into_iter() .chain(self.by_scope.iter().map(|(s, r)| (Some(s), r))) { + let dep_info = dep_info_by_scope.get(&scope.cloned()); + if let Some(dep_info) = dep_info { + *resolver.dep_info.lock() = dep_info.clone(); + } if let Some(npm_resolver) = resolver.npm_resolver.as_ref() { if let Some(npm_resolver) = npm_resolver.as_managed() { - let reqs = reqs - .get(&scope.cloned()) - .map(|reqs| reqs.iter().cloned().collect::<Vec<_>>()) + let reqs = dep_info + .map(|i| i.npm_reqs.iter().cloned().collect::<Vec<_>>()) 
.unwrap_or_default(); if let Err(err) = npm_resolver.set_package_reqs(&reqs).await { lsp_warn!("Could not set npm package requirements: {:#}", err); @@ -245,12 +329,12 @@ impl LspResolver { } } - pub fn as_graph_resolver( + pub fn as_cli_resolver( &self, file_referrer: Option<&ModuleSpecifier>, - ) -> &dyn Resolver { + ) -> &CliResolver { let resolver = self.get_scope_resolver(file_referrer); - resolver.graph_resolver.as_ref() + resolver.resolver.as_ref() } pub fn create_graph_npm_resolver( @@ -258,7 +342,23 @@ impl LspResolver { file_referrer: Option<&ModuleSpecifier>, ) -> WorkerCliNpmGraphResolver { let resolver = self.get_scope_resolver(file_referrer); - resolver.graph_resolver.create_graph_npm_resolver() + resolver.resolver.create_graph_npm_resolver() + } + + pub fn as_config_data( + &self, + file_referrer: Option<&ModuleSpecifier>, + ) -> Option<&Arc<ConfigData>> { + let resolver = self.get_scope_resolver(file_referrer); + resolver.config_data.as_ref() + } + + pub fn in_npm_pkg_checker( + &self, + file_referrer: Option<&ModuleSpecifier>, + ) -> &Arc<dyn InNpmPackageChecker> { + let resolver = self.get_scope_resolver(file_referrer); + &resolver.in_npm_pkg_checker } pub fn maybe_managed_npm_resolver( @@ -324,17 +424,48 @@ impl LspResolver { &self, req_ref: &NpmPackageReqReference, referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, file_referrer: Option<&ModuleSpecifier>, ) -> Option<(ModuleSpecifier, MediaType)> { let resolver = self.get_scope_resolver(file_referrer); - let node_resolver = resolver.node_resolver.as_ref()?; - Some(NodeResolution::into_specifier_and_media_type(Some( - node_resolver - .resolve_req_reference(req_ref, referrer, NodeResolutionMode::Types) + let npm_pkg_req_resolver = resolver.npm_pkg_req_resolver.as_ref()?; + Some(into_specifier_and_media_type(Some( + npm_pkg_req_resolver + .resolve_req_reference( + req_ref, + referrer, + referrer_kind, + NodeResolutionMode::Types, + ) .ok()?, ))) } + pub fn file_url_to_package_json_dep( + 
&self, + specifier: &ModuleSpecifier, + file_referrer: Option<&ModuleSpecifier>, + ) -> Option<String> { + let resolver = self.get_scope_resolver(file_referrer); + resolver + .package_json_deps_by_resolution + .get(specifier) + .cloned() + } + + pub fn deno_types_to_code_resolution( + &self, + specifier: &ModuleSpecifier, + file_referrer: Option<&ModuleSpecifier>, + ) -> Option<ModuleSpecifier> { + let resolver = self.get_scope_resolver(file_referrer); + let dep_info = resolver.dep_info.lock().clone(); + dep_info + .deno_types_to_code_resolutions + .get(specifier) + .cloned() + } + pub fn in_node_modules(&self, specifier: &ModuleSpecifier) -> bool { fn has_node_modules_dir(specifier: &ModuleSpecifier) -> bool { // consider any /node_modules/ directory as being in the node_modules @@ -346,14 +477,10 @@ impl LspResolver { .contains("/node_modules/") } - let global_npm_resolver = self - .get_scope_resolver(Some(specifier)) - .npm_resolver - .as_ref() - .and_then(|npm_resolver| npm_resolver.as_managed()) - .filter(|r| r.root_node_modules_path().is_none()); - if let Some(npm_resolver) = &global_npm_resolver { - if npm_resolver.in_npm_package(specifier) { + if let Some(node_resolver) = + &self.get_scope_resolver(Some(specifier)).node_resolver + { + if node_resolver.in_npm_package(specifier) { return true; } } @@ -361,31 +488,22 @@ impl LspResolver { has_node_modules_dir(specifier) } - pub fn node_media_type( - &self, - specifier: &ModuleSpecifier, - ) -> Option<MediaType> { - let resolver = self.get_scope_resolver(Some(specifier)); - let node_resolver = resolver.node_resolver.as_ref()?; - let resolution = node_resolver - .url_to_node_resolution(specifier.clone()) - .ok()?; - Some(NodeResolution::into_specifier_and_media_type(Some(resolution)).1) - } - pub fn is_bare_package_json_dep( &self, specifier_text: &str, referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, ) -> bool { let resolver = self.get_scope_resolver(Some(referrer)); - let Some(node_resolver) = 
resolver.node_resolver.as_ref() else { + let Some(npm_pkg_req_resolver) = resolver.npm_pkg_req_resolver.as_ref() + else { return false; }; - node_resolver + npm_pkg_req_resolver .resolve_if_for_npm_pkg( specifier_text, referrer, + referrer_kind, NodeResolutionMode::Types, ) .ok() @@ -398,10 +516,9 @@ impl LspResolver { referrer: &ModuleSpecifier, ) -> Result<Option<Arc<PackageJson>>, ClosestPkgJsonError> { let resolver = self.get_scope_resolver(Some(referrer)); - let Some(node_resolver) = resolver.node_resolver.as_ref() else { - return Ok(None); - }; - node_resolver.get_closest_package_json(referrer) + resolver + .pkg_json_resolver + .get_closest_package_json(referrer) } pub fn resolve_redirects( @@ -453,113 +570,213 @@ impl LspResolver { } } -async fn create_npm_resolver( - config_data: Option<&ConfigData>, - cache: &LspCache, - http_client_provider: &Arc<HttpClientProvider>, -) -> Option<Arc<dyn CliNpmResolver>> { - let enable_byonm = config_data.map(|d| d.byonm).unwrap_or(false); - let options = if enable_byonm { - CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions { - fs: CliDenoResolverFs(Arc::new(deno_fs::RealFs)), - root_node_modules_dir: config_data.and_then(|config_data| { - config_data.node_modules_dir.clone().or_else(|| { - url_to_file_path(&config_data.scope) - .ok() - .map(|p| p.join("node_modules/")) - }) - }), - }) - } else { - CliNpmResolverCreateOptions::Managed(CliNpmResolverManagedCreateOptions { - http_client_provider: http_client_provider.clone(), - snapshot: match config_data.and_then(|d| d.lockfile.as_ref()) { - Some(lockfile) => { - CliNpmResolverManagedSnapshotOption::ResolveFromLockfile( - lockfile.clone(), - ) - } - None => CliNpmResolverManagedSnapshotOption::Specified(None), - }, - // Don't provide the lockfile. We don't want these resolvers - // updating it. Only the cache request should update the lockfile. 
- maybe_lockfile: None, - fs: Arc::new(deno_fs::RealFs), - npm_global_cache_dir: cache.deno_dir().npm_folder_path(), - // Use an "only" cache setting in order to make the - // user do an explicit "cache" command and prevent - // the cache from being filled with lots of packages while - // the user is typing. - cache_setting: CacheSetting::Only, - text_only_progress_bar: ProgressBar::new(ProgressBarStyle::TextOnly), - maybe_node_modules_path: config_data - .and_then(|d| d.node_modules_dir.clone()), - // only used for top level install, so we can ignore this - npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::empty()), - npmrc: config_data - .and_then(|d| d.npmrc.clone()) - .unwrap_or_else(create_default_npmrc), - npm_system_info: NpmSystemInfo::default(), - lifecycle_scripts: Default::default(), - }) - }; - Some(create_cli_npm_resolver_for_lsp(options).await) +#[derive(Debug, Default, Clone)] +pub struct ScopeDepInfo { + pub deno_types_to_code_resolutions: HashMap<ModuleSpecifier, ModuleSpecifier>, + pub npm_reqs: BTreeSet<PackageReq>, + pub has_node_specifier: bool, } -fn create_node_resolver( - npm_resolver: Option<&Arc<dyn CliNpmResolver>>, -) -> Option<Arc<CliNodeResolver>> { - use once_cell::sync::Lazy; - - // it's not ideal to share this across all scopes and to - // never clear it, but it's fine for the time being - static CJS_RESOLUTIONS: Lazy<Arc<CjsResolutionStore>> = - Lazy::new(Default::default); - - let npm_resolver = npm_resolver?; - let fs = Arc::new(deno_fs::RealFs); - let node_resolver_inner = Arc::new(NodeResolver::new( - deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), - npm_resolver.clone().into_npm_resolver(), - )); - Some(Arc::new(CliNodeResolver::new( - CJS_RESOLUTIONS.clone(), - fs, - node_resolver_inner, - npm_resolver.clone(), - ))) +#[derive(Default)] +struct ResolverFactoryServices { + cli_resolver: Deferred<Arc<CliResolver>>, + in_npm_pkg_checker: Deferred<Arc<dyn InNpmPackageChecker>>, + node_resolver: 
Deferred<Option<Arc<NodeResolver>>>, + npm_pkg_req_resolver: Deferred<Option<Arc<CliNpmReqResolver>>>, + npm_resolver: Option<Arc<dyn CliNpmResolver>>, } -fn create_graph_resolver( - config_data: Option<&ConfigData>, - npm_resolver: Option<&Arc<dyn CliNpmResolver>>, - node_resolver: Option<&Arc<CliNodeResolver>>, -) -> Arc<CliGraphResolver> { - let workspace = config_data.map(|d| &d.member_dir.workspace); - Arc::new(CliGraphResolver::new(CliGraphResolverOptions { - node_resolver: node_resolver.cloned(), - npm_resolver: npm_resolver.cloned(), - workspace_resolver: config_data.map(|d| d.resolver.clone()).unwrap_or_else( - || { - Arc::new(WorkspaceResolver::new_raw( - // this is fine because this is only used before initialization - Arc::new(ModuleSpecifier::parse("file:///").unwrap()), - None, - Vec::new(), - Vec::new(), - PackageJsonDepResolution::Disabled, - )) - }, - ), - maybe_jsx_import_source_config: workspace.and_then(|workspace| { - workspace.to_maybe_jsx_import_source_config().ok().flatten() - }), - maybe_vendor_dir: config_data.and_then(|d| d.vendor_dir.as_ref()), - bare_node_builtins_enabled: workspace - .is_some_and(|workspace| workspace.has_unstable("bare-node-builtins")), - sloppy_imports_resolver: config_data - .and_then(|d| d.sloppy_imports_resolver.clone()), - })) +struct ResolverFactory<'a> { + config_data: Option<&'a Arc<ConfigData>>, + fs: Arc<dyn deno_fs::FileSystem>, + pkg_json_resolver: Arc<PackageJsonResolver>, + services: ResolverFactoryServices, +} + +impl<'a> ResolverFactory<'a> { + pub fn new(config_data: Option<&'a Arc<ConfigData>>) -> Self { + let fs = Arc::new(deno_fs::RealFs); + let pkg_json_resolver = Arc::new(PackageJsonResolver::new( + deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), + )); + Self { + config_data, + fs, + pkg_json_resolver, + services: Default::default(), + } + } + + async fn init_npm_resolver( + &mut self, + http_client_provider: &Arc<HttpClientProvider>, + cache: &LspCache, + ) { + let enable_byonm 
= self.config_data.map(|d| d.byonm).unwrap_or(false); + let options = if enable_byonm { + CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions { + fs: CliDenoResolverFs(Arc::new(deno_fs::RealFs)), + pkg_json_resolver: self.pkg_json_resolver.clone(), + root_node_modules_dir: self.config_data.and_then(|config_data| { + config_data.node_modules_dir.clone().or_else(|| { + url_to_file_path(&config_data.scope) + .ok() + .map(|p| p.join("node_modules/")) + }) + }), + }) + } else { + let npmrc = self + .config_data + .and_then(|d| d.npmrc.clone()) + .unwrap_or_else(create_default_npmrc); + let npm_cache_dir = Arc::new(NpmCacheDir::new( + &DenoCacheEnvFsAdapter(self.fs.as_ref()), + cache.deno_dir().npm_folder_path(), + npmrc.get_all_known_registries_urls(), + )); + CliNpmResolverCreateOptions::Managed(CliManagedNpmResolverCreateOptions { + http_client_provider: http_client_provider.clone(), + snapshot: match self.config_data.and_then(|d| d.lockfile.as_ref()) { + Some(lockfile) => { + CliNpmResolverManagedSnapshotOption::ResolveFromLockfile( + lockfile.clone(), + ) + } + None => CliNpmResolverManagedSnapshotOption::Specified(None), + }, + // Don't provide the lockfile. We don't want these resolvers + // updating it. Only the cache request should update the lockfile. + maybe_lockfile: None, + fs: Arc::new(deno_fs::RealFs), + npm_cache_dir, + // Use an "only" cache setting in order to make the + // user do an explicit "cache" command and prevent + // the cache from being filled with lots of packages while + // the user is typing. 
+ cache_setting: CacheSetting::Only, + text_only_progress_bar: ProgressBar::new(ProgressBarStyle::TextOnly), + maybe_node_modules_path: self + .config_data + .and_then(|d| d.node_modules_dir.clone()), + // only used for top level install, so we can ignore this + npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::empty()), + npmrc, + npm_system_info: NpmSystemInfo::default(), + lifecycle_scripts: Default::default(), + }) + }; + self.set_npm_resolver(create_cli_npm_resolver_for_lsp(options).await); + } + + pub fn set_npm_resolver(&mut self, npm_resolver: Arc<dyn CliNpmResolver>) { + self.services.npm_resolver = Some(npm_resolver); + } + + pub fn npm_resolver(&self) -> Option<&Arc<dyn CliNpmResolver>> { + self.services.npm_resolver.as_ref() + } + + pub fn cli_resolver(&self) -> &Arc<CliResolver> { + self.services.cli_resolver.get_or_init(|| { + let npm_req_resolver = self.npm_pkg_req_resolver().cloned(); + let deno_resolver = Arc::new(CliDenoResolver::new(DenoResolverOptions { + in_npm_pkg_checker: self.in_npm_pkg_checker().clone(), + node_and_req_resolver: match (self.node_resolver(), npm_req_resolver) { + (Some(node_resolver), Some(npm_req_resolver)) => { + Some(NodeAndNpmReqResolver { + node_resolver: node_resolver.clone(), + npm_req_resolver, + }) + } + _ => None, + }, + sloppy_imports_resolver: self + .config_data + .and_then(|d| d.sloppy_imports_resolver.clone()), + workspace_resolver: self + .config_data + .map(|d| d.resolver.clone()) + .unwrap_or_else(|| { + Arc::new(WorkspaceResolver::new_raw( + // this is fine because this is only used before initialization + Arc::new(ModuleSpecifier::parse("file:///").unwrap()), + None, + Vec::new(), + Vec::new(), + PackageJsonDepResolution::Disabled, + )) + }), + is_byonm: self.config_data.map(|d| d.byonm).unwrap_or(false), + maybe_vendor_dir: self.config_data.and_then(|d| d.vendor_dir.as_ref()), + })); + Arc::new(CliResolver::new(CliResolverOptions { + deno_resolver, + npm_resolver: self.npm_resolver().cloned(), 
+ bare_node_builtins_enabled: self + .config_data + .is_some_and(|d| d.unstable.contains("bare-node-builtins")), + })) + }) + } + + pub fn pkg_json_resolver(&self) -> &Arc<PackageJsonResolver> { + &self.pkg_json_resolver + } + + pub fn in_npm_pkg_checker(&self) -> &Arc<dyn InNpmPackageChecker> { + self.services.in_npm_pkg_checker.get_or_init(|| { + crate::npm::create_in_npm_pkg_checker( + match self.services.npm_resolver.as_ref().map(|r| r.as_inner()) { + Some(crate::npm::InnerCliNpmResolverRef::Byonm(_)) | None => { + CreateInNpmPkgCheckerOptions::Byonm + } + Some(crate::npm::InnerCliNpmResolverRef::Managed(m)) => { + CreateInNpmPkgCheckerOptions::Managed( + CliManagedInNpmPkgCheckerCreateOptions { + root_cache_dir_url: m.global_cache_root_url(), + maybe_node_modules_path: m.maybe_node_modules_path(), + }, + ) + } + }, + ) + }) + } + + pub fn node_resolver(&self) -> Option<&Arc<NodeResolver>> { + self + .services + .node_resolver + .get_or_init(|| { + let npm_resolver = self.services.npm_resolver.as_ref()?; + Some(Arc::new(NodeResolver::new( + deno_runtime::deno_node::DenoFsNodeResolverEnv::new(self.fs.clone()), + self.in_npm_pkg_checker().clone(), + npm_resolver.clone().into_npm_pkg_folder_resolver(), + self.pkg_json_resolver.clone(), + ))) + }) + .as_ref() + } + + pub fn npm_pkg_req_resolver(&self) -> Option<&Arc<CliNpmReqResolver>> { + self + .services + .npm_pkg_req_resolver + .get_or_init(|| { + let node_resolver = self.node_resolver()?; + let npm_resolver = self.npm_resolver()?; + Some(Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions { + byonm_resolver: (npm_resolver.clone()).into_maybe_byonm(), + fs: CliDenoResolverFs(self.fs.clone()), + in_npm_pkg_checker: self.in_npm_pkg_checker().clone(), + node_resolver: node_resolver.clone(), + npm_req_resolver: npm_resolver.clone().into_npm_req_resolver(), + }))) + }) + .as_ref() + } } #[derive(Debug, Eq, PartialEq)] @@ -586,6 +803,141 @@ impl std::fmt::Debug for RedirectResolver { } } +#[derive(Debug)] +pub 
struct LspIsCjsResolver { + inner: IsCjsResolver, +} + +impl Default for LspIsCjsResolver { + fn default() -> Self { + LspIsCjsResolver::new(&Default::default()) + } +} + +impl LspIsCjsResolver { + pub fn new(cache: &LspCache) -> Self { + #[derive(Debug)] + struct LspInNpmPackageChecker { + global_cache_dir: ModuleSpecifier, + } + + impl LspInNpmPackageChecker { + pub fn new(cache: &LspCache) -> Self { + let npm_folder_path = cache.deno_dir().npm_folder_path(); + Self { + global_cache_dir: url_from_directory_path( + &canonicalize_path_maybe_not_exists(&npm_folder_path) + .unwrap_or(npm_folder_path), + ) + .unwrap_or_else(|_| { + ModuleSpecifier::parse("file:///invalid/").unwrap() + }), + } + } + } + + impl InNpmPackageChecker for LspInNpmPackageChecker { + fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool { + if specifier.scheme() != "file" { + return false; + } + if specifier + .as_str() + .starts_with(self.global_cache_dir.as_str()) + { + return true; + } + specifier.as_str().contains("/node_modules/") + } + } + + let fs = Arc::new(deno_fs::RealFs); + let pkg_json_resolver = Arc::new(PackageJsonResolver::new( + deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), + )); + + LspIsCjsResolver { + inner: IsCjsResolver::new( + Arc::new(LspInNpmPackageChecker::new(cache)), + pkg_json_resolver, + crate::resolver::IsCjsResolverOptions { + detect_cjs: true, + is_node_main: false, + }, + ), + } + } + + pub fn get_maybe_doc_module_kind( + &self, + specifier: &ModuleSpecifier, + maybe_document: Option<&Document>, + ) -> NodeModuleKind { + self.get_lsp_referrer_kind( + specifier, + maybe_document.and_then(|d| d.is_script()), + ) + } + + pub fn get_doc_module_kind(&self, document: &Document) -> NodeModuleKind { + self.get_lsp_referrer_kind(document.specifier(), document.is_script()) + } + + pub fn get_lsp_referrer_kind( + &self, + specifier: &ModuleSpecifier, + is_script: Option<bool>, + ) -> NodeModuleKind { + 
self.inner.get_lsp_referrer_kind(specifier, is_script) + } +} + +#[derive(Debug)] +pub struct SingleReferrerGraphResolver<'a> { + pub valid_referrer: &'a ModuleSpecifier, + pub referrer_kind: NodeModuleKind, + pub cli_resolver: &'a CliResolver, + pub jsx_import_source_config: Option<&'a JsxImportSourceConfig>, +} + +impl<'a> deno_graph::source::Resolver for SingleReferrerGraphResolver<'a> { + fn default_jsx_import_source(&self) -> Option<String> { + self + .jsx_import_source_config + .and_then(|c| c.default_specifier.clone()) + } + + fn default_jsx_import_source_types(&self) -> Option<String> { + self + .jsx_import_source_config + .and_then(|c| c.default_types_specifier.clone()) + } + + fn jsx_import_source_module(&self) -> &str { + self + .jsx_import_source_config + .map(|c| c.module.as_str()) + .unwrap_or(deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE) + } + + fn resolve( + &self, + specifier_text: &str, + referrer_range: &Range, + mode: ResolutionMode, + ) -> Result<ModuleSpecifier, deno_graph::source::ResolveError> { + // this resolver assumes it will only be used with a single referrer + // with the provided referrer kind + debug_assert_eq!(referrer_range.specifier, *self.valid_referrer); + self.cli_resolver.resolve( + specifier_text, + referrer_range, + self.referrer_kind, + mode, + ) + } +} + impl RedirectResolver { fn new( cache: Arc<dyn HttpCache>, diff --git a/cli/lsp/testing/collectors.rs b/cli/lsp/testing/collectors.rs index 2f2ddb877..2dd7ec0d9 100644 --- a/cli/lsp/testing/collectors.rs +++ b/cli/lsp/testing/collectors.rs @@ -650,7 +650,7 @@ pub mod tests { .unwrap(); let text_info = parsed_module.text_info_lazy().clone(); let mut collector = TestCollector::new(specifier, text_info); - parsed_module.module().visit_with(&mut collector); + parsed_module.program().visit_with(&mut collector); collector.take() } diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index c8b5c47f8..fc7ff5794 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -34,6 +34,7 @@ 
use crate::util::path::relative_specifier; use crate::util::path::to_percent_decoded_str; use crate::util::result::InfallibleResultExt; use crate::util::v8::convert; +use crate::worker::create_isolate_create_params; use deno_core::convert::Smi; use deno_core::convert::ToV8; use deno_core::error::StdAnyError; @@ -69,6 +70,7 @@ use indexmap::IndexMap; use indexmap::IndexSet; use lazy_regex::lazy_regex; use log::error; +use node_resolver::NodeModuleKind; use once_cell::sync::Lazy; use regex::Captures; use regex::Regex; @@ -236,7 +238,7 @@ pub struct TsServer { performance: Arc<Performance>, sender: mpsc::UnboundedSender<Request>, receiver: Mutex<Option<mpsc::UnboundedReceiver<Request>>>, - specifier_map: Arc<TscSpecifierMap>, + pub specifier_map: Arc<TscSpecifierMap>, inspector_server: Mutex<Option<Arc<InspectorServer>>>, pending_change: Mutex<Option<PendingChange>>, } @@ -882,20 +884,22 @@ impl TsServer { options: GetCompletionsAtPositionOptions, format_code_settings: FormatCodeSettings, scope: Option<ModuleSpecifier>, - ) -> Option<CompletionInfo> { + ) -> Result<Option<CompletionInfo>, AnyError> { let req = TscRequest::GetCompletionsAtPosition(Box::new(( self.specifier_map.denormalize(&specifier), position, options, format_code_settings, ))); - match self.request(snapshot, req, scope).await { - Ok(maybe_info) => maybe_info, - Err(err) => { - log::error!("Unable to get completion info from TypeScript: {:#}", err); - None - } - } + self + .request::<Option<CompletionInfo>>(snapshot, req, scope) + .await + .map(|mut info| { + if let Some(info) = &mut info { + info.normalize(&self.specifier_map); + } + info + }) } pub async fn get_completion_details( @@ -2183,6 +2187,50 @@ impl NavigateToItem { } #[derive(Debug, Clone, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct InlayHintDisplayPart { + pub text: String, + pub span: Option<TextSpan>, + pub file: Option<String>, +} + +impl InlayHintDisplayPart { + pub fn to_lsp( + &self, + language_server: 
&language_server::Inner, + ) -> lsp::InlayHintLabelPart { + let location = self.file.as_ref().map(|f| { + let specifier = + resolve_url(f).unwrap_or_else(|_| INVALID_SPECIFIER.clone()); + let file_referrer = + language_server.documents.get_file_referrer(&specifier); + let uri = language_server + .url_map + .specifier_to_uri(&specifier, file_referrer.as_deref()) + .unwrap_or_else(|_| INVALID_URI.clone()); + let range = self + .span + .as_ref() + .and_then(|s| { + let asset_or_doc = + language_server.get_asset_or_document(&specifier).ok()?; + Some(s.to_range(asset_or_doc.line_index())) + }) + .unwrap_or_else(|| { + lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)) + }); + lsp::Location { uri, range } + }); + lsp::InlayHintLabelPart { + value: self.text.clone(), + tooltip: None, + location, + command: None, + } + } +} + +#[derive(Debug, Clone, Deserialize)] pub enum InlayHintKind { Type, Parameter, @@ -2203,6 +2251,7 @@ impl InlayHintKind { #[serde(rename_all = "camelCase")] pub struct InlayHint { pub text: String, + pub display_parts: Option<Vec<InlayHintDisplayPart>>, pub position: u32, pub kind: InlayHintKind, pub whitespace_before: Option<bool>, @@ -2210,10 +2259,23 @@ pub struct InlayHint { } impl InlayHint { - pub fn to_lsp(&self, line_index: Arc<LineIndex>) -> lsp::InlayHint { + pub fn to_lsp( + &self, + line_index: Arc<LineIndex>, + language_server: &language_server::Inner, + ) -> lsp::InlayHint { lsp::InlayHint { position: line_index.position_tsc(self.position.into()), - label: lsp::InlayHintLabel::String(self.text.clone()), + label: if let Some(display_parts) = &self.display_parts { + lsp::InlayHintLabel::LabelParts( + display_parts + .iter() + .map(|p| p.to_lsp(language_server)) + .collect(), + ) + } else { + lsp::InlayHintLabel::String(self.text.clone()) + }, kind: self.kind.to_lsp(), padding_left: self.whitespace_before, padding_right: self.whitespace_after, @@ -3355,9 +3417,18 @@ fn parse_code_actions( 
additional_text_edits.extend(change.text_changes.iter().map(|tc| { let mut text_edit = tc.as_text_edit(asset_or_doc.line_index()); if let Some(specifier_rewrite) = &data.specifier_rewrite { - text_edit.new_text = text_edit - .new_text - .replace(&specifier_rewrite.0, &specifier_rewrite.1); + text_edit.new_text = text_edit.new_text.replace( + &specifier_rewrite.old_specifier, + &specifier_rewrite.new_specifier, + ); + if let Some(deno_types_specifier) = + &specifier_rewrite.new_deno_types_specifier + { + text_edit.new_text = format!( + "// @deno-types=\"{}\"\n{}", + deno_types_specifier, &text_edit.new_text + ); + } } text_edit })); @@ -3516,17 +3587,23 @@ impl CompletionEntryDetails { let mut text_edit = original_item.text_edit.clone(); if let Some(specifier_rewrite) = &data.specifier_rewrite { if let Some(text_edit) = &mut text_edit { - match text_edit { - lsp::CompletionTextEdit::Edit(text_edit) => { - text_edit.new_text = text_edit - .new_text - .replace(&specifier_rewrite.0, &specifier_rewrite.1); - } + let new_text = match text_edit { + lsp::CompletionTextEdit::Edit(text_edit) => &mut text_edit.new_text, lsp::CompletionTextEdit::InsertAndReplace(insert_replace_edit) => { - insert_replace_edit.new_text = insert_replace_edit - .new_text - .replace(&specifier_rewrite.0, &specifier_rewrite.1); + &mut insert_replace_edit.new_text } + }; + *new_text = new_text.replace( + &specifier_rewrite.old_specifier, + &specifier_rewrite.new_specifier, + ); + if let Some(deno_types_specifier) = + &specifier_rewrite.new_deno_types_specifier + { + *new_text = format!( + "// @deno-types=\"{}\"\n{}", + deno_types_specifier, new_text + ); } } } @@ -3584,6 +3661,12 @@ pub struct CompletionInfo { } impl CompletionInfo { + fn normalize(&mut self, specifier_map: &TscSpecifierMap) { + for entry in &mut self.entries { + entry.normalize(specifier_map); + } + } + pub fn as_completion_response( &self, line_index: Arc<LineIndex>, @@ -3625,6 +3708,13 @@ impl CompletionInfo { } } 
+#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct CompletionSpecifierRewrite { + old_specifier: String, + new_specifier: String, + new_deno_types_specifier: Option<String>, +} + #[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CompletionItemData { @@ -3637,7 +3727,7 @@ pub struct CompletionItemData { /// be rewritten by replacing the first string with the second. Intended for /// auto-import specifiers to be reverse-import-mapped. #[serde(skip_serializing_if = "Option::is_none")] - pub specifier_rewrite: Option<(String, String)>, + pub specifier_rewrite: Option<CompletionSpecifierRewrite>, #[serde(skip_serializing_if = "Option::is_none")] pub data: Option<Value>, pub use_code_snippet: bool, @@ -3645,11 +3735,17 @@ pub struct CompletionItemData { #[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] -struct CompletionEntryDataImport { +struct CompletionEntryDataAutoImport { module_specifier: String, file_name: String, } +#[derive(Debug)] +pub struct CompletionNormalizedAutoImportData { + raw: CompletionEntryDataAutoImport, + normalized: ModuleSpecifier, +} + #[derive(Debug, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CompletionEntry { @@ -3682,9 +3778,28 @@ pub struct CompletionEntry { is_import_statement_completion: Option<bool>, #[serde(skip_serializing_if = "Option::is_none")] data: Option<Value>, + /// This is not from tsc, we add it for convenience during normalization. + /// Represents `self.data.file_name`, but normalized. 
+ #[serde(skip)] + auto_import_data: Option<CompletionNormalizedAutoImportData>, } impl CompletionEntry { + fn normalize(&mut self, specifier_map: &TscSpecifierMap) { + let Some(data) = &self.data else { + return; + }; + let Ok(raw) = + serde_json::from_value::<CompletionEntryDataAutoImport>(data.clone()) + else { + return; + }; + if let Ok(normalized) = specifier_map.normalize(&raw.file_name) { + self.auto_import_data = + Some(CompletionNormalizedAutoImportData { raw, normalized }); + } + } + fn get_commit_characters( &self, info: &CompletionInfo, @@ -3833,25 +3948,44 @@ impl CompletionEntry { if let Some(source) = &self.source { let mut display_source = source.clone(); - if let Some(data) = &self.data { - if let Ok(import_data) = - serde_json::from_value::<CompletionEntryDataImport>(data.clone()) + if let Some(import_data) = &self.auto_import_data { + let import_mapper = + language_server.get_ts_response_import_mapper(specifier); + if let Some(mut new_specifier) = import_mapper + .check_specifier(&import_data.normalized, specifier) + .or_else(|| relative_specifier(specifier, &import_data.normalized)) { - if let Ok(import_specifier) = resolve_url(&import_data.file_name) { - if let Some(new_module_specifier) = language_server - .get_ts_response_import_mapper(specifier) - .check_specifier(&import_specifier, specifier) - .or_else(|| relative_specifier(specifier, &import_specifier)) - { - display_source.clone_from(&new_module_specifier); - if new_module_specifier != import_data.module_specifier { - specifier_rewrite = - Some((import_data.module_specifier, new_module_specifier)); - } - } else if source.starts_with(jsr_url().as_str()) { - return None; - } + if new_specifier.contains("/node_modules/") { + return None; + } + let mut new_deno_types_specifier = None; + if let Some(code_specifier) = language_server + .resolver + .deno_types_to_code_resolution( + &import_data.normalized, + Some(specifier), + ) + .and_then(|s| { + import_mapper + .check_specifier(&s, 
specifier) + .or_else(|| relative_specifier(specifier, &s)) + }) + { + new_deno_types_specifier = + Some(std::mem::replace(&mut new_specifier, code_specifier)); + } + display_source.clone_from(&new_specifier); + if new_specifier != import_data.raw.module_specifier + || new_deno_types_specifier.is_some() + { + specifier_rewrite = Some(CompletionSpecifierRewrite { + old_specifier: import_data.raw.module_specifier.clone(), + new_specifier, + new_deno_types_specifier, + }); } + } else if source.starts_with(jsr_url().as_str()) { + return None; } } // We want relative or bare (import-mapped or otherwise) specifiers to @@ -3939,7 +4073,7 @@ pub struct OutliningSpan { kind: OutliningSpanKind, } -const FOLD_END_PAIR_CHARACTERS: &[u8] = &[b'}', b']', b')', b'`']; +const FOLD_END_PAIR_CHARACTERS: &[u8] = b"}])`"; impl OutliningSpan { pub fn to_folding_range( @@ -4154,6 +4288,11 @@ impl TscSpecifierMap { return specifier.to_string(); } let mut specifier = original.to_string(); + if !specifier.contains("/node_modules/@types/node/") { + // The ts server doesn't give completions from files in + // `node_modules/.deno/`. We work around it like this. + specifier = specifier.replace("/node_modules/", "/$node_modules/"); + } let media_type = MediaType::from_specifier(original); // If the URL-inferred media type doesn't correspond to tsc's path-inferred // media type, force it to be the same by appending an extension. 
@@ -4271,7 +4410,7 @@ fn op_is_cancelled(state: &mut OpState) -> bool { fn op_is_node_file(state: &mut OpState, #[string] path: String) -> bool { let state = state.borrow::<State>(); let mark = state.performance.mark("tsc.op.op_is_node_file"); - let r = match ModuleSpecifier::parse(&path) { + let r = match state.specifier_map.normalize(path) { Ok(specifier) => state.state_snapshot.resolver.in_node_modules(&specifier), Err(_) => false, }; @@ -4308,13 +4447,16 @@ fn op_load<'s>( data: doc.text(), script_kind: crate::tsc::as_ts_script_kind(doc.media_type()), version: state.script_version(&specifier), - is_cjs: matches!( - doc.media_type(), - MediaType::Cjs | MediaType::Cts | MediaType::Dcts - ), + is_cjs: doc + .document() + .map(|d| state.state_snapshot.is_cjs_resolver.get_doc_module_kind(d)) + .unwrap_or(NodeModuleKind::Esm) + == NodeModuleKind::Cjs, }) }; + lsp_warn!("op_load {} {}", &specifier, maybe_load_response.is_some()); + let serialized = serde_v8::to_v8(scope, maybe_load_response)?; state.performance.measure(mark); @@ -4540,7 +4682,10 @@ fn op_script_names(state: &mut OpState) -> ScriptNames { for doc in &docs { let specifier = doc.specifier(); let is_open = doc.is_open(); - if is_open || specifier.scheme() == "file" { + if is_open + || (specifier.scheme() == "file" + && !state.state_snapshot.resolver.in_node_modules(specifier)) + { let script_names = doc .scope() .and_then(|s| result.by_scope.get_mut(s)) @@ -4551,6 +4696,10 @@ fn op_script_names(state: &mut OpState) -> ScriptNames { let (types, _) = documents.resolve_dependency( types, specifier, + state + .state_snapshot + .is_cjs_resolver + .get_doc_module_kind(doc), doc.file_referrer(), )?; let types_doc = documents.get_or_load(&types, doc.file_referrer())?; @@ -4654,6 +4803,7 @@ fn run_tsc_thread( specifier_map, request_rx, )], + create_params: create_isolate_create_params(), startup_snapshot: Some(tsc::compiler_snapshot()), inspector: has_inspector_server, ..Default::default() @@ -4892,6 +5042,10 @@ 
pub struct UserPreferences { pub allow_rename_of_import_path: Option<bool>, #[serde(skip_serializing_if = "Option::is_none")] pub auto_import_file_exclude_patterns: Option<Vec<String>>, + #[serde(skip_serializing_if = "Option::is_none")] + pub interactive_inlay_hints: Option<bool>, + #[serde(skip_serializing_if = "Option::is_none")] + pub prefer_type_only_auto_imports: Option<bool>, } impl UserPreferences { @@ -4909,6 +5063,7 @@ impl UserPreferences { include_completions_with_snippet_text: Some( config.snippet_support_capable(), ), + interactive_inlay_hints: Some(true), provide_refactor_not_applicable_reason: Some(true), quote_preference: Some(fmt_config.into()), use_label_details_in_completion_entries: Some(true), @@ -5013,6 +5168,9 @@ impl UserPreferences { } else { Some(language_settings.preferences.quote_style) }, + prefer_type_only_auto_imports: Some( + language_settings.preferences.prefer_type_only_auto_imports, + ), ..base_preferences } } @@ -5425,6 +5583,7 @@ mod tests { documents: Arc::new(documents), assets: Default::default(), config: Arc::new(config), + is_cjs_resolver: Default::default(), resolver, }); let performance = Arc::new(Performance::default()); @@ -5958,6 +6117,7 @@ mod tests { Some(temp_dir.url()), ) .await + .unwrap() .unwrap(); assert_eq!(info.entries.len(), 22); let details = ts_server @@ -6117,6 +6277,7 @@ mod tests { Some(temp_dir.url()), ) .await + .unwrap() .unwrap(); let entry = info .entries @@ -6154,7 +6315,7 @@ mod tests { let change = changes.text_changes.first().unwrap(); assert_eq!( change.new_text, - "import type { someLongVariable } from './b.ts'\n" + "import { someLongVariable } from './b.ts'\n" ); } diff --git a/cli/main.rs b/cli/main.rs index ddb6078af..7d3ef0e6a 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -15,7 +15,6 @@ mod js; mod jsr; mod lsp; mod module_loader; -mod napi; mod node; mod npm; mod ops; @@ -38,6 +37,7 @@ use crate::util::v8::init_v8_flags; use args::TaskFlags; use 
deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError; +use deno_resolver::npm::ResolvePkgFolderFromDenoReqError; use deno_runtime::WorkerExecutionMode; pub use deno_runtime::UNSTABLE_GRANULAR_FLAGS; @@ -47,12 +47,10 @@ use deno_core::error::JsError; use deno_core::futures::FutureExt; use deno_core::unsync::JoinHandle; use deno_npm::resolution::SnapshotFromLockfileError; -use deno_runtime::fmt_errors::format_js_error_with_suggestions; -use deno_runtime::fmt_errors::FixSuggestion; +use deno_runtime::fmt_errors::format_js_error; use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics; use deno_terminal::colors; use factory::CliFactory; -use npm::ResolvePkgFolderFromDenoReqError; use standalone::MODULE_NOT_FOUND; use standalone::UNSUPPORTED_SCHEME; use std::env; @@ -62,6 +60,10 @@ use std::ops::Deref; use std::path::PathBuf; use std::sync::Arc; +#[cfg(feature = "dhat-heap")] +#[global_allocator] +static ALLOC: dhat::Alloc = dhat::Alloc; + /// Ensures that all subcommands return an i32 exit code and an [`AnyError`] error type. trait SubcommandOutput { fn output(self) -> Result<i32, AnyError>; @@ -133,7 +135,7 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> { tools::compile::compile(flags, compile_flags).await }), DenoSubcommand::Coverage(coverage_flags) => spawn_subcommand(async { - tools::coverage::cover_files(flags, coverage_flags).await + tools::coverage::cover_files(flags, coverage_flags) }), DenoSubcommand::Fmt(fmt_flags) => { spawn_subcommand( @@ -166,10 +168,10 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> { if std::io::stderr().is_terminal() { log::warn!( "{} command is intended to be run by text editors and IDEs and shouldn't be run manually. - + Visit https://docs.deno.com/runtime/getting_started/setup_your_environment/ for instruction how to setup your favorite text editor. - + Press Ctrl+C to exit. 
", colors::cyan("deno lsp")); } @@ -348,109 +350,17 @@ fn setup_panic_hook() { eprintln!("Args: {:?}", env::args().collect::<Vec<_>>()); eprintln!(); orig_hook(panic_info); - std::process::exit(1); + deno_runtime::exit(1); })); } -#[allow(clippy::print_stderr)] fn exit_with_message(message: &str, code: i32) -> ! { - eprintln!( + log::error!( "{}: {}", colors::red_bold("error"), message.trim_start_matches("error: ") ); - std::process::exit(code); -} - -fn get_suggestions_for_terminal_errors(e: &JsError) -> Vec<FixSuggestion> { - if let Some(msg) = &e.message { - if msg.contains("module is not defined") - || msg.contains("exports is not defined") - { - return vec![ - FixSuggestion::info( - "Deno does not support CommonJS modules without `.cjs` extension.", - ), - FixSuggestion::hint( - "Rewrite this module to ESM or change the file extension to `.cjs`.", - ), - ]; - } else if msg.contains("openKv is not a function") { - return vec![ - FixSuggestion::info("Deno.openKv() is an unstable API."), - FixSuggestion::hint( - "Run again with `--unstable-kv` flag to enable this API.", - ), - ]; - } else if msg.contains("cron is not a function") { - return vec![ - FixSuggestion::info("Deno.cron() is an unstable API."), - FixSuggestion::hint( - "Run again with `--unstable-cron` flag to enable this API.", - ), - ]; - } else if msg.contains("WebSocketStream is not defined") { - return vec![ - FixSuggestion::info("new WebSocketStream() is an unstable API."), - FixSuggestion::hint( - "Run again with `--unstable-net` flag to enable this API.", - ), - ]; - } else if msg.contains("Temporal is not defined") { - return vec![ - FixSuggestion::info("Temporal is an unstable API."), - FixSuggestion::hint( - "Run again with `--unstable-temporal` flag to enable this API.", - ), - ]; - } else if msg.contains("BroadcastChannel is not defined") { - return vec![ - FixSuggestion::info("BroadcastChannel is an unstable API."), - FixSuggestion::hint( - "Run again with `--unstable-broadcast-channel` 
flag to enable this API.", - ), - ]; - } else if msg.contains("window is not defined") { - return vec![ - FixSuggestion::info("window global is not available in Deno 2."), - FixSuggestion::hint("Replace `window` with `globalThis`."), - ]; - } else if msg.contains("UnsafeWindowSurface is not a constructor") { - return vec![ - FixSuggestion::info("Deno.UnsafeWindowSurface is an unstable API."), - FixSuggestion::hint( - "Run again with `--unstable-webgpu` flag to enable this API.", - ), - ]; - // Try to capture errors like: - // ``` - // Uncaught Error: Cannot find module '../build/Release/canvas.node' - // Require stack: - // - /.../deno/npm/registry.npmjs.org/canvas/2.11.2/lib/bindings.js - // - /.../.cache/deno/npm/registry.npmjs.org/canvas/2.11.2/lib/canvas.js - // ``` - } else if msg.contains("Cannot find module") - && msg.contains("Require stack") - && msg.contains(".node'") - { - return vec![ - FixSuggestion::info_multiline( - &[ - "Trying to execute an npm package using Node-API addons,", - "these packages require local `node_modules` directory to be present." - ] - ), - FixSuggestion::hint_multiline( - &[ - "Add `\"nodeModulesDir\": \"auto\" option to `deno.json`, and then run", - "`deno install --allow-scripts=npm:<package> --entrypoint <script>` to setup `node_modules` directory." - ] - ) - ]; - } - } - - vec![] + deno_runtime::exit(code); } fn exit_for_error(error: AnyError) -> ! { @@ -458,8 +368,7 @@ fn exit_for_error(error: AnyError) -> ! { let mut error_code = 1; if let Some(e) = error.downcast_ref::<JsError>() { - let suggestions = get_suggestions_for_terminal_errors(e); - error_string = format_js_error_with_suggestions(e, suggestions); + error_string = format_js_error(e); } else if let Some(SnapshotFromLockfileError::IntegrityCheckFailed(e)) = error.downcast_ref::<SnapshotFromLockfileError>() { @@ -470,16 +379,18 @@ fn exit_for_error(error: AnyError) -> ! 
{ exit_with_message(&error_string, error_code); } -#[allow(clippy::print_stderr)] pub(crate) fn unstable_exit_cb(feature: &str, api_name: &str) { - eprintln!( + log::error!( "Unstable API '{api_name}'. The `--unstable-{}` flag must be provided.", feature ); - std::process::exit(70); + deno_runtime::exit(70); } pub fn main() { + #[cfg(feature = "dhat-heap")] + let profiler = dhat::Profiler::new_heap(); + setup_panic_hook(); util::unix::raise_fd_limit(); @@ -500,8 +411,13 @@ pub fn main() { run_subcommand(Arc::new(flags)).await }; - match create_and_run_current_thread_with_maybe_metrics(future) { - Ok(exit_code) => std::process::exit(exit_code), + let result = create_and_run_current_thread_with_maybe_metrics(future); + + #[cfg(feature = "dhat-heap")] + drop(profiler); + + match result { + Ok(exit_code) => deno_runtime::exit(exit_code), Err(err) => exit_for_error(err), } } @@ -515,12 +431,21 @@ fn resolve_flags_and_init( if err.kind() == clap::error::ErrorKind::DisplayVersion => { // Ignore results to avoid BrokenPipe errors. + util::logger::init(None); let _ = err.print(); - std::process::exit(0); + deno_runtime::exit(0); + } + Err(err) => { + util::logger::init(None); + exit_for_error(AnyError::from(err)) } - Err(err) => exit_for_error(AnyError::from(err)), }; + if let Some(otel_config) = flags.otel_config() { + deno_runtime::ops::otel::init(otel_config)?; + } + util::logger::init(flags.log_level); + // TODO(bartlomieju): remove in Deno v2.5 and hard error then. 
if flags.unstable_config.legacy_flag_enabled { log::warn!( @@ -549,7 +474,6 @@ fn resolve_flags_and_init( deno_core::JsRuntime::init_platform( None, /* import assertions enabled */ false, ); - util::logger::init(flags.log_level); Ok(flags) } diff --git a/cli/mainrt.rs b/cli/mainrt.rs index 02d58fcee..2951aa711 100644 --- a/cli/mainrt.rs +++ b/cli/mainrt.rs @@ -40,23 +40,21 @@ use std::env::current_exe; use crate::args::Flags; -#[allow(clippy::print_stderr)] pub(crate) fn unstable_exit_cb(feature: &str, api_name: &str) { - eprintln!( + log::error!( "Unstable API '{api_name}'. The `--unstable-{}` flag must be provided.", feature ); - std::process::exit(70); + deno_runtime::exit(70); } -#[allow(clippy::print_stderr)] fn exit_with_message(message: &str, code: i32) -> ! { - eprintln!( + log::error!( "{}: {}", colors::red_bold("error"), message.trim_start_matches("error: ") ); - std::process::exit(code); + deno_runtime::exit(code); } fn unwrap_or_exit<T>(result: Result<T, AnyError>) -> T { @@ -88,15 +86,20 @@ fn main() { let standalone = standalone::extract_standalone(Cow::Owned(args)); let future = async move { match standalone { - Ok(Some(future)) => { - let (metadata, eszip) = future.await?; - util::logger::init(metadata.log_level); - load_env_vars(&metadata.env_vars_from_env_file); - let exit_code = standalone::run(eszip, metadata).await?; - std::process::exit(exit_code); + Ok(Some(data)) => { + if let Some(otel_config) = data.metadata.otel_config.clone() { + deno_runtime::ops::otel::init(otel_config)?; + } + util::logger::init(data.metadata.log_level); + load_env_vars(&data.metadata.env_vars_from_env_file); + let exit_code = standalone::run(data).await?; + deno_runtime::exit(exit_code); } Ok(None) => Ok(()), - Err(err) => Err(err), + Err(err) => { + util::logger::init(None); + Err(err) + } } }; diff --git a/cli/module_loader.rs b/cli/module_loader.rs index 293f41dda..60de808b2 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -2,6 +2,7 @@ use 
std::borrow::Cow; use std::cell::RefCell; +use std::path::Path; use std::path::PathBuf; use std::pin::Pin; use std::rc::Rc; @@ -23,19 +24,23 @@ use crate::graph_container::ModuleGraphUpdatePermit; use crate::graph_util::CreateGraphOptions; use crate::graph_util::ModuleGraphBuilder; use crate::node; +use crate::node::CliNodeCodeTranslator; use crate::npm::CliNpmResolver; -use crate::resolver::CliGraphResolver; -use crate::resolver::CliNodeResolver; +use crate::resolver::CjsTracker; +use crate::resolver::CliNpmReqResolver; +use crate::resolver::CliResolver; use crate::resolver::ModuleCodeStringSource; +use crate::resolver::NotSupportedKindInNpmError; use crate::resolver::NpmModuleLoader; use crate::tools::check; use crate::tools::check::TypeChecker; use crate::util::progress_bar::ProgressBar; use crate::util::text_encoding::code_without_source_map; use crate::util::text_encoding::source_map_from_code; -use crate::worker::ModuleLoaderAndSourceMapGetter; +use crate::worker::CreateModuleLoaderResult; use crate::worker::ModuleLoaderFactory; use deno_ast::MediaType; +use deno_ast::ModuleKind; use deno_core::anyhow::anyhow; use deno_core::anyhow::bail; use deno_core::anyhow::Context; @@ -55,7 +60,6 @@ use deno_core::RequestedModuleType; use deno_core::ResolutionKind; use deno_core::SourceCodeCacheInfo; use deno_graph::source::ResolutionMode; -use deno_graph::source::Resolver; use deno_graph::GraphKind; use deno_graph::JsModule; use deno_graph::JsonModule; @@ -63,9 +67,14 @@ use deno_graph::Module; use deno_graph::ModuleGraph; use deno_graph::Resolution; use deno_runtime::code_cache; +use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_node::create_host_defined_options; +use deno_runtime::deno_node::NodeRequireLoader; +use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_permissions::PermissionsContainer; use deno_semver::npm::NpmPackageReqReference; +use node_resolver::errors::ClosestPkgJsonError; +use node_resolver::InNpmPackageChecker; use 
node_resolver::NodeResolutionMode; pub struct ModuleLoadPreparer { @@ -199,15 +208,20 @@ struct SharedCliModuleLoaderState { initial_cwd: PathBuf, is_inspecting: bool, is_repl: bool, + cjs_tracker: Arc<CjsTracker>, code_cache: Option<Arc<CodeCache>>, emitter: Arc<Emitter>, + fs: Arc<dyn FileSystem>, + in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>, main_module_graph_container: Arc<MainModuleGraphContainer>, module_load_preparer: Arc<ModuleLoadPreparer>, - node_resolver: Arc<CliNodeResolver>, + node_code_translator: Arc<CliNodeCodeTranslator>, + node_resolver: Arc<NodeResolver>, + npm_req_resolver: Arc<CliNpmReqResolver>, npm_resolver: Arc<dyn CliNpmResolver>, npm_module_loader: NpmModuleLoader, parsed_source_cache: Arc<ParsedSourceCache>, - resolver: Arc<CliGraphResolver>, + resolver: Arc<CliResolver>, } pub struct CliModuleLoaderFactory { @@ -218,15 +232,20 @@ impl CliModuleLoaderFactory { #[allow(clippy::too_many_arguments)] pub fn new( options: &CliOptions, + cjs_tracker: Arc<CjsTracker>, code_cache: Option<Arc<CodeCache>>, emitter: Arc<Emitter>, + fs: Arc<dyn FileSystem>, + in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>, main_module_graph_container: Arc<MainModuleGraphContainer>, module_load_preparer: Arc<ModuleLoadPreparer>, - node_resolver: Arc<CliNodeResolver>, + node_code_translator: Arc<CliNodeCodeTranslator>, + node_resolver: Arc<NodeResolver>, + npm_req_resolver: Arc<CliNpmReqResolver>, npm_resolver: Arc<dyn CliNpmResolver>, npm_module_loader: NpmModuleLoader, parsed_source_cache: Arc<ParsedSourceCache>, - resolver: Arc<CliGraphResolver>, + resolver: Arc<CliResolver>, ) -> Self { Self { shared: Arc::new(SharedCliModuleLoaderState { @@ -239,11 +258,16 @@ impl CliModuleLoaderFactory { options.sub_command(), DenoSubcommand::Repl(_) | DenoSubcommand::Jupyter(_) ), + cjs_tracker, code_cache, emitter, + fs, + in_npm_pkg_checker, main_module_graph_container, module_load_preparer, + node_code_translator, node_resolver, + npm_req_resolver, npm_resolver, 
npm_module_loader, parsed_source_cache, @@ -259,19 +283,30 @@ impl CliModuleLoaderFactory { is_worker: bool, parent_permissions: PermissionsContainer, permissions: PermissionsContainer, - ) -> ModuleLoaderAndSourceMapGetter { - let loader = Rc::new(CliModuleLoader(Rc::new(CliModuleLoaderInner { - lib, - is_worker, - parent_permissions, - permissions, - graph_container, + ) -> CreateModuleLoaderResult { + let module_loader = + Rc::new(CliModuleLoader(Rc::new(CliModuleLoaderInner { + lib, + is_worker, + parent_permissions, + permissions, + graph_container: graph_container.clone(), + node_code_translator: self.shared.node_code_translator.clone(), + emitter: self.shared.emitter.clone(), + parsed_source_cache: self.shared.parsed_source_cache.clone(), + shared: self.shared.clone(), + }))); + let node_require_loader = Rc::new(CliNodeRequireLoader { + cjs_tracker: self.shared.cjs_tracker.clone(), emitter: self.shared.emitter.clone(), - parsed_source_cache: self.shared.parsed_source_cache.clone(), - shared: self.shared.clone(), - }))); - ModuleLoaderAndSourceMapGetter { - module_loader: loader, + fs: self.shared.fs.clone(), + graph_container, + in_npm_pkg_checker: self.shared.in_npm_pkg_checker.clone(), + npm_resolver: self.shared.npm_resolver.clone(), + }); + CreateModuleLoaderResult { + module_loader, + node_require_loader, } } } @@ -280,7 +315,7 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory { fn create_for_main( &self, root_permissions: PermissionsContainer, - ) -> ModuleLoaderAndSourceMapGetter { + ) -> CreateModuleLoaderResult { self.create_with_lib( (*self.shared.main_module_graph_container).clone(), self.shared.lib_window, @@ -294,7 +329,7 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory { &self, parent_permissions: PermissionsContainer, permissions: PermissionsContainer, - ) -> ModuleLoaderAndSourceMapGetter { + ) -> CreateModuleLoaderResult { self.create_with_lib( // create a fresh module graph for the worker 
WorkerModuleGraphContainer::new(Arc::new(ModuleGraph::new( @@ -318,6 +353,7 @@ struct CliModuleLoaderInner<TGraphContainer: ModuleGraphContainer> { permissions: PermissionsContainer, shared: Arc<SharedCliModuleLoaderState>, emitter: Arc<Emitter>, + node_code_translator: Arc<CliNodeCodeTranslator>, parsed_source_cache: Arc<ParsedSourceCache>, graph_container: TGraphContainer, } @@ -331,16 +367,7 @@ impl<TGraphContainer: ModuleGraphContainer> maybe_referrer: Option<&ModuleSpecifier>, requested_module_type: RequestedModuleType, ) -> Result<ModuleSource, AnyError> { - let code_source = if let Some(result) = self - .shared - .npm_module_loader - .load_if_in_npm_package(specifier, maybe_referrer) - .await - { - result? - } else { - self.load_prepared_module(specifier, maybe_referrer).await? - }; + let code_source = self.load_code_source(specifier, maybe_referrer).await?; let code = if self.shared.is_inspecting { // we need the code with the source map in order for // it to work with --inspect or --inspect-brk @@ -394,6 +421,29 @@ impl<TGraphContainer: ModuleGraphContainer> )) } + async fn load_code_source( + &self, + specifier: &ModuleSpecifier, + maybe_referrer: Option<&ModuleSpecifier>, + ) -> Result<ModuleCodeStringSource, AnyError> { + if let Some(code_source) = self.load_prepared_module(specifier).await? 
{ + return Ok(code_source); + } + if self.shared.in_npm_pkg_checker.in_npm_package(specifier) { + return self + .shared + .npm_module_loader + .load(specifier, maybe_referrer) + .await; + } + + let mut msg = format!("Loading unprepared module: {specifier}"); + if let Some(referrer) = maybe_referrer { + msg = format!("{}, imported from: {}", msg, referrer.as_str()); + } + Err(anyhow!(msg)) + } + fn resolve_referrer( &self, referrer: &str, @@ -401,7 +451,7 @@ impl<TGraphContainer: ModuleGraphContainer> let referrer = if referrer.is_empty() && self.shared.is_repl { // FIXME(bartlomieju): this is a hacky way to provide compatibility with REPL // and `Deno.core.evalContext` API. Ideally we should always have a referrer filled - "./$deno$repl.ts" + "./$deno$repl.mts" } else { referrer }; @@ -424,16 +474,6 @@ impl<TGraphContainer: ModuleGraphContainer> raw_specifier: &str, referrer: &ModuleSpecifier, ) -> Result<ModuleSpecifier, AnyError> { - if self.shared.node_resolver.in_npm_package(referrer) { - return Ok( - self - .shared - .node_resolver - .resolve(raw_specifier, referrer, NodeResolutionMode::Execution)? 
- .into_url(), - ); - } - let graph = self.graph_container.graph(); let resolution = match graph.get(referrer) { Some(Module::Js(module)) => module @@ -459,6 +499,7 @@ impl<TGraphContainer: ModuleGraphContainer> start: deno_graph::Position::zeroed(), end: deno_graph::Position::zeroed(), }, + self.shared.cjs_tracker.get_referrer_kind(referrer), ResolutionMode::Execution, )?), }; @@ -468,13 +509,14 @@ impl<TGraphContainer: ModuleGraphContainer> { return self .shared - .node_resolver + .npm_req_resolver .resolve_req_reference( &reference, referrer, + self.shared.cjs_tracker.get_referrer_kind(referrer), NodeResolutionMode::Execution, ) - .map(|res| res.into_url()); + .map_err(AnyError::from); } } @@ -489,22 +531,25 @@ impl<TGraphContainer: ModuleGraphContainer> self .shared .node_resolver - .resolve_package_sub_path_from_deno_module( + .resolve_package_subpath_from_deno_module( &package_folder, module.nv_reference.sub_path(), Some(referrer), + self.shared.cjs_tracker.get_referrer_kind(referrer), NodeResolutionMode::Execution, ) .with_context(|| { format!("Could not resolve '{}'.", module.nv_reference) })? 
- .into_url() } Some(Module::Node(module)) => module.specifier.clone(), Some(Module::Js(module)) => module.specifier.clone(), Some(Module::Json(module)) => module.specifier.clone(), Some(Module::External(module)) => { - node::resolve_specifier_into_node_modules(&module.specifier) + node::resolve_specifier_into_node_modules( + &module.specifier, + self.shared.fs.as_ref(), + ) } None => specifier.into_owned(), }; @@ -514,71 +559,82 @@ impl<TGraphContainer: ModuleGraphContainer> async fn load_prepared_module( &self, specifier: &ModuleSpecifier, - maybe_referrer: Option<&ModuleSpecifier>, - ) -> Result<ModuleCodeStringSource, AnyError> { + ) -> Result<Option<ModuleCodeStringSource>, AnyError> { // Note: keep this in sync with the sync version below let graph = self.graph_container.graph(); - match self.load_prepared_module_or_defer_emit( - &graph, - specifier, - maybe_referrer, - ) { - Ok(CodeOrDeferredEmit::Code(code_source)) => Ok(code_source), - Ok(CodeOrDeferredEmit::DeferredEmit { + match self.load_prepared_module_or_defer_emit(&graph, specifier)? 
{ + Some(CodeOrDeferredEmit::Code(code_source)) => Ok(Some(code_source)), + Some(CodeOrDeferredEmit::DeferredEmit { specifier, media_type, source, }) => { let transpile_result = self .emitter - .emit_parsed_source(specifier, media_type, source) + .emit_parsed_source(specifier, media_type, ModuleKind::Esm, source) .await?; // at this point, we no longer need the parsed source in memory, so free it self.parsed_source_cache.free(specifier); - Ok(ModuleCodeStringSource { - code: ModuleSourceCode::Bytes(transpile_result), + Ok(Some(ModuleCodeStringSource { + // note: it's faster to provide a string if we know it's a string + code: ModuleSourceCode::String(transpile_result.into()), found_url: specifier.clone(), media_type, - }) + })) } - Err(err) => Err(err), + Some(CodeOrDeferredEmit::Cjs { + specifier, + media_type, + source, + }) => self + .load_maybe_cjs(specifier, media_type, source) + .await + .map(Some), + None => Ok(None), } } - fn load_prepared_module_sync( + fn load_prepared_module_for_source_map_sync( &self, specifier: &ModuleSpecifier, - maybe_referrer: Option<&ModuleSpecifier>, - ) -> Result<ModuleCodeStringSource, AnyError> { + ) -> Result<Option<ModuleCodeStringSource>, AnyError> { // Note: keep this in sync with the async version above let graph = self.graph_container.graph(); - match self.load_prepared_module_or_defer_emit( - &graph, - specifier, - maybe_referrer, - ) { - Ok(CodeOrDeferredEmit::Code(code_source)) => Ok(code_source), - Ok(CodeOrDeferredEmit::DeferredEmit { + match self.load_prepared_module_or_defer_emit(&graph, specifier)? 
{ + Some(CodeOrDeferredEmit::Code(code_source)) => Ok(Some(code_source)), + Some(CodeOrDeferredEmit::DeferredEmit { specifier, media_type, source, }) => { - let transpile_result = self - .emitter - .emit_parsed_source_sync(specifier, media_type, source)?; + let transpile_result = self.emitter.emit_parsed_source_sync( + specifier, + media_type, + ModuleKind::Esm, + source, + )?; // at this point, we no longer need the parsed source in memory, so free it self.parsed_source_cache.free(specifier); - Ok(ModuleCodeStringSource { - code: ModuleSourceCode::Bytes(transpile_result), + Ok(Some(ModuleCodeStringSource { + // note: it's faster to provide a string if we know it's a string + code: ModuleSourceCode::String(transpile_result.into()), found_url: specifier.clone(), media_type, - }) + })) + } + Some(CodeOrDeferredEmit::Cjs { .. }) => { + self.parsed_source_cache.free(specifier); + + // todo(dsherret): to make this work, we should probably just + // rely on the CJS export cache. At the moment this is hard because + // cjs export analysis is only async + Ok(None) } - Err(err) => Err(err), + None => Ok(None), } } @@ -586,8 +642,7 @@ impl<TGraphContainer: ModuleGraphContainer> &self, graph: &'graph ModuleGraph, specifier: &ModuleSpecifier, - maybe_referrer: Option<&ModuleSpecifier>, - ) -> Result<CodeOrDeferredEmit<'graph>, AnyError> { + ) -> Result<Option<CodeOrDeferredEmit<'graph>>, AnyError> { if specifier.scheme() == "node" { // Node built-in modules should be handled internally. unreachable!("Deno bug. {} was misconfigured internally.", specifier); @@ -599,38 +654,55 @@ impl<TGraphContainer: ModuleGraphContainer> media_type, specifier, .. 
- })) => Ok(CodeOrDeferredEmit::Code(ModuleCodeStringSource { + })) => Ok(Some(CodeOrDeferredEmit::Code(ModuleCodeStringSource { code: ModuleSourceCode::String(source.clone().into()), found_url: specifier.clone(), media_type: *media_type, - })), + }))), Some(deno_graph::Module::Js(JsModule { source, media_type, specifier, + is_script, .. })) => { + if self.shared.cjs_tracker.is_cjs_with_known_is_script( + specifier, + *media_type, + *is_script, + )? { + return Ok(Some(CodeOrDeferredEmit::Cjs { + specifier, + media_type: *media_type, + source, + })); + } let code: ModuleCodeString = match media_type { MediaType::JavaScript | MediaType::Unknown - | MediaType::Cjs | MediaType::Mjs | MediaType::Json => source.clone().into(), MediaType::Dts | MediaType::Dcts | MediaType::Dmts => { Default::default() } + MediaType::Cjs | MediaType::Cts => { + return Ok(Some(CodeOrDeferredEmit::Cjs { + specifier, + media_type: *media_type, + source, + })); + } MediaType::TypeScript | MediaType::Mts - | MediaType::Cts | MediaType::Jsx | MediaType::Tsx => { - return Ok(CodeOrDeferredEmit::DeferredEmit { + return Ok(Some(CodeOrDeferredEmit::DeferredEmit { specifier, media_type: *media_type, source, - }); + })); } - MediaType::TsBuildInfo | MediaType::Wasm | MediaType::SourceMap => { + MediaType::Css | MediaType::Wasm | MediaType::SourceMap => { panic!("Unexpected media type {media_type} for {specifier}") } }; @@ -638,26 +710,62 @@ impl<TGraphContainer: ModuleGraphContainer> // at this point, we no longer need the parsed source in memory, so free it self.parsed_source_cache.free(specifier); - Ok(CodeOrDeferredEmit::Code(ModuleCodeStringSource { + Ok(Some(CodeOrDeferredEmit::Code(ModuleCodeStringSource { code: ModuleSourceCode::String(code), found_url: specifier.clone(), media_type: *media_type, - })) + }))) } Some( deno_graph::Module::External(_) | deno_graph::Module::Node(_) | deno_graph::Module::Npm(_), ) - | None => { - let mut msg = format!("Loading unprepared module: {specifier}"); - if 
let Some(referrer) = maybe_referrer { - msg = format!("{}, imported from: {}", msg, referrer.as_str()); - } - Err(anyhow!(msg)) - } + | None => Ok(None), } } + + async fn load_maybe_cjs( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + original_source: &Arc<str>, + ) -> Result<ModuleCodeStringSource, AnyError> { + let js_source = if media_type.is_emittable() { + Cow::Owned( + self + .emitter + .emit_parsed_source( + specifier, + media_type, + ModuleKind::Cjs, + original_source, + ) + .await?, + ) + } else { + Cow::Borrowed(original_source.as_ref()) + }; + let text = self + .node_code_translator + .translate_cjs_to_esm(specifier, Some(js_source)) + .await?; + // at this point, we no longer need the parsed source in memory, so free it + self.parsed_source_cache.free(specifier); + Ok(ModuleCodeStringSource { + code: match text { + // perf: if the text is borrowed, that means it didn't make any changes + // to the original source, so we can just provide that instead of cloning + // the borrowed text + Cow::Borrowed(_) => { + ModuleSourceCode::String(original_source.clone().into()) + } + Cow::Owned(text) => ModuleSourceCode::String(text.into()), + }, + found_url: specifier.clone(), + media_type, + }) + } } enum CodeOrDeferredEmit<'a> { @@ -667,6 +775,11 @@ enum CodeOrDeferredEmit<'a> { media_type: MediaType, source: &'a Arc<str>, }, + Cjs { + specifier: &'a ModuleSpecifier, + media_type: MediaType, + source: &'a Arc<str>, + }, } // todo(dsherret): this double Rc boxing is not ideal @@ -708,7 +821,7 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader name: &str, ) -> Option<deno_core::v8::Local<'s, deno_core::v8::Data>> { let name = deno_core::ModuleSpecifier::parse(name).ok()?; - if self.0.shared.node_resolver.in_npm_package(&name) { + if self.0.shared.in_npm_pkg_checker.in_npm_package(&name) { Some(create_host_defined_options(scope)) } else { None @@ -745,7 +858,7 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader _maybe_referrer: 
Option<String>, is_dynamic: bool, ) -> Pin<Box<dyn Future<Output = Result<(), AnyError>>>> { - if self.0.shared.node_resolver.in_npm_package(specifier) { + if self.0.shared.in_npm_pkg_checker.in_npm_package(specifier) { return Box::pin(deno_core::futures::future::ready(Ok(()))); } @@ -828,7 +941,10 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader "wasm" | "file" | "http" | "https" | "data" | "blob" => (), _ => return None, } - let source = self.0.load_prepared_module_sync(&specifier, None).ok()?; + let source = self + .0 + .load_prepared_module_for_source_map_sync(&specifier) + .ok()??; source_map_from_code(source.code.as_bytes()) } @@ -907,3 +1023,68 @@ impl ModuleGraphUpdatePermit for WorkerModuleGraphUpdatePermit { drop(self.permit); // explicit drop for clarity } } + +#[derive(Debug)] +struct CliNodeRequireLoader<TGraphContainer: ModuleGraphContainer> { + cjs_tracker: Arc<CjsTracker>, + emitter: Arc<Emitter>, + fs: Arc<dyn FileSystem>, + graph_container: TGraphContainer, + in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>, + npm_resolver: Arc<dyn CliNpmResolver>, +} + +impl<TGraphContainer: ModuleGraphContainer> NodeRequireLoader + for CliNodeRequireLoader<TGraphContainer> +{ + fn ensure_read_permission<'a>( + &self, + permissions: &mut dyn deno_runtime::deno_node::NodePermissions, + path: &'a Path, + ) -> Result<std::borrow::Cow<'a, Path>, AnyError> { + if let Ok(url) = deno_path_util::url_from_file_path(path) { + // allow reading if it's in the module graph + if self.graph_container.graph().get(&url).is_some() { + return Ok(std::borrow::Cow::Borrowed(path)); + } + } + self.npm_resolver.ensure_read_permission(permissions, path) + } + + fn load_text_file_lossy(&self, path: &Path) -> Result<String, AnyError> { + // todo(dsherret): use the preloaded module from the graph if available? 
+ let media_type = MediaType::from_path(path); + let text = self.fs.read_text_file_lossy_sync(path, None)?; + if media_type.is_emittable() { + let specifier = deno_path_util::url_from_file_path(path)?; + if self.in_npm_pkg_checker.in_npm_package(&specifier) { + return Err( + NotSupportedKindInNpmError { + media_type, + specifier, + } + .into(), + ); + } + self.emitter.emit_parsed_source_sync( + &specifier, + media_type, + // this is probably not super accurate due to require esm, but probably ok. + // If we find this causes a lot of churn in the emit cache then we should + // investigate how we can make this better + ModuleKind::Cjs, + &text.into(), + ) + } else { + Ok(text) + } + } + + fn is_maybe_cjs( + &self, + specifier: &ModuleSpecifier, + ) -> Result<bool, ClosestPkgJsonError> { + let media_type = MediaType::from_specifier(specifier); + self.cjs_tracker.is_maybe_cjs(specifier, media_type) + } +} diff --git a/cli/napi/README.md b/cli/napi/README.md deleted file mode 100644 index 7b359ac6e..000000000 --- a/cli/napi/README.md +++ /dev/null @@ -1,114 +0,0 @@ -# napi - -This directory contains source for Deno's Node-API implementation. It depends on -`napi_sym` and `deno_napi`. - -Files are generally organized the same as in Node.js's implementation to ease in -ensuring compatibility. - -## Adding a new function - -Add the symbol name to -[`cli/napi_sym/symbol_exports.json`](../napi_sym/symbol_exports.json). - -```diff -{ - "symbols": [ - ... - "napi_get_undefined", -- "napi_get_null" -+ "napi_get_null", -+ "napi_get_boolean" - ] -} -``` - -Determine where to place the implementation. `napi_get_boolean` is related to JS -values so we will place it in `js_native_api.rs`. If something is not clear, -just create a new file module. - -See [`napi_sym`](../napi_sym/) for writing the implementation: - -```rust -#[napi_sym::napi_sym] -pub fn napi_get_boolean( - env: *mut Env, - value: bool, - result: *mut napi_value, -) -> Result { - // ... 
- Ok(()) -} -``` - -Update the generated symbol lists using the script: - -``` -deno run --allow-write tools/napi/generate_symbols_lists.js -``` - -Add a test in [`/tests/napi`](../../tests/napi/). You can also refer to Node.js -test suite for Node-API. - -```js -// tests/napi/boolean_test.js -import { assertEquals, loadTestLibrary } from "./common.js"; -const lib = loadTestLibrary(); -Deno.test("napi get boolean", function () { - assertEquals(lib.test_get_boolean(true), true); - assertEquals(lib.test_get_boolean(false), false); -}); -``` - -```rust -// tests/napi/src/boolean.rs - -use napi_sys::Status::napi_ok; -use napi_sys::ValueType::napi_boolean; -use napi_sys::*; - -extern "C" fn test_boolean( - env: napi_env, - info: napi_callback_info, -) -> napi_value { - let (args, argc, _) = crate::get_callback_info!(env, info, 1); - assert_eq!(argc, 1); - - let mut ty = -1; - assert!(unsafe { napi_typeof(env, args[0], &mut ty) } == napi_ok); - assert_eq!(ty, napi_boolean); - - // Use napi_get_boolean here... - - value -} - -pub fn init(env: napi_env, exports: napi_value) { - let properties = &[crate::new_property!(env, "test_boolean\0", test_boolean)]; - - unsafe { - napi_define_properties(env, exports, properties.len(), properties.as_ptr()) - }; -} -``` - -```diff -// tests/napi/src/lib.rs - -+ mod boolean; - -... - -#[no_mangle] -unsafe extern "C" fn napi_register_module_v1( - env: napi_env, - exports: napi_value, -) -> napi_value { - ... -+ boolean::init(env, exports); - - exports -} -``` - -Run the test using `cargo test -p tests/napi`. 
diff --git a/cli/napi/generated_symbol_exports_list_linux.def b/cli/napi/generated_symbol_exports_list_linux.def deleted file mode 100644 index 614880ebf..000000000 --- a/cli/napi/generated_symbol_exports_list_linux.def +++ /dev/null @@ -1 +0,0 @@ -{ "node_api_create_syntax_error"; "napi_make_callback"; "napi_has_named_property"; "napi_async_destroy"; "napi_coerce_to_object"; "napi_get_arraybuffer_info"; "napi_detach_arraybuffer"; "napi_get_undefined"; "napi_reference_unref"; "napi_fatal_error"; "napi_open_callback_scope"; "napi_close_callback_scope"; "napi_get_value_uint32"; "napi_create_function"; "napi_create_arraybuffer"; "napi_get_value_int64"; "napi_get_all_property_names"; "napi_resolve_deferred"; "napi_is_detached_arraybuffer"; "napi_create_string_utf8"; "napi_create_threadsafe_function"; "node_api_throw_syntax_error"; "napi_create_bigint_int64"; "napi_wrap"; "napi_set_property"; "napi_get_value_bigint_int64"; "napi_open_handle_scope"; "napi_create_error"; "napi_create_buffer"; "napi_cancel_async_work"; "napi_is_exception_pending"; "napi_acquire_threadsafe_function"; "napi_create_external"; "napi_get_threadsafe_function_context"; "napi_get_null"; "napi_create_string_utf16"; "node_api_create_external_string_utf16"; "napi_get_value_bigint_uint64"; "napi_module_register"; "napi_is_typedarray"; "napi_create_external_buffer"; "napi_get_new_target"; "napi_get_instance_data"; "napi_close_handle_scope"; "napi_get_value_string_utf16"; "napi_get_property_names"; "napi_is_arraybuffer"; "napi_get_cb_info"; "napi_define_properties"; "napi_add_env_cleanup_hook"; "node_api_get_module_file_name"; "napi_get_node_version"; "napi_create_int64"; "napi_create_double"; "napi_get_and_clear_last_exception"; "napi_create_reference"; "napi_get_typedarray_info"; "napi_call_threadsafe_function"; "napi_get_last_error_info"; "napi_create_array_with_length"; "napi_coerce_to_number"; "napi_get_global"; "napi_is_error"; "napi_set_instance_data"; "napi_create_typedarray"; 
"napi_throw_type_error"; "napi_has_property"; "napi_get_value_external"; "napi_create_range_error"; "napi_typeof"; "napi_ref_threadsafe_function"; "napi_create_bigint_uint64"; "napi_get_prototype"; "napi_adjust_external_memory"; "napi_release_threadsafe_function"; "napi_delete_async_work"; "napi_create_string_latin1"; "node_api_create_external_string_latin1"; "napi_is_array"; "napi_unref_threadsafe_function"; "napi_throw_error"; "napi_has_own_property"; "napi_get_reference_value"; "napi_remove_env_cleanup_hook"; "napi_get_value_string_utf8"; "napi_is_promise"; "napi_get_boolean"; "napi_run_script"; "napi_get_element"; "napi_get_named_property"; "napi_get_buffer_info"; "napi_get_value_bool"; "napi_reference_ref"; "napi_create_object"; "napi_create_promise"; "napi_create_int32"; "napi_escape_handle"; "napi_open_escapable_handle_scope"; "napi_throw"; "napi_get_value_double"; "napi_set_named_property"; "napi_call_function"; "napi_create_date"; "napi_object_freeze"; "napi_get_uv_event_loop"; "napi_get_value_string_latin1"; "napi_reject_deferred"; "napi_add_finalizer"; "napi_create_array"; "napi_delete_reference"; "napi_get_date_value"; "napi_create_dataview"; "napi_get_version"; "napi_define_class"; "napi_is_date"; "napi_remove_wrap"; "napi_delete_property"; "napi_instanceof"; "napi_create_buffer_copy"; "napi_delete_element"; "napi_object_seal"; "napi_queue_async_work"; "napi_get_value_bigint_words"; "napi_is_buffer"; "napi_get_array_length"; "napi_get_property"; "napi_new_instance"; "napi_set_element"; "napi_create_bigint_words"; "napi_strict_equals"; "napi_is_dataview"; "napi_close_escapable_handle_scope"; "napi_get_dataview_info"; "napi_get_value_int32"; "napi_unwrap"; "napi_throw_range_error"; "napi_coerce_to_bool"; "napi_create_uint32"; "napi_has_element"; "napi_create_external_arraybuffer"; "napi_create_symbol"; "node_api_symbol_for"; "napi_coerce_to_string"; "napi_create_type_error"; "napi_fatal_exception"; "napi_create_async_work"; "napi_async_init"; 
"node_api_create_property_key_utf16"; "napi_type_tag_object"; "napi_check_object_type_tag"; "node_api_post_finalizer"; "napi_add_async_cleanup_hook"; "napi_remove_async_cleanup_hook"; "uv_mutex_init"; "uv_mutex_lock"; "uv_mutex_unlock"; "uv_mutex_destroy"; "uv_async_init"; "uv_async_send"; "uv_close"; };
\ No newline at end of file diff --git a/cli/napi/generated_symbol_exports_list_macos.def b/cli/napi/generated_symbol_exports_list_macos.def deleted file mode 100644 index 36b2f37fa..000000000 --- a/cli/napi/generated_symbol_exports_list_macos.def +++ /dev/null @@ -1,160 +0,0 @@ -_node_api_create_syntax_error -_napi_make_callback -_napi_has_named_property -_napi_async_destroy -_napi_coerce_to_object -_napi_get_arraybuffer_info -_napi_detach_arraybuffer -_napi_get_undefined -_napi_reference_unref -_napi_fatal_error -_napi_open_callback_scope -_napi_close_callback_scope -_napi_get_value_uint32 -_napi_create_function -_napi_create_arraybuffer -_napi_get_value_int64 -_napi_get_all_property_names -_napi_resolve_deferred -_napi_is_detached_arraybuffer -_napi_create_string_utf8 -_napi_create_threadsafe_function -_node_api_throw_syntax_error -_napi_create_bigint_int64 -_napi_wrap -_napi_set_property -_napi_get_value_bigint_int64 -_napi_open_handle_scope -_napi_create_error -_napi_create_buffer -_napi_cancel_async_work -_napi_is_exception_pending -_napi_acquire_threadsafe_function -_napi_create_external -_napi_get_threadsafe_function_context -_napi_get_null -_napi_create_string_utf16 -_node_api_create_external_string_utf16 -_napi_get_value_bigint_uint64 -_napi_module_register -_napi_is_typedarray -_napi_create_external_buffer -_napi_get_new_target -_napi_get_instance_data -_napi_close_handle_scope -_napi_get_value_string_utf16 -_napi_get_property_names -_napi_is_arraybuffer -_napi_get_cb_info -_napi_define_properties -_napi_add_env_cleanup_hook -_node_api_get_module_file_name -_napi_get_node_version -_napi_create_int64 -_napi_create_double -_napi_get_and_clear_last_exception -_napi_create_reference -_napi_get_typedarray_info -_napi_call_threadsafe_function -_napi_get_last_error_info -_napi_create_array_with_length -_napi_coerce_to_number -_napi_get_global -_napi_is_error -_napi_set_instance_data -_napi_create_typedarray -_napi_throw_type_error -_napi_has_property 
-_napi_get_value_external -_napi_create_range_error -_napi_typeof -_napi_ref_threadsafe_function -_napi_create_bigint_uint64 -_napi_get_prototype -_napi_adjust_external_memory -_napi_release_threadsafe_function -_napi_delete_async_work -_napi_create_string_latin1 -_node_api_create_external_string_latin1 -_napi_is_array -_napi_unref_threadsafe_function -_napi_throw_error -_napi_has_own_property -_napi_get_reference_value -_napi_remove_env_cleanup_hook -_napi_get_value_string_utf8 -_napi_is_promise -_napi_get_boolean -_napi_run_script -_napi_get_element -_napi_get_named_property -_napi_get_buffer_info -_napi_get_value_bool -_napi_reference_ref -_napi_create_object -_napi_create_promise -_napi_create_int32 -_napi_escape_handle -_napi_open_escapable_handle_scope -_napi_throw -_napi_get_value_double -_napi_set_named_property -_napi_call_function -_napi_create_date -_napi_object_freeze -_napi_get_uv_event_loop -_napi_get_value_string_latin1 -_napi_reject_deferred -_napi_add_finalizer -_napi_create_array -_napi_delete_reference -_napi_get_date_value -_napi_create_dataview -_napi_get_version -_napi_define_class -_napi_is_date -_napi_remove_wrap -_napi_delete_property -_napi_instanceof -_napi_create_buffer_copy -_napi_delete_element -_napi_object_seal -_napi_queue_async_work -_napi_get_value_bigint_words -_napi_is_buffer -_napi_get_array_length -_napi_get_property -_napi_new_instance -_napi_set_element -_napi_create_bigint_words -_napi_strict_equals -_napi_is_dataview -_napi_close_escapable_handle_scope -_napi_get_dataview_info -_napi_get_value_int32 -_napi_unwrap -_napi_throw_range_error -_napi_coerce_to_bool -_napi_create_uint32 -_napi_has_element -_napi_create_external_arraybuffer -_napi_create_symbol -_node_api_symbol_for -_napi_coerce_to_string -_napi_create_type_error -_napi_fatal_exception -_napi_create_async_work -_napi_async_init -_node_api_create_property_key_utf16 -_napi_type_tag_object -_napi_check_object_type_tag -_node_api_post_finalizer 
-_napi_add_async_cleanup_hook -_napi_remove_async_cleanup_hook -_uv_mutex_init -_uv_mutex_lock -_uv_mutex_unlock -_uv_mutex_destroy -_uv_async_init -_uv_async_send -_uv_close
\ No newline at end of file diff --git a/cli/napi/generated_symbol_exports_list_windows.def b/cli/napi/generated_symbol_exports_list_windows.def deleted file mode 100644 index b7355112e..000000000 --- a/cli/napi/generated_symbol_exports_list_windows.def +++ /dev/null @@ -1,162 +0,0 @@ -LIBRARY -EXPORTS - node_api_create_syntax_error - napi_make_callback - napi_has_named_property - napi_async_destroy - napi_coerce_to_object - napi_get_arraybuffer_info - napi_detach_arraybuffer - napi_get_undefined - napi_reference_unref - napi_fatal_error - napi_open_callback_scope - napi_close_callback_scope - napi_get_value_uint32 - napi_create_function - napi_create_arraybuffer - napi_get_value_int64 - napi_get_all_property_names - napi_resolve_deferred - napi_is_detached_arraybuffer - napi_create_string_utf8 - napi_create_threadsafe_function - node_api_throw_syntax_error - napi_create_bigint_int64 - napi_wrap - napi_set_property - napi_get_value_bigint_int64 - napi_open_handle_scope - napi_create_error - napi_create_buffer - napi_cancel_async_work - napi_is_exception_pending - napi_acquire_threadsafe_function - napi_create_external - napi_get_threadsafe_function_context - napi_get_null - napi_create_string_utf16 - node_api_create_external_string_utf16 - napi_get_value_bigint_uint64 - napi_module_register - napi_is_typedarray - napi_create_external_buffer - napi_get_new_target - napi_get_instance_data - napi_close_handle_scope - napi_get_value_string_utf16 - napi_get_property_names - napi_is_arraybuffer - napi_get_cb_info - napi_define_properties - napi_add_env_cleanup_hook - node_api_get_module_file_name - napi_get_node_version - napi_create_int64 - napi_create_double - napi_get_and_clear_last_exception - napi_create_reference - napi_get_typedarray_info - napi_call_threadsafe_function - napi_get_last_error_info - napi_create_array_with_length - napi_coerce_to_number - napi_get_global - napi_is_error - napi_set_instance_data - napi_create_typedarray - napi_throw_type_error - 
napi_has_property - napi_get_value_external - napi_create_range_error - napi_typeof - napi_ref_threadsafe_function - napi_create_bigint_uint64 - napi_get_prototype - napi_adjust_external_memory - napi_release_threadsafe_function - napi_delete_async_work - napi_create_string_latin1 - node_api_create_external_string_latin1 - napi_is_array - napi_unref_threadsafe_function - napi_throw_error - napi_has_own_property - napi_get_reference_value - napi_remove_env_cleanup_hook - napi_get_value_string_utf8 - napi_is_promise - napi_get_boolean - napi_run_script - napi_get_element - napi_get_named_property - napi_get_buffer_info - napi_get_value_bool - napi_reference_ref - napi_create_object - napi_create_promise - napi_create_int32 - napi_escape_handle - napi_open_escapable_handle_scope - napi_throw - napi_get_value_double - napi_set_named_property - napi_call_function - napi_create_date - napi_object_freeze - napi_get_uv_event_loop - napi_get_value_string_latin1 - napi_reject_deferred - napi_add_finalizer - napi_create_array - napi_delete_reference - napi_get_date_value - napi_create_dataview - napi_get_version - napi_define_class - napi_is_date - napi_remove_wrap - napi_delete_property - napi_instanceof - napi_create_buffer_copy - napi_delete_element - napi_object_seal - napi_queue_async_work - napi_get_value_bigint_words - napi_is_buffer - napi_get_array_length - napi_get_property - napi_new_instance - napi_set_element - napi_create_bigint_words - napi_strict_equals - napi_is_dataview - napi_close_escapable_handle_scope - napi_get_dataview_info - napi_get_value_int32 - napi_unwrap - napi_throw_range_error - napi_coerce_to_bool - napi_create_uint32 - napi_has_element - napi_create_external_arraybuffer - napi_create_symbol - node_api_symbol_for - napi_coerce_to_string - napi_create_type_error - napi_fatal_exception - napi_create_async_work - napi_async_init - node_api_create_property_key_utf16 - napi_type_tag_object - napi_check_object_type_tag - node_api_post_finalizer - 
napi_add_async_cleanup_hook - napi_remove_async_cleanup_hook - uv_mutex_init - uv_mutex_lock - uv_mutex_unlock - uv_mutex_destroy - uv_async_init - uv_async_send - uv_close
\ No newline at end of file diff --git a/cli/napi/js_native_api.rs b/cli/napi/js_native_api.rs deleted file mode 100644 index e922d8c3f..000000000 --- a/cli/napi/js_native_api.rs +++ /dev/null @@ -1,3614 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -#![allow(non_upper_case_globals)] -#![deny(unsafe_op_in_unsafe_fn)] - -const NAPI_VERSION: u32 = 9; - -use deno_runtime::deno_napi::*; -use libc::INT_MAX; - -use super::util::check_new_from_utf8; -use super::util::check_new_from_utf8_len; -use super::util::get_array_buffer_ptr; -use super::util::make_external_backing_store; -use super::util::napi_clear_last_error; -use super::util::napi_set_last_error; -use super::util::v8_name_from_property_descriptor; -use crate::check_arg; -use crate::check_env; -use deno_runtime::deno_napi::function::create_function; -use deno_runtime::deno_napi::function::create_function_template; -use deno_runtime::deno_napi::function::CallbackInfo; -use napi_sym::napi_sym; -use std::ptr::NonNull; - -#[derive(Debug, Clone, Copy, PartialEq)] -enum ReferenceOwnership { - Runtime, - Userland, -} - -enum ReferenceState { - Strong(v8::Global<v8::Value>), - Weak(v8::Weak<v8::Value>), -} - -struct Reference { - env: *mut Env, - state: ReferenceState, - ref_count: u32, - ownership: ReferenceOwnership, - finalize_cb: Option<napi_finalize>, - finalize_data: *mut c_void, - finalize_hint: *mut c_void, -} - -impl Reference { - fn new( - env: *mut Env, - value: v8::Local<v8::Value>, - initial_ref_count: u32, - ownership: ReferenceOwnership, - finalize_cb: Option<napi_finalize>, - finalize_data: *mut c_void, - finalize_hint: *mut c_void, - ) -> Box<Self> { - let isolate = unsafe { (*env).isolate() }; - - let mut reference = Box::new(Reference { - env, - state: ReferenceState::Strong(v8::Global::new(isolate, value)), - ref_count: initial_ref_count, - ownership, - finalize_cb, - finalize_data, - finalize_hint, - }); - - if initial_ref_count == 0 { - reference.set_weak(); - 
} - - reference - } - - fn ref_(&mut self) -> u32 { - self.ref_count += 1; - if self.ref_count == 1 { - self.set_strong(); - } - self.ref_count - } - - fn unref(&mut self) -> u32 { - let old_ref_count = self.ref_count; - if self.ref_count > 0 { - self.ref_count -= 1; - } - if old_ref_count == 1 && self.ref_count == 0 { - self.set_weak(); - } - self.ref_count - } - - fn reset(&mut self) { - self.finalize_cb = None; - self.finalize_data = std::ptr::null_mut(); - self.finalize_hint = std::ptr::null_mut(); - } - - fn set_strong(&mut self) { - if let ReferenceState::Weak(w) = &self.state { - let isolate = unsafe { (*self.env).isolate() }; - if let Some(g) = w.to_global(isolate) { - self.state = ReferenceState::Strong(g); - } - } - } - - fn set_weak(&mut self) { - let reference = self as *mut Reference; - if let ReferenceState::Strong(g) = &self.state { - let cb = Box::new(move |_: &mut v8::Isolate| { - Reference::weak_callback(reference) - }); - let isolate = unsafe { (*self.env).isolate() }; - self.state = - ReferenceState::Weak(v8::Weak::with_finalizer(isolate, g, cb)); - } - } - - fn weak_callback(reference: *mut Reference) { - let reference = unsafe { &mut *reference }; - - let finalize_cb = reference.finalize_cb; - let finalize_data = reference.finalize_data; - let finalize_hint = reference.finalize_hint; - reference.reset(); - - // copy this value before the finalize callback, since - // it might free the reference (which would be a UAF) - let ownership = reference.ownership; - if let Some(finalize_cb) = finalize_cb { - unsafe { - finalize_cb(reference.env as _, finalize_data, finalize_hint); - } - } - - if ownership == ReferenceOwnership::Runtime { - unsafe { drop(Reference::from_raw(reference)) } - } - } - - fn into_raw(r: Box<Reference>) -> *mut Reference { - Box::into_raw(r) - } - - unsafe fn from_raw(r: *mut Reference) -> Box<Reference> { - unsafe { Box::from_raw(r) } - } - - unsafe fn remove(r: *mut Reference) { - let r = unsafe { &mut *r }; - if r.ownership 
== ReferenceOwnership::Userland { - r.reset(); - } else { - unsafe { drop(Reference::from_raw(r)) } - } - } -} - -#[napi_sym] -fn napi_get_last_error_info( - env: *mut Env, - result: *mut *const napi_extended_error_info, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, result); - - if env.last_error.error_code == napi_ok { - napi_clear_last_error(env); - } else { - env.last_error.error_message = - ERROR_MESSAGES[env.last_error.error_code as usize].as_ptr(); - } - - unsafe { - *result = &env.last_error; - } - - napi_ok -} - -#[napi_sym] -fn napi_create_function<'s>( - env: &'s mut Env, - name: *const c_char, - length: usize, - cb: Option<napi_callback>, - cb_info: napi_callback_info, - result: *mut napi_value<'s>, -) -> napi_status { - let env_ptr = env as *mut Env; - check_arg!(env, result); - check_arg!(env, cb); - - let name = if !name.is_null() { - match unsafe { check_new_from_utf8_len(env, name, length) } { - Ok(s) => Some(s), - Err(status) => return status, - } - } else { - None - }; - - unsafe { - *result = - create_function(&mut env.scope(), env_ptr, name, cb.unwrap(), cb_info) - .into(); - } - - napi_ok -} - -#[napi_sym] -#[allow(clippy::too_many_arguments)] -fn napi_define_class<'s>( - env: &'s mut Env, - utf8name: *const c_char, - length: usize, - constructor: Option<napi_callback>, - callback_data: *mut c_void, - property_count: usize, - properties: *const napi_property_descriptor, - result: *mut napi_value<'s>, -) -> napi_status { - let env_ptr = env as *mut Env; - check_arg!(env, result); - check_arg!(env, constructor); - - if property_count > 0 { - check_arg!(env, properties); - } - - let name = match unsafe { check_new_from_utf8_len(env, utf8name, length) } { - Ok(string) => string, - Err(status) => return status, - }; - - let tpl = create_function_template( - &mut env.scope(), - env_ptr, - Some(name), - constructor.unwrap(), - callback_data, - ); - - let napi_properties: &[napi_property_descriptor] = if property_count > 0 { - 
unsafe { std::slice::from_raw_parts(properties, property_count) } - } else { - &[] - }; - let mut static_property_count = 0; - - for p in napi_properties { - if p.attributes & napi_static != 0 { - // Will be handled below - static_property_count += 1; - continue; - } - - let name = match unsafe { v8_name_from_property_descriptor(env_ptr, p) } { - Ok(name) => name, - Err(status) => return status, - }; - - if p.getter.is_some() || p.setter.is_some() { - let getter = p.getter.map(|g| { - create_function_template(&mut env.scope(), env_ptr, None, g, p.data) - }); - let setter = p.setter.map(|s| { - create_function_template(&mut env.scope(), env_ptr, None, s, p.data) - }); - - let mut accessor_property = v8::PropertyAttribute::NONE; - if getter.is_some() - && setter.is_some() - && (p.attributes & napi_writable) == 0 - { - accessor_property = - accessor_property | v8::PropertyAttribute::READ_ONLY; - } - if p.attributes & napi_enumerable == 0 { - accessor_property = - accessor_property | v8::PropertyAttribute::DONT_ENUM; - } - if p.attributes & napi_configurable == 0 { - accessor_property = - accessor_property | v8::PropertyAttribute::DONT_DELETE; - } - - let proto = tpl.prototype_template(&mut env.scope()); - proto.set_accessor_property(name, getter, setter, accessor_property); - } else if let Some(method) = p.method { - let function = create_function_template( - &mut env.scope(), - env_ptr, - None, - method, - p.data, - ); - let proto = tpl.prototype_template(&mut env.scope()); - proto.set(name, function.into()); - } else { - let proto = tpl.prototype_template(&mut env.scope()); - proto.set(name, p.value.unwrap().into()); - } - } - - let value: v8::Local<v8::Value> = - tpl.get_function(&mut env.scope()).unwrap().into(); - - unsafe { - *result = value.into(); - } - - if static_property_count > 0 { - let mut static_descriptors = Vec::with_capacity(static_property_count); - - for p in napi_properties { - if p.attributes & napi_static != 0 { - static_descriptors.push(*p); - 
} - } - - crate::status_call!(unsafe { - napi_define_properties( - env_ptr, - *result, - static_descriptors.len(), - static_descriptors.as_ptr(), - ) - }); - } - - napi_ok -} - -#[napi_sym] -fn napi_get_property_names( - env: *mut Env, - object: napi_value, - result: *mut napi_value, -) -> napi_status { - unsafe { - napi_get_all_property_names( - env, - object, - napi_key_include_prototypes, - napi_key_enumerable | napi_key_skip_symbols, - napi_key_numbers_to_strings, - result, - ) - } -} - -#[napi_sym] -fn napi_get_all_property_names<'s>( - env: &'s mut Env, - object: napi_value, - key_mode: napi_key_collection_mode, - key_filter: napi_key_filter, - key_conversion: napi_key_conversion, - result: *mut napi_value<'s>, -) -> napi_status { - check_arg!(env, result); - - let scope = &mut env.scope(); - - let Some(obj) = object.and_then(|o| o.to_object(scope)) else { - return napi_object_expected; - }; - - let mut filter = v8::PropertyFilter::ALL_PROPERTIES; - - if key_filter & napi_key_writable != 0 { - filter = filter | v8::PropertyFilter::ONLY_WRITABLE; - } - if key_filter & napi_key_enumerable != 0 { - filter = filter | v8::PropertyFilter::ONLY_ENUMERABLE; - } - if key_filter & napi_key_configurable != 0 { - filter = filter | v8::PropertyFilter::ONLY_CONFIGURABLE; - } - if key_filter & napi_key_skip_strings != 0 { - filter = filter | v8::PropertyFilter::SKIP_STRINGS; - } - if key_filter & napi_key_skip_symbols != 0 { - filter = filter | v8::PropertyFilter::SKIP_SYMBOLS; - } - - let key_mode = match key_mode { - napi_key_include_prototypes => v8::KeyCollectionMode::IncludePrototypes, - napi_key_own_only => v8::KeyCollectionMode::OwnOnly, - _ => return napi_invalid_arg, - }; - - let key_conversion = match key_conversion { - napi_key_keep_numbers => v8::KeyConversionMode::KeepNumbers, - napi_key_numbers_to_strings => v8::KeyConversionMode::ConvertToString, - _ => return napi_invalid_arg, - }; - - let filter = v8::GetPropertyNamesArgsBuilder::new() - .mode(key_mode) - 
.property_filter(filter) - .index_filter(v8::IndexFilter::IncludeIndices) - .key_conversion(key_conversion) - .build(); - - let property_names = match obj.get_property_names(scope, filter) { - Some(n) => n, - None => return napi_generic_failure, - }; - - unsafe { - *result = property_names.into(); - } - - napi_ok -} - -#[napi_sym] -fn napi_set_property( - env: &mut Env, - object: napi_value, - key: napi_value, - value: napi_value, -) -> napi_status { - check_arg!(env, key); - check_arg!(env, value); - - let scope = &mut env.scope(); - - let Some(object) = object.and_then(|o| o.to_object(scope)) else { - return napi_object_expected; - }; - - if object.set(scope, key.unwrap(), value.unwrap()).is_none() { - return napi_generic_failure; - }; - - napi_ok -} - -#[napi_sym] -fn napi_has_property( - env: &mut Env, - object: napi_value, - key: napi_value, - result: *mut bool, -) -> napi_status { - check_arg!(env, key); - check_arg!(env, result); - - let scope = &mut env.scope(); - - let Some(object) = object.and_then(|o| o.to_object(scope)) else { - return napi_object_expected; - }; - - let Some(has) = object.has(scope, key.unwrap()) else { - return napi_generic_failure; - }; - - unsafe { - *result = has; - } - - napi_ok -} - -#[napi_sym] -fn napi_get_property<'s>( - env: &'s mut Env, - object: napi_value, - key: napi_value, - result: *mut napi_value<'s>, -) -> napi_status { - check_arg!(env, key); - check_arg!(env, result); - - let scope = &mut env.scope(); - - let Some(object) = object.and_then(|o| o.to_object(scope)) else { - return napi_object_expected; - }; - - let Some(value) = object.get(scope, key.unwrap()) else { - return napi_generic_failure; - }; - - unsafe { - *result = value.into(); - } - - napi_ok -} - -#[napi_sym] -fn napi_delete_property( - env: &mut Env, - object: napi_value, - key: napi_value, - result: *mut bool, -) -> napi_status { - check_arg!(env, key); - - let scope = &mut env.scope(); - - let Some(object) = object.and_then(|o| o.to_object(scope)) 
else { - return napi_object_expected; - }; - - let Some(deleted) = object.delete(scope, key.unwrap()) else { - return napi_generic_failure; - }; - - if !result.is_null() { - unsafe { - *result = deleted; - } - } - - napi_ok -} - -#[napi_sym] -fn napi_has_own_property( - env: &mut Env, - object: napi_value, - key: napi_value, - result: *mut bool, -) -> napi_status { - check_arg!(env, key); - check_arg!(env, result); - - let scope = &mut env.scope(); - - let Some(object) = object.and_then(|o| o.to_object(scope)) else { - return napi_object_expected; - }; - - let Ok(key) = v8::Local::<v8::Name>::try_from(key.unwrap()) else { - return napi_name_expected; - }; - - let Some(has_own) = object.has_own_property(scope, key) else { - return napi_generic_failure; - }; - - unsafe { - *result = has_own; - } - - napi_ok -} - -#[napi_sym] -fn napi_has_named_property<'s>( - env: &'s mut Env, - object: napi_value<'s>, - utf8name: *const c_char, - result: *mut bool, -) -> napi_status { - let env_ptr = env as *mut Env; - check_arg!(env, result); - - let Some(object) = object.and_then(|o| o.to_object(&mut env.scope())) else { - return napi_object_expected; - }; - - let key = match unsafe { check_new_from_utf8(env_ptr, utf8name) } { - Ok(key) => key, - Err(status) => return status, - }; - - let Some(has_property) = object.has(&mut env.scope(), key.into()) else { - return napi_generic_failure; - }; - - unsafe { - *result = has_property; - } - - napi_ok -} - -#[napi_sym] -fn napi_set_named_property<'s>( - env: &'s mut Env, - object: napi_value<'s>, - utf8name: *const c_char, - value: napi_value<'s>, -) -> napi_status { - check_arg!(env, value); - let env_ptr = env as *mut Env; - - let Some(object) = object.and_then(|o| o.to_object(&mut env.scope())) else { - return napi_object_expected; - }; - - let key = match unsafe { check_new_from_utf8(env_ptr, utf8name) } { - Ok(key) => key, - Err(status) => return status, - }; - - let value = value.unwrap(); - - if !object - .set(&mut env.scope(), 
key.into(), value) - .unwrap_or(false) - { - return napi_generic_failure; - } - - napi_ok -} - -#[napi_sym] -fn napi_get_named_property<'s>( - env: &'s mut Env, - object: napi_value<'s>, - utf8name: *const c_char, - result: *mut napi_value<'s>, -) -> napi_status { - check_arg!(env, result); - let env_ptr = env as *mut Env; - - let Some(object) = object.and_then(|o| o.to_object(&mut env.scope())) else { - return napi_object_expected; - }; - - let key = match unsafe { check_new_from_utf8(env_ptr, utf8name) } { - Ok(key) => key, - Err(status) => return status, - }; - - let Some(value) = object.get(&mut env.scope(), key.into()) else { - return napi_generic_failure; - }; - - unsafe { - *result = value.into(); - } - - napi_ok -} - -#[napi_sym] -fn napi_set_element<'s>( - env: &'s mut Env, - object: napi_value<'s>, - index: u32, - value: napi_value<'s>, -) -> napi_status { - check_arg!(env, value); - - let scope = &mut env.scope(); - - let Some(object) = object.and_then(|o| o.to_object(scope)) else { - return napi_object_expected; - }; - - if !object - .set_index(scope, index, value.unwrap()) - .unwrap_or(false) - { - return napi_generic_failure; - } - - napi_ok -} - -#[napi_sym] -fn napi_has_element( - env: &mut Env, - object: napi_value, - index: u32, - result: *mut bool, -) -> napi_status { - check_arg!(env, result); - - let scope = &mut env.scope(); - - let Some(object) = object.and_then(|o| o.to_object(scope)) else { - return napi_object_expected; - }; - - let Some(has) = object.has_index(scope, index) else { - return napi_generic_failure; - }; - - unsafe { - *result = has; - } - - napi_ok -} - -#[napi_sym] -fn napi_get_element<'s>( - env: &'s mut Env, - object: napi_value, - index: u32, - result: *mut napi_value<'s>, -) -> napi_status { - check_arg!(env, result); - - let scope = &mut env.scope(); - - let Some(object) = object.and_then(|o| o.to_object(scope)) else { - return napi_object_expected; - }; - - let Some(value) = object.get_index(scope, index) else { - 
return napi_generic_failure; - }; - - unsafe { - *result = value.into(); - } - - napi_ok -} - -#[napi_sym] -fn napi_delete_element( - env: &mut Env, - object: napi_value, - index: u32, - result: *mut bool, -) -> napi_status { - let scope = &mut env.scope(); - - let Some(object) = object.and_then(|o| o.to_object(scope)) else { - return napi_object_expected; - }; - - let Some(deleted) = object.delete_index(scope, index) else { - return napi_generic_failure; - }; - - if !result.is_null() { - unsafe { - *result = deleted; - } - } - - napi_ok -} - -#[napi_sym] -fn napi_define_properties( - env: &mut Env, - object: napi_value, - property_count: usize, - properties: *const napi_property_descriptor, -) -> napi_status { - let env_ptr = env as *mut Env; - - if property_count > 0 { - check_arg!(env, properties); - } - - let scope = &mut env.scope(); - - let Some(object) = object.and_then(|o| o.to_object(scope)) else { - return napi_object_expected; - }; - - let properties = if property_count == 0 { - &[] - } else { - unsafe { std::slice::from_raw_parts(properties, property_count) } - }; - for property in properties { - let property_name = - match unsafe { v8_name_from_property_descriptor(env_ptr, property) } { - Ok(name) => name, - Err(status) => return status, - }; - - let writable = property.attributes & napi_writable != 0; - let enumerable = property.attributes & napi_enumerable != 0; - let configurable = property.attributes & napi_configurable != 0; - - if property.getter.is_some() || property.setter.is_some() { - let local_getter: v8::Local<v8::Value> = if let Some(getter) = - property.getter - { - create_function(&mut env.scope(), env_ptr, None, getter, property.data) - .into() - } else { - v8::undefined(scope).into() - }; - let local_setter: v8::Local<v8::Value> = if let Some(setter) = - property.setter - { - create_function(&mut env.scope(), env_ptr, None, setter, property.data) - .into() - } else { - v8::undefined(scope).into() - }; - - let mut desc = - 
v8::PropertyDescriptor::new_from_get_set(local_getter, local_setter); - desc.set_enumerable(enumerable); - desc.set_configurable(configurable); - - if !object - .define_property(scope, property_name, &desc) - .unwrap_or(false) - { - return napi_invalid_arg; - } - } else if let Some(method) = property.method { - let method: v8::Local<v8::Value> = { - let function = create_function( - &mut env.scope(), - env_ptr, - None, - method, - property.data, - ); - function.into() - }; - - let mut desc = - v8::PropertyDescriptor::new_from_value_writable(method, writable); - desc.set_enumerable(enumerable); - desc.set_configurable(configurable); - - if !object - .define_property(scope, property_name, &desc) - .unwrap_or(false) - { - return napi_generic_failure; - } - } else { - let value = property.value.unwrap(); - - if enumerable & writable & configurable { - if !object - .create_data_property(scope, property_name, value) - .unwrap_or(false) - { - return napi_invalid_arg; - } - } else { - let mut desc = - v8::PropertyDescriptor::new_from_value_writable(value, writable); - desc.set_enumerable(enumerable); - desc.set_configurable(configurable); - - if !object - .define_property(scope, property_name, &desc) - .unwrap_or(false) - { - return napi_invalid_arg; - } - } - } - } - - napi_ok -} - -#[napi_sym] -fn napi_object_freeze(env: &mut Env, object: napi_value) -> napi_status { - let scope = &mut env.scope(); - - let Some(object) = object.and_then(|o| o.to_object(scope)) else { - return napi_object_expected; - }; - - if !object - .set_integrity_level(scope, v8::IntegrityLevel::Frozen) - .unwrap_or(false) - { - return napi_generic_failure; - } - - napi_ok -} - -#[napi_sym] -fn napi_object_seal(env: &mut Env, object: napi_value) -> napi_status { - let scope = &mut env.scope(); - - let Some(object) = object.and_then(|o| o.to_object(scope)) else { - return napi_object_expected; - }; - - if !object - .set_integrity_level(scope, v8::IntegrityLevel::Sealed) - .unwrap_or(false) - { - 
return napi_generic_failure; - } - - napi_ok -} - -#[napi_sym] -fn napi_is_array( - env: *mut Env, - value: napi_value, - result: *mut bool, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, value); - check_arg!(env, result); - - let value = value.unwrap(); - - unsafe { - *result = value.is_array(); - } - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_get_array_length( - env: &mut Env, - value: napi_value, - result: *mut u32, -) -> napi_status { - check_arg!(env, value); - check_arg!(env, result); - - let value = value.unwrap(); - - match v8::Local::<v8::Array>::try_from(value) { - Ok(array) => { - unsafe { - *result = array.length(); - } - napi_ok - } - Err(_) => napi_array_expected, - } -} - -#[napi_sym] -fn napi_strict_equals( - env: &mut Env, - lhs: napi_value, - rhs: napi_value, - result: *mut bool, -) -> napi_status { - check_arg!(env, lhs); - check_arg!(env, rhs); - check_arg!(env, result); - - unsafe { - *result = lhs.unwrap().strict_equals(rhs.unwrap()); - } - - napi_ok -} - -#[napi_sym] -fn napi_get_prototype<'s>( - env: &'s mut Env, - object: napi_value, - result: *mut napi_value<'s>, -) -> napi_status { - check_arg!(env, result); - - let scope = &mut env.scope(); - - let Some(object) = object.and_then(|o| o.to_object(scope)) else { - return napi_object_expected; - }; - - let Some(proto) = object.get_prototype(scope) else { - return napi_generic_failure; - }; - - unsafe { - *result = proto.into(); - } - - napi_ok -} - -#[napi_sym] -fn napi_create_object( - env_ptr: *mut Env, - result: *mut napi_value, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, result); - - unsafe { - *result = v8::Object::new(&mut env.scope()).into(); - } - - return napi_clear_last_error(env_ptr); -} - -#[napi_sym] -fn napi_create_array( - env_ptr: *mut Env, - result: *mut napi_value, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, result); - - unsafe { - *result = v8::Array::new(&mut env.scope(), 0).into(); - } 
- - return napi_clear_last_error(env_ptr); -} - -#[napi_sym] -fn napi_create_array_with_length( - env_ptr: *mut Env, - length: usize, - result: *mut napi_value, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, result); - - unsafe { - *result = v8::Array::new(&mut env.scope(), length as _).into(); - } - - return napi_clear_last_error(env_ptr); -} - -#[napi_sym] -fn napi_create_string_latin1( - env_ptr: *mut Env, - string: *const c_char, - length: usize, - result: *mut napi_value, -) -> napi_status { - let env = check_env!(env_ptr); - if length > 0 { - check_arg!(env, string); - } - crate::return_status_if_false!( - env, - (length == NAPI_AUTO_LENGTH) || length <= INT_MAX as _, - napi_invalid_arg - ); - - let buffer = if length > 0 { - unsafe { - std::slice::from_raw_parts( - string as _, - if length == NAPI_AUTO_LENGTH { - std::ffi::CStr::from_ptr(string).to_bytes().len() - } else { - length - }, - ) - } - } else { - &[] - }; - - let Some(string) = v8::String::new_from_one_byte( - &mut env.scope(), - buffer, - v8::NewStringType::Normal, - ) else { - return napi_set_last_error(env_ptr, napi_generic_failure); - }; - - unsafe { - *result = string.into(); - } - - return napi_clear_last_error(env_ptr); -} - -#[napi_sym] -fn napi_create_string_utf8( - env_ptr: *mut Env, - string: *const c_char, - length: usize, - result: *mut napi_value, -) -> napi_status { - let env = check_env!(env_ptr); - if length > 0 { - check_arg!(env, string); - } - crate::return_status_if_false!( - env, - (length == NAPI_AUTO_LENGTH) || length <= INT_MAX as _, - napi_invalid_arg - ); - - let buffer = if length > 0 { - unsafe { - std::slice::from_raw_parts( - string as _, - if length == NAPI_AUTO_LENGTH { - std::ffi::CStr::from_ptr(string).to_bytes().len() - } else { - length - }, - ) - } - } else { - &[] - }; - - let Some(string) = v8::String::new_from_utf8( - &mut env.scope(), - buffer, - v8::NewStringType::Normal, - ) else { - return napi_set_last_error(env_ptr, 
napi_generic_failure); - }; - - unsafe { - *result = string.into(); - } - - return napi_clear_last_error(env_ptr); -} - -#[napi_sym] -fn napi_create_string_utf16( - env_ptr: *mut Env, - string: *const u16, - length: usize, - result: *mut napi_value, -) -> napi_status { - let env = check_env!(env_ptr); - if length > 0 { - check_arg!(env, string); - } - crate::return_status_if_false!( - env, - (length == NAPI_AUTO_LENGTH) || length <= INT_MAX as _, - napi_invalid_arg - ); - - let buffer = if length > 0 { - unsafe { - std::slice::from_raw_parts( - string, - if length == NAPI_AUTO_LENGTH { - let mut length = 0; - while *(string.add(length)) != 0 { - length += 1; - } - length - } else { - length - }, - ) - } - } else { - &[] - }; - - let Some(string) = v8::String::new_from_two_byte( - &mut env.scope(), - buffer, - v8::NewStringType::Normal, - ) else { - return napi_set_last_error(env_ptr, napi_generic_failure); - }; - - unsafe { - *result = string.into(); - } - - return napi_clear_last_error(env_ptr); -} - -#[napi_sym] -fn node_api_create_external_string_latin1( - env_ptr: *mut Env, - string: *const c_char, - length: usize, - nogc_finalize_callback: Option<napi_finalize>, - finalize_hint: *mut c_void, - result: *mut napi_value, - copied: *mut bool, -) -> napi_status { - let status = - unsafe { napi_create_string_latin1(env_ptr, string, length, result) }; - - if status == napi_ok { - unsafe { - *copied = true; - } - - if let Some(finalize) = nogc_finalize_callback { - unsafe { - finalize(env_ptr as napi_env, string as *mut c_void, finalize_hint); - } - } - } - - status -} - -#[napi_sym] -fn node_api_create_external_string_utf16( - env_ptr: *mut Env, - string: *const u16, - length: usize, - nogc_finalize_callback: Option<napi_finalize>, - finalize_hint: *mut c_void, - result: *mut napi_value, - copied: *mut bool, -) -> napi_status { - let status = - unsafe { napi_create_string_utf16(env_ptr, string, length, result) }; - - if status == napi_ok { - unsafe { - *copied = 
true; - } - - if let Some(finalize) = nogc_finalize_callback { - unsafe { - finalize(env_ptr as napi_env, string as *mut c_void, finalize_hint); - } - } - } - - status -} - -#[napi_sym] -fn node_api_create_property_key_utf16( - env_ptr: *mut Env, - string: *const u16, - length: usize, - result: *mut napi_value, -) -> napi_status { - let env = check_env!(env_ptr); - if length > 0 { - check_arg!(env, string); - } - crate::return_status_if_false!( - env, - (length == NAPI_AUTO_LENGTH) || length <= INT_MAX as _, - napi_invalid_arg - ); - - let buffer = if length > 0 { - unsafe { - std::slice::from_raw_parts( - string, - if length == NAPI_AUTO_LENGTH { - let mut length = 0; - while *(string.add(length)) != 0 { - length += 1; - } - length - } else { - length - }, - ) - } - } else { - &[] - }; - - let Some(string) = v8::String::new_from_two_byte( - &mut env.scope(), - buffer, - v8::NewStringType::Internalized, - ) else { - return napi_set_last_error(env_ptr, napi_generic_failure); - }; - - unsafe { - *result = string.into(); - } - - return napi_clear_last_error(env_ptr); -} - -#[napi_sym] -fn napi_create_double( - env_ptr: *mut Env, - value: f64, - result: *mut napi_value, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, result); - - unsafe { - *result = v8::Number::new(&mut env.scope(), value).into(); - } - - napi_clear_last_error(env_ptr) -} - -#[napi_sym] -fn napi_create_int32( - env_ptr: *mut Env, - value: i32, - result: *mut napi_value, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, result); - - unsafe { - *result = v8::Integer::new(&mut env.scope(), value).into(); - } - - napi_clear_last_error(env_ptr) -} - -#[napi_sym] -fn napi_create_uint32( - env_ptr: *mut Env, - value: u32, - result: *mut napi_value, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, result); - - unsafe { - *result = v8::Integer::new_from_unsigned(&mut env.scope(), value).into(); - } - - napi_clear_last_error(env_ptr) -} - 
-#[napi_sym] -fn napi_create_int64( - env_ptr: *mut Env, - value: i64, - result: *mut napi_value, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, result); - - unsafe { - *result = v8::Number::new(&mut env.scope(), value as _).into(); - } - - napi_clear_last_error(env_ptr) -} - -#[napi_sym] -fn napi_create_bigint_int64( - env_ptr: *mut Env, - value: i64, - result: *mut napi_value, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, result); - - unsafe { - *result = v8::BigInt::new_from_i64(&mut env.scope(), value).into(); - } - - napi_clear_last_error(env_ptr) -} - -#[napi_sym] -fn napi_create_bigint_uint64( - env_ptr: *mut Env, - value: u64, - result: *mut napi_value, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, result); - - unsafe { - *result = v8::BigInt::new_from_u64(&mut env.scope(), value).into(); - } - - napi_clear_last_error(env_ptr) -} - -#[napi_sym] -fn napi_create_bigint_words<'s>( - env: &'s mut Env, - sign_bit: bool, - word_count: usize, - words: *const u64, - result: *mut napi_value<'s>, -) -> napi_status { - check_arg!(env, words); - check_arg!(env, result); - - if word_count > INT_MAX as _ { - return napi_invalid_arg; - } - - match v8::BigInt::new_from_words(&mut env.scope(), sign_bit, unsafe { - std::slice::from_raw_parts(words, word_count) - }) { - Some(value) => unsafe { - *result = value.into(); - }, - None => { - return napi_generic_failure; - } - } - - napi_ok -} - -#[napi_sym] -fn napi_get_boolean( - env: *mut Env, - value: bool, - result: *mut napi_value, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, result); - - unsafe { - *result = v8::Boolean::new(env.isolate(), value).into(); - } - - return napi_clear_last_error(env); -} - -#[napi_sym] -fn napi_create_symbol( - env_ptr: *mut Env, - description: napi_value, - result: *mut napi_value, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, result); - - let description = if let Some(d) = 
*description { - let Some(d) = d.to_string(&mut env.scope()) else { - return napi_set_last_error(env, napi_string_expected); - }; - Some(d) - } else { - None - }; - - unsafe { - *result = v8::Symbol::new(&mut env.scope(), description).into(); - } - - return napi_clear_last_error(env_ptr); -} - -#[napi_sym] -fn node_api_symbol_for( - env: *mut Env, - utf8description: *const c_char, - length: usize, - result: *mut napi_value, -) -> napi_status { - { - let env = check_env!(env); - check_arg!(env, result); - - let description_string = - match unsafe { check_new_from_utf8_len(env, utf8description, length) } { - Ok(s) => s, - Err(status) => return napi_set_last_error(env, status), - }; - - unsafe { - *result = - v8::Symbol::for_key(&mut env.scope(), description_string).into(); - } - } - - napi_clear_last_error(env) -} - -macro_rules! napi_create_error_impl { - ($env_ptr:ident, $code:ident, $msg:ident, $result:ident, $error:ident) => {{ - let env_ptr = $env_ptr; - let code = $code; - let msg = $msg; - let result = $result; - - let env = check_env!(env_ptr); - check_arg!(env, msg); - check_arg!(env, result); - - let Some(message) = - msg.and_then(|v| v8::Local::<v8::String>::try_from(v).ok()) - else { - return napi_set_last_error(env_ptr, napi_string_expected); - }; - - let error = v8::Exception::$error(&mut env.scope(), message); - - if let Some(code) = *code { - let error_obj: v8::Local<v8::Object> = error.try_into().unwrap(); - let code_key = v8::String::new(&mut env.scope(), "code").unwrap(); - if !error_obj - .set(&mut env.scope(), code_key.into(), code) - .unwrap_or(false) - { - return napi_set_last_error(env_ptr, napi_generic_failure); - } - } - - unsafe { - *result = error.into(); - } - - return napi_clear_last_error(env_ptr); - }}; -} - -#[napi_sym] -fn napi_create_error( - env_ptr: *mut Env, - code: napi_value, - msg: napi_value, - result: *mut napi_value, -) -> napi_status { - napi_create_error_impl!(env_ptr, code, msg, result, error) -} - -#[napi_sym] -fn 
napi_create_type_error( - env_ptr: *mut Env, - code: napi_value, - msg: napi_value, - result: *mut napi_value, -) -> napi_status { - napi_create_error_impl!(env_ptr, code, msg, result, type_error) -} - -#[napi_sym] -fn napi_create_range_error( - env_ptr: *mut Env, - code: napi_value, - msg: napi_value, - result: *mut napi_value, -) -> napi_status { - napi_create_error_impl!(env_ptr, code, msg, result, range_error) -} - -#[napi_sym] -fn node_api_create_syntax_error( - env_ptr: *mut Env, - code: napi_value, - msg: napi_value, - result: *mut napi_value, -) -> napi_status { - napi_create_error_impl!(env_ptr, code, msg, result, syntax_error) -} - -pub fn get_value_type(value: v8::Local<v8::Value>) -> Option<napi_valuetype> { - if value.is_undefined() { - Some(napi_undefined) - } else if value.is_null() { - Some(napi_null) - } else if value.is_external() { - Some(napi_external) - } else if value.is_boolean() { - Some(napi_boolean) - } else if value.is_number() { - Some(napi_number) - } else if value.is_big_int() { - Some(napi_bigint) - } else if value.is_string() { - Some(napi_string) - } else if value.is_symbol() { - Some(napi_symbol) - } else if value.is_function() { - Some(napi_function) - } else if value.is_object() { - Some(napi_object) - } else { - None - } -} - -#[napi_sym] -fn napi_typeof( - env: *mut Env, - value: napi_value, - result: *mut napi_valuetype, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, value); - check_arg!(env, result); - - let Some(ty) = get_value_type(value.unwrap()) else { - return napi_set_last_error(env, napi_invalid_arg); - }; - - unsafe { - *result = ty; - } - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_get_undefined(env: *mut Env, result: *mut napi_value) -> napi_status { - let env = check_env!(env); - check_arg!(env, result); - - unsafe { - *result = v8::undefined(&mut env.scope()).into(); - } - - return napi_clear_last_error(env); -} - -#[napi_sym] -fn napi_get_null(env: *mut Env, result: *mut 
napi_value) -> napi_status { - let env = check_env!(env); - check_arg!(env, result); - - unsafe { - *result = v8::null(&mut env.scope()).into(); - } - - return napi_clear_last_error(env); -} - -#[napi_sym] -fn napi_get_cb_info( - env: *mut Env, - cbinfo: napi_callback_info, - argc: *mut i32, - argv: *mut napi_value, - this_arg: *mut napi_value, - data: *mut *mut c_void, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, cbinfo); - - let cbinfo: &CallbackInfo = unsafe { &*(cbinfo as *const CallbackInfo) }; - let args = unsafe { &*(cbinfo.args as *const v8::FunctionCallbackArguments) }; - - if !argv.is_null() { - check_arg!(env, argc); - let argc = unsafe { *argc as usize }; - for i in 0..argc { - let mut arg = args.get(i as _); - unsafe { - *argv.add(i) = arg.into(); - } - } - } - - if !argc.is_null() { - unsafe { - *argc = args.length(); - } - } - - if !this_arg.is_null() { - unsafe { - *this_arg = args.this().into(); - } - } - - if !data.is_null() { - unsafe { - *data = cbinfo.cb_info; - } - } - - napi_clear_last_error(env); - napi_ok -} - -#[napi_sym] -fn napi_get_new_target( - env: *mut Env, - cbinfo: napi_callback_info, - result: *mut napi_value, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, cbinfo); - check_arg!(env, result); - - let cbinfo: &CallbackInfo = unsafe { &*(cbinfo as *const CallbackInfo) }; - let args = unsafe { &*(cbinfo.args as *const v8::FunctionCallbackArguments) }; - - unsafe { - *result = args.new_target().into(); - } - - return napi_clear_last_error(env); -} - -#[napi_sym] -fn napi_call_function( - env_ptr: *mut Env, - recv: napi_value, - func: napi_value, - argc: usize, - argv: *const napi_value, - result: *mut napi_value, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, recv); - let args = if argc > 0 { - check_arg!(env, argv); - unsafe { - std::slice::from_raw_parts(argv as *mut v8::Local<v8::Value>, argc) - } - } else { - &[] - }; - - let Some(func) = - func.and_then(|f| 
v8::Local::<v8::Function>::try_from(f).ok()) - else { - return napi_set_last_error(env, napi_function_expected); - }; - - let Some(v) = func.call(&mut env.scope(), recv.unwrap(), args) else { - return napi_set_last_error(env_ptr, napi_generic_failure); - }; - - if !result.is_null() { - unsafe { - *result = v.into(); - } - } - - return napi_clear_last_error(env_ptr); -} - -#[napi_sym] -fn napi_get_global(env_ptr: *mut Env, result: *mut napi_value) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, result); - - let global = v8::Local::new(&mut env.scope(), &env.global); - unsafe { - *result = global.into(); - } - - return napi_clear_last_error(env_ptr); -} - -#[napi_sym] -fn napi_throw(env: *mut Env, error: napi_value) -> napi_status { - let env = check_env!(env); - check_arg!(env, error); - - if env.last_exception.is_some() { - return napi_pending_exception; - } - - let error = error.unwrap(); - env.scope().throw_exception(error); - let error = v8::Global::new(&mut env.scope(), error); - env.last_exception = Some(error); - - napi_clear_last_error(env) -} - -macro_rules! 
napi_throw_error_impl { - ($env:ident, $code:ident, $msg:ident, $error:ident) => {{ - let env = check_env!($env); - let env_ptr = env as *mut Env; - let code = $code; - let msg = $msg; - - if env.last_exception.is_some() { - return napi_pending_exception; - } - - let str_ = match unsafe { check_new_from_utf8(env, msg) } { - Ok(s) => s, - Err(status) => return status, - }; - - let error = v8::Exception::$error(&mut env.scope(), str_); - - if !code.is_null() { - let error_obj: v8::Local<v8::Object> = error.try_into().unwrap(); - let code = match unsafe { check_new_from_utf8(env_ptr, code) } { - Ok(s) => s, - Err(status) => return napi_set_last_error(env, status), - }; - let code_key = v8::String::new(&mut env.scope(), "code").unwrap(); - if !error_obj - .set(&mut env.scope(), code_key.into(), code.into()) - .unwrap_or(false) - { - return napi_set_last_error(env, napi_generic_failure); - } - } - - env.scope().throw_exception(error); - let error = v8::Global::new(&mut env.scope(), error); - env.last_exception = Some(error); - - napi_clear_last_error(env) - }}; -} - -#[napi_sym] -fn napi_throw_error( - env: *mut Env, - code: *const c_char, - msg: *const c_char, -) -> napi_status { - napi_throw_error_impl!(env, code, msg, error) -} - -#[napi_sym] -fn napi_throw_type_error( - env: *mut Env, - code: *const c_char, - msg: *const c_char, -) -> napi_status { - napi_throw_error_impl!(env, code, msg, type_error) -} - -#[napi_sym] -fn napi_throw_range_error( - env: *mut Env, - code: *const c_char, - msg: *const c_char, -) -> napi_status { - napi_throw_error_impl!(env, code, msg, range_error) -} - -#[napi_sym] -fn node_api_throw_syntax_error( - env: *mut Env, - code: *const c_char, - msg: *const c_char, -) -> napi_status { - napi_throw_error_impl!(env, code, msg, syntax_error) -} - -#[napi_sym] -fn napi_is_error( - env: *mut Env, - value: napi_value, - result: *mut bool, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, value); - check_arg!(env, result); - - 
unsafe { - *result = value.unwrap().is_native_error(); - } - - return napi_clear_last_error(env); -} - -#[napi_sym] -fn napi_get_value_double( - env_ptr: *mut Env, - value: napi_value, - result: *mut f64, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, value); - check_arg!(env, result); - - let Some(number) = - value.and_then(|v| v8::Local::<v8::Number>::try_from(v).ok()) - else { - return napi_set_last_error(env_ptr, napi_number_expected); - }; - - unsafe { - *result = number.value(); - } - - return napi_clear_last_error(env_ptr); -} - -#[napi_sym] -fn napi_get_value_int32( - env_ptr: *mut Env, - value: napi_value, - result: *mut i32, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, value); - check_arg!(env, result); - - let Some(value) = value.unwrap().int32_value(&mut env.scope()) else { - return napi_set_last_error(env, napi_number_expected); - }; - - unsafe { - *result = value; - } - - return napi_clear_last_error(env_ptr); -} - -#[napi_sym] -fn napi_get_value_uint32( - env_ptr: *mut Env, - value: napi_value, - result: *mut u32, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, value); - check_arg!(env, result); - - let Some(value) = value.unwrap().uint32_value(&mut env.scope()) else { - return napi_set_last_error(env, napi_number_expected); - }; - - unsafe { - *result = value; - } - - return napi_clear_last_error(env_ptr); -} - -#[napi_sym] -fn napi_get_value_int64( - env_ptr: *mut Env, - value: napi_value, - result: *mut i64, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, value); - check_arg!(env, result); - - let Some(number) = - value.and_then(|v| v8::Local::<v8::Number>::try_from(v).ok()) - else { - return napi_set_last_error(env_ptr, napi_number_expected); - }; - - let value = number.value(); - - unsafe { - if value.is_finite() { - *result = value as _; - } else { - *result = 0; - } - } - - return napi_clear_last_error(env_ptr); -} - -#[napi_sym] -fn 
napi_get_value_bigint_int64( - env_ptr: *mut Env, - value: napi_value, - result: *mut i64, - lossless: *mut bool, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, value); - check_arg!(env, result); - check_arg!(env, lossless); - - let Some(bigint) = - value.and_then(|v| v8::Local::<v8::BigInt>::try_from(v).ok()) - else { - return napi_set_last_error(env_ptr, napi_bigint_expected); - }; - - let (result_, lossless_) = bigint.i64_value(); - - unsafe { - *result = result_; - *lossless = lossless_; - } - - return napi_clear_last_error(env_ptr); -} - -#[napi_sym] -fn napi_get_value_bigint_uint64( - env_ptr: *mut Env, - value: napi_value, - result: *mut u64, - lossless: *mut bool, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, value); - check_arg!(env, result); - check_arg!(env, lossless); - - let Some(bigint) = - value.and_then(|v| v8::Local::<v8::BigInt>::try_from(v).ok()) - else { - return napi_set_last_error(env_ptr, napi_bigint_expected); - }; - - let (result_, lossless_) = bigint.u64_value(); - - unsafe { - *result = result_; - *lossless = lossless_; - } - - return napi_clear_last_error(env_ptr); -} - -#[napi_sym] -fn napi_get_value_bigint_words( - env_ptr: *mut Env, - value: napi_value, - sign_bit: *mut i32, - word_count: *mut usize, - words: *mut u64, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, value); - check_arg!(env, word_count); - - let Some(bigint) = - value.and_then(|v| v8::Local::<v8::BigInt>::try_from(v).ok()) - else { - return napi_set_last_error(env_ptr, napi_bigint_expected); - }; - - let word_count_int; - - if sign_bit.is_null() && words.is_null() { - word_count_int = bigint.word_count(); - } else { - check_arg!(env, sign_bit); - check_arg!(env, words); - let out_words = - unsafe { std::slice::from_raw_parts_mut(words, *word_count) }; - let (sign, slice_) = bigint.to_words_array(out_words); - word_count_int = slice_.len(); - unsafe { - *sign_bit = sign as i32; - } - } - - unsafe { 
- *word_count = word_count_int; - } - - return napi_clear_last_error(env_ptr); -} - -#[napi_sym] -fn napi_get_value_bool( - env_ptr: *mut Env, - value: napi_value, - result: *mut bool, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, value); - check_arg!(env, result); - - let Some(boolean) = - value.and_then(|v| v8::Local::<v8::Boolean>::try_from(v).ok()) - else { - return napi_set_last_error(env_ptr, napi_boolean_expected); - }; - - unsafe { - *result = boolean.is_true(); - } - - return napi_clear_last_error(env_ptr); -} - -#[napi_sym] -fn napi_get_value_string_latin1( - env_ptr: *mut Env, - value: napi_value, - buf: *mut c_char, - bufsize: usize, - result: *mut usize, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, value); - - let Some(value) = - value.and_then(|v| v8::Local::<v8::String>::try_from(v).ok()) - else { - return napi_set_last_error(env_ptr, napi_string_expected); - }; - - if buf.is_null() { - check_arg!(env, result); - unsafe { - *result = value.length(); - } - } else if bufsize != 0 { - let buffer = - unsafe { std::slice::from_raw_parts_mut(buf as _, bufsize - 1) }; - let copied = value.write_one_byte( - &mut env.scope(), - buffer, - 0, - v8::WriteOptions::NO_NULL_TERMINATION, - ); - unsafe { - buf.add(copied).write(0); - } - if !result.is_null() { - unsafe { - *result = copied; - } - } - } else if !result.is_null() { - unsafe { - *result = 0; - } - } - - napi_clear_last_error(env_ptr) -} - -#[napi_sym] -fn napi_get_value_string_utf8( - env_ptr: *mut Env, - value: napi_value, - buf: *mut u8, - bufsize: usize, - result: *mut usize, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, value); - - let Some(value) = - value.and_then(|v| v8::Local::<v8::String>::try_from(v).ok()) - else { - return napi_set_last_error(env_ptr, napi_string_expected); - }; - - if buf.is_null() { - check_arg!(env, result); - unsafe { - *result = value.utf8_length(env.isolate()); - } - } else if bufsize != 0 { - 
let buffer = - unsafe { std::slice::from_raw_parts_mut(buf as _, bufsize - 1) }; - let copied = value.write_utf8( - &mut env.scope(), - buffer, - None, - v8::WriteOptions::REPLACE_INVALID_UTF8 - | v8::WriteOptions::NO_NULL_TERMINATION, - ); - unsafe { - buf.add(copied).write(0); - } - if !result.is_null() { - unsafe { - *result = copied; - } - } - } else if !result.is_null() { - unsafe { - *result = 0; - } - } - - napi_clear_last_error(env_ptr) -} - -#[napi_sym] -fn napi_get_value_string_utf16( - env_ptr: *mut Env, - value: napi_value, - buf: *mut u16, - bufsize: usize, - result: *mut usize, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, value); - - let Some(value) = - value.and_then(|v| v8::Local::<v8::String>::try_from(v).ok()) - else { - return napi_set_last_error(env_ptr, napi_string_expected); - }; - - if buf.is_null() { - check_arg!(env, result); - unsafe { - *result = value.length(); - } - } else if bufsize != 0 { - let buffer = - unsafe { std::slice::from_raw_parts_mut(buf as _, bufsize - 1) }; - let copied = value.write( - &mut env.scope(), - buffer, - 0, - v8::WriteOptions::NO_NULL_TERMINATION, - ); - unsafe { - buf.add(copied).write(0); - } - if !result.is_null() { - unsafe { - *result = copied; - } - } - } else if !result.is_null() { - unsafe { - *result = 0; - } - } - - napi_clear_last_error(env_ptr) -} - -#[napi_sym] -fn napi_coerce_to_bool<'s>( - env: &'s mut Env, - value: napi_value, - result: *mut napi_value<'s>, -) -> napi_status { - check_arg!(env, value); - check_arg!(env, result); - - let coerced = value.unwrap().to_boolean(&mut env.scope()); - - unsafe { - *result = coerced.into(); - } - - napi_ok -} - -#[napi_sym] -fn napi_coerce_to_number<'s>( - env: &'s mut Env, - value: napi_value, - result: *mut napi_value<'s>, -) -> napi_status { - check_arg!(env, value); - check_arg!(env, result); - - let Some(coerced) = value.unwrap().to_number(&mut env.scope()) else { - return napi_number_expected; - }; - - unsafe { - *result = 
coerced.into(); - } - - napi_ok -} - -#[napi_sym] -fn napi_coerce_to_object<'s>( - env: &'s mut Env, - value: napi_value, - result: *mut napi_value<'s>, -) -> napi_status { - check_arg!(env, value); - check_arg!(env, result); - - let Some(coerced) = value.unwrap().to_object(&mut env.scope()) else { - return napi_object_expected; - }; - - unsafe { - *result = coerced.into(); - } - - napi_ok -} - -#[napi_sym] -fn napi_coerce_to_string<'s>( - env: &'s mut Env, - value: napi_value, - result: *mut napi_value<'s>, -) -> napi_status { - check_arg!(env, value); - check_arg!(env, result); - - let Some(coerced) = value.unwrap().to_string(&mut env.scope()) else { - return napi_string_expected; - }; - - unsafe { - *result = coerced.into(); - } - - napi_ok -} - -#[napi_sym] -fn napi_wrap( - env: &mut Env, - js_object: napi_value, - native_object: *mut c_void, - finalize_cb: Option<napi_finalize>, - finalize_hint: *mut c_void, - result: *mut napi_ref, -) -> napi_status { - check_arg!(env, js_object); - let env_ptr = env as *mut Env; - - let Some(obj) = - js_object.and_then(|v| v8::Local::<v8::Object>::try_from(v).ok()) - else { - return napi_invalid_arg; - }; - - let napi_wrap = v8::Local::new(&mut env.scope(), &env.shared().napi_wrap); - - if obj - .has_private(&mut env.scope(), napi_wrap) - .unwrap_or(false) - { - return napi_invalid_arg; - } - - if !result.is_null() { - check_arg!(env, finalize_cb); - } - - let ownership = if result.is_null() { - ReferenceOwnership::Runtime - } else { - ReferenceOwnership::Userland - }; - let reference = Reference::new( - env_ptr, - obj.into(), - 0, - ownership, - finalize_cb, - native_object, - finalize_hint, - ); - - let reference = Reference::into_raw(reference) as *mut c_void; - - if !result.is_null() { - check_arg!(env, finalize_cb); - unsafe { - *result = reference; - } - } - - let external = v8::External::new(&mut env.scope(), reference); - assert!(obj - .set_private(&mut env.scope(), napi_wrap, external.into()) - .unwrap()); - - 
napi_ok -} - -fn unwrap( - env: &mut Env, - obj: napi_value, - result: *mut *mut c_void, - keep: bool, -) -> napi_status { - check_arg!(env, obj); - if keep { - check_arg!(env, result); - } - - let Some(obj) = obj.and_then(|v| v8::Local::<v8::Object>::try_from(v).ok()) - else { - return napi_invalid_arg; - }; - - let napi_wrap = v8::Local::new(&mut env.scope(), &env.shared().napi_wrap); - let Some(val) = obj.get_private(&mut env.scope(), napi_wrap) else { - return napi_invalid_arg; - }; - - let Ok(external) = v8::Local::<v8::External>::try_from(val) else { - return napi_invalid_arg; - }; - - let reference = external.value() as *mut Reference; - let reference = unsafe { &mut *reference }; - - if !result.is_null() { - unsafe { - *result = reference.finalize_data; - } - } - - if !keep { - assert!(obj - .delete_private(&mut env.scope(), napi_wrap) - .unwrap_or(false)); - unsafe { Reference::remove(reference) }; - } - - napi_ok -} - -#[napi_sym] -fn napi_unwrap( - env: &mut Env, - obj: napi_value, - result: *mut *mut c_void, -) -> napi_status { - unwrap(env, obj, result, true) -} - -#[napi_sym] -fn napi_remove_wrap( - env: &mut Env, - obj: napi_value, - result: *mut *mut c_void, -) -> napi_status { - unwrap(env, obj, result, false) -} - -struct ExternalWrapper { - data: *mut c_void, - type_tag: Option<napi_type_tag>, -} - -#[napi_sym] -fn napi_create_external<'s>( - env: &'s mut Env, - data: *mut c_void, - finalize_cb: Option<napi_finalize>, - finalize_hint: *mut c_void, - result: *mut napi_value<'s>, -) -> napi_status { - let env_ptr = env as *mut Env; - check_arg!(env, result); - - let wrapper = Box::new(ExternalWrapper { - data, - type_tag: None, - }); - - let wrapper = Box::into_raw(wrapper); - let external = v8::External::new(&mut env.scope(), wrapper as _); - - if let Some(finalize_cb) = finalize_cb { - Reference::into_raw(Reference::new( - env_ptr, - external.into(), - 0, - ReferenceOwnership::Runtime, - Some(finalize_cb), - data, - finalize_hint, - )); - } - - 
unsafe { - *result = external.into(); - } - - napi_ok -} - -#[napi_sym] -fn napi_type_tag_object( - env: &mut Env, - object_or_external: napi_value, - type_tag: *const napi_type_tag, -) -> napi_status { - check_arg!(env, object_or_external); - check_arg!(env, type_tag); - - let val = object_or_external.unwrap(); - - if let Ok(external) = v8::Local::<v8::External>::try_from(val) { - let wrapper_ptr = external.value() as *mut ExternalWrapper; - let wrapper = unsafe { &mut *wrapper_ptr }; - if wrapper.type_tag.is_some() { - return napi_invalid_arg; - } - wrapper.type_tag = Some(unsafe { *type_tag }); - return napi_ok; - } - - let Some(object) = val.to_object(&mut env.scope()) else { - return napi_object_expected; - }; - - let key = v8::Local::new(&mut env.scope(), &env.shared().type_tag); - - if object.has_private(&mut env.scope(), key).unwrap_or(false) { - return napi_invalid_arg; - } - - let slice = unsafe { std::slice::from_raw_parts(type_tag as *const u64, 2) }; - let Some(tag) = v8::BigInt::new_from_words(&mut env.scope(), false, slice) - else { - return napi_generic_failure; - }; - - if !object - .set_private(&mut env.scope(), key, tag.into()) - .unwrap_or(false) - { - return napi_generic_failure; - } - - napi_ok -} - -#[napi_sym] -fn napi_check_object_type_tag( - env: &mut Env, - object_or_external: napi_value, - type_tag: *const napi_type_tag, - result: *mut bool, -) -> napi_status { - check_arg!(env, object_or_external); - check_arg!(env, type_tag); - check_arg!(env, result); - - let type_tag = unsafe { *type_tag }; - - let val = object_or_external.unwrap(); - - if let Ok(external) = v8::Local::<v8::External>::try_from(val) { - let wrapper_ptr = external.value() as *mut ExternalWrapper; - let wrapper = unsafe { &mut *wrapper_ptr }; - unsafe { - *result = match wrapper.type_tag { - Some(t) => t == type_tag, - None => false, - }; - }; - return napi_ok; - } - - let Some(object) = val.to_object(&mut env.scope()) else { - return napi_object_expected; - }; - - let 
key = v8::Local::new(&mut env.scope(), &env.shared().type_tag); - - let Some(val) = object.get_private(&mut env.scope(), key) else { - return napi_generic_failure; - }; - - unsafe { - *result = false; - } - - if let Ok(bigint) = v8::Local::<v8::BigInt>::try_from(val) { - let mut words = [0u64; 2]; - let (sign, words) = bigint.to_words_array(&mut words); - if !sign { - let pass = if words.len() == 2 { - type_tag.lower == words[0] && type_tag.upper == words[1] - } else if words.len() == 1 { - type_tag.lower == words[0] && type_tag.upper == 0 - } else if words.is_empty() { - type_tag.lower == 0 && type_tag.upper == 0 - } else { - false - }; - unsafe { - *result = pass; - } - } - } - - napi_ok -} - -#[napi_sym] -fn napi_get_value_external( - env: *mut Env, - value: napi_value, - result: *mut *mut c_void, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, value); - check_arg!(env, result); - - let Some(external) = - value.and_then(|v| v8::Local::<v8::External>::try_from(v).ok()) - else { - return napi_set_last_error(env, napi_invalid_arg); - }; - - let wrapper_ptr = external.value() as *const ExternalWrapper; - let wrapper = unsafe { &*wrapper_ptr }; - - unsafe { - *result = wrapper.data; - } - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_create_reference( - env: *mut Env, - value: napi_value, - initial_refcount: u32, - result: *mut napi_ref, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, value); - check_arg!(env, result); - - let value = value.unwrap(); - - let reference = Reference::new( - env, - value, - initial_refcount, - ReferenceOwnership::Userland, - None, - std::ptr::null_mut(), - std::ptr::null_mut(), - ); - - let ptr = Reference::into_raw(reference); - - unsafe { - *result = ptr as _; - } - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_delete_reference(env: *mut Env, ref_: napi_ref) -> napi_status { - let env = check_env!(env); - check_arg!(env, ref_); - - let reference = unsafe { 
Reference::from_raw(ref_ as _) }; - - drop(reference); - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_reference_ref( - env: *mut Env, - ref_: napi_ref, - result: *mut u32, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, ref_); - - let reference = unsafe { &mut *(ref_ as *mut Reference) }; - - let count = reference.ref_(); - - if !result.is_null() { - unsafe { - *result = count; - } - } - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_reference_unref( - env: *mut Env, - ref_: napi_ref, - result: *mut u32, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, ref_); - - let reference = unsafe { &mut *(ref_ as *mut Reference) }; - - if reference.ref_count == 0 { - return napi_set_last_error(env, napi_generic_failure); - } - - let count = reference.unref(); - - if !result.is_null() { - unsafe { - *result = count; - } - } - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_get_reference_value( - env_ptr: *mut Env, - ref_: napi_ref, - result: *mut napi_value, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, ref_); - check_arg!(env, result); - - let reference = unsafe { &mut *(ref_ as *mut Reference) }; - - let value = match &reference.state { - ReferenceState::Strong(g) => Some(v8::Local::new(&mut env.scope(), g)), - ReferenceState::Weak(w) => w.to_local(&mut env.scope()), - }; - - unsafe { - *result = value.into(); - } - - napi_clear_last_error(env_ptr) -} - -#[napi_sym] -fn napi_open_handle_scope( - env: *mut Env, - _result: *mut napi_handle_scope, -) -> napi_status { - let env = check_env!(env); - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_close_handle_scope( - env: *mut Env, - _scope: napi_handle_scope, -) -> napi_status { - let env = check_env!(env); - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_open_escapable_handle_scope( - env: *mut Env, - _result: *mut napi_escapable_handle_scope, -) -> napi_status { - let env = check_env!(env); - napi_clear_last_error(env) -} - 
-#[napi_sym] -fn napi_close_escapable_handle_scope( - env: *mut Env, - _scope: napi_escapable_handle_scope, -) -> napi_status { - let env = check_env!(env); - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_escape_handle<'s>( - env: *mut Env, - _scope: napi_escapable_handle_scope, - escapee: napi_value<'s>, - result: *mut napi_value<'s>, -) -> napi_status { - let env = check_env!(env); - - unsafe { - *result = escapee; - } - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_new_instance<'s>( - env: &'s mut Env, - constructor: napi_value, - argc: usize, - argv: *const napi_value, - result: *mut napi_value<'s>, -) -> napi_status { - check_arg!(env, constructor); - if argc > 0 { - check_arg!(env, argv); - } - check_arg!(env, result); - - let Some(func) = - constructor.and_then(|v| v8::Local::<v8::Function>::try_from(v).ok()) - else { - return napi_invalid_arg; - }; - - let args = if argc > 0 { - unsafe { - std::slice::from_raw_parts(argv as *mut v8::Local<v8::Value>, argc) - } - } else { - &[] - }; - - let Some(value) = func.new_instance(&mut env.scope(), args) else { - return napi_pending_exception; - }; - - unsafe { - *result = value.into(); - } - - napi_ok -} - -#[napi_sym] -fn napi_instanceof( - env: &mut Env, - object: napi_value, - constructor: napi_value, - result: *mut bool, -) -> napi_status { - check_arg!(env, object); - check_arg!(env, result); - - let Some(ctor) = constructor.and_then(|v| v.to_object(&mut env.scope())) - else { - return napi_object_expected; - }; - - if !ctor.is_function() { - unsafe { - napi_throw_type_error( - env, - c"ERR_NAPI_CONS_FUNCTION".as_ptr(), - c"Constructor must be a function".as_ptr(), - ); - } - return napi_function_expected; - } - - let Some(res) = object.unwrap().instance_of(&mut env.scope(), ctor) else { - return napi_generic_failure; - }; - - unsafe { - *result = res; - } - - napi_ok -} - -#[napi_sym] -fn napi_is_exception_pending( - env_ptr: *mut Env, - result: *mut bool, -) -> napi_status { - let env = 
check_env!(env_ptr); - check_arg!(env, result); - - unsafe { - *result = env.last_exception.is_some(); - } - - napi_clear_last_error(env_ptr) -} - -#[napi_sym] -fn napi_get_and_clear_last_exception( - env_ptr: *mut Env, - result: *mut napi_value, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, result); - - let ex: v8::Local<v8::Value> = - if let Some(last_exception) = env.last_exception.take() { - v8::Local::new(&mut env.scope(), last_exception) - } else { - v8::undefined(&mut env.scope()).into() - }; - - unsafe { - *result = ex.into(); - } - - napi_clear_last_error(env_ptr) -} - -#[napi_sym] -fn napi_is_arraybuffer( - env: *mut Env, - value: napi_value, - result: *mut bool, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, value); - check_arg!(env, result); - - unsafe { - *result = value.unwrap().is_array_buffer(); - } - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_create_arraybuffer<'s>( - env: &'s mut Env, - len: usize, - data: *mut *mut c_void, - result: *mut napi_value<'s>, -) -> napi_status { - check_arg!(env, result); - - let buffer = v8::ArrayBuffer::new(&mut env.scope(), len); - - if !data.is_null() { - unsafe { - *data = get_array_buffer_ptr(buffer); - } - } - - unsafe { - *result = buffer.into(); - } - - napi_ok -} - -#[napi_sym] -fn napi_create_external_arraybuffer<'s>( - env: &'s mut Env, - data: *mut c_void, - byte_length: usize, - finalize_cb: napi_finalize, - finalize_hint: *mut c_void, - result: *mut napi_value<'s>, -) -> napi_status { - check_arg!(env, result); - - let store = make_external_backing_store( - env, - data, - byte_length, - std::ptr::null_mut(), - finalize_cb, - finalize_hint, - ); - - let ab = - v8::ArrayBuffer::with_backing_store(&mut env.scope(), &store.make_shared()); - let value: v8::Local<v8::Value> = ab.into(); - - unsafe { - *result = value.into(); - } - - napi_ok -} - -#[napi_sym] -fn napi_get_arraybuffer_info( - env: *mut Env, - value: napi_value, - data: *mut *mut c_void, - 
length: *mut usize, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, value); - - let Some(buf) = - value.and_then(|v| v8::Local::<v8::ArrayBuffer>::try_from(v).ok()) - else { - return napi_set_last_error(env, napi_invalid_arg); - }; - - if !data.is_null() { - unsafe { - *data = get_array_buffer_ptr(buf); - } - } - - if !length.is_null() { - unsafe { - *length = buf.byte_length(); - } - } - - napi_ok -} - -#[napi_sym] -fn napi_is_typedarray( - env: *mut Env, - value: napi_value, - result: *mut bool, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, value); - check_arg!(env, result); - - unsafe { - *result = value.unwrap().is_typed_array(); - } - - napi_ok -} - -#[napi_sym] -fn napi_create_typedarray<'s>( - env: &'s mut Env, - ty: napi_typedarray_type, - length: usize, - arraybuffer: napi_value, - byte_offset: usize, - result: *mut napi_value<'s>, -) -> napi_status { - check_arg!(env, arraybuffer); - check_arg!(env, result); - - let Some(ab) = - arraybuffer.and_then(|v| v8::Local::<v8::ArrayBuffer>::try_from(v).ok()) - else { - return napi_arraybuffer_expected; - }; - - macro_rules! 
create { - ($TypedArray:ident, $size_of_element:expr) => {{ - let soe = $size_of_element; - if soe > 1 && byte_offset % soe != 0 { - let message = v8::String::new( - &mut env.scope(), - format!( - "start offset of {} should be multiple of {}", - stringify!($TypedArray), - soe - ) - .as_str(), - ) - .unwrap(); - let exc = v8::Exception::range_error(&mut env.scope(), message); - env.scope().throw_exception(exc); - return napi_pending_exception; - } - - if length * soe + byte_offset > ab.byte_length() { - let message = - v8::String::new(&mut env.scope(), "Invalid typed array length") - .unwrap(); - let exc = v8::Exception::range_error(&mut env.scope(), message); - env.scope().throw_exception(exc); - return napi_pending_exception; - } - - let Some(ta) = - v8::$TypedArray::new(&mut env.scope(), ab, byte_offset, length) - else { - return napi_generic_failure; - }; - ta.into() - }}; - } - - let typedarray: v8::Local<v8::Value> = match ty { - napi_uint8_array => create!(Uint8Array, 1), - napi_uint8_clamped_array => create!(Uint8ClampedArray, 1), - napi_int8_array => create!(Int8Array, 1), - napi_uint16_array => create!(Uint16Array, 2), - napi_int16_array => create!(Int16Array, 2), - napi_uint32_array => create!(Uint32Array, 4), - napi_int32_array => create!(Int32Array, 4), - napi_float32_array => create!(Float32Array, 4), - napi_float64_array => create!(Float64Array, 8), - napi_bigint64_array => create!(BigInt64Array, 8), - napi_biguint64_array => create!(BigUint64Array, 8), - _ => { - return napi_invalid_arg; - } - }; - - unsafe { - *result = typedarray.into(); - } - - napi_ok -} - -#[napi_sym] -fn napi_get_typedarray_info( - env_ptr: *mut Env, - typedarray: napi_value, - type_: *mut napi_typedarray_type, - length: *mut usize, - data: *mut *mut c_void, - arraybuffer: *mut napi_value, - byte_offset: *mut usize, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, typedarray); - - let Some(array) = - typedarray.and_then(|v| 
v8::Local::<v8::TypedArray>::try_from(v).ok()) - else { - return napi_set_last_error(env_ptr, napi_invalid_arg); - }; - - if !type_.is_null() { - let tatype = if array.is_int8_array() { - napi_int8_array - } else if array.is_uint8_array() { - napi_uint8_array - } else if array.is_uint8_clamped_array() { - napi_uint8_clamped_array - } else if array.is_int16_array() { - napi_int16_array - } else if array.is_uint16_array() { - napi_uint16_array - } else if array.is_int32_array() { - napi_int32_array - } else if array.is_uint32_array() { - napi_uint32_array - } else if array.is_float32_array() { - napi_float32_array - } else if array.is_float64_array() { - napi_float64_array - } else if array.is_big_int64_array() { - napi_bigint64_array - } else if array.is_big_uint64_array() { - napi_biguint64_array - } else { - unreachable!(); - }; - - unsafe { - *type_ = tatype; - } - } - - if !length.is_null() { - unsafe { - *length = array.length(); - } - } - - if !data.is_null() { - unsafe { - *data = array.data(); - } - } - - if !arraybuffer.is_null() { - let buf = array.buffer(&mut env.scope()).unwrap(); - unsafe { - *arraybuffer = buf.into(); - } - } - - if !byte_offset.is_null() { - unsafe { - *byte_offset = array.byte_offset(); - } - } - - napi_clear_last_error(env_ptr) -} - -#[napi_sym] -fn napi_create_dataview<'s>( - env: &'s mut Env, - byte_length: usize, - arraybuffer: napi_value<'s>, - byte_offset: usize, - result: *mut napi_value<'s>, -) -> napi_status { - check_arg!(env, arraybuffer); - check_arg!(env, result); - - let Some(buffer) = - arraybuffer.and_then(|v| v8::Local::<v8::ArrayBuffer>::try_from(v).ok()) - else { - return napi_invalid_arg; - }; - - if byte_length + byte_offset > buffer.byte_length() { - unsafe { - return napi_throw_range_error( - env, - c"ERR_NAPI_INVALID_DATAVIEW_ARGS".as_ptr(), - c"byte_offset + byte_length should be less than or equal to the size in bytes of the array passed in".as_ptr(), - ); - } - } - - let dataview = - v8::DataView::new(&mut 
env.scope(), buffer, byte_offset, byte_length); - - unsafe { - *result = dataview.into(); - } - - napi_ok -} - -#[napi_sym] -fn napi_is_dataview( - env: *mut Env, - value: napi_value, - result: *mut bool, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, value); - check_arg!(env, result); - - unsafe { - *result = value.unwrap().is_data_view(); - } - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_get_dataview_info( - env_ptr: *mut Env, - dataview: napi_value, - byte_length: *mut usize, - data: *mut *mut c_void, - arraybuffer: *mut napi_value, - byte_offset: *mut usize, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, dataview); - - let Some(array) = - dataview.and_then(|v| v8::Local::<v8::DataView>::try_from(v).ok()) - else { - return napi_invalid_arg; - }; - - if !byte_length.is_null() { - unsafe { - *byte_length = array.byte_length(); - } - } - - if !arraybuffer.is_null() { - let Some(buffer) = array.buffer(&mut env.scope()) else { - return napi_generic_failure; - }; - - unsafe { - *arraybuffer = buffer.into(); - } - } - - if !data.is_null() { - unsafe { - *data = array.data(); - } - } - - if !byte_offset.is_null() { - unsafe { - *byte_offset = array.byte_offset(); - } - } - - napi_clear_last_error(env_ptr) -} - -#[napi_sym] -fn napi_get_version(env: *mut Env, result: *mut u32) -> napi_status { - let env = check_env!(env); - check_arg!(env, result); - - unsafe { - *result = NAPI_VERSION; - } - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_create_promise<'s>( - env: &'s mut Env, - deferred: *mut napi_deferred, - promise: *mut napi_value<'s>, -) -> napi_status { - check_arg!(env, deferred); - check_arg!(env, promise); - - let resolver = v8::PromiseResolver::new(&mut env.scope()).unwrap(); - - let global = v8::Global::new(&mut env.scope(), resolver); - let global_ptr = global.into_raw().as_ptr() as napi_deferred; - - let p = resolver.get_promise(&mut env.scope()); - - unsafe { - *deferred = global_ptr; - } - - 
unsafe { - *promise = p.into(); - } - - napi_ok -} - -#[napi_sym] -fn napi_resolve_deferred( - env: &mut Env, - deferred: napi_deferred, - result: napi_value, -) -> napi_status { - check_arg!(env, result); - check_arg!(env, deferred); - - // Make sure microtasks don't run and call back into JS - env - .scope() - .set_microtasks_policy(v8::MicrotasksPolicy::Explicit); - - let deferred_ptr = - unsafe { NonNull::new_unchecked(deferred as *mut v8::PromiseResolver) }; - let global = unsafe { v8::Global::from_raw(env.isolate(), deferred_ptr) }; - let resolver = v8::Local::new(&mut env.scope(), global); - - let success = resolver - .resolve(&mut env.scope(), result.unwrap()) - .unwrap_or(false); - - // Restore policy - env - .scope() - .set_microtasks_policy(v8::MicrotasksPolicy::Auto); - - if success { - napi_ok - } else { - napi_generic_failure - } -} - -#[napi_sym] -fn napi_reject_deferred( - env: &mut Env, - deferred: napi_deferred, - result: napi_value, -) -> napi_status { - check_arg!(env, result); - check_arg!(env, deferred); - - let deferred_ptr = - unsafe { NonNull::new_unchecked(deferred as *mut v8::PromiseResolver) }; - let global = unsafe { v8::Global::from_raw(env.isolate(), deferred_ptr) }; - let resolver = v8::Local::new(&mut env.scope(), global); - - if !resolver - .reject(&mut env.scope(), result.unwrap()) - .unwrap_or(false) - { - return napi_generic_failure; - } - - napi_ok -} - -#[napi_sym] -fn napi_is_promise( - env: *mut Env, - value: napi_value, - is_promise: *mut bool, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, value); - check_arg!(env, is_promise); - - unsafe { - *is_promise = value.unwrap().is_promise(); - } - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_create_date<'s>( - env: &'s mut Env, - time: f64, - result: *mut napi_value<'s>, -) -> napi_status { - check_arg!(env, result); - - let Some(date) = v8::Date::new(&mut env.scope(), time) else { - return napi_generic_failure; - }; - - unsafe { - *result = 
date.into(); - } - - napi_ok -} - -#[napi_sym] -fn napi_is_date( - env: *mut Env, - value: napi_value, - is_date: *mut bool, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, value); - check_arg!(env, is_date); - - unsafe { - *is_date = value.unwrap().is_date(); - } - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_get_date_value( - env: &mut Env, - value: napi_value, - result: *mut f64, -) -> napi_status { - check_arg!(env, result); - - let Some(date) = value.and_then(|v| v8::Local::<v8::Date>::try_from(v).ok()) - else { - return napi_date_expected; - }; - - unsafe { - *result = date.value_of(); - } - - napi_ok -} - -#[napi_sym] -fn napi_run_script<'s>( - env: &'s mut Env, - script: napi_value, - result: *mut napi_value<'s>, -) -> napi_status { - check_arg!(env, script); - check_arg!(env, result); - - let Some(script) = - script.and_then(|v| v8::Local::<v8::String>::try_from(v).ok()) - else { - return napi_string_expected; - }; - - let Some(script) = v8::Script::compile(&mut env.scope(), script, None) else { - return napi_generic_failure; - }; - - let Some(rv) = script.run(&mut env.scope()) else { - return napi_generic_failure; - }; - - unsafe { - *result = rv.into(); - } - - napi_ok -} - -#[napi_sym] -fn napi_add_finalizer( - env_ptr: *mut Env, - value: napi_value, - finalize_data: *mut c_void, - finalize_cb: Option<napi_finalize>, - finalize_hint: *mut c_void, - result: *mut napi_ref, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, value); - check_arg!(env, finalize_cb); - - let Some(value) = - value.and_then(|v| v8::Local::<v8::Object>::try_from(v).ok()) - else { - return napi_set_last_error(env, napi_invalid_arg); - }; - - let ownership = if result.is_null() { - ReferenceOwnership::Runtime - } else { - ReferenceOwnership::Userland - }; - let reference = Reference::new( - env, - value.into(), - 0, - ownership, - finalize_cb, - finalize_data, - finalize_hint, - ); - - if !result.is_null() { - unsafe { - *result = 
Reference::into_raw(reference) as _; - } - } - - napi_clear_last_error(env_ptr) -} - -#[napi_sym] -fn node_api_post_finalizer( - env: *mut Env, - _finalize_cb: napi_finalize, - _finalize_data: *mut c_void, - _finalize_hint: *mut c_void, -) -> napi_status { - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_adjust_external_memory( - env: *mut Env, - change_in_bytes: i64, - adjusted_value: *mut i64, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, adjusted_value); - - unsafe { - *adjusted_value = env - .isolate() - .adjust_amount_of_external_allocated_memory(change_in_bytes); - } - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_set_instance_data( - env: *mut Env, - data: *mut c_void, - finalize_cb: Option<napi_finalize>, - finalize_hint: *mut c_void, -) -> napi_status { - let env = check_env!(env); - - env.shared_mut().instance_data = Some(InstanceData { - data, - finalize_cb, - finalize_hint, - }); - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_get_instance_data( - env: *mut Env, - data: *mut *mut c_void, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, data); - - let instance_data = match &env.shared().instance_data { - Some(v) => v.data, - None => std::ptr::null_mut(), - }; - - unsafe { *data = instance_data }; - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_detach_arraybuffer(env: *mut Env, value: napi_value) -> napi_status { - let env = check_env!(env); - check_arg!(env, value); - - let Some(ab) = - value.and_then(|v| v8::Local::<v8::ArrayBuffer>::try_from(v).ok()) - else { - return napi_set_last_error(env, napi_arraybuffer_expected); - }; - - if !ab.is_detachable() { - return napi_set_last_error(env, napi_detachable_arraybuffer_expected); - } - - // Expected to crash for None. 
- ab.detach(None).unwrap(); - - napi_clear_last_error(env); - napi_ok -} - -#[napi_sym] -fn napi_is_detached_arraybuffer( - env_ptr: *mut Env, - arraybuffer: napi_value, - result: *mut bool, -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, arraybuffer); - check_arg!(env, result); - - let is_detached = match arraybuffer - .and_then(|v| v8::Local::<v8::ArrayBuffer>::try_from(v).ok()) - { - Some(ab) => ab.was_detached(), - None => false, - }; - - unsafe { - *result = is_detached; - } - - napi_clear_last_error(env) -} diff --git a/cli/napi/mod.rs b/cli/napi/mod.rs deleted file mode 100644 index 811efb1ec..000000000 --- a/cli/napi/mod.rs +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -#![allow(unused_mut)] -#![allow(non_camel_case_types)] -#![allow(clippy::undocumented_unsafe_blocks)] - -//! Symbols to be exported are now defined in this JSON file. -//! The `#[napi_sym]` macro checks for missing entries and panics. -//! -//! `./tools/napi/generate_symbols_list.js` is used to generate the LINK `cli/exports.def` on Windows, -//! which is also checked into git. -//! -//! To add a new napi function: -//! 1. Place `#[napi_sym]` on top of your implementation. -//! 2. Add the function's identifier to this JSON list. -//! 3. Finally, run `tools/napi/generate_symbols_list.js` to update `cli/napi/generated_symbol_exports_list_*.def`. - -pub mod js_native_api; -pub mod node_api; -pub mod util; -pub mod uv; diff --git a/cli/napi/node_api.rs b/cli/napi/node_api.rs deleted file mode 100644 index 2efb71c26..000000000 --- a/cli/napi/node_api.rs +++ /dev/null @@ -1,1004 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
- -#![deny(unsafe_op_in_unsafe_fn)] - -use super::util::get_array_buffer_ptr; -use super::util::make_external_backing_store; -use super::util::napi_clear_last_error; -use super::util::napi_set_last_error; -use super::util::SendPtr; -use crate::check_arg; -use crate::check_env; -use deno_core::parking_lot::Condvar; -use deno_core::parking_lot::Mutex; -use deno_core::V8CrossThreadTaskSpawner; -use deno_runtime::deno_napi::*; -use napi_sym::napi_sym; -use std::sync::atomic::AtomicBool; -use std::sync::atomic::AtomicU8; -use std::sync::atomic::AtomicUsize; -use std::sync::atomic::Ordering; -use std::sync::Arc; - -#[napi_sym] -fn napi_module_register(module: *const NapiModule) -> napi_status { - MODULE_TO_REGISTER.with(|cell| { - let mut slot = cell.borrow_mut(); - let prev = slot.replace(module); - assert!(prev.is_none()); - }); - napi_ok -} - -#[napi_sym] -fn napi_add_env_cleanup_hook( - env: *mut Env, - fun: Option<napi_cleanup_hook>, - arg: *mut c_void, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, fun); - - let fun = fun.unwrap(); - - env.add_cleanup_hook(fun, arg); - - napi_ok -} - -#[napi_sym] -fn napi_remove_env_cleanup_hook( - env: *mut Env, - fun: Option<napi_cleanup_hook>, - arg: *mut c_void, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, fun); - - let fun = fun.unwrap(); - - env.remove_cleanup_hook(fun, arg); - - napi_ok -} - -struct AsyncCleanupHandle { - env: *mut Env, - hook: napi_async_cleanup_hook, - data: *mut c_void, -} - -unsafe extern "C" fn async_cleanup_handler(arg: *mut c_void) { - unsafe { - let handle = Box::<AsyncCleanupHandle>::from_raw(arg as _); - (handle.hook)(arg, handle.data); - } -} - -#[napi_sym] -fn napi_add_async_cleanup_hook( - env: *mut Env, - hook: Option<napi_async_cleanup_hook>, - arg: *mut c_void, - remove_handle: *mut napi_async_cleanup_hook_handle, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, hook); - - let hook = hook.unwrap(); - - let handle = 
Box::into_raw(Box::new(AsyncCleanupHandle { - env, - hook, - data: arg, - })) as *mut c_void; - - env.add_cleanup_hook(async_cleanup_handler, handle); - - if !remove_handle.is_null() { - unsafe { - *remove_handle = handle; - } - } - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_remove_async_cleanup_hook( - remove_handle: napi_async_cleanup_hook_handle, -) -> napi_status { - if remove_handle.is_null() { - return napi_invalid_arg; - } - - let handle = - unsafe { Box::<AsyncCleanupHandle>::from_raw(remove_handle as _) }; - - let env = unsafe { &mut *handle.env }; - - env.remove_cleanup_hook(async_cleanup_handler, remove_handle); - - napi_ok -} - -#[napi_sym] -fn napi_fatal_exception(env: &mut Env, err: napi_value) -> napi_status { - check_arg!(env, err); - - let report_error = v8::Local::new(&mut env.scope(), &env.report_error); - - let this = v8::undefined(&mut env.scope()); - if report_error - .call(&mut env.scope(), this.into(), &[err.unwrap()]) - .is_none() - { - return napi_generic_failure; - } - - napi_ok -} - -#[napi_sym] -#[allow(clippy::print_stderr)] -fn napi_fatal_error( - location: *const c_char, - location_len: usize, - message: *const c_char, - message_len: usize, -) -> napi_status { - let location = if location.is_null() { - None - } else { - unsafe { - Some(if location_len == NAPI_AUTO_LENGTH { - std::ffi::CStr::from_ptr(location).to_str().unwrap() - } else { - let slice = std::slice::from_raw_parts( - location as *const u8, - location_len as usize, - ); - std::str::from_utf8(slice).unwrap() - }) - } - }; - - let message = if message_len == NAPI_AUTO_LENGTH { - unsafe { std::ffi::CStr::from_ptr(message).to_str().unwrap() } - } else { - let slice = unsafe { - std::slice::from_raw_parts(message as *const u8, message_len as usize) - }; - std::str::from_utf8(slice).unwrap() - }; - - if let Some(location) = location { - eprintln!("NODE API FATAL ERROR: {} {}", location, message); - } else { - eprintln!("NODE API FATAL ERROR: {}", message); - } - - 
std::process::abort(); -} - -#[napi_sym] -fn napi_open_callback_scope( - env: *mut Env, - _resource_object: napi_value, - _context: napi_value, - result: *mut napi_callback_scope, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, result); - - // we open scope automatically when it's needed - unsafe { - *result = std::ptr::null_mut(); - } - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_close_callback_scope( - env: *mut Env, - scope: napi_callback_scope, -) -> napi_status { - let env = check_env!(env); - // we close scope automatically when it's needed - assert!(scope.is_null()); - napi_clear_last_error(env) -} - -// NOTE: we don't support "async_hooks::AsyncContext" so these APIs are noops. -#[napi_sym] -fn napi_async_init( - env: *mut Env, - _async_resource: napi_value, - _async_resource_name: napi_value, - result: *mut napi_async_context, -) -> napi_status { - let env = check_env!(env); - unsafe { - *result = ptr::null_mut(); - } - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_async_destroy( - env: *mut Env, - async_context: napi_async_context, -) -> napi_status { - let env = check_env!(env); - assert!(async_context.is_null()); - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_make_callback<'s>( - env: &'s mut Env, - _async_context: napi_async_context, - recv: napi_value, - func: napi_value, - argc: usize, - argv: *const napi_value<'s>, - result: *mut napi_value<'s>, -) -> napi_status { - check_arg!(env, recv); - if argc > 0 { - check_arg!(env, argv); - } - - let Some(recv) = recv.and_then(|v| v.to_object(&mut env.scope())) else { - return napi_object_expected; - }; - - let Some(func) = - func.and_then(|v| v8::Local::<v8::Function>::try_from(v).ok()) - else { - return napi_function_expected; - }; - - let args = if argc > 0 { - unsafe { - std::slice::from_raw_parts(argv as *mut v8::Local<v8::Value>, argc) - } - } else { - &[] - }; - - // TODO: async_context - - let Some(v) = func.call(&mut env.scope(), recv.into(), args) 
else { - return napi_generic_failure; - }; - - unsafe { - *result = v.into(); - } - - napi_ok -} - -#[napi_sym] -fn napi_create_buffer<'s>( - env: &'s mut Env, - length: usize, - data: *mut *mut c_void, - result: *mut napi_value<'s>, -) -> napi_status { - check_arg!(env, result); - - let ab = v8::ArrayBuffer::new(&mut env.scope(), length); - - let buffer_constructor = - v8::Local::new(&mut env.scope(), &env.buffer_constructor); - let Some(buffer) = - buffer_constructor.new_instance(&mut env.scope(), &[ab.into()]) - else { - return napi_generic_failure; - }; - - if !data.is_null() { - unsafe { - *data = get_array_buffer_ptr(ab); - } - } - - unsafe { - *result = buffer.into(); - } - - napi_ok -} - -#[napi_sym] -fn napi_create_external_buffer<'s>( - env: &'s mut Env, - length: usize, - data: *mut c_void, - finalize_cb: napi_finalize, - finalize_hint: *mut c_void, - result: *mut napi_value<'s>, -) -> napi_status { - check_arg!(env, result); - - let store = make_external_backing_store( - env, - data, - length, - ptr::null_mut(), - finalize_cb, - finalize_hint, - ); - - let ab = - v8::ArrayBuffer::with_backing_store(&mut env.scope(), &store.make_shared()); - - let buffer_constructor = - v8::Local::new(&mut env.scope(), &env.buffer_constructor); - let Some(buffer) = - buffer_constructor.new_instance(&mut env.scope(), &[ab.into()]) - else { - return napi_generic_failure; - }; - - unsafe { - *result = buffer.into(); - } - - napi_ok -} - -#[napi_sym] -fn napi_create_buffer_copy<'s>( - env: &'s mut Env, - length: usize, - data: *mut c_void, - result_data: *mut *mut c_void, - result: *mut napi_value<'s>, -) -> napi_status { - check_arg!(env, result); - - let ab = v8::ArrayBuffer::new(&mut env.scope(), length); - - let buffer_constructor = - v8::Local::new(&mut env.scope(), &env.buffer_constructor); - let Some(buffer) = - buffer_constructor.new_instance(&mut env.scope(), &[ab.into()]) - else { - return napi_generic_failure; - }; - - let ptr = get_array_buffer_ptr(ab); - unsafe 
{ - std::ptr::copy(data, ptr, length); - } - - if !result_data.is_null() { - unsafe { - *result_data = ptr; - } - } - - unsafe { - *result = buffer.into(); - } - - napi_ok -} - -#[napi_sym] -fn napi_is_buffer( - env: *mut Env, - value: napi_value, - result: *mut bool, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, value); - check_arg!(env, result); - - let buffer_constructor = - v8::Local::new(&mut env.scope(), &env.buffer_constructor); - - let Some(is_buffer) = value - .unwrap() - .instance_of(&mut env.scope(), buffer_constructor.into()) - else { - return napi_set_last_error(env, napi_generic_failure); - }; - - unsafe { - *result = is_buffer; - } - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_get_buffer_info( - env: *mut Env, - value: napi_value, - data: *mut *mut c_void, - length: *mut usize, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, value); - - // NB: Any TypedArray instance seems to be accepted by this function - // in Node.js. 
- let Some(ta) = - value.and_then(|v| v8::Local::<v8::TypedArray>::try_from(v).ok()) - else { - return napi_set_last_error(env, napi_invalid_arg); - }; - - if !data.is_null() { - unsafe { - *data = ta.data(); - } - } - - if !length.is_null() { - unsafe { - *length = ta.byte_length(); - } - } - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_get_node_version( - env: *mut Env, - result: *mut *const napi_node_version, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, result); - - const NODE_VERSION: napi_node_version = napi_node_version { - major: 20, - minor: 11, - patch: 1, - release: c"Deno".as_ptr(), - }; - - unsafe { - *result = &NODE_VERSION as *const napi_node_version; - } - - napi_clear_last_error(env) -} - -struct AsyncWork { - state: AtomicU8, - env: *mut Env, - _async_resource: v8::Global<v8::Object>, - _async_resource_name: String, - execute: napi_async_execute_callback, - complete: Option<napi_async_complete_callback>, - data: *mut c_void, -} - -impl AsyncWork { - const IDLE: u8 = 0; - const QUEUED: u8 = 1; - const RUNNING: u8 = 2; -} - -#[napi_sym] -fn napi_create_async_work( - env: *mut Env, - async_resource: napi_value, - async_resource_name: napi_value, - execute: Option<napi_async_execute_callback>, - complete: Option<napi_async_complete_callback>, - data: *mut c_void, - result: *mut napi_async_work, -) -> napi_status { - let env_ptr = env; - let env = check_env!(env); - check_arg!(env, execute); - check_arg!(env, result); - - let resource = if let Some(v) = *async_resource { - let Some(resource) = v.to_object(&mut env.scope()) else { - return napi_set_last_error(env, napi_object_expected); - }; - resource - } else { - v8::Object::new(&mut env.scope()) - }; - - let Some(resource_name) = - async_resource_name.and_then(|v| v.to_string(&mut env.scope())) - else { - return napi_set_last_error(env, napi_string_expected); - }; - - let resource_name = resource_name.to_rust_string_lossy(&mut env.scope()); - - let work = 
Box::new(AsyncWork { - state: AtomicU8::new(AsyncWork::IDLE), - env: env_ptr, - _async_resource: v8::Global::new(&mut env.scope(), resource), - _async_resource_name: resource_name, - execute: execute.unwrap(), - complete, - data, - }); - - unsafe { - *result = Box::into_raw(work) as _; - } - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_delete_async_work(env: *mut Env, work: napi_async_work) -> napi_status { - let env = check_env!(env); - check_arg!(env, work); - - drop(unsafe { Box::<AsyncWork>::from_raw(work as _) }); - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_get_uv_event_loop( - env_ptr: *mut Env, - uv_loop: *mut *mut (), -) -> napi_status { - let env = check_env!(env_ptr); - check_arg!(env, uv_loop); - unsafe { - *uv_loop = env_ptr.cast(); - } - 0 -} - -#[napi_sym] -fn napi_queue_async_work(env: *mut Env, work: napi_async_work) -> napi_status { - let env = check_env!(env); - check_arg!(env, work); - - let work = unsafe { &*(work as *mut AsyncWork) }; - - let result = - work - .state - .fetch_update(Ordering::SeqCst, Ordering::SeqCst, |state| { - // allow queue if idle or if running, but not if already queued. 
- if state == AsyncWork::IDLE || state == AsyncWork::RUNNING { - Some(AsyncWork::QUEUED) - } else { - None - } - }); - - if result.is_err() { - return napi_clear_last_error(env); - } - - let work = SendPtr(work); - - env.add_async_work(move || { - let work = work.take(); - let work = unsafe { &*work }; - - let state = work.state.compare_exchange( - AsyncWork::QUEUED, - AsyncWork::RUNNING, - Ordering::SeqCst, - Ordering::SeqCst, - ); - - if state.is_ok() { - unsafe { - (work.execute)(work.env as _, work.data); - } - - // reset back to idle if its still marked as running - let _ = work.state.compare_exchange( - AsyncWork::RUNNING, - AsyncWork::IDLE, - Ordering::SeqCst, - Ordering::Relaxed, - ); - } - - if let Some(complete) = work.complete { - let status = if state.is_ok() { - napi_ok - } else if state == Err(AsyncWork::IDLE) { - napi_cancelled - } else { - napi_generic_failure - }; - - unsafe { - complete(work.env as _, status, work.data); - } - } - - // `complete` probably deletes this `work`, so don't use it here. 
- }); - - napi_clear_last_error(env) -} - -#[napi_sym] -fn napi_cancel_async_work(env: *mut Env, work: napi_async_work) -> napi_status { - let env = check_env!(env); - check_arg!(env, work); - - let work = unsafe { &*(work as *mut AsyncWork) }; - - let _ = work.state.compare_exchange( - AsyncWork::QUEUED, - AsyncWork::IDLE, - Ordering::SeqCst, - Ordering::Relaxed, - ); - - napi_clear_last_error(env) -} - -extern "C" fn default_call_js_cb( - env: napi_env, - js_callback: napi_value, - _context: *mut c_void, - _data: *mut c_void, -) { - if let Some(js_callback) = *js_callback { - if let Ok(js_callback) = v8::Local::<v8::Function>::try_from(js_callback) { - let env = unsafe { &mut *(env as *mut Env) }; - let scope = &mut env.scope(); - let recv = v8::undefined(scope); - js_callback.call(scope, recv.into(), &[]); - } - } -} - -struct TsFn { - env: *mut Env, - func: Option<v8::Global<v8::Function>>, - max_queue_size: usize, - queue_size: Mutex<usize>, - queue_cond: Condvar, - thread_count: AtomicUsize, - thread_finalize_data: *mut c_void, - thread_finalize_cb: Option<napi_finalize>, - context: *mut c_void, - call_js_cb: napi_threadsafe_function_call_js, - _resource: v8::Global<v8::Object>, - _resource_name: String, - is_closing: AtomicBool, - is_closed: Arc<AtomicBool>, - sender: V8CrossThreadTaskSpawner, - is_ref: AtomicBool, -} - -impl Drop for TsFn { - fn drop(&mut self) { - assert!(self - .is_closed - .compare_exchange(false, true, Ordering::Relaxed, Ordering::Relaxed) - .is_ok()); - - self.unref(); - - if let Some(finalizer) = self.thread_finalize_cb { - unsafe { - (finalizer)(self.env as _, self.thread_finalize_data, ptr::null_mut()); - } - } - } -} - -impl TsFn { - pub fn acquire(&self) -> napi_status { - if self.is_closing.load(Ordering::SeqCst) { - return napi_closing; - } - self.thread_count.fetch_add(1, Ordering::Relaxed); - napi_ok - } - - pub fn release( - tsfn: *mut TsFn, - mode: napi_threadsafe_function_release_mode, - ) -> napi_status { - let tsfn = 
unsafe { &mut *tsfn }; - - let result = tsfn.thread_count.fetch_update( - Ordering::Relaxed, - Ordering::Relaxed, - |x| { - if x == 0 { - None - } else { - Some(x - 1) - } - }, - ); - - if result.is_err() { - return napi_invalid_arg; - } - - if (result == Ok(1) || mode == napi_tsfn_abort) - && tsfn - .is_closing - .compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst) - .is_ok() - { - tsfn.queue_cond.notify_all(); - let tsfnptr = SendPtr(tsfn); - // drop must be queued in order to preserve ordering consistent - // with Node.js and so that the finalizer runs on the main thread. - tsfn.sender.spawn(move |_| { - let tsfn = unsafe { Box::from_raw(tsfnptr.take() as *mut TsFn) }; - drop(tsfn); - }); - } - - napi_ok - } - - pub fn ref_(&self) -> napi_status { - if self - .is_ref - .compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst) - .is_ok() - { - let env = unsafe { &mut *self.env }; - env.threadsafe_function_ref(); - } - napi_ok - } - - pub fn unref(&self) -> napi_status { - if self - .is_ref - .compare_exchange(true, false, Ordering::SeqCst, Ordering::SeqCst) - .is_ok() - { - let env = unsafe { &mut *self.env }; - env.threadsafe_function_unref(); - } - - napi_ok - } - - pub fn call( - &self, - data: *mut c_void, - mode: napi_threadsafe_function_call_mode, - ) -> napi_status { - if self.is_closing.load(Ordering::SeqCst) { - return napi_closing; - } - - if self.max_queue_size > 0 { - let mut queue_size = self.queue_size.lock(); - while *queue_size >= self.max_queue_size { - if mode == napi_tsfn_blocking { - self.queue_cond.wait(&mut queue_size); - - if self.is_closing.load(Ordering::SeqCst) { - return napi_closing; - } - } else { - return napi_queue_full; - } - } - *queue_size += 1; - } - - let is_closed = self.is_closed.clone(); - let tsfn = SendPtr(self); - let data = SendPtr(data); - let context = SendPtr(self.context); - let call_js_cb = self.call_js_cb; - - self.sender.spawn(move |scope: &mut v8::HandleScope| { - let data = data.take(); - 
- // if is_closed then tsfn is freed, don't read from it. - if is_closed.load(Ordering::Relaxed) { - unsafe { - call_js_cb( - std::ptr::null_mut(), - None::<v8::Local<v8::Value>>.into(), - context.take() as _, - data as _, - ); - } - } else { - let tsfn = tsfn.take(); - - let tsfn = unsafe { &*tsfn }; - - if tsfn.max_queue_size > 0 { - let mut queue_size = tsfn.queue_size.lock(); - let size = *queue_size; - *queue_size -= 1; - if size == tsfn.max_queue_size { - tsfn.queue_cond.notify_one(); - } - } - - let func = tsfn.func.as_ref().map(|f| v8::Local::new(scope, f)); - - unsafe { - (tsfn.call_js_cb)( - tsfn.env as _, - func.into(), - tsfn.context, - data as _, - ); - } - } - }); - - napi_ok - } -} - -#[napi_sym] -#[allow(clippy::too_many_arguments)] -fn napi_create_threadsafe_function( - env: *mut Env, - func: napi_value, - async_resource: napi_value, - async_resource_name: napi_value, - max_queue_size: usize, - initial_thread_count: usize, - thread_finalize_data: *mut c_void, - thread_finalize_cb: Option<napi_finalize>, - context: *mut c_void, - call_js_cb: Option<napi_threadsafe_function_call_js>, - result: *mut napi_threadsafe_function, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, async_resource_name); - if initial_thread_count == 0 { - return napi_set_last_error(env, napi_invalid_arg); - } - check_arg!(env, result); - - let func = if let Some(value) = *func { - let Ok(func) = v8::Local::<v8::Function>::try_from(value) else { - return napi_set_last_error(env, napi_function_expected); - }; - Some(v8::Global::new(&mut env.scope(), func)) - } else { - check_arg!(env, call_js_cb); - None - }; - - let resource = if let Some(v) = *async_resource { - let Some(resource) = v.to_object(&mut env.scope()) else { - return napi_set_last_error(env, napi_object_expected); - }; - resource - } else { - v8::Object::new(&mut env.scope()) - }; - let resource = v8::Global::new(&mut env.scope(), resource); - - let Some(resource_name) = - 
async_resource_name.and_then(|v| v.to_string(&mut env.scope())) - else { - return napi_set_last_error(env, napi_string_expected); - }; - let resource_name = resource_name.to_rust_string_lossy(&mut env.scope()); - - let mut tsfn = Box::new(TsFn { - env, - func, - max_queue_size, - queue_size: Mutex::new(0), - queue_cond: Condvar::new(), - thread_count: AtomicUsize::new(initial_thread_count), - thread_finalize_data, - thread_finalize_cb, - context, - call_js_cb: call_js_cb.unwrap_or(default_call_js_cb), - _resource: resource, - _resource_name: resource_name, - is_closing: AtomicBool::new(false), - is_closed: Arc::new(AtomicBool::new(false)), - is_ref: AtomicBool::new(false), - sender: env.async_work_sender.clone(), - }); - - tsfn.ref_(); - - unsafe { - *result = Box::into_raw(tsfn) as _; - } - - napi_clear_last_error(env) -} - -/// Maybe called from any thread. -#[napi_sym] -fn napi_get_threadsafe_function_context( - func: napi_threadsafe_function, - result: *mut *const c_void, -) -> napi_status { - assert!(!func.is_null()); - let tsfn = unsafe { &*(func as *const TsFn) }; - unsafe { - *result = tsfn.context; - } - napi_ok -} - -#[napi_sym] -fn napi_call_threadsafe_function( - func: napi_threadsafe_function, - data: *mut c_void, - is_blocking: napi_threadsafe_function_call_mode, -) -> napi_status { - assert!(!func.is_null()); - let tsfn = unsafe { &*(func as *mut TsFn) }; - tsfn.call(data, is_blocking) -} - -#[napi_sym] -fn napi_acquire_threadsafe_function( - tsfn: napi_threadsafe_function, -) -> napi_status { - assert!(!tsfn.is_null()); - let tsfn = unsafe { &*(tsfn as *mut TsFn) }; - tsfn.acquire() -} - -#[napi_sym] -fn napi_release_threadsafe_function( - tsfn: napi_threadsafe_function, - mode: napi_threadsafe_function_release_mode, -) -> napi_status { - assert!(!tsfn.is_null()); - TsFn::release(tsfn as _, mode) -} - -#[napi_sym] -fn napi_unref_threadsafe_function( - _env: &mut Env, - func: napi_threadsafe_function, -) -> napi_status { - assert!(!func.is_null()); - 
let tsfn = unsafe { &*(func as *mut TsFn) }; - tsfn.unref() -} - -#[napi_sym] -fn napi_ref_threadsafe_function( - _env: &mut Env, - func: napi_threadsafe_function, -) -> napi_status { - assert!(!func.is_null()); - let tsfn = unsafe { &*(func as *mut TsFn) }; - tsfn.ref_() -} - -#[napi_sym] -fn node_api_get_module_file_name( - env: *mut Env, - result: *mut *const c_char, -) -> napi_status { - let env = check_env!(env); - check_arg!(env, result); - - unsafe { - *result = env.shared().filename.as_ptr() as _; - } - - napi_clear_last_error(env) -} diff --git a/cli/napi/sym/Cargo.toml b/cli/napi/sym/Cargo.toml deleted file mode 100644 index fef749fc4..000000000 --- a/cli/napi/sym/Cargo.toml +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -[package] -name = "napi_sym" -version = "0.101.0" -authors.workspace = true -edition.workspace = true -license.workspace = true -readme = "README.md" -repository.workspace = true -description = "proc macro for writing N-API symbols" - -[lib] -path = "./lib.rs" -proc-macro = true - -[dependencies] -quote.workspace = true -serde.workspace = true -serde_json.workspace = true -syn.workspace = true diff --git a/cli/napi/sym/README.md b/cli/napi/sym/README.md deleted file mode 100644 index de08a8e17..000000000 --- a/cli/napi/sym/README.md +++ /dev/null @@ -1,37 +0,0 @@ -# napi_sym - -A proc_macro for Deno's Node-API implementation. It does the following things: - -- Marks the symbol as `#[no_mangle]` and rewrites it as `pub extern "C" $name`. -- Asserts that the function symbol is present in - [`symbol_exports.json`](./symbol_exports.json). -- Maps `deno_napi::Result` to raw `napi_result`. 
- -```rust -use deno_napi::napi_value; -use deno_napi::Env; -use deno_napi::Error; -use deno_napi::Result; - -#[napi_sym::napi_sym] -fn napi_get_boolean( - env: *mut Env, - value: bool, - result: *mut napi_value, -) -> Result { - let _env: &mut Env = env.as_mut().ok_or(Error::InvalidArg)?; - // *result = ... - Ok(()) -} -``` - -### `symbol_exports.json` - -A file containing the symbols that need to be put into the executable's dynamic -symbol table at link-time. - -This is done using `/DEF:` on Windows, `-exported_symbol,_` on macOS and -`--export-dynamic-symbol=` on Linux. See [`cli/build.rs`](../build.rs). - -On Windows, you need to generate the `.def` file by running -[`tools/napi/generate_symbols_lists.js`](../../tools/napi/generate_symbols_lists.js). diff --git a/cli/napi/sym/lib.rs b/cli/napi/sym/lib.rs deleted file mode 100644 index e2826306b..000000000 --- a/cli/napi/sym/lib.rs +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -use proc_macro::TokenStream; -use quote::quote; -use serde::Deserialize; - -static NAPI_EXPORTS: &str = include_str!("./symbol_exports.json"); - -#[derive(Deserialize)] -struct SymbolExports { - pub symbols: Vec<String>, -} - -#[proc_macro_attribute] -pub fn napi_sym(_attr: TokenStream, item: TokenStream) -> TokenStream { - let func = syn::parse::<syn::ItemFn>(item).expect("expected a function"); - - let exports: SymbolExports = - serde_json::from_str(NAPI_EXPORTS).expect("failed to parse exports"); - let name = &func.sig.ident; - assert!( - exports.symbols.contains(&name.to_string()), - "cli/napi/sym/symbol_exports.json is out of sync!" - ); - - TokenStream::from(quote! { - crate::napi_wrap! 
{ - #func - } - }) -} diff --git a/cli/napi/sym/symbol_exports.json b/cli/napi/sym/symbol_exports.json deleted file mode 100644 index 00946b8ed..000000000 --- a/cli/napi/sym/symbol_exports.json +++ /dev/null @@ -1,164 +0,0 @@ -{ - "symbols": [ - "node_api_create_syntax_error", - "napi_make_callback", - "napi_has_named_property", - "napi_async_destroy", - "napi_coerce_to_object", - "napi_get_arraybuffer_info", - "napi_detach_arraybuffer", - "napi_get_undefined", - "napi_reference_unref", - "napi_fatal_error", - "napi_open_callback_scope", - "napi_close_callback_scope", - "napi_get_value_uint32", - "napi_create_function", - "napi_create_arraybuffer", - "napi_get_value_int64", - "napi_get_all_property_names", - "napi_resolve_deferred", - "napi_is_detached_arraybuffer", - "napi_create_string_utf8", - "napi_create_threadsafe_function", - "node_api_throw_syntax_error", - "napi_create_bigint_int64", - "napi_wrap", - "napi_set_property", - "napi_get_value_bigint_int64", - "napi_open_handle_scope", - "napi_create_error", - "napi_create_buffer", - "napi_cancel_async_work", - "napi_is_exception_pending", - "napi_acquire_threadsafe_function", - "napi_create_external", - "napi_get_threadsafe_function_context", - "napi_get_null", - "napi_create_string_utf16", - "node_api_create_external_string_utf16", - "napi_get_value_bigint_uint64", - "napi_module_register", - "napi_is_typedarray", - "napi_create_external_buffer", - "napi_get_new_target", - "napi_get_instance_data", - "napi_close_handle_scope", - "napi_get_value_string_utf16", - "napi_get_property_names", - "napi_is_arraybuffer", - "napi_get_cb_info", - "napi_define_properties", - "napi_add_env_cleanup_hook", - "node_api_get_module_file_name", - "napi_get_node_version", - "napi_create_int64", - "napi_create_double", - "napi_get_and_clear_last_exception", - "napi_create_reference", - "napi_get_typedarray_info", - "napi_call_threadsafe_function", - "napi_get_last_error_info", - "napi_create_array_with_length", - 
"napi_coerce_to_number", - "napi_get_global", - "napi_is_error", - "napi_set_instance_data", - "napi_create_typedarray", - "napi_throw_type_error", - "napi_has_property", - "napi_get_value_external", - "napi_create_range_error", - "napi_typeof", - "napi_ref_threadsafe_function", - "napi_create_bigint_uint64", - "napi_get_prototype", - "napi_adjust_external_memory", - "napi_release_threadsafe_function", - "napi_delete_async_work", - "napi_create_string_latin1", - "node_api_create_external_string_latin1", - "napi_is_array", - "napi_unref_threadsafe_function", - "napi_throw_error", - "napi_has_own_property", - "napi_get_reference_value", - "napi_remove_env_cleanup_hook", - "napi_get_value_string_utf8", - "napi_is_promise", - "napi_get_boolean", - "napi_run_script", - "napi_get_element", - "napi_get_named_property", - "napi_get_buffer_info", - "napi_get_value_bool", - "napi_reference_ref", - "napi_create_object", - "napi_create_promise", - "napi_create_int32", - "napi_escape_handle", - "napi_open_escapable_handle_scope", - "napi_throw", - "napi_get_value_double", - "napi_set_named_property", - "napi_call_function", - "napi_create_date", - "napi_object_freeze", - "napi_get_uv_event_loop", - "napi_get_value_string_latin1", - "napi_reject_deferred", - "napi_add_finalizer", - "napi_create_array", - "napi_delete_reference", - "napi_get_date_value", - "napi_create_dataview", - "napi_get_version", - "napi_define_class", - "napi_is_date", - "napi_remove_wrap", - "napi_delete_property", - "napi_instanceof", - "napi_create_buffer_copy", - "napi_delete_element", - "napi_object_seal", - "napi_queue_async_work", - "napi_get_value_bigint_words", - "napi_is_buffer", - "napi_get_array_length", - "napi_get_property", - "napi_new_instance", - "napi_set_element", - "napi_create_bigint_words", - "napi_strict_equals", - "napi_is_dataview", - "napi_close_escapable_handle_scope", - "napi_get_dataview_info", - "napi_get_value_int32", - "napi_unwrap", - "napi_throw_range_error", - 
"napi_coerce_to_bool", - "napi_create_uint32", - "napi_has_element", - "napi_create_external_arraybuffer", - "napi_create_symbol", - "node_api_symbol_for", - "napi_coerce_to_string", - "napi_create_type_error", - "napi_fatal_exception", - "napi_create_async_work", - "napi_async_init", - "node_api_create_property_key_utf16", - "napi_type_tag_object", - "napi_check_object_type_tag", - "node_api_post_finalizer", - "napi_add_async_cleanup_hook", - "napi_remove_async_cleanup_hook", - "uv_mutex_init", - "uv_mutex_lock", - "uv_mutex_unlock", - "uv_mutex_destroy", - "uv_async_init", - "uv_async_send", - "uv_close" - ] -} diff --git a/cli/napi/util.rs b/cli/napi/util.rs deleted file mode 100644 index 63d8effbf..000000000 --- a/cli/napi/util.rs +++ /dev/null @@ -1,289 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -use deno_runtime::deno_napi::*; -use libc::INT_MAX; - -#[repr(transparent)] -pub struct SendPtr<T>(pub *const T); - -impl<T> SendPtr<T> { - // silly function to get around `clippy::redundant_locals` - pub fn take(self) -> *const T { - self.0 - } -} - -unsafe impl<T> Send for SendPtr<T> {} -unsafe impl<T> Sync for SendPtr<T> {} - -pub fn get_array_buffer_ptr(ab: v8::Local<v8::ArrayBuffer>) -> *mut c_void { - match ab.data() { - Some(p) => p.as_ptr(), - None => std::ptr::null_mut(), - } -} - -struct BufferFinalizer { - env: *mut Env, - finalize_cb: napi_finalize, - finalize_data: *mut c_void, - finalize_hint: *mut c_void, -} - -impl Drop for BufferFinalizer { - fn drop(&mut self) { - unsafe { - (self.finalize_cb)(self.env as _, self.finalize_data, self.finalize_hint); - } - } -} - -pub extern "C" fn backing_store_deleter_callback( - data: *mut c_void, - _byte_length: usize, - deleter_data: *mut c_void, -) { - let mut finalizer = - unsafe { Box::<BufferFinalizer>::from_raw(deleter_data as _) }; - - finalizer.finalize_data = data; - - drop(finalizer); -} - -pub fn make_external_backing_store( - env: *mut Env, - data: *mut c_void, - 
byte_length: usize, - finalize_data: *mut c_void, - finalize_cb: napi_finalize, - finalize_hint: *mut c_void, -) -> v8::UniqueRef<v8::BackingStore> { - let finalizer = Box::new(BufferFinalizer { - env, - finalize_data, - finalize_cb, - finalize_hint, - }); - - unsafe { - v8::ArrayBuffer::new_backing_store_from_ptr( - data, - byte_length, - backing_store_deleter_callback, - Box::into_raw(finalizer) as _, - ) - } -} - -#[macro_export] -macro_rules! check_env { - ($env: expr) => {{ - let env = $env; - if env.is_null() { - return napi_invalid_arg; - } - unsafe { &mut *env } - }}; -} - -#[macro_export] -macro_rules! return_error_status_if_false { - ($env: expr, $condition: expr, $status: ident) => { - if !$condition { - return Err( - $crate::napi::util::napi_set_last_error($env, $status).into(), - ); - } - }; -} - -#[macro_export] -macro_rules! return_status_if_false { - ($env: expr, $condition: expr, $status: ident) => { - if !$condition { - return $crate::napi::util::napi_set_last_error($env, $status); - } - }; -} - -pub(crate) unsafe fn check_new_from_utf8_len<'s>( - env: *mut Env, - str_: *const c_char, - len: usize, -) -> Result<v8::Local<'s, v8::String>, napi_status> { - let env = unsafe { &mut *env }; - return_error_status_if_false!( - env, - (len == NAPI_AUTO_LENGTH) || len <= INT_MAX as _, - napi_invalid_arg - ); - return_error_status_if_false!(env, !str_.is_null(), napi_invalid_arg); - let string = if len == NAPI_AUTO_LENGTH { - let result = unsafe { std::ffi::CStr::from_ptr(str_ as *const _) }.to_str(); - return_error_status_if_false!(env, result.is_ok(), napi_generic_failure); - result.unwrap() - } else { - let string = unsafe { std::slice::from_raw_parts(str_ as *const u8, len) }; - let result = std::str::from_utf8(string); - return_error_status_if_false!(env, result.is_ok(), napi_generic_failure); - result.unwrap() - }; - let result = { - let env = unsafe { &mut *(env as *mut Env) }; - v8::String::new(&mut env.scope(), string) - }; - 
return_error_status_if_false!(env, result.is_some(), napi_generic_failure); - Ok(result.unwrap()) -} - -#[inline] -pub(crate) unsafe fn check_new_from_utf8<'s>( - env: *mut Env, - str_: *const c_char, -) -> Result<v8::Local<'s, v8::String>, napi_status> { - unsafe { check_new_from_utf8_len(env, str_, NAPI_AUTO_LENGTH) } -} - -pub(crate) unsafe fn v8_name_from_property_descriptor<'s>( - env: *mut Env, - p: &'s napi_property_descriptor, -) -> Result<v8::Local<'s, v8::Name>, napi_status> { - if !p.utf8name.is_null() { - unsafe { check_new_from_utf8(env, p.utf8name).map(|v| v.into()) } - } else { - match *p.name { - Some(v) => match v.try_into() { - Ok(name) => Ok(name), - Err(_) => Err(napi_name_expected), - }, - None => Err(napi_name_expected), - } - } -} - -pub(crate) fn napi_clear_last_error(env: *mut Env) -> napi_status { - let env = unsafe { &mut *env }; - env.last_error.error_code = napi_ok; - env.last_error.engine_error_code = 0; - env.last_error.engine_reserved = std::ptr::null_mut(); - env.last_error.error_message = std::ptr::null_mut(); - napi_ok -} - -pub(crate) fn napi_set_last_error( - env: *mut Env, - error_code: napi_status, -) -> napi_status { - let env = unsafe { &mut *env }; - env.last_error.error_code = error_code; - error_code -} - -#[macro_export] -macro_rules! status_call { - ($call: expr) => { - let status = $call; - if status != napi_ok { - return status; - } - }; -} - -pub trait Nullable { - fn is_null(&self) -> bool; -} - -impl<T> Nullable for *mut T { - fn is_null(&self) -> bool { - (*self).is_null() - } -} - -impl<T> Nullable for *const T { - fn is_null(&self) -> bool { - (*self).is_null() - } -} - -impl<T> Nullable for Option<T> { - fn is_null(&self) -> bool { - self.is_none() - } -} - -impl<'s> Nullable for napi_value<'s> { - fn is_null(&self) -> bool { - self.is_none() - } -} - -#[macro_export] -macro_rules! 
check_arg { - ($env: expr, $ptr: expr) => { - $crate::return_status_if_false!( - $env, - !$crate::napi::util::Nullable::is_null(&$ptr), - napi_invalid_arg - ); - }; -} - -#[macro_export] -macro_rules! napi_wrap { - ( $( # $attr:tt )* fn $name:ident $( < $( $x:lifetime ),* > )? ( $env:ident : & $( $lt:lifetime )? mut Env $( , $ident:ident : $ty:ty )* $(,)? ) -> napi_status $body:block ) => { - $( # $attr )* - #[no_mangle] - pub unsafe extern "C" fn $name $( < $( $x ),* > )? ( env_ptr : *mut Env , $( $ident : $ty ),* ) -> napi_status { - let env: & $( $lt )? mut Env = $crate::check_env!(env_ptr); - - if env.last_exception.is_some() { - return napi_pending_exception; - } - - $crate::napi::util::napi_clear_last_error(env); - - let scope_env = unsafe { &mut *env_ptr }; - let scope = &mut scope_env.scope(); - let try_catch = &mut v8::TryCatch::new(scope); - - #[inline(always)] - fn inner $( < $( $x ),* > )? ( $env: & $( $lt )? mut Env , $( $ident : $ty ),* ) -> napi_status $body - - log::trace!("NAPI ENTER: {}", stringify!($name)); - - let result = inner( env, $( $ident ),* ); - - log::trace!("NAPI EXIT: {} {}", stringify!($name), result); - - if let Some(exception) = try_catch.exception() { - let env = unsafe { &mut *env_ptr }; - let global = v8::Global::new(env.isolate(), exception); - env.last_exception = Some(global); - return $crate::napi::util::napi_set_last_error(env_ptr, napi_pending_exception); - } - - if result != napi_ok { - return $crate::napi::util::napi_set_last_error(env_ptr, result); - } - - return result; - } - }; - - ( $( # $attr:tt )* fn $name:ident $( < $( $x:lifetime ),* > )? ( $( $ident:ident : $ty:ty ),* $(,)? ) -> napi_status $body:block ) => { - $( # $attr )* - #[no_mangle] - pub unsafe extern "C" fn $name $( < $( $x ),* > )? ( $( $ident : $ty ),* ) -> napi_status { - #[inline(always)] - fn inner $( < $( $x ),* > )? 
( $( $ident : $ty ),* ) -> napi_status $body - - log::trace!("NAPI ENTER: {}", stringify!($name)); - - let result = inner( $( $ident ),* ); - - log::trace!("NAPI EXIT: {} {}", stringify!($name), result); - - result - } - }; -} diff --git a/cli/napi/uv.rs b/cli/napi/uv.rs deleted file mode 100644 index d4cb5c0b3..000000000 --- a/cli/napi/uv.rs +++ /dev/null @@ -1,231 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -use deno_core::parking_lot::Mutex; -use deno_runtime::deno_napi::*; -use std::mem::MaybeUninit; -use std::ptr::addr_of_mut; - -#[allow(clippy::print_stderr)] -fn assert_ok(res: c_int) -> c_int { - if res != 0 { - eprintln!("bad result in uv polyfill: {res}"); - // don't panic because that might unwind into - // c/c++ - std::process::abort(); - } - res -} - -use crate::napi::js_native_api::napi_create_string_utf8; -use crate::napi::node_api::napi_create_async_work; -use crate::napi::node_api::napi_delete_async_work; -use crate::napi::node_api::napi_queue_async_work; -use std::ffi::c_int; - -const UV_MUTEX_SIZE: usize = { - #[cfg(unix)] - { - std::mem::size_of::<libc::pthread_mutex_t>() - } - #[cfg(windows)] - { - std::mem::size_of::<windows_sys::Win32::System::Threading::CRITICAL_SECTION>( - ) - } -}; - -#[repr(C)] -struct uv_mutex_t { - mutex: Mutex<()>, - _padding: [MaybeUninit<usize>; const { - (UV_MUTEX_SIZE - size_of::<Mutex<()>>()) / size_of::<usize>() - }], -} - -#[no_mangle] -unsafe extern "C" fn uv_mutex_init(lock: *mut uv_mutex_t) -> c_int { - unsafe { - addr_of_mut!((*lock).mutex).write(Mutex::new(())); - 0 - } -} - -#[no_mangle] -unsafe extern "C" fn uv_mutex_lock(lock: *mut uv_mutex_t) { - unsafe { - let guard = (*lock).mutex.lock(); - // forget the guard so it doesn't unlock when it goes out of scope. 
- // we're going to unlock it manually - std::mem::forget(guard); - } -} - -#[no_mangle] -unsafe extern "C" fn uv_mutex_unlock(lock: *mut uv_mutex_t) { - unsafe { - (*lock).mutex.force_unlock(); - } -} - -#[no_mangle] -unsafe extern "C" fn uv_mutex_destroy(_lock: *mut uv_mutex_t) { - // no cleanup required -} - -#[repr(C)] -#[derive(Clone, Copy, Debug)] -#[allow(dead_code)] -enum uv_handle_type { - UV_UNKNOWN_HANDLE = 0, - UV_ASYNC, - UV_CHECK, - UV_FS_EVENT, - UV_FS_POLL, - UV_HANDLE, - UV_IDLE, - UV_NAMED_PIPE, - UV_POLL, - UV_PREPARE, - UV_PROCESS, - UV_STREAM, - UV_TCP, - UV_TIMER, - UV_TTY, - UV_UDP, - UV_SIGNAL, - UV_FILE, - UV_HANDLE_TYPE_MAX, -} - -const UV_HANDLE_SIZE: usize = 96; - -#[repr(C)] -struct uv_handle_t { - // public members - pub data: *mut c_void, - pub r#loop: *mut uv_loop_t, - pub r#type: uv_handle_type, - - _padding: [MaybeUninit<usize>; const { - (UV_HANDLE_SIZE - - size_of::<*mut c_void>() - - size_of::<*mut uv_loop_t>() - - size_of::<uv_handle_type>()) - / size_of::<usize>() - }], -} - -#[cfg(unix)] -const UV_ASYNC_SIZE: usize = 128; - -#[cfg(windows)] -const UV_ASYNC_SIZE: usize = 224; - -#[repr(C)] -struct uv_async_t { - // public members - pub data: *mut c_void, - pub r#loop: *mut uv_loop_t, - pub r#type: uv_handle_type, - // private - async_cb: uv_async_cb, - work: napi_async_work, - _padding: [MaybeUninit<usize>; const { - (UV_ASYNC_SIZE - - size_of::<*mut c_void>() - - size_of::<*mut uv_loop_t>() - - size_of::<uv_handle_type>() - - size_of::<uv_async_cb>() - - size_of::<napi_async_work>()) - / size_of::<usize>() - }], -} - -type uv_loop_t = Env; -type uv_async_cb = extern "C" fn(handle: *mut uv_async_t); -#[no_mangle] -unsafe extern "C" fn uv_async_init( - r#loop: *mut uv_loop_t, - // probably uninitialized - r#async: *mut uv_async_t, - async_cb: uv_async_cb, -) -> c_int { - unsafe { - addr_of_mut!((*r#async).r#loop).write(r#loop); - addr_of_mut!((*r#async).r#type).write(uv_handle_type::UV_ASYNC); - 
addr_of_mut!((*r#async).async_cb).write(async_cb); - - let mut resource_name: MaybeUninit<napi_value> = MaybeUninit::uninit(); - assert_ok(napi_create_string_utf8( - r#loop, - c"uv_async".as_ptr(), - usize::MAX, - resource_name.as_mut_ptr(), - )); - let resource_name = resource_name.assume_init(); - - let res = napi_create_async_work( - r#loop, - None::<v8::Local<'static, v8::Value>>.into(), - resource_name, - Some(async_exec_wrap), - None, - r#async.cast(), - addr_of_mut!((*r#async).work), - ); - -res - } -} - -#[no_mangle] -unsafe extern "C" fn uv_async_send(handle: *mut uv_async_t) -> c_int { - unsafe { -napi_queue_async_work((*handle).r#loop, (*handle).work) } -} - -type uv_close_cb = unsafe extern "C" fn(*mut uv_handle_t); - -#[no_mangle] -unsafe extern "C" fn uv_close(handle: *mut uv_handle_t, close: uv_close_cb) { - unsafe { - if handle.is_null() { - close(handle); - return; - } - if let uv_handle_type::UV_ASYNC = (*handle).r#type { - let handle: *mut uv_async_t = handle.cast(); - napi_delete_async_work((*handle).r#loop, (*handle).work); - } - close(handle); - } -} - -unsafe extern "C" fn async_exec_wrap(_env: napi_env, data: *mut c_void) { - let data: *mut uv_async_t = data.cast(); - unsafe { - ((*data).async_cb)(data); - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn sizes() { - assert_eq!( - std::mem::size_of::<libuv_sys_lite::uv_mutex_t>(), - UV_MUTEX_SIZE - ); - assert_eq!( - std::mem::size_of::<libuv_sys_lite::uv_handle_t>(), - UV_HANDLE_SIZE - ); - assert_eq!( - std::mem::size_of::<libuv_sys_lite::uv_async_t>(), - UV_ASYNC_SIZE - ); - assert_eq!(std::mem::size_of::<uv_mutex_t>(), UV_MUTEX_SIZE); - assert_eq!(std::mem::size_of::<uv_handle_t>(), UV_HANDLE_SIZE); - assert_eq!(std::mem::size_of::<uv_async_t>(), UV_ASYNC_SIZE); - } -} diff --git a/cli/node.rs b/cli/node.rs index a3cee8dde..bc39cdbde 100644 --- a/cli/node.rs +++ b/cli/node.rs @@ -1,10 +1,12 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
+use std::borrow::Cow; use std::sync::Arc; use deno_ast::MediaType; use deno_ast::ModuleSpecifier; use deno_core::error::AnyError; +use deno_graph::ParsedSourceStore; use deno_runtime::deno_fs; use deno_runtime::deno_node::DenoFsNodeResolverEnv; use node_resolver::analyze::CjsAnalysis as ExtNodeCjsAnalysis; @@ -16,8 +18,8 @@ use serde::Serialize; use crate::cache::CacheDBHash; use crate::cache::NodeAnalysisCache; -use crate::resolver::CliNodeResolver; -use crate::util::fs::canonicalize_path_maybe_not_exists; +use crate::cache::ParsedSourceCache; +use crate::resolver::CjsTracker; pub type CliNodeCodeTranslator = NodeCodeTranslator<CliCjsCodeAnalyzer, DenoFsNodeResolverEnv>; @@ -30,15 +32,11 @@ pub type CliNodeCodeTranslator = /// because the node_modules folder might not exist at that time. pub fn resolve_specifier_into_node_modules( specifier: &ModuleSpecifier, + fs: &dyn deno_fs::FileSystem, ) -> ModuleSpecifier { - specifier - .to_file_path() - .ok() - // this path might not exist at the time the graph is being created - // because the node_modules folder might not yet exist - .and_then(|path| canonicalize_path_maybe_not_exists(&path).ok()) - .and_then(|path| ModuleSpecifier::from_file_path(path).ok()) - .unwrap_or_else(|| specifier.clone()) + node_resolver::resolve_specifier_into_node_modules(specifier, &|path| { + fs.realpath_sync(path).map_err(|err| err.into_io_error()) + }) } #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] @@ -54,20 +52,23 @@ pub enum CliCjsAnalysis { pub struct CliCjsCodeAnalyzer { cache: NodeAnalysisCache, + cjs_tracker: Arc<CjsTracker>, fs: deno_fs::FileSystemRc, - node_resolver: Arc<CliNodeResolver>, + parsed_source_cache: Option<Arc<ParsedSourceCache>>, } impl CliCjsCodeAnalyzer { pub fn new( cache: NodeAnalysisCache, + cjs_tracker: Arc<CjsTracker>, fs: deno_fs::FileSystemRc, - node_resolver: Arc<CliNodeResolver>, + parsed_source_cache: Option<Arc<ParsedSourceCache>>, ) -> Self { Self { cache, + cjs_tracker, fs, - 
node_resolver, + parsed_source_cache, } } @@ -83,7 +84,7 @@ impl CliCjsCodeAnalyzer { return Ok(analysis); } - let mut media_type = MediaType::from_specifier(specifier); + let media_type = MediaType::from_specifier(specifier); if media_type == MediaType::Json { return Ok(CliCjsAnalysis::Cjs { exports: vec![], @@ -91,54 +92,51 @@ impl CliCjsCodeAnalyzer { }); } - if media_type == MediaType::JavaScript { - if let Some(package_json) = - self.node_resolver.get_closest_package_json(specifier)? - { - match package_json.typ.as_str() { - "commonjs" => { - media_type = MediaType::Cjs; - } - "module" => { - media_type = MediaType::Mjs; - } - _ => {} - } - } - } + let cjs_tracker = self.cjs_tracker.clone(); + let is_maybe_cjs = cjs_tracker.is_maybe_cjs(specifier, media_type)?; + let analysis = if is_maybe_cjs { + let maybe_parsed_source = self + .parsed_source_cache + .as_ref() + .and_then(|c| c.remove_parsed_source(specifier)); - let analysis = deno_core::unsync::spawn_blocking({ - let specifier = specifier.clone(); - let source: Arc<str> = source.into(); - move || -> Result<_, deno_ast::ParseDiagnostic> { - let parsed_source = deno_ast::parse_program(deno_ast::ParseParams { - specifier, - text: source, - media_type, - capture_tokens: true, - scope_analysis: false, - maybe_syntax: None, - })?; - if parsed_source.is_script() { - let analysis = parsed_source.analyze_cjs(); - Ok(CliCjsAnalysis::Cjs { - exports: analysis.exports, - reexports: analysis.reexports, - }) - } else if media_type == MediaType::Cjs { - // FIXME: `deno_ast` should internally handle MediaType::Cjs implying that - // the result must never be Esm - Ok(CliCjsAnalysis::Cjs { - exports: vec![], - reexports: vec![], - }) - } else { - Ok(CliCjsAnalysis::Esm) + deno_core::unsync::spawn_blocking({ + let specifier = specifier.clone(); + let source: Arc<str> = source.into(); + move || -> Result<_, AnyError> { + let parsed_source = + maybe_parsed_source.map(Ok).unwrap_or_else(|| { + 
deno_ast::parse_program(deno_ast::ParseParams { + specifier, + text: source, + media_type, + capture_tokens: true, + scope_analysis: false, + maybe_syntax: None, + }) + })?; + let is_script = parsed_source.compute_is_script(); + let is_cjs = cjs_tracker.is_cjs_with_known_is_script( + parsed_source.specifier(), + media_type, + is_script, + )?; + if is_cjs { + let analysis = parsed_source.analyze_cjs(); + Ok(CliCjsAnalysis::Cjs { + exports: analysis.exports, + reexports: analysis.reexports, + }) + } else { + Ok(CliCjsAnalysis::Esm) + } } - } - }) - .await - .unwrap()?; + }) + .await + .unwrap()? + } else { + CliCjsAnalysis::Esm + }; self .cache @@ -150,11 +148,11 @@ impl CliCjsCodeAnalyzer { #[async_trait::async_trait(?Send)] impl CjsCodeAnalyzer for CliCjsCodeAnalyzer { - async fn analyze_cjs( + async fn analyze_cjs<'a>( &self, specifier: &ModuleSpecifier, - source: Option<String>, - ) -> Result<ExtNodeCjsAnalysis, AnyError> { + source: Option<Cow<'a, str>>, + ) -> Result<ExtNodeCjsAnalysis<'a>, AnyError> { let source = match source { Some(source) => source, None => { @@ -162,7 +160,7 @@ impl CjsCodeAnalyzer for CliCjsCodeAnalyzer { if let Ok(source_from_file) = self.fs.read_text_file_lossy_async(path, None).await { - source_from_file + Cow::Owned(source_from_file) } else { return Ok(ExtNodeCjsAnalysis::Cjs(CjsAnalysisExports { exports: vec![], diff --git a/cli/npm/byonm.rs b/cli/npm/byonm.rs index fc095ab16..eca399251 100644 --- a/cli/npm/byonm.rs +++ b/cli/npm/byonm.rs @@ -2,19 +2,17 @@ use std::borrow::Cow; use std::path::Path; -use std::path::PathBuf; use std::sync::Arc; use deno_core::error::AnyError; use deno_core::serde_json; -use deno_core::url::Url; use deno_resolver::npm::ByonmNpmResolver; use deno_resolver::npm::ByonmNpmResolverCreateOptions; +use deno_resolver::npm::CliNpmReqResolver; +use deno_runtime::deno_node::DenoFsNodeResolverEnv; use deno_runtime::deno_node::NodePermissions; -use deno_runtime::deno_node::NodeRequireResolver; use 
deno_runtime::ops::process::NpmProcessStateProvider; -use deno_semver::package::PackageReq; -use node_resolver::NpmResolver; +use node_resolver::NpmPackageFolderResolver; use crate::args::NpmProcessState; use crate::args::NpmProcessStateKind; @@ -22,33 +20,16 @@ use crate::resolver::CliDenoResolverFs; use super::CliNpmResolver; use super::InnerCliNpmResolverRef; -use super::ResolvePkgFolderFromDenoReqError; pub type CliByonmNpmResolverCreateOptions = - ByonmNpmResolverCreateOptions<CliDenoResolverFs>; -pub type CliByonmNpmResolver = ByonmNpmResolver<CliDenoResolverFs>; + ByonmNpmResolverCreateOptions<CliDenoResolverFs, DenoFsNodeResolverEnv>; +pub type CliByonmNpmResolver = + ByonmNpmResolver<CliDenoResolverFs, DenoFsNodeResolverEnv>; // todo(dsherret): the services hanging off `CliNpmResolver` doesn't seem ideal. We should probably decouple. #[derive(Debug)] struct CliByonmWrapper(Arc<CliByonmNpmResolver>); -impl NodeRequireResolver for CliByonmWrapper { - fn ensure_read_permission<'a>( - &self, - permissions: &mut dyn NodePermissions, - path: &'a Path, - ) -> Result<Cow<'a, Path>, AnyError> { - if !path - .components() - .any(|c| c.as_os_str().to_ascii_lowercase() == "node_modules") - { - permissions.check_read_path(path) - } else { - Ok(Cow::Borrowed(path)) - } - } -} - impl NpmProcessStateProvider for CliByonmWrapper { fn get_npm_process_state(&self) -> String { serde_json::to_string(&NpmProcessState { @@ -63,12 +44,14 @@ impl NpmProcessStateProvider for CliByonmWrapper { } impl CliNpmResolver for CliByonmNpmResolver { - fn into_npm_resolver(self: Arc<Self>) -> Arc<dyn NpmResolver> { + fn into_npm_pkg_folder_resolver( + self: Arc<Self>, + ) -> Arc<dyn NpmPackageFolderResolver> { self } - fn into_require_resolver(self: Arc<Self>) -> Arc<dyn NodeRequireResolver> { - Arc::new(CliByonmWrapper(self)) + fn into_npm_req_resolver(self: Arc<Self>) -> Arc<dyn CliNpmReqResolver> { + self } fn into_process_state_provider( @@ -77,6 +60,10 @@ impl CliNpmResolver for 
CliByonmNpmResolver { Arc::new(CliByonmWrapper(self)) } + fn into_maybe_byonm(self: Arc<Self>) -> Option<Arc<CliByonmNpmResolver>> { + Some(self) + } + fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver> { Arc::new(self.clone()) } @@ -89,15 +76,19 @@ impl CliNpmResolver for CliByonmNpmResolver { self.root_node_modules_dir() } - fn resolve_pkg_folder_from_deno_module_req( + fn ensure_read_permission<'a>( &self, - req: &PackageReq, - referrer: &Url, - ) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError> { - ByonmNpmResolver::resolve_pkg_folder_from_deno_module_req( - self, req, referrer, - ) - .map_err(ResolvePkgFolderFromDenoReqError::Byonm) + permissions: &mut dyn NodePermissions, + path: &'a Path, + ) -> Result<Cow<'a, Path>, AnyError> { + if !path + .components() + .any(|c| c.as_os_str().to_ascii_lowercase() == "node_modules") + { + permissions.check_read_path(path).map_err(Into::into) + } else { + Ok(Cow::Borrowed(path)) + } } fn check_state_hash(&self) -> Option<u64> { diff --git a/cli/npm/common.rs b/cli/npm/common.rs index a3a828e74..55f1bc086 100644 --- a/cli/npm/common.rs +++ b/cli/npm/common.rs @@ -3,6 +3,7 @@ use base64::prelude::BASE64_STANDARD; use base64::Engine; use deno_core::anyhow::bail; +use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_npm::npm_rc::RegistryConfig; use http::header; @@ -36,17 +37,21 @@ pub fn maybe_auth_header_for_npm_registry( } if username.is_some() && password.is_some() { + // The npm client does some double encoding when generating the + // bearer token value, see + // https://github.com/npm/cli/blob/780afc50e3a345feb1871a28e33fa48235bc3bd5/workspaces/config/lib/index.js#L846-L851 + let pw_base64 = BASE64_STANDARD + .decode(password.unwrap()) + .with_context(|| "The password in npmrc is an invalid base64 string")?; + let bearer = BASE64_STANDARD.encode(format!( + "{}:{}", + username.unwrap(), + String::from_utf8_lossy(&pw_base64) + )); + return Ok(Some(( header::AUTHORIZATION, - 
header::HeaderValue::from_str(&format!( - "Basic {}", - BASE64_STANDARD.encode(&format!( - "{}:{}", - username.unwrap(), - password.unwrap() - )) - )) - .unwrap(), + header::HeaderValue::from_str(&format!("Basic {}", bearer)).unwrap(), ))); } diff --git a/cli/npm/managed/cache/mod.rs b/cli/npm/managed/cache/mod.rs index fa0e8c8a5..8ae99f41e 100644 --- a/cli/npm/managed/cache/mod.rs +++ b/cli/npm/managed/cache/mod.rs @@ -26,7 +26,7 @@ use crate::cache::CACHE_PERM; use crate::util::fs::atomic_write_file_with_retries; use crate::util::fs::hard_link_dir_recursive; -mod registry_info; +pub mod registry_info; mod tarball; mod tarball_extract; @@ -36,7 +36,7 @@ pub use tarball::TarballCache; /// Stores a single copy of npm packages in a cache. #[derive(Debug)] pub struct NpmCache { - cache_dir: NpmCacheDir, + cache_dir: Arc<NpmCacheDir>, cache_setting: CacheSetting, npmrc: Arc<ResolvedNpmRc>, /// ensures a package is only downloaded once per run @@ -45,7 +45,7 @@ pub struct NpmCache { impl NpmCache { pub fn new( - cache_dir: NpmCacheDir, + cache_dir: Arc<NpmCacheDir>, cache_setting: CacheSetting, npmrc: Arc<ResolvedNpmRc>, ) -> Self { @@ -61,6 +61,10 @@ impl NpmCache { &self.cache_setting } + pub fn root_dir_path(&self) -> &Path { + self.cache_dir.root_dir() + } + pub fn root_dir_url(&self) -> &Url { self.cache_dir.root_dir_url() } @@ -152,10 +156,6 @@ impl NpmCache { self.cache_dir.package_name_folder(name, registry_url) } - pub fn root_folder(&self) -> PathBuf { - self.cache_dir.root_dir().to_owned() - } - pub fn resolve_package_folder_id_from_specifier( &self, specifier: &ModuleSpecifier, diff --git a/cli/npm/managed/cache/registry_info.rs b/cli/npm/managed/cache/registry_info.rs index 28b19373e..6d39d3c13 100644 --- a/cli/npm/managed/cache/registry_info.rs +++ b/cli/npm/managed/cache/registry_info.rs @@ -84,7 +84,7 @@ impl RegistryInfoDownloader { self.load_package_info_inner(name).await.with_context(|| { format!( "Error getting response at {} for package \"{}\"", - 
self.get_package_url(name), + get_package_url(&self.npmrc, name), name ) }) @@ -190,7 +190,7 @@ impl RegistryInfoDownloader { fn create_load_future(self: &Arc<Self>, name: &str) -> LoadFuture { let downloader = self.clone(); - let package_url = self.get_package_url(name); + let package_url = get_package_url(&self.npmrc, name); let registry_config = self.npmrc.get_registry_config(name); let maybe_auth_header = match maybe_auth_header_for_npm_registry(registry_config) { @@ -202,10 +202,13 @@ impl RegistryInfoDownloader { let guard = self.progress_bar.update(package_url.as_str()); let name = name.to_string(); async move { - let maybe_bytes = downloader - .http_client_provider - .get_or_create()? - .download_with_progress(package_url, maybe_auth_header, &guard) + let client = downloader.http_client_provider.get_or_create()?; + let maybe_bytes = client + .download_with_progress_and_retries( + package_url, + maybe_auth_header, + &guard, + ) .await?; match maybe_bytes { Some(bytes) => { @@ -236,25 +239,36 @@ impl RegistryInfoDownloader { .map(|r| r.map_err(Arc::new)) .boxed_local() } +} - fn get_package_url(&self, name: &str) -> Url { - let registry_url = self.npmrc.get_registry_url(name); - // list of all characters used in npm packages: - // !, ', (, ), *, -, ., /, [0-9], @, [A-Za-z], _, ~ - const ASCII_SET: percent_encoding::AsciiSet = - percent_encoding::NON_ALPHANUMERIC - .remove(b'!') - .remove(b'\'') - .remove(b'(') - .remove(b')') - .remove(b'*') - .remove(b'-') - .remove(b'.') - .remove(b'/') - .remove(b'@') - .remove(b'_') - .remove(b'~'); - let name = percent_encoding::utf8_percent_encode(name, &ASCII_SET); - registry_url.join(&name.to_string()).unwrap() - } +pub fn get_package_url(npmrc: &ResolvedNpmRc, name: &str) -> Url { + let registry_url = npmrc.get_registry_url(name); + // The '/' character in scoped package names "@scope/name" must be + // encoded for older third party registries. 
Newer registries and + // npm itself support both ways + // - encoded: https://registry.npmjs.org/@rollup%2fplugin-json + // - non-ecoded: https://registry.npmjs.org/@rollup/plugin-json + // To support as many third party registries as possible we'll + // always encode the '/' character. + + // list of all characters used in npm packages: + // !, ', (, ), *, -, ., /, [0-9], @, [A-Za-z], _, ~ + const ASCII_SET: percent_encoding::AsciiSet = + percent_encoding::NON_ALPHANUMERIC + .remove(b'!') + .remove(b'\'') + .remove(b'(') + .remove(b')') + .remove(b'*') + .remove(b'-') + .remove(b'.') + .remove(b'@') + .remove(b'_') + .remove(b'~'); + let name = percent_encoding::utf8_percent_encode(name, &ASCII_SET); + registry_url + // Ensure that scoped package name percent encoding is lower cased + // to match npm. + .join(&name.to_string().replace("%2F", "%2f")) + .unwrap() } diff --git a/cli/npm/managed/cache/tarball.rs b/cli/npm/managed/cache/tarball.rs index 4bcee38ea..7cf88d6d6 100644 --- a/cli/npm/managed/cache/tarball.rs +++ b/cli/npm/managed/cache/tarball.rs @@ -172,7 +172,7 @@ impl TarballCache { let guard = tarball_cache.progress_bar.update(&dist.tarball); let result = tarball_cache.http_client_provider .get_or_create()? 
- .download_with_progress(tarball_uri, maybe_auth_header, &guard) + .download_with_progress_and_retries(tarball_uri, maybe_auth_header, &guard) .await; let maybe_bytes = match result { Ok(maybe_bytes) => maybe_bytes, diff --git a/cli/npm/managed/mod.rs b/cli/npm/managed/mod.rs index ec50a9c65..2e64f5f18 100644 --- a/cli/npm/managed/mod.rs +++ b/cli/npm/managed/mod.rs @@ -12,6 +12,7 @@ use deno_cache_dir::npm::NpmCacheDir; use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::serde_json; +use deno_core::url::Url; use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::registry::NpmPackageInfo; use deno_npm::registry::NpmRegistryApi; @@ -21,16 +22,17 @@ use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot; use deno_npm::NpmPackageId; use deno_npm::NpmResolutionPackage; use deno_npm::NpmSystemInfo; +use deno_resolver::npm::CliNpmReqResolver; use deno_runtime::colors; use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_node::NodePermissions; -use deno_runtime::deno_node::NodeRequireResolver; use deno_runtime::ops::process::NpmProcessStateProvider; use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; use node_resolver::errors::PackageFolderResolveError; use node_resolver::errors::PackageFolderResolveIoError; -use node_resolver::NpmResolver; +use node_resolver::InNpmPackageChecker; +use node_resolver::NpmPackageFolderResolver; use resolution::AddPkgReqsResult; use crate::args::CliLockfile; @@ -38,7 +40,7 @@ use crate::args::LifecycleScriptsConfig; use crate::args::NpmInstallDepsProvider; use crate::args::NpmProcessState; use crate::args::NpmProcessStateKind; -use crate::cache::DenoCacheEnvFsAdapter; +use crate::args::PackageJsonDepValueParseWithLocationError; use crate::cache::FastInsecureHasher; use crate::http_util::HttpClientProvider; use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs; @@ -55,7 +57,7 @@ use super::CliNpmResolver; use super::InnerCliNpmResolverRef; use 
super::ResolvePkgFolderFromDenoReqError; -mod cache; +pub mod cache; mod registry; mod resolution; mod resolvers; @@ -65,12 +67,12 @@ pub enum CliNpmResolverManagedSnapshotOption { Specified(Option<ValidSerializedNpmResolutionSnapshot>), } -pub struct CliNpmResolverManagedCreateOptions { +pub struct CliManagedNpmResolverCreateOptions { pub snapshot: CliNpmResolverManagedSnapshotOption, pub maybe_lockfile: Option<Arc<CliLockfile>>, pub fs: Arc<dyn deno_runtime::deno_fs::FileSystem>, pub http_client_provider: Arc<crate::http_util::HttpClientProvider>, - pub npm_global_cache_dir: PathBuf, + pub npm_cache_dir: Arc<NpmCacheDir>, pub cache_setting: crate::args::CacheSetting, pub text_only_progress_bar: crate::util::progress_bar::ProgressBar, pub maybe_node_modules_path: Option<PathBuf>, @@ -81,7 +83,7 @@ pub struct CliNpmResolverManagedCreateOptions { } pub async fn create_managed_npm_resolver_for_lsp( - options: CliNpmResolverManagedCreateOptions, + options: CliManagedNpmResolverCreateOptions, ) -> Arc<dyn CliNpmResolver> { let npm_cache = create_cache(&options); let npm_api = create_api(&options, npm_cache.clone()); @@ -114,7 +116,7 @@ pub async fn create_managed_npm_resolver_for_lsp( } pub async fn create_managed_npm_resolver( - options: CliNpmResolverManagedCreateOptions, + options: CliManagedNpmResolverCreateOptions, ) -> Result<Arc<dyn CliNpmResolver>, AnyError> { let npm_cache = create_cache(&options); let npm_api = create_api(&options, npm_cache.clone()); @@ -188,20 +190,16 @@ fn create_inner( )) } -fn create_cache(options: &CliNpmResolverManagedCreateOptions) -> Arc<NpmCache> { +fn create_cache(options: &CliManagedNpmResolverCreateOptions) -> Arc<NpmCache> { Arc::new(NpmCache::new( - NpmCacheDir::new( - &DenoCacheEnvFsAdapter(options.fs.as_ref()), - options.npm_global_cache_dir.clone(), - options.npmrc.get_all_known_registries_urls(), - ), + options.npm_cache_dir.clone(), options.cache_setting.clone(), options.npmrc.clone(), )) } fn create_api( - options: 
&CliNpmResolverManagedCreateOptions, + options: &CliManagedNpmResolverCreateOptions, npm_cache: Arc<NpmCache>, ) -> Arc<CliNpmRegistryApi> { Arc::new(CliNpmRegistryApi::new( @@ -258,6 +256,35 @@ async fn snapshot_from_lockfile( Ok(snapshot) } +#[derive(Debug)] +struct ManagedInNpmPackageChecker { + root_dir: Url, +} + +impl InNpmPackageChecker for ManagedInNpmPackageChecker { + fn in_npm_package(&self, specifier: &Url) -> bool { + specifier.as_ref().starts_with(self.root_dir.as_str()) + } +} + +pub struct CliManagedInNpmPkgCheckerCreateOptions<'a> { + pub root_cache_dir_url: &'a Url, + pub maybe_node_modules_path: Option<&'a Path>, +} + +pub fn create_managed_in_npm_pkg_checker( + options: CliManagedInNpmPkgCheckerCreateOptions, +) -> Arc<dyn InNpmPackageChecker> { + let root_dir = match options.maybe_node_modules_path { + Some(node_modules_folder) => { + deno_path_util::url_from_directory_path(node_modules_folder).unwrap() + } + None => options.root_cache_dir_url.clone(), + }; + debug_assert!(root_dir.as_str().ends_with('/')); + Arc::new(ManagedInNpmPackageChecker { root_dir }) +} + /// An npm resolver where the resolution is managed by Deno rather than /// the user bringing their own node_modules (BYONM) on the file system. pub struct ManagedCliNpmResolver { @@ -480,19 +507,24 @@ impl ManagedCliNpmResolver { self.resolution.resolve_pkg_id_from_pkg_req(req) } - pub fn ensure_no_pkg_json_dep_errors(&self) -> Result<(), AnyError> { + pub fn ensure_no_pkg_json_dep_errors( + &self, + ) -> Result<(), Box<PackageJsonDepValueParseWithLocationError>> { for err in self.npm_install_deps_provider.pkg_json_dep_errors() { - match err { + match &err.source { deno_package_json::PackageJsonDepValueParseError::VersionReq(_) => { - return Err( - AnyError::from(err.clone()) - .context("Failed to install from package.json"), - ); + return Err(Box::new(err.clone())); } deno_package_json::PackageJsonDepValueParseError::Unsupported { .. 
} => { - log::warn!("{} {} in package.json", colors::yellow("Warning"), err) + // only warn for this one + log::warn!( + "{} {}\n at {}", + colors::yellow("Warning"), + err.source, + err.location, + ) } } } @@ -549,8 +581,16 @@ impl ManagedCliNpmResolver { .map_err(|err| err.into()) } - pub fn global_cache_root_folder(&self) -> PathBuf { - self.npm_cache.root_folder() + pub fn maybe_node_modules_path(&self) -> Option<&Path> { + self.fs_resolver.node_modules_path() + } + + pub fn global_cache_root_path(&self) -> &Path { + self.npm_cache.root_dir_path() + } + + pub fn global_cache_root_url(&self) -> &Url { + self.npm_cache.root_dir_url() } } @@ -566,7 +606,7 @@ fn npm_process_state( .unwrap() } -impl NpmResolver for ManagedCliNpmResolver { +impl NpmPackageFolderResolver for ManagedCliNpmResolver { fn resolve_package_folder_from_package( &self, name: &str, @@ -585,22 +625,6 @@ impl NpmResolver for ManagedCliNpmResolver { log::debug!("Resolved {} from {} to {}", name, referrer, path.display()); Ok(path) } - - fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool { - let root_dir_url = self.fs_resolver.root_dir_url(); - debug_assert!(root_dir_url.as_str().ends_with('/')); - specifier.as_ref().starts_with(root_dir_url.as_str()) - } -} - -impl NodeRequireResolver for ManagedCliNpmResolver { - fn ensure_read_permission<'a>( - &self, - permissions: &mut dyn NodePermissions, - path: &'a Path, - ) -> Result<Cow<'a, Path>, AnyError> { - self.fs_resolver.ensure_read_permission(permissions, path) - } } impl NpmProcessStateProvider for ManagedCliNpmResolver { @@ -612,12 +636,29 @@ impl NpmProcessStateProvider for ManagedCliNpmResolver { } } +impl CliNpmReqResolver for ManagedCliNpmResolver { + fn resolve_pkg_folder_from_deno_module_req( + &self, + req: &PackageReq, + _referrer: &ModuleSpecifier, + ) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError> { + let pkg_id = self + .resolve_pkg_id_from_pkg_req(req) + .map_err(|err| 
ResolvePkgFolderFromDenoReqError::Managed(err.into()))?; + self + .resolve_pkg_folder_from_pkg_id(&pkg_id) + .map_err(ResolvePkgFolderFromDenoReqError::Managed) + } +} + impl CliNpmResolver for ManagedCliNpmResolver { - fn into_npm_resolver(self: Arc<Self>) -> Arc<dyn NpmResolver> { + fn into_npm_pkg_folder_resolver( + self: Arc<Self>, + ) -> Arc<dyn NpmPackageFolderResolver> { self } - fn into_require_resolver(self: Arc<Self>) -> Arc<dyn NodeRequireResolver> { + fn into_npm_req_resolver(self: Arc<Self>) -> Arc<dyn CliNpmReqResolver> { self } @@ -668,17 +709,12 @@ impl CliNpmResolver for ManagedCliNpmResolver { self.fs_resolver.node_modules_path() } - fn resolve_pkg_folder_from_deno_module_req( + fn ensure_read_permission<'a>( &self, - req: &PackageReq, - _referrer: &ModuleSpecifier, - ) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError> { - let pkg_id = self - .resolve_pkg_id_from_pkg_req(req) - .map_err(|err| ResolvePkgFolderFromDenoReqError::Managed(err.into()))?; - self - .resolve_pkg_folder_from_pkg_id(&pkg_id) - .map_err(ResolvePkgFolderFromDenoReqError::Managed) + permissions: &mut dyn NodePermissions, + path: &'a Path, + ) -> Result<Cow<'a, Path>, AnyError> { + self.fs_resolver.ensure_read_permission(permissions, path) } fn check_state_hash(&self) -> Option<u64> { diff --git a/cli/npm/managed/resolvers/common.rs b/cli/npm/managed/resolvers/common.rs index 867bb4168..eee11c760 100644 --- a/cli/npm/managed/resolvers/common.rs +++ b/cli/npm/managed/resolvers/common.rs @@ -17,7 +17,6 @@ use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::futures; use deno_core::futures::StreamExt; -use deno_core::url::Url; use deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageId; use deno_npm::NpmResolutionPackage; @@ -30,9 +29,6 @@ use crate::npm::managed::cache::TarballCache; /// Part of the resolution that interacts with the file system. 
#[async_trait(?Send)] pub trait NpmPackageFsResolver: Send + Sync { - /// Specifier for the root directory. - fn root_dir_url(&self) -> &Url; - /// The local node_modules folder if it is applicable to the implementation. fn node_modules_path(&self) -> Option<&Path>; @@ -137,7 +133,7 @@ impl RegistryReadPermissionChecker { } } - permissions.check_read_path(path) + permissions.check_read_path(path).map_err(Into::into) } } diff --git a/cli/npm/managed/resolvers/common/bin_entries.rs b/cli/npm/managed/resolvers/common/bin_entries.rs index 4524ce832..e4a184568 100644 --- a/cli/npm/managed/resolvers/common/bin_entries.rs +++ b/cli/npm/managed/resolvers/common/bin_entries.rs @@ -18,6 +18,7 @@ pub struct BinEntries<'a> { seen_names: HashMap<&'a str, &'a NpmPackageId>, /// The bin entries entries: Vec<(&'a NpmResolutionPackage, PathBuf)>, + sorted: bool, } /// Returns the name of the default binary for the given package. @@ -31,6 +32,20 @@ fn default_bin_name(package: &NpmResolutionPackage) -> &str { .map_or(package.id.nv.name.as_str(), |(_, name)| name) } +pub fn warn_missing_entrypoint( + bin_name: &str, + package_path: &Path, + entrypoint: &Path, +) { + log::warn!( + "{} Trying to set up '{}' bin for \"{}\", but the entry point \"{}\" doesn't exist.", + deno_terminal::colors::yellow("Warning"), + bin_name, + package_path.display(), + entrypoint.display() + ); +} + impl<'a> BinEntries<'a> { pub fn new() -> Self { Self::default() @@ -42,6 +57,7 @@ impl<'a> BinEntries<'a> { package: &'a NpmResolutionPackage, package_path: PathBuf, ) { + self.sorted = false; // check for a new collision, if we haven't already // found one match package.bin.as_ref().unwrap() { @@ -79,16 +95,21 @@ impl<'a> BinEntries<'a> { &str, // bin name &str, // bin script ) -> Result<(), AnyError>, + mut filter: impl FnMut(&NpmResolutionPackage) -> bool, ) -> Result<(), AnyError> { - if !self.collisions.is_empty() { + if !self.collisions.is_empty() && !self.sorted { // walking the dependency tree to find 
out the depth of each package // is sort of expensive, so we only do it if there's a collision sort_by_depth(snapshot, &mut self.entries, &mut self.collisions); + self.sorted = true; } let mut seen = HashSet::new(); for (package, package_path) in &self.entries { + if !filter(package) { + continue; + } if let Some(bin_entries) = &package.bin { match bin_entries { deno_npm::registry::NpmPackageVersionBinEntry::String(script) => { @@ -118,8 +139,8 @@ impl<'a> BinEntries<'a> { } /// Collect the bin entries into a vec of (name, script path) - pub fn into_bin_files( - mut self, + pub fn collect_bin_files( + &mut self, snapshot: &NpmResolutionSnapshot, ) -> Vec<(String, PathBuf)> { let mut bins = Vec::new(); @@ -131,17 +152,18 @@ impl<'a> BinEntries<'a> { bins.push((name.to_string(), package_path.join(script))); Ok(()) }, + |_| true, ) .unwrap(); bins } - /// Finish setting up the bin entries, writing the necessary files - /// to disk. - pub fn finish( + fn set_up_entries_filtered( mut self, snapshot: &NpmResolutionSnapshot, bin_node_modules_dir_path: &Path, + filter: impl FnMut(&NpmResolutionPackage) -> bool, + mut handler: impl FnMut(&EntrySetupOutcome<'_>), ) -> Result<(), AnyError> { if !self.entries.is_empty() && !bin_node_modules_dir_path.exists() { std::fs::create_dir_all(bin_node_modules_dir_path).with_context( @@ -160,18 +182,54 @@ impl<'a> BinEntries<'a> { Ok(()) }, |package, package_path, name, script| { - set_up_bin_entry( + let outcome = set_up_bin_entry( package, name, script, package_path, bin_node_modules_dir_path, - ) + )?; + handler(&outcome); + Ok(()) }, + filter, )?; Ok(()) } + + /// Finish setting up the bin entries, writing the necessary files + /// to disk. 
+ pub fn finish( + self, + snapshot: &NpmResolutionSnapshot, + bin_node_modules_dir_path: &Path, + handler: impl FnMut(&EntrySetupOutcome<'_>), + ) -> Result<(), AnyError> { + self.set_up_entries_filtered( + snapshot, + bin_node_modules_dir_path, + |_| true, + handler, + ) + } + + /// Finish setting up the bin entries, writing the necessary files + /// to disk. + pub fn finish_only( + self, + snapshot: &NpmResolutionSnapshot, + bin_node_modules_dir_path: &Path, + handler: impl FnMut(&EntrySetupOutcome<'_>), + only: &HashSet<&NpmPackageId>, + ) -> Result<(), AnyError> { + self.set_up_entries_filtered( + snapshot, + bin_node_modules_dir_path, + |package| only.contains(&package.id), + handler, + ) + } } // walk the dependency tree to find out the depth of each package @@ -233,16 +291,17 @@ fn sort_by_depth( }); } -pub fn set_up_bin_entry( - package: &NpmResolutionPackage, - bin_name: &str, +pub fn set_up_bin_entry<'a>( + package: &'a NpmResolutionPackage, + bin_name: &'a str, #[allow(unused_variables)] bin_script: &str, - #[allow(unused_variables)] package_path: &Path, + #[allow(unused_variables)] package_path: &'a Path, bin_node_modules_dir_path: &Path, -) -> Result<(), AnyError> { +) -> Result<EntrySetupOutcome<'a>, AnyError> { #[cfg(windows)] { set_up_bin_shim(package, bin_name, bin_node_modules_dir_path)?; + Ok(EntrySetupOutcome::Success) } #[cfg(unix)] { @@ -252,9 +311,8 @@ pub fn set_up_bin_entry( bin_script, package_path, bin_node_modules_dir_path, - )?; + ) } - Ok(()) } #[cfg(windows)] @@ -301,14 +359,39 @@ fn make_executable_if_exists(path: &Path) -> Result<bool, AnyError> { Ok(true) } +pub enum EntrySetupOutcome<'a> { + #[cfg_attr(windows, allow(dead_code))] + MissingEntrypoint { + bin_name: &'a str, + package_path: &'a Path, + entrypoint: PathBuf, + package: &'a NpmResolutionPackage, + }, + Success, +} + +impl<'a> EntrySetupOutcome<'a> { + pub fn warn_if_failed(&self) { + match self { + EntrySetupOutcome::MissingEntrypoint { + bin_name, + package_path, + 
entrypoint, + .. + } => warn_missing_entrypoint(bin_name, package_path, entrypoint), + EntrySetupOutcome::Success => {} + } + } +} + #[cfg(unix)] -fn symlink_bin_entry( - _package: &NpmResolutionPackage, - bin_name: &str, +fn symlink_bin_entry<'a>( + package: &'a NpmResolutionPackage, + bin_name: &'a str, bin_script: &str, - package_path: &Path, + package_path: &'a Path, bin_node_modules_dir_path: &Path, -) -> Result<(), AnyError> { +) -> Result<EntrySetupOutcome<'a>, AnyError> { use std::io; use std::os::unix::fs::symlink; let link = bin_node_modules_dir_path.join(bin_name); @@ -318,14 +401,12 @@ fn symlink_bin_entry( format!("Can't set up '{}' bin at {}", bin_name, original.display()) })?; if !found { - log::warn!( - "{} Trying to set up '{}' bin for \"{}\", but the entry point \"{}\" doesn't exist.", - deno_terminal::colors::yellow("Warning"), + return Ok(EntrySetupOutcome::MissingEntrypoint { bin_name, - package_path.display(), - original.display() - ); - return Ok(()); + package_path, + entrypoint: original, + package, + }); } let original_relative = @@ -348,7 +429,7 @@ fn symlink_bin_entry( original_relative.display() ) })?; - return Ok(()); + return Ok(EntrySetupOutcome::Success); } return Err(err).with_context(|| { format!( @@ -359,5 +440,5 @@ fn symlink_bin_entry( }); } - Ok(()) + Ok(EntrySetupOutcome::Success) } diff --git a/cli/npm/managed/resolvers/common/lifecycle_scripts.rs b/cli/npm/managed/resolvers/common/lifecycle_scripts.rs index 5735f5248..5c5755c81 100644 --- a/cli/npm/managed/resolvers/common/lifecycle_scripts.rs +++ b/cli/npm/managed/resolvers/common/lifecycle_scripts.rs @@ -10,6 +10,7 @@ use deno_runtime::deno_io::FromRawIoHandle; use deno_semver::package::PackageNv; use deno_semver::Version; use std::borrow::Cow; +use std::collections::HashSet; use std::rc::Rc; use std::path::Path; @@ -61,7 +62,7 @@ impl<'a> LifecycleScripts<'a> { } } -fn has_lifecycle_scripts( +pub fn has_lifecycle_scripts( package: &NpmResolutionPackage, package_path: 
&Path, ) -> bool { @@ -83,7 +84,7 @@ fn is_broken_default_install_script(script: &str, package_path: &Path) -> bool { } impl<'a> LifecycleScripts<'a> { - fn can_run_scripts(&self, package_nv: &PackageNv) -> bool { + pub fn can_run_scripts(&self, package_nv: &PackageNv) -> bool { if !self.strategy.can_run_scripts() { return false; } @@ -98,6 +99,9 @@ impl<'a> LifecycleScripts<'a> { PackagesAllowedScripts::None => false, } } + pub fn has_run_scripts(&self, package: &NpmResolutionPackage) -> bool { + self.strategy.has_run(package) + } /// Register a package for running lifecycle scripts, if applicable. /// /// `package_path` is the path containing the package's code (its root dir). @@ -110,12 +114,12 @@ impl<'a> LifecycleScripts<'a> { ) { if has_lifecycle_scripts(package, &package_path) { if self.can_run_scripts(&package.id.nv) { - if !self.strategy.has_run(package) { + if !self.has_run_scripts(package) { self .packages_with_scripts .push((package, package_path.into_owned())); } - } else if !self.strategy.has_run(package) + } else if !self.has_run_scripts(package) && (self.config.explicit_install || !self.strategy.has_warned(package)) { // Skip adding `esbuild` as it is known that it can work properly without lifecycle script @@ -149,22 +153,32 @@ impl<'a> LifecycleScripts<'a> { self, snapshot: &NpmResolutionSnapshot, packages: &[NpmResolutionPackage], - root_node_modules_dir_path: Option<&Path>, + root_node_modules_dir_path: &Path, progress_bar: &ProgressBar, ) -> Result<(), AnyError> { self.warn_not_run_scripts()?; let get_package_path = |p: &NpmResolutionPackage| self.strategy.package_path(p); let mut failed_packages = Vec::new(); + let mut bin_entries = BinEntries::new(); if !self.packages_with_scripts.is_empty() { + let package_ids = self + .packages_with_scripts + .iter() + .map(|(p, _)| &p.id) + .collect::<HashSet<_>>(); // get custom commands for each bin available in the node_modules dir (essentially // the scripts that are in `node_modules/.bin`) - let base 
= - resolve_baseline_custom_commands(snapshot, packages, get_package_path)?; + let base = resolve_baseline_custom_commands( + &mut bin_entries, + snapshot, + packages, + get_package_path, + )?; let init_cwd = &self.config.initial_cwd; let process_state = crate::npm::managed::npm_process_state( snapshot.as_valid_serialized(), - root_node_modules_dir_path, + Some(root_node_modules_dir_path), ); let mut env_vars = crate::task_runner::real_env_vars(); @@ -221,7 +235,7 @@ impl<'a> LifecycleScripts<'a> { custom_commands: custom_commands.clone(), init_cwd, argv: &[], - root_node_modules_dir: root_node_modules_dir_path, + root_node_modules_dir: Some(root_node_modules_dir_path), stdio: Some(crate::task_runner::TaskIo { stderr: TaskStdio::piped(), stdout: TaskStdio::piped(), @@ -262,6 +276,17 @@ impl<'a> LifecycleScripts<'a> { } self.strategy.did_run_scripts(package)?; } + + // re-set up bin entries for the packages which we've run scripts for. + // lifecycle scripts can create files that are linked to by bin entries, + // and the only reliable way to handle this is to re-link bin entries + // (this is what PNPM does as well) + bin_entries.finish_only( + snapshot, + &root_node_modules_dir_path.join(".bin"), + |outcome| outcome.warn_if_failed(), + &package_ids, + )?; } if failed_packages.is_empty() { Ok(()) @@ -281,9 +306,10 @@ impl<'a> LifecycleScripts<'a> { // take in all (non copy) packages from snapshot, // and resolve the set of available binaries to create // custom commands available to the task runner -fn resolve_baseline_custom_commands( - snapshot: &NpmResolutionSnapshot, - packages: &[NpmResolutionPackage], +fn resolve_baseline_custom_commands<'a>( + bin_entries: &mut BinEntries<'a>, + snapshot: &'a NpmResolutionSnapshot, + packages: &'a [NpmResolutionPackage], get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf, ) -> Result<crate::task_runner::TaskCustomCommands, AnyError> { let mut custom_commands = crate::task_runner::TaskCustomCommands::new(); @@ -306,6 
+332,7 @@ fn resolve_baseline_custom_commands( // doing it for packages that are set up already. // realistically, scripts won't be run very often so it probably isn't too big of an issue. resolve_custom_commands_from_packages( + bin_entries, custom_commands, snapshot, packages, @@ -320,12 +347,12 @@ fn resolve_custom_commands_from_packages< 'a, P: IntoIterator<Item = &'a NpmResolutionPackage>, >( + bin_entries: &mut BinEntries<'a>, mut commands: crate::task_runner::TaskCustomCommands, snapshot: &'a NpmResolutionSnapshot, packages: P, get_package_path: impl Fn(&'a NpmResolutionPackage) -> PathBuf, ) -> Result<crate::task_runner::TaskCustomCommands, AnyError> { - let mut bin_entries = BinEntries::new(); for package in packages { let package_path = get_package_path(package); @@ -333,7 +360,7 @@ fn resolve_custom_commands_from_packages< bin_entries.add(package, package_path); } } - let bins = bin_entries.into_bin_files(snapshot); + let bins: Vec<(String, PathBuf)> = bin_entries.collect_bin_files(snapshot); for (bin_name, script_path) in bins { commands.insert( bin_name.clone(), @@ -356,7 +383,9 @@ fn resolve_custom_commands_from_deps( snapshot: &NpmResolutionSnapshot, get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf, ) -> Result<crate::task_runner::TaskCustomCommands, AnyError> { + let mut bin_entries = BinEntries::new(); resolve_custom_commands_from_packages( + &mut bin_entries, baseline, snapshot, package diff --git a/cli/npm/managed/resolvers/global.rs b/cli/npm/managed/resolvers/global.rs index 5be315e99..f0193e78e 100644 --- a/cli/npm/managed/resolvers/global.rs +++ b/cli/npm/managed/resolvers/global.rs @@ -11,7 +11,6 @@ use crate::colors; use async_trait::async_trait; use deno_ast::ModuleSpecifier; use deno_core::error::AnyError; -use deno_core::url::Url; use deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageId; use deno_npm::NpmResolutionPackage; @@ -56,7 +55,7 @@ impl GlobalNpmPackageResolver { Self { registry_read_permission_checker: 
RegistryReadPermissionChecker::new( fs, - cache.root_folder(), + cache.root_dir_path().to_path_buf(), ), cache, tarball_cache, @@ -69,10 +68,6 @@ impl GlobalNpmPackageResolver { #[async_trait(?Send)] impl NpmPackageFsResolver for GlobalNpmPackageResolver { - fn root_dir_url(&self) -> &Url { - self.cache.root_dir_url() - } - fn node_modules_path(&self) -> Option<&Path> { None } diff --git a/cli/npm/managed/resolvers/local.rs b/cli/npm/managed/resolvers/local.rs index 54f7576ad..50c5bd268 100644 --- a/cli/npm/managed/resolvers/local.rs +++ b/cli/npm/managed/resolvers/local.rs @@ -55,6 +55,7 @@ use crate::util::progress_bar::ProgressMessagePrompt; use super::super::cache::NpmCache; use super::super::cache::TarballCache; use super::super::resolution::NpmResolution; +use super::common::bin_entries; use super::common::NpmPackageFsResolver; use super::common::RegistryReadPermissionChecker; @@ -155,10 +156,6 @@ impl LocalNpmPackageResolver { #[async_trait(?Send)] impl NpmPackageFsResolver for LocalNpmPackageResolver { - fn root_dir_url(&self) -> &Url { - &self.root_node_modules_url - } - fn node_modules_path(&self) -> Option<&Path> { Some(self.root_node_modules_path.as_ref()) } @@ -333,8 +330,7 @@ async fn sync_resolution_with_fs( let mut cache_futures = FuturesUnordered::new(); let mut newest_packages_by_name: HashMap<&String, &NpmResolutionPackage> = HashMap::with_capacity(package_partitions.packages.len()); - let bin_entries = - Rc::new(RefCell::new(super::common::bin_entries::BinEntries::new())); + let bin_entries = Rc::new(RefCell::new(bin_entries::BinEntries::new())); let mut lifecycle_scripts = super::common::lifecycle_scripts::LifecycleScripts::new( lifecycle_scripts, @@ -662,7 +658,28 @@ async fn sync_resolution_with_fs( // 7. Set up `node_modules/.bin` entries for packages that need it. 
{ let bin_entries = std::mem::take(&mut *bin_entries.borrow_mut()); - bin_entries.finish(snapshot, &bin_node_modules_dir_path)?; + bin_entries.finish( + snapshot, + &bin_node_modules_dir_path, + |setup_outcome| { + match setup_outcome { + bin_entries::EntrySetupOutcome::MissingEntrypoint { + package, + package_path, + .. + } if super::common::lifecycle_scripts::has_lifecycle_scripts( + package, + package_path, + ) && lifecycle_scripts.can_run_scripts(&package.id.nv) + && !lifecycle_scripts.has_run_scripts(package) => + { + // ignore, it might get fixed when the lifecycle scripts run. + // if not, we'll warn then + } + outcome => outcome.warn_if_failed(), + } + }, + )?; } // 8. Create symlinks for the workspace packages @@ -712,7 +729,7 @@ async fn sync_resolution_with_fs( .finish( snapshot, &package_partitions.packages, - Some(root_node_modules_dir_path), + root_node_modules_dir_path, progress_bar, ) .await?; @@ -1039,12 +1056,18 @@ fn junction_or_symlink_dir( if symlink_err.kind() == std::io::ErrorKind::PermissionDenied => { USE_JUNCTIONS.store(true, std::sync::atomic::Ordering::Relaxed); - junction::create(old_path, new_path).map_err(Into::into) + junction::create(old_path, new_path) + .context("Failed creating junction in node_modules folder") + } + Err(symlink_err) => { + log::warn!( + "{} Unexpected error symlinking node_modules: {symlink_err}", + colors::yellow("Warning") + ); + USE_JUNCTIONS.store(true, std::sync::atomic::Ordering::Relaxed); + junction::create(old_path, new_path) + .context("Failed creating junction in node_modules folder") } - Err(symlink_err) => Err( - AnyError::from(symlink_err) - .context("Failed creating symlink in node_modules folder"), - ), } } diff --git a/cli/npm/mod.rs b/cli/npm/mod.rs index 53baaf77b..0e955ac5b 100644 --- a/cli/npm/mod.rs +++ b/cli/npm/mod.rs @@ -4,43 +4,40 @@ mod byonm; mod common; mod managed; +use std::borrow::Cow; use std::path::Path; -use std::path::PathBuf; use std::sync::Arc; +use 
common::maybe_auth_header_for_npm_registry; use dashmap::DashMap; -use deno_ast::ModuleSpecifier; use deno_core::error::AnyError; use deno_core::serde_json; +use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::registry::NpmPackageInfo; +use deno_resolver::npm::ByonmInNpmPackageChecker; use deno_resolver::npm::ByonmNpmResolver; -use deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError; -use deno_runtime::deno_node::NodeRequireResolver; +use deno_resolver::npm::CliNpmReqResolver; +use deno_resolver::npm::ResolvePkgFolderFromDenoReqError; +use deno_runtime::deno_node::NodePermissions; use deno_runtime::ops::process::NpmProcessStateProvider; use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; -use node_resolver::NpmResolver; -use thiserror::Error; +use managed::cache::registry_info::get_package_url; +use managed::create_managed_in_npm_pkg_checker; +use node_resolver::InNpmPackageChecker; +use node_resolver::NpmPackageFolderResolver; -use crate::args::npm_registry_url; use crate::file_fetcher::FileFetcher; pub use self::byonm::CliByonmNpmResolver; pub use self::byonm::CliByonmNpmResolverCreateOptions; -pub use self::managed::CliNpmResolverManagedCreateOptions; +pub use self::managed::CliManagedInNpmPkgCheckerCreateOptions; +pub use self::managed::CliManagedNpmResolverCreateOptions; pub use self::managed::CliNpmResolverManagedSnapshotOption; pub use self::managed::ManagedCliNpmResolver; -#[derive(Debug, Error)] -pub enum ResolvePkgFolderFromDenoReqError { - #[error(transparent)] - Managed(deno_core::error::AnyError), - #[error(transparent)] - Byonm(#[from] ByonmResolvePkgFolderFromDenoReqError), -} - pub enum CliNpmResolverCreateOptions { - Managed(CliNpmResolverManagedCreateOptions), + Managed(CliManagedNpmResolverCreateOptions), Byonm(CliByonmNpmResolverCreateOptions), } @@ -66,18 +63,39 @@ pub async fn create_cli_npm_resolver( } } +pub enum CreateInNpmPkgCheckerOptions<'a> { + Managed(CliManagedInNpmPkgCheckerCreateOptions<'a>), + Byonm, +} 
+ +pub fn create_in_npm_pkg_checker( + options: CreateInNpmPkgCheckerOptions, +) -> Arc<dyn InNpmPackageChecker> { + match options { + CreateInNpmPkgCheckerOptions::Managed(options) => { + create_managed_in_npm_pkg_checker(options) + } + CreateInNpmPkgCheckerOptions::Byonm => Arc::new(ByonmInNpmPackageChecker), + } +} + pub enum InnerCliNpmResolverRef<'a> { Managed(&'a ManagedCliNpmResolver), #[allow(dead_code)] Byonm(&'a CliByonmNpmResolver), } -pub trait CliNpmResolver: NpmResolver { - fn into_npm_resolver(self: Arc<Self>) -> Arc<dyn NpmResolver>; - fn into_require_resolver(self: Arc<Self>) -> Arc<dyn NodeRequireResolver>; +pub trait CliNpmResolver: NpmPackageFolderResolver + CliNpmReqResolver { + fn into_npm_pkg_folder_resolver( + self: Arc<Self>, + ) -> Arc<dyn NpmPackageFolderResolver>; + fn into_npm_req_resolver(self: Arc<Self>) -> Arc<dyn CliNpmReqResolver>; fn into_process_state_provider( self: Arc<Self>, ) -> Arc<dyn NpmProcessStateProvider>; + fn into_maybe_byonm(self: Arc<Self>) -> Option<Arc<CliByonmNpmResolver>> { + None + } fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver>; @@ -99,11 +117,11 @@ pub trait CliNpmResolver: NpmResolver { fn root_node_modules_path(&self) -> Option<&Path>; - fn resolve_pkg_folder_from_deno_module_req( + fn ensure_read_permission<'a>( &self, - req: &PackageReq, - referrer: &ModuleSpecifier, - ) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError>; + permissions: &mut dyn NodePermissions, + path: &'a Path, + ) -> Result<Cow<'a, Path>, AnyError>; /// Returns a hash returning the state of the npm resolver /// or `None` if the state currently can't be determined. 
@@ -115,14 +133,19 @@ pub struct NpmFetchResolver { nv_by_req: DashMap<PackageReq, Option<PackageNv>>, info_by_name: DashMap<String, Option<Arc<NpmPackageInfo>>>, file_fetcher: Arc<FileFetcher>, + npmrc: Arc<ResolvedNpmRc>, } impl NpmFetchResolver { - pub fn new(file_fetcher: Arc<FileFetcher>) -> Self { + pub fn new( + file_fetcher: Arc<FileFetcher>, + npmrc: Arc<ResolvedNpmRc>, + ) -> Self { Self { nv_by_req: Default::default(), info_by_name: Default::default(), file_fetcher, + npmrc, } } @@ -157,11 +180,21 @@ impl NpmFetchResolver { return info.value().clone(); } let fetch_package_info = || async { - let info_url = npm_registry_url().join(name).ok()?; + let info_url = get_package_url(&self.npmrc, name); let file_fetcher = self.file_fetcher.clone(); + let registry_config = self.npmrc.get_registry_config(name); + // TODO(bartlomieju): this should error out, not use `.ok()`. + let maybe_auth_header = + maybe_auth_header_for_npm_registry(registry_config).ok()?; // spawn due to the lsp's `Send` requirement let file = deno_core::unsync::spawn(async move { - file_fetcher.fetch_bypass_permissions(&info_url).await.ok() + file_fetcher + .fetch_bypass_permissions_with_maybe_auth( + &info_url, + maybe_auth_header, + ) + .await + .ok() }) .await .ok()??; @@ -172,3 +205,15 @@ impl NpmFetchResolver { info } } + +pub const NPM_CONFIG_USER_AGENT_ENV_VAR: &str = "npm_config_user_agent"; + +pub fn get_npm_config_user_agent() -> String { + format!( + "deno/{} npm/? 
deno/{} {} {}", + env!("CARGO_PKG_VERSION"), + env!("CARGO_PKG_VERSION"), + std::env::consts::OS, + std::env::consts::ARCH + ) +} diff --git a/cli/ops/bench.rs b/cli/ops/bench.rs index 5d1e6e746..1f4a4bd9b 100644 --- a/cli/ops/bench.rs +++ b/cli/ops/bench.rs @@ -2,7 +2,6 @@ use std::sync::atomic::AtomicUsize; use std::sync::atomic::Ordering; -use std::time; use deno_core::error::generic_error; use deno_core::error::type_error; @@ -13,6 +12,7 @@ use deno_core::ModuleSpecifier; use deno_core::OpState; use deno_runtime::deno_permissions::ChildPermissionsArg; use deno_runtime::deno_permissions::PermissionsContainer; +use deno_runtime::deno_web::StartTime; use tokio::sync::mpsc::UnboundedSender; use uuid::Uuid; @@ -56,7 +56,7 @@ struct PermissionsHolder(Uuid, PermissionsContainer); pub fn op_pledge_test_permissions( state: &mut OpState, #[serde] args: ChildPermissionsArg, -) -> Result<Uuid, AnyError> { +) -> Result<Uuid, deno_runtime::deno_permissions::ChildPermissionError> { let token = Uuid::new_v4(); let parent_permissions = state.borrow_mut::<PermissionsContainer>(); let worker_permissions = parent_permissions.create_child_permissions(args)?; @@ -147,8 +147,8 @@ fn op_dispatch_bench_event(state: &mut OpState, #[serde] event: BenchEvent) { #[op2(fast)] #[number] -fn op_bench_now(state: &mut OpState) -> Result<u64, AnyError> { - let ns = state.borrow::<time::Instant>().elapsed().as_nanos(); +fn op_bench_now(state: &mut OpState) -> Result<u64, std::num::TryFromIntError> { + let ns = state.borrow::<StartTime>().elapsed().as_nanos(); let ns_u64 = u64::try_from(ns)?; Ok(ns_u64) } diff --git a/cli/ops/jupyter.rs b/cli/ops/jupyter.rs index f7f006d9b..5bdf97e60 100644 --- a/cli/ops/jupyter.rs +++ b/cli/ops/jupyter.rs @@ -46,7 +46,7 @@ pub fn op_jupyter_input( state: &mut OpState, #[string] prompt: String, is_password: bool, -) -> Result<Option<String>, AnyError> { +) -> Option<String> { let (last_execution_request, stdin_connection_proxy) = { ( 
state.borrow::<Arc<Mutex<Option<JupyterMessage>>>>().clone(), @@ -58,11 +58,11 @@ pub fn op_jupyter_input( if let Some(last_request) = maybe_last_request { let JupyterMessageContent::ExecuteRequest(msg) = &last_request.content else { - return Ok(None); + return None; }; if !msg.allow_stdin { - return Ok(None); + return None; } let content = InputRequest { @@ -73,7 +73,7 @@ pub fn op_jupyter_input( let msg = JupyterMessage::new(content, Some(&last_request)); let Ok(()) = stdin_connection_proxy.lock().tx.send(msg) else { - return Ok(None); + return None; }; // Need to spawn a separate thread here, because `blocking_recv()` can't @@ -82,17 +82,25 @@ pub fn op_jupyter_input( stdin_connection_proxy.lock().rx.blocking_recv() }); let Ok(Some(response)) = join_handle.join() else { - return Ok(None); + return None; }; let JupyterMessageContent::InputReply(msg) = response.content else { - return Ok(None); + return None; }; - return Ok(Some(msg.value)); + return Some(msg.value); } - Ok(None) + None +} + +#[derive(Debug, thiserror::Error)] +pub enum JupyterBroadcastError { + #[error(transparent)] + SerdeJson(serde_json::Error), + #[error(transparent)] + ZeroMq(AnyError), } #[op2(async)] @@ -102,7 +110,7 @@ pub async fn op_jupyter_broadcast( #[serde] content: serde_json::Value, #[serde] metadata: serde_json::Value, #[serde] buffers: Vec<deno_core::JsBuffer>, -) -> Result<(), AnyError> { +) -> Result<(), JupyterBroadcastError> { let (iopub_connection, last_execution_request) = { let s = state.borrow(); @@ -125,36 +133,35 @@ pub async fn op_jupyter_broadcast( content, err ); - err + JupyterBroadcastError::SerdeJson(err) })?; let jupyter_message = JupyterMessage::new(content, Some(&last_request)) .with_metadata(metadata) .with_buffers(buffers.into_iter().map(|b| b.to_vec().into()).collect()); - iopub_connection.lock().send(jupyter_message).await?; + iopub_connection + .lock() + .send(jupyter_message) + .await + .map_err(JupyterBroadcastError::ZeroMq)?; } Ok(()) } #[op2(fast)] -pub 
fn op_print( - state: &mut OpState, - #[string] msg: &str, - is_err: bool, -) -> Result<(), AnyError> { +pub fn op_print(state: &mut OpState, #[string] msg: &str, is_err: bool) { let sender = state.borrow_mut::<mpsc::UnboundedSender<StreamContent>>(); if is_err { if let Err(err) = sender.send(StreamContent::stderr(msg)) { log::error!("Failed to send stderr message: {}", err); } - return Ok(()); + return; } if let Err(err) = sender.send(StreamContent::stdout(msg)) { log::error!("Failed to send stdout message: {}", err); } - Ok(()) } diff --git a/cli/ops/testing.rs b/cli/ops/testing.rs index c3f469656..00aafb828 100644 --- a/cli/ops/testing.rs +++ b/cli/ops/testing.rs @@ -51,7 +51,7 @@ struct PermissionsHolder(Uuid, PermissionsContainer); pub fn op_pledge_test_permissions( state: &mut OpState, #[serde] args: ChildPermissionsArg, -) -> Result<Uuid, AnyError> { +) -> Result<Uuid, deno_runtime::deno_permissions::ChildPermissionError> { let token = Uuid::new_v4(); let parent_permissions = state.borrow_mut::<PermissionsContainer>(); let worker_permissions = parent_permissions.create_child_permissions(args)?; @@ -150,7 +150,7 @@ fn op_register_test_step( #[smi] parent_id: usize, #[smi] root_id: usize, #[string] root_name: String, -) -> Result<usize, AnyError> { +) -> usize { let id = NEXT_ID.fetch_add(1, Ordering::SeqCst); let origin = state.borrow::<ModuleSpecifier>().to_string(); let description = TestStepDescription { @@ -169,7 +169,7 @@ fn op_register_test_step( }; let sender = state.borrow_mut::<TestEventSender>(); sender.send(TestEvent::StepRegister(description)).ok(); - Ok(id) + id } #[op2(fast)] diff --git a/cli/resolver.rs b/cli/resolver.rs index 7804261b8..a2dd47430 100644 --- a/cli/resolver.rs +++ b/cli/resolver.rs @@ -4,56 +4,55 @@ use async_trait::async_trait; use dashmap::DashMap; use dashmap::DashSet; use deno_ast::MediaType; -use deno_config::workspace::MappedResolution; use deno_config::workspace::MappedResolutionDiagnostic; use 
deno_config::workspace::MappedResolutionError; -use deno_config::workspace::WorkspaceResolver; use deno_core::anyhow::anyhow; use deno_core::anyhow::Context; use deno_core::error::AnyError; +use deno_core::url::Url; use deno_core::ModuleSourceCode; use deno_core::ModuleSpecifier; use deno_graph::source::ResolutionMode; use deno_graph::source::ResolveError; -use deno_graph::source::Resolver; use deno_graph::source::UnknownBuiltInNodeModuleError; -use deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE; use deno_graph::NpmLoadError; use deno_graph::NpmResolvePkgReqsResult; use deno_npm::resolution::NpmResolutionError; -use deno_package_json::PackageJsonDepValue; -use deno_resolver::sloppy_imports::SloppyImportsResolutionMode; use deno_resolver::sloppy_imports::SloppyImportsResolver; use deno_runtime::colors; use deno_runtime::deno_fs; use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_node::is_builtin_node_module; -use deno_runtime::deno_node::NodeResolver; -use deno_semver::npm::NpmPackageReqReference; +use deno_runtime::deno_node::DenoFsNodeResolverEnv; use deno_semver::package::PackageReq; -use node_resolver::errors::ClosestPkgJsonError; -use node_resolver::errors::NodeResolveError; -use node_resolver::errors::NodeResolveErrorKind; -use node_resolver::errors::PackageFolderResolveErrorKind; -use node_resolver::errors::PackageFolderResolveIoError; -use node_resolver::errors::PackageNotFoundError; -use node_resolver::errors::PackageResolveErrorKind; -use node_resolver::errors::UrlToNodeResolutionError; use node_resolver::NodeModuleKind; -use node_resolver::NodeResolution; use node_resolver::NodeResolutionMode; -use node_resolver::PackageJson; use std::borrow::Cow; use std::path::Path; use std::path::PathBuf; use std::sync::Arc; +use thiserror::Error; -use crate::args::JsxImportSourceConfig; use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS; use crate::node::CliNodeCodeTranslator; use crate::npm::CliNpmResolver; use crate::npm::InnerCliNpmResolverRef; 
use crate::util::sync::AtomicFlag; +use crate::util::text_encoding::from_utf8_lossy_owned; + +pub type CjsTracker = deno_resolver::cjs::CjsTracker<DenoFsNodeResolverEnv>; +pub type IsCjsResolver = + deno_resolver::cjs::IsCjsResolver<DenoFsNodeResolverEnv>; +pub type IsCjsResolverOptions = deno_resolver::cjs::IsCjsResolverOptions; +pub type CliSloppyImportsResolver = + SloppyImportsResolver<SloppyImportsCachedFs>; +pub type CliDenoResolver = deno_resolver::DenoResolver< + CliDenoResolverFs, + DenoFsNodeResolverEnv, + SloppyImportsCachedFs, +>; +pub type CliNpmReqResolver = + deno_resolver::npm::NpmReqResolver<CliDenoResolverFs, DenoFsNodeResolverEnv>; pub struct ModuleCodeStringSource { pub code: ModuleSourceCode, @@ -76,6 +75,10 @@ impl deno_resolver::fs::DenoResolverFs for CliDenoResolverFs { self.0.realpath_sync(path).map_err(|e| e.into_io_error()) } + fn exists_sync(&self, path: &Path) -> bool { + self.0.exists_sync(path) + } + fn is_dir_sync(&self, path: &Path) -> bool { self.0.is_dir_sync(path) } @@ -101,278 +104,31 @@ impl deno_resolver::fs::DenoResolverFs for CliDenoResolverFs { } } -#[derive(Debug)] -pub struct CliNodeResolver { - cjs_resolutions: Arc<CjsResolutionStore>, - fs: Arc<dyn deno_fs::FileSystem>, - node_resolver: Arc<NodeResolver>, - npm_resolver: Arc<dyn CliNpmResolver>, -} - -impl CliNodeResolver { - pub fn new( - cjs_resolutions: Arc<CjsResolutionStore>, - fs: Arc<dyn deno_fs::FileSystem>, - node_resolver: Arc<NodeResolver>, - npm_resolver: Arc<dyn CliNpmResolver>, - ) -> Self { - Self { - cjs_resolutions, - fs, - node_resolver, - npm_resolver, - } - } - - pub fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool { - self.npm_resolver.in_npm_package(specifier) - } - - pub fn get_closest_package_json( - &self, - referrer: &ModuleSpecifier, - ) -> Result<Option<Arc<PackageJson>>, ClosestPkgJsonError> { - self.node_resolver.get_closest_package_json(referrer) - } - - pub fn resolve_if_for_npm_pkg( - &self, - specifier: &str, - referrer: 
&ModuleSpecifier, - mode: NodeResolutionMode, - ) -> Result<Option<NodeResolution>, AnyError> { - let resolution_result = self.resolve(specifier, referrer, mode); - match resolution_result { - Ok(res) => Ok(Some(res)), - Err(err) => { - let err = err.into_kind(); - match err { - NodeResolveErrorKind::RelativeJoin(_) - | NodeResolveErrorKind::PackageImportsResolve(_) - | NodeResolveErrorKind::UnsupportedEsmUrlScheme(_) - | NodeResolveErrorKind::DataUrlReferrer(_) - | NodeResolveErrorKind::TypesNotFound(_) - | NodeResolveErrorKind::FinalizeResolution(_) - | NodeResolveErrorKind::UrlToNodeResolution(_) => Err(err.into()), - NodeResolveErrorKind::PackageResolve(err) => { - let err = err.into_kind(); - match err { - PackageResolveErrorKind::ClosestPkgJson(_) - | PackageResolveErrorKind::InvalidModuleSpecifier(_) - | PackageResolveErrorKind::ExportsResolve(_) - | PackageResolveErrorKind::SubpathResolve(_) => Err(err.into()), - PackageResolveErrorKind::PackageFolderResolve(err) => { - match err.as_kind() { - PackageFolderResolveErrorKind::Io( - PackageFolderResolveIoError { package_name, .. }, - ) - | PackageFolderResolveErrorKind::PackageNotFound( - PackageNotFoundError { package_name, .. }, - ) => { - if self.in_npm_package(referrer) { - return Err(err.into()); - } - if let Some(byonm_npm_resolver) = - self.npm_resolver.as_byonm() - { - if byonm_npm_resolver - .find_ancestor_package_json_with_dep( - package_name, - referrer, - ) - .is_some() - { - return Err(anyhow!( - concat!( - "Could not resolve \"{}\", but found it in a package.json. ", - "Deno expects the node_modules/ directory to be up to date. ", - "Did you forget to run `deno install`?" 
- ), - specifier - )); - } - } - Ok(None) - } - PackageFolderResolveErrorKind::ReferrerNotFound(_) => { - if self.in_npm_package(referrer) { - return Err(err.into()); - } - Ok(None) - } - } - } - } - } - } - } - } - } - - pub fn resolve( - &self, - specifier: &str, - referrer: &ModuleSpecifier, - mode: NodeResolutionMode, - ) -> Result<NodeResolution, NodeResolveError> { - let referrer_kind = if self.cjs_resolutions.contains(referrer) { - NodeModuleKind::Cjs - } else { - NodeModuleKind::Esm - }; - - let res = - self - .node_resolver - .resolve(specifier, referrer, referrer_kind, mode)?; - Ok(self.handle_node_resolution(res)) - } - - pub fn resolve_req_reference( - &self, - req_ref: &NpmPackageReqReference, - referrer: &ModuleSpecifier, - mode: NodeResolutionMode, - ) -> Result<NodeResolution, AnyError> { - self.resolve_req_with_sub_path( - req_ref.req(), - req_ref.sub_path(), - referrer, - mode, - ) - } - - pub fn resolve_req_with_sub_path( - &self, - req: &PackageReq, - sub_path: Option<&str>, - referrer: &ModuleSpecifier, - mode: NodeResolutionMode, - ) -> Result<NodeResolution, AnyError> { - let package_folder = self - .npm_resolver - .resolve_pkg_folder_from_deno_module_req(req, referrer)?; - let resolution_result = self.resolve_package_sub_path_from_deno_module( - &package_folder, - sub_path, - Some(referrer), - mode, - ); - match resolution_result { - Ok(resolution) => Ok(resolution), - Err(err) => { - if self.npm_resolver.as_byonm().is_some() { - let package_json_path = package_folder.join("package.json"); - if !self.fs.exists_sync(&package_json_path) { - return Err(anyhow!( - "Could not find '{}'. Deno expects the node_modules/ directory to be up to date. 
Did you forget to run `deno install`?", - package_json_path.display(), - )); - } - } - Err(err) - } - } - } - - pub fn resolve_package_sub_path_from_deno_module( - &self, - package_folder: &Path, - sub_path: Option<&str>, - maybe_referrer: Option<&ModuleSpecifier>, - mode: NodeResolutionMode, - ) -> Result<NodeResolution, AnyError> { - let res = self - .node_resolver - .resolve_package_subpath_from_deno_module( - package_folder, - sub_path, - maybe_referrer, - mode, - )?; - Ok(self.handle_node_resolution(res)) - } - - pub fn handle_if_in_node_modules( - &self, - specifier: &ModuleSpecifier, - ) -> Result<Option<ModuleSpecifier>, AnyError> { - // skip canonicalizing if we definitely know it's unnecessary - if specifier.scheme() == "file" - && specifier.path().contains("/node_modules/") - { - // Specifiers in the node_modules directory are canonicalized - // so canoncalize then check if it's in the node_modules directory. - // If so, check if we need to store this specifier as being a CJS - // resolution. 
- let specifier = - crate::node::resolve_specifier_into_node_modules(specifier); - if self.in_npm_package(&specifier) { - let resolution = - self.node_resolver.url_to_node_resolution(specifier)?; - if let NodeResolution::CommonJs(specifier) = &resolution { - self.cjs_resolutions.insert(specifier.clone()); - } - return Ok(Some(resolution.into_url())); - } - } - - Ok(None) - } - - pub fn url_to_node_resolution( - &self, - specifier: ModuleSpecifier, - ) -> Result<NodeResolution, UrlToNodeResolutionError> { - self.node_resolver.url_to_node_resolution(specifier) - } - - fn handle_node_resolution( - &self, - resolution: NodeResolution, - ) -> NodeResolution { - if let NodeResolution::CommonJs(specifier) = &resolution { - // remember that this was a common js resolution - self.cjs_resolutions.insert(specifier.clone()); - } - resolution - } +#[derive(Debug, Error)] +#[error("{media_type} files are not supported in npm packages: {specifier}")] +pub struct NotSupportedKindInNpmError { + pub media_type: MediaType, + pub specifier: Url, } +// todo(dsherret): move to module_loader.rs (it seems to be here due to use in standalone) #[derive(Clone)] pub struct NpmModuleLoader { - cjs_resolutions: Arc<CjsResolutionStore>, - node_code_translator: Arc<CliNodeCodeTranslator>, + cjs_tracker: Arc<CjsTracker>, fs: Arc<dyn deno_fs::FileSystem>, - node_resolver: Arc<CliNodeResolver>, + node_code_translator: Arc<CliNodeCodeTranslator>, } impl NpmModuleLoader { pub fn new( - cjs_resolutions: Arc<CjsResolutionStore>, - node_code_translator: Arc<CliNodeCodeTranslator>, + cjs_tracker: Arc<CjsTracker>, fs: Arc<dyn deno_fs::FileSystem>, - node_resolver: Arc<CliNodeResolver>, + node_code_translator: Arc<CliNodeCodeTranslator>, ) -> Self { Self { - cjs_resolutions, + cjs_tracker, node_code_translator, fs, - node_resolver, - } - } - - pub async fn load_if_in_npm_package( - &self, - specifier: &ModuleSpecifier, - maybe_referrer: Option<&ModuleSpecifier>, - ) -> Option<Result<ModuleCodeStringSource, 
AnyError>> { - if self.node_resolver.in_npm_package(specifier) - || (specifier.scheme() == "file" && specifier.path().ends_with(".cjs")) - { - Some(self.load(specifier, maybe_referrer).await) - } else { - None } } @@ -418,27 +174,30 @@ impl NpmModuleLoader { } })?; - let code = if self.cjs_resolutions.contains(specifier) - || (specifier.scheme() == "file" && specifier.path().ends_with(".cjs")) - { + let media_type = MediaType::from_specifier(specifier); + if media_type.is_emittable() { + return Err(AnyError::from(NotSupportedKindInNpmError { + media_type, + specifier: specifier.clone(), + })); + } + + let code = if self.cjs_tracker.is_maybe_cjs(specifier, media_type)? { // translate cjs to esm if it's cjs and inject node globals - let code = match String::from_utf8_lossy(&code) { - Cow::Owned(code) => code, - // SAFETY: `String::from_utf8_lossy` guarantees that the result is valid - // UTF-8 if `Cow::Borrowed` is returned. - Cow::Borrowed(_) => unsafe { String::from_utf8_unchecked(code) }, - }; + let code = from_utf8_lossy_owned(code); ModuleSourceCode::String( self .node_code_translator - .translate_cjs_to_esm(specifier, Some(code)) + .translate_cjs_to_esm(specifier, Some(Cow::Owned(code))) .await? + .into_owned() .into(), ) } else { // esm and json code is untouched ModuleSourceCode::Bytes(code.into_boxed_slice().into()) }; + Ok(ModuleCodeStringSource { code, found_url: specifier.clone(), @@ -447,81 +206,36 @@ impl NpmModuleLoader { } } -/// Keeps track of what module specifiers were resolved as CJS. 
-#[derive(Debug, Default)] -pub struct CjsResolutionStore(DashSet<ModuleSpecifier>); - -impl CjsResolutionStore { - pub fn contains(&self, specifier: &ModuleSpecifier) -> bool { - self.0.contains(specifier) - } - - pub fn insert(&self, specifier: ModuleSpecifier) { - self.0.insert(specifier); - } +pub struct CliResolverOptions { + pub deno_resolver: Arc<CliDenoResolver>, + pub npm_resolver: Option<Arc<dyn CliNpmResolver>>, + pub bare_node_builtins_enabled: bool, } -pub type CliSloppyImportsResolver = - SloppyImportsResolver<SloppyImportsCachedFs>; - /// A resolver that takes care of resolution, taking into account loaded /// import map, JSX settings. #[derive(Debug)] -pub struct CliGraphResolver { - node_resolver: Option<Arc<CliNodeResolver>>, +pub struct CliResolver { + deno_resolver: Arc<CliDenoResolver>, npm_resolver: Option<Arc<dyn CliNpmResolver>>, - sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>, - workspace_resolver: Arc<WorkspaceResolver>, - maybe_default_jsx_import_source: Option<String>, - maybe_default_jsx_import_source_types: Option<String>, - maybe_jsx_import_source_module: Option<String>, - maybe_vendor_specifier: Option<ModuleSpecifier>, found_package_json_dep_flag: AtomicFlag, bare_node_builtins_enabled: bool, warned_pkgs: DashSet<PackageReq>, } -pub struct CliGraphResolverOptions<'a> { - pub node_resolver: Option<Arc<CliNodeResolver>>, - pub npm_resolver: Option<Arc<dyn CliNpmResolver>>, - pub sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>, - pub workspace_resolver: Arc<WorkspaceResolver>, - pub bare_node_builtins_enabled: bool, - pub maybe_jsx_import_source_config: Option<JsxImportSourceConfig>, - pub maybe_vendor_dir: Option<&'a PathBuf>, -} - -impl CliGraphResolver { - pub fn new(options: CliGraphResolverOptions) -> Self { +impl CliResolver { + pub fn new(options: CliResolverOptions) -> Self { Self { - node_resolver: options.node_resolver, + deno_resolver: options.deno_resolver, npm_resolver: options.npm_resolver, 
- sloppy_imports_resolver: options.sloppy_imports_resolver, - workspace_resolver: options.workspace_resolver, - maybe_default_jsx_import_source: options - .maybe_jsx_import_source_config - .as_ref() - .and_then(|c| c.default_specifier.clone()), - maybe_default_jsx_import_source_types: options - .maybe_jsx_import_source_config - .as_ref() - .and_then(|c| c.default_types_specifier.clone()), - maybe_jsx_import_source_module: options - .maybe_jsx_import_source_config - .map(|c| c.module), - maybe_vendor_specifier: options - .maybe_vendor_dir - .and_then(|v| ModuleSpecifier::from_directory_path(v).ok()), found_package_json_dep_flag: Default::default(), bare_node_builtins_enabled: options.bare_node_builtins_enabled, warned_pkgs: Default::default(), } } - pub fn as_graph_resolver(&self) -> &dyn Resolver { - self - } - + // todo(dsherret): move this off CliResolver as CliResolver is acting + // like a factory by doing this (it's beyond its responsibility) pub fn create_graph_npm_resolver(&self) -> WorkerCliNpmGraphResolver { WorkerCliNpmGraphResolver { npm_resolver: self.npm_resolver.as_ref(), @@ -529,28 +243,12 @@ impl CliGraphResolver { bare_node_builtins_enabled: self.bare_node_builtins_enabled, } } -} - -impl Resolver for CliGraphResolver { - fn default_jsx_import_source(&self) -> Option<String> { - self.maybe_default_jsx_import_source.clone() - } - - fn default_jsx_import_source_types(&self) -> Option<String> { - self.maybe_default_jsx_import_source_types.clone() - } - - fn jsx_import_source_module(&self) -> &str { - self - .maybe_jsx_import_source_module - .as_deref() - .unwrap_or(DEFAULT_JSX_IMPORT_SOURCE_MODULE) - } - fn resolve( + pub fn resolve( &self, raw_specifier: &str, referrer_range: &deno_graph::Range, + referrer_kind: NodeModuleKind, mode: ResolutionMode, ) -> Result<ModuleSpecifier, ResolveError> { fn to_node_mode(mode: ResolutionMode) -> NodeResolutionMode { @@ -560,218 +258,50 @@ impl Resolver for CliGraphResolver { } } - let referrer = 
&referrer_range.specifier; + let resolution = self + .deno_resolver + .resolve( + raw_specifier, + &referrer_range.specifier, + referrer_kind, + to_node_mode(mode), + ) + .map_err(|err| match err.into_kind() { + deno_resolver::DenoResolveErrorKind::MappedResolution( + mapped_resolution_error, + ) => match mapped_resolution_error { + MappedResolutionError::Specifier(e) => ResolveError::Specifier(e), + // deno_graph checks specifically for an ImportMapError + MappedResolutionError::ImportMap(e) => ResolveError::Other(e.into()), + err => ResolveError::Other(err.into()), + }, + err => ResolveError::Other(err.into()), + })?; - // Use node resolution if we're in an npm package - if let Some(node_resolver) = self.node_resolver.as_ref() { - if referrer.scheme() == "file" && node_resolver.in_npm_package(referrer) { - return node_resolver - .resolve(raw_specifier, referrer, to_node_mode(mode)) - .map(|res| res.into_url()) - .map_err(|e| ResolveError::Other(e.into())); - } + if resolution.found_package_json_dep { + // mark that we need to do an "npm install" later + self.found_package_json_dep_flag.raise(); } - // Attempt to resolve with the workspace resolver - let result: Result<_, ResolveError> = self - .workspace_resolver - .resolve(raw_specifier, referrer) - .map_err(|err| match err { - MappedResolutionError::Specifier(err) => ResolveError::Specifier(err), - MappedResolutionError::ImportMap(err) => { - ResolveError::Other(err.into()) - } - MappedResolutionError::Workspace(err) => { - ResolveError::Other(err.into()) - } - }); - let result = match result { - Ok(resolution) => match resolution { - MappedResolution::Normal { - specifier, - maybe_diagnostic, - } - | MappedResolution::ImportMap { - specifier, - maybe_diagnostic, - } => { - if let Some(diagnostic) = maybe_diagnostic { - match &*diagnostic { - MappedResolutionDiagnostic::ConstraintNotMatchedLocalVersion { reference, .. 
} => { - if self.warned_pkgs.insert(reference.req().clone()) { - log::warn!("{} {}\n at {}", colors::yellow("Warning"), diagnostic, referrer_range); - } - } - } - } - // do sloppy imports resolution if enabled - if let Some(sloppy_imports_resolver) = &self.sloppy_imports_resolver { - Ok( - sloppy_imports_resolver - .resolve( - &specifier, - match mode { - ResolutionMode::Execution => { - SloppyImportsResolutionMode::Execution - } - ResolutionMode::Types => SloppyImportsResolutionMode::Types, - }, - ) - .map(|s| s.into_specifier()) - .unwrap_or(specifier), - ) - } else { - Ok(specifier) - } - } - MappedResolution::WorkspaceJsrPackage { specifier, .. } => { - Ok(specifier) - } - MappedResolution::WorkspaceNpmPackage { - target_pkg_json: pkg_json, - sub_path, - .. - } => self - .node_resolver - .as_ref() - .unwrap() - .resolve_package_sub_path_from_deno_module( - pkg_json.dir_path(), - sub_path.as_deref(), - Some(referrer), - to_node_mode(mode), - ) - .map_err(ResolveError::Other) - .map(|res| res.into_url()), - MappedResolution::PackageJson { - dep_result, - alias, - sub_path, + if let Some(diagnostic) = resolution.maybe_diagnostic { + match &*diagnostic { + MappedResolutionDiagnostic::ConstraintNotMatchedLocalVersion { + reference, .. 
} => { - // found a specifier in the package.json, so mark that - // we need to do an "npm install" later - self.found_package_json_dep_flag.raise(); - - dep_result - .as_ref() - .map_err(|e| ResolveError::Other(e.clone().into())) - .and_then(|dep| match dep { - PackageJsonDepValue::Req(req) => { - ModuleSpecifier::parse(&format!( - "npm:{}{}", - req, - sub_path.map(|s| format!("/{}", s)).unwrap_or_default() - )) - .map_err(|e| ResolveError::Other(e.into())) - } - PackageJsonDepValue::Workspace(version_req) => self - .workspace_resolver - .resolve_workspace_pkg_json_folder_for_pkg_json_dep( - alias, - version_req, - ) - .map_err(|e| ResolveError::Other(e.into())) - .and_then(|pkg_folder| { - Ok( - self - .node_resolver - .as_ref() - .unwrap() - .resolve_package_sub_path_from_deno_module( - pkg_folder, - sub_path.as_deref(), - Some(referrer), - to_node_mode(mode), - )? - .into_url(), - ) - }), - }) - } - }, - Err(err) => Err(err), - }; - - // When the user is vendoring, don't allow them to import directly from the vendor/ directory - // as it might cause them confusion or duplicate dependencies. Additionally, this folder has - // special treatment in the language server so it will definitely cause issues/confusion there - // if they do this. - if let Some(vendor_specifier) = &self.maybe_vendor_specifier { - if let Ok(specifier) = &result { - if specifier.as_str().starts_with(vendor_specifier.as_str()) { - return Err(ResolveError::Other(anyhow!("Importing from the vendor directory is not permitted. 
Use a remote specifier instead or disable vendoring."))); - } - } - } - - let Some(node_resolver) = &self.node_resolver else { - return result; - }; - - let is_byonm = self - .npm_resolver - .as_ref() - .is_some_and(|r| r.as_byonm().is_some()); - match result { - Ok(specifier) => { - if let Ok(npm_req_ref) = - NpmPackageReqReference::from_specifier(&specifier) - { - // check if the npm specifier resolves to a workspace member - if let Some(pkg_folder) = self - .workspace_resolver - .resolve_workspace_pkg_json_folder_for_npm_specifier( - npm_req_ref.req(), - ) - { - return Ok( - node_resolver - .resolve_package_sub_path_from_deno_module( - pkg_folder, - npm_req_ref.sub_path(), - Some(referrer), - to_node_mode(mode), - )? - .into_url(), + if self.warned_pkgs.insert(reference.req().clone()) { + log::warn!( + "{} {}\n at {}", + colors::yellow("Warning"), + diagnostic, + referrer_range ); } - - // do npm resolution for byonm - if is_byonm { - return node_resolver - .resolve_req_reference(&npm_req_ref, referrer, to_node_mode(mode)) - .map(|res| res.into_url()) - .map_err(|err| err.into()); - } - } - - Ok(match node_resolver.handle_if_in_node_modules(&specifier)? 
{ - Some(specifier) => specifier, - None => specifier, - }) - } - Err(err) => { - // If byonm, check if the bare specifier resolves to an npm package - if is_byonm && referrer.scheme() == "file" { - let maybe_resolution = node_resolver - .resolve_if_for_npm_pkg(raw_specifier, referrer, to_node_mode(mode)) - .map_err(ResolveError::Other)?; - if let Some(res) = maybe_resolution { - match res { - NodeResolution::Esm(url) | NodeResolution::CommonJs(url) => { - return Ok(url) - } - NodeResolution::BuiltIn(_) => { - // don't resolve bare specifiers for built-in modules via node resolution - } - } - } } - - Err(err) } } + + Ok(resolution.url) } } diff --git a/cli/schemas/config-file.v1.json b/cli/schemas/config-file.v1.json index af18e6f21..3ba803ef8 100644 --- a/cli/schemas/config-file.v1.json +++ b/cli/schemas/config-file.v1.json @@ -291,7 +291,7 @@ "type": "array", "description": "List of tag names that will be run. Empty list disables all tags and will only use rules from `include`.", "items": { - "type": "string" + "$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/tags.v1.json" }, "minItems": 0, "uniqueItems": true @@ -300,7 +300,7 @@ "type": "array", "description": "List of rule names that will be excluded from configured tag sets. If the same rule is in `include` it will be run.", "items": { - "type": "string" + "$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/rules.v1.json" }, "minItems": 0, "uniqueItems": true @@ -309,7 +309,7 @@ "type": "array", "description": "List of rule names that will be run. Even if the same rule is in `exclude` it will be run.", "items": { - "type": "string" + "$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/rules.v1.json" }, "minItems": 0, "uniqueItems": true @@ -431,8 +431,34 @@ "type": "object", "patternProperties": { "^[A-Za-z][A-Za-z0-9_\\-:]*$": { - "type": "string", - "description": "Command to execute for this task name." 
+ "oneOf": [ + { + "type": "string", + "description": "Command to execute for this task name." + }, + { + "type": "object", + "description": "A definition of a task to execute", + "properties": { + "description": { + "type": "string", + "description": "Description of a task that will be shown when running `deno task` without a task name" + }, + "command": { + "type": "string", + "required": true, + "description": "The task to execute" + }, + "dependencies": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Tasks that should be executed before this task" + } + } + } + ] } }, "additionalProperties": false @@ -530,9 +556,11 @@ "cron", "ffi", "fs", + "fmt-component", "http", "kv", "net", + "node-globals", "sloppy-imports", "temporal", "unsafe-proto", diff --git a/cli/standalone/binary.rs b/cli/standalone/binary.rs index 1290a238f..37753bafc 100644 --- a/cli/standalone/binary.rs +++ b/cli/standalone/binary.rs @@ -9,14 +9,19 @@ use std::ffi::OsString; use std::fs; use std::fs::File; use std::future::Future; +use std::io::ErrorKind; use std::io::Read; use std::io::Seek; use std::io::SeekFrom; use std::io::Write; +use std::ops::Range; use std::path::Path; use std::path::PathBuf; use std::process::Command; +use std::sync::Arc; +use deno_ast::MediaType; +use deno_ast::ModuleKind; use deno_ast::ModuleSpecifier; use deno_config::workspace::PackageJsonDepResolution; use deno_config::workspace::ResolverWorkspaceJsrPackage; @@ -30,13 +35,23 @@ use deno_core::futures::AsyncReadExt; use deno_core::futures::AsyncSeekExt; use deno_core::serde_json; use deno_core::url::Url; +use deno_graph::source::RealFileSystem; +use deno_graph::ModuleGraph; +use deno_npm::resolution::SerializedNpmResolutionSnapshot; +use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage; +use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot; +use deno_npm::NpmPackageId; use deno_npm::NpmSystemInfo; +use deno_runtime::deno_fs; +use deno_runtime::deno_fs::FileSystem; 
+use deno_runtime::deno_fs::RealFs; +use deno_runtime::deno_io::fs::FsError; use deno_runtime::deno_node::PackageJson; +use deno_runtime::ops::otel::OtelConfig; use deno_semver::npm::NpmVersionReqParseError; use deno_semver::package::PackageReq; use deno_semver::Version; use deno_semver::VersionReqSpecifierParseError; -use eszip::EszipRelativeFileBaseUrl; use indexmap::IndexMap; use log::Level; use serde::Deserialize; @@ -49,10 +64,13 @@ use crate::args::NpmInstallDepsProvider; use crate::args::PermissionFlags; use crate::args::UnstableConfig; use crate::cache::DenoDir; +use crate::cache::FastInsecureHasher; +use crate::emit::Emitter; use crate::file_fetcher::FileFetcher; use crate::http_util::HttpClientProvider; use crate::npm::CliNpmResolver; use crate::npm::InnerCliNpmResolverRef; +use crate::resolver::CjsTracker; use crate::shared::ReleaseChannel; use crate::standalone::virtual_fs::VfsEntry; use crate::util::archive; @@ -60,12 +78,63 @@ use crate::util::fs::canonicalize_path_maybe_not_exists; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; +use super::file_system::DenoCompileFileSystem; +use super::serialization::deserialize_binary_data_section; +use super::serialization::serialize_binary_data_section; +use super::serialization::DenoCompileModuleData; +use super::serialization::DeserializedDataSection; +use super::serialization::RemoteModulesStore; +use super::serialization::RemoteModulesStoreBuilder; use super::virtual_fs::FileBackedVfs; use super::virtual_fs::VfsBuilder; use super::virtual_fs::VfsRoot; use super::virtual_fs::VirtualDirectory; -const MAGIC_TRAILER: &[u8; 8] = b"d3n0l4nd"; +/// A URL that can be designated as the base for relative URLs. +/// +/// After creation, this URL may be used to get the key for a +/// module in the binary. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct StandaloneRelativeFileBaseUrl<'a>(&'a Url); + +impl<'a> From<&'a Url> for StandaloneRelativeFileBaseUrl<'a> { + fn from(url: &'a Url) -> Self { + Self(url) + } +} + +impl<'a> StandaloneRelativeFileBaseUrl<'a> { + pub fn new(url: &'a Url) -> Self { + debug_assert_eq!(url.scheme(), "file"); + Self(url) + } + + /// Gets the module map key of the provided specifier. + /// + /// * Descendant file specifiers will be made relative to the base. + /// * Non-descendant file specifiers will stay as-is (absolute). + /// * Non-file specifiers will stay as-is. + pub fn specifier_key<'b>(&self, target: &'b Url) -> Cow<'b, str> { + if target.scheme() != "file" { + return Cow::Borrowed(target.as_str()); + } + + match self.0.make_relative(target) { + Some(relative) => { + if relative.starts_with("../") { + Cow::Borrowed(target.as_str()) + } else { + Cow::Owned(relative) + } + } + None => Cow::Borrowed(target.as_str()), + } + } + + pub fn inner(&self) -> &Url { + self.0 + } +} #[derive(Deserialize, Serialize)] pub enum NodeModules { @@ -106,6 +175,7 @@ pub struct SerializedWorkspaceResolver { pub struct Metadata { pub argv: Vec<String>, pub seed: Option<u64>, + pub code_cache_key: Option<u64>, pub permissions: PermissionFlags, pub location: Option<Url>, pub v8_flags: Vec<String>, @@ -118,80 +188,26 @@ pub struct Metadata { pub entrypoint_key: String, pub node_modules: Option<NodeModules>, pub unstable_config: UnstableConfig, -} - -pub fn load_npm_vfs(root_dir_path: PathBuf) -> Result<FileBackedVfs, AnyError> { - let data = libsui::find_section("d3n0l4nd").unwrap(); - - // We do the first part sync so it can complete quickly - let trailer: [u8; TRAILER_SIZE] = data[0..TRAILER_SIZE].try_into().unwrap(); - let trailer = match Trailer::parse(&trailer)? 
{ - None => panic!("Could not find trailer"), - Some(trailer) => trailer, - }; - let data = &data[TRAILER_SIZE..]; - - let vfs_data = - &data[trailer.npm_vfs_pos as usize..trailer.npm_files_pos as usize]; - let mut dir: VirtualDirectory = serde_json::from_slice(vfs_data)?; - - // align the name of the directory with the root dir - dir.name = root_dir_path - .file_name() - .unwrap() - .to_string_lossy() - .to_string(); - - let fs_root = VfsRoot { - dir, - root_path: root_dir_path, - start_file_offset: trailer.npm_files_pos, - }; - Ok(FileBackedVfs::new(data.to_vec(), fs_root)) + pub otel_config: Option<OtelConfig>, // None means disabled. } fn write_binary_bytes( mut file_writer: File, original_bin: Vec<u8>, metadata: &Metadata, - eszip: eszip::EszipV2, - npm_vfs: Option<&VirtualDirectory>, - npm_files: &Vec<Vec<u8>>, + npm_snapshot: Option<SerializedNpmResolutionSnapshot>, + remote_modules: &RemoteModulesStoreBuilder, + vfs: VfsBuilder, compile_flags: &CompileFlags, ) -> Result<(), AnyError> { - let metadata = serde_json::to_string(metadata)?.as_bytes().to_vec(); - let npm_vfs = serde_json::to_string(&npm_vfs)?.as_bytes().to_vec(); - let eszip_archive = eszip.into_bytes(); - - let mut writer = Vec::new(); - - // write the trailer, which includes the positions - // of the data blocks in the file - writer.write_all(&{ - let metadata_pos = eszip_archive.len() as u64; - let npm_vfs_pos = metadata_pos + (metadata.len() as u64); - let npm_files_pos = npm_vfs_pos + (npm_vfs.len() as u64); - Trailer { - eszip_pos: 0, - metadata_pos, - npm_vfs_pos, - npm_files_pos, - } - .as_bytes() - })?; - - writer.write_all(&eszip_archive)?; - writer.write_all(&metadata)?; - writer.write_all(&npm_vfs)?; - for file in npm_files { - writer.write_all(file)?; - } + let data_section_bytes = + serialize_binary_data_section(metadata, npm_snapshot, remote_modules, vfs)?; let target = compile_flags.resolve_target(); if target.contains("linux") { libsui::Elf::new(&original_bin).append( "d3n0l4nd", 
- &writer, + &data_section_bytes, &mut file_writer, )?; } else if target.contains("windows") { @@ -201,11 +217,11 @@ fn write_binary_bytes( pe = pe.set_icon(&icon)?; } - pe.write_resource("d3n0l4nd", writer)? + pe.write_resource("d3n0l4nd", data_section_bytes)? .build(&mut file_writer)?; } else if target.contains("darwin") { libsui::Macho::from(original_bin)? - .write_section("d3n0l4nd", writer)? + .write_section("d3n0l4nd", data_section_bytes)? .build_and_sign(&mut file_writer)?; } Ok(()) @@ -221,6 +237,67 @@ pub fn is_standalone_binary(exe_path: &Path) -> bool { || libsui::utils::is_macho(&data) } +pub struct StandaloneData { + pub fs: Arc<dyn deno_fs::FileSystem>, + pub metadata: Metadata, + pub modules: StandaloneModules, + pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>, + pub root_path: PathBuf, + pub vfs: Arc<FileBackedVfs>, +} + +pub struct StandaloneModules { + remote_modules: RemoteModulesStore, + vfs: Arc<FileBackedVfs>, +} + +impl StandaloneModules { + pub fn resolve_specifier<'a>( + &'a self, + specifier: &'a ModuleSpecifier, + ) -> Result<Option<&'a ModuleSpecifier>, AnyError> { + if specifier.scheme() == "file" { + Ok(Some(specifier)) + } else { + self.remote_modules.resolve_specifier(specifier) + } + } + + pub fn has_file(&self, path: &Path) -> bool { + self.vfs.file_entry(path).is_ok() + } + + pub fn read<'a>( + &'a self, + specifier: &'a ModuleSpecifier, + ) -> Result<Option<DenoCompileModuleData<'a>>, AnyError> { + if specifier.scheme() == "file" { + let path = deno_path_util::url_to_file_path(specifier)?; + let bytes = match self.vfs.file_entry(&path) { + Ok(entry) => self.vfs.read_file_all(entry)?, + Err(err) if err.kind() == ErrorKind::NotFound => { + let bytes = match RealFs.read_file_sync(&path, None) { + Ok(bytes) => bytes, + Err(FsError::Io(err)) if err.kind() == ErrorKind::NotFound => { + return Ok(None) + } + Err(err) => return Err(err.into()), + }; + Cow::Owned(bytes) + } + Err(err) => return Err(err.into()), + }; + 
Ok(Some(DenoCompileModuleData { + media_type: MediaType::from_specifier(specifier), + specifier, + data: bytes, + })) + } else { + self.remote_modules.read(specifier) + } + } +} + /// This function will try to run this binary as a standalone binary /// produced by `deno compile`. It determines if this is a standalone /// binary by skipping over the trailer width at the end of the file, @@ -228,110 +305,67 @@ pub fn is_standalone_binary(exe_path: &Path) -> bool { /// the bundle is executed. If not, this function exits with `Ok(None)`. pub fn extract_standalone( cli_args: Cow<Vec<OsString>>, -) -> Result< - Option<impl Future<Output = Result<(Metadata, eszip::EszipV2), AnyError>>>, - AnyError, -> { +) -> Result<Option<StandaloneData>, AnyError> { let Some(data) = libsui::find_section("d3n0l4nd") else { return Ok(None); }; - // We do the first part sync so it can complete quickly - let trailer = match Trailer::parse(&data[0..TRAILER_SIZE])? { + let DeserializedDataSection { + mut metadata, + npm_snapshot, + remote_modules, + mut vfs_dir, + vfs_files_data, + } = match deserialize_binary_data_section(data)? 
{ + Some(data_section) => data_section, None => return Ok(None), - Some(trailer) => trailer, }; + let root_path = { + let maybe_current_exe = std::env::current_exe().ok(); + let current_exe_name = maybe_current_exe + .as_ref() + .and_then(|p| p.file_name()) + .map(|p| p.to_string_lossy()) + // should never happen + .unwrap_or_else(|| Cow::Borrowed("binary")); + std::env::temp_dir().join(format!("deno-compile-{}", current_exe_name)) + }; let cli_args = cli_args.into_owned(); - // If we have an eszip, read it out - Ok(Some(async move { - let bufreader = - deno_core::futures::io::BufReader::new(&data[TRAILER_SIZE..]); - - let (eszip, loader) = eszip::EszipV2::parse(bufreader) - .await - .context("Failed to parse eszip header")?; - - let bufreader = loader.await.context("Failed to parse eszip archive")?; - - let mut metadata = String::new(); - - bufreader - .take(trailer.metadata_len()) - .read_to_string(&mut metadata) - .await - .context("Failed to read metadata from the current executable")?; - - let mut metadata: Metadata = serde_json::from_str(&metadata).unwrap(); - metadata.argv.reserve(cli_args.len() - 1); - for arg in cli_args.into_iter().skip(1) { - metadata.argv.push(arg.into_string().unwrap()); - } - - Ok((metadata, eszip)) - })) -} - -const TRAILER_SIZE: usize = std::mem::size_of::<Trailer>() + 8; // 8 bytes for the magic trailer string - -struct Trailer { - eszip_pos: u64, - metadata_pos: u64, - npm_vfs_pos: u64, - npm_files_pos: u64, -} - -impl Trailer { - pub fn parse(trailer: &[u8]) -> Result<Option<Trailer>, AnyError> { - let (magic_trailer, rest) = trailer.split_at(8); - if magic_trailer != MAGIC_TRAILER { - return Ok(None); - } - - let (eszip_archive_pos, rest) = rest.split_at(8); - let (metadata_pos, rest) = rest.split_at(8); - let (npm_vfs_pos, npm_files_pos) = rest.split_at(8); - let eszip_archive_pos = u64_from_bytes(eszip_archive_pos)?; - let metadata_pos = u64_from_bytes(metadata_pos)?; - let npm_vfs_pos = u64_from_bytes(npm_vfs_pos)?; - let 
npm_files_pos = u64_from_bytes(npm_files_pos)?; - Ok(Some(Trailer { - eszip_pos: eszip_archive_pos, - metadata_pos, - npm_vfs_pos, - npm_files_pos, - })) - } - - pub fn metadata_len(&self) -> u64 { - self.npm_vfs_pos - self.metadata_pos - } - - pub fn npm_vfs_len(&self) -> u64 { - self.npm_files_pos - self.npm_vfs_pos + metadata.argv.reserve(cli_args.len() - 1); + for arg in cli_args.into_iter().skip(1) { + metadata.argv.push(arg.into_string().unwrap()); } - - pub fn as_bytes(&self) -> Vec<u8> { - let mut trailer = MAGIC_TRAILER.to_vec(); - trailer.write_all(&self.eszip_pos.to_be_bytes()).unwrap(); - trailer.write_all(&self.metadata_pos.to_be_bytes()).unwrap(); - trailer.write_all(&self.npm_vfs_pos.to_be_bytes()).unwrap(); - trailer - .write_all(&self.npm_files_pos.to_be_bytes()) - .unwrap(); - trailer - } -} - -fn u64_from_bytes(arr: &[u8]) -> Result<u64, AnyError> { - let fixed_arr: &[u8; 8] = arr - .try_into() - .context("Failed to convert the buffer into a fixed-size array")?; - Ok(u64::from_be_bytes(*fixed_arr)) + let vfs = { + // align the name of the directory with the root dir + vfs_dir.name = root_path.file_name().unwrap().to_string_lossy().to_string(); + + let fs_root = VfsRoot { + dir: vfs_dir, + root_path: root_path.clone(), + start_file_offset: 0, + }; + Arc::new(FileBackedVfs::new(Cow::Borrowed(vfs_files_data), fs_root)) + }; + let fs: Arc<dyn deno_fs::FileSystem> = + Arc::new(DenoCompileFileSystem::new(vfs.clone())); + Ok(Some(StandaloneData { + fs, + metadata, + modules: StandaloneModules { + remote_modules, + vfs: vfs.clone(), + }, + npm_snapshot, + root_path, + vfs, + })) } pub struct DenoCompileBinaryWriter<'a> { + cjs_tracker: &'a CjsTracker, deno_dir: &'a DenoDir, + emitter: &'a Emitter, file_fetcher: &'a FileFetcher, http_client_provider: &'a HttpClientProvider, npm_resolver: &'a dyn CliNpmResolver, @@ -342,7 +376,9 @@ pub struct DenoCompileBinaryWriter<'a> { impl<'a> DenoCompileBinaryWriter<'a> { #[allow(clippy::too_many_arguments)] pub fn 
new( + cjs_tracker: &'a CjsTracker, deno_dir: &'a DenoDir, + emitter: &'a Emitter, file_fetcher: &'a FileFetcher, http_client_provider: &'a HttpClientProvider, npm_resolver: &'a dyn CliNpmResolver, @@ -350,7 +386,9 @@ impl<'a> DenoCompileBinaryWriter<'a> { npm_system_info: NpmSystemInfo, ) -> Self { Self { + cjs_tracker, deno_dir, + emitter, file_fetcher, http_client_provider, npm_resolver, @@ -362,8 +400,8 @@ impl<'a> DenoCompileBinaryWriter<'a> { pub async fn write_bin( &self, writer: File, - eszip: eszip::EszipV2, - root_dir_url: EszipRelativeFileBaseUrl<'_>, + graph: &ModuleGraph, + root_dir_url: StandaloneRelativeFileBaseUrl<'_>, entrypoint: &ModuleSpecifier, compile_flags: &CompileFlags, cli_options: &CliOptions, @@ -390,15 +428,17 @@ impl<'a> DenoCompileBinaryWriter<'a> { ) } } - self.write_standalone_binary( - writer, - original_binary, - eszip, - root_dir_url, - entrypoint, - cli_options, - compile_flags, - ) + self + .write_standalone_binary( + writer, + original_binary, + graph, + root_dir_url, + entrypoint, + cli_options, + compile_flags, + ) + .await } async fn get_base_binary( @@ -468,14 +508,18 @@ impl<'a> DenoCompileBinaryWriter<'a> { self .http_client_provider .get_or_create()? - .download_with_progress(download_url.parse()?, None, &progress) + .download_with_progress_and_retries( + download_url.parse()?, + None, + &progress, + ) .await? }; let bytes = match maybe_bytes { Some(bytes) => bytes, None => { log::info!("Download could not be found, aborting"); - std::process::exit(1) + deno_runtime::exit(1); } }; @@ -489,12 +533,12 @@ impl<'a> DenoCompileBinaryWriter<'a> { /// This functions creates a standalone deno binary by appending a bundle /// and magic trailer to the currently executing binary. 
#[allow(clippy::too_many_arguments)] - fn write_standalone_binary( + async fn write_standalone_binary( &self, writer: File, original_bin: Vec<u8>, - mut eszip: eszip::EszipV2, - root_dir_url: EszipRelativeFileBaseUrl<'_>, + graph: &ModuleGraph, + root_dir_url: StandaloneRelativeFileBaseUrl<'_>, entrypoint: &ModuleSpecifier, cli_options: &CliOptions, compile_flags: &CompileFlags, @@ -508,19 +552,17 @@ impl<'a> DenoCompileBinaryWriter<'a> { None => None, }; let root_path = root_dir_url.inner().to_file_path().unwrap(); - let (npm_vfs, npm_files, node_modules) = match self.npm_resolver.as_inner() + let (maybe_npm_vfs, node_modules, npm_snapshot) = match self + .npm_resolver + .as_inner() { InnerCliNpmResolverRef::Managed(managed) => { let snapshot = managed.serialized_valid_snapshot_for_system(&self.npm_system_info); if !snapshot.as_serialized().packages.is_empty() { - let (root_dir, files) = self - .build_vfs(&root_path, cli_options)? - .into_dir_and_files(); - eszip.add_npm_snapshot(snapshot); + let npm_vfs_builder = self.build_npm_vfs(&root_path, cli_options)?; ( - Some(root_dir), - files, + Some(npm_vfs_builder), Some(NodeModules::Managed { node_modules_dir: self.npm_resolver.root_node_modules_path().map( |path| { @@ -532,18 +574,16 @@ impl<'a> DenoCompileBinaryWriter<'a> { }, ), }), + Some(snapshot), ) } else { - (None, Vec::new(), None) + (None, None, None) } } InnerCliNpmResolverRef::Byonm(resolver) => { - let (root_dir, files) = self - .build_vfs(&root_path, cli_options)? - .into_dir_and_files(); + let npm_vfs_builder = self.build_npm_vfs(&root_path, cli_options)?; ( - Some(root_dir), - files, + Some(npm_vfs_builder), Some(NodeModules::Byonm { root_node_modules_dir: resolver.root_node_modules_path().map( |node_modules_dir| { @@ -556,14 +596,91 @@ impl<'a> DenoCompileBinaryWriter<'a> { }, ), }), + None, ) } }; + let mut vfs = if let Some(npm_vfs) = maybe_npm_vfs { + npm_vfs + } else { + VfsBuilder::new(root_path.clone())? 
+ }; + let mut remote_modules_store = RemoteModulesStoreBuilder::default(); + let mut code_cache_key_hasher = if cli_options.code_cache_enabled() { + Some(FastInsecureHasher::new_deno_versioned()) + } else { + None + }; + for module in graph.modules() { + if module.specifier().scheme() == "data" { + continue; // don't store data urls as an entry as they're in the code + } + if let Some(hasher) = &mut code_cache_key_hasher { + if let Some(source) = module.source() { + hasher.write(module.specifier().as_str().as_bytes()); + hasher.write(source.as_bytes()); + } + } + let (maybe_source, media_type) = match module { + deno_graph::Module::Js(m) => { + let source = if m.media_type.is_emittable() { + let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script( + &m.specifier, + m.media_type, + m.is_script, + )?; + let module_kind = ModuleKind::from_is_cjs(is_cjs); + let source = self + .emitter + .emit_parsed_source( + &m.specifier, + m.media_type, + module_kind, + &m.source, + ) + .await?; + source.into_bytes() + } else { + m.source.as_bytes().to_vec() + }; + (Some(source), m.media_type) + } + deno_graph::Module::Json(m) => { + (Some(m.source.as_bytes().to_vec()), m.media_type) + } + deno_graph::Module::Npm(_) + | deno_graph::Module::Node(_) + | deno_graph::Module::External(_) => (None, MediaType::Unknown), + }; + if module.specifier().scheme() == "file" { + let file_path = deno_path_util::url_to_file_path(module.specifier())?; + vfs + .add_file_with_data( + &file_path, + match maybe_source { + Some(source) => source, + None => RealFs.read_file_sync(&file_path, None)?, + }, + ) + .with_context(|| { + format!("Failed adding '{}'", file_path.display()) + })?; + } else if let Some(source) = maybe_source { + remote_modules_store.add(module.specifier(), media_type, source); + } + } + remote_modules_store.add_redirects(&graph.redirects); let env_vars_from_env_file = match cli_options.env_file_name() { - Some(env_filename) => { - log::info!("{} Environment variables from the file 
\"{}\" were embedded in the generated executable file", crate::colors::yellow("Warning"), env_filename); - get_file_env_vars(env_filename.to_string())? + Some(env_filenames) => { + let mut aggregated_env_vars = IndexMap::new(); + for env_filename in env_filenames.iter().rev() { + log::info!("{} Environment variables from the file \"{}\" were embedded in the generated executable file", crate::colors::yellow("Warning"), env_filename); + + let env_vars = get_file_env_vars(env_filename.to_string())?; + aggregated_env_vars.extend(env_vars); + } + aggregated_env_vars } None => Default::default(), }; @@ -571,6 +688,7 @@ impl<'a> DenoCompileBinaryWriter<'a> { let metadata = Metadata { argv: compile_flags.args.clone(), seed: cli_options.seed(), + code_cache_key: code_cache_key_hasher.map(|h| h.finish()), location: cli_options.location_flag().clone(), permissions: cli_options.permission_flags().clone(), v8_flags: cli_options.v8_flags().clone(), @@ -625,20 +743,21 @@ impl<'a> DenoCompileBinaryWriter<'a> { sloppy_imports: cli_options.unstable_sloppy_imports(), features: cli_options.unstable_features(), }, + otel_config: cli_options.otel_config(), }; write_binary_bytes( writer, original_bin, &metadata, - eszip, - npm_vfs.as_ref(), - &npm_files, + npm_snapshot.map(|s| s.into_serialized()), + &remote_modules_store, + vfs, compile_flags, ) } - fn build_vfs( + fn build_npm_vfs( &self, root_path: &Path, cli_options: &CliOptions, @@ -659,8 +778,9 @@ impl<'a> DenoCompileBinaryWriter<'a> { } else { // DO NOT include the user's registry url as it may contain credentials, // but also don't make this dependent on the registry url - let root_path = npm_resolver.global_cache_root_folder(); - let mut builder = VfsBuilder::new(root_path)?; + let global_cache_root_path = npm_resolver.global_cache_root_path(); + let mut builder = + VfsBuilder::new(global_cache_root_path.to_path_buf())?; let mut packages = npm_resolver.all_system_packages(&self.npm_system_info); packages.sort_by(|a, b| 
a.id.cmp(&b.id)); // determinism @@ -670,12 +790,12 @@ impl<'a> DenoCompileBinaryWriter<'a> { builder.add_dir_recursive(&folder)?; } - // Flatten all the registries folders into a single "node_modules/localhost" folder + // Flatten all the registries folders into a single ".deno_compile_node_modules/localhost" folder // that will be used by denort when loading the npm cache. This avoids us exposing // the user's private registry information and means we don't have to bother // serializing all the different registry config into the binary. builder.with_root_dir(|root_dir| { - root_dir.name = "node_modules".to_string(); + root_dir.name = ".deno_compile_node_modules".to_string(); let mut new_entries = Vec::with_capacity(root_dir.entries.len()); let mut localhost_entries = IndexMap::new(); for entry in std::mem::take(&mut root_dir.entries) { @@ -710,6 +830,8 @@ impl<'a> DenoCompileBinaryWriter<'a> { root_dir.entries = new_entries; }); + builder.set_new_root_path(root_path.to_path_buf())?; + Ok(builder) } } diff --git a/cli/standalone/code_cache.rs b/cli/standalone/code_cache.rs new file mode 100644 index 000000000..25b490544 --- /dev/null +++ b/cli/standalone/code_cache.rs @@ -0,0 +1,514 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
+ +use std::collections::BTreeMap; +use std::collections::HashMap; +use std::io::BufReader; +use std::io::BufWriter; +use std::io::Read; +use std::io::Write; +use std::path::Path; +use std::path::PathBuf; +use std::sync::Arc; + +use deno_ast::ModuleSpecifier; +use deno_core::anyhow::bail; +use deno_core::error::AnyError; +use deno_core::parking_lot::Mutex; +use deno_core::unsync::sync::AtomicFlag; +use deno_runtime::code_cache::CodeCache; +use deno_runtime::code_cache::CodeCacheType; + +use crate::cache::FastInsecureHasher; +use crate::util::path::get_atomic_file_path; +use crate::worker::CliCodeCache; + +enum CodeCacheStrategy { + FirstRun(FirstRunCodeCacheStrategy), + SubsequentRun(SubsequentRunCodeCacheStrategy), +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct DenoCompileCodeCacheEntry { + pub source_hash: u64, + pub data: Vec<u8>, +} + +pub struct DenoCompileCodeCache { + strategy: CodeCacheStrategy, +} + +impl DenoCompileCodeCache { + pub fn new(file_path: PathBuf, cache_key: u64) -> Self { + // attempt to deserialize the cache data + match deserialize(&file_path, cache_key) { + Ok(data) => { + log::debug!("Loaded {} code cache entries", data.len()); + Self { + strategy: CodeCacheStrategy::SubsequentRun( + SubsequentRunCodeCacheStrategy { + is_finished: AtomicFlag::lowered(), + data: Mutex::new(data), + }, + ), + } + } + Err(err) => { + log::debug!("Failed to deserialize code cache: {:#}", err); + Self { + strategy: CodeCacheStrategy::FirstRun(FirstRunCodeCacheStrategy { + cache_key, + file_path, + is_finished: AtomicFlag::lowered(), + data: Mutex::new(FirstRunCodeCacheData { + cache: HashMap::new(), + add_count: 0, + }), + }), + } + } + } + } +} + +impl CodeCache for DenoCompileCodeCache { + fn get_sync( + &self, + specifier: &ModuleSpecifier, + code_cache_type: CodeCacheType, + source_hash: u64, + ) -> Option<Vec<u8>> { + match &self.strategy { + CodeCacheStrategy::FirstRun(strategy) => { + if !strategy.is_finished.is_raised() { + // we keep track 
of how many times the cache is requested + // then serialize the cache when we get that number of + // "set" calls + strategy.data.lock().add_count += 1; + } + None + } + CodeCacheStrategy::SubsequentRun(strategy) => { + if strategy.is_finished.is_raised() { + return None; + } + strategy.take_from_cache(specifier, code_cache_type, source_hash) + } + } + } + + fn set_sync( + &self, + specifier: ModuleSpecifier, + code_cache_type: CodeCacheType, + source_hash: u64, + bytes: &[u8], + ) { + match &self.strategy { + CodeCacheStrategy::FirstRun(strategy) => { + if strategy.is_finished.is_raised() { + return; + } + + let data_to_serialize = { + let mut data = strategy.data.lock(); + data.cache.insert( + (specifier.to_string(), code_cache_type), + DenoCompileCodeCacheEntry { + source_hash, + data: bytes.to_vec(), + }, + ); + if data.add_count != 0 { + data.add_count -= 1; + } + if data.add_count == 0 { + // don't allow using the cache anymore + strategy.is_finished.raise(); + if data.cache.is_empty() { + None + } else { + Some(std::mem::take(&mut data.cache)) + } + } else { + None + } + }; + if let Some(cache_data) = &data_to_serialize { + strategy.write_cache_data(cache_data); + } + } + CodeCacheStrategy::SubsequentRun(_) => { + // do nothing + } + } + } +} + +impl CliCodeCache for DenoCompileCodeCache { + fn enabled(&self) -> bool { + match &self.strategy { + CodeCacheStrategy::FirstRun(strategy) => { + !strategy.is_finished.is_raised() + } + CodeCacheStrategy::SubsequentRun(strategy) => { + !strategy.is_finished.is_raised() + } + } + } + + fn as_code_cache(self: Arc<Self>) -> Arc<dyn CodeCache> { + self + } +} + +type CodeCacheKey = (String, CodeCacheType); + +struct FirstRunCodeCacheData { + cache: HashMap<CodeCacheKey, DenoCompileCodeCacheEntry>, + add_count: usize, +} + +struct FirstRunCodeCacheStrategy { + cache_key: u64, + file_path: PathBuf, + is_finished: AtomicFlag, + data: Mutex<FirstRunCodeCacheData>, +} + +impl FirstRunCodeCacheStrategy { + fn 
write_cache_data( + &self, + cache_data: &HashMap<CodeCacheKey, DenoCompileCodeCacheEntry>, + ) { + let count = cache_data.len(); + let temp_file = get_atomic_file_path(&self.file_path); + match serialize(&temp_file, self.cache_key, cache_data) { + Ok(()) => { + if let Err(err) = std::fs::rename(&temp_file, &self.file_path) { + log::debug!("Failed to rename code cache: {}", err); + } else { + log::debug!("Serialized {} code cache entries", count); + } + } + Err(err) => { + let _ = std::fs::remove_file(&temp_file); + log::debug!("Failed to serialize code cache: {}", err); + } + } + } +} + +struct SubsequentRunCodeCacheStrategy { + is_finished: AtomicFlag, + data: Mutex<HashMap<CodeCacheKey, DenoCompileCodeCacheEntry>>, +} + +impl SubsequentRunCodeCacheStrategy { + fn take_from_cache( + &self, + specifier: &ModuleSpecifier, + code_cache_type: CodeCacheType, + source_hash: u64, + ) -> Option<Vec<u8>> { + let mut data = self.data.lock(); + // todo(dsherret): how to avoid the clone here? + let entry = data.remove(&(specifier.to_string(), code_cache_type))?; + if entry.source_hash != source_hash { + return None; + } + if data.is_empty() { + self.is_finished.raise(); + } + Some(entry.data) + } +} + +/// File format: +/// - <header> +/// - <cache key> +/// - <u32: number of entries> +/// - <[entry length]> - u64 * number of entries +/// - <[entry]> +/// - <[u8]: entry data> +/// - <String: specifier> +/// - <u8>: code cache type +/// - <u32: specifier length> +/// - <u64: source hash> +/// - <u64: entry data hash> +fn serialize( + file_path: &Path, + cache_key: u64, + cache: &HashMap<CodeCacheKey, DenoCompileCodeCacheEntry>, +) -> Result<(), AnyError> { + let cache_file = std::fs::OpenOptions::new() + .create(true) + .truncate(true) + .write(true) + .open(file_path)?; + let mut writer = BufWriter::new(cache_file); + serialize_with_writer(&mut writer, cache_key, cache) +} + +fn serialize_with_writer<T: Write>( + writer: &mut BufWriter<T>, + cache_key: u64, + cache: 
&HashMap<CodeCacheKey, DenoCompileCodeCacheEntry>, +) -> Result<(), AnyError> { + // header + writer.write_all(&cache_key.to_le_bytes())?; + writer.write_all(&(cache.len() as u32).to_le_bytes())?; + // lengths of each entry + for ((specifier, _), entry) in cache { + let len: u64 = + entry.data.len() as u64 + specifier.len() as u64 + 1 + 4 + 8 + 8; + writer.write_all(&len.to_le_bytes())?; + } + // entries + for ((specifier, code_cache_type), entry) in cache { + writer.write_all(&entry.data)?; + writer.write_all(&[match code_cache_type { + CodeCacheType::EsModule => 0, + CodeCacheType::Script => 1, + }])?; + writer.write_all(specifier.as_bytes())?; + writer.write_all(&(specifier.len() as u32).to_le_bytes())?; + writer.write_all(&entry.source_hash.to_le_bytes())?; + let hash: u64 = FastInsecureHasher::new_without_deno_version() + .write(&entry.data) + .finish(); + writer.write_all(&hash.to_le_bytes())?; + } + + writer.flush()?; + + Ok(()) +} + +fn deserialize( + file_path: &Path, + expected_cache_key: u64, +) -> Result<HashMap<CodeCacheKey, DenoCompileCodeCacheEntry>, AnyError> { + let cache_file = std::fs::File::open(file_path)?; + let mut reader = BufReader::new(cache_file); + deserialize_with_reader(&mut reader, expected_cache_key) +} + +fn deserialize_with_reader<T: Read>( + reader: &mut BufReader<T>, + expected_cache_key: u64, +) -> Result<HashMap<CodeCacheKey, DenoCompileCodeCacheEntry>, AnyError> { + // it's very important to use this below so that a corrupt cache file + // doesn't cause a memory allocation error + fn new_vec_sized<T: Clone>( + capacity: usize, + default_value: T, + ) -> Result<Vec<T>, AnyError> { + let mut vec = Vec::new(); + vec.try_reserve(capacity)?; + vec.resize(capacity, default_value); + Ok(vec) + } + + fn try_subtract(a: usize, b: usize) -> Result<usize, AnyError> { + if a < b { + bail!("Integer underflow"); + } + Ok(a - b) + } + + let mut header_bytes = vec![0; 8 + 4]; + reader.read_exact(&mut header_bytes)?; + let actual_cache_key = 
u64::from_le_bytes(header_bytes[..8].try_into()?); + if actual_cache_key != expected_cache_key { + // cache bust + bail!("Cache key mismatch"); + } + let len = u32::from_le_bytes(header_bytes[8..].try_into()?) as usize; + // read the lengths for each entry found in the file + let entry_len_bytes_capacity = len * 8; + let mut entry_len_bytes = new_vec_sized(entry_len_bytes_capacity, 0)?; + reader.read_exact(&mut entry_len_bytes)?; + let mut lengths = Vec::new(); + lengths.try_reserve(len)?; + for i in 0..len { + let pos = i * 8; + lengths.push( + u64::from_le_bytes(entry_len_bytes[pos..pos + 8].try_into()?) as usize, + ); + } + + let mut map = HashMap::new(); + map.try_reserve(len)?; + for len in lengths { + let mut buffer = new_vec_sized(len, 0)?; + reader.read_exact(&mut buffer)?; + let entry_data_hash_start_pos = try_subtract(buffer.len(), 8)?; + let expected_entry_data_hash = + u64::from_le_bytes(buffer[entry_data_hash_start_pos..].try_into()?); + let source_hash_start_pos = try_subtract(entry_data_hash_start_pos, 8)?; + let source_hash = u64::from_le_bytes( + buffer[source_hash_start_pos..entry_data_hash_start_pos].try_into()?, + ); + let specifier_end_pos = try_subtract(source_hash_start_pos, 4)?; + let specifier_len = u32::from_le_bytes( + buffer[specifier_end_pos..source_hash_start_pos].try_into()?, + ) as usize; + let specifier_start_pos = try_subtract(specifier_end_pos, specifier_len)?; + let specifier = String::from_utf8( + buffer[specifier_start_pos..specifier_end_pos].to_vec(), + )?; + let code_cache_type_pos = try_subtract(specifier_start_pos, 1)?; + let code_cache_type = match buffer[code_cache_type_pos] { + 0 => CodeCacheType::EsModule, + 1 => CodeCacheType::Script, + _ => bail!("Invalid code cache type"), + }; + buffer.truncate(code_cache_type_pos); + let actual_entry_data_hash: u64 = + FastInsecureHasher::new_without_deno_version() + .write(&buffer) + .finish(); + if expected_entry_data_hash != actual_entry_data_hash { + bail!("Hash mismatch.") + } 
+ map.insert( + (specifier, code_cache_type), + DenoCompileCodeCacheEntry { + source_hash, + data: buffer, + }, + ); + } + + Ok(map) +} + +#[cfg(test)] +mod test { + use test_util::TempDir; + + use super::*; + use std::fs::File; + + #[test] + fn serialize_deserialize() { + let cache_key = 123456; + let cache = { + let mut cache = HashMap::new(); + cache.insert( + ("specifier1".to_string(), CodeCacheType::EsModule), + DenoCompileCodeCacheEntry { + source_hash: 1, + data: vec![1, 2, 3], + }, + ); + cache.insert( + ("specifier2".to_string(), CodeCacheType::EsModule), + DenoCompileCodeCacheEntry { + source_hash: 2, + data: vec![4, 5, 6], + }, + ); + cache.insert( + ("specifier2".to_string(), CodeCacheType::Script), + DenoCompileCodeCacheEntry { + source_hash: 2, + data: vec![6, 5, 1], + }, + ); + cache + }; + let mut buffer = Vec::new(); + serialize_with_writer(&mut BufWriter::new(&mut buffer), cache_key, &cache) + .unwrap(); + let deserialized = + deserialize_with_reader(&mut BufReader::new(&buffer[..]), cache_key) + .unwrap(); + assert_eq!(cache, deserialized); + } + + #[test] + fn serialize_deserialize_empty() { + let cache_key = 1234; + let cache = HashMap::new(); + let mut buffer = Vec::new(); + serialize_with_writer(&mut BufWriter::new(&mut buffer), cache_key, &cache) + .unwrap(); + let deserialized = + deserialize_with_reader(&mut BufReader::new(&buffer[..]), cache_key) + .unwrap(); + assert_eq!(cache, deserialized); + } + + #[test] + fn serialize_deserialize_corrupt() { + let buffer = "corrupttestingtestingtesting".as_bytes().to_vec(); + let err = deserialize_with_reader(&mut BufReader::new(&buffer[..]), 1234) + .unwrap_err(); + assert_eq!(err.to_string(), "Cache key mismatch"); + } + + #[test] + fn code_cache() { + let temp_dir = TempDir::new(); + let file_path = temp_dir.path().join("cache.bin").to_path_buf(); + let url1 = ModuleSpecifier::parse("https://deno.land/example1.js").unwrap(); + let url2 = 
ModuleSpecifier::parse("https://deno.land/example2.js").unwrap(); + // first run + { + let code_cache = DenoCompileCodeCache::new(file_path.clone(), 1234); + assert!(code_cache + .get_sync(&url1, CodeCacheType::EsModule, 0) + .is_none()); + assert!(code_cache + .get_sync(&url2, CodeCacheType::EsModule, 1) + .is_none()); + assert!(code_cache.enabled()); + code_cache.set_sync(url1.clone(), CodeCacheType::EsModule, 0, &[1, 2, 3]); + assert!(code_cache.enabled()); + assert!(!file_path.exists()); + code_cache.set_sync(url2.clone(), CodeCacheType::EsModule, 1, &[2, 1, 3]); + assert!(file_path.exists()); // now the new code cache exists + assert!(!code_cache.enabled()); // no longer enabled + } + // second run + { + let code_cache = DenoCompileCodeCache::new(file_path.clone(), 1234); + assert!(code_cache.enabled()); + let result1 = code_cache + .get_sync(&url1, CodeCacheType::EsModule, 0) + .unwrap(); + assert!(code_cache.enabled()); + let result2 = code_cache + .get_sync(&url2, CodeCacheType::EsModule, 1) + .unwrap(); + assert!(!code_cache.enabled()); // no longer enabled + assert_eq!(result1, vec![1, 2, 3]); + assert_eq!(result2, vec![2, 1, 3]); + } + + // new cache key first run + { + let code_cache = DenoCompileCodeCache::new(file_path.clone(), 54321); + assert!(code_cache + .get_sync(&url1, CodeCacheType::EsModule, 0) + .is_none()); + assert!(code_cache + .get_sync(&url2, CodeCacheType::EsModule, 1) + .is_none()); + code_cache.set_sync(url1.clone(), CodeCacheType::EsModule, 0, &[2, 2, 3]); + code_cache.set_sync(url2.clone(), CodeCacheType::EsModule, 1, &[3, 2, 3]); + } + // new cache key second run + { + let code_cache = DenoCompileCodeCache::new(file_path.clone(), 54321); + let result1 = code_cache + .get_sync(&url1, CodeCacheType::EsModule, 0) + .unwrap(); + assert_eq!(result1, vec![2, 2, 3]); + assert!(code_cache + .get_sync(&url2, CodeCacheType::EsModule, 5) // different hash will cause none + .is_none()); + } + } +} diff --git a/cli/standalone/file_system.rs 
b/cli/standalone/file_system.rs index 314444630..712c6ee91 100644 --- a/cli/standalone/file_system.rs +++ b/cli/standalone/file_system.rs @@ -22,8 +22,8 @@ use super::virtual_fs::FileBackedVfs; pub struct DenoCompileFileSystem(Arc<FileBackedVfs>); impl DenoCompileFileSystem { - pub fn new(vfs: FileBackedVfs) -> Self { - Self(Arc::new(vfs)) + pub fn new(vfs: Arc<FileBackedVfs>) -> Self { + Self(vfs) } fn error_if_in_vfs(&self, path: &Path) -> FsResult<()> { diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs index 258de0dad..b9f0b1d5b 100644 --- a/cli/standalone/mod.rs +++ b/cli/standalone/mod.rs @@ -5,6 +5,9 @@ #![allow(dead_code)] #![allow(unused_imports)] +use binary::StandaloneData; +use binary::StandaloneModules; +use code_cache::DenoCompileCodeCache; use deno_ast::MediaType; use deno_cache_dir::npm::NpmCacheDir; use deno_config::workspace::MappedResolution; @@ -15,8 +18,10 @@ use deno_core::anyhow::Context; use deno_core::error::generic_error; use deno_core::error::type_error; use deno_core::error::AnyError; +use deno_core::futures::future::LocalBoxFuture; use deno_core::futures::FutureExt; use deno_core::v8_set_flags; +use deno_core::FastString; use deno_core::FeatureChecker; use deno_core::ModuleLoader; use deno_core::ModuleSourceCode; @@ -24,11 +29,15 @@ use deno_core::ModuleSpecifier; use deno_core::ModuleType; use deno_core::RequestedModuleType; use deno_core::ResolutionKind; +use deno_core::SourceCodeCacheInfo; use deno_npm::npm_rc::ResolvedNpmRc; use deno_package_json::PackageJsonDepValue; +use deno_resolver::npm::NpmReqResolverOptions; use deno_runtime::deno_fs; use deno_runtime::deno_node::create_host_defined_options; +use deno_runtime::deno_node::NodeRequireLoader; use deno_runtime::deno_node::NodeResolver; +use deno_runtime::deno_node::PackageJsonResolver; use deno_runtime::deno_permissions::Permissions; use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_tls::rustls::RootCertStore; @@ -38,10 +47,12 @@ use 
deno_runtime::permissions::RuntimePermissionDescriptorParser; use deno_runtime::WorkerExecutionMode; use deno_runtime::WorkerLogLevel; use deno_semver::npm::NpmPackageReqReference; -use eszip::EszipRelativeFileBaseUrl; use import_map::parse_from_json; use node_resolver::analyze::NodeCodeTranslator; +use node_resolver::errors::ClosestPkgJsonError; +use node_resolver::NodeModuleKind; use node_resolver::NodeResolutionMode; +use serialization::DenoCompileModuleSource; use std::borrow::Cow; use std::rc::Rc; use std::sync::Arc; @@ -54,86 +65,102 @@ use crate::args::CacheSetting; use crate::args::NpmInstallDepsProvider; use crate::args::StorageKeyResolver; use crate::cache::Caches; +use crate::cache::DenoCacheEnvFsAdapter; use crate::cache::DenoDirProvider; +use crate::cache::FastInsecureHasher; use crate::cache::NodeAnalysisCache; use crate::cache::RealDenoCacheEnv; use crate::http_util::HttpClientProvider; use crate::node::CliCjsCodeAnalyzer; +use crate::node::CliNodeCodeTranslator; use crate::npm::create_cli_npm_resolver; +use crate::npm::create_in_npm_pkg_checker; use crate::npm::CliByonmNpmResolverCreateOptions; +use crate::npm::CliManagedInNpmPkgCheckerCreateOptions; +use crate::npm::CliManagedNpmResolverCreateOptions; +use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolverCreateOptions; -use crate::npm::CliNpmResolverManagedCreateOptions; use crate::npm::CliNpmResolverManagedSnapshotOption; -use crate::resolver::CjsResolutionStore; +use crate::npm::CreateInNpmPkgCheckerOptions; +use crate::resolver::CjsTracker; use crate::resolver::CliDenoResolverFs; -use crate::resolver::CliNodeResolver; +use crate::resolver::CliNpmReqResolver; +use crate::resolver::IsCjsResolverOptions; use crate::resolver::NpmModuleLoader; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; use crate::util::v8::construct_v8_flags; +use crate::worker::CliCodeCache; use crate::worker::CliMainWorkerFactory; use crate::worker::CliMainWorkerOptions; 
use crate::worker::CreateModuleLoaderResult;
use crate::worker::ModuleLoaderFactory;

pub mod binary;
mod code_cache;
mod file_system;
mod serialization;
mod virtual_fs;

pub use binary::extract_standalone;
pub use binary::is_standalone_binary;
pub use binary::DenoCompileBinaryWriter;

use self::binary::Metadata;
use self::file_system::DenoCompileFileSystem;

/// State shared by every module loader handed out for a standalone
/// (`deno compile`d) binary — main worker and web workers alike.
/// All members are reference-counted, so the loaders themselves stay
/// cheap to clone.
struct SharedModuleLoaderState {
  cjs_tracker: Arc<CjsTracker>,
  fs: Arc<dyn deno_fs::FileSystem>,
  /// Modules embedded in the binary's data section.
  modules: StandaloneModules,
  node_code_translator: Arc<CliNodeCodeTranslator>,
  node_resolver: Arc<NodeResolver>,
  npm_module_loader: Arc<NpmModuleLoader>,
  npm_req_resolver: Arc<CliNpmReqResolver>,
  npm_resolver: Arc<dyn CliNpmResolver>,
  workspace_resolver: WorkspaceResolver,
  /// `None` when the V8 code cache is disabled for this binary.
  code_cache: Option<Arc<dyn CliCodeCache>>,
}

impl SharedModuleLoaderState {
  /// Looks up a V8 code-cache entry for `specifier`/`source`.
  ///
  /// Returns `None` when code caching is unavailable (not configured, or
  /// disabled at runtime via `code_cache.enabled()`). Otherwise returns the
  /// source hash plus any previously stored cache data for that hash; a
  /// `Some` with `data: None` signals "cacheable, but nothing stored yet".
  fn get_code_cache(
    &self,
    specifier: &ModuleSpecifier,
    source: &[u8],
  ) -> Option<SourceCodeCacheInfo> {
    let Some(code_cache) = &self.code_cache else {
      return None;
    };
    if !code_cache.enabled() {
      return None;
    }
    // deno version is already included in the root cache key
    let hash = FastInsecureHasher::new_without_deno_version()
      .write_hashable(source)
      .finish();
    let data = code_cache.get_sync(
      specifier,
      deno_runtime::code_cache::CodeCacheType::EsModule,
      hash,
    );
    Some(SourceCodeCacheInfo {
      hash,
      data: data.map(Cow::Owned),
    })
  }
}

/// Module loader backed entirely by the data embedded in the standalone
/// binary (plus the npm module loader for packages).
#[derive(Clone)]
struct EmbeddedModuleLoader {
  shared: Arc<SharedModuleLoaderState>,
}

impl std::fmt::Debug for EmbeddedModuleLoader {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    // Intentionally omits `shared`: the state is large and several of its
    // members do not implement `Debug`.
    f.debug_struct("EmbeddedModuleLoader").finish()
  }
}

pub const MODULE_NOT_FOUND: &str = "Module not found";
pub const UNSUPPORTED_SCHEME: &str = "Unsupported scheme";
{ PackageJsonDepValue::Req(req) => self .shared - .node_resolver + .npm_req_resolver .resolve_req_with_sub_path( req, sub_path.as_deref(), &referrer, + referrer_kind, NodeResolutionMode::Execution, ) - .map(|res| res.into_url()), + .map_err(AnyError::from), PackageJsonDepValue::Workspace(version_req) => { let pkg_folder = self .shared @@ -222,13 +264,13 @@ impl ModuleLoader for EmbeddedModuleLoader { self .shared .node_resolver - .resolve_package_sub_path_from_deno_module( + .resolve_package_subpath_from_deno_module( pkg_folder, sub_path.as_deref(), Some(&referrer), + referrer_kind, NodeResolutionMode::Execution, - )? - .into_url(), + )?, ) } }, @@ -237,20 +279,19 @@ impl ModuleLoader for EmbeddedModuleLoader { if let Ok(reference) = NpmPackageReqReference::from_specifier(&specifier) { - return self - .shared - .node_resolver - .resolve_req_reference( - &reference, - &referrer, - NodeResolutionMode::Execution, - ) - .map(|res| res.into_url()); + return Ok(self.shared.npm_req_resolver.resolve_req_reference( + &reference, + &referrer, + referrer_kind, + NodeResolutionMode::Execution, + )?); } if specifier.scheme() == "jsr" { - if let Some(module) = self.shared.eszip.get_module(&specifier) { - return Ok(module.specifier); + if let Some(specifier) = + self.shared.modules.resolve_specifier(&specifier)? + { + return Ok(specifier.clone()); } } @@ -258,16 +299,17 @@ impl ModuleLoader for EmbeddedModuleLoader { self .shared .node_resolver - .handle_if_in_node_modules(&specifier)? 
+ .handle_if_in_node_modules(&specifier) .unwrap_or(specifier), ) } Err(err) if err.is_unmapped_bare_specifier() && referrer.scheme() == "file" => { - let maybe_res = self.shared.node_resolver.resolve_if_for_npm_pkg( + let maybe_res = self.shared.npm_req_resolver.resolve_if_for_npm_pkg( raw_specifier, &referrer, + referrer_kind, NodeResolutionMode::Execution, )?; if let Some(res) = maybe_res { @@ -322,14 +364,19 @@ impl ModuleLoader for EmbeddedModuleLoader { } if self.shared.node_resolver.in_npm_package(original_specifier) { - let npm_module_loader = self.shared.npm_module_loader.clone(); + let shared = self.shared.clone(); let original_specifier = original_specifier.clone(); let maybe_referrer = maybe_referrer.cloned(); return deno_core::ModuleLoadResponse::Async( async move { - let code_source = npm_module_loader + let code_source = shared + .npm_module_loader .load(&original_specifier, maybe_referrer.as_ref()) .await?; + let code_cache_entry = shared.get_code_cache( + &code_source.found_url, + code_source.code.as_bytes(), + ); Ok(deno_core::ModuleSource::new_with_redirect( match code_source.media_type { MediaType::Json => ModuleType::Json, @@ -338,89 +385,177 @@ impl ModuleLoader for EmbeddedModuleLoader { code_source.code, &original_specifier, &code_source.found_url, - None, + code_cache_entry, )) } .boxed_local(), ); } - let Some(module) = self.shared.eszip.get_module(original_specifier) else { - return deno_core::ModuleLoadResponse::Sync(Err(type_error(format!( - "{MODULE_NOT_FOUND}: {}", - original_specifier - )))); - }; - let original_specifier = original_specifier.clone(); - - deno_core::ModuleLoadResponse::Async( - async move { - let code = module.inner.source().await.ok_or_else(|| { - type_error(format!("Module not found: {}", original_specifier)) - })?; - let code = arc_u8_to_arc_str(code) - .map_err(|_| type_error("Module source is not utf-8"))?; - Ok(deno_core::ModuleSource::new_with_redirect( - match module.inner.kind { - 
eszip::ModuleKind::JavaScript => ModuleType::JavaScript, - eszip::ModuleKind::Json => ModuleType::Json, - eszip::ModuleKind::Jsonc => { - return Err(type_error("jsonc modules not supported")) - } - eszip::ModuleKind::OpaqueData => { - unreachable!(); + match self.shared.modules.read(original_specifier) { + Ok(Some(module)) => { + let media_type = module.media_type; + let (module_specifier, module_type, module_source) = + module.into_parts(); + let is_maybe_cjs = match self + .shared + .cjs_tracker + .is_maybe_cjs(original_specifier, media_type) + { + Ok(is_maybe_cjs) => is_maybe_cjs, + Err(err) => { + return deno_core::ModuleLoadResponse::Sync(Err(type_error( + format!("{:?}", err), + ))); + } + }; + if is_maybe_cjs { + let original_specifier = original_specifier.clone(); + let module_specifier = module_specifier.clone(); + let shared = self.shared.clone(); + deno_core::ModuleLoadResponse::Async( + async move { + let source = match module_source { + DenoCompileModuleSource::String(string) => { + Cow::Borrowed(string) + } + DenoCompileModuleSource::Bytes(module_code_bytes) => { + match module_code_bytes { + Cow::Owned(bytes) => Cow::Owned( + crate::util::text_encoding::from_utf8_lossy_owned(bytes), + ), + Cow::Borrowed(bytes) => String::from_utf8_lossy(bytes), + } + } + }; + let source = shared + .node_code_translator + .translate_cjs_to_esm(&module_specifier, Some(source)) + .await?; + let module_source = match source { + Cow::Owned(source) => ModuleSourceCode::String(source.into()), + Cow::Borrowed(source) => { + ModuleSourceCode::String(FastString::from_static(source)) + } + }; + let code_cache_entry = shared + .get_code_cache(&module_specifier, module_source.as_bytes()); + Ok(deno_core::ModuleSource::new_with_redirect( + module_type, + module_source, + &original_specifier, + &module_specifier, + code_cache_entry, + )) } - }, - ModuleSourceCode::String(code.into()), - &original_specifier, - &module.specifier, - None, - )) + .boxed_local(), + ) + } else { + let 
module_source = module_source.into_for_v8(); + let code_cache_entry = self + .shared + .get_code_cache(module_specifier, module_source.as_bytes()); + deno_core::ModuleLoadResponse::Sync(Ok( + deno_core::ModuleSource::new_with_redirect( + module_type, + module_source, + original_specifier, + module_specifier, + code_cache_entry, + ), + )) + } } - .boxed_local(), - ) + Ok(None) => deno_core::ModuleLoadResponse::Sync(Err(type_error( + format!("{MODULE_NOT_FOUND}: {}", original_specifier), + ))), + Err(err) => deno_core::ModuleLoadResponse::Sync(Err(type_error( + format!("{:?}", err), + ))), + } + } + + fn code_cache_ready( + &self, + specifier: ModuleSpecifier, + source_hash: u64, + code_cache_data: &[u8], + ) -> LocalBoxFuture<'static, ()> { + if let Some(code_cache) = &self.shared.code_cache { + code_cache.set_sync( + specifier, + deno_runtime::code_cache::CodeCacheType::EsModule, + source_hash, + code_cache_data, + ); + } + std::future::ready(()).boxed_local() } } -fn arc_u8_to_arc_str( - arc_u8: Arc<[u8]>, -) -> Result<Arc<str>, std::str::Utf8Error> { - // Check that the string is valid UTF-8. - std::str::from_utf8(&arc_u8)?; - // SAFETY: the string is valid UTF-8, and the layout Arc<[u8]> is the same as - // Arc<str>. This is proven by the From<Arc<str>> impl for Arc<[u8]> from the - // standard library. 
impl NodeRequireLoader for EmbeddedModuleLoader {
  /// Permission check for `require()`d files: files that are part of the
  /// embedded snapshot are always readable; anything else is deferred to
  /// the npm resolver's permission check.
  fn ensure_read_permission<'a>(
    &self,
    permissions: &mut dyn deno_runtime::deno_node::NodePermissions,
    path: &'a std::path::Path,
  ) -> Result<Cow<'a, std::path::Path>, AnyError> {
    if self.shared.modules.has_file(path) {
      // allow reading if the file is in the snapshot
      return Ok(Cow::Borrowed(path));
    }

    self
      .shared
      .npm_resolver
      .ensure_read_permission(permissions, path)
  }

  /// Reads a text file through the (possibly virtual) file system,
  /// replacing invalid UTF-8 rather than erroring.
  fn load_text_file_lossy(
    &self,
    path: &std::path::Path,
  ) -> Result<String, AnyError> {
    Ok(self.shared.fs.read_text_file_lossy_sync(path, None)?)
  }

  /// Delegates the CJS/ESM decision to the shared `CjsTracker`, keyed by
  /// the specifier's media type.
  fn is_maybe_cjs(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Result<bool, ClosestPkgJsonError> {
    let media_type = MediaType::from_specifier(specifier);
    self.shared.cjs_tracker.is_maybe_cjs(specifier, media_type)
  }
}

/// Factory that hands out `EmbeddedModuleLoader`s over the shared state.
struct StandaloneModuleLoaderFactory {
  shared: Arc<SharedModuleLoaderState>,
}

impl StandaloneModuleLoaderFactory {
  /// Builds one loader and registers it as both the module loader and the
  /// Node `require()` loader — `EmbeddedModuleLoader` implements both.
  pub fn create_result(&self) -> CreateModuleLoaderResult {
    let loader = Rc::new(EmbeddedModuleLoader {
      shared: self.shared.clone(),
    });
    CreateModuleLoaderResult {
      module_loader: loader.clone(),
      node_require_loader: loader,
    }
  }
}

impl ModuleLoaderFactory for StandaloneModuleLoaderFactory {
  fn create_for_main(
    &self,
    _root_permissions: PermissionsContainer,
  ) -> CreateModuleLoaderResult {
    self.create_result()
  }

  fn create_for_worker(
    &self,
    _parent_permissions: PermissionsContainer,
    _permissions: PermissionsContainer,
  ) -> CreateModuleLoaderResult {
    // Workers share the same embedded data, so the loader is identical to
    // the main worker's.
    self.create_result()
  }
}
-439,13 +574,15 @@ impl RootCertStoreProvider for StandaloneRootCertStoreProvider { } } -pub async fn run( - mut eszip: eszip::EszipV2, - metadata: Metadata, -) -> Result<i32, AnyError> { - let current_exe_path = std::env::current_exe().unwrap(); - let current_exe_name = - current_exe_path.file_name().unwrap().to_string_lossy(); +pub async fn run(data: StandaloneData) -> Result<i32, AnyError> { + let StandaloneData { + fs, + metadata, + modules, + npm_snapshot, + root_path, + vfs, + } = data; let deno_dir_provider = Arc::new(DenoDirProvider::new(None)); let root_cert_store_provider = Arc::new(StandaloneRootCertStoreProvider { ca_stores: metadata.ca_stores, @@ -459,44 +596,50 @@ pub async fn run( )); // use a dummy npm registry url let npm_registry_url = ModuleSpecifier::parse("https://localhost/").unwrap(); - let root_path = - std::env::temp_dir().join(format!("deno-compile-{}", current_exe_name)); let root_dir_url = Arc::new(ModuleSpecifier::from_directory_path(&root_path).unwrap()); let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap(); - let root_node_modules_path = root_path.join("node_modules"); - let npm_cache_dir = NpmCacheDir::new( - &RealDenoCacheEnv, - root_node_modules_path.clone(), - vec![npm_registry_url.clone()], - ); - let npm_global_cache_dir = npm_cache_dir.get_cache_location(); + let npm_global_cache_dir = root_path.join(".deno_compile_node_modules"); let cache_setting = CacheSetting::Only; - let (fs, npm_resolver, maybe_vfs_root) = match metadata.node_modules { + let pkg_json_resolver = Arc::new(PackageJsonResolver::new( + deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), + )); + let (in_npm_pkg_checker, npm_resolver) = match metadata.node_modules { Some(binary::NodeModules::Managed { node_modules_dir }) => { - // this will always have a snapshot - let snapshot = eszip.take_npm_snapshot().unwrap(); - let vfs_root_dir_path = if node_modules_dir.is_some() { - root_path.clone() - } else { - 
npm_cache_dir.root_dir().to_owned() - }; - let vfs = load_npm_vfs(vfs_root_dir_path.clone()) - .context("Failed to load npm vfs.")?; + // create an npmrc that uses the fake npm_registry_url to resolve packages + let npmrc = Arc::new(ResolvedNpmRc { + default_config: deno_npm::npm_rc::RegistryConfigWithUrl { + registry_url: npm_registry_url.clone(), + config: Default::default(), + }, + scopes: Default::default(), + registry_configs: Default::default(), + }); + let npm_cache_dir = Arc::new(NpmCacheDir::new( + &DenoCacheEnvFsAdapter(fs.as_ref()), + npm_global_cache_dir, + npmrc.get_all_known_registries_urls(), + )); + let snapshot = npm_snapshot.unwrap(); let maybe_node_modules_path = node_modules_dir - .map(|node_modules_dir| vfs_root_dir_path.join(node_modules_dir)); - let fs = Arc::new(DenoCompileFileSystem::new(vfs)) - as Arc<dyn deno_fs::FileSystem>; + .map(|node_modules_dir| root_path.join(node_modules_dir)); + let in_npm_pkg_checker = + create_in_npm_pkg_checker(CreateInNpmPkgCheckerOptions::Managed( + CliManagedInNpmPkgCheckerCreateOptions { + root_cache_dir_url: npm_cache_dir.root_dir_url(), + maybe_node_modules_path: maybe_node_modules_path.as_deref(), + }, + )); let npm_resolver = create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed( - CliNpmResolverManagedCreateOptions { + CliManagedNpmResolverCreateOptions { snapshot: CliNpmResolverManagedSnapshotOption::Specified(Some( snapshot, )), maybe_lockfile: None, fs: fs.clone(), http_client_provider: http_client_provider.clone(), - npm_global_cache_dir, + npm_cache_dir, cache_setting, text_only_progress_bar: progress_bar, maybe_node_modules_path, @@ -505,50 +648,54 @@ pub async fn run( // this is only used for installing packages, which isn't necessary with deno compile NpmInstallDepsProvider::empty(), ), - // create an npmrc that uses the fake npm_registry_url to resolve packages - npmrc: Arc::new(ResolvedNpmRc { - default_config: deno_npm::npm_rc::RegistryConfigWithUrl { - registry_url: 
npm_registry_url.clone(), - config: Default::default(), - }, - scopes: Default::default(), - registry_configs: Default::default(), - }), + npmrc, lifecycle_scripts: Default::default(), }, )) .await?; - (fs, npm_resolver, Some(vfs_root_dir_path)) + (in_npm_pkg_checker, npm_resolver) } Some(binary::NodeModules::Byonm { root_node_modules_dir, }) => { - let vfs_root_dir_path = root_path.clone(); - let vfs = load_npm_vfs(vfs_root_dir_path.clone()) - .context("Failed to load vfs.")?; let root_node_modules_dir = root_node_modules_dir.map(|p| vfs.root().join(p)); - let fs = Arc::new(DenoCompileFileSystem::new(vfs)) - as Arc<dyn deno_fs::FileSystem>; + let in_npm_pkg_checker = + create_in_npm_pkg_checker(CreateInNpmPkgCheckerOptions::Byonm); let npm_resolver = create_cli_npm_resolver( CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions { fs: CliDenoResolverFs(fs.clone()), + pkg_json_resolver: pkg_json_resolver.clone(), root_node_modules_dir, }), ) .await?; - (fs, npm_resolver, Some(vfs_root_dir_path)) + (in_npm_pkg_checker, npm_resolver) } None => { - let fs = Arc::new(deno_fs::RealFs) as Arc<dyn deno_fs::FileSystem>; + // Packages from different registries are already inlined in the binary, + // so no need to create actual `.npmrc` configuration. 
+ let npmrc = create_default_npmrc(); + let npm_cache_dir = Arc::new(NpmCacheDir::new( + &DenoCacheEnvFsAdapter(fs.as_ref()), + npm_global_cache_dir, + npmrc.get_all_known_registries_urls(), + )); + let in_npm_pkg_checker = + create_in_npm_pkg_checker(CreateInNpmPkgCheckerOptions::Managed( + CliManagedInNpmPkgCheckerCreateOptions { + root_cache_dir_url: npm_cache_dir.root_dir_url(), + maybe_node_modules_path: None, + }, + )); let npm_resolver = create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed( - CliNpmResolverManagedCreateOptions { + CliManagedNpmResolverCreateOptions { snapshot: CliNpmResolverManagedSnapshotOption::Specified(None), maybe_lockfile: None, fs: fs.clone(), http_client_provider: http_client_provider.clone(), - npm_global_cache_dir, + npm_cache_dir, cache_setting, text_only_progress_bar: progress_bar, maybe_node_modules_path: None, @@ -557,41 +704,53 @@ pub async fn run( // this is only used for installing packages, which isn't necessary with deno compile NpmInstallDepsProvider::empty(), ), - // Packages from different registries are already inlined in the ESZip, - // so no need to create actual `.npmrc` configuration. 
npmrc: create_default_npmrc(), lifecycle_scripts: Default::default(), }, )) .await?; - (fs, npm_resolver, None) + (in_npm_pkg_checker, npm_resolver) } }; let has_node_modules_dir = npm_resolver.root_node_modules_path().is_some(); let node_resolver = Arc::new(NodeResolver::new( deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), - npm_resolver.clone().into_npm_resolver(), + in_npm_pkg_checker.clone(), + npm_resolver.clone().into_npm_pkg_folder_resolver(), + pkg_json_resolver.clone(), + )); + let cjs_tracker = Arc::new(CjsTracker::new( + in_npm_pkg_checker.clone(), + pkg_json_resolver.clone(), + IsCjsResolverOptions { + detect_cjs: !metadata.workspace_resolver.package_jsons.is_empty(), + is_node_main: false, + }, )); - let cjs_resolutions = Arc::new(CjsResolutionStore::default()); let cache_db = Caches::new(deno_dir_provider.clone()); let node_analysis_cache = NodeAnalysisCache::new(cache_db.node_analysis_db()); - let cli_node_resolver = Arc::new(CliNodeResolver::new( - cjs_resolutions.clone(), - fs.clone(), - node_resolver.clone(), - npm_resolver.clone(), - )); + let npm_req_resolver = + Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions { + byonm_resolver: (npm_resolver.clone()).into_maybe_byonm(), + fs: CliDenoResolverFs(fs.clone()), + in_npm_pkg_checker: in_npm_pkg_checker.clone(), + node_resolver: node_resolver.clone(), + npm_req_resolver: npm_resolver.clone().into_npm_req_resolver(), + })); let cjs_esm_code_analyzer = CliCjsCodeAnalyzer::new( node_analysis_cache, + cjs_tracker.clone(), fs.clone(), - cli_node_resolver.clone(), + None, ); let node_code_translator = Arc::new(NodeCodeTranslator::new( cjs_esm_code_analyzer, deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), + in_npm_pkg_checker, node_resolver.clone(), - npm_resolver.clone().into_npm_resolver(), + npm_resolver.clone().into_npm_pkg_folder_resolver(), + pkg_json_resolver.clone(), )); let workspace_resolver = { let import_map = match metadata.workspace_resolver.import_map { 
@@ -642,39 +801,52 @@ pub async fn run( metadata.workspace_resolver.pkg_json_resolution, ) }; + let code_cache = match metadata.code_cache_key { + Some(code_cache_key) => Some(Arc::new(DenoCompileCodeCache::new( + root_path.with_file_name(format!( + "{}.cache", + root_path.file_name().unwrap().to_string_lossy() + )), + code_cache_key, + )) as Arc<dyn CliCodeCache>), + None => { + log::debug!("Code cache disabled."); + None + } + }; let module_loader_factory = StandaloneModuleLoaderFactory { shared: Arc::new(SharedModuleLoaderState { - eszip: WorkspaceEszip { - eszip, - root_dir_url, - }, - workspace_resolver, - node_resolver: cli_node_resolver.clone(), + cjs_tracker: cjs_tracker.clone(), + fs: fs.clone(), + modules, + node_code_translator: node_code_translator.clone(), + node_resolver: node_resolver.clone(), npm_module_loader: Arc::new(NpmModuleLoader::new( - cjs_resolutions, - node_code_translator, + cjs_tracker.clone(), fs.clone(), - cli_node_resolver, + node_code_translator, )), + code_cache: code_cache.clone(), + npm_resolver: npm_resolver.clone(), + workspace_resolver, + npm_req_resolver, }), }; let permissions = { let mut permissions = metadata.permissions.to_options(/* cli_arg_urls */ &[]); - // if running with an npm vfs, grant read access to it - if let Some(vfs_root) = maybe_vfs_root { - match &mut permissions.allow_read { - Some(vec) if vec.is_empty() => { - // do nothing, already granted - } - Some(vec) => { - vec.push(vfs_root.to_string_lossy().to_string()); - } - None => { - permissions.allow_read = - Some(vec![vfs_root.to_string_lossy().to_string()]); - } + // grant read access to the vfs + match &mut permissions.allow_read { + Some(vec) if vec.is_empty() => { + // do nothing, already granted + } + Some(vec) => { + vec.push(root_path.to_string_lossy().to_string()); + } + None => { + permissions.allow_read = + Some(vec![root_path.to_string_lossy().to_string()]); } } @@ -696,8 +868,7 @@ pub async fn run( }); let worker_factory = 
CliMainWorkerFactory::new( Arc::new(BlobStore::default()), - // Code cache is not supported for standalone binary yet. - None, + code_cache, feature_checker, fs, None, @@ -706,6 +877,7 @@ pub async fn run( Box::new(module_loader_factory), node_resolver, npm_resolver, + pkg_json_resolver, root_cert_store_provider, permissions, StorageKeyResolver::empty(), @@ -721,7 +893,6 @@ pub async fn run( inspect_wait: false, strace_ops: None, is_inspecting: false, - is_npm_main: main_module.scheme() == "npm", skip_op_registration: true, location: metadata.location, argv0: NpmPackageReqReference::from_specifier(&main_module) @@ -739,6 +910,7 @@ pub async fn run( serve_port: None, serve_host: None, }, + metadata.otel_config, ); // Initialize v8 once from the main thread. diff --git a/cli/standalone/serialization.rs b/cli/standalone/serialization.rs new file mode 100644 index 000000000..a5eb649bf --- /dev/null +++ b/cli/standalone/serialization.rs @@ -0,0 +1,661 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::io::Write;

use deno_ast::MediaType;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_core::FastString;
use deno_core::ModuleSourceCode;
use deno_core::ModuleType;
use deno_npm::resolution::SerializedNpmResolutionSnapshot;
use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_semver::package::PackageReq;

use crate::standalone::virtual_fs::VirtualDirectory;

use super::binary::Metadata;
use super::virtual_fs::VfsBuilder;

/// Sentinel bytes written at both ends of the data section so that
/// deserialization can verify it is looking at data this code produced.
const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd";

/// Binary format:
/// * d3n0l4nd
/// * <metadata_len><metadata>
/// * <npm_snapshot_len><npm_snapshot>
/// * <remote_modules_len><remote_modules>
/// * <vfs_headers_len><vfs_headers>
/// * <vfs_file_data_len><vfs_file_data>
/// * d3n0l4nd
///
/// All length prefixes are little-endian u64. Metadata and the VFS
/// directory headers are JSON; the npm snapshot and remote modules use the
/// custom encodings below.
pub fn serialize_binary_data_section(
  metadata: &Metadata,
  npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
  remote_modules: &RemoteModulesStoreBuilder,
  vfs: VfsBuilder,
) -> Result<Vec<u8>, AnyError> {
  // Writes `<len as u64 LE><data>`.
  fn write_bytes_with_len(bytes: &mut Vec<u8>, data: &[u8]) {
    bytes.extend_from_slice(&(data.len() as u64).to_le_bytes());
    bytes.extend_from_slice(data);
  }

  let mut bytes = Vec::new();
  bytes.extend_from_slice(MAGIC_BYTES);

  // 1. Metadata
  {
    let metadata = serde_json::to_string(metadata)?;
    write_bytes_with_len(&mut bytes, metadata.as_bytes());
  }
  // 2. Npm snapshot
  {
    // An absent snapshot is encoded as a zero-length payload.
    let npm_snapshot =
      npm_snapshot.map(serialize_npm_snapshot).unwrap_or_default();
    write_bytes_with_len(&mut bytes, &npm_snapshot);
  }
  // 3. Remote modules
  {
    // The length is not known until after writing, so reserve an 8-byte
    // placeholder and patch it afterwards.
    let update_index = bytes.len();
    bytes.extend_from_slice(&(0_u64).to_le_bytes());
    let start_index = bytes.len();
    remote_modules.write(&mut bytes)?;
    let length = bytes.len() - start_index;
    let length_bytes = (length as u64).to_le_bytes();
    bytes[update_index..update_index + length_bytes.len()]
      .copy_from_slice(&length_bytes);
  }
  // 4. VFS
  {
    let (vfs, vfs_files) = vfs.into_dir_and_files();
    let vfs = serde_json::to_string(&vfs)?;
    write_bytes_with_len(&mut bytes, vfs.as_bytes());
    // File contents are concatenated after a single total-length prefix;
    // the JSON headers carry the per-file offsets.
    let vfs_bytes_len = vfs_files.iter().map(|f| f.len() as u64).sum::<u64>();
    bytes.extend_from_slice(&vfs_bytes_len.to_le_bytes());
    for file in &vfs_files {
      bytes.extend_from_slice(file);
    }
  }

  // write the magic bytes at the end so we can use it
  // to make sure we've deserialized correctly
  bytes.extend_from_slice(MAGIC_BYTES);

  Ok(bytes)
}

/// The parsed contents of a binary data section, borrowing the raw file
/// bytes (`'static` because the section is leaked/embedded for the process
/// lifetime).
pub struct DeserializedDataSection {
  pub metadata: Metadata,
  pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
  pub remote_modules: RemoteModulesStore,
  pub vfs_dir: VirtualDirectory,
  pub vfs_files_data: &'static [u8],
}

/// Inverse of [`serialize_binary_data_section`].
///
/// Returns `Ok(None)` when `data` does not start with the magic bytes
/// (i.e. this is not a standalone binary's data section); errors when the
/// section is present but malformed or truncated.
pub fn deserialize_binary_data_section(
  data: &'static [u8],
) -> Result<Option<DeserializedDataSection>, AnyError> {
  // Reads `<len as u64 LE><data>` and returns (rest, data).
  fn read_bytes_with_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> {
    let (input, len) = read_u64(input)?;
    let (input, data) = read_bytes(input, len as usize)?;
    Ok((input, data))
  }

  // Returns (rest, whether the next 8 bytes are the magic bytes).
  fn read_magic_bytes(input: &[u8]) -> Result<(&[u8], bool), AnyError> {
    if input.len() < MAGIC_BYTES.len() {
      bail!("Unexpected end of data. Could not find magic bytes.");
    }
    let (magic_bytes, input) = input.split_at(MAGIC_BYTES.len());
    if magic_bytes != MAGIC_BYTES {
      return Ok((input, false));
    }
    Ok((input, true))
  }

  let (input, found) = read_magic_bytes(data)?;
  if !found {
    return Ok(None);
  }

  // 1. Metadata
  let (input, data) = read_bytes_with_len(input).context("reading metadata")?;
  let metadata: Metadata =
    serde_json::from_slice(data).context("deserializing metadata")?;
  // 2. Npm snapshot
  let (input, data) =
    read_bytes_with_len(input).context("reading npm snapshot")?;
  let npm_snapshot = if data.is_empty() {
    // zero-length payload means no snapshot was serialized
    None
  } else {
    Some(deserialize_npm_snapshot(data).context("deserializing npm snapshot")?)
  };
  // 3. Remote modules
  let (input, data) =
    read_bytes_with_len(input).context("reading remote modules data")?;
  let remote_modules =
    RemoteModulesStore::build(data).context("deserializing remote modules")?;
  // 4. VFS
  let (input, data) = read_bytes_with_len(input).context("vfs")?;
  let vfs_dir: VirtualDirectory =
    serde_json::from_slice(data).context("deserializing vfs data")?;
  let (input, vfs_files_data) =
    read_bytes_with_len(input).context("reading vfs files data")?;

  // finally ensure we read the magic bytes at the end
  let (_input, found) = read_magic_bytes(input)?;
  if !found {
    bail!("Could not find magic bytes at the end of the data.");
  }

  Ok(Some(DeserializedDataSection {
    metadata,
    npm_snapshot,
    remote_modules,
    vfs_dir,
    vfs_files_data,
  }))
}

/// Accumulates remote modules and redirects at compile time; `write`
/// produces the layout consumed by [`RemoteModulesStore::build`]:
/// counts, a specifier table (name + byte offset into the data area), a
/// redirect table, then the concatenated module payloads.
#[derive(Default)]
pub struct RemoteModulesStoreBuilder {
  specifiers: Vec<(String, u64)>,
  data: Vec<(MediaType, Vec<u8>)>,
  data_byte_len: u64,
  redirects: Vec<(String, String)>,
  redirects_len: u64,
}

impl RemoteModulesStoreBuilder {
  /// Records a module's source; the current `data_byte_len` becomes its
  /// offset in the data area.
  pub fn add(&mut self, specifier: &Url, media_type: MediaType, data: Vec<u8>) {
    log::debug!("Adding '{}' ({})", specifier, media_type);
    let specifier = specifier.to_string();
    self.specifiers.push((specifier, self.data_byte_len));
    self.data_byte_len += 1 + 8 + data.len() as u64; // media type (1 byte), data length (8 bytes), data
    self.data.push((media_type, data));
  }

  /// Records specifier redirects (e.g. HTTP redirects captured at compile
  /// time) to be resolved again at runtime.
  pub fn add_redirects(&mut self, redirects: &BTreeMap<Url, Url>) {
    self.redirects.reserve(redirects.len());
    for (from, to) in redirects {
      log::debug!("Adding redirect '{}' -> '{}'", from, to);
      let from = from.to_string();
      let to = to.to_string();
      // 4-byte length prefix per string, matching `write` below
      self.redirects_len += (4 + from.len() + 4 + to.len()) as u64;
      self.redirects.push((from, to));
    }
  }

  /// Serializes headers (counts + specifier/redirect tables, u32 LE string
  /// lengths) followed by each module's `<media type byte><u64 len><data>`.
  fn write(&self, writer: &mut dyn Write) -> Result<(), AnyError> {
    writer.write_all(&(self.specifiers.len() as u32).to_le_bytes())?;
    writer.write_all(&(self.redirects.len() as u32).to_le_bytes())?;
    for (specifier, offset) in &self.specifiers {
      writer.write_all(&(specifier.len() as u32).to_le_bytes())?;
      writer.write_all(specifier.as_bytes())?;
      writer.write_all(&offset.to_le_bytes())?;
    }
    for (from, to) in &self.redirects {
      writer.write_all(&(from.len() as u32).to_le_bytes())?;
      writer.write_all(from.as_bytes())?;
      writer.write_all(&(to.len() as u32).to_le_bytes())?;
      writer.write_all(to.as_bytes())?;
    }
    for (media_type, data) in &self.data {
      writer.write_all(&[serialize_media_type(*media_type)])?;
      writer.write_all(&(data.len() as u64).to_le_bytes())?;
      writer.write_all(data)?;
    }
    Ok(())
  }
}

/// Module source as stored in the binary: either already known to be a
/// valid UTF-8 string, or raw bytes (Wasm and unknown media types).
pub enum DenoCompileModuleSource {
  String(&'static str),
  Bytes(Cow<'static, [u8]>),
}

impl DenoCompileModuleSource {
  /// Converts into the representation deno_core expects for module
  /// evaluation.
  pub fn into_for_v8(self) -> ModuleSourceCode {
    fn into_bytes(data: Cow<'static, [u8]>) -> ModuleSourceCode {
      ModuleSourceCode::Bytes(match data {
        Cow::Borrowed(d) => d.into(),
        Cow::Owned(d) => d.into_boxed_slice().into(),
      })
    }

    match self {
      // todo(https://github.com/denoland/deno_core/pull/943): store whether
      // the string is ascii or not ahead of time so we can avoid the is_ascii()
      // check in FastString::from_static
      Self::String(s) => ModuleSourceCode::String(FastString::from_static(s)),
      Self::Bytes(b) => into_bytes(b),
    }
  }
}

/// A module read back out of the remote-modules section.
pub struct DenoCompileModuleData<'a> {
  pub specifier: &'a Url,
  pub media_type: MediaType,
  pub data: Cow<'static, [u8]>,
}

impl<'a> DenoCompileModuleData<'a> {
  /// Maps the stored media type to deno_core's `ModuleType` and wraps the
  /// raw bytes accordingly (string for JS/TS/JSON, bytes otherwise).
  pub fn into_parts(self) -> (&'a Url, ModuleType, DenoCompileModuleSource) {
    fn into_string_unsafe(data: Cow<'static, [u8]>) -> DenoCompileModuleSource {
      match data {
        Cow::Borrowed(d) => DenoCompileModuleSource::String(
          // SAFETY: we know this is a valid utf8 string
          unsafe { std::str::from_utf8_unchecked(d) },
        ),
        Cow::Owned(d) => DenoCompileModuleSource::Bytes(Cow::Owned(d)),
      }
    }

    let (media_type, source) = match self.media_type {
      MediaType::JavaScript
      | MediaType::Jsx
      | MediaType::Mjs
      | MediaType::Cjs
      | MediaType::TypeScript
      | MediaType::Mts
      | MediaType::Cts
      | MediaType::Dts
      | MediaType::Dmts
      | MediaType::Dcts
      | MediaType::Tsx => {
        (ModuleType::JavaScript, into_string_unsafe(self.data))
      }
      MediaType::Json => (ModuleType::Json, into_string_unsafe(self.data)),
      MediaType::Wasm => {
        (ModuleType::Wasm, DenoCompileModuleSource::Bytes(self.data))
      }
      // just assume javascript if we made it here
      MediaType::Css | MediaType::SourceMap | MediaType::Unknown => (
        ModuleType::JavaScript,
        DenoCompileModuleSource::Bytes(self.data),
      ),
    };
    (self.specifier, media_type, source)
  }
}

/// Lookup value per specifier: either an offset into the data area or a
/// redirect to another specifier.
enum RemoteModulesStoreSpecifierValue {
  Data(usize),
  Redirect(Url),
}

/// Runtime view over the remote-modules section written by
/// [`RemoteModulesStoreBuilder`].
pub struct RemoteModulesStore {
  specifiers: HashMap<Url, RemoteModulesStoreSpecifierValue>,
  files_data: &'static [u8],
}

impl RemoteModulesStore {
  /// Parses the header tables; the remainder of `data` is kept as the
  /// module payload area referenced by the stored offsets.
  fn build(data: &'static [u8]) -> Result<Self, AnyError> {
    fn read_specifier(input: &[u8]) -> Result<(&[u8], (Url, u64)), AnyError> {
      let (input, specifier) = read_string_lossy(input)?;
      let specifier = Url::parse(&specifier)?;
      let (input, offset) = read_u64(input)?;
      Ok((input, (specifier, offset)))
    }

    fn read_redirect(input: &[u8]) -> Result<(&[u8], (Url, Url)), AnyError> {
      let (input, from) = read_string_lossy(input)?;
      let from = Url::parse(&from)?;
      let (input, to) = read_string_lossy(input)?;
      let to = Url::parse(&to)?;
      Ok((input, (from, to)))
    }

    fn read_headers(
      input: &[u8],
    ) -> Result<(&[u8], HashMap<Url, RemoteModulesStoreSpecifierValue>), AnyError>
    {
      let (input, specifiers_len) = read_u32_as_usize(input)?;
      let (mut input, redirects_len) = read_u32_as_usize(input)?;
      // data entries and redirects share one map, mirroring `write`'s order
      let mut specifiers =
        HashMap::with_capacity(specifiers_len + redirects_len);
      for _ in 0..specifiers_len {
        let (current_input, (specifier, offset)) =
          read_specifier(input).context("reading specifier")?;
        input = current_input;
        specifiers.insert(
          specifier,
          RemoteModulesStoreSpecifierValue::Data(offset as usize),
        );
      }

      for _ in 0..redirects_len {
        let (current_input, (from, to)) = read_redirect(input)?;
        input = current_input;
        specifiers.insert(from, RemoteModulesStoreSpecifierValue::Redirect(to));
      }

      Ok((input, specifiers))
    }

    let (files_data, specifiers) = read_headers(data)?;

    Ok(Self {
      specifiers,
      files_data,
    })
  }

  /// Follows redirects (at most 10 hops) and returns the final specifier
  /// that has data, or `None` when the specifier is unknown.
  pub fn resolve_specifier<'a>(
    &'a self,
    specifier: &'a Url,
  ) -> Result<Option<&'a Url>, AnyError> {
    let mut count = 0;
    let mut current = specifier;
    loop {
      if count > 10 {
        bail!("Too many redirects resolving '{}'", specifier);
      }
      match self.specifiers.get(current) {
        Some(RemoteModulesStoreSpecifierValue::Redirect(to)) => {
          current = to;
          count += 1;
        }
        Some(RemoteModulesStoreSpecifierValue::Data(_)) => {
          return Ok(Some(current));
        }
        None => {
          return Ok(None);
        }
      }
    }
  }

  /// Follows redirects (at most 10 hops) and reads the module payload:
  /// `<media type byte><u64 LE len><data>` at the stored offset. Returns
  /// `None` when the specifier is unknown.
  pub fn read<'a>(
    &'a self,
    original_specifier: &'a Url,
  ) -> Result<Option<DenoCompileModuleData<'a>>, AnyError> {
    let mut count = 0;
    let mut specifier = original_specifier;
    loop {
      if count > 10 {
        bail!("Too many redirects resolving '{}'", original_specifier);
      }
      match self.specifiers.get(specifier) {
        Some(RemoteModulesStoreSpecifierValue::Redirect(to)) => {
          specifier = to;
          count += 1;
        }
        Some(RemoteModulesStoreSpecifierValue::Data(offset)) => {
          let input = &self.files_data[*offset..];
          let (input, media_type_byte) = read_bytes(input, 1)?;
          let media_type = deserialize_media_type(media_type_byte[0])?;
          let (input, len) = read_u64(input)?;
          let (_input, data) = read_bytes(input, len as usize)?;
          return Ok(Some(DenoCompileModuleData {
            specifier,
            media_type,
            data: Cow::Borrowed(data),
          }));
        }
        None => {
          return Ok(None);
        }
      }
    }
  }
}

/// Custom npm snapshot encoding: a table of package ids (so dependencies
/// can reference packages by u32 index), the root package requirements,
/// then each package's dependency list. Packages and maps are sorted first
/// so the output is deterministic.
fn serialize_npm_snapshot(
  mut snapshot: SerializedNpmResolutionSnapshot,
) -> Vec<u8> {
  // Writes `<len as u32 LE><utf8 bytes>`.
  fn append_string(bytes: &mut Vec<u8>, string: &str) {
    let len = string.len() as u32;
    bytes.extend_from_slice(&len.to_le_bytes());
    bytes.extend_from_slice(string.as_bytes());
  }

  snapshot.packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism
  let ids_to_stored_ids = snapshot
    .packages
    .iter()
    .enumerate()
    .map(|(i, pkg)| (&pkg.id, i as u32))
    .collect::<HashMap<_, _>>();

  let mut root_packages: Vec<_> = snapshot.root_packages.iter().collect();
  root_packages.sort();
  let mut bytes = Vec::new();

  bytes.extend_from_slice(&(snapshot.packages.len() as u32).to_le_bytes());
  for pkg in &snapshot.packages {
    append_string(&mut bytes, &pkg.id.as_serialized());
  }

  bytes.extend_from_slice(&(root_packages.len() as u32).to_le_bytes());
  for (req, id) in root_packages {
    append_string(&mut bytes, &req.to_string());
    let id = ids_to_stored_ids.get(&id).unwrap();
    bytes.extend_from_slice(&id.to_le_bytes());
  }

  // dependency lists follow in the same order as the id table above
  for pkg in &snapshot.packages {
    let deps_len = pkg.dependencies.len() as u32;
    bytes.extend_from_slice(&deps_len.to_le_bytes());
    let mut deps: Vec<_> = pkg.dependencies.iter().collect();
    deps.sort();
    for (req, id) in deps {
      append_string(&mut bytes, req);
      let id = ids_to_stored_ids.get(&id).unwrap();
      bytes.extend_from_slice(&id.to_le_bytes());
    }
  }

  bytes
}
NpmPackageId::from_serialized(&id)?; + Ok((input, id)) + } + + #[allow(clippy::needless_lifetimes)] // clippy bug + fn parse_root_package<'a>( + id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>, + ) -> impl Fn(&[u8]) -> Result<(&[u8], (PackageReq, NpmPackageId)), AnyError> + 'a + { + |input| { + let (input, req) = read_string_lossy(input)?; + let req = PackageReq::from_str(&req)?; + let (input, id) = read_u32_as_usize(input)?; + Ok((input, (req, id_to_npm_id(id)?))) + } + } + + #[allow(clippy::needless_lifetimes)] // clippy bug + fn parse_package_dep<'a>( + id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>, + ) -> impl Fn(&[u8]) -> Result<(&[u8], (String, NpmPackageId)), AnyError> + 'a + { + |input| { + let (input, req) = read_string_lossy(input)?; + let (input, id) = read_u32_as_usize(input)?; + Ok((input, (req.into_owned(), id_to_npm_id(id)?))) + } + } + + fn parse_package<'a>( + input: &'a [u8], + id: NpmPackageId, + id_to_npm_id: &impl Fn(usize) -> Result<NpmPackageId, AnyError>, + ) -> Result<(&'a [u8], SerializedNpmResolutionSnapshotPackage), AnyError> { + let (input, deps_len) = read_u32_as_usize(input)?; + let (input, dependencies) = + parse_hashmap_n_times(input, deps_len, parse_package_dep(id_to_npm_id))?; + Ok(( + input, + SerializedNpmResolutionSnapshotPackage { + id, + system: Default::default(), + dist: Default::default(), + dependencies, + optional_dependencies: Default::default(), + bin: None, + scripts: Default::default(), + deprecated: Default::default(), + }, + )) + } + + let (input, packages_len) = read_u32_as_usize(input)?; + + // get a hashmap of all the npm package ids to their serialized ids + let (input, data_ids_to_npm_ids) = + parse_vec_n_times(input, packages_len, parse_id) + .context("deserializing id")?; + let data_id_to_npm_id = |id: usize| { + data_ids_to_npm_ids + .get(id) + .cloned() + .ok_or_else(|| deno_core::anyhow::anyhow!("Invalid npm package id")) + }; + + let (input, root_packages_len) = 
read_u32_as_usize(input)?; + let (input, root_packages) = parse_hashmap_n_times( + input, + root_packages_len, + parse_root_package(&data_id_to_npm_id), + ) + .context("deserializing root package")?; + let (input, packages) = + parse_vec_n_times_with_index(input, packages_len, |input, index| { + parse_package(input, data_id_to_npm_id(index)?, &data_id_to_npm_id) + }) + .context("deserializing package")?; + + if !input.is_empty() { + bail!("Unexpected data left over"); + } + + Ok( + SerializedNpmResolutionSnapshot { + packages, + root_packages, + } + // this is ok because we have already verified that all the + // identifiers found in the snapshot are valid via the + // npm package id -> npm package id mapping + .into_valid_unsafe(), + ) +} + +fn serialize_media_type(media_type: MediaType) -> u8 { + match media_type { + MediaType::JavaScript => 0, + MediaType::Jsx => 1, + MediaType::Mjs => 2, + MediaType::Cjs => 3, + MediaType::TypeScript => 4, + MediaType::Mts => 5, + MediaType::Cts => 6, + MediaType::Dts => 7, + MediaType::Dmts => 8, + MediaType::Dcts => 9, + MediaType::Tsx => 10, + MediaType::Json => 11, + MediaType::Wasm => 12, + MediaType::Css => 13, + MediaType::SourceMap => 14, + MediaType::Unknown => 15, + } +} + +fn deserialize_media_type(value: u8) -> Result<MediaType, AnyError> { + match value { + 0 => Ok(MediaType::JavaScript), + 1 => Ok(MediaType::Jsx), + 2 => Ok(MediaType::Mjs), + 3 => Ok(MediaType::Cjs), + 4 => Ok(MediaType::TypeScript), + 5 => Ok(MediaType::Mts), + 6 => Ok(MediaType::Cts), + 7 => Ok(MediaType::Dts), + 8 => Ok(MediaType::Dmts), + 9 => Ok(MediaType::Dcts), + 10 => Ok(MediaType::Tsx), + 11 => Ok(MediaType::Json), + 12 => Ok(MediaType::Wasm), + 13 => Ok(MediaType::Css), + 14 => Ok(MediaType::SourceMap), + 15 => Ok(MediaType::Unknown), + _ => bail!("Unknown media type value: {}", value), + } +} + +fn parse_hashmap_n_times<TKey: std::cmp::Eq + std::hash::Hash, TValue>( + mut input: &[u8], + times: usize, + parse: impl Fn(&[u8]) -> 
Result<(&[u8], (TKey, TValue)), AnyError>, +) -> Result<(&[u8], HashMap<TKey, TValue>), AnyError> { + let mut results = HashMap::with_capacity(times); + for _ in 0..times { + let result = parse(input); + let (new_input, (key, value)) = result?; + results.insert(key, value); + input = new_input; + } + Ok((input, results)) +} + +fn parse_vec_n_times<TResult>( + input: &[u8], + times: usize, + parse: impl Fn(&[u8]) -> Result<(&[u8], TResult), AnyError>, +) -> Result<(&[u8], Vec<TResult>), AnyError> { + parse_vec_n_times_with_index(input, times, |input, _index| parse(input)) +} + +fn parse_vec_n_times_with_index<TResult>( + mut input: &[u8], + times: usize, + parse: impl Fn(&[u8], usize) -> Result<(&[u8], TResult), AnyError>, +) -> Result<(&[u8], Vec<TResult>), AnyError> { + let mut results = Vec::with_capacity(times); + for i in 0..times { + let result = parse(input, i); + let (new_input, result) = result?; + results.push(result); + input = new_input; + } + Ok((input, results)) +} + +fn read_bytes(input: &[u8], len: usize) -> Result<(&[u8], &[u8]), AnyError> { + if input.len() < len { + bail!("Unexpected end of data.",); + } + let (len_bytes, input) = input.split_at(len); + Ok((input, len_bytes)) +} + +fn read_string_lossy(input: &[u8]) -> Result<(&[u8], Cow<str>), AnyError> { + let (input, str_len) = read_u32_as_usize(input)?; + let (input, data_bytes) = read_bytes(input, str_len)?; + Ok((input, String::from_utf8_lossy(data_bytes))) +} + +fn read_u32_as_usize(input: &[u8]) -> Result<(&[u8], usize), AnyError> { + let (input, len_bytes) = read_bytes(input, 4)?; + let len = u32::from_le_bytes(len_bytes.try_into()?); + Ok((input, len as usize)) +} + +fn read_u64(input: &[u8]) -> Result<(&[u8], u64), AnyError> { + let (input, len_bytes) = read_bytes(input, 8)?; + let len = u64::from_le_bytes(len_bytes.try_into()?); + Ok((input, len)) +} diff --git a/cli/standalone/virtual_fs.rs b/cli/standalone/virtual_fs.rs index 53d045b62..26bb0db75 100644 --- 
a/cli/standalone/virtual_fs.rs +++ b/cli/standalone/virtual_fs.rs @@ -7,6 +7,7 @@ use std::fs::File; use std::io::Read; use std::io::Seek; use std::io::SeekFrom; +use std::ops::Range; use std::path::Path; use std::path::PathBuf; use std::rc::Rc; @@ -67,6 +68,26 @@ impl VfsBuilder { }) } + pub fn set_new_root_path( + &mut self, + root_path: PathBuf, + ) -> Result<(), AnyError> { + let root_path = canonicalize_path(&root_path)?; + self.root_path = root_path; + self.root_dir = VirtualDirectory { + name: self + .root_path + .file_stem() + .map(|s| s.to_string_lossy().into_owned()) + .unwrap_or("root".to_string()), + entries: vec![VfsEntry::Dir(VirtualDirectory { + name: std::mem::take(&mut self.root_dir.name), + entries: std::mem::take(&mut self.root_dir.entries), + })], + }; + Ok(()) + } + pub fn with_root_dir<R>( &mut self, with_root: impl FnOnce(&mut VirtualDirectory) -> R, @@ -119,7 +140,7 @@ impl VfsBuilder { // inline the symlink and make the target file let file_bytes = std::fs::read(&target) .with_context(|| format!("Reading {}", path.display()))?; - self.add_file(&path, file_bytes)?; + self.add_file_with_data_inner(&path, file_bytes)?; } else { log::warn!( "{} Symlink target is outside '{}'. 
Excluding symlink at '{}' with target '{}'.", @@ -191,16 +212,32 @@ impl VfsBuilder { self.add_file_at_path_not_symlink(&target_path) } - pub fn add_file_at_path_not_symlink( + fn add_file_at_path_not_symlink( &mut self, path: &Path, ) -> Result<(), AnyError> { let file_bytes = std::fs::read(path) .with_context(|| format!("Reading {}", path.display()))?; - self.add_file(path, file_bytes) + self.add_file_with_data_inner(path, file_bytes) } - fn add_file(&mut self, path: &Path, data: Vec<u8>) -> Result<(), AnyError> { + pub fn add_file_with_data( + &mut self, + path: &Path, + data: Vec<u8>, + ) -> Result<(), AnyError> { + let target_path = canonicalize_path(path)?; + if target_path != path { + self.add_symlink(path, &target_path)?; + } + self.add_file_with_data_inner(&target_path, data) + } + + fn add_file_with_data_inner( + &mut self, + path: &Path, + data: Vec<u8>, + ) -> Result<(), AnyError> { log::debug!("Adding file '{}'", path.display()); let checksum = util::checksum::gen(&[&data]); let offset = if let Some(offset) = self.file_offsets.get(&checksum) { @@ -249,8 +286,15 @@ impl VfsBuilder { path.display(), target.display() ); - let dest = self.path_relative_root(target)?; - if dest == self.path_relative_root(path)? { + let relative_target = self.path_relative_root(target)?; + let relative_path = match self.path_relative_root(path) { + Ok(path) => path, + Err(StripRootError { .. 
}) => { + // ignore if the original path is outside the root directory + return Ok(()); + } + }; + if relative_target == relative_path { // it's the same, ignore return Ok(()); } @@ -263,7 +307,7 @@ impl VfsBuilder { insert_index, VfsEntry::Symlink(VirtualSymlink { name: name.to_string(), - dest_parts: dest + dest_parts: relative_target .components() .map(|c| c.as_os_str().to_string_lossy().to_string()) .collect::<Vec<_>>(), @@ -306,6 +350,7 @@ impl<'a> VfsEntryRef<'a> { atime: None, birthtime: None, mtime: None, + ctime: None, blksize: 0, size: 0, dev: 0, @@ -328,6 +373,7 @@ impl<'a> VfsEntryRef<'a> { atime: None, birthtime: None, mtime: None, + ctime: None, blksize: 0, size: file.len, dev: 0, @@ -350,6 +396,7 @@ impl<'a> VfsEntryRef<'a> { atime: None, birthtime: None, mtime: None, + ctime: None, blksize: 0, size: 0, dev: 0, @@ -751,14 +798,14 @@ impl deno_io::fs::File for FileBackedVfsFile { #[derive(Debug)] pub struct FileBackedVfs { - file: Mutex<Vec<u8>>, + vfs_data: Cow<'static, [u8]>, fs_root: VfsRoot, } impl FileBackedVfs { - pub fn new(file: Vec<u8>, fs_root: VfsRoot) -> Self { + pub fn new(data: Cow<'static, [u8]>, fs_root: VfsRoot) -> Self { Self { - file: Mutex::new(file), + vfs_data: data, fs_root, } } @@ -827,10 +874,15 @@ impl FileBackedVfs { Ok(path) } - pub fn read_file_all(&self, file: &VirtualFile) -> std::io::Result<Vec<u8>> { - let mut buf = vec![0; file.len as usize]; - self.read_file(file, 0, &mut buf)?; - Ok(buf) + pub fn read_file_all( + &self, + file: &VirtualFile, + ) -> std::io::Result<Cow<'static, [u8]>> { + let read_range = self.get_read_range(file, 0, file.len)?; + match &self.vfs_data { + Cow::Borrowed(data) => Ok(Cow::Borrowed(&data[read_range])), + Cow::Owned(data) => Ok(Cow::Owned(data[read_range].to_vec())), + } } pub fn read_file( @@ -839,18 +891,27 @@ impl FileBackedVfs { pos: u64, buf: &mut [u8], ) -> std::io::Result<usize> { - let data = self.file.lock(); + let read_range = self.get_read_range(file, pos, buf.len() as u64)?; + 
buf.copy_from_slice(&self.vfs_data[read_range]); + Ok(buf.len()) + } + + fn get_read_range( + &self, + file: &VirtualFile, + pos: u64, + len: u64, + ) -> std::io::Result<Range<usize>> { + let data = &self.vfs_data; let start = self.fs_root.start_file_offset + file.offset + pos; - let end = start + buf.len() as u64; + let end = start + len; if end > data.len() as u64 { return Err(std::io::Error::new( std::io::ErrorKind::UnexpectedEof, "unexpected EOF", )); } - - buf.copy_from_slice(&data[start as usize..end as usize]); - Ok(buf.len()) + Ok(start as usize..end as usize) } pub fn dir_entry(&self, path: &Path) -> std::io::Result<&VirtualDirectory> { @@ -888,7 +949,7 @@ mod test { #[track_caller] fn read_file(vfs: &FileBackedVfs, path: &Path) -> String { let file = vfs.file_entry(path).unwrap(); - String::from_utf8(vfs.read_file_all(file).unwrap()).unwrap() + String::from_utf8(vfs.read_file_all(file).unwrap().into_owned()).unwrap() } #[test] @@ -901,20 +962,23 @@ mod test { let src_path = src_path.to_path_buf(); let mut builder = VfsBuilder::new(src_path.clone()).unwrap(); builder - .add_file(&src_path.join("a.txt"), "data".into()) + .add_file_with_data_inner(&src_path.join("a.txt"), "data".into()) .unwrap(); builder - .add_file(&src_path.join("b.txt"), "data".into()) + .add_file_with_data_inner(&src_path.join("b.txt"), "data".into()) .unwrap(); assert_eq!(builder.files.len(), 1); // because duplicate data builder - .add_file(&src_path.join("c.txt"), "c".into()) + .add_file_with_data_inner(&src_path.join("c.txt"), "c".into()) .unwrap(); builder - .add_file(&src_path.join("sub_dir").join("d.txt"), "d".into()) + .add_file_with_data_inner( + &src_path.join("sub_dir").join("d.txt"), + "d".into(), + ) .unwrap(); builder - .add_file(&src_path.join("e.txt"), "e".into()) + .add_file_with_data_inner(&src_path.join("e.txt"), "e".into()) .unwrap(); builder .add_symlink( @@ -1031,7 +1095,7 @@ mod test { ( dest_path.to_path_buf(), FileBackedVfs::new( - data, + Cow::Owned(data), 
VfsRoot { dir: root_dir, root_path: dest_path.to_path_buf(), @@ -1082,7 +1146,7 @@ mod test { let temp_path = temp_dir.path().canonicalize(); let mut builder = VfsBuilder::new(temp_path.to_path_buf()).unwrap(); builder - .add_file( + .add_file_with_data_inner( temp_path.join("a.txt").as_path(), "0123456789".to_string().into_bytes(), ) diff --git a/cli/task_runner.rs b/cli/task_runner.rs index 418043b23..43840e868 100644 --- a/cli/task_runner.rs +++ b/cli/task_runner.rs @@ -155,6 +155,12 @@ fn prepare_env_vars( initial_cwd.to_string_lossy().to_string(), ); } + if !env_vars.contains_key(crate::npm::NPM_CONFIG_USER_AGENT_ENV_VAR) { + env_vars.insert( + crate::npm::NPM_CONFIG_USER_AGENT_ENV_VAR.into(), + crate::npm::get_npm_config_user_agent(), + ); + } if let Some(node_modules_dir) = node_modules_dir { prepend_to_path( &mut env_vars, @@ -204,7 +210,7 @@ impl ShellCommand for NpmCommand { mut context: ShellCommandContext, ) -> LocalBoxFuture<'static, ExecuteResult> { if context.args.first().map(|s| s.as_str()) == Some("run") - && context.args.len() > 2 + && context.args.len() >= 2 // for now, don't run any npm scripts that have a flag because // we don't handle stuff like `--workspaces` properly && !context.args.iter().any(|s| s.starts_with('-')) @@ -267,10 +273,12 @@ impl ShellCommand for NodeCommand { ) .execute(context); } + args.extend(["run", "-A"].into_iter().map(|s| s.to_string())); args.extend(context.args.iter().cloned()); let mut state = context.state; + state.apply_env_var(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME, "1"); ExecutableCommand::new("deno".to_string(), std::env::current_exe().unwrap()) .execute(ShellCommandContext { diff --git a/cli/tools/bench/mod.rs b/cli/tools/bench/mod.rs index be5d0ad0e..1d49fa061 100644 --- a/cli/tools/bench/mod.rs +++ b/cli/tools/bench/mod.rs @@ -193,7 +193,7 @@ async fn bench_specifier_inner( .await?; // We execute the main module as a side module so that import.meta.main is not set. 
- worker.execute_side_module_possibly_with_npm().await?; + worker.execute_side_module().await?; let mut worker = worker.into_main_worker(); @@ -486,6 +486,7 @@ pub async fn run_benchmarks_with_watch( ), move |flags, watcher_communicator, changed_paths| { let bench_flags = bench_flags.clone(); + watcher_communicator.show_path_changed(changed_paths.clone()); Ok(async move { let factory = CliFactory::from_flags_for_watcher( flags, diff --git a/cli/tools/check.rs b/cli/tools/check.rs index 7edb392d4..d88027888 100644 --- a/cli/tools/check.rs +++ b/cli/tools/check.rs @@ -32,6 +32,7 @@ use crate::graph_util::ModuleGraphBuilder; use crate::npm::CliNpmResolver; use crate::tsc; use crate::tsc::Diagnostics; +use crate::tsc::TypeCheckingCjsTracker; use crate::util::extract; use crate::util::path::to_percent_decoded_str; @@ -99,6 +100,7 @@ pub struct CheckOptions { pub struct TypeChecker { caches: Arc<Caches>, + cjs_tracker: Arc<TypeCheckingCjsTracker>, cli_options: Arc<CliOptions>, module_graph_builder: Arc<ModuleGraphBuilder>, node_resolver: Arc<NodeResolver>, @@ -108,6 +110,7 @@ pub struct TypeChecker { impl TypeChecker { pub fn new( caches: Arc<Caches>, + cjs_tracker: Arc<TypeCheckingCjsTracker>, cli_options: Arc<CliOptions>, module_graph_builder: Arc<ModuleGraphBuilder>, node_resolver: Arc<NodeResolver>, @@ -115,6 +118,7 @@ impl TypeChecker { ) -> Self { Self { caches, + cjs_tracker, cli_options, module_graph_builder, node_resolver, @@ -244,6 +248,7 @@ impl TypeChecker { graph: graph.clone(), hash_data, maybe_npm: Some(tsc::RequestNpmState { + cjs_tracker: self.cjs_tracker.clone(), node_resolver: self.node_resolver.clone(), npm_resolver: self.npm_resolver.clone(), }), @@ -346,7 +351,7 @@ fn get_check_hash( } } MediaType::Json - | MediaType::TsBuildInfo + | MediaType::Css | MediaType::SourceMap | MediaType::Wasm | MediaType::Unknown => continue, @@ -428,7 +433,7 @@ fn get_tsc_roots( } MediaType::Json | MediaType::Wasm - | MediaType::TsBuildInfo + | MediaType::Css | 
MediaType::SourceMap | MediaType::Unknown => None, }, @@ -536,7 +541,7 @@ fn has_ts_check(media_type: MediaType, file_text: &str) -> bool { | MediaType::Tsx | MediaType::Json | MediaType::Wasm - | MediaType::TsBuildInfo + | MediaType::Css | MediaType::SourceMap | MediaType::Unknown => false, } diff --git a/cli/tools/compile.rs b/cli/tools/compile.rs index c1f98bc08..b3e999337 100644 --- a/cli/tools/compile.rs +++ b/cli/tools/compile.rs @@ -5,6 +5,7 @@ use crate::args::CompileFlags; use crate::args::Flags; use crate::factory::CliFactory; use crate::http_util::HttpClientProvider; +use crate::standalone::binary::StandaloneRelativeFileBaseUrl; use crate::standalone::is_standalone_binary; use deno_ast::ModuleSpecifier; use deno_core::anyhow::bail; @@ -14,7 +15,6 @@ use deno_core::error::AnyError; use deno_core::resolve_url_or_path; use deno_graph::GraphKind; use deno_terminal::colors; -use eszip::EszipRelativeFileBaseUrl; use rand::Rng; use std::path::Path; use std::path::PathBuf; @@ -29,7 +29,6 @@ pub async fn compile( let factory = CliFactory::from_flags(flags); let cli_options = factory.cli_options()?; let module_graph_creator = factory.module_graph_creator().await?; - let parsed_source_cache = factory.parsed_source_cache(); let binary_writer = factory.create_compile_binary_writer().await?; let http_client = factory.http_client_provider(); let module_specifier = cli_options.resolve_main_module()?; @@ -70,7 +69,7 @@ pub async fn compile( let graph = if cli_options.type_check_mode().is_true() { // In this case, the previous graph creation did type checking, which will // create a module graph with types information in it. We don't want to - // store that in the eszip so create a code only module graph from scratch. + // store that in the binary so create a code only module graph from scratch. module_graph_creator .create_graph(GraphKind::CodeOnly, module_roots) .await? 
@@ -81,11 +80,6 @@ pub async fn compile( let ts_config_for_emit = cli_options .resolve_ts_config_for_emit(deno_config::deno_json::TsConfigType::Emit)?; check_warn_tsconfig(&ts_config_for_emit); - let (transpile_options, emit_options) = - crate::args::ts_config_to_transpile_and_emit_options( - ts_config_for_emit.ts_config, - )?; - let parser = parsed_source_cache.as_capturing_parser(); let root_dir_url = resolve_root_dir_from_specifiers( cli_options.workspace().root_dir(), graph.specifiers().map(|(s, _)| s).chain( @@ -96,17 +90,6 @@ pub async fn compile( ), ); log::debug!("Binary root dir: {}", root_dir_url); - let root_dir_url = EszipRelativeFileBaseUrl::new(&root_dir_url); - let eszip = eszip::EszipV2::from_graph(eszip::FromGraphOptions { - graph, - parser, - transpile_options, - emit_options, - // make all the modules relative to the root folder - relative_file_base: Some(root_dir_url), - npm_packages: None, - })?; - log::info!( "{} {} to {}", colors::green("Compile"), @@ -133,15 +116,18 @@ pub async fn compile( let write_result = binary_writer .write_bin( file, - eszip, - root_dir_url, + &graph, + StandaloneRelativeFileBaseUrl::from(&root_dir_url), module_specifier, &compile_flags, cli_options, ) .await .with_context(|| { - format!("Writing temporary file '{}'", temp_path.display()) + format!( + "Writing deno compile executable to temporary file '{}'", + temp_path.display() + ) }); // set it as executable diff --git a/cli/tools/coverage/mod.rs b/cli/tools/coverage/mod.rs index 260c0c842..2a554c133 100644 --- a/cli/tools/coverage/mod.rs +++ b/cli/tools/coverage/mod.rs @@ -6,12 +6,12 @@ use crate::args::FileFlags; use crate::args::Flags; use crate::cdp; use crate::factory::CliFactory; -use crate::npm::CliNpmResolver; use crate::tools::fmt::format_json; use crate::tools::test::is_supported_test_path; use crate::util::text_encoding::source_map_from_code; use deno_ast::MediaType; +use deno_ast::ModuleKind; use deno_ast::ModuleSpecifier; use 
deno_config::glob::FileCollector; use deno_config::glob::FilePatterns; @@ -25,6 +25,7 @@ use deno_core::serde_json; use deno_core::sourcemap::SourceMap; use deno_core::url::Url; use deno_core::LocalInspectorSession; +use node_resolver::InNpmPackageChecker; use regex::Regex; use std::fs; use std::fs::File; @@ -327,6 +328,7 @@ fn generate_coverage_report( coverage_report.found_lines = if let Some(source_map) = maybe_source_map.as_ref() { + let script_source_lines = script_source.lines().collect::<Vec<_>>(); let mut found_lines = line_counts .iter() .enumerate() @@ -334,7 +336,23 @@ fn generate_coverage_report( // get all the mappings from this destination line to a different src line let mut results = source_map .tokens() - .filter(move |token| token.get_dst_line() as usize == index) + .filter(|token| { + let dst_line = token.get_dst_line() as usize; + dst_line == index && { + let dst_col = token.get_dst_col() as usize; + let content = script_source_lines + .get(dst_line) + .and_then(|line| { + line.get(dst_col..std::cmp::min(dst_col + 2, line.len())) + }) + .unwrap_or(""); + + !content.is_empty() + && content != "/*" + && content != "*/" + && content != "//" + } + }) .map(move |token| (token.get_src_line() as usize, *count)) .collect::<Vec<_>>(); // only keep the results that point at different src lines @@ -444,7 +462,7 @@ fn filter_coverages( coverages: Vec<cdp::ScriptCoverage>, include: Vec<String>, exclude: Vec<String>, - npm_resolver: &dyn CliNpmResolver, + in_npm_pkg_checker: &dyn InNpmPackageChecker, ) -> Vec<cdp::ScriptCoverage> { let include: Vec<Regex> = include.iter().map(|e| Regex::new(e).unwrap()).collect(); @@ -462,13 +480,13 @@ fn filter_coverages( .filter(|e| { let is_internal = e.url.starts_with("ext:") || e.url.ends_with("__anonymous__") - || e.url.ends_with("$deno$test.js") + || e.url.ends_with("$deno$test.mjs") || e.url.ends_with(".snap") || is_supported_test_path(Path::new(e.url.as_str())) || doc_test_re.is_match(e.url.as_str()) || 
Url::parse(&e.url) .ok() - .map(|url| npm_resolver.in_npm_package(&url)) + .map(|url| in_npm_pkg_checker.in_npm_package(&url)) .unwrap_or(false); let is_included = include.iter().any(|p| p.is_match(&e.url)); @@ -479,7 +497,7 @@ fn filter_coverages( .collect::<Vec<cdp::ScriptCoverage>>() } -pub async fn cover_files( +pub fn cover_files( flags: Arc<Flags>, coverage_flags: CoverageFlags, ) -> Result<(), AnyError> { @@ -489,9 +507,10 @@ pub async fn cover_files( let factory = CliFactory::from_flags(flags); let cli_options = factory.cli_options()?; - let npm_resolver = factory.npm_resolver().await?; + let in_npm_pkg_checker = factory.in_npm_pkg_checker()?; let file_fetcher = factory.file_fetcher()?; let emitter = factory.emitter()?; + let cjs_tracker = factory.cjs_tracker()?; assert!(!coverage_flags.files.include.is_empty()); @@ -511,7 +530,7 @@ pub async fn cover_files( script_coverages, coverage_flags.include, coverage_flags.exclude, - npm_resolver.as_ref(), + in_npm_pkg_checker.as_ref(), ); if script_coverages.is_empty() { return Err(generic_error("No covered files included in the report")); @@ -568,16 +587,21 @@ pub async fn cover_files( let transpiled_code = match file.media_type { MediaType::JavaScript | MediaType::Unknown + | MediaType::Css + | MediaType::Wasm | MediaType::Cjs | MediaType::Mjs | MediaType::Json => None, - MediaType::Dts | MediaType::Dmts | MediaType::Dcts => Some(Vec::new()), + MediaType::Dts | MediaType::Dmts | MediaType::Dcts => Some(String::new()), MediaType::TypeScript | MediaType::Jsx | MediaType::Mts | MediaType::Cts | MediaType::Tsx => { - Some(match emitter.maybe_cached_emit(&file.specifier, &file.source) { + let module_kind = ModuleKind::from_is_cjs( + cjs_tracker.is_maybe_cjs(&file.specifier, file.media_type)?, + ); + Some(match emitter.maybe_cached_emit(&file.specifier, module_kind, &file.source) { Some(code) => code, None => { return Err(anyhow!( @@ -588,13 +612,12 @@ pub async fn cover_files( } }) } - MediaType::Wasm | 
MediaType::TsBuildInfo | MediaType::SourceMap => { + MediaType::SourceMap => { unreachable!() } }; let runtime_code: String = match transpiled_code { - Some(code) => String::from_utf8(code) - .with_context(|| format!("Failed decoding {}", file.specifier))?, + Some(code) => code, None => original_source.to_string(), }; diff --git a/cli/tools/doc.rs b/cli/tools/doc.rs index 5e18546a2..e33da4efb 100644 --- a/cli/tools/doc.rs +++ b/cli/tools/doc.rs @@ -22,9 +22,9 @@ use deno_core::serde_json; use deno_doc as doc; use deno_doc::html::UrlResolveKind; use deno_graph::source::NullFileSystem; +use deno_graph::EsParser; use deno_graph::GraphKind; use deno_graph::ModuleAnalyzer; -use deno_graph::ModuleParser; use deno_graph::ModuleSpecifier; use doc::html::ShortPath; use doc::DocDiagnostic; @@ -37,7 +37,7 @@ const JSON_SCHEMA_VERSION: u8 = 1; async fn generate_doc_nodes_for_builtin_types( doc_flags: DocFlags, - parser: &dyn ModuleParser, + parser: &dyn EsParser, analyzer: &dyn ModuleAnalyzer, ) -> Result<IndexMap<ModuleSpecifier, Vec<doc::DocNode>>, AnyError> { let source_file_specifier = @@ -96,7 +96,7 @@ pub async fn doc( let module_info_cache = factory.module_info_cache()?; let parsed_source_cache = factory.parsed_source_cache(); let capturing_parser = parsed_source_cache.as_capturing_parser(); - let analyzer = module_info_cache.as_module_analyzer(parsed_source_cache); + let analyzer = module_info_cache.as_module_analyzer(); let doc_nodes_by_url = match doc_flags.source_files { DocSourceFileFlag::Builtin => { diff --git a/cli/tools/fmt.rs b/cli/tools/fmt.rs index 8a4bc4e6c..d40abd5f5 100644 --- a/cli/tools/fmt.rs +++ b/cli/tools/fmt.rs @@ -83,6 +83,7 @@ pub async fn format( file_watcher::PrintConfig::new("Fmt", !watch_flags.no_clear_screen), move |flags, watcher_communicator, changed_paths| { let fmt_flags = fmt_flags.clone(); + watcher_communicator.show_path_changed(changed_paths.clone()); Ok(async move { let factory = CliFactory::from_flags(flags); let cli_options = 
factory.cli_options()?; @@ -227,6 +228,7 @@ fn collect_fmt_files( }) .ignore_git_folder() .ignore_node_modules() + .use_gitignore() .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned)) .collect_file_patterns(&deno_config::fs::RealDenoConfigFs, files) } @@ -353,6 +355,21 @@ fn format_yaml( file_text: &str, fmt_options: &FmtOptionsConfig, ) -> Result<Option<String>, AnyError> { + let ignore_file = file_text + .lines() + .take_while(|line| line.starts_with('#')) + .any(|line| { + line + .strip_prefix('#') + .unwrap() + .trim() + .starts_with("deno-fmt-ignore-file") + }); + + if ignore_file { + return Ok(None); + } + let formatted_str = pretty_yaml::format_text(file_text, &get_resolved_yaml_config(fmt_options)) .map_err(AnyError::from)?; @@ -775,28 +792,26 @@ fn format_ensure_stable( return Ok(Some(current_text)); } Err(err) => { - panic!( + bail!( concat!( "Formatting succeeded initially, but failed when ensuring a ", "stable format. This indicates a bug in the formatter where ", "the text it produces is not syntactically correct. As a temporary ", - "workaround you can ignore this file ({}).\n\n{:#}" + "workaround you can ignore this file.\n\n{:#}" ), - file_path.display(), err, ) } } count += 1; if count == 5 { - panic!( + bail!( concat!( "Formatting not stable. Bailed after {} tries. This indicates a bug ", - "in the formatter where it formats the file ({}) differently each time. As a ", + "in the formatter where it formats the file differently each time. As a ", "temporary workaround you can ignore this file." 
), count, - file_path.display(), ) } } @@ -978,6 +993,7 @@ fn get_resolved_malva_config( single_line_top_level_declarations: false, selector_override_comment_directive: "deno-fmt-selector-override".into(), ignore_comment_directive: "deno-fmt-ignore".into(), + ignore_file_comment_directive: "deno-fmt-ignore-file".into(), }; FormatOptions { @@ -1016,7 +1032,7 @@ fn get_resolved_markup_fmt_config( max_attrs_per_line: None, prefer_attrs_single_line: false, html_normal_self_closing: None, - html_void_self_closing: Some(true), + html_void_self_closing: None, component_self_closing: None, svg_self_closing: None, mathml_self_closing: None, @@ -1036,6 +1052,7 @@ fn get_resolved_markup_fmt_config( svelte_directive_shorthand: Some(true), astro_attr_shorthand: Some(true), ignore_comment_directive: "deno-fmt-ignore".into(), + ignore_file_comment_directive: "deno-fmt-ignore-file".into(), }; FormatOptions { @@ -1198,6 +1215,8 @@ fn is_supported_ext_fmt(path: &Path) -> bool { #[cfg(test)] mod test { + use test_util::assert_starts_with; + use super::*; #[test] @@ -1253,12 +1272,16 @@ mod test { } #[test] - #[should_panic(expected = "Formatting not stable. Bailed after 5 tries.")] fn test_format_ensure_stable_unstable_format() { - format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| { - Ok(Some(format!("1{file_text}"))) - }) - .unwrap(); + let err = + format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| { + Ok(Some(format!("1{file_text}"))) + }) + .unwrap_err(); + assert_starts_with!( + err.to_string(), + "Formatting not stable. Bailed after 5 tries." 
+ ); } #[test] @@ -1272,16 +1295,20 @@ mod test { } #[test] - #[should_panic(expected = "Formatting succeeded initially, but failed when")] fn test_format_ensure_stable_error_second() { - format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| { - if file_text == "1" { - Ok(Some("11".to_string())) - } else { - bail!("Error formatting.") - } - }) - .unwrap(); + let err = + format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| { + if file_text == "1" { + Ok(Some("11".to_string())) + } else { + bail!("Error formatting.") + } + }) + .unwrap_err(); + assert_starts_with!( + err.to_string(), + "Formatting succeeded initially, but failed when" + ); } #[test] diff --git a/cli/tools/info.rs b/cli/tools/info.rs index a6b390b81..c138d03d4 100644 --- a/cli/tools/info.rs +++ b/cli/tools/info.rs @@ -11,12 +11,14 @@ use deno_core::anyhow::bail; use deno_core::error::AnyError; use deno_core::resolve_url_or_path; use deno_core::serde_json; +use deno_core::url; use deno_graph::Dependency; use deno_graph::GraphKind; use deno_graph::Module; use deno_graph::ModuleError; use deno_graph::ModuleGraph; use deno_graph::Resolution; +use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::resolution::NpmResolutionSnapshot; use deno_npm::NpmPackageId; use deno_npm::NpmResolutionPackage; @@ -47,20 +49,23 @@ pub async fn info( let module_graph_creator = factory.module_graph_creator().await?; let npm_resolver = factory.npm_resolver().await?; let maybe_lockfile = cli_options.maybe_lockfile(); + let npmrc = cli_options.npmrc(); let resolver = factory.workspace_resolver().await?; - let maybe_import_specifier = - if let Some(import_map) = resolver.maybe_import_map() { - if let Ok(imports_specifier) = - import_map.resolve(&specifier, import_map.base_url()) - { - Some(imports_specifier) - } else { - None - } + let cwd_url = + url::Url::from_directory_path(cli_options.initial_cwd()).unwrap(); + + let maybe_import_specifier = if let Some(import_map) = + resolver.maybe_import_map() + { + 
if let Ok(imports_specifier) = import_map.resolve(&specifier, &cwd_url) { + Some(imports_specifier) } else { None - }; + } + } else { + None + }; let specifier = match maybe_import_specifier { Some(specifier) => specifier, @@ -88,7 +93,8 @@ pub async fn info( JSON_SCHEMA_VERSION.into(), ); } - add_npm_packages_to_json(&mut json_graph, npm_resolver.as_ref()); + + add_npm_packages_to_json(&mut json_graph, npm_resolver.as_ref(), npmrc); display::write_json_to_stdout(&json_graph)?; } else { let mut output = String::new(); @@ -192,6 +198,7 @@ fn print_cache_info( fn add_npm_packages_to_json( json: &mut serde_json::Value, npm_resolver: &dyn CliNpmResolver, + npmrc: &ResolvedNpmRc, ) { let Some(npm_resolver) = npm_resolver.as_managed() else { return; // does not include byonm to deno info's output @@ -202,45 +209,28 @@ fn add_npm_packages_to_json( let json = json.as_object_mut().unwrap(); let modules = json.get_mut("modules").and_then(|m| m.as_array_mut()); if let Some(modules) = modules { - if modules.len() == 1 - && modules[0].get("kind").and_then(|k| k.as_str()) == Some("npm") - { - // If there is only one module and it's "external", then that means - // someone provided an npm specifier as a cli argument. In this case, - // we want to show which npm package the cli argument resolved to. - let module = &mut modules[0]; - let maybe_package = module - .get("specifier") - .and_then(|k| k.as_str()) - .and_then(|specifier| NpmPackageNvReference::from_str(specifier).ok()) - .and_then(|package_ref| { - snapshot - .resolve_package_from_deno_module(package_ref.nv()) - .ok() - }); - if let Some(pkg) = maybe_package { - if let Some(module) = module.as_object_mut() { - module - .insert("npmPackage".to_string(), pkg.id.as_serialized().into()); - } - } - } else { - // Filter out npm package references from the modules and instead - // have them only listed as dependencies. 
This is done because various - // npm specifiers modules in the graph are really just unresolved - // references. So there could be listed multiple npm specifiers - // that would resolve to a single npm package. - for i in (0..modules.len()).rev() { - if matches!( - modules[i].get("kind").and_then(|k| k.as_str()), - Some("npm") | Some("external") - ) { - modules.remove(i); + for module in modules.iter_mut() { + if matches!(module.get("kind").and_then(|k| k.as_str()), Some("npm")) { + // If there is only one module and it's "external", then that means + // someone provided an npm specifier as a cli argument. In this case, + // we want to show which npm package the cli argument resolved to. + let maybe_package = module + .get("specifier") + .and_then(|k| k.as_str()) + .and_then(|specifier| NpmPackageNvReference::from_str(specifier).ok()) + .and_then(|package_ref| { + snapshot + .resolve_package_from_deno_module(package_ref.nv()) + .ok() + }); + if let Some(pkg) = maybe_package { + if let Some(module) = module.as_object_mut() { + module + .insert("npmPackage".to_string(), pkg.id.as_serialized().into()); + } } } - } - for module in modules.iter_mut() { let dependencies = module .get_mut("dependencies") .and_then(|d| d.as_array_mut()); @@ -272,7 +262,7 @@ fn add_npm_packages_to_json( let mut json_packages = serde_json::Map::with_capacity(sorted_packages.len()); for pkg in sorted_packages { let mut kv = serde_json::Map::new(); - kv.insert("name".to_string(), pkg.id.nv.name.to_string().into()); + kv.insert("name".to_string(), pkg.id.nv.name.clone().into()); kv.insert("version".to_string(), pkg.id.nv.version.to_string().into()); let mut deps = pkg.dependencies.values().collect::<Vec<_>>(); deps.sort(); @@ -281,6 +271,8 @@ fn add_npm_packages_to_json( .map(|id| serde_json::Value::String(id.as_serialized())) .collect::<Vec<_>>(); kv.insert("dependencies".to_string(), deps.into()); + let registry_url = npmrc.get_registry_url(&pkg.id.nv.name); + 
kv.insert("registryUrl".to_string(), registry_url.to_string().into()); json_packages.insert(pkg.id.as_serialized(), kv.into()); } @@ -545,7 +537,7 @@ impl<'a> GraphDisplayContext<'a> { fn build_module_info(&mut self, module: &Module, type_dep: bool) -> TreeNode { enum PackageOrSpecifier { - Package(NpmResolutionPackage), + Package(Box<NpmResolutionPackage>), Specifier(ModuleSpecifier), } @@ -553,7 +545,7 @@ impl<'a> GraphDisplayContext<'a> { let package_or_specifier = match module.npm() { Some(npm) => match self.npm_info.resolve_package(npm.nv_reference.nv()) { - Some(package) => Package(package.clone()), + Some(package) => Package(Box::new(package.clone())), None => Specifier(module.specifier().clone()), // should never happen }, None => Specifier(module.specifier().clone()), @@ -660,10 +652,12 @@ impl<'a> GraphDisplayContext<'a> { let message = match err { HttpsChecksumIntegrity(_) => "(checksum integrity error)", Decode(_) => "(loading decode error)", - Loader(err) => match deno_core::error::get_custom_error_class(err) { - Some("NotCapable") => "(not capable, requires --allow-import)", - _ => "(loading error)", - }, + Loader(err) => { + match deno_runtime::errors::get_error_class_name(err) { + Some("NotCapable") => "(not capable, requires --allow-import)", + _ => "(loading error)", + } + } Jsr(_) => "(loading error)", NodeUnknownBuiltinModule(_) => "(unknown node built-in error)", Npm(_) => "(npm loading error)", diff --git a/cli/tools/init/mod.rs b/cli/tools/init/mod.rs index 2d6a894e1..4e4a686c5 100644 --- a/cli/tools/init/mod.rs +++ b/cli/tools/init/mod.rs @@ -24,32 +24,29 @@ pub fn init_project(init_flags: InitFlags) -> Result<(), AnyError> { create_file( &dir, "main.ts", - r#"import { type Route, route, serveDir } from "@std/http"; + r#"import { serveDir } from "@std/http"; -const routes: Route[] = [ - { - pattern: new URLPattern({ pathname: "/" }), - handler: () => new Response("Home page"), - }, - { - pattern: new URLPattern({ pathname: "/users/:id" }), - 
handler: (_req, _info, params) => new Response(params?.pathname.groups.id), - }, - { - pattern: new URLPattern({ pathname: "/static/*" }), - handler: (req) => serveDir(req), - }, -]; - -function defaultHandler(_req: Request) { - return new Response("Not found", { status: 404 }); -} - -const handler = route(routes, defaultHandler); +const userPagePattern = new URLPattern({ pathname: "/users/:id" }); +const staticPathPattern = new URLPattern({ pathname: "/static/*" }); export default { fetch(req) { - return handler(req); + const url = new URL(req.url); + + if (url.pathname === "/") { + return new Response("Home page"); + } + + const userPageMatch = userPagePattern.exec(url); + if (userPageMatch) { + return new Response(userPageMatch.pathname.groups.id); + } + + if (staticPathPattern.test(url)) { + return serveDir(req); + } + + return new Response("Not found", { status: 404 }); }, } satisfies Deno.ServeDefaultExport; "#, diff --git a/cli/tools/installer.rs b/cli/tools/installer.rs index ed86e86c7..fe477a8e6 100644 --- a/cli/tools/installer.rs +++ b/cli/tools/installer.rs @@ -3,6 +3,7 @@ use crate::args::resolve_no_prompt; use crate::args::AddFlags; use crate::args::CaData; +use crate::args::CacheSetting; use crate::args::ConfigFlag; use crate::args::Flags; use crate::args::InstallFlags; @@ -13,8 +14,11 @@ use crate::args::TypeCheckMode; use crate::args::UninstallFlags; use crate::args::UninstallKind; use crate::factory::CliFactory; +use crate::file_fetcher::FileFetcher; use crate::graph_container::ModuleGraphContainer; use crate::http_util::HttpClientProvider; +use crate::jsr::JsrFetchResolver; +use crate::npm::NpmFetchResolver; use crate::util::fs::canonicalize_path_maybe_not_exists; use deno_core::anyhow::bail; @@ -354,12 +358,51 @@ async fn install_global( ) -> Result<(), AnyError> { // ensure the module is cached let factory = CliFactory::from_flags(flags.clone()); + + let http_client = factory.http_client_provider(); + let deps_http_cache = 
factory.global_http_cache()?; + let mut deps_file_fetcher = FileFetcher::new( + deps_http_cache.clone(), + CacheSetting::ReloadAll, + true, + http_client.clone(), + Default::default(), + None, + ); + + let npmrc = factory.cli_options().unwrap().npmrc(); + + deps_file_fetcher.set_download_log_level(log::Level::Trace); + let deps_file_fetcher = Arc::new(deps_file_fetcher); + let jsr_resolver = Arc::new(JsrFetchResolver::new(deps_file_fetcher.clone())); + let npm_resolver = Arc::new(NpmFetchResolver::new( + deps_file_fetcher.clone(), + npmrc.clone(), + )); + + let entry_text = install_flags_global.module_url.as_str(); + let req = super::registry::AddRmPackageReq::parse(entry_text); + + // found a package requirement but missing the prefix + if let Ok(Err(package_req)) = req { + if jsr_resolver.req_to_nv(&package_req).await.is_some() { + bail!( + "{entry_text} is missing a prefix. Did you mean `{}`?", + crate::colors::yellow(format!("deno install -g jsr:{package_req}")) + ); + } else if npm_resolver.req_to_nv(&package_req).await.is_some() { + bail!( + "{entry_text} is missing a prefix. Did you mean `{}`?", + crate::colors::yellow(format!("deno install -g npm:{package_req}")) + ); + } + } + factory .main_module_graph_container() .await? 
.load_and_type_check_files(&[install_flags_global.module_url.clone()]) .await?; - let http_client = factory.http_client_provider(); // create the install shim create_install_shim(http_client, &flags, install_flags_global).await @@ -1396,6 +1439,7 @@ mod tests { .env_clear() // use the deno binary in the target directory .env("PATH", test_util::target_dir()) + .env("RUST_BACKTRACE", "1") .spawn() .unwrap() .wait() diff --git a/cli/tools/jupyter/install.rs b/cli/tools/jupyter/install.rs index b0ddc948d..aeff89ccf 100644 --- a/cli/tools/jupyter/install.rs +++ b/cli/tools/jupyter/install.rs @@ -58,9 +58,9 @@ pub fn install() -> Result<(), AnyError> { let f = std::fs::File::create(kernel_json_path)?; serde_json::to_writer_pretty(f, &json_data)?; - install_icon(&user_data_dir, "logo-32x32.png", DENO_ICON_32)?; - install_icon(&user_data_dir, "logo-64x64.png", DENO_ICON_64)?; - install_icon(&user_data_dir, "logo-svg.svg", DENO_ICON_SVG)?; + install_icon(&kernel_dir, "logo-32x32.png", DENO_ICON_32)?; + install_icon(&kernel_dir, "logo-64x64.png", DENO_ICON_64)?; + install_icon(&kernel_dir, "logo-svg.svg", DENO_ICON_SVG)?; log::info!("✅ Deno kernelspec installed successfully."); Ok(()) diff --git a/cli/tools/jupyter/mod.rs b/cli/tools/jupyter/mod.rs index 0ffd0da1e..732f95c49 100644 --- a/cli/tools/jupyter/mod.rs +++ b/cli/tools/jupyter/mod.rs @@ -61,7 +61,7 @@ pub async fn kernel( let factory = CliFactory::from_flags(flags); let cli_options = factory.cli_options()?; let main_module = - resolve_url_or_path("./$deno$jupyter.ts", cli_options.initial_cwd()) + resolve_url_or_path("./$deno$jupyter.mts", cli_options.initial_cwd()) .unwrap(); // TODO(bartlomieju): should we run with all permissions? 
let permissions = diff --git a/cli/tools/jupyter/resources/deno-logo-32x32.png b/cli/tools/jupyter/resources/deno-logo-32x32.png Binary files differindex 97871a02e..d59f251a2 100644 --- a/cli/tools/jupyter/resources/deno-logo-32x32.png +++ b/cli/tools/jupyter/resources/deno-logo-32x32.png diff --git a/cli/tools/jupyter/resources/deno-logo-64x64.png b/cli/tools/jupyter/resources/deno-logo-64x64.png Binary files differindex 1b9444ef6..37e98abaf 100644 --- a/cli/tools/jupyter/resources/deno-logo-64x64.png +++ b/cli/tools/jupyter/resources/deno-logo-64x64.png diff --git a/cli/tools/jupyter/resources/deno-logo-svg.svg b/cli/tools/jupyter/resources/deno-logo-svg.svg index d7bb9ef80..fbc22cd91 100644 --- a/cli/tools/jupyter/resources/deno-logo-svg.svg +++ b/cli/tools/jupyter/resources/deno-logo-svg.svg @@ -1 +1,17 @@ -<svg viewBox="0 0 30 30" fill="none" xmlns="http://www.w3.org/2000/svg"><g clip-path="url(#clip0_29_599)"><path d="M15 0C23.2843 0 30 6.71572 30 15C30 23.2843 23.2843 30 15 30C6.71572 30 0 23.2843 0 15C0 6.71572 6.71572 0 15 0Z" fill="currentColor"></path><path d="M14.6635 22.3394C14.2788 22.2357 13.8831 22.4584 13.7705 22.8381L13.7655 22.8558L12.7694 26.5472L12.7649 26.565C12.6711 26.9498 12.9011 27.3414 13.2858 27.4451C13.6704 27.549 14.0661 27.3263 14.1787 26.9465L14.1837 26.9289L15.1797 23.2375L15.1843 23.2196C15.1911 23.1919 15.1962 23.164 15.1997 23.1362L15.2026 23.1084L15.179 22.9888L15.1445 22.8166L15.1227 22.7091C15.076 22.619 15.0111 22.5396 14.932 22.4759C14.853 22.4123 14.7615 22.3658 14.6635 22.3394ZM7.7224 18.5379C7.70424 18.5741 7.68883 18.6123 7.67658 18.6522L7.66967 18.6763L6.67358 22.3677L6.669 22.3856C6.57525 22.7704 6.80524 23.1619 7.1899 23.2657C7.57451 23.3695 7.97026 23.1469 8.08287 22.7671L8.08779 22.7494L8.99096 19.4023C8.51793 19.1518 8.09336 18.8628 7.7224 18.5379ZM5.34707 14.2929C4.9624 14.1891 4.56666 14.4117 4.4541 14.7915L4.44912 14.8092L3.45303 18.5006L3.44846 18.5184C3.35471 18.9032 3.58469 19.2947 3.96936 19.3985C4.35397 
19.5023 4.74971 19.2797 4.86232 18.8999L4.86725 18.8822L5.86334 15.1908L5.86791 15.173C5.96166 14.7882 5.73174 14.3967 5.34707 14.2929ZM27.682 13.4546C27.2973 13.3508 26.9015 13.5734 26.789 13.9532L26.784 13.9709L25.7879 17.6623L25.7833 17.6801C25.6896 18.0649 25.9196 18.4564 26.3042 18.5602C26.6889 18.664 27.0846 18.4414 27.1972 18.0616L27.2021 18.0439L28.1982 14.3525L28.2028 14.3347C28.2965 13.9499 28.0666 13.5584 27.682 13.4546ZM3.17781 8.52527C2.34361 10.0444 1.81243 11.7112 1.61377 13.4329C1.7088 13.5412 1.83381 13.619 1.97301 13.6563C2.35768 13.7602 2.75342 13.5375 2.86598 13.1577L2.87096 13.1401L3.86705 9.44865L3.87162 9.43084C3.96537 9.04599 3.73539 8.65447 3.35072 8.5507C3.2943 8.53547 3.23623 8.52694 3.17781 8.52527ZM25.159 8.5507C24.7744 8.44687 24.3786 8.66953 24.266 9.04933L24.2611 9.06697L23.265 12.7584L23.2604 12.7762C23.1667 13.161 23.3966 13.5526 23.7813 13.6563C24.1659 13.7602 24.5617 13.5375 24.6743 13.1577L24.6792 13.1401L25.6753 9.44865L25.6799 9.43084C25.7736 9.04599 25.5436 8.65447 25.159 8.5507Z" fill="white"></path><path d="M7.51285 5.04065C7.12824 4.93682 6.73249 5.15948 6.61988 5.53929L6.61495 5.55692L5.61886 9.24833L5.61429 9.26614C5.52054 9.65098 5.75052 10.0425 6.13519 10.1463C6.5198 10.2501 6.91554 10.0274 7.02816 9.64764L7.03308 9.63001L8.02917 5.9386L8.03374 5.92079C8.12749 5.53595 7.89751 5.14442 7.51285 5.04065ZM20.3116 5.73845C19.9269 5.63462 19.5312 5.85727 19.4186 6.23708L19.4136 6.25471L18.7443 8.73499C19.1779 8.94915 19.5917 9.20126 19.9809 9.48839L20.0453 9.53643L20.8279 6.63639L20.8324 6.61858C20.9262 6.23374 20.6963 5.84221 20.3116 5.73845ZM13.7968 1.57642C13.3296 1.61771 12.8647 1.68338 12.4043 1.77317L12.3066 1.79263L11.3782 5.23419L11.3736 5.252C11.2799 5.63684 11.5099 6.02837 11.8945 6.13214C12.2792 6.23596 12.6749 6.01331 12.7875 5.6335L12.7924 5.61587L13.7885 1.92446L13.7931 1.90665C13.8196 1.79831 13.8209 1.68533 13.7968 1.57642ZM22.9626 4.1263L22.7669 4.85169L22.7623 4.86944C22.6686 5.25429 22.8986 5.64581 23.2832 
5.74958C23.6678 5.85341 24.0636 5.63075 24.1762 5.25095L24.1811 5.23331L24.2025 5.15462C23.8362 4.81205 23.4511 4.49009 23.0491 4.19022L22.9626 4.1263ZM17.1672 1.69677L16.8139 3.00593L16.8094 3.02374C16.7156 3.40858 16.9456 3.80011 17.3303 3.90388C17.7149 4.0077 18.1106 3.78505 18.2233 3.40524L18.2282 3.38761L18.6 2.00966C18.1624 1.88867 17.719 1.79001 17.2714 1.71405L17.1672 1.69677Z" fill="white"></path><path d="M9.69085 24.6253C9.80341 24.2455 10.1992 24.0229 10.5838 24.1266C10.9685 24.2303 11.1984 24.6219 11.1047 25.0068L11.1001 25.0246L10.3872 27.6664L10.2876 27.6297C9.85836 27.4694 9.43765 27.2873 9.0271 27.0839L9.68587 24.6429L9.69085 24.6253Z" fill="white"></path><path d="M14.4141 8.49082C10.0522 8.49082 6.65918 11.2368 6.65918 14.6517C6.65918 17.8769 9.78123 19.9362 14.6211 19.8331C15.0327 19.8243 15.1517 20.1008 15.2856 20.4734C15.4196 20.846 15.7796 22.8097 16.0665 24.3117C16.3233 25.656 16.5842 27.0052 16.7834 28.3596C19.9439 27.9418 22.8663 26.3807 25.0076 24.0261L22.7237 15.5088C22.1544 13.4518 21.489 11.5564 19.7283 10.1794C18.3118 9.07166 16.5122 8.49082 14.4141 8.49082Z" fill="white"></path><path d="M15.3516 10.957C15.8694 10.957 16.2891 11.3767 16.2891 11.8945C16.2891 12.4123 15.8694 12.832 15.3516 12.832C14.8338 12.832 14.4141 12.4123 14.4141 11.8945C14.4141 11.3767 14.8338 10.957 15.3516 10.957Z" fill="currentColor"></path></g><defs><clipPath id="clip0_29_599"><rect width="30" height="30" fill="white"></rect></clipPath></defs></svg> +<svg width="100%" height="100%" viewBox="0 0 441 441" version="1.1" xmlns="http://www.w3.org/2000/svg" + xmlns:xlink="http://www.w3.org/1999/xlink" xml:space="preserve" xmlns:serif="http://www.serif.com/" + style="fill-rule:evenodd;clip-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:2;"> + <g transform="matrix(1.02631,-2.08167e-17,2.08167e-17,1.02631,-0.525826,-0.525138)"> + <path + d="M37.965,296.635C26.441,271.766 20.009,244.065 20.009,214.873C20.009,207.318 20.439,199.863 21.278,192.531C22.129,185.123 
23.39,177.852 25.036,170.742C34.286,130.852 55.801,95.64 85.384,69.301C110.233,47.207 140.674,31.444 174.043,24.299C187.212,21.486 200.872,20.006 214.875,20.006C219.783,20.011 224.727,20.2 229.701,20.579C253.285,22.38 275.571,28.317 295.904,37.625C312.305,45.143 327.486,54.87 341.064,66.426C375.17,95.48 398.957,135.953 406.867,181.369C408.757,192.255 409.742,203.45 409.742,214.873C409.738,219.789 409.548,224.74 409.168,229.721C407.731,248.545 403.659,266.542 397.34,283.379C388.521,306.83 375.308,328.136 358.706,346.294C337.113,368.342 309.673,378.152 286.755,377.744C270.09,377.447 253.784,370.816 242.516,361.114C226.42,347.253 219.918,331.409 217.69,313.729C217.136,309.334 217.461,297.358 219.748,289.066C221.453,282.885 225.777,270.948 232.1,265.727C224.703,262.541 215.183,255.604 212.182,252.274C211.445,251.455 211.54,250.174 212.2,249.292C212.861,248.41 214.02,248.062 215.057,248.435C221.416,250.618 229.161,252.771 237.327,254.137C248.067,255.932 261.424,258.194 274.955,258.859C307.946,260.479 342.407,245.67 353.103,216.207C363.798,186.744 359.649,157.602 321.279,140.121C282.909,122.64 265.185,101.856 234.183,89.32C213.934,81.131 191.396,85.992 168.257,98.78C105.931,133.223 50.092,242.048 75.833,342.873C76.201,344.252 75.58,345.705 74.328,346.392C73.156,347.036 71.713,346.852 70.741,345.962C63.25,337.731 56.454,328.857 50.445,319.433C45.796,312.139 41.623,304.524 37.965,296.635Z" /> + </g> + <g transform="matrix(0.0920293,0.00428099,-0.00428099,0.0920293,-28.1272,-500.301)"> + <path + d="M3053.7,5296.9C4371.65,5296.9 5441.66,6366.91 5441.66,7684.86C5441.66,9002.81 4371.65,10072.8 3053.7,10072.8C1735.75,10072.8 665.74,9002.81 665.74,7684.86C665.74,6366.91 1735.75,5296.9 3053.7,5296.9ZM3745.03,8143.22C3594.12,8142.82 3444.31,8124.57 3323.87,8110.15C3232.29,8099.18 3144.99,8079.23 3073.1,8058.23C3061.36,8054.62 3048.65,8059.09 3041.75,8069.24C3034.86,8079.4 3034.46,8093.71 3043.09,8102.44C3078.21,8137.94 3187.74,8210.21 3271.7,8241.83C3204.04,8303.2 3162.1,8438.28 
3146.33,8507.94C3125.17,8601.4 3127.75,8734.83 3136.19,8783.45C3170.14,8979.04 3250.69,9151.99 3436.99,9297.9C3567.4,9400.03 3752.28,9465.38 3937.88,9460.06C4194.01,9452.71 4495.48,9328.51 4724.65,9070.17C5023.25,8710.58 5208.52,8252.45 5223.47,7749.5C5259.08,6551.9 4315.7,5550.69 3118.1,5515.08C1920.51,5479.47 919.301,6422.86 883.689,7620.45C865.246,8240.66 1109.37,8808.21 1515.43,9216.2C1526.73,9227.39 1544.21,9229.43 1557.78,9221.14C1571.35,9212.85 1577.51,9196.36 1572.7,9181.2C1234.07,8072.55 1799.11,6832.64 2474.84,6417.1C2725.71,6262.82 2973.99,6197.06 3203.56,6277.7C3555.04,6401.15 3763.03,6623.26 4199.06,6797.93C4635.09,6972.59 4696.35,7294.74 4592.58,7628.14C4488.81,7961.54 4113,8144.17 3745.03,8143.22ZM2917.17,6442.51C2777.75,6459.97 2693.93,6637.44 2687.08,6749.42C2680.18,6861.39 2744.03,7042.7 2926.19,7030.63C3139.52,7016.49 3195.89,6830.7 3164.24,6654.94C3140.48,6522.94 3033.73,6427.9 2917.17,6442.51Z" + style="fill:white;" /> + </g> + <g transform="matrix(7.12289,0.543899,-0.543899,7.12289,-4867.49,-1040.55)"> + <path + d="M721.316,105.751C722.813,105.518 724.225,106.703 724.582,108.395C725.058,110.649 724.402,113.065 721.658,113.329C719.314,113.555 718.422,111.242 718.468,109.796C718.513,108.35 719.525,106.03 721.316,105.751Z" /> + </g> +</svg> diff --git a/cli/tools/jupyter/server.rs b/cli/tools/jupyter/server.rs index 0cd80f7dd..5680ed4c1 100644 --- a/cli/tools/jupyter/server.rs +++ b/cli/tools/jupyter/server.rs @@ -329,7 +329,12 @@ impl JupyterServer { }) .collect(); - (candidates, cursor_pos - prop_name.len()) + if prop_name.len() > cursor_pos { + // TODO(bartlomieju): most likely not correct, but better than panicking because of sub with overflow + (candidates, cursor_pos) + } else { + (candidates, cursor_pos - prop_name.len()) + } } else { // combine results of declarations and globalThis properties let mut candidates = get_expression_property_names( @@ -349,7 +354,12 @@ impl JupyterServer { candidates.sort(); candidates.dedup(); // make sure 
to sort first - (candidates, cursor_pos - expr.len()) + if expr.len() > cursor_pos { + // TODO(bartlomieju): most likely not correct, but better than panicking because of sub with overflow + (candidates, cursor_pos) + } else { + (candidates, cursor_pos - expr.len()) + } }; connection diff --git a/cli/tools/lint/mod.rs b/cli/tools/lint/mod.rs index e096b486e..fcefb4587 100644 --- a/cli/tools/lint/mod.rs +++ b/cli/tools/lint/mod.rs @@ -63,7 +63,7 @@ pub use rules::LintRuleProvider; const JSON_SCHEMA_VERSION: u8 = 1; -static STDIN_FILE_NAME: &str = "$deno$stdin.ts"; +static STDIN_FILE_NAME: &str = "$deno$stdin.mts"; pub async fn lint( flags: Arc<Flags>, @@ -80,6 +80,7 @@ pub async fn lint( file_watcher::PrintConfig::new("Lint", !watch_flags.no_clear_screen), move |flags, watcher_communicator, changed_paths| { let lint_flags = lint_flags.clone(); + watcher_communicator.show_path_changed(changed_paths.clone()); Ok(async move { let factory = CliFactory::from_flags(flags); let cli_options = factory.cli_options()?; @@ -191,7 +192,7 @@ pub async fn lint( linter.finish() }; if !success { - std::process::exit(1); + deno_runtime::exit(1); } } @@ -435,6 +436,7 @@ fn collect_lint_files( }) .ignore_git_folder() .ignore_node_modules() + .use_gitignore() .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned)) .collect_file_patterns(&deno_config::fs::RealDenoConfigFs, files) } diff --git a/cli/tools/lint/rules/no_sloppy_imports.rs b/cli/tools/lint/rules/no_sloppy_imports.rs index 2f6087588..94bf9a7c6 100644 --- a/cli/tools/lint/rules/no_sloppy_imports.rs +++ b/cli/tools/lint/rules/no_sloppy_imports.rs @@ -87,6 +87,7 @@ impl LintRule for NoSloppyImportsRule { captures: Default::default(), }; + // fill this and capture the sloppy imports in the resolver deno_graph::parse_module_from_ast(deno_graph::ParseModuleFromAstOptions { graph_kind: deno_graph::GraphKind::All, specifier: context.specifier().clone(), diff --git a/cli/tools/registry/mod.rs 
b/cli/tools/registry/mod.rs index 4098d62e3..12289c581 100644 --- a/cli/tools/registry/mod.rs +++ b/cli/tools/registry/mod.rs @@ -69,6 +69,7 @@ pub use pm::add; pub use pm::cache_top_level_deps; pub use pm::remove; pub use pm::AddCommandName; +pub use pm::AddRmPackageReq; use publish_order::PublishOrderGraph; use unfurl::SpecifierUnfurler; diff --git a/cli/tools/registry/pm.rs b/cli/tools/registry/pm.rs index 5dc042620..c1ea2c75e 100644 --- a/cli/tools/registry/pm.rs +++ b/cli/tools/registry/pm.rs @@ -1,32 +1,25 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -mod cache_deps; - -pub use cache_deps::cache_top_level_deps; -use deno_semver::jsr::JsrPackageReqReference; -use deno_semver::npm::NpmPackageReqReference; -use deno_semver::VersionReq; - -use std::borrow::Cow; +use std::path::Path; use std::path::PathBuf; use std::sync::Arc; -use deno_ast::TextChange; -use deno_config::deno_json::FmtOptionsConfig; -use deno_core::anyhow::anyhow; use deno_core::anyhow::bail; use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::futures::FutureExt; use deno_core::futures::StreamExt; -use deno_core::serde_json; -use deno_core::ModuleSpecifier; -use deno_runtime::deno_node; +use deno_path_util::url_to_file_path; +use deno_semver::jsr::JsrPackageReqReference; +use deno_semver::npm::NpmPackageReqReference; +use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; -use indexmap::IndexMap; -use jsonc_parser::ast::ObjectProp; -use jsonc_parser::ast::Value; -use yoke::Yoke; +use deno_semver::Version; +use deno_semver::VersionReq; +use jsonc_parser::cst::CstObject; +use jsonc_parser::cst::CstObjectProp; +use jsonc_parser::cst::CstRootNode; +use jsonc_parser::json; use crate::args::AddFlags; use crate::args::CacheSetting; @@ -38,236 +31,181 @@ use crate::file_fetcher::FileFetcher; use crate::jsr::JsrFetchResolver; use crate::npm::NpmFetchResolver; -enum DenoConfigFormat { - Json, - Jsonc, -} +mod cache_deps; -impl 
DenoConfigFormat { - fn from_specifier(spec: &ModuleSpecifier) -> Result<Self, AnyError> { - let file_name = spec - .path_segments() - .ok_or_else(|| anyhow!("Empty path in deno config specifier: {spec}"))? - .last() - .unwrap(); - match file_name { - "deno.json" => Ok(Self::Json), - "deno.jsonc" => Ok(Self::Jsonc), - _ => bail!("Unsupported deno config file: {file_name}"), - } - } -} +pub use cache_deps::cache_top_level_deps; -struct DenoConfig { - config: Arc<deno_config::deno_json::ConfigFile>, - format: DenoConfigFormat, - imports: IndexMap<String, String>, +#[derive(Debug, Copy, Clone)] +enum ConfigKind { + DenoJson, + PackageJson, } -fn deno_json_imports( - config: &deno_config::deno_json::ConfigFile, -) -> Result<IndexMap<String, String>, AnyError> { - Ok( - config - .json - .imports - .clone() - .map(|imports| { - serde_json::from_value(imports) - .map_err(|err| anyhow!("Malformed \"imports\" configuration: {err}")) - }) - .transpose()? - .unwrap_or_default(), - ) +struct ConfigUpdater { + kind: ConfigKind, + cst: CstRootNode, + root_object: CstObject, + path: PathBuf, + modified: bool, } -impl DenoConfig { - fn from_options(options: &CliOptions) -> Result<Option<Self>, AnyError> { - let start_dir = &options.start_dir; - if let Some(config) = start_dir.maybe_deno_json() { - Ok(Some(Self { - imports: deno_json_imports(config)?, - config: config.clone(), - format: DenoConfigFormat::from_specifier(&config.specifier)?, - })) - } else { - Ok(None) - } - } - fn add(&mut self, selected: SelectedPackage) { - self.imports.insert( - selected.import_name, - format!("{}@{}", selected.package_name, selected.version_req), - ); +impl ConfigUpdater { + fn new( + kind: ConfigKind, + config_file_path: PathBuf, + ) -> Result<Self, AnyError> { + let config_file_contents = std::fs::read_to_string(&config_file_path) + .with_context(|| { + format!("Reading config file '{}'", config_file_path.display()) + })?; + let cst = CstRootNode::parse(&config_file_contents, 
&Default::default()) + .with_context(|| { + format!("Parsing config file '{}'", config_file_path.display()) + })?; + let root_object = cst.object_value_or_set(); + Ok(Self { + kind, + cst, + root_object, + path: config_file_path, + modified: false, + }) } - fn remove(&mut self, package: &str) -> bool { - self.imports.shift_remove(package).is_some() + fn display_path(&self) -> String { + deno_path_util::url_from_file_path(&self.path) + .map(|u| u.to_string()) + .unwrap_or_else(|_| self.path.display().to_string()) } - fn take_import_fields( - &mut self, - ) -> Vec<(&'static str, IndexMap<String, String>)> { - vec![("imports", std::mem::take(&mut self.imports))] + fn obj(&self) -> &CstObject { + &self.root_object } -} -impl NpmConfig { - fn from_options(options: &CliOptions) -> Result<Option<Self>, AnyError> { - let start_dir = &options.start_dir; - if let Some(pkg_json) = start_dir.maybe_pkg_json() { - Ok(Some(Self { - dependencies: pkg_json.dependencies.clone().unwrap_or_default(), - dev_dependencies: pkg_json.dev_dependencies.clone().unwrap_or_default(), - config: pkg_json.clone(), - fmt_options: None, - })) - } else { - Ok(None) - } + fn contents(&self) -> String { + self.cst.to_string() } fn add(&mut self, selected: SelectedPackage, dev: bool) { - let (name, version) = package_json_dependency_entry(selected); - if dev { - self.dependencies.swap_remove(&name); - self.dev_dependencies.insert(name, version); - } else { - self.dev_dependencies.swap_remove(&name); - self.dependencies.insert(name, version); + fn insert_index(object: &CstObject, searching_name: &str) -> usize { + object + .properties() + .into_iter() + .take_while(|prop| { + let prop_name = + prop.name().and_then(|name| name.decoded_value().ok()); + match prop_name { + Some(current_name) => { + searching_name.cmp(¤t_name) == std::cmp::Ordering::Greater + } + None => true, + } + }) + .count() } - } - fn remove(&mut self, package: &str) -> bool { - let in_deps = 
self.dependencies.shift_remove(package).is_some(); - let in_dev_deps = self.dev_dependencies.shift_remove(package).is_some(); - in_deps || in_dev_deps - } - - fn take_import_fields( - &mut self, - ) -> Vec<(&'static str, IndexMap<String, String>)> { - vec![ - ("dependencies", std::mem::take(&mut self.dependencies)), - ( - "devDependencies", - std::mem::take(&mut self.dev_dependencies), - ), - ] - } -} - -struct NpmConfig { - config: Arc<deno_node::PackageJson>, - fmt_options: Option<FmtOptionsConfig>, - dependencies: IndexMap<String, String>, - dev_dependencies: IndexMap<String, String>, -} - -enum DenoOrPackageJson { - Deno(DenoConfig), - Npm(NpmConfig), -} - -impl From<DenoConfig> for DenoOrPackageJson { - fn from(config: DenoConfig) -> Self { - Self::Deno(config) - } -} - -impl From<NpmConfig> for DenoOrPackageJson { - fn from(config: NpmConfig) -> Self { - Self::Npm(config) - } -} + match self.kind { + ConfigKind::DenoJson => { + let imports = self.root_object.object_value_or_set("imports"); + let value = + format!("{}@{}", selected.package_name, selected.version_req); + if let Some(prop) = imports.get(&selected.import_name) { + prop.set_value(json!(value)); + } else { + let index = insert_index(&imports, &selected.import_name); + imports.insert(index, &selected.import_name, json!(value)); + } + } + ConfigKind::PackageJson => { + let deps_prop = self.root_object.get("dependencies"); + let dev_deps_prop = self.root_object.get("devDependencies"); + + let dependencies = if dev { + self + .root_object + .object_value("devDependencies") + .unwrap_or_else(|| { + let index = deps_prop + .as_ref() + .map(|p| p.property_index() + 1) + .unwrap_or_else(|| self.root_object.properties().len()); + self + .root_object + .insert(index, "devDependencies", json!({})) + .object_value_or_set() + }) + } else { + self + .root_object + .object_value("dependencies") + .unwrap_or_else(|| { + let index = dev_deps_prop + .as_ref() + .map(|p| p.property_index()) + .unwrap_or_else(|| 
self.root_object.properties().len()); + self + .root_object + .insert(index, "dependencies", json!({})) + .object_value_or_set() + }) + }; + let other_dependencies = if dev { + deps_prop.and_then(|p| p.value().and_then(|v| v.as_object())) + } else { + dev_deps_prop.and_then(|p| p.value().and_then(|v| v.as_object())) + }; -/// Wrapper around `jsonc_parser::ast::Object` that can be stored in a `Yoke` -#[derive(yoke::Yokeable)] -struct JsoncObjectView<'a>(jsonc_parser::ast::Object<'a>); + let (alias, value) = package_json_dependency_entry(selected); -struct ConfigUpdater { - config: DenoOrPackageJson, - // the `Yoke` is so we can carry the parsed object (which borrows from - // the source) along with the source itself - ast: Yoke<JsoncObjectView<'static>, String>, - path: PathBuf, - modified: bool, -} + if let Some(other) = other_dependencies { + if let Some(prop) = other.get(&alias) { + remove_prop_and_maybe_parent_prop(prop); + } + } -impl ConfigUpdater { - fn obj(&self) -> &jsonc_parser::ast::Object<'_> { - &self.ast.get().0 - } - fn contents(&self) -> &str { - self.ast.backing_cart() - } - async fn maybe_new( - config: Option<impl Into<DenoOrPackageJson>>, - ) -> Result<Option<Self>, AnyError> { - if let Some(config) = config { - Ok(Some(Self::new(config.into()).await?)) - } else { - Ok(None) - } - } - async fn new(config: DenoOrPackageJson) -> Result<Self, AnyError> { - let specifier = config.specifier(); - if specifier.scheme() != "file" { - bail!("Can't update a remote configuration file"); - } - let config_file_path = specifier.to_file_path().map_err(|_| { - anyhow!("Specifier {specifier:?} is an invalid file path") - })?; - let config_file_contents = { - let contents = tokio::fs::read_to_string(&config_file_path) - .await - .with_context(|| { - format!("Reading config file at: {}", config_file_path.display()) - })?; - if contents.trim().is_empty() { - "{}\n".into() - } else { - contents + if let Some(prop) = dependencies.get(&alias) { + 
prop.set_value(json!(value)); + } else { + let index = insert_index(&dependencies, &alias); + dependencies.insert(index, &alias, json!(value)); + } } - }; - let ast = Yoke::try_attach_to_cart(config_file_contents, |contents| { - let ast = jsonc_parser::parse_to_ast( - contents, - &Default::default(), - &Default::default(), - ) - .with_context(|| { - format!("Failed to parse config file at {}", specifier) - })?; - let obj = match ast.value { - Some(Value::Object(obj)) => obj, - _ => bail!( - "Failed to update config file at {}, expected an object", - specifier - ), - }; - Ok(JsoncObjectView(obj)) - })?; - Ok(Self { - config, - ast, - path: config_file_path, - modified: false, - }) - } - - fn add(&mut self, selected: SelectedPackage, dev: bool) { - match &mut self.config { - DenoOrPackageJson::Deno(deno) => deno.add(selected), - DenoOrPackageJson::Npm(npm) => npm.add(selected, dev), } + self.modified = true; } fn remove(&mut self, package: &str) -> bool { - let removed = match &mut self.config { - DenoOrPackageJson::Deno(deno) => deno.remove(package), - DenoOrPackageJson::Npm(npm) => npm.remove(package), + let removed = match self.kind { + ConfigKind::DenoJson => { + if let Some(prop) = self + .root_object + .object_value("imports") + .and_then(|i| i.get(package)) + { + remove_prop_and_maybe_parent_prop(prop); + true + } else { + false + } + } + ConfigKind::PackageJson => { + let deps = [ + self + .root_object + .object_value("dependencies") + .and_then(|deps| deps.get(package)), + self + .root_object + .object_value("devDependencies") + .and_then(|deps| deps.get(package)), + ]; + let removed = deps.iter().any(|d| d.is_some()); + for dep in deps.into_iter().flatten() { + remove_prop_and_maybe_parent_prop(dep); + } + removed + } }; if removed { self.modified = true; @@ -275,76 +213,28 @@ impl ConfigUpdater { removed } - async fn commit(mut self) -> Result<(), AnyError> { + fn commit(&self) -> Result<(), AnyError> { if !self.modified { return Ok(()); } - let 
import_fields = self.config.take_import_fields(); - - let fmt_config_options = self.config.fmt_options(); - - let new_text = update_config_file_content( - self.obj(), - self.contents(), - fmt_config_options, - import_fields.into_iter().map(|(k, v)| { - ( - k, - if v.is_empty() { - None - } else { - Some(generate_imports(v.into_iter().collect())) - }, - ) - }), - self.config.file_name(), - ); - - tokio::fs::write(&self.path, new_text).await?; + let new_text = self.contents(); + std::fs::write(&self.path, new_text).with_context(|| { + format!("failed writing to '{}'", self.path.display()) + })?; Ok(()) } } -impl DenoOrPackageJson { - fn specifier(&self) -> Cow<ModuleSpecifier> { - match self { - Self::Deno(d, ..) => Cow::Borrowed(&d.config.specifier), - Self::Npm(n, ..) => Cow::Owned(n.config.specifier()), - } - } - - fn fmt_options(&self) -> FmtOptionsConfig { - match self { - DenoOrPackageJson::Deno(deno, ..) => deno - .config - .to_fmt_config() - .ok() - .map(|f| f.options) - .unwrap_or_default(), - DenoOrPackageJson::Npm(config) => { - config.fmt_options.clone().unwrap_or_default() - } - } - } - - fn take_import_fields( - &mut self, - ) -> Vec<(&'static str, IndexMap<String, String>)> { - match self { - Self::Deno(d) => d.take_import_fields(), - Self::Npm(n) => n.take_import_fields(), - } - } - - fn file_name(&self) -> &'static str { - match self { - DenoOrPackageJson::Deno(config) => match config.format { - DenoConfigFormat::Json => "deno.json", - DenoConfigFormat::Jsonc => "deno.jsonc", - }, - DenoOrPackageJson::Npm(..) 
=> "package.json", - } +fn remove_prop_and_maybe_parent_prop(prop: CstObjectProp) { + let parent = prop.parent().unwrap().as_object().unwrap(); + prop.remove(); + if parent.properties().is_empty() { + let parent_property = parent.parent().unwrap(); + let root_object = parent_property.parent().unwrap().as_object().unwrap(); + // remove the property + parent_property.remove(); + root_object.ensure_multiline(); } } @@ -363,7 +253,14 @@ fn package_json_dependency_entry( selected: SelectedPackage, ) -> (String, String) { if let Some(npm_package) = selected.package_name.strip_prefix("npm:") { - (npm_package.into(), selected.version_req) + if selected.import_name == npm_package { + (npm_package.into(), selected.version_req) + } else { + ( + selected.import_name, + format!("npm:{}@{}", npm_package, selected.version_req), + ) + } } else if let Some(jsr_package) = selected.package_name.strip_prefix("jsr:") { let jsr_package = jsr_package.strip_prefix('@').unwrap_or(jsr_package); let scope_replaced = jsr_package.replace('/', "__"); @@ -393,21 +290,45 @@ impl std::fmt::Display for AddCommandName { fn load_configs( flags: &Arc<Flags>, -) -> Result<(CliFactory, Option<NpmConfig>, Option<DenoConfig>), AnyError> { + has_jsr_specifiers: impl FnOnce() -> bool, +) -> Result<(CliFactory, Option<ConfigUpdater>, Option<ConfigUpdater>), AnyError> +{ let cli_factory = CliFactory::from_flags(flags.clone()); let options = cli_factory.cli_options()?; - let npm_config = NpmConfig::from_options(options)?; - let (cli_factory, deno_config) = match DenoConfig::from_options(options)? 
{ + let start_dir = &options.start_dir; + let npm_config = match start_dir.maybe_pkg_json() { + Some(pkg_json) => Some(ConfigUpdater::new( + ConfigKind::PackageJson, + pkg_json.path.clone(), + )?), + None => None, + }; + let deno_config = match start_dir.maybe_deno_json() { + Some(deno_json) => Some(ConfigUpdater::new( + ConfigKind::DenoJson, + url_to_file_path(&deno_json.specifier)?, + )?), + None => None, + }; + + let (cli_factory, deno_config) = match deno_config { Some(config) => (cli_factory, Some(config)), - None if npm_config.is_some() => (cli_factory, None), - None => { + None if npm_config.is_some() && !has_jsr_specifiers() => { + (cli_factory, None) + } + _ => { let factory = create_deno_json(flags, options)?; let options = factory.cli_options()?.clone(); + let deno_json = options + .start_dir + .maybe_deno_json() + .expect("Just created deno.json"); ( factory, - Some( - DenoConfig::from_options(&options)?.expect("Just created deno.json"), - ), + Some(ConfigUpdater::new( + ConfigKind::DenoJson, + url_to_file_path(&deno_json.specifier)?, + )?), ) } }; @@ -415,18 +336,26 @@ fn load_configs( Ok((cli_factory, npm_config, deno_config)) } +fn path_distance(a: &Path, b: &Path) -> usize { + let diff = pathdiff::diff_paths(a, b); + let Some(diff) = diff else { + return usize::MAX; + }; + diff.components().count() +} + pub async fn add( flags: Arc<Flags>, add_flags: AddFlags, cmd_name: AddCommandName, ) -> Result<(), AnyError> { - let (cli_factory, npm_config, deno_config) = load_configs(&flags)?; - let mut npm_config = ConfigUpdater::maybe_new(npm_config).await?; - let mut deno_config = ConfigUpdater::maybe_new(deno_config).await?; + let (cli_factory, mut npm_config, mut deno_config) = + load_configs(&flags, || { + add_flags.packages.iter().any(|s| s.starts_with("jsr:")) + })?; if let Some(deno) = &deno_config { - let specifier = deno.config.specifier(); - if deno.obj().get_string("importMap").is_some() { + if deno.obj().get("importMap").is_some() { bail!( 
concat!( "`deno {}` is not supported when configuration file contains an \"importMap\" field. ", @@ -434,11 +363,26 @@ pub async fn add( " at {}", ), cmd_name, - specifier + deno.display_path(), ); } } + let start_dir = cli_factory.cli_options()?.start_dir.dir_path(); + + // only prefer to add npm deps to `package.json` if there isn't a closer deno.json. + // example: if deno.json is in the CWD and package.json is in the parent, we should add + // npm deps to deno.json, since it's closer + let prefer_npm_config = match (npm_config.as_ref(), deno_config.as_ref()) { + (Some(npm), Some(deno)) => { + let npm_distance = path_distance(&npm.path, &start_dir); + let deno_distance = path_distance(&deno.path, &start_dir); + npm_distance <= deno_distance + } + (Some(_), None) => true, + (None, _) => false, + }; + let http_client = cli_factory.http_client_provider(); let deps_http_cache = cli_factory.global_http_cache()?; let mut deps_file_fetcher = FileFetcher::new( @@ -449,16 +393,20 @@ pub async fn add( Default::default(), None, ); + + let npmrc = cli_factory.cli_options().unwrap().npmrc(); + deps_file_fetcher.set_download_log_level(log::Level::Trace); let deps_file_fetcher = Arc::new(deps_file_fetcher); let jsr_resolver = Arc::new(JsrFetchResolver::new(deps_file_fetcher.clone())); - let npm_resolver = Arc::new(NpmFetchResolver::new(deps_file_fetcher)); + let npm_resolver = + Arc::new(NpmFetchResolver::new(deps_file_fetcher, npmrc.clone())); let mut selected_packages = Vec::with_capacity(add_flags.packages.len()); let mut package_reqs = Vec::with_capacity(add_flags.packages.len()); for entry_text in add_flags.packages.iter() { - let req = AddPackageReq::parse(entry_text).with_context(|| { + let req = AddRmPackageReq::parse(entry_text).with_context(|| { format!("Failed to parse package required: {}", entry_text) })?; @@ -509,15 +457,32 @@ pub async fn add( match package_and_version { PackageAndVersion::NotFound { package: package_name, - found_npm_package, + help, 
package_req, - } => { - if found_npm_package { - bail!("{} was not found, but a matching npm package exists. Did you mean `{}`?", crate::colors::red(package_name), crate::colors::yellow(format!("deno {cmd_name} npm:{package_req}"))); - } else { - bail!("{} was not found.", crate::colors::red(package_name)); + } => match help { + Some(NotFoundHelp::NpmPackage) => { + bail!( + "{} was not found, but a matching npm package exists. Did you mean `{}`?", + crate::colors::red(package_name), + crate::colors::yellow(format!("deno {cmd_name} npm:{package_req}")) + ); } - } + Some(NotFoundHelp::JsrPackage) => { + bail!( + "{} was not found, but a matching jsr package exists. Did you mean `{}`?", + crate::colors::red(package_name), + crate::colors::yellow(format!("deno {cmd_name} jsr:{package_req}")) + ) + } + Some(NotFoundHelp::PreReleaseVersion(version)) => { + bail!( + "{} has only pre-release versions available. Try specifying a version: `{}`", + crate::colors::red(&package_name), + crate::colors::yellow(format!("deno {cmd_name} {package_name}@^{version}")) + ) + } + None => bail!("{} was not found.", crate::colors::red(package_name)), + }, PackageAndVersion::Selected(selected) => { selected_packages.push(selected); } @@ -533,7 +498,7 @@ pub async fn add( selected_package.selected_version ); - if selected_package.package_name.starts_with("npm:") { + if selected_package.package_name.starts_with("npm:") && prefer_npm_config { if let Some(npm) = &mut npm_config { npm.add(selected_package, dev); } else { @@ -546,18 +511,11 @@ pub async fn add( } } - let mut commit_futures = vec![]; if let Some(npm) = npm_config { - commit_futures.push(npm.commit()); + npm.commit()?; } if let Some(deno) = deno_config { - commit_futures.push(deno.commit()); - } - let commit_futures = - deno_core::futures::future::join_all(commit_futures).await; - - for result in commit_futures { - result.context("Failed to update configuration file")?; + deno.commit()?; } npm_install_after_modification(flags, 
Some(jsr_resolver)).await?; @@ -572,87 +530,161 @@ struct SelectedPackage { selected_version: String, } +enum NotFoundHelp { + NpmPackage, + JsrPackage, + PreReleaseVersion(Version), +} + enum PackageAndVersion { NotFound { package: String, - found_npm_package: bool, package_req: PackageReq, + help: Option<NotFoundHelp>, }, Selected(SelectedPackage), } +fn best_version<'a>( + versions: impl Iterator<Item = &'a Version>, +) -> Option<&'a Version> { + let mut maybe_best_version: Option<&Version> = None; + for version in versions { + let is_best_version = maybe_best_version + .as_ref() + .map(|best_version| (*best_version).cmp(version).is_lt()) + .unwrap_or(true); + if is_best_version { + maybe_best_version = Some(version); + } + } + maybe_best_version +} + +trait PackageInfoProvider { + const SPECIFIER_PREFIX: &str; + /// The help to return if a package is found by this provider + const HELP: NotFoundHelp; + async fn req_to_nv(&self, req: &PackageReq) -> Option<PackageNv>; + async fn latest_version<'a>(&self, req: &PackageReq) -> Option<Version>; +} + +impl PackageInfoProvider for Arc<JsrFetchResolver> { + const HELP: NotFoundHelp = NotFoundHelp::JsrPackage; + const SPECIFIER_PREFIX: &str = "jsr"; + async fn req_to_nv(&self, req: &PackageReq) -> Option<PackageNv> { + (**self).req_to_nv(req).await + } + + async fn latest_version<'a>(&self, req: &PackageReq) -> Option<Version> { + let info = self.package_info(&req.name).await?; + best_version( + info + .versions + .iter() + .filter(|(_, version_info)| !version_info.yanked) + .map(|(version, _)| version), + ) + .cloned() + } +} + +impl PackageInfoProvider for Arc<NpmFetchResolver> { + const HELP: NotFoundHelp = NotFoundHelp::NpmPackage; + const SPECIFIER_PREFIX: &str = "npm"; + async fn req_to_nv(&self, req: &PackageReq) -> Option<PackageNv> { + (**self).req_to_nv(req).await + } + + async fn latest_version<'a>(&self, req: &PackageReq) -> Option<Version> { + let info = self.package_info(&req.name).await?; + 
best_version(info.versions.keys()).cloned() + } +} + async fn find_package_and_select_version_for_req( jsr_resolver: Arc<JsrFetchResolver>, npm_resolver: Arc<NpmFetchResolver>, - add_package_req: AddPackageReq, + add_package_req: AddRmPackageReq, ) -> Result<PackageAndVersion, AnyError> { - match add_package_req.value { - AddPackageReqValue::Jsr(req) => { - let jsr_prefixed_name = format!("jsr:{}", &req.name); - let Some(nv) = jsr_resolver.req_to_nv(&req).await else { - if npm_resolver.req_to_nv(&req).await.is_some() { + async fn select<T: PackageInfoProvider, S: PackageInfoProvider>( + main_resolver: T, + fallback_resolver: S, + add_package_req: AddRmPackageReq, + ) -> Result<PackageAndVersion, AnyError> { + let req = match &add_package_req.value { + AddRmPackageReqValue::Jsr(req) => req, + AddRmPackageReqValue::Npm(req) => req, + }; + let prefixed_name = format!("{}:{}", T::SPECIFIER_PREFIX, req.name); + let help_if_found_in_fallback = S::HELP; + let Some(nv) = main_resolver.req_to_nv(req).await else { + if fallback_resolver.req_to_nv(req).await.is_some() { + // it's in the other registry + return Ok(PackageAndVersion::NotFound { + package: prefixed_name, + help: Some(help_if_found_in_fallback), + package_req: req.clone(), + }); + } + if req.version_req.version_text() == "*" { + if let Some(pre_release_version) = + main_resolver.latest_version(req).await + { return Ok(PackageAndVersion::NotFound { - package: jsr_prefixed_name, - found_npm_package: true, - package_req: req, + package: prefixed_name, + package_req: req.clone(), + help: Some(NotFoundHelp::PreReleaseVersion( + pre_release_version.clone(), + )), }); } + } - return Ok(PackageAndVersion::NotFound { - package: jsr_prefixed_name, - found_npm_package: false, - package_req: req, - }); - }; - let range_symbol = if req.version_req.version_text().starts_with('~') { - '~' - } else { - '^' - }; - Ok(PackageAndVersion::Selected(SelectedPackage { - import_name: add_package_req.alias, - package_name: 
jsr_prefixed_name, - version_req: format!("{}{}", range_symbol, &nv.version), - selected_version: nv.version.to_string(), - })) + return Ok(PackageAndVersion::NotFound { + package: prefixed_name, + help: None, + package_req: req.clone(), + }); + }; + let range_symbol = if req.version_req.version_text().starts_with('~') { + "~" + } else if req.version_req.version_text() == nv.version.to_string() { + "" + } else { + "^" + }; + Ok(PackageAndVersion::Selected(SelectedPackage { + import_name: add_package_req.alias, + package_name: prefixed_name, + version_req: format!("{}{}", range_symbol, &nv.version), + selected_version: nv.version.to_string(), + })) + } + + match &add_package_req.value { + AddRmPackageReqValue::Jsr(_) => { + select(jsr_resolver, npm_resolver, add_package_req).await } - AddPackageReqValue::Npm(req) => { - let npm_prefixed_name = format!("npm:{}", &req.name); - let Some(nv) = npm_resolver.req_to_nv(&req).await else { - return Ok(PackageAndVersion::NotFound { - package: npm_prefixed_name, - found_npm_package: false, - package_req: req, - }); - }; - let range_symbol = if req.version_req.version_text().starts_with('~') { - '~' - } else { - '^' - }; - Ok(PackageAndVersion::Selected(SelectedPackage { - import_name: add_package_req.alias, - package_name: npm_prefixed_name, - version_req: format!("{}{}", range_symbol, &nv.version), - selected_version: nv.version.to_string(), - })) + AddRmPackageReqValue::Npm(_) => { + select(npm_resolver, jsr_resolver, add_package_req).await } } } #[derive(Debug, PartialEq, Eq)] -enum AddPackageReqValue { +enum AddRmPackageReqValue { Jsr(PackageReq), Npm(PackageReq), } #[derive(Debug, PartialEq, Eq)] -struct AddPackageReq { +pub struct AddRmPackageReq { alias: String, - value: AddPackageReqValue, + value: AddRmPackageReqValue, } -impl AddPackageReq { +impl AddRmPackageReq { pub fn parse(entry_text: &str) -> Result<Result<Self, PackageReq>, AnyError> { enum Prefix { Jsr, @@ -707,9 +739,9 @@ impl AddPackageReq { let req_ref = 
JsrPackageReqReference::from_str(&format!("jsr:{}", entry_text))?; let package_req = req_ref.into_inner().req; - Ok(Ok(AddPackageReq { + Ok(Ok(AddRmPackageReq { alias: maybe_alias.unwrap_or_else(|| package_req.name.to_string()), - value: AddPackageReqValue::Jsr(package_req), + value: AddRmPackageReqValue::Jsr(package_req), })) } Prefix::Npm => { @@ -727,49 +759,48 @@ impl AddPackageReq { deno_semver::RangeSetOrTag::Tag("latest".into()), ); } - Ok(Ok(AddPackageReq { + Ok(Ok(AddRmPackageReq { alias: maybe_alias.unwrap_or_else(|| package_req.name.to_string()), - value: AddPackageReqValue::Npm(package_req), + value: AddRmPackageReqValue::Npm(package_req), })) } } } } -fn generate_imports(mut packages_to_version: Vec<(String, String)>) -> String { - packages_to_version.sort_by(|(k1, _), (k2, _)| k1.cmp(k2)); - let mut contents = vec![]; - let len = packages_to_version.len(); - for (index, (package, version)) in packages_to_version.iter().enumerate() { - // TODO(bartlomieju): fix it, once we start support specifying version on the cli - contents.push(format!("\"{}\": \"{}\"", package, version)); - if index != len - 1 { - contents.push(",".to_string()); - } - } - contents.join("\n") -} - pub async fn remove( flags: Arc<Flags>, remove_flags: RemoveFlags, ) -> Result<(), AnyError> { - let (_, npm_config, deno_config) = load_configs(&flags)?; + let (_, npm_config, deno_config) = load_configs(&flags, || false)?; - let mut configs = [ - ConfigUpdater::maybe_new(npm_config).await?, - ConfigUpdater::maybe_new(deno_config).await?, - ]; + let mut configs = [npm_config, deno_config]; let mut removed_packages = vec![]; for package in &remove_flags.packages { - let mut removed = false; + let req = AddRmPackageReq::parse(package).with_context(|| { + format!("Failed to parse package required: {}", package) + })?; + let mut parsed_pkg_name = None; for config in configs.iter_mut().flatten() { - removed |= config.remove(package); + match &req { + Ok(rm_pkg) => { + if 
config.remove(&rm_pkg.alias) && parsed_pkg_name.is_none() { + parsed_pkg_name = Some(rm_pkg.alias.clone()); + } + } + Err(pkg) => { + // An alias or a package name without registry/version + // constraints. Try to remove the package anyway. + if config.remove(&pkg.name) && parsed_pkg_name.is_none() { + parsed_pkg_name = Some(pkg.name.clone()); + } + } + } } - if removed { - removed_packages.push(package.clone()); + if let Some(pkg) = parsed_pkg_name { + removed_packages.push(pkg); } } @@ -780,7 +811,7 @@ pub async fn remove( log::info!("Removed {}", crate::colors::green(package)); } for config in configs.into_iter().flatten() { - config.commit().await?; + config.commit()?; } npm_install_after_modification(flags, None).await?; @@ -807,88 +838,11 @@ async fn npm_install_after_modification( // npm install cache_deps::cache_top_level_deps(&cli_factory, jsr_resolver).await?; - Ok(()) -} - -fn update_config_file_content< - I: IntoIterator<Item = (&'static str, Option<String>)>, ->( - obj: &jsonc_parser::ast::Object, - config_file_contents: &str, - fmt_options: FmtOptionsConfig, - entries: I, - file_name: &str, -) -> String { - let mut text_changes = vec![]; - for (key, value) in entries { - match obj.properties.iter().enumerate().find_map(|(idx, k)| { - if k.name.as_str() == key { - Some((idx, k)) - } else { - None - } - }) { - Some(( - idx, - ObjectProp { - value: Value::Object(lit), - range, - .. 
- }, - )) => { - if let Some(value) = value { - text_changes.push(TextChange { - range: (lit.range.start + 1)..(lit.range.end - 1), - new_text: value, - }) - } else { - text_changes.push(TextChange { - // remove field entirely, making sure to - // remove the comma if it's not the last field - range: range.start..(if idx == obj.properties.len() - 1 { - range.end - } else { - obj.properties[idx + 1].range.start - }), - new_text: "".to_string(), - }) - } - } - - // need to add field - None => { - if let Some(value) = value { - let insert_position = obj.range.end - 1; - text_changes.push(TextChange { - range: insert_position..insert_position, - // NOTE(bartlomieju): adding `\n` here to force the formatter to always - // produce a config file that is multiline, like so: - // ``` - // { - // "imports": { - // "<package_name>": "<registry>:<package_name>@<semver>" - // } - // } - new_text: format!("\"{key}\": {{\n {value} }}"), - }) - } - } - // we verified the shape of `imports`/`dependencies` above - Some(_) => unreachable!(), - } + if let Some(lockfile) = cli_factory.cli_options()?.maybe_lockfile() { + lockfile.write_if_changed()?; } - let new_text = - deno_ast::apply_text_changes(config_file_contents, text_changes); - - crate::tools::fmt::format_json( - &PathBuf::from(file_name), - &new_text, - &fmt_options, - ) - .ok() - .map(|formatted_text| formatted_text.unwrap_or_else(|| new_text.clone())) - .unwrap_or(new_text) + Ok(()) } #[cfg(test)] @@ -898,48 +852,52 @@ mod test { #[test] fn test_parse_add_package_req() { assert_eq!( - AddPackageReq::parse("jsr:foo").unwrap().unwrap(), - AddPackageReq { + AddRmPackageReq::parse("jsr:foo").unwrap().unwrap(), + AddRmPackageReq { alias: "foo".to_string(), - value: AddPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap()) + value: AddRmPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap()) } ); assert_eq!( - AddPackageReq::parse("alias@jsr:foo").unwrap().unwrap(), - AddPackageReq { + 
AddRmPackageReq::parse("alias@jsr:foo").unwrap().unwrap(), + AddRmPackageReq { alias: "alias".to_string(), - value: AddPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap()) + value: AddRmPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap()) } ); assert_eq!( - AddPackageReq::parse("@alias/pkg@npm:foo").unwrap().unwrap(), - AddPackageReq { + AddRmPackageReq::parse("@alias/pkg@npm:foo") + .unwrap() + .unwrap(), + AddRmPackageReq { alias: "@alias/pkg".to_string(), - value: AddPackageReqValue::Npm( + value: AddRmPackageReqValue::Npm( PackageReq::from_str("foo@latest").unwrap() ) } ); assert_eq!( - AddPackageReq::parse("@alias/pkg@jsr:foo").unwrap().unwrap(), - AddPackageReq { + AddRmPackageReq::parse("@alias/pkg@jsr:foo") + .unwrap() + .unwrap(), + AddRmPackageReq { alias: "@alias/pkg".to_string(), - value: AddPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap()) + value: AddRmPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap()) } ); assert_eq!( - AddPackageReq::parse("alias@jsr:foo@^1.5.0") + AddRmPackageReq::parse("alias@jsr:foo@^1.5.0") .unwrap() .unwrap(), - AddPackageReq { + AddRmPackageReq { alias: "alias".to_string(), - value: AddPackageReqValue::Jsr( + value: AddRmPackageReqValue::Jsr( PackageReq::from_str("foo@^1.5.0").unwrap() ) } ); assert_eq!( - AddPackageReq::parse("@scope/pkg@tag") + AddRmPackageReq::parse("@scope/pkg@tag") .unwrap() .unwrap_err() .to_string(), diff --git a/cli/tools/registry/pm/cache_deps.rs b/cli/tools/registry/pm/cache_deps.rs index c8258e600..d3c8da868 100644 --- a/cli/tools/registry/pm/cache_deps.rs +++ b/cli/tools/registry/pm/cache_deps.rs @@ -44,7 +44,11 @@ pub async fn cache_top_level_deps( let mut seen_reqs = std::collections::HashSet::new(); - for entry in import_map.imports().entries() { + for entry in import_map.imports().entries().chain( + import_map + .scopes() + .flat_map(|scope| scope.imports.entries()), + ) { let Some(specifier) = entry.value else { continue; }; @@ -75,6 +79,13 @@ pub async fn 
cache_top_level_deps( if entry.key.ends_with('/') && specifier.as_str().ends_with('/') { continue; } + if specifier.scheme() == "file" { + if let Ok(path) = specifier.to_file_path() { + if !path.is_file() { + continue; + } + } + } roots.push(specifier.clone()); } } @@ -82,10 +93,6 @@ pub async fn cache_top_level_deps( while let Some(info_future) = info_futures.next().await { if let Some((specifier, info)) = info_future { - if info.export(".").is_some() { - roots.push(specifier.clone()); - continue; - } let exports = info.exports(); for (k, _) in exports { if let Ok(spec) = specifier.join(k) { diff --git a/cli/tools/registry/tar.rs b/cli/tools/registry/tar.rs index aca125e00..6d1801ce6 100644 --- a/cli/tools/registry/tar.rs +++ b/cli/tools/registry/tar.rs @@ -120,7 +120,7 @@ fn resolve_content_maybe_unfurling( | MediaType::Unknown | MediaType::Json | MediaType::Wasm - | MediaType::TsBuildInfo => { + | MediaType::Css => { // not unfurlable data return Ok(data); } diff --git a/cli/tools/repl/session.rs b/cli/tools/repl/session.rs index 484664dae..8e05c4abb 100644 --- a/cli/tools/repl/session.rs +++ b/cli/tools/repl/session.rs @@ -7,7 +7,7 @@ use crate::cdp; use crate::colors; use crate::lsp::ReplLanguageServer; use crate::npm::CliNpmResolver; -use crate::resolver::CliGraphResolver; +use crate::resolver::CliResolver; use crate::tools::test::report_tests; use crate::tools::test::reporters::PrettyTestReporter; use crate::tools::test::reporters::TestReporter; @@ -25,6 +25,7 @@ use deno_ast::swc::visit::noop_visit_type; use deno_ast::swc::visit::Visit; use deno_ast::swc::visit::VisitWith; use deno_ast::ImportsNotUsedAsValues; +use deno_ast::ModuleKind; use deno_ast::ModuleSpecifier; use deno_ast::ParseDiagnosticsError; use deno_ast::ParsedSource; @@ -43,12 +44,12 @@ use deno_core::url::Url; use deno_core::LocalInspectorSession; use deno_core::PollEventLoopOptions; use deno_graph::source::ResolutionMode; -use deno_graph::source::Resolver; use deno_graph::Position; use 
deno_graph::PositionRange; use deno_graph::SpecifierWithRange; use deno_runtime::worker::MainWorker; use deno_semver::npm::NpmPackageReqReference; +use node_resolver::NodeModuleKind; use once_cell::sync::Lazy; use regex::Match; use regex::Regex; @@ -179,7 +180,7 @@ struct ReplJsxState { pub struct ReplSession { npm_resolver: Arc<dyn CliNpmResolver>, - resolver: Arc<CliGraphResolver>, + resolver: Arc<CliResolver>, pub worker: MainWorker, session: LocalInspectorSession, pub context_id: u64, @@ -198,7 +199,7 @@ impl ReplSession { pub async fn initialize( cli_options: &CliOptions, npm_resolver: Arc<dyn CliNpmResolver>, - resolver: Arc<CliGraphResolver>, + resolver: Arc<CliResolver>, mut worker: MainWorker, main_module: ModuleSpecifier, test_event_receiver: TestEventReceiver, @@ -244,7 +245,7 @@ impl ReplSession { assert_ne!(context_id, 0); let referrer = - deno_core::resolve_path("./$deno$repl.ts", cli_options.initial_cwd()) + deno_core::resolve_path("./$deno$repl.mts", cli_options.initial_cwd()) .unwrap(); let cwd_url = @@ -641,6 +642,10 @@ impl ReplSession { jsx_fragment_factory: self.jsx.frag_factory.clone(), jsx_import_source: self.jsx.import_source.clone(), var_decl_imports: true, + verbatim_module_syntax: false, + }, + &deno_ast::TranspileModuleOptions { + module_kind: Some(ModuleKind::Esm), }, &deno_ast::EmitOptions { source_map: deno_ast::SourceMapOption::None, @@ -651,7 +656,6 @@ impl ReplSession { }, )? .into_source() - .into_string()? .text; let value = self @@ -708,7 +712,12 @@ impl ReplSession { .flat_map(|i| { self .resolver - .resolve(i, &referrer_range, ResolutionMode::Execution) + .resolve( + i, + &referrer_range, + NodeModuleKind::Esm, + ResolutionMode::Execution, + ) .ok() .or_else(|| ModuleSpecifier::parse(i).ok()) }) diff --git a/cli/tools/run/hmr.rs b/cli/tools/run/hmr.rs index 6ccf8e344..373c207d6 100644 --- a/cli/tools/run/hmr.rs +++ b/cli/tools/run/hmr.rs @@ -1,9 +1,9 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
-use crate::cdp; -use crate::emit::Emitter; -use crate::util::file_watcher::WatcherCommunicator; -use crate::util::file_watcher::WatcherRestartMode; +use std::collections::HashMap; +use std::path::PathBuf; +use std::sync::Arc; + use deno_core::error::generic_error; use deno_core::error::AnyError; use deno_core::futures::StreamExt; @@ -12,11 +12,13 @@ use deno_core::serde_json::{self}; use deno_core::url::Url; use deno_core::LocalInspectorSession; use deno_terminal::colors; -use std::collections::HashMap; -use std::path::PathBuf; -use std::sync::Arc; use tokio::select; +use crate::cdp; +use crate::emit::Emitter; +use crate::util::file_watcher::WatcherCommunicator; +use crate::util::file_watcher::WatcherRestartMode; + fn explain(status: &cdp::Status) -> &'static str { match status { cdp::Status::Ok => "OK", @@ -139,7 +141,7 @@ impl crate::worker::HmrRunner for HmrRunner { }; let source_code = self.emitter.load_and_emit_for_hmr( - &module_url + &module_url, ).await?; let mut tries = 1; diff --git a/cli/tools/run/mod.rs b/cli/tools/run/mod.rs index 152e2650b..8fab544ec 100644 --- a/cli/tools/run/mod.rs +++ b/cli/tools/run/mod.rs @@ -30,6 +30,16 @@ To grant permissions, set them before the script argument. 
For example: } } +fn set_npm_user_agent() { + static ONCE: std::sync::Once = std::sync::Once::new(); + ONCE.call_once(|| { + std::env::set_var( + crate::npm::NPM_CONFIG_USER_AGENT_ENV_VAR, + crate::npm::get_npm_config_user_agent(), + ); + }); +} + pub async fn run_script( mode: WorkerExecutionMode, flags: Arc<Flags>, @@ -58,6 +68,10 @@ pub async fn run_script( let main_module = cli_options.resolve_main_module()?; + if main_module.scheme() == "npm" { + set_npm_user_agent(); + } + maybe_npm_install(&factory).await?; let worker_factory = factory.create_cli_main_worker_factory().await?; @@ -110,7 +124,8 @@ async fn run_with_watch( !watch_flags.no_clear_screen, ), WatcherRestartMode::Automatic, - move |flags, watcher_communicator, _changed_paths| { + move |flags, watcher_communicator, changed_paths| { + watcher_communicator.show_path_changed(changed_paths.clone()); Ok(async move { let factory = CliFactory::from_flags_for_watcher( flags, @@ -119,6 +134,10 @@ async fn run_with_watch( let cli_options = factory.cli_options()?; let main_module = cli_options.resolve_main_module()?; + if main_module.scheme() == "npm" { + set_npm_user_agent(); + } + maybe_npm_install(&factory).await?; let _ = watcher_communicator.watch_paths(cli_options.watch_paths()); diff --git a/cli/tools/serve.rs b/cli/tools/serve.rs index 4ce1cad6f..d7989140a 100644 --- a/cli/tools/serve.rs +++ b/cli/tools/serve.rs @@ -44,12 +44,15 @@ pub async fn serve( maybe_npm_install(&factory).await?; let worker_factory = factory.create_cli_main_worker_factory().await?; - + let hmr = serve_flags + .watch + .map(|watch_flags| watch_flags.hmr) + .unwrap_or(false); do_serve( worker_factory, main_module.clone(), serve_flags.worker_count, - false, + hmr, ) .await } @@ -109,8 +112,6 @@ async fn do_serve( } } Ok(exit_code) - - // main.await? 
} async fn run_worker( @@ -119,7 +120,7 @@ async fn run_worker( main_module: ModuleSpecifier, hmr: bool, ) -> Result<i32, AnyError> { - let mut worker = worker_factory + let mut worker: crate::worker::CliMainWorker = worker_factory .create_main_worker( deno_runtime::WorkerExecutionMode::Serve { is_main: false, @@ -150,7 +151,8 @@ async fn serve_with_watch( !watch_flags.no_clear_screen, ), WatcherRestartMode::Automatic, - move |flags, watcher_communicator, _changed_paths| { + move |flags, watcher_communicator, changed_paths| { + watcher_communicator.show_path_changed(changed_paths.clone()); Ok(async move { let factory = CliFactory::from_flags_for_watcher( flags, diff --git a/cli/tools/task.rs b/cli/tools/task.rs index 502b09d2c..682dbf814 100644 --- a/cli/tools/task.rs +++ b/cli/tools/task.rs @@ -1,14 +1,14 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -use std::borrow::Cow; use std::collections::HashMap; use std::collections::HashSet; +use std::num::NonZeroUsize; use std::path::Path; use std::path::PathBuf; use std::rc::Rc; use std::sync::Arc; -use deno_config::deno_json::Task; +use deno_config::workspace::TaskDefinition; use deno_config::workspace::TaskOrScript; use deno_config::workspace::WorkspaceDirectory; use deno_config::workspace::WorkspaceTasksConfig; @@ -16,8 +16,15 @@ use deno_core::anyhow::anyhow; use deno_core::anyhow::bail; use deno_core::anyhow::Context; use deno_core::error::AnyError; +use deno_core::futures::future::LocalBoxFuture; +use deno_core::futures::stream::futures_unordered; +use deno_core::futures::FutureExt; +use deno_core::futures::StreamExt; +use deno_core::url::Url; use deno_path_util::normalize_path; +use deno_runtime::deno_node::NodeResolver; use deno_task_shell::ShellCommand; +use indexmap::IndexMap; use crate::args::CliOptions; use crate::args::Flags; @@ -48,155 +55,376 @@ pub async fn execute_script( v == "1" }) .unwrap_or(false); - let tasks_config = start_dir.to_tasks_config()?; - let tasks_config 
= if force_use_pkg_json { - tasks_config.with_only_pkg_json() - } else { - tasks_config - }; + let mut tasks_config = start_dir.to_tasks_config()?; + if force_use_pkg_json { + tasks_config = tasks_config.with_only_pkg_json() + } - let task_name = match &task_flags.task { - Some(task) => task, - None => { - print_available_tasks( - &mut std::io::stdout(), - &cli_options.start_dir, - &tasks_config, - )?; - return Ok(0); - } + let Some(task_name) = &task_flags.task else { + print_available_tasks( + &mut std::io::stdout(), + &cli_options.start_dir, + &tasks_config, + )?; + return Ok(0); }; let npm_resolver = factory.npm_resolver().await?; let node_resolver = factory.node_resolver().await?; let env_vars = task_runner::real_env_vars(); - match tasks_config.task(task_name) { - Some((dir_url, task_or_script)) => match task_or_script { - TaskOrScript::Task(_tasks, script) => { - let cwd = match task_flags.cwd { - Some(path) => canonicalize_path(&PathBuf::from(path)) - .context("failed canonicalizing --cwd")?, - None => normalize_path(dir_url.to_file_path().unwrap()), - }; - - let custom_commands = task_runner::resolve_custom_commands( - npm_resolver.as_ref(), - node_resolver, - )?; - run_task(RunTaskOptions { - task_name, - script, - cwd: &cwd, - env_vars, - custom_commands, - npm_resolver: npm_resolver.as_ref(), - cli_options, - }) - .await - } - TaskOrScript::Script(scripts, _script) => { - // ensure the npm packages are installed if using a managed resolver - if let Some(npm_resolver) = npm_resolver.as_managed() { - npm_resolver.ensure_top_level_package_json_install().await?; + let no_of_concurrent_tasks = if let Ok(value) = std::env::var("DENO_JOBS") { + value.parse::<NonZeroUsize>().ok() + } else { + std::thread::available_parallelism().ok() + } + .unwrap_or_else(|| NonZeroUsize::new(2).unwrap()); + + let task_runner = TaskRunner { + tasks_config, + task_flags: &task_flags, + npm_resolver: npm_resolver.as_ref(), + node_resolver: node_resolver.as_ref(), + env_vars, + 
cli_options, + concurrency: no_of_concurrent_tasks.into(), + }; + + task_runner.run_task(task_name).await +} + +struct RunSingleOptions<'a> { + task_name: &'a str, + script: &'a str, + cwd: &'a Path, + custom_commands: HashMap<String, Rc<dyn ShellCommand>>, +} + +struct TaskRunner<'a> { + tasks_config: WorkspaceTasksConfig, + task_flags: &'a TaskFlags, + npm_resolver: &'a dyn CliNpmResolver, + node_resolver: &'a NodeResolver, + env_vars: HashMap<String, String>, + cli_options: &'a CliOptions, + concurrency: usize, +} + +impl<'a> TaskRunner<'a> { + pub async fn run_task( + &self, + task_name: &str, + ) -> Result<i32, deno_core::anyhow::Error> { + match sort_tasks_topo(task_name, &self.tasks_config) { + Ok(sorted) => self.run_tasks_in_parallel(sorted).await, + Err(err) => match err { + TaskError::NotFound(name) => { + if self.task_flags.is_run { + return Err(anyhow!("Task not found: {}", name)); + } + + log::error!("Task not found: {}", name); + if log::log_enabled!(log::Level::Error) { + self.print_available_tasks()?; + } + Ok(1) + } + TaskError::TaskDepCycle { path } => { + log::error!("Task cycle detected: {}", path.join(" -> ")); + Ok(1) } + }, + } + } + + pub fn print_available_tasks(&self) -> Result<(), std::io::Error> { + print_available_tasks( + &mut std::io::stderr(), + &self.cli_options.start_dir, + &self.tasks_config, + ) + } - let cwd = match task_flags.cwd { - Some(path) => canonicalize_path(&PathBuf::from(path))?, - None => normalize_path(dir_url.to_file_path().unwrap()), - }; - - // At this point we already checked if the task name exists in package.json. 
- // We can therefore check for "pre" and "post" scripts too, since we're only - // dealing with package.json here and not deno.json - let task_names = vec![ - format!("pre{}", task_name), - task_name.clone(), - format!("post{}", task_name), - ]; - let custom_commands = task_runner::resolve_custom_commands( - npm_resolver.as_ref(), - node_resolver, - )?; - for task_name in &task_names { - if let Some(script) = scripts.get(task_name) { - let exit_code = run_task(RunTaskOptions { - task_name, - script, - cwd: &cwd, - env_vars: env_vars.clone(), - custom_commands: custom_commands.clone(), - npm_resolver: npm_resolver.as_ref(), - cli_options, - }) - .await?; - if exit_code > 0 { - return Ok(exit_code); + async fn run_tasks_in_parallel( + &self, + task_names: Vec<String>, + ) -> Result<i32, deno_core::anyhow::Error> { + struct PendingTasksContext { + completed: HashSet<String>, + running: HashSet<String>, + task_names: Vec<String>, + } + + impl PendingTasksContext { + fn has_remaining_tasks(&self) -> bool { + self.completed.len() < self.task_names.len() + } + + fn mark_complete(&mut self, task_name: String) { + self.running.remove(&task_name); + self.completed.insert(task_name); + } + + fn get_next_task<'a>( + &mut self, + runner: &'a TaskRunner<'a>, + ) -> Option<LocalBoxFuture<'a, Result<(i32, String), AnyError>>> { + for name in &self.task_names { + if self.completed.contains(name) || self.running.contains(name) { + continue; + } + + let should_run = if let Ok((_, def)) = runner.get_task(name) { + match def { + TaskOrScript::Task(_, def) => def + .dependencies + .iter() + .all(|dep| self.completed.contains(dep)), + TaskOrScript::Script(_, _) => true, } + } else { + false + }; + + if !should_run { + continue; } + + self.running.insert(name.clone()); + let name = name.clone(); + return Some( + async move { + runner + .run_task_no_dependencies(&name) + .await + .map(|exit_code| (exit_code, name)) + } + .boxed_local(), + ); } + None + } + } - Ok(0) + let mut context = 
PendingTasksContext { + completed: HashSet::with_capacity(task_names.len()), + running: HashSet::with_capacity(self.concurrency), + task_names, + }; + + let mut queue = futures_unordered::FuturesUnordered::new(); + + while context.has_remaining_tasks() { + while queue.len() < self.concurrency { + if let Some(task) = context.get_next_task(self) { + queue.push(task); + } else { + break; + } } - }, - None => { - if task_flags.is_run { - return Err(anyhow!("Task not found: {}", task_name)); + + // If queue is empty at this point, then there are no more tasks in the queue. + let Some(result) = queue.next().await else { + debug_assert_eq!(context.task_names.len(), 0); + break; + }; + + let (exit_code, name) = result?; + if exit_code > 0 { + return Ok(exit_code); } - log::error!("Task not found: {}", task_name); - if log::log_enabled!(log::Level::Error) { - print_available_tasks( - &mut std::io::stderr(), - &cli_options.start_dir, - &tasks_config, - )?; + + context.mark_complete(name); + } + + Ok(0) + } + + fn get_task( + &self, + task_name: &str, + ) -> Result<(&Url, TaskOrScript), TaskError> { + let Some(result) = self.tasks_config.task(task_name) else { + return Err(TaskError::NotFound(task_name.to_string())); + }; + + Ok(result) + } + + async fn run_task_no_dependencies( + &self, + task_name: &String, + ) -> Result<i32, deno_core::anyhow::Error> { + let (dir_url, task_or_script) = self.get_task(task_name.as_str()).unwrap(); + + match task_or_script { + TaskOrScript::Task(_tasks, definition) => { + self.run_deno_task(dir_url, task_name, definition).await + } + TaskOrScript::Script(scripts, _script) => { + self.run_npm_script(dir_url, task_name, scripts).await } - Ok(1) } } -} -struct RunTaskOptions<'a> { - task_name: &'a str, - script: &'a str, - cwd: &'a Path, - env_vars: HashMap<String, String>, - custom_commands: HashMap<String, Rc<dyn ShellCommand>>, - npm_resolver: &'a dyn CliNpmResolver, - cli_options: &'a CliOptions, -} + async fn run_deno_task( + &self, + 
dir_url: &Url, + task_name: &String, + definition: &TaskDefinition, + ) -> Result<i32, deno_core::anyhow::Error> { + let cwd = match &self.task_flags.cwd { + Some(path) => canonicalize_path(&PathBuf::from(path)) + .context("failed canonicalizing --cwd")?, + None => normalize_path(dir_url.to_file_path().unwrap()), + }; -async fn run_task(opts: RunTaskOptions<'_>) -> Result<i32, AnyError> { - let RunTaskOptions { - task_name, - script, - cwd, - env_vars, - custom_commands, - npm_resolver, - cli_options, - } = opts; + let custom_commands = task_runner::resolve_custom_commands( + self.npm_resolver, + self.node_resolver, + )?; + self + .run_single(RunSingleOptions { + task_name, + script: &definition.command, + cwd: &cwd, + custom_commands, + }) + .await + } - output_task( - opts.task_name, - &task_runner::get_script_with_args(script, cli_options.argv()), - ); + async fn run_npm_script( + &self, + dir_url: &Url, + task_name: &String, + scripts: &IndexMap<String, String>, + ) -> Result<i32, deno_core::anyhow::Error> { + // ensure the npm packages are installed if using a managed resolver + if let Some(npm_resolver) = self.npm_resolver.as_managed() { + npm_resolver.ensure_top_level_package_json_install().await?; + } + + let cwd = match &self.task_flags.cwd { + Some(path) => canonicalize_path(&PathBuf::from(path))?, + None => normalize_path(dir_url.to_file_path().unwrap()), + }; - Ok( - task_runner::run_task(task_runner::RunTaskOptions { + // At this point we already checked if the task name exists in package.json. 
+ // We can therefore check for "pre" and "post" scripts too, since we're only + // dealing with package.json here and not deno.json + let task_names = vec![ + format!("pre{}", task_name), + task_name.clone(), + format!("post{}", task_name), + ]; + let custom_commands = task_runner::resolve_custom_commands( + self.npm_resolver, + self.node_resolver, + )?; + for task_name in &task_names { + if let Some(script) = scripts.get(task_name) { + let exit_code = self + .run_single(RunSingleOptions { + task_name, + script, + cwd: &cwd, + custom_commands: custom_commands.clone(), + }) + .await?; + if exit_code > 0 { + return Ok(exit_code); + } + } + } + + Ok(0) + } + + async fn run_single( + &self, + opts: RunSingleOptions<'_>, + ) -> Result<i32, AnyError> { + let RunSingleOptions { task_name, script, cwd, - env_vars, custom_commands, - init_cwd: opts.cli_options.initial_cwd(), - argv: cli_options.argv(), - root_node_modules_dir: npm_resolver.root_node_modules_path(), - stdio: None, - }) - .await? - .exit_code, - ) + } = opts; + + output_task( + opts.task_name, + &task_runner::get_script_with_args(script, self.cli_options.argv()), + ); + + Ok( + task_runner::run_task(task_runner::RunTaskOptions { + task_name, + script, + cwd, + env_vars: self.env_vars.clone(), + custom_commands, + init_cwd: self.cli_options.initial_cwd(), + argv: self.cli_options.argv(), + root_node_modules_dir: self.npm_resolver.root_node_modules_path(), + stdio: None, + }) + .await? 
+ .exit_code, + ) + } +} + +#[derive(Debug)] +enum TaskError { + NotFound(String), + TaskDepCycle { path: Vec<String> }, +} + +fn sort_tasks_topo( + name: &str, + task_config: &WorkspaceTasksConfig, +) -> Result<Vec<String>, TaskError> { + fn sort_visit<'a>( + name: &'a str, + sorted: &mut Vec<String>, + mut path: Vec<&'a str>, + tasks_config: &'a WorkspaceTasksConfig, + ) -> Result<(), TaskError> { + // Already sorted + if sorted.iter().any(|sorted_name| sorted_name == name) { + return Ok(()); + } + + // Graph has a cycle + if path.contains(&name) { + path.push(name); + return Err(TaskError::TaskDepCycle { + path: path.iter().map(|s| s.to_string()).collect(), + }); + } + + let Some(def) = tasks_config.task(name) else { + return Err(TaskError::NotFound(name.to_string())); + }; + + if let TaskOrScript::Task(_, actual_def) = def.1 { + for dep in &actual_def.dependencies { + let mut path = path.clone(); + path.push(name); + sort_visit(dep, sorted, path, tasks_config)? + } + } + + sorted.push(name.to_string()); + + Ok(()) + } + + let mut sorted: Vec<String> = vec![]; + + sort_visit(name, &mut sorted, Vec::new(), task_config)?; + + Ok(sorted) } fn output_task(task_name: &str, script: &str) { @@ -222,80 +450,97 @@ fn print_available_tasks( " {}", colors::red("No tasks found in configuration file") )?; - } else { - let mut seen_task_names = - HashSet::with_capacity(tasks_config.tasks_count()); - for maybe_config in [&tasks_config.member, &tasks_config.root] { - let Some(config) = maybe_config else { - continue; - }; - for (is_root, is_deno, (key, task)) in config - .deno_json - .as_ref() - .map(|config| { - let is_root = !is_cwd_root_dir - && config.folder_url - == *workspace_dir.workspace.root_dir().as_ref(); - config - .tasks - .iter() - .map(move |(k, t)| (is_root, true, (k, Cow::Borrowed(t)))) - }) - .into_iter() - .flatten() - .chain( - config - .package_json - .as_ref() - .map(|config| { - let is_root = !is_cwd_root_dir - && config.folder_url - == 
*workspace_dir.workspace.root_dir().as_ref(); - config.tasks.iter().map(move |(k, v)| { - (is_root, false, (k, Cow::Owned(Task::Definition(v.clone())))) - }) - }) - .into_iter() - .flatten(), - ) - { - if !seen_task_names.insert(key) { + return Ok(()); + } + + struct AvailableTaskDescription { + is_root: bool, + is_deno: bool, + name: String, + task: TaskDefinition, + } + let mut seen_task_names = HashSet::with_capacity(tasks_config.tasks_count()); + let mut task_descriptions = Vec::with_capacity(tasks_config.tasks_count()); + + for maybe_config in [&tasks_config.member, &tasks_config.root] { + let Some(config) = maybe_config else { + continue; + }; + + if let Some(config) = config.deno_json.as_ref() { + let is_root = !is_cwd_root_dir + && config.folder_url == *workspace_dir.workspace.root_dir().as_ref(); + + for (name, definition) in &config.tasks { + if !seen_task_names.insert(name) { continue; // already seen } - writeln!( - writer, - "- {}{}", - colors::cyan(key), - if is_root { - if is_deno { - format!(" {}", colors::italic_gray("(workspace)")) - } else { - format!(" {}", colors::italic_gray("(workspace package.json)")) - } - } else if is_deno { - "".to_string() - } else { - format!(" {}", colors::italic_gray("(package.json)")) - } - )?; - let definition = match task.as_ref() { - Task::Definition(definition) => definition, - Task::Commented { definition, .. } => definition, - }; - if let Task::Commented { comments, .. 
} = task.as_ref() { - let slash_slash = colors::italic_gray("//"); - for comment in comments { - writeln!( - writer, - " {slash_slash} {}", - colors::italic_gray(comment) - )?; - } + task_descriptions.push(AvailableTaskDescription { + is_root, + is_deno: true, + name: name.to_string(), + task: definition.clone(), + }); + } + } + + if let Some(config) = config.package_json.as_ref() { + let is_root = !is_cwd_root_dir + && config.folder_url == *workspace_dir.workspace.root_dir().as_ref(); + for (name, script) in &config.tasks { + if !seen_task_names.insert(name) { + continue; // already seen } - writeln!(writer, " {definition}")?; + + task_descriptions.push(AvailableTaskDescription { + is_root, + is_deno: false, + name: name.to_string(), + task: deno_config::deno_json::TaskDefinition { + command: script.to_string(), + dependencies: vec![], + description: None, + }, + }); } } } + for desc in task_descriptions { + writeln!( + writer, + "- {}{}", + colors::cyan(desc.name), + if desc.is_root { + if desc.is_deno { + format!(" {}", colors::italic_gray("(workspace)")) + } else { + format!(" {}", colors::italic_gray("(workspace package.json)")) + } + } else if desc.is_deno { + "".to_string() + } else { + format!(" {}", colors::italic_gray("(package.json)")) + } + )?; + if let Some(description) = &desc.task.description { + let slash_slash = colors::italic_gray("//"); + writeln!( + writer, + " {slash_slash} {}", + colors::italic_gray(description) + )?; + } + writeln!(writer, " {}", desc.task.command)?; + if !desc.task.dependencies.is_empty() { + writeln!( + writer, + " {} {}", + colors::gray("depends on:"), + colors::cyan(desc.task.dependencies.join(", ")) + )?; + } + } + Ok(()) } diff --git a/cli/tools/test/mod.rs b/cli/tools/test/mod.rs index e81abad0b..6357ebcae 100644 --- a/cli/tools/test/mod.rs +++ b/cli/tools/test/mod.rs @@ -631,7 +631,7 @@ async fn configure_main_worker( "Deno[Deno.internal].core.setLeakTracingEnabled(true);", )?; } - let res = 
worker.execute_side_module_possibly_with_npm().await; + let res = worker.execute_side_module().await; let mut worker = worker.into_main_worker(); match res { Ok(()) => Ok(()), @@ -1357,6 +1357,7 @@ pub async fn report_tests( if let Err(err) = reporter.flush_report(&elapsed, &tests, &test_steps) { eprint!("Test reporter failed to flush: {}", err) } + #[allow(clippy::disallowed_methods)] std::process::exit(130); } } @@ -1642,6 +1643,7 @@ pub async fn run_tests_with_watch( loop { signal::ctrl_c().await.unwrap(); if !HAS_TEST_RUN_SIGINT_HANDLER.load(Ordering::Relaxed) { + #[allow(clippy::disallowed_methods)] std::process::exit(130); } } @@ -1659,6 +1661,7 @@ pub async fn run_tests_with_watch( ), move |flags, watcher_communicator, changed_paths| { let test_flags = test_flags.clone(); + watcher_communicator.show_path_changed(changed_paths.clone()); Ok(async move { let factory = CliFactory::from_flags_for_watcher( flags, diff --git a/cli/tools/upgrade.rs b/cli/tools/upgrade.rs index 7f21e6649..cb85859f7 100644 --- a/cli/tools/upgrade.rs +++ b/cli/tools/upgrade.rs @@ -540,7 +540,7 @@ pub async fn upgrade( let Some(archive_data) = download_package(&client, download_url).await? else { log::error!("Download could not be found, aborting"); - std::process::exit(1) + deno_runtime::exit(1) }; log::info!( @@ -579,6 +579,10 @@ pub async fn upgrade( let output_exe_path = full_path_output_flag.as_ref().unwrap_or(¤t_exe_path); + + #[cfg(windows)] + kill_running_deno_lsp_processes(); + let output_result = if *output_exe_path == current_exe_path { replace_exe(&new_exe_path, output_exe_path) } else { @@ -913,7 +917,7 @@ async fn download_package( // text above which will stay alive after the progress bars are complete let progress = progress_bar.update(""); let maybe_bytes = client - .download_with_progress(download_url.clone(), None, &progress) + .download_with_progress_and_retries(download_url.clone(), None, &progress) .await .with_context(|| format!("Failed downloading {download_url}. 
The version you requested may not have been built for the current architecture."))?; Ok(maybe_bytes) @@ -966,6 +970,34 @@ fn check_windows_access_denied_error( }) } +#[cfg(windows)] +fn kill_running_deno_lsp_processes() { + // limit this to `deno lsp` invocations to avoid killing important programs someone might be running + let is_debug = log::log_enabled!(log::Level::Debug); + let get_pipe = || { + if is_debug { + std::process::Stdio::inherit() + } else { + std::process::Stdio::null() + } + }; + let _ = Command::new("powershell.exe") + .args([ + "-Command", + r#"Get-WmiObject Win32_Process | Where-Object { + $_.Name -eq 'deno.exe' -and + $_.CommandLine -match '^(?:\"[^\"]+\"|\S+)\s+lsp\b' +} | ForEach-Object { + if ($_.Terminate()) { + Write-Host 'Terminated:' $_.ProcessId + } +}"#, + ]) + .stdout(get_pipe()) + .stderr(get_pipe()) + .output(); +} + fn set_exe_permissions( current_exe_path: &Path, output_exe_path: &Path, diff --git a/cli/tsc/99_main_compiler.js b/cli/tsc/99_main_compiler.js index 719f2b982..68d099253 100644 --- a/cli/tsc/99_main_compiler.js +++ b/cli/tsc/99_main_compiler.js @@ -121,8 +121,8 @@ delete Object.prototype.__proto__; /** @type {Map<string, ts.SourceFile>} */ const sourceFileCache = new Map(); - /** @type {Map<string, string>} */ - const sourceTextCache = new Map(); + /** @type {Map<string, ts.IScriptSnapshot & { isCjs?: boolean; }>} */ + const scriptSnapshotCache = new Map(); /** @type {Map<string, number>} */ const sourceRefCounts = new Map(); @@ -133,9 +133,6 @@ delete Object.prototype.__proto__; /** @type {Map<string, boolean>} */ const isNodeSourceFileCache = new Map(); - /** @type {Map<string, boolean>} */ - const isCjsCache = new Map(); - // Maps asset specifiers to the first scope that the asset was loaded into. 
/** @type {Map<string, string | null>} */ const assetScopes = new Map(); @@ -210,12 +207,13 @@ delete Object.prototype.__proto__; const mapKey = path + key; let sourceFile = documentRegistrySourceFileCache.get(mapKey); if (!sourceFile || sourceFile.version !== version) { + const isCjs = /** @type {any} */ (scriptSnapshot).isCjs; sourceFile = ts.createLanguageServiceSourceFile( fileName, scriptSnapshot, { ...getCreateSourceFileOptions(sourceFileOptions), - impliedNodeFormat: (isCjsCache.get(fileName) ?? false) + impliedNodeFormat: isCjs ? ts.ModuleKind.CommonJS : ts.ModuleKind.ESNext, // in the lsp we want to be able to show documentation @@ -320,7 +318,7 @@ delete Object.prototype.__proto__; if (lastRequestMethod != "cleanupSemanticCache") { const mapKey = path + key; documentRegistrySourceFileCache.delete(mapKey); - sourceTextCache.delete(path); + scriptSnapshotCache.delete(path); ops.op_release(path); } } else { @@ -516,7 +514,6 @@ delete Object.prototype.__proto__; /** @typedef {{ * ls: ts.LanguageService & { [k:string]: any }, * compilerOptions: ts.CompilerOptions, - * forceEnabledVerbatimModuleSyntax: boolean, * }} LanguageServiceEntry */ /** @type {{ unscoped: LanguageServiceEntry, byScope: Map<string, LanguageServiceEntry> }} */ const languageServiceEntries = { @@ -625,8 +622,6 @@ delete Object.prototype.__proto__; `"data" is unexpectedly null for "${specifier}".`, ); - isCjsCache.set(specifier, isCjs); - sourceFile = ts.createSourceFile( specifier, data, @@ -700,7 +695,7 @@ delete Object.prototype.__proto__; /** @type {[string, ts.Extension] | undefined} */ const resolved = ops.op_resolve( containingFilePath, - isCjsCache.get(containingFilePath) ?? 
false, + containingFileMode === ts.ModuleKind.CommonJS, [fileReference.fileName], )?.[0]; if (resolved) { @@ -724,7 +719,14 @@ delete Object.prototype.__proto__; } }); }, - resolveModuleNames(specifiers, base) { + resolveModuleNames( + specifiers, + base, + _reusedNames, + _redirectedReference, + _options, + containingSourceFile, + ) { if (logDebug) { debug(`host.resolveModuleNames()`); debug(` base: ${base}`); @@ -733,7 +735,7 @@ delete Object.prototype.__proto__; /** @type {Array<[string, ts.Extension] | undefined>} */ const resolved = ops.op_resolve( base, - isCjsCache.get(base) ?? false, + containingSourceFile?.impliedNodeFormat === ts.ModuleKind.CommonJS, specifiers, ); if (resolved) { @@ -802,27 +804,32 @@ delete Object.prototype.__proto__; if (logDebug) { debug(`host.getScriptSnapshot("${specifier}")`); } - const sourceFile = sourceFileCache.get(specifier); - if (sourceFile) { - if (!assetScopes.has(specifier)) { - assetScopes.set(specifier, lastRequestScope); + if (specifier.startsWith(ASSETS_URL_PREFIX)) { + const sourceFile = this.getSourceFile( + specifier, + ts.ScriptTarget.ESNext, + ); + if (sourceFile) { + if (!assetScopes.has(specifier)) { + assetScopes.set(specifier, lastRequestScope); + } + // This case only occurs for assets. + return ts.ScriptSnapshot.fromString(sourceFile.text); } - // This case only occurs for assets. 
- return ts.ScriptSnapshot.fromString(sourceFile.text); } - let sourceText = sourceTextCache.get(specifier); - if (sourceText == undefined) { + let scriptSnapshot = scriptSnapshotCache.get(specifier); + if (scriptSnapshot == undefined) { /** @type {{ data: string, version: string, isCjs: boolean }} */ const fileInfo = ops.op_load(specifier); if (!fileInfo) { return undefined; } - isCjsCache.set(specifier, fileInfo.isCjs); - sourceTextCache.set(specifier, fileInfo.data); + scriptSnapshot = ts.ScriptSnapshot.fromString(fileInfo.data); + scriptSnapshot.isCjs = fileInfo.isCjs; + scriptSnapshotCache.set(specifier, scriptSnapshot); scriptVersionCache.set(specifier, fileInfo.version); - sourceText = fileInfo.data; } - return ts.ScriptSnapshot.fromString(sourceText); + return scriptSnapshot; }, }; @@ -846,6 +853,8 @@ delete Object.prototype.__proto__; jqueryMessage, "Cannot_find_name_0_Do_you_need_to_install_type_definitions_for_jQuery_Try_npm_i_save_dev_types_Slash_2592": jqueryMessage, + "Module_0_was_resolved_to_1_but_allowArbitraryExtensions_is_not_set_6263": + "Module '{0}' was resolved to '{1}', but importing these modules is not supported.", }; })()); @@ -1026,7 +1035,7 @@ delete Object.prototype.__proto__; : ts.sortAndDeduplicateDiagnostics( checkFiles.map((s) => program.getSemanticDiagnostics(s)).flat(), )), - ].filter(filterMapDiagnostic.bind(null, false)); + ].filter(filterMapDiagnostic); // emit the tsbuildinfo file // @ts-ignore: emitBuildInfo is not exposed (https://github.com/microsoft/TypeScript/issues/49871) @@ -1041,28 +1050,11 @@ delete Object.prototype.__proto__; debug("<<< exec stop"); } - /** - * @param {boolean} isLsp - * @param {ts.Diagnostic} diagnostic - */ - function filterMapDiagnostic(isLsp, diagnostic) { + /** @param {ts.Diagnostic} diagnostic */ + function filterMapDiagnostic(diagnostic) { if (IGNORED_DIAGNOSTICS.includes(diagnostic.code)) { return false; } - if (isLsp) { - // TS1484: `...` is a type and must be imported using a type-only 
import when 'verbatimModuleSyntax' is enabled. - // We force-enable `verbatimModuleSyntax` in the LSP so the `type` - // modifier is used when auto-importing types. But we don't want this - // diagnostic unless it was explicitly enabled by the user. - if (diagnostic.code == 1484) { - const entry = (lastRequestScope - ? languageServiceEntries.byScope.get(lastRequestScope) - : null) ?? languageServiceEntries.unscoped; - if (entry.forceEnabledVerbatimModuleSyntax) { - return false; - } - } - } // make the diagnostic for using an `export =` in an es module a warning if (diagnostic.code === 1203) { diagnostic.category = ts.DiagnosticCategory.Warning; @@ -1159,12 +1151,10 @@ delete Object.prototype.__proto__; "strict": true, "target": "esnext", "useDefineForClassFields": true, - "verbatimModuleSyntax": true, "jsx": "react", "jsxFactory": "React.createElement", "jsxFragmentFactory": "React.Fragment", }), - forceEnabledVerbatimModuleSyntax: true, }; setLogDebug(enableDebugLogging, "TSLS"); debug("serverInit()"); @@ -1230,17 +1220,8 @@ delete Object.prototype.__proto__; const ls = oldEntry ? 
oldEntry.ls : ts.createLanguageService(host, documentRegistry); - let forceEnabledVerbatimModuleSyntax = false; - if (!config["verbatimModuleSyntax"]) { - config["verbatimModuleSyntax"] = true; - forceEnabledVerbatimModuleSyntax = true; - } const compilerOptions = lspTsConfigToCompilerOptions(config); - newByScope.set(scope, { - ls, - compilerOptions, - forceEnabledVerbatimModuleSyntax, - }); + newByScope.set(scope, { ls, compilerOptions }); languageServiceEntries.byScope.delete(scope); } for (const oldEntry of languageServiceEntries.byScope.values()) { @@ -1260,7 +1241,7 @@ delete Object.prototype.__proto__; closed = true; } scriptVersionCache.delete(script); - sourceTextCache.delete(script); + scriptSnapshotCache.delete(script); } if (newConfigsByScope || opened || closed) { @@ -1305,7 +1286,7 @@ delete Object.prototype.__proto__; ...ls.getSemanticDiagnostics(specifier), ...ls.getSuggestionDiagnostics(specifier), ...ls.getSyntacticDiagnostics(specifier), - ].filter(filterMapDiagnostic.bind(null, true))); + ].filter(filterMapDiagnostic)); } return respond(id, diagnosticMap); } catch (e) { @@ -1366,18 +1347,12 @@ delete Object.prototype.__proto__; "console", "Console", "ErrorConstructor", - "exports", "gc", "Global", "ImportMeta", "localStorage", - "module", - "NodeModule", - "NodeRequire", - "process", "queueMicrotask", "RequestInit", - "require", "ResponseInit", "sessionStorage", "setImmediate", diff --git a/cli/tsc/diagnostics.rs b/cli/tsc/diagnostics.rs index b0394ec17..d3795706e 100644 --- a/cli/tsc/diagnostics.rs +++ b/cli/tsc/diagnostics.rs @@ -323,7 +323,7 @@ impl Diagnostics { // todo(dsherret): use a short lived cache to prevent parsing // source maps so often if let Ok(source_map) = - SourceMap::from_slice(&fast_check_module.source_map) + SourceMap::from_slice(fast_check_module.source_map.as_bytes()) { if let Some(start) = d.start.as_mut() { let maybe_token = source_map diff --git a/cli/tsc/dts/lib.deno.ns.d.ts b/cli/tsc/dts/lib.deno.ns.d.ts index 
36592e10d..8179e4223 100644 --- a/cli/tsc/dts/lib.deno.ns.d.ts +++ b/cli/tsc/dts/lib.deno.ns.d.ts @@ -556,14 +556,23 @@ declare namespace Deno { */ env?: "inherit" | boolean | string[]; - /** Specifies if the `sys` permission should be requested or revoked. - * If set to `"inherit"`, the current `sys` permission will be inherited. - * If set to `true`, the global `sys` permission will be requested. - * If set to `false`, the global `sys` permission will be revoked. + /** Specifies if the `ffi` permission should be requested or revoked. + * If set to `"inherit"`, the current `ffi` permission will be inherited. + * If set to `true`, the global `ffi` permission will be requested. + * If set to `false`, the global `ffi` permission will be revoked. * * @default {false} */ - sys?: "inherit" | boolean | string[]; + ffi?: "inherit" | boolean | Array<string | URL>; + + /** Specifies if the `import` permission should be requested or revoked. + * If set to `"inherit"` the current `import` permission will be inherited. + * If set to `true`, the global `import` permission will be requested. + * If set to `false`, the global `import` permission will be revoked. + * If set to `Array<string>`, the `import` permissions will be requested with the + * specified domains. + */ + import?: "inherit" | boolean | Array<string>; /** Specifies if the `net` permission should be requested or revoked. * if set to `"inherit"`, the current `net` permission will be inherited. @@ -638,15 +647,6 @@ declare namespace Deno { */ net?: "inherit" | boolean | string[]; - /** Specifies if the `ffi` permission should be requested or revoked. - * If set to `"inherit"`, the current `ffi` permission will be inherited. - * If set to `true`, the global `ffi` permission will be requested. - * If set to `false`, the global `ffi` permission will be revoked. - * - * @default {false} - */ - ffi?: "inherit" | boolean | Array<string | URL>; - /** Specifies if the `read` permission should be requested or revoked. 
* If set to `"inherit"`, the current `read` permission will be inherited. * If set to `true`, the global `read` permission will be requested. @@ -667,6 +667,15 @@ declare namespace Deno { */ run?: "inherit" | boolean | Array<string | URL>; + /** Specifies if the `sys` permission should be requested or revoked. + * If set to `"inherit"`, the current `sys` permission will be inherited. + * If set to `true`, the global `sys` permission will be requested. + * If set to `false`, the global `sys` permission will be revoked. + * + * @default {false} + */ + sys?: "inherit" | boolean | string[]; + /** Specifies if the `write` permission should be requested or revoked. * If set to `"inherit"`, the current `write` permission will be inherited. * If set to `true`, the global `write` permission will be requested. @@ -2962,6 +2971,10 @@ declare namespace Deno { * field from `stat` on Mac/BSD and `ftCreationTime` on Windows. This may * not be available on all platforms. */ birthtime: Date | null; + /** The last change time of the file. This corresponds to the `ctime` + * field from `stat` on Mac/BSD and `ChangeTime` on Windows. This may + * not be available on all platforms. */ + ctime: Date | null; /** ID of the device containing the file. */ dev: number; /** Inode number. @@ -2970,8 +2983,7 @@ declare namespace Deno { ino: number | null; /** The underlying raw `st_mode` bits that contain the standard Unix * permissions for this file/directory. - * - * _Linux/Mac OS only._ */ + */ mode: number | null; /** Number of hard links pointing to this file. * diff --git a/cli/tsc/dts/lib.deno.unstable.d.ts b/cli/tsc/dts/lib.deno.unstable.d.ts index 973a09d92..6759856e6 100644 --- a/cli/tsc/dts/lib.deno.unstable.d.ts +++ b/cli/tsc/dts/lib.deno.unstable.d.ts @@ -1181,6 +1181,32 @@ declare namespace Deno { ): Displayable; /** + * Display a JPG or PNG image. 
+ * + * ``` + * Deno.jupyter.image("./cat.jpg"); + * Deno.jupyter.image("./dog.png"); + * ``` + * + * @category Jupyter + * @experimental + */ + export function image(path: string): Displayable; + + /** + * Display a JPG or PNG image. + * + * ``` + * const img = Deno.readFileSync("./cat.jpg"); + * Deno.jupyter.image(img); + * ``` + * + * @category Jupyter + * @experimental + */ + export function image(data: Uint8Array): Displayable; + + /** * Format an object for displaying in Deno * * @param obj - The object to be displayed @@ -1225,6 +1251,73 @@ declare namespace Deno { export {}; // only export exports } + /** + * **UNSTABLE**: New API, yet to be vetted. + * + * APIs for working with the OpenTelemetry observability framework. Deno can + * export traces, metrics, and logs to OpenTelemetry compatible backends via + * the OTLP protocol. + * + * Deno automatically instruments the runtime with OpenTelemetry traces and + * metrics. This data is exported via OTLP to OpenTelemetry compatible + * backends. User logs from the `console` API are exported as OpenTelemetry + * logs via OTLP. + * + * User code can also create custom traces, metrics, and logs using the + * OpenTelemetry API. This is done using the official OpenTelemetry package + * for JavaScript: + * [`npm:@opentelemetry/api`](https://opentelemetry.io/docs/languages/js/). + * Deno integrates with this package to provide trace context propagation + * between native Deno APIs (like `Deno.serve` or `fetch`) and custom user + * code. Deno also provides APIs that allow exporting custom telemetry data + * via the same OTLP channel used by the Deno runtime. This is done using the + * [`jsr:@deno/otel`](https://jsr.io/@deno/otel) package. 
+ * + * @example Using OpenTelemetry API to create custom traces + * ```ts,ignore + * import { trace } from "npm:@opentelemetry/api@1"; + * import "jsr:@deno/otel@0.0.2/register"; + * + * const tracer = trace.getTracer("example-tracer"); + * + * async function doWork() { + * return tracer.startActiveSpan("doWork", async (span) => { + * span.setAttribute("key", "value"); + * await new Promise((resolve) => setTimeout(resolve, 1000)); + * span.end(); + * }); + * } + * + * Deno.serve(async (req) => { + * await doWork(); + * const resp = await fetch("https://example.com"); + * return resp; + * }); + * ``` + * + * @category Telemetry + * @experimental + */ + export namespace telemetry { + /** + * A SpanExporter compatible with OpenTelemetry.js + * https://open-telemetry.github.io/opentelemetry-js/interfaces/_opentelemetry_sdk_trace_base.SpanExporter.html + * @category Telemetry + * @experimental + */ + export class SpanExporter {} + + /** + * A ContextManager compatible with OpenTelemetry.js + * https://open-telemetry.github.io/opentelemetry-js/interfaces/_opentelemetry_api.ContextManager.html + * @category Telemetry + * @experimental + */ + export class ContextManager {} + + export {}; // only export exports + } + export {}; // only export exports } diff --git a/cli/tsc/mod.rs b/cli/tsc/mod.rs index 0e3387494..8f8bed20a 100644 --- a/cli/tsc/mod.rs +++ b/cli/tsc/mod.rs @@ -3,10 +3,13 @@ use crate::args::TsConfig; use crate::args::TypeCheckMode; use crate::cache::FastInsecureHasher; +use crate::cache::ModuleInfoCache; use crate::node; use crate::npm::CliNpmResolver; +use crate::resolver::CjsTracker; use crate::util::checksum; use crate::util::path::mapped_specifier_for_tsc; +use crate::worker::create_isolate_create_params; use deno_ast::MediaType; use deno_core::anyhow::anyhow; @@ -31,12 +34,14 @@ use deno_graph::GraphKind; use deno_graph::Module; use deno_graph::ModuleGraph; use deno_graph::ResolutionResolved; +use deno_resolver::npm::ResolvePkgFolderFromDenoReqError; 
+use deno_runtime::deno_fs; use deno_runtime::deno_node::NodeResolver; use deno_semver::npm::NpmPackageReqReference; use node_resolver::errors::NodeJsErrorCode; use node_resolver::errors::NodeJsErrorCoded; +use node_resolver::errors::PackageSubpathResolveError; use node_resolver::NodeModuleKind; -use node_resolver::NodeResolution; use node_resolver::NodeResolutionMode; use once_cell::sync::Lazy; use std::borrow::Cow; @@ -45,6 +50,7 @@ use std::fmt; use std::path::Path; use std::path::PathBuf; use std::sync::Arc; +use thiserror::Error; mod diagnostics; @@ -299,8 +305,81 @@ pub struct EmittedFile { pub media_type: MediaType, } +pub fn into_specifier_and_media_type( + specifier: Option<ModuleSpecifier>, +) -> (ModuleSpecifier, MediaType) { + match specifier { + Some(specifier) => { + let media_type = MediaType::from_specifier(&specifier); + + (specifier, media_type) + } + None => ( + Url::parse("internal:///missing_dependency.d.ts").unwrap(), + MediaType::Dts, + ), + } +} + +#[derive(Debug)] +pub struct TypeCheckingCjsTracker { + cjs_tracker: Arc<CjsTracker>, + module_info_cache: Arc<ModuleInfoCache>, +} + +impl TypeCheckingCjsTracker { + pub fn new( + cjs_tracker: Arc<CjsTracker>, + module_info_cache: Arc<ModuleInfoCache>, + ) -> Self { + Self { + cjs_tracker, + module_info_cache, + } + } + + pub fn is_cjs( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + code: &Arc<str>, + ) -> bool { + let maybe_is_script = self + .module_info_cache + .as_module_analyzer() + .analyze_sync(specifier, media_type, code) + .ok() + .map(|info| info.is_script); + maybe_is_script + .and_then(|is_script| { + self + .cjs_tracker + .is_cjs_with_known_is_script(specifier, media_type, is_script) + .ok() + }) + .unwrap_or_else(|| { + self + .cjs_tracker + .is_maybe_cjs(specifier, media_type) + .unwrap_or(false) + }) + } + + pub fn is_cjs_with_known_is_script( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + is_script: bool, + ) -> Result<bool, 
node_resolver::errors::ClosestPkgJsonError> { + self + .cjs_tracker + .is_cjs_with_known_is_script(specifier, media_type, is_script) + } +} + #[derive(Debug)] pub struct RequestNpmState { + pub cjs_tracker: Arc<TypeCheckingCjsTracker>, pub node_resolver: Arc<NodeResolver>, pub npm_resolver: Arc<dyn CliNpmResolver>, } @@ -453,7 +532,7 @@ pub fn as_ts_script_kind(media_type: MediaType) -> i32 { MediaType::Tsx => 4, MediaType::Json => 6, MediaType::SourceMap - | MediaType::TsBuildInfo + | MediaType::Css | MediaType::Wasm | MediaType::Unknown => 0, } @@ -486,25 +565,22 @@ fn op_load_inner( ) -> Result<Option<LoadResponse>, AnyError> { fn load_from_node_modules( specifier: &ModuleSpecifier, - node_resolver: Option<&NodeResolver>, + npm_state: Option<&RequestNpmState>, media_type: &mut MediaType, is_cjs: &mut bool, ) -> Result<String, AnyError> { *media_type = MediaType::from_specifier(specifier); - *is_cjs = node_resolver - .map(|node_resolver| { - match node_resolver.url_to_node_resolution(specifier.clone()) { - Ok(NodeResolution::CommonJs(_)) => true, - Ok(NodeResolution::Esm(_)) - | Ok(NodeResolution::BuiltIn(_)) - | Err(_) => false, - } - }) - .unwrap_or(false); let file_path = specifier.to_file_path().unwrap(); let code = std::fs::read_to_string(&file_path) .with_context(|| format!("Unable to load {}", file_path.display()))?; - Ok(code) + let code: Arc<str> = code.into(); + *is_cjs = npm_state + .map(|npm_state| { + npm_state.cjs_tracker.is_cjs(specifier, *media_type, &code) + }) + .unwrap_or(false); + // todo(dsherret): how to avoid cloning here? 
+ Ok(code.to_string()) } let state = state.borrow_mut::<State>(); @@ -557,6 +633,13 @@ fn op_load_inner( match module { Module::Js(module) => { media_type = module.media_type; + if let Some(npm_state) = &state.maybe_npm { + is_cjs = npm_state.cjs_tracker.is_cjs_with_known_is_script( + specifier, + module.media_type, + module.is_script, + )?; + } let source = module .fast_check_module() .map(|m| &*m.source) @@ -570,11 +653,13 @@ fn op_load_inner( Module::Npm(_) | Module::Node(_) => None, Module::External(module) => { // means it's Deno code importing an npm module - let specifier = - node::resolve_specifier_into_node_modules(&module.specifier); + let specifier = node::resolve_specifier_into_node_modules( + &module.specifier, + &deno_fs::RealFs, + ); Some(Cow::Owned(load_from_node_modules( &specifier, - state.maybe_npm.as_ref().map(|n| n.node_resolver.as_ref()), + state.maybe_npm.as_ref(), &mut media_type, &mut is_cjs, )?)) @@ -587,7 +672,7 @@ fn op_load_inner( { Some(Cow::Owned(load_from_node_modules( specifier, - Some(npm.node_resolver.as_ref()), + Some(npm), &mut media_type, &mut is_cjs, )?)) @@ -662,6 +747,7 @@ fn op_resolve_inner( "Error converting a string module specifier for \"op_resolve\".", )? }; + let referrer_module = state.graph.get(&referrer); for specifier in args.specifiers { if specifier.starts_with("node:") { resolved.push(( @@ -677,23 +763,44 @@ fn op_resolve_inner( continue; } - let graph = &state.graph; - let resolved_dep = graph - .get(&referrer) + let resolved_dep = referrer_module .and_then(|m| m.js()) .and_then(|m| m.dependencies_prefer_fast_check().get(&specifier)) .and_then(|d| d.maybe_type.ok().or_else(|| d.maybe_code.ok())); let maybe_result = match resolved_dep { Some(ResolutionResolved { specifier, .. }) => { - resolve_graph_specifier_types(specifier, &referrer, state)? + resolve_graph_specifier_types( + specifier, + &referrer, + referrer_kind, + state, + )? 
+ } + _ => { + match resolve_non_graph_specifier_types( + &specifier, + &referrer, + referrer_kind, + state, + ) { + Ok(maybe_result) => maybe_result, + Err( + err @ ResolveNonGraphSpecifierTypesError::ResolvePkgFolderFromDenoReq( + ResolvePkgFolderFromDenoReqError::Managed(_), + ), + ) => { + // it's most likely requesting the jsxImportSource, which isn't loaded + // into the graph when not using jsx, so just ignore this error + if specifier.ends_with("/jsx-runtime") { + None + } else { + return Err(err.into()); + } + } + Err(err) => return Err(err.into()), + } } - _ => resolve_non_graph_specifier_types( - &specifier, - &referrer, - referrer_kind, - state, - )?, }; let result = match maybe_result { Some((specifier, media_type)) => { @@ -718,7 +825,13 @@ fn op_resolve_inner( } } }; - (specifier_str, media_type.as_ts_extension()) + ( + specifier_str, + match media_type { + MediaType::Css => ".js", // surface these as .js for typescript + media_type => media_type.as_ts_extension(), + }, + ) } None => ( MISSING_DEPENDENCY_SPECIFIER.to_string(), @@ -735,6 +848,7 @@ fn op_resolve_inner( fn resolve_graph_specifier_types( specifier: &ModuleSpecifier, referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, state: &State, ) -> Result<Option<(ModuleSpecifier, MediaType)>, AnyError> { let graph = &state.graph; @@ -787,43 +901,53 @@ fn resolve_graph_specifier_types( &package_folder, module.nv_reference.sub_path(), Some(referrer), + referrer_kind, NodeResolutionMode::Types, ); - let maybe_resolution = match res_result { - Ok(res) => Some(res), + let maybe_url = match res_result { + Ok(url) => Some(url), Err(err) => match err.code() { NodeJsErrorCode::ERR_TYPES_NOT_FOUND | NodeJsErrorCode::ERR_MODULE_NOT_FOUND => None, _ => return Err(err.into()), }, }; - Ok(Some(NodeResolution::into_specifier_and_media_type( - maybe_resolution, - ))) + Ok(Some(into_specifier_and_media_type(maybe_url))) } else { Ok(None) } } Some(Module::External(module)) => { // we currently only use 
"External" for when the module is in an npm package - Ok(state.maybe_npm.as_ref().map(|npm| { - let specifier = - node::resolve_specifier_into_node_modules(&module.specifier); - NodeResolution::into_specifier_and_media_type( - npm.node_resolver.url_to_node_resolution(specifier).ok(), - ) + Ok(state.maybe_npm.as_ref().map(|_| { + let specifier = node::resolve_specifier_into_node_modules( + &module.specifier, + &deno_fs::RealFs, + ); + into_specifier_and_media_type(Some(specifier)) })) } Some(Module::Node(_)) | None => Ok(None), } } +#[derive(Debug, Error)] +enum ResolveNonGraphSpecifierTypesError { + #[error(transparent)] + ResolvePkgFolderFromDenoReq(#[from] ResolvePkgFolderFromDenoReqError), + #[error(transparent)] + PackageSubpathResolve(#[from] PackageSubpathResolveError), +} + fn resolve_non_graph_specifier_types( raw_specifier: &str, referrer: &ModuleSpecifier, referrer_kind: NodeModuleKind, state: &State, -) -> Result<Option<(ModuleSpecifier, MediaType)>, AnyError> { +) -> Result< + Option<(ModuleSpecifier, MediaType)>, + ResolveNonGraphSpecifierTypesError, +> { let npm = match state.maybe_npm.as_ref() { Some(npm) => npm, None => return Ok(None), // we only support non-graph types for npm packages @@ -831,7 +955,7 @@ fn resolve_non_graph_specifier_types( let node_resolver = &npm.node_resolver; if node_resolver.in_npm_package(referrer) { // we're in an npm package, so use node resolution - Ok(Some(NodeResolution::into_specifier_and_media_type( + Ok(Some(into_specifier_and_media_type( node_resolver .resolve( raw_specifier, @@ -839,7 +963,8 @@ fn resolve_non_graph_specifier_types( referrer_kind, NodeResolutionMode::Types, ) - .ok(), + .ok() + .map(|res| res.into_url()), ))) } else if let Ok(npm_req_ref) = NpmPackageReqReference::from_str(raw_specifier) @@ -856,19 +981,18 @@ fn resolve_non_graph_specifier_types( &package_folder, npm_req_ref.sub_path(), Some(referrer), + referrer_kind, NodeResolutionMode::Types, ); - let maybe_resolution = match res_result { - 
Ok(res) => Some(res), + let maybe_url = match res_result { + Ok(url) => Some(url), Err(err) => match err.code() { NodeJsErrorCode::ERR_TYPES_NOT_FOUND | NodeJsErrorCode::ERR_MODULE_NOT_FOUND => None, _ => return Err(err.into()), }, }; - Ok(Some(NodeResolution::into_specifier_and_media_type( - maybe_resolution, - ))) + Ok(Some(into_specifier_and_media_type(maybe_url))) } else { Ok(None) } @@ -981,6 +1105,7 @@ pub fn exec(request: Request) -> Result<Response, AnyError> { root_map, remapped_specifiers, )], + create_params: create_isolate_create_params(), ..Default::default() }); diff --git a/cli/util/extract.rs b/cli/util/extract.rs index 841cf6eb0..be68202aa 100644 --- a/cli/util/extract.rs +++ b/cli/util/extract.rs @@ -64,7 +64,7 @@ fn extract_inner( }) { Ok(parsed) => { let mut c = ExportCollector::default(); - c.visit_program(parsed.program_ref()); + c.visit_program(parsed.program().as_ref()); c } Err(_) => ExportCollector::default(), @@ -254,7 +254,11 @@ impl ExportCollector { let mut import_specifiers = vec![]; if let Some(default_export) = &self.default_export { - if !symbols_to_exclude.contains(default_export) { + // If the default export conflicts with a named export, a named one + // takes precedence. 
+ if !symbols_to_exclude.contains(default_export) + && !self.named_exports.contains(default_export) + { import_specifiers.push(ast::ImportSpecifier::Default( ast::ImportDefaultSpecifier { span: DUMMY_SP, @@ -566,14 +570,14 @@ fn generate_pseudo_file( })?; let top_level_atoms = swc_utils::collect_decls_with_ctxt::<Atom, _>( - parsed.program_ref(), + &parsed.program_ref(), parsed.top_level_context(), ); let transformed = parsed .program_ref() - .clone() + .to_owned() .fold_with(&mut as_folder(Transform { specifier: &file.specifier, base_file_specifier, @@ -582,7 +586,10 @@ fn generate_pseudo_file( wrap_kind, })); - let source = deno_ast::swc::codegen::to_code(&transformed); + let source = deno_ast::swc::codegen::to_code_with_comments( + Some(&parsed.comments().as_single_threaded()), + &transformed, + ); log::debug!("{}:\n{}", file.specifier, source); @@ -1137,6 +1144,57 @@ Deno.test("file:///README.md$6-12.js", async ()=>{ media_type: MediaType::JavaScript, }], }, + // https://github.com/denoland/deno/issues/26009 + Test { + input: Input { + source: r#" +/** + * ```ts + * console.log(Foo) + * ``` + */ +export class Foo {} +export default Foo +"#, + specifier: "file:///main.ts", + }, + expected: vec![Expected { + source: r#"import { Foo } from "file:///main.ts"; +Deno.test("file:///main.ts$3-6.ts", async ()=>{ + console.log(Foo); +}); +"#, + specifier: "file:///main.ts$3-6.ts", + media_type: MediaType::TypeScript, + }], + }, + // https://github.com/denoland/deno/issues/26728 + Test { + input: Input { + source: r#" +/** + * ```ts + * // @ts-expect-error: can only add numbers + * add('1', '2'); + * ``` + */ +export function add(first: number, second: number) { + return first + second; +} +"#, + specifier: "file:///main.ts", + }, + expected: vec![Expected { + source: r#"import { add } from "file:///main.ts"; +Deno.test("file:///main.ts$3-7.ts", async ()=>{ + // @ts-expect-error: can only add numbers + add('1', '2'); +}); +"#, + specifier: "file:///main.ts$3-7.ts", + 
media_type: MediaType::TypeScript, + }], + }, ]; for test in tests { @@ -1326,6 +1384,53 @@ assertEquals(add(1, 2), 3); media_type: MediaType::JavaScript, }], }, + // https://github.com/denoland/deno/issues/26009 + Test { + input: Input { + source: r#" +/** + * ```ts + * console.log(Foo) + * ``` + */ +export class Foo {} +export default Foo +"#, + specifier: "file:///main.ts", + }, + expected: vec![Expected { + source: r#"import { Foo } from "file:///main.ts"; +console.log(Foo); +"#, + specifier: "file:///main.ts$3-6.ts", + media_type: MediaType::TypeScript, + }], + }, + // https://github.com/denoland/deno/issues/26728 + Test { + input: Input { + source: r#" +/** + * ```ts + * // @ts-expect-error: can only add numbers + * add('1', '2'); + * ``` + */ +export function add(first: number, second: number) { + return first + second; +} +"#, + specifier: "file:///main.ts", + }, + expected: vec![Expected { + source: r#"import { add } from "file:///main.ts"; +// @ts-expect-error: can only add numbers +add('1', '2'); +"#, + specifier: "file:///main.ts$3-7.ts", + media_type: MediaType::TypeScript, + }], + }, ]; for test in tests { @@ -1366,7 +1471,7 @@ assertEquals(add(1, 2), 3); }) .unwrap(); - collector.visit_program(parsed.program_ref()); + parsed.program_ref().visit_with(&mut collector); collector } @@ -1581,6 +1686,16 @@ declare global { named_expected: atom_set!(), default_expected: None, }, + // The identifier `Foo` conflicts, but `ExportCollector` doesn't do + // anything about it. It is handled by `to_import_specifiers` method. 
+ Test { + input: r#" +export class Foo {} +export default Foo +"#, + named_expected: atom_set!("Foo"), + default_expected: Some("Foo".into()), + }, ]; for test in tests { diff --git a/cli/util/file_watcher.rs b/cli/util/file_watcher.rs index d92d880bc..eb3fb8c60 100644 --- a/cli/util/file_watcher.rs +++ b/cli/util/file_watcher.rs @@ -30,7 +30,7 @@ use tokio::sync::mpsc; use tokio::sync::mpsc::UnboundedReceiver; use tokio::time::sleep; -const CLEAR_SCREEN: &str = "\x1B[2J\x1B[1;1H"; +const CLEAR_SCREEN: &str = "\x1B[H\x1B[2J\x1B[3J"; const DEBOUNCE_INTERVAL: Duration = Duration::from_millis(200); struct DebouncedReceiver { @@ -73,7 +73,6 @@ impl DebouncedReceiver { } } -#[allow(clippy::print_stderr)] async fn error_handler<F>(watch_future: F) -> bool where F: Future<Output = Result<(), AnyError>>, @@ -84,7 +83,7 @@ where Some(e) => format_js_error(e), None => format!("{err:?}"), }; - eprintln!( + log::error!( "{}: {}", colors::red_bold("error"), error_string.trim_start_matches("error: ") @@ -128,19 +127,12 @@ impl PrintConfig { } } -fn create_print_after_restart_fn( - banner: &'static str, - clear_screen: bool, -) -> impl Fn() { +fn create_print_after_restart_fn(clear_screen: bool) -> impl Fn() { move || { #[allow(clippy::print_stderr)] if clear_screen && std::io::stderr().is_terminal() { eprint!("{}", CLEAR_SCREEN); } - info!( - "{} File change detected! Restarting!", - colors::intense_blue(banner), - ); } } @@ -188,7 +180,15 @@ impl WatcherCommunicator { } pub fn print(&self, msg: String) { - log::info!("{} {}", self.banner, msg); + log::info!("{} {}", self.banner, colors::gray(msg)); + } + + pub fn show_path_changed(&self, changed_paths: Option<Vec<PathBuf>>) { + if let Some(paths) = changed_paths { + self.print( + format!("Restarting! 
File change detected: {:?}", paths[0]).to_string(), + ) + } } } @@ -264,7 +264,7 @@ where clear_screen, } = print_config; - let print_after_restart = create_print_after_restart_fn(banner, clear_screen); + let print_after_restart = create_print_after_restart_fn(clear_screen); let watcher_communicator = Arc::new(WatcherCommunicator { paths_to_watch_tx: paths_to_watch_tx.clone(), changed_paths_rx: changed_paths_rx.resubscribe(), diff --git a/cli/util/fs.rs b/cli/util/fs.rs index a021ec19c..d36c02242 100644 --- a/cli/util/fs.rs +++ b/cli/util/fs.rs @@ -160,11 +160,11 @@ fn atomic_write_file( data: &[u8], ) -> std::io::Result<()> { fs.write_file(temp_file_path, data)?; - fs.rename_file(temp_file_path, file_path).map_err(|err| { - // clean up the created temp file on error - let _ = fs.remove_file(temp_file_path); - err - }) + fs.rename_file(temp_file_path, file_path) + .inspect_err(|_err| { + // clean up the created temp file on error + let _ = fs.remove_file(temp_file_path); + }) } let temp_file_path = get_atomic_file_path(file_path); @@ -277,7 +277,7 @@ pub fn write_file_2<T: AsRef<[u8]>>( /// Similar to `std::fs::canonicalize()` but strips UNC prefixes on Windows. 
pub fn canonicalize_path(path: &Path) -> Result<PathBuf, Error> { - Ok(deno_core::strip_unc_prefix(path.canonicalize()?)) + Ok(deno_path_util::strip_unc_prefix(path.canonicalize()?)) } /// Canonicalizes a path which might be non-existent by going up the @@ -565,7 +565,9 @@ pub fn symlink_dir(oldpath: &Path, newpath: &Path) -> Result<(), Error> { use std::os::windows::fs::symlink_dir; symlink_dir(oldpath, newpath).map_err(|err| { if let Some(code) = err.raw_os_error() { - if code as u32 == winapi::shared::winerror::ERROR_PRIVILEGE_NOT_HELD { + if code as u32 == winapi::shared::winerror::ERROR_PRIVILEGE_NOT_HELD + || code as u32 == winapi::shared::winerror::ERROR_INVALID_FUNCTION + { return err_mapper(err, Some(ErrorKind::PermissionDenied)); } } diff --git a/cli/util/logger.rs b/cli/util/logger.rs index cdc89411f..f76663df2 100644 --- a/cli/util/logger.rs +++ b/cli/util/logger.rs @@ -29,6 +29,7 @@ impl log::Log for CliLogger { // thread's state DrawThread::hide(); self.0.log(record); + deno_runtime::ops::otel::handle_log(record); DrawThread::show(); } } @@ -65,6 +66,8 @@ pub fn init(maybe_level: Option<log::Level>) { .filter_module("swc_ecma_parser", log::LevelFilter::Error) // Suppress span lifecycle logs since they are too verbose .filter_module("tracing::span", log::LevelFilter::Off) + // for deno_compile, this is too verbose + .filter_module("editpe", log::LevelFilter::Error) .format(|buf, record| { let mut target = record.target().to_string(); if let Some(line_no) = record.line() { diff --git a/cli/util/mod.rs b/cli/util/mod.rs index e59b09d2c..f81a74c44 100644 --- a/cli/util/mod.rs +++ b/cli/util/mod.rs @@ -14,6 +14,7 @@ pub mod logger; pub mod path; pub mod progress_bar; pub mod result; +pub mod retry; pub mod sync; pub mod text_encoding; pub mod unix; diff --git a/cli/util/path.rs b/cli/util/path.rs index e4ae6e7cb..58bed664f 100644 --- a/cli/util/path.rs +++ b/cli/util/path.rs @@ -42,21 +42,6 @@ pub fn get_extension(file_path: &Path) -> Option<String> { 
.map(|e| e.to_lowercase()); } -pub fn specifier_has_extension( - specifier: &ModuleSpecifier, - searching_ext: &str, -) -> bool { - let Some((_, ext)) = specifier.path().rsplit_once('.') else { - return false; - }; - let searching_ext = searching_ext.strip_prefix('.').unwrap_or(searching_ext); - debug_assert!(!searching_ext.contains('.')); // exts like .d.ts are not implemented here - if ext.len() != searching_ext.len() { - return false; - } - ext.eq_ignore_ascii_case(searching_ext) -} - pub fn get_atomic_dir_path(file_path: &Path) -> PathBuf { let rand = gen_rand_path_component(); let new_file_name = format!( @@ -351,18 +336,6 @@ mod test { } #[test] - fn test_specifier_has_extension() { - fn get(specifier: &str, ext: &str) -> bool { - specifier_has_extension(&ModuleSpecifier::parse(specifier).unwrap(), ext) - } - - assert!(get("file:///a/b/c.ts", "ts")); - assert!(get("file:///a/b/c.ts", ".ts")); - assert!(!get("file:///a/b/c.ts", ".cts")); - assert!(get("file:///a/b/c.CtS", ".cts")); - } - - #[test] fn test_to_percent_decoded_str() { let str = to_percent_decoded_str("%F0%9F%A6%95"); assert_eq!(str, "🦕"); diff --git a/cli/util/progress_bar/renderer.rs b/cli/util/progress_bar/renderer.rs index a83ceb333..6b08dada1 100644 --- a/cli/util/progress_bar/renderer.rs +++ b/cli/util/progress_bar/renderer.rs @@ -193,10 +193,16 @@ impl ProgressBarRenderer for TextOnlyProgressBarRenderer { } }; + // TODO(@marvinhagemeister): We're trying to reconstruct the original + // specifier from the resolved one, but we lack the information about + // private registries URLs and other things here. 
let message = display_entry .message .replace("https://registry.npmjs.org/", "npm:") - .replace("https://jsr.io/", "jsr:"); + .replace("https://jsr.io/", "jsr:") + .replace("%2f", "/") + .replace("%2F", "/"); + display_str.push_str( &colors::gray(format!(" - {}{}\n", message, bytes_text)).to_string(), ); diff --git a/cli/util/retry.rs b/cli/util/retry.rs new file mode 100644 index 000000000..a8febe60d --- /dev/null +++ b/cli/util/retry.rs @@ -0,0 +1,41 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +use std::future::Future; +use std::time::Duration; + +pub fn retry< + F: FnMut() -> Fut, + T, + E, + Fut: Future<Output = Result<T, E>>, + ShouldRetry: FnMut(&E) -> bool, +>( + mut f: F, + mut should_retry: ShouldRetry, +) -> impl Future<Output = Result<T, E>> { + const WAITS: [Duration; 3] = [ + Duration::from_millis(100), + Duration::from_millis(250), + Duration::from_millis(500), + ]; + + let mut waits = WAITS.into_iter(); + async move { + let mut first_result = None; + loop { + let result = f().await; + match result { + Ok(r) => return Ok(r), + Err(e) if !should_retry(&e) => return Err(e), + _ => {} + } + if first_result.is_none() { + first_result = Some(result); + } + let Some(wait) = waits.next() else { + return first_result.unwrap(); + }; + tokio::time::sleep(wait).await; + } + } +} diff --git a/cli/util/text_encoding.rs b/cli/util/text_encoding.rs index d2e0832c9..8524e63eb 100644 --- a/cli/util/text_encoding.rs +++ b/cli/util/text_encoding.rs @@ -1,6 +1,8 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
+use std::borrow::Cow; use std::ops::Range; +use std::sync::Arc; use base64::prelude::BASE64_STANDARD; use base64::Engine; @@ -9,6 +11,15 @@ use deno_core::ModuleSourceCode; static SOURCE_MAP_PREFIX: &[u8] = b"//# sourceMappingURL=data:application/json;base64,"; +pub fn from_utf8_lossy_owned(bytes: Vec<u8>) -> String { + match String::from_utf8_lossy(&bytes) { + Cow::Owned(code) => code, + // SAFETY: `String::from_utf8_lossy` guarantees that the result is valid + // UTF-8 if `Cow::Borrowed` is returned. + Cow::Borrowed(_) => unsafe { String::from_utf8_unchecked(bytes) }, + } +} + pub fn source_map_from_code(code: &[u8]) -> Option<Vec<u8>> { let range = find_source_map_range(code)?; let source_map_range = &code[range]; @@ -85,6 +96,29 @@ fn find_source_map_range(code: &[u8]) -> Option<Range<usize>> { } } +/// Converts an `Arc<str>` to an `Arc<[u8]>`. +#[allow(dead_code)] +pub fn arc_str_to_bytes(arc_str: Arc<str>) -> Arc<[u8]> { + let raw = Arc::into_raw(arc_str); + // SAFETY: This is safe because they have the same memory layout. + unsafe { Arc::from_raw(raw as *const [u8]) } +} + +/// Converts an `Arc<u8>` to an `Arc<str>` if able. +#[allow(dead_code)] +pub fn arc_u8_to_arc_str( + arc_u8: Arc<[u8]>, +) -> Result<Arc<str>, std::str::Utf8Error> { + // Check that the string is valid UTF-8. + std::str::from_utf8(&arc_u8)?; + // SAFETY: the string is valid UTF-8, and the layout Arc<[u8]> is the same as + // Arc<str>. This is proven by the From<Arc<str>> impl for Arc<[u8]> from the + // standard library. 
+ Ok(unsafe { + std::mem::transmute::<std::sync::Arc<[u8]>, std::sync::Arc<str>>(arc_u8) + }) +} + #[cfg(test)] mod tests { use std::sync::Arc; diff --git a/cli/util/v8.rs b/cli/util/v8.rs index fb16e67b7..6e690e6f3 100644 --- a/cli/util/v8.rs +++ b/cli/util/v8.rs @@ -46,15 +46,14 @@ pub fn init_v8_flags( .skip(1) .collect::<Vec<_>>(); - #[allow(clippy::print_stderr)] if !unrecognized_v8_flags.is_empty() { for f in unrecognized_v8_flags { - eprintln!("error: V8 did not recognize flag '{f}'"); + log::error!("error: V8 did not recognize flag '{f}'"); } - eprintln!("\nFor a list of V8 flags, use '--v8-flags=--help'"); - std::process::exit(1); + log::error!("\nFor a list of V8 flags, use '--v8-flags=--help'"); + deno_runtime::exit(1); } if v8_flags_includes_help { - std::process::exit(0); + deno_runtime::exit(0); } } diff --git a/cli/worker.rs b/cli/worker.rs index cc18c0d15..3b09714d5 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -14,21 +14,23 @@ use deno_core::v8; use deno_core::CompiledWasmModuleStore; use deno_core::Extension; use deno_core::FeatureChecker; -use deno_core::ModuleId; use deno_core::ModuleLoader; use deno_core::PollEventLoopOptions; use deno_core::SharedArrayBufferStore; use deno_runtime::code_cache; use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel; use deno_runtime::deno_fs; -use deno_runtime::deno_node; use deno_runtime::deno_node::NodeExtInitServices; +use deno_runtime::deno_node::NodeRequireLoader; +use deno_runtime::deno_node::NodeRequireLoaderRc; use deno_runtime::deno_node::NodeResolver; +use deno_runtime::deno_node::PackageJsonResolver; use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_tls::RootCertStoreProvider; use deno_runtime::deno_web::BlobStore; use deno_runtime::fmt_errors::format_js_error; use deno_runtime::inspector_server::InspectorServer; +use deno_runtime::ops::otel::OtelConfig; use deno_runtime::ops::process::NpmProcessStateProviderRc; use 
deno_runtime::ops::worker_host::CreateWebWorkerCb; use deno_runtime::web_worker::WebWorker; @@ -42,7 +44,7 @@ use deno_runtime::WorkerExecutionMode; use deno_runtime::WorkerLogLevel; use deno_semver::npm::NpmPackageReqReference; use deno_terminal::colors; -use node_resolver::NodeResolution; +use node_resolver::NodeModuleKind; use node_resolver::NodeResolutionMode; use tokio::select; @@ -56,21 +58,22 @@ use crate::util::file_watcher::WatcherCommunicator; use crate::util::file_watcher::WatcherRestartMode; use crate::version; -pub struct ModuleLoaderAndSourceMapGetter { +pub struct CreateModuleLoaderResult { pub module_loader: Rc<dyn ModuleLoader>, + pub node_require_loader: Rc<dyn NodeRequireLoader>, } pub trait ModuleLoaderFactory: Send + Sync { fn create_for_main( &self, root_permissions: PermissionsContainer, - ) -> ModuleLoaderAndSourceMapGetter; + ) -> CreateModuleLoaderResult; fn create_for_worker( &self, parent_permissions: PermissionsContainer, permissions: PermissionsContainer, - ) -> ModuleLoaderAndSourceMapGetter; + ) -> CreateModuleLoaderResult; } #[async_trait::async_trait(?Send)] @@ -80,6 +83,15 @@ pub trait HmrRunner: Send + Sync { async fn run(&mut self) -> Result<(), AnyError>; } +pub trait CliCodeCache: code_cache::CodeCache { + /// Gets if the code cache is still enabled. 
+ fn enabled(&self) -> bool { + true + } + + fn as_code_cache(self: Arc<Self>) -> Arc<dyn code_cache::CodeCache>; +} + #[async_trait::async_trait(?Send)] pub trait CoverageCollector: Send + Sync { async fn start_collecting(&mut self) -> Result<(), AnyError>; @@ -107,7 +119,6 @@ pub struct CliMainWorkerOptions { pub inspect_wait: bool, pub strace_ops: Option<Vec<String>>, pub is_inspecting: bool, - pub is_npm_main: bool, pub location: Option<Url>, pub argv0: Option<String>, pub node_debug: Option<String>, @@ -125,7 +136,7 @@ pub struct CliMainWorkerOptions { struct SharedWorkerState { blob_store: Arc<BlobStore>, broadcast_channel: InMemoryBroadcastChannel, - code_cache: Option<Arc<dyn code_cache::CodeCache>>, + code_cache: Option<Arc<dyn CliCodeCache>>, compiled_wasm_module_store: CompiledWasmModuleStore, feature_checker: Arc<FeatureChecker>, fs: Arc<dyn deno_fs::FileSystem>, @@ -135,20 +146,26 @@ struct SharedWorkerState { module_loader_factory: Box<dyn ModuleLoaderFactory>, node_resolver: Arc<NodeResolver>, npm_resolver: Arc<dyn CliNpmResolver>, + pkg_json_resolver: Arc<PackageJsonResolver>, root_cert_store_provider: Arc<dyn RootCertStoreProvider>, root_permissions: PermissionsContainer, shared_array_buffer_store: SharedArrayBufferStore, storage_key_resolver: StorageKeyResolver, options: CliMainWorkerOptions, subcommand: DenoSubcommand, + otel_config: Option<OtelConfig>, // `None` means OpenTelemetry is disabled. 
} impl SharedWorkerState { - pub fn create_node_init_services(&self) -> NodeExtInitServices { + pub fn create_node_init_services( + &self, + node_require_loader: NodeRequireLoaderRc, + ) -> NodeExtInitServices { NodeExtInitServices { - node_require_resolver: self.npm_resolver.clone().into_require_resolver(), + node_require_loader, node_resolver: self.node_resolver.clone(), - npm_resolver: self.npm_resolver.clone().into_npm_resolver(), + npm_resolver: self.npm_resolver.clone().into_npm_pkg_folder_resolver(), + pkg_json_resolver: self.pkg_json_resolver.clone(), } } @@ -159,7 +176,6 @@ impl SharedWorkerState { pub struct CliMainWorker { main_module: ModuleSpecifier, - is_main_cjs: bool, worker: MainWorker, shared: Arc<SharedWorkerState>, } @@ -181,17 +197,7 @@ impl CliMainWorker { log::debug!("main_module {}", self.main_module); - if self.is_main_cjs { - deno_node::load_cjs_module( - &mut self.worker.js_runtime, - &self.main_module.to_file_path().unwrap().to_string_lossy(), - true, - self.shared.options.inspect_brk, - )?; - } else { - self.execute_main_module_possibly_with_npm().await?; - } - + self.execute_main_module().await?; self.worker.dispatch_load_event()?; loop { @@ -279,22 +285,7 @@ impl CliMainWorker { /// Execute the given main module emitting load and unload events before and after execution /// respectively. 
pub async fn execute(&mut self) -> Result<(), AnyError> { - if self.inner.is_main_cjs { - deno_node::load_cjs_module( - &mut self.inner.worker.js_runtime, - &self - .inner - .main_module - .to_file_path() - .unwrap() - .to_string_lossy(), - true, - self.inner.shared.options.inspect_brk, - )?; - } else { - self.inner.execute_main_module_possibly_with_npm().await?; - } - + self.inner.execute_main_module().await?; self.inner.worker.dispatch_load_event()?; self.pending_unload = true; @@ -335,24 +326,13 @@ impl CliMainWorker { executor.execute().await } - pub async fn execute_main_module_possibly_with_npm( - &mut self, - ) -> Result<(), AnyError> { + pub async fn execute_main_module(&mut self) -> Result<(), AnyError> { let id = self.worker.preload_main_module(&self.main_module).await?; - self.evaluate_module_possibly_with_npm(id).await + self.worker.evaluate_module(id).await } - pub async fn execute_side_module_possibly_with_npm( - &mut self, - ) -> Result<(), AnyError> { + pub async fn execute_side_module(&mut self) -> Result<(), AnyError> { let id = self.worker.preload_side_module(&self.main_module).await?; - self.evaluate_module_possibly_with_npm(id).await - } - - async fn evaluate_module_possibly_with_npm( - &mut self, - id: ModuleId, - ) -> Result<(), AnyError> { self.worker.evaluate_module(id).await } @@ -422,7 +402,7 @@ impl CliMainWorkerFactory { #[allow(clippy::too_many_arguments)] pub fn new( blob_store: Arc<BlobStore>, - code_cache: Option<Arc<dyn code_cache::CodeCache>>, + code_cache: Option<Arc<dyn CliCodeCache>>, feature_checker: Arc<FeatureChecker>, fs: Arc<dyn deno_fs::FileSystem>, maybe_file_watcher_communicator: Option<Arc<WatcherCommunicator>>, @@ -431,11 +411,13 @@ impl CliMainWorkerFactory { module_loader_factory: Box<dyn ModuleLoaderFactory>, node_resolver: Arc<NodeResolver>, npm_resolver: Arc<dyn CliNpmResolver>, + pkg_json_resolver: Arc<PackageJsonResolver>, root_cert_store_provider: Arc<dyn RootCertStoreProvider>, root_permissions: 
PermissionsContainer, storage_key_resolver: StorageKeyResolver, subcommand: DenoSubcommand, options: CliMainWorkerOptions, + otel_config: Option<OtelConfig>, ) -> Self { Self { shared: Arc::new(SharedWorkerState { @@ -451,12 +433,14 @@ impl CliMainWorkerFactory { module_loader_factory, node_resolver, npm_resolver, + pkg_json_resolver, root_cert_store_provider, root_permissions, shared_array_buffer_store: Default::default(), storage_key_resolver, options, subcommand, + otel_config, }), } } @@ -486,7 +470,13 @@ impl CliMainWorkerFactory { stdio: deno_runtime::deno_io::Stdio, ) -> Result<CliMainWorker, AnyError> { let shared = &self.shared; - let (main_module, is_main_cjs) = if let Ok(package_ref) = + let CreateModuleLoaderResult { + module_loader, + node_require_loader, + } = shared + .module_loader_factory + .create_for_main(permissions.clone()); + let main_module = if let Ok(package_ref) = NpmPackageReqReference::from_specifier(&main_module) { if let Some(npm_resolver) = shared.npm_resolver.as_managed() { @@ -506,9 +496,8 @@ impl CliMainWorkerFactory { package_ref.req(), &referrer, )?; - let node_resolution = self + let main_module = self .resolve_binary_entrypoint(&package_folder, package_ref.sub_path())?; - let is_main_cjs = matches!(node_resolution, NodeResolution::CommonJs(_)); if let Some(lockfile) = &shared.maybe_lockfile { // For npm binary commands, ensure that the lockfile gets updated @@ -517,22 +506,11 @@ impl CliMainWorkerFactory { lockfile.write_if_changed()?; } - (node_resolution.into_url(), is_main_cjs) - } else if shared.options.is_npm_main - || shared.node_resolver.in_npm_package(&main_module) - { - let node_resolution = - shared.node_resolver.url_to_node_resolution(main_module)?; - let is_main_cjs = matches!(node_resolution, NodeResolution::CommonJs(_)); - (node_resolution.into_url(), is_main_cjs) + main_module } else { - let is_cjs = main_module.path().ends_with(".cjs"); - (main_module, is_cjs) + main_module }; - let 
ModuleLoaderAndSourceMapGetter { module_loader } = shared - .module_loader_factory - .create_for_main(permissions.clone()); let maybe_inspector_server = shared.maybe_inspector_server.clone(); let create_web_worker_cb = @@ -572,18 +550,22 @@ impl CliMainWorkerFactory { root_cert_store_provider: Some(shared.root_cert_store_provider.clone()), module_loader, fs: shared.fs.clone(), - node_services: Some(shared.create_node_init_services()), + node_services: Some( + shared.create_node_init_services(node_require_loader), + ), npm_process_state_provider: Some(shared.npm_process_state_provider()), blob_store: shared.blob_store.clone(), broadcast_channel: shared.broadcast_channel.clone(), + fetch_dns_resolver: Default::default(), shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()), compiled_wasm_module_store: Some( shared.compiled_wasm_module_store.clone(), ), feature_checker, permissions, - v8_code_cache: shared.code_cache.clone(), + v8_code_cache: shared.code_cache.clone().map(|c| c.as_code_cache()), }; + let options = WorkerOptions { bootstrap: BootstrapOptions { deno_version: crate::version::DENO_VERSION_INFO.deno.to_string(), @@ -610,10 +592,11 @@ impl CliMainWorkerFactory { mode, serve_port: shared.options.serve_port, serve_host: shared.options.serve_host.clone(), + otel_config: shared.otel_config.clone(), }, extensions: custom_extensions, startup_snapshot: crate::js::deno_isolate_init(), - create_params: None, + create_params: create_isolate_create_params(), unsafely_ignore_certificate_errors: shared .options .unsafely_ignore_certificate_errors @@ -657,7 +640,6 @@ impl CliMainWorkerFactory { Ok(CliMainWorker { main_module, - is_main_cjs, worker, shared: shared.clone(), }) @@ -667,19 +649,19 @@ impl CliMainWorkerFactory { &self, package_folder: &Path, sub_path: Option<&str>, - ) -> Result<NodeResolution, AnyError> { + ) -> Result<ModuleSpecifier, AnyError> { match self .shared .node_resolver .resolve_binary_export(package_folder, sub_path) { - 
Ok(node_resolution) => Ok(node_resolution), + Ok(specifier) => Ok(specifier), Err(original_err) => { // if the binary entrypoint was not found, fallback to regular node resolution let result = self.resolve_binary_entrypoint_fallback(package_folder, sub_path); match result { - Ok(Some(resolution)) => Ok(resolution), + Ok(Some(specifier)) => Ok(specifier), Ok(None) => Err(original_err.into()), Err(fallback_err) => { bail!("{:#}\n\nFallback failed: {:#}", original_err, fallback_err) @@ -694,7 +676,7 @@ impl CliMainWorkerFactory { &self, package_folder: &Path, sub_path: Option<&str>, - ) -> Result<Option<NodeResolution>, AnyError> { + ) -> Result<Option<ModuleSpecifier>, AnyError> { // only fallback if the user specified a sub path if sub_path.is_none() { // it's confusing to users if the package doesn't have any binary @@ -703,28 +685,24 @@ impl CliMainWorkerFactory { return Ok(None); } - let resolution = self + let specifier = self .shared .node_resolver .resolve_package_subpath_from_deno_module( package_folder, sub_path, /* referrer */ None, + NodeModuleKind::Esm, NodeResolutionMode::Execution, )?; - match &resolution { - NodeResolution::BuiltIn(_) => Ok(None), - NodeResolution::CommonJs(specifier) | NodeResolution::Esm(specifier) => { - if specifier - .to_file_path() - .map(|p| p.exists()) - .unwrap_or(false) - { - Ok(Some(resolution)) - } else { - bail!("Cannot find module '{}'", specifier) - } - } + if specifier + .to_file_path() + .map(|p| p.exists()) + .unwrap_or(false) + { + Ok(Some(specifier)) + } else { + bail!("Cannot find module '{}'", specifier) } } } @@ -736,11 +714,13 @@ fn create_web_worker_callback( Arc::new(move |args| { let maybe_inspector_server = shared.maybe_inspector_server.clone(); - let ModuleLoaderAndSourceMapGetter { module_loader } = - shared.module_loader_factory.create_for_worker( - args.parent_permissions.clone(), - args.permissions.clone(), - ); + let CreateModuleLoaderResult { + module_loader, + node_require_loader, + } = 
shared.module_loader_factory.create_for_worker( + args.parent_permissions.clone(), + args.permissions.clone(), + ); let create_web_worker_cb = create_web_worker_callback(shared.clone(), stdio.clone()); @@ -770,7 +750,9 @@ fn create_web_worker_callback( root_cert_store_provider: Some(shared.root_cert_store_provider.clone()), module_loader, fs: shared.fs.clone(), - node_services: Some(shared.create_node_init_services()), + node_services: Some( + shared.create_node_init_services(node_require_loader), + ), blob_store: shared.blob_store.clone(), broadcast_channel: shared.broadcast_channel.clone(), shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()), @@ -811,9 +793,11 @@ fn create_web_worker_callback( mode: WorkerExecutionMode::Worker, serve_port: shared.options.serve_port, serve_host: shared.options.serve_host.clone(), + otel_config: shared.otel_config.clone(), }, extensions: vec![], startup_snapshot: crate::js::deno_isolate_init(), + create_params: create_isolate_create_params(), unsafely_ignore_certificate_errors: shared .options .unsafely_ignore_certificate_errors @@ -834,6 +818,17 @@ fn create_web_worker_callback( }) } +/// By default V8 uses 1.4Gb heap limit which is meant for browser tabs. +/// Instead probe for the total memory on the system and use it instead +/// as a default. +pub fn create_isolate_create_params() -> Option<v8::CreateParams> { + let maybe_mem_info = deno_runtime::sys_info::mem_info(); + maybe_mem_info.map(|mem_info| { + v8::CreateParams::default() + .heap_limits_from_system_memory(mem_info.total, 0) + }) +} + #[allow(clippy::print_stdout)] #[allow(clippy::print_stderr)] #[cfg(test)] @@ -870,6 +865,7 @@ mod tests { node_services: Default::default(), npm_process_state_provider: Default::default(), root_cert_store_provider: Default::default(), + fetch_dns_resolver: Default::default(), shared_array_buffer_store: Default::default(), compiled_wasm_module_store: Default::default(), v8_code_cache: Default::default(), |