From 3bf147fe287ac779b20d318daba56b336f356adf Mon Sep 17 00:00:00 2001 From: David Sherret Date: Thu, 25 Jul 2024 19:08:14 -0400 Subject: refactor: decouple node resolution from deno_core (#24724) --- Cargo.lock | 22 + Cargo.toml | 2 + cli/Cargo.toml | 1 + cli/args/mod.rs | 4 +- cli/factory.rs | 7 +- cli/lsp/analysis.rs | 2 +- cli/lsp/resolver.rs | 10 +- cli/module_loader.rs | 2 +- cli/node.rs | 12 +- cli/npm/byonm.rs | 71 +- cli/npm/managed/mod.rs | 37 +- cli/npm/managed/resolvers/common.rs | 2 +- cli/npm/managed/resolvers/global.rs | 6 +- cli/npm/managed/resolvers/local.rs | 8 +- cli/npm/mod.rs | 8 +- cli/resolver.rs | 24 +- cli/standalone/mod.rs | 65 +- cli/tools/registry/pm.rs | 2 +- cli/tsc/mod.rs | 10 +- cli/worker.rs | 23 +- ext/fs/clippy.toml | 38 +- ext/fs/sync.rs | 62 -- ext/node/Cargo.toml | 3 +- ext/node/analyze.rs | 619 ----------- ext/node/errors.rs | 769 ------------- ext/node/global.rs | 2 +- ext/node/lib.rs | 161 ++- ext/node/ops/require.rs | 17 +- ext/node/ops/worker_threads.rs | 14 +- ext/node/package_json.rs | 70 -- ext/node/path.rs | 50 - ext/node/polyfill.rs | 14 - ext/node/resolution.rs | 2016 ---------------------------------- ext/node_resolver/Cargo.toml | 32 + ext/node_resolver/README.md | 6 + ext/node_resolver/analyze.rs | 624 +++++++++++ ext/node_resolver/clippy.toml | 48 + ext/node_resolver/env.rs | 39 + ext/node_resolver/errors.rs | 769 +++++++++++++ ext/node_resolver/lib.rs | 26 + ext/node_resolver/npm.rs | 41 + ext/node_resolver/package_json.rs | 53 + ext/node_resolver/path.rs | 142 +++ ext/node_resolver/resolution.rs | 2025 +++++++++++++++++++++++++++++++++++ ext/node_resolver/sync.rs | 86 ++ runtime/Cargo.toml | 1 + runtime/snapshot.rs | 2 +- runtime/web_worker.rs | 8 +- runtime/worker.rs | 10 +- 49 files changed, 4243 insertions(+), 3822 deletions(-) delete mode 100644 ext/node/analyze.rs delete mode 100644 ext/node/errors.rs delete mode 100644 ext/node/package_json.rs delete mode 100644 ext/node/path.rs delete mode 100644 
ext/node/resolution.rs create mode 100644 ext/node_resolver/Cargo.toml create mode 100644 ext/node_resolver/README.md create mode 100644 ext/node_resolver/analyze.rs create mode 100644 ext/node_resolver/clippy.toml create mode 100644 ext/node_resolver/env.rs create mode 100644 ext/node_resolver/errors.rs create mode 100644 ext/node_resolver/lib.rs create mode 100644 ext/node_resolver/npm.rs create mode 100644 ext/node_resolver/package_json.rs create mode 100644 ext/node_resolver/path.rs create mode 100644 ext/node_resolver/resolution.rs create mode 100644 ext/node_resolver/sync.rs diff --git a/Cargo.lock b/Cargo.lock index 019166214..4ff45842d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1167,6 +1167,7 @@ dependencies = [ "monch", "napi_sym", "nix 0.26.2", + "node_resolver", "notify", "once_cell", "open", @@ -1767,6 +1768,7 @@ dependencies = [ "libz-sys", "md-5", "md4", + "node_resolver", "num-bigint", "num-bigint-dig", "num-integer", @@ -1908,6 +1910,7 @@ dependencies = [ "log", "netif", "nix 0.26.2", + "node_resolver", "notify", "ntapi", "once_cell", @@ -4341,6 +4344,25 @@ dependencies = [ "libc", ] +[[package]] +name = "node_resolver" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "deno_media_type", + "deno_package_json", + "futures", + "lazy-regex", + "once_cell", + "path-clean", + "regex", + "serde_json", + "thiserror", + "tokio", + "url", +] + [[package]] name = "nom" version = "5.1.3" diff --git a/Cargo.toml b/Cargo.toml index 1e9f53e46..3902a028a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,6 +21,7 @@ members = [ "ext/napi", "ext/net", "ext/node", + "ext/node_resolver", "ext/url", "ext/web", "ext/webgpu", @@ -83,6 +84,7 @@ deno_webgpu = { version = "0.129.0", path = "./ext/webgpu" } deno_webidl = { version = "0.162.0", path = "./ext/webidl" } deno_websocket = { version = "0.167.0", path = "./ext/websocket" } deno_webstorage = { version = "0.157.0", path = "./ext/webstorage" } +node_resolver = { version = "0.1.0", path = 
"./ext/node_resolver" } aes = "=0.8.3" anyhow = "1.0.57" diff --git a/cli/Cargo.toml b/cli/Cargo.toml index a19dcbe3d..cda410c63 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -80,6 +80,7 @@ deno_task_shell = "=0.17.0" deno_terminal.workspace = true eszip = "=0.72.2" napi_sym.workspace = true +node_resolver.workspace = true async-trait.workspace = true base32.workspace = true diff --git a/cli/args/mod.rs b/cli/args/mod.rs index aea6ed8a8..ea79aaa46 100644 --- a/cli/args/mod.rs +++ b/cli/args/mod.rs @@ -820,9 +820,7 @@ impl CliOptions { WorkspaceDiscoverOptions { fs: Default::default(), // use real fs deno_json_cache: None, - pkg_json_cache: Some( - &deno_runtime::deno_node::PackageJsonThreadLocalCache, - ), + pkg_json_cache: Some(&node_resolver::PackageJsonThreadLocalCache), workspace_cache: None, config_parse_options, additional_config_file_names, diff --git a/cli/factory.rs b/cli/factory.rs index aeab3cbc4..3e618e239 100644 --- a/cli/factory.rs +++ b/cli/factory.rs @@ -62,13 +62,14 @@ use deno_core::futures::FutureExt; use deno_core::FeatureChecker; use deno_runtime::deno_fs; -use deno_runtime::deno_node::analyze::NodeCodeTranslator; +use deno_runtime::deno_node::DenoFsNodeResolverEnv; use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_tls::rustls::RootCertStore; use deno_runtime::deno_tls::RootCertStoreProvider; use deno_runtime::deno_web::BlobStore; use deno_runtime::inspector_server::InspectorServer; use log::warn; +use node_resolver::analyze::NodeCodeTranslator; use once_cell::sync::OnceCell; use std::future::Future; use std::sync::Arc; @@ -553,7 +554,7 @@ impl CliFactory { .get_or_try_init_async( async { Ok(Arc::new(NodeResolver::new( - self.fs().clone(), + DenoFsNodeResolverEnv::new(self.fs().clone()), self.npm_resolver().await?.clone().into_npm_resolver(), ))) } @@ -577,7 +578,7 @@ impl CliFactory { Ok(Arc::new(NodeCodeTranslator::new( cjs_esm_analyzer, - self.fs().clone(), + DenoFsNodeResolverEnv::new(self.fs().clone()), 
self.node_resolver().await?.clone(), self.npm_resolver().await?.clone().into_npm_resolver(), ))) diff --git a/cli/lsp/analysis.rs b/cli/lsp/analysis.rs index 97730ac7e..ec8bd4a28 100644 --- a/cli/lsp/analysis.rs +++ b/cli/lsp/analysis.rs @@ -23,7 +23,6 @@ use deno_core::serde::Serialize; use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::ModuleSpecifier; -use deno_runtime::deno_node::NpmResolver; use deno_runtime::deno_node::PathClean; use deno_semver::jsr::JsrPackageNvReference; use deno_semver::jsr::JsrPackageReqReference; @@ -34,6 +33,7 @@ use deno_semver::package::PackageReq; use deno_semver::package::PackageReqReference; use deno_semver::Version; use import_map::ImportMap; +use node_resolver::NpmResolver; use once_cell::sync::Lazy; use regex::Regex; use std::cmp::Ordering; diff --git a/cli/lsp/resolver.rs b/cli/lsp/resolver.rs index bdfd5fd3e..d6fc3096c 100644 --- a/cli/lsp/resolver.rs +++ b/cli/lsp/resolver.rs @@ -35,11 +35,7 @@ use deno_graph::GraphImport; use deno_graph::ModuleSpecifier; use deno_npm::NpmSystemInfo; use deno_runtime::deno_fs; -use deno_runtime::deno_node::errors::ClosestPkgJsonError; -use deno_runtime::deno_node::NodeResolution; -use deno_runtime::deno_node::NodeResolutionMode; use deno_runtime::deno_node::NodeResolver; -use deno_runtime::deno_node::NpmResolver; use deno_runtime::deno_node::PackageJson; use deno_runtime::fs_util::specifier_to_file_path; use deno_semver::jsr::JsrPackageReqReference; @@ -47,6 +43,10 @@ use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; use indexmap::IndexMap; +use node_resolver::errors::ClosestPkgJsonError; +use node_resolver::NodeResolution; +use node_resolver::NodeResolutionMode; +use node_resolver::NpmResolver; use std::borrow::Cow; use std::collections::BTreeMap; use std::collections::BTreeSet; @@ -496,7 +496,7 @@ fn create_node_resolver( let npm_resolver = npm_resolver?; let fs = Arc::new(deno_fs::RealFs); let 
node_resolver_inner = Arc::new(NodeResolver::new( - fs.clone(), + deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), npm_resolver.clone().into_npm_resolver(), )); Some(Arc::new(CliNodeResolver::new( diff --git a/cli/module_loader.rs b/cli/module_loader.rs index 2e047d36d..bda4e58d8 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -64,9 +64,9 @@ use deno_graph::Module; use deno_graph::ModuleGraph; use deno_graph::Resolution; use deno_runtime::code_cache; -use deno_runtime::deno_node::NodeResolutionMode; use deno_runtime::deno_permissions::PermissionsContainer; use deno_semver::npm::NpmPackageReqReference; +use node_resolver::NodeResolutionMode; pub async fn load_top_level_deps(factory: &CliFactory) -> Result<(), AnyError> { let npm_resolver = factory.npm_resolver().await?; diff --git a/cli/node.rs b/cli/node.rs index 5ecbacdc7..0fd18e299 100644 --- a/cli/node.rs +++ b/cli/node.rs @@ -6,10 +6,11 @@ use deno_ast::MediaType; use deno_ast::ModuleSpecifier; use deno_core::error::AnyError; use deno_runtime::deno_fs; -use deno_runtime::deno_node::analyze::CjsAnalysis as ExtNodeCjsAnalysis; -use deno_runtime::deno_node::analyze::CjsAnalysisExports; -use deno_runtime::deno_node::analyze::CjsCodeAnalyzer; -use deno_runtime::deno_node::analyze::NodeCodeTranslator; +use deno_runtime::deno_node::DenoFsNodeResolverEnv; +use node_resolver::analyze::CjsAnalysis as ExtNodeCjsAnalysis; +use node_resolver::analyze::CjsAnalysisExports; +use node_resolver::analyze::CjsCodeAnalyzer; +use node_resolver::analyze::NodeCodeTranslator; use serde::Deserialize; use serde::Serialize; @@ -17,7 +18,8 @@ use crate::cache::CacheDBHash; use crate::cache::NodeAnalysisCache; use crate::util::fs::canonicalize_path_maybe_not_exists; -pub type CliNodeCodeTranslator = NodeCodeTranslator; +pub type CliNodeCodeTranslator = + NodeCodeTranslator; /// Resolves a specifier that is pointing into a node_modules folder. 
/// diff --git a/cli/npm/byonm.rs b/cli/npm/byonm.rs index 86c9badac..a0f23fc66 100644 --- a/cli/npm/byonm.rs +++ b/cli/npm/byonm.rs @@ -11,14 +11,18 @@ use deno_core::error::AnyError; use deno_core::serde_json; use deno_package_json::PackageJsonDepValue; use deno_runtime::deno_fs::FileSystem; -use deno_runtime::deno_node::errors::PackageFolderResolveError; -use deno_runtime::deno_node::errors::PackageFolderResolveIoError; -use deno_runtime::deno_node::errors::PackageNotFoundError; -use deno_runtime::deno_node::load_pkg_json; +use deno_runtime::deno_node::DenoPkgJsonFsAdapter; use deno_runtime::deno_node::NodePermissions; -use deno_runtime::deno_node::NpmResolver; +use deno_runtime::deno_node::NodeRequireResolver; +use deno_runtime::deno_node::NpmProcessStateProvider; use deno_runtime::deno_node::PackageJson; use deno_semver::package::PackageReq; +use node_resolver::errors::PackageFolderResolveError; +use node_resolver::errors::PackageFolderResolveIoError; +use node_resolver::errors::PackageJsonLoadError; +use node_resolver::errors::PackageNotFoundError; +use node_resolver::load_pkg_json; +use node_resolver::NpmResolver; use crate::args::NpmProcessState; use crate::args::NpmProcessStateKind; @@ -49,6 +53,15 @@ pub struct ByonmCliNpmResolver { root_node_modules_dir: Option, } +impl ByonmCliNpmResolver { + fn load_pkg_json( + &self, + path: &Path, + ) -> Result>, PackageJsonLoadError> { + load_pkg_json(&DenoPkgJsonFsAdapter(self.fs.as_ref()), path) + } +} + impl ByonmCliNpmResolver { /// Finds the ancestor package.json that contains the specified dependency. 
pub fn find_ancestor_package_json_with_dep( @@ -60,9 +73,7 @@ impl ByonmCliNpmResolver { let mut current_folder = referrer_path.parent()?; loop { let pkg_json_path = current_folder.join("package.json"); - if let Ok(Some(pkg_json)) = - load_pkg_json(self.fs.as_ref(), &pkg_json_path) - { + if let Ok(Some(pkg_json)) = self.load_pkg_json(&pkg_json_path) { if let Some(deps) = &pkg_json.dependencies { if deps.contains_key(dep_name) { return Some(pkg_json); @@ -119,9 +130,7 @@ impl ByonmCliNpmResolver { let mut current_path = file_path.as_path(); while let Some(dir_path) = current_path.parent() { let package_json_path = dir_path.join("package.json"); - if let Some(pkg_json) = - load_pkg_json(self.fs.as_ref(), &package_json_path)? - { + if let Some(pkg_json) = self.load_pkg_json(&package_json_path)? { if let Some(alias) = resolve_alias_from_pkg_json(req, pkg_json.as_ref()) { @@ -136,9 +145,7 @@ impl ByonmCliNpmResolver { if let Some(root_node_modules_dir) = &self.root_node_modules_dir { let root_pkg_json_path = root_node_modules_dir.parent().unwrap().join("package.json"); - if let Some(pkg_json) = - load_pkg_json(self.fs.as_ref(), &root_pkg_json_path)? - { + if let Some(pkg_json) = self.load_pkg_json(&root_pkg_json_path)? 
{ if let Some(alias) = resolve_alias_from_pkg_json(req, pkg_json.as_ref()) { return Ok((pkg_json, alias)); @@ -158,17 +165,6 @@ impl ByonmCliNpmResolver { } impl NpmResolver for ByonmCliNpmResolver { - fn get_npm_process_state(&self) -> String { - serde_json::to_string(&NpmProcessState { - kind: NpmProcessStateKind::Byonm, - local_node_modules_path: self - .root_node_modules_dir - .as_ref() - .map(|p| p.to_string_lossy().to_string()), - }) - .unwrap() - } - fn resolve_package_folder_from_package( &self, name: &str, @@ -226,7 +222,9 @@ impl NpmResolver for ByonmCliNpmResolver { .to_ascii_lowercase() .contains("/node_modules/") } +} +impl NodeRequireResolver for ByonmCliNpmResolver { fn ensure_read_permission( &self, permissions: &mut dyn NodePermissions, @@ -242,11 +240,34 @@ impl NpmResolver for ByonmCliNpmResolver { } } +impl NpmProcessStateProvider for ByonmCliNpmResolver { + fn get_npm_process_state(&self) -> String { + serde_json::to_string(&NpmProcessState { + kind: NpmProcessStateKind::Byonm, + local_node_modules_path: self + .root_node_modules_dir + .as_ref() + .map(|p| p.to_string_lossy().to_string()), + }) + .unwrap() + } +} + impl CliNpmResolver for ByonmCliNpmResolver { fn into_npm_resolver(self: Arc) -> Arc { self } + fn into_require_resolver(self: Arc) -> Arc { + self + } + + fn into_process_state_provider( + self: Arc, + ) -> Arc { + self + } + fn clone_snapshotted(&self) -> Arc { Arc::new(Self { fs: self.fs.clone(), diff --git a/cli/npm/managed/mod.rs b/cli/npm/managed/mod.rs index 602733cab..1561d3969 100644 --- a/cli/npm/managed/mod.rs +++ b/cli/npm/managed/mod.rs @@ -20,12 +20,14 @@ use deno_npm::NpmPackageId; use deno_npm::NpmResolutionPackage; use deno_npm::NpmSystemInfo; use deno_runtime::deno_fs::FileSystem; -use deno_runtime::deno_node::errors::PackageFolderResolveError; -use deno_runtime::deno_node::errors::PackageFolderResolveIoError; use deno_runtime::deno_node::NodePermissions; -use deno_runtime::deno_node::NpmResolver; +use 
deno_runtime::deno_node::NodeRequireResolver; +use deno_runtime::deno_node::NpmProcessStateProvider; use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; +use node_resolver::errors::PackageFolderResolveError; +use node_resolver::errors::PackageFolderResolveIoError; +use node_resolver::NpmResolver; use resolution::AddPkgReqsResult; use crate::args::CliLockfile; @@ -531,14 +533,6 @@ fn npm_process_state( } impl NpmResolver for ManagedCliNpmResolver { - /// Gets the state of npm for the process. - fn get_npm_process_state(&self) -> String { - npm_process_state( - self.resolution.serialized_valid_snapshot(), - self.fs_resolver.node_modules_path().map(|p| p.as_path()), - ) - } - fn resolve_package_folder_from_package( &self, name: &str, @@ -563,7 +557,9 @@ impl NpmResolver for ManagedCliNpmResolver { debug_assert!(root_dir_url.as_str().ends_with('/')); specifier.as_ref().starts_with(root_dir_url.as_str()) } +} +impl NodeRequireResolver for ManagedCliNpmResolver { fn ensure_read_permission( &self, permissions: &mut dyn NodePermissions, @@ -573,11 +569,30 @@ impl NpmResolver for ManagedCliNpmResolver { } } +impl NpmProcessStateProvider for ManagedCliNpmResolver { + fn get_npm_process_state(&self) -> String { + npm_process_state( + self.resolution.serialized_valid_snapshot(), + self.fs_resolver.node_modules_path().map(|p| p.as_path()), + ) + } +} + impl CliNpmResolver for ManagedCliNpmResolver { fn into_npm_resolver(self: Arc) -> Arc { self } + fn into_require_resolver(self: Arc) -> Arc { + self + } + + fn into_process_state_provider( + self: Arc, + ) -> Arc { + self + } + fn clone_snapshotted(&self) -> Arc { // create a new snapshotted npm resolution and resolver let npm_resolution = Arc::new(NpmResolution::new( diff --git a/cli/npm/managed/resolvers/common.rs b/cli/npm/managed/resolvers/common.rs index dffa1b75c..170dc2ae6 100644 --- a/cli/npm/managed/resolvers/common.rs +++ b/cli/npm/managed/resolvers/common.rs @@ -18,8 +18,8 @@ use 
deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageId; use deno_npm::NpmResolutionPackage; use deno_runtime::deno_fs::FileSystem; -use deno_runtime::deno_node::errors::PackageFolderResolveError; use deno_runtime::deno_node::NodePermissions; +use node_resolver::errors::PackageFolderResolveError; use crate::npm::managed::cache::TarballCache; diff --git a/cli/npm/managed/resolvers/global.rs b/cli/npm/managed/resolvers/global.rs index e7a57fc23..7f8f285f3 100644 --- a/cli/npm/managed/resolvers/global.rs +++ b/cli/npm/managed/resolvers/global.rs @@ -14,10 +14,10 @@ use deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageId; use deno_npm::NpmSystemInfo; use deno_runtime::deno_fs::FileSystem; -use deno_runtime::deno_node::errors::PackageFolderResolveError; -use deno_runtime::deno_node::errors::PackageNotFoundError; -use deno_runtime::deno_node::errors::ReferrerNotFoundError; use deno_runtime::deno_node::NodePermissions; +use node_resolver::errors::PackageFolderResolveError; +use node_resolver::errors::PackageNotFoundError; +use node_resolver::errors::ReferrerNotFoundError; use super::super::cache::NpmCache; use super::super::cache::TarballCache; diff --git a/cli/npm/managed/resolvers/local.rs b/cli/npm/managed/resolvers/local.rs index cda78548b..b741fd15d 100644 --- a/cli/npm/managed/resolvers/local.rs +++ b/cli/npm/managed/resolvers/local.rs @@ -32,12 +32,12 @@ use deno_npm::NpmPackageId; use deno_npm::NpmResolutionPackage; use deno_npm::NpmSystemInfo; use deno_runtime::deno_fs; -use deno_runtime::deno_node::errors::PackageFolderResolveError; -use deno_runtime::deno_node::errors::PackageFolderResolveIoError; -use deno_runtime::deno_node::errors::PackageNotFoundError; -use deno_runtime::deno_node::errors::ReferrerNotFoundError; use deno_runtime::deno_node::NodePermissions; use deno_semver::package::PackageNv; +use node_resolver::errors::PackageFolderResolveError; +use node_resolver::errors::PackageFolderResolveIoError; +use 
node_resolver::errors::PackageNotFoundError; +use node_resolver::errors::ReferrerNotFoundError; use serde::Deserialize; use serde::Serialize; diff --git a/cli/npm/mod.rs b/cli/npm/mod.rs index 8ae81de24..f883883aa 100644 --- a/cli/npm/mod.rs +++ b/cli/npm/mod.rs @@ -13,10 +13,12 @@ use deno_ast::ModuleSpecifier; use deno_core::error::AnyError; use deno_core::serde_json; use deno_npm::registry::NpmPackageInfo; -use deno_runtime::deno_node::NpmResolver; +use deno_runtime::deno_node::NodeRequireResolver; +use deno_runtime::deno_node::NpmProcessStateProvider; use deno_runtime::deno_permissions::PermissionsContainer; use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; +use node_resolver::NpmResolver; use crate::args::npm_registry_url; use crate::file_fetcher::FileFetcher; @@ -63,6 +65,10 @@ pub enum InnerCliNpmResolverRef<'a> { pub trait CliNpmResolver: NpmResolver { fn into_npm_resolver(self: Arc) -> Arc; + fn into_require_resolver(self: Arc) -> Arc; + fn into_process_state_provider( + self: Arc, + ) -> Arc; fn clone_snapshotted(&self) -> Arc; diff --git a/cli/resolver.rs b/cli/resolver.rs index 5296b42b8..18804c025 100644 --- a/cli/resolver.rs +++ b/cli/resolver.rs @@ -23,23 +23,23 @@ use deno_npm::resolution::NpmResolutionError; use deno_package_json::PackageJsonDepValue; use deno_runtime::deno_fs; use deno_runtime::deno_fs::FileSystem; -use deno_runtime::deno_node::errors::ClosestPkgJsonError; -use deno_runtime::deno_node::errors::NodeResolveError; -use deno_runtime::deno_node::errors::NodeResolveErrorKind; -use deno_runtime::deno_node::errors::PackageFolderResolveErrorKind; -use deno_runtime::deno_node::errors::PackageFolderResolveIoError; -use deno_runtime::deno_node::errors::PackageNotFoundError; -use deno_runtime::deno_node::errors::PackageResolveErrorKind; -use deno_runtime::deno_node::errors::UrlToNodeResolutionError; use deno_runtime::deno_node::is_builtin_node_module; -use deno_runtime::deno_node::NodeModuleKind; -use 
deno_runtime::deno_node::NodeResolution; -use deno_runtime::deno_node::NodeResolutionMode; use deno_runtime::deno_node::NodeResolver; -use deno_runtime::deno_node::PackageJson; use deno_runtime::fs_util::specifier_to_file_path; use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageReq; +use node_resolver::errors::ClosestPkgJsonError; +use node_resolver::errors::NodeResolveError; +use node_resolver::errors::NodeResolveErrorKind; +use node_resolver::errors::PackageFolderResolveErrorKind; +use node_resolver::errors::PackageFolderResolveIoError; +use node_resolver::errors::PackageNotFoundError; +use node_resolver::errors::PackageResolveErrorKind; +use node_resolver::errors::UrlToNodeResolutionError; +use node_resolver::NodeModuleKind; +use node_resolver::NodeResolution; +use node_resolver::NodeResolutionMode; +use node_resolver::PackageJson; use std::borrow::Cow; use std::path::Path; use std::path::PathBuf; diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs index c91f3bec9..1538807f1 100644 --- a/cli/standalone/mod.rs +++ b/cli/standalone/mod.rs @@ -5,34 +5,6 @@ #![allow(dead_code)] #![allow(unused_imports)] -use crate::args::create_default_npmrc; -use crate::args::get_root_cert_store; -use crate::args::npm_pkg_req_ref_to_binary_command; -use crate::args::CaData; -use crate::args::CacheSetting; -use crate::args::PackageJsonInstallDepsProvider; -use crate::args::StorageKeyResolver; -use crate::cache::Caches; -use crate::cache::DenoDirProvider; -use crate::cache::NodeAnalysisCache; -use crate::http_util::HttpClientProvider; -use crate::node::CliCjsCodeAnalyzer; -use crate::npm::create_cli_npm_resolver; -use crate::npm::CliNpmResolverByonmCreateOptions; -use crate::npm::CliNpmResolverCreateOptions; -use crate::npm::CliNpmResolverManagedCreateOptions; -use crate::npm::CliNpmResolverManagedSnapshotOption; -use crate::npm::NpmCacheDir; -use crate::resolver::CjsResolutionStore; -use crate::resolver::CliNodeResolver; -use 
crate::resolver::NpmModuleLoader; -use crate::util::progress_bar::ProgressBar; -use crate::util::progress_bar::ProgressBarStyle; -use crate::util::v8::construct_v8_flags; -use crate::worker::CliMainWorkerFactory; -use crate::worker::CliMainWorkerOptions; -use crate::worker::ModuleLoaderAndSourceMapGetter; -use crate::worker::ModuleLoaderFactory; use deno_ast::MediaType; use deno_config::workspace::MappedResolution; use deno_config::workspace::MappedResolutionError; @@ -53,8 +25,6 @@ use deno_core::ResolutionKind; use deno_npm::npm_rc::ResolvedNpmRc; use deno_package_json::PackageJsonDepValue; use deno_runtime::deno_fs; -use deno_runtime::deno_node::analyze::NodeCodeTranslator; -use deno_runtime::deno_node::NodeResolutionMode; use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_permissions::Permissions; use deno_runtime::deno_permissions::PermissionsContainer; @@ -65,10 +35,41 @@ use deno_runtime::WorkerLogLevel; use deno_semver::npm::NpmPackageReqReference; use eszip::EszipRelativeFileBaseUrl; use import_map::parse_from_json; +use node_resolver::analyze::NodeCodeTranslator; +use node_resolver::NodeResolutionMode; use std::borrow::Cow; use std::rc::Rc; use std::sync::Arc; +use crate::args::create_default_npmrc; +use crate::args::get_root_cert_store; +use crate::args::npm_pkg_req_ref_to_binary_command; +use crate::args::CaData; +use crate::args::CacheSetting; +use crate::args::PackageJsonInstallDepsProvider; +use crate::args::StorageKeyResolver; +use crate::cache::Caches; +use crate::cache::DenoDirProvider; +use crate::cache::NodeAnalysisCache; +use crate::http_util::HttpClientProvider; +use crate::node::CliCjsCodeAnalyzer; +use crate::npm::create_cli_npm_resolver; +use crate::npm::CliNpmResolverByonmCreateOptions; +use crate::npm::CliNpmResolverCreateOptions; +use crate::npm::CliNpmResolverManagedCreateOptions; +use crate::npm::CliNpmResolverManagedSnapshotOption; +use crate::npm::NpmCacheDir; +use crate::resolver::CjsResolutionStore; +use 
crate::resolver::CliNodeResolver; +use crate::resolver::NpmModuleLoader; +use crate::util::progress_bar::ProgressBar; +use crate::util::progress_bar::ProgressBarStyle; +use crate::util::v8::construct_v8_flags; +use crate::worker::CliMainWorkerFactory; +use crate::worker::CliMainWorkerOptions; +use crate::worker::ModuleLoaderAndSourceMapGetter; +use crate::worker::ModuleLoaderFactory; + pub mod binary; mod file_system; mod virtual_fs; @@ -549,7 +550,7 @@ pub async fn run( let has_node_modules_dir = npm_resolver.root_node_modules_path().is_some(); let node_resolver = Arc::new(NodeResolver::new( - fs.clone(), + deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), npm_resolver.clone().into_npm_resolver(), )); let cjs_resolutions = Arc::new(CjsResolutionStore::default()); @@ -559,7 +560,7 @@ pub async fn run( CliCjsCodeAnalyzer::new(node_analysis_cache, fs.clone()); let node_code_translator = Arc::new(NodeCodeTranslator::new( cjs_esm_code_analyzer, - fs.clone(), + deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), node_resolver.clone(), npm_resolver.clone().into_npm_resolver(), )); diff --git a/cli/tools/registry/pm.rs b/cli/tools/registry/pm.rs index 233e68240..2986c1c2a 100644 --- a/cli/tools/registry/pm.rs +++ b/cli/tools/registry/pm.rs @@ -308,7 +308,7 @@ pub async fn add( .context("Failed to update configuration file")?; // clear the previously cached package.json from memory before reloading it - deno_node::PackageJsonThreadLocalCache::clear(); + node_resolver::PackageJsonThreadLocalCache::clear(); // make a new CliFactory to pick up the updated config file let cli_factory = CliFactory::from_flags(flags); // cache deps diff --git a/cli/tsc/mod.rs b/cli/tsc/mod.rs index 424b5c3d3..ac7fc48e3 100644 --- a/cli/tsc/mod.rs +++ b/cli/tsc/mod.rs @@ -30,14 +30,14 @@ use deno_graph::GraphKind; use deno_graph::Module; use deno_graph::ModuleGraph; use deno_graph::ResolutionResolved; -use deno_runtime::deno_node::errors::NodeJsErrorCode; -use 
deno_runtime::deno_node::errors::NodeJsErrorCoded; -use deno_runtime::deno_node::NodeModuleKind; -use deno_runtime::deno_node::NodeResolution; -use deno_runtime::deno_node::NodeResolutionMode; use deno_runtime::deno_node::NodeResolver; use deno_semver::npm::NpmPackageReqReference; use lsp_types::Url; +use node_resolver::errors::NodeJsErrorCode; +use node_resolver::errors::NodeJsErrorCoded; +use node_resolver::NodeModuleKind; +use node_resolver::NodeResolution; +use node_resolver::NodeResolutionMode; use once_cell::sync::Lazy; use std::borrow::Cow; use std::collections::HashMap; diff --git a/cli/worker.rs b/cli/worker.rs index 0d7e61c50..8673804ab 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -22,8 +22,7 @@ use deno_runtime::code_cache; use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel; use deno_runtime::deno_fs; use deno_runtime::deno_node; -use deno_runtime::deno_node::NodeResolution; -use deno_runtime::deno_node::NodeResolutionMode; +use deno_runtime::deno_node::NodeExtInitServices; use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_tls::RootCertStoreProvider; @@ -40,6 +39,8 @@ use deno_runtime::WorkerExecutionMode; use deno_runtime::WorkerLogLevel; use deno_semver::npm::NpmPackageReqReference; use deno_terminal::colors; +use node_resolver::NodeResolution; +use node_resolver::NodeResolutionMode; use tokio::select; use crate::args::CliLockfile; @@ -144,7 +145,17 @@ struct SharedWorkerState { } impl SharedWorkerState { - // Currently empty + pub fn create_node_init_services(&self) -> NodeExtInitServices { + NodeExtInitServices { + node_require_resolver: self.npm_resolver.clone().into_require_resolver(), + node_resolver: self.node_resolver.clone(), + npm_process_state_provider: self + .npm_resolver + .clone() + .into_process_state_provider(), + npm_resolver: self.npm_resolver.clone().into_npm_resolver(), + } + } } pub struct CliMainWorker { @@ -599,8 +610,7 @@ impl 
CliMainWorkerFactory { strace_ops: shared.options.strace_ops.clone(), module_loader, fs: shared.fs.clone(), - node_resolver: Some(shared.node_resolver.clone()), - npm_resolver: Some(shared.npm_resolver.clone().into_npm_resolver()), + node_services: Some(shared.create_node_init_services()), get_error_class_fn: Some(&errors::get_error_class_name), cache_storage_dir, origin_storage_dir, @@ -793,8 +803,7 @@ fn create_web_worker_callback( format_js_error_fn: Some(Arc::new(format_js_error)), module_loader, fs: shared.fs.clone(), - node_resolver: Some(shared.node_resolver.clone()), - npm_resolver: Some(shared.npm_resolver.clone().into_npm_resolver()), + node_services: Some(shared.create_node_init_services()), worker_type: args.worker_type, maybe_inspector_server, get_error_class_fn: Some(&errors::get_error_class_name), diff --git a/ext/fs/clippy.toml b/ext/fs/clippy.toml index 023769214..943d28c6d 100644 --- a/ext/fs/clippy.toml +++ b/ext/fs/clippy.toml @@ -1,24 +1,24 @@ disallowed-methods = [ { path = "std::env::current_dir", reason = "File system operations should be done using FileSystem trait" }, - { path = "std::path::Path::canonicalize", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::is_dir", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::is_file", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::is_symlink", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::metadata", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::read_dir", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::read_link", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::symlink_metadata", reason = "File system operations should be done using 
NodeFs trait" }, - { path = "std::path::Path::try_exists", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::exists", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::canonicalize", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::is_dir", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::is_file", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::is_symlink", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::metadata", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::read_dir", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::read_link", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::symlink_metadata", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::try_exists", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::Path::canonicalize", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::is_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::is_file", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::is_symlink", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::metadata", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::read_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = 
"std::path::Path::read_link", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::symlink_metadata", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::try_exists", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::exists", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::canonicalize", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::is_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::is_file", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::is_symlink", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::metadata", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::read_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::read_link", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::symlink_metadata", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::try_exists", reason = "File system operations should be done using FileSystem trait" }, { path = "std::env::set_current_dir", reason = "File system operations should be done using FileSystem trait" }, { path = "std::env::temp_dir", reason = "File system operations should be done using FileSystem trait" }, { path = "std::fs::canonicalize", reason = "File system operations should be done using FileSystem trait" }, diff --git a/ext/fs/sync.rs b/ext/fs/sync.rs index 83f1f8bc3..6a913f658 100644 --- a/ext/fs/sync.rs +++ b/ext/fs/sync.rs @@ -6,80 
+6,18 @@ pub use inner::*; mod inner { #![allow(clippy::disallowed_types)] - use std::ops::Deref; - use std::ops::DerefMut; pub use std::sync::Arc as MaybeArc; pub use core::marker::Send as MaybeSend; pub use core::marker::Sync as MaybeSync; - - pub struct MaybeArcMutexGuard<'lock, T>(std::sync::MutexGuard<'lock, T>); - - impl<'lock, T> Deref for MaybeArcMutexGuard<'lock, T> { - type Target = std::sync::MutexGuard<'lock, T>; - fn deref(&self) -> &std::sync::MutexGuard<'lock, T> { - &self.0 - } - } - - impl<'lock, T> DerefMut for MaybeArcMutexGuard<'lock, T> { - fn deref_mut(&mut self) -> &mut std::sync::MutexGuard<'lock, T> { - &mut self.0 - } - } - - #[derive(Debug)] - pub struct MaybeArcMutex(std::sync::Arc>); - impl MaybeArcMutex { - pub fn new(val: T) -> Self { - Self(std::sync::Arc::new(std::sync::Mutex::new(val))) - } - } - - impl<'lock, T> MaybeArcMutex { - pub fn lock(&'lock self) -> MaybeArcMutexGuard<'lock, T> { - MaybeArcMutexGuard(self.0.lock().unwrap()) - } - } } #[cfg(not(feature = "sync_fs"))] mod inner { - use std::ops::Deref; - use std::ops::DerefMut; pub use std::rc::Rc as MaybeArc; pub trait MaybeSync {} impl MaybeSync for T where T: ?Sized {} pub trait MaybeSend {} impl MaybeSend for T where T: ?Sized {} - - pub struct MaybeArcMutexGuard<'lock, T>(std::cell::RefMut<'lock, T>); - - impl<'lock, T> Deref for MaybeArcMutexGuard<'lock, T> { - type Target = std::cell::RefMut<'lock, T>; - fn deref(&self) -> &std::cell::RefMut<'lock, T> { - &self.0 - } - } - - impl<'lock, T> DerefMut for MaybeArcMutexGuard<'lock, T> { - fn deref_mut(&mut self) -> &mut std::cell::RefMut<'lock, T> { - &mut self.0 - } - } - - #[derive(Debug)] - pub struct MaybeArcMutex(std::rc::Rc>); - impl MaybeArcMutex { - pub fn new(val: T) -> Self { - Self(std::rc::Rc::new(std::cell::RefCell::new(val))) - } - } - - impl<'lock, T> MaybeArcMutex { - pub fn lock(&'lock self) -> MaybeArcMutexGuard<'lock, T> { - MaybeArcMutexGuard(self.0.borrow_mut()) - } - } } diff --git 
a/ext/node/Cargo.toml b/ext/node/Cargo.toml index ed168eace..00afb64eb 100644 --- a/ext/node/Cargo.toml +++ b/ext/node/Cargo.toml @@ -14,7 +14,7 @@ description = "Node compatibility for Deno" path = "lib.rs" [features] -sync_fs = ["deno_package_json/sync"] +sync_fs = ["deno_package_json/sync", "node_resolver/sync"] [dependencies] aead-gcm-stream = "0.1" @@ -55,6 +55,7 @@ libc.workspace = true libz-sys.workspace = true md-5 = { version = "0.10.5", features = ["oid"] } md4 = "0.10.2" +node_resolver.workspace = true num-bigint.workspace = true num-bigint-dig = "0.8.2" num-integer = "0.1.45" diff --git a/ext/node/analyze.rs b/ext/node/analyze.rs deleted file mode 100644 index 3513a8105..000000000 --- a/ext/node/analyze.rs +++ /dev/null @@ -1,619 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -use std::collections::BTreeSet; -use std::collections::HashSet; -use std::path::Path; -use std::path::PathBuf; - -use deno_core::anyhow; -use deno_core::anyhow::Context; -use deno_core::futures::future::LocalBoxFuture; -use deno_core::futures::stream::FuturesUnordered; -use deno_core::futures::FutureExt; -use deno_core::futures::StreamExt; -use deno_core::ModuleSpecifier; -use once_cell::sync::Lazy; - -use deno_core::error::AnyError; - -use crate::package_json::load_pkg_json; -use crate::path::to_file_specifier; -use crate::resolution::NodeResolverRc; -use crate::NodeModuleKind; -use crate::NodeResolutionMode; -use crate::NpmResolverRc; -use crate::PathClean; - -#[derive(Debug, Clone)] -pub enum CjsAnalysis { - /// File was found to be an ES module and the translator should - /// load the code as ESM. - Esm(String), - Cjs(CjsAnalysisExports), -} - -#[derive(Debug, Clone)] -pub struct CjsAnalysisExports { - pub exports: Vec, - pub reexports: Vec, -} - -/// Code analyzer for CJS and ESM files. 
-#[async_trait::async_trait(?Send)] -pub trait CjsCodeAnalyzer { - /// Analyzes CommonJs code for exports and reexports, which is - /// then used to determine the wrapper ESM module exports. - /// - /// Note that the source is provided by the caller when the caller - /// already has it. If the source is needed by the implementation, - /// then it can use the provided source, or otherwise load it if - /// necessary. - async fn analyze_cjs( - &self, - specifier: &ModuleSpecifier, - maybe_source: Option, - ) -> Result; -} - -pub struct NodeCodeTranslator { - cjs_code_analyzer: TCjsCodeAnalyzer, - fs: deno_fs::FileSystemRc, - node_resolver: NodeResolverRc, - npm_resolver: NpmResolverRc, -} - -impl NodeCodeTranslator { - pub fn new( - cjs_code_analyzer: TCjsCodeAnalyzer, - fs: deno_fs::FileSystemRc, - node_resolver: NodeResolverRc, - npm_resolver: NpmResolverRc, - ) -> Self { - Self { - cjs_code_analyzer, - fs, - node_resolver, - npm_resolver, - } - } - - /// Translates given CJS module into ESM. This function will perform static - /// analysis on the file to find defined exports and reexports. - /// - /// For all discovered reexports the analysis will be performed recursively. - /// - /// If successful a source code for equivalent ES module is returned. 
- pub async fn translate_cjs_to_esm( - &self, - entry_specifier: &ModuleSpecifier, - source: Option, - ) -> Result { - let mut temp_var_count = 0; - - let analysis = self - .cjs_code_analyzer - .analyze_cjs(entry_specifier, source) - .await?; - - let analysis = match analysis { - CjsAnalysis::Esm(source) => return Ok(source), - CjsAnalysis::Cjs(analysis) => analysis, - }; - - let mut source = vec![ - r#"import {createRequire as __internalCreateRequire} from "node:module"; - const require = __internalCreateRequire(import.meta.url);"# - .to_string(), - ]; - - // use a BTreeSet to make the output deterministic for v8's code cache - let mut all_exports = analysis.exports.into_iter().collect::>(); - - if !analysis.reexports.is_empty() { - let mut errors = Vec::new(); - self - .analyze_reexports( - entry_specifier, - analysis.reexports, - &mut all_exports, - &mut errors, - ) - .await; - - // surface errors afterwards in a deterministic way - if !errors.is_empty() { - errors.sort_by_cached_key(|e| e.to_string()); - return Err(errors.remove(0)); - } - } - - source.push(format!( - "const mod = require(\"{}\");", - entry_specifier - .to_file_path() - .unwrap() - .to_str() - .unwrap() - .replace('\\', "\\\\") - .replace('\'', "\\\'") - .replace('\"', "\\\"") - )); - - for export in &all_exports { - if export.as_str() != "default" { - add_export( - &mut source, - export, - &format!("mod[\"{}\"]", escape_for_double_quote_string(export)), - &mut temp_var_count, - ); - } - } - - source.push("export default mod;".to_string()); - - let translated_source = source.join("\n"); - Ok(translated_source) - } - - async fn analyze_reexports<'a>( - &'a self, - entry_specifier: &url::Url, - reexports: Vec, - all_exports: &mut BTreeSet, - // this goes through the modules concurrently, so collect - // the errors in order to be deterministic - errors: &mut Vec, - ) { - struct Analysis { - reexport_specifier: url::Url, - referrer: url::Url, - analysis: CjsAnalysis, - } - - type AnalysisFuture<'a> 
= LocalBoxFuture<'a, Result>; - - let mut handled_reexports: HashSet = HashSet::default(); - handled_reexports.insert(entry_specifier.clone()); - let mut analyze_futures: FuturesUnordered> = - FuturesUnordered::new(); - let cjs_code_analyzer = &self.cjs_code_analyzer; - let mut handle_reexports = - |referrer: url::Url, - reexports: Vec, - analyze_futures: &mut FuturesUnordered>, - errors: &mut Vec| { - // 1. Resolve the re-exports and start a future to analyze each one - for reexport in reexports { - let result = self.resolve( - &reexport, - &referrer, - // FIXME(bartlomieju): check if these conditions are okay, probably - // should be `deno-require`, because `deno` is already used in `esm_resolver.rs` - &["deno", "require", "default"], - NodeResolutionMode::Execution, - ); - let reexport_specifier = match result { - Ok(specifier) => specifier, - Err(err) => { - errors.push(err); - continue; - } - }; - - if !handled_reexports.insert(reexport_specifier.clone()) { - continue; - } - - let referrer = referrer.clone(); - let future = async move { - let analysis = cjs_code_analyzer - .analyze_cjs(&reexport_specifier, None) - .await - .with_context(|| { - format!( - "Could not load '{}' ({}) referenced from {}", - reexport, reexport_specifier, referrer - ) - })?; - - Ok(Analysis { - reexport_specifier, - referrer, - analysis, - }) - } - .boxed_local(); - analyze_futures.push(future); - } - }; - - handle_reexports( - entry_specifier.clone(), - reexports, - &mut analyze_futures, - errors, - ); - - while let Some(analysis_result) = analyze_futures.next().await { - // 2. 
Look at the analysis result and resolve its exports and re-exports - let Analysis { - reexport_specifier, - referrer, - analysis, - } = match analysis_result { - Ok(analysis) => analysis, - Err(err) => { - errors.push(err); - continue; - } - }; - match analysis { - CjsAnalysis::Esm(_) => { - // todo(dsherret): support this once supporting requiring ES modules - errors.push(anyhow::anyhow!( - "Cannot require ES module '{}' from '{}'", - reexport_specifier, - referrer, - )); - } - CjsAnalysis::Cjs(analysis) => { - if !analysis.reexports.is_empty() { - handle_reexports( - reexport_specifier.clone(), - analysis.reexports, - &mut analyze_futures, - errors, - ); - } - - all_exports.extend( - analysis - .exports - .into_iter() - .filter(|e| e.as_str() != "default"), - ); - } - } - } - } - - // todo(dsherret): what is going on here? Isn't this a bunch of duplicate code? - fn resolve( - &self, - specifier: &str, - referrer: &ModuleSpecifier, - conditions: &[&str], - mode: NodeResolutionMode, - ) -> Result { - if specifier.starts_with('/') { - todo!(); - } - - let referrer_path = referrer.to_file_path().unwrap(); - if specifier.starts_with("./") || specifier.starts_with("../") { - if let Some(parent) = referrer_path.parent() { - return self - .file_extension_probe(parent.join(specifier), &referrer_path) - .map(|p| to_file_specifier(&p)); - } else { - todo!(); - } - } - - // We've got a bare specifier or maybe bare_specifier/blah.js" - let (package_specifier, package_subpath) = - parse_specifier(specifier).unwrap(); - - // todo(dsherret): use not_found error on not found here - let module_dir = self.npm_resolver.resolve_package_folder_from_package( - package_specifier.as_str(), - referrer, - )?; - - let package_json_path = module_dir.join("package.json"); - let maybe_package_json = load_pkg_json(&*self.fs, &package_json_path)?; - if let Some(package_json) = maybe_package_json { - if let Some(exports) = &package_json.exports { - return self - .node_resolver - 
.package_exports_resolve( - &package_json_path, - &package_subpath, - exports, - Some(referrer), - NodeModuleKind::Esm, - conditions, - mode, - ) - .map_err(AnyError::from); - } - - // old school - if package_subpath != "." { - let d = module_dir.join(package_subpath); - if self.fs.is_dir_sync(&d) { - // subdir might have a package.json that specifies the entrypoint - let package_json_path = d.join("package.json"); - let maybe_package_json = - load_pkg_json(&*self.fs, &package_json_path)?; - if let Some(package_json) = maybe_package_json { - if let Some(main) = package_json.main(NodeModuleKind::Cjs) { - return Ok(to_file_specifier(&d.join(main).clean())); - } - } - - return Ok(to_file_specifier(&d.join("index.js").clean())); - } - return self - .file_extension_probe(d, &referrer_path) - .map(|p| to_file_specifier(&p)); - } else if let Some(main) = package_json.main(NodeModuleKind::Cjs) { - return Ok(to_file_specifier(&module_dir.join(main).clean())); - } else { - return Ok(to_file_specifier(&module_dir.join("index.js").clean())); - } - } - - // as a fallback, attempt to resolve it via the ancestor directories - let mut last = referrer_path.as_path(); - while let Some(parent) = last.parent() { - if !self.npm_resolver.in_npm_package_at_dir_path(parent) { - break; - } - let path = if parent.ends_with("node_modules") { - parent.join(specifier) - } else { - parent.join("node_modules").join(specifier) - }; - if let Ok(path) = self.file_extension_probe(path, &referrer_path) { - return Ok(to_file_specifier(&path)); - } - last = parent; - } - - Err(not_found(specifier, &referrer_path)) - } - - fn file_extension_probe( - &self, - p: PathBuf, - referrer: &Path, - ) -> Result { - let p = p.clean(); - if self.fs.exists_sync(&p) { - let file_name = p.file_name().unwrap(); - let p_js = - p.with_file_name(format!("{}.js", file_name.to_str().unwrap())); - if self.fs.is_file_sync(&p_js) { - return Ok(p_js); - } else if self.fs.is_dir_sync(&p) { - return Ok(p.join("index.js")); - } 
else { - return Ok(p); - } - } else if let Some(file_name) = p.file_name() { - { - let p_js = - p.with_file_name(format!("{}.js", file_name.to_str().unwrap())); - if self.fs.is_file_sync(&p_js) { - return Ok(p_js); - } - } - { - let p_json = - p.with_file_name(format!("{}.json", file_name.to_str().unwrap())); - if self.fs.is_file_sync(&p_json) { - return Ok(p_json); - } - } - } - Err(not_found(&p.to_string_lossy(), referrer)) - } -} - -static RESERVED_WORDS: Lazy> = Lazy::new(|| { - HashSet::from([ - "abstract", - "arguments", - "async", - "await", - "boolean", - "break", - "byte", - "case", - "catch", - "char", - "class", - "const", - "continue", - "debugger", - "default", - "delete", - "do", - "double", - "else", - "enum", - "eval", - "export", - "extends", - "false", - "final", - "finally", - "float", - "for", - "function", - "get", - "goto", - "if", - "implements", - "import", - "in", - "instanceof", - "int", - "interface", - "let", - "long", - "mod", - "native", - "new", - "null", - "package", - "private", - "protected", - "public", - "return", - "set", - "short", - "static", - "super", - "switch", - "synchronized", - "this", - "throw", - "throws", - "transient", - "true", - "try", - "typeof", - "var", - "void", - "volatile", - "while", - "with", - "yield", - ]) -}); - -fn add_export( - source: &mut Vec, - name: &str, - initializer: &str, - temp_var_count: &mut usize, -) { - fn is_valid_var_decl(name: &str) -> bool { - // it's ok to be super strict here - if name.is_empty() { - return false; - } - - if let Some(first) = name.chars().next() { - if !first.is_ascii_alphabetic() && first != '_' && first != '$' { - return false; - } - } - - name - .chars() - .all(|c| c.is_ascii_alphanumeric() || c == '_' || c == '$') - } - - // TODO(bartlomieju): Node actually checks if a given export exists in `exports` object, - // but it might not be necessary here since our analysis is more detailed? 
- if RESERVED_WORDS.contains(name) || !is_valid_var_decl(name) { - *temp_var_count += 1; - // we can't create an identifier with a reserved word or invalid identifier name, - // so assign it to a temporary variable that won't have a conflict, then re-export - // it as a string - source.push(format!( - "const __deno_export_{temp_var_count}__ = {initializer};" - )); - source.push(format!( - "export {{ __deno_export_{temp_var_count}__ as \"{}\" }};", - escape_for_double_quote_string(name) - )); - } else { - source.push(format!("export const {name} = {initializer};")); - } -} - -fn parse_specifier(specifier: &str) -> Option<(String, String)> { - let mut separator_index = specifier.find('/'); - let mut valid_package_name = true; - // let mut is_scoped = false; - if specifier.is_empty() { - valid_package_name = false; - } else if specifier.starts_with('@') { - // is_scoped = true; - if let Some(index) = separator_index { - separator_index = specifier[index + 1..].find('/').map(|i| i + index + 1); - } else { - valid_package_name = false; - } - } - - let package_name = if let Some(index) = separator_index { - specifier[0..index].to_string() - } else { - specifier.to_string() - }; - - // Package name cannot have leading . and cannot have percent-encoding or separators. 
- for ch in package_name.chars() { - if ch == '%' || ch == '\\' { - valid_package_name = false; - break; - } - } - - if !valid_package_name { - return None; - } - - let package_subpath = if let Some(index) = separator_index { - format!(".{}", specifier.chars().skip(index).collect::()) - } else { - ".".to_string() - }; - - Some((package_name, package_subpath)) -} - -fn not_found(path: &str, referrer: &Path) -> AnyError { - let msg = format!( - "[ERR_MODULE_NOT_FOUND] Cannot find module \"{}\" imported from \"{}\"", - path, - referrer.to_string_lossy() - ); - std::io::Error::new(std::io::ErrorKind::NotFound, msg).into() -} - -fn escape_for_double_quote_string(text: &str) -> String { - text.replace('\\', "\\\\").replace('"', "\\\"") -} -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_add_export() { - let mut temp_var_count = 0; - let mut source = vec![]; - - let exports = vec!["static", "server", "app", "dashed-export", "3d"]; - for export in exports { - add_export(&mut source, export, "init", &mut temp_var_count); - } - assert_eq!( - source, - vec![ - "const __deno_export_1__ = init;".to_string(), - "export { __deno_export_1__ as \"static\" };".to_string(), - "export const server = init;".to_string(), - "export const app = init;".to_string(), - "const __deno_export_2__ = init;".to_string(), - "export { __deno_export_2__ as \"dashed-export\" };".to_string(), - "const __deno_export_3__ = init;".to_string(), - "export { __deno_export_3__ as \"3d\" };".to_string(), - ] - ) - } - - #[test] - fn test_parse_specifier() { - assert_eq!( - parse_specifier("@some-package/core/actions"), - Some(("@some-package/core".to_string(), "./actions".to_string())) - ); - } -} diff --git a/ext/node/errors.rs b/ext/node/errors.rs deleted file mode 100644 index 64625d32f..000000000 --- a/ext/node/errors.rs +++ /dev/null @@ -1,769 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
- -use std::borrow::Cow; -use std::fmt::Write; -use std::path::PathBuf; - -use deno_core::ModuleSpecifier; -use thiserror::Error; - -use crate::NodeModuleKind; -use crate::NodeResolutionMode; - -macro_rules! kinded_err { - ($name:ident, $kind_name:ident) => { - #[derive(Error, Debug)] - #[error(transparent)] - pub struct $name(pub Box<$kind_name>); - - impl $name { - pub fn as_kind(&self) -> &$kind_name { - &self.0 - } - - pub fn into_kind(self) -> $kind_name { - *self.0 - } - } - - impl From for $name - where - $kind_name: From, - { - fn from(err: E) -> Self { - $name(Box::new($kind_name::from(err))) - } - } - }; -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -#[allow(non_camel_case_types)] -pub enum NodeJsErrorCode { - ERR_INVALID_MODULE_SPECIFIER, - ERR_INVALID_PACKAGE_CONFIG, - ERR_INVALID_PACKAGE_TARGET, - ERR_MODULE_NOT_FOUND, - ERR_PACKAGE_IMPORT_NOT_DEFINED, - ERR_PACKAGE_PATH_NOT_EXPORTED, - ERR_UNKNOWN_FILE_EXTENSION, - ERR_UNSUPPORTED_DIR_IMPORT, - ERR_UNSUPPORTED_ESM_URL_SCHEME, - /// Deno specific since Node doesn't support TypeScript. 
- ERR_TYPES_NOT_FOUND, -} - -impl std::fmt::Display for NodeJsErrorCode { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.as_str()) - } -} - -impl NodeJsErrorCode { - pub fn as_str(&self) -> &'static str { - use NodeJsErrorCode::*; - match self { - ERR_INVALID_MODULE_SPECIFIER => "ERR_INVALID_MODULE_SPECIFIER", - ERR_INVALID_PACKAGE_CONFIG => "ERR_INVALID_PACKAGE_CONFIG", - ERR_INVALID_PACKAGE_TARGET => "ERR_INVALID_PACKAGE_TARGET", - ERR_MODULE_NOT_FOUND => "ERR_MODULE_NOT_FOUND", - ERR_PACKAGE_IMPORT_NOT_DEFINED => "ERR_PACKAGE_IMPORT_NOT_DEFINED", - ERR_PACKAGE_PATH_NOT_EXPORTED => "ERR_PACKAGE_PATH_NOT_EXPORTED", - ERR_UNKNOWN_FILE_EXTENSION => "ERR_UNKNOWN_FILE_EXTENSION", - ERR_UNSUPPORTED_DIR_IMPORT => "ERR_UNSUPPORTED_DIR_IMPORT", - ERR_UNSUPPORTED_ESM_URL_SCHEME => "ERR_UNSUPPORTED_ESM_URL_SCHEME", - ERR_TYPES_NOT_FOUND => "ERR_TYPES_NOT_FOUND", - } - } -} - -pub trait NodeJsErrorCoded { - fn code(&self) -> NodeJsErrorCode; -} - -kinded_err!( - ResolvePkgSubpathFromDenoModuleError, - ResolvePkgSubpathFromDenoModuleErrorKind -); - -impl NodeJsErrorCoded for ResolvePkgSubpathFromDenoModuleError { - fn code(&self) -> NodeJsErrorCode { - use ResolvePkgSubpathFromDenoModuleErrorKind::*; - match self.as_kind() { - PackageSubpathResolve(e) => e.code(), - UrlToNodeResolution(e) => e.code(), - } - } -} - -#[derive(Debug, Error)] -pub enum ResolvePkgSubpathFromDenoModuleErrorKind { - #[error(transparent)] - PackageSubpathResolve(#[from] PackageSubpathResolveError), - #[error(transparent)] - UrlToNodeResolution(#[from] UrlToNodeResolutionError), -} - -// todo(https://github.com/denoland/deno_core/issues/810): make this a TypeError -#[derive(Debug, Clone, Error)] -#[error( - "[{}] Invalid module '{}' {}{}", - self.code(), - request, - reason, - maybe_referrer.as_ref().map(|referrer| format!(" imported from '{}'", referrer)).unwrap_or_default() -)] -pub struct InvalidModuleSpecifierError { - pub request: String, - pub 
reason: Cow<'static, str>, - pub maybe_referrer: Option, -} - -impl NodeJsErrorCoded for InvalidModuleSpecifierError { - fn code(&self) -> NodeJsErrorCode { - NodeJsErrorCode::ERR_INVALID_MODULE_SPECIFIER - } -} - -kinded_err!(LegacyResolveError, LegacyResolveErrorKind); - -#[derive(Debug, Error)] -pub enum LegacyResolveErrorKind { - #[error(transparent)] - TypesNotFound(#[from] TypesNotFoundError), - #[error(transparent)] - ModuleNotFound(#[from] ModuleNotFoundError), -} - -impl NodeJsErrorCoded for LegacyResolveError { - fn code(&self) -> NodeJsErrorCode { - match self.as_kind() { - LegacyResolveErrorKind::TypesNotFound(e) => e.code(), - LegacyResolveErrorKind::ModuleNotFound(e) => e.code(), - } - } -} - -kinded_err!(PackageFolderResolveError, PackageFolderResolveErrorKind); - -#[derive(Debug, Error)] -#[error( - "Could not find package '{}' from referrer '{}'{}.", - package_name, - referrer, - referrer_extra.as_ref().map(|r| format!(" ({})", r)).unwrap_or_default() -)] -pub struct PackageNotFoundError { - pub package_name: String, - pub referrer: ModuleSpecifier, - /// Extra information about the referrer. - pub referrer_extra: Option, -} - -impl NodeJsErrorCoded for PackageNotFoundError { - fn code(&self) -> NodeJsErrorCode { - NodeJsErrorCode::ERR_MODULE_NOT_FOUND - } -} - -#[derive(Debug, Error)] -#[error( - "Could not find referrer npm package '{}'{}.", - referrer, - referrer_extra.as_ref().map(|r| format!(" ({})", r)).unwrap_or_default() -)] -pub struct ReferrerNotFoundError { - pub referrer: ModuleSpecifier, - /// Extra information about the referrer. 
- pub referrer_extra: Option, -} - -impl NodeJsErrorCoded for ReferrerNotFoundError { - fn code(&self) -> NodeJsErrorCode { - NodeJsErrorCode::ERR_MODULE_NOT_FOUND - } -} - -#[derive(Debug, Error)] -#[error("Failed resolving '{package_name}' from referrer '{referrer}'.")] -pub struct PackageFolderResolveIoError { - pub package_name: String, - pub referrer: ModuleSpecifier, - #[source] - pub source: std::io::Error, -} - -impl NodeJsErrorCoded for PackageFolderResolveIoError { - fn code(&self) -> NodeJsErrorCode { - NodeJsErrorCode::ERR_MODULE_NOT_FOUND - } -} - -impl NodeJsErrorCoded for PackageFolderResolveError { - fn code(&self) -> NodeJsErrorCode { - match self.as_kind() { - PackageFolderResolveErrorKind::PackageNotFound(e) => e.code(), - PackageFolderResolveErrorKind::ReferrerNotFound(e) => e.code(), - PackageFolderResolveErrorKind::Io(e) => e.code(), - } - } -} - -#[derive(Debug, Error)] -pub enum PackageFolderResolveErrorKind { - #[error(transparent)] - PackageNotFound(#[from] PackageNotFoundError), - #[error(transparent)] - ReferrerNotFound(#[from] ReferrerNotFoundError), - #[error(transparent)] - Io(#[from] PackageFolderResolveIoError), -} - -kinded_err!(PackageSubpathResolveError, PackageSubpathResolveErrorKind); - -impl NodeJsErrorCoded for PackageSubpathResolveError { - fn code(&self) -> NodeJsErrorCode { - match self.as_kind() { - PackageSubpathResolveErrorKind::PkgJsonLoad(e) => e.code(), - PackageSubpathResolveErrorKind::Exports(e) => e.code(), - PackageSubpathResolveErrorKind::LegacyResolve(e) => e.code(), - } - } -} - -#[derive(Debug, Error)] -pub enum PackageSubpathResolveErrorKind { - #[error(transparent)] - PkgJsonLoad(#[from] PackageJsonLoadError), - #[error(transparent)] - Exports(PackageExportsResolveError), - #[error(transparent)] - LegacyResolve(LegacyResolveError), -} - -#[derive(Debug, Error)] -#[error( - "Target '{}' not found from '{}'{}{}.", - target, - pkg_json_path.display(), - maybe_referrer.as_ref().map(|r| - format!( - " from{} 
referrer {}", - match referrer_kind { - NodeModuleKind::Esm => "", - NodeModuleKind::Cjs => " cjs", - }, - r - ) - ).unwrap_or_default(), - match mode { - NodeResolutionMode::Execution => "", - NodeResolutionMode::Types => " for types", - } -)] -pub struct PackageTargetNotFoundError { - pub pkg_json_path: PathBuf, - pub target: String, - pub maybe_referrer: Option, - pub referrer_kind: NodeModuleKind, - pub mode: NodeResolutionMode, -} - -impl NodeJsErrorCoded for PackageTargetNotFoundError { - fn code(&self) -> NodeJsErrorCode { - NodeJsErrorCode::ERR_MODULE_NOT_FOUND - } -} - -kinded_err!(PackageTargetResolveError, PackageTargetResolveErrorKind); - -impl NodeJsErrorCoded for PackageTargetResolveError { - fn code(&self) -> NodeJsErrorCode { - match self.as_kind() { - PackageTargetResolveErrorKind::NotFound(e) => e.code(), - PackageTargetResolveErrorKind::InvalidPackageTarget(e) => e.code(), - PackageTargetResolveErrorKind::InvalidModuleSpecifier(e) => e.code(), - PackageTargetResolveErrorKind::PackageResolve(e) => e.code(), - PackageTargetResolveErrorKind::TypesNotFound(e) => e.code(), - } - } -} - -#[derive(Debug, Error)] -pub enum PackageTargetResolveErrorKind { - #[error(transparent)] - NotFound(#[from] PackageTargetNotFoundError), - #[error(transparent)] - InvalidPackageTarget(#[from] InvalidPackageTargetError), - #[error(transparent)] - InvalidModuleSpecifier(#[from] InvalidModuleSpecifierError), - #[error(transparent)] - PackageResolve(#[from] PackageResolveError), - #[error(transparent)] - TypesNotFound(#[from] TypesNotFoundError), -} - -kinded_err!(PackageExportsResolveError, PackageExportsResolveErrorKind); - -impl NodeJsErrorCoded for PackageExportsResolveError { - fn code(&self) -> NodeJsErrorCode { - match self.as_kind() { - PackageExportsResolveErrorKind::PackagePathNotExported(e) => e.code(), - PackageExportsResolveErrorKind::PackageTargetResolve(e) => e.code(), - } - } -} - -#[derive(Debug, Error)] -pub enum PackageExportsResolveErrorKind { - 
#[error(transparent)] - PackagePathNotExported(#[from] PackagePathNotExportedError), - #[error(transparent)] - PackageTargetResolve(#[from] PackageTargetResolveError), -} - -#[derive(Debug, Error)] -#[error( - "[{}] Could not find types for '{}'{}", - self.code(), - self.0.code_specifier, - self.0.maybe_referrer.as_ref().map(|r| format!(" imported from '{}'", r)).unwrap_or_default(), - )] -pub struct TypesNotFoundError(pub Box); - -#[derive(Debug)] -pub struct TypesNotFoundErrorData { - pub code_specifier: ModuleSpecifier, - pub maybe_referrer: Option, -} - -impl NodeJsErrorCoded for TypesNotFoundError { - fn code(&self) -> NodeJsErrorCode { - NodeJsErrorCode::ERR_TYPES_NOT_FOUND - } -} - -#[derive(Debug, Error)] -#[error( - "[{}] Invalid package config. {}", - self.code(), - self.0 -)] -pub struct PackageJsonLoadError( - #[source] - #[from] - pub deno_package_json::PackageJsonLoadError, -); - -impl NodeJsErrorCoded for PackageJsonLoadError { - fn code(&self) -> NodeJsErrorCode { - NodeJsErrorCode::ERR_INVALID_PACKAGE_CONFIG - } -} - -kinded_err!(ClosestPkgJsonError, ClosestPkgJsonErrorKind); - -impl NodeJsErrorCoded for ClosestPkgJsonError { - fn code(&self) -> NodeJsErrorCode { - match self.as_kind() { - ClosestPkgJsonErrorKind::CanonicalizingDir(e) => e.code(), - ClosestPkgJsonErrorKind::Load(e) => e.code(), - } - } -} - -#[derive(Debug, Error)] -pub enum ClosestPkgJsonErrorKind { - #[error(transparent)] - CanonicalizingDir(#[from] CanonicalizingPkgJsonDirError), - #[error(transparent)] - Load(#[from] PackageJsonLoadError), -} - -#[derive(Debug, Error)] -#[error("[{}] Failed canonicalizing package.json directory '{}'.", self.code(), dir_path.display())] -pub struct CanonicalizingPkgJsonDirError { - pub dir_path: PathBuf, - #[source] - pub source: std::io::Error, -} - -impl NodeJsErrorCoded for CanonicalizingPkgJsonDirError { - fn code(&self) -> NodeJsErrorCode { - NodeJsErrorCode::ERR_MODULE_NOT_FOUND - } -} - -#[derive(Debug, Error)] -#[error("TypeScript files 
are not supported in npm packages: {specifier}")] -pub struct TypeScriptNotSupportedInNpmError { - pub specifier: ModuleSpecifier, -} - -impl NodeJsErrorCoded for TypeScriptNotSupportedInNpmError { - fn code(&self) -> NodeJsErrorCode { - NodeJsErrorCode::ERR_UNKNOWN_FILE_EXTENSION - } -} - -kinded_err!(UrlToNodeResolutionError, UrlToNodeResolutionErrorKind); - -impl NodeJsErrorCoded for UrlToNodeResolutionError { - fn code(&self) -> NodeJsErrorCode { - match self.as_kind() { - UrlToNodeResolutionErrorKind::TypeScriptNotSupported(e) => e.code(), - UrlToNodeResolutionErrorKind::ClosestPkgJson(e) => e.code(), - } - } -} - -#[derive(Debug, Error)] -pub enum UrlToNodeResolutionErrorKind { - #[error(transparent)] - TypeScriptNotSupported(#[from] TypeScriptNotSupportedInNpmError), - #[error(transparent)] - ClosestPkgJson(#[from] ClosestPkgJsonError), -} - -// todo(https://github.com/denoland/deno_core/issues/810): make this a TypeError -#[derive(Debug, Error)] -#[error( - "[{}] Package import specifier \"{}\" is not defined{}{}", - self.code(), - name, - package_json_path.as_ref().map(|p| format!(" in package {}", p.display())).unwrap_or_default(), - maybe_referrer.as_ref().map(|r| format!(" imported from '{}'", r)).unwrap_or_default(), -)] -pub struct PackageImportNotDefinedError { - pub name: String, - pub package_json_path: Option, - pub maybe_referrer: Option, -} - -impl NodeJsErrorCoded for PackageImportNotDefinedError { - fn code(&self) -> NodeJsErrorCode { - NodeJsErrorCode::ERR_PACKAGE_IMPORT_NOT_DEFINED - } -} - -kinded_err!(PackageImportsResolveError, PackageImportsResolveErrorKind); - -#[derive(Debug, Error)] -pub enum PackageImportsResolveErrorKind { - #[error(transparent)] - ClosestPkgJson(ClosestPkgJsonError), - #[error(transparent)] - InvalidModuleSpecifier(#[from] InvalidModuleSpecifierError), - #[error(transparent)] - NotDefined(#[from] PackageImportNotDefinedError), - #[error(transparent)] - Target(#[from] PackageTargetResolveError), -} - -impl 
NodeJsErrorCoded for PackageImportsResolveErrorKind { - fn code(&self) -> NodeJsErrorCode { - match self { - Self::ClosestPkgJson(e) => e.code(), - Self::InvalidModuleSpecifier(e) => e.code(), - Self::NotDefined(e) => e.code(), - Self::Target(e) => e.code(), - } - } -} - -kinded_err!(PackageResolveError, PackageResolveErrorKind); - -impl NodeJsErrorCoded for PackageResolveError { - fn code(&self) -> NodeJsErrorCode { - match self.as_kind() { - PackageResolveErrorKind::ClosestPkgJson(e) => e.code(), - PackageResolveErrorKind::InvalidModuleSpecifier(e) => e.code(), - PackageResolveErrorKind::PackageFolderResolve(e) => e.code(), - PackageResolveErrorKind::ExportsResolve(e) => e.code(), - PackageResolveErrorKind::SubpathResolve(e) => e.code(), - } - } -} - -#[derive(Debug, Error)] -pub enum PackageResolveErrorKind { - #[error(transparent)] - ClosestPkgJson(#[from] ClosestPkgJsonError), - #[error(transparent)] - InvalidModuleSpecifier(#[from] InvalidModuleSpecifierError), - #[error(transparent)] - PackageFolderResolve(#[from] PackageFolderResolveError), - #[error(transparent)] - ExportsResolve(#[from] PackageExportsResolveError), - #[error(transparent)] - SubpathResolve(#[from] PackageSubpathResolveError), -} - -#[derive(Debug, Error)] -#[error("Failed joining '{path}' from '{base}'.")] -pub struct NodeResolveRelativeJoinError { - pub path: String, - pub base: ModuleSpecifier, - #[source] - pub source: url::ParseError, -} - -#[derive(Debug, Error)] -#[error("Failed resolving specifier from data url referrer.")] -pub struct DataUrlReferrerError { - #[source] - pub source: url::ParseError, -} - -kinded_err!(NodeResolveError, NodeResolveErrorKind); - -#[derive(Debug, Error)] -pub enum NodeResolveErrorKind { - #[error(transparent)] - RelativeJoin(#[from] NodeResolveRelativeJoinError), - #[error(transparent)] - PackageImportsResolve(#[from] PackageImportsResolveError), - #[error(transparent)] - UnsupportedEsmUrlScheme(#[from] UnsupportedEsmUrlSchemeError), - 
#[error(transparent)] - DataUrlReferrer(#[from] DataUrlReferrerError), - #[error(transparent)] - PackageResolve(#[from] PackageResolveError), - #[error(transparent)] - TypesNotFound(#[from] TypesNotFoundError), - #[error(transparent)] - FinalizeResolution(#[from] FinalizeResolutionError), - #[error(transparent)] - UrlToNodeResolution(#[from] UrlToNodeResolutionError), -} - -kinded_err!(FinalizeResolutionError, FinalizeResolutionErrorKind); - -#[derive(Debug, Error)] -pub enum FinalizeResolutionErrorKind { - #[error(transparent)] - InvalidModuleSpecifierError(#[from] InvalidModuleSpecifierError), - #[error(transparent)] - ModuleNotFound(#[from] ModuleNotFoundError), - #[error(transparent)] - UnsupportedDirImport(#[from] UnsupportedDirImportError), -} - -impl NodeJsErrorCoded for FinalizeResolutionError { - fn code(&self) -> NodeJsErrorCode { - match self.as_kind() { - FinalizeResolutionErrorKind::InvalidModuleSpecifierError(e) => e.code(), - FinalizeResolutionErrorKind::ModuleNotFound(e) => e.code(), - FinalizeResolutionErrorKind::UnsupportedDirImport(e) => e.code(), - } - } -} - -#[derive(Debug, Error)] -#[error( - "[{}] Cannot find {} '{}'{}", - self.code(), - typ, - specifier, - maybe_referrer.as_ref().map(|referrer| format!(" imported from '{}'", referrer)).unwrap_or_default() -)] -pub struct ModuleNotFoundError { - pub specifier: ModuleSpecifier, - pub maybe_referrer: Option, - pub typ: &'static str, -} - -impl NodeJsErrorCoded for ModuleNotFoundError { - fn code(&self) -> NodeJsErrorCode { - NodeJsErrorCode::ERR_MODULE_NOT_FOUND - } -} - -#[derive(Debug, Error)] -#[error( - "[{}] Directory import '{}' is not supported resolving ES modules{}", - self.code(), - dir_url, - maybe_referrer.as_ref().map(|referrer| format!(" imported from '{}'", referrer)).unwrap_or_default(), -)] -pub struct UnsupportedDirImportError { - pub dir_url: ModuleSpecifier, - pub maybe_referrer: Option, -} - -impl NodeJsErrorCoded for UnsupportedDirImportError { - fn code(&self) -> 
NodeJsErrorCode { - NodeJsErrorCode::ERR_UNSUPPORTED_DIR_IMPORT - } -} - -#[derive(Debug)] -pub struct InvalidPackageTargetError { - pub pkg_json_path: PathBuf, - pub sub_path: String, - pub target: String, - pub is_import: bool, - pub maybe_referrer: Option, -} - -impl std::error::Error for InvalidPackageTargetError {} - -impl std::fmt::Display for InvalidPackageTargetError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let rel_error = !self.is_import - && !self.target.is_empty() - && !self.target.starts_with("./"); - f.write_char('[')?; - f.write_str(self.code().as_str())?; - f.write_char(']')?; - - if self.sub_path == "." { - assert!(!self.is_import); - write!( - f, - " Invalid \"exports\" main target {} defined in the package config {}", - self.target, - self.pkg_json_path.display() - )?; - } else { - let ie = if self.is_import { "imports" } else { "exports" }; - write!( - f, - " Invalid \"{}\" target {} defined for '{}' in the package config {}", - ie, - self.target, - self.sub_path, - self.pkg_json_path.display() - )?; - }; - - if let Some(referrer) = &self.maybe_referrer { - write!(f, " imported from '{}'", referrer)?; - } - if rel_error { - write!(f, "; target must start with \"./\"")?; - } - Ok(()) - } -} - -impl NodeJsErrorCoded for InvalidPackageTargetError { - fn code(&self) -> NodeJsErrorCode { - NodeJsErrorCode::ERR_INVALID_PACKAGE_TARGET - } -} - -#[derive(Debug)] -pub struct PackagePathNotExportedError { - pub pkg_json_path: PathBuf, - pub subpath: String, - pub maybe_referrer: Option, - pub mode: NodeResolutionMode, -} - -impl NodeJsErrorCoded for PackagePathNotExportedError { - fn code(&self) -> NodeJsErrorCode { - NodeJsErrorCode::ERR_PACKAGE_PATH_NOT_EXPORTED - } -} - -impl std::error::Error for PackagePathNotExportedError {} - -impl std::fmt::Display for PackagePathNotExportedError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.write_char('[')?; - f.write_str(self.code().as_str())?; - 
f.write_char(']')?; - - let types_msg = match self.mode { - NodeResolutionMode::Execution => String::new(), - NodeResolutionMode::Types => " for types".to_string(), - }; - if self.subpath == "." { - write!( - f, - " No \"exports\" main defined{} in '{}'", - types_msg, - self.pkg_json_path.display() - )?; - } else { - write!( - f, - " Package subpath '{}' is not defined{} by \"exports\" in '{}'", - self.subpath, - types_msg, - self.pkg_json_path.display() - )?; - }; - - if let Some(referrer) = &self.maybe_referrer { - write!(f, " imported from '{}'", referrer)?; - } - Ok(()) - } -} - -#[derive(Debug, Clone, Error)] -#[error( - "[{}] Only file and data URLs are supported by the default ESM loader.{} Received protocol '{}'", - self.code(), - if cfg!(windows) && url_scheme.len() == 2 { " On Windows, absolute path must be valid file:// URLS."} else { "" }, - url_scheme -)] -pub struct UnsupportedEsmUrlSchemeError { - pub url_scheme: String, -} - -impl NodeJsErrorCoded for UnsupportedEsmUrlSchemeError { - fn code(&self) -> NodeJsErrorCode { - NodeJsErrorCode::ERR_UNSUPPORTED_ESM_URL_SCHEME - } -} - -#[derive(Debug, Error)] -pub enum ResolvePkgJsonBinExportError { - #[error(transparent)] - PkgJsonLoad(#[from] PackageJsonLoadError), - #[error("Failed resolving binary export. '{}' did not exist", pkg_json_path.display())] - MissingPkgJson { pkg_json_path: PathBuf }, - #[error("Failed resolving binary export. 
{message}")] - InvalidBinProperty { message: String }, - #[error(transparent)] - UrlToNodeResolution(#[from] UrlToNodeResolutionError), -} - -#[derive(Debug, Error)] -pub enum ResolveBinaryCommandsError { - #[error(transparent)] - PkgJsonLoad(#[from] PackageJsonLoadError), - #[error("'{}' did not have a name", pkg_json_path.display())] - MissingPkgJsonName { pkg_json_path: PathBuf }, -} - -#[cfg(test)] -mod test { - use super::*; - - #[test] - fn types_resolution_package_path_not_exported() { - let separator_char = if cfg!(windows) { '\\' } else { '/' }; - assert_eq!( - PackagePathNotExportedError { - pkg_json_path: PathBuf::from("test_path").join("package.json"), - subpath: "./jsx-runtime".to_string(), - maybe_referrer: None, - mode: NodeResolutionMode::Types - }.to_string(), - format!("[ERR_PACKAGE_PATH_NOT_EXPORTED] Package subpath './jsx-runtime' is not defined for types by \"exports\" in 'test_path{separator_char}package.json'") - ); - assert_eq!( - PackagePathNotExportedError { - pkg_json_path: PathBuf::from("test_path").join("package.json"), - subpath: ".".to_string(), - maybe_referrer: None, - mode: NodeResolutionMode::Types - }.to_string(), - format!("[ERR_PACKAGE_PATH_NOT_EXPORTED] No \"exports\" main defined for types in 'test_path{separator_char}package.json'") - ); - } -} diff --git a/ext/node/global.rs b/ext/node/global.rs index 7f901fd03..618e68494 100644 --- a/ext/node/global.rs +++ b/ext/node/global.rs @@ -6,7 +6,7 @@ use deno_core::v8; use deno_core::v8::GetPropertyNamesArgs; use deno_core::v8::MapFnTo; -use crate::resolution::NodeResolverRc; +use crate::NodeResolverRc; // NOTE(bartlomieju): somehow calling `.map_fn_to()` multiple times on a function // returns two different pointers. 
That shouldn't be the case as `.map_fn_to()` diff --git a/ext/node/lib.rs b/ext/node/lib.rs index 21af5a094..2c8650577 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -5,7 +5,6 @@ use std::collections::HashSet; use std::path::Path; -use std::path::PathBuf; use deno_core::error::AnyError; use deno_core::located_script_name; @@ -15,24 +14,20 @@ use deno_core::url::Url; use deno_core::v8; use deno_core::v8::ExternalReference; use deno_core::JsRuntime; -use deno_core::ModuleSpecifier; use deno_core::OpState; use deno_fs::sync::MaybeSend; use deno_fs::sync::MaybeSync; +use node_resolver::NpmResolverRc; use once_cell::sync::Lazy; extern crate libz_sys as zlib; -pub mod analyze; -pub mod errors; mod global; mod ops; -mod package_json; -mod path; mod polyfill; -mod resolution; pub use deno_package_json::PackageJson; +pub use node_resolver::PathClean; pub use ops::ipc::ChildPipeFd; pub use ops::ipc::IpcJsonStreamResource; use ops::vm; @@ -40,17 +35,9 @@ pub use ops::vm::create_v8_context; pub use ops::vm::init_global_template; pub use ops::vm::ContextInitMode; pub use ops::vm::VM_CONTEXT_INDEX; -pub use package_json::load_pkg_json; -pub use package_json::PackageJsonThreadLocalCache; -pub use path::PathClean; pub use polyfill::is_builtin_node_module; pub use polyfill::SUPPORTED_BUILTIN_NODE_MODULES; pub use polyfill::SUPPORTED_BUILTIN_NODE_MODULES_WITH_PREFIX; -pub use resolution::NodeModuleKind; -pub use resolution::NodeResolution; -pub use resolution::NodeResolutionMode; -pub use resolution::NodeResolver; -use resolution::NodeResolverRc; use crate::global::global_object_middleware; use crate::global::global_template_middleware; @@ -149,9 +136,12 @@ impl NodePermissions for deno_permissions::PermissionsContainer { } #[allow(clippy::disallowed_types)] -pub type NpmResolverRc = deno_fs::sync::MaybeArc; +pub type NpmProcessStateProviderRc = + deno_fs::sync::MaybeArc; -pub trait NpmResolver: std::fmt::Debug + MaybeSend + MaybeSync { +pub trait NpmProcessStateProvider: + 
std::fmt::Debug + MaybeSend + MaybeSync +{ /// Gets a string containing the serialized npm state of the process. /// /// This will be set on the `DENO_DONT_USE_INTERNAL_NODE_COMPAT_STATE` environment @@ -161,34 +151,13 @@ pub trait NpmResolver: std::fmt::Debug + MaybeSend + MaybeSync { // This method is only used in the CLI. String::new() } +} - /// Resolves an npm package folder path from an npm package referrer. - fn resolve_package_folder_from_package( - &self, - specifier: &str, - referrer: &ModuleSpecifier, - ) -> Result; - - fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool; - - fn in_npm_package_at_dir_path(&self, path: &Path) -> bool { - let specifier = - match ModuleSpecifier::from_directory_path(path.to_path_buf().clean()) { - Ok(p) => p, - Err(_) => return false, - }; - self.in_npm_package(&specifier) - } - - fn in_npm_package_at_file_path(&self, path: &Path) -> bool { - let specifier = - match ModuleSpecifier::from_file_path(path.to_path_buf().clean()) { - Ok(p) => p, - Err(_) => return false, - }; - self.in_npm_package(&specifier) - } +#[allow(clippy::disallowed_types)] +pub type NodeRequireResolverRc = + deno_fs::sync::MaybeArc; +pub trait NodeRequireResolver: std::fmt::Debug + MaybeSend + MaybeSync { fn ensure_read_permission( &self, permissions: &mut dyn NodePermissions, @@ -223,10 +192,17 @@ fn op_node_is_promise_rejected(value: v8::Local) -> bool { #[op2] #[string] fn op_npm_process_state(state: &mut OpState) -> Result { - let npm_resolver = state.borrow_mut::(); + let npm_resolver = state.borrow_mut::(); Ok(npm_resolver.get_npm_process_state()) } +pub struct NodeExtInitServices { + pub node_require_resolver: NodeRequireResolverRc, + pub node_resolver: NodeResolverRc, + pub npm_process_state_provider: NpmProcessStateProviderRc, + pub npm_resolver: NpmResolverRc, +} + deno_core::extension!(deno_node, deps = [ deno_io, deno_fs ], parameters = [P: NodePermissions], @@ -643,21 +619,17 @@ deno_core::extension!(deno_node, "node:zlib" = 
"zlib.ts", ], options = { - maybe_node_resolver: Option, - maybe_npm_resolver: Option, + maybe_init: Option, fs: deno_fs::FileSystemRc, }, state = |state, options| { - // you should provide both of these or neither - debug_assert_eq!(options.maybe_node_resolver.is_some(), options.maybe_npm_resolver.is_some()); - state.put(options.fs.clone()); - if let Some(node_resolver) = &options.maybe_node_resolver { - state.put(node_resolver.clone()); - } - if let Some(npm_resolver) = &options.maybe_npm_resolver { - state.put(npm_resolver.clone()); + if let Some(init) = &options.maybe_init { + state.put(init.node_require_resolver.clone()); + state.put(init.node_resolver.clone()); + state.put(init.npm_resolver.clone()); + state.put(init.npm_process_state_provider.clone()); } }, global_template_middleware = global_template_middleware, @@ -783,3 +755,84 @@ pub fn load_cjs_module( js_runtime.execute_script(located_script_name!(), source_code)?; Ok(()) } + +pub type NodeResolver = node_resolver::NodeResolver; +#[allow(clippy::disallowed_types)] +pub type NodeResolverRc = + deno_fs::sync::MaybeArc>; + +#[derive(Debug)] +pub struct DenoFsNodeResolverEnv { + fs: deno_fs::FileSystemRc, +} + +impl DenoFsNodeResolverEnv { + pub fn new(fs: deno_fs::FileSystemRc) -> Self { + Self { fs } + } +} + +impl node_resolver::env::NodeResolverEnv for DenoFsNodeResolverEnv { + fn is_builtin_node_module(&self, specifier: &str) -> bool { + is_builtin_node_module(specifier) + } + + fn realpath_sync( + &self, + path: &std::path::Path, + ) -> std::io::Result { + self + .fs + .realpath_sync(path) + .map_err(|err| err.into_io_error()) + } + + fn stat_sync( + &self, + path: &std::path::Path, + ) -> std::io::Result { + self + .fs + .stat_sync(path) + .map(|stat| node_resolver::env::NodeResolverFsStat { + is_file: stat.is_file, + is_dir: stat.is_directory, + is_symlink: stat.is_symlink, + }) + .map_err(|err| err.into_io_error()) + } + + fn exists_sync(&self, path: &std::path::Path) -> bool { + 
self.fs.exists_sync(path) + } + + fn pkg_json_fs(&self) -> &dyn deno_package_json::fs::DenoPkgJsonFs { + self + } +} + +impl deno_package_json::fs::DenoPkgJsonFs for DenoFsNodeResolverEnv { + fn read_to_string_lossy( + &self, + path: &std::path::Path, + ) -> Result { + self + .fs + .read_text_file_lossy_sync(path, None) + .map_err(|err| err.into_io_error()) + } +} + +pub struct DenoPkgJsonFsAdapter<'a>(pub &'a dyn deno_fs::FileSystem); + +impl<'a> deno_package_json::fs::DenoPkgJsonFs for DenoPkgJsonFsAdapter<'a> { + fn read_to_string_lossy( + &self, + path: &Path, + ) -> Result { + self + .0 + .read_text_file_lossy_sync(path, None) + .map_err(|err| err.into_io_error()) + } +} diff --git a/ext/node/ops/require.rs b/ext/node/ops/require.rs index d03b3dd9c..d074234c3 100644 --- a/ext/node/ops/require.rs +++ b/ext/node/ops/require.rs @@ -10,16 +10,17 @@ use deno_core::JsRuntimeInspector; use deno_core::ModuleSpecifier; use deno_core::OpState; use deno_fs::FileSystemRc; +use node_resolver::NodeModuleKind; +use node_resolver::NodeResolutionMode; +use node_resolver::REQUIRE_CONDITIONS; use std::cell::RefCell; use std::path::Path; use std::path::PathBuf; use std::rc::Rc; -use crate::resolution; -use crate::resolution::NodeResolverRc; -use crate::NodeModuleKind; use crate::NodePermissions; -use crate::NodeResolutionMode; +use crate::NodeRequireResolverRc; +use crate::NodeResolverRc; use crate::NpmResolverRc; use crate::PackageJson; @@ -30,7 +31,7 @@ fn ensure_read_permission

( where P: NodePermissions + 'static, { - let resolver = state.borrow::().clone(); + let resolver = state.borrow::().clone(); let permissions = state.borrow_mut::

(); resolver.ensure_read_permission(permissions, file_path) } @@ -423,7 +424,7 @@ where exports, Some(&referrer), NodeModuleKind::Cjs, - resolution::REQUIRE_CONDITIONS, + REQUIRE_CONDITIONS, NodeResolutionMode::Execution, )?; Ok(Some(if r.scheme() == "file" { @@ -511,7 +512,7 @@ where exports, Some(&referrer), NodeModuleKind::Cjs, - resolution::REQUIRE_CONDITIONS, + REQUIRE_CONDITIONS, NodeResolutionMode::Execution, )?; Ok(Some(if r.scheme() == "file" { @@ -590,7 +591,7 @@ where Some(&referrer_url), NodeModuleKind::Cjs, Some(&pkg), - resolution::REQUIRE_CONDITIONS, + REQUIRE_CONDITIONS, NodeResolutionMode::Execution, )?; Ok(Some(url_to_file_path_string(&url)?)) diff --git a/ext/node/ops/worker_threads.rs b/ext/node/ops/worker_threads.rs index 182ba0118..c7ea4c52c 100644 --- a/ext/node/ops/worker_threads.rs +++ b/ext/node/ops/worker_threads.rs @@ -6,13 +6,13 @@ use deno_core::op2; use deno_core::url::Url; use deno_core::OpState; use deno_fs::FileSystemRc; +use node_resolver::NodeResolution; use std::path::Path; use std::path::PathBuf; -use crate::resolution; -use crate::resolution::NodeResolverRc; use crate::NodePermissions; -use crate::NpmResolverRc; +use crate::NodeRequireResolverRc; +use crate::NodeResolverRc; fn ensure_read_permission

( state: &mut OpState, @@ -21,7 +21,7 @@ fn ensure_read_permission

( where P: NodePermissions + 'static, { - let resolver = state.borrow::().clone(); + let resolver = state.borrow::().clone(); let permissions = state.borrow_mut::

(); resolver.ensure_read_permission(permissions, file_path) } @@ -64,9 +64,9 @@ where } let node_resolver = state.borrow::(); match node_resolver.url_to_node_resolution(url)? { - resolution::NodeResolution::Esm(u) => Ok(u.to_string()), - resolution::NodeResolution::CommonJs(u) => wrap_cjs(u), - _ => Err(generic_error("Neither ESM nor CJS")), + NodeResolution::Esm(u) => Ok(u.to_string()), + NodeResolution::CommonJs(u) => wrap_cjs(u), + NodeResolution::BuiltIn(_) => Err(generic_error("Neither ESM nor CJS")), } } diff --git a/ext/node/package_json.rs b/ext/node/package_json.rs deleted file mode 100644 index 877acfc7a..000000000 --- a/ext/node/package_json.rs +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -use deno_package_json::PackageJson; -use deno_package_json::PackageJsonRc; -use std::cell::RefCell; -use std::collections::HashMap; -use std::io::ErrorKind; -use std::path::Path; -use std::path::PathBuf; - -use crate::errors::PackageJsonLoadError; - -// use a thread local cache so that workers have their own distinct cache -thread_local! 
{ - static CACHE: RefCell> = RefCell::new(HashMap::new()); -} - -pub struct PackageJsonThreadLocalCache; - -impl PackageJsonThreadLocalCache { - pub fn clear() { - CACHE.with(|cache| cache.borrow_mut().clear()); - } -} - -impl deno_package_json::PackageJsonCache for PackageJsonThreadLocalCache { - fn get(&self, path: &Path) -> Option { - CACHE.with(|cache| cache.borrow().get(path).cloned()) - } - - fn set(&self, path: PathBuf, package_json: PackageJsonRc) { - CACHE.with(|cache| cache.borrow_mut().insert(path, package_json)); - } -} - -pub struct DenoPkgJsonFsAdapter<'a>(pub &'a dyn deno_fs::FileSystem); - -impl<'a> deno_package_json::fs::DenoPkgJsonFs for DenoPkgJsonFsAdapter<'a> { - fn read_to_string_lossy( - &self, - path: &Path, - ) -> Result { - self - .0 - .read_text_file_lossy_sync(path, None) - .map_err(|err| err.into_io_error()) - } -} - -/// Helper to load a package.json file using the thread local cache -/// in deno_node. -pub fn load_pkg_json( - fs: &dyn deno_fs::FileSystem, - path: &Path, -) -> Result, PackageJsonLoadError> { - let result = PackageJson::load_from_path( - path, - &DenoPkgJsonFsAdapter(fs), - Some(&PackageJsonThreadLocalCache), - ); - match result { - Ok(pkg_json) => Ok(Some(pkg_json)), - Err(deno_package_json::PackageJsonLoadError::Io { source, .. }) - if source.kind() == ErrorKind::NotFound => - { - Ok(None) - } - Err(err) => Err(PackageJsonLoadError(err)), - } -} diff --git a/ext/node/path.rs b/ext/node/path.rs deleted file mode 100644 index 0f151edaf..000000000 --- a/ext/node/path.rs +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
- -use std::path::Component; -use std::path::Path; -use std::path::PathBuf; - -use deno_core::ModuleSpecifier; - -/// Extension to path_clean::PathClean -pub trait PathClean { - fn clean(&self) -> T; -} - -impl PathClean for PathBuf { - fn clean(&self) -> PathBuf { - let path = path_clean::PathClean::clean(self); - if cfg!(windows) && path.to_string_lossy().contains("..\\") { - // temporary workaround because path_clean::PathClean::clean is - // not good enough on windows - let mut components = Vec::new(); - - for component in path.components() { - match component { - Component::CurDir => { - // skip - } - Component::ParentDir => { - let maybe_last_component = components.pop(); - if !matches!(maybe_last_component, Some(Component::Normal(_))) { - panic!("Error normalizing: {}", path.display()); - } - } - Component::Normal(_) | Component::RootDir | Component::Prefix(_) => { - components.push(component); - } - } - } - components.into_iter().collect::() - } else { - path - } - } -} - -pub(crate) fn to_file_specifier(path: &Path) -> ModuleSpecifier { - match ModuleSpecifier::from_file_path(path) { - Ok(url) => url, - Err(_) => panic!("Invalid path: {}", path.display()), - } -} diff --git a/ext/node/polyfill.rs b/ext/node/polyfill.rs index 5847acc42..b4030a491 100644 --- a/ext/node/polyfill.rs +++ b/ext/node/polyfill.rs @@ -1,7 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -use deno_core::ModuleSpecifier; - /// e.g. `is_builtin_node_module("assert")` pub fn is_builtin_node_module(module_name: &str) -> bool { SUPPORTED_BUILTIN_NODE_MODULES @@ -9,18 +7,6 @@ pub fn is_builtin_node_module(module_name: &str) -> bool { .any(|m| *m == module_name) } -/// Ex. 
returns `fs` for `node:fs` -pub fn get_module_name_from_builtin_node_module_specifier( - specifier: &ModuleSpecifier, -) -> Option<&str> { - if specifier.scheme() != "node" { - return None; - } - - let (_, specifier) = specifier.as_str().split_once(':')?; - Some(specifier) -} - macro_rules! generate_builtin_node_module_lists { ($( $module_name:literal ,)+) => { pub static SUPPORTED_BUILTIN_NODE_MODULES: &[&str] = &[ diff --git a/ext/node/resolution.rs b/ext/node/resolution.rs deleted file mode 100644 index 6417835a2..000000000 --- a/ext/node/resolution.rs +++ /dev/null @@ -1,2016 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -use std::borrow::Cow; -use std::collections::HashMap; -use std::path::Path; -use std::path::PathBuf; - -use deno_core::anyhow::bail; -use deno_core::error::AnyError; -use deno_core::serde_json::Map; -use deno_core::serde_json::Value; -use deno_core::url::Url; -use deno_core::ModuleSpecifier; -use deno_fs::FileSystemRc; -use deno_media_type::MediaType; -use deno_package_json::PackageJsonRc; - -use crate::errors; -use crate::errors::CanonicalizingPkgJsonDirError; -use crate::errors::ClosestPkgJsonError; -use crate::errors::DataUrlReferrerError; -use crate::errors::FinalizeResolutionError; -use crate::errors::InvalidModuleSpecifierError; -use crate::errors::InvalidPackageTargetError; -use crate::errors::LegacyResolveError; -use crate::errors::ModuleNotFoundError; -use crate::errors::NodeJsErrorCode; -use crate::errors::NodeJsErrorCoded; -use crate::errors::NodeResolveError; -use crate::errors::NodeResolveRelativeJoinError; -use crate::errors::PackageExportsResolveError; -use crate::errors::PackageImportNotDefinedError; -use crate::errors::PackageImportsResolveError; -use crate::errors::PackageImportsResolveErrorKind; -use crate::errors::PackageJsonLoadError; -use crate::errors::PackagePathNotExportedError; -use crate::errors::PackageResolveError; -use crate::errors::PackageSubpathResolveError; -use 
crate::errors::PackageSubpathResolveErrorKind; -use crate::errors::PackageTargetNotFoundError; -use crate::errors::PackageTargetResolveError; -use crate::errors::PackageTargetResolveErrorKind; -use crate::errors::ResolveBinaryCommandsError; -use crate::errors::ResolvePkgJsonBinExportError; -use crate::errors::ResolvePkgSubpathFromDenoModuleError; -use crate::errors::TypeScriptNotSupportedInNpmError; -use crate::errors::TypesNotFoundError; -use crate::errors::TypesNotFoundErrorData; -use crate::errors::UnsupportedDirImportError; -use crate::errors::UnsupportedEsmUrlSchemeError; -use crate::errors::UrlToNodeResolutionError; -use crate::is_builtin_node_module; -use crate::path::to_file_specifier; -use crate::polyfill::get_module_name_from_builtin_node_module_specifier; -use crate::NpmResolverRc; -use crate::PackageJson; -use crate::PathClean; - -pub static DEFAULT_CONDITIONS: &[&str] = &["deno", "node", "import"]; -pub static REQUIRE_CONDITIONS: &[&str] = &["require", "node"]; -static TYPES_ONLY_CONDITIONS: &[&str] = &["types"]; - -pub type NodeModuleKind = deno_package_json::NodeModuleKind; - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum NodeResolutionMode { - Execution, - Types, -} - -impl NodeResolutionMode { - pub fn is_types(&self) -> bool { - matches!(self, NodeResolutionMode::Types) - } -} - -#[derive(Debug)] -pub enum NodeResolution { - Esm(ModuleSpecifier), - CommonJs(ModuleSpecifier), - BuiltIn(String), -} - -impl NodeResolution { - pub fn into_url(self) -> ModuleSpecifier { - match self { - Self::Esm(u) => u, - Self::CommonJs(u) => u, - Self::BuiltIn(specifier) => { - if specifier.starts_with("node:") { - ModuleSpecifier::parse(&specifier).unwrap() - } else { - ModuleSpecifier::parse(&format!("node:{specifier}")).unwrap() - } - } - } - } - - pub fn into_specifier_and_media_type( - resolution: Option, - ) -> (ModuleSpecifier, MediaType) { - match resolution { - Some(NodeResolution::CommonJs(specifier)) => { - let media_type = 
MediaType::from_specifier(&specifier); - ( - specifier, - match media_type { - MediaType::JavaScript | MediaType::Jsx => MediaType::Cjs, - MediaType::TypeScript | MediaType::Tsx => MediaType::Cts, - MediaType::Dts => MediaType::Dcts, - _ => media_type, - }, - ) - } - Some(NodeResolution::Esm(specifier)) => { - let media_type = MediaType::from_specifier(&specifier); - ( - specifier, - match media_type { - MediaType::JavaScript | MediaType::Jsx => MediaType::Mjs, - MediaType::TypeScript | MediaType::Tsx => MediaType::Mts, - MediaType::Dts => MediaType::Dmts, - _ => media_type, - }, - ) - } - Some(resolution) => (resolution.into_url(), MediaType::Dts), - None => ( - ModuleSpecifier::parse("internal:///missing_dependency.d.ts").unwrap(), - MediaType::Dts, - ), - } - } -} - -#[allow(clippy::disallowed_types)] -pub type NodeResolverRc = deno_fs::sync::MaybeArc; - -#[derive(Debug)] -pub struct NodeResolver { - fs: FileSystemRc, - npm_resolver: NpmResolverRc, - in_npm_package_cache: deno_fs::sync::MaybeArcMutex>, -} - -impl NodeResolver { - pub fn new(fs: FileSystemRc, npm_resolver: NpmResolverRc) -> Self { - Self { - fs, - npm_resolver, - in_npm_package_cache: deno_fs::sync::MaybeArcMutex::new(HashMap::new()), - } - } - - pub fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool { - self.npm_resolver.in_npm_package(specifier) - } - - pub fn in_npm_package_with_cache(&self, specifier: Cow) -> bool { - let mut cache = self.in_npm_package_cache.lock(); - - if let Some(result) = cache.get(specifier.as_ref()) { - return *result; - } - - let result = - if let Ok(specifier) = deno_core::ModuleSpecifier::parse(&specifier) { - self.npm_resolver.in_npm_package(&specifier) - } else { - false - }; - cache.insert(specifier.into_owned(), result); - result - } - - /// This function is an implementation of `defaultResolve` in - /// `lib/internal/modules/esm/resolve.js` from Node. 
- pub fn resolve( - &self, - specifier: &str, - referrer: &ModuleSpecifier, - referrer_kind: NodeModuleKind, - mode: NodeResolutionMode, - ) -> Result { - // Note: if we are here, then the referrer is an esm module - // TODO(bartlomieju): skipped "policy" part as we don't plan to support it - - if crate::is_builtin_node_module(specifier) { - return Ok(NodeResolution::BuiltIn(specifier.to_string())); - } - - if let Ok(url) = Url::parse(specifier) { - if url.scheme() == "data" { - return Ok(NodeResolution::Esm(url)); - } - - if let Some(module_name) = - get_module_name_from_builtin_node_module_specifier(&url) - { - return Ok(NodeResolution::BuiltIn(module_name.to_string())); - } - - let protocol = url.scheme(); - - if protocol != "file" && protocol != "data" { - return Err( - UnsupportedEsmUrlSchemeError { - url_scheme: protocol.to_string(), - } - .into(), - ); - } - - // todo(dsherret): this seems wrong - if referrer.scheme() == "data" { - let url = referrer - .join(specifier) - .map_err(|source| DataUrlReferrerError { source })?; - return Ok(NodeResolution::Esm(url)); - } - } - - let url = self.module_resolve( - specifier, - referrer, - referrer_kind, - match referrer_kind { - NodeModuleKind::Esm => DEFAULT_CONDITIONS, - NodeModuleKind::Cjs => REQUIRE_CONDITIONS, - }, - mode, - )?; - - let url = if mode.is_types() { - let file_path = to_file_path(&url); - self.path_to_declaration_url(&file_path, Some(referrer), referrer_kind)? - } else { - url - }; - - let url = self.finalize_resolution(url, Some(referrer))?; - let resolve_response = self.url_to_node_resolution(url)?; - // TODO(bartlomieju): skipped checking errors for commonJS resolution and - // "preserveSymlinksMain"/"preserveSymlinks" options. 
- Ok(resolve_response) - } - - fn module_resolve( - &self, - specifier: &str, - referrer: &ModuleSpecifier, - referrer_kind: NodeModuleKind, - conditions: &[&str], - mode: NodeResolutionMode, - ) -> Result { - if should_be_treated_as_relative_or_absolute_path(specifier) { - Ok(referrer.join(specifier).map_err(|err| { - NodeResolveRelativeJoinError { - path: specifier.to_string(), - base: referrer.clone(), - source: err, - } - })?) - } else if specifier.starts_with('#') { - let pkg_config = self - .get_closest_package_json(referrer) - .map_err(PackageImportsResolveErrorKind::ClosestPkgJson) - .map_err(|err| PackageImportsResolveError(Box::new(err)))?; - Ok(self.package_imports_resolve( - specifier, - Some(referrer), - referrer_kind, - pkg_config.as_deref(), - conditions, - mode, - )?) - } else if let Ok(resolved) = Url::parse(specifier) { - Ok(resolved) - } else { - Ok(self.package_resolve( - specifier, - referrer, - referrer_kind, - conditions, - mode, - )?) - } - } - - fn finalize_resolution( - &self, - resolved: ModuleSpecifier, - maybe_referrer: Option<&ModuleSpecifier>, - ) -> Result { - let encoded_sep_re = lazy_regex::regex!(r"%2F|%2C"); - - if encoded_sep_re.is_match(resolved.path()) { - return Err( - errors::InvalidModuleSpecifierError { - request: resolved.to_string(), - reason: Cow::Borrowed( - "must not include encoded \"/\" or \"\\\\\" characters", - ), - maybe_referrer: maybe_referrer.map(to_file_path_string), - } - .into(), - ); - } - - if resolved.scheme() == "node" { - return Ok(resolved); - } - - let path = to_file_path(&resolved); - - // TODO(bartlomieju): currently not supported - // if (getOptionValue('--experimental-specifier-resolution') === 'node') { - // ... 
- // } - - let p_str = path.to_str().unwrap(); - let p = if p_str.ends_with('/') { - p_str[p_str.len() - 1..].to_string() - } else { - p_str.to_string() - }; - - let (is_dir, is_file) = if let Ok(stats) = self.fs.stat_sync(Path::new(&p)) - { - (stats.is_directory, stats.is_file) - } else { - (false, false) - }; - if is_dir { - return Err( - UnsupportedDirImportError { - dir_url: resolved.clone(), - maybe_referrer: maybe_referrer.map(ToOwned::to_owned), - } - .into(), - ); - } else if !is_file { - return Err( - ModuleNotFoundError { - specifier: resolved, - maybe_referrer: maybe_referrer.map(ToOwned::to_owned), - typ: "module", - } - .into(), - ); - } - - Ok(resolved) - } - - pub fn resolve_package_subpath_from_deno_module( - &self, - package_dir: &Path, - package_subpath: Option<&str>, - maybe_referrer: Option<&ModuleSpecifier>, - mode: NodeResolutionMode, - ) -> Result { - let node_module_kind = NodeModuleKind::Esm; - let package_subpath = package_subpath - .map(|s| format!("./{s}")) - .unwrap_or_else(|| ".".to_string()); - let resolved_url = self.resolve_package_dir_subpath( - package_dir, - &package_subpath, - maybe_referrer, - node_module_kind, - DEFAULT_CONDITIONS, - mode, - )?; - let resolve_response = self.url_to_node_resolution(resolved_url)?; - // TODO(bartlomieju): skipped checking errors for commonJS resolution and - // "preserveSymlinksMain"/"preserveSymlinks" options. - Ok(resolve_response) - } - - pub fn resolve_binary_commands( - &self, - package_folder: &Path, - ) -> Result, ResolveBinaryCommandsError> { - let pkg_json_path = package_folder.join("package.json"); - let Some(package_json) = self.load_package_json(&pkg_json_path)? 
else { - return Ok(Vec::new()); - }; - - Ok(match &package_json.bin { - Some(Value::String(_)) => { - let Some(name) = &package_json.name else { - return Err(ResolveBinaryCommandsError::MissingPkgJsonName { - pkg_json_path, - }); - }; - vec![name.to_string()] - } - Some(Value::Object(o)) => { - o.iter().map(|(key, _)| key.clone()).collect::>() - } - _ => Vec::new(), - }) - } - - pub fn resolve_binary_export( - &self, - package_folder: &Path, - sub_path: Option<&str>, - ) -> Result { - let pkg_json_path = package_folder.join("package.json"); - let Some(package_json) = self.load_package_json(&pkg_json_path)? else { - return Err(ResolvePkgJsonBinExportError::MissingPkgJson { - pkg_json_path, - }); - }; - let bin_entry = - resolve_bin_entry_value(&package_json, sub_path).map_err(|err| { - ResolvePkgJsonBinExportError::InvalidBinProperty { - message: err.to_string(), - } - })?; - let url = to_file_specifier(&package_folder.join(bin_entry)); - - let resolve_response = self.url_to_node_resolution(url)?; - // TODO(bartlomieju): skipped checking errors for commonJS resolution and - // "preserveSymlinksMain"/"preserveSymlinks" options. 
- Ok(resolve_response) - } - - pub fn url_to_node_resolution( - &self, - url: ModuleSpecifier, - ) -> Result { - let url_str = url.as_str().to_lowercase(); - if url_str.starts_with("http") || url_str.ends_with(".json") { - Ok(NodeResolution::Esm(url)) - } else if url_str.ends_with(".js") || url_str.ends_with(".d.ts") { - let maybe_package_config = self.get_closest_package_json(&url)?; - match maybe_package_config { - Some(c) if c.typ == "module" => Ok(NodeResolution::Esm(url)), - Some(_) => Ok(NodeResolution::CommonJs(url)), - None => Ok(NodeResolution::Esm(url)), - } - } else if url_str.ends_with(".mjs") || url_str.ends_with(".d.mts") { - Ok(NodeResolution::Esm(url)) - } else if url_str.ends_with(".ts") || url_str.ends_with(".mts") { - if self.in_npm_package(&url) { - Err(TypeScriptNotSupportedInNpmError { specifier: url }.into()) - } else { - Ok(NodeResolution::Esm(url)) - } - } else { - Ok(NodeResolution::CommonJs(url)) - } - } - - /// Checks if the resolved file has a corresponding declaration file. 
- fn path_to_declaration_url( - &self, - path: &Path, - maybe_referrer: Option<&ModuleSpecifier>, - referrer_kind: NodeModuleKind, - ) -> Result { - fn probe_extensions( - fs: &dyn deno_fs::FileSystem, - path: &Path, - lowercase_path: &str, - referrer_kind: NodeModuleKind, - ) -> Option { - let mut searched_for_d_mts = false; - let mut searched_for_d_cts = false; - if lowercase_path.ends_with(".mjs") { - let d_mts_path = with_known_extension(path, "d.mts"); - if fs.exists_sync(&d_mts_path) { - return Some(d_mts_path); - } - searched_for_d_mts = true; - } else if lowercase_path.ends_with(".cjs") { - let d_cts_path = with_known_extension(path, "d.cts"); - if fs.exists_sync(&d_cts_path) { - return Some(d_cts_path); - } - searched_for_d_cts = true; - } - - let dts_path = with_known_extension(path, "d.ts"); - if fs.exists_sync(&dts_path) { - return Some(dts_path); - } - - let specific_dts_path = match referrer_kind { - NodeModuleKind::Cjs if !searched_for_d_cts => { - Some(with_known_extension(path, "d.cts")) - } - NodeModuleKind::Esm if !searched_for_d_mts => { - Some(with_known_extension(path, "d.mts")) - } - _ => None, // already searched above - }; - if let Some(specific_dts_path) = specific_dts_path { - if fs.exists_sync(&specific_dts_path) { - return Some(specific_dts_path); - } - } - None - } - - let lowercase_path = path.to_string_lossy().to_lowercase(); - if lowercase_path.ends_with(".d.ts") - || lowercase_path.ends_with(".d.cts") - || lowercase_path.ends_with(".d.mts") - { - return Ok(to_file_specifier(path)); - } - if let Some(path) = - probe_extensions(&*self.fs, path, &lowercase_path, referrer_kind) - { - return Ok(to_file_specifier(&path)); - } - if self.fs.is_dir_sync(path) { - let resolution_result = self.resolve_package_dir_subpath( - path, - /* sub path */ ".", - maybe_referrer, - referrer_kind, - match referrer_kind { - NodeModuleKind::Esm => DEFAULT_CONDITIONS, - NodeModuleKind::Cjs => REQUIRE_CONDITIONS, - }, - NodeResolutionMode::Types, - ); - if 
let Ok(resolution) = resolution_result { - return Ok(resolution); - } - let index_path = path.join("index.js"); - if let Some(path) = probe_extensions( - &*self.fs, - &index_path, - &index_path.to_string_lossy().to_lowercase(), - referrer_kind, - ) { - return Ok(to_file_specifier(&path)); - } - } - // allow resolving .css files for types resolution - if lowercase_path.ends_with(".css") { - return Ok(to_file_specifier(path)); - } - Err(TypesNotFoundError(Box::new(TypesNotFoundErrorData { - code_specifier: to_file_specifier(path), - maybe_referrer: maybe_referrer.cloned(), - }))) - } - - #[allow(clippy::too_many_arguments)] - pub(super) fn package_imports_resolve( - &self, - name: &str, - maybe_referrer: Option<&ModuleSpecifier>, - referrer_kind: NodeModuleKind, - referrer_pkg_json: Option<&PackageJson>, - conditions: &[&str], - mode: NodeResolutionMode, - ) -> Result { - if name == "#" || name.starts_with("#/") || name.ends_with('/') { - let reason = "is not a valid internal imports specifier name"; - return Err( - errors::InvalidModuleSpecifierError { - request: name.to_string(), - reason: Cow::Borrowed(reason), - maybe_referrer: maybe_referrer.map(to_specifier_display_string), - } - .into(), - ); - } - - let mut package_json_path = None; - if let Some(pkg_json) = &referrer_pkg_json { - package_json_path = Some(pkg_json.path.clone()); - if let Some(imports) = &pkg_json.imports { - if imports.contains_key(name) && !name.contains('*') { - let target = imports.get(name).unwrap(); - let maybe_resolved = self.resolve_package_target( - package_json_path.as_ref().unwrap(), - target, - "", - name, - maybe_referrer, - referrer_kind, - false, - true, - conditions, - mode, - )?; - if let Some(resolved) = maybe_resolved { - return Ok(resolved); - } - } else { - let mut best_match = ""; - let mut best_match_subpath = None; - for key in imports.keys() { - let pattern_index = key.find('*'); - if let Some(pattern_index) = pattern_index { - let key_sub = &key[0..=pattern_index]; - 
if name.starts_with(key_sub) { - let pattern_trailer = &key[pattern_index + 1..]; - if name.len() > key.len() - && name.ends_with(&pattern_trailer) - && pattern_key_compare(best_match, key) == 1 - && key.rfind('*') == Some(pattern_index) - { - best_match = key; - best_match_subpath = Some( - name[pattern_index..=(name.len() - pattern_trailer.len())] - .to_string(), - ); - } - } - } - } - - if !best_match.is_empty() { - let target = imports.get(best_match).unwrap(); - let maybe_resolved = self.resolve_package_target( - package_json_path.as_ref().unwrap(), - target, - &best_match_subpath.unwrap(), - best_match, - maybe_referrer, - referrer_kind, - true, - true, - conditions, - mode, - )?; - if let Some(resolved) = maybe_resolved { - return Ok(resolved); - } - } - } - } - } - - Err( - PackageImportNotDefinedError { - name: name.to_string(), - package_json_path, - maybe_referrer: maybe_referrer.map(ToOwned::to_owned), - } - .into(), - ) - } - - #[allow(clippy::too_many_arguments)] - fn resolve_package_target_string( - &self, - target: &str, - subpath: &str, - match_: &str, - package_json_path: &Path, - maybe_referrer: Option<&ModuleSpecifier>, - referrer_kind: NodeModuleKind, - pattern: bool, - internal: bool, - conditions: &[&str], - mode: NodeResolutionMode, - ) -> Result { - if !subpath.is_empty() && !pattern && !target.ends_with('/') { - return Err( - InvalidPackageTargetError { - pkg_json_path: package_json_path.to_path_buf(), - sub_path: match_.to_string(), - target: target.to_string(), - is_import: internal, - maybe_referrer: maybe_referrer.map(ToOwned::to_owned), - } - .into(), - ); - } - let invalid_segment_re = - lazy_regex::regex!(r"(^|\\|/)(\.\.?|node_modules)(\\|/|$)"); - let pattern_re = lazy_regex::regex!(r"\*"); - if !target.starts_with("./") { - if internal && !target.starts_with("../") && !target.starts_with('/') { - let target_url = Url::parse(target); - match target_url { - Ok(url) => { - if get_module_name_from_builtin_node_module_specifier(&url) - 
.is_some() - { - return Ok(url); - } - } - Err(_) => { - let export_target = if pattern { - pattern_re - .replace(target, |_caps: ®ex::Captures| subpath) - .to_string() - } else { - format!("{target}{subpath}") - }; - let package_json_url = to_file_specifier(package_json_path); - let result = match self.package_resolve( - &export_target, - &package_json_url, - referrer_kind, - conditions, - mode, - ) { - Ok(url) => Ok(url), - Err(err) => match err.code() { - NodeJsErrorCode::ERR_INVALID_MODULE_SPECIFIER - | NodeJsErrorCode::ERR_INVALID_PACKAGE_CONFIG - | NodeJsErrorCode::ERR_INVALID_PACKAGE_TARGET - | NodeJsErrorCode::ERR_PACKAGE_IMPORT_NOT_DEFINED - | NodeJsErrorCode::ERR_PACKAGE_PATH_NOT_EXPORTED - | NodeJsErrorCode::ERR_UNKNOWN_FILE_EXTENSION - | NodeJsErrorCode::ERR_UNSUPPORTED_DIR_IMPORT - | NodeJsErrorCode::ERR_UNSUPPORTED_ESM_URL_SCHEME - | NodeJsErrorCode::ERR_TYPES_NOT_FOUND => { - Err(PackageTargetResolveErrorKind::PackageResolve(err).into()) - } - NodeJsErrorCode::ERR_MODULE_NOT_FOUND => Err( - PackageTargetResolveErrorKind::NotFound( - PackageTargetNotFoundError { - pkg_json_path: package_json_path.to_path_buf(), - target: export_target.to_string(), - maybe_referrer: maybe_referrer.map(ToOwned::to_owned), - referrer_kind, - mode, - }, - ) - .into(), - ), - }, - }; - - return match result { - Ok(url) => Ok(url), - Err(err) => { - if is_builtin_node_module(target) { - Ok( - ModuleSpecifier::parse(&format!("node:{}", target)) - .unwrap(), - ) - } else { - Err(err) - } - } - }; - } - } - } - return Err( - InvalidPackageTargetError { - pkg_json_path: package_json_path.to_path_buf(), - sub_path: match_.to_string(), - target: target.to_string(), - is_import: internal, - maybe_referrer: maybe_referrer.map(ToOwned::to_owned), - } - .into(), - ); - } - if invalid_segment_re.is_match(&target[2..]) { - return Err( - InvalidPackageTargetError { - pkg_json_path: package_json_path.to_path_buf(), - sub_path: match_.to_string(), - target: target.to_string(), - 
is_import: internal, - maybe_referrer: maybe_referrer.map(ToOwned::to_owned), - } - .into(), - ); - } - let package_path = package_json_path.parent().unwrap(); - let resolved_path = package_path.join(target).clean(); - if !resolved_path.starts_with(package_path) { - return Err( - InvalidPackageTargetError { - pkg_json_path: package_json_path.to_path_buf(), - sub_path: match_.to_string(), - target: target.to_string(), - is_import: internal, - maybe_referrer: maybe_referrer.map(ToOwned::to_owned), - } - .into(), - ); - } - if subpath.is_empty() { - return Ok(to_file_specifier(&resolved_path)); - } - if invalid_segment_re.is_match(subpath) { - let request = if pattern { - match_.replace('*', subpath) - } else { - format!("{match_}{subpath}") - }; - return Err( - throw_invalid_subpath( - request, - package_json_path, - internal, - maybe_referrer, - ) - .into(), - ); - } - if pattern { - let resolved_path_str = resolved_path.to_string_lossy(); - let replaced = pattern_re - .replace(&resolved_path_str, |_caps: ®ex::Captures| subpath); - return Ok(to_file_specifier(&PathBuf::from(replaced.to_string()))); - } - Ok(to_file_specifier(&resolved_path.join(subpath).clean())) - } - - #[allow(clippy::too_many_arguments)] - fn resolve_package_target( - &self, - package_json_path: &Path, - target: &Value, - subpath: &str, - package_subpath: &str, - maybe_referrer: Option<&ModuleSpecifier>, - referrer_kind: NodeModuleKind, - pattern: bool, - internal: bool, - conditions: &[&str], - mode: NodeResolutionMode, - ) -> Result, PackageTargetResolveError> { - let result = self.resolve_package_target_inner( - package_json_path, - target, - subpath, - package_subpath, - maybe_referrer, - referrer_kind, - pattern, - internal, - conditions, - mode, - ); - match result { - Ok(maybe_resolved) => Ok(maybe_resolved), - Err(err) => { - if mode.is_types() - && err.code() == NodeJsErrorCode::ERR_TYPES_NOT_FOUND - && conditions != TYPES_ONLY_CONDITIONS - { - // try resolving with just "types" 
conditions for when someone misconfigures - // and puts the "types" condition in the wrong place - if let Ok(Some(resolved)) = self.resolve_package_target_inner( - package_json_path, - target, - subpath, - package_subpath, - maybe_referrer, - referrer_kind, - pattern, - internal, - TYPES_ONLY_CONDITIONS, - mode, - ) { - return Ok(Some(resolved)); - } - } - - Err(err) - } - } - } - - #[allow(clippy::too_many_arguments)] - fn resolve_package_target_inner( - &self, - package_json_path: &Path, - target: &Value, - subpath: &str, - package_subpath: &str, - maybe_referrer: Option<&ModuleSpecifier>, - referrer_kind: NodeModuleKind, - pattern: bool, - internal: bool, - conditions: &[&str], - mode: NodeResolutionMode, - ) -> Result, PackageTargetResolveError> { - if let Some(target) = target.as_str() { - let url = self.resolve_package_target_string( - target, - subpath, - package_subpath, - package_json_path, - maybe_referrer, - referrer_kind, - pattern, - internal, - conditions, - mode, - )?; - if mode.is_types() && url.scheme() == "file" { - let path = url.to_file_path().unwrap(); - return Ok(Some(self.path_to_declaration_url( - &path, - maybe_referrer, - referrer_kind, - )?)); - } else { - return Ok(Some(url)); - } - } else if let Some(target_arr) = target.as_array() { - if target_arr.is_empty() { - return Ok(None); - } - - let mut last_error = None; - for target_item in target_arr { - let resolved_result = self.resolve_package_target( - package_json_path, - target_item, - subpath, - package_subpath, - maybe_referrer, - referrer_kind, - pattern, - internal, - conditions, - mode, - ); - - match resolved_result { - Ok(Some(resolved)) => return Ok(Some(resolved)), - Ok(None) => { - last_error = None; - continue; - } - Err(e) => { - if e.code() == NodeJsErrorCode::ERR_INVALID_PACKAGE_TARGET { - last_error = Some(e); - continue; - } else { - return Err(e); - } - } - } - } - if last_error.is_none() { - return Ok(None); - } - return Err(last_error.unwrap()); - } else if let 
Some(target_obj) = target.as_object() { - for key in target_obj.keys() { - // TODO(bartlomieju): verify that keys are not numeric - // return Err(errors::err_invalid_package_config( - // to_file_path_string(package_json_url), - // Some(base.as_str().to_string()), - // Some("\"exports\" cannot contain numeric property keys.".to_string()), - // )); - - if key == "default" - || conditions.contains(&key.as_str()) - || mode.is_types() && key.as_str() == "types" - { - let condition_target = target_obj.get(key).unwrap(); - - let resolved = self.resolve_package_target( - package_json_path, - condition_target, - subpath, - package_subpath, - maybe_referrer, - referrer_kind, - pattern, - internal, - conditions, - mode, - )?; - match resolved { - Some(resolved) => return Ok(Some(resolved)), - None => { - continue; - } - } - } - } - } else if target.is_null() { - return Ok(None); - } - - Err( - InvalidPackageTargetError { - pkg_json_path: package_json_path.to_path_buf(), - sub_path: package_subpath.to_string(), - target: target.to_string(), - is_import: internal, - maybe_referrer: maybe_referrer.map(ToOwned::to_owned), - } - .into(), - ) - } - - #[allow(clippy::too_many_arguments)] - pub fn package_exports_resolve( - &self, - package_json_path: &Path, - package_subpath: &str, - package_exports: &Map, - maybe_referrer: Option<&ModuleSpecifier>, - referrer_kind: NodeModuleKind, - conditions: &[&str], - mode: NodeResolutionMode, - ) -> Result { - if package_exports.contains_key(package_subpath) - && package_subpath.find('*').is_none() - && !package_subpath.ends_with('/') - { - let target = package_exports.get(package_subpath).unwrap(); - let resolved = self.resolve_package_target( - package_json_path, - target, - "", - package_subpath, - maybe_referrer, - referrer_kind, - false, - false, - conditions, - mode, - )?; - return match resolved { - Some(resolved) => Ok(resolved), - None => Err( - PackagePathNotExportedError { - pkg_json_path: package_json_path.to_path_buf(), - subpath: 
package_subpath.to_string(), - maybe_referrer: maybe_referrer.map(ToOwned::to_owned), - mode, - } - .into(), - ), - }; - } - - let mut best_match = ""; - let mut best_match_subpath = None; - for key in package_exports.keys() { - let pattern_index = key.find('*'); - if let Some(pattern_index) = pattern_index { - let key_sub = &key[0..pattern_index]; - if package_subpath.starts_with(key_sub) { - // When this reaches EOL, this can throw at the top of the whole function: - // - // if (StringPrototypeEndsWith(packageSubpath, '/')) - // throwInvalidSubpath(packageSubpath) - // - // To match "imports" and the spec. - if package_subpath.ends_with('/') { - // TODO(bartlomieju): - // emitTrailingSlashPatternDeprecation(); - } - let pattern_trailer = &key[pattern_index + 1..]; - if package_subpath.len() >= key.len() - && package_subpath.ends_with(&pattern_trailer) - && pattern_key_compare(best_match, key) == 1 - && key.rfind('*') == Some(pattern_index) - { - best_match = key; - best_match_subpath = Some( - package_subpath[pattern_index - ..(package_subpath.len() - pattern_trailer.len())] - .to_string(), - ); - } - } - } - } - - if !best_match.is_empty() { - let target = package_exports.get(best_match).unwrap(); - let maybe_resolved = self.resolve_package_target( - package_json_path, - target, - &best_match_subpath.unwrap(), - best_match, - maybe_referrer, - referrer_kind, - true, - false, - conditions, - mode, - )?; - if let Some(resolved) = maybe_resolved { - return Ok(resolved); - } else { - return Err( - PackagePathNotExportedError { - pkg_json_path: package_json_path.to_path_buf(), - subpath: package_subpath.to_string(), - maybe_referrer: maybe_referrer.map(ToOwned::to_owned), - mode, - } - .into(), - ); - } - } - - Err( - PackagePathNotExportedError { - pkg_json_path: package_json_path.to_path_buf(), - subpath: package_subpath.to_string(), - maybe_referrer: maybe_referrer.map(ToOwned::to_owned), - mode, - } - .into(), - ) - } - - pub(super) fn package_resolve( - &self, - 
specifier: &str, - referrer: &ModuleSpecifier, - referrer_kind: NodeModuleKind, - conditions: &[&str], - mode: NodeResolutionMode, - ) -> Result { - let (package_name, package_subpath, _is_scoped) = - parse_npm_pkg_name(specifier, referrer)?; - - if let Some(package_config) = self.get_closest_package_json(referrer)? { - // ResolveSelf - if package_config.name.as_ref() == Some(&package_name) { - if let Some(exports) = &package_config.exports { - return self - .package_exports_resolve( - &package_config.path, - &package_subpath, - exports, - Some(referrer), - referrer_kind, - conditions, - mode, - ) - .map_err(|err| err.into()); - } - } - } - - self.resolve_package_subpath_for_package( - &package_name, - &package_subpath, - referrer, - referrer_kind, - conditions, - mode, - ) - } - - #[allow(clippy::too_many_arguments)] - fn resolve_package_subpath_for_package( - &self, - package_name: &str, - package_subpath: &str, - referrer: &ModuleSpecifier, - referrer_kind: NodeModuleKind, - conditions: &[&str], - mode: NodeResolutionMode, - ) -> Result { - let result = self.resolve_package_subpath_for_package_inner( - package_name, - package_subpath, - referrer, - referrer_kind, - conditions, - mode, - ); - if mode.is_types() && !matches!(result, Ok(ModuleSpecifier { .. 
})) { - // try to resolve with the @types package - let package_name = types_package_name(package_name); - if let Ok(result) = self.resolve_package_subpath_for_package_inner( - &package_name, - package_subpath, - referrer, - referrer_kind, - conditions, - mode, - ) { - return Ok(result); - } - } - result - } - - #[allow(clippy::too_many_arguments)] - fn resolve_package_subpath_for_package_inner( - &self, - package_name: &str, - package_subpath: &str, - referrer: &ModuleSpecifier, - referrer_kind: NodeModuleKind, - conditions: &[&str], - mode: NodeResolutionMode, - ) -> Result { - let package_dir_path = self - .npm_resolver - .resolve_package_folder_from_package(package_name, referrer)?; - - // todo: error with this instead when can't find package - // Err(errors::err_module_not_found( - // &package_json_url - // .join(".") - // .unwrap() - // .to_file_path() - // .unwrap() - // .display() - // .to_string(), - // &to_file_path_string(referrer), - // "package", - // )) - - // Package match. - self - .resolve_package_dir_subpath( - &package_dir_path, - package_subpath, - Some(referrer), - referrer_kind, - conditions, - mode, - ) - .map_err(|err| err.into()) - } - - #[allow(clippy::too_many_arguments)] - fn resolve_package_dir_subpath( - &self, - package_dir_path: &Path, - package_subpath: &str, - maybe_referrer: Option<&ModuleSpecifier>, - referrer_kind: NodeModuleKind, - conditions: &[&str], - mode: NodeResolutionMode, - ) -> Result { - let package_json_path = package_dir_path.join("package.json"); - match self.load_package_json(&package_json_path)? 
{ - Some(pkg_json) => self.resolve_package_subpath( - &pkg_json, - package_subpath, - maybe_referrer, - referrer_kind, - conditions, - mode, - ), - None => self - .resolve_package_subpath_no_pkg_json( - package_dir_path, - package_subpath, - maybe_referrer, - referrer_kind, - mode, - ) - .map_err(|err| { - PackageSubpathResolveErrorKind::LegacyResolve(err).into() - }), - } - } - - #[allow(clippy::too_many_arguments)] - fn resolve_package_subpath( - &self, - package_json: &PackageJson, - package_subpath: &str, - referrer: Option<&ModuleSpecifier>, - referrer_kind: NodeModuleKind, - conditions: &[&str], - mode: NodeResolutionMode, - ) -> Result { - if let Some(exports) = &package_json.exports { - let result = self.package_exports_resolve( - &package_json.path, - package_subpath, - exports, - referrer, - referrer_kind, - conditions, - mode, - ); - match result { - Ok(found) => return Ok(found), - Err(exports_err) => { - if mode.is_types() && package_subpath == "." { - return self - .legacy_main_resolve(package_json, referrer, referrer_kind, mode) - .map_err(|err| { - PackageSubpathResolveErrorKind::LegacyResolve(err).into() - }); - } - return Err( - PackageSubpathResolveErrorKind::Exports(exports_err).into(), - ); - } - } - } - - if package_subpath == "." 
{ - return self - .legacy_main_resolve(package_json, referrer, referrer_kind, mode) - .map_err(|err| { - PackageSubpathResolveErrorKind::LegacyResolve(err).into() - }); - } - - self - .resolve_subpath_exact( - package_json.path.parent().unwrap(), - package_subpath, - referrer, - referrer_kind, - mode, - ) - .map_err(|err| { - PackageSubpathResolveErrorKind::LegacyResolve(err.into()).into() - }) - } - - fn resolve_subpath_exact( - &self, - directory: &Path, - package_subpath: &str, - referrer: Option<&ModuleSpecifier>, - referrer_kind: NodeModuleKind, - mode: NodeResolutionMode, - ) -> Result { - assert_ne!(package_subpath, "."); - let file_path = directory.join(package_subpath); - if mode.is_types() { - Ok(self.path_to_declaration_url(&file_path, referrer, referrer_kind)?) - } else { - Ok(to_file_specifier(&file_path)) - } - } - - fn resolve_package_subpath_no_pkg_json( - &self, - directory: &Path, - package_subpath: &str, - maybe_referrer: Option<&ModuleSpecifier>, - referrer_kind: NodeModuleKind, - mode: NodeResolutionMode, - ) -> Result { - if package_subpath == "." 
{ - self.legacy_index_resolve(directory, maybe_referrer, referrer_kind, mode) - } else { - self - .resolve_subpath_exact( - directory, - package_subpath, - maybe_referrer, - referrer_kind, - mode, - ) - .map_err(|err| err.into()) - } - } - - pub fn get_closest_package_json( - &self, - url: &ModuleSpecifier, - ) -> Result, ClosestPkgJsonError> { - let Ok(file_path) = url.to_file_path() else { - return Ok(None); - }; - self.get_closest_package_json_from_path(&file_path) - } - - pub fn get_closest_package_json_from_path( - &self, - file_path: &Path, - ) -> Result, ClosestPkgJsonError> { - let parent_dir = file_path.parent().unwrap(); - let current_dir = - deno_core::strip_unc_prefix(self.fs.realpath_sync(parent_dir).map_err( - |source| CanonicalizingPkgJsonDirError { - dir_path: parent_dir.to_path_buf(), - source: source.into_io_error(), - }, - )?); - for current_dir in current_dir.ancestors() { - let package_json_path = current_dir.join("package.json"); - if let Some(pkg_json) = self.load_package_json(&package_json_path)? 
{ - return Ok(Some(pkg_json)); - } - } - - Ok(None) - } - - pub(super) fn load_package_json( - &self, - package_json_path: &Path, - ) -> Result, PackageJsonLoadError> { - crate::package_json::load_pkg_json(&*self.fs, package_json_path) - } - - pub(super) fn legacy_main_resolve( - &self, - package_json: &PackageJson, - maybe_referrer: Option<&ModuleSpecifier>, - referrer_kind: NodeModuleKind, - mode: NodeResolutionMode, - ) -> Result { - let maybe_main = if mode.is_types() { - match package_json.types.as_ref() { - Some(types) => Some(types.as_str()), - None => { - // fallback to checking the main entrypoint for - // a corresponding declaration file - if let Some(main) = package_json.main(referrer_kind) { - let main = package_json.path.parent().unwrap().join(main).clean(); - let decl_url_result = self.path_to_declaration_url( - &main, - maybe_referrer, - referrer_kind, - ); - // don't surface errors, fallback to checking the index now - if let Ok(url) = decl_url_result { - return Ok(url); - } - } - None - } - } - } else { - package_json.main(referrer_kind) - }; - - if let Some(main) = maybe_main { - let guess = package_json.path.parent().unwrap().join(main).clean(); - if self.fs.is_file_sync(&guess) { - return Ok(to_file_specifier(&guess)); - } - - // todo(dsherret): investigate exactly how node and typescript handles this - let endings = if mode.is_types() { - match referrer_kind { - NodeModuleKind::Cjs => { - vec![".d.ts", ".d.cts", "/index.d.ts", "/index.d.cts"] - } - NodeModuleKind::Esm => vec![ - ".d.ts", - ".d.mts", - "/index.d.ts", - "/index.d.mts", - ".d.cts", - "/index.d.cts", - ], - } - } else { - vec![".js", "/index.js"] - }; - for ending in endings { - let guess = package_json - .path - .parent() - .unwrap() - .join(format!("{main}{ending}")) - .clean(); - if self.fs.is_file_sync(&guess) { - // TODO(bartlomieju): emitLegacyIndexDeprecation() - return Ok(to_file_specifier(&guess)); - } - } - } - - self.legacy_index_resolve( - 
package_json.path.parent().unwrap(), - maybe_referrer, - referrer_kind, - mode, - ) - } - - fn legacy_index_resolve( - &self, - directory: &Path, - maybe_referrer: Option<&ModuleSpecifier>, - referrer_kind: NodeModuleKind, - mode: NodeResolutionMode, - ) -> Result { - let index_file_names = if mode.is_types() { - // todo(dsherret): investigate exactly how typescript does this - match referrer_kind { - NodeModuleKind::Cjs => vec!["index.d.ts", "index.d.cts"], - NodeModuleKind::Esm => vec!["index.d.ts", "index.d.mts", "index.d.cts"], - } - } else { - vec!["index.js"] - }; - for index_file_name in index_file_names { - let guess = directory.join(index_file_name).clean(); - if self.fs.is_file_sync(&guess) { - // TODO(bartlomieju): emitLegacyIndexDeprecation() - return Ok(to_file_specifier(&guess)); - } - } - - if mode.is_types() { - Err( - TypesNotFoundError(Box::new(TypesNotFoundErrorData { - code_specifier: to_file_specifier(&directory.join("index.js")), - maybe_referrer: maybe_referrer.cloned(), - })) - .into(), - ) - } else { - Err( - ModuleNotFoundError { - specifier: to_file_specifier(&directory.join("index.js")), - typ: "module", - maybe_referrer: maybe_referrer.cloned(), - } - .into(), - ) - } - } -} - -fn resolve_bin_entry_value<'a>( - package_json: &'a PackageJson, - bin_name: Option<&str>, -) -> Result<&'a str, AnyError> { - let bin = match &package_json.bin { - Some(bin) => bin, - None => bail!( - "'{}' did not have a bin property", - package_json.path.display(), - ), - }; - let bin_entry = match bin { - Value::String(_) => { - if bin_name.is_some() - && bin_name - != package_json - .name - .as_deref() - .map(|name| name.rsplit_once('/').map_or(name, |(_, name)| name)) - { - None - } else { - Some(bin) - } - } - Value::Object(o) => { - if let Some(bin_name) = bin_name { - o.get(bin_name) - } else if o.len() == 1 - || o.len() > 1 && o.values().all(|v| v == o.values().next().unwrap()) - { - o.values().next() - } else { - package_json.name.as_ref().and_then(|n| 
o.get(n)) - } - } - _ => bail!( - "'{}' did not have a bin property with a string or object value", - package_json.path.display() - ), - }; - let bin_entry = match bin_entry { - Some(e) => e, - None => { - let prefix = package_json - .name - .as_ref() - .map(|n| { - let mut prefix = format!("npm:{}", n); - if let Some(version) = &package_json.version { - prefix.push('@'); - prefix.push_str(version); - } - prefix.push('/'); - prefix - }) - .unwrap_or_default(); - let keys = bin - .as_object() - .map(|o| { - o.keys() - .map(|k| format!(" * {prefix}{k}")) - .collect::>() - }) - .unwrap_or_default(); - bail!( - "'{}' did not have a bin entry{}{}", - package_json.path.display(), - bin_name - .or(package_json.name.as_deref()) - .map(|name| format!(" for '{}'", name)) - .unwrap_or_default(), - if keys.is_empty() { - "".to_string() - } else { - format!("\n\nPossibilities:\n{}", keys.join("\n")) - } - ) - } - }; - match bin_entry { - Value::String(s) => Ok(s), - _ => bail!( - "'{}' had a non-string sub property of bin", - package_json.path.display(), - ), - } -} - -fn to_file_path(url: &ModuleSpecifier) -> PathBuf { - url - .to_file_path() - .unwrap_or_else(|_| panic!("Provided URL was not file:// URL: {url}")) -} - -fn to_file_path_string(url: &ModuleSpecifier) -> String { - to_file_path(url).display().to_string() -} - -fn should_be_treated_as_relative_or_absolute_path(specifier: &str) -> bool { - if specifier.is_empty() { - return false; - } - - if specifier.starts_with('/') { - return true; - } - - is_relative_specifier(specifier) -} - -// TODO(ry) We very likely have this utility function elsewhere in Deno. -fn is_relative_specifier(specifier: &str) -> bool { - let specifier_len = specifier.len(); - let specifier_chars: Vec<_> = specifier.chars().take(3).collect(); - - if !specifier_chars.is_empty() && specifier_chars[0] == '.' { - if specifier_len == 1 || specifier_chars[1] == '/' { - return true; - } - if specifier_chars[1] == '.' 
- && (specifier_len == 2 || specifier_chars[2] == '/') - { - return true; - } - } - false -} - -/// Alternate `PathBuf::with_extension` that will handle known extensions -/// more intelligently. -fn with_known_extension(path: &Path, ext: &str) -> PathBuf { - const NON_DECL_EXTS: &[&str] = &[ - "cjs", "js", "json", "jsx", "mjs", "tsx", /* ex. types.d */ "d", - ]; - const DECL_EXTS: &[&str] = &["cts", "mts", "ts"]; - - let file_name = match path.file_name() { - Some(value) => value.to_string_lossy(), - None => return path.to_path_buf(), - }; - let lowercase_file_name = file_name.to_lowercase(); - let period_index = lowercase_file_name.rfind('.').and_then(|period_index| { - let ext = &lowercase_file_name[period_index + 1..]; - if DECL_EXTS.contains(&ext) { - if let Some(next_period_index) = - lowercase_file_name[..period_index].rfind('.') - { - if &lowercase_file_name[next_period_index + 1..period_index] == "d" { - Some(next_period_index) - } else { - Some(period_index) - } - } else { - Some(period_index) - } - } else if NON_DECL_EXTS.contains(&ext) { - Some(period_index) - } else { - None - } - }); - - let file_name = match period_index { - Some(period_index) => &file_name[..period_index], - None => &file_name, - }; - path.with_file_name(format!("{file_name}.{ext}")) -} - -fn to_specifier_display_string(url: &ModuleSpecifier) -> String { - if let Ok(path) = url.to_file_path() { - path.display().to_string() - } else { - url.to_string() - } -} - -fn throw_invalid_subpath( - subpath: String, - package_json_path: &Path, - internal: bool, - maybe_referrer: Option<&ModuleSpecifier>, -) -> InvalidModuleSpecifierError { - let ie = if internal { "imports" } else { "exports" }; - let reason = format!( - "request is not a valid subpath for the \"{}\" resolution of {}", - ie, - package_json_path.display(), - ); - InvalidModuleSpecifierError { - request: subpath, - reason: Cow::Owned(reason), - maybe_referrer: maybe_referrer.map(to_specifier_display_string), - } -} - -pub fn 
parse_npm_pkg_name( - specifier: &str, - referrer: &ModuleSpecifier, -) -> Result<(String, String, bool), InvalidModuleSpecifierError> { - let mut separator_index = specifier.find('/'); - let mut valid_package_name = true; - let mut is_scoped = false; - if specifier.is_empty() { - valid_package_name = false; - } else if specifier.starts_with('@') { - is_scoped = true; - if let Some(index) = separator_index { - separator_index = specifier[index + 1..] - .find('/') - .map(|new_index| index + 1 + new_index); - } else { - valid_package_name = false; - } - } - - let package_name = if let Some(index) = separator_index { - specifier[0..index].to_string() - } else { - specifier.to_string() - }; - - // Package name cannot have leading . and cannot have percent-encoding or separators. - for ch in package_name.chars() { - if ch == '%' || ch == '\\' { - valid_package_name = false; - break; - } - } - - if !valid_package_name { - return Err(errors::InvalidModuleSpecifierError { - request: specifier.to_string(), - reason: Cow::Borrowed("is not a valid package name"), - maybe_referrer: Some(to_specifier_display_string(referrer)), - }); - } - - let package_subpath = if let Some(index) = separator_index { - format!(".{}", specifier.chars().skip(index).collect::()) - } else { - ".".to_string() - }; - - Ok((package_name, package_subpath, is_scoped)) -} - -fn pattern_key_compare(a: &str, b: &str) -> i32 { - let a_pattern_index = a.find('*'); - let b_pattern_index = b.find('*'); - - let base_len_a = if let Some(index) = a_pattern_index { - index + 1 - } else { - a.len() - }; - let base_len_b = if let Some(index) = b_pattern_index { - index + 1 - } else { - b.len() - }; - - if base_len_a > base_len_b { - return -1; - } - - if base_len_b > base_len_a { - return 1; - } - - if a_pattern_index.is_none() { - return 1; - } - - if b_pattern_index.is_none() { - return -1; - } - - if a.len() > b.len() { - return -1; - } - - if b.len() > a.len() { - return 1; - } - - 0 -} - -/// Gets the 
corresponding @types package for the provided package name. -fn types_package_name(package_name: &str) -> String { - debug_assert!(!package_name.starts_with("@types/")); - // Scoped packages will get two underscores for each slash - // https://github.com/DefinitelyTyped/DefinitelyTyped/tree/15f1ece08f7b498f4b9a2147c2a46e94416ca777#what-about-scoped-packages - format!("@types/{}", package_name.replace('/', "__")) -} - -#[cfg(test)] -mod tests { - use deno_core::serde_json::json; - - use super::*; - - fn build_package_json(json: Value) -> PackageJson { - PackageJson::load_from_value(PathBuf::from("/package.json"), json) - } - - #[test] - fn test_resolve_bin_entry_value() { - // should resolve the specified value - let pkg_json = build_package_json(json!({ - "name": "pkg", - "version": "1.1.1", - "bin": { - "bin1": "./value1", - "bin2": "./value2", - "pkg": "./value3", - } - })); - assert_eq!( - resolve_bin_entry_value(&pkg_json, Some("bin1")).unwrap(), - "./value1" - ); - - // should resolve the value with the same name when not specified - assert_eq!( - resolve_bin_entry_value(&pkg_json, None).unwrap(), - "./value3" - ); - - // should not resolve when specified value does not exist - assert_eq!( - resolve_bin_entry_value(&pkg_json, Some("other"),) - .err() - .unwrap() - .to_string(), - concat!( - "'/package.json' did not have a bin entry for 'other'\n", - "\n", - "Possibilities:\n", - " * npm:pkg@1.1.1/bin1\n", - " * npm:pkg@1.1.1/bin2\n", - " * npm:pkg@1.1.1/pkg" - ) - ); - - // should not resolve when default value can't be determined - let pkg_json = build_package_json(json!({ - "name": "pkg", - "version": "1.1.1", - "bin": { - "bin": "./value1", - "bin2": "./value2", - } - })); - assert_eq!( - resolve_bin_entry_value(&pkg_json, None) - .err() - .unwrap() - .to_string(), - concat!( - "'/package.json' did not have a bin entry for 'pkg'\n", - "\n", - "Possibilities:\n", - " * npm:pkg@1.1.1/bin\n", - " * npm:pkg@1.1.1/bin2", - ) - ); - - // should resolve since all 
the values are the same - let pkg_json = build_package_json(json!({ - "name": "pkg", - "version": "1.2.3", - "bin": { - "bin1": "./value", - "bin2": "./value", - } - })); - assert_eq!( - resolve_bin_entry_value(&pkg_json, None,).unwrap(), - "./value" - ); - - // should not resolve when specified and is a string - let pkg_json = build_package_json(json!({ - "name": "pkg", - "version": "1.2.3", - "bin": "./value", - })); - assert_eq!( - resolve_bin_entry_value(&pkg_json, Some("path"),) - .err() - .unwrap() - .to_string(), - "'/package.json' did not have a bin entry for 'path'" - ); - - // no version in the package.json - let pkg_json = build_package_json(json!({ - "name": "pkg", - "bin": { - "bin1": "./value1", - "bin2": "./value2", - } - })); - assert_eq!( - resolve_bin_entry_value(&pkg_json, None) - .err() - .unwrap() - .to_string(), - concat!( - "'/package.json' did not have a bin entry for 'pkg'\n", - "\n", - "Possibilities:\n", - " * npm:pkg/bin1\n", - " * npm:pkg/bin2", - ) - ); - - // no name or version in the package.json - let pkg_json = build_package_json(json!({ - "bin": { - "bin1": "./value1", - "bin2": "./value2", - } - })); - assert_eq!( - resolve_bin_entry_value(&pkg_json, None) - .err() - .unwrap() - .to_string(), - concat!( - "'/package.json' did not have a bin entry\n", - "\n", - "Possibilities:\n", - " * bin1\n", - " * bin2", - ) - ); - } - - #[test] - fn test_parse_package_name() { - let dummy_referrer = Url::parse("http://example.com").unwrap(); - - assert_eq!( - parse_npm_pkg_name("fetch-blob", &dummy_referrer).unwrap(), - ("fetch-blob".to_string(), ".".to_string(), false) - ); - assert_eq!( - parse_npm_pkg_name("@vue/plugin-vue", &dummy_referrer).unwrap(), - ("@vue/plugin-vue".to_string(), ".".to_string(), true) - ); - assert_eq!( - parse_npm_pkg_name("@astrojs/prism/dist/highlighter", &dummy_referrer) - .unwrap(), - ( - "@astrojs/prism".to_string(), - "./dist/highlighter".to_string(), - true - ) - ); - } - - #[test] - fn 
test_with_known_extension() { - let cases = &[ - ("test", "d.ts", "test.d.ts"), - ("test.d.ts", "ts", "test.ts"), - ("test.worker", "d.ts", "test.worker.d.ts"), - ("test.d.mts", "js", "test.js"), - ]; - for (path, ext, expected) in cases { - let actual = with_known_extension(&PathBuf::from(path), ext); - assert_eq!(actual.to_string_lossy(), *expected); - } - } - - #[test] - fn test_types_package_name() { - assert_eq!(types_package_name("name"), "@types/name"); - assert_eq!( - types_package_name("@scoped/package"), - "@types/@scoped__package" - ); - } -} diff --git a/ext/node_resolver/Cargo.toml b/ext/node_resolver/Cargo.toml new file mode 100644 index 000000000..a636eaf9f --- /dev/null +++ b/ext/node_resolver/Cargo.toml @@ -0,0 +1,32 @@ +# Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +[package] +name = "node_resolver" +version = "0.1.0" +authors.workspace = true +edition.workspace = true +license.workspace = true +readme = "README.md" +repository.workspace = true +description = "Node.js module resolution algorithm used in Deno" + +[lib] +path = "lib.rs" + +[features] +sync = ["deno_package_json/sync"] + +[dependencies] +anyhow.workspace = true +async-trait.workspace = true +deno_media_type.workspace = true +deno_package_json.workspace = true +futures.workspace = true +lazy-regex.workspace = true +once_cell.workspace = true +path-clean = "=0.1.0" +regex.workspace = true +serde_json.workspace = true +thiserror.workspace = true +tokio.workspace = true +url.workspace = true diff --git a/ext/node_resolver/README.md b/ext/node_resolver/README.md new file mode 100644 index 000000000..8f2f63ca1 --- /dev/null +++ b/ext/node_resolver/README.md @@ -0,0 +1,6 @@ +# Node Resolver + +[![crates](https://img.shields.io/crates/v/node_resolver.svg)](https://crates.io/crates/node_resolver) +[![docs](https://docs.rs/node_resolver/badge.svg)](https://docs.rs/node_resolver) + +Provides Node.js compatible resolution for the Deno project. 
diff --git a/ext/node_resolver/analyze.rs b/ext/node_resolver/analyze.rs new file mode 100644 index 000000000..8d6a73424 --- /dev/null +++ b/ext/node_resolver/analyze.rs @@ -0,0 +1,624 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +use std::collections::BTreeSet; +use std::collections::HashSet; +use std::path::Path; +use std::path::PathBuf; + +use futures::future::LocalBoxFuture; +use futures::stream::FuturesUnordered; +use futures::FutureExt; +use futures::StreamExt; +use once_cell::sync::Lazy; + +use anyhow::Context; +use anyhow::Error as AnyError; +use url::Url; + +use crate::env::NodeResolverEnv; +use crate::package_json::load_pkg_json; +use crate::path::to_file_specifier; +use crate::resolution::NodeResolverRc; +use crate::NodeModuleKind; +use crate::NodeResolutionMode; +use crate::NpmResolverRc; +use crate::PathClean; + +#[derive(Debug, Clone)] +pub enum CjsAnalysis { + /// File was found to be an ES module and the translator should + /// load the code as ESM. + Esm(String), + Cjs(CjsAnalysisExports), +} + +#[derive(Debug, Clone)] +pub struct CjsAnalysisExports { + pub exports: Vec, + pub reexports: Vec, +} + +/// Code analyzer for CJS and ESM files. +#[async_trait::async_trait(?Send)] +pub trait CjsCodeAnalyzer { + /// Analyzes CommonJs code for exports and reexports, which is + /// then used to determine the wrapper ESM module exports. + /// + /// Note that the source is provided by the caller when the caller + /// already has it. If the source is needed by the implementation, + /// then it can use the provided source, or otherwise load it if + /// necessary. 
+ async fn analyze_cjs( + &self, + specifier: &Url, + maybe_source: Option, + ) -> Result; +} + +pub struct NodeCodeTranslator< + TCjsCodeAnalyzer: CjsCodeAnalyzer, + TNodeResolverEnv: NodeResolverEnv, +> { + cjs_code_analyzer: TCjsCodeAnalyzer, + env: TNodeResolverEnv, + node_resolver: NodeResolverRc, + npm_resolver: NpmResolverRc, +} + +impl + NodeCodeTranslator +{ + pub fn new( + cjs_code_analyzer: TCjsCodeAnalyzer, + env: TNodeResolverEnv, + node_resolver: NodeResolverRc, + npm_resolver: NpmResolverRc, + ) -> Self { + Self { + cjs_code_analyzer, + env, + node_resolver, + npm_resolver, + } + } + + /// Translates given CJS module into ESM. This function will perform static + /// analysis on the file to find defined exports and reexports. + /// + /// For all discovered reexports the analysis will be performed recursively. + /// + /// If successful a source code for equivalent ES module is returned. + pub async fn translate_cjs_to_esm( + &self, + entry_specifier: &Url, + source: Option, + ) -> Result { + let mut temp_var_count = 0; + + let analysis = self + .cjs_code_analyzer + .analyze_cjs(entry_specifier, source) + .await?; + + let analysis = match analysis { + CjsAnalysis::Esm(source) => return Ok(source), + CjsAnalysis::Cjs(analysis) => analysis, + }; + + let mut source = vec![ + r#"import {createRequire as __internalCreateRequire} from "node:module"; + const require = __internalCreateRequire(import.meta.url);"# + .to_string(), + ]; + + // use a BTreeSet to make the output deterministic for v8's code cache + let mut all_exports = analysis.exports.into_iter().collect::>(); + + if !analysis.reexports.is_empty() { + let mut errors = Vec::new(); + self + .analyze_reexports( + entry_specifier, + analysis.reexports, + &mut all_exports, + &mut errors, + ) + .await; + + // surface errors afterwards in a deterministic way + if !errors.is_empty() { + errors.sort_by_cached_key(|e| e.to_string()); + return Err(errors.remove(0)); + } + } + + source.push(format!( + "const 
mod = require(\"{}\");", + entry_specifier + .to_file_path() + .unwrap() + .to_str() + .unwrap() + .replace('\\', "\\\\") + .replace('\'', "\\\'") + .replace('\"', "\\\"") + )); + + for export in &all_exports { + if export.as_str() != "default" { + add_export( + &mut source, + export, + &format!("mod[\"{}\"]", escape_for_double_quote_string(export)), + &mut temp_var_count, + ); + } + } + + source.push("export default mod;".to_string()); + + let translated_source = source.join("\n"); + Ok(translated_source) + } + + async fn analyze_reexports<'a>( + &'a self, + entry_specifier: &url::Url, + reexports: Vec, + all_exports: &mut BTreeSet, + // this goes through the modules concurrently, so collect + // the errors in order to be deterministic + errors: &mut Vec, + ) { + struct Analysis { + reexport_specifier: url::Url, + referrer: url::Url, + analysis: CjsAnalysis, + } + + type AnalysisFuture<'a> = LocalBoxFuture<'a, Result>; + + let mut handled_reexports: HashSet = HashSet::default(); + handled_reexports.insert(entry_specifier.clone()); + let mut analyze_futures: FuturesUnordered> = + FuturesUnordered::new(); + let cjs_code_analyzer = &self.cjs_code_analyzer; + let mut handle_reexports = + |referrer: url::Url, + reexports: Vec, + analyze_futures: &mut FuturesUnordered>, + errors: &mut Vec| { + // 1. 
Resolve the re-exports and start a future to analyze each one + for reexport in reexports { + let result = self.resolve( + &reexport, + &referrer, + // FIXME(bartlomieju): check if these conditions are okay, probably + // should be `deno-require`, because `deno` is already used in `esm_resolver.rs` + &["deno", "require", "default"], + NodeResolutionMode::Execution, + ); + let reexport_specifier = match result { + Ok(specifier) => specifier, + Err(err) => { + errors.push(err); + continue; + } + }; + + if !handled_reexports.insert(reexport_specifier.clone()) { + continue; + } + + let referrer = referrer.clone(); + let future = async move { + let analysis = cjs_code_analyzer + .analyze_cjs(&reexport_specifier, None) + .await + .with_context(|| { + format!( + "Could not load '{}' ({}) referenced from {}", + reexport, reexport_specifier, referrer + ) + })?; + + Ok(Analysis { + reexport_specifier, + referrer, + analysis, + }) + } + .boxed_local(); + analyze_futures.push(future); + } + }; + + handle_reexports( + entry_specifier.clone(), + reexports, + &mut analyze_futures, + errors, + ); + + while let Some(analysis_result) = analyze_futures.next().await { + // 2. 
Look at the analysis result and resolve its exports and re-exports + let Analysis { + reexport_specifier, + referrer, + analysis, + } = match analysis_result { + Ok(analysis) => analysis, + Err(err) => { + errors.push(err); + continue; + } + }; + match analysis { + CjsAnalysis::Esm(_) => { + // todo(dsherret): support this once supporting requiring ES modules + errors.push(anyhow::anyhow!( + "Cannot require ES module '{}' from '{}'", + reexport_specifier, + referrer, + )); + } + CjsAnalysis::Cjs(analysis) => { + if !analysis.reexports.is_empty() { + handle_reexports( + reexport_specifier.clone(), + analysis.reexports, + &mut analyze_futures, + errors, + ); + } + + all_exports.extend( + analysis + .exports + .into_iter() + .filter(|e| e.as_str() != "default"), + ); + } + } + } + } + + // todo(dsherret): what is going on here? Isn't this a bunch of duplicate code? + fn resolve( + &self, + specifier: &str, + referrer: &Url, + conditions: &[&str], + mode: NodeResolutionMode, + ) -> Result { + if specifier.starts_with('/') { + todo!(); + } + + let referrer_path = referrer.to_file_path().unwrap(); + if specifier.starts_with("./") || specifier.starts_with("../") { + if let Some(parent) = referrer_path.parent() { + return self + .file_extension_probe(parent.join(specifier), &referrer_path) + .map(|p| to_file_specifier(&p)); + } else { + todo!(); + } + } + + // We've got a bare specifier or maybe bare_specifier/blah.js" + let (package_specifier, package_subpath) = + parse_specifier(specifier).unwrap(); + + let module_dir = self.npm_resolver.resolve_package_folder_from_package( + package_specifier.as_str(), + referrer, + )?; + + let package_json_path = module_dir.join("package.json"); + let maybe_package_json = + load_pkg_json(self.env.pkg_json_fs(), &package_json_path)?; + if let Some(package_json) = maybe_package_json { + if let Some(exports) = &package_json.exports { + return self + .node_resolver + .package_exports_resolve( + &package_json_path, + &package_subpath, + 
exports, + Some(referrer), + NodeModuleKind::Esm, + conditions, + mode, + ) + .map_err(AnyError::from); + } + + // old school + if package_subpath != "." { + let d = module_dir.join(package_subpath); + if self.env.is_dir_sync(&d) { + // subdir might have a package.json that specifies the entrypoint + let package_json_path = d.join("package.json"); + let maybe_package_json = + load_pkg_json(self.env.pkg_json_fs(), &package_json_path)?; + if let Some(package_json) = maybe_package_json { + if let Some(main) = package_json.main(NodeModuleKind::Cjs) { + return Ok(to_file_specifier(&d.join(main).clean())); + } + } + + return Ok(to_file_specifier(&d.join("index.js").clean())); + } + return self + .file_extension_probe(d, &referrer_path) + .map(|p| to_file_specifier(&p)); + } else if let Some(main) = package_json.main(NodeModuleKind::Cjs) { + return Ok(to_file_specifier(&module_dir.join(main).clean())); + } else { + return Ok(to_file_specifier(&module_dir.join("index.js").clean())); + } + } + + // as a fallback, attempt to resolve it via the ancestor directories + let mut last = referrer_path.as_path(); + while let Some(parent) = last.parent() { + if !self.npm_resolver.in_npm_package_at_dir_path(parent) { + break; + } + let path = if parent.ends_with("node_modules") { + parent.join(specifier) + } else { + parent.join("node_modules").join(specifier) + }; + if let Ok(path) = self.file_extension_probe(path, &referrer_path) { + return Ok(to_file_specifier(&path)); + } + last = parent; + } + + Err(not_found(specifier, &referrer_path)) + } + + fn file_extension_probe( + &self, + p: PathBuf, + referrer: &Path, + ) -> Result { + let p = p.clean(); + if self.env.exists_sync(&p) { + let file_name = p.file_name().unwrap(); + let p_js = + p.with_file_name(format!("{}.js", file_name.to_str().unwrap())); + if self.env.is_file_sync(&p_js) { + return Ok(p_js); + } else if self.env.is_dir_sync(&p) { + return Ok(p.join("index.js")); + } else { + return Ok(p); + } + } else if let 
Some(file_name) = p.file_name() { + { + let p_js = + p.with_file_name(format!("{}.js", file_name.to_str().unwrap())); + if self.env.is_file_sync(&p_js) { + return Ok(p_js); + } + } + { + let p_json = + p.with_file_name(format!("{}.json", file_name.to_str().unwrap())); + if self.env.is_file_sync(&p_json) { + return Ok(p_json); + } + } + } + Err(not_found(&p.to_string_lossy(), referrer)) + } +} + +static RESERVED_WORDS: Lazy> = Lazy::new(|| { + HashSet::from([ + "abstract", + "arguments", + "async", + "await", + "boolean", + "break", + "byte", + "case", + "catch", + "char", + "class", + "const", + "continue", + "debugger", + "default", + "delete", + "do", + "double", + "else", + "enum", + "eval", + "export", + "extends", + "false", + "final", + "finally", + "float", + "for", + "function", + "get", + "goto", + "if", + "implements", + "import", + "in", + "instanceof", + "int", + "interface", + "let", + "long", + "mod", + "native", + "new", + "null", + "package", + "private", + "protected", + "public", + "return", + "set", + "short", + "static", + "super", + "switch", + "synchronized", + "this", + "throw", + "throws", + "transient", + "true", + "try", + "typeof", + "var", + "void", + "volatile", + "while", + "with", + "yield", + ]) +}); + +fn add_export( + source: &mut Vec, + name: &str, + initializer: &str, + temp_var_count: &mut usize, +) { + fn is_valid_var_decl(name: &str) -> bool { + // it's ok to be super strict here + if name.is_empty() { + return false; + } + + if let Some(first) = name.chars().next() { + if !first.is_ascii_alphabetic() && first != '_' && first != '$' { + return false; + } + } + + name + .chars() + .all(|c| c.is_ascii_alphanumeric() || c == '_' || c == '$') + } + + // TODO(bartlomieju): Node actually checks if a given export exists in `exports` object, + // but it might not be necessary here since our analysis is more detailed? 
+ if RESERVED_WORDS.contains(name) || !is_valid_var_decl(name) { + *temp_var_count += 1; + // we can't create an identifier with a reserved word or invalid identifier name, + // so assign it to a temporary variable that won't have a conflict, then re-export + // it as a string + source.push(format!( + "const __deno_export_{temp_var_count}__ = {initializer};" + )); + source.push(format!( + "export {{ __deno_export_{temp_var_count}__ as \"{}\" }};", + escape_for_double_quote_string(name) + )); + } else { + source.push(format!("export const {name} = {initializer};")); + } +} + +fn parse_specifier(specifier: &str) -> Option<(String, String)> { + let mut separator_index = specifier.find('/'); + let mut valid_package_name = true; + // let mut is_scoped = false; + if specifier.is_empty() { + valid_package_name = false; + } else if specifier.starts_with('@') { + // is_scoped = true; + if let Some(index) = separator_index { + separator_index = specifier[index + 1..].find('/').map(|i| i + index + 1); + } else { + valid_package_name = false; + } + } + + let package_name = if let Some(index) = separator_index { + specifier[0..index].to_string() + } else { + specifier.to_string() + }; + + // Package name cannot have leading . and cannot have percent-encoding or separators. 
+ for ch in package_name.chars() { + if ch == '%' || ch == '\\' { + valid_package_name = false; + break; + } + } + + if !valid_package_name { + return None; + } + + let package_subpath = if let Some(index) = separator_index { + format!(".{}", specifier.chars().skip(index).collect::()) + } else { + ".".to_string() + }; + + Some((package_name, package_subpath)) +} + +fn not_found(path: &str, referrer: &Path) -> AnyError { + let msg = format!( + "[ERR_MODULE_NOT_FOUND] Cannot find module \"{}\" imported from \"{}\"", + path, + referrer.to_string_lossy() + ); + std::io::Error::new(std::io::ErrorKind::NotFound, msg).into() +} + +fn escape_for_double_quote_string(text: &str) -> String { + text.replace('\\', "\\\\").replace('"', "\\\"") +} +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_add_export() { + let mut temp_var_count = 0; + let mut source = vec![]; + + let exports = vec!["static", "server", "app", "dashed-export", "3d"]; + for export in exports { + add_export(&mut source, export, "init", &mut temp_var_count); + } + assert_eq!( + source, + vec![ + "const __deno_export_1__ = init;".to_string(), + "export { __deno_export_1__ as \"static\" };".to_string(), + "export const server = init;".to_string(), + "export const app = init;".to_string(), + "const __deno_export_2__ = init;".to_string(), + "export { __deno_export_2__ as \"dashed-export\" };".to_string(), + "const __deno_export_3__ = init;".to_string(), + "export { __deno_export_3__ as \"3d\" };".to_string(), + ] + ) + } + + #[test] + fn test_parse_specifier() { + assert_eq!( + parse_specifier("@some-package/core/actions"), + Some(("@some-package/core".to_string(), "./actions".to_string())) + ); + } +} diff --git a/ext/node_resolver/clippy.toml b/ext/node_resolver/clippy.toml new file mode 100644 index 000000000..86150781b --- /dev/null +++ b/ext/node_resolver/clippy.toml @@ -0,0 +1,48 @@ +disallowed-methods = [ + { path = "std::env::current_dir", reason = "File system operations should be done 
using NodeResolverFs trait" }, + { path = "std::path::Path::canonicalize", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::path::Path::is_dir", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::path::Path::is_file", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::path::Path::is_symlink", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::path::Path::metadata", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::path::Path::read_dir", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::path::Path::read_link", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::path::Path::symlink_metadata", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::path::Path::try_exists", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::path::PathBuf::exists", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::path::PathBuf::canonicalize", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::path::PathBuf::is_dir", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::path::PathBuf::is_file", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::path::PathBuf::is_symlink", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::path::PathBuf::metadata", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::path::PathBuf::read_dir", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = 
"std::path::PathBuf::read_link", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::path::PathBuf::symlink_metadata", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::path::PathBuf::try_exists", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::env::set_current_dir", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::env::temp_dir", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::fs::canonicalize", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::fs::copy", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::fs::create_dir_all", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::fs::create_dir", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::fs::DirBuilder::new", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::fs::hard_link", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::fs::metadata", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::fs::OpenOptions::new", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::fs::read_dir", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::fs::read_link", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::fs::read_to_string", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::fs::read", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::fs::remove_dir_all", 
reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::fs::remove_dir", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::fs::remove_file", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::fs::rename", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::fs::set_permissions", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::fs::symlink_metadata", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::fs::write", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::path::Path::canonicalize", reason = "File system operations should be done using NodeResolverFs trait" }, + { path = "std::path::Path::exists", reason = "File system operations should be done using NodeResolverFs trait" }, +] +disallowed-types = [ + { path = "std::sync::Arc", reason = "use crate::sync::MaybeArc instead" }, +] diff --git a/ext/node_resolver/env.rs b/ext/node_resolver/env.rs new file mode 100644 index 000000000..b520ece0f --- /dev/null +++ b/ext/node_resolver/env.rs @@ -0,0 +1,39 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
+ +use std::path::Path; +use std::path::PathBuf; + +use crate::sync::MaybeSend; +use crate::sync::MaybeSync; + +pub struct NodeResolverFsStat { + pub is_file: bool, + pub is_dir: bool, + pub is_symlink: bool, +} + +pub trait NodeResolverEnv: std::fmt::Debug + MaybeSend + MaybeSync { + fn is_builtin_node_module(&self, specifier: &str) -> bool; + + fn realpath_sync(&self, path: &Path) -> std::io::Result; + + fn stat_sync(&self, path: &Path) -> std::io::Result; + + fn exists_sync(&self, path: &Path) -> bool; + + fn is_file_sync(&self, path: &Path) -> bool { + self + .stat_sync(path) + .map(|stat| stat.is_file) + .unwrap_or(false) + } + + fn is_dir_sync(&self, path: &Path) -> bool { + self + .stat_sync(path) + .map(|stat| stat.is_dir) + .unwrap_or(false) + } + + fn pkg_json_fs(&self) -> &dyn deno_package_json::fs::DenoPkgJsonFs; +} diff --git a/ext/node_resolver/errors.rs b/ext/node_resolver/errors.rs new file mode 100644 index 000000000..4ba829eda --- /dev/null +++ b/ext/node_resolver/errors.rs @@ -0,0 +1,769 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +use std::borrow::Cow; +use std::fmt::Write; +use std::path::PathBuf; + +use thiserror::Error; +use url::Url; + +use crate::NodeModuleKind; +use crate::NodeResolutionMode; + +macro_rules! 
kinded_err { + ($name:ident, $kind_name:ident) => { + #[derive(Error, Debug)] + #[error(transparent)] + pub struct $name(pub Box<$kind_name>); + + impl $name { + pub fn as_kind(&self) -> &$kind_name { + &self.0 + } + + pub fn into_kind(self) -> $kind_name { + *self.0 + } + } + + impl From for $name + where + $kind_name: From, + { + fn from(err: E) -> Self { + $name(Box::new($kind_name::from(err))) + } + } + }; +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[allow(non_camel_case_types)] +pub enum NodeJsErrorCode { + ERR_INVALID_MODULE_SPECIFIER, + ERR_INVALID_PACKAGE_CONFIG, + ERR_INVALID_PACKAGE_TARGET, + ERR_MODULE_NOT_FOUND, + ERR_PACKAGE_IMPORT_NOT_DEFINED, + ERR_PACKAGE_PATH_NOT_EXPORTED, + ERR_UNKNOWN_FILE_EXTENSION, + ERR_UNSUPPORTED_DIR_IMPORT, + ERR_UNSUPPORTED_ESM_URL_SCHEME, + /// Deno specific since Node doesn't support TypeScript. + ERR_TYPES_NOT_FOUND, +} + +impl std::fmt::Display for NodeJsErrorCode { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_str()) + } +} + +impl NodeJsErrorCode { + pub fn as_str(&self) -> &'static str { + use NodeJsErrorCode::*; + match self { + ERR_INVALID_MODULE_SPECIFIER => "ERR_INVALID_MODULE_SPECIFIER", + ERR_INVALID_PACKAGE_CONFIG => "ERR_INVALID_PACKAGE_CONFIG", + ERR_INVALID_PACKAGE_TARGET => "ERR_INVALID_PACKAGE_TARGET", + ERR_MODULE_NOT_FOUND => "ERR_MODULE_NOT_FOUND", + ERR_PACKAGE_IMPORT_NOT_DEFINED => "ERR_PACKAGE_IMPORT_NOT_DEFINED", + ERR_PACKAGE_PATH_NOT_EXPORTED => "ERR_PACKAGE_PATH_NOT_EXPORTED", + ERR_UNKNOWN_FILE_EXTENSION => "ERR_UNKNOWN_FILE_EXTENSION", + ERR_UNSUPPORTED_DIR_IMPORT => "ERR_UNSUPPORTED_DIR_IMPORT", + ERR_UNSUPPORTED_ESM_URL_SCHEME => "ERR_UNSUPPORTED_ESM_URL_SCHEME", + ERR_TYPES_NOT_FOUND => "ERR_TYPES_NOT_FOUND", + } + } +} + +pub trait NodeJsErrorCoded { + fn code(&self) -> NodeJsErrorCode; +} + +kinded_err!( + ResolvePkgSubpathFromDenoModuleError, + ResolvePkgSubpathFromDenoModuleErrorKind +); + +impl NodeJsErrorCoded for 
ResolvePkgSubpathFromDenoModuleError { + fn code(&self) -> NodeJsErrorCode { + use ResolvePkgSubpathFromDenoModuleErrorKind::*; + match self.as_kind() { + PackageSubpathResolve(e) => e.code(), + UrlToNodeResolution(e) => e.code(), + } + } +} + +#[derive(Debug, Error)] +pub enum ResolvePkgSubpathFromDenoModuleErrorKind { + #[error(transparent)] + PackageSubpathResolve(#[from] PackageSubpathResolveError), + #[error(transparent)] + UrlToNodeResolution(#[from] UrlToNodeResolutionError), +} + +// todo(https://github.com/denoland/deno_core/issues/810): make this a TypeError +#[derive(Debug, Clone, Error)] +#[error( + "[{}] Invalid module '{}' {}{}", + self.code(), + request, + reason, + maybe_referrer.as_ref().map(|referrer| format!(" imported from '{}'", referrer)).unwrap_or_default() +)] +pub struct InvalidModuleSpecifierError { + pub request: String, + pub reason: Cow<'static, str>, + pub maybe_referrer: Option, +} + +impl NodeJsErrorCoded for InvalidModuleSpecifierError { + fn code(&self) -> NodeJsErrorCode { + NodeJsErrorCode::ERR_INVALID_MODULE_SPECIFIER + } +} + +kinded_err!(LegacyResolveError, LegacyResolveErrorKind); + +#[derive(Debug, Error)] +pub enum LegacyResolveErrorKind { + #[error(transparent)] + TypesNotFound(#[from] TypesNotFoundError), + #[error(transparent)] + ModuleNotFound(#[from] ModuleNotFoundError), +} + +impl NodeJsErrorCoded for LegacyResolveError { + fn code(&self) -> NodeJsErrorCode { + match self.as_kind() { + LegacyResolveErrorKind::TypesNotFound(e) => e.code(), + LegacyResolveErrorKind::ModuleNotFound(e) => e.code(), + } + } +} + +kinded_err!(PackageFolderResolveError, PackageFolderResolveErrorKind); + +#[derive(Debug, Error)] +#[error( + "Could not find package '{}' from referrer '{}'{}.", + package_name, + referrer, + referrer_extra.as_ref().map(|r| format!(" ({})", r)).unwrap_or_default() +)] +pub struct PackageNotFoundError { + pub package_name: String, + pub referrer: Url, + /// Extra information about the referrer. 
+ pub referrer_extra: Option, +} + +impl NodeJsErrorCoded for PackageNotFoundError { + fn code(&self) -> NodeJsErrorCode { + NodeJsErrorCode::ERR_MODULE_NOT_FOUND + } +} + +#[derive(Debug, Error)] +#[error( + "Could not find referrer npm package '{}'{}.", + referrer, + referrer_extra.as_ref().map(|r| format!(" ({})", r)).unwrap_or_default() +)] +pub struct ReferrerNotFoundError { + pub referrer: Url, + /// Extra information about the referrer. + pub referrer_extra: Option, +} + +impl NodeJsErrorCoded for ReferrerNotFoundError { + fn code(&self) -> NodeJsErrorCode { + NodeJsErrorCode::ERR_MODULE_NOT_FOUND + } +} + +#[derive(Debug, Error)] +#[error("Failed resolving '{package_name}' from referrer '{referrer}'.")] +pub struct PackageFolderResolveIoError { + pub package_name: String, + pub referrer: Url, + #[source] + pub source: std::io::Error, +} + +impl NodeJsErrorCoded for PackageFolderResolveIoError { + fn code(&self) -> NodeJsErrorCode { + NodeJsErrorCode::ERR_MODULE_NOT_FOUND + } +} + +impl NodeJsErrorCoded for PackageFolderResolveError { + fn code(&self) -> NodeJsErrorCode { + match self.as_kind() { + PackageFolderResolveErrorKind::PackageNotFound(e) => e.code(), + PackageFolderResolveErrorKind::ReferrerNotFound(e) => e.code(), + PackageFolderResolveErrorKind::Io(e) => e.code(), + } + } +} + +#[derive(Debug, Error)] +pub enum PackageFolderResolveErrorKind { + #[error(transparent)] + PackageNotFound(#[from] PackageNotFoundError), + #[error(transparent)] + ReferrerNotFound(#[from] ReferrerNotFoundError), + #[error(transparent)] + Io(#[from] PackageFolderResolveIoError), +} + +kinded_err!(PackageSubpathResolveError, PackageSubpathResolveErrorKind); + +impl NodeJsErrorCoded for PackageSubpathResolveError { + fn code(&self) -> NodeJsErrorCode { + match self.as_kind() { + PackageSubpathResolveErrorKind::PkgJsonLoad(e) => e.code(), + PackageSubpathResolveErrorKind::Exports(e) => e.code(), + PackageSubpathResolveErrorKind::LegacyResolve(e) => e.code(), + } + } +} + 
+#[derive(Debug, Error)] +pub enum PackageSubpathResolveErrorKind { + #[error(transparent)] + PkgJsonLoad(#[from] PackageJsonLoadError), + #[error(transparent)] + Exports(PackageExportsResolveError), + #[error(transparent)] + LegacyResolve(LegacyResolveError), +} + +#[derive(Debug, Error)] +#[error( + "Target '{}' not found from '{}'{}{}.", + target, + pkg_json_path.display(), + maybe_referrer.as_ref().map(|r| + format!( + " from{} referrer {}", + match referrer_kind { + NodeModuleKind::Esm => "", + NodeModuleKind::Cjs => " cjs", + }, + r + ) + ).unwrap_or_default(), + match mode { + NodeResolutionMode::Execution => "", + NodeResolutionMode::Types => " for types", + } +)] +pub struct PackageTargetNotFoundError { + pub pkg_json_path: PathBuf, + pub target: String, + pub maybe_referrer: Option, + pub referrer_kind: NodeModuleKind, + pub mode: NodeResolutionMode, +} + +impl NodeJsErrorCoded for PackageTargetNotFoundError { + fn code(&self) -> NodeJsErrorCode { + NodeJsErrorCode::ERR_MODULE_NOT_FOUND + } +} + +kinded_err!(PackageTargetResolveError, PackageTargetResolveErrorKind); + +impl NodeJsErrorCoded for PackageTargetResolveError { + fn code(&self) -> NodeJsErrorCode { + match self.as_kind() { + PackageTargetResolveErrorKind::NotFound(e) => e.code(), + PackageTargetResolveErrorKind::InvalidPackageTarget(e) => e.code(), + PackageTargetResolveErrorKind::InvalidModuleSpecifier(e) => e.code(), + PackageTargetResolveErrorKind::PackageResolve(e) => e.code(), + PackageTargetResolveErrorKind::TypesNotFound(e) => e.code(), + } + } +} + +#[derive(Debug, Error)] +pub enum PackageTargetResolveErrorKind { + #[error(transparent)] + NotFound(#[from] PackageTargetNotFoundError), + #[error(transparent)] + InvalidPackageTarget(#[from] InvalidPackageTargetError), + #[error(transparent)] + InvalidModuleSpecifier(#[from] InvalidModuleSpecifierError), + #[error(transparent)] + PackageResolve(#[from] PackageResolveError), + #[error(transparent)] + TypesNotFound(#[from] 
TypesNotFoundError), +} + +kinded_err!(PackageExportsResolveError, PackageExportsResolveErrorKind); + +impl NodeJsErrorCoded for PackageExportsResolveError { + fn code(&self) -> NodeJsErrorCode { + match self.as_kind() { + PackageExportsResolveErrorKind::PackagePathNotExported(e) => e.code(), + PackageExportsResolveErrorKind::PackageTargetResolve(e) => e.code(), + } + } +} + +#[derive(Debug, Error)] +pub enum PackageExportsResolveErrorKind { + #[error(transparent)] + PackagePathNotExported(#[from] PackagePathNotExportedError), + #[error(transparent)] + PackageTargetResolve(#[from] PackageTargetResolveError), +} + +#[derive(Debug, Error)] +#[error( + "[{}] Could not find types for '{}'{}", + self.code(), + self.0.code_specifier, + self.0.maybe_referrer.as_ref().map(|r| format!(" imported from '{}'", r)).unwrap_or_default(), + )] +pub struct TypesNotFoundError(pub Box); + +#[derive(Debug)] +pub struct TypesNotFoundErrorData { + pub code_specifier: Url, + pub maybe_referrer: Option, +} + +impl NodeJsErrorCoded for TypesNotFoundError { + fn code(&self) -> NodeJsErrorCode { + NodeJsErrorCode::ERR_TYPES_NOT_FOUND + } +} + +#[derive(Debug, Error)] +#[error( + "[{}] Invalid package config. 
{}", + self.code(), + self.0 +)] +pub struct PackageJsonLoadError( + #[source] + #[from] + pub deno_package_json::PackageJsonLoadError, +); + +impl NodeJsErrorCoded for PackageJsonLoadError { + fn code(&self) -> NodeJsErrorCode { + NodeJsErrorCode::ERR_INVALID_PACKAGE_CONFIG + } +} + +kinded_err!(ClosestPkgJsonError, ClosestPkgJsonErrorKind); + +impl NodeJsErrorCoded for ClosestPkgJsonError { + fn code(&self) -> NodeJsErrorCode { + match self.as_kind() { + ClosestPkgJsonErrorKind::CanonicalizingDir(e) => e.code(), + ClosestPkgJsonErrorKind::Load(e) => e.code(), + } + } +} + +#[derive(Debug, Error)] +pub enum ClosestPkgJsonErrorKind { + #[error(transparent)] + CanonicalizingDir(#[from] CanonicalizingPkgJsonDirError), + #[error(transparent)] + Load(#[from] PackageJsonLoadError), +} + +#[derive(Debug, Error)] +#[error("[{}] Failed canonicalizing package.json directory '{}'.", self.code(), dir_path.display())] +pub struct CanonicalizingPkgJsonDirError { + pub dir_path: PathBuf, + #[source] + pub source: std::io::Error, +} + +impl NodeJsErrorCoded for CanonicalizingPkgJsonDirError { + fn code(&self) -> NodeJsErrorCode { + NodeJsErrorCode::ERR_MODULE_NOT_FOUND + } +} + +#[derive(Debug, Error)] +#[error("TypeScript files are not supported in npm packages: {specifier}")] +pub struct TypeScriptNotSupportedInNpmError { + pub specifier: Url, +} + +impl NodeJsErrorCoded for TypeScriptNotSupportedInNpmError { + fn code(&self) -> NodeJsErrorCode { + NodeJsErrorCode::ERR_UNKNOWN_FILE_EXTENSION + } +} + +kinded_err!(UrlToNodeResolutionError, UrlToNodeResolutionErrorKind); + +impl NodeJsErrorCoded for UrlToNodeResolutionError { + fn code(&self) -> NodeJsErrorCode { + match self.as_kind() { + UrlToNodeResolutionErrorKind::TypeScriptNotSupported(e) => e.code(), + UrlToNodeResolutionErrorKind::ClosestPkgJson(e) => e.code(), + } + } +} + +#[derive(Debug, Error)] +pub enum UrlToNodeResolutionErrorKind { + #[error(transparent)] + TypeScriptNotSupported(#[from] 
TypeScriptNotSupportedInNpmError), + #[error(transparent)] + ClosestPkgJson(#[from] ClosestPkgJsonError), +} + +// todo(https://github.com/denoland/deno_core/issues/810): make this a TypeError +#[derive(Debug, Error)] +#[error( + "[{}] Package import specifier \"{}\" is not defined{}{}", + self.code(), + name, + package_json_path.as_ref().map(|p| format!(" in package {}", p.display())).unwrap_or_default(), + maybe_referrer.as_ref().map(|r| format!(" imported from '{}'", r)).unwrap_or_default(), +)] +pub struct PackageImportNotDefinedError { + pub name: String, + pub package_json_path: Option, + pub maybe_referrer: Option, +} + +impl NodeJsErrorCoded for PackageImportNotDefinedError { + fn code(&self) -> NodeJsErrorCode { + NodeJsErrorCode::ERR_PACKAGE_IMPORT_NOT_DEFINED + } +} + +kinded_err!(PackageImportsResolveError, PackageImportsResolveErrorKind); + +#[derive(Debug, Error)] +pub enum PackageImportsResolveErrorKind { + #[error(transparent)] + ClosestPkgJson(ClosestPkgJsonError), + #[error(transparent)] + InvalidModuleSpecifier(#[from] InvalidModuleSpecifierError), + #[error(transparent)] + NotDefined(#[from] PackageImportNotDefinedError), + #[error(transparent)] + Target(#[from] PackageTargetResolveError), +} + +impl NodeJsErrorCoded for PackageImportsResolveErrorKind { + fn code(&self) -> NodeJsErrorCode { + match self { + Self::ClosestPkgJson(e) => e.code(), + Self::InvalidModuleSpecifier(e) => e.code(), + Self::NotDefined(e) => e.code(), + Self::Target(e) => e.code(), + } + } +} + +kinded_err!(PackageResolveError, PackageResolveErrorKind); + +impl NodeJsErrorCoded for PackageResolveError { + fn code(&self) -> NodeJsErrorCode { + match self.as_kind() { + PackageResolveErrorKind::ClosestPkgJson(e) => e.code(), + PackageResolveErrorKind::InvalidModuleSpecifier(e) => e.code(), + PackageResolveErrorKind::PackageFolderResolve(e) => e.code(), + PackageResolveErrorKind::ExportsResolve(e) => e.code(), + PackageResolveErrorKind::SubpathResolve(e) => e.code(), + } + } 
+} + +#[derive(Debug, Error)] +pub enum PackageResolveErrorKind { + #[error(transparent)] + ClosestPkgJson(#[from] ClosestPkgJsonError), + #[error(transparent)] + InvalidModuleSpecifier(#[from] InvalidModuleSpecifierError), + #[error(transparent)] + PackageFolderResolve(#[from] PackageFolderResolveError), + #[error(transparent)] + ExportsResolve(#[from] PackageExportsResolveError), + #[error(transparent)] + SubpathResolve(#[from] PackageSubpathResolveError), +} + +#[derive(Debug, Error)] +#[error("Failed joining '{path}' from '{base}'.")] +pub struct NodeResolveRelativeJoinError { + pub path: String, + pub base: Url, + #[source] + pub source: url::ParseError, +} + +#[derive(Debug, Error)] +#[error("Failed resolving specifier from data url referrer.")] +pub struct DataUrlReferrerError { + #[source] + pub source: url::ParseError, +} + +kinded_err!(NodeResolveError, NodeResolveErrorKind); + +#[derive(Debug, Error)] +pub enum NodeResolveErrorKind { + #[error(transparent)] + RelativeJoin(#[from] NodeResolveRelativeJoinError), + #[error(transparent)] + PackageImportsResolve(#[from] PackageImportsResolveError), + #[error(transparent)] + UnsupportedEsmUrlScheme(#[from] UnsupportedEsmUrlSchemeError), + #[error(transparent)] + DataUrlReferrer(#[from] DataUrlReferrerError), + #[error(transparent)] + PackageResolve(#[from] PackageResolveError), + #[error(transparent)] + TypesNotFound(#[from] TypesNotFoundError), + #[error(transparent)] + FinalizeResolution(#[from] FinalizeResolutionError), + #[error(transparent)] + UrlToNodeResolution(#[from] UrlToNodeResolutionError), +} + +kinded_err!(FinalizeResolutionError, FinalizeResolutionErrorKind); + +#[derive(Debug, Error)] +pub enum FinalizeResolutionErrorKind { + #[error(transparent)] + InvalidModuleSpecifierError(#[from] InvalidModuleSpecifierError), + #[error(transparent)] + ModuleNotFound(#[from] ModuleNotFoundError), + #[error(transparent)] + UnsupportedDirImport(#[from] UnsupportedDirImportError), +} + +impl NodeJsErrorCoded 
for FinalizeResolutionError { + fn code(&self) -> NodeJsErrorCode { + match self.as_kind() { + FinalizeResolutionErrorKind::InvalidModuleSpecifierError(e) => e.code(), + FinalizeResolutionErrorKind::ModuleNotFound(e) => e.code(), + FinalizeResolutionErrorKind::UnsupportedDirImport(e) => e.code(), + } + } +} + +#[derive(Debug, Error)] +#[error( + "[{}] Cannot find {} '{}'{}", + self.code(), + typ, + specifier, + maybe_referrer.as_ref().map(|referrer| format!(" imported from '{}'", referrer)).unwrap_or_default() +)] +pub struct ModuleNotFoundError { + pub specifier: Url, + pub maybe_referrer: Option, + pub typ: &'static str, +} + +impl NodeJsErrorCoded for ModuleNotFoundError { + fn code(&self) -> NodeJsErrorCode { + NodeJsErrorCode::ERR_MODULE_NOT_FOUND + } +} + +#[derive(Debug, Error)] +#[error( + "[{}] Directory import '{}' is not supported resolving ES modules{}", + self.code(), + dir_url, + maybe_referrer.as_ref().map(|referrer| format!(" imported from '{}'", referrer)).unwrap_or_default(), +)] +pub struct UnsupportedDirImportError { + pub dir_url: Url, + pub maybe_referrer: Option, +} + +impl NodeJsErrorCoded for UnsupportedDirImportError { + fn code(&self) -> NodeJsErrorCode { + NodeJsErrorCode::ERR_UNSUPPORTED_DIR_IMPORT + } +} + +#[derive(Debug)] +pub struct InvalidPackageTargetError { + pub pkg_json_path: PathBuf, + pub sub_path: String, + pub target: String, + pub is_import: bool, + pub maybe_referrer: Option, +} + +impl std::error::Error for InvalidPackageTargetError {} + +impl std::fmt::Display for InvalidPackageTargetError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let rel_error = !self.is_import + && !self.target.is_empty() + && !self.target.starts_with("./"); + f.write_char('[')?; + f.write_str(self.code().as_str())?; + f.write_char(']')?; + + if self.sub_path == "." 
{ + assert!(!self.is_import); + write!( + f, + " Invalid \"exports\" main target {} defined in the package config {}", + self.target, + self.pkg_json_path.display() + )?; + } else { + let ie = if self.is_import { "imports" } else { "exports" }; + write!( + f, + " Invalid \"{}\" target {} defined for '{}' in the package config {}", + ie, + self.target, + self.sub_path, + self.pkg_json_path.display() + )?; + }; + + if let Some(referrer) = &self.maybe_referrer { + write!(f, " imported from '{}'", referrer)?; + } + if rel_error { + write!(f, "; target must start with \"./\"")?; + } + Ok(()) + } +} + +impl NodeJsErrorCoded for InvalidPackageTargetError { + fn code(&self) -> NodeJsErrorCode { + NodeJsErrorCode::ERR_INVALID_PACKAGE_TARGET + } +} + +#[derive(Debug)] +pub struct PackagePathNotExportedError { + pub pkg_json_path: PathBuf, + pub subpath: String, + pub maybe_referrer: Option, + pub mode: NodeResolutionMode, +} + +impl NodeJsErrorCoded for PackagePathNotExportedError { + fn code(&self) -> NodeJsErrorCode { + NodeJsErrorCode::ERR_PACKAGE_PATH_NOT_EXPORTED + } +} + +impl std::error::Error for PackagePathNotExportedError {} + +impl std::fmt::Display for PackagePathNotExportedError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_char('[')?; + f.write_str(self.code().as_str())?; + f.write_char(']')?; + + let types_msg = match self.mode { + NodeResolutionMode::Execution => String::new(), + NodeResolutionMode::Types => " for types".to_string(), + }; + if self.subpath == "." 
{ + write!( + f, + " No \"exports\" main defined{} in '{}'", + types_msg, + self.pkg_json_path.display() + )?; + } else { + write!( + f, + " Package subpath '{}' is not defined{} by \"exports\" in '{}'", + self.subpath, + types_msg, + self.pkg_json_path.display() + )?; + }; + + if let Some(referrer) = &self.maybe_referrer { + write!(f, " imported from '{}'", referrer)?; + } + Ok(()) + } +} + +#[derive(Debug, Clone, Error)] +#[error( + "[{}] Only file and data URLs are supported by the default ESM loader.{} Received protocol '{}'", + self.code(), + if cfg!(windows) && url_scheme.len() == 2 { " On Windows, absolute path must be valid file:// URLS."} else { "" }, + url_scheme +)] +pub struct UnsupportedEsmUrlSchemeError { + pub url_scheme: String, +} + +impl NodeJsErrorCoded for UnsupportedEsmUrlSchemeError { + fn code(&self) -> NodeJsErrorCode { + NodeJsErrorCode::ERR_UNSUPPORTED_ESM_URL_SCHEME + } +} + +#[derive(Debug, Error)] +pub enum ResolvePkgJsonBinExportError { + #[error(transparent)] + PkgJsonLoad(#[from] PackageJsonLoadError), + #[error("Failed resolving binary export. '{}' did not exist", pkg_json_path.display())] + MissingPkgJson { pkg_json_path: PathBuf }, + #[error("Failed resolving binary export. 
{message}")] + InvalidBinProperty { message: String }, + #[error(transparent)] + UrlToNodeResolution(#[from] UrlToNodeResolutionError), +} + +#[derive(Debug, Error)] +pub enum ResolveBinaryCommandsError { + #[error(transparent)] + PkgJsonLoad(#[from] PackageJsonLoadError), + #[error("'{}' did not have a name", pkg_json_path.display())] + MissingPkgJsonName { pkg_json_path: PathBuf }, +} + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn types_resolution_package_path_not_exported() { + let separator_char = if cfg!(windows) { '\\' } else { '/' }; + assert_eq!( + PackagePathNotExportedError { + pkg_json_path: PathBuf::from("test_path").join("package.json"), + subpath: "./jsx-runtime".to_string(), + maybe_referrer: None, + mode: NodeResolutionMode::Types + }.to_string(), + format!("[ERR_PACKAGE_PATH_NOT_EXPORTED] Package subpath './jsx-runtime' is not defined for types by \"exports\" in 'test_path{separator_char}package.json'") + ); + assert_eq!( + PackagePathNotExportedError { + pkg_json_path: PathBuf::from("test_path").join("package.json"), + subpath: ".".to_string(), + maybe_referrer: None, + mode: NodeResolutionMode::Types + }.to_string(), + format!("[ERR_PACKAGE_PATH_NOT_EXPORTED] No \"exports\" main defined for types in 'test_path{separator_char}package.json'") + ); + } +} diff --git a/ext/node_resolver/lib.rs b/ext/node_resolver/lib.rs new file mode 100644 index 000000000..1ab972ccf --- /dev/null +++ b/ext/node_resolver/lib.rs @@ -0,0 +1,26 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
+ +#![deny(clippy::print_stderr)] +#![deny(clippy::print_stdout)] + +pub mod analyze; +pub mod env; +pub mod errors; +mod npm; +mod package_json; +mod path; +mod resolution; +mod sync; + +pub use deno_package_json::PackageJson; +pub use npm::NpmResolver; +pub use npm::NpmResolverRc; +pub use package_json::load_pkg_json; +pub use package_json::PackageJsonThreadLocalCache; +pub use path::PathClean; +pub use resolution::NodeModuleKind; +pub use resolution::NodeResolution; +pub use resolution::NodeResolutionMode; +pub use resolution::NodeResolver; +pub use resolution::DEFAULT_CONDITIONS; +pub use resolution::REQUIRE_CONDITIONS; diff --git a/ext/node_resolver/npm.rs b/ext/node_resolver/npm.rs new file mode 100644 index 000000000..77df57c48 --- /dev/null +++ b/ext/node_resolver/npm.rs @@ -0,0 +1,41 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +use std::path::Path; +use std::path::PathBuf; + +use url::Url; + +use crate::errors; +use crate::path::PathClean; +use crate::sync::MaybeSend; +use crate::sync::MaybeSync; + +#[allow(clippy::disallowed_types)] +pub type NpmResolverRc = crate::sync::MaybeArc; + +pub trait NpmResolver: std::fmt::Debug + MaybeSend + MaybeSync { + /// Resolves an npm package folder path from an npm package referrer. 
+ fn resolve_package_folder_from_package( + &self, + specifier: &str, + referrer: &Url, + ) -> Result; + + fn in_npm_package(&self, specifier: &Url) -> bool; + + fn in_npm_package_at_dir_path(&self, path: &Path) -> bool { + let specifier = match Url::from_directory_path(path.to_path_buf().clean()) { + Ok(p) => p, + Err(_) => return false, + }; + self.in_npm_package(&specifier) + } + + fn in_npm_package_at_file_path(&self, path: &Path) -> bool { + let specifier = match Url::from_file_path(path.to_path_buf().clean()) { + Ok(p) => p, + Err(_) => return false, + }; + self.in_npm_package(&specifier) + } +} diff --git a/ext/node_resolver/package_json.rs b/ext/node_resolver/package_json.rs new file mode 100644 index 000000000..de750f1d7 --- /dev/null +++ b/ext/node_resolver/package_json.rs @@ -0,0 +1,53 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +use deno_package_json::PackageJson; +use deno_package_json::PackageJsonRc; +use std::cell::RefCell; +use std::collections::HashMap; +use std::io::ErrorKind; +use std::path::Path; +use std::path::PathBuf; + +use crate::errors::PackageJsonLoadError; + +// use a thread local cache so that workers have their own distinct cache +thread_local! { + static CACHE: RefCell> = RefCell::new(HashMap::new()); +} + +pub struct PackageJsonThreadLocalCache; + +impl PackageJsonThreadLocalCache { + pub fn clear() { + CACHE.with(|cache| cache.borrow_mut().clear()); + } +} + +impl deno_package_json::PackageJsonCache for PackageJsonThreadLocalCache { + fn get(&self, path: &Path) -> Option { + CACHE.with(|cache| cache.borrow().get(path).cloned()) + } + + fn set(&self, path: PathBuf, package_json: PackageJsonRc) { + CACHE.with(|cache| cache.borrow_mut().insert(path, package_json)); + } +} + +/// Helper to load a package.json file using the thread local cache +/// in node_resolver. 
+pub fn load_pkg_json( + fs: &dyn deno_package_json::fs::DenoPkgJsonFs, + path: &Path, +) -> Result, PackageJsonLoadError> { + let result = + PackageJson::load_from_path(path, fs, Some(&PackageJsonThreadLocalCache)); + match result { + Ok(pkg_json) => Ok(Some(pkg_json)), + Err(deno_package_json::PackageJsonLoadError::Io { source, .. }) + if source.kind() == ErrorKind::NotFound => + { + Ok(None) + } + Err(err) => Err(PackageJsonLoadError(err)), + } +} diff --git a/ext/node_resolver/path.rs b/ext/node_resolver/path.rs new file mode 100644 index 000000000..8c33285db --- /dev/null +++ b/ext/node_resolver/path.rs @@ -0,0 +1,142 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +use std::path::Component; +use std::path::Path; +use std::path::PathBuf; + +use url::Url; + +/// Extension to path_clean::PathClean +pub trait PathClean { + fn clean(&self) -> T; +} + +impl PathClean for PathBuf { + fn clean(&self) -> PathBuf { + let path = path_clean::PathClean::clean(self); + if cfg!(windows) && path.to_string_lossy().contains("..\\") { + // temporary workaround because path_clean::PathClean::clean is + // not good enough on windows + let mut components = Vec::new(); + + for component in path.components() { + match component { + Component::CurDir => { + // skip + } + Component::ParentDir => { + let maybe_last_component = components.pop(); + if !matches!(maybe_last_component, Some(Component::Normal(_))) { + panic!("Error normalizing: {}", path.display()); + } + } + Component::Normal(_) | Component::RootDir | Component::Prefix(_) => { + components.push(component); + } + } + } + components.into_iter().collect::() + } else { + path + } + } +} + +pub(crate) fn to_file_specifier(path: &Path) -> Url { + match Url::from_file_path(path) { + Ok(url) => url, + Err(_) => panic!("Invalid path: {}", path.display()), + } +} + +// todo(dsherret): we have the below code also in deno_core and it +// would be good to somehow re-use it in both places (we don't want 
+// to create a dependency on deno_core here) + +#[cfg(not(windows))] +#[inline] +pub fn strip_unc_prefix(path: PathBuf) -> PathBuf { + path +} + +/// Strips the unc prefix (ex. \\?\) from Windows paths. +#[cfg(windows)] +pub fn strip_unc_prefix(path: PathBuf) -> PathBuf { + use std::path::Component; + use std::path::Prefix; + + let mut components = path.components(); + match components.next() { + Some(Component::Prefix(prefix)) => { + match prefix.kind() { + // \\?\device + Prefix::Verbatim(device) => { + let mut path = PathBuf::new(); + path.push(format!(r"\\{}\", device.to_string_lossy())); + path.extend(components.filter(|c| !matches!(c, Component::RootDir))); + path + } + // \\?\c:\path + Prefix::VerbatimDisk(_) => { + let mut path = PathBuf::new(); + path.push(prefix.as_os_str().to_string_lossy().replace(r"\\?\", "")); + path.extend(components); + path + } + // \\?\UNC\hostname\share_name\path + Prefix::VerbatimUNC(hostname, share_name) => { + let mut path = PathBuf::new(); + path.push(format!( + r"\\{}\{}\", + hostname.to_string_lossy(), + share_name.to_string_lossy() + )); + path.extend(components.filter(|c| !matches!(c, Component::RootDir))); + path + } + _ => path, + } + } + _ => path, + } +} + +#[cfg(test)] +mod test { + #[cfg(windows)] + #[test] + fn test_strip_unc_prefix() { + use std::path::PathBuf; + + run_test(r"C:\", r"C:\"); + run_test(r"C:\test\file.txt", r"C:\test\file.txt"); + + run_test(r"\\?\C:\", r"C:\"); + run_test(r"\\?\C:\test\file.txt", r"C:\test\file.txt"); + + run_test(r"\\.\C:\", r"\\.\C:\"); + run_test(r"\\.\C:\Test\file.txt", r"\\.\C:\Test\file.txt"); + + run_test(r"\\?\UNC\localhost\", r"\\localhost"); + run_test(r"\\?\UNC\localhost\c$\", r"\\localhost\c$"); + run_test( + r"\\?\UNC\localhost\c$\Windows\file.txt", + r"\\localhost\c$\Windows\file.txt", + ); + run_test(r"\\?\UNC\wsl$\deno.json", r"\\wsl$\deno.json"); + + run_test(r"\\?\server1", r"\\server1"); + run_test(r"\\?\server1\e$\", r"\\server1\e$\"); + run_test( + 
r"\\?\server1\e$\test\file.txt", + r"\\server1\e$\test\file.txt", + ); + + fn run_test(input: &str, expected: &str) { + assert_eq!( + super::strip_unc_prefix(PathBuf::from(input)), + PathBuf::from(expected) + ); + } + } +} diff --git a/ext/node_resolver/resolution.rs b/ext/node_resolver/resolution.rs new file mode 100644 index 000000000..d7918c75c --- /dev/null +++ b/ext/node_resolver/resolution.rs @@ -0,0 +1,2025 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +use std::borrow::Cow; +use std::collections::HashMap; +use std::path::Path; +use std::path::PathBuf; + +use anyhow::bail; +use anyhow::Error as AnyError; +use deno_media_type::MediaType; +use deno_package_json::PackageJsonRc; +use serde_json::Map; +use serde_json::Value; +use url::Url; + +use crate::env::NodeResolverEnv; +use crate::errors; +use crate::errors::CanonicalizingPkgJsonDirError; +use crate::errors::ClosestPkgJsonError; +use crate::errors::DataUrlReferrerError; +use crate::errors::FinalizeResolutionError; +use crate::errors::InvalidModuleSpecifierError; +use crate::errors::InvalidPackageTargetError; +use crate::errors::LegacyResolveError; +use crate::errors::ModuleNotFoundError; +use crate::errors::NodeJsErrorCode; +use crate::errors::NodeJsErrorCoded; +use crate::errors::NodeResolveError; +use crate::errors::NodeResolveRelativeJoinError; +use crate::errors::PackageExportsResolveError; +use crate::errors::PackageImportNotDefinedError; +use crate::errors::PackageImportsResolveError; +use crate::errors::PackageImportsResolveErrorKind; +use crate::errors::PackageJsonLoadError; +use crate::errors::PackagePathNotExportedError; +use crate::errors::PackageResolveError; +use crate::errors::PackageSubpathResolveError; +use crate::errors::PackageSubpathResolveErrorKind; +use crate::errors::PackageTargetNotFoundError; +use crate::errors::PackageTargetResolveError; +use crate::errors::PackageTargetResolveErrorKind; +use crate::errors::ResolveBinaryCommandsError; +use 
crate::errors::ResolvePkgJsonBinExportError; +use crate::errors::ResolvePkgSubpathFromDenoModuleError; +use crate::errors::TypeScriptNotSupportedInNpmError; +use crate::errors::TypesNotFoundError; +use crate::errors::TypesNotFoundErrorData; +use crate::errors::UnsupportedDirImportError; +use crate::errors::UnsupportedEsmUrlSchemeError; +use crate::errors::UrlToNodeResolutionError; +use crate::path::strip_unc_prefix; +use crate::path::to_file_specifier; +use crate::NpmResolverRc; +use crate::PathClean; +use deno_package_json::PackageJson; + +pub static DEFAULT_CONDITIONS: &[&str] = &["deno", "node", "import"]; +pub static REQUIRE_CONDITIONS: &[&str] = &["require", "node"]; +static TYPES_ONLY_CONDITIONS: &[&str] = &["types"]; + +pub type NodeModuleKind = deno_package_json::NodeModuleKind; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum NodeResolutionMode { + Execution, + Types, +} + +impl NodeResolutionMode { + pub fn is_types(&self) -> bool { + matches!(self, NodeResolutionMode::Types) + } +} + +#[derive(Debug)] +pub enum NodeResolution { + Esm(Url), + CommonJs(Url), + BuiltIn(String), +} + +impl NodeResolution { + pub fn into_url(self) -> Url { + match self { + Self::Esm(u) => u, + Self::CommonJs(u) => u, + Self::BuiltIn(specifier) => { + if specifier.starts_with("node:") { + Url::parse(&specifier).unwrap() + } else { + Url::parse(&format!("node:{specifier}")).unwrap() + } + } + } + } + + pub fn into_specifier_and_media_type( + resolution: Option, + ) -> (Url, MediaType) { + match resolution { + Some(NodeResolution::CommonJs(specifier)) => { + let media_type = MediaType::from_specifier(&specifier); + ( + specifier, + match media_type { + MediaType::JavaScript | MediaType::Jsx => MediaType::Cjs, + MediaType::TypeScript | MediaType::Tsx => MediaType::Cts, + MediaType::Dts => MediaType::Dcts, + _ => media_type, + }, + ) + } + Some(NodeResolution::Esm(specifier)) => { + let media_type = MediaType::from_specifier(&specifier); + ( + specifier, + match 
media_type { + MediaType::JavaScript | MediaType::Jsx => MediaType::Mjs, + MediaType::TypeScript | MediaType::Tsx => MediaType::Mts, + MediaType::Dts => MediaType::Dmts, + _ => media_type, + }, + ) + } + Some(resolution) => (resolution.into_url(), MediaType::Dts), + None => ( + Url::parse("internal:///missing_dependency.d.ts").unwrap(), + MediaType::Dts, + ), + } + } +} + +#[allow(clippy::disallowed_types)] +pub type NodeResolverRc = crate::sync::MaybeArc>; + +#[derive(Debug)] +pub struct NodeResolver { + env: TEnv, + npm_resolver: NpmResolverRc, + in_npm_package_cache: crate::sync::MaybeArcMutex>, +} + +impl NodeResolver { + pub fn new(env: TEnv, npm_resolver: NpmResolverRc) -> Self { + Self { + env, + npm_resolver, + in_npm_package_cache: crate::sync::MaybeArcMutex::new(HashMap::new()), + } + } + + pub fn in_npm_package(&self, specifier: &Url) -> bool { + self.npm_resolver.in_npm_package(specifier) + } + + pub fn in_npm_package_with_cache(&self, specifier: Cow) -> bool { + let mut cache = self.in_npm_package_cache.lock(); + + if let Some(result) = cache.get(specifier.as_ref()) { + return *result; + } + + let result = if let Ok(specifier) = Url::parse(&specifier) { + self.npm_resolver.in_npm_package(&specifier) + } else { + false + }; + cache.insert(specifier.into_owned(), result); + result + } + + /// This function is an implementation of `defaultResolve` in + /// `lib/internal/modules/esm/resolve.js` from Node. 
+ pub fn resolve( + &self, + specifier: &str, + referrer: &Url, + referrer_kind: NodeModuleKind, + mode: NodeResolutionMode, + ) -> Result { + // Note: if we are here, then the referrer is an esm module + // TODO(bartlomieju): skipped "policy" part as we don't plan to support it + + if self.env.is_builtin_node_module(specifier) { + return Ok(NodeResolution::BuiltIn(specifier.to_string())); + } + + if let Ok(url) = Url::parse(specifier) { + if url.scheme() == "data" { + return Ok(NodeResolution::Esm(url)); + } + + if let Some(module_name) = + get_module_name_from_builtin_node_module_specifier(&url) + { + return Ok(NodeResolution::BuiltIn(module_name.to_string())); + } + + let protocol = url.scheme(); + + if protocol != "file" && protocol != "data" { + return Err( + UnsupportedEsmUrlSchemeError { + url_scheme: protocol.to_string(), + } + .into(), + ); + } + + // todo(dsherret): this seems wrong + if referrer.scheme() == "data" { + let url = referrer + .join(specifier) + .map_err(|source| DataUrlReferrerError { source })?; + return Ok(NodeResolution::Esm(url)); + } + } + + let url = self.module_resolve( + specifier, + referrer, + referrer_kind, + match referrer_kind { + NodeModuleKind::Esm => DEFAULT_CONDITIONS, + NodeModuleKind::Cjs => REQUIRE_CONDITIONS, + }, + mode, + )?; + + let url = if mode.is_types() { + let file_path = to_file_path(&url); + self.path_to_declaration_url(&file_path, Some(referrer), referrer_kind)? + } else { + url + }; + + let url = self.finalize_resolution(url, Some(referrer))?; + let resolve_response = self.url_to_node_resolution(url)?; + // TODO(bartlomieju): skipped checking errors for commonJS resolution and + // "preserveSymlinksMain"/"preserveSymlinks" options. 
+ Ok(resolve_response) + } + + fn module_resolve( + &self, + specifier: &str, + referrer: &Url, + referrer_kind: NodeModuleKind, + conditions: &[&str], + mode: NodeResolutionMode, + ) -> Result { + if should_be_treated_as_relative_or_absolute_path(specifier) { + Ok(referrer.join(specifier).map_err(|err| { + NodeResolveRelativeJoinError { + path: specifier.to_string(), + base: referrer.clone(), + source: err, + } + })?) + } else if specifier.starts_with('#') { + let pkg_config = self + .get_closest_package_json(referrer) + .map_err(PackageImportsResolveErrorKind::ClosestPkgJson) + .map_err(|err| PackageImportsResolveError(Box::new(err)))?; + Ok(self.package_imports_resolve( + specifier, + Some(referrer), + referrer_kind, + pkg_config.as_deref(), + conditions, + mode, + )?) + } else if let Ok(resolved) = Url::parse(specifier) { + Ok(resolved) + } else { + Ok(self.package_resolve( + specifier, + referrer, + referrer_kind, + conditions, + mode, + )?) + } + } + + fn finalize_resolution( + &self, + resolved: Url, + maybe_referrer: Option<&Url>, + ) -> Result { + let encoded_sep_re = lazy_regex::regex!(r"%2F|%2C"); + + if encoded_sep_re.is_match(resolved.path()) { + return Err( + errors::InvalidModuleSpecifierError { + request: resolved.to_string(), + reason: Cow::Borrowed( + "must not include encoded \"/\" or \"\\\\\" characters", + ), + maybe_referrer: maybe_referrer.map(to_file_path_string), + } + .into(), + ); + } + + if resolved.scheme() == "node" { + return Ok(resolved); + } + + let path = to_file_path(&resolved); + + // TODO(bartlomieju): currently not supported + // if (getOptionValue('--experimental-specifier-resolution') === 'node') { + // ... 
+ // } + + let p_str = path.to_str().unwrap(); + let p = if p_str.ends_with('/') { + p_str[p_str.len() - 1..].to_string() + } else { + p_str.to_string() + }; + + let (is_dir, is_file) = if let Ok(stats) = self.env.stat_sync(Path::new(&p)) + { + (stats.is_dir, stats.is_file) + } else { + (false, false) + }; + if is_dir { + return Err( + UnsupportedDirImportError { + dir_url: resolved.clone(), + maybe_referrer: maybe_referrer.map(ToOwned::to_owned), + } + .into(), + ); + } else if !is_file { + return Err( + ModuleNotFoundError { + specifier: resolved, + maybe_referrer: maybe_referrer.map(ToOwned::to_owned), + typ: "module", + } + .into(), + ); + } + + Ok(resolved) + } + + pub fn resolve_package_subpath_from_deno_module( + &self, + package_dir: &Path, + package_subpath: Option<&str>, + maybe_referrer: Option<&Url>, + mode: NodeResolutionMode, + ) -> Result { + let node_module_kind = NodeModuleKind::Esm; + let package_subpath = package_subpath + .map(|s| format!("./{s}")) + .unwrap_or_else(|| ".".to_string()); + let resolved_url = self.resolve_package_dir_subpath( + package_dir, + &package_subpath, + maybe_referrer, + node_module_kind, + DEFAULT_CONDITIONS, + mode, + )?; + let resolve_response = self.url_to_node_resolution(resolved_url)?; + // TODO(bartlomieju): skipped checking errors for commonJS resolution and + // "preserveSymlinksMain"/"preserveSymlinks" options. + Ok(resolve_response) + } + + pub fn resolve_binary_commands( + &self, + package_folder: &Path, + ) -> Result, ResolveBinaryCommandsError> { + let pkg_json_path = package_folder.join("package.json"); + let Some(package_json) = self.load_package_json(&pkg_json_path)? 
else { + return Ok(Vec::new()); + }; + + Ok(match &package_json.bin { + Some(Value::String(_)) => { + let Some(name) = &package_json.name else { + return Err(ResolveBinaryCommandsError::MissingPkgJsonName { + pkg_json_path, + }); + }; + vec![name.to_string()] + } + Some(Value::Object(o)) => { + o.iter().map(|(key, _)| key.clone()).collect::>() + } + _ => Vec::new(), + }) + } + + pub fn resolve_binary_export( + &self, + package_folder: &Path, + sub_path: Option<&str>, + ) -> Result { + let pkg_json_path = package_folder.join("package.json"); + let Some(package_json) = self.load_package_json(&pkg_json_path)? else { + return Err(ResolvePkgJsonBinExportError::MissingPkgJson { + pkg_json_path, + }); + }; + let bin_entry = + resolve_bin_entry_value(&package_json, sub_path).map_err(|err| { + ResolvePkgJsonBinExportError::InvalidBinProperty { + message: err.to_string(), + } + })?; + let url = to_file_specifier(&package_folder.join(bin_entry)); + + let resolve_response = self.url_to_node_resolution(url)?; + // TODO(bartlomieju): skipped checking errors for commonJS resolution and + // "preserveSymlinksMain"/"preserveSymlinks" options. 
+ Ok(resolve_response) + } + + pub fn url_to_node_resolution( + &self, + url: Url, + ) -> Result { + let url_str = url.as_str().to_lowercase(); + if url_str.starts_with("http") || url_str.ends_with(".json") { + Ok(NodeResolution::Esm(url)) + } else if url_str.ends_with(".js") || url_str.ends_with(".d.ts") { + let maybe_package_config = self.get_closest_package_json(&url)?; + match maybe_package_config { + Some(c) if c.typ == "module" => Ok(NodeResolution::Esm(url)), + Some(_) => Ok(NodeResolution::CommonJs(url)), + None => Ok(NodeResolution::Esm(url)), + } + } else if url_str.ends_with(".mjs") || url_str.ends_with(".d.mts") { + Ok(NodeResolution::Esm(url)) + } else if url_str.ends_with(".ts") || url_str.ends_with(".mts") { + if self.in_npm_package(&url) { + Err(TypeScriptNotSupportedInNpmError { specifier: url }.into()) + } else { + Ok(NodeResolution::Esm(url)) + } + } else { + Ok(NodeResolution::CommonJs(url)) + } + } + + /// Checks if the resolved file has a corresponding declaration file. 
+ fn path_to_declaration_url( + &self, + path: &Path, + maybe_referrer: Option<&Url>, + referrer_kind: NodeModuleKind, + ) -> Result { + fn probe_extensions( + fs: &TEnv, + path: &Path, + lowercase_path: &str, + referrer_kind: NodeModuleKind, + ) -> Option { + let mut searched_for_d_mts = false; + let mut searched_for_d_cts = false; + if lowercase_path.ends_with(".mjs") { + let d_mts_path = with_known_extension(path, "d.mts"); + if fs.exists_sync(&d_mts_path) { + return Some(d_mts_path); + } + searched_for_d_mts = true; + } else if lowercase_path.ends_with(".cjs") { + let d_cts_path = with_known_extension(path, "d.cts"); + if fs.exists_sync(&d_cts_path) { + return Some(d_cts_path); + } + searched_for_d_cts = true; + } + + let dts_path = with_known_extension(path, "d.ts"); + if fs.exists_sync(&dts_path) { + return Some(dts_path); + } + + let specific_dts_path = match referrer_kind { + NodeModuleKind::Cjs if !searched_for_d_cts => { + Some(with_known_extension(path, "d.cts")) + } + NodeModuleKind::Esm if !searched_for_d_mts => { + Some(with_known_extension(path, "d.mts")) + } + _ => None, // already searched above + }; + if let Some(specific_dts_path) = specific_dts_path { + if fs.exists_sync(&specific_dts_path) { + return Some(specific_dts_path); + } + } + None + } + + let lowercase_path = path.to_string_lossy().to_lowercase(); + if lowercase_path.ends_with(".d.ts") + || lowercase_path.ends_with(".d.cts") + || lowercase_path.ends_with(".d.mts") + { + return Ok(to_file_specifier(path)); + } + if let Some(path) = + probe_extensions(&self.env, path, &lowercase_path, referrer_kind) + { + return Ok(to_file_specifier(&path)); + } + if self.env.is_dir_sync(path) { + let resolution_result = self.resolve_package_dir_subpath( + path, + /* sub path */ ".", + maybe_referrer, + referrer_kind, + match referrer_kind { + NodeModuleKind::Esm => DEFAULT_CONDITIONS, + NodeModuleKind::Cjs => REQUIRE_CONDITIONS, + }, + NodeResolutionMode::Types, + ); + if let Ok(resolution) = 
resolution_result { + return Ok(resolution); + } + let index_path = path.join("index.js"); + if let Some(path) = probe_extensions( + &self.env, + &index_path, + &index_path.to_string_lossy().to_lowercase(), + referrer_kind, + ) { + return Ok(to_file_specifier(&path)); + } + } + // allow resolving .css files for types resolution + if lowercase_path.ends_with(".css") { + return Ok(to_file_specifier(path)); + } + Err(TypesNotFoundError(Box::new(TypesNotFoundErrorData { + code_specifier: to_file_specifier(path), + maybe_referrer: maybe_referrer.cloned(), + }))) + } + + #[allow(clippy::too_many_arguments)] + pub fn package_imports_resolve( + &self, + name: &str, + maybe_referrer: Option<&Url>, + referrer_kind: NodeModuleKind, + referrer_pkg_json: Option<&PackageJson>, + conditions: &[&str], + mode: NodeResolutionMode, + ) -> Result { + if name == "#" || name.starts_with("#/") || name.ends_with('/') { + let reason = "is not a valid internal imports specifier name"; + return Err( + errors::InvalidModuleSpecifierError { + request: name.to_string(), + reason: Cow::Borrowed(reason), + maybe_referrer: maybe_referrer.map(to_specifier_display_string), + } + .into(), + ); + } + + let mut package_json_path = None; + if let Some(pkg_json) = &referrer_pkg_json { + package_json_path = Some(pkg_json.path.clone()); + if let Some(imports) = &pkg_json.imports { + if imports.contains_key(name) && !name.contains('*') { + let target = imports.get(name).unwrap(); + let maybe_resolved = self.resolve_package_target( + package_json_path.as_ref().unwrap(), + target, + "", + name, + maybe_referrer, + referrer_kind, + false, + true, + conditions, + mode, + )?; + if let Some(resolved) = maybe_resolved { + return Ok(resolved); + } + } else { + let mut best_match = ""; + let mut best_match_subpath = None; + for key in imports.keys() { + let pattern_index = key.find('*'); + if let Some(pattern_index) = pattern_index { + let key_sub = &key[0..=pattern_index]; + if name.starts_with(key_sub) { + let 
pattern_trailer = &key[pattern_index + 1..]; + if name.len() > key.len() + && name.ends_with(&pattern_trailer) + && pattern_key_compare(best_match, key) == 1 + && key.rfind('*') == Some(pattern_index) + { + best_match = key; + best_match_subpath = Some( + name[pattern_index..=(name.len() - pattern_trailer.len())] + .to_string(), + ); + } + } + } + } + + if !best_match.is_empty() { + let target = imports.get(best_match).unwrap(); + let maybe_resolved = self.resolve_package_target( + package_json_path.as_ref().unwrap(), + target, + &best_match_subpath.unwrap(), + best_match, + maybe_referrer, + referrer_kind, + true, + true, + conditions, + mode, + )?; + if let Some(resolved) = maybe_resolved { + return Ok(resolved); + } + } + } + } + } + + Err( + PackageImportNotDefinedError { + name: name.to_string(), + package_json_path, + maybe_referrer: maybe_referrer.map(ToOwned::to_owned), + } + .into(), + ) + } + + #[allow(clippy::too_many_arguments)] + fn resolve_package_target_string( + &self, + target: &str, + subpath: &str, + match_: &str, + package_json_path: &Path, + maybe_referrer: Option<&Url>, + referrer_kind: NodeModuleKind, + pattern: bool, + internal: bool, + conditions: &[&str], + mode: NodeResolutionMode, + ) -> Result { + if !subpath.is_empty() && !pattern && !target.ends_with('/') { + return Err( + InvalidPackageTargetError { + pkg_json_path: package_json_path.to_path_buf(), + sub_path: match_.to_string(), + target: target.to_string(), + is_import: internal, + maybe_referrer: maybe_referrer.map(ToOwned::to_owned), + } + .into(), + ); + } + let invalid_segment_re = + lazy_regex::regex!(r"(^|\\|/)(\.\.?|node_modules)(\\|/|$)"); + let pattern_re = lazy_regex::regex!(r"\*"); + if !target.starts_with("./") { + if internal && !target.starts_with("../") && !target.starts_with('/') { + let target_url = Url::parse(target); + match target_url { + Ok(url) => { + if get_module_name_from_builtin_node_module_specifier(&url) + .is_some() + { + return Ok(url); + } + } + 
Err(_) => { + let export_target = if pattern { + pattern_re + .replace(target, |_caps: &regex::Captures| subpath) + .to_string() + } else { + format!("{target}{subpath}") + }; + let package_json_url = to_file_specifier(package_json_path); + let result = match self.package_resolve( + &export_target, + &package_json_url, + referrer_kind, + conditions, + mode, + ) { + Ok(url) => Ok(url), + Err(err) => match err.code() { + NodeJsErrorCode::ERR_INVALID_MODULE_SPECIFIER + | NodeJsErrorCode::ERR_INVALID_PACKAGE_CONFIG + | NodeJsErrorCode::ERR_INVALID_PACKAGE_TARGET + | NodeJsErrorCode::ERR_PACKAGE_IMPORT_NOT_DEFINED + | NodeJsErrorCode::ERR_PACKAGE_PATH_NOT_EXPORTED + | NodeJsErrorCode::ERR_UNKNOWN_FILE_EXTENSION + | NodeJsErrorCode::ERR_UNSUPPORTED_DIR_IMPORT + | NodeJsErrorCode::ERR_UNSUPPORTED_ESM_URL_SCHEME + | NodeJsErrorCode::ERR_TYPES_NOT_FOUND => { + Err(PackageTargetResolveErrorKind::PackageResolve(err).into()) + } + NodeJsErrorCode::ERR_MODULE_NOT_FOUND => Err( + PackageTargetResolveErrorKind::NotFound( + PackageTargetNotFoundError { + pkg_json_path: package_json_path.to_path_buf(), + target: export_target.to_string(), + maybe_referrer: maybe_referrer.map(ToOwned::to_owned), + referrer_kind, + mode, + }, + ) + .into(), + ), + }, + }; + + return match result { + Ok(url) => Ok(url), + Err(err) => { + if self.env.is_builtin_node_module(target) { + Ok(Url::parse(&format!("node:{}", target)).unwrap()) + } else { + Err(err) + } + } + }; + } + } + } + return Err( + InvalidPackageTargetError { + pkg_json_path: package_json_path.to_path_buf(), + sub_path: match_.to_string(), + target: target.to_string(), + is_import: internal, + maybe_referrer: maybe_referrer.map(ToOwned::to_owned), + } + .into(), + ); + } + if invalid_segment_re.is_match(&target[2..]) { + return Err( + InvalidPackageTargetError { + pkg_json_path: package_json_path.to_path_buf(), + sub_path: match_.to_string(), + target: target.to_string(), + is_import: internal, + maybe_referrer:
maybe_referrer.map(ToOwned::to_owned), + } + .into(), + ); + } + let package_path = package_json_path.parent().unwrap(); + let resolved_path = package_path.join(target).clean(); + if !resolved_path.starts_with(package_path) { + return Err( + InvalidPackageTargetError { + pkg_json_path: package_json_path.to_path_buf(), + sub_path: match_.to_string(), + target: target.to_string(), + is_import: internal, + maybe_referrer: maybe_referrer.map(ToOwned::to_owned), + } + .into(), + ); + } + if subpath.is_empty() { + return Ok(to_file_specifier(&resolved_path)); + } + if invalid_segment_re.is_match(subpath) { + let request = if pattern { + match_.replace('*', subpath) + } else { + format!("{match_}{subpath}") + }; + return Err( + throw_invalid_subpath( + request, + package_json_path, + internal, + maybe_referrer, + ) + .into(), + ); + } + if pattern { + let resolved_path_str = resolved_path.to_string_lossy(); + let replaced = pattern_re + .replace(&resolved_path_str, |_caps: &regex::Captures| subpath); + return Ok(to_file_specifier(&PathBuf::from(replaced.to_string()))); + } + Ok(to_file_specifier(&resolved_path.join(subpath).clean())) + } + + #[allow(clippy::too_many_arguments)] + fn resolve_package_target( + &self, + package_json_path: &Path, + target: &Value, + subpath: &str, + package_subpath: &str, + maybe_referrer: Option<&Url>, + referrer_kind: NodeModuleKind, + pattern: bool, + internal: bool, + conditions: &[&str], + mode: NodeResolutionMode, + ) -> Result<Option<Url>, PackageTargetResolveError> { + let result = self.resolve_package_target_inner( + package_json_path, + target, + subpath, + package_subpath, + maybe_referrer, + referrer_kind, + pattern, + internal, + conditions, + mode, + ); + match result { + Ok(maybe_resolved) => Ok(maybe_resolved), + Err(err) => { + if mode.is_types() + && err.code() == NodeJsErrorCode::ERR_TYPES_NOT_FOUND + && conditions != TYPES_ONLY_CONDITIONS + { + // try resolving with just "types" conditions for when someone misconfigures + // and puts the
"types" condition in the wrong place + if let Ok(Some(resolved)) = self.resolve_package_target_inner( + package_json_path, + target, + subpath, + package_subpath, + maybe_referrer, + referrer_kind, + pattern, + internal, + TYPES_ONLY_CONDITIONS, + mode, + ) { + return Ok(Some(resolved)); + } + } + + Err(err) + } + } + } + + #[allow(clippy::too_many_arguments)] + fn resolve_package_target_inner( + &self, + package_json_path: &Path, + target: &Value, + subpath: &str, + package_subpath: &str, + maybe_referrer: Option<&Url>, + referrer_kind: NodeModuleKind, + pattern: bool, + internal: bool, + conditions: &[&str], + mode: NodeResolutionMode, + ) -> Result, PackageTargetResolveError> { + if let Some(target) = target.as_str() { + let url = self.resolve_package_target_string( + target, + subpath, + package_subpath, + package_json_path, + maybe_referrer, + referrer_kind, + pattern, + internal, + conditions, + mode, + )?; + if mode.is_types() && url.scheme() == "file" { + let path = url.to_file_path().unwrap(); + return Ok(Some(self.path_to_declaration_url( + &path, + maybe_referrer, + referrer_kind, + )?)); + } else { + return Ok(Some(url)); + } + } else if let Some(target_arr) = target.as_array() { + if target_arr.is_empty() { + return Ok(None); + } + + let mut last_error = None; + for target_item in target_arr { + let resolved_result = self.resolve_package_target( + package_json_path, + target_item, + subpath, + package_subpath, + maybe_referrer, + referrer_kind, + pattern, + internal, + conditions, + mode, + ); + + match resolved_result { + Ok(Some(resolved)) => return Ok(Some(resolved)), + Ok(None) => { + last_error = None; + continue; + } + Err(e) => { + if e.code() == NodeJsErrorCode::ERR_INVALID_PACKAGE_TARGET { + last_error = Some(e); + continue; + } else { + return Err(e); + } + } + } + } + if last_error.is_none() { + return Ok(None); + } + return Err(last_error.unwrap()); + } else if let Some(target_obj) = target.as_object() { + for key in target_obj.keys() { + 
// TODO(bartlomieju): verify that keys are not numeric + // return Err(errors::err_invalid_package_config( + // to_file_path_string(package_json_url), + // Some(base.as_str().to_string()), + // Some("\"exports\" cannot contain numeric property keys.".to_string()), + // )); + + if key == "default" + || conditions.contains(&key.as_str()) + || mode.is_types() && key.as_str() == "types" + { + let condition_target = target_obj.get(key).unwrap(); + + let resolved = self.resolve_package_target( + package_json_path, + condition_target, + subpath, + package_subpath, + maybe_referrer, + referrer_kind, + pattern, + internal, + conditions, + mode, + )?; + match resolved { + Some(resolved) => return Ok(Some(resolved)), + None => { + continue; + } + } + } + } + } else if target.is_null() { + return Ok(None); + } + + Err( + InvalidPackageTargetError { + pkg_json_path: package_json_path.to_path_buf(), + sub_path: package_subpath.to_string(), + target: target.to_string(), + is_import: internal, + maybe_referrer: maybe_referrer.map(ToOwned::to_owned), + } + .into(), + ) + } + + #[allow(clippy::too_many_arguments)] + pub fn package_exports_resolve( + &self, + package_json_path: &Path, + package_subpath: &str, + package_exports: &Map, + maybe_referrer: Option<&Url>, + referrer_kind: NodeModuleKind, + conditions: &[&str], + mode: NodeResolutionMode, + ) -> Result { + if package_exports.contains_key(package_subpath) + && package_subpath.find('*').is_none() + && !package_subpath.ends_with('/') + { + let target = package_exports.get(package_subpath).unwrap(); + let resolved = self.resolve_package_target( + package_json_path, + target, + "", + package_subpath, + maybe_referrer, + referrer_kind, + false, + false, + conditions, + mode, + )?; + return match resolved { + Some(resolved) => Ok(resolved), + None => Err( + PackagePathNotExportedError { + pkg_json_path: package_json_path.to_path_buf(), + subpath: package_subpath.to_string(), + maybe_referrer: maybe_referrer.map(ToOwned::to_owned), 
+ mode, + } + .into(), + ), + }; + } + + let mut best_match = ""; + let mut best_match_subpath = None; + for key in package_exports.keys() { + let pattern_index = key.find('*'); + if let Some(pattern_index) = pattern_index { + let key_sub = &key[0..pattern_index]; + if package_subpath.starts_with(key_sub) { + // When this reaches EOL, this can throw at the top of the whole function: + // + // if (StringPrototypeEndsWith(packageSubpath, '/')) + // throwInvalidSubpath(packageSubpath) + // + // To match "imports" and the spec. + if package_subpath.ends_with('/') { + // TODO(bartlomieju): + // emitTrailingSlashPatternDeprecation(); + } + let pattern_trailer = &key[pattern_index + 1..]; + if package_subpath.len() >= key.len() + && package_subpath.ends_with(&pattern_trailer) + && pattern_key_compare(best_match, key) == 1 + && key.rfind('*') == Some(pattern_index) + { + best_match = key; + best_match_subpath = Some( + package_subpath[pattern_index + ..(package_subpath.len() - pattern_trailer.len())] + .to_string(), + ); + } + } + } + } + + if !best_match.is_empty() { + let target = package_exports.get(best_match).unwrap(); + let maybe_resolved = self.resolve_package_target( + package_json_path, + target, + &best_match_subpath.unwrap(), + best_match, + maybe_referrer, + referrer_kind, + true, + false, + conditions, + mode, + )?; + if let Some(resolved) = maybe_resolved { + return Ok(resolved); + } else { + return Err( + PackagePathNotExportedError { + pkg_json_path: package_json_path.to_path_buf(), + subpath: package_subpath.to_string(), + maybe_referrer: maybe_referrer.map(ToOwned::to_owned), + mode, + } + .into(), + ); + } + } + + Err( + PackagePathNotExportedError { + pkg_json_path: package_json_path.to_path_buf(), + subpath: package_subpath.to_string(), + maybe_referrer: maybe_referrer.map(ToOwned::to_owned), + mode, + } + .into(), + ) + } + + pub(super) fn package_resolve( + &self, + specifier: &str, + referrer: &Url, + referrer_kind: NodeModuleKind, + conditions: 
&[&str], + mode: NodeResolutionMode, + ) -> Result { + let (package_name, package_subpath, _is_scoped) = + parse_npm_pkg_name(specifier, referrer)?; + + if let Some(package_config) = self.get_closest_package_json(referrer)? { + // ResolveSelf + if package_config.name.as_ref() == Some(&package_name) { + if let Some(exports) = &package_config.exports { + return self + .package_exports_resolve( + &package_config.path, + &package_subpath, + exports, + Some(referrer), + referrer_kind, + conditions, + mode, + ) + .map_err(|err| err.into()); + } + } + } + + self.resolve_package_subpath_for_package( + &package_name, + &package_subpath, + referrer, + referrer_kind, + conditions, + mode, + ) + } + + #[allow(clippy::too_many_arguments)] + fn resolve_package_subpath_for_package( + &self, + package_name: &str, + package_subpath: &str, + referrer: &Url, + referrer_kind: NodeModuleKind, + conditions: &[&str], + mode: NodeResolutionMode, + ) -> Result { + let result = self.resolve_package_subpath_for_package_inner( + package_name, + package_subpath, + referrer, + referrer_kind, + conditions, + mode, + ); + if mode.is_types() && !matches!(result, Ok(Url { .. 
})) { + // try to resolve with the @types package + let package_name = types_package_name(package_name); + if let Ok(result) = self.resolve_package_subpath_for_package_inner( + &package_name, + package_subpath, + referrer, + referrer_kind, + conditions, + mode, + ) { + return Ok(result); + } + } + result + } + + #[allow(clippy::too_many_arguments)] + fn resolve_package_subpath_for_package_inner( + &self, + package_name: &str, + package_subpath: &str, + referrer: &Url, + referrer_kind: NodeModuleKind, + conditions: &[&str], + mode: NodeResolutionMode, + ) -> Result { + let package_dir_path = self + .npm_resolver + .resolve_package_folder_from_package(package_name, referrer)?; + + // todo: error with this instead when can't find package + // Err(errors::err_module_not_found( + // &package_json_url + // .join(".") + // .unwrap() + // .to_file_path() + // .unwrap() + // .display() + // .to_string(), + // &to_file_path_string(referrer), + // "package", + // )) + + // Package match. + self + .resolve_package_dir_subpath( + &package_dir_path, + package_subpath, + Some(referrer), + referrer_kind, + conditions, + mode, + ) + .map_err(|err| err.into()) + } + + #[allow(clippy::too_many_arguments)] + fn resolve_package_dir_subpath( + &self, + package_dir_path: &Path, + package_subpath: &str, + maybe_referrer: Option<&Url>, + referrer_kind: NodeModuleKind, + conditions: &[&str], + mode: NodeResolutionMode, + ) -> Result { + let package_json_path = package_dir_path.join("package.json"); + match self.load_package_json(&package_json_path)? 
{ + Some(pkg_json) => self.resolve_package_subpath( + &pkg_json, + package_subpath, + maybe_referrer, + referrer_kind, + conditions, + mode, + ), + None => self + .resolve_package_subpath_no_pkg_json( + package_dir_path, + package_subpath, + maybe_referrer, + referrer_kind, + mode, + ) + .map_err(|err| { + PackageSubpathResolveErrorKind::LegacyResolve(err).into() + }), + } + } + + #[allow(clippy::too_many_arguments)] + fn resolve_package_subpath( + &self, + package_json: &PackageJson, + package_subpath: &str, + referrer: Option<&Url>, + referrer_kind: NodeModuleKind, + conditions: &[&str], + mode: NodeResolutionMode, + ) -> Result { + if let Some(exports) = &package_json.exports { + let result = self.package_exports_resolve( + &package_json.path, + package_subpath, + exports, + referrer, + referrer_kind, + conditions, + mode, + ); + match result { + Ok(found) => return Ok(found), + Err(exports_err) => { + if mode.is_types() && package_subpath == "." { + return self + .legacy_main_resolve(package_json, referrer, referrer_kind, mode) + .map_err(|err| { + PackageSubpathResolveErrorKind::LegacyResolve(err).into() + }); + } + return Err( + PackageSubpathResolveErrorKind::Exports(exports_err).into(), + ); + } + } + } + + if package_subpath == "." 
{ + return self + .legacy_main_resolve(package_json, referrer, referrer_kind, mode) + .map_err(|err| { + PackageSubpathResolveErrorKind::LegacyResolve(err).into() + }); + } + + self + .resolve_subpath_exact( + package_json.path.parent().unwrap(), + package_subpath, + referrer, + referrer_kind, + mode, + ) + .map_err(|err| { + PackageSubpathResolveErrorKind::LegacyResolve(err.into()).into() + }) + } + + fn resolve_subpath_exact( + &self, + directory: &Path, + package_subpath: &str, + referrer: Option<&Url>, + referrer_kind: NodeModuleKind, + mode: NodeResolutionMode, + ) -> Result { + assert_ne!(package_subpath, "."); + let file_path = directory.join(package_subpath); + if mode.is_types() { + Ok(self.path_to_declaration_url(&file_path, referrer, referrer_kind)?) + } else { + Ok(to_file_specifier(&file_path)) + } + } + + fn resolve_package_subpath_no_pkg_json( + &self, + directory: &Path, + package_subpath: &str, + maybe_referrer: Option<&Url>, + referrer_kind: NodeModuleKind, + mode: NodeResolutionMode, + ) -> Result { + if package_subpath == "." 
{ + self.legacy_index_resolve(directory, maybe_referrer, referrer_kind, mode) + } else { + self + .resolve_subpath_exact( + directory, + package_subpath, + maybe_referrer, + referrer_kind, + mode, + ) + .map_err(|err| err.into()) + } + } + + pub fn get_closest_package_json( + &self, + url: &Url, + ) -> Result, ClosestPkgJsonError> { + let Ok(file_path) = url.to_file_path() else { + return Ok(None); + }; + self.get_closest_package_json_from_path(&file_path) + } + + pub fn get_closest_package_json_from_path( + &self, + file_path: &Path, + ) -> Result, ClosestPkgJsonError> { + let parent_dir = file_path.parent().unwrap(); + let current_dir = + strip_unc_prefix(self.env.realpath_sync(parent_dir).map_err( + |source| CanonicalizingPkgJsonDirError { + dir_path: parent_dir.to_path_buf(), + source, + }, + )?); + for current_dir in current_dir.ancestors() { + let package_json_path = current_dir.join("package.json"); + if let Some(pkg_json) = self.load_package_json(&package_json_path)? { + return Ok(Some(pkg_json)); + } + } + + Ok(None) + } + + pub fn load_package_json( + &self, + package_json_path: &Path, + ) -> Result, PackageJsonLoadError> { + crate::package_json::load_pkg_json( + self.env.pkg_json_fs(), + package_json_path, + ) + } + + pub(super) fn legacy_main_resolve( + &self, + package_json: &PackageJson, + maybe_referrer: Option<&Url>, + referrer_kind: NodeModuleKind, + mode: NodeResolutionMode, + ) -> Result { + let maybe_main = if mode.is_types() { + match package_json.types.as_ref() { + Some(types) => Some(types.as_str()), + None => { + // fallback to checking the main entrypoint for + // a corresponding declaration file + if let Some(main) = package_json.main(referrer_kind) { + let main = package_json.path.parent().unwrap().join(main).clean(); + let decl_url_result = self.path_to_declaration_url( + &main, + maybe_referrer, + referrer_kind, + ); + // don't surface errors, fallback to checking the index now + if let Ok(url) = decl_url_result { + return Ok(url); + } 
+ } + None + } + } + } else { + package_json.main(referrer_kind) + }; + + if let Some(main) = maybe_main { + let guess = package_json.path.parent().unwrap().join(main).clean(); + if self.env.is_file_sync(&guess) { + return Ok(to_file_specifier(&guess)); + } + + // todo(dsherret): investigate exactly how node and typescript handles this + let endings = if mode.is_types() { + match referrer_kind { + NodeModuleKind::Cjs => { + vec![".d.ts", ".d.cts", "/index.d.ts", "/index.d.cts"] + } + NodeModuleKind::Esm => vec![ + ".d.ts", + ".d.mts", + "/index.d.ts", + "/index.d.mts", + ".d.cts", + "/index.d.cts", + ], + } + } else { + vec![".js", "/index.js"] + }; + for ending in endings { + let guess = package_json + .path + .parent() + .unwrap() + .join(format!("{main}{ending}")) + .clean(); + if self.env.is_file_sync(&guess) { + // TODO(bartlomieju): emitLegacyIndexDeprecation() + return Ok(to_file_specifier(&guess)); + } + } + } + + self.legacy_index_resolve( + package_json.path.parent().unwrap(), + maybe_referrer, + referrer_kind, + mode, + ) + } + + fn legacy_index_resolve( + &self, + directory: &Path, + maybe_referrer: Option<&Url>, + referrer_kind: NodeModuleKind, + mode: NodeResolutionMode, + ) -> Result { + let index_file_names = if mode.is_types() { + // todo(dsherret): investigate exactly how typescript does this + match referrer_kind { + NodeModuleKind::Cjs => vec!["index.d.ts", "index.d.cts"], + NodeModuleKind::Esm => vec!["index.d.ts", "index.d.mts", "index.d.cts"], + } + } else { + vec!["index.js"] + }; + for index_file_name in index_file_names { + let guess = directory.join(index_file_name).clean(); + if self.env.is_file_sync(&guess) { + // TODO(bartlomieju): emitLegacyIndexDeprecation() + return Ok(to_file_specifier(&guess)); + } + } + + if mode.is_types() { + Err( + TypesNotFoundError(Box::new(TypesNotFoundErrorData { + code_specifier: to_file_specifier(&directory.join("index.js")), + maybe_referrer: maybe_referrer.cloned(), + })) + .into(), + ) + } else { + 
Err( + ModuleNotFoundError { + specifier: to_file_specifier(&directory.join("index.js")), + typ: "module", + maybe_referrer: maybe_referrer.cloned(), + } + .into(), + ) + } + } +} + +fn resolve_bin_entry_value<'a>( + package_json: &'a PackageJson, + bin_name: Option<&str>, +) -> Result<&'a str, AnyError> { + let bin = match &package_json.bin { + Some(bin) => bin, + None => bail!( + "'{}' did not have a bin property", + package_json.path.display(), + ), + }; + let bin_entry = match bin { + Value::String(_) => { + if bin_name.is_some() + && bin_name + != package_json + .name + .as_deref() + .map(|name| name.rsplit_once('/').map_or(name, |(_, name)| name)) + { + None + } else { + Some(bin) + } + } + Value::Object(o) => { + if let Some(bin_name) = bin_name { + o.get(bin_name) + } else if o.len() == 1 + || o.len() > 1 && o.values().all(|v| v == o.values().next().unwrap()) + { + o.values().next() + } else { + package_json.name.as_ref().and_then(|n| o.get(n)) + } + } + _ => bail!( + "'{}' did not have a bin property with a string or object value", + package_json.path.display() + ), + }; + let bin_entry = match bin_entry { + Some(e) => e, + None => { + let prefix = package_json + .name + .as_ref() + .map(|n| { + let mut prefix = format!("npm:{}", n); + if let Some(version) = &package_json.version { + prefix.push('@'); + prefix.push_str(version); + } + prefix.push('/'); + prefix + }) + .unwrap_or_default(); + let keys = bin + .as_object() + .map(|o| { + o.keys() + .map(|k| format!(" * {prefix}{k}")) + .collect::>() + }) + .unwrap_or_default(); + bail!( + "'{}' did not have a bin entry{}{}", + package_json.path.display(), + bin_name + .or(package_json.name.as_deref()) + .map(|name| format!(" for '{}'", name)) + .unwrap_or_default(), + if keys.is_empty() { + "".to_string() + } else { + format!("\n\nPossibilities:\n{}", keys.join("\n")) + } + ) + } + }; + match bin_entry { + Value::String(s) => Ok(s), + _ => bail!( + "'{}' had a non-string sub property of bin", + 
package_json.path.display(), + ), + } +} + +fn to_file_path(url: &Url) -> PathBuf { + url + .to_file_path() + .unwrap_or_else(|_| panic!("Provided URL was not file:// URL: {url}")) +} + +fn to_file_path_string(url: &Url) -> String { + to_file_path(url).display().to_string() +} + +fn should_be_treated_as_relative_or_absolute_path(specifier: &str) -> bool { + if specifier.is_empty() { + return false; + } + + if specifier.starts_with('/') { + return true; + } + + is_relative_specifier(specifier) +} + +// TODO(ry) We very likely have this utility function elsewhere in Deno. +fn is_relative_specifier(specifier: &str) -> bool { + let specifier_len = specifier.len(); + let specifier_chars: Vec<_> = specifier.chars().take(3).collect(); + + if !specifier_chars.is_empty() && specifier_chars[0] == '.' { + if specifier_len == 1 || specifier_chars[1] == '/' { + return true; + } + if specifier_chars[1] == '.' + && (specifier_len == 2 || specifier_chars[2] == '/') + { + return true; + } + } + false +} + +/// Alternate `PathBuf::with_extension` that will handle known extensions +/// more intelligently. +fn with_known_extension(path: &Path, ext: &str) -> PathBuf { + const NON_DECL_EXTS: &[&str] = &[ + "cjs", "js", "json", "jsx", "mjs", "tsx", /* ex. 
types.d */ "d", + ]; + const DECL_EXTS: &[&str] = &["cts", "mts", "ts"]; + + let file_name = match path.file_name() { + Some(value) => value.to_string_lossy(), + None => return path.to_path_buf(), + }; + let lowercase_file_name = file_name.to_lowercase(); + let period_index = lowercase_file_name.rfind('.').and_then(|period_index| { + let ext = &lowercase_file_name[period_index + 1..]; + if DECL_EXTS.contains(&ext) { + if let Some(next_period_index) = + lowercase_file_name[..period_index].rfind('.') + { + if &lowercase_file_name[next_period_index + 1..period_index] == "d" { + Some(next_period_index) + } else { + Some(period_index) + } + } else { + Some(period_index) + } + } else if NON_DECL_EXTS.contains(&ext) { + Some(period_index) + } else { + None + } + }); + + let file_name = match period_index { + Some(period_index) => &file_name[..period_index], + None => &file_name, + }; + path.with_file_name(format!("{file_name}.{ext}")) +} + +fn to_specifier_display_string(url: &Url) -> String { + if let Ok(path) = url.to_file_path() { + path.display().to_string() + } else { + url.to_string() + } +} + +fn throw_invalid_subpath( + subpath: String, + package_json_path: &Path, + internal: bool, + maybe_referrer: Option<&Url>, +) -> InvalidModuleSpecifierError { + let ie = if internal { "imports" } else { "exports" }; + let reason = format!( + "request is not a valid subpath for the \"{}\" resolution of {}", + ie, + package_json_path.display(), + ); + InvalidModuleSpecifierError { + request: subpath, + reason: Cow::Owned(reason), + maybe_referrer: maybe_referrer.map(to_specifier_display_string), + } +} + +pub fn parse_npm_pkg_name( + specifier: &str, + referrer: &Url, +) -> Result<(String, String, bool), InvalidModuleSpecifierError> { + let mut separator_index = specifier.find('/'); + let mut valid_package_name = true; + let mut is_scoped = false; + if specifier.is_empty() { + valid_package_name = false; + } else if specifier.starts_with('@') { + is_scoped = true; + if let 
Some(index) = separator_index { + separator_index = specifier[index + 1..] + .find('/') + .map(|new_index| index + 1 + new_index); + } else { + valid_package_name = false; + } + } + + let package_name = if let Some(index) = separator_index { + specifier[0..index].to_string() + } else { + specifier.to_string() + }; + + // Package name cannot have leading . and cannot have percent-encoding or separators. + for ch in package_name.chars() { + if ch == '%' || ch == '\\' { + valid_package_name = false; + break; + } + } + + if !valid_package_name { + return Err(errors::InvalidModuleSpecifierError { + request: specifier.to_string(), + reason: Cow::Borrowed("is not a valid package name"), + maybe_referrer: Some(to_specifier_display_string(referrer)), + }); + } + + let package_subpath = if let Some(index) = separator_index { + format!(".{}", specifier.chars().skip(index).collect::()) + } else { + ".".to_string() + }; + + Ok((package_name, package_subpath, is_scoped)) +} + +fn pattern_key_compare(a: &str, b: &str) -> i32 { + let a_pattern_index = a.find('*'); + let b_pattern_index = b.find('*'); + + let base_len_a = if let Some(index) = a_pattern_index { + index + 1 + } else { + a.len() + }; + let base_len_b = if let Some(index) = b_pattern_index { + index + 1 + } else { + b.len() + }; + + if base_len_a > base_len_b { + return -1; + } + + if base_len_b > base_len_a { + return 1; + } + + if a_pattern_index.is_none() { + return 1; + } + + if b_pattern_index.is_none() { + return -1; + } + + if a.len() > b.len() { + return -1; + } + + if b.len() > a.len() { + return 1; + } + + 0 +} + +/// Gets the corresponding @types package for the provided package name. 
+fn types_package_name(package_name: &str) -> String { + debug_assert!(!package_name.starts_with("@types/")); + // Scoped packages will get two underscores for each slash + // https://github.com/DefinitelyTyped/DefinitelyTyped/tree/15f1ece08f7b498f4b9a2147c2a46e94416ca777#what-about-scoped-packages + format!("@types/{}", package_name.replace('/', "__")) +} + +/// Ex. returns `fs` for `node:fs` +fn get_module_name_from_builtin_node_module_specifier( + specifier: &Url, +) -> Option<&str> { + if specifier.scheme() != "node" { + return None; + } + + let (_, specifier) = specifier.as_str().split_once(':')?; + Some(specifier) +} + +#[cfg(test)] +mod tests { + use serde_json::json; + + use super::*; + + fn build_package_json(json: Value) -> PackageJson { + PackageJson::load_from_value(PathBuf::from("/package.json"), json) + } + + #[test] + fn test_resolve_bin_entry_value() { + // should resolve the specified value + let pkg_json = build_package_json(json!({ + "name": "pkg", + "version": "1.1.1", + "bin": { + "bin1": "./value1", + "bin2": "./value2", + "pkg": "./value3", + } + })); + assert_eq!( + resolve_bin_entry_value(&pkg_json, Some("bin1")).unwrap(), + "./value1" + ); + + // should resolve the value with the same name when not specified + assert_eq!( + resolve_bin_entry_value(&pkg_json, None).unwrap(), + "./value3" + ); + + // should not resolve when specified value does not exist + assert_eq!( + resolve_bin_entry_value(&pkg_json, Some("other"),) + .err() + .unwrap() + .to_string(), + concat!( + "'/package.json' did not have a bin entry for 'other'\n", + "\n", + "Possibilities:\n", + " * npm:pkg@1.1.1/bin1\n", + " * npm:pkg@1.1.1/bin2\n", + " * npm:pkg@1.1.1/pkg" + ) + ); + + // should not resolve when default value can't be determined + let pkg_json = build_package_json(json!({ + "name": "pkg", + "version": "1.1.1", + "bin": { + "bin": "./value1", + "bin2": "./value2", + } + })); + assert_eq!( + resolve_bin_entry_value(&pkg_json, None) + .err() + .unwrap() + 
.to_string(), + concat!( + "'/package.json' did not have a bin entry for 'pkg'\n", + "\n", + "Possibilities:\n", + " * npm:pkg@1.1.1/bin\n", + " * npm:pkg@1.1.1/bin2", + ) + ); + + // should resolve since all the values are the same + let pkg_json = build_package_json(json!({ + "name": "pkg", + "version": "1.2.3", + "bin": { + "bin1": "./value", + "bin2": "./value", + } + })); + assert_eq!( + resolve_bin_entry_value(&pkg_json, None,).unwrap(), + "./value" + ); + + // should not resolve when specified and is a string + let pkg_json = build_package_json(json!({ + "name": "pkg", + "version": "1.2.3", + "bin": "./value", + })); + assert_eq!( + resolve_bin_entry_value(&pkg_json, Some("path"),) + .err() + .unwrap() + .to_string(), + "'/package.json' did not have a bin entry for 'path'" + ); + + // no version in the package.json + let pkg_json = build_package_json(json!({ + "name": "pkg", + "bin": { + "bin1": "./value1", + "bin2": "./value2", + } + })); + assert_eq!( + resolve_bin_entry_value(&pkg_json, None) + .err() + .unwrap() + .to_string(), + concat!( + "'/package.json' did not have a bin entry for 'pkg'\n", + "\n", + "Possibilities:\n", + " * npm:pkg/bin1\n", + " * npm:pkg/bin2", + ) + ); + + // no name or version in the package.json + let pkg_json = build_package_json(json!({ + "bin": { + "bin1": "./value1", + "bin2": "./value2", + } + })); + assert_eq!( + resolve_bin_entry_value(&pkg_json, None) + .err() + .unwrap() + .to_string(), + concat!( + "'/package.json' did not have a bin entry\n", + "\n", + "Possibilities:\n", + " * bin1\n", + " * bin2", + ) + ); + } + + #[test] + fn test_parse_package_name() { + let dummy_referrer = Url::parse("http://example.com").unwrap(); + + assert_eq!( + parse_npm_pkg_name("fetch-blob", &dummy_referrer).unwrap(), + ("fetch-blob".to_string(), ".".to_string(), false) + ); + assert_eq!( + parse_npm_pkg_name("@vue/plugin-vue", &dummy_referrer).unwrap(), + ("@vue/plugin-vue".to_string(), ".".to_string(), true) + ); + assert_eq!( + 
parse_npm_pkg_name("@astrojs/prism/dist/highlighter", &dummy_referrer) + .unwrap(), + ( + "@astrojs/prism".to_string(), + "./dist/highlighter".to_string(), + true + ) + ); + } + + #[test] + fn test_with_known_extension() { + let cases = &[ + ("test", "d.ts", "test.d.ts"), + ("test.d.ts", "ts", "test.ts"), + ("test.worker", "d.ts", "test.worker.d.ts"), + ("test.d.mts", "js", "test.js"), + ]; + for (path, ext, expected) in cases { + let actual = with_known_extension(&PathBuf::from(path), ext); + assert_eq!(actual.to_string_lossy(), *expected); + } + } + + #[test] + fn test_types_package_name() { + assert_eq!(types_package_name("name"), "@types/name"); + assert_eq!( + types_package_name("@scoped/package"), + "@types/@scoped__package" + ); + } +} diff --git a/ext/node_resolver/sync.rs b/ext/node_resolver/sync.rs new file mode 100644 index 000000000..f6689a56a --- /dev/null +++ b/ext/node_resolver/sync.rs @@ -0,0 +1,86 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
+ +pub use inner::*; + +#[cfg(feature = "sync")] +mod inner { + #![allow(clippy::disallowed_types)] + + use std::ops::Deref; + use std::ops::DerefMut; + pub use std::sync::Arc as MaybeArc; + + pub struct MaybeArcMutexGuard<'lock, T>(std::sync::MutexGuard<'lock, T>); + + impl<'lock, T> Deref for MaybeArcMutexGuard<'lock, T> { + type Target = std::sync::MutexGuard<'lock, T>; + fn deref(&self) -> &std::sync::MutexGuard<'lock, T> { + &self.0 + } + } + + impl<'lock, T> DerefMut for MaybeArcMutexGuard<'lock, T> { + fn deref_mut(&mut self) -> &mut std::sync::MutexGuard<'lock, T> { + &mut self.0 + } + } + + #[derive(Debug)] + pub struct MaybeArcMutex(std::sync::Arc>); + impl MaybeArcMutex { + pub fn new(val: T) -> Self { + Self(std::sync::Arc::new(std::sync::Mutex::new(val))) + } + } + + impl<'lock, T> MaybeArcMutex { + pub fn lock(&'lock self) -> MaybeArcMutexGuard<'lock, T> { + MaybeArcMutexGuard(self.0.lock().unwrap()) + } + } + + pub use core::marker::Send as MaybeSend; + pub use core::marker::Sync as MaybeSync; +} + +#[cfg(not(feature = "sync"))] +mod inner { + use std::ops::Deref; + use std::ops::DerefMut; + + pub use std::rc::Rc as MaybeArc; + + pub struct MaybeArcMutexGuard<'lock, T>(std::cell::RefMut<'lock, T>); + + impl<'lock, T> Deref for MaybeArcMutexGuard<'lock, T> { + type Target = std::cell::RefMut<'lock, T>; + fn deref(&self) -> &std::cell::RefMut<'lock, T> { + &self.0 + } + } + + impl<'lock, T> DerefMut for MaybeArcMutexGuard<'lock, T> { + fn deref_mut(&mut self) -> &mut std::cell::RefMut<'lock, T> { + &mut self.0 + } + } + + #[derive(Debug)] + pub struct MaybeArcMutex(std::rc::Rc>); + impl MaybeArcMutex { + pub fn new(val: T) -> Self { + Self(std::rc::Rc::new(std::cell::RefCell::new(val))) + } + } + + impl<'lock, T> MaybeArcMutex { + pub fn lock(&'lock self) -> MaybeArcMutexGuard<'lock, T> { + MaybeArcMutexGuard(self.0.borrow_mut()) + } + } + + pub trait MaybeSync {} + impl MaybeSync for T where T: ?Sized {} + pub trait MaybeSend {} + impl MaybeSend for T 
where T: ?Sized {} +} diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index 7772b017b..9980df294 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -94,6 +94,7 @@ deno_webgpu.workspace = true deno_webidl.workspace = true deno_websocket.workspace = true deno_webstorage.workspace = true +node_resolver = { workspace = true, features = ["sync"] } dlopen2.workspace = true encoding_rs.workspace = true diff --git a/runtime/snapshot.rs b/runtime/snapshot.rs index 2144ff07a..da66bff5e 100644 --- a/runtime/snapshot.rs +++ b/runtime/snapshot.rs @@ -254,7 +254,7 @@ pub fn create_runtime_snapshot( deno_http::deno_http::init_ops_and_esm::(), deno_io::deno_io::init_ops_and_esm(Default::default()), deno_fs::deno_fs::init_ops_and_esm::(fs.clone()), - deno_node::deno_node::init_ops_and_esm::(None, None, fs), + deno_node::deno_node::init_ops_and_esm::(None, fs), runtime::init_ops_and_esm(), ops::runtime::deno_runtime::init_ops("deno:runtime".parse().unwrap()), ops::worker_host::deno_worker_host::init_ops( diff --git a/runtime/web_worker.rs b/runtime/web_worker.rs index cf0384196..2611b6f34 100644 --- a/runtime/web_worker.rs +++ b/runtime/web_worker.rs @@ -43,7 +43,7 @@ use deno_fs::FileSystem; use deno_http::DefaultHttpPropertyExtractor; use deno_io::Stdio; use deno_kv::dynamic::MultiBackendDbHandler; -use deno_node::NodeResolver; +use deno_node::NodeExtInitServices; use deno_permissions::PermissionsContainer; use deno_terminal::colors; use deno_tls::RootCertStoreProvider; @@ -364,8 +364,7 @@ pub struct WebWorkerOptions { pub seed: Option, pub fs: Arc, pub module_loader: Rc, - pub node_resolver: Option>, - pub npm_resolver: Option>, + pub node_services: Option, pub create_web_worker_cb: Arc, pub format_js_error_fn: Option>, pub worker_type: WebWorkerType, @@ -490,8 +489,7 @@ impl WebWorker { options.fs.clone(), ), deno_node::deno_node::init_ops_and_esm::( - options.node_resolver, - options.npm_resolver, + options.node_services, options.fs, ), // Runtime ops that are 
always initialized for WebWorkers diff --git a/runtime/worker.rs b/runtime/worker.rs index fc11be582..bd67c8706 100644 --- a/runtime/worker.rs +++ b/runtime/worker.rs @@ -38,6 +38,7 @@ use deno_fs::FileSystem; use deno_http::DefaultHttpPropertyExtractor; use deno_io::Stdio; use deno_kv::dynamic::MultiBackendDbHandler; +use deno_node::NodeExtInitServices; use deno_permissions::PermissionsContainer; use deno_tls::RootCertStoreProvider; use deno_tls::TlsKeys; @@ -155,8 +156,7 @@ pub struct WorkerOptions { /// If not provided runtime will error if code being /// executed tries to load modules. pub module_loader: Rc, - pub node_resolver: Option>, - pub npm_resolver: Option>, + pub node_services: Option, // Callbacks invoked when creating new instance of WebWorker pub create_web_worker_cb: Arc, pub format_js_error_fn: Option>, @@ -224,8 +224,7 @@ impl Default for WorkerOptions { cache_storage_dir: Default::default(), broadcast_channel: Default::default(), root_cert_store_provider: Default::default(), - node_resolver: Default::default(), - npm_resolver: Default::default(), + node_services: Default::default(), blob_store: Default::default(), extensions: Default::default(), startup_snapshot: Default::default(), @@ -414,8 +413,7 @@ impl MainWorker { options.fs.clone(), ), deno_node::deno_node::init_ops_and_esm::( - options.node_resolver, - options.npm_resolver, + options.node_services, options.fs, ), // Ops from this crate -- cgit v1.2.3