summaryrefslogtreecommitdiff
path: root/resolvers
diff options
context:
space:
mode:
authorDavid Sherret <dsherret@users.noreply.github.com>2024-09-28 19:17:48 -0400
committerGitHub <noreply@github.com>2024-09-28 19:17:48 -0400
commit5faf769ac61b627d14710cdf487de7cd4eb3f9d3 (patch)
tree2cc4ab975522b3c8845ab3040c010fd998a769a6 /resolvers
parent3138478f66823348eb745c7f0c2d34eed378a3f0 (diff)
refactor: extract out sloppy imports resolution from CLI crate (#25920)
This is slow progress towards creating a `deno_resolver` crate. Waiting on: * https://github.com/denoland/deno/pull/25918 * https://github.com/denoland/deno/pull/25916
Diffstat (limited to 'resolvers')
-rw-r--r--resolvers/deno/Cargo.toml24
-rw-r--r--resolvers/deno/README.md3
-rw-r--r--resolvers/deno/lib.rs3
-rw-r--r--resolvers/deno/sloppy_imports.rs511
-rw-r--r--resolvers/node/Cargo.toml32
-rw-r--r--resolvers/node/README.md6
-rw-r--r--resolvers/node/analyze.rs654
-rw-r--r--resolvers/node/clippy.toml48
-rw-r--r--resolvers/node/env.rs39
-rw-r--r--resolvers/node/errors.rs769
-rw-r--r--resolvers/node/lib.rs27
-rw-r--r--resolvers/node/npm.rs41
-rw-r--r--resolvers/node/package_json.rs53
-rw-r--r--resolvers/node/path.rs179
-rw-r--r--resolvers/node/resolution.rs2023
-rw-r--r--resolvers/node/sync.rs23
16 files changed, 4435 insertions, 0 deletions
diff --git a/resolvers/deno/Cargo.toml b/resolvers/deno/Cargo.toml
new file mode 100644
index 000000000..23c43810a
--- /dev/null
+++ b/resolvers/deno/Cargo.toml
@@ -0,0 +1,24 @@
+# Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+[package]
+name = "deno_resolver"
+version = "0.0.1"
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+readme = "README.md"
+repository.workspace = true
+description = "Deno resolution algorithm"
+
+[lib]
+path = "lib.rs"
+
+[features]
+
+[dependencies]
+deno_media_type.workspace = true
+deno_path_util.workspace = true
+url.workspace = true
+
+[dev-dependencies]
+test_util.workspace = true
diff --git a/resolvers/deno/README.md b/resolvers/deno/README.md
new file mode 100644
index 000000000..f51619a31
--- /dev/null
+++ b/resolvers/deno/README.md
@@ -0,0 +1,3 @@
+# deno_resolver
+
+Deno resolution algorithm.
diff --git a/resolvers/deno/lib.rs b/resolvers/deno/lib.rs
new file mode 100644
index 000000000..7d7796d77
--- /dev/null
+++ b/resolvers/deno/lib.rs
@@ -0,0 +1,3 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+pub mod sloppy_imports;
diff --git a/resolvers/deno/sloppy_imports.rs b/resolvers/deno/sloppy_imports.rs
new file mode 100644
index 000000000..e4d0898e5
--- /dev/null
+++ b/resolvers/deno/sloppy_imports.rs
@@ -0,0 +1,511 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use std::borrow::Cow;
+use std::path::Path;
+use std::path::PathBuf;
+
+use deno_media_type::MediaType;
+use deno_path_util::url_to_file_path;
+use url::Url;
+
+/// Result of a file system stat: whether a path exists as a file or a
+/// directory.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum SloppyImportsFsEntry {
+ File,
+ Dir,
+}
+
+/// A successful "sloppy" resolution of a specifier to an existing file,
+/// categorized by the kind of rewrite that was necessary.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum SloppyImportsResolution {
+ /// Ex. `./file.js` to `./file.ts`
+ JsToTs(Url),
+ /// Ex. `./file` to `./file.ts`
+ NoExtension(Url),
+ /// Ex. `./dir` to `./dir/index.ts`
+ Directory(Url),
+}
+
+impl SloppyImportsResolution {
+ /// Returns a reference to the resolved specifier.
+ pub fn as_specifier(&self) -> &Url {
+ match self {
+ Self::JsToTs(specifier) => specifier,
+ Self::NoExtension(specifier) => specifier,
+ Self::Directory(specifier) => specifier,
+ }
+ }
+
+ /// Consumes the resolution, returning the resolved specifier.
+ pub fn into_specifier(self) -> Url {
+ match self {
+ Self::JsToTs(specifier) => specifier,
+ Self::NoExtension(specifier) => specifier,
+ Self::Directory(specifier) => specifier,
+ }
+ }
+
+ /// Lowercase suggestion message prefixed with "Maybe"
+ /// (ex. `Maybe add a '.ts' extension`).
+ pub fn as_suggestion_message(&self) -> String {
+ format!("Maybe {}", self.as_base_message())
+ }
+
+ /// Base message with the first character upper-cased and a trailing
+ /// period, suitable for an editor quick fix title.
+ pub fn as_quick_fix_message(&self) -> String {
+ let message = self.as_base_message();
+ let mut chars = message.chars();
+ format!(
+ "{}{}.",
+ chars.next().unwrap().to_uppercase(),
+ chars.as_str()
+ )
+ }
+
+ // Shared lowercase message body used by both the suggestion and the
+ // quick fix variants above.
+ fn as_base_message(&self) -> String {
+ match self {
+ SloppyImportsResolution::JsToTs(specifier) => {
+ let media_type = MediaType::from_specifier(specifier);
+ format!("change the extension to '{}'", media_type.as_ts_extension())
+ }
+ SloppyImportsResolution::NoExtension(specifier) => {
+ let media_type = MediaType::from_specifier(specifier);
+ format!("add a '{}' extension", media_type.as_ts_extension())
+ }
+ SloppyImportsResolution::Directory(specifier) => {
+ // take the final path segment of the resolved index file
+ let file_name = specifier
+ .path()
+ .rsplit_once('/')
+ .map(|(_, file_name)| file_name)
+ .unwrap_or(specifier.path());
+ format!("specify path to '{}' file in directory instead", file_name)
+ }
+ }
+ }
+}
+
+/// The kind of resolution currently being done.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum SloppyImportsResolutionMode {
+ /// Resolving for code that will be executed.
+ Execution,
+ /// Resolving for code that will be used for type information.
+ Types,
+}
+
+impl SloppyImportsResolutionMode {
+ /// Returns `true` when resolving for type information.
+ pub fn is_types(&self) -> bool {
+ *self == SloppyImportsResolutionMode::Types
+ }
+}
+
+/// File system abstraction the resolver probes through, so callers can
+/// plug in a real or virtual file system.
+pub trait SloppyImportResolverFs {
+ /// Stats the path, returning `None` when it does not exist.
+ fn stat_sync(&self, path: &Path) -> Option<SloppyImportsFsEntry>;
+
+ /// Returns `true` when the path exists and is a file.
+ fn is_file(&self, path: &Path) -> bool {
+ self.stat_sync(path) == Some(SloppyImportsFsEntry::File)
+ }
+}
+
+/// Resolves specifiers "sloppily" (ex. `./file.js` -> `./file.ts`) by
+/// probing the provided file system for alternative files.
+#[derive(Debug)]
+pub struct SloppyImportsResolver<Fs: SloppyImportResolverFs> {
+ fs: Fs,
+}
+
+impl<Fs: SloppyImportResolverFs> SloppyImportsResolver<Fs> {
+ /// Creates a resolver backed by the given file system implementation.
+ pub fn new(fs: Fs) -> Self {
+ Self { fs }
+ }
+
+ /// Attempts to "sloppily" resolve `specifier` to an existing file by
+ /// probing alternative extensions and directory index files.
+ ///
+ /// Returns `None` when the specifier is not a `file:` URL, when it
+ /// already resolves as-is (so no fix is needed), or when no probe path
+ /// exists on the file system.
+ pub fn resolve(
+ &self,
+ specifier: &Url,
+ mode: SloppyImportsResolutionMode,
+ ) -> Option<SloppyImportsResolution> {
+ // Strips the media type's extension from the path; for
+ // `MediaType::Unknown` the path is returned unchanged.
+ fn path_without_ext(
+ path: &Path,
+ media_type: MediaType,
+ ) -> Option<Cow<str>> {
+ let old_path_str = path.to_string_lossy();
+ match media_type {
+ MediaType::Unknown => Some(old_path_str),
+ _ => old_path_str
+ .strip_suffix(media_type.as_ts_extension())
+ .map(|s| Cow::Owned(s.to_string())),
+ }
+ }
+
+ // Expands an extension-less path into one candidate path per media
+ // type, skipping the specifier's original media type.
+ fn media_types_to_paths(
+ path_no_ext: &str,
+ original_media_type: MediaType,
+ probe_media_type_types: Vec<MediaType>,
+ reason: SloppyImportsResolutionReason,
+ ) -> Vec<(PathBuf, SloppyImportsResolutionReason)> {
+ probe_media_type_types
+ .into_iter()
+ .filter(|media_type| *media_type != original_media_type)
+ .map(|media_type| {
+ (
+ PathBuf::from(format!(
+ "{}{}",
+ path_no_ext,
+ media_type.as_ts_extension()
+ )),
+ reason,
+ )
+ })
+ .collect::<Vec<_>>()
+ }
+
+ if specifier.scheme() != "file" {
+ return None;
+ }
+
+ let path = url_to_file_path(specifier).ok()?;
+
+ #[derive(Clone, Copy)]
+ enum SloppyImportsResolutionReason {
+ JsToTs,
+ NoExtension,
+ Directory,
+ }
+
+ // Candidate paths in priority order; the first one that exists as a
+ // file wins below.
+ let probe_paths: Vec<(PathBuf, SloppyImportsResolutionReason)> =
+ match self.fs.stat_sync(&path) {
+ Some(SloppyImportsFsEntry::File) => {
+ if mode.is_types() {
+ let media_type = MediaType::from_specifier(specifier);
+ // attempt to resolve the .d.ts file before the .js file
+ let probe_media_type_types = match media_type {
+ MediaType::JavaScript => {
+ vec![(MediaType::Dts), MediaType::JavaScript]
+ }
+ MediaType::Mjs => {
+ vec![MediaType::Dmts, MediaType::Dts, MediaType::Mjs]
+ }
+ MediaType::Cjs => {
+ vec![MediaType::Dcts, MediaType::Dts, MediaType::Cjs]
+ }
+ _ => return None,
+ };
+ let path_no_ext = path_without_ext(&path, media_type)?;
+ media_types_to_paths(
+ &path_no_ext,
+ media_type,
+ probe_media_type_types,
+ SloppyImportsResolutionReason::JsToTs,
+ )
+ } else {
+ // file exists and we're resolving for execution, so the
+ // specifier is already correct
+ return None;
+ }
+ }
+ entry @ None | entry @ Some(SloppyImportsFsEntry::Dir) => {
+ let media_type = MediaType::from_specifier(specifier);
+ let probe_media_type_types = match media_type {
+ MediaType::JavaScript => (
+ if mode.is_types() {
+ vec![MediaType::TypeScript, MediaType::Tsx, MediaType::Dts]
+ } else {
+ vec![MediaType::TypeScript, MediaType::Tsx]
+ },
+ SloppyImportsResolutionReason::JsToTs,
+ ),
+ MediaType::Jsx => {
+ (vec![MediaType::Tsx], SloppyImportsResolutionReason::JsToTs)
+ }
+ MediaType::Mjs => (
+ if mode.is_types() {
+ vec![MediaType::Mts, MediaType::Dmts, MediaType::Dts]
+ } else {
+ vec![MediaType::Mts]
+ },
+ SloppyImportsResolutionReason::JsToTs,
+ ),
+ MediaType::Cjs => (
+ if mode.is_types() {
+ vec![MediaType::Cts, MediaType::Dcts, MediaType::Dts]
+ } else {
+ vec![MediaType::Cts]
+ },
+ SloppyImportsResolutionReason::JsToTs,
+ ),
+ // these extensions are never rewritten
+ MediaType::TypeScript
+ | MediaType::Mts
+ | MediaType::Cts
+ | MediaType::Dts
+ | MediaType::Dmts
+ | MediaType::Dcts
+ | MediaType::Tsx
+ | MediaType::Json
+ | MediaType::Wasm
+ | MediaType::TsBuildInfo
+ | MediaType::SourceMap => {
+ return None;
+ }
+ // no extension at all; probe every plausible extension
+ MediaType::Unknown => (
+ if mode.is_types() {
+ vec![
+ MediaType::TypeScript,
+ MediaType::Tsx,
+ MediaType::Mts,
+ MediaType::Dts,
+ MediaType::Dmts,
+ MediaType::Dcts,
+ MediaType::JavaScript,
+ MediaType::Jsx,
+ MediaType::Mjs,
+ ]
+ } else {
+ vec![
+ MediaType::TypeScript,
+ MediaType::JavaScript,
+ MediaType::Tsx,
+ MediaType::Jsx,
+ MediaType::Mts,
+ MediaType::Mjs,
+ ]
+ },
+ SloppyImportsResolutionReason::NoExtension,
+ ),
+ };
+ let mut probe_paths = match path_without_ext(&path, media_type) {
+ Some(path_no_ext) => media_types_to_paths(
+ &path_no_ext,
+ media_type,
+ probe_media_type_types.0,
+ probe_media_type_types.1,
+ ),
+ None => vec![],
+ };
+
+ if matches!(entry, Some(SloppyImportsFsEntry::Dir)) {
+ // try to resolve at the index file
+ if mode.is_types() {
+ probe_paths.push((
+ path.join("index.ts"),
+ SloppyImportsResolutionReason::Directory,
+ ));
+
+ probe_paths.push((
+ path.join("index.mts"),
+ SloppyImportsResolutionReason::Directory,
+ ));
+ probe_paths.push((
+ path.join("index.d.ts"),
+ SloppyImportsResolutionReason::Directory,
+ ));
+ probe_paths.push((
+ path.join("index.d.mts"),
+ SloppyImportsResolutionReason::Directory,
+ ));
+ probe_paths.push((
+ path.join("index.js"),
+ SloppyImportsResolutionReason::Directory,
+ ));
+ probe_paths.push((
+ path.join("index.mjs"),
+ SloppyImportsResolutionReason::Directory,
+ ));
+ probe_paths.push((
+ path.join("index.tsx"),
+ SloppyImportsResolutionReason::Directory,
+ ));
+ probe_paths.push((
+ path.join("index.jsx"),
+ SloppyImportsResolutionReason::Directory,
+ ));
+ } else {
+ probe_paths.push((
+ path.join("index.ts"),
+ SloppyImportsResolutionReason::Directory,
+ ));
+ probe_paths.push((
+ path.join("index.mts"),
+ SloppyImportsResolutionReason::Directory,
+ ));
+ probe_paths.push((
+ path.join("index.tsx"),
+ SloppyImportsResolutionReason::Directory,
+ ));
+ probe_paths.push((
+ path.join("index.js"),
+ SloppyImportsResolutionReason::Directory,
+ ));
+ probe_paths.push((
+ path.join("index.mjs"),
+ SloppyImportsResolutionReason::Directory,
+ ));
+ probe_paths.push((
+ path.join("index.jsx"),
+ SloppyImportsResolutionReason::Directory,
+ ));
+ }
+ }
+ if probe_paths.is_empty() {
+ return None;
+ }
+ probe_paths
+ }
+ };
+
+ // return the first candidate that exists as a file
+ for (probe_path, reason) in probe_paths {
+ if self.fs.is_file(&probe_path) {
+ if let Ok(specifier) = Url::from_file_path(probe_path) {
+ match reason {
+ SloppyImportsResolutionReason::JsToTs => {
+ return Some(SloppyImportsResolution::JsToTs(specifier));
+ }
+ SloppyImportsResolutionReason::NoExtension => {
+ return Some(SloppyImportsResolution::NoExtension(specifier));
+ }
+ SloppyImportsResolutionReason::Directory => {
+ return Some(SloppyImportsResolution::Directory(specifier));
+ }
+ }
+ }
+ }
+ }
+
+ None
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use test_util::TestContext;
+
+ use super::*;
+
+ // Integration-style tests that exercise the resolver against a real
+ // temp directory (via std::fs in the test-only resolver fs impl).
+ #[test]
+ fn test_unstable_sloppy_imports() {
+ fn resolve(specifier: &Url) -> Option<SloppyImportsResolution> {
+ resolve_with_mode(specifier, SloppyImportsResolutionMode::Execution)
+ }
+
+ fn resolve_types(specifier: &Url) -> Option<SloppyImportsResolution> {
+ resolve_with_mode(specifier, SloppyImportsResolutionMode::Types)
+ }
+
+ fn resolve_with_mode(
+ specifier: &Url,
+ mode: SloppyImportsResolutionMode,
+ ) -> Option<SloppyImportsResolution> {
+ // real file system backed implementation for the tests
+ struct RealSloppyImportsResolverFs;
+ impl SloppyImportResolverFs for RealSloppyImportsResolverFs {
+ fn stat_sync(&self, path: &Path) -> Option<SloppyImportsFsEntry> {
+ let stat = std::fs::metadata(path).ok()?;
+ if stat.is_dir() {
+ Some(SloppyImportsFsEntry::Dir)
+ } else if stat.is_file() {
+ Some(SloppyImportsFsEntry::File)
+ } else {
+ None
+ }
+ }
+ }
+
+ SloppyImportsResolver::new(RealSloppyImportsResolverFs)
+ .resolve(specifier, mode)
+ }
+
+ let context = TestContext::default();
+ let temp_dir = context.temp_dir().path();
+
+ // scenarios like resolving ./example.js to ./example.ts
+ for (ext_from, ext_to) in [("js", "ts"), ("js", "tsx"), ("mjs", "mts")] {
+ let ts_file = temp_dir.join(format!("file.{}", ext_to));
+ ts_file.write("");
+ assert_eq!(resolve(&ts_file.url_file()), None);
+ assert_eq!(
+ resolve(
+ &temp_dir
+ .url_dir()
+ .join(&format!("file.{}", ext_from))
+ .unwrap()
+ ),
+ Some(SloppyImportsResolution::JsToTs(ts_file.url_file())),
+ );
+ ts_file.remove_file();
+ }
+
+ // no extension scenarios
+ for ext in ["js", "ts", "js", "tsx", "jsx", "mjs", "mts"] {
+ let file = temp_dir.join(format!("file.{}", ext));
+ file.write("");
+ assert_eq!(
+ resolve(
+ &temp_dir
+ .url_dir()
+ .join("file") // no ext
+ .unwrap()
+ ),
+ Some(SloppyImportsResolution::NoExtension(file.url_file()))
+ );
+ file.remove_file();
+ }
+
+ // .ts and .js exists, .js specified (goes to specified)
+ {
+ let ts_file = temp_dir.join("file.ts");
+ ts_file.write("");
+ let js_file = temp_dir.join("file.js");
+ js_file.write("");
+ assert_eq!(resolve(&js_file.url_file()), None);
+ }
+
+ // only js exists, .js specified
+ {
+ let js_only_file = temp_dir.join("js_only.js");
+ js_only_file.write("");
+ assert_eq!(resolve(&js_only_file.url_file()), None);
+ assert_eq!(resolve_types(&js_only_file.url_file()), None);
+ }
+
+ // resolving a directory to an index file
+ {
+ let routes_dir = temp_dir.join("routes");
+ routes_dir.create_dir_all();
+ let index_file = routes_dir.join("index.ts");
+ index_file.write("");
+ assert_eq!(
+ resolve(&routes_dir.url_file()),
+ Some(SloppyImportsResolution::Directory(index_file.url_file())),
+ );
+ }
+
+ // both a directory and a file with specifier is present
+ {
+ let api_dir = temp_dir.join("api");
+ api_dir.create_dir_all();
+ let bar_file = api_dir.join("bar.ts");
+ bar_file.write("");
+ let api_file = temp_dir.join("api.ts");
+ api_file.write("");
+ assert_eq!(
+ resolve(&api_dir.url_file()),
+ Some(SloppyImportsResolution::NoExtension(api_file.url_file())),
+ );
+ }
+ }
+
+ #[test]
+ fn test_sloppy_import_resolution_suggestion_message() {
+ // directory
+ assert_eq!(
+ SloppyImportsResolution::Directory(
+ Url::parse("file:///dir/index.js").unwrap()
+ )
+ .as_suggestion_message(),
+ "Maybe specify path to 'index.js' file in directory instead"
+ );
+ // no ext
+ assert_eq!(
+ SloppyImportsResolution::NoExtension(
+ Url::parse("file:///dir/index.mjs").unwrap()
+ )
+ .as_suggestion_message(),
+ "Maybe add a '.mjs' extension"
+ );
+ // js to ts
+ assert_eq!(
+ SloppyImportsResolution::JsToTs(
+ Url::parse("file:///dir/index.mts").unwrap()
+ )
+ .as_suggestion_message(),
+ "Maybe change the extension to '.mts'"
+ );
+ }
+}
diff --git a/resolvers/node/Cargo.toml b/resolvers/node/Cargo.toml
new file mode 100644
index 000000000..104204569
--- /dev/null
+++ b/resolvers/node/Cargo.toml
@@ -0,0 +1,32 @@
+# Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+[package]
+name = "node_resolver"
+version = "0.7.0"
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+readme = "README.md"
+repository.workspace = true
+description = "Node.js module resolution algorithm used in Deno"
+
+[lib]
+path = "lib.rs"
+
+[features]
+sync = ["deno_package_json/sync"]
+
+[dependencies]
+anyhow.workspace = true
+async-trait.workspace = true
+deno_media_type.workspace = true
+deno_package_json.workspace = true
+futures.workspace = true
+lazy-regex.workspace = true
+once_cell.workspace = true
+path-clean = "=0.1.0"
+regex.workspace = true
+serde_json.workspace = true
+thiserror.workspace = true
+tokio.workspace = true
+url.workspace = true
diff --git a/resolvers/node/README.md b/resolvers/node/README.md
new file mode 100644
index 000000000..8f2f63ca1
--- /dev/null
+++ b/resolvers/node/README.md
@@ -0,0 +1,6 @@
+# Node Resolver
+
+[![crates](https://img.shields.io/crates/v/node_resolver.svg)](https://crates.io/crates/node_resolver)
+[![docs](https://docs.rs/node_resolver/badge.svg)](https://docs.rs/node_resolver)
+
+Provides Node.js compatible resolution for the Deno project.
diff --git a/resolvers/node/analyze.rs b/resolvers/node/analyze.rs
new file mode 100644
index 000000000..deb56d064
--- /dev/null
+++ b/resolvers/node/analyze.rs
@@ -0,0 +1,654 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use std::borrow::Cow;
+use std::collections::BTreeSet;
+use std::collections::HashSet;
+use std::path::Path;
+use std::path::PathBuf;
+
+use futures::future::LocalBoxFuture;
+use futures::stream::FuturesUnordered;
+use futures::FutureExt;
+use futures::StreamExt;
+use once_cell::sync::Lazy;
+
+use anyhow::Context;
+use anyhow::Error as AnyError;
+use url::Url;
+
+use crate::env::NodeResolverEnv;
+use crate::package_json::load_pkg_json;
+use crate::path::to_file_specifier;
+use crate::resolution::NodeResolverRc;
+use crate::NodeModuleKind;
+use crate::NodeResolutionMode;
+use crate::NpmResolverRc;
+use crate::PathClean;
+
+/// Outcome of analyzing a module that was expected to be CommonJS.
+#[derive(Debug, Clone)]
+pub enum CjsAnalysis {
+ /// File was found to be an ES module and the translator should
+ /// load the code as ESM.
+ Esm(String),
+ /// File is CommonJS; contains the discovered exports and reexports.
+ Cjs(CjsAnalysisExports),
+}
+
+/// Exports and reexports discovered by static analysis of a CJS module.
+#[derive(Debug, Clone)]
+pub struct CjsAnalysisExports {
+ // names directly exported by the module
+ pub exports: Vec<String>,
+ // specifiers the module re-exports from (analyzed recursively)
+ pub reexports: Vec<String>,
+}
+
+/// Code analyzer for CJS and ESM files.
+#[async_trait::async_trait(?Send)]
+pub trait CjsCodeAnalyzer {
+ /// Analyzes CommonJs code for exports and reexports, which is
+ /// then used to determine the wrapper ESM module exports.
+ ///
+ /// Note that the source is provided by the caller when the caller
+ /// already has it. If the source is needed by the implementation,
+ /// then it can use the provided source, or otherwise load it if
+ /// necessary.
+ ///
+ /// Implementations return `CjsAnalysis::Esm` with the source when the
+ /// file turns out to be an ES module instead of CommonJS.
+ async fn analyze_cjs(
+ &self,
+ specifier: &Url,
+ maybe_source: Option<String>,
+ ) -> Result<CjsAnalysis, AnyError>;
+}
+
+/// Translates CommonJS modules into ESM wrapper modules using a pluggable
+/// code analyzer, environment, and node/npm resolvers.
+pub struct NodeCodeTranslator<
+ TCjsCodeAnalyzer: CjsCodeAnalyzer,
+ TNodeResolverEnv: NodeResolverEnv,
+> {
+ cjs_code_analyzer: TCjsCodeAnalyzer,
+ env: TNodeResolverEnv,
+ node_resolver: NodeResolverRc<TNodeResolverEnv>,
+ npm_resolver: NpmResolverRc,
+}
+
+impl<TCjsCodeAnalyzer: CjsCodeAnalyzer, TNodeResolverEnv: NodeResolverEnv>
+ NodeCodeTranslator<TCjsCodeAnalyzer, TNodeResolverEnv>
+{
+ /// Creates a new translator from its component parts.
+ pub fn new(
+ cjs_code_analyzer: TCjsCodeAnalyzer,
+ env: TNodeResolverEnv,
+ node_resolver: NodeResolverRc<TNodeResolverEnv>,
+ npm_resolver: NpmResolverRc,
+ ) -> Self {
+ Self {
+ cjs_code_analyzer,
+ env,
+ node_resolver,
+ npm_resolver,
+ }
+ }
+
+ /// Translates given CJS module into ESM. This function will perform static
+ /// analysis on the file to find defined exports and reexports.
+ ///
+ /// For all discovered reexports the analysis will be performed recursively.
+ ///
+ /// If successful a source code for equivalent ES module is returned.
+ pub async fn translate_cjs_to_esm(
+ &self,
+ entry_specifier: &Url,
+ source: Option<String>,
+ ) -> Result<String, AnyError> {
+ let mut temp_var_count = 0;
+
+ let analysis = self
+ .cjs_code_analyzer
+ .analyze_cjs(entry_specifier, source)
+ .await?;
+
+ // if the analyzer determined the file is actually ESM, return the
+ // source unchanged
+ let analysis = match analysis {
+ CjsAnalysis::Esm(source) => return Ok(source),
+ CjsAnalysis::Cjs(analysis) => analysis,
+ };
+
+ // the wrapper module loads the CJS module via a created require()
+ let mut source = vec![
+ r#"import {createRequire as __internalCreateRequire} from "node:module";
+ const require = __internalCreateRequire(import.meta.url);"#
+ .to_string(),
+ ];
+
+ // use a BTreeSet to make the output deterministic for v8's code cache
+ let mut all_exports = analysis.exports.into_iter().collect::<BTreeSet<_>>();
+
+ if !analysis.reexports.is_empty() {
+ let mut errors = Vec::new();
+ self
+ .analyze_reexports(
+ entry_specifier,
+ analysis.reexports,
+ &mut all_exports,
+ &mut errors,
+ )
+ .await;
+
+ // surface errors afterwards in a deterministic way
+ if !errors.is_empty() {
+ errors.sort_by_cached_key(|e| e.to_string());
+ return Err(errors.remove(0));
+ }
+ }
+
+ source.push(format!(
+ "const mod = require(\"{}\");",
+ entry_specifier
+ .to_file_path()
+ .unwrap()
+ .to_str()
+ .unwrap()
+ .replace('\\', "\\\\")
+ .replace('\'', "\\\'")
+ .replace('\"', "\\\"")
+ ));
+
+ // re-export every discovered name; "default" is handled separately
+ // by the `export default mod;` below
+ for export in &all_exports {
+ if export.as_str() != "default" {
+ add_export(
+ &mut source,
+ export,
+ &format!("mod[\"{}\"]", escape_for_double_quote_string(export)),
+ &mut temp_var_count,
+ );
+ }
+ }
+
+ source.push("export default mod;".to_string());
+
+ let translated_source = source.join("\n");
+ Ok(translated_source)
+ }
+
+ /// Recursively resolves and analyzes the given reexports, accumulating
+ /// their exports into `all_exports`. Modules are analyzed concurrently;
+ /// errors are collected so they can be reported deterministically.
+ async fn analyze_reexports<'a>(
+ &'a self,
+ entry_specifier: &url::Url,
+ reexports: Vec<String>,
+ all_exports: &mut BTreeSet<String>,
+ // this goes through the modules concurrently, so collect
+ // the errors in order to be deterministic
+ errors: &mut Vec<anyhow::Error>,
+ ) {
+ struct Analysis {
+ reexport_specifier: url::Url,
+ referrer: url::Url,
+ analysis: CjsAnalysis,
+ }
+
+ type AnalysisFuture<'a> = LocalBoxFuture<'a, Result<Analysis, AnyError>>;
+
+ // tracks specifiers already queued so cyclic reexports terminate
+ let mut handled_reexports: HashSet<Url> = HashSet::default();
+ handled_reexports.insert(entry_specifier.clone());
+ let mut analyze_futures: FuturesUnordered<AnalysisFuture<'a>> =
+ FuturesUnordered::new();
+ let cjs_code_analyzer = &self.cjs_code_analyzer;
+ let mut handle_reexports =
+ |referrer: url::Url,
+ reexports: Vec<String>,
+ analyze_futures: &mut FuturesUnordered<AnalysisFuture<'a>>,
+ errors: &mut Vec<anyhow::Error>| {
+ // 1. Resolve the re-exports and start a future to analyze each one
+ for reexport in reexports {
+ let result = self.resolve(
+ &reexport,
+ &referrer,
+ // FIXME(bartlomieju): check if these conditions are okay, probably
+ // should be `deno-require`, because `deno` is already used in `esm_resolver.rs`
+ &["deno", "node", "require", "default"],
+ NodeResolutionMode::Execution,
+ );
+ let reexport_specifier = match result {
+ Ok(Some(specifier)) => specifier,
+ Ok(None) => continue,
+ Err(err) => {
+ errors.push(err);
+ continue;
+ }
+ };
+
+ if !handled_reexports.insert(reexport_specifier.clone()) {
+ continue;
+ }
+
+ let referrer = referrer.clone();
+ let future = async move {
+ let analysis = cjs_code_analyzer
+ .analyze_cjs(&reexport_specifier, None)
+ .await
+ .with_context(|| {
+ format!(
+ "Could not load '{}' ({}) referenced from {}",
+ reexport, reexport_specifier, referrer
+ )
+ })?;
+
+ Ok(Analysis {
+ reexport_specifier,
+ referrer,
+ analysis,
+ })
+ }
+ .boxed_local();
+ analyze_futures.push(future);
+ }
+ };
+
+ handle_reexports(
+ entry_specifier.clone(),
+ reexports,
+ &mut analyze_futures,
+ errors,
+ );
+
+ while let Some(analysis_result) = analyze_futures.next().await {
+ // 2. Look at the analysis result and resolve its exports and re-exports
+ let Analysis {
+ reexport_specifier,
+ referrer,
+ analysis,
+ } = match analysis_result {
+ Ok(analysis) => analysis,
+ Err(err) => {
+ errors.push(err);
+ continue;
+ }
+ };
+ match analysis {
+ CjsAnalysis::Esm(_) => {
+ // todo(dsherret): support this once supporting requiring ES modules
+ errors.push(anyhow::anyhow!(
+ "Cannot require ES module '{}' from '{}'",
+ reexport_specifier,
+ referrer,
+ ));
+ }
+ CjsAnalysis::Cjs(analysis) => {
+ if !analysis.reexports.is_empty() {
+ handle_reexports(
+ reexport_specifier.clone(),
+ analysis.reexports,
+ &mut analyze_futures,
+ errors,
+ );
+ }
+
+ all_exports.extend(
+ analysis
+ .exports
+ .into_iter()
+ .filter(|e| e.as_str() != "default"),
+ );
+ }
+ }
+ }
+ }
+
+ /// CJS-style resolution of `specifier` relative to `referrer`: relative
+ /// paths are extension-probed, bare specifiers go through package.json
+ /// `exports`/`main` and finally ancestor `node_modules` directories.
+ /// Returns `Ok(None)` when the package cannot be found.
+ // todo(dsherret): what is going on here? Isn't this a bunch of duplicate code?
+ fn resolve(
+ &self,
+ specifier: &str,
+ referrer: &Url,
+ conditions: &[&str],
+ mode: NodeResolutionMode,
+ ) -> Result<Option<Url>, AnyError> {
+ if specifier.starts_with('/') {
+ todo!();
+ }
+
+ let referrer_path = referrer.to_file_path().unwrap();
+ if specifier.starts_with("./") || specifier.starts_with("../") {
+ if let Some(parent) = referrer_path.parent() {
+ return Some(
+ self
+ .file_extension_probe(parent.join(specifier), &referrer_path)
+ .map(|p| to_file_specifier(&p)),
+ )
+ .transpose();
+ } else {
+ todo!();
+ }
+ }
+
+ // We've got a bare specifier or maybe bare_specifier/blah.js"
+ let (package_specifier, package_subpath) =
+ parse_specifier(specifier).unwrap();
+
+ let module_dir = match self
+ .npm_resolver
+ .resolve_package_folder_from_package(package_specifier.as_str(), referrer)
+ {
+ Err(err)
+ if matches!(
+ err.as_kind(),
+ crate::errors::PackageFolderResolveErrorKind::PackageNotFound(..)
+ ) =>
+ {
+ return Ok(None);
+ }
+ other => other,
+ }?;
+
+ let package_json_path = module_dir.join("package.json");
+ let maybe_package_json =
+ load_pkg_json(self.env.pkg_json_fs(), &package_json_path)?;
+ if let Some(package_json) = maybe_package_json {
+ if let Some(exports) = &package_json.exports {
+ return Some(
+ self
+ .node_resolver
+ .package_exports_resolve(
+ &package_json_path,
+ &package_subpath,
+ exports,
+ Some(referrer),
+ NodeModuleKind::Esm,
+ conditions,
+ mode,
+ )
+ .map_err(AnyError::from),
+ )
+ .transpose();
+ }
+
+ // old school
+ if package_subpath != "." {
+ let d = module_dir.join(package_subpath);
+ if self.env.is_dir_sync(&d) {
+ // subdir might have a package.json that specifies the entrypoint
+ let package_json_path = d.join("package.json");
+ let maybe_package_json =
+ load_pkg_json(self.env.pkg_json_fs(), &package_json_path)?;
+ if let Some(package_json) = maybe_package_json {
+ if let Some(main) = package_json.main(NodeModuleKind::Cjs) {
+ return Ok(Some(to_file_specifier(&d.join(main).clean())));
+ }
+ }
+
+ return Ok(Some(to_file_specifier(&d.join("index.js").clean())));
+ }
+ return Some(
+ self
+ .file_extension_probe(d, &referrer_path)
+ .map(|p| to_file_specifier(&p)),
+ )
+ .transpose();
+ } else if let Some(main) = package_json.main(NodeModuleKind::Cjs) {
+ return Ok(Some(to_file_specifier(&module_dir.join(main).clean())));
+ } else {
+ return Ok(Some(to_file_specifier(
+ &module_dir.join("index.js").clean(),
+ )));
+ }
+ }
+
+ // as a fallback, attempt to resolve it via the ancestor directories
+ let mut last = referrer_path.as_path();
+ while let Some(parent) = last.parent() {
+ if !self.npm_resolver.in_npm_package_at_dir_path(parent) {
+ break;
+ }
+ let path = if parent.ends_with("node_modules") {
+ parent.join(specifier)
+ } else {
+ parent.join("node_modules").join(specifier)
+ };
+ if let Ok(path) = self.file_extension_probe(path, &referrer_path) {
+ return Ok(Some(to_file_specifier(&path)));
+ }
+ last = parent;
+ }
+
+ Err(not_found(specifier, &referrer_path))
+ }
+
+ /// Probes for an existing file at `p`: tries `p` itself, `p.js`, a
+ /// directory's `index.js`, then `p.json`; errors with a module-not-found
+ /// error otherwise.
+ fn file_extension_probe(
+ &self,
+ p: PathBuf,
+ referrer: &Path,
+ ) -> Result<PathBuf, AnyError> {
+ let p = p.clean();
+ if self.env.exists_sync(&p) {
+ // a sibling `p.js` takes precedence over an existing extensionless
+ // `p` or directory
+ let file_name = p.file_name().unwrap();
+ let p_js =
+ p.with_file_name(format!("{}.js", file_name.to_str().unwrap()));
+ if self.env.is_file_sync(&p_js) {
+ return Ok(p_js);
+ } else if self.env.is_dir_sync(&p) {
+ return Ok(p.join("index.js"));
+ } else {
+ return Ok(p);
+ }
+ } else if let Some(file_name) = p.file_name() {
+ {
+ let p_js =
+ p.with_file_name(format!("{}.js", file_name.to_str().unwrap()));
+ if self.env.is_file_sync(&p_js) {
+ return Ok(p_js);
+ }
+ }
+ {
+ let p_json =
+ p.with_file_name(format!("{}.json", file_name.to_str().unwrap()));
+ if self.env.is_file_sync(&p_json) {
+ return Ok(p_json);
+ }
+ }
+ }
+ Err(not_found(&p.to_string_lossy(), referrer))
+ }
+}
+
+/// Names that cannot be emitted as plain `export const <name> = ...;`
+/// declarations by `add_export`. Includes JavaScript reserved and
+/// future-reserved words plus identifiers the generated wrapper itself
+/// uses (e.g. `mod`); such names are re-exported via a temp variable and
+/// a string export name instead.
+static RESERVED_WORDS: Lazy<HashSet<&str>> = Lazy::new(|| {
+ HashSet::from([
+ "abstract",
+ "arguments",
+ "async",
+ "await",
+ "boolean",
+ "break",
+ "byte",
+ "case",
+ "catch",
+ "char",
+ "class",
+ "const",
+ "continue",
+ "debugger",
+ "default",
+ "delete",
+ "do",
+ "double",
+ "else",
+ "enum",
+ "eval",
+ "export",
+ "extends",
+ "false",
+ "final",
+ "finally",
+ "float",
+ "for",
+ "function",
+ "get",
+ "goto",
+ "if",
+ "implements",
+ "import",
+ "in",
+ "instanceof",
+ "int",
+ "interface",
+ "let",
+ "long",
+ "mod",
+ "native",
+ "new",
+ "null",
+ "package",
+ "private",
+ "protected",
+ "public",
+ "return",
+ "set",
+ "short",
+ "static",
+ "super",
+ "switch",
+ "synchronized",
+ "this",
+ "throw",
+ "throws",
+ "transient",
+ "true",
+ "try",
+ "typeof",
+ "var",
+ "void",
+ "volatile",
+ "while",
+ "with",
+ "yield",
+ ])
+});
+
+/// Appends an ESM export statement binding `name` to `initializer` onto
+/// `source`. When `name` is a reserved word or not a valid identifier, it
+/// is assigned to a numbered temp variable and re-exported under a string
+/// export name; `temp_var_count` tracks the numbering across calls.
+fn add_export(
+ source: &mut Vec<String>,
+ name: &str,
+ initializer: &str,
+ temp_var_count: &mut usize,
+) {
+ fn is_valid_var_decl(name: &str) -> bool {
+ // it's ok to be super strict here
+ if name.is_empty() {
+ return false;
+ }
+
+ if let Some(first) = name.chars().next() {
+ if !first.is_ascii_alphabetic() && first != '_' && first != '$' {
+ return false;
+ }
+ }
+
+ name
+ .chars()
+ .all(|c| c.is_ascii_alphanumeric() || c == '_' || c == '$')
+ }
+
+ // TODO(bartlomieju): Node actually checks if a given export exists in `exports` object,
+ // but it might not be necessary here since our analysis is more detailed?
+ if RESERVED_WORDS.contains(name) || !is_valid_var_decl(name) {
+ *temp_var_count += 1;
+ // we can't create an identifier with a reserved word or invalid identifier name,
+ // so assign it to a temporary variable that won't have a conflict, then re-export
+ // it as a string
+ source.push(format!(
+ "const __deno_export_{temp_var_count}__ = {initializer};"
+ ));
+ source.push(format!(
+ "export {{ __deno_export_{temp_var_count}__ as \"{}\" }};",
+ escape_for_double_quote_string(name)
+ ));
+ } else {
+ source.push(format!("export const {name} = {initializer};"));
+ }
+}
+
+/// Splits a bare specifier into `(package_name, package_subpath)`, where
+/// the subpath is `"."` or `"./<rest>"` (ex. `@scope/pkg/sub` ->
+/// (`@scope/pkg`, `./sub`)). Returns `None` for invalid package names
+/// (empty, scope without a name, or containing `%`/`\`).
+fn parse_specifier(specifier: &str) -> Option<(String, String)> {
+ let mut separator_index = specifier.find('/');
+ let mut valid_package_name = true;
+ // let mut is_scoped = false;
+ if specifier.is_empty() {
+ valid_package_name = false;
+ } else if specifier.starts_with('@') {
+ // is_scoped = true;
+ // a scoped package name spans two path segments, so look for the
+ // separator after the scope
+ if let Some(index) = separator_index {
+ separator_index = specifier[index + 1..].find('/').map(|i| i + index + 1);
+ } else {
+ valid_package_name = false;
+ }
+ }
+
+ let package_name = if let Some(index) = separator_index {
+ specifier[0..index].to_string()
+ } else {
+ specifier.to_string()
+ };
+
+ // Package name cannot have leading . and cannot have percent-encoding or separators.
+ for ch in package_name.chars() {
+ if ch == '%' || ch == '\\' {
+ valid_package_name = false;
+ break;
+ }
+ }
+
+ if !valid_package_name {
+ return None;
+ }
+
+ let package_subpath = if let Some(index) = separator_index {
+ format!(".{}", specifier.chars().skip(index).collect::<String>())
+ } else {
+ ".".to_string()
+ };
+
+ Some((package_name, package_subpath))
+}
+
+/// Builds a Node-style `ERR_MODULE_NOT_FOUND` error for `path` as
+/// imported from `referrer`, wrapped in an `std::io::Error` of kind
+/// `NotFound`.
+fn not_found(path: &str, referrer: &Path) -> AnyError {
+ let msg = format!(
+ "[ERR_MODULE_NOT_FOUND] Cannot find module \"{}\" imported from \"{}\"",
+ path,
+ referrer.to_string_lossy()
+ );
+ std::io::Error::new(std::io::ErrorKind::NotFound, msg).into()
+}
+
+/// Backslash-escapes `"` and `\` so `text` can be embedded in a
+/// double-quoted JS string literal; borrows when no escaping is needed.
+fn escape_for_double_quote_string(text: &str) -> Cow<str> {
+ // this should be rare, so doing a scan first before allocating is ok
+ if text.chars().any(|c| matches!(c, '"' | '\\')) {
+ // don't bother making this more complex for perf because it's rare
+ Cow::Owned(text.replace('\\', "\\\\").replace('"', "\\\""))
+ } else {
+ Cow::Borrowed(text)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ // "static" is a reserved word and "dashed-export"/"3d" are invalid
+ // identifiers, so those three go through numbered temp variables.
+ #[test]
+ fn test_add_export() {
+ let mut temp_var_count = 0;
+ let mut source = vec![];
+
+ let exports = vec!["static", "server", "app", "dashed-export", "3d"];
+ for export in exports {
+ add_export(&mut source, export, "init", &mut temp_var_count);
+ }
+ assert_eq!(
+ source,
+ vec![
+ "const __deno_export_1__ = init;".to_string(),
+ "export { __deno_export_1__ as \"static\" };".to_string(),
+ "export const server = init;".to_string(),
+ "export const app = init;".to_string(),
+ "const __deno_export_2__ = init;".to_string(),
+ "export { __deno_export_2__ as \"dashed-export\" };".to_string(),
+ "const __deno_export_3__ = init;".to_string(),
+ "export { __deno_export_3__ as \"3d\" };".to_string(),
+ ]
+ )
+ }
+
+ #[test]
+ fn test_parse_specifier() {
+ assert_eq!(
+ parse_specifier("@some-package/core/actions"),
+ Some(("@some-package/core".to_string(), "./actions".to_string()))
+ );
+ }
+}
diff --git a/resolvers/node/clippy.toml b/resolvers/node/clippy.toml
new file mode 100644
index 000000000..86150781b
--- /dev/null
+++ b/resolvers/node/clippy.toml
@@ -0,0 +1,48 @@
+disallowed-methods = [ # force all fs access through the NodeResolverFs trait so the crate stays host-agnostic
+  { path = "std::env::current_dir", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::path::Path::canonicalize", reason = "File system operations should be done using NodeResolverFs trait" }, # NOTE(review): duplicated near the end of this list
+  { path = "std::path::Path::is_dir", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::path::Path::is_file", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::path::Path::is_symlink", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::path::Path::metadata", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::path::Path::read_dir", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::path::Path::read_link", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::path::Path::symlink_metadata", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::path::Path::try_exists", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::path::PathBuf::exists", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::path::PathBuf::canonicalize", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::path::PathBuf::is_dir", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::path::PathBuf::is_file", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::path::PathBuf::is_symlink", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::path::PathBuf::metadata", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::path::PathBuf::read_dir", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::path::PathBuf::read_link", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::path::PathBuf::symlink_metadata", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::path::PathBuf::try_exists", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::env::set_current_dir", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::env::temp_dir", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::fs::canonicalize", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::fs::copy", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::fs::create_dir_all", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::fs::create_dir", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::fs::DirBuilder::new", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::fs::hard_link", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::fs::metadata", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::fs::OpenOptions::new", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::fs::read_dir", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::fs::read_link", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::fs::read_to_string", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::fs::read", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::fs::remove_dir_all", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::fs::remove_dir", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::fs::remove_file", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::fs::rename", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::fs::set_permissions", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::fs::symlink_metadata", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::fs::write", reason = "File system operations should be done using NodeResolverFs trait" },
+  { path = "std::path::Path::canonicalize", reason = "File system operations should be done using NodeResolverFs trait" }, # NOTE(review): duplicate of the Path::canonicalize entry near the top — safe to drop one
+  { path = "std::path::Path::exists", reason = "File system operations should be done using NodeResolverFs trait" },
+]
+disallowed-types = [ # MaybeArc switches Arc/Rc by feature, so plain Arc is banned
+  { path = "std::sync::Arc", reason = "use crate::sync::MaybeArc instead" },
+]
diff --git a/resolvers/node/env.rs b/resolvers/node/env.rs
new file mode 100644
index 000000000..b520ece0f
--- /dev/null
+++ b/resolvers/node/env.rs
@@ -0,0 +1,39 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use std::path::Path;
+use std::path::PathBuf;
+
+use crate::sync::MaybeSend;
+use crate::sync::MaybeSync;
+
+pub struct NodeResolverFsStat { // file-kind flags returned by NodeResolverEnv::stat_sync
+  pub is_file: bool,
+  pub is_dir: bool,
+  pub is_symlink: bool, // true when the path itself is a symlink (pre-resolution)
+}
+
+pub trait NodeResolverEnv: std::fmt::Debug + MaybeSend + MaybeSync { // host abstraction: all fs/env access the resolver needs
+  fn is_builtin_node_module(&self, specifier: &str) -> bool; // whether the bare specifier names a built-in Node module
+
+  fn realpath_sync(&self, path: &Path) -> std::io::Result<PathBuf>; // canonicalize, following symlinks
+
+  fn stat_sync(&self, path: &Path) -> std::io::Result<NodeResolverFsStat>;
+
+  fn exists_sync(&self, path: &Path) -> bool;
+
+  fn is_file_sync(&self, path: &Path) -> bool { // default impl: derive from stat; any io error reads as "not a file"
+    self
+      .stat_sync(path)
+      .map(|stat| stat.is_file)
+      .unwrap_or(false)
+  }
+
+  fn is_dir_sync(&self, path: &Path) -> bool { // default impl: same error-to-false mapping as is_file_sync
+    self
+      .stat_sync(path)
+      .map(|stat| stat.is_dir)
+      .unwrap_or(false)
+  }
+
+  fn pkg_json_fs(&self) -> &dyn deno_package_json::fs::DenoPkgJsonFs; // fs handle used by deno_package_json loading
+}
diff --git a/resolvers/node/errors.rs b/resolvers/node/errors.rs
new file mode 100644
index 000000000..4ba829eda
--- /dev/null
+++ b/resolvers/node/errors.rs
@@ -0,0 +1,769 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use std::borrow::Cow;
+use std::fmt::Write;
+use std::path::PathBuf;
+
+use thiserror::Error;
+use url::Url;
+
+use crate::NodeModuleKind;
+use crate::NodeResolutionMode;
+
+macro_rules! kinded_err { // generates a newtype error wrapping a boxed "kind" enum
+  ($name:ident, $kind_name:ident) => {
+    #[derive(Error, Debug)]
+    #[error(transparent)]
+    pub struct $name(pub Box<$kind_name>); // Box keeps the error a single pointer wide in Result<T, E>
+
+    impl $name {
+      pub fn as_kind(&self) -> &$kind_name { // borrow the kind for matching
+        &self.0
+      }
+
+      pub fn into_kind(self) -> $kind_name { // unbox, consuming the wrapper
+        *self.0
+      }
+    }
+
+    impl<E> From<E> for $name
+    where
+      $kind_name: From<E>,
+    {
+      fn from(err: E) -> Self { // anything convertible to the kind converts to the wrapper (enables `?`)
+        $name(Box::new($kind_name::from(err)))
+      }
+    }
+  };
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+#[allow(non_camel_case_types)] // names intentionally mirror Node.js error-code constants
+pub enum NodeJsErrorCode {
+  ERR_INVALID_MODULE_SPECIFIER,
+  ERR_INVALID_PACKAGE_CONFIG,
+  ERR_INVALID_PACKAGE_TARGET,
+  ERR_MODULE_NOT_FOUND,
+  ERR_PACKAGE_IMPORT_NOT_DEFINED,
+  ERR_PACKAGE_PATH_NOT_EXPORTED,
+  ERR_UNKNOWN_FILE_EXTENSION,
+  ERR_UNSUPPORTED_DIR_IMPORT,
+  ERR_UNSUPPORTED_ESM_URL_SCHEME,
+  /// Deno specific since Node doesn't support TypeScript.
+  ERR_TYPES_NOT_FOUND,
+}
+
+impl std::fmt::Display for NodeJsErrorCode {
+  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+    write!(f, "{}", self.as_str()) // Display is just the static code name
+  }
+}
+
+impl NodeJsErrorCode {
+  pub fn as_str(&self) -> &'static str { // stable string form matching Node's error-code names
+    use NodeJsErrorCode::*;
+    match self {
+      ERR_INVALID_MODULE_SPECIFIER => "ERR_INVALID_MODULE_SPECIFIER",
+      ERR_INVALID_PACKAGE_CONFIG => "ERR_INVALID_PACKAGE_CONFIG",
+      ERR_INVALID_PACKAGE_TARGET => "ERR_INVALID_PACKAGE_TARGET",
+      ERR_MODULE_NOT_FOUND => "ERR_MODULE_NOT_FOUND",
+      ERR_PACKAGE_IMPORT_NOT_DEFINED => "ERR_PACKAGE_IMPORT_NOT_DEFINED",
+      ERR_PACKAGE_PATH_NOT_EXPORTED => "ERR_PACKAGE_PATH_NOT_EXPORTED",
+      ERR_UNKNOWN_FILE_EXTENSION => "ERR_UNKNOWN_FILE_EXTENSION",
+      ERR_UNSUPPORTED_DIR_IMPORT => "ERR_UNSUPPORTED_DIR_IMPORT",
+      ERR_UNSUPPORTED_ESM_URL_SCHEME => "ERR_UNSUPPORTED_ESM_URL_SCHEME",
+      ERR_TYPES_NOT_FOUND => "ERR_TYPES_NOT_FOUND",
+    }
+  }
+}
+
+pub trait NodeJsErrorCoded { // every resolver error maps to exactly one Node error code
+  fn code(&self) -> NodeJsErrorCode;
+}
+
+kinded_err!( // boxed-kind wrapper; see kinded_err above
+  ResolvePkgSubpathFromDenoModuleError,
+  ResolvePkgSubpathFromDenoModuleErrorKind
+);
+
+impl NodeJsErrorCoded for ResolvePkgSubpathFromDenoModuleError {
+  fn code(&self) -> NodeJsErrorCode { // delegate to whichever inner error occurred
+    use ResolvePkgSubpathFromDenoModuleErrorKind::*;
+    match self.as_kind() {
+      PackageSubpathResolve(e) => e.code(),
+      UrlToNodeResolution(e) => e.code(),
+    }
+  }
+}
+
+#[derive(Debug, Error)]
+pub enum ResolvePkgSubpathFromDenoModuleErrorKind {
+  #[error(transparent)]
+  PackageSubpathResolve(#[from] PackageSubpathResolveError),
+  #[error(transparent)]
+  UrlToNodeResolution(#[from] UrlToNodeResolutionError),
+}
+
+// todo(https://github.com/denoland/deno_core/issues/810): make this a TypeError
+#[derive(Debug, Clone, Error)]
+#[error(
+  "[{}] Invalid module '{}' {}{}",
+  self.code(),
+  request,
+  reason,
+  maybe_referrer.as_ref().map(|referrer| format!(" imported from '{}'", referrer)).unwrap_or_default()
+)]
+pub struct InvalidModuleSpecifierError { // specifier was syntactically unusable (empty, bad escape, etc.)
+  pub request: String, // the offending specifier as written
+  pub reason: Cow<'static, str>, // human-readable explanation spliced into the message
+  pub maybe_referrer: Option<String>,
+}
+
+impl NodeJsErrorCoded for InvalidModuleSpecifierError {
+  fn code(&self) -> NodeJsErrorCode {
+    NodeJsErrorCode::ERR_INVALID_MODULE_SPECIFIER
+  }
+}
+
+kinded_err!(LegacyResolveError, LegacyResolveErrorKind); // errors from the legacy (pre-"exports") main-field resolution
+
+#[derive(Debug, Error)]
+pub enum LegacyResolveErrorKind {
+  #[error(transparent)]
+  TypesNotFound(#[from] TypesNotFoundError),
+  #[error(transparent)]
+  ModuleNotFound(#[from] ModuleNotFoundError),
+}
+
+impl NodeJsErrorCoded for LegacyResolveError {
+  fn code(&self) -> NodeJsErrorCode { // delegate to inner error
+    match self.as_kind() {
+      LegacyResolveErrorKind::TypesNotFound(e) => e.code(),
+      LegacyResolveErrorKind::ModuleNotFound(e) => e.code(),
+    }
+  }
+}
+
+kinded_err!(PackageFolderResolveError, PackageFolderResolveErrorKind); // errors locating a package's folder on disk
+
+#[derive(Debug, Error)]
+#[error(
+  "Could not find package '{}' from referrer '{}'{}.",
+  package_name,
+  referrer,
+  referrer_extra.as_ref().map(|r| format!(" ({})", r)).unwrap_or_default()
+)]
+pub struct PackageNotFoundError { // the requested package doesn't exist relative to the referrer
+  pub package_name: String,
+  pub referrer: Url,
+  /// Extra information about the referrer.
+  pub referrer_extra: Option<String>,
+}
+
+impl NodeJsErrorCoded for PackageNotFoundError {
+  fn code(&self) -> NodeJsErrorCode {
+    NodeJsErrorCode::ERR_MODULE_NOT_FOUND
+  }
+}
+
+#[derive(Debug, Error)]
+#[error(
+  "Could not find referrer npm package '{}'{}.",
+  referrer,
+  referrer_extra.as_ref().map(|r| format!(" ({})", r)).unwrap_or_default()
+)]
+pub struct ReferrerNotFoundError { // the *importing* module's package couldn't be located
+  pub referrer: Url,
+  /// Extra information about the referrer.
+  pub referrer_extra: Option<String>,
+}
+
+impl NodeJsErrorCoded for ReferrerNotFoundError {
+  fn code(&self) -> NodeJsErrorCode {
+    NodeJsErrorCode::ERR_MODULE_NOT_FOUND
+  }
+}
+
+#[derive(Debug, Error)]
+#[error("Failed resolving '{package_name}' from referrer '{referrer}'.")]
+pub struct PackageFolderResolveIoError { // io failure during the folder lookup, with source preserved
+  pub package_name: String,
+  pub referrer: Url,
+  #[source]
+  pub source: std::io::Error,
+}
+
+impl NodeJsErrorCoded for PackageFolderResolveIoError {
+  fn code(&self) -> NodeJsErrorCode { // io failures also surface as module-not-found to match Node
+    NodeJsErrorCode::ERR_MODULE_NOT_FOUND
+  }
+}
+
+impl NodeJsErrorCoded for PackageFolderResolveError {
+  fn code(&self) -> NodeJsErrorCode { // delegate to inner error (all variants map to ERR_MODULE_NOT_FOUND today)
+    match self.as_kind() {
+      PackageFolderResolveErrorKind::PackageNotFound(e) => e.code(),
+      PackageFolderResolveErrorKind::ReferrerNotFound(e) => e.code(),
+      PackageFolderResolveErrorKind::Io(e) => e.code(),
+    }
+  }
+}
+
+#[derive(Debug, Error)]
+pub enum PackageFolderResolveErrorKind {
+  #[error(transparent)]
+  PackageNotFound(#[from] PackageNotFoundError),
+  #[error(transparent)]
+  ReferrerNotFound(#[from] ReferrerNotFoundError),
+  #[error(transparent)]
+  Io(#[from] PackageFolderResolveIoError),
+}
+
+kinded_err!(PackageSubpathResolveError, PackageSubpathResolveErrorKind); // errors resolving a subpath within a located package
+
+impl NodeJsErrorCoded for PackageSubpathResolveError {
+  fn code(&self) -> NodeJsErrorCode { // delegate to inner error
+    match self.as_kind() {
+      PackageSubpathResolveErrorKind::PkgJsonLoad(e) => e.code(),
+      PackageSubpathResolveErrorKind::Exports(e) => e.code(),
+      PackageSubpathResolveErrorKind::LegacyResolve(e) => e.code(),
+    }
+  }
+}
+
+#[derive(Debug, Error)]
+pub enum PackageSubpathResolveErrorKind {
+  #[error(transparent)]
+  PkgJsonLoad(#[from] PackageJsonLoadError),
+  #[error(transparent)]
+  Exports(PackageExportsResolveError), // no #[from]: construction is explicit at the call site
+  #[error(transparent)]
+  LegacyResolve(LegacyResolveError), // no #[from]: construction is explicit at the call site
+}
+
+#[derive(Debug, Error)]
+#[error(
+  "Target '{}' not found from '{}'{}{}.",
+  target,
+  pkg_json_path.display(),
+  maybe_referrer.as_ref().map(|r|
+    format!(
+      " from{} referrer {}",
+      match referrer_kind {
+        NodeModuleKind::Esm => "",
+        NodeModuleKind::Cjs => " cjs",
+      },
+      r
+    )
+  ).unwrap_or_default(),
+  match mode {
+    NodeResolutionMode::Execution => "",
+    NodeResolutionMode::Types => " for types",
+  }
+)]
+pub struct PackageTargetNotFoundError { // an "exports"/"imports" target string resolved to a non-existent file
+  pub pkg_json_path: PathBuf, // package.json declaring the target
+  pub target: String, // the target value from the exports/imports map
+  pub maybe_referrer: Option<Url>,
+  pub referrer_kind: NodeModuleKind, // esm vs cjs referrer, reflected in the message
+  pub mode: NodeResolutionMode, // execution vs types resolution, reflected in the message
+}
+
+impl NodeJsErrorCoded for PackageTargetNotFoundError {
+  fn code(&self) -> NodeJsErrorCode {
+    NodeJsErrorCode::ERR_MODULE_NOT_FOUND
+  }
+}
+
+kinded_err!(PackageTargetResolveError, PackageTargetResolveErrorKind); // errors evaluating a single exports/imports target
+
+impl NodeJsErrorCoded for PackageTargetResolveError {
+  fn code(&self) -> NodeJsErrorCode { // delegate to inner error
+    match self.as_kind() {
+      PackageTargetResolveErrorKind::NotFound(e) => e.code(),
+      PackageTargetResolveErrorKind::InvalidPackageTarget(e) => e.code(),
+      PackageTargetResolveErrorKind::InvalidModuleSpecifier(e) => e.code(),
+      PackageTargetResolveErrorKind::PackageResolve(e) => e.code(),
+      PackageTargetResolveErrorKind::TypesNotFound(e) => e.code(),
+    }
+  }
+}
+
+#[derive(Debug, Error)]
+pub enum PackageTargetResolveErrorKind {
+  #[error(transparent)]
+  NotFound(#[from] PackageTargetNotFoundError),
+  #[error(transparent)]
+  InvalidPackageTarget(#[from] InvalidPackageTargetError),
+  #[error(transparent)]
+  InvalidModuleSpecifier(#[from] InvalidModuleSpecifierError),
+  #[error(transparent)]
+  PackageResolve(#[from] PackageResolveError), // targets may themselves be bare specifiers, recursing into package resolution
+  #[error(transparent)]
+  TypesNotFound(#[from] TypesNotFoundError),
+}
+
+kinded_err!(PackageExportsResolveError, PackageExportsResolveErrorKind); // errors walking a package.json "exports" map
+
+impl NodeJsErrorCoded for PackageExportsResolveError {
+  fn code(&self) -> NodeJsErrorCode { // delegate to inner error
+    match self.as_kind() {
+      PackageExportsResolveErrorKind::PackagePathNotExported(e) => e.code(),
+      PackageExportsResolveErrorKind::PackageTargetResolve(e) => e.code(),
+    }
+  }
+}
+
+#[derive(Debug, Error)]
+pub enum PackageExportsResolveErrorKind {
+  #[error(transparent)]
+  PackagePathNotExported(#[from] PackagePathNotExportedError),
+  #[error(transparent)]
+  PackageTargetResolve(#[from] PackageTargetResolveError),
+}
+
+#[derive(Debug, Error)]
+#[error(
+  "[{}] Could not find types for '{}'{}",
+  self.code(),
+  self.0.code_specifier,
+  self.0.maybe_referrer.as_ref().map(|r| format!(" imported from '{}'", r)).unwrap_or_default(),
+  )]
+pub struct TypesNotFoundError(pub Box<TypesNotFoundErrorData>); // boxed payload keeps the error small (Deno-specific code)
+
+#[derive(Debug)]
+pub struct TypesNotFoundErrorData {
+  pub code_specifier: Url, // the runtime-code module whose type declarations were missing
+  pub maybe_referrer: Option<Url>,
+}
+
+impl NodeJsErrorCoded for TypesNotFoundError {
+  fn code(&self) -> NodeJsErrorCode {
+    NodeJsErrorCode::ERR_TYPES_NOT_FOUND
+  }
+}
+
+#[derive(Debug, Error)]
+#[error(
+  "[{}] Invalid package config. {}",
+  self.code(),
+  self.0
+)]
+pub struct PackageJsonLoadError( // wraps deno_package_json's load error, adding the Node error code prefix
+  #[source]
+  #[from]
+  pub deno_package_json::PackageJsonLoadError,
+);
+
+impl NodeJsErrorCoded for PackageJsonLoadError {
+  fn code(&self) -> NodeJsErrorCode {
+    NodeJsErrorCode::ERR_INVALID_PACKAGE_CONFIG
+  }
+}
+
+kinded_err!(ClosestPkgJsonError, ClosestPkgJsonErrorKind); // errors while walking up directories to find a package.json
+
+impl NodeJsErrorCoded for ClosestPkgJsonError {
+  fn code(&self) -> NodeJsErrorCode { // delegate to inner error
+    match self.as_kind() {
+      ClosestPkgJsonErrorKind::CanonicalizingDir(e) => e.code(),
+      ClosestPkgJsonErrorKind::Load(e) => e.code(),
+    }
+  }
+}
+
+#[derive(Debug, Error)]
+pub enum ClosestPkgJsonErrorKind {
+  #[error(transparent)]
+  CanonicalizingDir(#[from] CanonicalizingPkgJsonDirError),
+  #[error(transparent)]
+  Load(#[from] PackageJsonLoadError),
+}
+
+#[derive(Debug, Error)]
+#[error("[{}] Failed canonicalizing package.json directory '{}'.", self.code(), dir_path.display())]
+pub struct CanonicalizingPkgJsonDirError { // realpath of the candidate directory failed
+  pub dir_path: PathBuf,
+  #[source]
+  pub source: std::io::Error,
+}
+
+impl NodeJsErrorCoded for CanonicalizingPkgJsonDirError {
+  fn code(&self) -> NodeJsErrorCode {
+    NodeJsErrorCode::ERR_MODULE_NOT_FOUND
+  }
+}
+
+#[derive(Debug, Error)]
+#[error("TypeScript files are not supported in npm packages: {specifier}")]
+pub struct TypeScriptNotSupportedInNpmError { // npm packages must ship JS; .ts inside node_modules is rejected
+  pub specifier: Url,
+}
+
+impl NodeJsErrorCoded for TypeScriptNotSupportedInNpmError {
+  fn code(&self) -> NodeJsErrorCode {
+    NodeJsErrorCode::ERR_UNKNOWN_FILE_EXTENSION
+  }
+}
+
+kinded_err!(UrlToNodeResolutionError, UrlToNodeResolutionErrorKind); // errors classifying a resolved URL as esm/cjs/etc.
+
+impl NodeJsErrorCoded for UrlToNodeResolutionError {
+  fn code(&self) -> NodeJsErrorCode { // delegate to inner error
+    match self.as_kind() {
+      UrlToNodeResolutionErrorKind::TypeScriptNotSupported(e) => e.code(),
+      UrlToNodeResolutionErrorKind::ClosestPkgJson(e) => e.code(),
+    }
+  }
+}
+
+#[derive(Debug, Error)]
+pub enum UrlToNodeResolutionErrorKind {
+  #[error(transparent)]
+  TypeScriptNotSupported(#[from] TypeScriptNotSupportedInNpmError),
+  #[error(transparent)]
+  ClosestPkgJson(#[from] ClosestPkgJsonError),
+}
+
+// todo(https://github.com/denoland/deno_core/issues/810): make this a TypeError
+#[derive(Debug, Error)]
+#[error(
+  "[{}] Package import specifier \"{}\" is not defined{}{}",
+  self.code(),
+  name,
+  package_json_path.as_ref().map(|p| format!(" in package {}", p.display())).unwrap_or_default(),
+  maybe_referrer.as_ref().map(|r| format!(" imported from '{}'", r)).unwrap_or_default(),
+)]
+pub struct PackageImportNotDefinedError { // a "#import" specifier had no matching entry in package.json "imports"
+  pub name: String, // the "#"-prefixed import name as written
+  pub package_json_path: Option<PathBuf>, // None when no package.json was found for the referrer
+  pub maybe_referrer: Option<Url>,
+}
+
+impl NodeJsErrorCoded for PackageImportNotDefinedError {
+  fn code(&self) -> NodeJsErrorCode {
+    NodeJsErrorCode::ERR_PACKAGE_IMPORT_NOT_DEFINED
+  }
+}
+
+kinded_err!(PackageImportsResolveError, PackageImportsResolveErrorKind); // errors resolving "#"-prefixed imports
+
+#[derive(Debug, Error)]
+pub enum PackageImportsResolveErrorKind {
+  #[error(transparent)]
+  ClosestPkgJson(ClosestPkgJsonError), // no #[from]: construction is explicit at the call site
+  #[error(transparent)]
+  InvalidModuleSpecifier(#[from] InvalidModuleSpecifierError),
+  #[error(transparent)]
+  NotDefined(#[from] PackageImportNotDefinedError),
+  #[error(transparent)]
+  Target(#[from] PackageTargetResolveError),
+}
+
+impl NodeJsErrorCoded for PackageImportsResolveErrorKind {
+  fn code(&self) -> NodeJsErrorCode { // note: implemented on the Kind (not the wrapper), unlike siblings
+    match self {
+      Self::ClosestPkgJson(e) => e.code(),
+      Self::InvalidModuleSpecifier(e) => e.code(),
+      Self::NotDefined(e) => e.code(),
+      Self::Target(e) => e.code(),
+    }
+  }
+}
+
+kinded_err!(PackageResolveError, PackageResolveErrorKind); // errors resolving a bare package specifier end-to-end
+
+impl NodeJsErrorCoded for PackageResolveError {
+  fn code(&self) -> NodeJsErrorCode { // delegate to inner error
+    match self.as_kind() {
+      PackageResolveErrorKind::ClosestPkgJson(e) => e.code(),
+      PackageResolveErrorKind::InvalidModuleSpecifier(e) => e.code(),
+      PackageResolveErrorKind::PackageFolderResolve(e) => e.code(),
+      PackageResolveErrorKind::ExportsResolve(e) => e.code(),
+      PackageResolveErrorKind::SubpathResolve(e) => e.code(),
+    }
+  }
+}
+
+#[derive(Debug, Error)]
+pub enum PackageResolveErrorKind {
+  #[error(transparent)]
+  ClosestPkgJson(#[from] ClosestPkgJsonError),
+  #[error(transparent)]
+  InvalidModuleSpecifier(#[from] InvalidModuleSpecifierError),
+  #[error(transparent)]
+  PackageFolderResolve(#[from] PackageFolderResolveError),
+  #[error(transparent)]
+  ExportsResolve(#[from] PackageExportsResolveError),
+  #[error(transparent)]
+  SubpathResolve(#[from] PackageSubpathResolveError),
+}
+
+#[derive(Debug, Error)]
+#[error("Failed joining '{path}' from '{base}'.")]
+pub struct NodeResolveRelativeJoinError { // Url::join of a relative specifier against its referrer failed
+  pub path: String,
+  pub base: Url,
+  #[source]
+  pub source: url::ParseError,
+}
+
+#[derive(Debug, Error)]
+#[error("Failed resolving specifier from data url referrer.")]
+pub struct DataUrlReferrerError { // relative resolution against a data: URL referrer failed
+  #[source]
+  pub source: url::ParseError,
+}
+
+kinded_err!(NodeResolveError, NodeResolveErrorKind); // top-level error for the whole node resolution entry point
+
+#[derive(Debug, Error)]
+pub enum NodeResolveErrorKind {
+  #[error(transparent)]
+  RelativeJoin(#[from] NodeResolveRelativeJoinError),
+  #[error(transparent)]
+  PackageImportsResolve(#[from] PackageImportsResolveError),
+  #[error(transparent)]
+  UnsupportedEsmUrlScheme(#[from] UnsupportedEsmUrlSchemeError),
+  #[error(transparent)]
+  DataUrlReferrer(#[from] DataUrlReferrerError),
+  #[error(transparent)]
+  PackageResolve(#[from] PackageResolveError),
+  #[error(transparent)]
+  TypesNotFound(#[from] TypesNotFoundError),
+  #[error(transparent)]
+  FinalizeResolution(#[from] FinalizeResolutionError),
+  #[error(transparent)]
+  UrlToNodeResolution(#[from] UrlToNodeResolutionError),
+}
+
+kinded_err!(FinalizeResolutionError, FinalizeResolutionErrorKind); // errors from the final file-existence/kind checks
+
+#[derive(Debug, Error)]
+pub enum FinalizeResolutionErrorKind {
+  #[error(transparent)]
+  InvalidModuleSpecifierError(#[from] InvalidModuleSpecifierError),
+  #[error(transparent)]
+  ModuleNotFound(#[from] ModuleNotFoundError),
+  #[error(transparent)]
+  UnsupportedDirImport(#[from] UnsupportedDirImportError),
+}
+
+impl NodeJsErrorCoded for FinalizeResolutionError {
+  fn code(&self) -> NodeJsErrorCode { // delegate to inner error
+    match self.as_kind() {
+      FinalizeResolutionErrorKind::InvalidModuleSpecifierError(e) => e.code(),
+      FinalizeResolutionErrorKind::ModuleNotFound(e) => e.code(),
+      FinalizeResolutionErrorKind::UnsupportedDirImport(e) => e.code(),
+    }
+  }
+}
+
+#[derive(Debug, Error)]
+#[error(
+  "[{}] Cannot find {} '{}'{}",
+  self.code(),
+  typ,
+  specifier,
+  maybe_referrer.as_ref().map(|referrer| format!(" imported from '{}'", referrer)).unwrap_or_default()
+)]
+pub struct ModuleNotFoundError { // resolved URL pointed at nothing on disk
+  pub specifier: Url,
+  pub maybe_referrer: Option<Url>,
+  pub typ: &'static str, // what was being looked for, spliced into the message (e.g. "module")
+}
+
+impl NodeJsErrorCoded for ModuleNotFoundError {
+  fn code(&self) -> NodeJsErrorCode {
+    NodeJsErrorCode::ERR_MODULE_NOT_FOUND
+  }
+}
+
+#[derive(Debug, Error)]
+#[error(
+  "[{}] Directory import '{}' is not supported resolving ES modules{}",
+  self.code(),
+  dir_url,
+  maybe_referrer.as_ref().map(|referrer| format!(" imported from '{}'", referrer)).unwrap_or_default(),
+)]
+pub struct UnsupportedDirImportError { // ESM forbids importing a directory (no index.js fallback like CJS)
+  pub dir_url: Url,
+  pub maybe_referrer: Option<Url>,
+}
+
+impl NodeJsErrorCoded for UnsupportedDirImportError {
+  fn code(&self) -> NodeJsErrorCode {
+    NodeJsErrorCode::ERR_UNSUPPORTED_DIR_IMPORT
+  }
+}
+
+#[derive(Debug)]
+pub struct InvalidPackageTargetError { // an exports/imports target value was malformed (hand-written Display below)
+  pub pkg_json_path: PathBuf,
+  pub sub_path: String, // the key being resolved; "." means the package main
+  pub target: String, // the offending target value
+  pub is_import: bool, // true for "imports" maps, false for "exports"
+  pub maybe_referrer: Option<Url>,
+}
+
+impl std::error::Error for InvalidPackageTargetError {}
+
+impl std::fmt::Display for InvalidPackageTargetError {
+  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+    // exports targets must be "./"-relative; track whether that's the specific violation
+    let rel_error = !self.is_import
+      && !self.target.is_empty()
+      && !self.target.starts_with("./");
+    f.write_char('[')?;
+    f.write_str(self.code().as_str())?;
+    f.write_char(']')?;
+
+    if self.sub_path == "." {
+      assert!(!self.is_import); // "imports" keys always start with '#', so "." implies exports
+      write!(
+        f,
+        " Invalid \"exports\" main target {} defined in the package config {}",
+        self.target,
+        self.pkg_json_path.display()
+      )?;
+    } else {
+      let ie = if self.is_import { "imports" } else { "exports" };
+      write!(
+        f,
+        " Invalid \"{}\" target {} defined for '{}' in the package config {}",
+        ie,
+        self.target,
+        self.sub_path,
+        self.pkg_json_path.display()
+      )?;
+    };
+
+    if let Some(referrer) = &self.maybe_referrer {
+      write!(f, " imported from '{}'", referrer)?;
+    }
+    if rel_error {
+      write!(f, "; target must start with \"./\"")?;
+    }
+    Ok(())
+  }
+}
+
+impl NodeJsErrorCoded for InvalidPackageTargetError {
+  fn code(&self) -> NodeJsErrorCode {
+    NodeJsErrorCode::ERR_INVALID_PACKAGE_TARGET
+  }
+}
+
+#[derive(Debug)]
+pub struct PackagePathNotExportedError { // requested subpath has no entry in the package's "exports" map
+  pub pkg_json_path: PathBuf,
+  pub subpath: String, // "." means the package main
+  pub maybe_referrer: Option<Url>,
+  pub mode: NodeResolutionMode, // execution vs types, reflected in the message (see test below)
+}
+
+impl NodeJsErrorCoded for PackagePathNotExportedError {
+  fn code(&self) -> NodeJsErrorCode {
+    NodeJsErrorCode::ERR_PACKAGE_PATH_NOT_EXPORTED
+  }
+}
+
+impl std::error::Error for PackagePathNotExportedError {}
+
+impl std::fmt::Display for PackagePathNotExportedError {
+  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+    f.write_char('[')?;
+    f.write_str(self.code().as_str())?;
+    f.write_char(']')?;
+
+    let types_msg = match self.mode {
+      NodeResolutionMode::Execution => String::new(),
+      NodeResolutionMode::Types => " for types".to_string(),
+    };
+    if self.subpath == "." {
+      write!(
+        f,
+        " No \"exports\" main defined{} in '{}'",
+        types_msg,
+        self.pkg_json_path.display()
+      )?;
+    } else {
+      write!(
+        f,
+        " Package subpath '{}' is not defined{} by \"exports\" in '{}'",
+        self.subpath,
+        types_msg,
+        self.pkg_json_path.display()
+      )?;
+    };
+
+    if let Some(referrer) = &self.maybe_referrer {
+      write!(f, " imported from '{}'", referrer)?;
+    }
+    Ok(())
+  }
+}
+
+#[derive(Debug, Clone, Error)]
+#[error(
+  "[{}] Only file and data URLs are supported by the default ESM loader.{} Received protocol '{}'",
+  self.code(),
+  if cfg!(windows) && url_scheme.len() == 2 { " On Windows, absolute paths must be valid file:// URLs."} else { "" },
+  url_scheme
+)]
+pub struct UnsupportedEsmUrlSchemeError { // referrer/specifier scheme is neither file: nor data:
+  pub url_scheme: String, // the offending scheme; len()==2 presumably catches drive-letter paths mis-parsed as URLs — TODO confirm
+}
+
+impl NodeJsErrorCoded for UnsupportedEsmUrlSchemeError {
+  fn code(&self) -> NodeJsErrorCode {
+    NodeJsErrorCode::ERR_UNSUPPORTED_ESM_URL_SCHEME
+  }
+}
+
+#[derive(Debug, Error)]
+pub enum ResolvePkgJsonBinExportError { // errors resolving a package's "bin" entry to an executable module
+  #[error(transparent)]
+  PkgJsonLoad(#[from] PackageJsonLoadError),
+  #[error("Failed resolving binary export. '{}' did not exist", pkg_json_path.display())]
+  MissingPkgJson { pkg_json_path: PathBuf },
+  #[error("Failed resolving binary export. {message}")]
+  InvalidBinProperty { message: String }, // "bin" was present but malformed; message carries the detail
+  #[error(transparent)]
+  UrlToNodeResolution(#[from] UrlToNodeResolutionError),
+}
+
+#[derive(Debug, Error)]
+pub enum ResolveBinaryCommandsError { // errors enumerating all bin commands a package declares
+  #[error(transparent)]
+  PkgJsonLoad(#[from] PackageJsonLoadError),
+  #[error("'{}' did not have a name", pkg_json_path.display())]
+  MissingPkgJsonName { pkg_json_path: PathBuf }, // a string-valued "bin" needs the package name as command name
+}
+
+#[cfg(test)]
+mod test {
+  use super::*;
+
+  #[test]
+  fn types_resolution_package_path_not_exported() { // pins the exact wording incl. the " for types" suffix
+    let separator_char = if cfg!(windows) { '\\' } else { '/' };
+    assert_eq!(
+      PackagePathNotExportedError {
+        pkg_json_path: PathBuf::from("test_path").join("package.json"),
+        subpath: "./jsx-runtime".to_string(),
+        maybe_referrer: None,
+        mode: NodeResolutionMode::Types
+      }.to_string(),
+      format!("[ERR_PACKAGE_PATH_NOT_EXPORTED] Package subpath './jsx-runtime' is not defined for types by \"exports\" in 'test_path{separator_char}package.json'")
+    );
+    assert_eq!(
+      PackagePathNotExportedError {
+        pkg_json_path: PathBuf::from("test_path").join("package.json"),
+        subpath: ".".to_string(), // "." exercises the "No \"exports\" main defined" branch
+        maybe_referrer: None,
+        mode: NodeResolutionMode::Types
+      }.to_string(),
+      format!("[ERR_PACKAGE_PATH_NOT_EXPORTED] No \"exports\" main defined for types in 'test_path{separator_char}package.json'")
+    );
+  }
+}
diff --git a/resolvers/node/lib.rs b/resolvers/node/lib.rs
new file mode 100644
index 000000000..f03f77048
--- /dev/null
+++ b/resolvers/node/lib.rs
@@ -0,0 +1,27 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+#![deny(clippy::print_stderr)]
+#![deny(clippy::print_stdout)]
+
+pub mod analyze;
+pub mod env;
+pub mod errors;
+mod npm;
+mod package_json;
+mod path;
+mod resolution;
+mod sync;
+
+pub use deno_package_json::PackageJson;
+pub use npm::NpmResolver;
+pub use npm::NpmResolverRc;
+pub use package_json::load_pkg_json;
+pub use package_json::PackageJsonThreadLocalCache;
+pub use path::PathClean;
+pub use resolution::parse_npm_pkg_name;
+pub use resolution::NodeModuleKind;
+pub use resolution::NodeResolution;
+pub use resolution::NodeResolutionMode;
+pub use resolution::NodeResolver;
+pub use resolution::DEFAULT_CONDITIONS;
+pub use resolution::REQUIRE_CONDITIONS;
diff --git a/resolvers/node/npm.rs b/resolvers/node/npm.rs
new file mode 100644
index 000000000..77df57c48
--- /dev/null
+++ b/resolvers/node/npm.rs
@@ -0,0 +1,41 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use std::path::Path;
+use std::path::PathBuf;
+
+use url::Url;
+
+use crate::errors;
+use crate::path::PathClean;
+use crate::sync::MaybeSend;
+use crate::sync::MaybeSync;
+
+#[allow(clippy::disallowed_types)] // NOTE(review): MaybeArc presumably aliases Arc/Rc by feature — confirm in sync.rs
+pub type NpmResolverRc = crate::sync::MaybeArc<dyn NpmResolver>;
+
+pub trait NpmResolver: std::fmt::Debug + MaybeSend + MaybeSync { // host hook: how npm packages are located on disk
+  /// Resolves an npm package folder path from an npm package referrer.
+  fn resolve_package_folder_from_package(
+    &self,
+    specifier: &str,
+    referrer: &Url,
+  ) -> Result<PathBuf, errors::PackageFolderResolveError>;
+
+  fn in_npm_package(&self, specifier: &Url) -> bool; // whether the URL lives inside a managed npm package
+
+  fn in_npm_package_at_dir_path(&self, path: &Path) -> bool { // default: convert to a directory URL, then delegate
+    let specifier = match Url::from_directory_path(path.to_path_buf().clean()) {
+      Ok(p) => p,
+      Err(_) => return false, // non-representable paths conservatively report false
+    };
+    self.in_npm_package(&specifier)
+  }
+
+  fn in_npm_package_at_file_path(&self, path: &Path) -> bool { // default: convert to a file URL, then delegate
+    let specifier = match Url::from_file_path(path.to_path_buf().clean()) {
+      Ok(p) => p,
+      Err(_) => return false, // non-representable paths conservatively report false
+    };
+    self.in_npm_package(&specifier)
+  }
+}
diff --git a/resolvers/node/package_json.rs b/resolvers/node/package_json.rs
new file mode 100644
index 000000000..de750f1d7
--- /dev/null
+++ b/resolvers/node/package_json.rs
@@ -0,0 +1,53 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use deno_package_json::PackageJson;
+use deno_package_json::PackageJsonRc;
+use std::cell::RefCell;
+use std::collections::HashMap;
+use std::io::ErrorKind;
+use std::path::Path;
+use std::path::PathBuf;
+
+use crate::errors::PackageJsonLoadError;
+
+// use a thread local cache so that workers have their own distinct cache
+thread_local! {
+  static CACHE: RefCell<HashMap<PathBuf, PackageJsonRc>> = RefCell::new(HashMap::new()); // keyed by package.json path
+}
+
+pub struct PackageJsonThreadLocalCache; // zero-sized handle over the thread-local CACHE above
+
+impl PackageJsonThreadLocalCache {
+  pub fn clear() { // drops all cached package.jsons for the current thread only
+    CACHE.with(|cache| cache.borrow_mut().clear());
+  }
+}
+
+impl deno_package_json::PackageJsonCache for PackageJsonThreadLocalCache {
+  fn get(&self, path: &Path) -> Option<PackageJsonRc> {
+    CACHE.with(|cache| cache.borrow().get(path).cloned()) // Rc/Arc clone, not a deep copy
+  }
+
+  fn set(&self, path: PathBuf, package_json: PackageJsonRc) {
+    CACHE.with(|cache| cache.borrow_mut().insert(path, package_json));
+  }
+}
+
+/// Helper to load a package.json file using the thread local cache
+/// in node_resolver.
+pub fn load_pkg_json(
+  fs: &dyn deno_package_json::fs::DenoPkgJsonFs,
+  path: &Path,
+) -> Result<Option<PackageJsonRc>, PackageJsonLoadError> {
+  let result =
+    PackageJson::load_from_path(path, fs, Some(&PackageJsonThreadLocalCache));
+  match result {
+    Ok(pkg_json) => Ok(Some(pkg_json)),
+    Err(deno_package_json::PackageJsonLoadError::Io { source, .. })
+      if source.kind() == ErrorKind::NotFound =>
+    {
+      Ok(None) // a missing package.json is not an error, just absent
+    }
+    Err(err) => Err(PackageJsonLoadError(err)), // other io/parse failures propagate wrapped
+  }
+}
diff --git a/resolvers/node/path.rs b/resolvers/node/path.rs
new file mode 100644
index 000000000..ece270cd9
--- /dev/null
+++ b/resolvers/node/path.rs
@@ -0,0 +1,179 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use std::path::Component;
+use std::path::Path;
+use std::path::PathBuf;
+
+use url::Url;
+
+/// Extension to path_clean::PathClean
+pub trait PathClean<T> {
+  fn clean(&self) -> T;
+}
+
+impl PathClean<PathBuf> for PathBuf {
+  fn clean(&self) -> PathBuf {
+    // Returns false when the cleaned path still contains a `\.` or `\..`
+    // style segment (followed by a separator or end of string), i.e.
+    // path_clean left normalization work undone on Windows separators.
+    fn is_clean_path(path: &Path) -> bool {
+      let path = path.to_string_lossy();
+      let mut current_index = 0;
+      while let Some(index) = path[current_index..].find("\\.") {
+        let trailing_index = index + current_index + 2;
+        let mut trailing_chars = path[trailing_index..].chars();
+        match trailing_chars.next() {
+          // "\.." followed by a separator or end of string
+          Some('.') => match trailing_chars.next() {
+            Some('/') | Some('\\') | None => {
+              return false;
+            }
+            _ => {}
+          },
+          // "\." followed by a separator
+          Some('/') | Some('\\') => {
+            return false;
+          }
+          _ => {}
+        }
+        current_index = trailing_index;
+      }
+      true
+    }
+
+    let path = path_clean::PathClean::clean(self);
+    if cfg!(windows) && !is_clean_path(&path) {
+      // temporary workaround because path_clean::PathClean::clean is
+      // not good enough on windows
+      let mut components = Vec::new();
+
+      for component in path.components() {
+        match component {
+          Component::CurDir => {
+            // skip
+          }
+          Component::ParentDir => {
+            // ".." must pop a preceding normal component; popping past the
+            // root/prefix indicates an unnormalizable path
+            let maybe_last_component = components.pop();
+            if !matches!(maybe_last_component, Some(Component::Normal(_))) {
+              panic!("Error normalizing: {}", path.display());
+            }
+          }
+          Component::Normal(_) | Component::RootDir | Component::Prefix(_) => {
+            components.push(component);
+          }
+        }
+      }
+      components.into_iter().collect::<PathBuf>()
+    } else {
+      path
+    }
+  }
+}
+
+/// Converts a path to a `file:` URL.
+///
+/// Panics when `Url::from_file_path` rejects the path (e.g. it is not
+/// absolute).
+pub(crate) fn to_file_specifier(path: &Path) -> Url {
+  match Url::from_file_path(path) {
+    Ok(url) => url,
+    Err(_) => panic!("Invalid path: {}", path.display()),
+  }
+}
+
+// todo(dsherret): we have the below code also in deno_core and it
+// would be good to somehow re-use it in both places (we don't want
+// to create a dependency on deno_core here)
+
+/// No-op on non-Windows platforms (UNC prefixes only exist on Windows).
+#[cfg(not(windows))]
+#[inline]
+pub fn strip_unc_prefix(path: PathBuf) -> PathBuf {
+  path
+}
+
+/// Strips the unc prefix (ex. \\?\) from Windows paths.
+#[cfg(windows)]
+pub fn strip_unc_prefix(path: PathBuf) -> PathBuf {
+  use std::path::Component;
+  use std::path::Prefix;
+
+  let mut components = path.components();
+  match components.next() {
+    Some(Component::Prefix(prefix)) => {
+      match prefix.kind() {
+        // \\?\device
+        Prefix::Verbatim(device) => {
+          let mut path = PathBuf::new();
+          path.push(format!(r"\\{}\", device.to_string_lossy()));
+          // the root dir component duplicates the trailing "\" pushed above
+          path.extend(components.filter(|c| !matches!(c, Component::RootDir)));
+          path
+        }
+        // \\?\c:\path
+        Prefix::VerbatimDisk(_) => {
+          let mut path = PathBuf::new();
+          path.push(prefix.as_os_str().to_string_lossy().replace(r"\\?\", ""));
+          path.extend(components);
+          path
+        }
+        // \\?\UNC\hostname\share_name\path
+        Prefix::VerbatimUNC(hostname, share_name) => {
+          let mut path = PathBuf::new();
+          path.push(format!(
+            r"\\{}\{}\",
+            hostname.to_string_lossy(),
+            share_name.to_string_lossy()
+          ));
+          path.extend(components.filter(|c| !matches!(c, Component::RootDir)));
+          path
+        }
+        // non-verbatim prefixes (ex. C:\, \\.\) are returned unchanged
+        _ => path,
+      }
+    }
+    // relative paths and paths without a prefix are returned unchanged
+    _ => path,
+  }
+}
+
+#[cfg(test)]
+mod test {
+  // Windows-only: both helpers only do interesting work for Windows
+  // path prefixes and separators.
+  #[cfg(windows)]
+  #[test]
+  fn test_path_clean() {
+    use super::*;
+
+    run_test("C:\\test\\./file.txt", "C:\\test\\file.txt");
+    run_test("C:\\test\\../other/file.txt", "C:\\other\\file.txt");
+    run_test("C:\\test\\../other\\file.txt", "C:\\other\\file.txt");
+
+    fn run_test(input: &str, expected: &str) {
+      assert_eq!(PathBuf::from(input).clean(), PathBuf::from(expected));
+    }
+  }
+
+  #[cfg(windows)]
+  #[test]
+  fn test_strip_unc_prefix() {
+    use std::path::PathBuf;
+
+    // non-verbatim paths are unchanged
+    run_test(r"C:\", r"C:\");
+    run_test(r"C:\test\file.txt", r"C:\test\file.txt");
+
+    // \\?\ disk prefixes are stripped
+    run_test(r"\\?\C:\", r"C:\");
+    run_test(r"\\?\C:\test\file.txt", r"C:\test\file.txt");
+
+    // \\.\ device paths are left untouched
+    run_test(r"\\.\C:\", r"\\.\C:\");
+    run_test(r"\\.\C:\Test\file.txt", r"\\.\C:\Test\file.txt");
+
+    // \\?\UNC\ prefixes become plain \\server\share paths
+    run_test(r"\\?\UNC\localhost\", r"\\localhost");
+    run_test(r"\\?\UNC\localhost\c$\", r"\\localhost\c$");
+    run_test(
+      r"\\?\UNC\localhost\c$\Windows\file.txt",
+      r"\\localhost\c$\Windows\file.txt",
+    );
+    run_test(r"\\?\UNC\wsl$\deno.json", r"\\wsl$\deno.json");
+
+    // verbatim device paths keep the \\server form
+    run_test(r"\\?\server1", r"\\server1");
+    run_test(r"\\?\server1\e$\", r"\\server1\e$\");
+    run_test(
+      r"\\?\server1\e$\test\file.txt",
+      r"\\server1\e$\test\file.txt",
+    );
+
+    fn run_test(input: &str, expected: &str) {
+      assert_eq!(
+        super::strip_unc_prefix(PathBuf::from(input)),
+        PathBuf::from(expected)
+      );
+    }
+  }
+}
diff --git a/resolvers/node/resolution.rs b/resolvers/node/resolution.rs
new file mode 100644
index 000000000..ad9dbb710
--- /dev/null
+++ b/resolvers/node/resolution.rs
@@ -0,0 +1,2023 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use std::borrow::Cow;
+use std::path::Path;
+use std::path::PathBuf;
+
+use anyhow::bail;
+use anyhow::Error as AnyError;
+use deno_media_type::MediaType;
+use deno_package_json::PackageJsonRc;
+use serde_json::Map;
+use serde_json::Value;
+use url::Url;
+
+use crate::env::NodeResolverEnv;
+use crate::errors;
+use crate::errors::CanonicalizingPkgJsonDirError;
+use crate::errors::ClosestPkgJsonError;
+use crate::errors::DataUrlReferrerError;
+use crate::errors::FinalizeResolutionError;
+use crate::errors::InvalidModuleSpecifierError;
+use crate::errors::InvalidPackageTargetError;
+use crate::errors::LegacyResolveError;
+use crate::errors::ModuleNotFoundError;
+use crate::errors::NodeJsErrorCode;
+use crate::errors::NodeJsErrorCoded;
+use crate::errors::NodeResolveError;
+use crate::errors::NodeResolveRelativeJoinError;
+use crate::errors::PackageExportsResolveError;
+use crate::errors::PackageImportNotDefinedError;
+use crate::errors::PackageImportsResolveError;
+use crate::errors::PackageImportsResolveErrorKind;
+use crate::errors::PackageJsonLoadError;
+use crate::errors::PackagePathNotExportedError;
+use crate::errors::PackageResolveError;
+use crate::errors::PackageSubpathResolveError;
+use crate::errors::PackageSubpathResolveErrorKind;
+use crate::errors::PackageTargetNotFoundError;
+use crate::errors::PackageTargetResolveError;
+use crate::errors::PackageTargetResolveErrorKind;
+use crate::errors::ResolveBinaryCommandsError;
+use crate::errors::ResolvePkgJsonBinExportError;
+use crate::errors::ResolvePkgSubpathFromDenoModuleError;
+use crate::errors::TypeScriptNotSupportedInNpmError;
+use crate::errors::TypesNotFoundError;
+use crate::errors::TypesNotFoundErrorData;
+use crate::errors::UnsupportedDirImportError;
+use crate::errors::UnsupportedEsmUrlSchemeError;
+use crate::errors::UrlToNodeResolutionError;
+use crate::path::strip_unc_prefix;
+use crate::path::to_file_specifier;
+use crate::NpmResolverRc;
+use crate::PathClean;
+use deno_package_json::PackageJson;
+
+// Export conditions applied when resolving an ES module import.
+pub static DEFAULT_CONDITIONS: &[&str] = &["deno", "node", "import"];
+// Export conditions applied when resolving a CommonJS require.
+pub static REQUIRE_CONDITIONS: &[&str] = &["require", "node"];
+// Fallback conditions used to tolerate a misplaced "types" condition.
+static TYPES_ONLY_CONDITIONS: &[&str] = &["types"];
+
+pub type NodeModuleKind = deno_package_json::NodeModuleKind;
+
+/// Whether resolution is happening for execution or for type checking
+/// (declaration file lookup).
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum NodeResolutionMode {
+  Execution,
+  Types,
+}
+
+impl NodeResolutionMode {
+  pub fn is_types(&self) -> bool {
+    matches!(self, NodeResolutionMode::Types)
+  }
+}
+
+/// The result of a successful resolution: an ES module, a CommonJS
+/// module, or a built-in Node module name.
+#[derive(Debug)]
+pub enum NodeResolution {
+  Esm(Url),
+  CommonJs(Url),
+  BuiltIn(String),
+}
+
+impl NodeResolution {
+  /// Converts the resolution into its URL, normalizing built-in names to
+  /// the `node:` scheme.
+  pub fn into_url(self) -> Url {
+    match self {
+      Self::Esm(u) => u,
+      Self::CommonJs(u) => u,
+      Self::BuiltIn(specifier) => {
+        if specifier.starts_with("node:") {
+          Url::parse(&specifier).unwrap()
+        } else {
+          Url::parse(&format!("node:{specifier}")).unwrap()
+        }
+      }
+    }
+  }
+
+  /// Maps an optional resolution to a `(specifier, media type)` pair,
+  /// disambiguating JS/TS media types based on whether the module
+  /// resolved as ESM or CommonJS. `None` yields a placeholder specifier
+  /// for a missing dependency.
+  pub fn into_specifier_and_media_type(
+    resolution: Option<Self>,
+  ) -> (Url, MediaType) {
+    match resolution {
+      Some(NodeResolution::CommonJs(specifier)) => {
+        let media_type = MediaType::from_specifier(&specifier);
+        (
+          specifier,
+          match media_type {
+            MediaType::JavaScript | MediaType::Jsx => MediaType::Cjs,
+            MediaType::TypeScript | MediaType::Tsx => MediaType::Cts,
+            MediaType::Dts => MediaType::Dcts,
+            _ => media_type,
+          },
+        )
+      }
+      Some(NodeResolution::Esm(specifier)) => {
+        let media_type = MediaType::from_specifier(&specifier);
+        (
+          specifier,
+          match media_type {
+            MediaType::JavaScript | MediaType::Jsx => MediaType::Mjs,
+            MediaType::TypeScript | MediaType::Tsx => MediaType::Mts,
+            MediaType::Dts => MediaType::Dmts,
+            _ => media_type,
+          },
+        )
+      }
+      // built-ins are surfaced with a declaration media type
+      Some(resolution) => (resolution.into_url(), MediaType::Dts),
+      None => (
+        Url::parse("internal:///missing_dependency.d.ts").unwrap(),
+        MediaType::Dts,
+      ),
+    }
+  }
+}
+
+/// Shared pointer alias for [`NodeResolver`] (see `crate::sync::MaybeArc`).
+#[allow(clippy::disallowed_types)]
+pub type NodeResolverRc<TEnv> = crate::sync::MaybeArc<NodeResolver<TEnv>>;
+
+/// Implements the Node.js module resolution algorithm on top of a
+/// pluggable host environment (`env`) and npm resolver.
+#[derive(Debug)]
+pub struct NodeResolver<TEnv: NodeResolverEnv> {
+  env: TEnv,
+  npm_resolver: NpmResolverRc,
+}
+
+impl<TEnv: NodeResolverEnv> NodeResolver<TEnv> {
+  /// Creates a resolver from the host environment and npm resolver.
+  pub fn new(env: TEnv, npm_resolver: NpmResolverRc) -> Self {
+    Self { env, npm_resolver }
+  }
+
+  /// Delegates to the npm resolver's `in_npm_package` check.
+  pub fn in_npm_package(&self, specifier: &Url) -> bool {
+    self.npm_resolver.in_npm_package(specifier)
+  }
+
+  /// This function is an implementation of `defaultResolve` in
+  /// `lib/internal/modules/esm/resolve.js` from Node.
+  ///
+  /// Handles built-ins and `data:`/`file:` URLs up front, then performs
+  /// module resolution, optional declaration-file mapping (types mode),
+  /// and final validation/classification of the resolved URL.
+  pub fn resolve(
+    &self,
+    specifier: &str,
+    referrer: &Url,
+    referrer_kind: NodeModuleKind,
+    mode: NodeResolutionMode,
+  ) -> Result<NodeResolution, NodeResolveError> {
+    // Note: if we are here, then the referrer is an esm module
+    // TODO(bartlomieju): skipped "policy" part as we don't plan to support it
+
+    if self.env.is_builtin_node_module(specifier) {
+      return Ok(NodeResolution::BuiltIn(specifier.to_string()));
+    }
+
+    if let Ok(url) = Url::parse(specifier) {
+      if url.scheme() == "data" {
+        return Ok(NodeResolution::Esm(url));
+      }
+
+      // "node:fs" style specifiers
+      if let Some(module_name) =
+        get_module_name_from_builtin_node_module_specifier(&url)
+      {
+        return Ok(NodeResolution::BuiltIn(module_name.to_string()));
+      }
+
+      let protocol = url.scheme();
+
+      if protocol != "file" && protocol != "data" {
+        return Err(
+          UnsupportedEsmUrlSchemeError {
+            url_scheme: protocol.to_string(),
+          }
+          .into(),
+        );
+      }
+
+      // todo(dsherret): this seems wrong
+      if referrer.scheme() == "data" {
+        let url = referrer
+          .join(specifier)
+          .map_err(|source| DataUrlReferrerError { source })?;
+        return Ok(NodeResolution::Esm(url));
+      }
+    }
+
+    let url = self.module_resolve(
+      specifier,
+      referrer,
+      referrer_kind,
+      // even though the referrer may be CJS, if we're here that means we're doing ESM resolution
+      DEFAULT_CONDITIONS,
+      mode,
+    )?;
+
+    // in types mode, map the resolved file to its declaration file
+    let url = if mode.is_types() {
+      let file_path = to_file_path(&url);
+      self.path_to_declaration_url(&file_path, Some(referrer), referrer_kind)?
+    } else {
+      url
+    };
+
+    let url = self.finalize_resolution(url, Some(referrer))?;
+    let resolve_response = self.url_to_node_resolution(url)?;
+    // TODO(bartlomieju): skipped checking errors for commonJS resolution and
+    // "preserveSymlinksMain"/"preserveSymlinks" options.
+    Ok(resolve_response)
+  }
+
+  /// Implementation of Node's `moduleResolve`: dispatches on the
+  /// specifier shape — relative/absolute path, `#` subpath import,
+  /// already-parseable URL, or bare package specifier.
+  fn module_resolve(
+    &self,
+    specifier: &str,
+    referrer: &Url,
+    referrer_kind: NodeModuleKind,
+    conditions: &[&str],
+    mode: NodeResolutionMode,
+  ) -> Result<Url, NodeResolveError> {
+    if should_be_treated_as_relative_or_absolute_path(specifier) {
+      Ok(referrer.join(specifier).map_err(|err| {
+        NodeResolveRelativeJoinError {
+          path: specifier.to_string(),
+          base: referrer.clone(),
+          source: err,
+        }
+      })?)
+    } else if specifier.starts_with('#') {
+      // subpath imports resolve via the closest package.json "imports"
+      let pkg_config = self
+        .get_closest_package_json(referrer)
+        .map_err(PackageImportsResolveErrorKind::ClosestPkgJson)
+        .map_err(|err| PackageImportsResolveError(Box::new(err)))?;
+      Ok(self.package_imports_resolve(
+        specifier,
+        Some(referrer),
+        referrer_kind,
+        pkg_config.as_deref(),
+        conditions,
+        mode,
+      )?)
+    } else if let Ok(resolved) = Url::parse(specifier) {
+      // already a fully-qualified URL
+      Ok(resolved)
+    } else {
+      // bare specifier -> package resolution
+      Ok(self.package_resolve(
+        specifier,
+        referrer,
+        referrer_kind,
+        conditions,
+        mode,
+      )?)
+    }
+  }
+
+  /// Port of `finalizeResolution` from Node's
+  /// `lib/internal/modules/esm/resolve.js`: rejects percent-encoded path
+  /// separators, then verifies the resolved `file:` URL points at an
+  /// existing file (directories and missing files are errors).
+  fn finalize_resolution(
+    &self,
+    resolved: Url,
+    maybe_referrer: Option<&Url>,
+  ) -> Result<Url, FinalizeResolutionError> {
+    // same as Node's `encodedSepRegEx` (/%2F|%5C/i): encoded "/" and "\".
+    // (previously this checked %2C — an encoded comma — by mistake)
+    let encoded_sep_re = lazy_regex::regex!(r"(?i)%2F|%5C");
+
+    if encoded_sep_re.is_match(resolved.path()) {
+      return Err(
+        errors::InvalidModuleSpecifierError {
+          request: resolved.to_string(),
+          reason: Cow::Borrowed(
+            "must not include encoded \"/\" or \"\\\\\" characters",
+          ),
+          maybe_referrer: maybe_referrer.map(to_file_path_string),
+        }
+        .into(),
+      );
+    }
+
+    if resolved.scheme() == "node" {
+      return Ok(resolved);
+    }
+
+    let path = to_file_path(&resolved);
+
+    // TODO(bartlomieju): currently not supported
+    // if (getOptionValue('--experimental-specifier-resolution') === 'node') {
+    //   ...
+    // }
+
+    let p_str = path.to_str().unwrap();
+    // NOTE(review): for a trailing "/", this keeps only the final "/"
+    // (mirroring JS `path.slice(-1)`) rather than stripping it
+    // (`path.slice(0, -1)`) — confirm intent against upstream Node before
+    // changing; stat'ing "/" makes every trailing-slash path a dir import.
+    let p = if p_str.ends_with('/') {
+      p_str[p_str.len() - 1..].to_string()
+    } else {
+      p_str.to_string()
+    };
+
+    let (is_dir, is_file) = if let Ok(stats) = self.env.stat_sync(Path::new(&p))
+    {
+      (stats.is_dir, stats.is_file)
+    } else {
+      // treat stat failures as "does not exist"
+      (false, false)
+    };
+    if is_dir {
+      return Err(
+        UnsupportedDirImportError {
+          dir_url: resolved.clone(),
+          maybe_referrer: maybe_referrer.map(ToOwned::to_owned),
+        }
+        .into(),
+      );
+    } else if !is_file {
+      return Err(
+        ModuleNotFoundError {
+          specifier: resolved,
+          maybe_referrer: maybe_referrer.map(ToOwned::to_owned),
+          typ: "module",
+        }
+        .into(),
+      );
+    }
+
+    Ok(resolved)
+  }
+
+  /// Resolves an optional subpath (e.g. the `server` in
+  /// `react-dom/server`) of an npm package directory for code authored
+  /// as a Deno module (always treated as ESM with default conditions).
+  pub fn resolve_package_subpath_from_deno_module(
+    &self,
+    package_dir: &Path,
+    package_subpath: Option<&str>,
+    maybe_referrer: Option<&Url>,
+    mode: NodeResolutionMode,
+  ) -> Result<NodeResolution, ResolvePkgSubpathFromDenoModuleError> {
+    let node_module_kind = NodeModuleKind::Esm;
+    // "." selects the package root; otherwise normalize to "./<subpath>"
+    let package_subpath = package_subpath
+      .map(|s| format!("./{s}"))
+      .unwrap_or_else(|| ".".to_string());
+    let resolved_url = self.resolve_package_dir_subpath(
+      package_dir,
+      &package_subpath,
+      maybe_referrer,
+      node_module_kind,
+      DEFAULT_CONDITIONS,
+      mode,
+    )?;
+    let resolve_response = self.url_to_node_resolution(resolved_url)?;
+    // TODO(bartlomieju): skipped checking errors for commonJS resolution and
+    // "preserveSymlinksMain"/"preserveSymlinks" options.
+    Ok(resolve_response)
+  }
+
+  /// Returns the binary command names declared by a package's
+  /// package.json "bin" field (empty when there is no package.json or no
+  /// "bin" entry).
+  pub fn resolve_binary_commands(
+    &self,
+    package_folder: &Path,
+  ) -> Result<Vec<String>, ResolveBinaryCommandsError> {
+    let pkg_json_path = package_folder.join("package.json");
+    let Some(package_json) = self.load_package_json(&pkg_json_path)? else {
+      return Ok(Vec::new());
+    };
+
+    Ok(match &package_json.bin {
+      // a string "bin" uses the package name (without any scope prefix)
+      // as the single command name
+      Some(Value::String(_)) => {
+        let Some(name) = &package_json.name else {
+          return Err(ResolveBinaryCommandsError::MissingPkgJsonName {
+            pkg_json_path,
+          });
+        };
+        let name = name.split("/").last().unwrap();
+        vec![name.to_string()]
+      }
+      // an object "bin" maps command names to file paths
+      Some(Value::Object(o)) => {
+        o.iter().map(|(key, _)| key.clone()).collect::<Vec<_>>()
+      }
+      _ => Vec::new(),
+    })
+  }
+
+  /// Resolves the file that a package's "bin" entry (optionally selected
+  /// by `sub_path`) points at, erroring when the package.json is missing
+  /// or the "bin" property is invalid.
+  pub fn resolve_binary_export(
+    &self,
+    package_folder: &Path,
+    sub_path: Option<&str>,
+  ) -> Result<NodeResolution, ResolvePkgJsonBinExportError> {
+    let pkg_json_path = package_folder.join("package.json");
+    let Some(package_json) = self.load_package_json(&pkg_json_path)? else {
+      return Err(ResolvePkgJsonBinExportError::MissingPkgJson {
+        pkg_json_path,
+      });
+    };
+    let bin_entry =
+      resolve_bin_entry_value(&package_json, sub_path).map_err(|err| {
+        ResolvePkgJsonBinExportError::InvalidBinProperty {
+          message: err.to_string(),
+        }
+      })?;
+    // bin entries are paths relative to the package folder
+    let url = to_file_specifier(&package_folder.join(bin_entry));
+
+    let resolve_response = self.url_to_node_resolution(url)?;
+    // TODO(bartlomieju): skipped checking errors for commonJS resolution and
+    // "preserveSymlinksMain"/"preserveSymlinks" options.
+    Ok(resolve_response)
+  }
+
+  /// Classifies a resolved URL as ESM or CommonJS based on its extension
+  /// and (for `.js`/`.d.ts`) the closest package.json "type" field;
+  /// errors for TypeScript files inside npm packages.
+  pub fn url_to_node_resolution(
+    &self,
+    url: Url,
+  ) -> Result<NodeResolution, UrlToNodeResolutionError> {
+    let url_str = url.as_str().to_lowercase();
+    if url_str.starts_with("http") || url_str.ends_with(".json") {
+      Ok(NodeResolution::Esm(url))
+    } else if url_str.ends_with(".js") || url_str.ends_with(".d.ts") {
+      // ambiguous extensions follow the package.json "type" field
+      let maybe_package_config = self.get_closest_package_json(&url)?;
+      match maybe_package_config {
+        Some(c) if c.typ == "module" => Ok(NodeResolution::Esm(url)),
+        Some(_) => Ok(NodeResolution::CommonJs(url)),
+        None => Ok(NodeResolution::Esm(url)),
+      }
+    } else if url_str.ends_with(".mjs") || url_str.ends_with(".d.mts") {
+      Ok(NodeResolution::Esm(url))
+    } else if url_str.ends_with(".ts") || url_str.ends_with(".mts") {
+      if self.in_npm_package(&url) {
+        Err(TypeScriptNotSupportedInNpmError { specifier: url }.into())
+      } else {
+        Ok(NodeResolution::Esm(url))
+      }
+    } else {
+      // any other extension (.cjs, .node, none, ...) defaults to CommonJS
+      Ok(NodeResolution::CommonJs(url))
+    }
+  }
+
+  /// Checks if the resolved file has a corresponding declaration file.
+  ///
+  /// Probes for `.d.mts`/`.d.cts`/`.d.ts` siblings (preferring the
+  /// extension matching the referrer kind), then tries directory
+  /// resolution and an `index.js`-based probe, and finally errors with
+  /// `TypesNotFoundError` when nothing is found.
+  fn path_to_declaration_url(
+    &self,
+    path: &Path,
+    maybe_referrer: Option<&Url>,
+    referrer_kind: NodeModuleKind,
+  ) -> Result<Url, TypesNotFoundError> {
+    // Probes for declaration files next to `path`, ordered by how well
+    // the extension matches the file and the referrer's module kind.
+    fn probe_extensions<TEnv: NodeResolverEnv>(
+      fs: &TEnv,
+      path: &Path,
+      lowercase_path: &str,
+      referrer_kind: NodeModuleKind,
+    ) -> Option<PathBuf> {
+      let mut searched_for_d_mts = false;
+      let mut searched_for_d_cts = false;
+      if lowercase_path.ends_with(".mjs") {
+        let d_mts_path = with_known_extension(path, "d.mts");
+        if fs.exists_sync(&d_mts_path) {
+          return Some(d_mts_path);
+        }
+        searched_for_d_mts = true;
+      } else if lowercase_path.ends_with(".cjs") {
+        let d_cts_path = with_known_extension(path, "d.cts");
+        if fs.exists_sync(&d_cts_path) {
+          return Some(d_cts_path);
+        }
+        searched_for_d_cts = true;
+      }
+
+      let dts_path = with_known_extension(path, "d.ts");
+      if fs.exists_sync(&dts_path) {
+        return Some(dts_path);
+      }
+
+      let specific_dts_path = match referrer_kind {
+        NodeModuleKind::Cjs if !searched_for_d_cts => {
+          Some(with_known_extension(path, "d.cts"))
+        }
+        NodeModuleKind::Esm if !searched_for_d_mts => {
+          Some(with_known_extension(path, "d.mts"))
+        }
+        _ => None, // already searched above
+      };
+      if let Some(specific_dts_path) = specific_dts_path {
+        if fs.exists_sync(&specific_dts_path) {
+          return Some(specific_dts_path);
+        }
+      }
+      None
+    }
+
+    let lowercase_path = path.to_string_lossy().to_lowercase();
+    // already a declaration file
+    if lowercase_path.ends_with(".d.ts")
+      || lowercase_path.ends_with(".d.cts")
+      || lowercase_path.ends_with(".d.mts")
+    {
+      return Ok(to_file_specifier(path));
+    }
+    if let Some(path) =
+      probe_extensions(&self.env, path, &lowercase_path, referrer_kind)
+    {
+      return Ok(to_file_specifier(&path));
+    }
+    if self.env.is_dir_sync(path) {
+      // a directory: try resolving it as a package (its "." entry)
+      let resolution_result = self.resolve_package_dir_subpath(
+        path,
+        /* sub path */ ".",
+        maybe_referrer,
+        referrer_kind,
+        match referrer_kind {
+          NodeModuleKind::Esm => DEFAULT_CONDITIONS,
+          NodeModuleKind::Cjs => REQUIRE_CONDITIONS,
+        },
+        NodeResolutionMode::Types,
+      );
+      if let Ok(resolution) = resolution_result {
+        return Ok(resolution);
+      }
+      // fall back to declaration files next to an implied index.js
+      let index_path = path.join("index.js");
+      if let Some(path) = probe_extensions(
+        &self.env,
+        &index_path,
+        &index_path.to_string_lossy().to_lowercase(),
+        referrer_kind,
+      ) {
+        return Ok(to_file_specifier(&path));
+      }
+    }
+    // allow resolving .css files for types resolution
+    if lowercase_path.ends_with(".css") {
+      return Ok(to_file_specifier(path));
+    }
+    Err(TypesNotFoundError(Box::new(TypesNotFoundErrorData {
+      code_specifier: to_file_specifier(path),
+      maybe_referrer: maybe_referrer.cloned(),
+    })))
+  }
+
+  /// Implementation of Node's `packageImportsResolve`: resolves a `#name`
+  // specifier via the referrer package.json "imports" field — an exact
+  /// key match first, then the best matching "*" pattern key.
+  #[allow(clippy::too_many_arguments)]
+  pub fn package_imports_resolve(
+    &self,
+    name: &str,
+    maybe_referrer: Option<&Url>,
+    referrer_kind: NodeModuleKind,
+    referrer_pkg_json: Option<&PackageJson>,
+    conditions: &[&str],
+    mode: NodeResolutionMode,
+  ) -> Result<Url, PackageImportsResolveError> {
+    if name == "#" || name.starts_with("#/") || name.ends_with('/') {
+      let reason = "is not a valid internal imports specifier name";
+      return Err(
+        errors::InvalidModuleSpecifierError {
+          request: name.to_string(),
+          reason: Cow::Borrowed(reason),
+          maybe_referrer: maybe_referrer.map(to_specifier_display_string),
+        }
+        .into(),
+      );
+    }
+
+    let mut package_json_path = None;
+    if let Some(pkg_json) = &referrer_pkg_json {
+      package_json_path = Some(pkg_json.path.clone());
+      if let Some(imports) = &pkg_json.imports {
+        // exact (non-pattern) key match
+        if imports.contains_key(name) && !name.contains('*') {
+          let target = imports.get(name).unwrap();
+          let maybe_resolved = self.resolve_package_target(
+            package_json_path.as_ref().unwrap(),
+            target,
+            "",
+            name,
+            maybe_referrer,
+            referrer_kind,
+            false,
+            true,
+            conditions,
+            mode,
+          )?;
+          if let Some(resolved) = maybe_resolved {
+            return Ok(resolved);
+          }
+        } else {
+          // find the best "*" pattern match
+          // NOTE(review): uses `name.len() > key.len()` while the exports
+          // path uses `>=` — confirm against upstream Node's imports logic
+          let mut best_match = "";
+          let mut best_match_subpath = None;
+          for key in imports.keys() {
+            let pattern_index = key.find('*');
+            if let Some(pattern_index) = pattern_index {
+              let key_sub = &key[0..pattern_index];
+              if name.starts_with(key_sub) {
+                let pattern_trailer = &key[pattern_index + 1..];
+                if name.len() > key.len()
+                  && name.ends_with(&pattern_trailer)
+                  && pattern_key_compare(best_match, key) == 1
+                  && key.rfind('*') == Some(pattern_index)
+                {
+                  best_match = key;
+                  best_match_subpath = Some(
+                    &name[pattern_index..(name.len() - pattern_trailer.len())],
+                  );
+                }
+              }
+            }
+          }
+
+          if !best_match.is_empty() {
+            let target = imports.get(best_match).unwrap();
+            let maybe_resolved = self.resolve_package_target(
+              package_json_path.as_ref().unwrap(),
+              target,
+              best_match_subpath.unwrap(),
+              best_match,
+              maybe_referrer,
+              referrer_kind,
+              true,
+              true,
+              conditions,
+              mode,
+            )?;
+            if let Some(resolved) = maybe_resolved {
+              return Ok(resolved);
+            }
+          }
+        }
+      }
+    }
+
+    Err(
+      PackageImportNotDefinedError {
+        name: name.to_string(),
+        package_json_path,
+        maybe_referrer: maybe_referrer.map(ToOwned::to_owned),
+      }
+      .into(),
+    )
+  }
+
+  /// Implementation of Node's `resolvePackageTargetString`: resolves a
+  /// string export/import target (optionally containing a "*" pattern)
+  /// relative to the package.json directory, rejecting invalid segments
+  /// and targets that escape the package directory.
+  #[allow(clippy::too_many_arguments)]
+  fn resolve_package_target_string(
+    &self,
+    target: &str,
+    subpath: &str,
+    match_: &str,
+    package_json_path: &Path,
+    maybe_referrer: Option<&Url>,
+    referrer_kind: NodeModuleKind,
+    pattern: bool,
+    internal: bool,
+    conditions: &[&str],
+    mode: NodeResolutionMode,
+  ) -> Result<Url, PackageTargetResolveError> {
+    // a subpath may only be appended to a non-pattern target ending in "/"
+    if !subpath.is_empty() && !pattern && !target.ends_with('/') {
+      return Err(
+        InvalidPackageTargetError {
+          pkg_json_path: package_json_path.to_path_buf(),
+          sub_path: match_.to_string(),
+          target: target.to_string(),
+          is_import: internal,
+          maybe_referrer: maybe_referrer.map(ToOwned::to_owned),
+        }
+        .into(),
+      );
+    }
+    let invalid_segment_re =
+      lazy_regex::regex!(r"(^|\\|/)(\.\.?|node_modules)(\\|/|$)");
+    let pattern_re = lazy_regex::regex!(r"\*");
+    if !target.starts_with("./") {
+      // non-relative targets are only valid for "imports" (internal) and
+      // may point at a builtin module or another package
+      if internal && !target.starts_with("../") && !target.starts_with('/') {
+        let target_url = Url::parse(target);
+        match target_url {
+          Ok(url) => {
+            if get_module_name_from_builtin_node_module_specifier(&url)
+              .is_some()
+            {
+              return Ok(url);
+            }
+          }
+          Err(_) => {
+            // substitute the subpath into the target, then resolve the
+            // result as a bare package specifier
+            let export_target = if pattern {
+              pattern_re
+                .replace(target, |_caps: &regex::Captures| subpath)
+                .to_string()
+            } else {
+              format!("{target}{subpath}")
+            };
+            let package_json_url = to_file_specifier(package_json_path);
+            let result = match self.package_resolve(
+              &export_target,
+              &package_json_url,
+              referrer_kind,
+              conditions,
+              mode,
+            ) {
+              Ok(url) => Ok(url),
+              Err(err) => match err.code() {
+                NodeJsErrorCode::ERR_INVALID_MODULE_SPECIFIER
+                | NodeJsErrorCode::ERR_INVALID_PACKAGE_CONFIG
+                | NodeJsErrorCode::ERR_INVALID_PACKAGE_TARGET
+                | NodeJsErrorCode::ERR_PACKAGE_IMPORT_NOT_DEFINED
+                | NodeJsErrorCode::ERR_PACKAGE_PATH_NOT_EXPORTED
+                | NodeJsErrorCode::ERR_UNKNOWN_FILE_EXTENSION
+                | NodeJsErrorCode::ERR_UNSUPPORTED_DIR_IMPORT
+                | NodeJsErrorCode::ERR_UNSUPPORTED_ESM_URL_SCHEME
+                | NodeJsErrorCode::ERR_TYPES_NOT_FOUND => {
+                  Err(PackageTargetResolveErrorKind::PackageResolve(err).into())
+                }
+                NodeJsErrorCode::ERR_MODULE_NOT_FOUND => Err(
+                  PackageTargetResolveErrorKind::NotFound(
+                    PackageTargetNotFoundError {
+                      pkg_json_path: package_json_path.to_path_buf(),
+                      target: export_target.to_string(),
+                      maybe_referrer: maybe_referrer.map(ToOwned::to_owned),
+                      referrer_kind,
+                      mode,
+                    },
+                  )
+                  .into(),
+                ),
+              },
+            };
+
+            return match result {
+              Ok(url) => Ok(url),
+              Err(err) => {
+                // fall back to a builtin module of the same name
+                if self.env.is_builtin_node_module(target) {
+                  Ok(Url::parse(&format!("node:{}", target)).unwrap())
+                } else {
+                  Err(err)
+                }
+              }
+            };
+          }
+        }
+      }
+      return Err(
+        InvalidPackageTargetError {
+          pkg_json_path: package_json_path.to_path_buf(),
+          sub_path: match_.to_string(),
+          target: target.to_string(),
+          is_import: internal,
+          maybe_referrer: maybe_referrer.map(ToOwned::to_owned),
+        }
+        .into(),
+      );
+    }
+    // reject targets containing ".", ".." or "node_modules" segments
+    if invalid_segment_re.is_match(&target[2..]) {
+      return Err(
+        InvalidPackageTargetError {
+          pkg_json_path: package_json_path.to_path_buf(),
+          sub_path: match_.to_string(),
+          target: target.to_string(),
+          is_import: internal,
+          maybe_referrer: maybe_referrer.map(ToOwned::to_owned),
+        }
+        .into(),
+      );
+    }
+    let package_path = package_json_path.parent().unwrap();
+    let resolved_path = package_path.join(target).clean();
+    // the resolved target must stay within the package directory
+    if !resolved_path.starts_with(package_path) {
+      return Err(
+        InvalidPackageTargetError {
+          pkg_json_path: package_json_path.to_path_buf(),
+          sub_path: match_.to_string(),
+          target: target.to_string(),
+          is_import: internal,
+          maybe_referrer: maybe_referrer.map(ToOwned::to_owned),
+        }
+        .into(),
+      );
+    }
+    if subpath.is_empty() {
+      return Ok(to_file_specifier(&resolved_path));
+    }
+    // the substituted subpath must not introduce invalid segments either
+    if invalid_segment_re.is_match(subpath) {
+      let request = if pattern {
+        match_.replace('*', subpath)
+      } else {
+        format!("{match_}{subpath}")
+      };
+      return Err(
+        throw_invalid_subpath(
+          request,
+          package_json_path,
+          internal,
+          maybe_referrer,
+        )
+        .into(),
+      );
+    }
+    if pattern {
+      // substitute the matched subpath into the "*" of the resolved path
+      let resolved_path_str = resolved_path.to_string_lossy();
+      let replaced = pattern_re
+        .replace(&resolved_path_str, |_caps: &regex::Captures| subpath);
+      return Ok(to_file_specifier(&PathBuf::from(replaced.to_string())));
+    }
+    Ok(to_file_specifier(&resolved_path.join(subpath).clean()))
+  }
+
+  /// Wrapper over [`Self::resolve_package_target_inner`]; when types
+  /// resolution fails with `ERR_TYPES_NOT_FOUND`, it retries with only
+  /// the "types" condition to tolerate packages that misplace that
+  /// condition in their "exports"/"imports".
+  #[allow(clippy::too_many_arguments)]
+  fn resolve_package_target(
+    &self,
+    package_json_path: &Path,
+    target: &Value,
+    subpath: &str,
+    package_subpath: &str,
+    maybe_referrer: Option<&Url>,
+    referrer_kind: NodeModuleKind,
+    pattern: bool,
+    internal: bool,
+    conditions: &[&str],
+    mode: NodeResolutionMode,
+  ) -> Result<Option<Url>, PackageTargetResolveError> {
+    let result = self.resolve_package_target_inner(
+      package_json_path,
+      target,
+      subpath,
+      package_subpath,
+      maybe_referrer,
+      referrer_kind,
+      pattern,
+      internal,
+      conditions,
+      mode,
+    );
+    match result {
+      Ok(maybe_resolved) => Ok(maybe_resolved),
+      Err(err) => {
+        if mode.is_types()
+          && err.code() == NodeJsErrorCode::ERR_TYPES_NOT_FOUND
+          && conditions != TYPES_ONLY_CONDITIONS
+        {
+          // try resolving with just "types" conditions for when someone misconfigures
+          // and puts the "types" condition in the wrong place
+          if let Ok(Some(resolved)) = self.resolve_package_target_inner(
+            package_json_path,
+            target,
+            subpath,
+            package_subpath,
+            maybe_referrer,
+            referrer_kind,
+            pattern,
+            internal,
+            TYPES_ONLY_CONDITIONS,
+            mode,
+          ) {
+            return Ok(Some(resolved));
+          }
+        }
+
+        Err(err)
+      }
+    }
+  }
+
+  /// Implementation of Node's `resolvePackageTarget`: dispatches on the
+  /// JSON type of the target — string, array (first usable entry wins),
+  /// conditions object, or null.
+  #[allow(clippy::too_many_arguments)]
+  fn resolve_package_target_inner(
+    &self,
+    package_json_path: &Path,
+    target: &Value,
+    subpath: &str,
+    package_subpath: &str,
+    maybe_referrer: Option<&Url>,
+    referrer_kind: NodeModuleKind,
+    pattern: bool,
+    internal: bool,
+    conditions: &[&str],
+    mode: NodeResolutionMode,
+  ) -> Result<Option<Url>, PackageTargetResolveError> {
+    if let Some(target) = target.as_str() {
+      let url = self.resolve_package_target_string(
+        target,
+        subpath,
+        package_subpath,
+        package_json_path,
+        maybe_referrer,
+        referrer_kind,
+        pattern,
+        internal,
+        conditions,
+        mode,
+      )?;
+      // in types mode, map the resolved file to its declaration file
+      if mode.is_types() && url.scheme() == "file" {
+        let path = url.to_file_path().unwrap();
+        return Ok(Some(self.path_to_declaration_url(
+          &path,
+          maybe_referrer,
+          referrer_kind,
+        )?));
+      } else {
+        return Ok(Some(url));
+      }
+    } else if let Some(target_arr) = target.as_array() {
+      if target_arr.is_empty() {
+        return Ok(None);
+      }
+
+      // try entries in order; only ERR_INVALID_PACKAGE_TARGET allows
+      // falling through to the next entry
+      let mut last_error = None;
+      for target_item in target_arr {
+        let resolved_result = self.resolve_package_target(
+          package_json_path,
+          target_item,
+          subpath,
+          package_subpath,
+          maybe_referrer,
+          referrer_kind,
+          pattern,
+          internal,
+          conditions,
+          mode,
+        );
+
+        match resolved_result {
+          Ok(Some(resolved)) => return Ok(Some(resolved)),
+          Ok(None) => {
+            last_error = None;
+            continue;
+          }
+          Err(e) => {
+            if e.code() == NodeJsErrorCode::ERR_INVALID_PACKAGE_TARGET {
+              last_error = Some(e);
+              continue;
+            } else {
+              return Err(e);
+            }
+          }
+        }
+      }
+      if last_error.is_none() {
+        return Ok(None);
+      }
+      return Err(last_error.unwrap());
+    } else if let Some(target_obj) = target.as_object() {
+      // conditions object: the first matching condition (or "default") wins
+      for key in target_obj.keys() {
+        // TODO(bartlomieju): verify that keys are not numeric
+        // return Err(errors::err_invalid_package_config(
+        //   to_file_path_string(package_json_url),
+        //   Some(base.as_str().to_string()),
+        //   Some("\"exports\" cannot contain numeric property keys.".to_string()),
+        // ));
+
+        if key == "default"
+          || conditions.contains(&key.as_str())
+          || mode.is_types() && key.as_str() == "types"
+        {
+          let condition_target = target_obj.get(key).unwrap();
+
+          let resolved = self.resolve_package_target(
+            package_json_path,
+            condition_target,
+            subpath,
+            package_subpath,
+            maybe_referrer,
+            referrer_kind,
+            pattern,
+            internal,
+            conditions,
+            mode,
+          )?;
+          match resolved {
+            Some(resolved) => return Ok(Some(resolved)),
+            None => {
+              continue;
+            }
+          }
+        }
+      }
+    } else if target.is_null() {
+      return Ok(None);
+    }
+
+    Err(
+      InvalidPackageTargetError {
+        pkg_json_path: package_json_path.to_path_buf(),
+        sub_path: package_subpath.to_string(),
+        target: target.to_string(),
+        is_import: internal,
+        maybe_referrer: maybe_referrer.map(ToOwned::to_owned),
+      }
+      .into(),
+    )
+  }
+
+  /// Implementation of Node's `packageExportsResolve`: resolves a package
+  /// subpath against the package.json "exports" map — exact key first,
+  /// then the best matching "*" pattern key.
+  #[allow(clippy::too_many_arguments)]
+  pub fn package_exports_resolve(
+    &self,
+    package_json_path: &Path,
+    package_subpath: &str,
+    package_exports: &Map<String, Value>,
+    maybe_referrer: Option<&Url>,
+    referrer_kind: NodeModuleKind,
+    conditions: &[&str],
+    mode: NodeResolutionMode,
+  ) -> Result<Url, PackageExportsResolveError> {
+    // exact (non-pattern) key match
+    if package_exports.contains_key(package_subpath)
+      && package_subpath.find('*').is_none()
+      && !package_subpath.ends_with('/')
+    {
+      let target = package_exports.get(package_subpath).unwrap();
+      let resolved = self.resolve_package_target(
+        package_json_path,
+        target,
+        "",
+        package_subpath,
+        maybe_referrer,
+        referrer_kind,
+        false,
+        false,
+        conditions,
+        mode,
+      )?;
+      return match resolved {
+        Some(resolved) => Ok(resolved),
+        None => Err(
+          PackagePathNotExportedError {
+            pkg_json_path: package_json_path.to_path_buf(),
+            subpath: package_subpath.to_string(),
+            maybe_referrer: maybe_referrer.map(ToOwned::to_owned),
+            mode,
+          }
+          .into(),
+        ),
+      };
+    }
+
+    // find the best "*" pattern match
+    let mut best_match = "";
+    let mut best_match_subpath = None;
+    for key in package_exports.keys() {
+      let pattern_index = key.find('*');
+      if let Some(pattern_index) = pattern_index {
+        let key_sub = &key[0..pattern_index];
+        if package_subpath.starts_with(key_sub) {
+          // When this reaches EOL, this can throw at the top of the whole function:
+          //
+          // if (StringPrototypeEndsWith(packageSubpath, '/'))
+          //   throwInvalidSubpath(packageSubpath)
+          //
+          // To match "imports" and the spec.
+          if package_subpath.ends_with('/') {
+            // TODO(bartlomieju):
+            // emitTrailingSlashPatternDeprecation();
+          }
+          let pattern_trailer = &key[pattern_index + 1..];
+          if package_subpath.len() >= key.len()
+            && package_subpath.ends_with(&pattern_trailer)
+            && pattern_key_compare(best_match, key) == 1
+            && key.rfind('*') == Some(pattern_index)
+          {
+            best_match = key;
+            best_match_subpath = Some(
+              package_subpath[pattern_index
+                ..(package_subpath.len() - pattern_trailer.len())]
+                .to_string(),
+            );
+          }
+        }
+      }
+    }
+
+    if !best_match.is_empty() {
+      let target = package_exports.get(best_match).unwrap();
+      let maybe_resolved = self.resolve_package_target(
+        package_json_path,
+        target,
+        &best_match_subpath.unwrap(),
+        best_match,
+        maybe_referrer,
+        referrer_kind,
+        true,
+        false,
+        conditions,
+        mode,
+      )?;
+      if let Some(resolved) = maybe_resolved {
+        return Ok(resolved);
+      } else {
+        return Err(
+          PackagePathNotExportedError {
+            pkg_json_path: package_json_path.to_path_buf(),
+            subpath: package_subpath.to_string(),
+            maybe_referrer: maybe_referrer.map(ToOwned::to_owned),
+            mode,
+          }
+          .into(),
+        );
+      }
+    }
+
+    // no export key matched the requested subpath
+    Err(
+      PackagePathNotExportedError {
+        pkg_json_path: package_json_path.to_path_buf(),
+        subpath: package_subpath.to_string(),
+        maybe_referrer: maybe_referrer.map(ToOwned::to_owned),
+        mode,
+      }
+      .into(),
+    )
+  }
+
  /// Resolves a bare specifier (e.g. `chalk/ansi-styles`) to a file URL
  /// using the Node.js package resolution algorithm.
  ///
  /// First handles self-resolution (a package referencing itself by name
  /// through its own `exports` field), then falls back to locating the
  /// package folder and resolving the subpath within it.
  pub(super) fn package_resolve(
    &self,
    specifier: &str,
    referrer: &Url,
    referrer_kind: NodeModuleKind,
    conditions: &[&str],
    mode: NodeResolutionMode,
  ) -> Result<Url, PackageResolveError> {
    let (package_name, package_subpath, _is_scoped) =
      parse_npm_pkg_name(specifier, referrer)?;

    if let Some(package_config) = self.get_closest_package_json(referrer)? {
      // ResolveSelf: the referrer lives inside the very package being
      // imported and that package declares `exports`, so resolve against
      // its own exports map.
      if package_config.name.as_ref() == Some(&package_name) {
        if let Some(exports) = &package_config.exports {
          return self
            .package_exports_resolve(
              &package_config.path,
              &package_subpath,
              exports,
              Some(referrer),
              referrer_kind,
              conditions,
              mode,
            )
            .map_err(|err| err.into());
        }
      }
    }

    self.resolve_package_subpath_for_package(
      &package_name,
      &package_subpath,
      referrer,
      referrer_kind,
      conditions,
      mode,
    )
  }
+
+ #[allow(clippy::too_many_arguments)]
+ fn resolve_package_subpath_for_package(
+ &self,
+ package_name: &str,
+ package_subpath: &str,
+ referrer: &Url,
+ referrer_kind: NodeModuleKind,
+ conditions: &[&str],
+ mode: NodeResolutionMode,
+ ) -> Result<Url, PackageResolveError> {
+ let result = self.resolve_package_subpath_for_package_inner(
+ package_name,
+ package_subpath,
+ referrer,
+ referrer_kind,
+ conditions,
+ mode,
+ );
+ if mode.is_types() && !matches!(result, Ok(Url { .. })) {
+ // try to resolve with the @types package
+ let package_name = types_package_name(package_name);
+ if let Ok(result) = self.resolve_package_subpath_for_package_inner(
+ &package_name,
+ package_subpath,
+ referrer,
+ referrer_kind,
+ conditions,
+ mode,
+ ) {
+ return Ok(result);
+ }
+ }
+ result
+ }
+
  /// Locates the package's directory on disk via the npm resolver, then
  /// resolves the subpath within that directory.
  #[allow(clippy::too_many_arguments)]
  fn resolve_package_subpath_for_package_inner(
    &self,
    package_name: &str,
    package_subpath: &str,
    referrer: &Url,
    referrer_kind: NodeModuleKind,
    conditions: &[&str],
    mode: NodeResolutionMode,
  ) -> Result<Url, PackageResolveError> {
    let package_dir_path = self
      .npm_resolver
      .resolve_package_folder_from_package(package_name, referrer)?;

    // todo: error with this instead when can't find package
    // Err(errors::err_module_not_found(
    //   &package_json_url
    //     .join(".")
    //     .unwrap()
    //     .to_file_path()
    //     .unwrap()
    //     .display()
    //     .to_string(),
    //   &to_file_path_string(referrer),
    //   "package",
    // ))

    // Package match.
    self
      .resolve_package_dir_subpath(
        &package_dir_path,
        package_subpath,
        Some(referrer),
        referrer_kind,
        conditions,
        mode,
      )
      .map_err(|err| err.into())
  }
+
  /// Resolves a subpath within a concrete package directory: consults the
  /// directory's package.json when present, otherwise falls back to legacy
  /// (pre-`exports`) file/index resolution.
  #[allow(clippy::too_many_arguments)]
  fn resolve_package_dir_subpath(
    &self,
    package_dir_path: &Path,
    package_subpath: &str,
    maybe_referrer: Option<&Url>,
    referrer_kind: NodeModuleKind,
    conditions: &[&str],
    mode: NodeResolutionMode,
  ) -> Result<Url, PackageSubpathResolveError> {
    let package_json_path = package_dir_path.join("package.json");
    match self.load_package_json(&package_json_path)? {
      Some(pkg_json) => self.resolve_package_subpath(
        &pkg_json,
        package_subpath,
        maybe_referrer,
        referrer_kind,
        conditions,
        mode,
      ),
      // No package.json in the directory: use Node's legacy rules.
      None => self
        .resolve_package_subpath_no_pkg_json(
          package_dir_path,
          package_subpath,
          maybe_referrer,
          referrer_kind,
          mode,
        )
        .map_err(|err| {
          PackageSubpathResolveErrorKind::LegacyResolve(err).into()
        }),
    }
  }
+
  /// Resolves a subpath against a loaded package.json: the `exports` field
  /// takes precedence, then legacy `main`/file resolution.
  #[allow(clippy::too_many_arguments)]
  fn resolve_package_subpath(
    &self,
    package_json: &PackageJson,
    package_subpath: &str,
    referrer: Option<&Url>,
    referrer_kind: NodeModuleKind,
    conditions: &[&str],
    mode: NodeResolutionMode,
  ) -> Result<Url, PackageSubpathResolveError> {
    if let Some(exports) = &package_json.exports {
      let result = self.package_exports_resolve(
        &package_json.path,
        package_subpath,
        exports,
        referrer,
        referrer_kind,
        conditions,
        mode,
      );
      match result {
        Ok(found) => return Ok(found),
        Err(exports_err) => {
          // When resolving types for the package root, a miss in `exports`
          // may still be satisfied by the legacy `types`/`main` fields.
          if mode.is_types() && package_subpath == "." {
            return self
              .legacy_main_resolve(package_json, referrer, referrer_kind, mode)
              .map_err(|err| {
                PackageSubpathResolveErrorKind::LegacyResolve(err).into()
              });
          }
          return Err(
            PackageSubpathResolveErrorKind::Exports(exports_err).into(),
          );
        }
      }
    }

    // No `exports` field: "." resolves through the legacy main fields...
    if package_subpath == "." {
      return self
        .legacy_main_resolve(package_json, referrer, referrer_kind, mode)
        .map_err(|err| {
          PackageSubpathResolveErrorKind::LegacyResolve(err).into()
        });
    }

    // ...and any other subpath resolves as a plain file in the package dir.
    self
      .resolve_subpath_exact(
        package_json.path.parent().unwrap(),
        package_subpath,
        referrer,
        referrer_kind,
        mode,
      )
      .map_err(|err| {
        PackageSubpathResolveErrorKind::LegacyResolve(err.into()).into()
      })
  }
+
+ fn resolve_subpath_exact(
+ &self,
+ directory: &Path,
+ package_subpath: &str,
+ referrer: Option<&Url>,
+ referrer_kind: NodeModuleKind,
+ mode: NodeResolutionMode,
+ ) -> Result<Url, TypesNotFoundError> {
+ assert_ne!(package_subpath, ".");
+ let file_path = directory.join(package_subpath);
+ if mode.is_types() {
+ Ok(self.path_to_declaration_url(&file_path, referrer, referrer_kind)?)
+ } else {
+ Ok(to_file_specifier(&file_path))
+ }
+ }
+
+ fn resolve_package_subpath_no_pkg_json(
+ &self,
+ directory: &Path,
+ package_subpath: &str,
+ maybe_referrer: Option<&Url>,
+ referrer_kind: NodeModuleKind,
+ mode: NodeResolutionMode,
+ ) -> Result<Url, LegacyResolveError> {
+ if package_subpath == "." {
+ self.legacy_index_resolve(directory, maybe_referrer, referrer_kind, mode)
+ } else {
+ self
+ .resolve_subpath_exact(
+ directory,
+ package_subpath,
+ maybe_referrer,
+ referrer_kind,
+ mode,
+ )
+ .map_err(|err| err.into())
+ }
+ }
+
+ pub fn get_closest_package_json(
+ &self,
+ url: &Url,
+ ) -> Result<Option<PackageJsonRc>, ClosestPkgJsonError> {
+ let Ok(file_path) = url.to_file_path() else {
+ return Ok(None);
+ };
+ self.get_closest_package_json_from_path(&file_path)
+ }
+
  /// Walks up from `file_path`'s directory looking for the nearest
  /// `package.json`, returning `None` when no ancestor directory has one.
  pub fn get_closest_package_json_from_path(
    &self,
    file_path: &Path,
  ) -> Result<Option<PackageJsonRc>, ClosestPkgJsonError> {
    // we use this for deno compile using byonm because the script paths
    // won't be in virtual file system, but the package.json paths will be
    //
    // Returns the realpath of the first ancestor of `dir_path` that exists
    // on disk, or `None` when none do.
    fn canonicalize_first_ancestor_exists(
      dir_path: &Path,
      env: &dyn NodeResolverEnv,
    ) -> Result<Option<PathBuf>, std::io::Error> {
      for ancestor in dir_path.ancestors() {
        match env.realpath_sync(ancestor) {
          Ok(dir_path) => return Ok(Some(dir_path)),
          Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
            // keep searching
          }
          Err(err) => return Err(err),
        }
      }
      Ok(None)
    }

    let parent_dir = file_path.parent().unwrap();
    let Some(start_dir) = canonicalize_first_ancestor_exists(
      parent_dir, &self.env,
    )
    .map_err(|source| CanonicalizingPkgJsonDirError {
      dir_path: parent_dir.to_path_buf(),
      source,
    })?
    else {
      return Ok(None);
    };
    // Strip any Windows UNC (`\\?\`) prefix the canonicalization may have
    // added so the ancestor walk produces conventional paths.
    let start_dir = strip_unc_prefix(start_dir);
    for current_dir in start_dir.ancestors() {
      let package_json_path = current_dir.join("package.json");
      if let Some(pkg_json) = self.load_package_json(&package_json_path)? {
        return Ok(Some(pkg_json));
      }
    }

    Ok(None)
  }
+
+ pub fn load_package_json(
+ &self,
+ package_json_path: &Path,
+ ) -> Result<Option<PackageJsonRc>, PackageJsonLoadError> {
+ crate::package_json::load_pkg_json(
+ self.env.pkg_json_fs(),
+ package_json_path,
+ )
+ }
+
  /// Resolves a package's entrypoint using the legacy (pre-`exports`)
  /// `main`/`types` fields, trying common extension and index-file guesses
  /// before falling back to `legacy_index_resolve`.
  pub(super) fn legacy_main_resolve(
    &self,
    package_json: &PackageJson,
    maybe_referrer: Option<&Url>,
    referrer_kind: NodeModuleKind,
    mode: NodeResolutionMode,
  ) -> Result<Url, LegacyResolveError> {
    let maybe_main = if mode.is_types() {
      match package_json.types.as_ref() {
        Some(types) => Some(types.as_str()),
        None => {
          // fallback to checking the main entrypoint for
          // a corresponding declaration file
          if let Some(main) = package_json.main(referrer_kind) {
            let main = package_json.path.parent().unwrap().join(main).clean();
            let decl_url_result = self.path_to_declaration_url(
              &main,
              maybe_referrer,
              referrer_kind,
            );
            // don't surface errors, fallback to checking the index now
            if let Ok(url) = decl_url_result {
              return Ok(url);
            }
          }
          None
        }
      }
    } else {
      package_json.main(referrer_kind)
    };

    if let Some(main) = maybe_main {
      // Exact `main` value first.
      let guess = package_json.path.parent().unwrap().join(main).clean();
      if self.env.is_file_sync(&guess) {
        return Ok(to_file_specifier(&guess));
      }

      // todo(dsherret): investigate exactly how node and typescript handles this
      let endings = if mode.is_types() {
        match referrer_kind {
          NodeModuleKind::Cjs => {
            vec![".d.ts", ".d.cts", "/index.d.ts", "/index.d.cts"]
          }
          NodeModuleKind::Esm => vec![
            ".d.ts",
            ".d.mts",
            "/index.d.ts",
            "/index.d.mts",
            ".d.cts",
            "/index.d.cts",
          ],
        }
      } else {
        vec![".js", "/index.js"]
      };
      // Try each ending appended to `main` (e.g. "./lib" -> "./lib.js" or
      // "./lib/index.js").
      for ending in endings {
        let guess = package_json
          .path
          .parent()
          .unwrap()
          .join(format!("{main}{ending}"))
          .clean();
        if self.env.is_file_sync(&guess) {
          // TODO(bartlomieju): emitLegacyIndexDeprecation()
          return Ok(to_file_specifier(&guess));
        }
      }
    }

    // No usable `main`/`types`: look for index files in the package root.
    self.legacy_index_resolve(
      package_json.path.parent().unwrap(),
      maybe_referrer,
      referrer_kind,
      mode,
    )
  }
+
  /// Looks for an index file in `directory` (declaration files when
  /// resolving types, otherwise `index.js`), erroring when none exists.
  fn legacy_index_resolve(
    &self,
    directory: &Path,
    maybe_referrer: Option<&Url>,
    referrer_kind: NodeModuleKind,
    mode: NodeResolutionMode,
  ) -> Result<Url, LegacyResolveError> {
    let index_file_names = if mode.is_types() {
      // todo(dsherret): investigate exactly how typescript does this
      match referrer_kind {
        NodeModuleKind::Cjs => vec!["index.d.ts", "index.d.cts"],
        NodeModuleKind::Esm => vec!["index.d.ts", "index.d.mts", "index.d.cts"],
      }
    } else {
      vec!["index.js"]
    };
    for index_file_name in index_file_names {
      let guess = directory.join(index_file_name).clean();
      if self.env.is_file_sync(&guess) {
        // TODO(bartlomieju): emitLegacyIndexDeprecation()
        return Ok(to_file_specifier(&guess));
      }
    }

    // Report the error against the `index.js` path Node would have used.
    if mode.is_types() {
      Err(
        TypesNotFoundError(Box::new(TypesNotFoundErrorData {
          code_specifier: to_file_specifier(&directory.join("index.js")),
          maybe_referrer: maybe_referrer.cloned(),
        }))
        .into(),
      )
    } else {
      Err(
        ModuleNotFoundError {
          specifier: to_file_specifier(&directory.join("index.js")),
          typ: "module",
          maybe_referrer: maybe_referrer.cloned(),
        }
        .into(),
      )
    }
  }
+}
+
/// Resolves the script path for an entry of a package.json `bin` field.
///
/// When `bin_name` is `None`, falls back to: the sole entry, a set of
/// entries that all share one value, or the entry matching the package
/// name. Error messages list the available `npm:` possibilities (the exact
/// wording is pinned by the tests below).
fn resolve_bin_entry_value<'a>(
  package_json: &'a PackageJson,
  bin_name: Option<&str>,
) -> Result<&'a str, AnyError> {
  let bin = match &package_json.bin {
    Some(bin) => bin,
    None => bail!(
      "'{}' did not have a bin property",
      package_json.path.display(),
    ),
  };
  let bin_entry = match bin {
    Value::String(_) => {
      // A string `bin` matches when no name was requested or the requested
      // name equals the package name with any scope stripped.
      if bin_name.is_some()
        && bin_name
          != package_json
            .name
            .as_deref()
            .map(|name| name.rsplit_once('/').map_or(name, |(_, name)| name))
      {
        None
      } else {
        Some(bin)
      }
    }
    Value::Object(o) => {
      if let Some(bin_name) = bin_name {
        o.get(bin_name)
      } else if o.len() == 1
        || o.len() > 1 && o.values().all(|v| v == o.values().next().unwrap())
      {
        // Unambiguous: one entry, or every entry points at the same script.
        o.values().next()
      } else {
        package_json.name.as_ref().and_then(|n| o.get(n))
      }
    }
    _ => bail!(
      "'{}' did not have a bin property with a string or object value",
      package_json.path.display()
    ),
  };
  let bin_entry = match bin_entry {
    Some(e) => e,
    None => {
      // Build an "npm:name@version/" prefix for the suggestion list.
      let prefix = package_json
        .name
        .as_ref()
        .map(|n| {
          let mut prefix = format!("npm:{}", n);
          if let Some(version) = &package_json.version {
            prefix.push('@');
            prefix.push_str(version);
          }
          prefix.push('/');
          prefix
        })
        .unwrap_or_default();
      let keys = bin
        .as_object()
        .map(|o| {
          o.keys()
            .map(|k| format!(" * {prefix}{k}"))
            .collect::<Vec<_>>()
        })
        .unwrap_or_default();
      bail!(
        "'{}' did not have a bin entry{}{}",
        package_json.path.display(),
        bin_name
          .or(package_json.name.as_deref())
          .map(|name| format!(" for '{}'", name))
          .unwrap_or_default(),
        if keys.is_empty() {
          "".to_string()
        } else {
          format!("\n\nPossibilities:\n{}", keys.join("\n"))
        }
      )
    }
  };
  match bin_entry {
    Value::String(s) => Ok(s),
    _ => bail!(
      "'{}' had a non-string sub property of bin",
      package_json.path.display(),
    ),
  }
}
+
+fn to_file_path(url: &Url) -> PathBuf {
+ url
+ .to_file_path()
+ .unwrap_or_else(|_| panic!("Provided URL was not file:// URL: {url}"))
+}
+
+fn to_file_path_string(url: &Url) -> String {
+ to_file_path(url).display().to_string()
+}
+
+fn should_be_treated_as_relative_or_absolute_path(specifier: &str) -> bool {
+ if specifier.is_empty() {
+ return false;
+ }
+
+ if specifier.starts_with('/') {
+ return true;
+ }
+
+ is_relative_specifier(specifier)
+}
+
// TODO(ry) We very likely have this utility function elsewhere in Deno.
/// Returns true for specifiers Node treats as relative paths:
/// ".", "..", "./...", and "../...".
fn is_relative_specifier(specifier: &str) -> bool {
  // Match on bytes: '.' and '/' are ASCII, so this is equivalent to the
  // previous char-based check but avoids collecting chars into a Vec on
  // every call.
  matches!(
    specifier.as_bytes(),
    [b'.'] | [b'.', b'.'] | [b'.', b'/', ..] | [b'.', b'.', b'/', ..]
  )
}
+
/// Replaces a path's extension like `PathBuf::with_extension`, but is aware
/// of compound declaration extensions (`.d.ts`, `.d.mts`, `.d.cts`) and the
/// common JS/TS extensions, stripping a recognized extension as a unit
/// before appending `ext`. Unrecognized extensions are kept.
fn with_known_extension(path: &Path, ext: &str) -> PathBuf {
  const NON_DECL_EXTS: &[&str] = &[
    "cjs", "js", "json", "jsx", "mjs", "tsx", /* ex. types.d */ "d",
  ];
  const DECL_EXTS: &[&str] = &["cts", "mts", "ts"];

  let Some(file_name) = path.file_name() else {
    return path.to_path_buf();
  };
  let file_name = file_name.to_string_lossy();
  // Extension comparison is case-insensitive; indices into this lowered
  // copy are applied to the original name (byte-equal for ASCII names).
  let lowered = file_name.to_lowercase();
  let strip_from = lowered.rfind('.').and_then(|last_dot| {
    let current_ext = &lowered[last_dot + 1..];
    if DECL_EXTS.contains(&current_ext) {
      // Treat a preceding ".d" segment as part of the extension.
      match lowered[..last_dot].rfind('.') {
        Some(prev_dot) if &lowered[prev_dot + 1..last_dot] == "d" => {
          Some(prev_dot)
        }
        _ => Some(last_dot),
      }
    } else if NON_DECL_EXTS.contains(&current_ext) {
      Some(last_dot)
    } else {
      None
    }
  });

  let stem = match strip_from {
    Some(index) => &file_name[..index],
    None => &file_name,
  };
  path.with_file_name(format!("{stem}.{ext}"))
}
+
+fn to_specifier_display_string(url: &Url) -> String {
+ if let Ok(path) = url.to_file_path() {
+ path.display().to_string()
+ } else {
+ url.to_string()
+ }
+}
+
+fn throw_invalid_subpath(
+ subpath: String,
+ package_json_path: &Path,
+ internal: bool,
+ maybe_referrer: Option<&Url>,
+) -> InvalidModuleSpecifierError {
+ let ie = if internal { "imports" } else { "exports" };
+ let reason = format!(
+ "request is not a valid subpath for the \"{}\" resolution of {}",
+ ie,
+ package_json_path.display(),
+ );
+ InvalidModuleSpecifierError {
+ request: subpath,
+ reason: Cow::Owned(reason),
+ maybe_referrer: maybe_referrer.map(to_specifier_display_string),
+ }
+}
+
+pub fn parse_npm_pkg_name(
+ specifier: &str,
+ referrer: &Url,
+) -> Result<(String, String, bool), InvalidModuleSpecifierError> {
+ let mut separator_index = specifier.find('/');
+ let mut valid_package_name = true;
+ let mut is_scoped = false;
+ if specifier.is_empty() {
+ valid_package_name = false;
+ } else if specifier.starts_with('@') {
+ is_scoped = true;
+ if let Some(index) = separator_index {
+ separator_index = specifier[index + 1..]
+ .find('/')
+ .map(|new_index| index + 1 + new_index);
+ } else {
+ valid_package_name = false;
+ }
+ }
+
+ let package_name = if let Some(index) = separator_index {
+ specifier[0..index].to_string()
+ } else {
+ specifier.to_string()
+ };
+
+ // Package name cannot have leading . and cannot have percent-encoding or separators.
+ for ch in package_name.chars() {
+ if ch == '%' || ch == '\\' {
+ valid_package_name = false;
+ break;
+ }
+ }
+
+ if !valid_package_name {
+ return Err(errors::InvalidModuleSpecifierError {
+ request: specifier.to_string(),
+ reason: Cow::Borrowed("is not a valid package name"),
+ maybe_referrer: Some(to_specifier_display_string(referrer)),
+ });
+ }
+
+ let package_subpath = if let Some(index) = separator_index {
+ format!(".{}", specifier.chars().skip(index).collect::<String>())
+ } else {
+ ".".to_string()
+ };
+
+ Ok((package_name, package_subpath, is_scoped))
+}
+
/// Orders two `exports`/`imports` map keys by specificity, Node-style:
/// returns -1 when `a` wins, 1 when `b` wins, 0 for a tie. A key's "base"
/// is everything up to and including its `*` (or the whole key when it has
/// no pattern); longer bases, literal keys, and longer trailers win.
fn pattern_key_compare(a: &str, b: &str) -> i32 {
  let a_star = a.find('*');
  let b_star = b.find('*');
  let a_base_len = a_star.map_or(a.len(), |i| i + 1);
  let b_base_len = b_star.map_or(b.len(), |i| i + 1);

  // Longer base prefix is more specific.
  if a_base_len != b_base_len {
    return if a_base_len > b_base_len { -1 } else { 1 };
  }

  // A key without a pattern yields to the comparison's second operand.
  if a_star.is_none() {
    return 1;
  }
  if b_star.is_none() {
    return -1;
  }

  // Same base length and both patterned: longer overall key (i.e. longer
  // trailer after the '*') wins.
  if a.len() != b.len() {
    return if a.len() > b.len() { -1 } else { 1 };
  }

  0
}
+
/// Gets the corresponding @types package for the provided package name.
/// A scoped package's slash becomes a double underscore, per the
/// DefinitelyTyped convention:
/// https://github.com/DefinitelyTyped/DefinitelyTyped/tree/15f1ece08f7b498f4b9a2147c2a46e94416ca777#what-about-scoped-packages
fn types_package_name(package_name: &str) -> String {
  debug_assert!(!package_name.starts_with("@types/"));
  let mangled = package_name.replace('/', "__");
  format!("@types/{mangled}")
}
+
+/// Ex. returns `fs` for `node:fs`
+fn get_module_name_from_builtin_node_module_specifier(
+ specifier: &Url,
+) -> Option<&str> {
+ if specifier.scheme() != "node" {
+ return None;
+ }
+
+ let (_, specifier) = specifier.as_str().split_once(':')?;
+ Some(specifier)
+}
+
#[cfg(test)]
mod tests {
  use serde_json::json;

  use super::*;

  // Builds a PackageJson fixture rooted at /package.json from a JSON value.
  fn build_package_json(json: Value) -> PackageJson {
    PackageJson::load_from_value(PathBuf::from("/package.json"), json)
  }

  // Note: these assertions pin the exact error-message formatting of
  // `resolve_bin_entry_value`, including the "Possibilities" listing.
  #[test]
  fn test_resolve_bin_entry_value() {
    // should resolve the specified value
    let pkg_json = build_package_json(json!({
      "name": "pkg",
      "version": "1.1.1",
      "bin": {
        "bin1": "./value1",
        "bin2": "./value2",
        "pkg": "./value3",
      }
    }));
    assert_eq!(
      resolve_bin_entry_value(&pkg_json, Some("bin1")).unwrap(),
      "./value1"
    );

    // should resolve the value with the same name when not specified
    assert_eq!(
      resolve_bin_entry_value(&pkg_json, None).unwrap(),
      "./value3"
    );

    // should not resolve when specified value does not exist
    assert_eq!(
      resolve_bin_entry_value(&pkg_json, Some("other"),)
        .err()
        .unwrap()
        .to_string(),
      concat!(
        "'/package.json' did not have a bin entry for 'other'\n",
        "\n",
        "Possibilities:\n",
        " * npm:pkg@1.1.1/bin1\n",
        " * npm:pkg@1.1.1/bin2\n",
        " * npm:pkg@1.1.1/pkg"
      )
    );

    // should not resolve when default value can't be determined
    let pkg_json = build_package_json(json!({
      "name": "pkg",
      "version": "1.1.1",
      "bin": {
        "bin": "./value1",
        "bin2": "./value2",
      }
    }));
    assert_eq!(
      resolve_bin_entry_value(&pkg_json, None)
        .err()
        .unwrap()
        .to_string(),
      concat!(
        "'/package.json' did not have a bin entry for 'pkg'\n",
        "\n",
        "Possibilities:\n",
        " * npm:pkg@1.1.1/bin\n",
        " * npm:pkg@1.1.1/bin2",
      )
    );

    // should resolve since all the values are the same
    let pkg_json = build_package_json(json!({
      "name": "pkg",
      "version": "1.2.3",
      "bin": {
        "bin1": "./value",
        "bin2": "./value",
      }
    }));
    assert_eq!(
      resolve_bin_entry_value(&pkg_json, None,).unwrap(),
      "./value"
    );

    // should not resolve when specified and is a string
    let pkg_json = build_package_json(json!({
      "name": "pkg",
      "version": "1.2.3",
      "bin": "./value",
    }));
    assert_eq!(
      resolve_bin_entry_value(&pkg_json, Some("path"),)
        .err()
        .unwrap()
        .to_string(),
      "'/package.json' did not have a bin entry for 'path'"
    );

    // no version in the package.json
    let pkg_json = build_package_json(json!({
      "name": "pkg",
      "bin": {
        "bin1": "./value1",
        "bin2": "./value2",
      }
    }));
    assert_eq!(
      resolve_bin_entry_value(&pkg_json, None)
        .err()
        .unwrap()
        .to_string(),
      concat!(
        "'/package.json' did not have a bin entry for 'pkg'\n",
        "\n",
        "Possibilities:\n",
        " * npm:pkg/bin1\n",
        " * npm:pkg/bin2",
      )
    );

    // no name or version in the package.json
    let pkg_json = build_package_json(json!({
      "bin": {
        "bin1": "./value1",
        "bin2": "./value2",
      }
    }));
    assert_eq!(
      resolve_bin_entry_value(&pkg_json, None)
        .err()
        .unwrap()
        .to_string(),
      concat!(
        "'/package.json' did not have a bin entry\n",
        "\n",
        "Possibilities:\n",
        " * bin1\n",
        " * bin2",
      )
    );
  }

  #[test]
  fn test_parse_package_name() {
    let dummy_referrer = Url::parse("http://example.com").unwrap();

    assert_eq!(
      parse_npm_pkg_name("fetch-blob", &dummy_referrer).unwrap(),
      ("fetch-blob".to_string(), ".".to_string(), false)
    );
    assert_eq!(
      parse_npm_pkg_name("@vue/plugin-vue", &dummy_referrer).unwrap(),
      ("@vue/plugin-vue".to_string(), ".".to_string(), true)
    );
    assert_eq!(
      parse_npm_pkg_name("@astrojs/prism/dist/highlighter", &dummy_referrer)
        .unwrap(),
      (
        "@astrojs/prism".to_string(),
        "./dist/highlighter".to_string(),
        true
      )
    );
  }

  #[test]
  fn test_with_known_extension() {
    // (input file name, new extension, expected result)
    let cases = &[
      ("test", "d.ts", "test.d.ts"),
      ("test.d.ts", "ts", "test.ts"),
      ("test.worker", "d.ts", "test.worker.d.ts"),
      ("test.d.mts", "js", "test.js"),
    ];
    for (path, ext, expected) in cases {
      let actual = with_known_extension(&PathBuf::from(path), ext);
      assert_eq!(actual.to_string_lossy(), *expected);
    }
  }

  #[test]
  fn test_types_package_name() {
    assert_eq!(types_package_name("name"), "@types/name");
    assert_eq!(
      types_package_name("@scoped/package"),
      "@types/@scoped__package"
    );
  }
}
diff --git a/resolvers/node/sync.rs b/resolvers/node/sync.rs
new file mode 100644
index 000000000..3c4729aa2
--- /dev/null
+++ b/resolvers/node/sync.rs
@@ -0,0 +1,23 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
pub use inner::*;

#[cfg(feature = "sync")]
mod inner {
  #![allow(clippy::disallowed_types)]

  // With the "sync" feature, the "maybe" aliases are the real thread-safe
  // types/traits so resolver state can be shared across threads.
  pub use std::sync::Arc as MaybeArc;

  pub use core::marker::Send as MaybeSend;
  pub use core::marker::Sync as MaybeSync;
}

#[cfg(not(feature = "sync"))]
mod inner {
  // Without the "sync" feature, use the cheaper single-threaded `Rc` and
  // no-op marker traits implemented for every type, so `MaybeSend`/
  // `MaybeSync` bounds impose no real constraint.
  pub use std::rc::Rc as MaybeArc;

  pub trait MaybeSync {}
  impl<T> MaybeSync for T where T: ?Sized {}
  pub trait MaybeSend {}
  impl<T> MaybeSend for T where T: ?Sized {}
}