summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--Cargo.lock8
-rw-r--r--cli/Cargo.toml4
-rw-r--r--cli/fs_util.rs79
-rw-r--r--cli/lockfile.rs42
-rw-r--r--cli/lsp/language_server.rs6
-rw-r--r--cli/npm/cache.rs336
-rw-r--r--cli/npm/mod.rs1
-rw-r--r--cli/npm/registry.rs274
-rw-r--r--cli/npm/resolution.rs1051
-rw-r--r--cli/npm/resolution/graph.rs2033
-rw-r--r--cli/npm/resolution/mod.rs676
-rw-r--r--cli/npm/resolution/snapshot.rs470
-rw-r--r--cli/npm/resolvers/common.rs8
-rw-r--r--cli/npm/resolvers/global.rs43
-rw-r--r--cli/npm/resolvers/local.rs174
-rw-r--r--cli/npm/resolvers/mod.rs18
-rw-r--r--cli/npm/semver/errors.rs38
-rw-r--r--cli/npm/semver/mod.rs2
-rw-r--r--cli/npm/semver/specifier.rs1
-rw-r--r--cli/npm/tarball.rs82
-rw-r--r--cli/proc_state.rs6
-rw-r--r--cli/tests/integration/npm_tests.rs178
-rw-r--r--cli/tests/testdata/npm/peer_deps_with_copied_folders/main.out10
-rw-r--r--cli/tests/testdata/npm/peer_deps_with_copied_folders/main.ts5
-rw-r--r--cli/tests/testdata/npm/peer_deps_with_copied_folders/main_info.out14
-rw-r--r--cli/tests/testdata/npm/peer_deps_with_copied_folders/main_info_json.out95
-rw-r--r--cli/tests/testdata/npm/registry/@denotest/peer-dep-test-child/1.0.0/index.js1
-rw-r--r--cli/tests/testdata/npm/registry/@denotest/peer-dep-test-child/1.0.0/package.json8
-rw-r--r--cli/tests/testdata/npm/registry/@denotest/peer-dep-test-child/2.0.0/index.js1
-rw-r--r--cli/tests/testdata/npm/registry/@denotest/peer-dep-test-child/2.0.0/package.json8
-rw-r--r--cli/tests/testdata/npm/registry/@denotest/peer-dep-test-grandchild/1.0.0/dist/index.js1
-rw-r--r--cli/tests/testdata/npm/registry/@denotest/peer-dep-test-grandchild/1.0.0/index.js1
-rw-r--r--cli/tests/testdata/npm/registry/@denotest/peer-dep-test-grandchild/1.0.0/package.json7
-rw-r--r--cli/tests/testdata/npm/registry/@denotest/peer-dep-test-peer/1.0.0/index.js1
-rw-r--r--cli/tests/testdata/npm/registry/@denotest/peer-dep-test-peer/1.0.0/package.json4
-rw-r--r--cli/tests/testdata/npm/registry/@denotest/peer-dep-test-peer/2.0.0/index.js1
-rw-r--r--cli/tests/testdata/npm/registry/@denotest/peer-dep-test-peer/2.0.0/package.json4
-rw-r--r--cli/tools/info.rs20
38 files changed, 4331 insertions, 1380 deletions
diff --git a/Cargo.lock b/Cargo.lock
index b6d2fb629..b6bac7983 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1227,9 +1227,9 @@ dependencies = [
[[package]]
name = "deno_task_shell"
-version = "0.7.0"
+version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a275d3f78e828b4adddf20a472d9ac1927ac311aac48dca869bb8653d5a4a0b9"
+checksum = "e8ad1e1002ecf8bafcb9b968bf19856ba4fe0e6c0c73b3404565bb29b15aae2c"
dependencies = [
"anyhow",
"futures",
@@ -2803,9 +2803,9 @@ dependencies = [
[[package]]
name = "monch"
-version = "0.2.1"
+version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c5e2e282addadb529bb31700f7d184797382fa2eb18384986aad78d117eaf0c4"
+checksum = "f13de1c3edc9a5b9dc3a1029f56e9ab3eba34640010aff4fc01044c42ef67afa"
[[package]]
name = "naga"
diff --git a/cli/Cargo.toml b/cli/Cargo.toml
index 36fe74b87..e97a14644 100644
--- a/cli/Cargo.toml
+++ b/cli/Cargo.toml
@@ -57,7 +57,7 @@ deno_emit = "0.10.0"
deno_graph = "0.37.1"
deno_lint = { version = "0.34.0", features = ["docs"] }
deno_runtime = { version = "0.83.0", path = "../runtime" }
-deno_task_shell = "0.7.0"
+deno_task_shell = "0.7.2"
napi_sym = { path = "./napi_sym", version = "0.5.0" }
atty = "=0.2.14"
@@ -86,7 +86,7 @@ libc = "=0.2.126"
log = { version = "=0.4.17", features = ["serde"] }
lsp-types = "=0.93.2" # used by tower-lsp and "proposed" feature is unstable in patch releases
mitata = "=0.0.7"
-monch = "=0.2.1"
+monch = "=0.4.0"
notify = "=5.0.0"
once_cell = "=1.14.0"
os_pipe = "=1.0.1"
diff --git a/cli/fs_util.rs b/cli/fs_util.rs
index fa1535469..843f5e0cf 100644
--- a/cli/fs_util.rs
+++ b/cli/fs_util.rs
@@ -15,6 +15,7 @@ use std::io::ErrorKind;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
+use std::time::Duration;
use walkdir::WalkDir;
pub fn atomic_write_file<T: AsRef<[u8]>>(
@@ -357,6 +358,84 @@ pub fn copy_dir_recursive(from: &Path, to: &Path) -> Result<(), AnyError> {
Ok(())
}
+/// Hardlinks the files in one directory to another directory.
+///
+/// Note: Does not handle symlinks.
+pub fn hard_link_dir_recursive(from: &Path, to: &Path) -> Result<(), AnyError> {
+ std::fs::create_dir_all(&to)
+ .with_context(|| format!("Creating {}", to.display()))?;
+ let read_dir = std::fs::read_dir(&from)
+ .with_context(|| format!("Reading {}", from.display()))?;
+
+ for entry in read_dir {
+ let entry = entry?;
+ let file_type = entry.file_type()?;
+ let new_from = from.join(entry.file_name());
+ let new_to = to.join(entry.file_name());
+
+ if file_type.is_dir() {
+ hard_link_dir_recursive(&new_from, &new_to).with_context(|| {
+ format!("Dir {} to {}", new_from.display(), new_to.display())
+ })?;
+ } else if file_type.is_file() {
+ // note: chance for race conditions here between attempting to create,
+ // then removing, then attempting to create. There doesn't seem to be
+ // a way to hard link with overwriting in Rust, but maybe there is some
+ // way with platform specific code. The workaround here is to handle
+ // scenarios where something else might create or remove files.
+ if let Err(err) = std::fs::hard_link(&new_from, &new_to) {
+ if err.kind() == ErrorKind::AlreadyExists {
+ if let Err(err) = std::fs::remove_file(&new_to) {
+ if err.kind() == ErrorKind::NotFound {
+ // Assume another process/thread created this hard link to the file we are wanting
+ // to remove then sleep a little bit to let the other process/thread move ahead
+ // faster to reduce contention.
+ std::thread::sleep(Duration::from_millis(10));
+ } else {
+ return Err(err).with_context(|| {
+ format!(
+ "Removing file to hard link {} to {}",
+ new_from.display(),
+ new_to.display()
+ )
+ });
+ }
+ }
+
+ // Always attempt to recreate the hardlink. In contention scenarios, the other process
+ // might have been killed or exited after removing the file, but before creating the hardlink
+ if let Err(err) = std::fs::hard_link(&new_from, &new_to) {
+ // Assume another process/thread created this hard link to the file we are wanting
+ // to now create then sleep a little bit to let the other process/thread move ahead
+ // faster to reduce contention.
+ if err.kind() == ErrorKind::AlreadyExists {
+ std::thread::sleep(Duration::from_millis(10));
+ } else {
+ return Err(err).with_context(|| {
+ format!(
+ "Hard linking {} to {}",
+ new_from.display(),
+ new_to.display()
+ )
+ });
+ }
+ }
+ } else {
+ return Err(err).with_context(|| {
+ format!(
+ "Hard linking {} to {}",
+ new_from.display(),
+ new_to.display()
+ )
+ });
+ }
+ }
+ }
+ }
+
+ Ok(())
+}
+
pub fn symlink_dir(oldpath: &Path, newpath: &Path) -> Result<(), AnyError> {
let err_mapper = |err: Error| {
Error::new(
diff --git a/cli/lockfile.rs b/cli/lockfile.rs
index d9d0c6d85..aa60b5789 100644
--- a/cli/lockfile.rs
+++ b/cli/lockfile.rs
@@ -16,6 +16,7 @@ use std::rc::Rc;
use std::sync::Arc;
use crate::args::ConfigFile;
+use crate::npm::NpmPackageId;
use crate::npm::NpmPackageReq;
use crate::npm::NpmResolutionPackage;
use crate::tools::fmt::format_json;
@@ -40,7 +41,7 @@ pub struct NpmPackageInfo {
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub struct NpmContent {
- /// Mapping between requests for npm packages and resolved specifiers, eg.
+ /// Mapping between requests for npm packages and resolved packages, eg.
/// {
/// "chalk": "chalk@5.0.0"
/// "react@17": "react@17.0.1"
@@ -269,7 +270,7 @@ impl Lockfile {
&mut self,
package: &NpmResolutionPackage,
) -> Result<(), LockfileError> {
- let specifier = package.id.serialize_for_lock_file();
+ let specifier = package.id.as_serialized();
if let Some(package_info) = self.content.npm.packages.get(&specifier) {
let integrity = package
.dist
@@ -286,7 +287,7 @@ This could be caused by:
* the source itself may be corrupt
Use \"--lock-write\" flag to regenerate the lockfile at \"{}\".",
- package.id, self.filename.display()
+ package.id.display(), self.filename.display()
)));
}
} else {
@@ -300,7 +301,7 @@ Use \"--lock-write\" flag to regenerate the lockfile at \"{}\".",
let dependencies = package
.dependencies
.iter()
- .map(|(name, id)| (name.to_string(), id.serialize_for_lock_file()))
+ .map(|(name, id)| (name.to_string(), id.as_serialized()))
.collect::<BTreeMap<String, String>>();
let integrity = package
@@ -309,7 +310,7 @@ Use \"--lock-write\" flag to regenerate the lockfile at \"{}\".",
.as_ref()
.unwrap_or(&package.dist.shasum);
self.content.npm.packages.insert(
- package.id.serialize_for_lock_file(),
+ package.id.as_serialized(),
NpmPackageInfo {
integrity: integrity.to_string(),
dependencies,
@@ -321,12 +322,13 @@ Use \"--lock-write\" flag to regenerate the lockfile at \"{}\".",
pub fn insert_npm_specifier(
&mut self,
package_req: &NpmPackageReq,
- version: String,
+ package_id: &NpmPackageId,
) {
- self.content.npm.specifiers.insert(
- package_req.to_string(),
- format!("{}@{}", package_req.name, version),
- );
+ self
+ .content
+ .npm
+ .specifiers
+ .insert(package_req.to_string(), package_id.as_serialized());
self.has_content_changed = true;
}
}
@@ -559,10 +561,12 @@ mod tests {
id: NpmPackageId {
name: "nanoid".to_string(),
version: NpmVersion::parse("3.3.4").unwrap(),
+ peer_dependencies: Vec::new(),
},
+ copy_index: 0,
dist: NpmPackageVersionDistInfo {
- tarball: "foo".to_string(),
- shasum: "foo".to_string(),
+ tarball: "foo".to_string(),
+ shasum: "foo".to_string(),
integrity: Some("sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==".to_string())
},
dependencies: HashMap::new(),
@@ -574,10 +578,12 @@ mod tests {
id: NpmPackageId {
name: "picocolors".to_string(),
version: NpmVersion::parse("1.0.0").unwrap(),
+ peer_dependencies: Vec::new(),
},
+ copy_index: 0,
dist: NpmPackageVersionDistInfo {
- tarball: "foo".to_string(),
- shasum: "foo".to_string(),
+ tarball: "foo".to_string(),
+ shasum: "foo".to_string(),
integrity: Some("sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==".to_string())
},
dependencies: HashMap::new(),
@@ -590,10 +596,12 @@ mod tests {
id: NpmPackageId {
name: "source-map-js".to_string(),
version: NpmVersion::parse("1.0.2").unwrap(),
+ peer_dependencies: Vec::new(),
},
+ copy_index: 0,
dist: NpmPackageVersionDistInfo {
- tarball: "foo".to_string(),
- shasum: "foo".to_string(),
+ tarball: "foo".to_string(),
+ shasum: "foo".to_string(),
integrity: Some("sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==".to_string())
},
dependencies: HashMap::new(),
@@ -606,7 +614,9 @@ mod tests {
id: NpmPackageId {
name: "source-map-js".to_string(),
version: NpmVersion::parse("1.0.2").unwrap(),
+ peer_dependencies: Vec::new(),
},
+ copy_index: 0,
dist: NpmPackageVersionDistInfo {
tarball: "foo".to_string(),
shasum: "foo".to_string(),
diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs
index 3a0906636..aa4e98b1d 100644
--- a/cli/lsp/language_server.rs
+++ b/cli/lsp/language_server.rs
@@ -71,7 +71,7 @@ use crate::fs_util;
use crate::graph_util::graph_valid;
use crate::npm::NpmCache;
use crate::npm::NpmPackageResolver;
-use crate::npm::NpmRegistryApi;
+use crate::npm::RealNpmRegistryApi;
use crate::proc_state::import_map_from_text;
use crate::proc_state::ProcState;
use crate::progress_bar::ProgressBar;
@@ -258,7 +258,7 @@ impl Inner {
ts_server.clone(),
);
let assets = Assets::new(ts_server.clone());
- let registry_url = NpmRegistryApi::default_url();
+ let registry_url = RealNpmRegistryApi::default_url();
// Use an "only" cache setting in order to make the
// user do an explicit "cache" command and prevent
// the cache from being filled with lots of packages while
@@ -270,7 +270,7 @@ impl Inner {
cache_setting.clone(),
progress_bar.clone(),
);
- let api = NpmRegistryApi::new(
+ let api = RealNpmRegistryApi::new(
registry_url,
npm_cache.clone(),
cache_setting,
diff --git a/cli/npm/cache.rs b/cli/npm/cache.rs
index 6a0d72b3a..2d983fa06 100644
--- a/cli/npm/cache.rs
+++ b/cli/npm/cache.rs
@@ -21,7 +21,6 @@ use crate::progress_bar::ProgressBar;
use super::registry::NpmPackageVersionDistInfo;
use super::semver::NpmVersion;
use super::tarball::verify_and_extract_tarball;
-use super::NpmPackageId;
/// For some of the tests, we want downloading of packages
/// to be deterministic so that the output is always the same
@@ -29,7 +28,107 @@ pub fn should_sync_download() -> bool {
std::env::var("DENO_UNSTABLE_NPM_SYNC_DOWNLOAD") == Ok("1".to_string())
}
-pub const NPM_PACKAGE_SYNC_LOCK_FILENAME: &str = ".deno_sync_lock";
+const NPM_PACKAGE_SYNC_LOCK_FILENAME: &str = ".deno_sync_lock";
+
+pub fn with_folder_sync_lock(
+ package: (&str, &NpmVersion),
+ output_folder: &Path,
+ action: impl FnOnce() -> Result<(), AnyError>,
+) -> Result<(), AnyError> {
+ fn inner(
+ output_folder: &Path,
+ action: impl FnOnce() -> Result<(), AnyError>,
+ ) -> Result<(), AnyError> {
+ fs::create_dir_all(output_folder).with_context(|| {
+ format!("Error creating '{}'.", output_folder.display())
+ })?;
+
+ // This sync lock file is a way to ensure that partially created
+ // npm package directories aren't considered valid. This could maybe
+ // be a bit smarter in the future to not bother extracting here
+ // if another process has taken the lock in the past X seconds and
+ // wait for the other process to finish (it could try to create the
+ // file with `create_new(true)` then if it exists, check the metadata
+ // then wait until the other process finishes with a timeout), but
+ // for now this is good enough.
+ let sync_lock_path = output_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME);
+ match fs::OpenOptions::new()
+ .write(true)
+ .create(true)
+ .open(&sync_lock_path)
+ {
+ Ok(_) => {
+ action()?;
+ // extraction succeeded, so only now delete this file
+ let _ignore = std::fs::remove_file(&sync_lock_path);
+ Ok(())
+ }
+ Err(err) => {
+ bail!(
+ concat!(
+ "Error creating package sync lock file at '{}'. ",
+ "Maybe try manually deleting this folder.\n\n{:#}",
+ ),
+ output_folder.display(),
+ err
+ );
+ }
+ }
+ }
+
+ match inner(output_folder, action) {
+ Ok(()) => Ok(()),
+ Err(err) => {
+ if let Err(remove_err) = fs::remove_dir_all(&output_folder) {
+ if remove_err.kind() != std::io::ErrorKind::NotFound {
+ bail!(
+ concat!(
+ "Failed setting up package cache directory for {}@{}, then ",
+ "failed cleaning it up.\n\nOriginal error:\n\n{}\n\n",
+ "Remove error:\n\n{}\n\nPlease manually ",
+ "delete this folder or you will run into issues using this ",
+ "package in the future:\n\n{}"
+ ),
+ package.0,
+ package.1,
+ err,
+ remove_err,
+ output_folder.display(),
+ );
+ }
+ }
+ Err(err)
+ }
+ }
+}
+
+pub struct NpmPackageCacheFolderId {
+ pub name: String,
+ pub version: NpmVersion,
+ /// Peer dependency resolution may require us to have duplicate copies
+ /// of the same package.
+ pub copy_index: usize,
+}
+
+impl NpmPackageCacheFolderId {
+ pub fn with_no_count(&self) -> Self {
+ Self {
+ name: self.name.clone(),
+ version: self.version.clone(),
+ copy_index: 0,
+ }
+ }
+}
+
+impl std::fmt::Display for NpmPackageCacheFolderId {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{}@{}", self.name, self.version)?;
+ if self.copy_index > 0 {
+ write!(f, "_{}", self.copy_index)?;
+ }
+ Ok(())
+ }
+}
#[derive(Clone, Debug)]
pub struct ReadonlyNpmCache {
@@ -78,32 +177,49 @@ impl ReadonlyNpmCache {
Self::new(dir.root.join("npm"))
}
- pub fn package_folder(
+ pub fn package_folder_for_id(
&self,
- id: &NpmPackageId,
+ id: &NpmPackageCacheFolderId,
+ registry_url: &Url,
+ ) -> PathBuf {
+ if id.copy_index == 0 {
+ self.package_folder_for_name_and_version(
+ &id.name,
+ &id.version,
+ registry_url,
+ )
+ } else {
+ self
+ .package_name_folder(&id.name, registry_url)
+ .join(format!("{}_{}", id.version, id.copy_index))
+ }
+ }
+
+ pub fn package_folder_for_name_and_version(
+ &self,
+ name: &str,
+ version: &NpmVersion,
registry_url: &Url,
) -> PathBuf {
self
- .package_name_folder(&id.name, registry_url)
- .join(id.version.to_string())
+ .package_name_folder(name, registry_url)
+ .join(version.to_string())
}
pub fn package_name_folder(&self, name: &str, registry_url: &Url) -> PathBuf {
let mut dir = self.registry_folder(registry_url);
- let mut parts = name.split('/').map(Cow::Borrowed).collect::<Vec<_>>();
- // package names were not always enforced to be lowercase and so we need
- // to ensure package names, which are therefore case sensitive, are stored
- // on a case insensitive file system to not have conflicts. We do this by
- // first putting it in a "_" folder then hashing the package name.
+ let parts = name.split('/').map(Cow::Borrowed).collect::<Vec<_>>();
if name.to_lowercase() != name {
- let last_part = parts.last_mut().unwrap();
- *last_part = Cow::Owned(crate::checksum::gen(&[last_part.as_bytes()]));
- // We can't just use the hash as part of the directory because it may
- // have a collision with an actual package name in case someone wanted
- // to name an actual package that. To get around this, put all these
- // in a folder called "_" since npm packages can't start with an underscore
- // and there is no package currently called just "_".
- dir = dir.join("_");
+ // Lowercase package names introduce complications.
+ // When implementing this ensure:
+ // 1. It works on case insensitive filesystems. ex. JSON should not
+ // conflict with json... yes you read that right, those are separate
+ // packages.
+ // 2. We can figure out the package id from the path. This is used
+ // in resolve_package_id_from_specifier
+ // Probably use a hash of the package name at `npm/-/<hash>` then create
+ // a mapping for these package names.
+ todo!("deno currently doesn't support npm package names that are not all lowercase");
}
// ensure backslashes are used on windows
for part in parts {
@@ -118,23 +234,24 @@ impl ReadonlyNpmCache {
.join(fs_util::root_url_to_safe_local_dirname(registry_url))
}
- pub fn resolve_package_id_from_specifier(
+ pub fn resolve_package_folder_id_from_specifier(
&self,
specifier: &ModuleSpecifier,
registry_url: &Url,
- ) -> Result<NpmPackageId, AnyError> {
- match self.maybe_resolve_package_id_from_specifier(specifier, registry_url)
+ ) -> Result<NpmPackageCacheFolderId, AnyError> {
+ match self
+ .maybe_resolve_package_folder_id_from_specifier(specifier, registry_url)
{
Some(id) => Ok(id),
None => bail!("could not find npm package for '{}'", specifier),
}
}
- fn maybe_resolve_package_id_from_specifier(
+ fn maybe_resolve_package_folder_id_from_specifier(
&self,
specifier: &ModuleSpecifier,
registry_url: &Url,
- ) -> Option<NpmPackageId> {
+ ) -> Option<NpmPackageCacheFolderId> {
let registry_root_dir = self
.root_dir_url
.join(&format!(
@@ -153,6 +270,7 @@ impl ReadonlyNpmCache {
// examples:
// * chalk/5.0.1/
// * @types/chalk/5.0.1/
+ // * some-package/5.0.1_1/ -- where the `_1` (/_\d+/) is a copy of the folder for peer deps
let is_scoped_package = relative_url.starts_with('@');
let mut parts = relative_url
.split('/')
@@ -163,11 +281,19 @@ impl ReadonlyNpmCache {
if parts.len() < 2 {
return None;
}
- let version = parts.pop().unwrap();
+ let version_part = parts.pop().unwrap();
let name = parts.join("/");
- NpmVersion::parse(version)
- .ok()
- .map(|version| NpmPackageId { name, version })
+ let (version, copy_index) =
+ if let Some((version, copy_count)) = version_part.split_once('_') {
+ (version, copy_count.parse::<usize>().ok()?)
+ } else {
+ (version_part, 0)
+ };
+ Some(NpmPackageCacheFolderId {
+ name,
+ version: NpmVersion::parse(version).ok()?,
+ copy_index,
+ })
}
pub fn get_cache_location(&self) -> PathBuf {
@@ -202,28 +328,38 @@ impl NpmCache {
pub async fn ensure_package(
&self,
- id: &NpmPackageId,
+ package: (&str, &NpmVersion),
dist: &NpmPackageVersionDistInfo,
registry_url: &Url,
) -> Result<(), AnyError> {
self
- .ensure_package_inner(id, dist, registry_url)
+ .ensure_package_inner(package, dist, registry_url)
.await
- .with_context(|| format!("Failed caching npm package '{}'.", id))
+ .with_context(|| {
+ format!("Failed caching npm package '{}@{}'.", package.0, package.1)
+ })
+ }
+
+ pub fn should_use_cache_for_npm_package(&self, package_name: &str) -> bool {
+ self.cache_setting.should_use_for_npm_package(package_name)
}
async fn ensure_package_inner(
&self,
- id: &NpmPackageId,
+ package: (&str, &NpmVersion),
dist: &NpmPackageVersionDistInfo,
registry_url: &Url,
) -> Result<(), AnyError> {
- let package_folder = self.readonly.package_folder(id, registry_url);
+ let package_folder = self.readonly.package_folder_for_name_and_version(
+ package.0,
+ package.1,
+ registry_url,
+ );
if package_folder.exists()
// if this file exists, then the package didn't successfully extract
// the first time, or another process is currently extracting the zip file
&& !package_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME).exists()
- && self.cache_setting.should_use_for_npm_package(&id.name)
+ && self.should_use_cache_for_npm_package(package.0)
{
return Ok(());
} else if self.cache_setting == CacheSetting::Only {
@@ -231,7 +367,7 @@ impl NpmCache {
"NotCached",
format!(
"An npm specifier not found in cache: \"{}\", --cached-only is specified.",
- id.name
+ &package.0
)
)
);
@@ -256,38 +392,66 @@ impl NpmCache {
} else {
let bytes = response.bytes().await?;
- match verify_and_extract_tarball(id, &bytes, dist, &package_folder) {
- Ok(()) => Ok(()),
- Err(err) => {
- if let Err(remove_err) = fs::remove_dir_all(&package_folder) {
- if remove_err.kind() != std::io::ErrorKind::NotFound {
- bail!(
- concat!(
- "Failed verifying and extracting npm tarball for {}, then ",
- "failed cleaning up package cache folder.\n\nOriginal ",
- "error:\n\n{}\n\nRemove error:\n\n{}\n\nPlease manually ",
- "delete this folder or you will run into issues using this ",
- "package in the future:\n\n{}"
- ),
- id,
- err,
- remove_err,
- package_folder.display(),
- );
- }
- }
- Err(err)
- }
- }
+ verify_and_extract_tarball(package, &bytes, dist, &package_folder)
}
}
- pub fn package_folder(
+ /// Ensures a copy of the package exists in the global cache.
+ ///
+ /// This assumes that the original package folder being hard linked
+ /// from exists before this is called.
+ pub fn ensure_copy_package(
&self,
- id: &NpmPackageId,
+ id: &NpmPackageCacheFolderId,
+ registry_url: &Url,
+ ) -> Result<(), AnyError> {
+ assert_ne!(id.copy_index, 0);
+ let package_folder = self.readonly.package_folder_for_id(id, registry_url);
+
+ if package_folder.exists()
+ // if this file exists, then the package didn't successfully extract
+ // the first time, or another process is currently extracting the zip file
+ && !package_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME).exists()
+ && self.cache_setting.should_use_for_npm_package(&id.name)
+ {
+ return Ok(());
+ }
+
+ let original_package_folder = self
+ .readonly
+ .package_folder_for_name_and_version(&id.name, &id.version, registry_url);
+ with_folder_sync_lock(
+ (id.name.as_str(), &id.version),
+ &package_folder,
+ || {
+ fs_util::hard_link_dir_recursive(
+ &original_package_folder,
+ &package_folder,
+ )
+ },
+ )?;
+ Ok(())
+ }
+
+ pub fn package_folder_for_id(
+ &self,
+ id: &NpmPackageCacheFolderId,
+ registry_url: &Url,
+ ) -> PathBuf {
+ self.readonly.package_folder_for_id(id, registry_url)
+ }
+
+ pub fn package_folder_for_name_and_version(
+ &self,
+ name: &str,
+ version: &NpmVersion,
registry_url: &Url,
) -> PathBuf {
- self.readonly.package_folder(id, registry_url)
+ self.readonly.package_folder_for_name_and_version(
+ name,
+ version,
+ registry_url,
+ )
}
pub fn package_name_folder(&self, name: &str, registry_url: &Url) -> PathBuf {
@@ -298,14 +462,14 @@ impl NpmCache {
self.readonly.registry_folder(registry_url)
}
- pub fn resolve_package_id_from_specifier(
+ pub fn resolve_package_folder_id_from_specifier(
&self,
specifier: &ModuleSpecifier,
registry_url: &Url,
- ) -> Result<NpmPackageId, AnyError> {
+ ) -> Result<NpmPackageCacheFolderId, AnyError> {
self
.readonly
- .resolve_package_id_from_specifier(specifier, registry_url)
+ .resolve_package_folder_id_from_specifier(specifier, registry_url)
}
}
@@ -314,8 +478,8 @@ mod test {
use deno_core::url::Url;
use super::ReadonlyNpmCache;
+ use crate::npm::cache::NpmPackageCacheFolderId;
use crate::npm::semver::NpmVersion;
- use crate::npm::NpmPackageId;
#[test]
fn should_get_lowercase_package_folder() {
@@ -323,12 +487,12 @@ mod test {
let cache = ReadonlyNpmCache::new(root_dir.clone());
let registry_url = Url::parse("https://registry.npmjs.org/").unwrap();
- // all lowercase should be as-is
assert_eq!(
- cache.package_folder(
- &NpmPackageId {
+ cache.package_folder_for_id(
+ &NpmPackageCacheFolderId {
name: "json".to_string(),
version: NpmVersion::parse("1.2.5").unwrap(),
+ copy_index: 0,
},
&registry_url,
),
@@ -337,44 +501,20 @@ mod test {
.join("json")
.join("1.2.5"),
);
- }
- #[test]
- fn should_handle_non_all_lowercase_package_names() {
- // it was possible at one point for npm packages to not just be lowercase
- let root_dir = crate::deno_dir::DenoDir::new(None).unwrap().root;
- let cache = ReadonlyNpmCache::new(root_dir.clone());
- let registry_url = Url::parse("https://registry.npmjs.org/").unwrap();
- let json_uppercase_hash =
- "db1a21a0bc2ef8fbe13ac4cf044e8c9116d29137d5ed8b916ab63dcb2d4290df";
assert_eq!(
- cache.package_folder(
- &NpmPackageId {
- name: "JSON".to_string(),
- version: NpmVersion::parse("1.2.5").unwrap(),
- },
- &registry_url,
- ),
- root_dir
- .join("registry.npmjs.org")
- .join("_")
- .join(json_uppercase_hash)
- .join("1.2.5"),
- );
- assert_eq!(
- cache.package_folder(
- &NpmPackageId {
- name: "@types/JSON".to_string(),
+ cache.package_folder_for_id(
+ &NpmPackageCacheFolderId {
+ name: "json".to_string(),
version: NpmVersion::parse("1.2.5").unwrap(),
+ copy_index: 1,
},
&registry_url,
),
root_dir
.join("registry.npmjs.org")
- .join("_")
- .join("@types")
- .join(json_uppercase_hash)
- .join("1.2.5"),
+ .join("json")
+ .join("1.2.5_1"),
);
}
}
diff --git a/cli/npm/mod.rs b/cli/npm/mod.rs
index 1c37276db..86ed8572c 100644
--- a/cli/npm/mod.rs
+++ b/cli/npm/mod.rs
@@ -13,6 +13,7 @@ pub use cache::NpmCache;
#[cfg(test)]
pub use registry::NpmPackageVersionDistInfo;
pub use registry::NpmRegistryApi;
+pub use registry::RealNpmRegistryApi;
pub use resolution::NpmPackageId;
pub use resolution::NpmPackageReference;
pub use resolution::NpmPackageReq;
diff --git a/cli/npm/registry.rs b/cli/npm/registry.rs
index ccbe18c7f..2a89d4463 100644
--- a/cli/npm/registry.rs
+++ b/cli/npm/registry.rs
@@ -1,5 +1,6 @@
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
+use std::cmp::Ordering;
use std::collections::HashMap;
use std::fs;
use std::io::ErrorKind;
@@ -10,6 +11,8 @@ use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
+use deno_core::futures::future::BoxFuture;
+use deno_core::futures::FutureExt;
use deno_core::parking_lot::Mutex;
use deno_core::serde::Deserialize;
use deno_core::serde_json;
@@ -24,11 +27,13 @@ use crate::http_cache::CACHE_PERM;
use crate::progress_bar::ProgressBar;
use super::cache::NpmCache;
+use super::resolution::NpmVersionMatcher;
+use super::semver::NpmVersion;
use super::semver::NpmVersionReq;
// npm registry docs: https://github.com/npm/registry/blob/master/docs/REGISTRY-API.md
-#[derive(Debug, Deserialize, Serialize, Clone)]
+#[derive(Debug, Default, Deserialize, Serialize, Clone)]
pub struct NpmPackageInfo {
pub name: String,
pub versions: HashMap<String, NpmPackageVersionInfo>,
@@ -36,13 +41,59 @@ pub struct NpmPackageInfo {
pub dist_tags: HashMap<String, String>,
}
+#[derive(Debug, Eq, PartialEq)]
+pub enum NpmDependencyEntryKind {
+ Dep,
+ Peer,
+ OptionalPeer,
+}
+
+impl NpmDependencyEntryKind {
+ pub fn is_optional(&self) -> bool {
+ matches!(self, NpmDependencyEntryKind::OptionalPeer)
+ }
+}
+
+#[derive(Debug, Eq, PartialEq)]
pub struct NpmDependencyEntry {
+ pub kind: NpmDependencyEntryKind,
pub bare_specifier: String,
pub name: String,
pub version_req: NpmVersionReq,
+ /// When the dependency is also marked as a peer dependency,
+ /// use this entry to resolve the dependency when it can't
+ /// be resolved as a peer dependency.
+ pub peer_dep_version_req: Option<NpmVersionReq>,
+}
+
+impl PartialOrd for NpmDependencyEntry {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+impl Ord for NpmDependencyEntry {
+ fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+ // sort the dependencies alphabetically by name then by version descending
+ match self.name.cmp(&other.name) {
+ // sort by newest to oldest
+ Ordering::Equal => other
+ .version_req
+ .version_text()
+ .cmp(&self.version_req.version_text()),
+ ordering => ordering,
+ }
+ }
+}
+
+#[derive(Debug, Default, Deserialize, Serialize, Clone)]
+pub struct NpmPeerDependencyMeta {
+ #[serde(default)]
+ optional: bool,
}
#[derive(Debug, Default, Deserialize, Serialize, Clone)]
+#[serde(rename_all = "camelCase")]
pub struct NpmPackageVersionInfo {
pub version: String,
pub dist: NpmPackageVersionDistInfo,
@@ -50,14 +101,19 @@ pub struct NpmPackageVersionInfo {
// package and version (ex. `"typescript-3.0.1": "npm:typescript@3.0.1"`).
#[serde(default)]
pub dependencies: HashMap<String, String>,
+ #[serde(default)]
+ pub peer_dependencies: HashMap<String, String>,
+ #[serde(default)]
+ pub peer_dependencies_meta: HashMap<String, NpmPeerDependencyMeta>,
}
impl NpmPackageVersionInfo {
pub fn dependencies_as_entries(
&self,
) -> Result<Vec<NpmDependencyEntry>, AnyError> {
- fn entry_as_bare_specifier_and_reference(
+ fn parse_dep_entry(
entry: (&String, &String),
+ kind: NpmDependencyEntryKind,
) -> Result<NpmDependencyEntry, AnyError> {
let bare_specifier = entry.0.clone();
let (name, version_req) =
@@ -78,21 +134,46 @@ impl NpmPackageVersionInfo {
)
})?;
Ok(NpmDependencyEntry {
+ kind,
bare_specifier,
name,
version_req,
+ peer_dep_version_req: None,
})
}
- self
- .dependencies
- .iter()
- .map(entry_as_bare_specifier_and_reference)
- .collect::<Result<Vec<_>, AnyError>>()
+ let mut result = HashMap::with_capacity(
+ self.dependencies.len() + self.peer_dependencies.len(),
+ );
+ for entry in &self.peer_dependencies {
+ let is_optional = self
+ .peer_dependencies_meta
+ .get(entry.0)
+ .map(|d| d.optional)
+ .unwrap_or(false);
+ let kind = match is_optional {
+ true => NpmDependencyEntryKind::OptionalPeer,
+ false => NpmDependencyEntryKind::Peer,
+ };
+ let entry = parse_dep_entry(entry, kind)?;
+ result.insert(entry.bare_specifier.clone(), entry);
+ }
+ for entry in &self.dependencies {
+ let entry = parse_dep_entry(entry, NpmDependencyEntryKind::Dep)?;
+ // people may define a dependency as a peer dependency as well,
+ // so in those cases, attempt to resolve as a peer dependency,
+ // but then use this dependency version requirement otherwise
+ if let Some(peer_dep_entry) = result.get_mut(&entry.bare_specifier) {
+ peer_dep_entry.peer_dep_version_req = Some(entry.version_req);
+ } else {
+ result.insert(entry.bare_specifier.clone(), entry);
+ }
+ }
+ Ok(result.into_values().collect())
}
}
-#[derive(Debug, Default, Clone, Serialize, Deserialize)]
+#[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct NpmPackageVersionDistInfo {
/// URL to the tarball.
pub tarball: String,
@@ -100,16 +181,50 @@ pub struct NpmPackageVersionDistInfo {
pub integrity: Option<String>,
}
-#[derive(Clone)]
-pub struct NpmRegistryApi {
- base_url: Url,
- cache: NpmCache,
- mem_cache: Arc<Mutex<HashMap<String, Option<NpmPackageInfo>>>>,
- cache_setting: CacheSetting,
- progress_bar: ProgressBar,
+pub trait NpmRegistryApi: Clone + Sync + Send + 'static {
+ fn maybe_package_info(
+ &self,
+ name: &str,
+ ) -> BoxFuture<'static, Result<Option<NpmPackageInfo>, AnyError>>;
+
+ fn package_info(
+ &self,
+ name: &str,
+ ) -> BoxFuture<'static, Result<NpmPackageInfo, AnyError>> {
+ let api = self.clone();
+ let name = name.to_string();
+ async move {
+ let maybe_package_info = api.maybe_package_info(&name).await?;
+ match maybe_package_info {
+ Some(package_info) => Ok(package_info),
+ None => bail!("npm package '{}' does not exist", name),
+ }
+ }
+ .boxed()
+ }
+
+ fn package_version_info(
+ &self,
+ name: &str,
+ version: &NpmVersion,
+ ) -> BoxFuture<'static, Result<Option<NpmPackageVersionInfo>, AnyError>> {
+ let api = self.clone();
+ let name = name.to_string();
+ let version = version.to_string();
+ async move {
+ // todo(dsherret): this could be optimized to not clone the
+ // entire package info in the case of the RealNpmRegistryApi
+ let mut package_info = api.package_info(&name).await?;
+ Ok(package_info.versions.remove(&version))
+ }
+ .boxed()
+ }
}
-impl NpmRegistryApi {
+#[derive(Clone)]
+pub struct RealNpmRegistryApi(Arc<RealNpmRegistryApiInner>);
+
+impl RealNpmRegistryApi {
pub fn default_url() -> Url {
let env_var_name = "DENO_NPM_REGISTRY";
if let Ok(registry_url) = std::env::var(env_var_name) {
@@ -135,30 +250,40 @@ impl NpmRegistryApi {
cache_setting: CacheSetting,
progress_bar: ProgressBar,
) -> Self {
- Self {
+ Self(Arc::new(RealNpmRegistryApiInner {
base_url,
cache,
mem_cache: Default::default(),
cache_setting,
progress_bar,
- }
+ }))
}
pub fn base_url(&self) -> &Url {
- &self.base_url
+ &self.0.base_url
}
+}
- pub async fn package_info(
+impl NpmRegistryApi for RealNpmRegistryApi {
+ fn maybe_package_info(
&self,
name: &str,
- ) -> Result<NpmPackageInfo, AnyError> {
- let maybe_package_info = self.maybe_package_info(name).await?;
- match maybe_package_info {
- Some(package_info) => Ok(package_info),
- None => bail!("npm package '{}' does not exist", name),
- }
+ ) -> BoxFuture<'static, Result<Option<NpmPackageInfo>, AnyError>> {
+ let api = self.clone();
+ let name = name.to_string();
+ async move { api.0.maybe_package_info(&name).await }.boxed()
}
+}
+struct RealNpmRegistryApiInner {
+ base_url: Url,
+ cache: NpmCache,
+ mem_cache: Mutex<HashMap<String, Option<NpmPackageInfo>>>,
+ cache_setting: CacheSetting,
+ progress_bar: ProgressBar,
+}
+
+impl RealNpmRegistryApiInner {
pub async fn maybe_package_info(
&self,
name: &str,
@@ -331,3 +456,100 @@ impl NpmRegistryApi {
name_folder_path.join("registry.json")
}
}
+
+/// Note: This test struct is not thread safe for setup
+/// purposes. Construct everything on the same thread.
+#[cfg(test)]
+#[derive(Clone, Default)]
+pub struct TestNpmRegistryApi {
+ package_infos: Arc<Mutex<HashMap<String, NpmPackageInfo>>>,
+}
+
+#[cfg(test)]
+impl TestNpmRegistryApi {
+ pub fn add_package_info(&self, name: &str, info: NpmPackageInfo) {
+ let previous = self.package_infos.lock().insert(name.to_string(), info);
+ assert!(previous.is_none());
+ }
+
+ pub fn ensure_package(&self, name: &str) {
+ if !self.package_infos.lock().contains_key(name) {
+ self.add_package_info(
+ name,
+ NpmPackageInfo {
+ name: name.to_string(),
+ ..Default::default()
+ },
+ );
+ }
+ }
+
+ pub fn ensure_package_version(&self, name: &str, version: &str) {
+ self.ensure_package(name);
+ let mut infos = self.package_infos.lock();
+ let info = infos.get_mut(name).unwrap();
+ if !info.versions.contains_key(version) {
+ info.versions.insert(
+ version.to_string(),
+ NpmPackageVersionInfo {
+ version: version.to_string(),
+ ..Default::default()
+ },
+ );
+ }
+ }
+
+ pub fn add_dependency(
+ &self,
+ package_from: (&str, &str),
+ package_to: (&str, &str),
+ ) {
+ let mut infos = self.package_infos.lock();
+ let info = infos.get_mut(package_from.0).unwrap();
+ let version = info.versions.get_mut(package_from.1).unwrap();
+ version
+ .dependencies
+ .insert(package_to.0.to_string(), package_to.1.to_string());
+ }
+
+ pub fn add_peer_dependency(
+ &self,
+ package_from: (&str, &str),
+ package_to: (&str, &str),
+ ) {
+ let mut infos = self.package_infos.lock();
+ let info = infos.get_mut(package_from.0).unwrap();
+ let version = info.versions.get_mut(package_from.1).unwrap();
+ version
+ .peer_dependencies
+ .insert(package_to.0.to_string(), package_to.1.to_string());
+ }
+
+ pub fn add_optional_peer_dependency(
+ &self,
+ package_from: (&str, &str),
+ package_to: (&str, &str),
+ ) {
+ let mut infos = self.package_infos.lock();
+ let info = infos.get_mut(package_from.0).unwrap();
+ let version = info.versions.get_mut(package_from.1).unwrap();
+ version
+ .peer_dependencies
+ .insert(package_to.0.to_string(), package_to.1.to_string());
+ version.peer_dependencies_meta.insert(
+ package_to.0.to_string(),
+ NpmPeerDependencyMeta { optional: true },
+ );
+ }
+}
+
+#[cfg(test)]
+impl NpmRegistryApi for TestNpmRegistryApi {
+ fn maybe_package_info(
+ &self,
+ name: &str,
+ ) -> BoxFuture<'static, Result<Option<NpmPackageInfo>, AnyError>> {
+ let result = self.package_infos.lock().get(name).cloned();
+ Box::pin(deno_core::futures::future::ready(Ok(result)))
+ }
+}
diff --git a/cli/npm/resolution.rs b/cli/npm/resolution.rs
deleted file mode 100644
index 3df2e4ce5..000000000
--- a/cli/npm/resolution.rs
+++ /dev/null
@@ -1,1051 +0,0 @@
-// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
-
-use std::cmp::Ordering;
-use std::collections::HashMap;
-use std::collections::HashSet;
-use std::collections::VecDeque;
-
-use deno_ast::ModuleSpecifier;
-use deno_core::anyhow::bail;
-use deno_core::anyhow::Context;
-use deno_core::error::generic_error;
-use deno_core::error::AnyError;
-use deno_core::futures;
-use deno_core::parking_lot::Mutex;
-use deno_core::parking_lot::RwLock;
-use serde::Deserialize;
-use serde::Serialize;
-use std::sync::Arc;
-
-use crate::lockfile::Lockfile;
-
-use super::cache::should_sync_download;
-use super::registry::NpmPackageInfo;
-use super::registry::NpmPackageVersionDistInfo;
-use super::registry::NpmPackageVersionInfo;
-use super::registry::NpmRegistryApi;
-use super::semver::NpmVersion;
-use super::semver::NpmVersionReq;
-use super::semver::SpecifierVersionReq;
-
-/// The version matcher used for npm schemed urls is more strict than
-/// the one used by npm packages and so we represent either via a trait.
-pub trait NpmVersionMatcher {
- fn tag(&self) -> Option<&str>;
- fn matches(&self, version: &NpmVersion) -> bool;
- fn version_text(&self) -> String;
-}
-
-#[derive(Clone, Debug, Default, PartialEq, Eq)]
-pub struct NpmPackageReference {
- pub req: NpmPackageReq,
- pub sub_path: Option<String>,
-}
-
-impl NpmPackageReference {
- pub fn from_specifier(
- specifier: &ModuleSpecifier,
- ) -> Result<NpmPackageReference, AnyError> {
- Self::from_str(specifier.as_str())
- }
-
- pub fn from_str(specifier: &str) -> Result<NpmPackageReference, AnyError> {
- let specifier = match specifier.strip_prefix("npm:") {
- Some(s) => s,
- None => {
- bail!("Not an npm specifier: {}", specifier);
- }
- };
- let parts = specifier.split('/').collect::<Vec<_>>();
- let name_part_len = if specifier.starts_with('@') { 2 } else { 1 };
- if parts.len() < name_part_len {
- return Err(generic_error(format!("Not a valid package: {}", specifier)));
- }
- let name_parts = &parts[0..name_part_len];
- let last_name_part = &name_parts[name_part_len - 1];
- let (name, version_req) = if let Some(at_index) = last_name_part.rfind('@')
- {
- let version = &last_name_part[at_index + 1..];
- let last_name_part = &last_name_part[..at_index];
- let version_req = SpecifierVersionReq::parse(version)
- .with_context(|| "Invalid version requirement.")?;
- let name = if name_part_len == 1 {
- last_name_part.to_string()
- } else {
- format!("{}/{}", name_parts[0], last_name_part)
- };
- (name, Some(version_req))
- } else {
- (name_parts.join("/"), None)
- };
- let sub_path = if parts.len() == name_parts.len() {
- None
- } else {
- Some(parts[name_part_len..].join("/"))
- };
-
- if let Some(sub_path) = &sub_path {
- if let Some(at_index) = sub_path.rfind('@') {
- let (new_sub_path, version) = sub_path.split_at(at_index);
- let msg = format!(
- "Invalid package specifier 'npm:{}/{}'. Did you mean to write 'npm:{}{}/{}'?",
- name, sub_path, name, version, new_sub_path
- );
- return Err(generic_error(msg));
- }
- }
-
- Ok(NpmPackageReference {
- req: NpmPackageReq { name, version_req },
- sub_path,
- })
- }
-}
-
-impl std::fmt::Display for NpmPackageReference {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- if let Some(sub_path) = &self.sub_path {
- write!(f, "{}/{}", self.req, sub_path)
- } else {
- write!(f, "{}", self.req)
- }
- }
-}
-
-#[derive(
- Clone, Debug, Default, PartialEq, Eq, Hash, Serialize, Deserialize,
-)]
-pub struct NpmPackageReq {
- pub name: String,
- pub version_req: Option<SpecifierVersionReq>,
-}
-
-impl std::fmt::Display for NpmPackageReq {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- match &self.version_req {
- Some(req) => write!(f, "{}@{}", self.name, req),
- None => write!(f, "{}", self.name),
- }
- }
-}
-
-impl NpmVersionMatcher for NpmPackageReq {
- fn tag(&self) -> Option<&str> {
- match &self.version_req {
- Some(version_req) => version_req.tag(),
- None => Some("latest"),
- }
- }
-
- fn matches(&self, version: &NpmVersion) -> bool {
- match self.version_req.as_ref() {
- Some(req) => {
- assert_eq!(self.tag(), None);
- match req.range() {
- Some(range) => range.satisfies(version),
- None => false,
- }
- }
- None => version.pre.is_empty(),
- }
- }
-
- fn version_text(&self) -> String {
- self
- .version_req
- .as_ref()
- .map(|v| format!("{}", v))
- .unwrap_or_else(|| "non-prerelease".to_string())
- }
-}
-
-#[derive(
- Debug, Clone, PartialOrd, Ord, PartialEq, Eq, Hash, Serialize, Deserialize,
-)]
-pub struct NpmPackageId {
- pub name: String,
- pub version: NpmVersion,
-}
-
-impl NpmPackageId {
- #[allow(unused)]
- pub fn scope(&self) -> Option<&str> {
- if self.name.starts_with('@') && self.name.contains('/') {
- self.name.split('/').next()
- } else {
- None
- }
- }
-
- pub fn serialize_for_lock_file(&self) -> String {
- format!("{}@{}", self.name, self.version)
- }
-
- pub fn deserialize_from_lock_file(id: &str) -> Result<Self, AnyError> {
- let reference = NpmPackageReference::from_str(&format!("npm:{}", id))
- .with_context(|| {
- format!("Unable to deserialize npm package reference: {}", id)
- })?;
- let version =
- NpmVersion::parse(&reference.req.version_req.unwrap().to_string())
- .unwrap();
- Ok(Self {
- name: reference.req.name,
- version,
- })
- }
-}
-
-impl std::fmt::Display for NpmPackageId {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- write!(f, "{}@{}", self.name, self.version)
- }
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct NpmResolutionPackage {
- pub id: NpmPackageId,
- pub dist: NpmPackageVersionDistInfo,
- /// Key is what the package refers to the other package as,
- /// which could be different from the package name.
- pub dependencies: HashMap<String, NpmPackageId>,
-}
-
-#[derive(Debug, Clone, Default, Serialize, Deserialize)]
-pub struct NpmResolutionSnapshot {
- #[serde(with = "map_to_vec")]
- package_reqs: HashMap<NpmPackageReq, NpmVersion>,
- packages_by_name: HashMap<String, Vec<NpmVersion>>,
- #[serde(with = "map_to_vec")]
- packages: HashMap<NpmPackageId, NpmResolutionPackage>,
-}
-
-// This is done so the maps with non-string keys get serialized and deserialized as vectors.
-// Adapted from: https://github.com/serde-rs/serde/issues/936#issuecomment-302281792
-mod map_to_vec {
- use std::collections::HashMap;
-
- use serde::de::Deserialize;
- use serde::de::Deserializer;
- use serde::ser::Serializer;
- use serde::Serialize;
-
- pub fn serialize<S, K: Serialize, V: Serialize>(
- map: &HashMap<K, V>,
- serializer: S,
- ) -> Result<S::Ok, S::Error>
- where
- S: Serializer,
- {
- serializer.collect_seq(map.iter())
- }
-
- pub fn deserialize<
- 'de,
- D,
- K: Deserialize<'de> + Eq + std::hash::Hash,
- V: Deserialize<'de>,
- >(
- deserializer: D,
- ) -> Result<HashMap<K, V>, D::Error>
- where
- D: Deserializer<'de>,
- {
- let mut map = HashMap::new();
- for (key, value) in Vec::<(K, V)>::deserialize(deserializer)? {
- map.insert(key, value);
- }
- Ok(map)
- }
-}
-
-impl NpmResolutionSnapshot {
- /// Resolve a node package from a deno module.
- pub fn resolve_package_from_deno_module(
- &self,
- req: &NpmPackageReq,
- ) -> Result<&NpmResolutionPackage, AnyError> {
- match self.package_reqs.get(req) {
- Some(version) => Ok(
- self
- .packages
- .get(&NpmPackageId {
- name: req.name.clone(),
- version: version.clone(),
- })
- .unwrap(),
- ),
- None => bail!("could not find npm package directory for '{}'", req),
- }
- }
-
- pub fn top_level_packages(&self) -> Vec<NpmPackageId> {
- self
- .package_reqs
- .iter()
- .map(|(req, version)| NpmPackageId {
- name: req.name.clone(),
- version: version.clone(),
- })
- .collect::<HashSet<_>>()
- .into_iter()
- .collect::<Vec<_>>()
- }
-
- pub fn package_from_id(
- &self,
- id: &NpmPackageId,
- ) -> Option<&NpmResolutionPackage> {
- self.packages.get(id)
- }
-
- pub fn resolve_package_from_package(
- &self,
- name: &str,
- referrer: &NpmPackageId,
- ) -> Result<&NpmResolutionPackage, AnyError> {
- match self.packages.get(referrer) {
- Some(referrer_package) => {
- let name_ = name_without_path(name);
- if let Some(id) = referrer_package.dependencies.get(name_) {
- return Ok(self.packages.get(id).unwrap());
- }
-
- if referrer_package.id.name == name_ {
- return Ok(referrer_package);
- }
-
- // TODO(bartlomieju): this should use a reverse lookup table in the
- // snapshot instead of resolving best version again.
- let req = NpmPackageReq {
- name: name_.to_string(),
- version_req: None,
- };
-
- if let Some(version) = self.resolve_best_package_version(name_, &req) {
- let id = NpmPackageId {
- name: name_.to_string(),
- version,
- };
- if let Some(pkg) = self.packages.get(&id) {
- return Ok(pkg);
- }
- }
-
- bail!(
- "could not find npm package '{}' referenced by '{}'",
- name,
- referrer
- )
- }
- None => bail!("could not find referrer npm package '{}'", referrer),
- }
- }
-
- pub fn all_packages(&self) -> Vec<NpmResolutionPackage> {
- self.packages.values().cloned().collect()
- }
-
- pub fn resolve_best_package_version(
- &self,
- name: &str,
- version_matcher: &impl NpmVersionMatcher,
- ) -> Option<NpmVersion> {
- let mut maybe_best_version: Option<&NpmVersion> = None;
- if let Some(versions) = self.packages_by_name.get(name) {
- for version in versions {
- if version_matcher.matches(version) {
- let is_best_version = maybe_best_version
- .as_ref()
- .map(|best_version| (*best_version).cmp(version).is_lt())
- .unwrap_or(true);
- if is_best_version {
- maybe_best_version = Some(version);
- }
- }
- }
- }
- maybe_best_version.cloned()
- }
-
- pub async fn from_lockfile(
- lockfile: Arc<Mutex<Lockfile>>,
- api: &NpmRegistryApi,
- ) -> Result<Self, AnyError> {
- let mut package_reqs: HashMap<NpmPackageReq, NpmVersion>;
- let mut packages_by_name: HashMap<String, Vec<NpmVersion>>;
- let mut packages: HashMap<NpmPackageId, NpmResolutionPackage>;
-
- {
- let lockfile = lockfile.lock();
-
- // pre-allocate collections
- package_reqs =
- HashMap::with_capacity(lockfile.content.npm.specifiers.len());
- packages = HashMap::with_capacity(lockfile.content.npm.packages.len());
- packages_by_name =
- HashMap::with_capacity(lockfile.content.npm.packages.len()); // close enough
- let mut verify_ids =
- HashSet::with_capacity(lockfile.content.npm.packages.len());
-
- // collect the specifiers to version mappings
- for (key, value) in &lockfile.content.npm.specifiers {
- let reference = NpmPackageReference::from_str(&format!("npm:{}", key))
- .with_context(|| format!("Unable to parse npm specifier: {}", key))?;
- let package_id = NpmPackageId::deserialize_from_lock_file(value)?;
- package_reqs.insert(reference.req, package_id.version.clone());
- verify_ids.insert(package_id.clone());
- }
-
- // then the packages
- for (key, value) in &lockfile.content.npm.packages {
- let package_id = NpmPackageId::deserialize_from_lock_file(key)?;
- let mut dependencies = HashMap::default();
-
- packages_by_name
- .entry(package_id.name.to_string())
- .or_default()
- .push(package_id.version.clone());
-
- for (name, specifier) in &value.dependencies {
- let dep_id = NpmPackageId::deserialize_from_lock_file(specifier)?;
- dependencies.insert(name.to_string(), dep_id.clone());
- verify_ids.insert(dep_id);
- }
-
- let package = NpmResolutionPackage {
- id: package_id.clone(),
- // temporary dummy value
- dist: NpmPackageVersionDistInfo {
- tarball: "foobar".to_string(),
- shasum: "foobar".to_string(),
- integrity: Some("foobar".to_string()),
- },
- dependencies,
- };
-
- packages.insert(package_id, package);
- }
-
- // verify that all these ids exist in packages
- for id in &verify_ids {
- if !packages.contains_key(id) {
- bail!(
- "the lockfile ({}) is corrupt. You can recreate it with --lock-write",
- lockfile.filename.display(),
- );
- }
- }
- }
-
- let mut unresolved_tasks = Vec::with_capacity(packages_by_name.len());
-
- // cache the package names in parallel in the registry api
- for package_name in packages_by_name.keys() {
- let package_name = package_name.clone();
- let api = api.clone();
- unresolved_tasks.push(tokio::task::spawn(async move {
- api.package_info(&package_name).await?;
- Result::<_, AnyError>::Ok(())
- }));
- }
- for result in futures::future::join_all(unresolved_tasks).await {
- result??;
- }
-
- // ensure the dist is set for each package
- for package in packages.values_mut() {
- // this will read from the memory cache now
- let package_info = api.package_info(&package.id.name).await?;
- let version_info = match package_info
- .versions
- .get(&package.id.version.to_string())
- {
- Some(version_info) => version_info,
- None => {
- bail!("could not find '{}' specified in the lockfile. Maybe try again with --reload", package.id);
- }
- };
- package.dist = version_info.dist.clone();
- }
-
- Ok(Self {
- package_reqs,
- packages_by_name,
- packages,
- })
- }
-}
-
-pub struct NpmResolution {
- api: NpmRegistryApi,
- snapshot: RwLock<NpmResolutionSnapshot>,
- update_sempahore: tokio::sync::Semaphore,
-}
-
-impl std::fmt::Debug for NpmResolution {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- let snapshot = self.snapshot.read();
- f.debug_struct("NpmResolution")
- .field("snapshot", &snapshot)
- .finish()
- }
-}
-
-impl NpmResolution {
- pub fn new(
- api: NpmRegistryApi,
- initial_snapshot: Option<NpmResolutionSnapshot>,
- ) -> Self {
- Self {
- api,
- snapshot: RwLock::new(initial_snapshot.unwrap_or_default()),
- update_sempahore: tokio::sync::Semaphore::new(1),
- }
- }
-
- pub async fn add_package_reqs(
- &self,
- package_reqs: Vec<NpmPackageReq>,
- ) -> Result<(), AnyError> {
- // only allow one thread in here at a time
- let _permit = self.update_sempahore.acquire().await.unwrap();
- let snapshot = self.snapshot.read().clone();
-
- let snapshot = self
- .add_package_reqs_to_snapshot(package_reqs, snapshot)
- .await?;
-
- *self.snapshot.write() = snapshot;
- Ok(())
- }
-
- pub async fn set_package_reqs(
- &self,
- package_reqs: HashSet<NpmPackageReq>,
- ) -> Result<(), AnyError> {
- // only allow one thread in here at a time
- let _permit = self.update_sempahore.acquire().await.unwrap();
- let snapshot = self.snapshot.read().clone();
-
- let has_removed_package = !snapshot
- .package_reqs
- .keys()
- .all(|req| package_reqs.contains(req));
- // if any packages were removed, we need to completely recreate the npm resolution snapshot
- let snapshot = if has_removed_package {
- NpmResolutionSnapshot::default()
- } else {
- snapshot
- };
- let snapshot = self
- .add_package_reqs_to_snapshot(
- package_reqs.into_iter().collect(),
- snapshot,
- )
- .await?;
-
- *self.snapshot.write() = snapshot;
-
- Ok(())
- }
-
- async fn add_package_reqs_to_snapshot(
- &self,
- mut package_reqs: Vec<NpmPackageReq>,
- mut snapshot: NpmResolutionSnapshot,
- ) -> Result<NpmResolutionSnapshot, AnyError> {
- // multiple packages are resolved in alphabetical order
- package_reqs.sort_by(|a, b| a.name.cmp(&b.name));
-
- // go over the top level packages first, then down the
- // tree one level at a time through all the branches
- let mut unresolved_tasks = Vec::with_capacity(package_reqs.len());
- for package_req in package_reqs {
- if snapshot.package_reqs.contains_key(&package_req) {
- // skip analyzing this package, as there's already a matching top level package
- continue;
- }
- // inspect the list of current packages
- if let Some(version) =
- snapshot.resolve_best_package_version(&package_req.name, &package_req)
- {
- snapshot.package_reqs.insert(package_req, version);
- continue; // done, no need to continue
- }
-
- // no existing best version, so resolve the current packages
- let api = self.api.clone();
- let maybe_info = if should_sync_download() {
- // for deterministic test output
- Some(api.package_info(&package_req.name).await)
- } else {
- None
- };
- unresolved_tasks.push(tokio::task::spawn(async move {
- let info = match maybe_info {
- Some(info) => info?,
- None => api.package_info(&package_req.name).await?,
- };
- Result::<_, AnyError>::Ok((package_req, info))
- }));
- }
-
- let mut pending_dependencies = VecDeque::new();
- for result in futures::future::join_all(unresolved_tasks).await {
- let (package_req, info) = result??;
- let version_and_info = get_resolved_package_version_and_info(
- &package_req.name,
- &package_req,
- info,
- None,
- )?;
- let id = NpmPackageId {
- name: package_req.name.clone(),
- version: version_and_info.version.clone(),
- };
- let dependencies = version_and_info
- .info
- .dependencies_as_entries()
- .with_context(|| format!("npm package: {}", id))?;
-
- pending_dependencies.push_back((id.clone(), dependencies));
- snapshot.packages.insert(
- id.clone(),
- NpmResolutionPackage {
- id,
- dist: version_and_info.info.dist,
- dependencies: Default::default(),
- },
- );
- snapshot
- .packages_by_name
- .entry(package_req.name.clone())
- .or_default()
- .push(version_and_info.version.clone());
- snapshot
- .package_reqs
- .insert(package_req, version_and_info.version);
- }
-
- // now go down through the dependencies by tree depth
- while let Some((parent_package_id, mut deps)) =
- pending_dependencies.pop_front()
- {
- // sort the dependencies alphabetically by name then by version descending
- deps.sort_by(|a, b| match a.name.cmp(&b.name) {
- // sort by newest to oldest
- Ordering::Equal => b
- .version_req
- .version_text()
- .cmp(&a.version_req.version_text()),
- ordering => ordering,
- });
-
- // cache all the dependencies' registry infos in parallel if should
- if !should_sync_download() {
- let handles = deps
- .iter()
- .map(|dep| {
- let name = dep.name.clone();
- let api = self.api.clone();
- tokio::task::spawn(async move {
- // it's ok to call this without storing the result, because
- // NpmRegistryApi will cache the package info in memory
- api.package_info(&name).await
- })
- })
- .collect::<Vec<_>>();
- let results = futures::future::join_all(handles).await;
- for result in results {
- result??; // surface the first error
- }
- }
-
- // now resolve them
- for dep in deps {
- // check if an existing dependency matches this
- let id = if let Some(version) =
- snapshot.resolve_best_package_version(&dep.name, &dep.version_req)
- {
- NpmPackageId {
- name: dep.name.clone(),
- version,
- }
- } else {
- // get the information
- let info = self.api.package_info(&dep.name).await?;
- let version_and_info = get_resolved_package_version_and_info(
- &dep.name,
- &dep.version_req,
- info,
- None,
- )?;
- let dependencies = version_and_info
- .info
- .dependencies_as_entries()
- .with_context(|| {
- format!("npm package: {}@{}", dep.name, version_and_info.version)
- })?;
-
- let id = NpmPackageId {
- name: dep.name.clone(),
- version: version_and_info.version.clone(),
- };
- pending_dependencies.push_back((id.clone(), dependencies));
- snapshot.packages.insert(
- id.clone(),
- NpmResolutionPackage {
- id: id.clone(),
- dist: version_and_info.info.dist,
- dependencies: Default::default(),
- },
- );
- snapshot
- .packages_by_name
- .entry(dep.name.clone())
- .or_default()
- .push(id.version.clone());
-
- id
- };
-
- // add this version as a dependency of the package
- snapshot
- .packages
- .get_mut(&parent_package_id)
- .unwrap()
- .dependencies
- .insert(dep.bare_specifier.clone(), id);
- }
- }
-
- Ok(snapshot)
- }
-
- pub fn resolve_package_from_id(
- &self,
- id: &NpmPackageId,
- ) -> Option<NpmResolutionPackage> {
- self.snapshot.read().package_from_id(id).cloned()
- }
-
- pub fn resolve_package_from_package(
- &self,
- name: &str,
- referrer: &NpmPackageId,
- ) -> Result<NpmResolutionPackage, AnyError> {
- self
- .snapshot
- .read()
- .resolve_package_from_package(name, referrer)
- .cloned()
- }
-
- /// Resolve a node package from a deno module.
- pub fn resolve_package_from_deno_module(
- &self,
- package: &NpmPackageReq,
- ) -> Result<NpmResolutionPackage, AnyError> {
- self
- .snapshot
- .read()
- .resolve_package_from_deno_module(package)
- .cloned()
- }
-
- pub fn all_packages(&self) -> Vec<NpmResolutionPackage> {
- self.snapshot.read().all_packages()
- }
-
- pub fn has_packages(&self) -> bool {
- !self.snapshot.read().packages.is_empty()
- }
-
- pub fn snapshot(&self) -> NpmResolutionSnapshot {
- self.snapshot.read().clone()
- }
-
- pub fn lock(
- &self,
- lockfile: &mut Lockfile,
- snapshot: &NpmResolutionSnapshot,
- ) -> Result<(), AnyError> {
- for (package_req, version) in snapshot.package_reqs.iter() {
- lockfile.insert_npm_specifier(package_req, version.to_string());
- }
- for package in self.all_packages() {
- lockfile.check_or_insert_npm_package(&package)?;
- }
- Ok(())
- }
-}
-
-#[derive(Clone)]
-struct VersionAndInfo {
- version: NpmVersion,
- info: NpmPackageVersionInfo,
-}
-
-fn get_resolved_package_version_and_info(
- pkg_name: &str,
- version_matcher: &impl NpmVersionMatcher,
- info: NpmPackageInfo,
- parent: Option<&NpmPackageId>,
-) -> Result<VersionAndInfo, AnyError> {
- let mut maybe_best_version: Option<VersionAndInfo> = None;
- if let Some(tag) = version_matcher.tag() {
- // For when someone just specifies @types/node, we want to pull in a
- // "known good" version of @types/node that works well with Deno and
- // not necessarily the latest version. For example, we might only be
- // compatible with Node vX, but then Node vY is published so we wouldn't
- // want to pull that in.
- // Note: If the user doesn't want this behavior, then they can specify an
- // explicit version.
- if tag == "latest" && pkg_name == "@types/node" {
- return get_resolved_package_version_and_info(
- pkg_name,
- &NpmVersionReq::parse("18.0.0 - 18.8.2").unwrap(),
- info,
- parent,
- );
- }
-
- if let Some(version) = info.dist_tags.get(tag) {
- match info.versions.get(version) {
- Some(info) => {
- return Ok(VersionAndInfo {
- version: NpmVersion::parse(version)?,
- info: info.clone(),
- });
- }
- None => {
- bail!(
- "Could not find version '{}' referenced in dist-tag '{}'.",
- version,
- tag,
- )
- }
- }
- } else {
- bail!("Could not find dist-tag '{}'.", tag,)
- }
- } else {
- for (_, version_info) in info.versions.into_iter() {
- let version = NpmVersion::parse(&version_info.version)?;
- if version_matcher.matches(&version) {
- let is_best_version = maybe_best_version
- .as_ref()
- .map(|best_version| best_version.version.cmp(&version).is_lt())
- .unwrap_or(true);
- if is_best_version {
- maybe_best_version = Some(VersionAndInfo {
- version,
- info: version_info,
- });
- }
- }
- }
- }
-
- match maybe_best_version {
- Some(v) => Ok(v),
- // If the package isn't found, it likely means that the user needs to use
- // `--reload` to get the latest npm package information. Although it seems
- // like we could make this smart by fetching the latest information for
- // this package here, we really need a full restart. There could be very
- // interesting bugs that occur if this package's version was resolved by
- // something previous using the old information, then now being smart here
- // causes a new fetch of the package information, meaning this time the
- // previous resolution of this package's version resolved to an older
- // version, but next time to a different version because it has new information.
- None => bail!(
- concat!(
- "Could not find npm package '{}' matching {}{}. ",
- "Try retrieving the latest npm package information by running with --reload",
- ),
- pkg_name,
- version_matcher.version_text(),
- match parent {
- Some(id) => format!(" as specified in {}", id),
- None => String::new(),
- }
- ),
- }
-}
-
-fn name_without_path(name: &str) -> &str {
- let mut search_start_index = 0;
- if name.starts_with('@') {
- if let Some(slash_index) = name.find('/') {
- search_start_index = slash_index + 1;
- }
- }
- if let Some(slash_index) = &name[search_start_index..].find('/') {
- // get the name up until the path slash
- &name[0..search_start_index + slash_index]
- } else {
- name
- }
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
-
- #[test]
- fn parse_npm_package_ref() {
- assert_eq!(
- NpmPackageReference::from_str("npm:@package/test").unwrap(),
- NpmPackageReference {
- req: NpmPackageReq {
- name: "@package/test".to_string(),
- version_req: None,
- },
- sub_path: None,
- }
- );
-
- assert_eq!(
- NpmPackageReference::from_str("npm:@package/test@1").unwrap(),
- NpmPackageReference {
- req: NpmPackageReq {
- name: "@package/test".to_string(),
- version_req: Some(SpecifierVersionReq::parse("1").unwrap()),
- },
- sub_path: None,
- }
- );
-
- assert_eq!(
- NpmPackageReference::from_str("npm:@package/test@~1.1/sub_path").unwrap(),
- NpmPackageReference {
- req: NpmPackageReq {
- name: "@package/test".to_string(),
- version_req: Some(SpecifierVersionReq::parse("~1.1").unwrap()),
- },
- sub_path: Some("sub_path".to_string()),
- }
- );
-
- assert_eq!(
- NpmPackageReference::from_str("npm:@package/test/sub_path").unwrap(),
- NpmPackageReference {
- req: NpmPackageReq {
- name: "@package/test".to_string(),
- version_req: None,
- },
- sub_path: Some("sub_path".to_string()),
- }
- );
-
- assert_eq!(
- NpmPackageReference::from_str("npm:test").unwrap(),
- NpmPackageReference {
- req: NpmPackageReq {
- name: "test".to_string(),
- version_req: None,
- },
- sub_path: None,
- }
- );
-
- assert_eq!(
- NpmPackageReference::from_str("npm:test@^1.2").unwrap(),
- NpmPackageReference {
- req: NpmPackageReq {
- name: "test".to_string(),
- version_req: Some(SpecifierVersionReq::parse("^1.2").unwrap()),
- },
- sub_path: None,
- }
- );
-
- assert_eq!(
- NpmPackageReference::from_str("npm:test@~1.1/sub_path").unwrap(),
- NpmPackageReference {
- req: NpmPackageReq {
- name: "test".to_string(),
- version_req: Some(SpecifierVersionReq::parse("~1.1").unwrap()),
- },
- sub_path: Some("sub_path".to_string()),
- }
- );
-
- assert_eq!(
- NpmPackageReference::from_str("npm:@package/test/sub_path").unwrap(),
- NpmPackageReference {
- req: NpmPackageReq {
- name: "@package/test".to_string(),
- version_req: None,
- },
- sub_path: Some("sub_path".to_string()),
- }
- );
-
- assert_eq!(
- NpmPackageReference::from_str("npm:@package")
- .err()
- .unwrap()
- .to_string(),
- "Not a valid package: @package"
- );
- }
-
- #[test]
- fn test_name_without_path() {
- assert_eq!(name_without_path("foo"), "foo");
- assert_eq!(name_without_path("@foo/bar"), "@foo/bar");
- assert_eq!(name_without_path("@foo/bar/baz"), "@foo/bar");
- assert_eq!(name_without_path("@hello"), "@hello");
- }
-
- #[test]
- fn test_get_resolved_package_version_and_info() {
- // dist tag where version doesn't exist
- let package_ref = NpmPackageReference::from_str("npm:test").unwrap();
- let result = get_resolved_package_version_and_info(
- "test",
- &package_ref.req,
- NpmPackageInfo {
- name: "test".to_string(),
- versions: HashMap::new(),
- dist_tags: HashMap::from([(
- "latest".to_string(),
- "1.0.0-alpha".to_string(),
- )]),
- },
- None,
- );
- assert_eq!(
- result.err().unwrap().to_string(),
- "Could not find version '1.0.0-alpha' referenced in dist-tag 'latest'."
- );
-
- // dist tag where version is a pre-release
- let package_ref = NpmPackageReference::from_str("npm:test").unwrap();
- let result = get_resolved_package_version_and_info(
- "test",
- &package_ref.req,
- NpmPackageInfo {
- name: "test".to_string(),
- versions: HashMap::from([
- ("0.1.0".to_string(), NpmPackageVersionInfo::default()),
- (
- "1.0.0-alpha".to_string(),
- NpmPackageVersionInfo {
- version: "0.1.0-alpha".to_string(),
- ..Default::default()
- },
- ),
- ]),
- dist_tags: HashMap::from([(
- "latest".to_string(),
- "1.0.0-alpha".to_string(),
- )]),
- },
- None,
- );
- assert_eq!(result.unwrap().version.to_string(), "1.0.0-alpha");
- }
-}
diff --git a/cli/npm/resolution/graph.rs b/cli/npm/resolution/graph.rs
new file mode 100644
index 000000000..497067925
--- /dev/null
+++ b/cli/npm/resolution/graph.rs
@@ -0,0 +1,2033 @@
+// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
+
+use std::borrow::Cow;
+use std::collections::BTreeMap;
+use std::collections::BTreeSet;
+use std::collections::HashMap;
+use std::collections::VecDeque;
+use std::sync::Arc;
+
+use deno_core::anyhow::bail;
+use deno_core::anyhow::Context;
+use deno_core::error::AnyError;
+use deno_core::futures;
+use deno_core::parking_lot::Mutex;
+use deno_core::parking_lot::MutexGuard;
+use log::debug;
+
+use crate::npm::cache::should_sync_download;
+use crate::npm::registry::NpmDependencyEntry;
+use crate::npm::registry::NpmDependencyEntryKind;
+use crate::npm::registry::NpmPackageInfo;
+use crate::npm::registry::NpmPackageVersionInfo;
+use crate::npm::semver::NpmVersion;
+use crate::npm::semver::NpmVersionReq;
+use crate::npm::NpmRegistryApi;
+
+use super::snapshot::NpmResolutionSnapshot;
+use super::snapshot::SnapshotPackageCopyIndexResolver;
+use super::NpmPackageId;
+use super::NpmPackageReq;
+use super::NpmResolutionPackage;
+use super::NpmVersionMatcher;
+
/// A memory efficient path of visited name and versions in the graph
/// which is used to detect cycles.
///
/// note(dsherret): although this is definitely more memory efficient
/// than a HashSet, I haven't done any tests about whether this is
/// faster in practice.
#[derive(Default, Clone)]
struct VisitedVersionsPath {
  // the path node visited before this one (`None` at the start of the path)
  previous_node: Option<Arc<VisitedVersionsPath>>,
  // "name@version" key of the package visited at this step (see `id_to_key`)
  visited_version_key: String,
}
+
+impl VisitedVersionsPath {
+ pub fn new(id: &NpmPackageId) -> Arc<Self> {
+ Arc::new(Self {
+ previous_node: None,
+ visited_version_key: Self::id_to_key(id),
+ })
+ }
+
+ pub fn with_parent(
+ self: &Arc<VisitedVersionsPath>,
+ parent: &NodeParent,
+ ) -> Option<Arc<Self>> {
+ match parent {
+ NodeParent::Node(id) => self.with_id(id),
+ NodeParent::Req => Some(self.clone()),
+ }
+ }
+
+ pub fn with_id(
+ self: &Arc<VisitedVersionsPath>,
+ id: &NpmPackageId,
+ ) -> Option<Arc<Self>> {
+ if self.has_visited(id) {
+ None
+ } else {
+ Some(Arc::new(Self {
+ previous_node: Some(self.clone()),
+ visited_version_key: Self::id_to_key(id),
+ }))
+ }
+ }
+
+ pub fn has_visited(self: &Arc<Self>, id: &NpmPackageId) -> bool {
+ let mut maybe_next_node = Some(self);
+ let key = Self::id_to_key(id);
+ while let Some(next_node) = maybe_next_node {
+ if next_node.visited_version_key == key {
+ return true;
+ }
+ maybe_next_node = next_node.previous_node.as_ref();
+ }
+ false
+ }
+
+ fn id_to_key(id: &NpmPackageId) -> String {
+ format!("{}@{}", id.name, id.version)
+ }
+}
+
/// A memory efficient path of the visited specifiers in the tree.
#[derive(Default, Clone)]
struct GraphSpecifierPath {
  // the path node visited before this one (`None` at the root)
  previous_node: Option<Arc<GraphSpecifierPath>>,
  // the specifier visited at this step
  specifier: String,
}
+
+impl GraphSpecifierPath {
+ pub fn new(specifier: String) -> Arc<Self> {
+ Arc::new(Self {
+ previous_node: None,
+ specifier,
+ })
+ }
+
+ pub fn with_specifier(self: &Arc<Self>, specifier: String) -> Arc<Self> {
+ Arc::new(Self {
+ previous_node: Some(self.clone()),
+ specifier,
+ })
+ }
+
+ pub fn pop(&self) -> Option<&Arc<Self>> {
+ self.previous_node.as_ref()
+ }
+}
+
/// The parent of a node in the resolution graph: either the root set of
/// package requirements or another resolved package node.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)]
enum NodeParent {
  /// These are top of the graph npm package requirements
  /// as specified in Deno code.
  Req,
  /// A reference to another node, which is a resolved package.
  Node(NpmPackageId),
}
+
/// A resolved package in the resolution graph.
#[derive(Debug)]
struct Node {
  pub id: NpmPackageId,
  /// If the node was forgotten due to having no parents.
  pub forgotten: bool,
  // Use BTreeMap and BTreeSet in order to create determinism
  // when going up and down the tree
  //
  // parents: bare specifier -> the set of parents referencing this
  // node via that specifier
  pub parents: BTreeMap<String, BTreeSet<NodeParent>>,
  // children: bare specifier -> the resolved child package id
  pub children: BTreeMap<String, NpmPackageId>,
  // parsed dependency entries of this package, shared cheaply via Arc
  pub deps: Arc<Vec<NpmDependencyEntry>>,
}
+
+impl Node {
+ pub fn add_parent(&mut self, specifier: String, parent: NodeParent) {
+ self.parents.entry(specifier).or_default().insert(parent);
+ }
+
+ pub fn remove_parent(&mut self, specifier: &str, parent: &NodeParent) {
+ if let Some(parents) = self.parents.get_mut(specifier) {
+ parents.remove(parent);
+ if parents.is_empty() {
+ self.parents.remove(specifier);
+ }
+ }
+ }
+}
+
/// The mutable npm resolution graph: package requirements at the root,
/// resolved package nodes below them.
#[derive(Debug, Default)]
pub struct Graph {
  // package requirement text -> the package id it resolved to
  package_reqs: HashMap<String, NpmPackageId>,
  // package name -> all resolved ids currently in the graph for that name
  packages_by_name: HashMap<String, Vec<NpmPackageId>>,
  // Ideally this value would be Rc<RefCell<Node>>, but we need to use a Mutex
  // because the lsp requires Send and this code is executed in the lsp.
  // Would be nice if the lsp wasn't Send.
  packages: HashMap<NpmPackageId, Arc<Mutex<Node>>>,
  // This will be set when creating from a snapshot, then
  // inform the final snapshot creation.
  packages_to_copy_index: HashMap<NpmPackageId, usize>,
}
+
impl Graph {
  /// Reconstructs a mutable resolution graph from a previously created
  /// snapshot, re-linking every package's parent/child edges and
  /// remembering each package's copy index.
  pub fn from_snapshot(snapshot: NpmResolutionSnapshot) -> Self {
    // Recursively creates the node for `id` (when not yet present) and
    // all of its dependency children, wiring up the parent/child links.
    fn fill_for_id(
      graph: &mut Graph,
      id: &NpmPackageId,
      packages: &HashMap<NpmPackageId, NpmResolutionPackage>,
    ) -> Arc<Mutex<Node>> {
      let resolution = packages.get(id).unwrap();
      let (created, node) = graph.get_or_create_for_id(id);
      if created {
        // only descend the first time this id is seen; an existing
        // node already had its children filled in
        for (name, child_id) in &resolution.dependencies {
          let child_node = fill_for_id(graph, child_id, packages);
          graph.set_child_parent_node(name, &child_node, id);
        }
      }
      node
    }

    let mut graph = Self {
      // Note: It might be more correct to store the copy index
      // from past resolutions with the node somehow, but maybe not.
      packages_to_copy_index: snapshot
        .packages
        .iter()
        .map(|(id, p)| (id.clone(), p.copy_index))
        .collect(),
      ..Default::default()
    };
    for (package_req, id) in &snapshot.package_reqs {
      let node = fill_for_id(&mut graph, id, &snapshot.packages);
      let package_req_text = package_req.to_string();
      // requirements are parented by the graph root
      (*node)
        .lock()
        .add_parent(package_req_text.clone(), NodeParent::Req);
      graph.package_reqs.insert(package_req_text, id.clone());
    }
    graph
  }

  /// Returns true when this package requirement was already resolved
  /// into the graph.
  pub fn has_package_req(&self, req: &NpmPackageReq) -> bool {
    self.package_reqs.contains_key(&req.to_string())
  }

  /// Gets the node for `id`, creating an empty one (and registering it
  /// in `packages_by_name`) when missing. The returned flag is `true`
  /// when the node was newly created.
  fn get_or_create_for_id(
    &mut self,
    id: &NpmPackageId,
  ) -> (bool, Arc<Mutex<Node>>) {
    if let Some(node) = self.packages.get(id) {
      (false, node.clone())
    } else {
      let node = Arc::new(Mutex::new(Node {
        id: id.clone(),
        forgotten: false,
        parents: Default::default(),
        children: Default::default(),
        deps: Default::default(),
      }));
      self
        .packages_by_name
        .entry(id.name.clone())
        .or_default()
        .push(id.clone());
      self.packages.insert(id.clone(), node.clone());
      (true, node)
    }
  }

  /// Locks and returns the node for `id`.
  ///
  /// Panics when the id is not in the graph — that indicates a bug in
  /// the resolution code.
  fn borrow_node(&self, id: &NpmPackageId) -> MutexGuard<Node> {
    (**self.packages.get(id).unwrap_or_else(|| {
      panic!("could not find id {} in the tree", id.as_serialized())
    }))
    .lock()
  }

  /// Removes a parentless node from the graph, marking it forgotten and
  /// recursively forgetting any children that become orphaned as a result.
  fn forget_orphan(&mut self, node_id: &NpmPackageId) {
    if let Some(node) = self.packages.remove(node_id) {
      let mut node = (*node).lock();
      node.forgotten = true;
      // only true orphans may be forgotten
      assert_eq!(node.parents.len(), 0);

      // Remove the id from the list of packages by name.
      let packages_with_name =
        self.packages_by_name.get_mut(&node.id.name).unwrap();
      let remove_index = packages_with_name
        .iter()
        .position(|id| id == &node.id)
        .unwrap();
      packages_with_name.remove(remove_index);

      let parent = NodeParent::Node(node.id.clone());
      for (specifier, child_id) in &node.children {
        let mut child = self.borrow_node(child_id);
        child.remove_parent(specifier, &parent);
        if child.parents.is_empty() {
          drop(child); // stop borrowing from self
          self.forget_orphan(child_id);
        }
      }
    }
  }

  /// Links `child` under `parent` via `specifier`, updating both sides
  /// of the edge (and `package_reqs` when the parent is the root).
  fn set_child_parent(
    &mut self,
    specifier: &str,
    child: &Mutex<Node>,
    parent: &NodeParent,
  ) {
    match parent {
      NodeParent::Node(parent_id) => {
        self.set_child_parent_node(specifier, child, parent_id);
      }
      NodeParent::Req => {
        let mut node = (*child).lock();
        node.add_parent(specifier.to_string(), parent.clone());
        self
          .package_reqs
          .insert(specifier.to_string(), node.id.clone());
      }
    }
  }

  /// Links `child` under the node identified by `parent_id`, setting the
  /// parent's children entry and the child's parents entry.
  fn set_child_parent_node(
    &mut self,
    specifier: &str,
    child: &Mutex<Node>,
    parent_id: &NpmPackageId,
  ) {
    let mut child = (*child).lock();
    let mut parent = (**self.packages.get(parent_id).unwrap_or_else(|| {
      panic!(
        "could not find {} in list of packages when setting child {}",
        parent_id.as_serialized(),
        child.id.as_serialized()
      )
    }))
    .lock();
    // a node must never be its own parent
    assert_ne!(parent.id, child.id);
    parent
      .children
      .insert(specifier.to_string(), child.id.clone());
    child
      .add_parent(specifier.to_string(), NodeParent::Node(parent.id.clone()));
  }

  /// Removes the `specifier` edge between `child_id` and `parent`,
  /// updating both sides of the edge (or `package_reqs` for the root).
  fn remove_child_parent(
    &mut self,
    specifier: &str,
    child_id: &NpmPackageId,
    parent: &NodeParent,
  ) {
    match parent {
      NodeParent::Node(parent_id) => {
        let mut node = self.borrow_node(parent_id);
        if let Some(removed_child_id) = node.children.remove(specifier) {
          assert_eq!(removed_child_id, *child_id);
        }
      }
      NodeParent::Req => {
        if let Some(removed_child_id) = self.package_reqs.remove(specifier) {
          assert_eq!(removed_child_id, *child_id);
        }
      }
    }
    self.borrow_node(child_id).remove_parent(specifier, parent);
  }

  /// Consumes the graph to produce an immutable snapshot, resolving copy
  /// indexes deterministically and fetching each package's dist info
  /// from the registry api.
  pub async fn into_snapshot(
    self,
    api: &impl NpmRegistryApi,
  ) -> Result<NpmResolutionSnapshot, AnyError> {
    let mut copy_index_resolver =
      SnapshotPackageCopyIndexResolver::from_map_with_capacity(
        self.packages_to_copy_index,
        self.packages.len(),
      );

    // Iterate through the packages vector in each packages_by_name in order
    // to set the copy index as this will be deterministic rather than
    // iterating over the hashmap below.
    for packages in self.packages_by_name.values() {
      if packages.len() > 1 {
        for id in packages {
          copy_index_resolver.resolve(id);
        }
      }
    }

    let mut packages = HashMap::with_capacity(self.packages.len());
    for (id, node) in self.packages {
      // expected to be cached in memory by this point, so awaiting here
      // fetches the already-resolved package version info
      let dist = api
        .package_version_info(&id.name, &id.version)
        .await?
        .unwrap()
        .dist;
      let node = node.lock();
      packages.insert(
        id.clone(),
        NpmResolutionPackage {
          copy_index: copy_index_resolver.resolve(&id),
          id,
          dist,
          dependencies: node
            .children
            .iter()
            .map(|(key, value)| (key.clone(), value.clone()))
            .collect(),
        },
      );
    }

    Ok(NpmResolutionSnapshot {
      package_reqs: self
        .package_reqs
        .into_iter()
        .map(|(specifier, id)| {
          (NpmPackageReq::from_str(&specifier).unwrap(), id)
        })
        .collect(),
      packages_by_name: self.packages_by_name,
      packages,
    })
  }
}
+
/// Resolves the dependencies (including peer dependencies) of packages
/// added to the graph, working through a queue of unresolved nodes.
pub struct GraphDependencyResolver<'a, TNpmRegistryApi: NpmRegistryApi> {
  graph: &'a mut Graph,
  api: &'a TNpmRegistryApi,
  // nodes whose dependencies still need resolving, each paired with the
  // path of versions visited to reach it (used for cycle detection)
  pending_unresolved_nodes:
    VecDeque<(Arc<VisitedVersionsPath>, Arc<Mutex<Node>>)>,
}
+
impl<'a, TNpmRegistryApi: NpmRegistryApi>
  GraphDependencyResolver<'a, TNpmRegistryApi>
{
  /// Creates a resolver over the provided graph with an empty queue.
  pub fn new(graph: &'a mut Graph, api: &'a TNpmRegistryApi) -> Self {
    Self {
      graph,
      api,
      pending_unresolved_nodes: Default::default(),
    }
  }

  /// Resolves the best version and info for `name`, preferring a version
  /// already present in the graph (to reduce duplication) and only
  /// falling back to the registry's package info when none matches.
  fn resolve_best_package_version_and_info(
    &self,
    name: &str,
    version_matcher: &impl NpmVersionMatcher,
    package_info: NpmPackageInfo,
  ) -> Result<VersionAndInfo, AnyError> {
    if let Some(version) =
      self.resolve_best_package_version(name, version_matcher)
    {
      match package_info.versions.get(&version.to_string()) {
        Some(version_info) => Ok(VersionAndInfo {
          version,
          info: version_info.clone(),
        }),
        None => {
          bail!("could not find version '{}' for '{}'", version, name)
        }
      }
    } else {
      // get the information
      get_resolved_package_version_and_info(
        name,
        version_matcher,
        package_info,
        None,
      )
    }
  }

  /// Finds the highest version of `name` already in the graph that
  /// satisfies `version_matcher`, if any.
  fn resolve_best_package_version(
    &self,
    name: &str,
    version_matcher: &impl NpmVersionMatcher,
  ) -> Option<NpmVersion> {
    let mut maybe_best_version: Option<&NpmVersion> = None;
    if let Some(ids) = self.graph.packages_by_name.get(name) {
      for version in ids.iter().map(|id| &id.version) {
        if version_matcher.matches(version) {
          // keep the strictly greater version
          let is_best_version = maybe_best_version
            .as_ref()
            .map(|best_version| (*best_version).cmp(version).is_lt())
            .unwrap_or(true);
          if is_best_version {
            maybe_best_version = Some(version);
          }
        }
      }
    }
    maybe_best_version.cloned()
  }

  /// Adds a top-level package requirement, linking the resolved node to
  /// the graph root and queueing it for dependency resolution.
  pub fn add_package_req(
    &mut self,
    package_req: &NpmPackageReq,
    package_info: NpmPackageInfo,
  ) -> Result<(), AnyError> {
    let node = self.resolve_node_from_info(
      &package_req.name,
      package_req,
      package_info,
    )?;
    self.graph.set_child_parent(
      &package_req.to_string(),
      &node,
      &NodeParent::Req,
    );
    self.try_add_pending_unresolved_node(None, &node);
    Ok(())
  }

  /// Resolves a dependency entry to a node, links it as a child of
  /// `parent_id`, and queues it for resolution (unless that would
  /// revisit a version already on the path).
  fn analyze_dependency(
    &mut self,
    entry: &NpmDependencyEntry,
    package_info: NpmPackageInfo,
    parent_id: &NpmPackageId,
    visited_versions: &Arc<VisitedVersionsPath>,
  ) -> Result<(), AnyError> {
    let node = self.resolve_node_from_info(
      &entry.name,
      match entry.kind {
        NpmDependencyEntryKind::Dep => &entry.version_req,
        // when resolving a peer dependency as a dependency, it should
        // use the "dependencies" entry version requirement if it exists
        NpmDependencyEntryKind::Peer | NpmDependencyEntryKind::OptionalPeer => {
          entry
            .peer_dep_version_req
            .as_ref()
            .unwrap_or(&entry.version_req)
        }
      },
      package_info,
    )?;
    self.graph.set_child_parent(
      &entry.bare_specifier,
      &node,
      &NodeParent::Node(parent_id.clone()),
    );
    self.try_add_pending_unresolved_node(Some(visited_versions), &node);
    Ok(())
  }

  /// Queues `node` for dependency resolution unless its version was
  /// already visited along the current path (a circular dependency).
  fn try_add_pending_unresolved_node(
    &mut self,
    maybe_previous_visited_versions: Option<&Arc<VisitedVersionsPath>>,
    node: &Arc<Mutex<Node>>,
  ) {
    let node_id = node.lock().id.clone();
    let visited_versions = match maybe_previous_visited_versions {
      Some(previous_visited_versions) => {
        match previous_visited_versions.with_id(&node_id) {
          Some(visited_versions) => visited_versions,
          None => return, // circular, don't visit this node
        }
      }
      None => VisitedVersionsPath::new(&node_id),
    };
    self
      .pending_unresolved_nodes
      .push_back((visited_versions, node.clone()));
  }

  /// Resolves `name` against the matcher to a concrete package id and
  /// creates its node when new, parsing and sorting the package's
  /// dependency entries for later resolution.
  fn resolve_node_from_info(
    &mut self,
    name: &str,
    version_matcher: &impl NpmVersionMatcher,
    package_info: NpmPackageInfo,
  ) -> Result<Arc<Mutex<Node>>, AnyError> {
    let version_and_info = self.resolve_best_package_version_and_info(
      name,
      version_matcher,
      package_info,
    )?;
    // peer dependencies start empty and are filled in later by
    // `set_new_peer_dep` when peers get resolved
    let id = NpmPackageId {
      name: name.to_string(),
      version: version_and_info.version.clone(),
      peer_dependencies: Vec::new(),
    };
    debug!(
      "Resolved {}@{} to {}",
      name,
      version_matcher.version_text(),
      id.as_serialized()
    );
    let (created, node) = self.graph.get_or_create_for_id(&id);
    if created {
      let mut node = (*node).lock();
      let mut deps = version_and_info
        .info
        .dependencies_as_entries()
        .with_context(|| format!("npm package: {}", id.display()))?;
      // Ensure name alphabetical and then version descending
      // so these are resolved in that order
      deps.sort();
      node.deps = Arc::new(deps);
    }

    Ok(node)
  }

  /// Drains the queue of pending nodes, fetching registry info and
  /// resolving each node's dependencies and peer dependencies.
  pub async fn resolve_pending(&mut self) -> Result<(), AnyError> {
    while !self.pending_unresolved_nodes.is_empty() {
      // now go down through the dependencies by tree depth
      while let Some((visited_versions, parent_node)) =
        self.pending_unresolved_nodes.pop_front()
      {
        let (mut parent_id, deps) = {
          let parent_node = parent_node.lock();
          if parent_node.forgotten {
            // todo(dsherret): we should try to reproduce this scenario and write a test
            continue;
          }
          (parent_node.id.clone(), parent_node.deps.clone())
        };

        // cache all the dependencies' registry infos in parallel if should
        if !should_sync_download() {
          let handles = deps
            .iter()
            .map(|dep| {
              let name = dep.name.clone();
              let api = self.api.clone();
              tokio::task::spawn(async move {
                // it's ok to call this without storing the result, because
                // NpmRegistryApi will cache the package info in memory
                api.package_info(&name).await
              })
            })
            .collect::<Vec<_>>();
          let results = futures::future::join_all(handles).await;
          for result in results {
            result??; // surface the first error
          }
        }

        // resolve the dependencies
        for dep in deps.iter() {
          let package_info = self.api.package_info(&dep.name).await?;

          match dep.kind {
            NpmDependencyEntryKind::Dep => {
              self.analyze_dependency(
                dep,
                package_info,
                &parent_id,
                &visited_versions,
              )?;
            }
            NpmDependencyEntryKind::Peer
            | NpmDependencyEntryKind::OptionalPeer => {
              let maybe_new_parent_id = self.resolve_peer_dep(
                &dep.bare_specifier,
                &parent_id,
                dep,
                package_info,
                &visited_versions,
              )?;
              if let Some(new_parent_id) = maybe_new_parent_id {
                // resolving the peer may have moved the parent to a new
                // id with the peer recorded; only the peer dependencies
                // portion of the id may change
                assert_eq!(
                  (&new_parent_id.name, &new_parent_id.version),
                  (&parent_id.name, &parent_id.version)
                );
                parent_id = new_parent_id;
              }
            }
          }
        }
      }
    }
    Ok(())
  }

  /// Resolves a peer dependency by searching the ancestors' siblings,
  /// breadth-first up the tree; when found, the ancestor chain is
  /// duplicated under new ids recording the peer (`set_new_peer_dep`).
  /// Returns the parent's replacement id when the parent was moved.
  fn resolve_peer_dep(
    &mut self,
    specifier: &str,
    parent_id: &NpmPackageId,
    peer_dep: &NpmDependencyEntry,
    peer_package_info: NpmPackageInfo,
    visited_ancestor_versions: &Arc<VisitedVersionsPath>,
  ) -> Result<Option<NpmPackageId>, AnyError> {
    // Returns the first child whose name matches and whose version
    // satisfies the peer requirement.
    fn find_matching_child<'a>(
      peer_dep: &NpmDependencyEntry,
      children: impl Iterator<Item = &'a NpmPackageId>,
    ) -> Option<NpmPackageId> {
      for child_id in children {
        if child_id.name == peer_dep.name
          && peer_dep.version_req.satisfies(&child_id.version)
        {
          return Some(child_id.clone());
        }
      }
      None
    }

    // Peer dependencies are resolved based on its ancestors' siblings.
    // If not found, then it resolves based on the version requirement if non-optional.
    let mut pending_ancestors = VecDeque::new(); // go up the tree by depth
    let path = GraphSpecifierPath::new(specifier.to_string());
    let visited_versions = VisitedVersionsPath::new(parent_id);

    // skip over the current node
    for (specifier, grand_parents) in
      self.graph.borrow_node(parent_id).parents.clone()
    {
      let path = path.with_specifier(specifier);
      for grand_parent in grand_parents {
        if let Some(visited_versions) =
          visited_versions.with_parent(&grand_parent)
        {
          pending_ancestors.push_back((
            grand_parent,
            path.clone(),
            visited_versions,
          ));
        }
      }
    }

    while let Some((ancestor, path, visited_versions)) =
      pending_ancestors.pop_front()
    {
      match &ancestor {
        NodeParent::Node(ancestor_node_id) => {
          // the ancestor itself may satisfy the peer requirement
          let maybe_peer_dep_id = if ancestor_node_id.name == peer_dep.name
            && peer_dep.version_req.satisfies(&ancestor_node_id.version)
          {
            Some(ancestor_node_id.clone())
          } else {
            // otherwise enqueue the ancestor's parents, then look
            // through the ancestor's children (the siblings)
            let ancestor = self.graph.borrow_node(ancestor_node_id);
            for (specifier, parents) in &ancestor.parents {
              let new_path = path.with_specifier(specifier.clone());
              for parent in parents {
                if let Some(visited_versions) =
                  visited_versions.with_parent(parent)
                {
                  pending_ancestors.push_back((
                    parent.clone(),
                    new_path.clone(),
                    visited_versions,
                  ));
                }
              }
            }
            find_matching_child(peer_dep, ancestor.children.values())
          };
          if let Some(peer_dep_id) = maybe_peer_dep_id {
            let parents =
              self.graph.borrow_node(ancestor_node_id).parents.clone();
            return Ok(Some(self.set_new_peer_dep(
              parents,
              ancestor_node_id,
              &peer_dep_id,
              &path,
              visited_ancestor_versions,
            )));
          }
        }
        NodeParent::Req => {
          // in this case, the parent is the root so the children are all the package requirements
          if let Some(child_id) =
            find_matching_child(peer_dep, self.graph.package_reqs.values())
          {
            let specifier = path.specifier.to_string();
            let path = path.pop().unwrap(); // go back down one level from the package requirement
            let old_id =
              self.graph.package_reqs.get(&specifier).unwrap().clone();
            return Ok(Some(self.set_new_peer_dep(
              BTreeMap::from([(specifier, BTreeSet::from([NodeParent::Req]))]),
              &old_id,
              &child_id,
              path,
              visited_ancestor_versions,
            )));
          }
        }
      }
    }

    // We didn't find anything by searching the ancestor siblings, so we need
    // to resolve based on the package info and will treat this just like any
    // other dependency when not optional
    if !peer_dep.kind.is_optional() {
      self.analyze_dependency(
        peer_dep,
        peer_package_info,
        parent_id,
        visited_ancestor_versions,
      )?;
    }

    Ok(None)
  }

  /// Walks back down the specifier path from the ancestor where the peer
  /// was found, re-creating each node under a new id that records the
  /// peer dependency and re-linking parents and children. Returns the id
  /// at the bottom of the path (the direct parent of the peer).
  fn set_new_peer_dep(
    &mut self,
    previous_parents: BTreeMap<String, BTreeSet<NodeParent>>,
    node_id: &NpmPackageId,
    peer_dep_id: &NpmPackageId,
    path: &Arc<GraphSpecifierPath>,
    visited_ancestor_versions: &Arc<VisitedVersionsPath>,
  ) -> NpmPackageId {
    let mut peer_dep_id = Cow::Borrowed(peer_dep_id);
    let old_id = node_id;
    let (new_id, old_node_children) =
      if old_id.peer_dependencies.contains(&peer_dep_id) {
        // the parent has already resolved to using this peer dependency
        // via some other path, so we don't need to update its ids,
        // but instead only make a link to it
        (
          old_id.clone(),
          self.graph.borrow_node(old_id).children.clone(),
        )
      } else {
        let mut new_id = old_id.clone();
        new_id.peer_dependencies.push(peer_dep_id.as_ref().clone());

        // this will happen for circular dependencies
        if *old_id == *peer_dep_id {
          peer_dep_id = Cow::Owned(new_id.clone());
        }

        // remove the previous parents from the old node
        let old_node_children = {
          for (specifier, parents) in &previous_parents {
            for parent in parents {
              self.graph.remove_child_parent(specifier, old_id, parent);
            }
          }
          let old_node = self.graph.borrow_node(old_id);
          old_node.children.clone()
        };

        let (_, new_node) = self.graph.get_or_create_for_id(&new_id);

        // update the previous parent to point to the new node
        // and this node to point at those parents
        for (specifier, parents) in previous_parents {
          for parent in parents {
            self.graph.set_child_parent(&specifier, &new_node, &parent);
          }
        }

        // now add the previous children to this node
        let new_id_as_parent = NodeParent::Node(new_id.clone());
        for (specifier, child_id) in &old_node_children {
          let child = self.graph.packages.get(child_id).unwrap().clone();
          self
            .graph
            .set_child_parent(specifier, &child, &new_id_as_parent);
        }
        (new_id, old_node_children)
      };

    // this is the parent id found at the bottom of the path
    let mut bottom_parent_id = new_id.clone();

    // continue going down the path
    let next_specifier = &path.specifier;
    if let Some(path) = path.pop() {
      let next_node_id = old_node_children.get(next_specifier).unwrap();
      bottom_parent_id = self.set_new_peer_dep(
        BTreeMap::from([(
          next_specifier.to_string(),
          BTreeSet::from([NodeParent::Node(new_id.clone())]),
        )]),
        next_node_id,
        &peer_dep_id,
        path,
        visited_ancestor_versions,
      );
    } else {
      // this means we're at the peer dependency now
      debug!(
        "Resolved peer dependency for {} in {} to {}",
        next_specifier,
        &new_id.as_serialized(),
        &peer_dep_id.as_serialized(),
      );
      assert!(!old_node_children.contains_key(next_specifier));
      let node = self.graph.get_or_create_for_id(&peer_dep_id).1;
      self.try_add_pending_unresolved_node(
        Some(visited_ancestor_versions),
        &node,
      );
      self
        .graph
        .set_child_parent_node(next_specifier, &node, &new_id);
    }

    // forget the old node at this point if it has no parents
    if new_id != *old_id {
      let old_node = self.graph.borrow_node(old_id);
      if old_node.parents.is_empty() {
        drop(old_node); // stop borrowing
        self.graph.forget_orphan(old_id);
      }
    }

    bottom_parent_id
  }
}
+
/// A resolved version paired with its registry version info.
#[derive(Clone)]
struct VersionAndInfo {
  version: NpmVersion,
  info: NpmPackageVersionInfo,
}
+
/// Resolves the best matching version (and its registry info) for
/// `pkg_name` from the provided package info, honoring a dist-tag when
/// the matcher specifies one; otherwise selects the highest matching
/// version from the versions map.
///
/// `parent` is only used to improve the not-found error message.
fn get_resolved_package_version_and_info(
  pkg_name: &str,
  version_matcher: &impl NpmVersionMatcher,
  info: NpmPackageInfo,
  parent: Option<&NpmPackageId>,
) -> Result<VersionAndInfo, AnyError> {
  let mut maybe_best_version: Option<VersionAndInfo> = None;
  if let Some(tag) = version_matcher.tag() {
    // For when someone just specifies @types/node, we want to pull in a
    // "known good" version of @types/node that works well with Deno and
    // not necessarily the latest version. For example, we might only be
    // compatible with Node vX, but then Node vY is published so we wouldn't
    // want to pull that in.
    // Note: If the user doesn't want this behavior, then they can specify an
    // explicit version.
    if tag == "latest" && pkg_name == "@types/node" {
      return get_resolved_package_version_and_info(
        pkg_name,
        &NpmVersionReq::parse("18.0.0 - 18.8.2").unwrap(),
        info,
        parent,
      );
    }

    if let Some(version) = info.dist_tags.get(tag) {
      match info.versions.get(version) {
        Some(info) => {
          // note: this parses the dist-tag's version string (the map
          // key), not the version info's `version` field
          return Ok(VersionAndInfo {
            version: NpmVersion::parse(version)?,
            info: info.clone(),
          });
        }
        None => {
          bail!(
            "Could not find version '{}' referenced in dist-tag '{}'.",
            version,
            tag,
          )
        }
      }
    } else {
      bail!("Could not find dist-tag '{}'.", tag)
    }
  } else {
    // no tag, so scan every version and keep the highest that matches;
    // note the version is parsed from the version info's `version`
    // field rather than the map key
    for (_, version_info) in info.versions.into_iter() {
      let version = NpmVersion::parse(&version_info.version)?;
      if version_matcher.matches(&version) {
        let is_best_version = maybe_best_version
          .as_ref()
          .map(|best_version| best_version.version.cmp(&version).is_lt())
          .unwrap_or(true);
        if is_best_version {
          maybe_best_version = Some(VersionAndInfo {
            version,
            info: version_info,
          });
        }
      }
    }
  }

  match maybe_best_version {
    Some(v) => Ok(v),
    // If the package isn't found, it likely means that the user needs to use
    // `--reload` to get the latest npm package information. Although it seems
    // like we could make this smart by fetching the latest information for
    // this package here, we really need a full restart. There could be very
    // interesting bugs that occur if this package's version was resolved by
    // something previous using the old information, then now being smart here
    // causes a new fetch of the package information, meaning this time the
    // previous resolution of this package's version resolved to an older
    // version, but next time to a different version because it has new information.
    None => bail!(
      concat!(
        "Could not find npm package '{}' matching {}{}. ",
        "Try retrieving the latest npm package information by running with --reload",
      ),
      pkg_name,
      version_matcher.version_text(),
      match parent {
        Some(id) => format!(" as specified in {}", id.display()),
        None => String::new(),
      }
    ),
  }
}
+
+#[cfg(test)]
+mod test {
+ use pretty_assertions::assert_eq;
+
+ use crate::npm::registry::TestNpmRegistryApi;
+ use crate::npm::NpmPackageReference;
+
+ use super::*;
+
  /// Exercises the dist-tag paths of `get_resolved_package_version_and_info`:
  /// a dist-tag pointing at a missing version errors, and a dist-tag
  /// pointing at an existing pre-release resolves to it.
  #[test]
  fn test_get_resolved_package_version_and_info() {
    // dist tag where version doesn't exist
    let package_ref = NpmPackageReference::from_str("npm:test").unwrap();
    let result = get_resolved_package_version_and_info(
      "test",
      &package_ref.req,
      NpmPackageInfo {
        name: "test".to_string(),
        versions: HashMap::new(),
        dist_tags: HashMap::from([(
          "latest".to_string(),
          "1.0.0-alpha".to_string(),
        )]),
      },
      None,
    );
    assert_eq!(
      result.err().unwrap().to_string(),
      "Could not find version '1.0.0-alpha' referenced in dist-tag 'latest'."
    );

    // dist tag where version is a pre-release
    let package_ref = NpmPackageReference::from_str("npm:test").unwrap();
    let result = get_resolved_package_version_and_info(
      "test",
      &package_ref.req,
      NpmPackageInfo {
        name: "test".to_string(),
        versions: HashMap::from([
          ("0.1.0".to_string(), NpmPackageVersionInfo::default()),
          (
            "1.0.0-alpha".to_string(),
            NpmPackageVersionInfo {
              version: "0.1.0-alpha".to_string(),
              ..Default::default()
            },
          ),
        ]),
        dist_tags: HashMap::from([(
          "latest".to_string(),
          "1.0.0-alpha".to_string(),
        )]),
      },
      None,
    );
    // resolves via the dist-tag, which parses the tag's version string
    assert_eq!(result.unwrap().version.to_string(), "1.0.0-alpha");
  }
+
  /// Plain dependency resolution without peer dependencies: verifies the
  /// highest satisfying versions are chosen (e.g. "*" -> 3.2.1) and that
  /// the resulting package list and requirement mapping are as expected.
  #[tokio::test]
  async fn resolve_deps_no_peer() {
    let api = TestNpmRegistryApi::default();
    api.ensure_package_version("package-a", "1.0.0");
    api.ensure_package_version("package-b", "2.0.0");
    api.ensure_package_version("package-c", "0.1.0");
    api.ensure_package_version("package-c", "0.0.10");
    api.ensure_package_version("package-d", "3.2.1");
    api.ensure_package_version("package-d", "3.2.0");
    api.add_dependency(("package-a", "1.0.0"), ("package-b", "^2"));
    api.add_dependency(("package-a", "1.0.0"), ("package-c", "^0.1"));
    api.add_dependency(("package-c", "0.1.0"), ("package-d", "*"));

    let (packages, package_reqs) =
      run_resolver_and_get_output(api, vec!["npm:package-a@1"]).await;
    assert_eq!(
      packages,
      vec![
        NpmResolutionPackage {
          id: NpmPackageId::from_serialized("package-a@1.0.0").unwrap(),
          copy_index: 0,
          dependencies: HashMap::from([
            (
              "package-b".to_string(),
              NpmPackageId::from_serialized("package-b@2.0.0").unwrap(),
            ),
            (
              "package-c".to_string(),
              NpmPackageId::from_serialized("package-c@0.1.0").unwrap(),
            ),
          ]),
          dist: Default::default(),
        },
        NpmResolutionPackage {
          id: NpmPackageId::from_serialized("package-b@2.0.0").unwrap(),
          copy_index: 0,
          dist: Default::default(),
          dependencies: Default::default(),
        },
        NpmResolutionPackage {
          id: NpmPackageId::from_serialized("package-c@0.1.0").unwrap(),
          copy_index: 0,
          dist: Default::default(),
          dependencies: HashMap::from([(
            "package-d".to_string(),
            NpmPackageId::from_serialized("package-d@3.2.1").unwrap(),
          )])
        },
        NpmResolutionPackage {
          id: NpmPackageId::from_serialized("package-d@3.2.1").unwrap(),
          copy_index: 0,
          dist: Default::default(),
          dependencies: Default::default(),
        },
      ]
    );
    assert_eq!(
      package_reqs,
      vec![("package-a@1".to_string(), "package-a@1.0.0".to_string())]
    );
  }
+
  /// Circular dependency between two packages: resolution terminates and
  /// both packages keep their mutual dependency edges.
  #[tokio::test]
  async fn resolve_deps_circular() {
    let api = TestNpmRegistryApi::default();
    api.ensure_package_version("package-a", "1.0.0");
    api.ensure_package_version("package-b", "2.0.0");
    api.add_dependency(("package-a", "1.0.0"), ("package-b", "*"));
    api.add_dependency(("package-b", "2.0.0"), ("package-a", "1"));

    let (packages, package_reqs) =
      run_resolver_and_get_output(api, vec!["npm:package-a@1.0"]).await;
    assert_eq!(
      packages,
      vec![
        NpmResolutionPackage {
          id: NpmPackageId::from_serialized("package-a@1.0.0").unwrap(),
          copy_index: 0,
          dependencies: HashMap::from([(
            "package-b".to_string(),
            NpmPackageId::from_serialized("package-b@2.0.0").unwrap(),
          )]),
          dist: Default::default(),
        },
        NpmResolutionPackage {
          id: NpmPackageId::from_serialized("package-b@2.0.0").unwrap(),
          copy_index: 0,
          dependencies: HashMap::from([(
            "package-a".to_string(),
            NpmPackageId::from_serialized("package-a@1.0.0").unwrap(),
          )]),
          dist: Default::default(),
        },
      ]
    );
    assert_eq!(
      package_reqs,
      vec![("package-a@1.0".to_string(), "package-a@1.0.0".to_string())]
    );
  }
+
  /// Peer dependency satisfied by a top-of-tree requirement: the peer
  /// resolves to the requirement's 4.0.0 (not the registry's newer
  /// 4.1.0) and the affected ids record the peer (the `_package-peer@...`
  /// suffix in the serialized ids).
  #[tokio::test]
  async fn resolve_with_peer_deps_top_tree() {
    let api = TestNpmRegistryApi::default();
    api.ensure_package_version("package-a", "1.0.0");
    api.ensure_package_version("package-b", "2.0.0");
    api.ensure_package_version("package-c", "3.0.0");
    api.ensure_package_version("package-peer", "4.0.0");
    api.ensure_package_version("package-peer", "4.1.0");
    api.add_dependency(("package-a", "1.0.0"), ("package-b", "^2"));
    api.add_dependency(("package-a", "1.0.0"), ("package-c", "^3"));
    api.add_peer_dependency(("package-b", "2.0.0"), ("package-peer", "4"));
    api.add_peer_dependency(("package-c", "3.0.0"), ("package-peer", "*"));

    let (packages, package_reqs) = run_resolver_and_get_output(
      api,
      // the peer dependency is specified here at the top of the tree
      // so it should resolve to 4.0.0 instead of 4.1.0
      vec!["npm:package-a@1", "npm:package-peer@4.0.0"],
    )
    .await;
    assert_eq!(
      packages,
      vec![
        NpmResolutionPackage {
          id: NpmPackageId::from_serialized(
            "package-a@1.0.0_package-peer@4.0.0"
          )
          .unwrap(),
          copy_index: 0,
          dependencies: HashMap::from([
            (
              "package-b".to_string(),
              NpmPackageId::from_serialized(
                "package-b@2.0.0_package-peer@4.0.0"
              )
              .unwrap(),
            ),
            (
              "package-c".to_string(),
              NpmPackageId::from_serialized(
                "package-c@3.0.0_package-peer@4.0.0"
              )
              .unwrap(),
            ),
          ]),
          dist: Default::default(),
        },
        NpmResolutionPackage {
          id: NpmPackageId::from_serialized(
            "package-b@2.0.0_package-peer@4.0.0"
          )
          .unwrap(),
          copy_index: 0,
          dist: Default::default(),
          dependencies: HashMap::from([(
            "package-peer".to_string(),
            NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
          )])
        },
        NpmResolutionPackage {
          id: NpmPackageId::from_serialized(
            "package-c@3.0.0_package-peer@4.0.0"
          )
          .unwrap(),
          copy_index: 0,
          dist: Default::default(),
          dependencies: HashMap::from([(
            "package-peer".to_string(),
            NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
          )])
        },
        NpmResolutionPackage {
          id: NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
          copy_index: 0,
          dist: Default::default(),
          dependencies: Default::default(),
        },
      ]
    );
    assert_eq!(
      package_reqs,
      vec![
        (
          "package-a@1".to_string(),
          "package-a@1.0.0_package-peer@4.0.0".to_string()
        ),
        (
          "package-peer@4.0.0".to_string(),
          "package-peer@4.0.0".to_string()
        )
      ]
    );
  }
+
+ #[tokio::test]
+ async fn resolve_with_peer_deps_ancestor_sibling_not_top_tree() {
+ let api = TestNpmRegistryApi::default();
+ api.ensure_package_version("package-0", "1.1.1");
+ api.ensure_package_version("package-a", "1.0.0");
+ api.ensure_package_version("package-b", "2.0.0");
+ api.ensure_package_version("package-c", "3.0.0");
+ api.ensure_package_version("package-peer", "4.0.0");
+ api.ensure_package_version("package-peer", "4.1.0");
+ api.add_dependency(("package-0", "1.1.1"), ("package-a", "1"));
+ api.add_dependency(("package-a", "1.0.0"), ("package-b", "^2"));
+ api.add_dependency(("package-a", "1.0.0"), ("package-c", "^3"));
+ // the peer dependency is specified here as a sibling of "a" and "b"
+ // so it should resolve to 4.0.0 instead of 4.1.0
+ api.add_dependency(("package-a", "1.0.0"), ("package-peer", "4.0.0"));
+ api.add_peer_dependency(("package-b", "2.0.0"), ("package-peer", "4"));
+ api.add_peer_dependency(("package-c", "3.0.0"), ("package-peer", "*"));
+
+ let (packages, package_reqs) =
+ run_resolver_and_get_output(api, vec!["npm:package-0@1.1.1"]).await;
+ assert_eq!(
+ packages,
+ vec![
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-0@1.1.1").unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-a".to_string(),
+ NpmPackageId::from_serialized("package-a@1.0.0_package-peer@4.0.0")
+ .unwrap(),
+ ),]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer@4.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([
+ (
+ "package-b".to_string(),
+ NpmPackageId::from_serialized(
+ "package-b@2.0.0_package-peer@4.0.0"
+ )
+ .unwrap(),
+ ),
+ (
+ "package-c".to_string(),
+ NpmPackageId::from_serialized(
+ "package-c@3.0.0_package-peer@4.0.0"
+ )
+ .unwrap(),
+ ),
+ (
+ "package-peer".to_string(),
+ NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
+ ),
+ ]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized(
+ "package-b@2.0.0_package-peer@4.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dist: Default::default(),
+ dependencies: HashMap::from([(
+ "package-peer".to_string(),
+ NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
+ )])
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized(
+ "package-c@3.0.0_package-peer@4.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dist: Default::default(),
+ dependencies: HashMap::from([(
+ "package-peer".to_string(),
+ NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
+ )])
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
+ copy_index: 0,
+ dist: Default::default(),
+ dependencies: Default::default(),
+ },
+ ]
+ );
+ assert_eq!(
+ package_reqs,
+ vec![("package-0@1.1.1".to_string(), "package-0@1.1.1".to_string())]
+ );
+ }
+
+ #[tokio::test]
+ async fn resolve_with_peer_deps_auto_resolved() {
+ // in this case, the peer dependency is not found in the tree
+ // so it's auto-resolved based on the registry
+ let api = TestNpmRegistryApi::default();
+ api.ensure_package_version("package-a", "1.0.0");
+ api.ensure_package_version("package-b", "2.0.0");
+ api.ensure_package_version("package-c", "3.0.0");
+ api.ensure_package_version("package-peer", "4.0.0");
+ api.ensure_package_version("package-peer", "4.1.0");
+ api.add_dependency(("package-a", "1.0.0"), ("package-b", "^2"));
+ api.add_dependency(("package-a", "1.0.0"), ("package-c", "^3"));
+ api.add_peer_dependency(("package-b", "2.0.0"), ("package-peer", "4"));
+ api.add_peer_dependency(("package-c", "3.0.0"), ("package-peer", "*"));
+
+ let (packages, package_reqs) =
+ run_resolver_and_get_output(api, vec!["npm:package-a@1"]).await;
+ assert_eq!(
+ packages,
+ vec![
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-a@1.0.0").unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([
+ (
+ "package-b".to_string(),
+ NpmPackageId::from_serialized("package-b@2.0.0").unwrap(),
+ ),
+ (
+ "package-c".to_string(),
+ NpmPackageId::from_serialized("package-c@3.0.0").unwrap(),
+ ),
+ ]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-b@2.0.0").unwrap(),
+ copy_index: 0,
+ dist: Default::default(),
+ dependencies: HashMap::from([(
+ "package-peer".to_string(),
+ NpmPackageId::from_serialized("package-peer@4.1.0").unwrap(),
+ )])
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-c@3.0.0").unwrap(),
+ copy_index: 0,
+ dist: Default::default(),
+ dependencies: HashMap::from([(
+ "package-peer".to_string(),
+ NpmPackageId::from_serialized("package-peer@4.1.0").unwrap(),
+ )])
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-peer@4.1.0").unwrap(),
+ copy_index: 0,
+ dist: Default::default(),
+ dependencies: Default::default(),
+ },
+ ]
+ );
+ assert_eq!(
+ package_reqs,
+ vec![("package-a@1".to_string(), "package-a@1.0.0".to_string())]
+ );
+ }
+
+ #[tokio::test]
+ async fn resolve_with_optional_peer_dep_not_resolved() {
+    // in this case, the optional peer dependency is not found in the
+    // tree, so it is left unresolved and omitted from the dependencies
+ let api = TestNpmRegistryApi::default();
+ api.ensure_package_version("package-a", "1.0.0");
+ api.ensure_package_version("package-b", "2.0.0");
+ api.ensure_package_version("package-c", "3.0.0");
+ api.ensure_package_version("package-peer", "4.0.0");
+ api.ensure_package_version("package-peer", "4.1.0");
+ api.add_dependency(("package-a", "1.0.0"), ("package-b", "^2"));
+ api.add_dependency(("package-a", "1.0.0"), ("package-c", "^3"));
+ api.add_optional_peer_dependency(
+ ("package-b", "2.0.0"),
+ ("package-peer", "4"),
+ );
+ api.add_optional_peer_dependency(
+ ("package-c", "3.0.0"),
+ ("package-peer", "*"),
+ );
+
+ let (packages, package_reqs) =
+ run_resolver_and_get_output(api, vec!["npm:package-a@1"]).await;
+ assert_eq!(
+ packages,
+ vec![
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-a@1.0.0").unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([
+ (
+ "package-b".to_string(),
+ NpmPackageId::from_serialized("package-b@2.0.0").unwrap(),
+ ),
+ (
+ "package-c".to_string(),
+ NpmPackageId::from_serialized("package-c@3.0.0").unwrap(),
+ ),
+ ]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-b@2.0.0").unwrap(),
+ copy_index: 0,
+ dist: Default::default(),
+ dependencies: HashMap::new(),
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-c@3.0.0").unwrap(),
+ copy_index: 0,
+ dist: Default::default(),
+ dependencies: HashMap::new(),
+ },
+ ]
+ );
+ assert_eq!(
+ package_reqs,
+ vec![("package-a@1".to_string(), "package-a@1.0.0".to_string())]
+ );
+ }
+
+ #[tokio::test]
+ async fn resolve_with_optional_peer_found() {
+ let api = TestNpmRegistryApi::default();
+ api.ensure_package_version("package-a", "1.0.0");
+ api.ensure_package_version("package-b", "2.0.0");
+ api.ensure_package_version("package-c", "3.0.0");
+ api.ensure_package_version("package-peer", "4.0.0");
+ api.ensure_package_version("package-peer", "4.1.0");
+ api.add_dependency(("package-a", "1.0.0"), ("package-b", "^2"));
+ api.add_dependency(("package-a", "1.0.0"), ("package-c", "^3"));
+ api.add_optional_peer_dependency(
+ ("package-b", "2.0.0"),
+ ("package-peer", "4"),
+ );
+ api.add_optional_peer_dependency(
+ ("package-c", "3.0.0"),
+ ("package-peer", "*"),
+ );
+
+ let (packages, package_reqs) = run_resolver_and_get_output(
+ api,
+ vec!["npm:package-a@1", "npm:package-peer@4.0.0"],
+ )
+ .await;
+ assert_eq!(
+ packages,
+ vec![
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer@4.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([
+ (
+ "package-b".to_string(),
+ NpmPackageId::from_serialized(
+ "package-b@2.0.0_package-peer@4.0.0"
+ )
+ .unwrap(),
+ ),
+ (
+ "package-c".to_string(),
+ NpmPackageId::from_serialized(
+ "package-c@3.0.0_package-peer@4.0.0"
+ )
+ .unwrap(),
+ ),
+ ]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized(
+ "package-b@2.0.0_package-peer@4.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dist: Default::default(),
+ dependencies: HashMap::from([(
+ "package-peer".to_string(),
+ NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
+ )])
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized(
+ "package-c@3.0.0_package-peer@4.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dist: Default::default(),
+ dependencies: HashMap::from([(
+ "package-peer".to_string(),
+ NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
+ )])
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
+ copy_index: 0,
+ dist: Default::default(),
+ dependencies: Default::default(),
+ },
+ ]
+ );
+ assert_eq!(
+ package_reqs,
+ vec![
+ (
+ "package-a@1".to_string(),
+ "package-a@1.0.0_package-peer@4.0.0".to_string()
+ ),
+ (
+ "package-peer@4.0.0".to_string(),
+ "package-peer@4.0.0".to_string()
+ )
+ ]
+ );
+ }
+
+ #[tokio::test]
+ async fn resolve_nested_peer_deps_auto_resolved() {
+ let api = TestNpmRegistryApi::default();
+ api.ensure_package_version("package-0", "1.0.0");
+ api.ensure_package_version("package-peer-a", "2.0.0");
+ api.ensure_package_version("package-peer-b", "3.0.0");
+ api.add_peer_dependency(("package-0", "1.0.0"), ("package-peer-a", "2"));
+ api.add_peer_dependency(
+ ("package-peer-a", "2.0.0"),
+ ("package-peer-b", "3"),
+ );
+
+ let (packages, package_reqs) =
+ run_resolver_and_get_output(api, vec!["npm:package-0@1.0"]).await;
+ assert_eq!(
+ packages,
+ vec![
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-0@1.0.0").unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-peer-a".to_string(),
+ NpmPackageId::from_serialized("package-peer-a@2.0.0").unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-peer-a@2.0.0").unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-peer-b".to_string(),
+ NpmPackageId::from_serialized("package-peer-b@3.0.0").unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-peer-b@3.0.0").unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::new(),
+ dist: Default::default(),
+ },
+ ]
+ );
+ assert_eq!(
+ package_reqs,
+ vec![("package-0@1.0".to_string(), "package-0@1.0.0".to_string())]
+ );
+ }
+
+ #[tokio::test]
+ async fn resolve_nested_peer_deps_ancestor_sibling_deps() {
+ let api = TestNpmRegistryApi::default();
+ api.ensure_package_version("package-0", "1.0.0");
+ api.ensure_package_version("package-peer-a", "2.0.0");
+ api.ensure_package_version("package-peer-b", "3.0.0");
+ api.add_dependency(("package-0", "1.0.0"), ("package-peer-b", "*"));
+ api.add_peer_dependency(("package-0", "1.0.0"), ("package-peer-a", "2"));
+ api.add_peer_dependency(
+ ("package-peer-a", "2.0.0"),
+ ("package-peer-b", "3"),
+ );
+
+ let (packages, package_reqs) = run_resolver_and_get_output(
+ api,
+ vec![
+ "npm:package-0@1.0",
+ "npm:package-peer-a@2",
+ "npm:package-peer-b@3",
+ ],
+ )
+ .await;
+ assert_eq!(
+ packages,
+ vec![
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized(
+ "package-0@1.0.0_package-peer-a@2.0.0_package-peer-b@3.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([
+ (
+ "package-peer-a".to_string(),
+ NpmPackageId::from_serialized(
+ "package-peer-a@2.0.0_package-peer-b@3.0.0"
+ )
+ .unwrap(),
+ ),
+ (
+ "package-peer-b".to_string(),
+ NpmPackageId::from_serialized("package-peer-b@3.0.0").unwrap(),
+ )
+ ]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized(
+ "package-peer-a@2.0.0_package-peer-b@3.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-peer-b".to_string(),
+ NpmPackageId::from_serialized("package-peer-b@3.0.0").unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-peer-b@3.0.0").unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::new(),
+ dist: Default::default(),
+ },
+ ]
+ );
+ assert_eq!(
+ package_reqs,
+ vec![
+ (
+ "package-0@1.0".to_string(),
+ "package-0@1.0.0_package-peer-a@2.0.0_package-peer-b@3.0.0"
+ .to_string()
+ ),
+ (
+ "package-peer-a@2".to_string(),
+ "package-peer-a@2.0.0_package-peer-b@3.0.0".to_string()
+ ),
+ (
+ "package-peer-b@3".to_string(),
+ "package-peer-b@3.0.0".to_string()
+ )
+ ]
+ );
+ }
+
+ #[tokio::test]
+ async fn resolve_with_peer_deps_multiple() {
+ let api = TestNpmRegistryApi::default();
+ api.ensure_package_version("package-0", "1.1.1");
+ api.ensure_package_version("package-a", "1.0.0");
+ api.ensure_package_version("package-b", "2.0.0");
+ api.ensure_package_version("package-c", "3.0.0");
+ api.ensure_package_version("package-d", "3.5.0");
+ api.ensure_package_version("package-e", "3.6.0");
+ api.ensure_package_version("package-peer-a", "4.0.0");
+ api.ensure_package_version("package-peer-a", "4.1.0");
+ api.ensure_package_version("package-peer-b", "5.3.0");
+ api.ensure_package_version("package-peer-b", "5.4.1");
+ api.ensure_package_version("package-peer-c", "6.2.0");
+ api.add_dependency(("package-0", "1.1.1"), ("package-a", "1"));
+ api.add_dependency(("package-a", "1.0.0"), ("package-b", "^2"));
+ api.add_dependency(("package-a", "1.0.0"), ("package-c", "^3"));
+ api.add_dependency(("package-a", "1.0.0"), ("package-d", "^3"));
+ api.add_dependency(("package-a", "1.0.0"), ("package-peer-a", "4.0.0"));
+ api.add_peer_dependency(("package-b", "2.0.0"), ("package-peer-a", "4"));
+ api.add_peer_dependency(
+ ("package-b", "2.0.0"),
+ ("package-peer-c", "=6.2.0"),
+ );
+ api.add_peer_dependency(("package-c", "3.0.0"), ("package-peer-a", "*"));
+ api.add_peer_dependency(
+ ("package-peer-a", "4.0.0"),
+ ("package-peer-b", "^5.4"), // will be auto-resolved
+ );
+
+ let (packages, package_reqs) = run_resolver_and_get_output(
+ api,
+ vec!["npm:package-0@1.1.1", "npm:package-e@3"],
+ )
+ .await;
+ assert_eq!(
+ packages,
+ vec![
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-0@1.1.1").unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-a".to_string(),
+ NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer-a@4.0.0"
+ )
+ .unwrap(),
+ ),]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer-a@4.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([
+ (
+ "package-b".to_string(),
+ NpmPackageId::from_serialized(
+ "package-b@2.0.0_package-peer-a@4.0.0"
+ )
+ .unwrap(),
+ ),
+ (
+ "package-c".to_string(),
+ NpmPackageId::from_serialized(
+ "package-c@3.0.0_package-peer-a@4.0.0"
+ )
+ .unwrap(),
+ ),
+ (
+ "package-d".to_string(),
+ NpmPackageId::from_serialized("package-d@3.5.0").unwrap(),
+ ),
+ (
+ "package-peer-a".to_string(),
+ NpmPackageId::from_serialized("package-peer-a@4.0.0").unwrap(),
+ ),
+ ]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized(
+ "package-b@2.0.0_package-peer-a@4.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dist: Default::default(),
+ dependencies: HashMap::from([
+ (
+ "package-peer-a".to_string(),
+ NpmPackageId::from_serialized("package-peer-a@4.0.0").unwrap(),
+ ),
+ (
+ "package-peer-c".to_string(),
+ NpmPackageId::from_serialized("package-peer-c@6.2.0").unwrap(),
+ )
+ ])
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized(
+ "package-c@3.0.0_package-peer-a@4.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dist: Default::default(),
+ dependencies: HashMap::from([(
+ "package-peer-a".to_string(),
+ NpmPackageId::from_serialized("package-peer-a@4.0.0").unwrap(),
+ )])
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-d@3.5.0").unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-e@3.6.0").unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-peer-a@4.0.0").unwrap(),
+ copy_index: 0,
+ dist: Default::default(),
+ dependencies: HashMap::from([(
+ "package-peer-b".to_string(),
+ NpmPackageId::from_serialized("package-peer-b@5.4.1").unwrap(),
+ )])
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-peer-b@5.4.1").unwrap(),
+ copy_index: 0,
+ dist: Default::default(),
+ dependencies: Default::default(),
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-peer-c@6.2.0").unwrap(),
+ copy_index: 0,
+ dist: Default::default(),
+ dependencies: Default::default(),
+ },
+ ]
+ );
+ assert_eq!(
+ package_reqs,
+ vec![
+ ("package-0@1.1.1".to_string(), "package-0@1.1.1".to_string()),
+ ("package-e@3".to_string(), "package-e@3.6.0".to_string()),
+ ]
+ );
+ }
+
+ #[tokio::test]
+ async fn resolve_peer_deps_circular() {
+ let api = TestNpmRegistryApi::default();
+ api.ensure_package_version("package-a", "1.0.0");
+ api.ensure_package_version("package-b", "2.0.0");
+ api.add_dependency(("package-a", "1.0.0"), ("package-b", "*"));
+ api.add_peer_dependency(("package-b", "2.0.0"), ("package-a", "1"));
+
+ let (packages, package_reqs) =
+ run_resolver_and_get_output(api, vec!["npm:package-a@1.0"]).await;
+ assert_eq!(
+ packages,
+ vec![
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-a@1.0.0_package-a@1.0.0")
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-b".to_string(),
+ NpmPackageId::from_serialized(
+ "package-b@2.0.0_package-a@1.0.0__package-a@1.0.0"
+ )
+ .unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized(
+ "package-b@2.0.0_package-a@1.0.0__package-a@1.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-a".to_string(),
+ NpmPackageId::from_serialized("package-a@1.0.0_package-a@1.0.0")
+ .unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ ]
+ );
+ assert_eq!(
+ package_reqs,
+ vec![(
+ "package-a@1.0".to_string(),
+ "package-a@1.0.0_package-a@1.0.0".to_string()
+ )]
+ );
+ }
+
+ #[tokio::test]
+ async fn resolve_peer_deps_multiple_copies() {
+ // repeat this a few times to have a higher probability of surfacing indeterminism
+ for _ in 0..3 {
+ let api = TestNpmRegistryApi::default();
+ api.ensure_package_version("package-a", "1.0.0");
+ api.ensure_package_version("package-b", "2.0.0");
+ api.ensure_package_version("package-dep", "3.0.0");
+ api.ensure_package_version("package-peer", "4.0.0");
+ api.ensure_package_version("package-peer", "5.0.0");
+ api.add_dependency(("package-a", "1.0.0"), ("package-dep", "*"));
+ api.add_dependency(("package-a", "1.0.0"), ("package-peer", "4"));
+ api.add_dependency(("package-b", "2.0.0"), ("package-dep", "*"));
+ api.add_dependency(("package-b", "2.0.0"), ("package-peer", "5"));
+ api.add_peer_dependency(("package-dep", "3.0.0"), ("package-peer", "*"));
+
+ let (packages, package_reqs) = run_resolver_and_get_output(
+ api,
+ vec!["npm:package-a@1", "npm:package-b@2"],
+ )
+ .await;
+ assert_eq!(
+ packages,
+ vec![
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer@4.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([
+ (
+ "package-dep".to_string(),
+ NpmPackageId::from_serialized(
+ "package-dep@3.0.0_package-peer@4.0.0"
+ )
+ .unwrap(),
+ ),
+ (
+ "package-peer".to_string(),
+ NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
+ ),
+ ]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized(
+ "package-b@2.0.0_package-peer@5.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([
+ (
+ "package-dep".to_string(),
+ NpmPackageId::from_serialized(
+ "package-dep@3.0.0_package-peer@5.0.0"
+ )
+ .unwrap(),
+ ),
+ (
+ "package-peer".to_string(),
+ NpmPackageId::from_serialized("package-peer@5.0.0").unwrap(),
+ ),
+ ]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized(
+ "package-dep@3.0.0_package-peer@4.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-peer".to_string(),
+ NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized(
+ "package-dep@3.0.0_package-peer@5.0.0"
+ )
+ .unwrap(),
+ copy_index: 1,
+ dependencies: HashMap::from([(
+ "package-peer".to_string(),
+ NpmPackageId::from_serialized("package-peer@5.0.0").unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::new(),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ id: NpmPackageId::from_serialized("package-peer@5.0.0").unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::new(),
+ dist: Default::default(),
+ },
+ ]
+ );
+ assert_eq!(
+ package_reqs,
+ vec![
+ (
+ "package-a@1".to_string(),
+ "package-a@1.0.0_package-peer@4.0.0".to_string()
+ ),
+ (
+ "package-b@2".to_string(),
+ "package-b@2.0.0_package-peer@5.0.0".to_string()
+ )
+ ]
+ );
+ }
+ }
+
+ async fn run_resolver_and_get_output(
+ api: TestNpmRegistryApi,
+ reqs: Vec<&str>,
+ ) -> (Vec<NpmResolutionPackage>, Vec<(String, String)>) {
+ let mut graph = Graph::default();
+ let mut resolver = GraphDependencyResolver::new(&mut graph, &api);
+
+ for req in reqs {
+ let req = NpmPackageReference::from_str(req).unwrap().req;
+ resolver
+ .add_package_req(&req, api.package_info(&req.name).await.unwrap())
+ .unwrap();
+ }
+
+ resolver.resolve_pending().await.unwrap();
+ let snapshot = graph.into_snapshot(&api).await.unwrap();
+ let mut packages = snapshot.all_packages();
+ packages.sort_by(|a, b| a.id.cmp(&b.id));
+ let mut package_reqs = snapshot
+ .package_reqs
+ .into_iter()
+ .map(|(a, b)| (a.to_string(), b.as_serialized()))
+ .collect::<Vec<_>>();
+ package_reqs.sort_by(|a, b| a.0.to_string().cmp(&b.0.to_string()));
+ (packages, package_reqs)
+ }
+}
diff --git a/cli/npm/resolution/mod.rs b/cli/npm/resolution/mod.rs
new file mode 100644
index 000000000..934cfb59b
--- /dev/null
+++ b/cli/npm/resolution/mod.rs
@@ -0,0 +1,676 @@
+// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
+
+use std::collections::HashMap;
+use std::collections::HashSet;
+
+use deno_ast::ModuleSpecifier;
+use deno_core::anyhow::bail;
+use deno_core::anyhow::Context;
+use deno_core::error::generic_error;
+use deno_core::error::AnyError;
+use deno_core::futures;
+use deno_core::parking_lot::RwLock;
+use serde::Deserialize;
+use serde::Serialize;
+
+use crate::lockfile::Lockfile;
+
+use self::graph::GraphDependencyResolver;
+use self::snapshot::NpmPackagesPartitioned;
+
+use super::cache::should_sync_download;
+use super::cache::NpmPackageCacheFolderId;
+use super::registry::NpmPackageVersionDistInfo;
+use super::registry::RealNpmRegistryApi;
+use super::semver::NpmVersion;
+use super::semver::SpecifierVersionReq;
+use super::NpmRegistryApi;
+
+mod graph;
+mod snapshot;
+
+use graph::Graph;
+pub use snapshot::NpmResolutionSnapshot;
+
+/// The version matcher used for npm schemed urls is more strict than
+/// the one used by npm packages and so we represent either via a trait.
+pub trait NpmVersionMatcher {
+ fn tag(&self) -> Option<&str>;
+ fn matches(&self, version: &NpmVersion) -> bool;
+ fn version_text(&self) -> String;
+}
+
+#[derive(Clone, Debug, Default, PartialEq, Eq)]
+pub struct NpmPackageReference {
+ pub req: NpmPackageReq,
+ pub sub_path: Option<String>,
+}
+
+impl NpmPackageReference {
+ pub fn from_specifier(
+ specifier: &ModuleSpecifier,
+ ) -> Result<NpmPackageReference, AnyError> {
+ Self::from_str(specifier.as_str())
+ }
+
+ pub fn from_str(specifier: &str) -> Result<NpmPackageReference, AnyError> {
+ let specifier = match specifier.strip_prefix("npm:") {
+ Some(s) => s,
+ None => {
+ bail!("Not an npm specifier: {}", specifier);
+ }
+ };
+ let parts = specifier.split('/').collect::<Vec<_>>();
+ let name_part_len = if specifier.starts_with('@') { 2 } else { 1 };
+ if parts.len() < name_part_len {
+ return Err(generic_error(format!("Not a valid package: {}", specifier)));
+ }
+ let name_parts = &parts[0..name_part_len];
+ let last_name_part = &name_parts[name_part_len - 1];
+ let (name, version_req) = if let Some(at_index) = last_name_part.rfind('@')
+ {
+ let version = &last_name_part[at_index + 1..];
+ let last_name_part = &last_name_part[..at_index];
+ let version_req = SpecifierVersionReq::parse(version)
+ .with_context(|| "Invalid version requirement.")?;
+ let name = if name_part_len == 1 {
+ last_name_part.to_string()
+ } else {
+ format!("{}/{}", name_parts[0], last_name_part)
+ };
+ (name, Some(version_req))
+ } else {
+ (name_parts.join("/"), None)
+ };
+ let sub_path = if parts.len() == name_parts.len() {
+ None
+ } else {
+ Some(parts[name_part_len..].join("/"))
+ };
+
+ if let Some(sub_path) = &sub_path {
+ if let Some(at_index) = sub_path.rfind('@') {
+ let (new_sub_path, version) = sub_path.split_at(at_index);
+ let msg = format!(
+ "Invalid package specifier 'npm:{}/{}'. Did you mean to write 'npm:{}{}/{}'?",
+ name, sub_path, name, version, new_sub_path
+ );
+ return Err(generic_error(msg));
+ }
+ }
+
+ Ok(NpmPackageReference {
+ req: NpmPackageReq { name, version_req },
+ sub_path,
+ })
+ }
+}
+
+impl std::fmt::Display for NpmPackageReference {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ if let Some(sub_path) = &self.sub_path {
+ write!(f, "npm:{}/{}", self.req, sub_path)
+ } else {
+ write!(f, "npm:{}", self.req)
+ }
+ }
+}
+
+#[derive(
+ Clone, Debug, Default, PartialEq, Eq, Hash, Serialize, Deserialize,
+)]
+pub struct NpmPackageReq {
+ pub name: String,
+ pub version_req: Option<SpecifierVersionReq>,
+}
+
+impl std::fmt::Display for NpmPackageReq {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match &self.version_req {
+ Some(req) => write!(f, "{}@{}", self.name, req),
+ None => write!(f, "{}", self.name),
+ }
+ }
+}
+
+impl NpmPackageReq {
+ pub fn from_str(text: &str) -> Result<Self, AnyError> {
+    // probably should do something more targeted in the future
+ let reference = NpmPackageReference::from_str(&format!("npm:{}", text))?;
+ Ok(reference.req)
+ }
+}
+
+impl NpmVersionMatcher for NpmPackageReq {
+ fn tag(&self) -> Option<&str> {
+ match &self.version_req {
+ Some(version_req) => version_req.tag(),
+ None => Some("latest"),
+ }
+ }
+
+ fn matches(&self, version: &NpmVersion) -> bool {
+ match self.version_req.as_ref() {
+ Some(req) => {
+ assert_eq!(self.tag(), None);
+ match req.range() {
+ Some(range) => range.satisfies(version),
+ None => false,
+ }
+ }
+ None => version.pre.is_empty(),
+ }
+ }
+
+ fn version_text(&self) -> String {
+ self
+ .version_req
+ .as_ref()
+ .map(|v| format!("{}", v))
+ .unwrap_or_else(|| "non-prerelease".to_string())
+ }
+}
+
+#[derive(
+ Debug, Clone, PartialOrd, Ord, PartialEq, Eq, Hash, Serialize, Deserialize,
+)]
+pub struct NpmPackageId {
+ pub name: String,
+ pub version: NpmVersion,
+ pub peer_dependencies: Vec<NpmPackageId>,
+}
+
+impl NpmPackageId {
+ #[allow(unused)]
+ pub fn scope(&self) -> Option<&str> {
+ if self.name.starts_with('@') && self.name.contains('/') {
+ self.name.split('/').next()
+ } else {
+ None
+ }
+ }
+
+ pub fn as_serialized(&self) -> String {
+ self.as_serialized_with_level(0)
+ }
+
+ fn as_serialized_with_level(&self, level: usize) -> String {
+ // WARNING: This should not change because it's used in the lockfile
+ let mut result = format!(
+ "{}@{}",
+ if level == 0 {
+ self.name.to_string()
+ } else {
+ self.name.replace('/', "+")
+ },
+ self.version
+ );
+ for peer in &self.peer_dependencies {
+ // unfortunately we can't do something like `_3` when
+ // this gets deep because npm package names can start
+ // with a number
+ result.push_str(&"_".repeat(level + 1));
+ result.push_str(&peer.as_serialized_with_level(level + 1));
+ }
+ result
+ }
+
+ pub fn from_serialized(id: &str) -> Result<Self, AnyError> {
+ use monch::*;
+
+ fn parse_name(input: &str) -> ParseResult<&str> {
+ if_not_empty(substring(move |input| {
+ for (pos, c) in input.char_indices() {
+ // first character might be a scope, so skip it
+ if pos > 0 && c == '@' {
+ return Ok((&input[pos..], ()));
+ }
+ }
+ ParseError::backtrace()
+ }))(input)
+ }
+
+ fn parse_version(input: &str) -> ParseResult<&str> {
+ if_not_empty(substring(skip_while(|c| c != '_')))(input)
+ }
+
+ fn parse_name_and_version(
+ input: &str,
+ ) -> ParseResult<(String, NpmVersion)> {
+ let (input, name) = parse_name(input)?;
+ let (input, _) = ch('@')(input)?;
+ let at_version_input = input;
+ let (input, version) = parse_version(input)?;
+ match NpmVersion::parse(version) {
+ Ok(version) => Ok((input, (name.to_string(), version))),
+ Err(err) => ParseError::fail(at_version_input, format!("{:#}", err)),
+ }
+ }
+
+ fn parse_level_at_level<'a>(
+ level: usize,
+ ) -> impl Fn(&'a str) -> ParseResult<'a, ()> {
+ fn parse_level(input: &str) -> ParseResult<usize> {
+ let level = input.chars().take_while(|c| *c == '_').count();
+ Ok((&input[level..], level))
+ }
+
+ move |input| {
+ let (input, parsed_level) = parse_level(input)?;
+ if parsed_level == level {
+ Ok((input, ()))
+ } else {
+ ParseError::backtrace()
+ }
+ }
+ }
+
+ fn parse_peers_at_level<'a>(
+ level: usize,
+ ) -> impl Fn(&'a str) -> ParseResult<'a, Vec<NpmPackageId>> {
+ move |mut input| {
+ let mut peers = Vec::new();
+ while let Ok((level_input, _)) = parse_level_at_level(level)(input) {
+ input = level_input;
+ let peer_result = parse_id_at_level(level)(input)?;
+ input = peer_result.0;
+ peers.push(peer_result.1);
+ }
+ Ok((input, peers))
+ }
+ }
+
+ fn parse_id_at_level<'a>(
+ level: usize,
+ ) -> impl Fn(&'a str) -> ParseResult<'a, NpmPackageId> {
+ move |input| {
+ let (input, (name, version)) = parse_name_and_version(input)?;
+ let name = if level > 0 {
+ name.replace('+', "/")
+ } else {
+ name
+ };
+ let (input, peer_dependencies) =
+ parse_peers_at_level(level + 1)(input)?;
+ Ok((
+ input,
+ NpmPackageId {
+ name,
+ version,
+ peer_dependencies,
+ },
+ ))
+ }
+ }
+
+ with_failure_handling(parse_id_at_level(0))(id)
+ .with_context(|| format!("Invalid npm package id '{}'.", id))
+ }
+
+ pub fn display(&self) -> String {
+ // Don't implement std::fmt::Display because we don't
+ // want this to be used by accident in certain scenarios.
+ format!("{}@{}", self.name, self.version)
+ }
+}
+
+/// A fully resolved npm package in the resolution snapshot, including its
+/// registry distribution info and resolved dependency ids.
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
+pub struct NpmResolutionPackage {
+ /// Unique id (name, version, and resolved peer dependencies).
+ pub id: NpmPackageId,
+ /// The peer dependency resolution can differ for the same
+ /// package (name and version) depending on where it is in
+ /// the resolution tree. This copy index indicates which
+ /// copy of the package this is.
+ pub copy_index: usize,
+ /// Tarball/checksum info from the registry for this exact version.
+ pub dist: NpmPackageVersionDistInfo,
+ /// Key is what the package refers to the other package as,
+ /// which could be different from the package name.
+ pub dependencies: HashMap<String, NpmPackageId>,
+}
+
+impl NpmResolutionPackage {
+ /// Builds the cache folder identifier (name, version, copy index)
+ /// that determines where this package lives on disk.
+ pub fn get_package_cache_folder_id(&self) -> NpmPackageCacheFolderId {
+ NpmPackageCacheFolderId {
+ name: self.id.name.clone(),
+ version: self.id.version.clone(),
+ copy_index: self.copy_index,
+ }
+ }
+}
+
+/// Thread-safe holder of the current npm resolution snapshot, with the
+/// registry api used to resolve newly added package requirements.
+pub struct NpmResolution {
+ api: RealNpmRegistryApi,
+ // current resolution state; cloned for reads, replaced wholesale on update
+ snapshot: RwLock<NpmResolutionSnapshot>,
+ // serializes snapshot updates so only one resolve runs at a time
+ // NOTE(review): field name misspells "semaphore" — rename would touch
+ // all uses, so left as-is here
+ update_sempahore: tokio::sync::Semaphore,
+}
+
+// Manual Debug impl: only shows the snapshot (the api and semaphore
+// fields are omitted).
+impl std::fmt::Debug for NpmResolution {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ let snapshot = self.snapshot.read();
+ f.debug_struct("NpmResolution")
+ .field("snapshot", &snapshot)
+ .finish()
+ }
+}
+
+impl NpmResolution {
+ /// Creates a resolution backed by `api`, starting from the given
+ /// snapshot or an empty one.
+ pub fn new(
+ api: RealNpmRegistryApi,
+ initial_snapshot: Option<NpmResolutionSnapshot>,
+ ) -> Self {
+ Self {
+ api,
+ snapshot: RwLock::new(initial_snapshot.unwrap_or_default()),
+ update_sempahore: tokio::sync::Semaphore::new(1),
+ }
+ }
+
+ /// Resolves the provided requirements and merges them into the
+ /// current snapshot.
+ pub async fn add_package_reqs(
+ &self,
+ package_reqs: Vec<NpmPackageReq>,
+ ) -> Result<(), AnyError> {
+ // only allow one thread in here at a time
+ let _permit = self.update_sempahore.acquire().await.unwrap();
+ let snapshot = self.snapshot.read().clone();
+
+ let snapshot = self
+ .add_package_reqs_to_snapshot(package_reqs, snapshot)
+ .await?;
+
+ *self.snapshot.write() = snapshot;
+ Ok(())
+ }
+
+ /// Replaces the requirement set. If any previously resolved
+ /// requirement is no longer present, resolution starts over from an
+ /// empty snapshot; otherwise new requirements are added incrementally.
+ pub async fn set_package_reqs(
+ &self,
+ package_reqs: HashSet<NpmPackageReq>,
+ ) -> Result<(), AnyError> {
+ // only allow one thread in here at a time
+ let _permit = self.update_sempahore.acquire().await.unwrap();
+ let snapshot = self.snapshot.read().clone();
+
+ let has_removed_package = !snapshot
+ .package_reqs
+ .keys()
+ .all(|req| package_reqs.contains(req));
+ // if any packages were removed, we need to completely recreate the npm resolution snapshot
+ let snapshot = if has_removed_package {
+ NpmResolutionSnapshot::default()
+ } else {
+ snapshot
+ };
+ let snapshot = self
+ .add_package_reqs_to_snapshot(
+ package_reqs.into_iter().collect(),
+ snapshot,
+ )
+ .await?;
+
+ *self.snapshot.write() = snapshot;
+
+ Ok(())
+ }
+
+ /// Fetches registry info for each new requirement (concurrently,
+ /// unless deterministic sync download is requested), feeds them to the
+ /// graph dependency resolver, and produces the updated snapshot.
+ async fn add_package_reqs_to_snapshot(
+ &self,
+ mut package_reqs: Vec<NpmPackageReq>,
+ snapshot: NpmResolutionSnapshot,
+ ) -> Result<NpmResolutionSnapshot, AnyError> {
+ // convert the snapshot to a traversable graph
+ let mut graph = Graph::from_snapshot(snapshot);
+
+ // multiple packages are resolved in alphabetical order
+ package_reqs.sort_by(|a, b| a.name.cmp(&b.name));
+
+ // go over the top level packages first, then down the
+ // tree one level at a time through all the branches
+ let mut unresolved_tasks = Vec::with_capacity(package_reqs.len());
+ for package_req in package_reqs {
+ if graph.has_package_req(&package_req) {
+ // skip analyzing this package, as there's already a matching top level package
+ continue;
+ }
+
+ // no existing best version, so resolve the current packages
+ let api = self.api.clone();
+ let maybe_info = if should_sync_download() {
+ // for deterministic test output
+ Some(api.package_info(&package_req.name).await)
+ } else {
+ None
+ };
+ unresolved_tasks.push(tokio::task::spawn(async move {
+ let info = match maybe_info {
+ Some(info) => info?,
+ None => api.package_info(&package_req.name).await?,
+ };
+ Result::<_, AnyError>::Ok((package_req, info))
+ }));
+ }
+
+ let mut resolver = GraphDependencyResolver::new(&mut graph, &self.api);
+
+ for result in futures::future::join_all(unresolved_tasks).await {
+ let (package_req, info) = result??;
+ resolver.add_package_req(&package_req, info)?;
+ }
+
+ resolver.resolve_pending().await?;
+
+ graph.into_snapshot(&self.api).await
+ }
+
+ /// Looks up a resolved package by its exact id, cloning it out of the
+ /// snapshot.
+ pub fn resolve_package_from_id(
+ &self,
+ id: &NpmPackageId,
+ ) -> Option<NpmResolutionPackage> {
+ self.snapshot.read().package_from_id(id).cloned()
+ }
+
+ /// Maps a package id to its on-disk cache folder id (name, version,
+ /// copy index), if the package is in the snapshot.
+ pub fn resolve_package_cache_folder_id_from_id(
+ &self,
+ id: &NpmPackageId,
+ ) -> Option<NpmPackageCacheFolderId> {
+ self
+ .snapshot
+ .read()
+ .package_from_id(id)
+ .map(|p| p.get_package_cache_folder_id())
+ }
+
+ /// Resolves the package that `referrer` refers to as `name`.
+ pub fn resolve_package_from_package(
+ &self,
+ name: &str,
+ referrer: &NpmPackageCacheFolderId,
+ ) -> Result<NpmResolutionPackage, AnyError> {
+ self
+ .snapshot
+ .read()
+ .resolve_package_from_package(name, referrer)
+ .cloned()
+ }
+
+ /// Resolve a node package from a deno module.
+ pub fn resolve_package_from_deno_module(
+ &self,
+ package: &NpmPackageReq,
+ ) -> Result<NpmResolutionPackage, AnyError> {
+ self
+ .snapshot
+ .read()
+ .resolve_package_from_deno_module(package)
+ .cloned()
+ }
+
+ /// Clones out every resolved package in the snapshot.
+ pub fn all_packages(&self) -> Vec<NpmResolutionPackage> {
+ self.snapshot.read().all_packages()
+ }
+
+ /// All packages split into originals and peer-dependency "copy" packages.
+ pub fn all_packages_partitioned(&self) -> NpmPackagesPartitioned {
+ self.snapshot.read().all_packages_partitioned()
+ }
+
+ /// True when at least one package has been resolved.
+ pub fn has_packages(&self) -> bool {
+ !self.snapshot.read().packages.is_empty()
+ }
+
+ /// Clones the current snapshot.
+ pub fn snapshot(&self) -> NpmResolutionSnapshot {
+ self.snapshot.read().clone()
+ }
+
+ /// Writes the given snapshot's specifier mappings and this resolution's
+ /// packages into the lockfile, erroring on integrity mismatches.
+ pub fn lock(
+ &self,
+ lockfile: &mut Lockfile,
+ snapshot: &NpmResolutionSnapshot,
+ ) -> Result<(), AnyError> {
+ for (package_req, package_id) in snapshot.package_reqs.iter() {
+ lockfile.insert_npm_specifier(package_req, package_id);
+ }
+ for package in self.all_packages() {
+ lockfile.check_or_insert_npm_package(&package)?;
+ }
+ Ok(())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ // Covers parsing of npm specifiers: scoped/unscoped names, optional
+ // version requirements, optional sub paths, and an invalid case.
+ #[test]
+ fn parse_npm_package_ref() {
+ assert_eq!(
+ NpmPackageReference::from_str("npm:@package/test").unwrap(),
+ NpmPackageReference {
+ req: NpmPackageReq {
+ name: "@package/test".to_string(),
+ version_req: None,
+ },
+ sub_path: None,
+ }
+ );
+
+ assert_eq!(
+ NpmPackageReference::from_str("npm:@package/test@1").unwrap(),
+ NpmPackageReference {
+ req: NpmPackageReq {
+ name: "@package/test".to_string(),
+ version_req: Some(SpecifierVersionReq::parse("1").unwrap()),
+ },
+ sub_path: None,
+ }
+ );
+
+ assert_eq!(
+ NpmPackageReference::from_str("npm:@package/test@~1.1/sub_path").unwrap(),
+ NpmPackageReference {
+ req: NpmPackageReq {
+ name: "@package/test".to_string(),
+ version_req: Some(SpecifierVersionReq::parse("~1.1").unwrap()),
+ },
+ sub_path: Some("sub_path".to_string()),
+ }
+ );
+
+ assert_eq!(
+ NpmPackageReference::from_str("npm:@package/test/sub_path").unwrap(),
+ NpmPackageReference {
+ req: NpmPackageReq {
+ name: "@package/test".to_string(),
+ version_req: None,
+ },
+ sub_path: Some("sub_path".to_string()),
+ }
+ );
+
+ assert_eq!(
+ NpmPackageReference::from_str("npm:test").unwrap(),
+ NpmPackageReference {
+ req: NpmPackageReq {
+ name: "test".to_string(),
+ version_req: None,
+ },
+ sub_path: None,
+ }
+ );
+
+ assert_eq!(
+ NpmPackageReference::from_str("npm:test@^1.2").unwrap(),
+ NpmPackageReference {
+ req: NpmPackageReq {
+ name: "test".to_string(),
+ version_req: Some(SpecifierVersionReq::parse("^1.2").unwrap()),
+ },
+ sub_path: None,
+ }
+ );
+
+ assert_eq!(
+ NpmPackageReference::from_str("npm:test@~1.1/sub_path").unwrap(),
+ NpmPackageReference {
+ req: NpmPackageReq {
+ name: "test".to_string(),
+ version_req: Some(SpecifierVersionReq::parse("~1.1").unwrap()),
+ },
+ sub_path: Some("sub_path".to_string()),
+ }
+ );
+
+ assert_eq!(
+ NpmPackageReference::from_str("npm:@package/test/sub_path").unwrap(),
+ NpmPackageReference {
+ req: NpmPackageReq {
+ name: "@package/test".to_string(),
+ version_req: None,
+ },
+ sub_path: Some("sub_path".to_string()),
+ }
+ );
+
+ // a bare scope with no package name is rejected
+ assert_eq!(
+ NpmPackageReference::from_str("npm:@package")
+ .err()
+ .unwrap()
+ .to_string(),
+ "Not a valid package: @package"
+ );
+ }
+
+ // Round-trips a nested peer-dependency id through as_serialized /
+ // from_serialized, checking the underscore-depth encoding.
+ #[test]
+ fn serialize_npm_package_id() {
+ let id = NpmPackageId {
+ name: "pkg-a".to_string(),
+ version: NpmVersion::parse("1.2.3").unwrap(),
+ peer_dependencies: vec![
+ NpmPackageId {
+ name: "pkg-b".to_string(),
+ version: NpmVersion::parse("3.2.1").unwrap(),
+ peer_dependencies: vec![
+ NpmPackageId {
+ name: "pkg-c".to_string(),
+ version: NpmVersion::parse("1.3.2").unwrap(),
+ peer_dependencies: vec![],
+ },
+ NpmPackageId {
+ name: "pkg-d".to_string(),
+ version: NpmVersion::parse("2.3.4").unwrap(),
+ peer_dependencies: vec![],
+ },
+ ],
+ },
+ NpmPackageId {
+ name: "pkg-e".to_string(),
+ version: NpmVersion::parse("2.3.1").unwrap(),
+ peer_dependencies: vec![NpmPackageId {
+ name: "pkg-f".to_string(),
+ version: NpmVersion::parse("2.3.1").unwrap(),
+ peer_dependencies: vec![],
+ }],
+ },
+ ],
+ };
+ let serialized = id.as_serialized();
+ assert_eq!(serialized, "pkg-a@1.2.3_pkg-b@3.2.1__pkg-c@1.3.2__pkg-d@2.3.4_pkg-e@2.3.1__pkg-f@2.3.1");
+ assert_eq!(NpmPackageId::from_serialized(&serialized).unwrap(), id);
+ }
+}
diff --git a/cli/npm/resolution/snapshot.rs b/cli/npm/resolution/snapshot.rs
new file mode 100644
index 000000000..d76ba8b1a
--- /dev/null
+++ b/cli/npm/resolution/snapshot.rs
@@ -0,0 +1,470 @@
+// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
+
+use std::collections::HashMap;
+use std::collections::HashSet;
+use std::sync::Arc;
+
+use deno_core::anyhow::anyhow;
+use deno_core::anyhow::bail;
+use deno_core::anyhow::Context;
+use deno_core::error::AnyError;
+use deno_core::futures;
+use deno_core::parking_lot::Mutex;
+use serde::Deserialize;
+use serde::Serialize;
+
+use crate::lockfile::Lockfile;
+use crate::npm::cache::should_sync_download;
+use crate::npm::cache::NpmPackageCacheFolderId;
+use crate::npm::registry::NpmPackageVersionDistInfo;
+use crate::npm::registry::NpmRegistryApi;
+use crate::npm::registry::RealNpmRegistryApi;
+
+use super::NpmPackageId;
+use super::NpmPackageReq;
+use super::NpmResolutionPackage;
+use super::NpmVersionMatcher;
+
+/// Packages partitioned by if they are "copy" packages or not.
+pub struct NpmPackagesPartitioned {
+ /// Packages with copy index 0 (the "original" resolution of each
+ /// name/version).
+ pub packages: Vec<NpmResolutionPackage>,
+ /// Since peer dependency resolution occurs based on ancestors and ancestor
+ /// siblings, this may sometimes cause the same package (name and version)
+ /// to have different dependencies based on where it appears in the tree.
+ /// For these packages, we create a "copy package" or duplicate of the package
+ /// whose dependencies are that of where in the tree they've resolved to.
+ pub copy_packages: Vec<NpmResolutionPackage>,
+}
+
+impl NpmPackagesPartitioned {
+ /// Recombines both partitions into a single list (originals first,
+ /// then copy packages).
+ pub fn into_all(self) -> Vec<NpmResolutionPackage> {
+ let mut packages = self.packages;
+ packages.extend(self.copy_packages);
+ packages
+ }
+}
+
+/// An immutable view of a completed npm resolution: requirement-to-id
+/// mappings, ids grouped by package name, and the resolved packages.
+#[derive(Debug, Clone, Default, Serialize, Deserialize)]
+pub struct NpmResolutionSnapshot {
+ // what each top-level requirement resolved to
+ #[serde(with = "map_to_vec")]
+ pub(super) package_reqs: HashMap<NpmPackageReq, NpmPackageId>,
+ // all resolved ids indexed by bare package name
+ pub(super) packages_by_name: HashMap<String, Vec<NpmPackageId>>,
+ // full package data keyed by id
+ #[serde(with = "map_to_vec")]
+ pub(super) packages: HashMap<NpmPackageId, NpmResolutionPackage>,
+}
+
+// This is done so the maps with non-string keys get serialized and deserialized as vectors.
+// Adapted from: https://github.com/serde-rs/serde/issues/936#issuecomment-302281792
+mod map_to_vec {
+ use std::collections::HashMap;
+
+ use serde::de::Deserialize;
+ use serde::de::Deserializer;
+ use serde::ser::Serializer;
+ use serde::Serialize;
+
+ // Serializes the map as a sequence of (key, value) pairs.
+ pub fn serialize<S, K: Serialize, V: Serialize>(
+ map: &HashMap<K, V>,
+ serializer: S,
+ ) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ serializer.collect_seq(map.iter())
+ }
+
+ // Rebuilds the map from a vector of (key, value) pairs; later
+ // duplicate keys overwrite earlier ones.
+ pub fn deserialize<
+ 'de,
+ D,
+ K: Deserialize<'de> + Eq + std::hash::Hash,
+ V: Deserialize<'de>,
+ >(
+ deserializer: D,
+ ) -> Result<HashMap<K, V>, D::Error>
+ where
+ D: Deserializer<'de>,
+ {
+ let mut map = HashMap::new();
+ for (key, value) in Vec::<(K, V)>::deserialize(deserializer)? {
+ map.insert(key, value);
+ }
+ Ok(map)
+ }
+}
+
+impl NpmResolutionSnapshot {
+ /// Resolve a node package from a deno module.
+ pub fn resolve_package_from_deno_module(
+ &self,
+ req: &NpmPackageReq,
+ ) -> Result<&NpmResolutionPackage, AnyError> {
+ match self.package_reqs.get(req) {
+ // unwrap: every id in package_reqs has an entry in packages
+ Some(id) => Ok(self.packages.get(id).unwrap()),
+ None => bail!("could not find npm package directory for '{}'", req),
+ }
+ }
+
+ /// The distinct set of ids resolved for top-level requirements
+ /// (deduplicated; order is unspecified).
+ pub fn top_level_packages(&self) -> Vec<NpmPackageId> {
+ self
+ .package_reqs
+ .values()
+ .cloned()
+ .collect::<HashSet<_>>()
+ .into_iter()
+ .collect::<Vec<_>>()
+ }
+
+ /// Looks up a package by its exact id.
+ pub fn package_from_id(
+ &self,
+ id: &NpmPackageId,
+ ) -> Option<&NpmResolutionPackage> {
+ self.packages.get(id)
+ }
+
+ /// Resolves what `referrer` means by `name`: first through the
+ /// referrer's declared dependencies, then the referrer itself
+ /// (self-reference), and finally by best matching version overall.
+ pub fn resolve_package_from_package(
+ &self,
+ name: &str,
+ referrer: &NpmPackageCacheFolderId,
+ ) -> Result<&NpmResolutionPackage, AnyError> {
+ // todo(dsherret): do we need an additional hashmap to get this quickly?
+ let referrer_package = self
+ .packages_by_name
+ .get(&referrer.name)
+ .and_then(|packages| {
+ packages
+ .iter()
+ .filter(|p| p.version == referrer.version)
+ .filter_map(|id| {
+ let package = self.packages.get(id)?;
+ if package.copy_index == referrer.copy_index {
+ Some(package)
+ } else {
+ None
+ }
+ })
+ .next()
+ })
+ .ok_or_else(|| {
+ anyhow!("could not find referrer npm package '{}'", referrer)
+ })?;
+
+ // strip any trailing path (e.g. "pkg/sub" -> "pkg")
+ let name = name_without_path(name);
+ if let Some(id) = referrer_package.dependencies.get(name) {
+ return Ok(self.packages.get(id).unwrap());
+ }
+
+ if referrer_package.id.name == name {
+ return Ok(referrer_package);
+ }
+
+ // TODO(bartlomieju): this should use a reverse lookup table in the
+ // snapshot instead of resolving best version again.
+ let req = NpmPackageReq {
+ name: name.to_string(),
+ version_req: None,
+ };
+
+ if let Some(id) = self.resolve_best_package_id(name, &req) {
+ if let Some(pkg) = self.packages.get(&id) {
+ return Ok(pkg);
+ }
+ }
+
+ bail!(
+ "could not find npm package '{}' referenced by '{}'",
+ name,
+ referrer
+ )
+ }
+
+ /// Clones out every resolved package.
+ pub fn all_packages(&self) -> Vec<NpmResolutionPackage> {
+ self.packages.values().cloned().collect()
+ }
+
+ /// Splits packages into originals (copy_index == 0) and peer-dependency
+ /// "copy" packages (copy_index > 0).
+ pub fn all_packages_partitioned(&self) -> NpmPackagesPartitioned {
+ let mut packages = self.all_packages();
+ let mut copy_packages = Vec::with_capacity(packages.len() / 2); // at most 1 copy for every package
+
+ // partition out any packages that are "copy" packages
+ for i in (0..packages.len()).rev() {
+ if packages[i].copy_index > 0 {
+ copy_packages.push(packages.swap_remove(i));
+ }
+ }
+
+ NpmPackagesPartitioned {
+ packages,
+ copy_packages,
+ }
+ }
+
+ /// Picks the highest resolved version of `name` that satisfies the
+ /// version matcher, if any.
+ pub fn resolve_best_package_id(
+ &self,
+ name: &str,
+ version_matcher: &impl NpmVersionMatcher,
+ ) -> Option<NpmPackageId> {
+ // todo(dsherret): this is not exactly correct because some ids
+ // will be better than others due to peer dependencies
+ let mut maybe_best_id: Option<&NpmPackageId> = None;
+ if let Some(ids) = self.packages_by_name.get(name) {
+ for id in ids {
+ if version_matcher.matches(&id.version) {
+ let is_best_version = maybe_best_id
+ .as_ref()
+ .map(|best_id| best_id.version.cmp(&id.version).is_lt())
+ .unwrap_or(true);
+ if is_best_version {
+ maybe_best_id = Some(id);
+ }
+ }
+ }
+ }
+ maybe_best_id.cloned()
+ }
+
+ /// Rebuilds a snapshot from a lockfile: parses specifiers and package
+ /// entries, verifies referenced ids all exist, then fetches registry
+ /// info to fill in each package's dist data.
+ pub async fn from_lockfile(
+ lockfile: Arc<Mutex<Lockfile>>,
+ api: &RealNpmRegistryApi,
+ ) -> Result<Self, AnyError> {
+ let mut package_reqs: HashMap<NpmPackageReq, NpmPackageId>;
+ let mut packages_by_name: HashMap<String, Vec<NpmPackageId>>;
+ let mut packages: HashMap<NpmPackageId, NpmResolutionPackage>;
+ let mut copy_index_resolver: SnapshotPackageCopyIndexResolver;
+
+ // scope the lockfile mutex guard to the synchronous parsing phase
+ {
+ let lockfile = lockfile.lock();
+
+ // pre-allocate collections
+ package_reqs =
+ HashMap::with_capacity(lockfile.content.npm.specifiers.len());
+ let packages_len = lockfile.content.npm.packages.len();
+ packages = HashMap::with_capacity(packages_len);
+ packages_by_name = HashMap::with_capacity(packages_len); // close enough
+ copy_index_resolver =
+ SnapshotPackageCopyIndexResolver::with_capacity(packages_len);
+ let mut verify_ids = HashSet::with_capacity(packages_len);
+
+ // collect the specifiers to version mappings
+ for (key, value) in &lockfile.content.npm.specifiers {
+ let package_req = NpmPackageReq::from_str(key)
+ .with_context(|| format!("Unable to parse npm specifier: {}", key))?;
+ let package_id = NpmPackageId::from_serialized(value)?;
+ package_reqs.insert(package_req, package_id.clone());
+ verify_ids.insert(package_id.clone());
+ }
+
+ // then the packages
+ for (key, value) in &lockfile.content.npm.packages {
+ let package_id = NpmPackageId::from_serialized(key)?;
+
+ // collect the dependencies
+ let mut dependencies = HashMap::default();
+
+ packages_by_name
+ .entry(package_id.name.to_string())
+ .or_default()
+ .push(package_id.clone());
+
+ for (name, specifier) in &value.dependencies {
+ let dep_id = NpmPackageId::from_serialized(specifier)?;
+ dependencies.insert(name.to_string(), dep_id.clone());
+ verify_ids.insert(dep_id);
+ }
+
+ let package = NpmResolutionPackage {
+ id: package_id.clone(),
+ copy_index: copy_index_resolver.resolve(&package_id),
+ // temporary dummy value
+ dist: NpmPackageVersionDistInfo {
+ tarball: "foobar".to_string(),
+ shasum: "foobar".to_string(),
+ integrity: Some("foobar".to_string()),
+ },
+ dependencies,
+ };
+
+ packages.insert(package_id, package);
+ }
+
+ // verify that all these ids exist in packages
+ for id in &verify_ids {
+ if !packages.contains_key(id) {
+ bail!(
+ "the lockfile is corrupt. You can recreate it with --lock-write"
+ );
+ }
+ }
+ }
+
+ let mut unresolved_tasks = Vec::with_capacity(packages_by_name.len());
+
+ // cache the package names in parallel in the registry api
+ // unless synchronous download should occur
+ if should_sync_download() {
+ let mut package_names = packages_by_name.keys().collect::<Vec<_>>();
+ package_names.sort();
+ for package_name in package_names {
+ api.package_info(package_name).await?;
+ }
+ } else {
+ for package_name in packages_by_name.keys() {
+ let package_name = package_name.clone();
+ let api = api.clone();
+ unresolved_tasks.push(tokio::task::spawn(async move {
+ api.package_info(&package_name).await?;
+ Result::<_, AnyError>::Ok(())
+ }));
+ }
+ }
+ for result in futures::future::join_all(unresolved_tasks).await {
+ result??;
+ }
+
+ // ensure the dist is set for each package
+ for package in packages.values_mut() {
+ // this will read from the memory cache now
+ let version_info = match api
+ .package_version_info(&package.id.name, &package.id.version)
+ .await?
+ {
+ Some(version_info) => version_info,
+ None => {
+ bail!("could not find '{}' specified in the lockfile. Maybe try again with --reload", package.id.display());
+ }
+ };
+ package.dist = version_info.dist;
+ }
+
+ Ok(Self {
+ package_reqs,
+ packages_by_name,
+ packages,
+ })
+ }
+}
+
+/// Assigns stable copy indexes to package ids: the first id seen for a
+/// given (name, version) gets index 0, each subsequent distinct id gets
+/// the next index.
+pub struct SnapshotPackageCopyIndexResolver {
+ // index already assigned to each exact id
+ packages_to_copy_index: HashMap<NpmPackageId, usize>,
+ // highest index handed out so far per (name, version)
+ package_name_version_to_copy_count: HashMap<(String, String), usize>,
+}
+
+impl SnapshotPackageCopyIndexResolver {
+ /// Creates an empty resolver sized for roughly `capacity` packages.
+ pub fn with_capacity(capacity: usize) -> Self {
+ Self {
+ packages_to_copy_index: HashMap::with_capacity(capacity),
+ package_name_version_to_copy_count: HashMap::with_capacity(capacity), // close enough
+ }
+ }
+
+ /// Seeds the resolver with previously assigned indexes so that new
+ /// assignments continue from the highest existing index per
+ /// (name, version).
+ pub fn from_map_with_capacity(
+ mut packages_to_copy_index: HashMap<NpmPackageId, usize>,
+ capacity: usize,
+ ) -> Self {
+ let mut package_name_version_to_copy_count =
+ HashMap::with_capacity(capacity); // close enough
+ if capacity > packages_to_copy_index.len() {
+ packages_to_copy_index.reserve(capacity - packages_to_copy_index.len());
+ }
+
+ // record the maximum pre-assigned index for each (name, version)
+ for (id, index) in &packages_to_copy_index {
+ let entry = package_name_version_to_copy_count
+ .entry((id.name.to_string(), id.version.to_string()))
+ .or_insert(0);
+ if *entry < *index {
+ *entry = *index;
+ }
+ }
+ Self {
+ packages_to_copy_index,
+ package_name_version_to_copy_count,
+ }
+ }
+
+ /// Returns the copy index for `id`, assigning the next free index for
+ /// its (name, version) on first sight and remembering it thereafter.
+ pub fn resolve(&mut self, id: &NpmPackageId) -> usize {
+ if let Some(index) = self.packages_to_copy_index.get(id) {
+ *index
+ } else {
+ // first id for this (name, version) gets 0; later distinct ids
+ // increment the stored count
+ let index = *self
+ .package_name_version_to_copy_count
+ .entry((id.name.to_string(), id.version.to_string()))
+ .and_modify(|count| {
+ *count += 1;
+ })
+ .or_insert(0);
+ self.packages_to_copy_index.insert(id.clone(), index);
+ index
+ }
+ }
+}
+
+// Strips any trailing path from a package specifier, keeping the bare
+// package name (e.g. "@foo/bar/baz" -> "@foo/bar", "foo/baz" -> "foo").
+fn name_without_path(name: &str) -> &str {
+ let mut search_start_index = 0;
+ if name.starts_with('@') {
+ // scoped packages contain one '/' as part of the name, so start the
+ // path search after it
+ if let Some(slash_index) = name.find('/') {
+ search_start_index = slash_index + 1;
+ }
+ }
+ if let Some(slash_index) = &name[search_start_index..].find('/') {
+ // get the name up until the path slash
+ &name[0..search_start_index + slash_index]
+ } else {
+ name
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ // Verifies path stripping for plain, scoped, and scope-only names.
+ #[test]
+ fn test_name_without_path() {
+ assert_eq!(name_without_path("foo"), "foo");
+ assert_eq!(name_without_path("@foo/bar"), "@foo/bar");
+ assert_eq!(name_without_path("@foo/bar/baz"), "@foo/bar");
+ assert_eq!(name_without_path("@hello"), "@hello");
+ }
+
+ // Verifies that copy indexes are stable per exact id and increment per
+ // distinct id sharing the same (name, version).
+ #[test]
+ fn test_copy_index_resolver() {
+ let mut copy_index_resolver =
+ SnapshotPackageCopyIndexResolver::with_capacity(10);
+ assert_eq!(
+ copy_index_resolver
+ .resolve(&NpmPackageId::from_serialized("package@1.0.0").unwrap()),
+ 0
+ );
+ assert_eq!(
+ copy_index_resolver
+ .resolve(&NpmPackageId::from_serialized("package@1.0.0").unwrap()),
+ 0
+ );
+ assert_eq!(
+ copy_index_resolver.resolve(
+ &NpmPackageId::from_serialized("package@1.0.0_package-b@1.0.0")
+ .unwrap()
+ ),
+ 1
+ );
+ assert_eq!(
+ copy_index_resolver.resolve(
+ &NpmPackageId::from_serialized(
+ "package@1.0.0_package-b@1.0.0__package-c@2.0.0"
+ )
+ .unwrap()
+ ),
+ 2
+ );
+ assert_eq!(
+ copy_index_resolver.resolve(
+ &NpmPackageId::from_serialized("package@1.0.0_package-b@1.0.0")
+ .unwrap()
+ ),
+ 1
+ );
+ assert_eq!(
+ copy_index_resolver
+ .resolve(&NpmPackageId::from_serialized("package-b@1.0.0").unwrap()),
+ 0
+ );
+ }
+}
diff --git a/cli/npm/resolvers/common.rs b/cli/npm/resolvers/common.rs
index 07996c4e1..32b8293cd 100644
--- a/cli/npm/resolvers/common.rs
+++ b/cli/npm/resolvers/common.rs
@@ -70,13 +70,19 @@ pub async fn cache_packages(
// and we want the output to be deterministic
packages.sort_by(|a, b| a.id.cmp(&b.id));
}
+
let mut handles = Vec::with_capacity(packages.len());
for package in packages {
+ assert_eq!(package.copy_index, 0); // the caller should not provide any of these
let cache = cache.clone();
let registry_url = registry_url.clone();
let handle = tokio::task::spawn(async move {
cache
- .ensure_package(&package.id, &package.dist, &registry_url)
+ .ensure_package(
+ (package.id.name.as_str(), &package.id.version),
+ &package.dist,
+ &registry_url,
+ )
.await
});
if sync_download {
diff --git a/cli/npm/resolvers/global.rs b/cli/npm/resolvers/global.rs
index 42090415a..474cb55d6 100644
--- a/cli/npm/resolvers/global.rs
+++ b/cli/npm/resolvers/global.rs
@@ -23,7 +23,7 @@ use crate::npm::resolvers::common::cache_packages;
use crate::npm::NpmCache;
use crate::npm::NpmPackageId;
use crate::npm::NpmPackageReq;
-use crate::npm::NpmRegistryApi;
+use crate::npm::RealNpmRegistryApi;
use super::common::ensure_registry_read_permission;
use super::common::InnerNpmPackageResolver;
@@ -39,7 +39,7 @@ pub struct GlobalNpmPackageResolver {
impl GlobalNpmPackageResolver {
pub fn new(
cache: NpmCache,
- api: NpmRegistryApi,
+ api: RealNpmRegistryApi,
initial_snapshot: Option<NpmResolutionSnapshot>,
) -> Self {
let registry_url = api.base_url().to_owned();
@@ -53,7 +53,13 @@ impl GlobalNpmPackageResolver {
}
fn package_folder(&self, id: &NpmPackageId) -> PathBuf {
- self.cache.package_folder(id, &self.registry_url)
+ let folder_id = self
+ .resolution
+ .resolve_package_cache_folder_id_from_id(id)
+ .unwrap();
+ self
+ .cache
+ .package_folder_for_id(&folder_id, &self.registry_url)
}
}
@@ -74,7 +80,7 @@ impl InnerNpmPackageResolver for GlobalNpmPackageResolver {
) -> Result<PathBuf, AnyError> {
let referrer_pkg_id = self
.cache
- .resolve_package_id_from_specifier(referrer, &self.registry_url)?;
+ .resolve_package_folder_id_from_specifier(referrer, &self.registry_url)?;
let pkg_result = self
.resolution
.resolve_package_from_package(name, &referrer_pkg_id);
@@ -105,10 +111,15 @@ impl InnerNpmPackageResolver for GlobalNpmPackageResolver {
&self,
specifier: &ModuleSpecifier,
) -> Result<PathBuf, AnyError> {
- let pkg_id = self
- .cache
- .resolve_package_id_from_specifier(specifier, &self.registry_url)?;
- Ok(self.package_folder(&pkg_id))
+ let pkg_folder_id = self.cache.resolve_package_folder_id_from_specifier(
+ specifier,
+ &self.registry_url,
+ )?;
+ Ok(
+ self
+ .cache
+ .package_folder_for_id(&pkg_folder_id, &self.registry_url),
+ )
}
fn package_size(&self, package_id: &NpmPackageId) -> Result<u64, AnyError> {
@@ -162,10 +173,22 @@ impl InnerNpmPackageResolver for GlobalNpmPackageResolver {
async fn cache_packages_in_resolver(
resolver: &GlobalNpmPackageResolver,
) -> Result<(), AnyError> {
+ let package_partitions = resolver.resolution.all_packages_partitioned();
+
cache_packages(
- resolver.resolution.all_packages(),
+ package_partitions.packages,
&resolver.cache,
&resolver.registry_url,
)
- .await
+ .await?;
+
+ // create the copy package folders
+ for copy in package_partitions.copy_packages {
+ resolver.cache.ensure_copy_package(
+ &copy.get_package_cache_folder_id(),
+ &resolver.registry_url,
+ )?;
+ }
+
+ Ok(())
}
diff --git a/cli/npm/resolvers/local.rs b/cli/npm/resolvers/local.rs
index cad940d56..678f776f3 100644
--- a/cli/npm/resolvers/local.rs
+++ b/cli/npm/resolvers/local.rs
@@ -24,12 +24,14 @@ use tokio::task::JoinHandle;
use crate::fs_util;
use crate::lockfile::Lockfile;
use crate::npm::cache::should_sync_download;
+use crate::npm::cache::NpmPackageCacheFolderId;
use crate::npm::resolution::NpmResolution;
use crate::npm::resolution::NpmResolutionSnapshot;
use crate::npm::NpmCache;
use crate::npm::NpmPackageId;
use crate::npm::NpmPackageReq;
-use crate::npm::NpmRegistryApi;
+use crate::npm::NpmResolutionPackage;
+use crate::npm::RealNpmRegistryApi;
use super::common::ensure_registry_read_permission;
use super::common::InnerNpmPackageResolver;
@@ -48,7 +50,7 @@ pub struct LocalNpmPackageResolver {
impl LocalNpmPackageResolver {
pub fn new(
cache: NpmCache,
- api: NpmRegistryApi,
+ api: RealNpmRegistryApi,
node_modules_folder: PathBuf,
initial_snapshot: Option<NpmResolutionSnapshot>,
) -> Self {
@@ -101,6 +103,35 @@ impl LocalNpmPackageResolver {
// it's within the directory, so use it
specifier.to_file_path().ok()
}
+
+ fn get_package_id_folder(
+ &self,
+ package_id: &NpmPackageId,
+ ) -> Result<PathBuf, AnyError> {
+ match self.resolution.resolve_package_from_id(package_id) {
+ Some(package) => Ok(self.get_package_id_folder_from_package(&package)),
+ None => bail!(
+ "Could not find package information for '{}'",
+ package_id.as_serialized()
+ ),
+ }
+ }
+
+ fn get_package_id_folder_from_package(
+ &self,
+ package: &NpmResolutionPackage,
+ ) -> PathBuf {
+ // package is stored at:
+ // node_modules/.deno/<package_cache_folder_id_folder_name>/node_modules/<package_name>
+ self
+ .root_node_modules_path
+ .join(".deno")
+ .join(get_package_folder_id_folder_name(
+ &package.get_package_cache_folder_id(),
+ ))
+ .join("node_modules")
+ .join(&package.id.name)
+ }
}
impl InnerNpmPackageResolver for LocalNpmPackageResolver {
@@ -108,19 +139,8 @@ impl InnerNpmPackageResolver for LocalNpmPackageResolver {
&self,
pkg_req: &NpmPackageReq,
) -> Result<PathBuf, AnyError> {
- let resolved_package =
- self.resolution.resolve_package_from_deno_module(pkg_req)?;
-
- // it might be at the full path if there are duplicate names
- let fully_resolved_folder_path = join_package_name(
- &self.root_node_modules_path,
- &resolved_package.id.to_string(),
- );
- Ok(if fully_resolved_folder_path.exists() {
- fully_resolved_folder_path
- } else {
- join_package_name(&self.root_node_modules_path, &resolved_package.id.name)
- })
+ let package = self.resolution.resolve_package_from_deno_module(pkg_req)?;
+ Ok(self.get_package_id_folder_from_package(&package))
}
fn resolve_package_folder_from_package(
@@ -178,19 +198,9 @@ impl InnerNpmPackageResolver for LocalNpmPackageResolver {
}
fn package_size(&self, package_id: &NpmPackageId) -> Result<u64, AnyError> {
- match self.resolution.resolve_package_from_id(package_id) {
- Some(package) => Ok(fs_util::dir_size(
- // package is stored at:
- // node_modules/.deno/<package_id>/node_modules/<package_name>
- &self
- .root_node_modules_path
- .join(".deno")
- .join(package.id.to_string())
- .join("node_modules")
- .join(package.id.name),
- )?),
- None => bail!("Could not find package folder for '{}'", package_id),
- }
+ let package_folder_path = self.get_package_id_folder(package_id)?;
+
+ Ok(fs_util::dir_size(&package_folder_path)?)
}
fn has_packages(&self) -> bool {
@@ -255,10 +265,6 @@ async fn sync_resolution_with_fs(
registry_url: &Url,
root_node_modules_dir_path: &Path,
) -> Result<(), AnyError> {
- fn get_package_folder_name(package_id: &NpmPackageId) -> String {
- package_id.to_string().replace('/', "+")
- }
-
let deno_local_registry_dir = root_node_modules_dir_path.join(".deno");
fs::create_dir_all(&deno_local_registry_dir).with_context(|| {
format!("Creating '{}'", deno_local_registry_dir.display())
@@ -267,34 +273,45 @@ async fn sync_resolution_with_fs(
// 1. Write all the packages out the .deno directory.
//
// Copy (hardlink in future) <global_registry_cache>/<package_id>/ to
- // node_modules/.deno/<package_id>/node_modules/<package_name>
+ // node_modules/.deno/<package_folder_id_folder_name>/node_modules/<package_name>
let sync_download = should_sync_download();
- let mut all_packages = snapshot.all_packages();
+ let mut package_partitions = snapshot.all_packages_partitioned();
if sync_download {
// we're running the tests not with --quiet
// and we want the output to be deterministic
- all_packages.sort_by(|a, b| a.id.cmp(&b.id));
+ package_partitions.packages.sort_by(|a, b| a.id.cmp(&b.id));
}
let mut handles: Vec<JoinHandle<Result<(), AnyError>>> =
- Vec::with_capacity(all_packages.len());
- for package in &all_packages {
- let folder_name = get_package_folder_name(&package.id);
+ Vec::with_capacity(package_partitions.packages.len());
+ for package in &package_partitions.packages {
+ let folder_name =
+ get_package_folder_id_folder_name(&package.get_package_cache_folder_id());
let folder_path = deno_local_registry_dir.join(&folder_name);
- let initialized_file = folder_path.join("deno_initialized");
- if !initialized_file.exists() {
+ let initialized_file = folder_path.join(".initialized");
+ if !cache.should_use_cache_for_npm_package(&package.id.name)
+ || !initialized_file.exists()
+ {
let cache = cache.clone();
let registry_url = registry_url.clone();
let package = package.clone();
let handle = tokio::task::spawn(async move {
cache
- .ensure_package(&package.id, &package.dist, &registry_url)
+ .ensure_package(
+ (&package.id.name, &package.id.version),
+ &package.dist,
+ &registry_url,
+ )
.await?;
let sub_node_modules = folder_path.join("node_modules");
let package_path =
join_package_name(&sub_node_modules, &package.id.name);
fs::create_dir_all(&package_path)
.with_context(|| format!("Creating '{}'", folder_path.display()))?;
- let cache_folder = cache.package_folder(&package.id, &registry_url);
+ let cache_folder = cache.package_folder_for_name_and_version(
+ &package.id.name,
+ &package.id.version,
+ &registry_url,
+ );
// for now copy, but in the future consider hard linking
fs_util::copy_dir_recursive(&cache_folder, &package_path)?;
// write out a file that indicates this folder has been initialized
@@ -314,16 +331,51 @@ async fn sync_resolution_with_fs(
result??; // surface the first error
}
- // 2. Symlink all the dependencies into the .deno directory.
+ // 2. Create any "copy" packages, which are used for peer dependencies
+ for package in &package_partitions.copy_packages {
+ let package_cache_folder_id = package.get_package_cache_folder_id();
+ let destination_path = deno_local_registry_dir
+ .join(&get_package_folder_id_folder_name(&package_cache_folder_id));
+ let initialized_file = destination_path.join(".initialized");
+ if !initialized_file.exists() {
+ let sub_node_modules = destination_path.join("node_modules");
+ let package_path = join_package_name(&sub_node_modules, &package.id.name);
+ fs::create_dir_all(&package_path).with_context(|| {
+ format!("Creating '{}'", destination_path.display())
+ })?;
+ let source_path = join_package_name(
+ &deno_local_registry_dir
+ .join(&get_package_folder_id_folder_name(
+ &package_cache_folder_id.with_no_count(),
+ ))
+ .join("node_modules"),
+ &package.id.name,
+ );
+ fs_util::hard_link_dir_recursive(&source_path, &package_path)?;
+ // write out a file that indicates this folder has been initialized
+ fs::write(initialized_file, "")?;
+ }
+ }
+
+ let all_packages = package_partitions.into_all();
+
+ // 3. Symlink all the dependencies into the .deno directory.
//
// Symlink node_modules/.deno/<package_id>/node_modules/<dep_name> to
// node_modules/.deno/<dep_id>/node_modules/<dep_package_name>
for package in &all_packages {
let sub_node_modules = deno_local_registry_dir
- .join(&get_package_folder_name(&package.id))
+ .join(&get_package_folder_id_folder_name(
+ &package.get_package_cache_folder_id(),
+ ))
.join("node_modules");
for (name, dep_id) in &package.dependencies {
- let dep_folder_name = get_package_folder_name(dep_id);
+ let dep_cache_folder_id = snapshot
+ .package_from_id(dep_id)
+ .unwrap()
+ .get_package_cache_folder_id();
+ let dep_folder_name =
+ get_package_folder_id_folder_name(&dep_cache_folder_id);
let dep_folder_path = join_package_name(
&deno_local_registry_dir
.join(dep_folder_name)
@@ -337,7 +389,7 @@ async fn sync_resolution_with_fs(
}
}
- // 3. Create all the packages in the node_modules folder, which are symlinks.
+ // 4. Create all the packages in the node_modules folder, which are symlinks.
//
// Symlink node_modules/<package_name> to
// node_modules/.deno/<package_id>/node_modules/<package_name>
@@ -353,29 +405,41 @@ async fn sync_resolution_with_fs(
let root_folder_name = if found_names.insert(package_id.name.clone()) {
package_id.name.clone()
} else if is_top_level {
- package_id.to_string()
+ package_id.display()
} else {
continue; // skip, already handled
};
- let local_registry_package_path = deno_local_registry_dir
- .join(&get_package_folder_name(&package_id))
- .join("node_modules")
- .join(&package_id.name);
+ let package = snapshot.package_from_id(&package_id).unwrap();
+ let local_registry_package_path = join_package_name(
+ &deno_local_registry_dir
+ .join(&get_package_folder_id_folder_name(
+ &package.get_package_cache_folder_id(),
+ ))
+ .join("node_modules"),
+ &package_id.name,
+ );
symlink_package_dir(
&local_registry_package_path,
&join_package_name(root_node_modules_dir_path, &root_folder_name),
)?;
- if let Some(package) = snapshot.package_from_id(&package_id) {
- for id in package.dependencies.values() {
- pending_packages.push_back((id.clone(), false));
- }
+ for id in package.dependencies.values() {
+ pending_packages.push_back((id.clone(), false));
}
}
Ok(())
}
+fn get_package_folder_id_folder_name(id: &NpmPackageCacheFolderId) -> String {
+ let copy_str = if id.copy_index == 0 {
+ "".to_string()
+ } else {
+ format!("_{}", id.copy_index)
+ };
+ format!("{}@{}{}", id.name, id.version, copy_str).replace('/', "+")
+}
+
fn symlink_package_dir(
old_path: &Path,
new_path: &Path,
diff --git a/cli/npm/resolvers/mod.rs b/cli/npm/resolvers/mod.rs
index 71c2abc00..6cd40594b 100644
--- a/cli/npm/resolvers/mod.rs
+++ b/cli/npm/resolvers/mod.rs
@@ -6,6 +6,7 @@ mod local;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
+use deno_core::anyhow::Context;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
@@ -29,8 +30,8 @@ use self::local::LocalNpmPackageResolver;
use super::NpmCache;
use super::NpmPackageId;
use super::NpmPackageReq;
-use super::NpmRegistryApi;
use super::NpmResolutionSnapshot;
+use super::RealNpmRegistryApi;
const RESOLUTION_STATE_ENV_VAR_NAME: &str =
"DENO_DONT_USE_INTERNAL_NODE_COMPAT_STATE";
@@ -71,7 +72,7 @@ pub struct NpmPackageResolver {
no_npm: bool,
inner: Arc<dyn InnerNpmPackageResolver>,
local_node_modules_path: Option<PathBuf>,
- api: NpmRegistryApi,
+ api: RealNpmRegistryApi,
cache: NpmCache,
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
}
@@ -90,7 +91,7 @@ impl std::fmt::Debug for NpmPackageResolver {
impl NpmPackageResolver {
pub fn new(
cache: NpmCache,
- api: NpmRegistryApi,
+ api: RealNpmRegistryApi,
unstable: bool,
no_npm: bool,
local_node_modules_path: Option<PathBuf>,
@@ -112,7 +113,14 @@ impl NpmPackageResolver {
lockfile: Arc<Mutex<Lockfile>>,
) -> Result<(), AnyError> {
let snapshot =
- NpmResolutionSnapshot::from_lockfile(lockfile.clone(), &self.api).await?;
+ NpmResolutionSnapshot::from_lockfile(lockfile.clone(), &self.api)
+ .await
+ .with_context(|| {
+ format!(
+ "failed reading lockfile '{}'",
+ lockfile.lock().filename.display()
+ )
+ })?;
self.maybe_lockfile = Some(lockfile);
if let Some(node_modules_folder) = &self.local_node_modules_path {
self.inner = Arc::new(LocalNpmPackageResolver::new(
@@ -133,7 +141,7 @@ impl NpmPackageResolver {
fn new_with_maybe_snapshot(
cache: NpmCache,
- api: NpmRegistryApi,
+ api: RealNpmRegistryApi,
unstable: bool,
no_npm: bool,
local_node_modules_path: Option<PathBuf>,
diff --git a/cli/npm/semver/errors.rs b/cli/npm/semver/errors.rs
deleted file mode 100644
index 530d73c55..000000000
--- a/cli/npm/semver/errors.rs
+++ /dev/null
@@ -1,38 +0,0 @@
-// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
-
-use deno_core::anyhow::bail;
-use deno_core::error::AnyError;
-use monch::ParseError;
-use monch::ParseErrorFailure;
-use monch::ParseResult;
-
-pub fn with_failure_handling<'a, T>(
- combinator: impl Fn(&'a str) -> ParseResult<T>,
-) -> impl Fn(&'a str) -> Result<T, AnyError> {
- move |input| match combinator(input) {
- Ok((input, result)) => {
- if !input.is_empty() {
- error_for_failure(fail_for_trailing_input(input))
- } else {
- Ok(result)
- }
- }
- Err(ParseError::Backtrace) => {
- error_for_failure(fail_for_trailing_input(input))
- }
- Err(ParseError::Failure(e)) => error_for_failure(e),
- }
-}
-
-fn error_for_failure<T>(e: ParseErrorFailure) -> Result<T, AnyError> {
- bail!(
- "{}\n {}\n ~",
- e.message,
- // truncate the output to prevent wrapping in the console
- e.input.chars().take(60).collect::<String>()
- )
-}
-
-fn fail_for_trailing_input(input: &str) -> ParseErrorFailure {
- ParseErrorFailure::new(input, "Unexpected character.")
-}
diff --git a/cli/npm/semver/mod.rs b/cli/npm/semver/mod.rs
index 90352817f..cd63b2a29 100644
--- a/cli/npm/semver/mod.rs
+++ b/cli/npm/semver/mod.rs
@@ -11,7 +11,6 @@ use serde::Serialize;
use crate::npm::resolution::NpmVersionMatcher;
-use self::errors::with_failure_handling;
use self::range::Partial;
use self::range::VersionBoundKind;
use self::range::VersionRange;
@@ -20,7 +19,6 @@ use self::range::VersionRangeSet;
use self::range::XRange;
pub use self::specifier::SpecifierVersionReq;
-mod errors;
mod range;
mod specifier;
diff --git a/cli/npm/semver/specifier.rs b/cli/npm/semver/specifier.rs
index c3e7f716b..dc4fe1010 100644
--- a/cli/npm/semver/specifier.rs
+++ b/cli/npm/semver/specifier.rs
@@ -6,7 +6,6 @@ use monch::*;
use serde::Deserialize;
use serde::Serialize;
-use super::errors::with_failure_handling;
use super::range::Partial;
use super::range::VersionRange;
use super::range::XRange;
diff --git a/cli/npm/tarball.rs b/cli/npm/tarball.rs
index 3971e0b07..751e093f5 100644
--- a/cli/npm/tarball.rs
+++ b/cli/npm/tarball.rs
@@ -6,18 +6,17 @@ use std::path::Path;
use std::path::PathBuf;
use deno_core::anyhow::bail;
-use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use flate2::read::GzDecoder;
use tar::Archive;
use tar::EntryType;
-use super::cache::NPM_PACKAGE_SYNC_LOCK_FILENAME;
+use super::cache::with_folder_sync_lock;
use super::registry::NpmPackageVersionDistInfo;
-use super::NpmPackageId;
+use super::semver::NpmVersion;
pub fn verify_and_extract_tarball(
- package: &NpmPackageId,
+ package: (&str, &NpmVersion),
data: &[u8],
dist_info: &NpmPackageVersionDistInfo,
output_folder: &Path,
@@ -27,50 +26,19 @@ pub fn verify_and_extract_tarball(
} else {
// todo(dsherret): check shasum here
bail!(
- "Errored on '{}': npm packages with no integrity are not implemented.",
- package
+ "Errored on '{}@{}': npm packages with no integrity are not implemented.",
+ package.0,
+ package.1,
);
}
- fs::create_dir_all(output_folder).with_context(|| {
- format!("Error creating '{}'.", output_folder.display())
- })?;
-
- // This sync lock file is a way to ensure that partially created
- // npm package directories aren't considered valid. This could maybe
- // be a bit smarter in the future to not bother extracting here
- // if another process has taken the lock in the past X seconds and
- // wait for the other process to finish (it could try to create the
- // file with `create_new(true)` then if it exists, check the metadata
- // then wait until the other process finishes with a timeout), but
- // for now this is good enough.
- let sync_lock_path = output_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME);
- match fs::OpenOptions::new()
- .write(true)
- .create(true)
- .open(&sync_lock_path)
- {
- Ok(_) => {
- extract_tarball(data, output_folder)?;
- // extraction succeeded, so only now delete this file
- let _ignore = std::fs::remove_file(&sync_lock_path);
- Ok(())
- }
- Err(err) => {
- bail!(
- concat!(
- "Error creating package sync lock file at '{}'. ",
- "Maybe try manually deleting this folder.\n\n{:#}",
- ),
- output_folder.display(),
- err
- );
- }
- }
+ with_folder_sync_lock(package, output_folder, || {
+ extract_tarball(data, output_folder)
+ })
}
fn verify_tarball_integrity(
- package: &NpmPackageId,
+ package: (&str, &NpmVersion),
data: &[u8],
npm_integrity: &str,
) -> Result<(), AnyError> {
@@ -81,16 +49,18 @@ fn verify_tarball_integrity(
let algo = match hash_kind {
"sha512" => &SHA512,
hash_kind => bail!(
- "Not implemented hash function for {}: {}",
- package,
+ "Not implemented hash function for {}@{}: {}",
+ package.0,
+ package.1,
hash_kind
),
};
(algo, checksum.to_lowercase())
}
None => bail!(
- "Not implemented integrity kind for {}: {}",
- package,
+ "Not implemented integrity kind for {}@{}: {}",
+ package.0,
+ package.1,
npm_integrity
),
};
@@ -101,8 +71,9 @@ fn verify_tarball_integrity(
let tarball_checksum = base64::encode(digest.as_ref()).to_lowercase();
if tarball_checksum != expected_checksum {
bail!(
- "Tarball checksum did not match what was provided by npm registry for {}.\n\nExpected: {}\nActual: {}",
- package,
+ "Tarball checksum did not match what was provided by npm registry for {}@{}.\n\nExpected: {}\nActual: {}",
+ package.0,
+ package.1,
expected_checksum,
tarball_checksum,
)
@@ -162,32 +133,31 @@ mod test {
#[test]
pub fn test_verify_tarball() {
- let package_id = NpmPackageId {
- name: "package".to_string(),
- version: NpmVersion::parse("1.0.0").unwrap(),
- };
+ let package_name = "package".to_string();
+ let package_version = NpmVersion::parse("1.0.0").unwrap();
+ let package = (package_name.as_str(), &package_version);
let actual_checksum =
"z4phnx7vul3xvchq1m2ab9yg5aulvxxcg/spidns6c5h0ne8xyxysp+dgnkhfuwvy7kxvudbeoglodj6+sfapg==";
assert_eq!(
- verify_tarball_integrity(&package_id, &Vec::new(), "test")
+ verify_tarball_integrity(package, &Vec::new(), "test")
.unwrap_err()
.to_string(),
"Not implemented integrity kind for package@1.0.0: test",
);
assert_eq!(
- verify_tarball_integrity(&package_id, &Vec::new(), "sha1-test")
+ verify_tarball_integrity(package, &Vec::new(), "sha1-test")
.unwrap_err()
.to_string(),
"Not implemented hash function for package@1.0.0: sha1",
);
assert_eq!(
- verify_tarball_integrity(&package_id, &Vec::new(), "sha512-test")
+ verify_tarball_integrity(package, &Vec::new(), "sha512-test")
.unwrap_err()
.to_string(),
format!("Tarball checksum did not match what was provided by npm registry for package@1.0.0.\n\nExpected: test\nActual: {}", actual_checksum),
);
assert!(verify_tarball_integrity(
- &package_id,
+ package,
&Vec::new(),
&format!("sha512-{}", actual_checksum)
)
diff --git a/cli/proc_state.rs b/cli/proc_state.rs
index 148f44923..ae3a54a20 100644
--- a/cli/proc_state.rs
+++ b/cli/proc_state.rs
@@ -26,7 +26,7 @@ use crate::node::NodeResolution;
use crate::npm::NpmCache;
use crate::npm::NpmPackageReference;
use crate::npm::NpmPackageResolver;
-use crate::npm::NpmRegistryApi;
+use crate::npm::RealNpmRegistryApi;
use crate::progress_bar::ProgressBar;
use crate::resolver::CliResolver;
use crate::tools::check;
@@ -211,13 +211,13 @@ impl ProcState {
let emit_cache = EmitCache::new(dir.gen_cache.clone());
let parsed_source_cache =
ParsedSourceCache::new(Some(dir.dep_analysis_db_file_path()));
- let registry_url = NpmRegistryApi::default_url();
+ let registry_url = RealNpmRegistryApi::default_url();
let npm_cache = NpmCache::from_deno_dir(
&dir,
cli_options.cache_setting(),
progress_bar.clone(),
);
- let api = NpmRegistryApi::new(
+ let api = RealNpmRegistryApi::new(
registry_url,
npm_cache.clone(),
cli_options.cache_setting(),
diff --git a/cli/tests/integration/npm_tests.rs b/cli/tests/integration/npm_tests.rs
index 5dae2fd1c..87e853850 100644
--- a/cli/tests/integration/npm_tests.rs
+++ b/cli/tests/integration/npm_tests.rs
@@ -1002,7 +1002,7 @@ fn lock_file_missing_top_level_package() {
let stderr = String::from_utf8(output.stderr).unwrap();
assert_eq!(
stderr,
- "error: the lockfile (deno.lock) is corrupt. You can recreate it with --lock-write\n"
+ "error: failed reading lockfile 'deno.lock'\n\nCaused by:\n the lockfile is corrupt. You can recreate it with --lock-write\n"
);
}
@@ -1054,6 +1054,182 @@ fn auto_discover_lock_file() {
));
}
+#[test]
+fn peer_deps_with_copied_folders_and_lockfile() {
+ let _server = http_server();
+
+ let deno_dir = util::new_deno_dir();
+ let temp_dir = util::TempDir::new();
+
+ // write empty config file
+ temp_dir.write("deno.json", "{}");
+ let test_folder_path = test_util::testdata_path()
+ .join("npm")
+ .join("peer_deps_with_copied_folders");
+ let main_contents =
+ std::fs::read_to_string(test_folder_path.join("main.ts")).unwrap();
+ temp_dir.write("./main.ts", main_contents);
+
+ let deno = util::deno_cmd_with_deno_dir(&deno_dir)
+ .current_dir(temp_dir.path())
+ .arg("run")
+ .arg("--unstable")
+ .arg("-A")
+ .arg("main.ts")
+ .envs(env_vars())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .spawn()
+ .unwrap();
+ let output = deno.wait_with_output().unwrap();
+ assert!(output.status.success());
+
+ let expected_output =
+ std::fs::read_to_string(test_folder_path.join("main.out")).unwrap();
+
+ assert_eq!(String::from_utf8(output.stderr).unwrap(), expected_output);
+
+ assert!(temp_dir.path().join("deno.lock").exists());
+ let grandchild_path = deno_dir
+ .path()
+ .join("npm")
+ .join("localhost_4545")
+ .join("npm")
+ .join("registry")
+ .join("@denotest")
+ .join("peer-dep-test-grandchild");
+ assert!(grandchild_path.join("1.0.0").exists());
+ assert!(grandchild_path.join("1.0.0_1").exists()); // copy folder, which is hardlinked
+
+ // run again
+ let deno = util::deno_cmd_with_deno_dir(&deno_dir)
+ .current_dir(temp_dir.path())
+ .arg("run")
+ .arg("--unstable")
+ .arg("-A")
+ .arg("main.ts")
+ .envs(env_vars())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .spawn()
+ .unwrap();
+ let output = deno.wait_with_output().unwrap();
+ assert_eq!(String::from_utf8(output.stderr).unwrap(), "1\n2\n");
+ assert!(output.status.success());
+
+ let deno = util::deno_cmd_with_deno_dir(&deno_dir)
+ .current_dir(temp_dir.path())
+ .arg("run")
+ .arg("--unstable")
+ .arg("--reload")
+ .arg("-A")
+ .arg("main.ts")
+ .envs(env_vars())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .spawn()
+ .unwrap();
+ let output = deno.wait_with_output().unwrap();
+ assert_eq!(String::from_utf8(output.stderr).unwrap(), expected_output);
+ assert!(output.status.success());
+
+ // now run with local node modules
+ let deno = util::deno_cmd_with_deno_dir(&deno_dir)
+ .current_dir(temp_dir.path())
+ .arg("run")
+ .arg("--unstable")
+ .arg("--node-modules-dir")
+ .arg("-A")
+ .arg("main.ts")
+ .envs(env_vars())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .spawn()
+ .unwrap();
+ let output = deno.wait_with_output().unwrap();
+ assert_eq!(String::from_utf8(output.stderr).unwrap(), "1\n2\n");
+ assert!(output.status.success());
+
+ let deno_folder = temp_dir.path().join("node_modules").join(".deno");
+ assert!(deno_folder
+ .join("@denotest+peer-dep-test-grandchild@1.0.0")
+ .exists());
+ assert!(deno_folder
+ .join("@denotest+peer-dep-test-grandchild@1.0.0_1")
+ .exists()); // copy folder
+
+ // now again run with local node modules
+ let deno = util::deno_cmd_with_deno_dir(&deno_dir)
+ .current_dir(temp_dir.path())
+ .arg("run")
+ .arg("--unstable")
+ .arg("--node-modules-dir")
+ .arg("-A")
+ .arg("main.ts")
+ .envs(env_vars())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .spawn()
+ .unwrap();
+ let output = deno.wait_with_output().unwrap();
+ assert!(output.status.success());
+ assert_eq!(String::from_utf8(output.stderr).unwrap(), "1\n2\n");
+
+ // now ensure it works with reloading
+ let deno = util::deno_cmd_with_deno_dir(&deno_dir)
+ .current_dir(temp_dir.path())
+ .arg("run")
+ .arg("--unstable")
+ .arg("--node-modules-dir")
+ .arg("--reload")
+ .arg("-A")
+ .arg("main.ts")
+ .envs(env_vars())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .spawn()
+ .unwrap();
+ let output = deno.wait_with_output().unwrap();
+ assert!(output.status.success());
+ assert_eq!(String::from_utf8(output.stderr).unwrap(), expected_output);
+
+ // now ensure it works with reloading and no lockfile
+ let deno = util::deno_cmd_with_deno_dir(&deno_dir)
+ .current_dir(temp_dir.path())
+ .arg("run")
+ .arg("--unstable")
+ .arg("--node-modules-dir")
+ .arg("--no-lock")
+ .arg("--reload")
+ .arg("-A")
+ .arg("main.ts")
+ .envs(env_vars())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .spawn()
+ .unwrap();
+ let output = deno.wait_with_output().unwrap();
+ assert_eq!(String::from_utf8(output.stderr).unwrap(), expected_output,);
+ assert!(output.status.success());
+}
+
+itest!(info_peer_deps {
+ args: "info --quiet --unstable npm/peer_deps_with_copied_folders/main.ts",
+ output: "npm/peer_deps_with_copied_folders/main_info.out",
+ exit_code: 0,
+ envs: env_vars(),
+ http_server: true,
+});
+
+itest!(info_peer_deps_json {
+ args:
+ "info --quiet --unstable --json npm/peer_deps_with_copied_folders/main.ts",
+ output: "npm/peer_deps_with_copied_folders/main_info_json.out",
+ exit_code: 0,
+ envs: env_vars(),
+ http_server: true,
+});
+
fn env_vars_no_sync_download() -> Vec<(String, String)> {
vec![
("DENO_NODE_COMPAT_URL".to_string(), util::std_file_url()),
diff --git a/cli/tests/testdata/npm/peer_deps_with_copied_folders/main.out b/cli/tests/testdata/npm/peer_deps_with_copied_folders/main.out
new file mode 100644
index 000000000..ce0dc6896
--- /dev/null
+++ b/cli/tests/testdata/npm/peer_deps_with_copied_folders/main.out
@@ -0,0 +1,10 @@
+Download http://localhost:4545/npm/registry/@denotest/peer-dep-test-child
+Download http://localhost:4545/npm/registry/@denotest/peer-dep-test-grandchild
+Download http://localhost:4545/npm/registry/@denotest/peer-dep-test-peer
+Download http://localhost:4545/npm/registry/@denotest/peer-dep-test-child/1.0.0.tgz
+Download http://localhost:4545/npm/registry/@denotest/peer-dep-test-child/2.0.0.tgz
+Download http://localhost:4545/npm/registry/@denotest/peer-dep-test-grandchild/1.0.0.tgz
+Download http://localhost:4545/npm/registry/@denotest/peer-dep-test-peer/1.0.0.tgz
+Download http://localhost:4545/npm/registry/@denotest/peer-dep-test-peer/2.0.0.tgz
+1
+2
diff --git a/cli/tests/testdata/npm/peer_deps_with_copied_folders/main.ts b/cli/tests/testdata/npm/peer_deps_with_copied_folders/main.ts
new file mode 100644
index 000000000..a8ea8104a
--- /dev/null
+++ b/cli/tests/testdata/npm/peer_deps_with_copied_folders/main.ts
@@ -0,0 +1,5 @@
+import version1 from "npm:@denotest/peer-dep-test-child@1";
+import version2 from "npm:@denotest/peer-dep-test-child@2";
+
+console.error(version1);
+console.error(version2);
diff --git a/cli/tests/testdata/npm/peer_deps_with_copied_folders/main_info.out b/cli/tests/testdata/npm/peer_deps_with_copied_folders/main_info.out
new file mode 100644
index 000000000..c9c4a59c1
--- /dev/null
+++ b/cli/tests/testdata/npm/peer_deps_with_copied_folders/main_info.out
@@ -0,0 +1,14 @@
+local: [WILDCARD]main.ts
+type: TypeScript
+dependencies: 6 unique
+size: [WILDCARD]
+
+file:///[WILDCARD]/testdata/npm/peer_deps_with_copied_folders/main.ts (171B)
+├─┬ npm:@denotest/peer-dep-test-child@1 - 1.0.0 ([WILDCARD])
+│ ├─┬ npm:@denotest/peer-dep-test-grandchild@1.0.0_@denotest+peer-dep-test-peer@1.0.0 ([WILDCARD])
+│ │ └── npm:@denotest/peer-dep-test-peer@1.0.0 ([WILDCARD])
+│ └── npm:@denotest/peer-dep-test-peer@1.0.0 ([WILDCARD])
+└─┬ npm:@denotest/peer-dep-test-child@2 - 2.0.0 ([WILDCARD])
+ ├─┬ npm:@denotest/peer-dep-test-grandchild@1.0.0_@denotest+peer-dep-test-peer@2.0.0 ([WILDCARD])
+ │ └── npm:@denotest/peer-dep-test-peer@2.0.0 ([WILDCARD])
+ └── npm:@denotest/peer-dep-test-peer@2.0.0 ([WILDCARD])
diff --git a/cli/tests/testdata/npm/peer_deps_with_copied_folders/main_info_json.out b/cli/tests/testdata/npm/peer_deps_with_copied_folders/main_info_json.out
new file mode 100644
index 000000000..634ec6251
--- /dev/null
+++ b/cli/tests/testdata/npm/peer_deps_with_copied_folders/main_info_json.out
@@ -0,0 +1,95 @@
+{
+ "roots": [
+ "[WILDCARD]/npm/peer_deps_with_copied_folders/main.ts"
+ ],
+ "modules": [
+ {
+ "dependencies": [
+ {
+ "specifier": "npm:@denotest/peer-dep-test-child@1",
+ "code": {
+ "specifier": "npm:@denotest/peer-dep-test-child@1",
+ "span": {
+ "start": {
+ "line": 0,
+ "character": 21
+ },
+ "end": {
+ "line": 0,
+ "character": 58
+ }
+ }
+ },
+ "npmPackage": "@denotest/peer-dep-test-child@1.0.0_@denotest+peer-dep-test-peer@1.0.0"
+ },
+ {
+ "specifier": "npm:@denotest/peer-dep-test-child@2",
+ "code": {
+ "specifier": "npm:@denotest/peer-dep-test-child@2",
+ "span": {
+ "start": {
+ "line": 1,
+ "character": 21
+ },
+ "end": {
+ "line": 1,
+ "character": 58
+ }
+ }
+ },
+ "npmPackage": "@denotest/peer-dep-test-child@2.0.0_@denotest+peer-dep-test-peer@2.0.0"
+ }
+ ],
+ "kind": "esm",
+ "local": "[WILDCARD]main.ts",
+ "emit": null,
+ "map": null,
+ "size": 171,
+ "mediaType": "TypeScript",
+ "specifier": "file://[WILDCARD]/main.ts"
+ }
+ ],
+ "redirects": {},
+ "npmPackages": {
+ "@denotest/peer-dep-test-child@1.0.0_@denotest+peer-dep-test-peer@1.0.0": {
+ "name": "@denotest/peer-dep-test-child",
+ "version": "1.0.0",
+ "dependencies": [
+ "@denotest/peer-dep-test-grandchild@1.0.0_@denotest+peer-dep-test-peer@1.0.0",
+ "@denotest/peer-dep-test-peer@1.0.0"
+ ]
+ },
+ "@denotest/peer-dep-test-child@2.0.0_@denotest+peer-dep-test-peer@2.0.0": {
+ "name": "@denotest/peer-dep-test-child",
+ "version": "2.0.0",
+ "dependencies": [
+ "@denotest/peer-dep-test-grandchild@1.0.0_@denotest+peer-dep-test-peer@2.0.0",
+ "@denotest/peer-dep-test-peer@2.0.0"
+ ]
+ },
+ "@denotest/peer-dep-test-grandchild@1.0.0_@denotest+peer-dep-test-peer@1.0.0": {
+ "name": "@denotest/peer-dep-test-grandchild",
+ "version": "1.0.0",
+ "dependencies": [
+ "@denotest/peer-dep-test-peer@1.0.0"
+ ]
+ },
+ "@denotest/peer-dep-test-grandchild@1.0.0_@denotest+peer-dep-test-peer@2.0.0": {
+ "name": "@denotest/peer-dep-test-grandchild",
+ "version": "1.0.0",
+ "dependencies": [
+ "@denotest/peer-dep-test-peer@2.0.0"
+ ]
+ },
+ "@denotest/peer-dep-test-peer@1.0.0": {
+ "name": "@denotest/peer-dep-test-peer",
+ "version": "1.0.0",
+ "dependencies": []
+ },
+ "@denotest/peer-dep-test-peer@2.0.0": {
+ "name": "@denotest/peer-dep-test-peer",
+ "version": "2.0.0",
+ "dependencies": []
+ }
+ }
+}
diff --git a/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-child/1.0.0/index.js b/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-child/1.0.0/index.js
new file mode 100644
index 000000000..636ec3c35
--- /dev/null
+++ b/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-child/1.0.0/index.js
@@ -0,0 +1 @@
+module.exports = require("@denotest/peer-dep-test-grandchild");
diff --git a/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-child/1.0.0/package.json b/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-child/1.0.0/package.json
new file mode 100644
index 000000000..32eb49851
--- /dev/null
+++ b/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-child/1.0.0/package.json
@@ -0,0 +1,8 @@
+{
+ "name": "@denotest/peer-dep-test-child",
+ "version": "1.0.0",
+ "dependencies": {
+ "@denotest/peer-dep-test-grandchild": "*",
+ "@denotest/peer-dep-test-peer": "^1"
+ }
+}
diff --git a/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-child/2.0.0/index.js b/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-child/2.0.0/index.js
new file mode 100644
index 000000000..636ec3c35
--- /dev/null
+++ b/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-child/2.0.0/index.js
@@ -0,0 +1 @@
+module.exports = require("@denotest/peer-dep-test-grandchild");
diff --git a/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-child/2.0.0/package.json b/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-child/2.0.0/package.json
new file mode 100644
index 000000000..3c82c01f9
--- /dev/null
+++ b/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-child/2.0.0/package.json
@@ -0,0 +1,8 @@
+{
+ "name": "@denotest/peer-dep-test-child",
+ "version": "2.0.0",
+ "dependencies": {
+ "@denotest/peer-dep-test-grandchild": "*",
+ "@denotest/peer-dep-test-peer": "^2"
+ }
+}
diff --git a/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-grandchild/1.0.0/dist/index.js b/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-grandchild/1.0.0/dist/index.js
new file mode 100644
index 000000000..9a0d9730b
--- /dev/null
+++ b/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-grandchild/1.0.0/dist/index.js
@@ -0,0 +1 @@
+module.exports = require("@denotest/peer-dep-test-peer");
diff --git a/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-grandchild/1.0.0/index.js b/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-grandchild/1.0.0/index.js
new file mode 100644
index 000000000..7d44863df
--- /dev/null
+++ b/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-grandchild/1.0.0/index.js
@@ -0,0 +1 @@
+module.exports = require("./dist/index");
diff --git a/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-grandchild/1.0.0/package.json b/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-grandchild/1.0.0/package.json
new file mode 100644
index 000000000..845ef414d
--- /dev/null
+++ b/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-grandchild/1.0.0/package.json
@@ -0,0 +1,7 @@
+{
+ "name": "@denotest/peer-dep-test-grandchild",
+ "version": "1.0.0",
+ "peerDependencies": {
+ "@denotest/peer-dep-test-peer": "*"
+ }
+}
diff --git a/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-peer/1.0.0/index.js b/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-peer/1.0.0/index.js
new file mode 100644
index 000000000..bd816eaba
--- /dev/null
+++ b/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-peer/1.0.0/index.js
@@ -0,0 +1 @@
+module.exports = 1;
diff --git a/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-peer/1.0.0/package.json b/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-peer/1.0.0/package.json
new file mode 100644
index 000000000..cedb3609e
--- /dev/null
+++ b/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-peer/1.0.0/package.json
@@ -0,0 +1,4 @@
+{
+ "name": "@denotest/peer-dep-test-peer",
+ "version": "1.0.0"
+}
diff --git a/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-peer/2.0.0/index.js b/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-peer/2.0.0/index.js
new file mode 100644
index 000000000..4bbffde10
--- /dev/null
+++ b/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-peer/2.0.0/index.js
@@ -0,0 +1 @@
+module.exports = 2;
diff --git a/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-peer/2.0.0/package.json b/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-peer/2.0.0/package.json
new file mode 100644
index 000000000..90c24f875
--- /dev/null
+++ b/cli/tests/testdata/npm/registry/@denotest/peer-dep-test-peer/2.0.0/package.json
@@ -0,0 +1,4 @@
+{
+ "name": "@denotest/peer-dep-test-peer",
+ "version": "2.0.0"
+}
diff --git a/cli/tools/info.rs b/cli/tools/info.rs
index 12b1ae4c3..99541c207 100644
--- a/cli/tools/info.rs
+++ b/cli/tools/info.rs
@@ -157,7 +157,8 @@ fn add_npm_packages_to_json(
});
if let Some(pkg) = maybe_package {
if let Some(module) = module.as_object_mut() {
- module.insert("npmPackage".to_string(), format!("{}", pkg.id).into());
+ module
+ .insert("npmPackage".to_string(), pkg.id.as_serialized().into());
// change the "kind" to be "npm"
module.insert("kind".to_string(), "npm".into());
}
@@ -190,7 +191,7 @@ fn add_npm_packages_to_json(
{
dep.insert(
"npmPackage".to_string(),
- format!("{}", pkg.id).into(),
+ pkg.id.as_serialized().into(),
);
}
}
@@ -212,11 +213,11 @@ fn add_npm_packages_to_json(
deps.sort();
let deps = deps
.into_iter()
- .map(|id| serde_json::Value::String(format!("{}", id)))
+ .map(|id| serde_json::Value::String(id.as_serialized()))
.collect::<Vec<_>>();
kv.insert("dependencies".to_string(), deps.into());
- json_packages.insert(format!("{}", &pkg.id), kv.into());
+ json_packages.insert(pkg.id.as_serialized(), kv.into());
}
json.insert("npmPackages".to_string(), json_packages.into());
@@ -504,7 +505,7 @@ impl<'a> GraphDisplayContext<'a> {
None => Specifier(module.specifier.clone()),
};
let was_seen = !self.seen.insert(match &package_or_specifier {
- Package(package) => package.id.to_string(),
+ Package(package) => package.id.as_serialized(),
Specifier(specifier) => specifier.to_string(),
});
let header_text = if was_seen {
@@ -572,11 +573,14 @@ impl<'a> GraphDisplayContext<'a> {
for dep_id in deps.into_iter() {
let maybe_size = self.npm_info.package_sizes.get(dep_id).cloned();
let size_str = maybe_size_to_text(maybe_size);
- let mut child =
- TreeNode::from_text(format!("npm:{} {}", dep_id, size_str));
+ let mut child = TreeNode::from_text(format!(
+ "npm:{} {}",
+ dep_id.as_serialized(),
+ size_str
+ ));
if let Some(package) = self.npm_info.packages.get(dep_id) {
if !package.dependencies.is_empty() {
- if self.seen.contains(&package.id.to_string()) {
+ if self.seen.contains(&package.id.as_serialized()) {
child.text = format!("{} {}", child.text, colors::gray("*"));
} else {
let package = package.clone();