-rw-r--r--  Cargo.lock                        |    5
-rw-r--r--  cli/Cargo.toml                    |    3
-rw-r--r--  cli/args/lockfile.rs              |    4
-rw-r--r--  cli/cache/mod.rs                  |    3
-rw-r--r--  cli/lsp/diagnostics.rs            |    9
-rw-r--r--  cli/lsp/documents.rs              |    8
-rw-r--r--  cli/lsp/language_server.rs        |    8
-rw-r--r--  cli/node/mod.rs                   |    4
-rw-r--r--  cli/npm/cache.rs                  |   61
-rw-r--r--  cli/npm/mod.rs                    |    3
-rw-r--r--  cli/npm/registry.rs               |  256
-rw-r--r--  cli/npm/resolution/common.rs      |  241
-rw-r--r--  cli/npm/resolution/graph.rs       | 3025
-rw-r--r--  cli/npm/resolution/mod.rs         |  265
-rw-r--r--  cli/npm/resolution/snapshot.rs    |  151
-rw-r--r--  cli/npm/resolution/specifier.rs   |    6
-rw-r--r--  cli/npm/resolvers/common.rs       |   11
-rw-r--r--  cli/npm/resolvers/global.rs       |   17
-rw-r--r--  cli/npm/resolvers/local.rs        |   61
-rw-r--r--  cli/npm/resolvers/mod.rs          |   14
-rw-r--r--  cli/proc_state.rs                 |   19
-rw-r--r--  cli/tools/info.rs                 |   57
-rw-r--r--  cli/tools/installer.rs            |    6
-rw-r--r--  cli/tools/repl/session.rs         |    4
-rw-r--r--  cli/tools/run.rs                  |   14
-rw-r--r--  cli/tsc/mod.rs                    |    9
-rw-r--r--  cli/worker.rs                     |    6
27 files changed, 2717 insertions, 1553 deletions
diff --git a/Cargo.lock b/Cargo.lock
index b5a464dde..9427558bb 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -794,6 +794,7 @@ checksum = "8d7439c3735f405729d52c3fbbe4de140eaf938a1fe47d227c27f8254d4302a5"
name = "deno"
version = "1.30.3"
dependencies = [
+ "async-trait",
"atty",
"base32",
"base64 0.13.1",
@@ -1092,9 +1093,9 @@ dependencies = [
[[package]]
name = "deno_graph"
-version = "0.43.2"
+version = "0.43.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2c8c63d016d4bbc7fcd0cecfb3903b92591ace5ba61229e2c5cf61337b21d165"
+checksum = "3b654f093ae79ca93715d6df840f45890dc61fc93e7f63ab0a7442c2a494ecac"
dependencies = [
"anyhow",
"data-url",
diff --git a/cli/Cargo.toml b/cli/Cargo.toml
index f3bb67765..28aba8648 100644
--- a/cli/Cargo.toml
+++ b/cli/Cargo.toml
@@ -46,13 +46,14 @@ deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "dep_gra
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = "0.55.0"
deno_emit = "0.15.0"
-deno_graph = "0.43.2"
+deno_graph = "0.43.3"
deno_lint = { version = "0.38.0", features = ["docs"] }
deno_lockfile.workspace = true
deno_runtime = { workspace = true, features = ["dont_create_runtime_snapshot", "include_js_files_for_snapshotting"] }
deno_task_shell = "0.8.1"
napi_sym.workspace = true
+async-trait.workspace = true
atty.workspace = true
base32 = "=0.4.0"
base64.workspace = true
diff --git a/cli/args/lockfile.rs b/cli/args/lockfile.rs
index f93027430..1a3233c5a 100644
--- a/cli/args/lockfile.rs
+++ b/cli/args/lockfile.rs
@@ -75,8 +75,8 @@ impl Into<NpmPackageLockfileInfo> for NpmResolutionPackage {
.collect();
NpmPackageLockfileInfo {
- display_id: self.id.display(),
- serialized_id: self.id.as_serialized(),
+ display_id: self.pkg_id.nv.to_string(),
+ serialized_id: self.pkg_id.as_serialized(),
integrity: self.dist.integrity().to_string(),
dependencies,
}
diff --git a/cli/cache/mod.rs b/cli/cache/mod.rs
index ca03ca940..5a65bd0a2 100644
--- a/cli/cache/mod.rs
+++ b/cli/cache/mod.rs
@@ -103,7 +103,8 @@ impl Loader for FetchCacher {
) -> LoadFuture {
if specifier.scheme() == "npm" {
return Box::pin(futures::future::ready(
- match deno_graph::npm::NpmPackageReference::from_specifier(specifier) {
+ match deno_graph::npm::NpmPackageReqReference::from_specifier(specifier)
+ {
Ok(_) => Ok(Some(deno_graph::source::LoadResponse::External {
specifier: specifier.clone(),
})),
diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs
index e33059001..f8f3aa371 100644
--- a/cli/lsp/diagnostics.rs
+++ b/cli/lsp/diagnostics.rs
@@ -26,7 +26,7 @@ use deno_core::serde::Deserialize;
use deno_core::serde_json;
use deno_core::serde_json::json;
use deno_core::ModuleSpecifier;
-use deno_graph::npm::NpmPackageReference;
+use deno_graph::npm::NpmPackageReqReference;
use deno_graph::Resolution;
use deno_graph::ResolutionError;
use deno_graph::SpecifierError;
@@ -614,7 +614,7 @@ pub enum DenoDiagnostic {
/// A data module was not found in the cache.
NoCacheData(ModuleSpecifier),
/// A remote npm package reference was not found in the cache.
- NoCacheNpm(NpmPackageReference, ModuleSpecifier),
+ NoCacheNpm(NpmPackageReqReference, ModuleSpecifier),
/// A local module was not found on the local file system.
NoLocal(ModuleSpecifier),
/// The specifier resolved to a remote specifier that was redirected to
@@ -905,7 +905,8 @@ fn diagnose_resolution(
.push(DenoDiagnostic::NoAssertType.to_lsp_diagnostic(&range)),
}
}
- } else if let Ok(pkg_ref) = NpmPackageReference::from_specifier(specifier)
+ } else if let Ok(pkg_ref) =
+ NpmPackageReqReference::from_specifier(specifier)
{
if let Some(npm_resolver) = &snapshot.maybe_npm_resolver {
// show diagnostics for npm package references that aren't cached
@@ -929,7 +930,7 @@ fn diagnose_resolution(
} else if let Some(npm_resolver) = &snapshot.maybe_npm_resolver {
// check that a @types/node package exists in the resolver
let types_node_ref =
- NpmPackageReference::from_str("npm:@types/node").unwrap();
+ NpmPackageReqReference::from_str("npm:@types/node").unwrap();
if npm_resolver
.resolve_package_folder_from_deno_module(&types_node_ref.req)
.is_err()
diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs
index faabb6a2f..c97555911 100644
--- a/cli/lsp/documents.rs
+++ b/cli/lsp/documents.rs
@@ -30,8 +30,8 @@ use deno_core::futures::future;
use deno_core::parking_lot::Mutex;
use deno_core::url;
use deno_core::ModuleSpecifier;
-use deno_graph::npm::NpmPackageReference;
use deno_graph::npm::NpmPackageReq;
+use deno_graph::npm::NpmPackageReqReference;
use deno_graph::GraphImport;
use deno_graph::Resolution;
use deno_runtime::deno_node::NodeResolutionMode;
@@ -1103,7 +1103,7 @@ impl Documents {
.and_then(|r| r.maybe_specifier())
{
results.push(self.resolve_dependency(specifier, maybe_npm_resolver));
- } else if let Ok(npm_ref) = NpmPackageReference::from_str(&specifier) {
+ } else if let Ok(npm_ref) = NpmPackageReqReference::from_str(&specifier) {
results.push(maybe_npm_resolver.map(|npm_resolver| {
NodeResolution::into_specifier_and_media_type(
node_resolve_npm_reference(
@@ -1243,7 +1243,7 @@ impl Documents {
// perf: ensure this is not added to unless this specifier has never
// been analyzed in order to not cause an extra file system lookup
self.pending_specifiers.push_back(dep.clone());
- if let Ok(reference) = NpmPackageReference::from_specifier(dep) {
+ if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
self.npm_reqs.insert(reference.req);
}
}
@@ -1321,7 +1321,7 @@ impl Documents {
specifier: &ModuleSpecifier,
maybe_npm_resolver: Option<&NpmPackageResolver>,
) -> Option<(ModuleSpecifier, MediaType)> {
- if let Ok(npm_ref) = NpmPackageReference::from_specifier(specifier) {
+ if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(specifier) {
return maybe_npm_resolver.map(|npm_resolver| {
NodeResolution::into_specifier_and_media_type(
node_resolve_npm_reference(
diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs
index 1bde85d8f..33b3379a2 100644
--- a/cli/lsp/language_server.rs
+++ b/cli/lsp/language_server.rs
@@ -73,7 +73,7 @@ use crate::graph_util;
use crate::http_util::HttpClient;
use crate::npm::NpmCache;
use crate::npm::NpmPackageResolver;
-use crate::npm::RealNpmRegistryApi;
+use crate::npm::NpmRegistryApi;
use crate::proc_state::ProcState;
use crate::tools::fmt::format_file;
use crate::tools::fmt::format_parsed_source;
@@ -304,7 +304,7 @@ fn create_lsp_npm_resolver(
dir: &DenoDir,
http_client: HttpClient,
) -> NpmPackageResolver {
- let registry_url = RealNpmRegistryApi::default_url();
+ let registry_url = NpmRegistryApi::default_url();
let progress_bar = ProgressBar::new(ProgressBarStyle::TextOnly);
let npm_cache = NpmCache::from_deno_dir(
dir,
@@ -316,8 +316,8 @@ fn create_lsp_npm_resolver(
http_client.clone(),
progress_bar.clone(),
);
- let api = RealNpmRegistryApi::new(
- registry_url,
+ let api = NpmRegistryApi::new(
+ registry_url.clone(),
npm_cache.clone(),
http_client,
progress_bar,
diff --git a/cli/node/mod.rs b/cli/node/mod.rs
index 397e189d6..9286400d4 100644
--- a/cli/node/mod.rs
+++ b/cli/node/mod.rs
@@ -15,8 +15,8 @@ use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::serde_json::Value;
use deno_core::url::Url;
-use deno_graph::npm::NpmPackageReference;
use deno_graph::npm::NpmPackageReq;
+use deno_graph::npm::NpmPackageReqReference;
use deno_runtime::deno_node;
use deno_runtime::deno_node::errors;
use deno_runtime::deno_node::find_builtin_node_module;
@@ -241,7 +241,7 @@ pub fn node_resolve(
}
pub fn node_resolve_npm_reference(
- reference: &NpmPackageReference,
+ reference: &NpmPackageReqReference,
mode: NodeResolutionMode,
npm_resolver: &NpmPackageResolver,
permissions: &mut dyn NodePermissions,
diff --git a/cli/npm/cache.rs b/cli/npm/cache.rs
index a602feb57..b2d932911 100644
--- a/cli/npm/cache.rs
+++ b/cli/npm/cache.rs
@@ -13,6 +13,7 @@ use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::url::Url;
+use deno_graph::npm::NpmPackageNv;
use deno_graph::semver::Version;
use crate::args::CacheSetting;
@@ -107,8 +108,7 @@ pub fn with_folder_sync_lock(
}
pub struct NpmPackageCacheFolderId {
- pub name: String,
- pub version: Version,
+ pub nv: NpmPackageNv,
/// Peer dependency resolution may require us to have duplicate copies
/// of the same package.
pub copy_index: usize,
@@ -117,8 +117,7 @@ pub struct NpmPackageCacheFolderId {
impl NpmPackageCacheFolderId {
pub fn with_no_count(&self) -> Self {
Self {
- name: self.name.clone(),
- version: self.version.clone(),
+ nv: self.nv.clone(),
copy_index: 0,
}
}
@@ -126,7 +125,7 @@ impl NpmPackageCacheFolderId {
impl std::fmt::Display for NpmPackageCacheFolderId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- write!(f, "{}@{}", self.name, self.version)?;
+ write!(f, "{}", self.nv)?;
if self.copy_index > 0 {
write!(f, "_{}", self.copy_index)?;
}
@@ -188,14 +187,14 @@ impl ReadonlyNpmCache {
) -> PathBuf {
if id.copy_index == 0 {
self.package_folder_for_name_and_version(
- &id.name,
- &id.version,
+ &id.nv.name,
+ &id.nv.version,
registry_url,
)
} else {
self
- .package_name_folder(&id.name, registry_url)
- .join(format!("{}_{}", id.version, id.copy_index))
+ .package_name_folder(&id.nv.name, registry_url)
+ .join(format!("{}_{}", id.nv.version, id.copy_index))
}
}
@@ -304,8 +303,10 @@ impl ReadonlyNpmCache {
(version_part, 0)
};
Some(NpmPackageCacheFolderId {
- name,
- version: Version::parse_from_npm(version).ok()?,
+ nv: NpmPackageNv {
+ name,
+ version: Version::parse_from_npm(version).ok()?,
+ },
copy_index,
})
}
@@ -440,16 +441,19 @@ impl NpmCache {
// if this file exists, then the package didn't successfully extract
// the first time, or another process is currently extracting the zip file
&& !package_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME).exists()
- && self.cache_setting.should_use_for_npm_package(&id.name)
+ && self.cache_setting.should_use_for_npm_package(&id.nv.name)
{
return Ok(());
}
- let original_package_folder = self
- .readonly
- .package_folder_for_name_and_version(&id.name, &id.version, registry_url);
+ let original_package_folder =
+ self.readonly.package_folder_for_name_and_version(
+ &id.nv.name,
+ &id.nv.version,
+ registry_url,
+ );
with_folder_sync_lock(
- (id.name.as_str(), &id.version),
+ (id.nv.name.as_str(), &id.nv.version),
&package_folder,
|| hard_link_dir_recursive(&original_package_folder, &package_folder),
)?;
@@ -514,6 +518,7 @@ pub fn mixed_case_package_name_decode(name: &str) -> Option<String> {
#[cfg(test)]
mod test {
use deno_core::url::Url;
+ use deno_graph::npm::NpmPackageNv;
use deno_graph::semver::Version;
use super::ReadonlyNpmCache;
@@ -529,8 +534,10 @@ mod test {
assert_eq!(
cache.package_folder_for_id(
&NpmPackageCacheFolderId {
- name: "json".to_string(),
- version: Version::parse_from_npm("1.2.5").unwrap(),
+ nv: NpmPackageNv {
+ name: "json".to_string(),
+ version: Version::parse_from_npm("1.2.5").unwrap(),
+ },
copy_index: 0,
},
&registry_url,
@@ -544,8 +551,10 @@ mod test {
assert_eq!(
cache.package_folder_for_id(
&NpmPackageCacheFolderId {
- name: "json".to_string(),
- version: Version::parse_from_npm("1.2.5").unwrap(),
+ nv: NpmPackageNv {
+ name: "json".to_string(),
+ version: Version::parse_from_npm("1.2.5").unwrap(),
+ },
copy_index: 1,
},
&registry_url,
@@ -559,8 +568,10 @@ mod test {
assert_eq!(
cache.package_folder_for_id(
&NpmPackageCacheFolderId {
- name: "JSON".to_string(),
- version: Version::parse_from_npm("2.1.5").unwrap(),
+ nv: NpmPackageNv {
+ name: "JSON".to_string(),
+ version: Version::parse_from_npm("2.1.5").unwrap(),
+ },
copy_index: 0,
},
&registry_url,
@@ -574,8 +585,10 @@ mod test {
assert_eq!(
cache.package_folder_for_id(
&NpmPackageCacheFolderId {
- name: "@types/JSON".to_string(),
- version: Version::parse_from_npm("2.1.5").unwrap(),
+ nv: NpmPackageNv {
+ name: "@types/JSON".to_string(),
+ version: Version::parse_from_npm("2.1.5").unwrap(),
+ },
copy_index: 0,
},
&registry_url,
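
The hunks above replace the separate name/version fields on NpmPackageCacheFolderId with deno_graph's NpmPackageNv. A small illustrative sketch, not part of the diff, of what call sites now construct; it assumes NpmPackageNv's Display prints "name@version", which is what the rewritten Display impl above relies on:

    use deno_graph::npm::NpmPackageNv;
    use deno_graph::semver::Version;

    // Folder ids now carry an NpmPackageNv plus a copy index.
    let folder_id = NpmPackageCacheFolderId {
      nv: NpmPackageNv {
        name: "chalk".to_string(),
        version: Version::parse_from_npm("5.0.1").unwrap(),
      },
      copy_index: 1,
    };
    // Display writes the nv first, then "_{copy_index}" when the index is > 0
    // (assuming NpmPackageNv formats as "name@version").
    assert_eq!(folder_id.to_string(), "chalk@5.0.1_1");
    // with_no_count() drops the copy index back to 0.
    assert_eq!(folder_id.with_no_count().to_string(), "chalk@5.0.1");
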
diff --git a/cli/npm/mod.rs b/cli/npm/mod.rs
index 20db61081..2a58bb01a 100644
--- a/cli/npm/mod.rs
+++ b/cli/npm/mod.rs
@@ -10,9 +10,8 @@ pub use cache::NpmCache;
#[cfg(test)]
pub use registry::NpmPackageVersionDistInfo;
pub use registry::NpmRegistryApi;
-pub use registry::RealNpmRegistryApi;
pub use resolution::resolve_graph_npm_info;
-pub use resolution::NpmPackageNodeId;
+pub use resolution::NpmPackageId;
pub use resolution::NpmResolutionPackage;
pub use resolution::NpmResolutionSnapshot;
pub use resolvers::NpmPackageResolver;
diff --git a/cli/npm/registry.rs b/cli/npm/registry.rs
index a758ae7be..510f58132 100644
--- a/cli/npm/registry.rs
+++ b/cli/npm/registry.rs
@@ -9,19 +9,19 @@ use std::io::ErrorKind;
use std::path::PathBuf;
use std::sync::Arc;
+use async_trait::async_trait;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
-use deno_core::futures::future::BoxFuture;
-use deno_core::futures::FutureExt;
+use deno_core::futures;
use deno_core::parking_lot::Mutex;
use deno_core::serde::Deserialize;
use deno_core::serde_json;
use deno_core::url::Url;
-use deno_graph::semver::Version;
+use deno_graph::npm::NpmPackageNv;
use deno_graph::semver::VersionReq;
-use deno_runtime::colors;
+use once_cell::sync::Lazy;
use serde::Serialize;
use crate::args::package_json::parse_dep_entry_name_and_raw_version;
@@ -31,6 +31,7 @@ use crate::http_util::HttpClient;
use crate::util::fs::atomic_write_file;
use crate::util::progress_bar::ProgressBar;
+use super::cache::should_sync_download;
use super::cache::NpmCache;
// npm registry docs: https://github.com/npm/registry/blob/master/docs/REGISTRY-API.md
@@ -43,7 +44,7 @@ pub struct NpmPackageInfo {
pub dist_tags: HashMap<String, String>,
}
-#[derive(Debug, Eq, PartialEq)]
+#[derive(Debug, Clone, Eq, PartialEq)]
pub enum NpmDependencyEntryKind {
Dep,
Peer,
@@ -56,7 +57,7 @@ impl NpmDependencyEntryKind {
}
}
-#[derive(Debug, Eq, PartialEq)]
+#[derive(Debug, Clone, Eq, PartialEq)]
pub struct NpmDependencyEntry {
pub kind: NpmDependencyEntryKind,
pub bare_specifier: String,
@@ -181,83 +182,30 @@ impl NpmPackageVersionDistInfo {
}
}
-pub trait NpmRegistryApi: Clone + Sync + Send + 'static {
- fn maybe_package_info(
- &self,
- name: &str,
- ) -> BoxFuture<'static, Result<Option<Arc<NpmPackageInfo>>, AnyError>>;
-
- fn package_info(
- &self,
- name: &str,
- ) -> BoxFuture<'static, Result<Arc<NpmPackageInfo>, AnyError>> {
- let api = self.clone();
- let name = name.to_string();
- async move {
- let maybe_package_info = api.maybe_package_info(&name).await?;
- match maybe_package_info {
- Some(package_info) => Ok(package_info),
- None => bail!("npm package '{}' does not exist", name),
+static NPM_REGISTRY_DEFAULT_URL: Lazy<Url> = Lazy::new(|| {
+ let env_var_name = "NPM_CONFIG_REGISTRY";
+ if let Ok(registry_url) = std::env::var(env_var_name) {
+ // ensure there is a trailing slash for the directory
+ let registry_url = format!("{}/", registry_url.trim_end_matches('/'));
+ match Url::parse(&registry_url) {
+ Ok(url) => {
+ return url;
+ }
+ Err(err) => {
+ log::debug!("Invalid {} environment variable: {:#}", env_var_name, err,);
}
}
- .boxed()
- }
-
- fn package_version_info(
- &self,
- name: &str,
- version: &Version,
- ) -> BoxFuture<'static, Result<Option<NpmPackageVersionInfo>, AnyError>> {
- let api = self.clone();
- let name = name.to_string();
- let version = version.to_string();
- async move {
- let package_info = api.package_info(&name).await?;
- Ok(package_info.versions.get(&version).cloned())
- }
- .boxed()
}
- /// Clears the internal memory cache.
- fn clear_memory_cache(&self);
-}
+ Url::parse("https://registry.npmjs.org").unwrap()
+});
-#[derive(Clone)]
-pub struct RealNpmRegistryApi(Arc<RealNpmRegistryApiInner>);
-
-impl RealNpmRegistryApi {
- pub fn default_url() -> Url {
- // todo(dsherret): remove DENO_NPM_REGISTRY in the future (maybe May 2023)
- let env_var_names = ["NPM_CONFIG_REGISTRY", "DENO_NPM_REGISTRY"];
- for env_var_name in env_var_names {
- if let Ok(registry_url) = std::env::var(env_var_name) {
- // ensure there is a trailing slash for the directory
- let registry_url = format!("{}/", registry_url.trim_end_matches('/'));
- match Url::parse(&registry_url) {
- Ok(url) => {
- if env_var_name == "DENO_NPM_REGISTRY" {
- log::warn!(
- "{}",
- colors::yellow(concat!(
- "DENO_NPM_REGISTRY was intended for internal testing purposes only. ",
- "Please update to NPM_CONFIG_REGISTRY instead.",
- )),
- );
- }
- return url;
- }
- Err(err) => {
- log::debug!(
- "Invalid {} environment variable: {:#}",
- env_var_name,
- err,
- );
- }
- }
- }
- }
+#[derive(Clone, Debug)]
+pub struct NpmRegistryApi(Arc<dyn NpmRegistryApiInner>);
- Url::parse("https://registry.npmjs.org").unwrap()
+impl NpmRegistryApi {
+ pub fn default_url() -> &'static Url {
+ &NPM_REGISTRY_DEFAULT_URL
}
pub fn new(
@@ -276,26 +224,110 @@ impl RealNpmRegistryApi {
}))
}
+ #[cfg(test)]
+ pub fn new_for_test(api: TestNpmRegistryApiInner) -> NpmRegistryApi {
+ Self(Arc::new(api))
+ }
+
+ pub async fn package_info(
+ &self,
+ name: &str,
+ ) -> Result<Arc<NpmPackageInfo>, AnyError> {
+ let maybe_package_info = self.0.maybe_package_info(name).await?;
+ match maybe_package_info {
+ Some(package_info) => Ok(package_info),
+ None => bail!("npm package '{}' does not exist", name),
+ }
+ }
+
+ pub async fn package_version_info(
+ &self,
+ nv: &NpmPackageNv,
+ ) -> Result<Option<NpmPackageVersionInfo>, AnyError> {
+ let package_info = self.package_info(&nv.name).await?;
+ Ok(package_info.versions.get(&nv.version.to_string()).cloned())
+ }
+
+ /// Caches all the package information in memory in parallel.
+ pub async fn cache_in_parallel(
+ &self,
+ package_names: Vec<String>,
+ ) -> Result<(), AnyError> {
+ let mut unresolved_tasks = Vec::with_capacity(package_names.len());
+
+ // cache the package info up front in parallel
+ if should_sync_download() {
+ // for deterministic test output
+ let mut ordered_names = package_names;
+ ordered_names.sort();
+ for name in ordered_names {
+ self.package_info(&name).await?;
+ }
+ } else {
+ for name in package_names {
+ let api = self.clone();
+ unresolved_tasks.push(tokio::task::spawn(async move {
+ // This is ok to call because api will internally cache
+ // the package information in memory.
+ api.package_info(&name).await
+ }));
+ }
+ };
+
+ for result in futures::future::join_all(unresolved_tasks).await {
+ result??; // surface the first error
+ }
+
+ Ok(())
+ }
+
+ /// Clears the internal memory cache.
+ pub fn clear_memory_cache(&self) {
+ self.0.clear_memory_cache();
+ }
+
pub fn base_url(&self) -> &Url {
- &self.0.base_url
+ self.0.base_url()
}
}
-impl NpmRegistryApi for RealNpmRegistryApi {
- fn maybe_package_info(
+#[async_trait]
+trait NpmRegistryApiInner: std::fmt::Debug + Sync + Send + 'static {
+ async fn maybe_package_info(
&self,
name: &str,
- ) -> BoxFuture<'static, Result<Option<Arc<NpmPackageInfo>>, AnyError>> {
- let api = self.clone();
- let name = name.to_string();
- async move { api.0.maybe_package_info(&name).await }.boxed()
+ ) -> Result<Option<Arc<NpmPackageInfo>>, AnyError>;
+
+ fn clear_memory_cache(&self);
+
+ fn get_cached_package_info(&self, name: &str) -> Option<Arc<NpmPackageInfo>>;
+
+ fn base_url(&self) -> &Url;
+}
+
+#[async_trait]
+impl NpmRegistryApiInner for RealNpmRegistryApiInner {
+ fn base_url(&self) -> &Url {
+ &self.base_url
+ }
+
+ async fn maybe_package_info(
+ &self,
+ name: &str,
+ ) -> Result<Option<Arc<NpmPackageInfo>>, AnyError> {
+ self.maybe_package_info(name).await
}
fn clear_memory_cache(&self) {
- self.0.mem_cache.lock().clear();
+ self.mem_cache.lock().clear();
+ }
+
+ fn get_cached_package_info(&self, name: &str) -> Option<Arc<NpmPackageInfo>> {
+ self.mem_cache.lock().get(name).cloned().flatten()
}
}
+#[derive(Debug)]
struct RealNpmRegistryApiInner {
base_url: Url,
cache: NpmCache,
@@ -461,16 +493,44 @@ impl RealNpmRegistryApiInner {
}
}
+#[derive(Debug)]
+struct NullNpmRegistryApiInner;
+
+#[async_trait]
+impl NpmRegistryApiInner for NullNpmRegistryApiInner {
+ async fn maybe_package_info(
+ &self,
+ _name: &str,
+ ) -> Result<Option<Arc<NpmPackageInfo>>, AnyError> {
+ Err(deno_core::anyhow::anyhow!(
+ "Deno bug. Please report. Registry API was not initialized."
+ ))
+ }
+
+ fn clear_memory_cache(&self) {}
+
+ fn get_cached_package_info(
+ &self,
+ _name: &str,
+ ) -> Option<Arc<NpmPackageInfo>> {
+ None
+ }
+
+ fn base_url(&self) -> &Url {
+ NpmRegistryApi::default_url()
+ }
+}
+
/// Note: This test struct is not thread safe for setup
/// purposes. Construct everything on the same thread.
#[cfg(test)]
-#[derive(Clone, Default)]
-pub struct TestNpmRegistryApi {
+#[derive(Clone, Default, Debug)]
+pub struct TestNpmRegistryApiInner {
package_infos: Arc<Mutex<HashMap<String, NpmPackageInfo>>>,
}
#[cfg(test)]
-impl TestNpmRegistryApi {
+impl TestNpmRegistryApiInner {
pub fn add_package_info(&self, name: &str, info: NpmPackageInfo) {
let previous = self.package_infos.lock().insert(name.to_string(), info);
assert!(previous.is_none());
@@ -554,16 +614,28 @@ impl TestNpmRegistryApi {
}
#[cfg(test)]
-impl NpmRegistryApi for TestNpmRegistryApi {
- fn maybe_package_info(
+#[async_trait]
+impl NpmRegistryApiInner for TestNpmRegistryApiInner {
+ async fn maybe_package_info(
&self,
name: &str,
- ) -> BoxFuture<'static, Result<Option<Arc<NpmPackageInfo>>, AnyError>> {
+ ) -> Result<Option<Arc<NpmPackageInfo>>, AnyError> {
let result = self.package_infos.lock().get(name).cloned();
- Box::pin(deno_core::futures::future::ready(Ok(result.map(Arc::new))))
+ Ok(result.map(Arc::new))
}
fn clear_memory_cache(&self) {
// do nothing for the test api
}
+
+ fn get_cached_package_info(
+ &self,
+ _name: &str,
+ ) -> Option<Arc<NpmPackageInfo>> {
+ None
+ }
+
+ fn base_url(&self) -> &Url {
+ NpmRegistryApi::default_url()
+ }
}
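
The registry API above is no longer a generic trait bound: RealNpmRegistryApi and the NpmRegistryApi trait are folded into a single concrete NpmRegistryApi that wraps an Arc<dyn NpmRegistryApiInner>. A rough, test-flavored sketch of the new surface, not part of the diff; it assumes the code sits in this module's #[cfg(test)] block and that tokio's test macro is available, and the "chalk" package data is made up:

    use std::collections::HashMap;

    #[tokio::test]
    async fn registry_api_wrapper_sketch() {
      let inner = TestNpmRegistryApiInner::default();
      inner.add_package_info(
        "chalk",
        NpmPackageInfo {
          name: "chalk".to_string(),
          versions: HashMap::from([(
            "5.0.1".to_string(),
            NpmPackageVersionInfo::default(),
          )]),
          dist_tags: HashMap::from([(
            "latest".to_string(),
            "5.0.1".to_string(),
          )]),
        },
      );
      let api = NpmRegistryApi::new_for_test(inner);
      // package_info now returns Result<Arc<NpmPackageInfo>, AnyError> directly
      // instead of a BoxFuture, and errors (rather than returning None) when
      // the package does not exist.
      let info = api.package_info("chalk").await.unwrap();
      assert_eq!(info.versions.len(), 1);
      assert!(api.package_info("does-not-exist").await.is_err());
    }
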
diff --git a/cli/npm/resolution/common.rs b/cli/npm/resolution/common.rs
new file mode 100644
index 000000000..b733d8eeb
--- /dev/null
+++ b/cli/npm/resolution/common.rs
@@ -0,0 +1,241 @@
+// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+
+use deno_core::anyhow::bail;
+use deno_core::error::AnyError;
+use deno_graph::semver::Version;
+use deno_graph::semver::VersionReq;
+use once_cell::sync::Lazy;
+
+use super::NpmPackageId;
+use crate::npm::registry::NpmPackageInfo;
+use crate::npm::registry::NpmPackageVersionInfo;
+
+pub static LATEST_VERSION_REQ: Lazy<VersionReq> =
+ Lazy::new(|| VersionReq::parse_from_specifier("latest").unwrap());
+
+pub fn resolve_best_package_version_and_info<'info, 'version>(
+ version_req: &VersionReq,
+ package_info: &'info NpmPackageInfo,
+ existing_versions: impl Iterator<Item = &'version Version>,
+) -> Result<VersionAndInfo<'info>, AnyError> {
+ if let Some(version) = resolve_best_from_existing_versions(
+ version_req,
+ package_info,
+ existing_versions,
+ )? {
+ match package_info.versions.get(&version.to_string()) {
+ Some(version_info) => Ok(VersionAndInfo {
+ version,
+ info: version_info,
+ }),
+ None => {
+ bail!(
+ "could not find version '{}' for '{}'",
+ version,
+ &package_info.name
+ )
+ }
+ }
+ } else {
+ // get the information
+ get_resolved_package_version_and_info(version_req, package_info, None)
+ }
+}
+
+#[derive(Clone)]
+pub struct VersionAndInfo<'a> {
+ pub version: Version,
+ pub info: &'a NpmPackageVersionInfo,
+}
+
+fn get_resolved_package_version_and_info<'a>(
+ version_req: &VersionReq,
+ info: &'a NpmPackageInfo,
+ parent: Option<&NpmPackageId>,
+) -> Result<VersionAndInfo<'a>, AnyError> {
+ if let Some(tag) = version_req.tag() {
+ tag_to_version_info(info, tag, parent)
+ } else {
+ let mut maybe_best_version: Option<VersionAndInfo> = None;
+ for version_info in info.versions.values() {
+ let version = Version::parse_from_npm(&version_info.version)?;
+ if version_req.matches(&version) {
+ let is_best_version = maybe_best_version
+ .as_ref()
+ .map(|best_version| best_version.version.cmp(&version).is_lt())
+ .unwrap_or(true);
+ if is_best_version {
+ maybe_best_version = Some(VersionAndInfo {
+ version,
+ info: version_info,
+ });
+ }
+ }
+ }
+
+ match maybe_best_version {
+ Some(v) => Ok(v),
+ // If the package isn't found, it likely means that the user needs to use
+ // `--reload` to get the latest npm package information. Although it seems
+ // like we could make this smart by fetching the latest information for
+ // this package here, we really need a full restart. There could be very
+ // interesting bugs that occur if this package's version was resolved by
+ // something previous using the old information, then now being smart here
+ // causes a new fetch of the package information, meaning this time the
+ // previous resolution of this package's version resolved to an older
+ // version, but next time to a different version because it has new information.
+ None => bail!(
+ concat!(
+ "Could not find npm package '{}' matching {}{}. ",
+ "Try retrieving the latest npm package information by running with --reload",
+ ),
+ info.name,
+ version_req.version_text(),
+ match parent {
+ Some(resolved_id) => format!(" as specified in {}", resolved_id.nv),
+ None => String::new(),
+ }
+ ),
+ }
+ }
+}
+
+pub fn version_req_satisfies(
+ version_req: &VersionReq,
+ version: &Version,
+ package_info: &NpmPackageInfo,
+ parent: Option<&NpmPackageId>,
+) -> Result<bool, AnyError> {
+ match version_req.tag() {
+ Some(tag) => {
+ let tag_version = tag_to_version_info(package_info, tag, parent)?.version;
+ Ok(tag_version == *version)
+ }
+ None => Ok(version_req.matches(version)),
+ }
+}
+
+fn resolve_best_from_existing_versions<'a>(
+ version_req: &VersionReq,
+ package_info: &NpmPackageInfo,
+ existing_versions: impl Iterator<Item = &'a Version>,
+) -> Result<Option<Version>, AnyError> {
+ let mut maybe_best_version: Option<&Version> = None;
+ for version in existing_versions {
+ if version_req_satisfies(version_req, version, package_info, None)? {
+ let is_best_version = maybe_best_version
+ .as_ref()
+ .map(|best_version| (*best_version).cmp(version).is_lt())
+ .unwrap_or(true);
+ if is_best_version {
+ maybe_best_version = Some(version);
+ }
+ }
+ }
+ Ok(maybe_best_version.cloned())
+}
+
+fn tag_to_version_info<'a>(
+ info: &'a NpmPackageInfo,
+ tag: &str,
+ parent: Option<&NpmPackageId>,
+) -> Result<VersionAndInfo<'a>, AnyError> {
+ // For when someone just specifies @types/node, we want to pull in a
+ // "known good" version of @types/node that works well with Deno and
+ // not necessarily the latest version. For example, we might only be
+ // compatible with Node vX, but then Node vY is published so we wouldn't
+ // want to pull that in.
+ // Note: If the user doesn't want this behavior, then they can specify an
+ // explicit version.
+ if tag == "latest" && info.name == "@types/node" {
+ return get_resolved_package_version_and_info(
+ &VersionReq::parse_from_npm("18.0.0 - 18.11.18").unwrap(),
+ info,
+ parent,
+ );
+ }
+
+ if let Some(version) = info.dist_tags.get(tag) {
+ match info.versions.get(version) {
+ Some(info) => Ok(VersionAndInfo {
+ version: Version::parse_from_npm(version)?,
+ info,
+ }),
+ None => {
+ bail!(
+ "Could not find version '{}' referenced in dist-tag '{}'.",
+ version,
+ tag,
+ )
+ }
+ }
+ } else {
+ bail!("Could not find dist-tag '{}'.", tag)
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use std::collections::HashMap;
+
+ use deno_graph::npm::NpmPackageReqReference;
+
+ use super::*;
+
+ #[test]
+ fn test_get_resolved_package_version_and_info() {
+ // dist tag where version doesn't exist
+ let package_ref = NpmPackageReqReference::from_str("npm:test").unwrap();
+ let package_info = NpmPackageInfo {
+ name: "test".to_string(),
+ versions: HashMap::new(),
+ dist_tags: HashMap::from([(
+ "latest".to_string(),
+ "1.0.0-alpha".to_string(),
+ )]),
+ };
+ let result = get_resolved_package_version_and_info(
+ package_ref
+ .req
+ .version_req
+ .as_ref()
+ .unwrap_or(&*LATEST_VERSION_REQ),
+ &package_info,
+ None,
+ );
+ assert_eq!(
+ result.err().unwrap().to_string(),
+ "Could not find version '1.0.0-alpha' referenced in dist-tag 'latest'."
+ );
+
+ // dist tag where version is a pre-release
+ let package_ref = NpmPackageReqReference::from_str("npm:test").unwrap();
+ let package_info = NpmPackageInfo {
+ name: "test".to_string(),
+ versions: HashMap::from([
+ ("0.1.0".to_string(), NpmPackageVersionInfo::default()),
+ (
+ "1.0.0-alpha".to_string(),
+ NpmPackageVersionInfo {
+ version: "0.1.0-alpha".to_string(),
+ ..Default::default()
+ },
+ ),
+ ]),
+ dist_tags: HashMap::from([(
+ "latest".to_string(),
+ "1.0.0-alpha".to_string(),
+ )]),
+ };
+ let result = get_resolved_package_version_and_info(
+ package_ref
+ .req
+ .version_req
+ .as_ref()
+ .unwrap_or(&*LATEST_VERSION_REQ),
+ &package_info,
+ None,
+ );
+ assert_eq!(result.unwrap().version.to_string(), "1.0.0-alpha");
+ }
+}
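
The new common.rs module pulls the version-resolution helpers out of graph.rs so both the graph resolver and the snapshot code can share them. A hedged sketch of resolve_best_package_version_and_info with no versions already resolved in the graph; not part of the diff, and the "example" package data is invented for illustration:

    use std::collections::HashMap;
    use deno_graph::semver::VersionReq;

    let package_info = NpmPackageInfo {
      name: "example".to_string(),
      versions: HashMap::from([
        (
          "1.0.0".to_string(),
          NpmPackageVersionInfo {
            version: "1.0.0".to_string(),
            ..Default::default()
          },
        ),
        (
          "1.1.0".to_string(),
          NpmPackageVersionInfo {
            version: "1.1.0".to_string(),
            ..Default::default()
          },
        ),
      ]),
      dist_tags: HashMap::new(),
    };
    let req = VersionReq::parse_from_npm("^1.0.0").unwrap();
    // No versions of "example" exist in the graph yet, so the helper falls back
    // to scanning the registry info and picks the highest matching version.
    let best = resolve_best_package_version_and_info(
      &req,
      &package_info,
      std::iter::empty(),
    )
    .unwrap();
    assert_eq!(best.version.to_string(), "1.1.0");
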
diff --git a/cli/npm/resolution/graph.rs b/cli/npm/resolution/graph.rs
index 32c00826b..76ec2e62c 100644
--- a/cli/npm/resolution/graph.rs
+++ b/cli/npm/resolution/graph.rs
@@ -1,198 +1,353 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
-use std::borrow::Cow;
+use std::collections::hash_map::DefaultHasher;
use std::collections::BTreeMap;
-use std::collections::BTreeSet;
use std::collections::HashMap;
+use std::collections::HashSet;
use std::collections::VecDeque;
+use std::hash::Hash;
+use std::hash::Hasher;
use std::sync::Arc;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
-use deno_core::futures;
use deno_core::parking_lot::Mutex;
-use deno_core::parking_lot::MutexGuard;
+use deno_graph::npm::NpmPackageNv;
use deno_graph::npm::NpmPackageReq;
-use deno_graph::semver::Version;
use deno_graph::semver::VersionReq;
use log::debug;
-use once_cell::sync::Lazy;
-use crate::npm::cache::should_sync_download;
use crate::npm::registry::NpmDependencyEntry;
use crate::npm::registry::NpmDependencyEntryKind;
use crate::npm::registry::NpmPackageInfo;
use crate::npm::registry::NpmPackageVersionInfo;
+use crate::npm::resolution::common::resolve_best_package_version_and_info;
+use crate::npm::resolution::snapshot::SnapshotPackageCopyIndexResolver;
use crate::npm::NpmRegistryApi;
+use super::common::version_req_satisfies;
+use super::common::LATEST_VERSION_REQ;
use super::snapshot::NpmResolutionSnapshot;
-use super::snapshot::SnapshotPackageCopyIndexResolver;
-use super::NpmPackageNodeId;
+use super::NpmPackageId;
use super::NpmResolutionPackage;
-pub static LATEST_VERSION_REQ: Lazy<VersionReq> =
- Lazy::new(|| VersionReq::parse_from_specifier("latest").unwrap());
-
-/// A memory efficient path of visited name and versions in the graph
-/// which is used to detect cycles.
-///
-/// note(dsherret): although this is definitely more memory efficient
-/// than a HashSet, I haven't done any tests about whether this is
-/// faster in practice.
-#[derive(Default, Clone)]
-struct VisitedVersionsPath {
- previous_node: Option<Arc<VisitedVersionsPath>>,
- visited_version_key: String,
+/// A unique identifier to a node in the graph.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
+struct NodeId(u32);
+
+/// A resolved package in the resolution graph.
+#[derive(Debug)]
+struct Node {
+ /// The specifier to child relationship in the graph. The specifier is
+ /// the key in an npm package's dependencies map (ex. "express"). We
+ /// use a BTreeMap for some determinism when creating the snapshot.
+ ///
+ /// Note: We don't want to store the children as a `NodeRef` because
+ /// multiple paths might visit through the children and we don't want
+ /// to share those references with those paths.
+ pub children: BTreeMap<String, NodeId>,
+ /// Whether the node has demonstrated to have no peer dependencies in its
+ /// descendants. If this is true then we can skip analyzing this node
+ /// again when we encounter it another time in the dependency tree, which
+ /// is much faster.
+ pub no_peers: bool,
}
-impl VisitedVersionsPath {
- pub fn new(id: &NpmPackageNodeId) -> Arc<Self> {
- Arc::new(Self {
- previous_node: None,
- visited_version_key: Self::id_to_key(id),
- })
+#[derive(Clone)]
+enum ResolvedIdPeerDep {
+ /// This is a reference to the parent instead of the child because we only have a
+ /// node reference to the parent, since we've traversed it, but the child node may
+ /// change from under it.
+ ParentReference {
+ parent: GraphPathNodeOrRoot,
+ child_pkg_nv: NpmPackageNv,
+ },
+ /// A node that was created during snapshotting and is not being used in any path.
+ SnapshotNodeId(NodeId),
+}
+
+impl ResolvedIdPeerDep {
+ pub fn current_state_hash(&self) -> u64 {
+ let mut hasher = DefaultHasher::new();
+ self.current_state_hash_with_hasher(&mut hasher);
+ hasher.finish()
}
- pub fn with_parent(
- self: &Arc<VisitedVersionsPath>,
- parent: &NodeParent,
- ) -> Option<Arc<Self>> {
- match parent {
- NodeParent::Node(id) => self.with_id(id),
- NodeParent::Req => Some(self.clone()),
+ pub fn current_state_hash_with_hasher(&self, hasher: &mut DefaultHasher) {
+ match self {
+ ResolvedIdPeerDep::ParentReference {
+ parent,
+ child_pkg_nv,
+ } => {
+ match parent {
+ GraphPathNodeOrRoot::Root(root) => root.hash(hasher),
+ GraphPathNodeOrRoot::Node(node) => node.node_id().hash(hasher),
+ }
+ child_pkg_nv.hash(hasher);
+ }
+ ResolvedIdPeerDep::SnapshotNodeId(node_id) => {
+ node_id.hash(hasher);
+ }
}
}
+}
- pub fn with_id(
- self: &Arc<VisitedVersionsPath>,
- id: &NpmPackageNodeId,
- ) -> Option<Arc<Self>> {
- if self.has_visited(id) {
- None
- } else {
- Some(Arc::new(Self {
- previous_node: Some(self.clone()),
- visited_version_key: Self::id_to_key(id),
- }))
+/// A pending resolved identifier used in the graph. At the end of resolution, these
+/// will become fully resolved to an `NpmPackageId`.
+#[derive(Clone)]
+struct ResolvedId {
+ nv: NpmPackageNv,
+ peer_dependencies: Vec<ResolvedIdPeerDep>,
+}
+
+impl ResolvedId {
+ /// Gets a hash of the resolved identifier at this current moment in time.
+ ///
+ /// WARNING: A resolved identifier references a value that could change in
+ /// the future, so this should be used with that in mind.
+ pub fn current_state_hash(&self) -> u64 {
+ let mut hasher = DefaultHasher::new();
+ self.nv.hash(&mut hasher);
+ for dep in &self.peer_dependencies {
+ dep.current_state_hash_with_hasher(&mut hasher);
}
+ hasher.finish()
}
- pub fn has_visited(self: &Arc<Self>, id: &NpmPackageNodeId) -> bool {
- let mut maybe_next_node = Some(self);
- let key = Self::id_to_key(id);
- while let Some(next_node) = maybe_next_node {
- if next_node.visited_version_key == key {
- return true;
+ pub fn push_peer_dep(&mut self, peer_dep: ResolvedIdPeerDep) {
+ let new_hash = peer_dep.current_state_hash();
+ for dep in &self.peer_dependencies {
+ if new_hash == dep.current_state_hash() {
+ return;
}
- maybe_next_node = next_node.previous_node.as_ref();
}
- false
+ self.peer_dependencies.push(peer_dep);
+ }
+}
+
+/// Mappings of node identifiers to resolved identifiers. Each node has exactly
+/// one resolved identifier.
+#[derive(Default)]
+struct ResolvedNodeIds {
+ node_to_resolved_id: HashMap<NodeId, (ResolvedId, u64)>,
+ resolved_to_node_id: HashMap<u64, NodeId>,
+}
+
+impl ResolvedNodeIds {
+ pub fn set(&mut self, node_id: NodeId, resolved_id: ResolvedId) {
+ let resolved_id_hash = resolved_id.current_state_hash();
+ if let Some((_, old_resolved_id_key)) = self
+ .node_to_resolved_id
+ .insert(node_id, (resolved_id, resolved_id_hash))
+ {
+ // ensure the old resolved id key is removed as it might be stale
+ self.resolved_to_node_id.remove(&old_resolved_id_key);
+ }
+ self.resolved_to_node_id.insert(resolved_id_hash, node_id);
}
- fn id_to_key(id: &NpmPackageNodeId) -> String {
- format!("{}@{}", id.name, id.version)
+ pub fn get(&self, node_id: NodeId) -> Option<&ResolvedId> {
+ self.node_to_resolved_id.get(&node_id).map(|(id, _)| id)
+ }
+
+ pub fn get_node_id(&self, resolved_id: &ResolvedId) -> Option<NodeId> {
+ self
+ .resolved_to_node_id
+ .get(&resolved_id.current_state_hash())
+ .copied()
}
}
-/// A memory efficient path of the visited specifiers in the tree.
-#[derive(Default, Clone)]
-struct GraphSpecifierPath {
- previous_node: Option<Arc<GraphSpecifierPath>>,
+/// A pointer to a specific node in a graph path. The underlying node id
+/// may change as peer dependencies are created.
+#[derive(Clone, Debug)]
+struct NodeIdRef(Arc<Mutex<NodeId>>);
+
+impl NodeIdRef {
+ pub fn new(node_id: NodeId) -> Self {
+ NodeIdRef(Arc::new(Mutex::new(node_id)))
+ }
+
+ pub fn change(&self, node_id: NodeId) {
+ *self.0.lock() = node_id;
+ }
+
+ pub fn get(&self) -> NodeId {
+ *self.0.lock()
+ }
+}
+
+#[derive(Clone)]
+enum GraphPathNodeOrRoot {
+ Node(Arc<GraphPath>),
+ Root(NpmPackageNv),
+}
+
+/// Path through the graph that represents a traversal through the graph doing
+/// the dependency resolution. The graph tries to share duplicate package
+/// information and we try to avoid traversing parts of the graph that we know
+/// are resolved.
+#[derive(Clone)]
+struct GraphPath {
+ previous_node: Option<GraphPathNodeOrRoot>,
+ node_id_ref: NodeIdRef,
+ // todo(dsherret): I think we might be able to get rid of specifier and
+ // node version here, but I added them for extra protection for the time being.
specifier: String,
+ nv: NpmPackageNv,
}
-impl GraphSpecifierPath {
- pub fn new(specifier: String) -> Arc<Self> {
+impl GraphPath {
+ pub fn for_root(node_id: NodeId, nv: NpmPackageNv) -> Arc<Self> {
Arc::new(Self {
- previous_node: None,
- specifier,
+ previous_node: Some(GraphPathNodeOrRoot::Root(nv.clone())),
+ node_id_ref: NodeIdRef::new(node_id),
+ // use an empty specifier
+ specifier: "".to_string(),
+ nv,
})
}
- pub fn with_specifier(self: &Arc<Self>, specifier: String) -> Arc<Self> {
- Arc::new(Self {
- previous_node: Some(self.clone()),
- specifier,
- })
+ pub fn node_id(&self) -> NodeId {
+ self.node_id_ref.get()
}
- pub fn pop(&self) -> Option<&Arc<Self>> {
- self.previous_node.as_ref()
+ pub fn specifier(&self) -> &str {
+ &self.specifier
}
-}
-#[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)]
-enum NodeParent {
- /// These are top of the graph npm package requirements
- /// as specified in Deno code.
- Req,
- /// A reference to another node, which is a resolved package.
- Node(NpmPackageNodeId),
-}
+ pub fn change_id(&self, node_id: NodeId) {
+ self.node_id_ref.change(node_id)
+ }
-/// A resolved package in the resolution graph.
-#[derive(Debug)]
-struct Node {
- pub id: NpmPackageNodeId,
- /// If the node was forgotten due to having no parents.
- pub forgotten: bool,
- // Use BTreeMap and BTreeSet in order to create determinism
- // when going up and down the tree
- pub parents: BTreeMap<String, BTreeSet<NodeParent>>,
- pub children: BTreeMap<String, NpmPackageNodeId>,
- pub deps: Arc<Vec<NpmDependencyEntry>>,
- /// Whether the node has demonstrated to have no peer dependencies in its
- /// descendants. If this is true then we can skip analyzing this node
- /// again when we encounter it another time in the dependency tree, which
- /// is much faster.
- pub no_peers: bool,
-}
+ pub fn with_id(
+ self: &Arc<GraphPath>,
+ node_id: NodeId,
+ specifier: &str,
+ nv: NpmPackageNv,
+ ) -> Option<Arc<Self>> {
+ if self.has_visited(&nv) {
+ None
+ } else {
+ Some(Arc::new(Self {
+ previous_node: Some(GraphPathNodeOrRoot::Node(self.clone())),
+ node_id_ref: NodeIdRef::new(node_id),
+ specifier: specifier.to_string(),
+ nv,
+ }))
+ }
+ }
-impl Node {
- pub fn add_parent(&mut self, specifier: String, parent: NodeParent) {
- self.parents.entry(specifier).or_default().insert(parent);
+ /// Each time an identifier is added, we do a check to ensure
+ /// that we haven't previously visited this node. I suspect this
+ /// might be a little slow since it has to go up through the ancestors,
+ /// so some optimizations could be made here in the future.
+ pub fn has_visited(self: &Arc<Self>, nv: &NpmPackageNv) -> bool {
+ if self.nv == *nv {
+ return true;
+ }
+ let mut maybe_next_node = self.previous_node.as_ref();
+ while let Some(GraphPathNodeOrRoot::Node(next_node)) = maybe_next_node {
+ // we've visited this before, so stop
+ if next_node.nv == *nv {
+ return true;
+ }
+ maybe_next_node = next_node.previous_node.as_ref();
+ }
+ false
}
- pub fn remove_parent(&mut self, specifier: &str, parent: &NodeParent) {
- if let Some(parents) = self.parents.get_mut(specifier) {
- parents.remove(parent);
- if parents.is_empty() {
- self.parents.remove(specifier);
+ pub fn ancestors(&self) -> GraphPathAncestorIterator {
+ GraphPathAncestorIterator {
+ next: self.previous_node.as_ref(),
+ }
+ }
+}
+
+struct GraphPathAncestorIterator<'a> {
+ next: Option<&'a GraphPathNodeOrRoot>,
+}
+
+impl<'a> Iterator for GraphPathAncestorIterator<'a> {
+ type Item = &'a GraphPathNodeOrRoot;
+ fn next(&mut self) -> Option<Self::Item> {
+ if let Some(next) = self.next.take() {
+ if let GraphPathNodeOrRoot::Node(node) = next {
+ self.next = node.previous_node.as_ref();
}
+ Some(next)
+ } else {
+ None
}
}
}
-#[derive(Debug, Default)]
+#[derive(Default)]
pub struct Graph {
- package_reqs: HashMap<String, NpmPackageNodeId>,
- packages_by_name: HashMap<String, Vec<NpmPackageNodeId>>,
- // Ideally this value would be Rc<RefCell<Node>>, but we need to use a Mutex
- // because the lsp requires Send and this code is executed in the lsp.
- // Would be nice if the lsp wasn't Send.
- packages: HashMap<NpmPackageNodeId, Arc<Mutex<Node>>>,
+ /// Each requirement is mapped to a specific name and version.
+ package_reqs: HashMap<NpmPackageReq, NpmPackageNv>,
+ /// Then each name and version is mapped to an exact node id.
+ /// Note: Uses a BTreeMap in order to create some determinism
+ /// when creating the snapshot.
+ root_packages: BTreeMap<NpmPackageNv, NodeId>,
+ nodes_by_package_name: HashMap<String, Vec<NodeId>>,
+ nodes: HashMap<NodeId, Node>,
+ resolved_node_ids: ResolvedNodeIds,
// This will be set when creating from a snapshot, then
// inform the final snapshot creation.
- packages_to_copy_index: HashMap<NpmPackageNodeId, usize>,
+ packages_to_copy_index: HashMap<NpmPackageId, usize>,
}
impl Graph {
- pub fn from_snapshot(snapshot: NpmResolutionSnapshot) -> Self {
- fn fill_for_id(
+ pub fn from_snapshot(
+ snapshot: NpmResolutionSnapshot,
+ ) -> Result<Self, AnyError> {
+ fn get_or_create_graph_node(
graph: &mut Graph,
- id: &NpmPackageNodeId,
- packages: &HashMap<NpmPackageNodeId, NpmResolutionPackage>,
- ) -> Arc<Mutex<Node>> {
- let resolution = packages.get(id).unwrap();
- let (created, node) = graph.get_or_create_for_id(id);
- if created {
- for (name, child_id) in &resolution.dependencies {
- let child_node = fill_for_id(graph, child_id, packages);
- graph.set_child_parent_node(name, &child_node, id);
- }
+ resolved_id: &NpmPackageId,
+ packages: &HashMap<NpmPackageId, NpmResolutionPackage>,
+ created_package_ids: &mut HashMap<NpmPackageId, NodeId>,
+ ) -> Result<NodeId, AnyError> {
+ if let Some(id) = created_package_ids.get(resolved_id) {
+ return Ok(*id);
}
- node
+
+ let node_id = graph.create_node(&resolved_id.nv);
+ created_package_ids.insert(resolved_id.clone(), node_id);
+
+ let peer_dep_ids = resolved_id
+ .peer_dependencies
+ .iter()
+ .map(|peer_dep| {
+ Ok(ResolvedIdPeerDep::SnapshotNodeId(get_or_create_graph_node(
+ graph,
+ peer_dep,
+ packages,
+ created_package_ids,
+ )?))
+ })
+ .collect::<Result<Vec<_>, AnyError>>()?;
+ let graph_resolved_id = ResolvedId {
+ nv: resolved_id.nv.clone(),
+ peer_dependencies: peer_dep_ids,
+ };
+ graph.resolved_node_ids.set(node_id, graph_resolved_id);
+ let resolution = match packages.get(resolved_id) {
+ Some(resolved_id) => resolved_id,
+ // maybe the user messed around with the lockfile
+ None => bail!("not found package: {}", resolved_id.as_serialized()),
+ };
+ for (name, child_id) in &resolution.dependencies {
+ let child_node_id = get_or_create_graph_node(
+ graph,
+ child_id,
+ packages,
+ created_package_ids,
+ )?;
+ graph.set_child_parent_node(name, child_node_id, node_id);
+ }
+ Ok(node_id)
}
let mut graph = Self {
@@ -203,274 +358,360 @@ impl Graph {
.iter()
.map(|(id, p)| (id.clone(), p.copy_index))
.collect(),
+ package_reqs: snapshot.package_reqs,
..Default::default()
};
- for (package_req, id) in &snapshot.package_reqs {
- let node = fill_for_id(&mut graph, id, &snapshot.packages);
- let package_req_text = package_req.to_string();
- (*node)
- .lock()
- .add_parent(package_req_text.clone(), NodeParent::Req);
- graph.package_reqs.insert(package_req_text, id.clone());
+ let mut created_package_ids =
+ HashMap::with_capacity(snapshot.packages.len());
+ for (id, resolved_id) in snapshot.root_packages {
+ let node_id = get_or_create_graph_node(
+ &mut graph,
+ &resolved_id,
+ &snapshot.packages,
+ &mut created_package_ids,
+ )?;
+ graph.root_packages.insert(id, node_id);
}
- graph
+ Ok(graph)
}
pub fn has_package_req(&self, req: &NpmPackageReq) -> bool {
- self.package_reqs.contains_key(&req.to_string())
- }
-
- fn get_or_create_for_id(
- &mut self,
- id: &NpmPackageNodeId,
- ) -> (bool, Arc<Mutex<Node>>) {
- if let Some(node) = self.packages.get(id) {
- (false, node.clone())
- } else {
- let node = Arc::new(Mutex::new(Node {
- id: id.clone(),
- forgotten: false,
- parents: Default::default(),
- children: Default::default(),
- deps: Default::default(),
- no_peers: false,
- }));
- self
- .packages_by_name
- .entry(id.name.clone())
- .or_default()
- .push(id.clone());
- self.packages.insert(id.clone(), node.clone());
- (true, node)
- }
+ self.package_reqs.contains_key(req)
}
- fn borrow_node(&self, id: &NpmPackageNodeId) -> MutexGuard<Node> {
- (**self.packages.get(id).unwrap_or_else(|| {
- panic!("could not find id {} in the tree", id.as_serialized())
- }))
- .lock()
+ fn get_npm_pkg_id(&self, node_id: NodeId) -> NpmPackageId {
+ let resolved_id = self.resolved_node_ids.get(node_id).unwrap();
+ self.get_npm_pkg_id_from_resolved_id(resolved_id, HashSet::new())
}
- fn forget_orphan(&mut self, node_id: &NpmPackageNodeId) {
- if let Some(node) = self.packages.remove(node_id) {
- let mut node = (*node).lock();
- node.forgotten = true;
- assert_eq!(node.parents.len(), 0);
-
- // Remove the id from the list of packages by name.
- let packages_with_name =
- self.packages_by_name.get_mut(&node.id.name).unwrap();
- let remove_index = packages_with_name
- .iter()
- .position(|id| id == &node.id)
- .unwrap();
- packages_with_name.remove(remove_index);
-
- let parent = NodeParent::Node(node.id.clone());
- for (specifier, child_id) in &node.children {
- let mut child = self.borrow_node(child_id);
- child.remove_parent(specifier, &parent);
- if child.parents.is_empty() {
- drop(child); // stop borrowing from self
- self.forget_orphan(child_id);
+ fn get_npm_pkg_id_from_resolved_id(
+ &self,
+ resolved_id: &ResolvedId,
+ seen: HashSet<NodeId>,
+ ) -> NpmPackageId {
+ if resolved_id.peer_dependencies.is_empty() {
+ NpmPackageId {
+ nv: resolved_id.nv.clone(),
+ peer_dependencies: Vec::new(),
+ }
+ } else {
+ let mut npm_pkg_id = NpmPackageId {
+ nv: resolved_id.nv.clone(),
+ peer_dependencies: Vec::with_capacity(
+ resolved_id.peer_dependencies.len(),
+ ),
+ };
+ let mut seen_children_resolved_ids =
+ HashSet::with_capacity(resolved_id.peer_dependencies.len());
+ for peer_dep in &resolved_id.peer_dependencies {
+ let maybe_node_and_resolved_id = match peer_dep {
+ ResolvedIdPeerDep::SnapshotNodeId(node_id) => self
+ .resolved_node_ids
+ .get(*node_id)
+ .map(|resolved_id| (*node_id, resolved_id)),
+ ResolvedIdPeerDep::ParentReference {
+ parent,
+ child_pkg_nv: child_nv,
+ } => match &parent {
+ GraphPathNodeOrRoot::Root(_) => {
+ self.root_packages.get(child_nv).and_then(|node_id| {
+ self
+ .resolved_node_ids
+ .get(*node_id)
+ .map(|resolved_id| (*node_id, resolved_id))
+ })
+ }
+ GraphPathNodeOrRoot::Node(parent_path) => {
+ self.nodes.get(&parent_path.node_id()).and_then(|parent| {
+ parent
+ .children
+ .values()
+ .filter_map(|child_id| {
+ let child_id = *child_id;
+ self
+ .resolved_node_ids
+ .get(child_id)
+ .map(|resolved_id| (child_id, resolved_id))
+ })
+ .find(|(_, resolved_id)| resolved_id.nv == *child_nv)
+ })
+ }
+ },
+ };
+ // this should always be set
+ debug_assert!(maybe_node_and_resolved_id.is_some());
+ if let Some((child_id, child_resolved_id)) = maybe_node_and_resolved_id
+ {
+ let mut new_seen = seen.clone();
+ if new_seen.insert(child_id) {
+ let child_peer = self.get_npm_pkg_id_from_resolved_id(
+ child_resolved_id,
+ new_seen.clone(),
+ );
+
+ // This condition prevents a name showing up in the peer_dependencies
+ // list that matches the current name. Checking just the name and
+ // version should be sufficient because the rest of the peer dependency
+ // resolutions should be the same
+ let is_pkg_same = child_peer.nv == npm_pkg_id.nv;
+ if !is_pkg_same
+ && seen_children_resolved_ids.insert(child_peer.clone())
+ {
+ npm_pkg_id.peer_dependencies.push(child_peer);
+ }
+ }
}
}
+ npm_pkg_id
}
}
- fn set_child_parent(
+ fn get_or_create_for_id(
&mut self,
- specifier: &str,
- child: &Mutex<Node>,
- parent: &NodeParent,
- ) {
- match parent {
- NodeParent::Node(parent_id) => {
- self.set_child_parent_node(specifier, child, parent_id);
- }
- NodeParent::Req => {
- let mut node = (*child).lock();
- node.add_parent(specifier.to_string(), parent.clone());
- self
- .package_reqs
- .insert(specifier.to_string(), node.id.clone());
- }
+ resolved_id: &ResolvedId,
+ ) -> (bool, NodeId) {
+ if let Some(node_id) = self.resolved_node_ids.get_node_id(resolved_id) {
+ return (false, node_id);
}
+
+ let node_id = self.create_node(&resolved_id.nv);
+ self.resolved_node_ids.set(node_id, resolved_id.clone());
+ (true, node_id)
}
- fn set_child_parent_node(
- &mut self,
- specifier: &str,
- child: &Mutex<Node>,
- parent_id: &NpmPackageNodeId,
- ) {
- let mut child = (*child).lock();
- assert_ne!(child.id, *parent_id);
- let mut parent = (**self.packages.get(parent_id).unwrap_or_else(|| {
- panic!(
- "could not find {} in list of packages when setting child {}",
- parent_id.as_serialized(),
- child.id.as_serialized()
- )
- }))
- .lock();
- parent
- .children
- .insert(specifier.to_string(), child.id.clone());
- child
- .add_parent(specifier.to_string(), NodeParent::Node(parent.id.clone()));
+ fn create_node(&mut self, pkg_nv: &NpmPackageNv) -> NodeId {
+ let node_id = NodeId(self.nodes.len() as u32);
+ let node = Node {
+ children: Default::default(),
+ no_peers: false,
+ };
+
+ self
+ .nodes_by_package_name
+ .entry(pkg_nv.name.clone())
+ .or_default()
+ .push(node_id);
+ self.nodes.insert(node_id, node);
+
+ node_id
+ }
+
+ fn borrow_node_mut(&mut self, node_id: NodeId) -> &mut Node {
+ self.nodes.get_mut(&node_id).unwrap()
}
- fn remove_child_parent(
+ fn set_child_parent_node(
&mut self,
specifier: &str,
- child_id: &NpmPackageNodeId,
- parent: &NodeParent,
+ child_id: NodeId,
+ parent_id: NodeId,
) {
- match parent {
- NodeParent::Node(parent_id) => {
- let mut node = self.borrow_node(parent_id);
- if let Some(removed_child_id) = node.children.remove(specifier) {
- assert_eq!(removed_child_id, *child_id);
- }
- }
- NodeParent::Req => {
- if let Some(removed_child_id) = self.package_reqs.remove(specifier) {
- assert_eq!(removed_child_id, *child_id);
- }
- }
- }
- self.borrow_node(child_id).remove_parent(specifier, parent);
+ assert_ne!(child_id, parent_id);
+ let parent = self.borrow_node_mut(parent_id);
+ parent.children.insert(specifier.to_string(), child_id);
}
pub async fn into_snapshot(
self,
- api: &impl NpmRegistryApi,
+ api: &NpmRegistryApi,
) -> Result<NpmResolutionSnapshot, AnyError> {
+ let packages_to_resolved_id = self
+ .nodes
+ .keys()
+ .map(|node_id| (*node_id, self.get_npm_pkg_id(*node_id)))
+ .collect::<HashMap<_, _>>();
let mut copy_index_resolver =
SnapshotPackageCopyIndexResolver::from_map_with_capacity(
self.packages_to_copy_index,
- self.packages.len(),
+ self.nodes.len(),
);
-
- // Iterate through the packages vector in each packages_by_name in order
- // to set the copy index as this will be deterministic rather than
- // iterating over the hashmap below.
- for packages in self.packages_by_name.values() {
- if packages.len() > 1 {
- for id in packages {
- copy_index_resolver.resolve(id);
- }
+ let mut packages = HashMap::with_capacity(self.nodes.len());
+ let mut traversed_node_ids = HashSet::with_capacity(self.nodes.len());
+ let mut pending = VecDeque::new();
+ for root_id in self.root_packages.values() {
+ if traversed_node_ids.insert(*root_id) {
+ pending.push_back(*root_id);
}
}
-
- let mut packages = HashMap::with_capacity(self.packages.len());
- for (id, node) in self.packages {
+ while let Some(node_id) = pending.pop_front() {
+ let node = self.nodes.get(&node_id).unwrap();
+ let resolved_id = packages_to_resolved_id.get(&node_id).unwrap();
+ // todo(dsherret): grab this from the dep entry cache, which should have it
let dist = api
- .package_version_info(&id.name, &id.version)
+ .package_version_info(&resolved_id.nv)
.await?
- .unwrap()
+ .unwrap_or_else(|| panic!("missing: {:?}", resolved_id.nv))
.dist;
- let node = node.lock();
packages.insert(
- id.clone(),
+ (*resolved_id).clone(),
NpmResolutionPackage {
- copy_index: copy_index_resolver.resolve(&id),
- id,
+ copy_index: copy_index_resolver.resolve(resolved_id),
+ pkg_id: (*resolved_id).clone(),
dist,
dependencies: node
.children
.iter()
- .map(|(key, value)| (key.clone(), value.clone()))
+ .map(|(key, value)| {
+ (
+ key.clone(),
+ packages_to_resolved_id
+ .get(value)
+ .unwrap_or_else(|| {
+ panic!("{node_id:?} -- missing child: {value:?}")
+ })
+ .clone(),
+ )
+ })
.collect(),
},
);
+ for child_id in node.children.values() {
+ if traversed_node_ids.insert(*child_id) {
+ pending.push_back(*child_id);
+ }
+ }
}
Ok(NpmResolutionSnapshot {
- package_reqs: self
- .package_reqs
+ root_packages: self
+ .root_packages
.into_iter()
- .map(|(specifier, id)| {
- (NpmPackageReq::from_str(&specifier).unwrap(), id)
+ .map(|(id, node_id)| {
+ (id, packages_to_resolved_id.get(&node_id).unwrap().clone())
+ })
+ .collect(),
+ packages_by_name: self
+ .nodes_by_package_name
+ .into_iter()
+ .map(|(name, ids)| {
+ (
+ name,
+ ids
+ .into_iter()
+ .filter(|id| traversed_node_ids.contains(id))
+ .map(|id| packages_to_resolved_id.get(&id).unwrap().clone())
+ .collect(),
+ )
})
.collect(),
- packages_by_name: self.packages_by_name,
packages,
+ package_reqs: self.package_reqs,
})
}
-}
-pub struct GraphDependencyResolver<'a, TNpmRegistryApi: NpmRegistryApi> {
- graph: &'a mut Graph,
- api: &'a TNpmRegistryApi,
- pending_unresolved_nodes:
- VecDeque<(Arc<VisitedVersionsPath>, Arc<Mutex<Node>>)>,
-}
-
-impl<'a, TNpmRegistryApi: NpmRegistryApi>
- GraphDependencyResolver<'a, TNpmRegistryApi>
-{
- pub fn new(graph: &'a mut Graph, api: &'a TNpmRegistryApi) -> Self {
- Self {
- graph,
- api,
- pending_unresolved_nodes: Default::default(),
- }
- }
-
- fn resolve_best_package_version_and_info<'info>(
- &self,
- version_req: &VersionReq,
- package_info: &'info NpmPackageInfo,
- ) -> Result<VersionAndInfo<'info>, AnyError> {
- if let Some(version) =
- self.resolve_best_package_version(package_info, version_req)?
- {
- match package_info.versions.get(&version.to_string()) {
- Some(version_info) => Ok(VersionAndInfo {
- version,
- info: version_info,
- }),
- None => {
- bail!(
- "could not find version '{}' for '{}'",
- version,
- &package_info.name
+ // Debugging methods
+
+ #[cfg(debug_assertions)]
+ #[allow(unused)]
+ fn output_path(&self, path: &Arc<GraphPath>) {
+ eprintln!("-----------");
+ self.output_node(path.node_id(), false);
+ for path in path.ancestors() {
+ match path {
+ GraphPathNodeOrRoot::Node(node) => {
+ self.output_node(node.node_id(), false)
+ }
+ GraphPathNodeOrRoot::Root(pkg_id) => {
+ let node_id = self.root_packages.get(pkg_id).unwrap();
+ eprintln!(
+ "Root: {} ({}: {})",
+ pkg_id,
+ node_id.0,
+ self.get_npm_pkg_id(*node_id).as_serialized()
)
}
}
- } else {
- // get the information
- get_resolved_package_version_and_info(version_req, package_info, None)
}
+ eprintln!("-----------");
}
- fn resolve_best_package_version(
- &self,
- package_info: &NpmPackageInfo,
- version_req: &VersionReq,
- ) -> Result<Option<Version>, AnyError> {
- let mut maybe_best_version: Option<&Version> = None;
- if let Some(ids) = self.graph.packages_by_name.get(&package_info.name) {
- for version in ids.iter().map(|id| &id.version) {
- if version_req_satisfies(version_req, version, package_info, None)? {
- let is_best_version = maybe_best_version
- .as_ref()
- .map(|best_version| (*best_version).cmp(version).is_lt())
- .unwrap_or(true);
- if is_best_version {
- maybe_best_version = Some(version);
- }
- }
+ #[cfg(debug_assertions)]
+ #[allow(unused)]
+ fn output_node(&self, node_id: NodeId, show_children: bool) {
+ eprintln!(
+ "{:>4}: {}",
+ node_id.0,
+ self.get_npm_pkg_id(node_id).as_serialized()
+ );
+
+ if show_children {
+ let node = self.nodes.get(&node_id).unwrap();
+ eprintln!(" Children:");
+ for (specifier, child_id) in &node.children {
+ eprintln!(" {}: {}", specifier, child_id.0);
}
}
- Ok(maybe_best_version.cloned())
}
- pub fn has_package_req(&self, req: &NpmPackageReq) -> bool {
- self.graph.has_package_req(req)
+ #[cfg(debug_assertions)]
+ #[allow(unused)]
+ pub fn output_nodes(&self) {
+ eprintln!("~~~");
+ let mut node_ids = self
+ .resolved_node_ids
+ .node_to_resolved_id
+ .keys()
+ .copied()
+ .collect::<Vec<_>>();
+ node_ids.sort_by(|a, b| a.0.cmp(&b.0));
+ for node_id in node_ids {
+ self.output_node(node_id, true);
+ }
+ eprintln!("~~~");
+ }
+}
+
+#[derive(Default)]
+struct DepEntryCache(HashMap<NpmPackageNv, Arc<Vec<NpmDependencyEntry>>>);
+
+impl DepEntryCache {
+ pub fn store(
+ &mut self,
+ nv: NpmPackageNv,
+ version_info: &NpmPackageVersionInfo,
+ ) -> Result<Arc<Vec<NpmDependencyEntry>>, AnyError> {
+ debug_assert!(!self.0.contains_key(&nv)); // we should not be re-inserting
+ let mut deps = version_info
+ .dependencies_as_entries()
+ .with_context(|| format!("npm package: {nv}"))?;
+    // Sort by name ascending and then version descending
+    // so the entries are resolved in that order
+ deps.sort();
+ let deps = Arc::new(deps);
+ self.0.insert(nv, deps.clone());
+ Ok(deps)
+ }
+
+ pub fn get(
+ &self,
+ id: &NpmPackageNv,
+ ) -> Option<&Arc<Vec<NpmDependencyEntry>>> {
+ self.0.get(id)
+ }
+}
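
The sort in store() relies on the entries ordering by name ascending and then version descending. A small illustration of that ordering with a hypothetical simplified entry type (the real NpmDependencyEntry supplies its own Ord; this only demonstrates the ordering the comment describes):

struct Entry {
  name: String,
  version: (u64, u64, u64),
}

fn sort_entries(entries: &mut [Entry]) {
  // name ascending, then version descending
  entries.sort_by(|a, b| a.name.cmp(&b.name).then_with(|| b.version.cmp(&a.version)));
}

fn main() {
  let mut entries = vec![
    Entry { name: "b".to_string(), version: (1, 0, 0) },
    Entry { name: "a".to_string(), version: (1, 0, 0) },
    Entry { name: "a".to_string(), version: (2, 0, 0) },
  ];
  sort_entries(&mut entries);
  assert_eq!(entries[0].name, "a");
  assert_eq!(entries[0].version, (2, 0, 0)); // newest "a" comes first
}
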
+
+struct UnresolvedOptionalPeer {
+ specifier: String,
+ graph_path: Arc<GraphPath>,
+}
+
+pub struct GraphDependencyResolver<'a> {
+ graph: &'a mut Graph,
+ api: &'a NpmRegistryApi,
+ pending_unresolved_nodes: VecDeque<Arc<GraphPath>>,
+ unresolved_optional_peers: HashMap<NpmPackageNv, Vec<UnresolvedOptionalPeer>>,
+ dep_entry_cache: DepEntryCache,
+}
+
+impl<'a> GraphDependencyResolver<'a> {
+ pub fn new(graph: &'a mut Graph, api: &'a NpmRegistryApi) -> Self {
+ Self {
+ graph,
+ api,
+ pending_unresolved_nodes: Default::default(),
+ unresolved_optional_peers: Default::default(),
+ dep_entry_cache: Default::default(),
+ }
}
pub fn add_package_req(
@@ -478,7 +719,7 @@ impl<'a, TNpmRegistryApi: NpmRegistryApi>
package_req: &NpmPackageReq,
package_info: &NpmPackageInfo,
) -> Result<(), AnyError> {
- let (_, node) = self.resolve_node_from_info(
+ let (pkg_id, node_id) = self.resolve_node_from_info(
&package_req.name,
package_req
.version_req
@@ -487,12 +728,14 @@ impl<'a, TNpmRegistryApi: NpmRegistryApi>
package_info,
None,
)?;
- self.graph.set_child_parent(
- &package_req.to_string(),
- &node,
- &NodeParent::Req,
- );
- self.try_add_pending_unresolved_node(None, &node);
+ self
+ .graph
+ .package_reqs
+ .insert(package_req.clone(), pkg_id.clone());
+ self.graph.root_packages.insert(pkg_id.clone(), node_id);
+ self
+ .pending_unresolved_nodes
+ .push_back(GraphPath::for_root(node_id, pkg_id));
Ok(())
}
@@ -500,63 +743,55 @@ impl<'a, TNpmRegistryApi: NpmRegistryApi>
&mut self,
entry: &NpmDependencyEntry,
package_info: &NpmPackageInfo,
- parent_id: &NpmPackageNodeId,
- visited_versions: &Arc<VisitedVersionsPath>,
- ) -> Result<Arc<Mutex<Node>>, AnyError> {
- let (id, node) = self.resolve_node_from_info(
+ graph_path: &Arc<GraphPath>,
+ ) -> Result<NodeId, AnyError> {
+ debug_assert_eq!(entry.kind, NpmDependencyEntryKind::Dep);
+ let parent_id = graph_path.node_id();
+ let (_, node_id) = self.resolve_node_from_info(
&entry.name,
- match entry.kind {
- NpmDependencyEntryKind::Dep => &entry.version_req,
- // when resolving a peer dependency as a dependency, it should
- // use the "dependencies" entry version requirement if it exists
- NpmDependencyEntryKind::Peer | NpmDependencyEntryKind::OptionalPeer => {
- entry
- .peer_dep_version_req
- .as_ref()
- .unwrap_or(&entry.version_req)
- }
- },
+ &entry.version_req,
package_info,
Some(parent_id),
)?;
    // Some packages may resolve to themselves as a dependency. If this occurs,
// just ignore adding these as dependencies because this is likely a mistake
// in the package.
- if id != *parent_id {
- self.graph.set_child_parent(
+ if node_id != parent_id {
+ self.graph.set_child_parent_node(
+ &entry.bare_specifier,
+ node_id,
+ parent_id,
+ );
+ self.try_add_pending_unresolved_node(
+ graph_path,
+ node_id,
&entry.bare_specifier,
- &node,
- &NodeParent::Node(parent_id.clone()),
);
- self.try_add_pending_unresolved_node(Some(visited_versions), &node);
}
- Ok(node)
+ Ok(node_id)
}
fn try_add_pending_unresolved_node(
&mut self,
- maybe_previous_visited_versions: Option<&Arc<VisitedVersionsPath>>,
- node: &Arc<Mutex<Node>>,
+ path: &Arc<GraphPath>,
+ node_id: NodeId,
+ specifier: &str,
) {
- let node_id = {
- let node = node.lock();
- if node.no_peers {
- return; // skip, no need to analyze this again
- }
- node.id.clone()
- };
- let visited_versions = match maybe_previous_visited_versions {
- Some(previous_visited_versions) => {
- match previous_visited_versions.with_id(&node_id) {
- Some(visited_versions) => visited_versions,
- None => return, // circular, don't visit this node
- }
- }
- None => VisitedVersionsPath::new(&node_id),
+ if self.graph.nodes.get(&node_id).unwrap().no_peers {
+ return; // skip, no need to analyze this again
+ }
+ let node_nv = self
+ .graph
+ .resolved_node_ids
+ .get(node_id)
+ .unwrap()
+ .nv
+ .clone();
+ let new_path = match path.with_id(node_id, specifier, node_nv) {
+ Some(visited_versions) => visited_versions,
+ None => return, // circular, don't visit this node
};
- self
- .pending_unresolved_nodes
- .push_back((visited_versions, node.clone()));
+ self.pending_unresolved_nodes.push_back(new_path);
}
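
The with_id call above is what cuts off circular dependencies: a path refuses to grow when the package name/version being added already appears among its ancestors. A minimal sketch of that check over a hypothetical linked path type (not the real GraphPath):

use std::rc::Rc;

// Each path segment remembers the package it resolved to and links back to
// its parent segment.
struct PathNode {
  nv: String, // "name@version"
  previous: Option<Rc<PathNode>>,
}

fn with_id(path: &Rc<PathNode>, nv: &str) -> Option<Rc<PathNode>> {
  // walk up the ancestors; if this name@version was already visited,
  // the path is circular and is not extended
  let mut current = Some(path.clone());
  while let Some(node) = current {
    if node.nv == nv {
      return None;
    }
    current = node.previous.clone();
  }
  Some(Rc::new(PathNode {
    nv: nv.to_string(),
    previous: Some(path.clone()),
  }))
}

fn main() {
  let root = Rc::new(PathNode { nv: "package-a@1.0.0".to_string(), previous: None });
  let child = with_id(&root, "package-b@1.0.0").unwrap();
  assert!(with_id(&child, "package-a@1.0.0").is_none()); // circular, refused
}
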
fn resolve_node_from_info(
@@ -564,123 +799,187 @@ impl<'a, TNpmRegistryApi: NpmRegistryApi>
pkg_req_name: &str,
version_req: &VersionReq,
package_info: &NpmPackageInfo,
- parent_id: Option<&NpmPackageNodeId>,
- ) -> Result<(NpmPackageNodeId, Arc<Mutex<Node>>), AnyError> {
- let version_and_info =
- self.resolve_best_package_version_and_info(version_req, package_info)?;
- let id = NpmPackageNodeId {
- name: package_info.name.to_string(),
- version: version_and_info.version.clone(),
+ parent_id: Option<NodeId>,
+ ) -> Result<(NpmPackageNv, NodeId), AnyError> {
+ let version_and_info = resolve_best_package_version_and_info(
+ version_req,
+ package_info,
+ self
+ .graph
+ .nodes_by_package_name
+ .entry(package_info.name.clone())
+ .or_default()
+ .iter()
+ .map(|node_id| {
+ &self
+ .graph
+ .resolved_node_ids
+ .get(*node_id)
+ .unwrap()
+ .nv
+ .version
+ }),
+ )?;
+ let resolved_id = ResolvedId {
+ nv: NpmPackageNv {
+ name: package_info.name.to_string(),
+ version: version_and_info.version.clone(),
+ },
peer_dependencies: Vec::new(),
};
+ let (_, node_id) = self.graph.get_or_create_for_id(&resolved_id);
+ let pkg_id = resolved_id.nv;
+
+ let has_deps = if let Some(deps) = self.dep_entry_cache.get(&pkg_id) {
+ !deps.is_empty()
+ } else {
+ let deps = self
+ .dep_entry_cache
+ .store(pkg_id.clone(), version_and_info.info)?;
+ !deps.is_empty()
+ };
+
+ if !has_deps {
+      // ensure this flag is set if it isn't already, as it's an optimization
+ let mut node = self.graph.borrow_node_mut(node_id);
+ node.no_peers = true;
+ }
+
debug!(
"{} - Resolved {}@{} to {}",
match parent_id {
- Some(id) => id.as_serialized(),
+ Some(parent_id) => self.graph.get_npm_pkg_id(parent_id).as_serialized(),
None => "<package-req>".to_string(),
},
pkg_req_name,
version_req.version_text(),
- id.as_serialized(),
- );
- let (created, node) = self.graph.get_or_create_for_id(&id);
- if created {
- let mut node = (*node).lock();
- let mut deps = version_and_info
- .info
- .dependencies_as_entries()
- .with_context(|| format!("npm package: {}", id.display()))?;
- // Ensure name alphabetical and then version descending
- // so these are resolved in that order
- deps.sort();
- node.deps = Arc::new(deps);
- node.no_peers = node.deps.is_empty();
- }
+ pkg_id.to_string(),
+ );
- Ok((id, node))
+ Ok((pkg_id, node_id))
}
pub async fn resolve_pending(&mut self) -> Result<(), AnyError> {
while !self.pending_unresolved_nodes.is_empty() {
// now go down through the dependencies by tree depth
- while let Some((visited_versions, parent_node)) =
- self.pending_unresolved_nodes.pop_front()
- {
- let (mut parent_id, deps, existing_children) = {
- let parent_node = parent_node.lock();
- if parent_node.forgotten || parent_node.no_peers {
- // todo(dsherret): we should try to reproduce this forgotten scenario and write a test
+ while let Some(graph_path) = self.pending_unresolved_nodes.pop_front() {
+ let (pkg_id, deps) = {
+ let node_id = graph_path.node_id();
+ if self.graph.nodes.get(&node_id).unwrap().no_peers {
+ // We can skip as there's no reason to analyze this graph segment further
+ // Note that we don't need to count parent references here because that's
+ // only necessary for graph segments that could potentially have peer
+ // dependencies within them.
continue;
}
- (
- parent_node.id.clone(),
- parent_node.deps.clone(),
- parent_node.children.clone(),
- )
+ let pkg_nv = self
+ .graph
+ .resolved_node_ids
+ .get(node_id)
+ .unwrap()
+ .nv
+ .clone();
+ let deps = if let Some(deps) = self.dep_entry_cache.get(&pkg_nv) {
+ deps.clone()
+ } else {
+ // the api should have this in the cache at this point, so no need to parallelize
+ match self.api.package_version_info(&pkg_nv).await? {
+ Some(version_info) => {
+ self.dep_entry_cache.store(pkg_nv.clone(), &version_info)?
+ }
+ None => {
+ bail!("Could not find version information for {}", pkg_nv)
+ }
+ }
+ };
+
+ (pkg_nv, deps)
};
       // cache all the dependencies' registry infos in parallel if we should
- if !should_sync_download() {
- let handles = deps
- .iter()
- .map(|dep| {
- let name = dep.name.clone();
- let api = self.api.clone();
- tokio::task::spawn(async move {
- // it's ok to call this without storing the result, because
- // NpmRegistryApi will cache the package info in memory
- api.package_info(&name).await
- })
- })
- .collect::<Vec<_>>();
- let results = futures::future::join_all(handles).await;
- for result in results {
- result??; // surface the first error
- }
- }
+ self
+ .api
+ .cache_in_parallel({
+ deps.iter().map(|dep| dep.name.clone()).collect()
+ })
+ .await?;
// resolve the dependencies
let mut found_peer = false;
+
for dep in deps.iter() {
let package_info = self.api.package_info(&dep.name).await?;
match dep.kind {
NpmDependencyEntryKind::Dep => {
- let node = self.analyze_dependency(
- dep,
- &package_info,
- &parent_id,
- &visited_versions,
- )?;
+            // todo(dsherret): look into skipping dependency analysis here
+            // when it was already done previously
+ let child_id =
+ self.analyze_dependency(dep, &package_info, &graph_path)?;
+
if !found_peer {
- found_peer = !node.lock().no_peers;
+ found_peer = !self.graph.borrow_node_mut(child_id).no_peers;
}
}
NpmDependencyEntryKind::Peer
| NpmDependencyEntryKind::OptionalPeer => {
found_peer = true;
- let maybe_new_parent_id = self.resolve_peer_dep(
+ // we need to re-evaluate peer dependencies every time and can't
+ // skip over them because they might be evaluated differently based
+ // on the current path
+ let maybe_new_id = self.resolve_peer_dep(
&dep.bare_specifier,
- &parent_id,
dep,
&package_info,
- &visited_versions,
- existing_children.get(&dep.bare_specifier),
+ &graph_path,
)?;
- if let Some(new_parent_id) = maybe_new_parent_id {
- assert_eq!(
- (&new_parent_id.name, &new_parent_id.version),
- (&parent_id.name, &parent_id.version)
- );
- parent_id = new_parent_id;
+
+          // For optional peer dependencies, we want to resolve them if any future
+          // occurrence of the same parent version resolves them. So when unresolved,
+          // store them so they can potentially be resolved later.
+          //
+          // Note: This is not a great solution, but it will probably work ok in most
+          // scenarios. We can improve it in the future; for example, we would probably
+          // want to resolve future optional peers to the same dependency.
+ if dep.kind == NpmDependencyEntryKind::OptionalPeer {
+ match maybe_new_id {
+ Some(new_id) => {
+ if let Some(unresolved_optional_peers) =
+ self.unresolved_optional_peers.remove(&pkg_id)
+ {
+ for optional_peer in unresolved_optional_peers {
+ let peer_parent = GraphPathNodeOrRoot::Node(
+ optional_peer.graph_path.clone(),
+ );
+ self.set_new_peer_dep(
+ vec![&optional_peer.graph_path],
+ peer_parent,
+ &optional_peer.specifier,
+ new_id,
+ );
+ }
+ }
+ }
+ None => {
+                  // store this for later in case it gets resolved for this version
+ self
+ .unresolved_optional_peers
+ .entry(pkg_id.clone())
+ .or_default()
+ .push(UnresolvedOptionalPeer {
+ specifier: dep.bare_specifier.clone(),
+ graph_path: graph_path.clone(),
+ });
+ }
+ }
}
}
}
}
if !found_peer {
- self.graph.borrow_node(&parent_id).no_peers = true;
+ self.graph.borrow_node_mut(graph_path.node_id()).no_peers = true;
}
}
}
@@ -690,496 +989,284 @@ impl<'a, TNpmRegistryApi: NpmRegistryApi>
fn resolve_peer_dep(
&mut self,
specifier: &str,
- parent_id: &NpmPackageNodeId,
peer_dep: &NpmDependencyEntry,
peer_package_info: &NpmPackageInfo,
- visited_ancestor_versions: &Arc<VisitedVersionsPath>,
- existing_dep_id: Option<&NpmPackageNodeId>,
- ) -> Result<Option<NpmPackageNodeId>, AnyError> {
- fn find_matching_child<'a>(
- peer_dep: &NpmDependencyEntry,
- peer_package_info: &NpmPackageInfo,
- children: impl Iterator<Item = &'a NpmPackageNodeId>,
- ) -> Result<Option<NpmPackageNodeId>, AnyError> {
- for child_id in children {
- if child_id.name == peer_dep.name
- && version_req_satisfies(
- &peer_dep.version_req,
- &child_id.version,
- peer_package_info,
- None,
- )?
- {
- return Ok(Some(child_id.clone()));
- }
- }
- Ok(None)
- }
+ ancestor_path: &Arc<GraphPath>,
+ ) -> Result<Option<NodeId>, AnyError> {
+ debug_assert!(matches!(
+ peer_dep.kind,
+ NpmDependencyEntryKind::Peer | NpmDependencyEntryKind::OptionalPeer
+ ));
- // Peer dependencies are resolved based on its ancestors' siblings.
- // If not found, then it resolves based on the version requirement if non-optional.
- let mut pending_ancestors = VecDeque::new(); // go up the tree by depth
- let path = GraphSpecifierPath::new(specifier.to_string());
- let visited_versions = VisitedVersionsPath::new(parent_id);
+ let mut path = vec![ancestor_path];
- // skip over the current node
- for (specifier, grand_parents) in
- self.graph.borrow_node(parent_id).parents.clone()
+ // the current dependency might have had the peer dependency
+ // in another bare specifier slot... if so resolve it to that
{
- let path = path.with_specifier(specifier);
- for grand_parent in grand_parents {
- if let Some(visited_versions) =
- visited_versions.with_parent(&grand_parent)
- {
- pending_ancestors.push_back((
- grand_parent,
- path.clone(),
- visited_versions,
- ));
- }
+ let maybe_peer_dep = self.find_peer_dep_in_node(
+ ancestor_path,
+ peer_dep,
+ peer_package_info,
+ )?;
+
+ if let Some((peer_parent, peer_dep_id)) = maybe_peer_dep {
+ // this will always have an ancestor because we're not at the root
+ self.set_new_peer_dep(path, peer_parent, specifier, peer_dep_id);
+ return Ok(Some(peer_dep_id));
}
}
- while let Some((ancestor, path, visited_versions)) =
- pending_ancestors.pop_front()
- {
- match &ancestor {
- NodeParent::Node(ancestor_node_id) => {
- let maybe_peer_dep_id = if ancestor_node_id.name == peer_dep.name
- && version_req_satisfies(
- &peer_dep.version_req,
- &ancestor_node_id.version,
- peer_package_info,
- None,
- )? {
- Some(ancestor_node_id.clone())
- } else {
- let ancestor = self.graph.borrow_node(ancestor_node_id);
- for (specifier, parents) in &ancestor.parents {
- let new_path = path.with_specifier(specifier.clone());
- for parent in parents {
- if let Some(visited_versions) =
- visited_versions.with_parent(parent)
- {
- pending_ancestors.push_back((
- parent.clone(),
- new_path.clone(),
- visited_versions,
- ));
- }
- }
- }
- find_matching_child(
- peer_dep,
- peer_package_info,
- ancestor.children.values(),
- )?
- };
- if let Some(peer_dep_id) = maybe_peer_dep_id {
- if existing_dep_id == Some(&peer_dep_id) {
- return Ok(None); // do nothing, there's already an existing child dep id for this
- }
-
- // handle optional dependency that's never been set
- if existing_dep_id.is_none() && peer_dep.kind.is_optional() {
- self.set_previously_unresolved_optional_dependency(
- &peer_dep_id,
- parent_id,
- peer_dep,
- visited_ancestor_versions,
- );
- return Ok(None);
- }
-
- let parents =
- self.graph.borrow_node(ancestor_node_id).parents.clone();
- return Ok(Some(self.set_new_peer_dep(
- parents,
- ancestor_node_id,
- &peer_dep_id,
- &path,
- visited_ancestor_versions,
- )));
+    // Peer dependencies are resolved based on their ancestors' siblings.
+    // If not found there, they resolve based on the version requirement when non-optional.
+ for ancestor_node in ancestor_path.ancestors() {
+ match ancestor_node {
+ GraphPathNodeOrRoot::Node(ancestor_graph_path_node) => {
+ path.push(ancestor_graph_path_node);
+ let maybe_peer_dep = self.find_peer_dep_in_node(
+ ancestor_graph_path_node,
+ peer_dep,
+ peer_package_info,
+ )?;
+ if let Some((parent, peer_dep_id)) = maybe_peer_dep {
+ // this will always have an ancestor because we're not at the root
+ self.set_new_peer_dep(path, parent, specifier, peer_dep_id);
+ return Ok(Some(peer_dep_id));
}
}
- NodeParent::Req => {
+ GraphPathNodeOrRoot::Root(root_pkg_id) => {
// in this case, the parent is the root so the children are all the package requirements
if let Some(child_id) = find_matching_child(
peer_dep,
peer_package_info,
- self.graph.package_reqs.values(),
+ self
+ .graph
+ .root_packages
+ .iter()
+ .map(|(pkg_id, id)| (*id, pkg_id)),
)? {
- if existing_dep_id == Some(&child_id) {
- return Ok(None); // do nothing, there's already an existing child dep id for this
- }
-
- // handle optional dependency that's never been set
- if existing_dep_id.is_none() && peer_dep.kind.is_optional() {
- self.set_previously_unresolved_optional_dependency(
- &child_id,
- parent_id,
- peer_dep,
- visited_ancestor_versions,
- );
- return Ok(None);
- }
-
- let specifier = path.specifier.to_string();
- let path = path.pop().unwrap(); // go back down one level from the package requirement
- let old_id =
- self.graph.package_reqs.get(&specifier).unwrap().clone();
- return Ok(Some(self.set_new_peer_dep(
- BTreeMap::from([(specifier, BTreeSet::from([NodeParent::Req]))]),
- &old_id,
- &child_id,
- path,
- visited_ancestor_versions,
- )));
+ let peer_parent = GraphPathNodeOrRoot::Root(root_pkg_id.clone());
+ self.set_new_peer_dep(path, peer_parent, specifier, child_id);
+ return Ok(Some(child_id));
}
}
}
}
// We didn't find anything by searching the ancestor siblings, so we need
- // to resolve based on the package info and will treat this just like any
- // other dependency when not optional
- if !peer_dep.kind.is_optional()
- // prefer the existing dep id if it exists
- && existing_dep_id.is_none()
- {
- self.analyze_dependency(
- peer_dep,
+ // to resolve based on the package info
+ if !peer_dep.kind.is_optional() {
+ let parent_id = ancestor_path.node_id();
+ let (_, node_id) = self.resolve_node_from_info(
+ &peer_dep.name,
+ peer_dep
+ .peer_dep_version_req
+ .as_ref()
+ .unwrap_or(&peer_dep.version_req),
peer_package_info,
- parent_id,
- visited_ancestor_versions,
+ Some(parent_id),
)?;
+ let peer_parent = GraphPathNodeOrRoot::Node(ancestor_path.clone());
+ self.set_new_peer_dep(
+ vec![ancestor_path],
+ peer_parent,
+ specifier,
+ node_id,
+ );
+ Ok(Some(node_id))
+ } else {
+ Ok(None)
}
-
- Ok(None)
}
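
In short, peer resolution walks outward from the dependent: the current node is checked first, then each ancestor (itself and its children) in turn, and only when nothing matches and the peer is non-optional does it fall back to resolving from the registry. A compact sketch of that ancestor walk over hypothetical index-based nodes:

use std::collections::HashMap;

// Hypothetical simplified node: (name, version) plus named children.
struct SimpleNode {
  nv: (String, String),
  children: HashMap<String, usize>, // bare specifier -> node index
}

// Starting at `start`, check the node itself and then each ancestor and its
// children for a package whose name matches and whose version satisfies `req`.
fn find_peer(
  nodes: &[SimpleNode],
  parents: &[Option<usize>], // parent index per node, None at the root
  start: usize,
  name: &str,
  req: impl Fn(&str) -> bool,
) -> Option<usize> {
  let mut current = Some(start);
  while let Some(idx) = current {
    let node = &nodes[idx];
    if node.nv.0 == name && req(&node.nv.1) {
      return Some(idx);
    }
    for child_idx in node.children.values() {
      let child = &nodes[*child_idx];
      if child.nv.0 == name && req(&child.nv.1) {
        return Some(*child_idx);
      }
    }
    current = parents[idx];
  }
  None
}

fn main() {
  let nodes = vec![
    SimpleNode {
      nv: ("package-a".to_string(), "1.0.0".to_string()),
      children: HashMap::from([
        ("package-peer".to_string(), 1),
        ("package-b".to_string(), 2),
      ]),
    },
    SimpleNode { nv: ("package-peer".to_string(), "4.0.0".to_string()), children: HashMap::new() },
    SimpleNode { nv: ("package-b".to_string(), "2.0.0".to_string()), children: HashMap::new() },
  ];
  let parents = vec![None, Some(0), Some(0)];
  // from package-b, the peer is found as a sibling under package-a
  assert_eq!(find_peer(&nodes, &parents, 2, "package-peer", |v| v.starts_with('4')), Some(1));
}
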
- /// Optional peer dependencies that have never been set before are
- /// simply added to the existing peer dependency instead of affecting
- /// the entire sub tree.
- fn set_previously_unresolved_optional_dependency(
- &mut self,
- peer_dep_id: &NpmPackageNodeId,
- parent_id: &NpmPackageNodeId,
+ fn find_peer_dep_in_node(
+ &self,
+ path: &Arc<GraphPath>,
peer_dep: &NpmDependencyEntry,
- visited_ancestor_versions: &Arc<VisitedVersionsPath>,
- ) {
- let (_, node) = self.graph.get_or_create_for_id(peer_dep_id);
- self.graph.set_child_parent(
- &peer_dep.bare_specifier,
- &node,
- &NodeParent::Node(parent_id.clone()),
- );
- self
- .try_add_pending_unresolved_node(Some(visited_ancestor_versions), &node);
+ peer_package_info: &NpmPackageInfo,
+ ) -> Result<Option<(GraphPathNodeOrRoot, NodeId)>, AnyError> {
+ let node_id = path.node_id();
+ let resolved_node_id = self.graph.resolved_node_ids.get(node_id).unwrap();
+ // check if this node itself is a match for
+ // the peer dependency and if so use that
+ if resolved_node_id.nv.name == peer_dep.name
+ && version_req_satisfies(
+ &peer_dep.version_req,
+ &resolved_node_id.nv.version,
+ peer_package_info,
+ None,
+ )?
+ {
+ let parent = path.previous_node.as_ref().unwrap().clone();
+ Ok(Some((parent, node_id)))
+ } else {
+ let node = self.graph.nodes.get(&node_id).unwrap();
+ let children = node.children.values().map(|child_node_id| {
+ let child_node_id = *child_node_id;
+ (
+ child_node_id,
+ &self.graph.resolved_node_ids.get(child_node_id).unwrap().nv,
+ )
+ });
+ find_matching_child(peer_dep, peer_package_info, children).map(
+ |maybe_child_id| {
+ maybe_child_id.map(|child_id| {
+ let parent = GraphPathNodeOrRoot::Node(path.clone());
+ (parent, child_id)
+ })
+ },
+ )
+ }
}
fn set_new_peer_dep(
&mut self,
- previous_parents: BTreeMap<String, BTreeSet<NodeParent>>,
- node_id: &NpmPackageNodeId,
- peer_dep_id: &NpmPackageNodeId,
- path: &Arc<GraphSpecifierPath>,
- visited_ancestor_versions: &Arc<VisitedVersionsPath>,
- ) -> NpmPackageNodeId {
- let peer_dep_id = Cow::Borrowed(peer_dep_id);
- let old_id = node_id;
- let (new_id, mut old_node_children) =
- if old_id.peer_dependencies.contains(&peer_dep_id)
- || *old_id == *peer_dep_id
- {
- // the parent has already resolved to using this peer dependency
- // via some other path or the parent is the peer dependency,
- // so we don't need to update its ids, but instead only make a link to it
- (
- old_id.clone(),
- self.graph.borrow_node(old_id).children.clone(),
- )
- } else {
- let mut new_id = old_id.clone();
- new_id.peer_dependencies.push(peer_dep_id.as_ref().clone());
-
- // remove the previous parents from the old node
- let old_node_children = {
- for (specifier, parents) in &previous_parents {
- for parent in parents {
- self.graph.remove_child_parent(specifier, old_id, parent);
- }
- }
- let old_node = self.graph.borrow_node(old_id);
- old_node.children.clone()
- };
+ // path from the node above the resolved dep to just above the peer dep
+ path: Vec<&Arc<GraphPath>>,
+ peer_dep_parent: GraphPathNodeOrRoot,
+ peer_dep_specifier: &str,
+ mut peer_dep_id: NodeId,
+ ) {
+ debug_assert!(!path.is_empty());
+ let peer_dep_pkg_id = self
+ .graph
+ .resolved_node_ids
+ .get(peer_dep_id)
+ .unwrap()
+ .nv
+ .clone();
+
+ let peer_dep = ResolvedIdPeerDep::ParentReference {
+ parent: peer_dep_parent,
+ child_pkg_nv: peer_dep_pkg_id,
+ };
+ for graph_path_node in path.iter().rev() {
+ let old_node_id = graph_path_node.node_id();
+ let old_resolved_id = self
+ .graph
+ .resolved_node_ids
+ .get(old_node_id)
+ .unwrap()
+ .clone();
- let (_, new_node) = self.graph.get_or_create_for_id(&new_id);
+ let mut new_resolved_id = old_resolved_id.clone();
+ new_resolved_id.push_peer_dep(peer_dep.clone());
+ let (created, new_node_id) =
+ self.graph.get_or_create_for_id(&new_resolved_id);
- // update the previous parent to point to the new node
- // and this node to point at those parents
- for (specifier, parents) in previous_parents {
- for parent in parents {
- self.graph.set_child_parent(&specifier, &new_node, &parent);
- }
- }
+ // this will occur when the peer dependency is in an ancestor
+ if old_node_id == peer_dep_id {
+ peer_dep_id = new_node_id;
+ }
- // now add the previous children to this node
- let new_id_as_parent = NodeParent::Node(new_id.clone());
- for (specifier, child_id) in &old_node_children {
- let child = self.graph.packages.get(child_id).unwrap().clone();
+ if created {
+ let old_children =
+ self.graph.borrow_node_mut(old_node_id).children.clone();
+ // copy over the old children to this new one
+ for (specifier, child_id) in &old_children {
self
.graph
- .set_child_parent(specifier, &child, &new_id_as_parent);
+ .set_child_parent_node(specifier, *child_id, new_node_id);
}
- (new_id, old_node_children)
- };
-
- // this is the parent id found at the bottom of the path
- let mut bottom_parent_id = new_id.clone();
-
- // continue going down the path
- let next_specifier = &path.specifier;
- if let Some(path) = path.pop() {
- let next_node_id = old_node_children.get(next_specifier).unwrap();
- bottom_parent_id = self.set_new_peer_dep(
- BTreeMap::from([(
- next_specifier.to_string(),
- BTreeSet::from([NodeParent::Node(new_id.clone())]),
- )]),
- next_node_id,
- &peer_dep_id,
- path,
- visited_ancestor_versions,
- );
- } else {
- // this means we're at the peer dependency now
- debug!(
- "Resolved peer dependency for {} in {} to {}",
- next_specifier,
- &new_id.as_serialized(),
- &peer_dep_id.as_serialized(),
- );
-
- // handle this node having a previous child due to another peer dependency
- if let Some(child_id) = old_node_children.remove(next_specifier) {
- if let Some(node) = self.graph.packages.get(&child_id) {
- let is_orphan = {
- let mut node = node.lock();
- node
- .remove_parent(next_specifier, &NodeParent::Node(new_id.clone()));
- node.parents.is_empty()
- };
- if is_orphan {
- self.graph.forget_orphan(&child_id);
- }
- }
- }
-
- let node = self.graph.get_or_create_for_id(&peer_dep_id).1;
- self.try_add_pending_unresolved_node(
- Some(visited_ancestor_versions),
- &node,
- );
- self
- .graph
- .set_child_parent_node(next_specifier, &node, &new_id);
- }
-
- // forget the old node at this point if it has no parents
- if new_id != *old_id {
- let old_node = self.graph.borrow_node(old_id);
- if old_node.parents.is_empty() {
- drop(old_node); // stop borrowing
- self.graph.forget_orphan(old_id);
}
- }
- bottom_parent_id
- }
-}
+ debug_assert_eq!(graph_path_node.node_id(), old_node_id);
+ graph_path_node.change_id(new_node_id);
-#[derive(Clone)]
-struct VersionAndInfo<'a> {
- version: Version,
- info: &'a NpmPackageVersionInfo,
-}
-
-fn get_resolved_package_version_and_info<'a>(
- version_req: &VersionReq,
- info: &'a NpmPackageInfo,
- parent: Option<&NpmPackageNodeId>,
-) -> Result<VersionAndInfo<'a>, AnyError> {
- if let Some(tag) = version_req.tag() {
- tag_to_version_info(info, tag, parent)
- } else {
- let mut maybe_best_version: Option<VersionAndInfo> = None;
- for version_info in info.versions.values() {
- let version = Version::parse_from_npm(&version_info.version)?;
- if version_req.matches(&version) {
- let is_best_version = maybe_best_version
- .as_ref()
- .map(|best_version| best_version.version.cmp(&version).is_lt())
- .unwrap_or(true);
- if is_best_version {
- maybe_best_version = Some(VersionAndInfo {
- version,
- info: version_info,
- });
+ // update the previous parent to have this as its child
+ match graph_path_node.previous_node.as_ref().unwrap() {
+ GraphPathNodeOrRoot::Root(pkg_id) => {
+ self.graph.root_packages.insert(pkg_id.clone(), new_node_id);
+ }
+ GraphPathNodeOrRoot::Node(parent_node_path) => {
+ let parent_node_id = parent_node_path.node_id();
+ let parent_node = self.graph.borrow_node_mut(parent_node_id);
+ parent_node
+ .children
+ .insert(graph_path_node.specifier().to_string(), new_node_id);
}
}
}
- match maybe_best_version {
- Some(v) => Ok(v),
- // If the package isn't found, it likely means that the user needs to use
- // `--reload` to get the latest npm package information. Although it seems
- // like we could make this smart by fetching the latest information for
- // this package here, we really need a full restart. There could be very
- // interesting bugs that occur if this package's version was resolved by
- // something previous using the old information, then now being smart here
- // causes a new fetch of the package information, meaning this time the
- // previous resolution of this package's version resolved to an older
- // version, but next time to a different version because it has new information.
- None => bail!(
- concat!(
- "Could not find npm package '{}' matching {}{}. ",
- "Try retrieving the latest npm package information by running with --reload",
- ),
- info.name,
- version_req.version_text(),
- match parent {
- Some(id) => format!(" as specified in {}", id.display()),
- None => String::new(),
- }
- ),
- }
- }
-}
+ // now set the peer dependency
+ let bottom_node = path.first().unwrap();
+ let parent_node_id = bottom_node.node_id();
+ self.graph.set_child_parent_node(
+ peer_dep_specifier,
+ peer_dep_id,
+ parent_node_id,
+ );
-fn version_req_satisfies(
- version_req: &VersionReq,
- version: &Version,
- package_info: &NpmPackageInfo,
- parent: Option<&NpmPackageNodeId>,
-) -> Result<bool, AnyError> {
- match version_req.tag() {
- Some(tag) => {
- let tag_version = tag_to_version_info(package_info, tag, parent)?.version;
- Ok(tag_version == *version)
- }
- None => Ok(version_req.matches(version)),
+ // mark the peer dependency to be analyzed
+ self.try_add_pending_unresolved_node(
+ bottom_node,
+ peer_dep_id,
+ peer_dep_specifier,
+ );
+
+ debug!(
+ "Resolved peer dependency for {} in {} to {}",
+ peer_dep_specifier,
+ &self.graph.get_npm_pkg_id(parent_node_id).as_serialized(),
+ &self.graph.get_npm_pkg_id(peer_dep_id).as_serialized(),
+ );
}
}
-fn tag_to_version_info<'a>(
- info: &'a NpmPackageInfo,
- tag: &str,
- parent: Option<&NpmPackageNodeId>,
-) -> Result<VersionAndInfo<'a>, AnyError> {
- // For when someone just specifies @types/node, we want to pull in a
- // "known good" version of @types/node that works well with Deno and
- // not necessarily the latest version. For example, we might only be
- // compatible with Node vX, but then Node vY is published so we wouldn't
- // want to pull that in.
- // Note: If the user doesn't want this behavior, then they can specify an
- // explicit version.
- if tag == "latest" && info.name == "@types/node" {
- return get_resolved_package_version_and_info(
- &VersionReq::parse_from_npm("18.0.0 - 18.11.18").unwrap(),
- info,
- parent,
- );
- }
-
- if let Some(version) = info.dist_tags.get(tag) {
- match info.versions.get(version) {
- Some(info) => Ok(VersionAndInfo {
- version: Version::parse_from_npm(version)?,
- info,
- }),
- None => {
- bail!(
- "Could not find version '{}' referenced in dist-tag '{}'.",
- version,
- tag,
- )
- }
+fn find_matching_child<'a>(
+ peer_dep: &NpmDependencyEntry,
+ peer_package_info: &NpmPackageInfo,
+ children: impl Iterator<Item = (NodeId, &'a NpmPackageNv)>,
+) -> Result<Option<NodeId>, AnyError> {
+ for (child_id, pkg_id) in children {
+ if pkg_id.name == peer_dep.name
+ && version_req_satisfies(
+ &peer_dep.version_req,
+ &pkg_id.version,
+ peer_package_info,
+ None,
+ )?
+ {
+ return Ok(Some(child_id));
}
- } else {
- bail!("Could not find dist-tag '{}'.", tag)
}
+ Ok(None)
}
#[cfg(test)]
mod test {
- use deno_graph::npm::NpmPackageReference;
+ use deno_graph::npm::NpmPackageReqReference;
use pretty_assertions::assert_eq;
- use crate::npm::registry::TestNpmRegistryApi;
+ use crate::npm::registry::TestNpmRegistryApiInner;
use super::*;
#[test]
- fn test_get_resolved_package_version_and_info() {
- // dist tag where version doesn't exist
- let package_ref = NpmPackageReference::from_str("npm:test").unwrap();
- let package_info = NpmPackageInfo {
- name: "test".to_string(),
- versions: HashMap::new(),
- dist_tags: HashMap::from([(
- "latest".to_string(),
- "1.0.0-alpha".to_string(),
- )]),
+ fn resolved_id_tests() {
+ let mut ids = ResolvedNodeIds::default();
+ let node_id = NodeId(0);
+ let resolved_id = ResolvedId {
+ nv: NpmPackageNv::from_str("package@1.1.1").unwrap(),
+ peer_dependencies: Vec::new(),
};
- let result = get_resolved_package_version_and_info(
- package_ref
- .req
- .version_req
- .as_ref()
- .unwrap_or(&*LATEST_VERSION_REQ),
- &package_info,
- None,
- );
- assert_eq!(
- result.err().unwrap().to_string(),
- "Could not find version '1.0.0-alpha' referenced in dist-tag 'latest'."
- );
+ ids.set(node_id, resolved_id.clone());
+ assert!(ids.get(node_id).is_some());
+ assert!(ids.get(NodeId(1)).is_none());
+ assert_eq!(ids.get_node_id(&resolved_id), Some(node_id));
- // dist tag where version is a pre-release
- let package_ref = NpmPackageReference::from_str("npm:test").unwrap();
- let package_info = NpmPackageInfo {
- name: "test".to_string(),
- versions: HashMap::from([
- ("0.1.0".to_string(), NpmPackageVersionInfo::default()),
- (
- "1.0.0-alpha".to_string(),
- NpmPackageVersionInfo {
- version: "0.1.0-alpha".to_string(),
- ..Default::default()
- },
- ),
- ]),
- dist_tags: HashMap::from([(
- "latest".to_string(),
- "1.0.0-alpha".to_string(),
- )]),
+ let resolved_id_new = ResolvedId {
+ nv: NpmPackageNv::from_str("package@1.1.2").unwrap(),
+ peer_dependencies: Vec::new(),
};
- let result = get_resolved_package_version_and_info(
- package_ref
- .req
- .version_req
- .as_ref()
- .unwrap_or(&*LATEST_VERSION_REQ),
- &package_info,
- None,
- );
- assert_eq!(result.unwrap().version.to_string(), "1.0.0-alpha");
+ ids.set(node_id, resolved_id_new.clone());
+ assert_eq!(ids.get_node_id(&resolved_id), None); // stale entry should have been removed
+ assert!(ids.get(node_id).is_some());
+ assert_eq!(ids.get_node_id(&resolved_id_new), Some(node_id));
}
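
The assertions above exercise the bidirectional id map: assigning a new resolved id to an existing node id must drop the stale reverse mapping. A minimal sketch of a map with that behaviour, using plain strings in place of ResolvedId:

use std::collections::HashMap;

#[derive(Default)]
struct BiMap {
  node_to_resolved: HashMap<u32, String>,
  resolved_to_node: HashMap<String, u32>,
}

impl BiMap {
  fn set(&mut self, node_id: u32, resolved: String) {
    // if the node id was already mapped, remove the stale reverse entry
    if let Some(old) = self.node_to_resolved.insert(node_id, resolved.clone()) {
      self.resolved_to_node.remove(&old);
    }
    self.resolved_to_node.insert(resolved, node_id);
  }

  fn get(&self, node_id: u32) -> Option<&String> {
    self.node_to_resolved.get(&node_id)
  }

  fn get_node_id(&self, resolved: &str) -> Option<u32> {
    self.resolved_to_node.get(resolved).copied()
  }
}

fn main() {
  let mut ids = BiMap::default();
  ids.set(0, "package@1.1.1".to_string());
  assert_eq!(ids.get_node_id("package@1.1.1"), Some(0));
  ids.set(0, "package@1.1.2".to_string());
  assert_eq!(ids.get_node_id("package@1.1.1"), None); // stale entry removed
  assert!(ids.get(0).is_some());
}
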
#[tokio::test]
async fn resolve_deps_no_peer() {
- let api = TestNpmRegistryApi::default();
+ let api = TestNpmRegistryApiInner::default();
api.ensure_package_version("package-a", "1.0.0");
api.ensure_package_version("package-b", "2.0.0");
api.ensure_package_version("package-c", "0.1.0");
@@ -1196,37 +1283,37 @@ mod test {
packages,
vec![
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-a@1.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-a@1.0.0").unwrap(),
copy_index: 0,
dependencies: HashMap::from([
(
"package-b".to_string(),
- NpmPackageNodeId::from_serialized("package-b@2.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-b@2.0.0").unwrap(),
),
(
"package-c".to_string(),
- NpmPackageNodeId::from_serialized("package-c@0.1.0").unwrap(),
+ NpmPackageId::from_serialized("package-c@0.1.0").unwrap(),
),
]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-b@2.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-b@2.0.0").unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-c@0.1.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-c@0.1.0").unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: HashMap::from([(
"package-d".to_string(),
- NpmPackageNodeId::from_serialized("package-d@3.2.1").unwrap(),
+ NpmPackageId::from_serialized("package-d@3.2.1").unwrap(),
)])
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-d@3.2.1").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-d@3.2.1").unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: Default::default(),
@@ -1241,7 +1328,7 @@ mod test {
#[tokio::test]
async fn resolve_deps_circular() {
- let api = TestNpmRegistryApi::default();
+ let api = TestNpmRegistryApiInner::default();
api.ensure_package_version("package-a", "1.0.0");
api.ensure_package_version("package-b", "2.0.0");
api.add_dependency(("package-a", "1.0.0"), ("package-b", "*"));
@@ -1253,20 +1340,20 @@ mod test {
packages,
vec![
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-a@1.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-a@1.0.0").unwrap(),
copy_index: 0,
dependencies: HashMap::from([(
"package-b".to_string(),
- NpmPackageNodeId::from_serialized("package-b@2.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-b@2.0.0").unwrap(),
)]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-b@2.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-b@2.0.0").unwrap(),
copy_index: 0,
dependencies: HashMap::from([(
"package-a".to_string(),
- NpmPackageNodeId::from_serialized("package-a@1.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-a@1.0.0").unwrap(),
)]),
dist: Default::default(),
},
@@ -1279,8 +1366,242 @@ mod test {
}
#[tokio::test]
+ async fn peer_deps_simple_top_tree() {
+ let api = TestNpmRegistryApiInner::default();
+ api.ensure_package_version("package-a", "1.0.0");
+ api.ensure_package_version("package-b", "1.0.0");
+ api.ensure_package_version("package-peer", "1.0.0");
+ api.add_dependency(("package-a", "1.0.0"), ("package-b", "1"));
+ api.add_peer_dependency(("package-b", "1.0.0"), ("package-peer", "*"));
+
+ let (packages, package_reqs) = run_resolver_and_get_output(
+ api,
+ vec!["npm:package-a@1.0", "npm:package-peer@1.0"],
+ )
+ .await;
+ assert_eq!(
+ packages,
+ vec![
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer@1.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-b".to_string(),
+ NpmPackageId::from_serialized("package-b@1.0.0_package-peer@1.0.0")
+ .unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized(
+ "package-b@1.0.0_package-peer@1.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-peer".to_string(),
+ NpmPackageId::from_serialized("package-peer@1.0.0").unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized("package-peer@1.0.0").unwrap(),
+ copy_index: 0,
+ dependencies: Default::default(),
+ dist: Default::default(),
+ }
+ ]
+ );
+ assert_eq!(
+ package_reqs,
+ vec![
+ (
+ "package-a@1.0".to_string(),
+ "package-a@1.0.0_package-peer@1.0.0".to_string()
+ ),
+ (
+ "package-peer@1.0".to_string(),
+ "package-peer@1.0.0".to_string()
+ )
+ ]
+ );
+ }
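
The expected ids in these tests show how a resolved peer dependency becomes part of a package's identity: it is appended to the base name@version after an underscore (e.g. package-a@1.0.0_package-peer@1.0.0), so the same version with different peers gets a distinct id. A rough sketch of assembling such a serialized id with a hypothetical simplified id type (the real NpmPackageId also handles parsing and nested peers):

struct PkgId {
  name: String,
  version: String,
  peer_dependencies: Vec<PkgId>,
}

impl PkgId {
  fn as_serialized(&self) -> String {
    // base "name@version", then each resolved peer appended after an underscore
    let mut out = format!("{}@{}", self.name, self.version);
    for peer in &self.peer_dependencies {
      out.push('_');
      out.push_str(&peer.as_serialized());
    }
    out
  }
}

fn main() {
  let id = PkgId {
    name: "package-a".to_string(),
    version: "1.0.0".to_string(),
    peer_dependencies: vec![PkgId {
      name: "package-peer".to_string(),
      version: "1.0.0".to_string(),
      peer_dependencies: Vec::new(),
    }],
  };
  assert_eq!(id.as_serialized(), "package-a@1.0.0_package-peer@1.0.0");
}
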
+
+ #[tokio::test]
+ async fn peer_deps_simple_root_pkg_children() {
+ let api = TestNpmRegistryApiInner::default();
+ api.ensure_package_version("package-0", "1.0.0");
+ api.ensure_package_version("package-a", "1.0.0");
+ api.ensure_package_version("package-b", "1.0.0");
+ api.ensure_package_version("package-peer", "1.0.0");
+ api.add_dependency(("package-0", "1.0.0"), ("package-a", "1"));
+ api.add_dependency(("package-0", "1.0.0"), ("package-peer", "1"));
+ api.add_dependency(("package-a", "1.0.0"), ("package-b", "1"));
+ api.add_peer_dependency(("package-b", "1.0.0"), ("package-peer", "*"));
+
+ let (packages, package_reqs) =
+ run_resolver_and_get_output(api, vec!["npm:package-0@1.0"]).await;
+ assert_eq!(
+ packages,
+ vec![
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized(
+ "package-0@1.0.0_package-peer@1.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([
+ (
+ "package-a".to_string(),
+ NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer@1.0.0"
+ )
+ .unwrap(),
+ ),
+ (
+ "package-peer".to_string(),
+ NpmPackageId::from_serialized("package-peer@1.0.0").unwrap(),
+ )
+ ]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer@1.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-b".to_string(),
+ NpmPackageId::from_serialized("package-b@1.0.0_package-peer@1.0.0")
+ .unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized(
+ "package-b@1.0.0_package-peer@1.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-peer".to_string(),
+ NpmPackageId::from_serialized("package-peer@1.0.0").unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized("package-peer@1.0.0").unwrap(),
+ copy_index: 0,
+ dependencies: Default::default(),
+ dist: Default::default(),
+ }
+ ]
+ );
+ assert_eq!(
+ package_reqs,
+ vec![(
+ "package-0@1.0".to_string(),
+ "package-0@1.0.0_package-peer@1.0.0".to_string()
+ ),]
+ );
+ }
+
+ #[tokio::test]
+ async fn peer_deps_simple_deeper() {
+ let api = TestNpmRegistryApiInner::default();
+ api.ensure_package_version("package-0", "1.0.0");
+ api.ensure_package_version("package-1", "1.0.0");
+ api.ensure_package_version("package-a", "1.0.0");
+ api.ensure_package_version("package-b", "1.0.0");
+ api.ensure_package_version("package-peer", "1.0.0");
+ api.add_dependency(("package-0", "1.0.0"), ("package-1", "1"));
+ api.add_dependency(("package-1", "1.0.0"), ("package-a", "1"));
+ api.add_dependency(("package-1", "1.0.0"), ("package-peer", "1"));
+ api.add_dependency(("package-a", "1.0.0"), ("package-b", "1"));
+ api.add_peer_dependency(("package-b", "1.0.0"), ("package-peer", "*"));
+
+ let (packages, package_reqs) =
+ run_resolver_and_get_output(api, vec!["npm:package-0@1.0"]).await;
+ assert_eq!(
+ packages,
+ vec![
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized("package-0@1.0.0").unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-1".to_string(),
+ NpmPackageId::from_serialized("package-1@1.0.0_package-peer@1.0.0")
+ .unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized(
+ "package-1@1.0.0_package-peer@1.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([
+ (
+ "package-a".to_string(),
+ NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer@1.0.0"
+ )
+ .unwrap(),
+ ),
+ (
+ "package-peer".to_string(),
+ NpmPackageId::from_serialized("package-peer@1.0.0").unwrap(),
+ )
+ ]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer@1.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-b".to_string(),
+ NpmPackageId::from_serialized("package-b@1.0.0_package-peer@1.0.0")
+ .unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized(
+ "package-b@1.0.0_package-peer@1.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-peer".to_string(),
+ NpmPackageId::from_serialized("package-peer@1.0.0").unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized("package-peer@1.0.0").unwrap(),
+ copy_index: 0,
+ dependencies: Default::default(),
+ dist: Default::default(),
+ }
+ ]
+ );
+ assert_eq!(
+ package_reqs,
+ vec![("package-0@1.0".to_string(), "package-0@1.0.0".to_string()),]
+ );
+ }
+
+ #[tokio::test]
async fn resolve_with_peer_deps_top_tree() {
- let api = TestNpmRegistryApi::default();
+ let api = TestNpmRegistryApiInner::default();
api.ensure_package_version("package-a", "1.0.0");
api.ensure_package_version("package-b", "2.0.0");
api.ensure_package_version("package-c", "3.0.0");
@@ -1302,7 +1623,7 @@ mod test {
packages,
vec![
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
+ pkg_id: NpmPackageId::from_serialized(
"package-a@1.0.0_package-peer@4.0.0"
)
.unwrap(),
@@ -1310,14 +1631,14 @@ mod test {
dependencies: HashMap::from([
(
"package-b".to_string(),
- NpmPackageNodeId::from_serialized(
+ NpmPackageId::from_serialized(
"package-b@2.0.0_package-peer@4.0.0"
)
.unwrap(),
),
(
"package-c".to_string(),
- NpmPackageNodeId::from_serialized(
+ NpmPackageId::from_serialized(
"package-c@3.0.0_package-peer@4.0.0"
)
.unwrap(),
@@ -1326,7 +1647,7 @@ mod test {
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
+ pkg_id: NpmPackageId::from_serialized(
"package-b@2.0.0_package-peer@4.0.0"
)
.unwrap(),
@@ -1334,11 +1655,11 @@ mod test {
dist: Default::default(),
dependencies: HashMap::from([(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@4.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
)])
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
+ pkg_id: NpmPackageId::from_serialized(
"package-c@3.0.0_package-peer@4.0.0"
)
.unwrap(),
@@ -1346,11 +1667,11 @@ mod test {
dist: Default::default(),
dependencies: HashMap::from([(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@4.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
)])
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-peer@4.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: Default::default(),
@@ -1374,7 +1695,7 @@ mod test {
#[tokio::test]
async fn resolve_with_peer_deps_ancestor_sibling_not_top_tree() {
- let api = TestNpmRegistryApi::default();
+ let api = TestNpmRegistryApiInner::default();
api.ensure_package_version("package-0", "1.1.1");
api.ensure_package_version("package-a", "1.0.0");
api.ensure_package_version("package-b", "2.0.0");
@@ -1396,19 +1717,17 @@ mod test {
packages,
vec![
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-0@1.1.1").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-0@1.1.1").unwrap(),
copy_index: 0,
dependencies: HashMap::from([(
"package-a".to_string(),
- NpmPackageNodeId::from_serialized(
- "package-a@1.0.0_package-peer@4.0.0"
- )
- .unwrap(),
+ NpmPackageId::from_serialized("package-a@1.0.0_package-peer@4.0.0")
+ .unwrap(),
),]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
+ pkg_id: NpmPackageId::from_serialized(
"package-a@1.0.0_package-peer@4.0.0"
)
.unwrap(),
@@ -1416,27 +1735,27 @@ mod test {
dependencies: HashMap::from([
(
"package-b".to_string(),
- NpmPackageNodeId::from_serialized(
+ NpmPackageId::from_serialized(
"package-b@2.0.0_package-peer@4.0.0"
)
.unwrap(),
),
(
"package-c".to_string(),
- NpmPackageNodeId::from_serialized(
+ NpmPackageId::from_serialized(
"package-c@3.0.0_package-peer@4.0.0"
)
.unwrap(),
),
(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@4.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
),
]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
+ pkg_id: NpmPackageId::from_serialized(
"package-b@2.0.0_package-peer@4.0.0"
)
.unwrap(),
@@ -1444,11 +1763,11 @@ mod test {
dist: Default::default(),
dependencies: HashMap::from([(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@4.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
)])
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
+ pkg_id: NpmPackageId::from_serialized(
"package-c@3.0.0_package-peer@4.0.0"
)
.unwrap(),
@@ -1456,11 +1775,11 @@ mod test {
dist: Default::default(),
dependencies: HashMap::from([(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@4.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
)])
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-peer@4.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: Default::default(),
@@ -1477,7 +1796,7 @@ mod test {
async fn resolve_with_peer_deps_auto_resolved() {
// in this case, the peer dependency is not found in the tree
// so it's auto-resolved based on the registry
- let api = TestNpmRegistryApi::default();
+ let api = TestNpmRegistryApiInner::default();
api.ensure_package_version("package-a", "1.0.0");
api.ensure_package_version("package-b", "2.0.0");
api.ensure_package_version("package-c", "3.0.0");
@@ -1494,40 +1813,52 @@ mod test {
packages,
vec![
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-a@1.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-a@1.0.0").unwrap(),
copy_index: 0,
dependencies: HashMap::from([
(
"package-b".to_string(),
- NpmPackageNodeId::from_serialized("package-b@2.0.0").unwrap(),
+ NpmPackageId::from_serialized(
+ "package-b@2.0.0_package-peer@4.1.0"
+ )
+ .unwrap(),
),
(
"package-c".to_string(),
- NpmPackageNodeId::from_serialized("package-c@3.0.0").unwrap(),
+ NpmPackageId::from_serialized(
+ "package-c@3.0.0_package-peer@4.1.0"
+ )
+ .unwrap(),
),
]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-b@2.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized(
+ "package-b@2.0.0_package-peer@4.1.0"
+ )
+ .unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: HashMap::from([(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@4.1.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@4.1.0").unwrap(),
)])
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-c@3.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized(
+ "package-c@3.0.0_package-peer@4.1.0"
+ )
+ .unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: HashMap::from([(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@4.1.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@4.1.0").unwrap(),
)])
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-peer@4.1.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-peer@4.1.0").unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: Default::default(),
@@ -1544,7 +1875,7 @@ mod test {
async fn resolve_with_optional_peer_dep_not_resolved() {
// in this case, the peer dependency is not found in the tree
// so it's auto-resolved based on the registry
- let api = TestNpmRegistryApi::default();
+ let api = TestNpmRegistryApiInner::default();
api.ensure_package_version("package-a", "1.0.0");
api.ensure_package_version("package-b", "2.0.0");
api.ensure_package_version("package-c", "3.0.0");
@@ -1567,28 +1898,28 @@ mod test {
packages,
vec![
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-a@1.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-a@1.0.0").unwrap(),
copy_index: 0,
dependencies: HashMap::from([
(
"package-b".to_string(),
- NpmPackageNodeId::from_serialized("package-b@2.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-b@2.0.0").unwrap(),
),
(
"package-c".to_string(),
- NpmPackageNodeId::from_serialized("package-c@3.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-c@3.0.0").unwrap(),
),
]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-b@2.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-b@2.0.0").unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: HashMap::new(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-c@3.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-c@3.0.0").unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: HashMap::new(),
@@ -1603,7 +1934,7 @@ mod test {
#[tokio::test]
async fn resolve_with_optional_peer_found() {
- let api = TestNpmRegistryApi::default();
+ let api = TestNpmRegistryApiInner::default();
api.ensure_package_version("package-a", "1.0.0");
api.ensure_package_version("package-b", "2.0.0");
api.ensure_package_version("package-c", "3.0.0");
@@ -1629,40 +1960,55 @@ mod test {
packages,
vec![
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-a@1.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer@4.0.0"
+ )
+ .unwrap(),
copy_index: 0,
dependencies: HashMap::from([
(
"package-b".to_string(),
- NpmPackageNodeId::from_serialized("package-b@2.0.0").unwrap(),
+ NpmPackageId::from_serialized(
+ "package-b@2.0.0_package-peer@4.0.0"
+ )
+ .unwrap(),
),
(
"package-c".to_string(),
- NpmPackageNodeId::from_serialized("package-c@3.0.0").unwrap(),
+ NpmPackageId::from_serialized(
+ "package-c@3.0.0_package-peer@4.0.0"
+ )
+ .unwrap(),
),
]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-b@2.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized(
+ "package-b@2.0.0_package-peer@4.0.0"
+ )
+ .unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: HashMap::from([(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@4.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
)])
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-c@3.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized(
+ "package-c@3.0.0_package-peer@4.0.0"
+ )
+ .unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: HashMap::from([(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@4.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
)])
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-peer@4.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: Default::default(),
@@ -1672,7 +2018,10 @@ mod test {
assert_eq!(
package_reqs,
vec![
- ("package-a@1".to_string(), "package-a@1.0.0".to_string()),
+ (
+ "package-a@1".to_string(),
+ "package-a@1.0.0_package-peer@4.0.0".to_string()
+ ),
(
"package-peer@4.0.0".to_string(),
"package-peer@4.0.0".to_string()
@@ -1687,7 +2036,7 @@ mod test {
// When resolving a dependency a second time and it has an optional
// peer dependency that wasn't previously resolved, it should resolve all the
// previous versions to the new one
- let api = TestNpmRegistryApi::default();
+ let api = TestNpmRegistryApiInner::default();
api.ensure_package_version("package-a", "1.0.0");
api.ensure_package_version("package-b", "1.0.0");
api.ensure_package_version("package-peer", "1.0.0");
@@ -1707,31 +2056,40 @@ mod test {
packages,
vec![
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-a@1.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer@1.0.0"
+ )
+ .unwrap(),
copy_index: 0,
dependencies: HashMap::from([
(
"package-b".to_string(),
- NpmPackageNodeId::from_serialized("package-b@1.0.0").unwrap(),
+ NpmPackageId::from_serialized(
+ "package-b@1.0.0_package-peer@1.0.0"
+ )
+ .unwrap(),
),
(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@1.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@1.0.0").unwrap(),
),
]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-b@1.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized(
+ "package-b@1.0.0_package-peer@1.0.0"
+ )
+ .unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: HashMap::from([(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@1.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@1.0.0").unwrap(),
)]),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-peer@1.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-peer@1.0.0").unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: HashMap::new(),
@@ -1741,8 +2099,14 @@ mod test {
assert_eq!(
package_reqs,
vec![
- ("package-a@1".to_string(), "package-a@1.0.0".to_string()),
- ("package-b@1".to_string(), "package-b@1.0.0".to_string())
+ (
+ "package-a@1".to_string(),
+ "package-a@1.0.0_package-peer@1.0.0".to_string()
+ ),
+ (
+ "package-b@1".to_string(),
+ "package-b@1.0.0_package-peer@1.0.0".to_string()
+ )
]
);
}
@@ -1750,7 +2114,7 @@ mod test {
#[tokio::test]
async fn resolve_optional_peer_first_not_resolved_second_resolved_scenario2()
{
- let api = TestNpmRegistryApi::default();
+ let api = TestNpmRegistryApiInner::default();
api.ensure_package_version("package-a", "1.0.0");
api.ensure_package_version("package-b", "1.0.0");
api.ensure_package_version("package-peer", "2.0.0");
@@ -1770,31 +2134,40 @@ mod test {
packages,
vec![
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-a@1.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer@2.0.0"
+ )
+ .unwrap(),
copy_index: 0,
dependencies: HashMap::from([(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@2.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@2.0.0").unwrap(),
)]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-b@1.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized(
+ "package-b@1.0.0_package-peer@2.0.0"
+ )
+ .unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: HashMap::from([
(
"package-a".to_string(),
- NpmPackageNodeId::from_serialized("package-a@1.0.0").unwrap(),
+ NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer@2.0.0"
+ )
+ .unwrap(),
),
(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@2.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@2.0.0").unwrap(),
)
]),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-peer@2.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-peer@2.0.0").unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: HashMap::new(),
@@ -1804,15 +2177,21 @@ mod test {
assert_eq!(
package_reqs,
vec![
- ("package-a@1".to_string(), "package-a@1.0.0".to_string()),
- ("package-b@1".to_string(), "package-b@1.0.0".to_string())
+ (
+ "package-a@1".to_string(),
+ "package-a@1.0.0_package-peer@2.0.0".to_string()
+ ),
+ (
+ "package-b@1".to_string(),
+ "package-b@1.0.0_package-peer@2.0.0".to_string()
+ )
]
);
}
#[tokio::test]
async fn resolve_optional_dep_npm_req_top() {
- let api = TestNpmRegistryApi::default();
+ let api = TestNpmRegistryApiInner::default();
api.ensure_package_version("package-a", "1.0.0");
api.ensure_package_version("package-peer", "1.0.0");
api.add_optional_peer_dependency(
@@ -1829,16 +2208,19 @@ mod test {
packages,
vec![
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-a@1.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer@1.0.0"
+ )
+ .unwrap(),
copy_index: 0,
dependencies: HashMap::from([(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@1.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@1.0.0").unwrap(),
)]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-peer@1.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-peer@1.0.0").unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: HashMap::new(),
@@ -1848,7 +2230,10 @@ mod test {
assert_eq!(
package_reqs,
vec![
- ("package-a@1".to_string(), "package-a@1.0.0".to_string()),
+ (
+ "package-a@1".to_string(),
+ "package-a@1.0.0_package-peer@1.0.0".to_string()
+ ),
(
"package-peer@1".to_string(),
"package-peer@1.0.0".to_string()
@@ -1859,7 +2244,7 @@ mod test {
#[tokio::test]
async fn resolve_optional_dep_different_resolution_second_time() {
- let api = TestNpmRegistryApi::default();
+ let api = TestNpmRegistryApiInner::default();
api.ensure_package_version("package-a", "1.0.0");
api.ensure_package_version("package-b", "1.0.0");
api.ensure_package_version("package-peer", "1.0.0");
@@ -1884,28 +2269,31 @@ mod test {
packages,
vec![
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-a@1.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer@1.0.0"
+ )
+ .unwrap(),
copy_index: 0,
dependencies: HashMap::from([(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@1.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@1.0.0").unwrap(),
)]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
+ pkg_id: NpmPackageId::from_serialized(
"package-a@1.0.0_package-peer@2.0.0"
)
.unwrap(),
copy_index: 1,
dependencies: HashMap::from([(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@2.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@2.0.0").unwrap(),
)]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
+ pkg_id: NpmPackageId::from_serialized(
"package-b@1.0.0_package-peer@2.0.0"
)
.unwrap(),
@@ -1914,11 +2302,11 @@ mod test {
dependencies: HashMap::from([
(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@2.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@2.0.0").unwrap(),
),
(
"package-a".to_string(),
- NpmPackageNodeId::from_serialized(
+ NpmPackageId::from_serialized(
"package-a@1.0.0_package-peer@2.0.0"
)
.unwrap(),
@@ -1926,13 +2314,13 @@ mod test {
]),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-peer@1.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-peer@1.0.0").unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: HashMap::new(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-peer@2.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-peer@2.0.0").unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: HashMap::new(),
@@ -1942,7 +2330,10 @@ mod test {
assert_eq!(
package_reqs,
vec![
- ("package-a@1".to_string(), "package-a@1.0.0".to_string()),
+ (
+ "package-a@1".to_string(),
+ "package-a@1.0.0_package-peer@1.0.0".to_string()
+ ),
(
"package-b@1".to_string(),
"package-b@1.0.0_package-peer@2.0.0".to_string()
@@ -1954,10 +2345,61 @@ mod test {
]
);
}
+ #[tokio::test]
+ async fn resolve_peer_dep_other_specifier_slot() {
+ let api = TestNpmRegistryApiInner::default();
+ api.ensure_package_version("package-a", "1.0.0");
+ api.ensure_package_version("package-peer", "2.0.0");
+ // A bit of an edge case... probably nobody has ever done this
+ api.add_dependency(
+ ("package-a", "1.0.0"),
+ ("package-peer2", "npm:package-peer@2"),
+ );
+ api.add_peer_dependency(("package-a", "1.0.0"), ("package-peer", "2"));
+
+ let (packages, package_reqs) =
+ run_resolver_and_get_output(api, vec!["npm:package-a@1"]).await;
+ assert_eq!(
+ packages,
+ vec![
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer@2.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([
+ (
+ "package-peer".to_string(),
+ NpmPackageId::from_serialized("package-peer@2.0.0").unwrap(),
+ ),
+ (
+ "package-peer2".to_string(),
+ NpmPackageId::from_serialized("package-peer@2.0.0").unwrap(),
+ ),
+ ]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized("package-peer@2.0.0").unwrap(),
+ copy_index: 0,
+ dist: Default::default(),
+ dependencies: Default::default(),
+ },
+ ]
+ );
+ assert_eq!(
+ package_reqs,
+ vec![(
+ "package-a@1".to_string(),
+ "package-a@1.0.0_package-peer@2.0.0".to_string()
+ ),]
+ );
+ }
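The `resolve_peer_dep_other_specifier_slot` test above covers an aliased dependency: `package-peer2` is declared through an `npm:package-peer@2` specifier, so two different dependency keys resolve to the same id. A rough sketch of the parsing side, reusing the `deno_graph` types already referenced elsewhere in this diff (the test name below is made up, not part of the change):

```rust
use deno_graph::npm::NpmPackageReqReference;

#[test]
fn aliased_specifier_targets_real_package_name() {
  // An aliased dependency such as `"package-peer2": "npm:package-peer@2"`
  // still parses to a requirement on the real package name; the alias only
  // changes the key it is stored under in the dependencies map.
  let req = NpmPackageReqReference::from_str("npm:package-peer@2")
    .unwrap()
    .req;
  assert_eq!(req.name, "package-peer");
}
```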
#[tokio::test]
async fn resolve_nested_peer_deps_auto_resolved() {
- let api = TestNpmRegistryApi::default();
+ let api = TestNpmRegistryApiInner::default();
api.ensure_package_version("package-0", "1.0.0");
api.ensure_package_version("package-peer-a", "2.0.0");
api.ensure_package_version("package-peer-b", "3.0.0");
@@ -1973,26 +2415,34 @@ mod test {
packages,
vec![
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-0@1.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized(
+ "package-0@1.0.0_package-peer-a@2.0.0__package-peer-b@3.0.0"
+ )
+ .unwrap(),
copy_index: 0,
dependencies: HashMap::from([(
"package-peer-a".to_string(),
- NpmPackageNodeId::from_serialized("package-peer-a@2.0.0").unwrap(),
+ NpmPackageId::from_serialized(
+ "package-peer-a@2.0.0_package-peer-b@3.0.0"
+ )
+ .unwrap(),
)]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-peer-a@2.0.0")
- .unwrap(),
+ pkg_id: NpmPackageId::from_serialized(
+ "package-peer-a@2.0.0_package-peer-b@3.0.0"
+ )
+ .unwrap(),
copy_index: 0,
dependencies: HashMap::from([(
"package-peer-b".to_string(),
- NpmPackageNodeId::from_serialized("package-peer-b@3.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer-b@3.0.0").unwrap(),
)]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-peer-b@3.0.0")
+ pkg_id: NpmPackageId::from_serialized("package-peer-b@3.0.0")
.unwrap(),
copy_index: 0,
dependencies: HashMap::new(),
@@ -2002,13 +2452,17 @@ mod test {
);
assert_eq!(
package_reqs,
- vec![("package-0@1.0".to_string(), "package-0@1.0.0".to_string())]
+ vec![(
+ "package-0@1.0".to_string(),
+ "package-0@1.0.0_package-peer-a@2.0.0__package-peer-b@3.0.0"
+ .to_string()
+ )]
);
}
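The expected ids in this test spell out the whole peer resolution: a single `_` separates a package from its own peers, and each extra underscore (`__`, `___`, ...) steps one level deeper into a peer's peers. A small round-trip sketch, assuming only the `NpmPackageId` API shown later in this diff:

```rust
#[test]
fn nested_peer_id_round_trips() {
  // package-0 has one peer (package-peer-a), which itself has one peer
  // (package-peer-b); the double underscore marks the deeper level.
  let serialized =
    "package-0@1.0.0_package-peer-a@2.0.0__package-peer-b@3.0.0";
  let id = NpmPackageId::from_serialized(serialized).unwrap();
  assert_eq!(id.nv.name, "package-0");
  assert_eq!(id.peer_dependencies.len(), 1);
  assert_eq!(id.peer_dependencies[0].peer_dependencies.len(), 1);
  assert_eq!(id.as_serialized(), serialized);
}
```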
#[tokio::test]
async fn resolve_nested_peer_deps_ancestor_sibling_deps() {
- let api = TestNpmRegistryApi::default();
+ let api = TestNpmRegistryApiInner::default();
api.ensure_package_version("package-0", "1.0.0");
api.ensure_package_version("package-peer-a", "2.0.0");
api.ensure_package_version("package-peer-b", "3.0.0");
@@ -2032,41 +2486,42 @@ mod test {
packages,
vec![
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
- "package-0@1.0.0_package-peer-a@2.0.0_package-peer-b@3.0.0"
+ pkg_id: NpmPackageId::from_serialized(
+ "package-0@1.0.0_package-peer-a@2.0.0__package-peer-b@3.0.0_package-peer-b@3.0.0"
)
.unwrap(),
copy_index: 0,
dependencies: HashMap::from([
(
"package-peer-a".to_string(),
- NpmPackageNodeId::from_serialized(
+ NpmPackageId::from_serialized(
"package-peer-a@2.0.0_package-peer-b@3.0.0"
)
.unwrap(),
),
(
"package-peer-b".to_string(),
- NpmPackageNodeId::from_serialized("package-peer-b@3.0.0")
+ NpmPackageId::from_serialized("package-peer-b@3.0.0")
.unwrap(),
)
]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
+ pkg_id: NpmPackageId::from_serialized(
"package-peer-a@2.0.0_package-peer-b@3.0.0"
)
.unwrap(),
copy_index: 0,
dependencies: HashMap::from([(
"package-peer-b".to_string(),
- NpmPackageNodeId::from_serialized("package-peer-b@3.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer-b@3.0.0")
+ .unwrap(),
)]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-peer-b@3.0.0")
+ pkg_id: NpmPackageId::from_serialized("package-peer-b@3.0.0")
.unwrap(),
copy_index: 0,
dependencies: HashMap::new(),
@@ -2079,7 +2534,7 @@ mod test {
vec![
(
"package-0@1.0".to_string(),
- "package-0@1.0.0_package-peer-a@2.0.0_package-peer-b@3.0.0"
+ "package-0@1.0.0_package-peer-a@2.0.0__package-peer-b@3.0.0_package-peer-b@3.0.0"
.to_string()
),
(
@@ -2096,7 +2551,7 @@ mod test {
#[tokio::test]
async fn resolve_with_peer_deps_multiple() {
- let api = TestNpmRegistryApi::default();
+ let api = TestNpmRegistryApiInner::default();
api.ensure_package_version("package-0", "1.1.1");
api.ensure_package_version("package-a", "1.0.0");
api.ensure_package_version("package-b", "2.0.0");
@@ -2116,7 +2571,7 @@ mod test {
api.add_peer_dependency(("package-b", "2.0.0"), ("package-peer-a", "4"));
api.add_peer_dependency(
("package-b", "2.0.0"),
- ("package-peer-c", "=6.2.0"),
+ ("package-peer-c", "=6.2.0"), // will be auto-resolved
);
api.add_peer_dependency(("package-c", "3.0.0"), ("package-peer-a", "*"));
api.add_peer_dependency(
@@ -2133,53 +2588,56 @@ mod test {
packages,
vec![
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-0@1.1.1").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-0@1.1.1")
+ .unwrap(),
copy_index: 0,
dependencies: HashMap::from([(
"package-a".to_string(),
- NpmPackageNodeId::from_serialized(
- "package-a@1.0.0_package-peer-a@4.0.0"
+ NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer-a@4.0.0__package-peer-b@5.4.1"
)
.unwrap(),
),]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
- "package-a@1.0.0_package-peer-a@4.0.0"
+ pkg_id: NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer-a@4.0.0__package-peer-b@5.4.1"
)
.unwrap(),
copy_index: 0,
dependencies: HashMap::from([
(
"package-b".to_string(),
- NpmPackageNodeId::from_serialized(
- "package-b@2.0.0_package-peer-a@4.0.0"
+ NpmPackageId::from_serialized(
+ "package-b@2.0.0_package-peer-a@4.0.0__package-peer-b@5.4.1_package-peer-c@6.2.0"
)
.unwrap(),
),
(
"package-c".to_string(),
- NpmPackageNodeId::from_serialized(
- "package-c@3.0.0_package-peer-a@4.0.0"
+ NpmPackageId::from_serialized(
+ "package-c@3.0.0_package-peer-a@4.0.0__package-peer-b@5.4.1"
)
.unwrap(),
),
(
"package-d".to_string(),
- NpmPackageNodeId::from_serialized("package-d@3.5.0").unwrap(),
+ NpmPackageId::from_serialized("package-d@3.5.0").unwrap(),
),
(
"package-peer-a".to_string(),
- NpmPackageNodeId::from_serialized("package-peer-a@4.0.0")
- .unwrap(),
+ NpmPackageId::from_serialized(
+ "package-peer-a@4.0.0_package-peer-b@5.4.1"
+ )
+ .unwrap(),
),
]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
- "package-b@2.0.0_package-peer-a@4.0.0"
+ pkg_id: NpmPackageId::from_serialized(
+ "package-b@2.0.0_package-peer-a@4.0.0__package-peer-b@5.4.1_package-peer-c@6.2.0"
)
.unwrap(),
copy_index: 0,
@@ -2187,59 +2645,63 @@ mod test {
dependencies: HashMap::from([
(
"package-peer-a".to_string(),
- NpmPackageNodeId::from_serialized("package-peer-a@4.0.0")
+ NpmPackageId::from_serialized("package-peer-a@4.0.0_package-peer-b@5.4.1")
.unwrap(),
),
(
"package-peer-c".to_string(),
- NpmPackageNodeId::from_serialized("package-peer-c@6.2.0")
+ NpmPackageId::from_serialized("package-peer-c@6.2.0")
.unwrap(),
)
])
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
- "package-c@3.0.0_package-peer-a@4.0.0"
+ pkg_id: NpmPackageId::from_serialized(
+ "package-c@3.0.0_package-peer-a@4.0.0__package-peer-b@5.4.1"
)
.unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: HashMap::from([(
"package-peer-a".to_string(),
- NpmPackageNodeId::from_serialized("package-peer-a@4.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer-a@4.0.0_package-peer-b@5.4.1")
+ .unwrap(),
)])
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-d@3.5.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-d@3.5.0")
+ .unwrap(),
copy_index: 0,
dependencies: HashMap::from([]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-e@3.6.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-e@3.6.0")
+ .unwrap(),
copy_index: 0,
dependencies: HashMap::from([]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-peer-a@4.0.0")
+ pkg_id: NpmPackageId::from_serialized("package-peer-a@4.0.0_package-peer-b@5.4.1")
.unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: HashMap::from([(
"package-peer-b".to_string(),
- NpmPackageNodeId::from_serialized("package-peer-b@5.4.1").unwrap(),
+ NpmPackageId::from_serialized("package-peer-b@5.4.1")
+ .unwrap(),
)])
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-peer-b@5.4.1")
+ pkg_id: NpmPackageId::from_serialized("package-peer-b@5.4.1")
.unwrap(),
copy_index: 0,
dist: Default::default(),
dependencies: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-peer-c@6.2.0")
+ pkg_id: NpmPackageId::from_serialized("package-peer-c@6.2.0")
.unwrap(),
copy_index: 0,
dist: Default::default(),
@@ -2258,7 +2720,7 @@ mod test {
#[tokio::test]
async fn resolve_peer_deps_circular() {
- let api = TestNpmRegistryApi::default();
+ let api = TestNpmRegistryApiInner::default();
api.ensure_package_version("package-a", "1.0.0");
api.ensure_package_version("package-b", "2.0.0");
api.add_dependency(("package-a", "1.0.0"), ("package-b", "*"));
@@ -2270,26 +2732,24 @@ mod test {
packages,
vec![
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-a@1.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-a@1.0.0").unwrap(),
copy_index: 0,
dependencies: HashMap::from([(
"package-b".to_string(),
- NpmPackageNodeId::from_serialized(
- "package-b@2.0.0_package-a@1.0.0"
- )
- .unwrap(),
+ NpmPackageId::from_serialized("package-b@2.0.0_package-a@1.0.0")
+ .unwrap(),
)]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
+ pkg_id: NpmPackageId::from_serialized(
"package-b@2.0.0_package-a@1.0.0"
)
.unwrap(),
copy_index: 0,
dependencies: HashMap::from([(
"package-a".to_string(),
- NpmPackageNodeId::from_serialized("package-a@1.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-a@1.0.0").unwrap(),
)]),
dist: Default::default(),
},
@@ -2305,7 +2765,7 @@ mod test {
async fn resolve_peer_deps_multiple_copies() {
// Repeat this a few times to increase the probability of surfacing any non-determinism
for _ in 0..3 {
- let api = TestNpmRegistryApi::default();
+ let api = TestNpmRegistryApiInner::default();
api.ensure_package_version("package-a", "1.0.0");
api.ensure_package_version("package-b", "2.0.0");
api.ensure_package_version("package-dep", "3.0.0");
@@ -2326,7 +2786,7 @@ mod test {
packages,
vec![
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
+ pkg_id: NpmPackageId::from_serialized(
"package-a@1.0.0_package-peer@4.0.0"
)
.unwrap(),
@@ -2334,21 +2794,20 @@ mod test {
dependencies: HashMap::from([
(
"package-dep".to_string(),
- NpmPackageNodeId::from_serialized(
+ NpmPackageId::from_serialized(
"package-dep@3.0.0_package-peer@4.0.0"
)
.unwrap(),
),
(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@4.0.0")
- .unwrap(),
+ NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
),
]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
+ pkg_id: NpmPackageId::from_serialized(
"package-b@2.0.0_package-peer@5.0.0"
)
.unwrap(),
@@ -2356,52 +2815,51 @@ mod test {
dependencies: HashMap::from([
(
"package-dep".to_string(),
- NpmPackageNodeId::from_serialized(
+ NpmPackageId::from_serialized(
"package-dep@3.0.0_package-peer@5.0.0"
)
.unwrap(),
),
(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@5.0.0")
- .unwrap(),
+ NpmPackageId::from_serialized("package-peer@5.0.0").unwrap(),
),
]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
+ pkg_id: NpmPackageId::from_serialized(
"package-dep@3.0.0_package-peer@4.0.0"
)
.unwrap(),
copy_index: 0,
dependencies: HashMap::from([(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@4.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@4.0.0").unwrap(),
)]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
+ pkg_id: NpmPackageId::from_serialized(
"package-dep@3.0.0_package-peer@5.0.0"
)
.unwrap(),
copy_index: 1,
dependencies: HashMap::from([(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@5.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@5.0.0").unwrap(),
)]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-peer@4.0.0")
+ pkg_id: NpmPackageId::from_serialized("package-peer@4.0.0")
.unwrap(),
copy_index: 0,
dependencies: HashMap::new(),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-peer@5.0.0")
+ pkg_id: NpmPackageId::from_serialized("package-peer@5.0.0")
.unwrap(),
copy_index: 0,
dependencies: HashMap::new(),
@@ -2427,7 +2885,7 @@ mod test {
#[tokio::test]
async fn resolve_dep_with_peer_deps_dep_then_peer() {
- let api = TestNpmRegistryApi::default();
+ let api = TestNpmRegistryApiInner::default();
api.ensure_package_version("package-a", "1.0.0");
api.ensure_package_version("package-b", "1.0.0");
api.ensure_package_version("package-c", "1.0.0");
@@ -2446,49 +2904,53 @@ mod test {
packages,
vec![
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
- "package-a@1.0.0_package-b@1.0.0"
+ pkg_id: NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-b@1.0.0__package-peer@1.0.0"
)
.unwrap(),
copy_index: 0,
dependencies: HashMap::from([
(
"package-c".to_string(),
- NpmPackageNodeId::from_serialized(
- "package-c@1.0.0_package-b@1.0.0"
+ NpmPackageId::from_serialized(
+ "package-c@1.0.0_package-b@1.0.0__package-peer@1.0.0"
)
.unwrap(),
),
(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@1.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@1.0.0").unwrap(),
)
]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-b@1.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized(
+ "package-b@1.0.0_package-peer@1.0.0"
+ )
+ .unwrap(),
copy_index: 0,
dependencies: HashMap::from([(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@1.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@1.0.0").unwrap(),
)]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
- "package-c@1.0.0_package-b@1.0.0"
+ pkg_id: NpmPackageId::from_serialized(
+ "package-c@1.0.0_package-b@1.0.0__package-peer@1.0.0"
)
.unwrap(),
copy_index: 0,
dependencies: HashMap::from([(
"package-b".to_string(),
- NpmPackageNodeId::from_serialized("package-b@1.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-b@1.0.0_package-peer@1.0.0")
+ .unwrap(),
)]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-peer@1.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-peer@1.0.0").unwrap(),
copy_index: 0,
dependencies: HashMap::from([]),
dist: Default::default(),
@@ -2500,25 +2962,28 @@ mod test {
vec![
(
"package-a@1.0".to_string(),
- "package-a@1.0.0_package-b@1.0.0".to_string()
+ "package-a@1.0.0_package-b@1.0.0__package-peer@1.0.0".to_string()
),
- ("package-b@1.0".to_string(), "package-b@1.0.0".to_string())
+ (
+ "package-b@1.0".to_string(),
+ "package-b@1.0.0_package-peer@1.0.0".to_string()
+ )
]
);
}
#[tokio::test]
- async fn resolve_dep_with_peer_deps_dep_then_different_peer() {
- let api = TestNpmRegistryApi::default();
+ async fn resolve_dep_with_peer_deps_then_other_dep_with_different_peer() {
+ let api = TestNpmRegistryApiInner::default();
api.ensure_package_version("package-a", "1.0.0");
api.ensure_package_version("package-b", "1.0.0");
api.ensure_package_version("package-c", "1.0.0");
api.ensure_package_version("package-peer", "1.1.0");
api.ensure_package_version("package-peer", "1.2.0");
- api.add_peer_dependency(("package-a", "1.0.0"), ("package-peer", "*")); // should select 1.2.0
+ api.add_peer_dependency(("package-a", "1.0.0"), ("package-peer", "*")); // should select 1.2.0, then 1.1.0
api.add_dependency(("package-b", "1.0.0"), ("package-c", "1"));
api.add_dependency(("package-b", "1.0.0"), ("package-peer", "=1.1.0"));
- api.add_peer_dependency(("package-c", "1.0.0"), ("package-a", "1"));
+ api.add_dependency(("package-c", "1.0.0"), ("package-a", "1"));
let (packages, package_reqs) = run_resolver_and_get_output(
api,
@@ -2529,70 +2994,71 @@ mod test {
packages,
vec![
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-a@1.0.0").unwrap(),
- copy_index: 0,
+ pkg_id: NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer@1.1.0"
+ )
+ .unwrap(),
+ copy_index: 1,
dependencies: HashMap::from([(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@1.2.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@1.1.0").unwrap(),
)]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
- "package-a@1.0.0_package-peer@1.1.0"
+ pkg_id: NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer@1.2.0"
)
.unwrap(),
- copy_index: 1,
+ copy_index: 0,
dependencies: HashMap::from([(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@1.1.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@1.2.0").unwrap(),
)]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
- "package-b@1.0.0_package-a@1.0.0_package-peer@1.1.0"
+ pkg_id: NpmPackageId::from_serialized(
+ "package-b@1.0.0_package-peer@1.1.0"
)
.unwrap(),
copy_index: 0,
dependencies: HashMap::from([
(
"package-c".to_string(),
- NpmPackageNodeId::from_serialized(
- "package-c@1.0.0_package-a@1.0.0_package-peer@1.1.0"
+ NpmPackageId::from_serialized(
+ "package-c@1.0.0_package-peer@1.1.0"
)
.unwrap(),
),
(
"package-peer".to_string(),
- NpmPackageNodeId::from_serialized("package-peer@1.1.0").unwrap(),
+ NpmPackageId::from_serialized("package-peer@1.1.0").unwrap(),
)
]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
- "package-c@1.0.0_package-a@1.0.0_package-peer@1.1.0"
+ pkg_id: NpmPackageId::from_serialized(
+ "package-c@1.0.0_package-peer@1.1.0"
)
.unwrap(),
copy_index: 0,
dependencies: HashMap::from([(
"package-a".to_string(),
- NpmPackageNodeId::from_serialized(
- "package-a@1.0.0_package-peer@1.1.0"
- )
- .unwrap(),
+ NpmPackageId::from_serialized("package-a@1.0.0_package-peer@1.1.0")
+ .unwrap(),
)]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-peer@1.1.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-peer@1.1.0").unwrap(),
copy_index: 0,
dependencies: HashMap::from([]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-peer@1.2.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-peer@1.2.0").unwrap(),
copy_index: 0,
dependencies: HashMap::from([]),
dist: Default::default(),
@@ -2602,10 +3068,13 @@ mod test {
assert_eq!(
package_reqs,
vec![
- ("package-a@1.0".to_string(), "package-a@1.0.0".to_string()),
+ (
+ "package-a@1.0".to_string(),
+ "package-a@1.0.0_package-peer@1.2.0".to_string()
+ ),
(
"package-b@1.0".to_string(),
- "package-b@1.0.0_package-a@1.0.0_package-peer@1.1.0".to_string()
+ "package-b@1.0.0_package-peer@1.1.0".to_string()
)
]
);
@@ -2613,7 +3082,7 @@ mod test {
#[tokio::test]
async fn resolve_dep_and_peer_dist_tag() {
- let api = TestNpmRegistryApi::default();
+ let api = TestNpmRegistryApiInner::default();
api.ensure_package_version("package-a", "1.0.0");
api.ensure_package_version("package-b", "2.0.0");
api.ensure_package_version("package-b", "3.0.0");
@@ -2635,7 +3104,7 @@ mod test {
packages,
vec![
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
+ pkg_id: NpmPackageId::from_serialized(
"package-a@1.0.0_package-d@1.0.0"
)
.unwrap(),
@@ -2643,56 +3112,54 @@ mod test {
dependencies: HashMap::from([
(
"package-b".to_string(),
- NpmPackageNodeId::from_serialized("package-b@2.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-b@2.0.0").unwrap(),
),
(
"package-c".to_string(),
- NpmPackageNodeId::from_serialized(
- "package-c@1.0.0_package-d@1.0.0"
- )
- .unwrap(),
+ NpmPackageId::from_serialized("package-c@1.0.0_package-d@1.0.0")
+ .unwrap(),
),
(
"package-d".to_string(),
- NpmPackageNodeId::from_serialized("package-d@1.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-d@1.0.0").unwrap(),
),
(
"package-e".to_string(),
- NpmPackageNodeId::from_serialized("package-e@1.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-e@1.0.0").unwrap(),
),
]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-b@2.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-b@2.0.0").unwrap(),
copy_index: 0,
dependencies: HashMap::new(),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized(
+ pkg_id: NpmPackageId::from_serialized(
"package-c@1.0.0_package-d@1.0.0"
)
.unwrap(),
copy_index: 0,
dependencies: HashMap::from([(
"package-d".to_string(),
- NpmPackageNodeId::from_serialized("package-d@1.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-d@1.0.0").unwrap(),
),]),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-d@1.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-d@1.0.0").unwrap(),
copy_index: 0,
dependencies: HashMap::new(),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-e@1.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-e@1.0.0").unwrap(),
copy_index: 0,
dependencies: HashMap::from([(
"package-b".to_string(),
- NpmPackageNodeId::from_serialized("package-b@2.0.0").unwrap(),
+ NpmPackageId::from_serialized("package-b@2.0.0").unwrap(),
)]),
dist: Default::default(),
},
@@ -2709,7 +3176,7 @@ mod test {
#[tokio::test]
async fn package_has_self_as_dependency() {
- let api = TestNpmRegistryApi::default();
+ let api = TestNpmRegistryApiInner::default();
api.ensure_package_version("package-a", "1.0.0");
api.add_dependency(("package-a", "1.0.0"), ("package-a", "1"));
@@ -2718,7 +3185,7 @@ mod test {
assert_eq!(
packages,
vec![NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-a@1.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-a@1.0.0").unwrap(),
copy_index: 0,
// in this case, we just ignore that the package did this
dependencies: Default::default(),
@@ -2733,7 +3200,7 @@ mod test {
#[tokio::test]
async fn package_has_self_but_different_version_as_dependency() {
- let api = TestNpmRegistryApi::default();
+ let api = TestNpmRegistryApiInner::default();
api.ensure_package_version("package-a", "1.0.0");
api.ensure_package_version("package-a", "0.5.0");
api.add_dependency(("package-a", "1.0.0"), ("package-a", "^0.5"));
@@ -2744,17 +3211,17 @@ mod test {
packages,
vec![
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-a@0.5.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-a@0.5.0").unwrap(),
copy_index: 0,
dependencies: Default::default(),
dist: Default::default(),
},
NpmResolutionPackage {
- id: NpmPackageNodeId::from_serialized("package-a@1.0.0").unwrap(),
+ pkg_id: NpmPackageId::from_serialized("package-a@1.0.0").unwrap(),
copy_index: 0,
dependencies: HashMap::from([(
"package-a".to_string(),
- NpmPackageNodeId::from_serialized("package-a@0.5.0").unwrap(),
+ NpmPackageId::from_serialized("package-a@0.5.0").unwrap(),
)]),
dist: Default::default(),
},
@@ -2766,15 +3233,350 @@ mod test {
);
}
+ #[tokio::test]
+ async fn grand_child_package_has_self_as_peer_dependency_root() {
+ let api = TestNpmRegistryApiInner::default();
+ api.ensure_package_version("package-a", "1.0.0");
+ api.ensure_package_version("package-b", "2.0.0");
+ api.add_dependency(("package-a", "1.0.0"), ("package-b", "2"));
+ api.add_peer_dependency(("package-b", "2.0.0"), ("package-a", "*"));
+
+ let (packages, package_reqs) =
+ run_resolver_and_get_output(api, vec!["npm:package-a@1.0"]).await;
+ assert_eq!(
+ packages,
+ vec![
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized("package-a@1.0.0").unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-b".to_string(),
+ NpmPackageId::from_serialized("package-b@2.0.0_package-a@1.0.0")
+ .unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized(
+ "package-b@2.0.0_package-a@1.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-a".to_string(),
+ NpmPackageId::from_serialized("package-a@1.0.0").unwrap(),
+ )]),
+ dist: Default::default(),
+ }
+ ]
+ );
+ assert_eq!(
+ package_reqs,
+ vec![("package-a@1.0".to_string(), "package-a@1.0.0".to_string())]
+ );
+ }
+
+ #[tokio::test]
+ async fn grand_child_package_has_self_as_peer_dependency_under_root() {
+ let api = TestNpmRegistryApiInner::default();
+ api.ensure_package_version("package-0", "1.0.0");
+ api.ensure_package_version("package-a", "1.0.0");
+ api.ensure_package_version("package-b", "2.0.0");
+ api.add_dependency(("package-0", "1.0.0"), ("package-a", "*"));
+ api.add_dependency(("package-a", "1.0.0"), ("package-b", "2"));
+ api.add_peer_dependency(("package-b", "2.0.0"), ("package-a", "*"));
+
+ let (packages, package_reqs) =
+ run_resolver_and_get_output(api, vec!["npm:package-0@1.0"]).await;
+ assert_eq!(
+ packages,
+ vec![
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized("package-0@1.0.0").unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-a".to_string(),
+ NpmPackageId::from_serialized("package-a@1.0.0").unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized("package-a@1.0.0").unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-b".to_string(),
+ NpmPackageId::from_serialized("package-b@2.0.0_package-a@1.0.0")
+ .unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized(
+ "package-b@2.0.0_package-a@1.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-a".to_string(),
+ NpmPackageId::from_serialized("package-a@1.0.0").unwrap(),
+ )]),
+ dist: Default::default(),
+ }
+ ]
+ );
+ assert_eq!(
+ package_reqs,
+ vec![("package-0@1.0".to_string(), "package-0@1.0.0".to_string())]
+ );
+ }
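Both self-referential peer tests rely on the same property: when a peer dependency resolves to an ancestor of the requesting package, the recorded dependency is the ancestor's plain id rather than a further peer-suffixed one. A minimal sketch of that relationship (illustrative only):

```rust
#[test]
fn ancestor_peer_recorded_as_plain_id() {
  let parent = NpmPackageId::from_serialized("package-a@1.0.0").unwrap();
  let child =
    NpmPackageId::from_serialized("package-b@2.0.0_package-a@1.0.0").unwrap();
  // package-b's only peer is the bare package-a id, exactly as the
  // dependencies maps in the expectations above record it.
  assert_eq!(child.peer_dependencies, vec![parent]);
}
```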
+
+ #[tokio::test]
+ async fn nested_deps_same_peer_dep_ancestor() {
+ let api = TestNpmRegistryApiInner::default();
+ api.ensure_package_version("package-0", "1.0.0");
+ api.ensure_package_version("package-1", "1.0.0");
+ api.ensure_package_version("package-a", "1.0.0");
+ api.ensure_package_version("package-b", "1.0.0");
+ api.ensure_package_version("package-c", "1.0.0");
+ api.ensure_package_version("package-d", "1.0.0");
+ api.add_dependency(("package-0", "1.0.0"), ("package-a", "1"));
+ api.add_dependency(("package-0", "1.0.0"), ("package-1", "1"));
+ api.add_dependency(("package-1", "1.0.0"), ("package-a", "1"));
+ api.add_dependency(("package-a", "1.0.0"), ("package-b", "1"));
+ api.add_dependency(("package-b", "1.0.0"), ("package-c", "1"));
+ api.add_dependency(("package-c", "1.0.0"), ("package-d", "1"));
+ api.add_peer_dependency(("package-b", "1.0.0"), ("package-a", "*"));
+ api.add_peer_dependency(("package-c", "1.0.0"), ("package-a", "*"));
+ api.add_peer_dependency(("package-d", "1.0.0"), ("package-a", "*"));
+ api.add_peer_dependency(("package-b", "1.0.0"), ("package-0", "*"));
+ api.add_peer_dependency(("package-c", "1.0.0"), ("package-0", "*"));
+ api.add_peer_dependency(("package-d", "1.0.0"), ("package-0", "*"));
+
+ let (packages, package_reqs) =
+ run_resolver_and_get_output(api, vec!["npm:package-0@1.0"]).await;
+ assert_eq!(
+ packages,
+ vec![
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized("package-0@1.0.0").unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-a".to_string(),
+ NpmPackageId::from_serialized("package-a@1.0.0_package-0@1.0.0").unwrap(),
+ ), (
+ "package-1".to_string(),
+ NpmPackageId::from_serialized("package-1@1.0.0_package-0@1.0.0").unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized("package-1@1.0.0_package-0@1.0.0").unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-a".to_string(),
+ NpmPackageId::from_serialized("package-a@1.0.0_package-0@1.0.0").unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized("package-a@1.0.0_package-0@1.0.0").unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-b".to_string(),
+ NpmPackageId::from_serialized("package-b@1.0.0_package-0@1.0.0_package-a@1.0.0__package-0@1.0.0")
+ .unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized(
+ "package-b@1.0.0_package-0@1.0.0_package-a@1.0.0__package-0@1.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([
+ (
+ "package-0".to_string(),
+ NpmPackageId::from_serialized("package-0@1.0.0").unwrap(),
+ ),
+ (
+ "package-a".to_string(),
+ NpmPackageId::from_serialized("package-a@1.0.0_package-0@1.0.0").unwrap(),
+ ),
+ (
+ "package-c".to_string(),
+ NpmPackageId::from_serialized("package-c@1.0.0_package-0@1.0.0_package-a@1.0.0__package-0@1.0.0")
+ .unwrap(),
+ )
+ ]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized(
+ "package-c@1.0.0_package-0@1.0.0_package-a@1.0.0__package-0@1.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([
+ (
+ "package-0".to_string(),
+ NpmPackageId::from_serialized("package-0@1.0.0").unwrap(),
+ ),
+ (
+ "package-a".to_string(),
+ NpmPackageId::from_serialized("package-a@1.0.0_package-0@1.0.0").unwrap(),
+ ),
+ (
+ "package-d".to_string(),
+ NpmPackageId::from_serialized("package-d@1.0.0_package-0@1.0.0_package-a@1.0.0__package-0@1.0.0")
+ .unwrap(),
+ )
+ ]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized(
+ "package-d@1.0.0_package-0@1.0.0_package-a@1.0.0__package-0@1.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([
+ (
+ "package-0".to_string(),
+ NpmPackageId::from_serialized("package-0@1.0.0").unwrap(),
+ ),
+ (
+ "package-a".to_string(),
+ NpmPackageId::from_serialized("package-a@1.0.0_package-0@1.0.0").unwrap(),
+ )
+ ]),
+ dist: Default::default(),
+ }
+ ]
+ );
+ assert_eq!(
+ package_reqs,
+ vec![("package-0@1.0".to_string(), "package-0@1.0.0".to_string())]
+ );
+ }
+
+ #[tokio::test]
+ async fn peer_dep_resolved_then_resolved_deeper() {
+ let api = TestNpmRegistryApiInner::default();
+ api.ensure_package_version("package-0", "1.0.0");
+ api.ensure_package_version("package-1", "1.0.0");
+ api.ensure_package_version("package-a", "1.0.0");
+ api.ensure_package_version("package-b", "1.0.0");
+ api.ensure_package_version("package-peer", "1.0.0");
+ api.add_dependency(("package-0", "1.0.0"), ("package-a", "1"));
+ api.add_dependency(("package-0", "1.0.0"), ("package-1", "1"));
+ api.add_dependency(("package-1", "1.0.0"), ("package-a", "1"));
+ api.add_dependency(("package-a", "1.0.0"), ("package-b", "1"));
+ api.add_peer_dependency(("package-b", "1.0.0"), ("package-peer", "*"));
+
+ let (packages, package_reqs) = run_resolver_and_get_output(
+ api,
+ vec!["npm:package-0@1.0", "npm:package-peer@1.0"],
+ )
+ .await;
+ assert_eq!(
+ packages,
+ vec![
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized(
+ "package-0@1.0.0_package-peer@1.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([
+ (
+ "package-1".to_string(),
+ NpmPackageId::from_serialized(
+ "package-1@1.0.0_package-peer@1.0.0"
+ )
+ .unwrap(),
+ ),
+ (
+ "package-a".to_string(),
+ NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer@1.0.0"
+ )
+ .unwrap(),
+ )
+ ]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized(
+ "package-1@1.0.0_package-peer@1.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-a".to_string(),
+ NpmPackageId::from_serialized("package-a@1.0.0_package-peer@1.0.0")
+ .unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized(
+ "package-a@1.0.0_package-peer@1.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-b".to_string(),
+ NpmPackageId::from_serialized("package-b@1.0.0_package-peer@1.0.0")
+ .unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized(
+ "package-b@1.0.0_package-peer@1.0.0"
+ )
+ .unwrap(),
+ copy_index: 0,
+ dependencies: HashMap::from([(
+ "package-peer".to_string(),
+ NpmPackageId::from_serialized("package-peer@1.0.0").unwrap(),
+ )]),
+ dist: Default::default(),
+ },
+ NpmResolutionPackage {
+ pkg_id: NpmPackageId::from_serialized("package-peer@1.0.0").unwrap(),
+ copy_index: 0,
+ dependencies: Default::default(),
+ dist: Default::default(),
+ }
+ ]
+ );
+ assert_eq!(
+ package_reqs,
+ vec![
+ (
+ "package-0@1.0".to_string(),
+ "package-0@1.0.0_package-peer@1.0.0".to_string()
+ ),
+ (
+ "package-peer@1.0".to_string(),
+ "package-peer@1.0.0".to_string()
+ )
+ ]
+ );
+ }
+
async fn run_resolver_and_get_output(
- api: TestNpmRegistryApi,
+ api: TestNpmRegistryApiInner,
reqs: Vec<&str>,
) -> (Vec<NpmResolutionPackage>, Vec<(String, String)>) {
let mut graph = Graph::default();
+ let api = NpmRegistryApi::new_for_test(api);
let mut resolver = GraphDependencyResolver::new(&mut graph, &api);
for req in reqs {
- let req = NpmPackageReference::from_str(req).unwrap().req;
+ let req = NpmPackageReqReference::from_str(req).unwrap().req;
resolver
.add_package_req(&req, &api.package_info(&req.name).await.unwrap())
.unwrap();
@@ -2782,14 +3584,43 @@ mod test {
resolver.resolve_pending().await.unwrap();
let snapshot = graph.into_snapshot(&api).await.unwrap();
+
+ {
+ // let new_snapshot = Graph::from_snapshot(snapshot.clone())
+ // .unwrap()
+ // .into_snapshot(&api)
+ // .await
+ // .unwrap();
+ // assert_eq!(
+ // snapshot, new_snapshot,
+ // "recreated snapshot should be the same"
+ // );
+ // // create one again from the new snapshot
+ // let new_snapshot2 = Graph::from_snapshot(new_snapshot.clone())
+ // .unwrap()
+ // .into_snapshot(&api)
+ // .await
+ // .unwrap();
+ // assert_eq!(
+ // snapshot, new_snapshot2,
+ // "second recreated snapshot should be the same"
+ // );
+ }
+
let mut packages = snapshot.all_packages();
- packages.sort_by(|a, b| a.id.cmp(&b.id));
+ packages.sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id));
let mut package_reqs = snapshot
.package_reqs
.into_iter()
- .map(|(a, b)| (a.to_string(), b.as_serialized()))
+ .map(|(a, b)| {
+ (
+ a.to_string(),
+ snapshot.root_packages.get(&b).unwrap().as_serialized(),
+ )
+ })
.collect::<Vec<_>>();
package_reqs.sort_by(|a, b| a.0.to_string().cmp(&b.0.to_string()));
+
(packages, package_reqs)
}
}
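For orientation, the smallest possible use of the rewritten test driver would look like the sketch below; `package-x` is a made-up registry entry, not part of this change:

```rust
#[tokio::test]
async fn minimal_driver_usage() {
  let api = TestNpmRegistryApiInner::default();
  api.ensure_package_version("package-x", "1.0.0"); // hypothetical package
  let (packages, package_reqs) =
    run_resolver_and_get_output(api, vec!["npm:package-x@1"]).await;
  // one resolved package and one requirement mapping, nothing else
  assert_eq!(packages.len(), 1);
  assert_eq!(
    package_reqs,
    vec![("package-x@1".to_string(), "package-x@1.0.0".to_string())]
  );
}
```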
diff --git a/cli/npm/resolution/mod.rs b/cli/npm/resolution/mod.rs
index ec6ee0dda..90bf16c43 100644
--- a/cli/npm/resolution/mod.rs
+++ b/cli/npm/resolution/mod.rs
@@ -1,11 +1,13 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+use std::cmp::Ordering;
use std::collections::HashMap;
use std::collections::HashSet;
+use deno_core::anyhow::Context;
use deno_core::error::AnyError;
-use deno_core::futures;
use deno_core::parking_lot::RwLock;
+use deno_graph::npm::NpmPackageNv;
use deno_graph::npm::NpmPackageReq;
use deno_graph::semver::Version;
use serde::Deserialize;
@@ -17,12 +19,11 @@ use crate::args::Lockfile;
use self::graph::GraphDependencyResolver;
use self::snapshot::NpmPackagesPartitioned;
-use super::cache::should_sync_download;
use super::cache::NpmPackageCacheFolderId;
use super::registry::NpmPackageVersionDistInfo;
-use super::registry::RealNpmRegistryApi;
-use super::NpmRegistryApi;
+use super::registry::NpmRegistryApi;
+mod common;
mod graph;
mod snapshot;
mod specifier;
@@ -40,25 +41,20 @@ pub struct NpmPackageNodeIdDeserializationError {
/// A resolved unique identifier for an npm package. This contains
/// the resolved name, version, and peer dependency resolution identifiers.
-#[derive(
- Debug, Clone, PartialOrd, Ord, PartialEq, Eq, Hash, Serialize, Deserialize,
-)]
-pub struct NpmPackageNodeId {
- pub name: String,
- pub version: Version,
- pub peer_dependencies: Vec<NpmPackageNodeId>,
+#[derive(Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
+pub struct NpmPackageId {
+ pub nv: NpmPackageNv,
+ pub peer_dependencies: Vec<NpmPackageId>,
}
-impl NpmPackageNodeId {
- #[allow(unused)]
- pub fn scope(&self) -> Option<&str> {
- if self.name.starts_with('@') && self.name.contains('/') {
- self.name.split('/').next()
- } else {
- None
- }
+// Custom debug implementation for more concise test output
+impl std::fmt::Debug for NpmPackageId {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{}", self.as_serialized())
}
+}
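Constructing an id by hand shows the shape change at a glance: the flat `name`/`version` fields are now grouped under `nv`. A short sketch using the same imports as the test module further down (the test name is illustrative):

```rust
use deno_graph::npm::NpmPackageNv;
use deno_graph::semver::Version;

#[test]
fn id_with_no_peers_serializes_to_name_at_version() {
  let id = NpmPackageId {
    nv: NpmPackageNv {
      name: "package-peer".to_string(),
      version: Version::parse_from_npm("4.0.0").unwrap(),
    },
    peer_dependencies: vec![],
  };
  // with no peer dependencies the serialized form is just name@version
  assert_eq!(id.as_serialized(), "package-peer@4.0.0");
}
```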
+impl NpmPackageId {
pub fn as_serialized(&self) -> String {
self.as_serialized_with_level(0)
}
@@ -68,11 +64,11 @@ impl NpmPackageNodeId {
let mut result = format!(
"{}@{}",
if level == 0 {
- self.name.to_string()
+ self.nv.name.to_string()
} else {
- self.name.replace('/', "+")
+ self.nv.name.replace('/', "+")
},
- self.version
+ self.nv.version
);
for peer in &self.peer_dependencies {
// unfortunately we can't do something like `_3` when
@@ -136,7 +132,7 @@ impl NpmPackageNodeId {
fn parse_peers_at_level<'a>(
level: usize,
- ) -> impl Fn(&'a str) -> ParseResult<'a, Vec<NpmPackageNodeId>> {
+ ) -> impl Fn(&'a str) -> ParseResult<'a, Vec<NpmPackageId>> {
move |mut input| {
let mut peers = Vec::new();
while let Ok((level_input, _)) = parse_level_at_level(level)(input) {
@@ -151,7 +147,7 @@ impl NpmPackageNodeId {
fn parse_id_at_level<'a>(
level: usize,
- ) -> impl Fn(&'a str) -> ParseResult<'a, NpmPackageNodeId> {
+ ) -> impl Fn(&'a str) -> ParseResult<'a, NpmPackageId> {
move |input| {
let (input, (name, version)) = parse_name_and_version(input)?;
let name = if level > 0 {
@@ -163,9 +159,8 @@ impl NpmPackageNodeId {
parse_peers_at_level(level + 1)(input)?;
Ok((
input,
- NpmPackageNodeId {
- name,
- version,
+ NpmPackageId {
+ nv: NpmPackageNv { name, version },
peer_dependencies,
},
))
@@ -179,17 +174,26 @@ impl NpmPackageNodeId {
}
})
}
+}
+
+impl Ord for NpmPackageId {
+ fn cmp(&self, other: &Self) -> Ordering {
+ match self.nv.cmp(&other.nv) {
+ Ordering::Equal => self.peer_dependencies.cmp(&other.peer_dependencies),
+ ordering => ordering,
+ }
+ }
+}
- pub fn display(&self) -> String {
- // Don't implement std::fmt::Display because we don't
- // want this to be used by accident in certain scenarios.
- format!("{}@{}", self.name, self.version)
+impl PartialOrd for NpmPackageId {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
}
}
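With `Ord` written by hand, ids compare by name/version first and fall back to the peer dependency list, which is what lets the tests sort `all_packages()` by `pkg_id` deterministically. A quick sketch of the resulting order:

```rust
#[test]
fn ids_order_by_nv_then_peers() {
  let mut ids = vec![
    NpmPackageId::from_serialized("package-a@1.0.0_package-peer@2.0.0")
      .unwrap(),
    NpmPackageId::from_serialized("package-a@1.0.0").unwrap(),
  ];
  ids.sort();
  // same name and version, so the id with no peers sorts first
  assert_eq!(ids[0].as_serialized(), "package-a@1.0.0");
}
```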
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct NpmResolutionPackage {
- pub id: NpmPackageNodeId,
+ pub pkg_id: NpmPackageId,
/// The peer dependency resolution can differ for the same
/// package (name and version) depending on where it is in
/// the resolution tree. This copy index indicates which
@@ -198,21 +202,20 @@ pub struct NpmResolutionPackage {
pub dist: NpmPackageVersionDistInfo,
/// Key is what the package refers to the other package as,
/// which could be different from the package name.
- pub dependencies: HashMap<String, NpmPackageNodeId>,
+ pub dependencies: HashMap<String, NpmPackageId>,
}
impl NpmResolutionPackage {
pub fn get_package_cache_folder_id(&self) -> NpmPackageCacheFolderId {
NpmPackageCacheFolderId {
- name: self.id.name.clone(),
- version: self.id.version.clone(),
+ nv: self.pkg_id.nv.clone(),
copy_index: self.copy_index,
}
}
}
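The `copy_index` only comes into play when one name and version is resolved with different peer sets; such ids share an `nv` but are distinct, so each copy gets its own cache folder id. A hedged illustration using only the fields shown in this diff:

```rust
#[test]
fn same_nv_different_peer_resolution() {
  let a =
    NpmPackageId::from_serialized("package-dep@3.0.0_package-peer@4.0.0")
      .unwrap();
  let b =
    NpmPackageId::from_serialized("package-dep@3.0.0_package-peer@5.0.0")
      .unwrap();
  assert_eq!(a.nv.name, b.nv.name); // same package name
  assert!(a.nv.version == b.nv.version); // same version
  assert_ne!(a, b); // but different peer dependency resolutions
}
```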
pub struct NpmResolution {
- api: RealNpmRegistryApi,
+ api: NpmRegistryApi,
snapshot: RwLock<NpmResolutionSnapshot>,
update_semaphore: tokio::sync::Semaphore,
}
@@ -228,7 +231,7 @@ impl std::fmt::Debug for NpmResolution {
impl NpmResolution {
pub fn new(
- api: RealNpmRegistryApi,
+ api: NpmRegistryApi,
initial_snapshot: Option<NpmResolutionSnapshot>,
) -> Self {
Self {
@@ -246,9 +249,8 @@ impl NpmResolution {
let _permit = self.update_semaphore.acquire().await?;
let snapshot = self.snapshot.read().clone();
- let snapshot = self
- .add_package_reqs_to_snapshot(package_reqs, snapshot)
- .await?;
+ let snapshot =
+ add_package_reqs_to_snapshot(&self.api, package_reqs, snapshot).await?;
*self.snapshot.write() = snapshot;
Ok(())
@@ -272,88 +274,28 @@ impl NpmResolution {
} else {
snapshot
};
- let snapshot = self
- .add_package_reqs_to_snapshot(
- package_reqs.into_iter().collect(),
- snapshot,
- )
- .await?;
+ let snapshot = add_package_reqs_to_snapshot(
+ &self.api,
+ package_reqs.into_iter().collect(),
+ snapshot,
+ )
+ .await?;
*self.snapshot.write() = snapshot;
Ok(())
}
- async fn add_package_reqs_to_snapshot(
- &self,
- package_reqs: Vec<NpmPackageReq>,
- snapshot: NpmResolutionSnapshot,
- ) -> Result<NpmResolutionSnapshot, AnyError> {
- // convert the snapshot to a traversable graph
- let mut graph = Graph::from_snapshot(snapshot);
-
- // go over the top level package names first, then down the
- // tree one level at a time through all the branches
- let mut unresolved_tasks = Vec::with_capacity(package_reqs.len());
- let mut resolving_package_names =
- HashSet::with_capacity(package_reqs.len());
- for package_req in &package_reqs {
- if graph.has_package_req(package_req) {
- // skip analyzing this package, as there's already a matching top level package
- continue;
- }
- if !resolving_package_names.insert(package_req.name.clone()) {
- continue; // already resolving
- }
-
- // cache the package info up front in parallel
- if should_sync_download() {
- // for deterministic test output
- self.api.package_info(&package_req.name).await?;
- } else {
- let api = self.api.clone();
- let package_name = package_req.name.clone();
- unresolved_tasks.push(tokio::task::spawn(async move {
- // This is ok to call because api will internally cache
- // the package information in memory.
- api.package_info(&package_name).await
- }));
- };
- }
-
- for result in futures::future::join_all(unresolved_tasks).await {
- result??; // surface the first error
- }
-
- let mut resolver = GraphDependencyResolver::new(&mut graph, &self.api);
-
- // These package_reqs should already be sorted in the order they should
- // be resolved in.
- for package_req in package_reqs {
- // avoid loading the info if this is already in the graph
- if !resolver.has_package_req(&package_req) {
- let info = self.api.package_info(&package_req.name).await?;
- resolver.add_package_req(&package_req, &info)?;
- }
- }
-
- resolver.resolve_pending().await?;
-
- let result = graph.into_snapshot(&self.api).await;
- self.api.clear_memory_cache();
- result
- }
-
pub fn resolve_package_from_id(
&self,
- id: &NpmPackageNodeId,
+ id: &NpmPackageId,
) -> Option<NpmResolutionPackage> {
self.snapshot.read().package_from_id(id).cloned()
}
pub fn resolve_package_cache_folder_id_from_id(
&self,
- id: &NpmPackageNodeId,
+ id: &NpmPackageId,
) -> Option<NpmPackageCacheFolderId> {
self
.snapshot
@@ -400,7 +342,8 @@ impl NpmResolution {
pub fn lock(&self, lockfile: &mut Lockfile) -> Result<(), AnyError> {
let snapshot = self.snapshot.read();
- for (package_req, package_id) in snapshot.package_reqs.iter() {
+ for (package_req, nv) in snapshot.package_reqs.iter() {
+ let package_id = snapshot.root_packages.get(nv).unwrap();
lockfile.insert_npm_specifier(
package_req.to_string(),
package_id.as_serialized(),
@@ -413,40 +356,106 @@ impl NpmResolution {
}
}
+async fn add_package_reqs_to_snapshot(
+ api: &NpmRegistryApi,
+ package_reqs: Vec<NpmPackageReq>,
+ snapshot: NpmResolutionSnapshot,
+) -> Result<NpmResolutionSnapshot, AnyError> {
+ if package_reqs
+ .iter()
+ .all(|req| snapshot.package_reqs.contains_key(req))
+ {
+ return Ok(snapshot); // already up to date
+ }
+
+ // convert the snapshot to a traversable graph
+ let mut graph = Graph::from_snapshot(snapshot).with_context(|| {
+ deno_core::anyhow::anyhow!(
+ "Failed creating npm state. Try recreating your lockfile."
+ )
+ })?;
+
+ // avoid loading the info if this is already in the graph
+ let package_reqs = package_reqs
+ .into_iter()
+ .filter(|r| !graph.has_package_req(r))
+ .collect::<Vec<_>>();
+
+ // go over the top level package names first, then down the tree
+ // one level at a time through all the branches
+ api
+ .cache_in_parallel(
+ package_reqs
+ .iter()
+ .map(|r| r.name.clone())
+ .collect::<Vec<_>>(),
+ )
+ .await?;
+
+ let mut resolver = GraphDependencyResolver::new(&mut graph, api);
+
+ // The package reqs should already be sorted
+ // in the order they should be resolved in.
+ for package_req in package_reqs {
+ let info = api.package_info(&package_req.name).await?;
+ resolver.add_package_req(&package_req, &info)?;
+ }
+
+ resolver.resolve_pending().await?;
+
+ let result = graph.into_snapshot(api).await;
+ api.clear_memory_cache();
+ result
+}
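The extracted free function front-loads its work: it returns early when every requirement is already tracked by the snapshot, then warms the registry cache for all names at once before resolving. The early-out condition, pulled out as a standalone helper for illustration (it would have to live inside this module because the fields are `pub(super)`):

```rust
/// Sketch only: true when there is nothing new to resolve.
fn already_satisfied(
  snapshot: &NpmResolutionSnapshot,
  package_reqs: &[NpmPackageReq],
) -> bool {
  package_reqs
    .iter()
    .all(|req| snapshot.package_reqs.contains_key(req))
}
```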
+
#[cfg(test)]
mod test {
+ use deno_graph::npm::NpmPackageNv;
use deno_graph::semver::Version;
- use super::NpmPackageNodeId;
+ use super::NpmPackageId;
#[test]
fn serialize_npm_package_id() {
- let id = NpmPackageNodeId {
- name: "pkg-a".to_string(),
- version: Version::parse_from_npm("1.2.3").unwrap(),
+ let id = NpmPackageId {
+ nv: NpmPackageNv {
+ name: "pkg-a".to_string(),
+ version: Version::parse_from_npm("1.2.3").unwrap(),
+ },
peer_dependencies: vec![
- NpmPackageNodeId {
- name: "pkg-b".to_string(),
- version: Version::parse_from_npm("3.2.1").unwrap(),
+ NpmPackageId {
+ nv: NpmPackageNv {
+ name: "pkg-b".to_string(),
+ version: Version::parse_from_npm("3.2.1").unwrap(),
+ },
peer_dependencies: vec![
- NpmPackageNodeId {
- name: "pkg-c".to_string(),
- version: Version::parse_from_npm("1.3.2").unwrap(),
+ NpmPackageId {
+ nv: NpmPackageNv {
+ name: "pkg-c".to_string(),
+ version: Version::parse_from_npm("1.3.2").unwrap(),
+ },
peer_dependencies: vec![],
},
- NpmPackageNodeId {
- name: "pkg-d".to_string(),
- version: Version::parse_from_npm("2.3.4").unwrap(),
+ NpmPackageId {
+ nv: NpmPackageNv {
+ name: "pkg-d".to_string(),
+ version: Version::parse_from_npm("2.3.4").unwrap(),
+ },
peer_dependencies: vec![],
},
],
},
- NpmPackageNodeId {
- name: "pkg-e".to_string(),
- version: Version::parse_from_npm("2.3.1").unwrap(),
- peer_dependencies: vec![NpmPackageNodeId {
- name: "pkg-f".to_string(),
+ NpmPackageId {
+ nv: NpmPackageNv {
+ name: "pkg-e".to_string(),
version: Version::parse_from_npm("2.3.1").unwrap(),
+ },
+ peer_dependencies: vec![NpmPackageId {
+ nv: NpmPackageNv {
+ name: "pkg-f".to_string(),
+ version: Version::parse_from_npm("2.3.1").unwrap(),
+ },
peer_dependencies: vec![],
}],
},
@@ -456,6 +465,6 @@ mod test {
// this shouldn't change because it's used in the lockfile
let serialized = id.as_serialized();
assert_eq!(serialized, "pkg-a@1.2.3_pkg-b@3.2.1__pkg-c@1.3.2__pkg-d@2.3.4_pkg-e@2.3.1__pkg-f@2.3.1");
- assert_eq!(NpmPackageNodeId::from_serialized(&serialized).unwrap(), id);
+ assert_eq!(NpmPackageId::from_serialized(&serialized).unwrap(), id);
}
}
diff --git a/cli/npm/resolution/snapshot.rs b/cli/npm/resolution/snapshot.rs
index c717e74a8..bdc204ce8 100644
--- a/cli/npm/resolution/snapshot.rs
+++ b/cli/npm/resolution/snapshot.rs
@@ -8,21 +8,19 @@ use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
-use deno_core::futures;
use deno_core::parking_lot::Mutex;
+use deno_graph::npm::NpmPackageNv;
use deno_graph::npm::NpmPackageReq;
use deno_graph::semver::VersionReq;
use serde::Deserialize;
use serde::Serialize;
use crate::args::Lockfile;
-use crate::npm::cache::should_sync_download;
use crate::npm::cache::NpmPackageCacheFolderId;
use crate::npm::registry::NpmPackageVersionDistInfo;
use crate::npm::registry::NpmRegistryApi;
-use crate::npm::registry::RealNpmRegistryApi;
-use super::NpmPackageNodeId;
+use super::NpmPackageId;
use super::NpmResolutionPackage;
/// Packages partitioned by if they are "copy" packages or not.
@@ -44,13 +42,17 @@ impl NpmPackagesPartitioned {
}
}
-#[derive(Debug, Clone, Default, Serialize, Deserialize)]
+#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct NpmResolutionSnapshot {
+ /// Each unique package requirement maps to a single npm package name and version.
#[serde(with = "map_to_vec")]
- pub(super) package_reqs: HashMap<NpmPackageReq, NpmPackageNodeId>,
- pub(super) packages_by_name: HashMap<String, Vec<NpmPackageNodeId>>,
+ pub(super) package_reqs: HashMap<NpmPackageReq, NpmPackageNv>,
+ /// Each root level npm package name and version maps to an exact npm package id.
#[serde(with = "map_to_vec")]
- pub(super) packages: HashMap<NpmPackageNodeId, NpmResolutionPackage>,
+ pub(super) root_packages: HashMap<NpmPackageNv, NpmPackageId>,
+ pub(super) packages_by_name: HashMap<String, Vec<NpmPackageId>>,
+ #[serde(with = "map_to_vec")]
+ pub(super) packages: HashMap<NpmPackageId, NpmResolutionPackage>,
}
// This is done so the maps with non-string keys get serialized and deserialized as vectors.
@@ -98,25 +100,24 @@ impl NpmResolutionSnapshot {
&self,
req: &NpmPackageReq,
) -> Result<&NpmResolutionPackage, AnyError> {
- match self.package_reqs.get(req) {
- Some(id) => Ok(self.packages.get(id).unwrap()),
+ match self
+ .package_reqs
+ .get(req)
+ .and_then(|nv| self.root_packages.get(nv))
+ .and_then(|id| self.packages.get(id))
+ {
+ Some(id) => Ok(id),
None => bail!("could not find npm package directory for '{}'", req),
}
}
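Resolution from a requirement is now a two-step indirection: the requirement maps to a name/version, and that name/version maps to the exact peer-resolved id. Written out as a hypothetical helper (it would have to live inside the `resolution` module since the fields are `pub(super)`), the chain is simply:

```rust
fn package_for_req<'a>(
  snapshot: &'a NpmResolutionSnapshot,
  req: &NpmPackageReq,
) -> Option<&'a NpmResolutionPackage> {
  let nv = snapshot.package_reqs.get(req)?; // req -> name/version
  let id = snapshot.root_packages.get(nv)?; // name/version -> exact id
  snapshot.packages.get(id) // exact id -> resolved package
}
```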
- pub fn top_level_packages(&self) -> Vec<NpmPackageNodeId> {
- self
- .package_reqs
- .values()
- .cloned()
- .collect::<HashSet<_>>()
- .into_iter()
- .collect::<Vec<_>>()
+ pub fn top_level_packages(&self) -> Vec<NpmPackageId> {
+ self.root_packages.values().cloned().collect::<Vec<_>>()
}
pub fn package_from_id(
&self,
- id: &NpmPackageNodeId,
+ id: &NpmPackageId,
) -> Option<&NpmResolutionPackage> {
self.packages.get(id)
}
@@ -129,13 +130,13 @@ impl NpmResolutionSnapshot {
// todo(dsherret): do we need an additional hashmap to get this quickly?
let referrer_package = self
.packages_by_name
- .get(&referrer.name)
+ .get(&referrer.nv.name)
.and_then(|packages| {
packages
.iter()
- .filter(|p| p.version == referrer.version)
- .filter_map(|id| {
- let package = self.packages.get(id)?;
+ .filter(|p| p.nv.version == referrer.nv.version)
+ .filter_map(|node_id| {
+ let package = self.packages.get(node_id)?;
if package.copy_index == referrer.copy_index {
Some(package)
} else {
@@ -153,7 +154,7 @@ impl NpmResolutionSnapshot {
return Ok(self.packages.get(id).unwrap());
}
- if referrer_package.id.name == name {
+ if referrer_package.pkg_id.nv.name == name {
return Ok(referrer_package);
}
@@ -198,19 +199,19 @@ impl NpmResolutionSnapshot {
&self,
name: &str,
version_req: &VersionReq,
- ) -> Option<NpmPackageNodeId> {
+ ) -> Option<NpmPackageId> {
// todo(dsherret): this is not exactly correct because some ids
// will be better than others due to peer dependencies
- let mut maybe_best_id: Option<&NpmPackageNodeId> = None;
- if let Some(ids) = self.packages_by_name.get(name) {
- for id in ids {
- if version_req.matches(&id.version) {
+ let mut maybe_best_id: Option<&NpmPackageId> = None;
+ if let Some(node_ids) = self.packages_by_name.get(name) {
+ for node_id in node_ids.iter() {
+ if version_req.matches(&node_id.nv.version) {
let is_best_version = maybe_best_id
.as_ref()
- .map(|best_id| best_id.version.cmp(&id.version).is_lt())
+ .map(|best_id| best_id.nv.version.cmp(&node_id.nv.version).is_lt())
.unwrap_or(true);
if is_best_version {
- maybe_best_id = Some(id);
+ maybe_best_id = Some(node_id);
}
}
}
@@ -220,11 +221,12 @@ impl NpmResolutionSnapshot {
pub async fn from_lockfile(
lockfile: Arc<Mutex<Lockfile>>,
- api: &RealNpmRegistryApi,
+ api: &NpmRegistryApi,
) -> Result<Self, AnyError> {
- let mut package_reqs: HashMap<NpmPackageReq, NpmPackageNodeId>;
- let mut packages_by_name: HashMap<String, Vec<NpmPackageNodeId>>;
- let mut packages: HashMap<NpmPackageNodeId, NpmResolutionPackage>;
+ let mut package_reqs: HashMap<NpmPackageReq, NpmPackageNv>;
+ let mut root_packages: HashMap<NpmPackageNv, NpmPackageId>;
+ let mut packages_by_name: HashMap<String, Vec<NpmPackageId>>;
+ let mut packages: HashMap<NpmPackageId, NpmResolutionPackage>;
let mut copy_index_resolver: SnapshotPackageCopyIndexResolver;
{
@@ -233,6 +235,8 @@ impl NpmResolutionSnapshot {
// pre-allocate collections
package_reqs =
HashMap::with_capacity(lockfile.content.npm.specifiers.len());
+ root_packages =
+ HashMap::with_capacity(lockfile.content.npm.specifiers.len());
let packages_len = lockfile.content.npm.packages.len();
packages = HashMap::with_capacity(packages_len);
packages_by_name = HashMap::with_capacity(packages_len); // close enough
@@ -244,31 +248,32 @@ impl NpmResolutionSnapshot {
for (key, value) in &lockfile.content.npm.specifiers {
let package_req = NpmPackageReq::from_str(key)
.with_context(|| format!("Unable to parse npm specifier: {key}"))?;
- let package_id = NpmPackageNodeId::from_serialized(value)?;
- package_reqs.insert(package_req, package_id.clone());
+ let package_id = NpmPackageId::from_serialized(value)?;
+ package_reqs.insert(package_req, package_id.nv.clone());
+ root_packages.insert(package_id.nv.clone(), package_id.clone());
verify_ids.insert(package_id.clone());
}
// then the packages
for (key, value) in &lockfile.content.npm.packages {
- let package_id = NpmPackageNodeId::from_serialized(key)?;
+ let package_id = NpmPackageId::from_serialized(key)?;
// collect the dependencies
let mut dependencies = HashMap::default();
packages_by_name
- .entry(package_id.name.to_string())
+ .entry(package_id.nv.name.to_string())
.or_default()
.push(package_id.clone());
for (name, specifier) in &value.dependencies {
- let dep_id = NpmPackageNodeId::from_serialized(specifier)?;
+ let dep_id = NpmPackageId::from_serialized(specifier)?;
dependencies.insert(name.to_string(), dep_id.clone());
verify_ids.insert(dep_id);
}
let package = NpmResolutionPackage {
- id: package_id.clone(),
+ pkg_id: package_id.clone(),
copy_index: copy_index_resolver.resolve(&package_id),
// temporary dummy value
dist: NpmPackageVersionDistInfo::default(),
@@ -288,40 +293,20 @@ impl NpmResolutionSnapshot {
}
}
- let mut unresolved_tasks = Vec::with_capacity(packages_by_name.len());
-
- // cache the package names in parallel in the registry api
- // unless synchronous download should occur
- if should_sync_download() {
- let mut package_names = packages_by_name.keys().collect::<Vec<_>>();
- package_names.sort();
- for package_name in package_names {
- api.package_info(package_name).await?;
- }
- } else {
- for package_name in packages_by_name.keys() {
- let package_name = package_name.clone();
- let api = api.clone();
- unresolved_tasks.push(tokio::task::spawn(async move {
- api.package_info(&package_name).await?;
- Result::<_, AnyError>::Ok(())
- }));
- }
- }
- for result in futures::future::join_all(unresolved_tasks).await {
- result??;
- }
+ api
+ .cache_in_parallel(packages_by_name.keys().cloned().collect())
+ .await?;
// ensure the dist is set for each package
for package in packages.values_mut() {
// this will read from the memory cache now
let version_info = match api
- .package_version_info(&package.id.name, &package.id.version)
+ .package_version_info(&package.pkg_id.nv)
.await?
{
Some(version_info) => version_info,
None => {
- bail!("could not find '{}' specified in the lockfile. Maybe try again with --reload", package.id.display());
+ bail!("could not find '{}' specified in the lockfile. Maybe try again with --reload", package.pkg_id.nv);
}
};
package.dist = version_info.dist;
@@ -329,6 +314,7 @@ impl NpmResolutionSnapshot {
Ok(Self {
package_reqs,
+ root_packages,
packages_by_name,
packages,
})
@@ -336,8 +322,8 @@ impl NpmResolutionSnapshot {
}
pub struct SnapshotPackageCopyIndexResolver {
- packages_to_copy_index: HashMap<NpmPackageNodeId, usize>,
- package_name_version_to_copy_count: HashMap<(String, String), usize>,
+ packages_to_copy_index: HashMap<NpmPackageId, usize>,
+ package_name_version_to_copy_count: HashMap<NpmPackageNv, usize>,
}
impl SnapshotPackageCopyIndexResolver {
@@ -349,7 +335,7 @@ impl SnapshotPackageCopyIndexResolver {
}
pub fn from_map_with_capacity(
- mut packages_to_copy_index: HashMap<NpmPackageNodeId, usize>,
+ mut packages_to_copy_index: HashMap<NpmPackageId, usize>,
capacity: usize,
) -> Self {
let mut package_name_version_to_copy_count =
@@ -358,9 +344,9 @@ impl SnapshotPackageCopyIndexResolver {
packages_to_copy_index.reserve(capacity - packages_to_copy_index.len());
}
- for (id, index) in &packages_to_copy_index {
+ for (node_id, index) in &packages_to_copy_index {
let entry = package_name_version_to_copy_count
- .entry((id.name.to_string(), id.version.to_string()))
+ .entry(node_id.nv.clone())
.or_insert(0);
if *entry < *index {
*entry = *index;
@@ -372,18 +358,18 @@ impl SnapshotPackageCopyIndexResolver {
}
}
- pub fn resolve(&mut self, id: &NpmPackageNodeId) -> usize {
- if let Some(index) = self.packages_to_copy_index.get(id) {
+ pub fn resolve(&mut self, node_id: &NpmPackageId) -> usize {
+ if let Some(index) = self.packages_to_copy_index.get(node_id) {
*index
} else {
let index = *self
.package_name_version_to_copy_count
- .entry((id.name.to_string(), id.version.to_string()))
+ .entry(node_id.nv.clone())
.and_modify(|count| {
*count += 1;
})
.or_insert(0);
- self.packages_to_copy_index.insert(id.clone(), index);
+ self.packages_to_copy_index.insert(node_id.clone(), index);
index
}
}
@@ -422,24 +408,24 @@ mod tests {
SnapshotPackageCopyIndexResolver::with_capacity(10);
assert_eq!(
copy_index_resolver
- .resolve(&NpmPackageNodeId::from_serialized("package@1.0.0").unwrap()),
+ .resolve(&NpmPackageId::from_serialized("package@1.0.0").unwrap()),
0
);
assert_eq!(
copy_index_resolver
- .resolve(&NpmPackageNodeId::from_serialized("package@1.0.0").unwrap()),
+ .resolve(&NpmPackageId::from_serialized("package@1.0.0").unwrap()),
0
);
assert_eq!(
copy_index_resolver.resolve(
- &NpmPackageNodeId::from_serialized("package@1.0.0_package-b@1.0.0")
+ &NpmPackageId::from_serialized("package@1.0.0_package-b@1.0.0")
.unwrap()
),
1
);
assert_eq!(
copy_index_resolver.resolve(
- &NpmPackageNodeId::from_serialized(
+ &NpmPackageId::from_serialized(
"package@1.0.0_package-b@1.0.0__package-c@2.0.0"
)
.unwrap()
@@ -448,15 +434,14 @@ mod tests {
);
assert_eq!(
copy_index_resolver.resolve(
- &NpmPackageNodeId::from_serialized("package@1.0.0_package-b@1.0.0")
+ &NpmPackageId::from_serialized("package@1.0.0_package-b@1.0.0")
.unwrap()
),
1
);
assert_eq!(
- copy_index_resolver.resolve(
- &NpmPackageNodeId::from_serialized("package-b@1.0.0").unwrap()
- ),
+ copy_index_resolver
+ .resolve(&NpmPackageId::from_serialized("package-b@1.0.0").unwrap()),
0
);
}
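
With the snapshot split above, resolving a requirement is now a chained lookup (req -> name/version via `package_reqs`, then name/version -> exact id via `root_packages`, then id -> package via `packages`), and `SnapshotPackageCopyIndexResolver` hands out one copy index per distinct id that shares the same name and version, as the tests exercise. A minimal sketch of that copy-index bookkeeping using plain string keys (hypothetical standalone types, not the cli/npm ones):

use std::collections::HashMap;

#[derive(Default)]
struct CopyIndexes {
    // serialized package id -> assigned copy index
    by_id: HashMap<String, usize>,
    // "name@version" -> highest copy index handed out so far
    count_by_nv: HashMap<String, usize>,
}

impl CopyIndexes {
    fn resolve(&mut self, id: &str, nv: &str) -> usize {
        if let Some(index) = self.by_id.get(id) {
            return *index; // stable for ids we've already seen
        }
        let index = *self
            .count_by_nv
            .entry(nv.to_string())
            .and_modify(|count| *count += 1)
            .or_insert(0);
        self.by_id.insert(id.to_string(), index);
        index
    }
}
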
diff --git a/cli/npm/resolution/specifier.rs b/cli/npm/resolution/specifier.rs
index f8b3776a3..29d65c747 100644
--- a/cli/npm/resolution/specifier.rs
+++ b/cli/npm/resolution/specifier.rs
@@ -6,8 +6,8 @@ use std::collections::HashSet;
use std::collections::VecDeque;
use deno_ast::ModuleSpecifier;
-use deno_graph::npm::NpmPackageReference;
use deno_graph::npm::NpmPackageReq;
+use deno_graph::npm::NpmPackageReqReference;
use deno_graph::ModuleGraph;
pub struct GraphNpmInfo {
@@ -113,7 +113,7 @@ pub fn resolve_graph_npm_info(graph: &ModuleGraph) -> GraphNpmInfo {
// fill this leaf's information
for specifier in &specifiers {
- if let Ok(npm_ref) = NpmPackageReference::from_specifier(specifier) {
+ if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(specifier) {
leaf.reqs.insert(npm_ref.req);
} else if !specifier.as_str().starts_with(parent_specifier.as_str()) {
leaf
@@ -165,7 +165,7 @@ pub fn resolve_graph_npm_info(graph: &ModuleGraph) -> GraphNpmInfo {
let mut result = Vec::new();
for specifier in &root_specifiers {
- match NpmPackageReference::from_specifier(specifier) {
+ match NpmPackageReqReference::from_specifier(specifier) {
Ok(npm_ref) => result.push(npm_ref.req),
Err(_) => {
pending_specifiers.push_back(get_folder_path_specifier(specifier))
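
The `NpmPackageReference` -> `NpmPackageReqReference` rename in this file (and in the files below) keeps the call sites the same: an `npm:` specifier is parsed into a package requirement plus an optional sub path. Roughly how those call sites use it (a sketch; only `from_specifier`, `req`, and `sub_path` appear in this diff, the rest is assumed):

use deno_ast::ModuleSpecifier;
use deno_graph::npm::NpmPackageReqReference;

// Returns the package requirement text for npm: specifiers, e.g.
// "npm:chalk@5/source" -> Some("chalk@5"); non-npm specifiers return None.
fn npm_req_text(specifier: &ModuleSpecifier) -> Option<String> {
    match NpmPackageReqReference::from_specifier(specifier) {
        Ok(npm_ref) => Some(npm_ref.req.to_string()),
        Err(_) => None,
    }
}
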
diff --git a/cli/npm/resolvers/common.rs b/cli/npm/resolvers/common.rs
index 69950bee9..2b02e7721 100644
--- a/cli/npm/resolvers/common.rs
+++ b/cli/npm/resolvers/common.rs
@@ -18,7 +18,7 @@ use crate::args::Lockfile;
use crate::npm::cache::should_sync_download;
use crate::npm::resolution::NpmResolutionSnapshot;
use crate::npm::NpmCache;
-use crate::npm::NpmPackageNodeId;
+use crate::npm::NpmPackageId;
use crate::npm::NpmResolutionPackage;
pub trait InnerNpmPackageResolver: Send + Sync {
@@ -39,10 +39,7 @@ pub trait InnerNpmPackageResolver: Send + Sync {
specifier: &ModuleSpecifier,
) -> Result<PathBuf, AnyError>;
- fn package_size(
- &self,
- package_id: &NpmPackageNodeId,
- ) -> Result<u64, AnyError>;
+ fn package_size(&self, package_id: &NpmPackageId) -> Result<u64, AnyError>;
fn has_packages(&self) -> bool;
@@ -79,7 +76,7 @@ pub async fn cache_packages(
if sync_download {
// we're running the tests, not with --quiet,
// and we want the output to be deterministic
- packages.sort_by(|a, b| a.id.cmp(&b.id));
+ packages.sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id));
}
let mut handles = Vec::with_capacity(packages.len());
@@ -90,7 +87,7 @@ pub async fn cache_packages(
let handle = tokio::task::spawn(async move {
cache
.ensure_package(
- (package.id.name.as_str(), &package.id.version),
+ (package.pkg_id.nv.name.as_str(), &package.pkg_id.nv.version),
&package.dist,
&registry_url,
)
diff --git a/cli/npm/resolvers/global.rs b/cli/npm/resolvers/global.rs
index c8e2c2bb0..e7bdbb1b4 100644
--- a/cli/npm/resolvers/global.rs
+++ b/cli/npm/resolvers/global.rs
@@ -22,9 +22,9 @@ use crate::npm::resolution::NpmResolution;
use crate::npm::resolution::NpmResolutionSnapshot;
use crate::npm::resolvers::common::cache_packages;
use crate::npm::NpmCache;
-use crate::npm::NpmPackageNodeId;
+use crate::npm::NpmPackageId;
+use crate::npm::NpmRegistryApi;
use crate::npm::NpmResolutionPackage;
-use crate::npm::RealNpmRegistryApi;
use super::common::ensure_registry_read_permission;
use super::common::types_package_name;
@@ -41,7 +41,7 @@ pub struct GlobalNpmPackageResolver {
impl GlobalNpmPackageResolver {
pub fn new(
cache: NpmCache,
- api: RealNpmRegistryApi,
+ api: NpmRegistryApi,
initial_snapshot: Option<NpmResolutionSnapshot>,
) -> Self {
let registry_url = api.base_url().to_owned();
@@ -54,7 +54,7 @@ impl GlobalNpmPackageResolver {
}
}
- fn package_folder(&self, id: &NpmPackageNodeId) -> PathBuf {
+ fn package_folder(&self, id: &NpmPackageId) -> PathBuf {
let folder_id = self
.resolution
.resolve_package_cache_folder_id_from_id(id)
@@ -82,7 +82,7 @@ impl InnerNpmPackageResolver for GlobalNpmPackageResolver {
pkg_req: &NpmPackageReq,
) -> Result<PathBuf, AnyError> {
let pkg = self.resolution.resolve_package_from_deno_module(pkg_req)?;
- Ok(self.package_folder(&pkg.id))
+ Ok(self.package_folder(&pkg.pkg_id))
}
fn resolve_package_folder_from_package(
@@ -107,7 +107,7 @@ impl InnerNpmPackageResolver for GlobalNpmPackageResolver {
.resolution
.resolve_package_from_package(name, &referrer_pkg_id)?
};
- Ok(self.package_folder(&pkg.id))
+ Ok(self.package_folder(&pkg.pkg_id))
}
fn resolve_package_folder_from_specifier(
@@ -125,10 +125,7 @@ impl InnerNpmPackageResolver for GlobalNpmPackageResolver {
)
}
- fn package_size(
- &self,
- package_id: &NpmPackageNodeId,
- ) -> Result<u64, AnyError> {
+ fn package_size(&self, package_id: &NpmPackageId) -> Result<u64, AnyError> {
let package_folder = self.package_folder(package_id);
Ok(crate::util::fs::dir_size(&package_folder)?)
}
diff --git a/cli/npm/resolvers/local.rs b/cli/npm/resolvers/local.rs
index 0eb2c24ca..aa6233d61 100644
--- a/cli/npm/resolvers/local.rs
+++ b/cli/npm/resolvers/local.rs
@@ -32,9 +32,9 @@ use crate::npm::cache::NpmPackageCacheFolderId;
use crate::npm::resolution::NpmResolution;
use crate::npm::resolution::NpmResolutionSnapshot;
use crate::npm::NpmCache;
-use crate::npm::NpmPackageNodeId;
+use crate::npm::NpmPackageId;
+use crate::npm::NpmRegistryApi;
use crate::npm::NpmResolutionPackage;
-use crate::npm::RealNpmRegistryApi;
use crate::util::fs::copy_dir_recursive;
use crate::util::fs::hard_link_dir_recursive;
@@ -56,7 +56,7 @@ pub struct LocalNpmPackageResolver {
impl LocalNpmPackageResolver {
pub fn new(
cache: NpmCache,
- api: RealNpmRegistryApi,
+ api: NpmRegistryApi,
node_modules_folder: PathBuf,
initial_snapshot: Option<NpmResolutionSnapshot>,
) -> Self {
@@ -112,7 +112,7 @@ impl LocalNpmPackageResolver {
fn get_package_id_folder(
&self,
- package_id: &NpmPackageNodeId,
+ package_id: &NpmPackageId,
) -> Result<PathBuf, AnyError> {
match self.resolution.resolve_package_from_id(package_id) {
Some(package) => Ok(self.get_package_id_folder_from_package(&package)),
@@ -136,7 +136,7 @@ impl LocalNpmPackageResolver {
&package.get_package_cache_folder_id(),
))
.join("node_modules")
- .join(&package.id.name)
+ .join(&package.pkg_id.nv.name)
}
}
@@ -203,10 +203,7 @@ impl InnerNpmPackageResolver for LocalNpmPackageResolver {
Ok(package_root_path)
}
- fn package_size(
- &self,
- package_id: &NpmPackageNodeId,
- ) -> Result<u64, AnyError> {
+ fn package_size(&self, package_id: &NpmPackageId) -> Result<u64, AnyError> {
let package_folder_path = self.get_package_id_folder(package_id)?;
Ok(crate::util::fs::dir_size(&package_folder_path)?)
@@ -303,7 +300,9 @@ async fn sync_resolution_with_fs(
if sync_download {
// we're running the tests, not with --quiet,
// and we want the output to be deterministic
- package_partitions.packages.sort_by(|a, b| a.id.cmp(&b.id));
+ package_partitions
+ .packages
+ .sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id));
}
let mut handles: Vec<JoinHandle<Result<(), AnyError>>> =
Vec::with_capacity(package_partitions.packages.len());
@@ -314,7 +313,7 @@ async fn sync_resolution_with_fs(
let initialized_file = folder_path.join(".initialized");
if !cache
.cache_setting()
- .should_use_for_npm_package(&package.id.name)
+ .should_use_for_npm_package(&package.pkg_id.nv.name)
|| !initialized_file.exists()
{
let cache = cache.clone();
@@ -323,19 +322,19 @@ async fn sync_resolution_with_fs(
let handle = tokio::task::spawn(async move {
cache
.ensure_package(
- (&package.id.name, &package.id.version),
+ (&package.pkg_id.nv.name, &package.pkg_id.nv.version),
&package.dist,
&registry_url,
)
.await?;
let sub_node_modules = folder_path.join("node_modules");
let package_path =
- join_package_name(&sub_node_modules, &package.id.name);
+ join_package_name(&sub_node_modules, &package.pkg_id.nv.name);
fs::create_dir_all(&package_path)
.with_context(|| format!("Creating '{}'", folder_path.display()))?;
let cache_folder = cache.package_folder_for_name_and_version(
- &package.id.name,
- &package.id.version,
+ &package.pkg_id.nv.name,
+ &package.pkg_id.nv.version,
&registry_url,
);
// for now copy, but in the future consider hard linking
@@ -365,7 +364,8 @@ async fn sync_resolution_with_fs(
let initialized_file = destination_path.join(".initialized");
if !initialized_file.exists() {
let sub_node_modules = destination_path.join("node_modules");
- let package_path = join_package_name(&sub_node_modules, &package.id.name);
+ let package_path =
+ join_package_name(&sub_node_modules, &package.pkg_id.nv.name);
fs::create_dir_all(&package_path).with_context(|| {
format!("Creating '{}'", destination_path.display())
})?;
@@ -375,7 +375,7 @@ async fn sync_resolution_with_fs(
&package_cache_folder_id.with_no_count(),
))
.join("node_modules"),
- &package.id.name,
+ &package.pkg_id.nv.name,
);
hard_link_dir_recursive(&source_path, &package_path)?;
// write out a file that indicates this folder has been initialized
@@ -406,7 +406,7 @@ async fn sync_resolution_with_fs(
&deno_local_registry_dir
.join(dep_folder_name)
.join("node_modules"),
- &dep_id.name,
+ &dep_id.nv.name,
);
symlink_package_dir(
&dep_folder_path,
@@ -428,10 +428,10 @@ async fn sync_resolution_with_fs(
.map(|id| (id, true)),
);
while let Some((package_id, is_top_level)) = pending_packages.pop_front() {
- let root_folder_name = if found_names.insert(package_id.name.clone()) {
- package_id.name.clone()
+ let root_folder_name = if found_names.insert(package_id.nv.name.clone()) {
+ package_id.nv.name.clone()
} else if is_top_level {
- package_id.display()
+ format!("{}@{}", package_id.nv.name, package_id.nv.version)
} else {
continue; // skip, already handled
};
@@ -442,7 +442,7 @@ async fn sync_resolution_with_fs(
&package.get_package_cache_folder_id(),
))
.join("node_modules"),
- &package_id.name,
+ &package_id.nv.name,
);
symlink_package_dir(
@@ -457,18 +457,21 @@ async fn sync_resolution_with_fs(
Ok(())
}
-fn get_package_folder_id_folder_name(id: &NpmPackageCacheFolderId) -> String {
- let copy_str = if id.copy_index == 0 {
+fn get_package_folder_id_folder_name(
+ folder_id: &NpmPackageCacheFolderId,
+) -> String {
+ let copy_str = if folder_id.copy_index == 0 {
"".to_string()
} else {
- format!("_{}", id.copy_index)
+ format!("_{}", folder_id.copy_index)
};
- let name = if id.name.to_lowercase() == id.name {
- Cow::Borrowed(&id.name)
+ let nv = &folder_id.nv;
+ let name = if nv.name.to_lowercase() == nv.name {
+ Cow::Borrowed(&nv.name)
} else {
- Cow::Owned(format!("_{}", mixed_case_package_name_encode(&id.name)))
+ Cow::Owned(format!("_{}", mixed_case_package_name_encode(&nv.name)))
};
- format!("{}@{}{}", name, id.version, copy_str).replace('/', "+")
+ format!("{}@{}{}", name, nv.version, copy_str).replace('/', "+")
}
fn symlink_package_dir(
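
For reference, the folder naming rewritten above produces `name@version`, plus a `_N` suffix for extra copies, with `/` replaced by `+` so scoped packages get a single directory; mixed-case names go through `mixed_case_package_name_encode`, which is elided here. A small worked sketch of the lower-case path (example values assumed):

fn folder_name(name: &str, version: &str, copy_index: usize) -> String {
    let copy_str = if copy_index == 0 {
        String::new()
    } else {
        format!("_{copy_index}")
    };
    format!("{name}@{version}{copy_str}").replace('/', "+")
}

// folder_name("@denotest/pkg", "1.0.0", 0) == "@denotest+pkg@1.0.0"
// folder_name("@denotest/pkg", "1.0.0", 2) == "@denotest+pkg@1.0.0_2"
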
diff --git a/cli/npm/resolvers/mod.rs b/cli/npm/resolvers/mod.rs
index a2638a15b..3ac373a54 100644
--- a/cli/npm/resolvers/mod.rs
+++ b/cli/npm/resolvers/mod.rs
@@ -30,9 +30,9 @@ use crate::util::fs::canonicalize_path_maybe_not_exists;
use self::common::InnerNpmPackageResolver;
use self::local::LocalNpmPackageResolver;
use super::NpmCache;
-use super::NpmPackageNodeId;
+use super::NpmPackageId;
+use super::NpmRegistryApi;
use super::NpmResolutionSnapshot;
-use super::RealNpmRegistryApi;
/// State provided to the process via an environment variable.
#[derive(Clone, Debug, Serialize, Deserialize)]
@@ -46,7 +46,7 @@ pub struct NpmPackageResolver {
no_npm: bool,
inner: Arc<dyn InnerNpmPackageResolver>,
local_node_modules_path: Option<PathBuf>,
- api: RealNpmRegistryApi,
+ api: NpmRegistryApi,
cache: NpmCache,
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
}
@@ -62,13 +62,13 @@ impl std::fmt::Debug for NpmPackageResolver {
}
impl NpmPackageResolver {
- pub fn new(cache: NpmCache, api: RealNpmRegistryApi) -> Self {
+ pub fn new(cache: NpmCache, api: NpmRegistryApi) -> Self {
Self::new_inner(cache, api, false, None, None, None)
}
pub async fn new_with_maybe_lockfile(
cache: NpmCache,
- api: RealNpmRegistryApi,
+ api: NpmRegistryApi,
no_npm: bool,
local_node_modules_path: Option<PathBuf>,
initial_snapshot: Option<NpmResolutionSnapshot>,
@@ -105,7 +105,7 @@ impl NpmPackageResolver {
fn new_inner(
cache: NpmCache,
- api: RealNpmRegistryApi,
+ api: NpmRegistryApi,
no_npm: bool,
local_node_modules_path: Option<PathBuf>,
maybe_snapshot: Option<NpmResolutionSnapshot>,
@@ -187,7 +187,7 @@ impl NpmPackageResolver {
/// Attempts to get the package size in bytes.
pub fn package_size(
&self,
- package_id: &NpmPackageNodeId,
+ package_id: &NpmPackageId,
) -> Result<u64, AnyError> {
self.inner.package_size(package_id)
}
diff --git a/cli/proc_state.rs b/cli/proc_state.rs
index b4467b6be..edd59427c 100644
--- a/cli/proc_state.rs
+++ b/cli/proc_state.rs
@@ -25,7 +25,7 @@ use crate::node::NodeResolution;
use crate::npm::resolve_graph_npm_info;
use crate::npm::NpmCache;
use crate::npm::NpmPackageResolver;
-use crate::npm::RealNpmRegistryApi;
+use crate::npm::NpmRegistryApi;
use crate::resolver::CliGraphResolver;
use crate::tools::check;
use crate::util::progress_bar::ProgressBar;
@@ -43,8 +43,8 @@ use deno_core::resolve_url_or_path;
use deno_core::CompiledWasmModuleStore;
use deno_core::ModuleSpecifier;
use deno_core::SharedArrayBufferStore;
-use deno_graph::npm::NpmPackageReference;
use deno_graph::npm::NpmPackageReq;
+use deno_graph::npm::NpmPackageReqReference;
use deno_graph::source::Loader;
use deno_graph::source::Resolver;
use deno_graph::ModuleGraph;
@@ -244,15 +244,15 @@ impl ProcState {
let emit_cache = EmitCache::new(dir.gen_cache.clone());
let parsed_source_cache =
ParsedSourceCache::new(Some(dir.dep_analysis_db_file_path()));
- let registry_url = RealNpmRegistryApi::default_url();
+ let registry_url = NpmRegistryApi::default_url();
let npm_cache = NpmCache::from_deno_dir(
&dir,
cli_options.cache_setting(),
http_client.clone(),
progress_bar.clone(),
);
- let api = RealNpmRegistryApi::new(
- registry_url,
+ let api = NpmRegistryApi::new(
+ registry_url.clone(),
npm_cache.clone(),
http_client.clone(),
progress_bar.clone(),
@@ -516,7 +516,8 @@ impl ProcState {
return node::resolve_builtin_node_module(specifier.path());
}
- if let Ok(reference) = NpmPackageReference::from_specifier(specifier)
+ if let Ok(reference) =
+ NpmPackageReqReference::from_specifier(specifier)
{
if !self.options.unstable()
&& matches!(found_referrer.scheme(), "http" | "https")
@@ -575,7 +576,9 @@ impl ProcState {
.map(Cow::Borrowed)
.or_else(|| ModuleSpecifier::parse(specifier).ok().map(Cow::Owned));
if let Some(specifier) = specifier {
- if let Ok(reference) = NpmPackageReference::from_specifier(&specifier) {
+ if let Ok(reference) =
+ NpmPackageReqReference::from_specifier(&specifier)
+ {
return self
.handle_node_resolve_result(node::node_resolve_npm_reference(
&reference,
@@ -747,7 +750,7 @@ impl GraphData {
}
"npm" => {
if !has_npm_specifier_in_graph
- && NpmPackageReference::from_specifier(specifier).is_ok()
+ && NpmPackageReqReference::from_specifier(specifier).is_ok()
{
has_npm_specifier_in_graph = true;
}
diff --git a/cli/tools/info.rs b/cli/tools/info.rs
index ffd276417..2f9b2a183 100644
--- a/cli/tools/info.rs
+++ b/cli/tools/info.rs
@@ -10,8 +10,8 @@ use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_core::serde_json;
use deno_core::serde_json::json;
-use deno_graph::npm::NpmPackageReference;
use deno_graph::npm::NpmPackageReq;
+use deno_graph::npm::NpmPackageReqReference;
use deno_graph::Dependency;
use deno_graph::Module;
use deno_graph::ModuleGraph;
@@ -22,7 +22,7 @@ use deno_runtime::colors;
use crate::args::Flags;
use crate::args::InfoFlags;
use crate::display;
-use crate::npm::NpmPackageNodeId;
+use crate::npm::NpmPackageId;
use crate::npm::NpmPackageResolver;
use crate::npm::NpmResolutionPackage;
use crate::npm::NpmResolutionSnapshot;
@@ -150,7 +150,7 @@ fn add_npm_packages_to_json(
let maybe_package = module
.get("specifier")
.and_then(|k| k.as_str())
- .and_then(|specifier| NpmPackageReference::from_str(specifier).ok())
+ .and_then(|specifier| NpmPackageReqReference::from_str(specifier).ok())
.and_then(|package_ref| {
snapshot
.resolve_package_from_deno_module(&package_ref.req)
@@ -158,8 +158,10 @@ fn add_npm_packages_to_json(
});
if let Some(pkg) = maybe_package {
if let Some(module) = module.as_object_mut() {
- module
- .insert("npmPackage".to_string(), pkg.id.as_serialized().into());
+ module.insert(
+ "npmPackage".to_string(),
+ pkg.pkg_id.as_serialized().into(),
+ );
// change the "kind" to be "npm"
module.insert("kind".to_string(), "npm".into());
}
@@ -186,13 +188,13 @@ fn add_npm_packages_to_json(
if let serde_json::Value::Object(dep) = dep {
let specifier = dep.get("specifier").and_then(|s| s.as_str());
if let Some(specifier) = specifier {
- if let Ok(npm_ref) = NpmPackageReference::from_str(specifier) {
+ if let Ok(npm_ref) = NpmPackageReqReference::from_str(specifier) {
if let Ok(pkg) =
snapshot.resolve_package_from_deno_module(&npm_ref.req)
{
dep.insert(
"npmPackage".to_string(),
- pkg.id.as_serialized().into(),
+ pkg.pkg_id.as_serialized().into(),
);
}
}
@@ -204,12 +206,15 @@ fn add_npm_packages_to_json(
}
let mut sorted_packages = snapshot.all_packages();
- sorted_packages.sort_by(|a, b| a.id.cmp(&b.id));
+ sorted_packages.sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id));
let mut json_packages = serde_json::Map::with_capacity(sorted_packages.len());
for pkg in sorted_packages {
let mut kv = serde_json::Map::new();
- kv.insert("name".to_string(), pkg.id.name.to_string().into());
- kv.insert("version".to_string(), pkg.id.version.to_string().into());
+ kv.insert("name".to_string(), pkg.pkg_id.nv.name.to_string().into());
+ kv.insert(
+ "version".to_string(),
+ pkg.pkg_id.nv.version.to_string().into(),
+ );
let mut deps = pkg.dependencies.values().collect::<Vec<_>>();
deps.sort();
let deps = deps
@@ -218,7 +223,7 @@ fn add_npm_packages_to_json(
.collect::<Vec<_>>();
kv.insert("dependencies".to_string(), deps.into());
- json_packages.insert(pkg.id.as_serialized(), kv.into());
+ json_packages.insert(pkg.pkg_id.as_serialized(), kv.into());
}
json.insert("npmPackages".to_string(), json_packages.into());
@@ -297,9 +302,9 @@ fn print_tree_node<TWrite: Write>(
/// Precached information about npm packages that are used in deno info.
#[derive(Default)]
struct NpmInfo {
- package_sizes: HashMap<NpmPackageNodeId, u64>,
- resolved_reqs: HashMap<NpmPackageReq, NpmPackageNodeId>,
- packages: HashMap<NpmPackageNodeId, NpmResolutionPackage>,
+ package_sizes: HashMap<NpmPackageId, u64>,
+ resolved_reqs: HashMap<NpmPackageReq, NpmPackageId>,
+ packages: HashMap<NpmPackageId, NpmResolutionPackage>,
specifiers: HashMap<ModuleSpecifier, NpmPackageReq>,
}
@@ -315,15 +320,17 @@ impl NpmInfo {
}
for (specifier, _) in graph.specifiers() {
- if let Ok(reference) = NpmPackageReference::from_specifier(specifier) {
+ if let Ok(reference) = NpmPackageReqReference::from_specifier(specifier) {
info
.specifiers
.insert(specifier.clone(), reference.req.clone());
if let Ok(package) =
npm_snapshot.resolve_package_from_deno_module(&reference.req)
{
- info.resolved_reqs.insert(reference.req, package.id.clone());
- if !info.packages.contains_key(&package.id) {
+ info
+ .resolved_reqs
+ .insert(reference.req, package.pkg_id.clone());
+ if !info.packages.contains_key(&package.pkg_id) {
info.fill_package_info(package, npm_resolver, npm_snapshot);
}
}
@@ -339,9 +346,11 @@ impl NpmInfo {
npm_resolver: &'a NpmPackageResolver,
npm_snapshot: &'a NpmResolutionSnapshot,
) {
- self.packages.insert(package.id.clone(), package.clone());
- if let Ok(size) = npm_resolver.package_size(&package.id) {
- self.package_sizes.insert(package.id.clone(), size);
+ self
+ .packages
+ .insert(package.pkg_id.clone(), package.clone());
+ if let Ok(size) = npm_resolver.package_size(&package.pkg_id) {
+ self.package_sizes.insert(package.pkg_id.clone(), size);
}
for id in package.dependencies.values() {
if !self.packages.contains_key(id) {
@@ -504,7 +513,7 @@ impl<'a> GraphDisplayContext<'a> {
None => Specifier(module.specifier.clone()),
};
let was_seen = !self.seen.insert(match &package_or_specifier {
- Package(package) => package.id.as_serialized(),
+ Package(package) => package.pkg_id.as_serialized(),
Specifier(specifier) => specifier.to_string(),
});
let header_text = if was_seen {
@@ -522,13 +531,13 @@ impl<'a> GraphDisplayContext<'a> {
};
let header_text = match &package_or_specifier {
Package(package) => {
- format!("{} - {}", specifier_str, package.id.version)
+ format!("{} - {}", specifier_str, package.pkg_id.nv.version)
}
Specifier(_) => specifier_str,
};
let maybe_size = match &package_or_specifier {
Package(package) => {
- self.npm_info.package_sizes.get(&package.id).copied()
+ self.npm_info.package_sizes.get(&package.pkg_id).copied()
}
Specifier(_) => module
.maybe_source
@@ -579,7 +588,7 @@ impl<'a> GraphDisplayContext<'a> {
));
if let Some(package) = self.npm_info.packages.get(dep_id) {
if !package.dependencies.is_empty() {
- let was_seen = !self.seen.insert(package.id.as_serialized());
+ let was_seen = !self.seen.insert(package.pkg_id.as_serialized());
if was_seen {
child.text = format!("{} {}", child.text, colors::gray("*"));
} else {
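
The `deno info --json` output touched above now keys each `npmPackages` entry by the serialized package id while storing the name and version separately. Roughly the resulting shape (illustrative values only; the field names come from the inserts in this diff):

fn example_npm_packages() -> serde_json::Value {
    serde_json::json!({
        "npmPackages": {
            "chalk@5.0.1": {
                "name": "chalk",
                "version": "5.0.1",
                "dependencies": []
            }
        }
    })
}
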
diff --git a/cli/tools/installer.rs b/cli/tools/installer.rs
index 68e52d8f4..a43ec84d5 100644
--- a/cli/tools/installer.rs
+++ b/cli/tools/installer.rs
@@ -15,7 +15,7 @@ use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_core::url::Url;
-use deno_graph::npm::NpmPackageReference;
+use deno_graph::npm::NpmPackageReqReference;
use log::Level;
use once_cell::sync::Lazy;
use regex::Regex;
@@ -139,7 +139,7 @@ pub async fn infer_name_from_url(url: &Url) -> Option<String> {
}
}
- if let Ok(npm_ref) = NpmPackageReference::from_specifier(&url) {
+ if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(&url) {
if let Some(sub_path) = npm_ref.sub_path {
if !sub_path.contains('/') {
return Some(sub_path);
@@ -430,7 +430,7 @@ async fn resolve_shim_data(
executable_args.push("--no-lock".to_string());
} else if flags.lock.is_some()
// always use a lockfile for an npm entrypoint unless --no-lock
- || NpmPackageReference::from_specifier(&module_url).is_ok()
+ || NpmPackageReqReference::from_specifier(&module_url).is_ok()
{
let copy_path = get_hidden_file_with_ext(&file_path, "lock.json");
executable_args.push("--lock".to_string());
diff --git a/cli/tools/repl/session.rs b/cli/tools/repl/session.rs
index 4563aa0a2..cb3862a63 100644
--- a/cli/tools/repl/session.rs
+++ b/cli/tools/repl/session.rs
@@ -18,7 +18,7 @@ use deno_core::futures::StreamExt;
use deno_core::serde_json;
use deno_core::serde_json::Value;
use deno_core::LocalInspectorSession;
-use deno_graph::npm::NpmPackageReference;
+use deno_graph::npm::NpmPackageReqReference;
use deno_graph::source::Resolver;
use deno_runtime::deno_node;
use deno_runtime::worker::MainWorker;
@@ -454,7 +454,7 @@ impl ReplSession {
let npm_imports = resolved_imports
.iter()
- .flat_map(|url| NpmPackageReference::from_specifier(url).ok())
+ .flat_map(|url| NpmPackageReqReference::from_specifier(url).ok())
.map(|r| r.req)
.collect::<Vec<_>>();
let has_node_specifier =
diff --git a/cli/tools/run.rs b/cli/tools/run.rs
index d48946d41..d72378510 100644
--- a/cli/tools/run.rs
+++ b/cli/tools/run.rs
@@ -7,7 +7,7 @@ use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
-use deno_graph::npm::NpmPackageReference;
+use deno_graph::npm::NpmPackageReqReference;
use deno_runtime::permissions::Permissions;
use deno_runtime::permissions::PermissionsContainer;
@@ -50,12 +50,12 @@ To grant permissions, set them before the script argument. For example:
ps.dir.upgrade_check_file_path(),
);
- let main_module = if NpmPackageReference::from_str(&run_flags.script).is_ok()
- {
- ModuleSpecifier::parse(&run_flags.script)?
- } else {
- resolve_url_or_path(&run_flags.script)?
- };
+ let main_module =
+ if NpmPackageReqReference::from_str(&run_flags.script).is_ok() {
+ ModuleSpecifier::parse(&run_flags.script)?
+ } else {
+ resolve_url_or_path(&run_flags.script)?
+ };
let permissions = PermissionsContainer::new(Permissions::from_options(
&ps.options.permissions_options(),
)?);
diff --git a/cli/tsc/mod.rs b/cli/tsc/mod.rs
index afa301ce8..00186ca3b 100644
--- a/cli/tsc/mod.rs
+++ b/cli/tsc/mod.rs
@@ -28,7 +28,7 @@ use deno_core::ModuleSpecifier;
use deno_core::OpState;
use deno_core::RuntimeOptions;
use deno_core::Snapshot;
-use deno_graph::npm::NpmPackageReference;
+use deno_graph::npm::NpmPackageReqReference;
use deno_graph::ModuleGraph;
use deno_graph::ModuleKind;
use deno_graph::ResolutionResolved;
@@ -654,7 +654,7 @@ fn op_resolve(
if module.kind == ModuleKind::External {
// handle npm:<package> urls
if let Ok(npm_ref) =
- NpmPackageReference::from_specifier(&module.specifier)
+ NpmPackageReqReference::from_specifier(&module.specifier)
{
if let Some(npm_resolver) = &state.maybe_npm_resolver {
Some(resolve_npm_package_reference_types(
@@ -689,7 +689,8 @@ fn op_resolve(
.ok()
.flatten(),
))
- } else if let Ok(npm_ref) = NpmPackageReference::from_str(&specifier)
+ } else if let Ok(npm_ref) =
+ NpmPackageReqReference::from_str(&specifier)
{
// this could occur when resolving npm:@types/node when it is
// injected and not part of the graph
@@ -740,7 +741,7 @@ fn op_resolve(
}
pub fn resolve_npm_package_reference_types(
- npm_ref: &NpmPackageReference,
+ npm_ref: &NpmPackageReqReference,
npm_resolver: &NpmPackageResolver,
) -> Result<(ModuleSpecifier, MediaType), AnyError> {
let maybe_resolution = node_resolve_npm_reference(
diff --git a/cli/worker.rs b/cli/worker.rs
index 7293183bf..9a89c8060 100644
--- a/cli/worker.rs
+++ b/cli/worker.rs
@@ -14,7 +14,7 @@ use deno_core::serde_v8;
use deno_core::v8;
use deno_core::Extension;
use deno_core::ModuleId;
-use deno_graph::npm::NpmPackageReference;
+use deno_graph::npm::NpmPackageReqReference;
use deno_runtime::colors;
use deno_runtime::deno_node;
use deno_runtime::fmt_errors::format_js_error;
@@ -308,7 +308,7 @@ impl CliMainWorker {
)
.await?;
if let DenoSubcommand::Run(flags) = self.ps.options.sub_command() {
- if let Ok(pkg_ref) = NpmPackageReference::from_str(&flags.script) {
+ if let Ok(pkg_ref) = NpmPackageReqReference::from_str(&flags.script) {
// if the user ran a binary command, we'll need to set process.argv[0]
// to be the name of the binary command instead of deno
let binary_name = pkg_ref
@@ -443,7 +443,7 @@ async fn create_main_worker_internal(
bench_or_test: bool,
) -> Result<CliMainWorker, AnyError> {
let (main_module, is_main_cjs) = if let Ok(package_ref) =
- NpmPackageReference::from_specifier(&main_module)
+ NpmPackageReqReference::from_specifier(&main_module)
{
ps.npm_resolver
.add_package_reqs(vec![package_ref.req.clone()])