| author | David Sherret <dsherret@users.noreply.github.com> | 2023-02-22 14:15:25 -0500 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2023-02-22 14:15:25 -0500 |
| commit | a6ca4d0d61c95b9f7fa79ecce81a31a6d1f6cc5d (patch) | |
| tree | 278a915d7722a8a3d1fffbfa1f3a12752f44d13f /cli/npm | |
| parent | 0f9daaeacb402a7199e58b14ad01ec0091ac2c8d (diff) | |
refactor: use deno_graph for npm specifiers (#17858)
This changes npm specifiers to be handled by deno_graph and resolved to
an npm package name and version when the specifier is encountered. It
also slightly changes how npm specifier resolution occurs: previously,
all npm specifiers were collected and resolved at once, but now they are
resolved on the fly as they are encountered in the module graph.
https://github.com/denoland/deno_graph/pull/232
---------
Co-authored-by: Bartek Iwańczuk <biwanczuk@gmail.com>
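
For readers skimming the diff, the idea of resolving npm specifiers "on the fly" can be sketched in isolation. The following is a minimal, illustrative Rust sketch only: the `NpmPackageNv` struct mirrors the name used in the diff below, but `parse_npm_specifier` and `resolve_on_encounter` are hypothetical helpers, not the actual deno_graph or Deno CLI APIs.

```rust
// Illustrative sketch only: these types and helpers are hypothetical and do not
// mirror the real deno_graph / Deno CLI implementation.

#[derive(Debug, Clone, PartialEq)]
struct NpmPackageNv {
    name: String,
    version: String, // the real code uses a semver Version type
}

/// Naively split an `npm:` specifier into a package name and version requirement.
/// (Does not handle scoped packages like `npm:@scope/pkg@1`.)
fn parse_npm_specifier(specifier: &str) -> Option<(String, String)> {
    let rest = specifier.strip_prefix("npm:")?;
    let (name, version_req) = rest.split_once('@').unwrap_or((rest, "latest"));
    Some((name.to_string(), version_req.to_string()))
}

/// On-the-fly resolution: each npm specifier is mapped to a concrete
/// name and version as soon as it is encountered while walking the module
/// graph, instead of being collected into a list and resolved in a second pass.
fn resolve_on_encounter(specifier: &str, resolved: &mut Vec<NpmPackageNv>) {
    if let Some((name, version_req)) = parse_npm_specifier(specifier) {
        // A real resolver would consult cached registry info and pick the best
        // matching version for `version_req`; here we simply echo it back.
        resolved.push(NpmPackageNv { name, version: version_req });
    }
}

fn main() {
    let mut resolved = Vec::new();
    for specifier in ["npm:chalk@5", "npm:express@4.18.2", "https://deno.land/std/mod.ts"] {
        resolve_on_encounter(specifier, &mut resolved);
    }
    println!("{resolved:?}");
}
```

In the actual change, the version selection happens against cached registry data (see `resolve_best_package_version_and_info` in `cli/npm/resolution/mod.rs` below), and the resolved package is queued as a pending unresolved package whose dependencies are worked out later; the sketch only records the requirement as it is encountered.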
Diffstat (limited to 'cli/npm')
-rw-r--r-- | cli/npm/cache.rs | 91
-rw-r--r-- | cli/npm/mod.rs | 5
-rw-r--r-- | cli/npm/registry.rs | 14
-rw-r--r-- | cli/npm/resolution/graph.rs | 47
-rw-r--r-- | cli/npm/resolution/mod.rs | 220
-rw-r--r-- | cli/npm/resolution/snapshot.rs | 32
-rw-r--r-- | cli/npm/resolution/specifier.rs | 666
-rw-r--r-- | cli/npm/resolvers/common.rs | 36
-rw-r--r-- | cli/npm/resolvers/global.rs | 65
-rw-r--r-- | cli/npm/resolvers/local.rs | 131
-rw-r--r-- | cli/npm/resolvers/mod.rs | 131
-rw-r--r-- | cli/npm/tarball.rs | 40
12 files changed, 453 insertions, 1025 deletions
diff --git a/cli/npm/cache.rs b/cli/npm/cache.rs index b2d932911..9f58bcd0d 100644 --- a/cli/npm/cache.rs +++ b/cli/npm/cache.rs @@ -36,7 +36,7 @@ pub fn should_sync_download() -> bool { const NPM_PACKAGE_SYNC_LOCK_FILENAME: &str = ".deno_sync_lock"; pub fn with_folder_sync_lock( - package: (&str, &Version), + package: &NpmPackageNv, output_folder: &Path, action: impl FnOnce() -> Result<(), AnyError>, ) -> Result<(), AnyError> { @@ -88,14 +88,13 @@ pub fn with_folder_sync_lock( if remove_err.kind() != std::io::ErrorKind::NotFound { bail!( concat!( - "Failed setting up package cache directory for {}@{}, then ", + "Failed setting up package cache directory for {}, then ", "failed cleaning it up.\n\nOriginal error:\n\n{}\n\n", "Remove error:\n\n{}\n\nPlease manually ", "delete this folder or you will run into issues using this ", "package in the future:\n\n{}" ), - package.0, - package.1, + package, err, remove_err, output_folder.display(), @@ -182,31 +181,26 @@ impl ReadonlyNpmCache { pub fn package_folder_for_id( &self, - id: &NpmPackageCacheFolderId, + folder_id: &NpmPackageCacheFolderId, registry_url: &Url, ) -> PathBuf { - if id.copy_index == 0 { - self.package_folder_for_name_and_version( - &id.nv.name, - &id.nv.version, - registry_url, - ) + if folder_id.copy_index == 0 { + self.package_folder_for_name_and_version(&folder_id.nv, registry_url) } else { self - .package_name_folder(&id.nv.name, registry_url) - .join(format!("{}_{}", id.nv.version, id.copy_index)) + .package_name_folder(&folder_id.nv.name, registry_url) + .join(format!("{}_{}", folder_id.nv.version, folder_id.copy_index)) } } pub fn package_folder_for_name_and_version( &self, - name: &str, - version: &Version, + package: &NpmPackageNv, registry_url: &Url, ) -> PathBuf { self - .package_name_folder(name, registry_url) - .join(version.to_string()) + .package_name_folder(&package.name, registry_url) + .join(package.version.to_string()) } pub fn package_name_folder(&self, name: &str, registry_url: &Url) -> PathBuf { @@ -324,7 +318,7 @@ pub struct NpmCache { http_client: HttpClient, progress_bar: ProgressBar, /// ensures a package is only downloaded once per run - previously_reloaded_packages: Arc<Mutex<HashSet<String>>>, + previously_reloaded_packages: Arc<Mutex<HashSet<NpmPackageNv>>>, } impl NpmCache { @@ -358,40 +352,36 @@ impl NpmCache { /// and imports a dynamic import that imports the same package again for example. 
fn should_use_global_cache_for_package( &self, - package: (&str, &Version), + package: &NpmPackageNv, ) -> bool { - self.cache_setting.should_use_for_npm_package(package.0) + self.cache_setting.should_use_for_npm_package(&package.name) || !self .previously_reloaded_packages .lock() - .insert(format!("{}@{}", package.0, package.1)) + .insert(package.clone()) } pub async fn ensure_package( &self, - package: (&str, &Version), + package: &NpmPackageNv, dist: &NpmPackageVersionDistInfo, registry_url: &Url, ) -> Result<(), AnyError> { self .ensure_package_inner(package, dist, registry_url) .await - .with_context(|| { - format!("Failed caching npm package '{}@{}'.", package.0, package.1) - }) + .with_context(|| format!("Failed caching npm package '{package}'.")) } async fn ensure_package_inner( &self, - package: (&str, &Version), + package: &NpmPackageNv, dist: &NpmPackageVersionDistInfo, registry_url: &Url, ) -> Result<(), AnyError> { - let package_folder = self.readonly.package_folder_for_name_and_version( - package.0, - package.1, - registry_url, - ); + let package_folder = self + .readonly + .package_folder_for_name_and_version(package, registry_url); if self.should_use_global_cache_for_package(package) && package_folder.exists() // if this file exists, then the package didn't successfully extract @@ -404,7 +394,7 @@ impl NpmCache { "NotCached", format!( "An npm specifier not found in cache: \"{}\", --cached-only is specified.", - &package.0 + &package.name ) ) ); @@ -431,32 +421,28 @@ impl NpmCache { /// from exists before this is called. pub fn ensure_copy_package( &self, - id: &NpmPackageCacheFolderId, + folder_id: &NpmPackageCacheFolderId, registry_url: &Url, ) -> Result<(), AnyError> { - assert_ne!(id.copy_index, 0); - let package_folder = self.readonly.package_folder_for_id(id, registry_url); + assert_ne!(folder_id.copy_index, 0); + let package_folder = + self.readonly.package_folder_for_id(folder_id, registry_url); if package_folder.exists() // if this file exists, then the package didn't successfully extract // the first time, or another process is currently extracting the zip file && !package_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME).exists() - && self.cache_setting.should_use_for_npm_package(&id.nv.name) + && self.cache_setting.should_use_for_npm_package(&folder_id.nv.name) { return Ok(()); } - let original_package_folder = - self.readonly.package_folder_for_name_and_version( - &id.nv.name, - &id.nv.version, - registry_url, - ); - with_folder_sync_lock( - (id.nv.name.as_str(), &id.nv.version), - &package_folder, - || hard_link_dir_recursive(&original_package_folder, &package_folder), - )?; + let original_package_folder = self + .readonly + .package_folder_for_name_and_version(&folder_id.nv, registry_url); + with_folder_sync_lock(&folder_id.nv, &package_folder, || { + hard_link_dir_recursive(&original_package_folder, &package_folder) + })?; Ok(()) } @@ -470,15 +456,12 @@ impl NpmCache { pub fn package_folder_for_name_and_version( &self, - name: &str, - version: &Version, + package: &NpmPackageNv, registry_url: &Url, ) -> PathBuf { - self.readonly.package_folder_for_name_and_version( - name, - version, - registry_url, - ) + self + .readonly + .package_folder_for_name_and_version(package, registry_url) } pub fn package_name_folder(&self, name: &str, registry_url: &Url) -> PathBuf { diff --git a/cli/npm/mod.rs b/cli/npm/mod.rs index 2a58bb01a..602b4ad44 100644 --- a/cli/npm/mod.rs +++ b/cli/npm/mod.rs @@ -6,12 +6,15 @@ mod resolution; mod resolvers; mod tarball; +pub use 
cache::should_sync_download; pub use cache::NpmCache; #[cfg(test)] pub use registry::NpmPackageVersionDistInfo; pub use registry::NpmRegistryApi; -pub use resolution::resolve_graph_npm_info; +#[cfg(test)] +pub use registry::TestNpmRegistryApiInner; pub use resolution::NpmPackageId; +pub use resolution::NpmResolution; pub use resolution::NpmResolutionPackage; pub use resolution::NpmResolutionSnapshot; pub use resolvers::NpmPackageResolver; diff --git a/cli/npm/registry.rs b/cli/npm/registry.rs index bcdada30d..75760c171 100644 --- a/cli/npm/registry.rs +++ b/cli/npm/registry.rs @@ -232,6 +232,13 @@ impl NpmRegistryApi { })) } + /// Creates an npm registry API that will be uninitialized + /// and error for every request. This is useful for tests + /// or for initializing the LSP. + pub fn new_uninitialized() -> Self { + Self(Arc::new(NullNpmRegistryApiInner)) + } + #[cfg(test)] pub fn new_for_test(api: TestNpmRegistryApiInner) -> NpmRegistryApi { Self(Arc::new(api)) @@ -294,6 +301,13 @@ impl NpmRegistryApi { self.0.clear_memory_cache(); } + pub fn get_cached_package_info( + &self, + name: &str, + ) -> Option<Arc<NpmPackageInfo>> { + self.0.get_cached_package_info(name) + } + pub fn base_url(&self) -> &Url { self.0.base_url() } diff --git a/cli/npm/resolution/graph.rs b/cli/npm/resolution/graph.rs index 966e1f010..87579dad3 100644 --- a/cli/npm/resolution/graph.rs +++ b/cli/npm/resolution/graph.rs @@ -159,6 +159,9 @@ impl ResolvedNodeIds { } } +// todo(dsherret): for some reason the lsp errors when using an Rc<RefCell<NodeId>> here +// instead of an Arc<Mutex<NodeId>>. We should investigate and fix. + /// A pointer to a specific node in a graph path. The underlying node id /// may change as peer dependencies are created. #[derive(Clone, Debug)] @@ -297,6 +300,8 @@ pub struct Graph { // This will be set when creating from a snapshot, then // inform the final snapshot creation. packages_to_copy_index: HashMap<NpmPackageId, usize>, + /// Packages that the resolver should resolve first. + pending_unresolved_packages: Vec<NpmPackageNv>, } impl Graph { @@ -359,6 +364,7 @@ impl Graph { .map(|(id, p)| (id.clone(), p.copy_index)) .collect(), package_reqs: snapshot.package_reqs, + pending_unresolved_packages: snapshot.pending_unresolved_packages, ..Default::default() }; let mut created_package_ids = @@ -375,10 +381,18 @@ impl Graph { Ok(graph) } + pub fn take_pending_unresolved(&mut self) -> Vec<NpmPackageNv> { + std::mem::take(&mut self.pending_unresolved_packages) + } + pub fn has_package_req(&self, req: &NpmPackageReq) -> bool { self.package_reqs.contains_key(req) } + pub fn has_root_package(&self, id: &NpmPackageNv) -> bool { + self.root_packages.contains_key(id) + } + fn get_npm_pkg_id(&self, node_id: NodeId) -> NpmPackageId { let resolved_id = self.resolved_node_ids.get(node_id).unwrap(); self.get_npm_pkg_id_from_resolved_id(resolved_id, HashSet::new()) @@ -596,6 +610,7 @@ impl Graph { .collect(), packages, package_reqs: self.package_reqs, + pending_unresolved_packages: self.pending_unresolved_packages, }) } @@ -714,11 +729,43 @@ impl<'a> GraphDependencyResolver<'a> { } } + pub fn add_root_package( + &mut self, + package_nv: &NpmPackageNv, + package_info: &NpmPackageInfo, + ) -> Result<(), AnyError> { + if self.graph.root_packages.contains_key(package_nv) { + return Ok(()); // already added + } + + // todo(dsherret): using a version requirement here is a temporary hack + // to reuse code in a large refactor. 
We should resolve the node directly + // from the package name and version + let version_req = + VersionReq::parse_from_specifier(&format!("{}", package_nv.version)) + .unwrap(); + let (pkg_id, node_id) = self.resolve_node_from_info( + &package_nv.name, + &version_req, + package_info, + None, + )?; + self.graph.root_packages.insert(pkg_id.clone(), node_id); + self + .pending_unresolved_nodes + .push_back(GraphPath::for_root(node_id, pkg_id)); + Ok(()) + } + pub fn add_package_req( &mut self, package_req: &NpmPackageReq, package_info: &NpmPackageInfo, ) -> Result<(), AnyError> { + if self.graph.package_reqs.contains_key(package_req) { + return Ok(()); // already added + } + let (pkg_id, node_id) = self.resolve_node_from_info( &package_req.name, package_req diff --git a/cli/npm/resolution/mod.rs b/cli/npm/resolution/mod.rs index c95124b61..8584958b5 100644 --- a/cli/npm/resolution/mod.rs +++ b/cli/npm/resolution/mod.rs @@ -4,19 +4,26 @@ use std::cmp::Ordering; use std::collections::BTreeMap; use std::collections::HashMap; use std::collections::HashSet; +use std::sync::Arc; use deno_core::anyhow::Context; use deno_core::error::AnyError; +use deno_core::parking_lot::Mutex; use deno_core::parking_lot::RwLock; use deno_graph::npm::NpmPackageNv; +use deno_graph::npm::NpmPackageNvReference; use deno_graph::npm::NpmPackageReq; +use deno_graph::npm::NpmPackageReqReference; use deno_graph::semver::Version; +use log::debug; use serde::Deserialize; use serde::Serialize; use thiserror::Error; use crate::args::Lockfile; +use crate::npm::resolution::common::LATEST_VERSION_REQ; +use self::common::resolve_best_package_version_and_info; use self::graph::GraphDependencyResolver; use self::snapshot::NpmPackagesPartitioned; @@ -27,11 +34,9 @@ use super::registry::NpmRegistryApi; mod common; mod graph; mod snapshot; -mod specifier; use graph::Graph; pub use snapshot::NpmResolutionSnapshot; -pub use specifier::resolve_graph_npm_info; #[derive(Debug, Error)] #[error("Invalid npm package id '{text}'. 
{message}")] @@ -230,15 +235,19 @@ impl NpmResolutionPackage { } } -pub struct NpmResolution { +#[derive(Clone)] +pub struct NpmResolution(Arc<NpmResolutionInner>); + +struct NpmResolutionInner { api: NpmRegistryApi, snapshot: RwLock<NpmResolutionSnapshot>, update_semaphore: tokio::sync::Semaphore, + maybe_lockfile: Option<Arc<Mutex<Lockfile>>>, } impl std::fmt::Debug for NpmResolution { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let snapshot = self.snapshot.read(); + let snapshot = self.0.snapshot.read(); f.debug_struct("NpmResolution") .field("snapshot", &snapshot) .finish() @@ -249,26 +258,35 @@ impl NpmResolution { pub fn new( api: NpmRegistryApi, initial_snapshot: Option<NpmResolutionSnapshot>, + maybe_lockfile: Option<Arc<Mutex<Lockfile>>>, ) -> Self { - Self { + Self(Arc::new(NpmResolutionInner { api, snapshot: RwLock::new(initial_snapshot.unwrap_or_default()), update_semaphore: tokio::sync::Semaphore::new(1), - } + maybe_lockfile, + })) } pub async fn add_package_reqs( &self, package_reqs: Vec<NpmPackageReq>, ) -> Result<(), AnyError> { + let inner = &self.0; + // only allow one thread in here at a time - let _permit = self.update_semaphore.acquire().await?; - let snapshot = self.snapshot.read().clone(); + let _permit = inner.update_semaphore.acquire().await?; + let snapshot = inner.snapshot.read().clone(); - let snapshot = - add_package_reqs_to_snapshot(&self.api, package_reqs, snapshot).await?; + let snapshot = add_package_reqs_to_snapshot( + &inner.api, + package_reqs, + snapshot, + self.0.maybe_lockfile.clone(), + ) + .await?; - *self.snapshot.write() = snapshot; + *inner.snapshot.write() = snapshot; Ok(()) } @@ -276,9 +294,10 @@ impl NpmResolution { &self, package_reqs: HashSet<NpmPackageReq>, ) -> Result<(), AnyError> { + let inner = &self.0; // only allow one thread in here at a time - let _permit = self.update_semaphore.acquire().await?; - let snapshot = self.snapshot.read().clone(); + let _permit = inner.update_semaphore.acquire().await?; + let snapshot = inner.snapshot.read().clone(); let has_removed_package = !snapshot .package_reqs @@ -291,22 +310,46 @@ impl NpmResolution { snapshot }; let snapshot = add_package_reqs_to_snapshot( - &self.api, + &inner.api, package_reqs.into_iter().collect(), snapshot, + self.0.maybe_lockfile.clone(), ) .await?; - *self.snapshot.write() = snapshot; + *inner.snapshot.write() = snapshot; Ok(()) } - pub fn resolve_package_from_id( + pub async fn resolve_pending(&self) -> Result<(), AnyError> { + let inner = &self.0; + // only allow one thread in here at a time + let _permit = inner.update_semaphore.acquire().await?; + let snapshot = inner.snapshot.read().clone(); + + let snapshot = add_package_reqs_to_snapshot( + &inner.api, + Vec::new(), + snapshot, + self.0.maybe_lockfile.clone(), + ) + .await?; + + *inner.snapshot.write() = snapshot; + + Ok(()) + } + + pub fn pkg_req_ref_to_nv_ref( &self, - id: &NpmPackageId, - ) -> Option<NpmResolutionPackage> { - self.snapshot.read().package_from_id(id).cloned() + req_ref: NpmPackageReqReference, + ) -> Result<NpmPackageNvReference, AnyError> { + let node_id = self.resolve_pkg_id_from_pkg_req(&req_ref.req)?; + Ok(NpmPackageNvReference { + nv: node_id.nv, + sub_path: req_ref.sub_path, + }) } pub fn resolve_package_cache_folder_id_from_id( @@ -314,6 +357,7 @@ impl NpmResolution { id: &NpmPackageId, ) -> Option<NpmPackageCacheFolderId> { self + .0 .snapshot .read() .package_from_id(id) @@ -326,6 +370,7 @@ impl NpmResolution { referrer: &NpmPackageCacheFolderId, ) -> 
Result<NpmResolutionPackage, AnyError> { self + .0 .snapshot .read() .resolve_package_from_package(name, referrer) @@ -333,36 +378,100 @@ impl NpmResolution { } /// Resolve a node package from a deno module. - pub fn resolve_package_from_deno_module( + pub fn resolve_pkg_id_from_pkg_req( &self, - package: &NpmPackageReq, - ) -> Result<NpmResolutionPackage, AnyError> { + req: &NpmPackageReq, + ) -> Result<NpmPackageId, AnyError> { self + .0 .snapshot .read() - .resolve_package_from_deno_module(package) - .cloned() + .resolve_pkg_from_pkg_req(req) + .map(|pkg| pkg.pkg_id.clone()) + } + + pub fn resolve_pkg_id_from_deno_module( + &self, + id: &NpmPackageNv, + ) -> Result<NpmPackageId, AnyError> { + self + .0 + .snapshot + .read() + .resolve_package_from_deno_module(id) + .map(|pkg| pkg.pkg_id.clone()) + } + + /// Resolves a package requirement for deno graph. This should only be + /// called by deno_graph's NpmResolver. + pub fn resolve_package_req_for_deno_graph( + &self, + pkg_req: &NpmPackageReq, + ) -> Result<NpmPackageNv, AnyError> { + let inner = &self.0; + // we should always have this because it should have been cached before here + let package_info = + inner.api.get_cached_package_info(&pkg_req.name).unwrap(); + + let mut snapshot = inner.snapshot.write(); + let version_req = + pkg_req.version_req.as_ref().unwrap_or(&*LATEST_VERSION_REQ); + let version_and_info = + match snapshot.packages_by_name.get(&package_info.name) { + Some(existing_versions) => resolve_best_package_version_and_info( + version_req, + &package_info, + existing_versions.iter().map(|p| &p.nv.version), + )?, + None => resolve_best_package_version_and_info( + version_req, + &package_info, + Vec::new().iter(), + )?, + }; + let id = NpmPackageNv { + name: package_info.name.to_string(), + version: version_and_info.version, + }; + debug!( + "Resolved {}@{} to {}", + pkg_req.name, + version_req.version_text(), + id.to_string(), + ); + snapshot.package_reqs.insert(pkg_req.clone(), id.clone()); + let packages_with_name = snapshot + .packages_by_name + .entry(package_info.name.clone()) + .or_default(); + if !packages_with_name.iter().any(|p| p.nv == id) { + packages_with_name.push(NpmPackageId { + nv: id.clone(), + peer_dependencies: Vec::new(), + }); + } + snapshot.pending_unresolved_packages.push(id.clone()); + Ok(id) } pub fn all_packages_partitioned(&self) -> NpmPackagesPartitioned { - self.snapshot.read().all_packages_partitioned() + self.0.snapshot.read().all_packages_partitioned() } pub fn has_packages(&self) -> bool { - !self.snapshot.read().packages.is_empty() + !self.0.snapshot.read().packages.is_empty() } pub fn snapshot(&self) -> NpmResolutionSnapshot { - self.snapshot.read().clone() + self.0.snapshot.read().clone() } pub fn lock(&self, lockfile: &mut Lockfile) -> Result<(), AnyError> { - let snapshot = self.snapshot.read(); + let snapshot = self.0.snapshot.read(); for (package_req, nv) in snapshot.package_reqs.iter() { - let package_id = snapshot.root_packages.get(nv).unwrap(); lockfile.insert_npm_specifier( package_req.to_string(), - package_id.as_serialized(), + snapshot.root_packages.get(nv).unwrap().as_serialized(), ); } for package in snapshot.all_packages() { @@ -376,10 +485,12 @@ async fn add_package_reqs_to_snapshot( api: &NpmRegistryApi, package_reqs: Vec<NpmPackageReq>, snapshot: NpmResolutionSnapshot, + maybe_lockfile: Option<Arc<Mutex<Lockfile>>>, ) -> Result<NpmResolutionSnapshot, AnyError> { - if package_reqs - .iter() - .all(|req| snapshot.package_reqs.contains_key(req)) + if 
snapshot.pending_unresolved_packages.is_empty() + && package_reqs + .iter() + .all(|req| snapshot.package_reqs.contains_key(req)) { return Ok(snapshot); // already up to date } @@ -390,39 +501,72 @@ async fn add_package_reqs_to_snapshot( "Failed creating npm state. Try recreating your lockfile." ) })?; + let pending_unresolved = graph.take_pending_unresolved(); // avoid loading the info if this is already in the graph let package_reqs = package_reqs .into_iter() .filter(|r| !graph.has_package_req(r)) .collect::<Vec<_>>(); + let pending_unresolved = pending_unresolved + .into_iter() + .filter(|p| !graph.has_root_package(p)) + .collect::<Vec<_>>(); - // go over the top level package names first, then down the tree - // one level at a time through all the branches + // cache the packages in parallel api .cache_in_parallel( package_reqs .iter() - .map(|r| r.name.clone()) + .map(|req| req.name.clone()) + .chain(pending_unresolved.iter().map(|id| id.name.clone())) + .collect::<HashSet<_>>() .into_iter() .collect::<Vec<_>>(), ) .await?; + // go over the top level package names first (npm package reqs and pending unresolved), + // then down the tree one level at a time through all the branches let mut resolver = GraphDependencyResolver::new(&mut graph, api); - // The package reqs should already be sorted + // The package reqs and ids should already be sorted // in the order they should be resolved in. for package_req in package_reqs { let info = api.package_info(&package_req.name).await?; resolver.add_package_req(&package_req, &info)?; } + for pkg_id in pending_unresolved { + let info = api.package_info(&pkg_id.name).await?; + resolver.add_root_package(&pkg_id, &info)?; + } + resolver.resolve_pending().await?; let result = graph.into_snapshot(api).await; api.clear_memory_cache(); - result + + if let Some(lockfile_mutex) = maybe_lockfile { + let mut lockfile = lockfile_mutex.lock(); + match result { + Ok(snapshot) => { + for (package_req, nv) in snapshot.package_reqs.iter() { + lockfile.insert_npm_specifier( + package_req.to_string(), + snapshot.root_packages.get(nv).unwrap().as_serialized(), + ); + } + for package in snapshot.all_packages() { + lockfile.check_or_insert_npm_package(package.into())?; + } + Ok(snapshot) + } + Err(err) => Err(err), + } + } else { + result + } } #[cfg(test)] diff --git a/cli/npm/resolution/snapshot.rs b/cli/npm/resolution/snapshot.rs index 3fc82cbb8..e986294ec 100644 --- a/cli/npm/resolution/snapshot.rs +++ b/cli/npm/resolution/snapshot.rs @@ -54,6 +54,9 @@ pub struct NpmResolutionSnapshot { pub(super) packages_by_name: HashMap<String, Vec<NpmPackageId>>, #[serde(with = "map_to_vec")] pub(super) packages: HashMap<NpmPackageId, NpmResolutionPackage>, + /// Ordered list based on resolution of packages whose dependencies + /// have not yet been resolved + pub(super) pending_unresolved_packages: Vec<NpmPackageNv>, } impl std::fmt::Debug for NpmResolutionSnapshot { @@ -76,6 +79,10 @@ impl std::fmt::Debug for NpmResolutionSnapshot { "packages", &self.packages.iter().collect::<BTreeMap<_, _>>(), ) + .field( + "pending_unresolved_packages", + &self.pending_unresolved_packages, + ) .finish() } } @@ -120,22 +127,28 @@ mod map_to_vec { } impl NpmResolutionSnapshot { - /// Resolve a node package from a deno module. - pub fn resolve_package_from_deno_module( + /// Resolve a package from a package requirement. 
+ pub fn resolve_pkg_from_pkg_req( &self, req: &NpmPackageReq, ) -> Result<&NpmResolutionPackage, AnyError> { - match self - .package_reqs - .get(req) - .and_then(|nv| self.root_packages.get(nv)) - .and_then(|id| self.packages.get(id)) - { - Some(id) => Ok(id), + match self.package_reqs.get(req) { + Some(id) => self.resolve_package_from_deno_module(id), None => bail!("could not find npm package directory for '{}'", req), } } + /// Resolve a package from a deno module. + pub fn resolve_package_from_deno_module( + &self, + id: &NpmPackageNv, + ) -> Result<&NpmResolutionPackage, AnyError> { + match self.root_packages.get(id) { + Some(id) => Ok(self.packages.get(id).unwrap()), + None => bail!("could not find npm package directory for '{}'", id), + } + } + pub fn top_level_packages(&self) -> Vec<NpmPackageId> { self.root_packages.values().cloned().collect::<Vec<_>>() } @@ -342,6 +355,7 @@ impl NpmResolutionSnapshot { root_packages, packages_by_name, packages, + pending_unresolved_packages: Default::default(), }) } } diff --git a/cli/npm/resolution/specifier.rs b/cli/npm/resolution/specifier.rs deleted file mode 100644 index 29d65c747..000000000 --- a/cli/npm/resolution/specifier.rs +++ /dev/null @@ -1,666 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. - -use std::cmp::Ordering; -use std::collections::HashMap; -use std::collections::HashSet; -use std::collections::VecDeque; - -use deno_ast::ModuleSpecifier; -use deno_graph::npm::NpmPackageReq; -use deno_graph::npm::NpmPackageReqReference; -use deno_graph::ModuleGraph; - -pub struct GraphNpmInfo { - /// The order of these package requirements is the order they - /// should be resolved in. - pub package_reqs: Vec<NpmPackageReq>, - /// Gets if the graph had a built-in node specifier (ex. `node:fs`). - pub has_node_builtin_specifier: bool, -} - -/// Resolves npm specific information from the graph. -/// -/// This function will analyze the module graph for parent-most folder -/// specifiers of all modules, then group npm specifiers together as found in -/// those descendant modules and return them in the order found spreading out -/// from the root of the graph. 
-/// -/// For example, given the following module graph: -/// -/// file:///dev/local_module_a/mod.ts -/// ├── npm:package-a@1 -/// ├─┬ https://deno.land/x/module_d/mod.ts -/// │ └─┬ https://deno.land/x/module_d/other.ts -/// │ └── npm:package-a@3 -/// ├─┬ file:///dev/local_module_a/other.ts -/// │ └── npm:package-b@2 -/// ├─┬ file:///dev/local_module_b/mod.ts -/// │ └── npm:package-b@2 -/// └─┬ https://deno.land/x/module_a/mod.ts -/// ├── npm:package-a@4 -/// ├── npm:package-c@5 -/// ├─┬ https://deno.land/x/module_c/sub_folder/mod.ts -/// │ ├── https://deno.land/x/module_c/mod.ts -/// │ ├─┬ https://deno.land/x/module_d/sub_folder/mod.ts -/// │ │ └── npm:package-other@2 -/// │ └── npm:package-c@5 -/// └── https://deno.land/x/module_b/mod.ts -/// -/// The graph above would be grouped down to the topmost specifier folders like -/// so and npm specifiers under each path would be resolved for that group -/// prioritizing file specifiers and sorting by end folder name alphabetically: -/// -/// file:///dev/local_module_a/ -/// ├── file:///dev/local_module_b/ -/// ├─┬ https://deno.land/x/module_a/ -/// │ ├── https://deno.land/x/module_b/ -/// │ └─┬ https://deno.land/x/module_c/ -/// │ └── https://deno.land/x/module_d/ -/// └── https://deno.land/x/module_d/ -/// -/// Then it would resolve the npm specifiers in each of those groups according -/// to that tree going by tree depth. -pub fn resolve_graph_npm_info(graph: &ModuleGraph) -> GraphNpmInfo { - fn collect_specifiers<'a>( - graph: &'a ModuleGraph, - module: &'a deno_graph::Module, - ) -> Vec<&'a ModuleSpecifier> { - let mut specifiers = Vec::with_capacity(module.dependencies.len() * 2 + 1); - let maybe_types = module - .maybe_types_dependency - .as_ref() - .map(|d| &d.dependency); - if let Some(specifier) = maybe_types.and_then(|d| d.maybe_specifier()) { - specifiers.push(specifier); - } - for dep in module.dependencies.values() { - #[allow(clippy::manual_flatten)] - for resolved in [&dep.maybe_code, &dep.maybe_type] { - if let Some(specifier) = resolved.maybe_specifier() { - specifiers.push(specifier); - } - } - } - - // flatten any data urls into this list of specifiers - for i in (0..specifiers.len()).rev() { - if specifiers[i].scheme() == "data" { - let data_specifier = specifiers.swap_remove(i); - if let Some(module) = graph.get(data_specifier) { - specifiers.extend(collect_specifiers(graph, module)); - } - } - } - - specifiers - } - - fn analyze_module( - module: &deno_graph::Module, - graph: &ModuleGraph, - specifier_graph: &mut SpecifierTree, - seen: &mut HashSet<ModuleSpecifier>, - has_node_builtin_specifier: &mut bool, - ) { - if !seen.insert(module.specifier.clone()) { - return; // already visited - } - - let parent_specifier = get_folder_path_specifier(&module.specifier); - let leaf = specifier_graph.get_leaf(&parent_specifier); - - let specifiers = collect_specifiers(graph, module); - - // fill this leaf's information - for specifier in &specifiers { - if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(specifier) { - leaf.reqs.insert(npm_ref.req); - } else if !specifier.as_str().starts_with(parent_specifier.as_str()) { - leaf - .dependencies - .insert(get_folder_path_specifier(specifier)); - } - - if !*has_node_builtin_specifier && specifier.scheme() == "node" { - *has_node_builtin_specifier = true; - } - } - - // now visit all the dependencies - for specifier in &specifiers { - if let Some(module) = graph.get(specifier) { - analyze_module( - module, - graph, - specifier_graph, - seen, - has_node_builtin_specifier, - ); - 
} - } - } - - let root_specifiers = graph - .roots - .iter() - .map(|url| graph.resolve(url)) - .collect::<Vec<_>>(); - let mut seen = HashSet::new(); - let mut specifier_graph = SpecifierTree::default(); - let mut has_node_builtin_specifier = false; - for root in &root_specifiers { - if let Some(module) = graph.get(root) { - analyze_module( - module, - graph, - &mut specifier_graph, - &mut seen, - &mut has_node_builtin_specifier, - ); - } - } - - let mut seen = HashSet::new(); - let mut pending_specifiers = VecDeque::new(); - let mut result = Vec::new(); - - for specifier in &root_specifiers { - match NpmPackageReqReference::from_specifier(specifier) { - Ok(npm_ref) => result.push(npm_ref.req), - Err(_) => { - pending_specifiers.push_back(get_folder_path_specifier(specifier)) - } - } - } - - while let Some(specifier) = pending_specifiers.pop_front() { - let leaf = specifier_graph.get_leaf(&specifier); - if !seen.insert(leaf.specifier.clone()) { - continue; // already seen - } - - let reqs = std::mem::take(&mut leaf.reqs); - let mut reqs = reqs.into_iter().collect::<Vec<_>>(); - reqs.sort(); - result.extend(reqs); - - let mut deps = std::mem::take(&mut leaf.dependencies) - .into_iter() - .collect::<Vec<_>>(); - deps.sort_by(cmp_folder_specifiers); - - for dep in deps { - pending_specifiers.push_back(dep); - } - } - - GraphNpmInfo { - has_node_builtin_specifier, - package_reqs: result, - } -} - -fn get_folder_path_specifier(specifier: &ModuleSpecifier) -> ModuleSpecifier { - let mut specifier = specifier.clone(); - specifier.set_query(None); - specifier.set_fragment(None); - if !specifier.path().ends_with('/') { - // remove the last path part, but keep the trailing slash - let mut path_parts = specifier.path().split('/').collect::<Vec<_>>(); - let path_parts_len = path_parts.len(); // make borrow checker happy for some reason - if path_parts_len > 0 { - path_parts[path_parts_len - 1] = ""; - } - specifier.set_path(&path_parts.join("/")); - } - specifier -} - -#[derive(Debug)] -enum SpecifierTreeNode { - Parent(SpecifierTreeParentNode), - Leaf(SpecifierTreeLeafNode), -} - -impl SpecifierTreeNode { - pub fn mut_to_leaf(&mut self) { - if let SpecifierTreeNode::Parent(node) = self { - let node = std::mem::replace( - node, - SpecifierTreeParentNode { - specifier: node.specifier.clone(), - dependencies: Default::default(), - }, - ); - *self = SpecifierTreeNode::Leaf(node.into_leaf()); - } - } -} - -#[derive(Debug)] -struct SpecifierTreeParentNode { - specifier: ModuleSpecifier, - dependencies: HashMap<String, SpecifierTreeNode>, -} - -impl SpecifierTreeParentNode { - pub fn into_leaf(self) -> SpecifierTreeLeafNode { - fn fill_new_leaf( - deps: HashMap<String, SpecifierTreeNode>, - new_leaf: &mut SpecifierTreeLeafNode, - ) { - for node in deps.into_values() { - match node { - SpecifierTreeNode::Parent(node) => { - fill_new_leaf(node.dependencies, new_leaf) - } - SpecifierTreeNode::Leaf(leaf) => { - for dep in leaf.dependencies { - // don't insert if the dependency is found within the new leaf - if !dep.as_str().starts_with(new_leaf.specifier.as_str()) { - new_leaf.dependencies.insert(dep); - } - } - new_leaf.reqs.extend(leaf.reqs); - } - } - } - } - - let mut new_leaf = SpecifierTreeLeafNode { - specifier: self.specifier, - reqs: Default::default(), - dependencies: Default::default(), - }; - fill_new_leaf(self.dependencies, &mut new_leaf); - new_leaf - } -} - -#[derive(Debug)] -struct SpecifierTreeLeafNode { - specifier: ModuleSpecifier, - reqs: HashSet<NpmPackageReq>, - dependencies: 
HashSet<ModuleSpecifier>, -} - -#[derive(Default)] -struct SpecifierTree { - root_nodes: HashMap<ModuleSpecifier, SpecifierTreeNode>, -} - -impl SpecifierTree { - pub fn get_leaf( - &mut self, - specifier: &ModuleSpecifier, - ) -> &mut SpecifierTreeLeafNode { - let root_specifier = { - let mut specifier = specifier.clone(); - specifier.set_path(""); - specifier - }; - let root_node = self - .root_nodes - .entry(root_specifier.clone()) - .or_insert_with(|| { - SpecifierTreeNode::Parent(SpecifierTreeParentNode { - specifier: root_specifier.clone(), - dependencies: Default::default(), - }) - }); - let mut current_node = root_node; - if !matches!(specifier.path(), "" | "/") { - let mut current_parts = Vec::new(); - let path = specifier.path(); - for part in path[1..path.len() - 1].split('/') { - current_parts.push(part); - match current_node { - SpecifierTreeNode::Leaf(leaf) => return leaf, - SpecifierTreeNode::Parent(node) => { - current_node = node - .dependencies - .entry(part.to_string()) - .or_insert_with(|| { - SpecifierTreeNode::Parent(SpecifierTreeParentNode { - specifier: { - let mut specifier = root_specifier.clone(); - specifier.set_path(¤t_parts.join("/")); - specifier - }, - dependencies: Default::default(), - }) - }); - } - } - } - } - current_node.mut_to_leaf(); - match current_node { - SpecifierTreeNode::Leaf(leaf) => leaf, - _ => unreachable!(), - } - } -} - -// prefer file: specifiers, then sort by folder name, then by specifier -fn cmp_folder_specifiers(a: &ModuleSpecifier, b: &ModuleSpecifier) -> Ordering { - fn order_folder_name(path_a: &str, path_b: &str) -> Option<Ordering> { - let path_a = path_a.trim_end_matches('/'); - let path_b = path_b.trim_end_matches('/'); - match path_a.rfind('/') { - Some(a_index) => match path_b.rfind('/') { - Some(b_index) => match path_a[a_index..].cmp(&path_b[b_index..]) { - Ordering::Equal => None, - ordering => Some(ordering), - }, - None => None, - }, - None => None, - } - } - - fn order_specifiers(a: &ModuleSpecifier, b: &ModuleSpecifier) -> Ordering { - match order_folder_name(a.path(), b.path()) { - Some(ordering) => ordering, - None => a.as_str().cmp(b.as_str()), // fallback to just comparing the entire url - } - } - - if a.scheme() == "file" { - if b.scheme() == "file" { - order_specifiers(a, b) - } else { - Ordering::Less - } - } else if b.scheme() == "file" { - Ordering::Greater - } else { - order_specifiers(a, b) - } -} - -#[cfg(test)] -mod tests { - use pretty_assertions::assert_eq; - - use super::*; - - #[test] - fn sorting_folder_specifiers() { - fn cmp(a: &str, b: &str) -> Ordering { - let a = ModuleSpecifier::parse(a).unwrap(); - let b = ModuleSpecifier::parse(b).unwrap(); - cmp_folder_specifiers(&a, &b) - } - - // prefer file urls - assert_eq!( - cmp("file:///test/", "https://deno.land/x/module/"), - Ordering::Less - ); - assert_eq!( - cmp("https://deno.land/x/module/", "file:///test/"), - Ordering::Greater - ); - - // sort by folder name - assert_eq!( - cmp( - "https://deno.land/x/module_a/", - "https://deno.land/x/module_b/" - ), - Ordering::Less - ); - assert_eq!( - cmp( - "https://deno.land/x/module_b/", - "https://deno.land/x/module_a/" - ), - Ordering::Greater - ); - assert_eq!( - cmp( - "https://deno.land/x/module_a/", - "https://deno.land/std/module_b/" - ), - Ordering::Less - ); - assert_eq!( - cmp( - "https://deno.land/std/module_b/", - "https://deno.land/x/module_a/" - ), - Ordering::Greater - ); - - // by specifier, since folder names match - assert_eq!( - cmp( - "https://deno.land/std/module_a/", - 
"https://deno.land/x/module_a/" - ), - Ordering::Less - ); - } - - #[test] - fn test_get_folder_path_specifier() { - fn get(a: &str) -> String { - get_folder_path_specifier(&ModuleSpecifier::parse(a).unwrap()).to_string() - } - - assert_eq!(get("https://deno.land/"), "https://deno.land/"); - assert_eq!(get("https://deno.land"), "https://deno.land/"); - assert_eq!(get("https://deno.land/test"), "https://deno.land/"); - assert_eq!(get("https://deno.land/test/"), "https://deno.land/test/"); - assert_eq!( - get("https://deno.land/test/other"), - "https://deno.land/test/" - ); - assert_eq!( - get("https://deno.land/test/other/"), - "https://deno.land/test/other/" - ); - assert_eq!( - get("https://deno.land/test/other/test?test#other"), - "https://deno.land/test/other/" - ); - } - - #[tokio::test] - async fn test_resolve_npm_package_reqs() { - let mut loader = deno_graph::source::MemoryLoader::new( - vec![ - ( - "file:///dev/local_module_a/mod.ts".to_string(), - deno_graph::source::Source::Module { - specifier: "file:///dev/local_module_a/mod.ts".to_string(), - content: concat!( - "import 'https://deno.land/x/module_d/mod.ts';", - "import 'file:///dev/local_module_a/other.ts';", - "import 'file:///dev/local_module_b/mod.ts';", - "import 'https://deno.land/x/module_a/mod.ts';", - "import 'npm:package-a@local_module_a';", - "import 'https://deno.land/x/module_e/';", - ) - .to_string(), - maybe_headers: None, - }, - ), - ( - "file:///dev/local_module_a/other.ts".to_string(), - deno_graph::source::Source::Module { - specifier: "file:///dev/local_module_a/other.ts".to_string(), - content: "import 'npm:package-b@local_module_a';".to_string(), - maybe_headers: None, - }, - ), - ( - "file:///dev/local_module_b/mod.ts".to_string(), - deno_graph::source::Source::Module { - specifier: "file:///dev/local_module_b/mod.ts".to_string(), - content: concat!( - "export * from 'npm:package-b@local_module_b';", - "import * as test from 'data:application/typescript,export%20*%20from%20%22npm:package-data%40local_module_b%22;';", - ).to_string(), - maybe_headers: None, - }, - ), - ( - "https://deno.land/x/module_d/mod.ts".to_string(), - deno_graph::source::Source::Module { - specifier: "https://deno.land/x/module_d/mod.ts".to_string(), - content: concat!( - "import './other.ts';", - "import 'npm:package-a@module_d';", - ) - .to_string(), - maybe_headers: None, - }, - ), - ( - "https://deno.land/x/module_d/other.ts".to_string(), - deno_graph::source::Source::Module { - specifier: "https://deno.land/x/module_d/other.ts".to_string(), - content: "import 'npm:package-c@module_d'".to_string(), - maybe_headers: None, - }, - ), - ( - "https://deno.land/x/module_a/mod.ts".to_string(), - deno_graph::source::Source::Module { - specifier: "https://deno.land/x/module_a/mod.ts".to_string(), - content: concat!( - "import 'npm:package-a@module_a';", - "import 'npm:package-b@module_a';", - "import '../module_c/sub/sub/mod.ts';", - "import '../module_b/mod.ts';", - ) - .to_string(), - maybe_headers: None, - }, - ), - ( - "https://deno.land/x/module_b/mod.ts".to_string(), - deno_graph::source::Source::Module { - specifier: "https://deno.land/x/module_b/mod.ts".to_string(), - content: "import 'npm:package-a@module_b'".to_string(), - maybe_headers: None, - }, - ), - ( - "https://deno.land/x/module_c/sub/sub/mod.ts".to_string(), - deno_graph::source::Source::Module { - specifier: "https://deno.land/x/module_c/sub/sub/mod.ts" - .to_string(), - content: concat!( - "import 'npm:package-a@module_c';", - "import '../../mod.ts';", - ) - 
.to_string(), - maybe_headers: None, - }, - ), - ( - "https://deno.land/x/module_c/mod.ts".to_string(), - deno_graph::source::Source::Module { - specifier: "https://deno.land/x/module_c/mod.ts".to_string(), - content: concat!( - "import 'npm:package-b@module_c';", - "import '../module_d/sub_folder/mod.ts';", - ) - .to_string(), - maybe_headers: None, - }, - ), - ( - "https://deno.land/x/module_d/sub_folder/mod.ts".to_string(), - deno_graph::source::Source::Module { - specifier: "https://deno.land/x/module_d/sub_folder/mod.ts" - .to_string(), - content: "import 'npm:package-b@module_d';".to_string(), - maybe_headers: None, - }, - ), - ( - // ensure a module at a directory is treated as being at a directory - "https://deno.land/x/module_e/".to_string(), - deno_graph::source::Source::Module { - specifier: "https://deno.land/x/module_e/" - .to_string(), - content: "import 'npm:package-a@module_e';".to_string(), - maybe_headers: Some(vec![( - "content-type".to_string(), - "application/javascript".to_string(), - )]), - }, - ), - // redirect module - ( - "https://deno.land/x/module_redirect/mod.ts".to_string(), - deno_graph::source::Source::Module { - specifier: "https://deno.land/x/module_redirect@0.0.1/mod.ts".to_string(), - content: concat!( - "import 'npm:package-a@module_redirect';", - // try another redirect here - "import 'https://deno.land/x/module_redirect/other.ts';", - ).to_string(), - maybe_headers: None, - } - ), - ( - "https://deno.land/x/module_redirect/other.ts".to_string(), - deno_graph::source::Source::Module { - specifier: "https://deno.land/x/module_redirect@0.0.1/other.ts".to_string(), - content: "import 'npm:package-b@module_redirect';".to_string(), - maybe_headers: None, - } - ), - ], - Vec::new(), - ); - let analyzer = deno_graph::CapturingModuleAnalyzer::default(); - let mut graph = deno_graph::ModuleGraph::default(); - graph - .build( - vec![ - ModuleSpecifier::parse("file:///dev/local_module_a/mod.ts").unwrap(), - // test redirect at root - ModuleSpecifier::parse("https://deno.land/x/module_redirect/mod.ts") - .unwrap(), - ], - &mut loader, - deno_graph::BuildOptions { - module_analyzer: Some(&analyzer), - ..Default::default() - }, - ) - .await; - let reqs = resolve_graph_npm_info(&graph) - .package_reqs - .into_iter() - .map(|r| r.to_string()) - .collect::<Vec<_>>(); - - assert_eq!( - reqs, - vec![ - "package-a@local_module_a", - "package-b@local_module_a", - "package-a@module_redirect", - "package-b@module_redirect", - "package-b@local_module_b", - "package-data@local_module_b", - "package-a@module_a", - "package-b@module_a", - "package-a@module_d", - "package-b@module_d", - "package-c@module_d", - "package-a@module_e", - "package-a@module_b", - "package-a@module_c", - "package-b@module_c", - ] - ); - } -} diff --git a/cli/npm/resolvers/common.rs b/cli/npm/resolvers/common.rs index 2b02e7721..8c1ecd892 100644 --- a/cli/npm/resolvers/common.rs +++ b/cli/npm/resolvers/common.rs @@ -1,30 +1,28 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
-use std::collections::HashSet; use std::io::ErrorKind; use std::path::Path; use std::path::PathBuf; +use async_trait::async_trait; use deno_ast::ModuleSpecifier; use deno_core::error::AnyError; use deno_core::futures; -use deno_core::futures::future::BoxFuture; use deno_core::url::Url; -use deno_graph::npm::NpmPackageReq; use deno_runtime::deno_node::NodePermissions; use deno_runtime::deno_node::NodeResolutionMode; -use crate::args::Lockfile; use crate::npm::cache::should_sync_download; -use crate::npm::resolution::NpmResolutionSnapshot; use crate::npm::NpmCache; use crate::npm::NpmPackageId; use crate::npm::NpmResolutionPackage; -pub trait InnerNpmPackageResolver: Send + Sync { +/// Part of the resolution that interacts with the file system. +#[async_trait] +pub trait NpmPackageFsResolver: Send + Sync { fn resolve_package_folder_from_deno_module( &self, - pkg_req: &NpmPackageReq, + id: &NpmPackageId, ) -> Result<PathBuf, AnyError>; fn resolve_package_folder_from_package( @@ -41,29 +39,13 @@ pub trait InnerNpmPackageResolver: Send + Sync { fn package_size(&self, package_id: &NpmPackageId) -> Result<u64, AnyError>; - fn has_packages(&self) -> bool; - - fn add_package_reqs( - &self, - packages: Vec<NpmPackageReq>, - ) -> BoxFuture<'static, Result<(), AnyError>>; - - fn set_package_reqs( - &self, - packages: HashSet<NpmPackageReq>, - ) -> BoxFuture<'static, Result<(), AnyError>>; - - fn cache_packages(&self) -> BoxFuture<'static, Result<(), AnyError>>; + async fn cache_packages(&self) -> Result<(), AnyError>; fn ensure_read_permission( &self, permissions: &mut dyn NodePermissions, path: &Path, ) -> Result<(), AnyError>; - - fn snapshot(&self) -> NpmResolutionSnapshot; - - fn lock(&self, lockfile: &mut Lockfile) -> Result<(), AnyError>; } /// Caches all the packages in parallel. @@ -86,11 +68,7 @@ pub async fn cache_packages( let registry_url = registry_url.clone(); let handle = tokio::task::spawn(async move { cache - .ensure_package( - (package.pkg_id.nv.name.as_str(), &package.pkg_id.nv.version), - &package.dist, - ®istry_url, - ) + .ensure_package(&package.pkg_id.nv, &package.dist, ®istry_url) .await }); if sync_download { diff --git a/cli/npm/resolvers/global.rs b/cli/npm/resolvers/global.rs index e7bdbb1b4..1d4d14ac8 100644 --- a/cli/npm/resolvers/global.rs +++ b/cli/npm/resolvers/global.rs @@ -2,51 +2,41 @@ //! Code for global npm cache resolution. -use std::collections::HashSet; use std::path::Path; use std::path::PathBuf; -use std::sync::Arc; +use async_trait::async_trait; use deno_ast::ModuleSpecifier; use deno_core::error::AnyError; -use deno_core::futures::future::BoxFuture; -use deno_core::futures::FutureExt; use deno_core::url::Url; -use deno_graph::npm::NpmPackageReq; use deno_runtime::deno_node::NodePermissions; use deno_runtime::deno_node::NodeResolutionMode; -use crate::args::Lockfile; use crate::npm::cache::NpmPackageCacheFolderId; use crate::npm::resolution::NpmResolution; -use crate::npm::resolution::NpmResolutionSnapshot; use crate::npm::resolvers::common::cache_packages; use crate::npm::NpmCache; use crate::npm::NpmPackageId; -use crate::npm::NpmRegistryApi; use crate::npm::NpmResolutionPackage; use super::common::ensure_registry_read_permission; use super::common::types_package_name; -use super::common::InnerNpmPackageResolver; +use super::common::NpmPackageFsResolver; /// Resolves packages from the global npm cache. 
#[derive(Debug, Clone)] pub struct GlobalNpmPackageResolver { cache: NpmCache, - resolution: Arc<NpmResolution>, + resolution: NpmResolution, registry_url: Url, } impl GlobalNpmPackageResolver { pub fn new( cache: NpmCache, - api: NpmRegistryApi, - initial_snapshot: Option<NpmResolutionSnapshot>, + registry_url: Url, + resolution: NpmResolution, ) -> Self { - let registry_url = api.base_url().to_owned(); - let resolution = Arc::new(NpmResolution::new(api, initial_snapshot)); - Self { cache, resolution, @@ -76,13 +66,13 @@ impl GlobalNpmPackageResolver { } } -impl InnerNpmPackageResolver for GlobalNpmPackageResolver { +#[async_trait] +impl NpmPackageFsResolver for GlobalNpmPackageResolver { fn resolve_package_folder_from_deno_module( &self, - pkg_req: &NpmPackageReq, + id: &NpmPackageId, ) -> Result<PathBuf, AnyError> { - let pkg = self.resolution.resolve_package_from_deno_module(pkg_req)?; - Ok(self.package_folder(&pkg.pkg_id)) + Ok(self.package_folder(id)) } fn resolve_package_folder_from_package( @@ -125,34 +115,13 @@ impl InnerNpmPackageResolver for GlobalNpmPackageResolver { ) } - fn package_size(&self, package_id: &NpmPackageId) -> Result<u64, AnyError> { - let package_folder = self.package_folder(package_id); + fn package_size(&self, id: &NpmPackageId) -> Result<u64, AnyError> { + let package_folder = self.package_folder(id); Ok(crate::util::fs::dir_size(&package_folder)?) } - fn has_packages(&self) -> bool { - self.resolution.has_packages() - } - - fn add_package_reqs( - &self, - packages: Vec<NpmPackageReq>, - ) -> BoxFuture<'static, Result<(), AnyError>> { - let resolver = self.clone(); - async move { resolver.resolution.add_package_reqs(packages).await }.boxed() - } - - fn set_package_reqs( - &self, - packages: HashSet<NpmPackageReq>, - ) -> BoxFuture<'static, Result<(), AnyError>> { - let resolver = self.clone(); - async move { resolver.resolution.set_package_reqs(packages).await }.boxed() - } - - fn cache_packages(&self) -> BoxFuture<'static, Result<(), AnyError>> { - let resolver = self.clone(); - async move { cache_packages_in_resolver(&resolver).await }.boxed() + async fn cache_packages(&self) -> Result<(), AnyError> { + cache_packages_in_resolver(self).await } fn ensure_read_permission( @@ -163,14 +132,6 @@ impl InnerNpmPackageResolver for GlobalNpmPackageResolver { let registry_path = self.cache.registry_folder(&self.registry_url); ensure_registry_read_permission(permissions, ®istry_path, path) } - - fn snapshot(&self) -> NpmResolutionSnapshot { - self.resolution.snapshot() - } - - fn lock(&self, lockfile: &mut Lockfile) -> Result<(), AnyError> { - self.resolution.lock(lockfile) - } } async fn cache_packages_in_resolver( diff --git a/cli/npm/resolvers/local.rs b/cli/npm/resolvers/local.rs index aa6233d61..ba395d1b6 100644 --- a/cli/npm/resolvers/local.rs +++ b/cli/npm/resolvers/local.rs @@ -8,24 +8,20 @@ use std::collections::VecDeque; use std::fs; use std::path::Path; use std::path::PathBuf; -use std::sync::Arc; use crate::util::fs::symlink_dir; +use async_trait::async_trait; use deno_ast::ModuleSpecifier; use deno_core::anyhow::bail; use deno_core::anyhow::Context; use deno_core::error::AnyError; -use deno_core::futures::future::BoxFuture; -use deno_core::futures::FutureExt; use deno_core::url::Url; -use deno_graph::npm::NpmPackageReq; use deno_runtime::deno_core::futures; use deno_runtime::deno_node::NodePermissions; use deno_runtime::deno_node::NodeResolutionMode; use deno_runtime::deno_node::PackageJson; use tokio::task::JoinHandle; -use crate::args::Lockfile; use 
crate::npm::cache::mixed_case_package_name_encode; use crate::npm::cache::should_sync_download; use crate::npm::cache::NpmPackageCacheFolderId; @@ -33,21 +29,19 @@ use crate::npm::resolution::NpmResolution; use crate::npm::resolution::NpmResolutionSnapshot; use crate::npm::NpmCache; use crate::npm::NpmPackageId; -use crate::npm::NpmRegistryApi; -use crate::npm::NpmResolutionPackage; use crate::util::fs::copy_dir_recursive; use crate::util::fs::hard_link_dir_recursive; use super::common::ensure_registry_read_permission; use super::common::types_package_name; -use super::common::InnerNpmPackageResolver; +use super::common::NpmPackageFsResolver; /// Resolver that creates a local node_modules directory /// and resolves packages from it. #[derive(Debug, Clone)] pub struct LocalNpmPackageResolver { cache: NpmCache, - resolution: Arc<NpmResolution>, + resolution: NpmResolution, registry_url: Url, root_node_modules_path: PathBuf, root_node_modules_specifier: ModuleSpecifier, @@ -56,13 +50,10 @@ pub struct LocalNpmPackageResolver { impl LocalNpmPackageResolver { pub fn new( cache: NpmCache, - api: NpmRegistryApi, + registry_url: Url, node_modules_folder: PathBuf, - initial_snapshot: Option<NpmResolutionSnapshot>, + resolution: NpmResolution, ) -> Self { - let registry_url = api.base_url().to_owned(); - let resolution = Arc::new(NpmResolution::new(api, initial_snapshot)); - Self { cache, resolution, @@ -112,41 +103,34 @@ impl LocalNpmPackageResolver { fn get_package_id_folder( &self, - package_id: &NpmPackageId, + id: &NpmPackageId, ) -> Result<PathBuf, AnyError> { - match self.resolution.resolve_package_from_id(package_id) { - Some(package) => Ok(self.get_package_id_folder_from_package(&package)), + match self.resolution.resolve_package_cache_folder_id_from_id(id) { + // package is stored at: + // node_modules/.deno/<package_cache_folder_id_folder_name>/node_modules/<package_name> + Some(cache_folder_id) => Ok( + self + .root_node_modules_path + .join(".deno") + .join(get_package_folder_id_folder_name(&cache_folder_id)) + .join("node_modules") + .join(&cache_folder_id.nv.name), + ), None => bail!( "Could not find package information for '{}'", - package_id.as_serialized() + id.as_serialized() ), } } - - fn get_package_id_folder_from_package( - &self, - package: &NpmResolutionPackage, - ) -> PathBuf { - // package is stored at: - // node_modules/.deno/<package_cache_folder_id_folder_name>/node_modules/<package_name> - self - .root_node_modules_path - .join(".deno") - .join(get_package_folder_id_folder_name( - &package.get_package_cache_folder_id(), - )) - .join("node_modules") - .join(&package.pkg_id.nv.name) - } } -impl InnerNpmPackageResolver for LocalNpmPackageResolver { +#[async_trait] +impl NpmPackageFsResolver for LocalNpmPackageResolver { fn resolve_package_folder_from_deno_module( &self, - pkg_req: &NpmPackageReq, + node_id: &NpmPackageId, ) -> Result<PathBuf, AnyError> { - let package = self.resolution.resolve_package_from_deno_module(pkg_req)?; - Ok(self.get_package_id_folder_from_package(&package)) + self.get_package_id_folder(node_id) } fn resolve_package_folder_from_package( @@ -203,47 +187,15 @@ impl InnerNpmPackageResolver for LocalNpmPackageResolver { Ok(package_root_path) } - fn package_size(&self, package_id: &NpmPackageId) -> Result<u64, AnyError> { - let package_folder_path = self.get_package_id_folder(package_id)?; + fn package_size(&self, id: &NpmPackageId) -> Result<u64, AnyError> { + let package_folder_path = self.get_package_id_folder(id)?; 
Ok(crate::util::fs::dir_size(&package_folder_path)?) } - fn has_packages(&self) -> bool { - self.resolution.has_packages() - } - - fn add_package_reqs( - &self, - packages: Vec<NpmPackageReq>, - ) -> BoxFuture<'static, Result<(), AnyError>> { - let resolver = self.clone(); - async move { - resolver.resolution.add_package_reqs(packages).await?; - Ok(()) - } - .boxed() - } - - fn set_package_reqs( - &self, - packages: HashSet<NpmPackageReq>, - ) -> BoxFuture<'static, Result<(), AnyError>> { - let resolver = self.clone(); - async move { - resolver.resolution.set_package_reqs(packages).await?; - Ok(()) - } - .boxed() - } - - fn cache_packages(&self) -> BoxFuture<'static, Result<(), AnyError>> { - let resolver = self.clone(); - async move { - sync_resolver_with_fs(&resolver).await?; - Ok(()) - } - .boxed() + async fn cache_packages(&self) -> Result<(), AnyError> { + sync_resolver_with_fs(self).await?; + Ok(()) } fn ensure_read_permission( @@ -257,14 +209,6 @@ impl InnerNpmPackageResolver for LocalNpmPackageResolver { path, ) } - - fn snapshot(&self) -> NpmResolutionSnapshot { - self.resolution.snapshot() - } - - fn lock(&self, lockfile: &mut Lockfile) -> Result<(), AnyError> { - self.resolution.lock(lockfile) - } } async fn sync_resolver_with_fs( @@ -321,11 +265,7 @@ async fn sync_resolution_with_fs( let package = package.clone(); let handle = tokio::task::spawn(async move { cache - .ensure_package( - (&package.pkg_id.nv.name, &package.pkg_id.nv.version), - &package.dist, - ®istry_url, - ) + .ensure_package(&package.pkg_id.nv, &package.dist, ®istry_url) .await?; let sub_node_modules = folder_path.join("node_modules"); let package_path = @@ -333,8 +273,7 @@ async fn sync_resolution_with_fs( fs::create_dir_all(&package_path) .with_context(|| format!("Creating '{}'", folder_path.display()))?; let cache_folder = cache.package_folder_for_name_and_version( - &package.pkg_id.nv.name, - &package.pkg_id.nv.version, + &package.pkg_id.nv, ®istry_url, ); // for now copy, but in the future consider hard linking @@ -427,22 +366,22 @@ async fn sync_resolution_with_fs( .into_iter() .map(|id| (id, true)), ); - while let Some((package_id, is_top_level)) = pending_packages.pop_front() { - let root_folder_name = if found_names.insert(package_id.nv.name.clone()) { - package_id.nv.name.clone() + while let Some((id, is_top_level)) = pending_packages.pop_front() { + let root_folder_name = if found_names.insert(id.nv.name.clone()) { + id.nv.name.clone() } else if is_top_level { - format!("{}@{}", package_id.nv.name, package_id.nv.version) + id.nv.to_string() } else { continue; // skip, already handled }; - let package = snapshot.package_from_id(&package_id).unwrap(); + let package = snapshot.package_from_id(&id).unwrap(); let local_registry_package_path = join_package_name( &deno_local_registry_dir .join(get_package_folder_id_folder_name( &package.get_package_cache_folder_id(), )) .join("node_modules"), - &package_id.nv.name, + &id.nv.name, ); symlink_package_dir( diff --git a/cli/npm/resolvers/mod.rs b/cli/npm/resolvers/mod.rs index 3ac373a54..2450638bf 100644 --- a/cli/npm/resolvers/mod.rs +++ b/cli/npm/resolvers/mod.rs @@ -7,10 +7,10 @@ mod local; use deno_ast::ModuleSpecifier; use deno_core::anyhow::bail; use deno_core::anyhow::Context; -use deno_core::error::custom_error; use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; use deno_core::serde_json; +use deno_graph::npm::NpmPackageNv; use deno_graph::npm::NpmPackageReq; use deno_runtime::deno_node::NodePermissions; use 
deno_runtime::deno_node::NodeResolutionMode; @@ -27,8 +27,9 @@ use std::sync::Arc; use crate::args::Lockfile; use crate::util::fs::canonicalize_path_maybe_not_exists; -use self::common::InnerNpmPackageResolver; +use self::common::NpmPackageFsResolver; use self::local::LocalNpmPackageResolver; +use super::resolution::NpmResolution; use super::NpmCache; use super::NpmPackageId; use super::NpmRegistryApi; @@ -43,10 +44,10 @@ pub struct NpmProcessState { #[derive(Clone)] pub struct NpmPackageResolver { - no_npm: bool, - inner: Arc<dyn InnerNpmPackageResolver>, + fs_resolver: Arc<dyn NpmPackageFsResolver>, local_node_modules_path: Option<PathBuf>, api: NpmRegistryApi, + resolution: NpmResolution, cache: NpmCache, maybe_lockfile: Option<Arc<Mutex<Lockfile>>>, } @@ -54,22 +55,24 @@ pub struct NpmPackageResolver { impl std::fmt::Debug for NpmPackageResolver { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("NpmPackageResolver") - .field("no_npm", &self.no_npm) - .field("inner", &"<omitted>") + .field("fs_resolver", &"<omitted>") .field("local_node_modules_path", &self.local_node_modules_path) + .field("api", &"<omitted>") + .field("resolution", &"<omitted>") + .field("cache", &"<omitted>") + .field("maybe_lockfile", &"<omitted>") .finish() } } impl NpmPackageResolver { pub fn new(cache: NpmCache, api: NpmRegistryApi) -> Self { - Self::new_inner(cache, api, false, None, None, None) + Self::new_inner(cache, api, None, None, None) } pub async fn new_with_maybe_lockfile( cache: NpmCache, api: NpmRegistryApi, - no_npm: bool, local_node_modules_path: Option<PathBuf>, initial_snapshot: Option<NpmResolutionSnapshot>, maybe_lockfile: Option<Arc<Mutex<Lockfile>>>, @@ -96,7 +99,6 @@ impl NpmPackageResolver { Ok(Self::new_inner( cache, api, - no_npm, local_node_modules_path, initial_snapshot, maybe_lockfile, @@ -106,47 +108,67 @@ impl NpmPackageResolver { fn new_inner( cache: NpmCache, api: NpmRegistryApi, - no_npm: bool, local_node_modules_path: Option<PathBuf>, maybe_snapshot: Option<NpmResolutionSnapshot>, maybe_lockfile: Option<Arc<Mutex<Lockfile>>>, ) -> Self { - let inner: Arc<dyn InnerNpmPackageResolver> = match &local_node_modules_path - { - Some(node_modules_folder) => Arc::new(LocalNpmPackageResolver::new( - cache.clone(), - api.clone(), - node_modules_folder.clone(), - maybe_snapshot, - )), - None => Arc::new(GlobalNpmPackageResolver::new( - cache.clone(), - api.clone(), - maybe_snapshot, - )), - }; + let registry_url = api.base_url().to_owned(); + let resolution = + NpmResolution::new(api.clone(), maybe_snapshot, maybe_lockfile.clone()); + let fs_resolver: Arc<dyn NpmPackageFsResolver> = + match &local_node_modules_path { + Some(node_modules_folder) => Arc::new(LocalNpmPackageResolver::new( + cache.clone(), + registry_url, + node_modules_folder.clone(), + resolution.clone(), + )), + None => Arc::new(GlobalNpmPackageResolver::new( + cache.clone(), + registry_url, + resolution.clone(), + )), + }; Self { - no_npm, - inner, + fs_resolver, local_node_modules_path, api, + resolution, cache, maybe_lockfile, } } + pub fn api(&self) -> &NpmRegistryApi { + &self.api + } + + pub fn resolution(&self) -> &NpmResolution { + &self.resolution + } + /// Resolves an npm package folder path from a Deno module. 
pub fn resolve_package_folder_from_deno_module( &self, - pkg_req: &NpmPackageReq, + package_id: &NpmPackageNv, + ) -> Result<PathBuf, AnyError> { + let node_id = self + .resolution + .resolve_pkg_id_from_deno_module(package_id)?; + self.resolve_pkg_folder_from_deno_module_at_node_id(&node_id) + } + + fn resolve_pkg_folder_from_deno_module_at_node_id( + &self, + package_id: &NpmPackageId, ) -> Result<PathBuf, AnyError> { let path = self - .inner - .resolve_package_folder_from_deno_module(pkg_req)?; + .fs_resolver + .resolve_package_folder_from_deno_module(package_id)?; let path = canonicalize_path_maybe_not_exists(&path)?; log::debug!( "Resolved package folder of {} to {}", - pkg_req, + package_id.as_serialized(), path.display() ); Ok(path) @@ -160,7 +182,7 @@ impl NpmPackageResolver { mode: NodeResolutionMode, ) -> Result<PathBuf, AnyError> { let path = self - .inner + .fs_resolver .resolve_package_folder_from_package(name, referrer, mode)?; log::debug!("Resolved {} from {} to {}", name, referrer, path.display()); Ok(path) @@ -174,7 +196,7 @@ impl NpmPackageResolver { specifier: &ModuleSpecifier, ) -> Result<PathBuf, AnyError> { let path = self - .inner + .fs_resolver .resolve_package_folder_from_specifier(specifier)?; log::debug!( "Resolved package folder of {} to {}", @@ -189,7 +211,7 @@ impl NpmPackageResolver { &self, package_id: &NpmPackageId, ) -> Result<u64, AnyError> { - self.inner.package_size(package_id) + self.fs_resolver.package_size(package_id) } /// Gets if the provided specifier is in an npm package. @@ -201,7 +223,7 @@ impl NpmPackageResolver { /// If the resolver has resolved any npm packages. pub fn has_packages(&self) -> bool { - self.inner.has_packages() + self.resolution.has_packages() } /// Adds package requirements to the resolver and ensures everything is setup. @@ -213,24 +235,8 @@ impl NpmPackageResolver { return Ok(()); } - if self.no_npm { - let fmt_reqs = packages - .iter() - .collect::<HashSet<_>>() // prevent duplicates - .iter() - .map(|p| format!("\"{p}\"")) - .collect::<Vec<_>>() - .join(", "); - return Err(custom_error( - "NoNpm", - format!( - "Following npm specifiers were requested: {fmt_reqs}; but --no-npm is specified." - ), - )); - } - - self.inner.add_package_reqs(packages).await?; - self.inner.cache_packages().await?; + self.resolution.add_package_reqs(packages).await?; + self.fs_resolver.cache_packages().await?; // If there's a lock file, update it with all discovered npm packages if let Some(lockfile_mutex) = &self.maybe_lockfile { @@ -248,13 +254,13 @@ impl NpmPackageResolver { &self, packages: HashSet<NpmPackageReq>, ) -> Result<(), AnyError> { - self.inner.set_package_reqs(packages).await + self.resolution.set_package_reqs(packages).await } /// Gets the state of npm for the process. 
  pub fn get_npm_process_state(&self) -> String {
    serde_json::to_string(&NpmProcessState {
-      snapshot: self.inner.snapshot(),
+      snapshot: self.snapshot(),
       local_node_modules_path: self
         .local_node_modules_path
         .as_ref()
@@ -268,7 +274,6 @@ impl NpmPackageResolver {
     Self::new_inner(
       self.cache.clone(),
       self.api.clone(),
-      self.no_npm,
       self.local_node_modules_path.clone(),
       Some(self.snapshot()),
       None,
@@ -276,11 +281,11 @@
   }

   pub fn snapshot(&self) -> NpmResolutionSnapshot {
-    self.inner.snapshot()
+    self.resolution.snapshot()
   }

   pub fn lock(&self, lockfile: &mut Lockfile) -> Result<(), AnyError> {
-    self.inner.lock(lockfile)
+    self.resolution.lock(lockfile)
   }

   pub async fn inject_synthetic_types_node_package(
@@ -288,11 +293,17 @@
   ) -> Result<(), AnyError> {
     // add and ensure this isn't added to the lockfile
     self
-      .inner
+      .resolution
       .add_package_reqs(vec![NpmPackageReq::from_str("@types/node").unwrap()])
       .await?;
-    self.inner.cache_packages().await?;
+    self.fs_resolver.cache_packages().await?;
+
+    Ok(())
+  }
+
+  pub async fn resolve_pending(&self) -> Result<(), AnyError> {
+    self.resolution.resolve_pending().await?;
+    self.fs_resolver.cache_packages().await?;
     Ok(())
   }
 }
@@ -332,7 +343,7 @@ impl RequireNpmResolver for NpmPackageResolver {
     permissions: &mut dyn NodePermissions,
     path: &Path,
   ) -> Result<(), AnyError> {
-    self.inner.ensure_read_permission(permissions, path)
+    self.fs_resolver.ensure_read_permission(permissions, path)
   }
 }
diff --git a/cli/npm/tarball.rs b/cli/npm/tarball.rs
index 302c6308a..1f804a9aa 100644
--- a/cli/npm/tarball.rs
+++ b/cli/npm/tarball.rs
@@ -7,7 +7,7 @@ use std::path::PathBuf;
 use deno_core::anyhow::bail;
 use deno_core::error::AnyError;
-use deno_graph::semver::Version;
+use deno_graph::npm::NpmPackageNv;
 use flate2::read::GzDecoder;
 use tar::Archive;
 use tar::EntryType;
@@ -16,7 +16,7 @@ use super::cache::with_folder_sync_lock;
 use super::registry::NpmPackageVersionDistInfo;

 pub fn verify_and_extract_tarball(
-  package: (&str, &Version),
+  package: &NpmPackageNv,
   data: &[u8],
   dist_info: &NpmPackageVersionDistInfo,
   output_folder: &Path,
@@ -29,7 +29,7 @@ pub fn verify_and_extract_tarball(
 }

 fn verify_tarball_integrity(
-  package: (&str, &Version),
+  package: &NpmPackageNv,
   data: &[u8],
   npm_integrity: &str,
 ) -> Result<(), AnyError> {
@@ -40,18 +40,16 @@ fn verify_tarball_integrity(
         "sha512" => &ring::digest::SHA512,
         "sha1" => &ring::digest::SHA1_FOR_LEGACY_USE_ONLY,
         hash_kind => bail!(
-          "Not implemented hash function for {}@{}: {}",
-          package.0,
-          package.1,
+          "Not implemented hash function for {}: {}",
+          package,
           hash_kind
         ),
       };
       (algo, checksum.to_lowercase())
     }
     None => bail!(
-      "Not implemented integrity kind for {}@{}: {}",
-      package.0,
-      package.1,
+      "Not implemented integrity kind for {}: {}",
+      package,
       npm_integrity
     ),
   };
@@ -62,9 +60,8 @@
   let tarball_checksum = base64::encode(digest.as_ref()).to_lowercase();
   if tarball_checksum != expected_checksum {
     bail!(
-      "Tarball checksum did not match what was provided by npm registry for {}@{}.\n\nExpected: {}\nActual: {}",
-      package.0,
-      package.1,
+      "Tarball checksum did not match what was provided by npm registry for {}.\n\nExpected: {}\nActual: {}",
+      package,
       expected_checksum,
       tarball_checksum,
     )
@@ -119,29 +116,32 @@ fn extract_tarball(data: &[u8], output_folder: &Path) -> Result<(), AnyError> {

 #[cfg(test)]
 mod test {
+  use deno_graph::semver::Version;
+
   use super::*;

   #[test]
   pub fn test_verify_tarball() {
-    let package_name = "package".to_string();
-    let package_version = Version::parse_from_npm("1.0.0").unwrap();
-    let package = (package_name.as_str(), &package_version);
+    let package = NpmPackageNv {
+      name: "package".to_string(),
+      version: Version::parse_from_npm("1.0.0").unwrap(),
+    };
     let actual_checksum = "z4phnx7vul3xvchq1m2ab9yg5aulvxxcg/spidns6c5h0ne8xyxysp+dgnkhfuwvy7kxvudbeoglodj6+sfapg==";
     assert_eq!(
-      verify_tarball_integrity(package, &Vec::new(), "test")
+      verify_tarball_integrity(&package, &Vec::new(), "test")
         .unwrap_err()
         .to_string(),
       "Not implemented integrity kind for package@1.0.0: test",
     );
     assert_eq!(
-      verify_tarball_integrity(package, &Vec::new(), "notimplemented-test")
+      verify_tarball_integrity(&package, &Vec::new(), "notimplemented-test")
         .unwrap_err()
         .to_string(),
       "Not implemented hash function for package@1.0.0: notimplemented",
     );
     assert_eq!(
-      verify_tarball_integrity(package, &Vec::new(), "sha1-test")
+      verify_tarball_integrity(&package, &Vec::new(), "sha1-test")
         .unwrap_err()
         .to_string(),
       concat!(
@@ -150,13 +150,13 @@ mod test {
       ),
     );
     assert_eq!(
-      verify_tarball_integrity(package, &Vec::new(), "sha512-test")
+      verify_tarball_integrity(&package, &Vec::new(), "sha512-test")
         .unwrap_err()
         .to_string(),
       format!("Tarball checksum did not match what was provided by npm registry for package@1.0.0.\n\nExpected: test\nActual: {actual_checksum}"),
     );
     assert!(verify_tarball_integrity(
-      package,
+      &package,
       &Vec::new(),
       &format!("sha512-{actual_checksum}")
     )