summary refs log tree commit diff
path: root/cli/npm/cache.rs
diff options
context:
space:
mode:
Diffstat (limited to 'cli/npm/cache.rs')
-rw-r--r--cli/npm/cache.rs91
1 files changed, 37 insertions, 54 deletions
diff --git a/cli/npm/cache.rs b/cli/npm/cache.rs
index b2d932911..9f58bcd0d 100644
--- a/cli/npm/cache.rs
+++ b/cli/npm/cache.rs
@@ -36,7 +36,7 @@ pub fn should_sync_download() -> bool {
const NPM_PACKAGE_SYNC_LOCK_FILENAME: &str = ".deno_sync_lock";
pub fn with_folder_sync_lock(
- package: (&str, &Version),
+ package: &NpmPackageNv,
output_folder: &Path,
action: impl FnOnce() -> Result<(), AnyError>,
) -> Result<(), AnyError> {
@@ -88,14 +88,13 @@ pub fn with_folder_sync_lock(
if remove_err.kind() != std::io::ErrorKind::NotFound {
bail!(
concat!(
- "Failed setting up package cache directory for {}@{}, then ",
+ "Failed setting up package cache directory for {}, then ",
"failed cleaning it up.\n\nOriginal error:\n\n{}\n\n",
"Remove error:\n\n{}\n\nPlease manually ",
"delete this folder or you will run into issues using this ",
"package in the future:\n\n{}"
),
- package.0,
- package.1,
+ package,
err,
remove_err,
output_folder.display(),
@@ -182,31 +181,26 @@ impl ReadonlyNpmCache {
pub fn package_folder_for_id(
&self,
- id: &NpmPackageCacheFolderId,
+ folder_id: &NpmPackageCacheFolderId,
registry_url: &Url,
) -> PathBuf {
- if id.copy_index == 0 {
- self.package_folder_for_name_and_version(
- &id.nv.name,
- &id.nv.version,
- registry_url,
- )
+ if folder_id.copy_index == 0 {
+ self.package_folder_for_name_and_version(&folder_id.nv, registry_url)
} else {
self
- .package_name_folder(&id.nv.name, registry_url)
- .join(format!("{}_{}", id.nv.version, id.copy_index))
+ .package_name_folder(&folder_id.nv.name, registry_url)
+ .join(format!("{}_{}", folder_id.nv.version, folder_id.copy_index))
}
}
pub fn package_folder_for_name_and_version(
&self,
- name: &str,
- version: &Version,
+ package: &NpmPackageNv,
registry_url: &Url,
) -> PathBuf {
self
- .package_name_folder(name, registry_url)
- .join(version.to_string())
+ .package_name_folder(&package.name, registry_url)
+ .join(package.version.to_string())
}
pub fn package_name_folder(&self, name: &str, registry_url: &Url) -> PathBuf {
@@ -324,7 +318,7 @@ pub struct NpmCache {
http_client: HttpClient,
progress_bar: ProgressBar,
/// ensures a package is only downloaded once per run
- previously_reloaded_packages: Arc<Mutex<HashSet<String>>>,
+ previously_reloaded_packages: Arc<Mutex<HashSet<NpmPackageNv>>>,
}
impl NpmCache {
@@ -358,40 +352,36 @@ impl NpmCache {
/// and imports a dynamic import that imports the same package again for example.
fn should_use_global_cache_for_package(
&self,
- package: (&str, &Version),
+ package: &NpmPackageNv,
) -> bool {
- self.cache_setting.should_use_for_npm_package(package.0)
+ self.cache_setting.should_use_for_npm_package(&package.name)
|| !self
.previously_reloaded_packages
.lock()
- .insert(format!("{}@{}", package.0, package.1))
+ .insert(package.clone())
}
pub async fn ensure_package(
&self,
- package: (&str, &Version),
+ package: &NpmPackageNv,
dist: &NpmPackageVersionDistInfo,
registry_url: &Url,
) -> Result<(), AnyError> {
self
.ensure_package_inner(package, dist, registry_url)
.await
- .with_context(|| {
- format!("Failed caching npm package '{}@{}'.", package.0, package.1)
- })
+ .with_context(|| format!("Failed caching npm package '{package}'."))
}
async fn ensure_package_inner(
&self,
- package: (&str, &Version),
+ package: &NpmPackageNv,
dist: &NpmPackageVersionDistInfo,
registry_url: &Url,
) -> Result<(), AnyError> {
- let package_folder = self.readonly.package_folder_for_name_and_version(
- package.0,
- package.1,
- registry_url,
- );
+ let package_folder = self
+ .readonly
+ .package_folder_for_name_and_version(package, registry_url);
if self.should_use_global_cache_for_package(package)
&& package_folder.exists()
// if this file exists, then the package didn't successfully extract
@@ -404,7 +394,7 @@ impl NpmCache {
"NotCached",
format!(
"An npm specifier not found in cache: \"{}\", --cached-only is specified.",
- &package.0
+ &package.name
)
)
);
@@ -431,32 +421,28 @@ impl NpmCache {
/// from exists before this is called.
pub fn ensure_copy_package(
&self,
- id: &NpmPackageCacheFolderId,
+ folder_id: &NpmPackageCacheFolderId,
registry_url: &Url,
) -> Result<(), AnyError> {
- assert_ne!(id.copy_index, 0);
- let package_folder = self.readonly.package_folder_for_id(id, registry_url);
+ assert_ne!(folder_id.copy_index, 0);
+ let package_folder =
+ self.readonly.package_folder_for_id(folder_id, registry_url);
if package_folder.exists()
// if this file exists, then the package didn't successfully extract
// the first time, or another process is currently extracting the zip file
&& !package_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME).exists()
- && self.cache_setting.should_use_for_npm_package(&id.nv.name)
+ && self.cache_setting.should_use_for_npm_package(&folder_id.nv.name)
{
return Ok(());
}
- let original_package_folder =
- self.readonly.package_folder_for_name_and_version(
- &id.nv.name,
- &id.nv.version,
- registry_url,
- );
- with_folder_sync_lock(
- (id.nv.name.as_str(), &id.nv.version),
- &package_folder,
- || hard_link_dir_recursive(&original_package_folder, &package_folder),
- )?;
+ let original_package_folder = self
+ .readonly
+ .package_folder_for_name_and_version(&folder_id.nv, registry_url);
+ with_folder_sync_lock(&folder_id.nv, &package_folder, || {
+ hard_link_dir_recursive(&original_package_folder, &package_folder)
+ })?;
Ok(())
}
@@ -470,15 +456,12 @@ impl NpmCache {
pub fn package_folder_for_name_and_version(
&self,
- name: &str,
- version: &Version,
+ package: &NpmPackageNv,
registry_url: &Url,
) -> PathBuf {
- self.readonly.package_folder_for_name_and_version(
- name,
- version,
- registry_url,
- )
+ self
+ .readonly
+ .package_folder_for_name_and_version(package, registry_url)
}
pub fn package_name_folder(&self, name: &str, registry_url: &Url) -> PathBuf {