path: root/cli/npm/managed/cache.rs
author    David Sherret <dsherret@users.noreply.github.com>  2024-05-14 14:26:48 -0400
committer GitHub <noreply@github.com>  2024-05-14 14:26:48 -0400
commit    f16b4d4df8bd03f2d0c5e5d6855e334fa768e828 (patch)
tree      6b99b3a79486cd73c32145bca981e13fc626549b /cli/npm/managed/cache.rs
parent    c0e3b6ed9d955bc59a8d88e177219b541881c63d (diff)
fix(npm): make tarball extraction more reliable (#23759)
1. Extracts to a directory beside the destination.
2. Renames to the destination with retries.
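A minimal sketch of that two-step approach, assuming illustrative helper names and retry counts that are not taken from the commit:

use std::fs;
use std::io;
use std::path::Path;
use std::thread;
use std::time::Duration;

// Sketch (not the commit's code): extract into a sibling directory on the
// same filesystem, then rename into place, retrying the rename because
// another process or a virus scanner (especially on Windows) may briefly
// hold the path open and make the rename fail spuriously.
fn extract_then_rename(
  dest: &Path,
  extract_to: impl FnOnce(&Path) -> io::Result<()>,
) -> io::Result<()> {
  // a sibling of the destination stays on the same filesystem, so the final
  // rename is an atomic metadata operation rather than a copy; a real
  // implementation would add a random suffix to dodge stale leftovers
  let name = dest.file_name().and_then(|n| n.to_str()).unwrap_or("package");
  let temp = dest.with_file_name(format!("{name}.tmp-extract"));
  extract_to(&temp)?;
  // note: fs::rename fails if `dest` already exists as a non-empty
  // directory, so this strategy only fits fresh destinations
  let mut attempt: u64 = 0;
  loop {
    match fs::rename(&temp, dest) {
      Ok(()) => return Ok(()),
      Err(_) if attempt < 5 => {
        attempt += 1;
        thread::sleep(Duration::from_millis(20 * attempt));
      }
      Err(err) => return Err(err),
    }
  }
}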
Diffstat (limited to 'cli/npm/managed/cache.rs')
-rw-r--r--  cli/npm/managed/cache.rs  51
1 file changed, 38 insertions, 13 deletions
diff --git a/cli/npm/managed/cache.rs b/cli/npm/managed/cache.rs
index 9ba5c1c99..44b98fcee 100644
--- a/cli/npm/managed/cache.rs
+++ b/cli/npm/managed/cache.rs
@@ -25,6 +25,7 @@ use crate::util::fs::hard_link_dir_recursive;
use crate::util::progress_bar::ProgressBar;
use super::tarball::verify_and_extract_tarball;
+use super::tarball::TarballExtractionMode;
/// Stores a single copy of npm packages in a cache.
#[derive(Debug)]
@@ -69,7 +70,7 @@ impl NpmCache {
/// to ensure a package is only downloaded once per run of the CLI. This
/// prevents downloads from re-occurring when someone has `--reload` and
/// a dynamic import imports the same package again, for example.
- fn should_use_global_cache_for_package(&self, package: &PackageNv) -> bool {
+ fn should_use_cache_for_package(&self, package: &PackageNv) -> bool {
self.cache_setting.should_use_for_npm_package(&package.name)
|| !self
.previously_reloaded_packages
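The `previously_reloaded_packages` field itself is outside this hunk; a plausible shape for such a once-per-run guard (an illustrative assumption, not the field's actual definition) is a mutex-protected set whose insert result answers "is this the first request?":

use std::collections::HashSet;
use std::sync::Mutex;

// Hypothetical once-per-run guard: HashSet::insert returns true only the
// first time a value is added, so the first caller is told to re-download
// and every later caller for the same package reuses what was fetched.
struct ReloadTracker {
  previously_reloaded_packages: Mutex<HashSet<String>>,
}

impl ReloadTracker {
  /// Returns true if this is the first request for `package_id`
  /// during this run of the process.
  fn mark_reloaded(&self, package_id: &str) -> bool {
    self
      .previously_reloaded_packages
      .lock()
      .unwrap()
      .insert(package_id.to_string())
  }
}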
@@ -91,26 +92,23 @@ impl NpmCache {
async fn ensure_package_inner(
&self,
- package: &PackageNv,
+ package_nv: &PackageNv,
dist: &NpmPackageVersionDistInfo,
registry_url: &Url,
) -> Result<(), AnyError> {
let package_folder = self
.cache_dir
- .package_folder_for_name_and_version(package, registry_url);
- if self.should_use_global_cache_for_package(package)
- && self.fs.exists_sync(&package_folder)
- // if this file exists, then the package didn't successfully extract
- // the first time, or another process is currently extracting the zip file
- && !self.fs.exists_sync(&package_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME))
- {
+ .package_folder_for_name_and_version(package_nv, registry_url);
+ let should_use_cache = self.should_use_cache_for_package(package_nv);
+ let package_folder_exists = self.fs.exists_sync(&package_folder);
+ if should_use_cache && package_folder_exists {
return Ok(());
} else if self.cache_setting == CacheSetting::Only {
return Err(custom_error(
"NotCached",
format!(
"An npm specifier not found in cache: \"{}\", --cached-only is specified.",
- &package.name
+ &package_nv.name
)
)
);
@@ -127,7 +125,31 @@ impl NpmCache {
.await?;
match maybe_bytes {
Some(bytes) => {
- verify_and_extract_tarball(package, &bytes, dist, &package_folder)
+ let extraction_mode = if should_use_cache || !package_folder_exists {
+ TarballExtractionMode::SiblingTempDir
+ } else {
+ // The user ran with `--reload`, so overwrite the package instead of
+ // deleting it, since the package might get corrupted if a user kills
+ // their deno process while it's deleting a package directory.
+ //
+ // We can't rename this folder and delete it because the folder
+ // may be in use by another process or may now contain hard links,
+ // which will cause Windows to throw an "AccessDenied" error when
+ // renaming. So we settle for overwriting.
+ TarballExtractionMode::Overwrite
+ };
+ let dist = dist.clone();
+ let package_nv = package_nv.clone();
+ deno_core::unsync::spawn_blocking(move || {
+ verify_and_extract_tarball(
+ &package_nv,
+ &bytes,
+ &dist,
+ &package_folder,
+ extraction_mode,
+ )
+ })
+ .await?
}
None => {
bail!("Could not find npm package tarball at: {}", dist.tarball);
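`TarballExtractionMode` is defined in `super::tarball`, which this diff touches only via the import; a hedged sketch of what the two variants used above may represent:

// Sketch only: the variant names match the import in this diff, but these
// doc comments paraphrase the reasoning in the hunk above, not tarball.rs.
pub enum TarballExtractionMode {
  /// Extract into a temp dir beside the destination, then rename it into
  /// place. Used when the destination folder doesn't exist yet.
  SiblingTempDir,
  /// Extract directly over the existing folder. Used for `--reload` of an
  /// already-extracted package, because renaming or deleting a directory
  /// that is in use or contains hard links can fail on Windows with
  /// "AccessDenied".
  Overwrite,
}

Note also that the extraction is moved off the async executor with `deno_core::unsync::spawn_blocking`, since verifying and unpacking a tarball is synchronous, CPU- and IO-bound work.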
@@ -150,7 +172,7 @@ impl NpmCache {
.package_folder_for_id(folder_id, registry_url);
if package_folder.exists()
- // if this file exists, then the package didn't successfully extract
+ // if this file exists, then the package didn't successfully initialize
// the first time, or another process is currently extracting the tarball
&& !package_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME).exists()
&& self.cache_setting.should_use_for_npm_package(&folder_id.nv.name)
@@ -161,6 +183,9 @@ impl NpmCache {
let original_package_folder = self
.cache_dir
.package_folder_for_name_and_version(&folder_id.nv, registry_url);
+
+ // it seems Windows throws an "AccessDenied" error when moving a
+ // directory with hard links, which is why the folder is hard linked
+ // into place instead of moved
with_folder_sync_lock(&folder_id.nv, &package_folder, || {
hard_link_dir_recursive(&original_package_folder, &package_folder)
})?;
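`hard_link_dir_recursive` is imported from `crate::util::fs` and its body is not part of this diff; a minimal self-contained sketch of a recursive hard-link copy:

use std::fs;
use std::io;
use std::path::Path;

// Sketch, not the real crate::util::fs implementation: mirror `from`'s
// directory tree under `to`, hard-linking each file so both trees share
// the same on-disk bytes without copying them.
fn hard_link_dir_recursive(from: &Path, to: &Path) -> io::Result<()> {
  fs::create_dir_all(to)?;
  for entry in fs::read_dir(from)? {
    let entry = entry?;
    let new_path = to.join(entry.file_name());
    if entry.file_type()?.is_dir() {
      hard_link_dir_recursive(&entry.path(), &new_path)?;
    } else {
      // a hard link is a second name for the same inode, so deleting one
      // tree later leaves the other intact
      fs::hard_link(entry.path(), new_path)?;
    }
  }
  Ok(())
}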
@@ -206,7 +231,7 @@ impl NpmCache {
const NPM_PACKAGE_SYNC_LOCK_FILENAME: &str = ".deno_sync_lock";
-pub fn with_folder_sync_lock(
+fn with_folder_sync_lock(
package: &PackageNv,
output_folder: &Path,
action: impl FnOnce() -> Result<(), AnyError>,
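The body of `with_folder_sync_lock` falls outside the hunks shown here; a sketch of the marker-file pattern implied by its signature and by `NPM_PACKAGE_SYNC_LOCK_FILENAME` (using `std::io::Result` in place of `AnyError` to stay self-contained):

use std::fs;
use std::io;
use std::path::Path;

// Sketch of the marker-file pattern: drop a lock file into the folder before
// running `action` and remove it only after `action` succeeds. If the
// process dies mid-way, the marker survives, and the existence checks above
// treat the folder as incomplete on the next run.
fn with_folder_sync_lock(
  output_folder: &Path,
  action: impl FnOnce() -> io::Result<()>,
) -> io::Result<()> {
  const SYNC_LOCK: &str = ".deno_sync_lock";
  fs::create_dir_all(output_folder)?;
  let lock_path = output_folder.join(SYNC_LOCK);
  fs::write(&lock_path, "")?;
  action()?;
  // removed last, so a crash during `action` leaves the marker behind
  fs::remove_file(&lock_path)?;
  Ok(())
}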