author    David Sherret <dsherret@users.noreply.github.com>  2023-05-22 16:55:04 -0400
committer GitHub <noreply@github.com>                        2023-05-22 16:55:04 -0400
commit    7b4c483aa159794cdb9d57d3252c2980fba45469 (patch)
tree      b1c3b0ab080e98ab20f470fbad5b1aba05045dfb
parent    612226de8e2fe3068d981866242bacedfceb9734 (diff)
fix(npm): store npm binary command resolution in lockfile (#19219)
Part of #19038
Closes #19034 (eliminates the time spent re-resolving)
-rw-r--r--  Cargo.lock                           8
-rw-r--r--  Cargo.toml                           4
-rw-r--r--  cli/factory.rs                       3
-rw-r--r--  cli/lsp/language_server.rs           5
-rw-r--r--  cli/module_loader.rs                 2
-rw-r--r--  cli/npm/installer.rs                32
-rw-r--r--  cli/npm/resolution.rs               18
-rw-r--r--  cli/npm/resolvers/mod.rs             6
-rw-r--r--  cli/standalone/mod.rs                1
-rw-r--r--  cli/tests/integration/npm_tests.rs  13
-rw-r--r--  cli/tests/integration/run_tests.rs  31
-rw-r--r--  cli/tools/repl/session.rs            2
-rw-r--r--  cli/worker.rs                       19
13 files changed, 103 insertions, 41 deletions
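For orientation before the diff itself: the core idea is that when an npm binary command (e.g. `deno run npm:<pkg>/<bin>`) is resolved, the resolution is now written back to the lockfile so the next invocation can reuse it instead of re-resolving against the registry (see the `cli/worker.rs` and `cli/npm/installer.rs` hunks below). The following is a minimal, standalone sketch of that idea using hypothetical stand-in types, not Deno's real internal API; the actual change calls `lockfile.lock().write()` inside `CliMainWorkerFactory` after `resolve_binary_export`.

```rust
// Standalone sketch (hypothetical types, not Deno's real API): persist a
// binary-command resolution into a lockfile so the next run takes a fast path.
use std::collections::HashMap;
use std::fs;
use std::io;
use std::path::PathBuf;

/// Stand-in for the lockfile: maps an npm requirement (e.g. "@denotest/bin")
/// to the concrete version it resolved to.
struct Lockfile {
    path: PathBuf,
    resolved: HashMap<String, String>,
}

impl Lockfile {
    fn load(path: PathBuf) -> Self {
        // Read "name=version" lines if the file exists; otherwise start empty.
        let resolved = fs::read_to_string(&path)
            .ok()
            .map(|text| {
                text.lines()
                    .filter_map(|line| line.split_once('='))
                    .map(|(k, v)| (k.to_string(), v.to_string()))
                    .collect()
            })
            .unwrap_or_default();
        Self { path, resolved }
    }

    fn write(&self) -> io::Result<()> {
        let text: String = self
            .resolved
            .iter()
            .map(|(k, v)| format!("{k}={v}\n"))
            .collect();
        fs::write(&self.path, text)
    }
}

/// Pretend resolution step: does the slow work only when the lockfile does not
/// already know the answer (mirroring the "Skipping top level install" path).
fn resolve_binary_command(req: &str, lockfile: &mut Lockfile) -> io::Result<String> {
    if let Some(version) = lockfile.resolved.get(req) {
        return Ok(version.clone()); // reuse the stored resolution, no registry work
    }
    let version = "1.0.0".to_string(); // placeholder for the real registry lookup
    lockfile.resolved.insert(req.to_string(), version.clone());
    lockfile.write()?; // persist so the next run takes the fast path
    Ok(version)
}

fn main() -> io::Result<()> {
    let mut lockfile = Lockfile::load(PathBuf::from("deno.lock.example"));
    let version = resolve_binary_command("@denotest/bin", &mut lockfile)?;
    println!("resolved @denotest/bin@{version}");
    Ok(())
}
```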
diff --git a/Cargo.lock b/Cargo.lock
index 57e089f9b..30d42d8d1 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1116,9 +1116,9 @@ dependencies = [
[[package]]
name = "deno_lockfile"
-version = "0.14.0"
+version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "54cecfa877ecd31bb7f694826a2b6566ff77515f527bddae296aff455e6999c2"
+checksum = "aa35b176a415501662244f99846b3d0dd1a7792b2135bf7f80007f8eca8b24ac"
dependencies = [
"ring",
"serde",
@@ -1216,9 +1216,9 @@ dependencies = [
[[package]]
name = "deno_npm"
-version = "0.4.0"
+version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "007cdc59079448de7af94510f7e9652bc6f17e9029741b26f3754f86a3cc2dd0"
+checksum = "4c617e46dc692e6bb77ec5da8d66a048cbd9756d7c1f9272d5d30548a017542e"
dependencies = [
"anyhow",
"async-trait",
diff --git a/Cargo.toml b/Cargo.toml
index dbc17d17f..25065bbf0 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -51,9 +51,9 @@ deno_runtime = { version = "0.113.0", path = "./runtime" }
napi_sym = { version = "0.35.0", path = "./cli/napi/sym" }
deno_bench_util = { version = "0.99.0", path = "./bench_util" }
test_util = { path = "./test_util" }
-deno_lockfile = "0.14.0"
+deno_lockfile = "0.14.1"
deno_media_type = { version = "0.1.0", features = ["module_specifier"] }
-deno_npm = "0.4.0"
+deno_npm = "0.5.0"
deno_semver = "0.2.1"
# exts
diff --git a/cli/factory.rs b/cli/factory.rs
index c64268ce3..3b171414f 100644
--- a/cli/factory.rs
+++ b/cli/factory.rs
@@ -603,6 +603,7 @@ impl CliFactory {
let node_resolver = self.node_resolver().await?.clone();
let npm_resolver = self.npm_resolver().await?.clone();
let maybe_inspector_server = self.maybe_inspector_server().clone();
+ let maybe_lockfile = self.maybe_lockfile().clone();
Ok(Arc::new(move || {
CliMainWorkerFactory::new(
StorageKeyResolver::from_options(&options),
@@ -627,6 +628,7 @@ impl CliFactory {
root_cert_store_provider.clone(),
fs.clone(),
maybe_inspector_server.clone(),
+ maybe_lockfile.clone(),
main_worker_options.clone(),
)
}))
@@ -660,6 +662,7 @@ impl CliFactory {
self.root_cert_store_provider().clone(),
self.fs().clone(),
self.maybe_inspector_server().clone(),
+ self.maybe_lockfile().clone(),
self.create_cli_main_worker_options()?,
))
}
diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs
index 96329e4a1..25d908a52 100644
--- a/cli/lsp/language_server.rs
+++ b/cli/lsp/language_server.rs
@@ -1247,9 +1247,8 @@ impl Inner {
let package_reqs = self.documents.npm_package_reqs();
let npm_resolver = self.npm_resolver.clone();
// spawn to avoid the LSP's Send requirements
- let handle = spawn(async move {
- npm_resolver.set_package_reqs((*package_reqs).clone()).await
- });
+ let handle =
+ spawn(async move { npm_resolver.set_package_reqs(&package_reqs).await });
if let Err(err) = handle.await.unwrap() {
lsp_warn!("Could not set npm package requirements. {:#}", err);
}
diff --git a/cli/module_loader.rs b/cli/module_loader.rs
index 5465ad1b8..73fd2f5f4 100644
--- a/cli/module_loader.rs
+++ b/cli/module_loader.rs
@@ -163,7 +163,7 @@ impl ModuleLoadPreparer {
// validate the integrity of all the modules
graph_lock_or_exit(graph, &mut lockfile);
// update it with anything new
- lockfile.write()?;
+ lockfile.write().context("Failed writing lockfile.")?;
}
// save the graph and get a reference to the new graph
diff --git a/cli/npm/installer.rs b/cli/npm/installer.rs
index 43f79d8f0..8590feb9c 100644
--- a/cli/npm/installer.rs
+++ b/cli/npm/installer.rs
@@ -25,23 +25,22 @@ struct PackageJsonDepsInstallerInner {
}
impl PackageJsonDepsInstallerInner {
- pub fn reqs_with_info_futures(
+ pub fn reqs_with_info_futures<'a>(
&self,
+ reqs: &'a [&'a NpmPackageReq],
) -> FuturesOrdered<
impl Future<
Output = Result<
- (&NpmPackageReq, Arc<deno_npm::registry::NpmPackageInfo>),
+ (&'a NpmPackageReq, Arc<deno_npm::registry::NpmPackageInfo>),
NpmRegistryPackageInfoLoadError,
>,
>,
> {
- let package_reqs = self.deps_provider.reqs();
-
- FuturesOrdered::from_iter(package_reqs.into_iter().map(|req| {
+ FuturesOrdered::from_iter(reqs.iter().map(|req| {
let api = self.npm_registry_api.clone();
async move {
let info = api.package_info(&req.name).await?;
- Ok::<_, NpmRegistryPackageInfoLoadError>((req, info))
+ Ok::<_, NpmRegistryPackageInfoLoadError>((*req, info))
}
}))
}
@@ -77,7 +76,24 @@ impl PackageJsonDepsInstaller {
return Ok(()); // already installed by something else
}
- let mut reqs_with_info_futures = inner.reqs_with_info_futures();
+ let package_reqs = inner.deps_provider.reqs();
+
+ // check if something needs resolving before bothering to load all
+ // the package information (which is slow)
+ if package_reqs.iter().all(|req| {
+ inner
+ .npm_resolution
+ .resolve_pkg_id_from_pkg_req(req)
+ .is_ok()
+ }) {
+ log::debug!(
+ "All package.json deps resolvable. Skipping top level install."
+ );
+ return Ok(()); // everything is already resolvable
+ }
+
+ let mut reqs_with_info_futures =
+ inner.reqs_with_info_futures(&package_reqs);
while let Some(result) = reqs_with_info_futures.next().await {
let (req, info) = result?;
@@ -88,7 +104,7 @@ impl PackageJsonDepsInstaller {
if inner.npm_registry_api.mark_force_reload() {
log::debug!("Failed to resolve package. Retrying. Error: {err:#}");
// re-initialize
- reqs_with_info_futures = inner.reqs_with_info_futures();
+ reqs_with_info_futures = inner.reqs_with_info_futures(&package_reqs);
} else {
return Err(err.into());
}
diff --git a/cli/npm/resolution.rs b/cli/npm/resolution.rs
index 3e9438ffa..eba4069e7 100644
--- a/cli/npm/resolution.rs
+++ b/cli/npm/resolution.rs
@@ -89,7 +89,7 @@ impl NpmResolution {
pub async fn add_package_reqs(
&self,
- package_reqs: Vec<NpmPackageReq>,
+ package_reqs: &[NpmPackageReq],
) -> Result<(), AnyError> {
// only allow one thread in here at a time
let _permit = self.update_queue.acquire().await;
@@ -107,12 +107,12 @@ impl NpmResolution {
pub async fn set_package_reqs(
&self,
- package_reqs: Vec<NpmPackageReq>,
+ package_reqs: &[NpmPackageReq],
) -> Result<(), AnyError> {
// only allow one thread in here at a time
let _permit = self.update_queue.acquire().await;
- let reqs_set = package_reqs.iter().cloned().collect::<HashSet<_>>();
+ let reqs_set = package_reqs.iter().collect::<HashSet<_>>();
let snapshot = add_package_reqs_to_snapshot(
&self.api,
package_reqs,
@@ -144,7 +144,7 @@ impl NpmResolution {
let snapshot = add_package_reqs_to_snapshot(
&self.api,
- Vec::new(),
+ &Vec::new(),
self.maybe_lockfile.clone(),
|| self.snapshot.read().clone(),
)
@@ -275,10 +275,7 @@ impl NpmResolution {
async fn add_package_reqs_to_snapshot(
api: &CliNpmRegistryApi,
- // todo(18079): it should be possible to pass &[NpmPackageReq] in here
- // and avoid all these clones, but the LSP complains because of its
- // `Send` requirement
- package_reqs: Vec<NpmPackageReq>,
+ package_reqs: &[NpmPackageReq],
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
get_new_snapshot: impl Fn() -> NpmResolutionSnapshot,
) -> Result<NpmResolutionSnapshot, AnyError> {
@@ -288,10 +285,11 @@ async fn add_package_reqs_to_snapshot(
.iter()
.all(|req| snapshot.package_reqs().contains_key(req))
{
- return Ok(snapshot); // already up to date
+ log::debug!("Snapshot already up to date. Skipping pending resolution.");
+ return Ok(snapshot);
}
- let result = snapshot.resolve_pending(package_reqs.clone()).await;
+ let result = snapshot.resolve_pending(package_reqs).await;
api.clear_memory_cache();
let snapshot = match result {
Ok(snapshot) => snapshot,
diff --git a/cli/npm/resolvers/mod.rs b/cli/npm/resolvers/mod.rs
index 0f123c382..168786407 100644
--- a/cli/npm/resolvers/mod.rs
+++ b/cli/npm/resolvers/mod.rs
@@ -159,7 +159,7 @@ impl CliNpmResolver {
/// Adds package requirements to the resolver and ensures everything is setup.
pub async fn add_package_reqs(
&self,
- packages: Vec<NpmPackageReq>,
+ packages: &[NpmPackageReq],
) -> Result<(), AnyError> {
if packages.is_empty() {
return Ok(());
@@ -182,7 +182,7 @@ impl CliNpmResolver {
/// This will retrieve and resolve package information, but not cache any package files.
pub async fn set_package_reqs(
&self,
- packages: Vec<NpmPackageReq>,
+ packages: &[NpmPackageReq],
) -> Result<(), AnyError> {
self.resolution.set_package_reqs(packages).await
}
@@ -212,7 +212,7 @@ impl CliNpmResolver {
) -> Result<(), AnyError> {
// add and ensure this isn't added to the lockfile
let package_reqs = vec![NpmPackageReq::from_str("@types/node").unwrap()];
- self.resolution.add_package_reqs(package_reqs).await?;
+ self.resolution.add_package_reqs(&package_reqs).await?;
self.fs_resolver.cache_packages().await?;
Ok(())
diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs
index 64143c08f..82c41b882 100644
--- a/cli/standalone/mod.rs
+++ b/cli/standalone/mod.rs
@@ -408,6 +408,7 @@ pub async fn run(
root_cert_store_provider,
fs,
None,
+ None,
CliMainWorkerOptions {
argv: metadata.argv,
debug: false,
diff --git a/cli/tests/integration/npm_tests.rs b/cli/tests/integration/npm_tests.rs
index 40214315a..73ac029df 100644
--- a/cli/tests/integration/npm_tests.rs
+++ b/cli/tests/integration/npm_tests.rs
@@ -723,6 +723,19 @@ itest!(deno_run_bin_cjs {
http_server: true,
});
+#[test]
+fn deno_run_bin_lockfile() {
+ let context = TestContextBuilder::for_npm().use_temp_cwd().build();
+ let temp_dir = context.temp_dir();
+ temp_dir.write("deno.json", "{}");
+ let output = context
+ .new_command()
+ .args("run -A --quiet npm:@denotest/bin/cli-esm this is a test")
+ .run();
+ output.assert_matches_file("npm/deno_run_esm.out");
+ assert!(temp_dir.path().join("deno.lock").exists());
+}
+
itest!(deno_run_non_existent {
args: "run npm:mkdirp@0.5.125",
output: "npm/deno_run_non_existent.out",
diff --git a/cli/tests/integration/run_tests.rs b/cli/tests/integration/run_tests.rs
index eecf8537f..31b541e1c 100644
--- a/cli/tests/integration/run_tests.rs
+++ b/cli/tests/integration/run_tests.rs
@@ -3030,14 +3030,29 @@ itest!(package_json_auto_discovered_no_package_json_imports {
copy_temp_dir: Some("run/with_package_json/no_deno_json"),
});
-itest!(package_json_with_deno_json {
- args: "run --quiet -A main.ts",
- output: "package_json/deno_json/main.out",
- cwd: Some("package_json/deno_json/"),
- copy_temp_dir: Some("package_json/deno_json/"),
- envs: env_vars_for_npm_tests_no_sync_download(),
- http_server: true,
-});
+#[test]
+fn package_json_with_deno_json() {
+ let context = TestContextBuilder::for_npm()
+ .use_copy_temp_dir("package_json/deno_json/")
+ .cwd("package_json/deno_json/")
+ .build();
+ let output = context.new_command().args("run --quiet -A main.ts").run();
+ output.assert_matches_file("package_json/deno_json/main.out");
+
+ assert!(context
+ .temp_dir()
+ .path()
+ .join("package_json/deno_json/deno.lock")
+ .exists());
+
+ // run again and ensure the top level install doesn't happen twice
+ let output = context
+ .new_command()
+ .args("run --log-level=debug -A main.ts")
+ .run();
+ let output = output.combined_output();
+ assert_contains!(output, "Skipping top level install.");
+}
#[test]
fn package_json_error_dep_value_test() {
diff --git a/cli/tools/repl/session.rs b/cli/tools/repl/session.rs
index b8daf505b..40cf7d3b0 100644
--- a/cli/tools/repl/session.rs
+++ b/cli/tools/repl/session.rs
@@ -518,7 +518,7 @@ impl ReplSession {
self.has_initialized_node_runtime = true;
}
- self.npm_resolver.add_package_reqs(npm_imports).await?;
+ self.npm_resolver.add_package_reqs(&npm_imports).await?;
// prevent messages in the repl about @types/node not being cached
if has_node_specifier {
diff --git a/cli/worker.rs b/cli/worker.rs
index 4d8e500b7..4a41da1a5 100644
--- a/cli/worker.rs
+++ b/cli/worker.rs
@@ -5,10 +5,12 @@ use std::rc::Rc;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
+use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures::task::LocalFutureObj;
use deno_core::futures::FutureExt;
use deno_core::located_script_name;
+use deno_core::parking_lot::Mutex;
use deno_core::url::Url;
use deno_core::CompiledWasmModuleStore;
use deno_core::Extension;
@@ -16,6 +18,7 @@ use deno_core::ModuleId;
use deno_core::ModuleLoader;
use deno_core::SharedArrayBufferStore;
use deno_core::SourceMapGetter;
+use deno_lockfile::Lockfile;
use deno_runtime::colors;
use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel;
use deno_runtime::deno_fs;
@@ -100,6 +103,7 @@ struct SharedWorkerState {
root_cert_store_provider: Arc<dyn RootCertStoreProvider>,
fs: Arc<dyn deno_fs::FileSystem>,
maybe_inspector_server: Option<Arc<InspectorServer>>,
+ maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
}
impl SharedWorkerState {
@@ -311,6 +315,7 @@ impl CliMainWorkerFactory {
root_cert_store_provider: Arc<dyn RootCertStoreProvider>,
fs: Arc<dyn deno_fs::FileSystem>,
maybe_inspector_server: Option<Arc<InspectorServer>>,
+ maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
options: CliMainWorkerOptions,
) -> Self {
Self {
@@ -328,6 +333,7 @@ impl CliMainWorkerFactory {
root_cert_store_provider,
fs,
maybe_inspector_server,
+ maybe_lockfile,
}),
}
}
@@ -360,11 +366,22 @@ impl CliMainWorkerFactory {
{
shared
.npm_resolver
- .add_package_reqs(vec![package_ref.req.clone()])
+ .add_package_reqs(&[package_ref.req.clone()])
.await?;
let node_resolution =
shared.node_resolver.resolve_binary_export(&package_ref)?;
let is_main_cjs = matches!(node_resolution, NodeResolution::CommonJs(_));
+
+ if let Some(lockfile) = &shared.maybe_lockfile {
+ // For npm binary commands, ensure that the lockfile gets updated
+ // so that we can re-use the npm resolution the next time it runs
+ // for better performance
+ lockfile
+ .lock()
+ .write()
+ .context("Failed writing lockfile.")?;
+ }
+
(node_resolution.into_url(), is_main_cjs)
} else if shared.options.is_npm_main {
let node_resolution =