From 36ebc03f177cc7db5deb93f4d403cafbed756eb5 Mon Sep 17 00:00:00 2001
From: Nathan Whitaker <17734409+nathanwhit@users.noreply.github.com>
Date: Tue, 24 Sep 2024 12:23:57 -0700
Subject: fix(cli): Warn on not-run lifecycle scripts with global cache (#25786)

Refactors the lifecycle scripts code to extract the common functionality
and then uses it to provide a warning in the global resolver.

While ideally we would still support lifecycle scripts with the global
cache, for now a warning is at least better than the status quo, where
people are left unaware of why their packages aren't working.
---
 cli/args/flags.rs                                  |   3 +-
 cli/args/mod.rs                                    |  10 +-
 cli/npm/managed/resolvers/common.rs                |   5 +-
 cli/npm/managed/resolvers/common/bin_entries.rs    | 328 +++++++++++++++++++
 .../managed/resolvers/common/lifecycle_scripts.rs  | 306 ++++++++++++++++++
 cli/npm/managed/resolvers/global.rs                | 100 +++++-
 cli/npm/managed/resolvers/local.rs                 | 360 +++++++--------------
 cli/npm/managed/resolvers/local/bin_entries.rs     | 333 -------------------
 cli/npm/managed/resolvers/mod.rs                   |   1 +
 9 files changed, 856 insertions(+), 590 deletions(-)
 create mode 100644 cli/npm/managed/resolvers/common/bin_entries.rs
 create mode 100644 cli/npm/managed/resolvers/common/lifecycle_scripts.rs
 delete mode 100644 cli/npm/managed/resolvers/local/bin_entries.rs

diff --git a/cli/args/flags.rs b/cli/args/flags.rs
index d325ce7bc..10fa07bed 100644
--- a/cli/args/flags.rs
+++ b/cli/args/flags.rs
@@ -544,7 +544,8 @@ pub enum CaData {
 #[derive(Clone, Debug, Eq, PartialEq, Default)]
 pub struct LifecycleScriptsConfig {
   pub allowed: PackagesAllowedScripts,
-  pub initial_cwd: Option<PathBuf>,
+  pub initial_cwd: PathBuf,
+  pub root_dir: PathBuf,
 }
 
 #[derive(Debug, Clone, Eq, PartialEq, Default)]
diff --git a/cli/args/mod.rs b/cli/args/mod.rs
index b8a05f325..1c92777ae 100644
--- a/cli/args/mod.rs
+++ b/cli/args/mod.rs
@@ -1652,14 +1652,8 @@ impl CliOptions {
   pub fn lifecycle_scripts_config(&self) -> LifecycleScriptsConfig {
     LifecycleScriptsConfig {
       allowed: self.flags.allow_scripts.clone(),
-      initial_cwd: if matches!(
-        self.flags.allow_scripts,
-        PackagesAllowedScripts::None
-      ) {
-        None
-      } else {
-        Some(self.initial_cwd.clone())
-      },
+      initial_cwd: self.initial_cwd.clone(),
+      root_dir: self.workspace().root_dir_path(),
     }
   }
 }
diff --git a/cli/npm/managed/resolvers/common.rs b/cli/npm/managed/resolvers/common.rs
index 1893aa56a..620daf4b3 100644
--- a/cli/npm/managed/resolvers/common.rs
+++ b/cli/npm/managed/resolvers/common.rs
@@ -1,5 +1,8 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
+pub mod bin_entries;
+pub mod lifecycle_scripts;
+
 use std::collections::HashMap;
 use std::io::ErrorKind;
 use std::path::Path;
@@ -134,7 +137,7 @@ impl RegistryReadPermissionChecker {
 
 /// Caches all the packages in parallel.
 pub async fn cache_packages(
-  packages: Vec<NpmResolutionPackage>,
+  packages: &[NpmResolutionPackage],
   tarball_cache: &Arc<TarballCache>,
 ) -> Result<(), AnyError> {
   let mut futures_unordered = futures::stream::FuturesUnordered::new();
diff --git a/cli/npm/managed/resolvers/common/bin_entries.rs b/cli/npm/managed/resolvers/common/bin_entries.rs
new file mode 100644
index 000000000..25a020c2b
--- /dev/null
+++ b/cli/npm/managed/resolvers/common/bin_entries.rs
@@ -0,0 +1,328 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+ +use crate::npm::managed::NpmResolutionPackage; +use deno_core::anyhow::Context; +use deno_core::error::AnyError; +use deno_npm::resolution::NpmResolutionSnapshot; +use deno_npm::NpmPackageId; +use std::collections::HashMap; +use std::collections::HashSet; +use std::collections::VecDeque; +use std::path::Path; +use std::path::PathBuf; + +#[derive(Default)] +pub struct BinEntries<'a> { + /// Packages that have colliding bin names + collisions: HashSet<&'a NpmPackageId>, + seen_names: HashMap<&'a str, &'a NpmPackageId>, + /// The bin entries + entries: Vec<(&'a NpmResolutionPackage, PathBuf)>, +} + +/// Returns the name of the default binary for the given package. +/// This is the package name without the organization (`@org/`), if any. +fn default_bin_name(package: &NpmResolutionPackage) -> &str { + package + .id + .nv + .name + .rsplit_once('/') + .map_or(package.id.nv.name.as_str(), |(_, name)| name) +} + +impl<'a> BinEntries<'a> { + pub fn new() -> Self { + Self::default() + } + + /// Add a new bin entry (package with a bin field) + pub fn add( + &mut self, + package: &'a NpmResolutionPackage, + package_path: PathBuf, + ) { + // check for a new collision, if we haven't already + // found one + match package.bin.as_ref().unwrap() { + deno_npm::registry::NpmPackageVersionBinEntry::String(_) => { + let bin_name = default_bin_name(package); + + if let Some(other) = self.seen_names.insert(bin_name, &package.id) { + self.collisions.insert(&package.id); + self.collisions.insert(other); + } + } + deno_npm::registry::NpmPackageVersionBinEntry::Map(entries) => { + for name in entries.keys() { + if let Some(other) = self.seen_names.insert(name, &package.id) { + self.collisions.insert(&package.id); + self.collisions.insert(other); + } + } + } + } + + self.entries.push((package, package_path)); + } + + fn for_each_entry( + &mut self, + snapshot: &NpmResolutionSnapshot, + mut f: impl FnMut( + &NpmResolutionPackage, + &Path, + &str, // bin name + &str, // bin script + ) -> Result<(), AnyError>, + ) -> Result<(), AnyError> { + if !self.collisions.is_empty() { + // walking the dependency tree to find out the depth of each package + // is sort of expensive, so we only do it if there's a collision + sort_by_depth(snapshot, &mut self.entries, &mut self.collisions); + } + + let mut seen = HashSet::new(); + + for (package, package_path) in &self.entries { + if let Some(bin_entries) = &package.bin { + match bin_entries { + deno_npm::registry::NpmPackageVersionBinEntry::String(script) => { + let name = default_bin_name(package); + if !seen.insert(name) { + // we already set up a bin entry with this name + continue; + } + f(package, package_path, name, script)?; + } + deno_npm::registry::NpmPackageVersionBinEntry::Map(entries) => { + for (name, script) in entries { + if !seen.insert(name) { + // we already set up a bin entry with this name + continue; + } + f(package, package_path, name, script)?; + } + } + } + } + } + + Ok(()) + } + + /// Collect the bin entries into a vec of (name, script path) + pub fn into_bin_files( + mut self, + snapshot: &NpmResolutionSnapshot, + ) -> Vec<(String, PathBuf)> { + let mut bins = Vec::new(); + self + .for_each_entry(snapshot, |_, package_path, name, script| { + bins.push((name.to_string(), package_path.join(script))); + Ok(()) + }) + .unwrap(); + bins + } + + /// Finish setting up the bin entries, writing the necessary files + /// to disk. 
+ pub fn finish( + mut self, + snapshot: &NpmResolutionSnapshot, + bin_node_modules_dir_path: &Path, + ) -> Result<(), AnyError> { + if !self.entries.is_empty() && !bin_node_modules_dir_path.exists() { + std::fs::create_dir_all(bin_node_modules_dir_path).with_context( + || format!("Creating '{}'", bin_node_modules_dir_path.display()), + )?; + } + + self.for_each_entry(snapshot, |package, package_path, name, script| { + set_up_bin_entry( + package, + name, + script, + package_path, + bin_node_modules_dir_path, + ) + })?; + + Ok(()) + } +} + +// walk the dependency tree to find out the depth of each package +// that has a bin entry, then sort them by depth +fn sort_by_depth( + snapshot: &NpmResolutionSnapshot, + bin_entries: &mut [(&NpmResolutionPackage, PathBuf)], + collisions: &mut HashSet<&NpmPackageId>, +) { + enum Entry<'a> { + Pkg(&'a NpmPackageId), + IncreaseDepth, + } + + let mut seen = HashSet::new(); + let mut depths: HashMap<&NpmPackageId, u64> = + HashMap::with_capacity(collisions.len()); + + let mut queue = VecDeque::new(); + queue.extend(snapshot.top_level_packages().map(Entry::Pkg)); + seen.extend(snapshot.top_level_packages()); + queue.push_back(Entry::IncreaseDepth); + + let mut current_depth = 0u64; + + while let Some(entry) = queue.pop_front() { + if collisions.is_empty() { + break; + } + let id = match entry { + Entry::Pkg(id) => id, + Entry::IncreaseDepth => { + current_depth += 1; + if queue.is_empty() { + break; + } + queue.push_back(Entry::IncreaseDepth); + continue; + } + }; + if let Some(package) = snapshot.package_from_id(id) { + if collisions.remove(&package.id) { + depths.insert(&package.id, current_depth); + } + for dep in package.dependencies.values() { + if seen.insert(dep) { + queue.push_back(Entry::Pkg(dep)); + } + } + } + } + + bin_entries.sort_by(|(a, _), (b, _)| { + depths + .get(&a.id) + .unwrap_or(&u64::MAX) + .cmp(depths.get(&b.id).unwrap_or(&u64::MAX)) + .then_with(|| a.id.nv.cmp(&b.id.nv).reverse()) + }); +} + +pub fn set_up_bin_entry( + package: &NpmResolutionPackage, + bin_name: &str, + #[allow(unused_variables)] bin_script: &str, + #[allow(unused_variables)] package_path: &Path, + bin_node_modules_dir_path: &Path, +) -> Result<(), AnyError> { + #[cfg(windows)] + { + set_up_bin_shim(package, bin_name, bin_node_modules_dir_path)?; + } + #[cfg(unix)] + { + symlink_bin_entry( + package, + bin_name, + bin_script, + package_path, + bin_node_modules_dir_path, + )?; + } + Ok(()) +} + +#[cfg(windows)] +fn set_up_bin_shim( + package: &NpmResolutionPackage, + bin_name: &str, + bin_node_modules_dir_path: &Path, +) -> Result<(), AnyError> { + use std::fs; + let mut cmd_shim = bin_node_modules_dir_path.join(bin_name); + + cmd_shim.set_extension("cmd"); + let shim = format!("@deno run -A npm:{}/{bin_name} %*", package.id.nv); + fs::write(&cmd_shim, shim).with_context(|| { + format!("Can't set up '{}' bin at {}", bin_name, cmd_shim.display()) + })?; + + Ok(()) +} + +#[cfg(unix)] +fn symlink_bin_entry( + _package: &NpmResolutionPackage, + bin_name: &str, + bin_script: &str, + package_path: &Path, + bin_node_modules_dir_path: &Path, +) -> Result<(), AnyError> { + use std::io; + use std::os::unix::fs::symlink; + let link = bin_node_modules_dir_path.join(bin_name); + let original = package_path.join(bin_script); + + use std::os::unix::fs::PermissionsExt; + let mut perms = match std::fs::metadata(&original) { + Ok(metadata) => metadata.permissions(), + Err(err) => { + if err.kind() == io::ErrorKind::NotFound { + log::warn!( + "{} Trying to set up '{}' bin for \"{}\", 
but the entry point \"{}\" doesn't exist.", + deno_terminal::colors::yellow("Warning"), + bin_name, + package_path.display(), + original.display() + ); + return Ok(()); + } + return Err(err).with_context(|| { + format!("Can't set up '{}' bin at {}", bin_name, original.display()) + }); + } + }; + if perms.mode() & 0o111 == 0 { + // if the original file is not executable, make it executable + perms.set_mode(perms.mode() | 0o111); + std::fs::set_permissions(&original, perms).with_context(|| { + format!("Setting permissions on '{}'", original.display()) + })?; + } + let original_relative = + crate::util::path::relative_path(bin_node_modules_dir_path, &original) + .unwrap_or(original); + + if let Err(err) = symlink(&original_relative, &link) { + if err.kind() == io::ErrorKind::AlreadyExists { + // remove and retry + std::fs::remove_file(&link).with_context(|| { + format!( + "Failed to remove existing bin symlink at {}", + link.display() + ) + })?; + symlink(&original_relative, &link).with_context(|| { + format!( + "Can't set up '{}' bin at {}", + bin_name, + original_relative.display() + ) + })?; + return Ok(()); + } + return Err(err).with_context(|| { + format!( + "Can't set up '{}' bin at {}", + bin_name, + original_relative.display() + ) + }); + } + + Ok(()) +} diff --git a/cli/npm/managed/resolvers/common/lifecycle_scripts.rs b/cli/npm/managed/resolvers/common/lifecycle_scripts.rs new file mode 100644 index 000000000..a3c72634b --- /dev/null +++ b/cli/npm/managed/resolvers/common/lifecycle_scripts.rs @@ -0,0 +1,306 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +use super::bin_entries::BinEntries; +use crate::args::LifecycleScriptsConfig; +use deno_npm::resolution::NpmResolutionSnapshot; +use deno_semver::package::PackageNv; +use std::borrow::Cow; +use std::rc::Rc; + +use std::path::Path; +use std::path::PathBuf; + +use deno_core::error::AnyError; +use deno_npm::NpmResolutionPackage; + +pub trait LifecycleScriptsStrategy { + fn can_run_scripts(&self) -> bool { + true + } + fn package_path(&self, package: &NpmResolutionPackage) -> PathBuf; + + fn warn_on_scripts_not_run( + &self, + packages: &[(&NpmResolutionPackage, PathBuf)], + ) -> Result<(), AnyError>; + + fn has_warned(&self, package: &NpmResolutionPackage) -> bool; + + fn has_run(&self, package: &NpmResolutionPackage) -> bool; + + fn did_run_scripts( + &self, + package: &NpmResolutionPackage, + ) -> Result<(), AnyError>; +} + +pub struct LifecycleScripts<'a> { + packages_with_scripts: Vec<(&'a NpmResolutionPackage, PathBuf)>, + packages_with_scripts_not_run: Vec<(&'a NpmResolutionPackage, PathBuf)>, + + config: &'a LifecycleScriptsConfig, + strategy: Box, +} + +impl<'a> LifecycleScripts<'a> { + pub fn new( + config: &'a LifecycleScriptsConfig, + strategy: T, + ) -> Self { + Self { + config, + packages_with_scripts: Vec::new(), + packages_with_scripts_not_run: Vec::new(), + strategy: Box::new(strategy), + } + } +} + +fn has_lifecycle_scripts( + package: &NpmResolutionPackage, + package_path: &Path, +) -> bool { + if let Some(install) = package.scripts.get("install") { + // default script + if !is_broken_default_install_script(install, package_path) { + return true; + } + } + package.scripts.contains_key("preinstall") + || package.scripts.contains_key("postinstall") +} + +// npm defaults to running `node-gyp rebuild` if there is a `binding.gyp` file +// but it always fails if the package excludes the `binding.gyp` file when they publish. 
+// (for example, `fsevents` hits this) +fn is_broken_default_install_script(script: &str, package_path: &Path) -> bool { + script == "node-gyp rebuild" && !package_path.join("binding.gyp").exists() +} + +impl<'a> LifecycleScripts<'a> { + fn can_run_scripts(&self, package_nv: &PackageNv) -> bool { + if !self.strategy.can_run_scripts() { + return false; + } + use crate::args::PackagesAllowedScripts; + match &self.config.allowed { + PackagesAllowedScripts::All => true, + // TODO: make this more correct + PackagesAllowedScripts::Some(allow_list) => allow_list.iter().any(|s| { + let s = s.strip_prefix("npm:").unwrap_or(s); + s == package_nv.name || s == package_nv.to_string() + }), + PackagesAllowedScripts::None => false, + } + } + /// Register a package for running lifecycle scripts, if applicable. + /// + /// `package_path` is the path containing the package's code (its root dir). + /// `package_meta_path` is the path to serve as the base directory for lifecycle + /// script-related metadata (e.g. to store whether the scripts have been run already) + pub fn add( + &mut self, + package: &'a NpmResolutionPackage, + package_path: Cow, + ) { + if has_lifecycle_scripts(package, &package_path) { + if self.can_run_scripts(&package.id.nv) { + if !self.strategy.has_run(package) { + self + .packages_with_scripts + .push((package, package_path.into_owned())); + } + } else if !self.strategy.has_run(package) + && !self.strategy.has_warned(package) + { + self + .packages_with_scripts_not_run + .push((package, package_path.into_owned())); + } + } + } + + pub fn warn_not_run_scripts(&self) -> Result<(), AnyError> { + if !self.packages_with_scripts_not_run.is_empty() { + self + .strategy + .warn_on_scripts_not_run(&self.packages_with_scripts_not_run)?; + } + Ok(()) + } + + pub async fn finish( + self, + snapshot: &NpmResolutionSnapshot, + packages: &[NpmResolutionPackage], + root_node_modules_dir_path: Option<&Path>, + ) -> Result<(), AnyError> { + self.warn_not_run_scripts()?; + let get_package_path = + |p: &NpmResolutionPackage| self.strategy.package_path(p); + let mut failed_packages = Vec::new(); + if !self.packages_with_scripts.is_empty() { + // get custom commands for each bin available in the node_modules dir (essentially + // the scripts that are in `node_modules/.bin`) + let base = + resolve_baseline_custom_commands(snapshot, packages, get_package_path)?; + let init_cwd = &self.config.initial_cwd; + let process_state = crate::npm::managed::npm_process_state( + snapshot.as_valid_serialized(), + root_node_modules_dir_path, + ); + + let mut env_vars = crate::task_runner::real_env_vars(); + env_vars.insert( + crate::args::NPM_RESOLUTION_STATE_ENV_VAR_NAME.to_string(), + process_state, + ); + for (package, package_path) in self.packages_with_scripts { + // add custom commands for binaries from the package's dependencies. this will take precedence over the + // baseline commands, so if the package relies on a bin that conflicts with one higher in the dependency tree, the + // correct bin will be used. 
+        let custom_commands = resolve_custom_commands_from_deps(
+          base.clone(),
+          package,
+          snapshot,
+          get_package_path,
+        )?;
+        for script_name in ["preinstall", "install", "postinstall"] {
+          if let Some(script) = package.scripts.get(script_name) {
+            if script_name == "install"
+              && is_broken_default_install_script(script, &package_path)
+            {
+              continue;
+            }
+            let exit_code = crate::task_runner::run_task(
+              crate::task_runner::RunTaskOptions {
+                task_name: script_name,
+                script,
+                cwd: &package_path,
+                env_vars: env_vars.clone(),
+                custom_commands: custom_commands.clone(),
+                init_cwd,
+                argv: &[],
+                root_node_modules_dir: root_node_modules_dir_path,
+              },
+            )
+            .await?;
+            if exit_code != 0 {
+              log::warn!(
+                "error: script '{}' in '{}' failed with exit code {}",
+                script_name,
+                package.id.nv,
+                exit_code,
+              );
+              failed_packages.push(&package.id.nv);
+              // assume if earlier script fails, later ones will fail too
+              break;
+            }
+          }
+        }
+        self.strategy.did_run_scripts(package)?;
+      }
+    }
+    if failed_packages.is_empty() {
+      Ok(())
+    } else {
+      Err(AnyError::msg(format!(
+        "failed to run scripts for packages: {}",
+        failed_packages
+          .iter()
+          .map(|p| p.to_string())
+          .collect::<Vec<_>>()
+          .join(", ")
+      )))
+    }
+  }
+}
+
+// take in all (non copy) packages from snapshot,
+// and resolve the set of available binaries to create
+// custom commands available to the task runner
+fn resolve_baseline_custom_commands(
+  snapshot: &NpmResolutionSnapshot,
+  packages: &[NpmResolutionPackage],
+  get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf,
+) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
+  let mut custom_commands = crate::task_runner::TaskCustomCommands::new();
+  custom_commands
+    .insert("npx".to_string(), Rc::new(crate::task_runner::NpxCommand));
+
+  custom_commands
+    .insert("npm".to_string(), Rc::new(crate::task_runner::NpmCommand));
+
+  custom_commands
+    .insert("node".to_string(), Rc::new(crate::task_runner::NodeCommand));
+
+  custom_commands.insert(
+    "node-gyp".to_string(),
+    Rc::new(crate::task_runner::NodeGypCommand),
+  );
+
+  // TODO: this recreates the bin entries which could be redoing some work, but the ones
+  // we compute earlier in `sync_resolution_with_fs` may not be exhaustive (because we skip
+  // doing it for packages that are set up already).
+  // realistically, scripts won't be run very often so it probably isn't too big of an issue.
+  resolve_custom_commands_from_packages(
+    custom_commands,
+    snapshot,
+    packages,
+    get_package_path,
+  )
+}
+
+// resolves the custom commands from an iterator of packages
+// and adds them to the existing custom commands.
+// note that this will overwrite any existing custom commands
+fn resolve_custom_commands_from_packages<
+  'a,
+  P: IntoIterator<Item = &'a NpmResolutionPackage>,
+>(
+  mut commands: crate::task_runner::TaskCustomCommands,
+  snapshot: &'a NpmResolutionSnapshot,
+  packages: P,
+  get_package_path: impl Fn(&'a NpmResolutionPackage) -> PathBuf,
+) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
+  let mut bin_entries = BinEntries::new();
+  for package in packages {
+    let package_path = get_package_path(package);
+
+    if package.bin.is_some() {
+      bin_entries.add(package, package_path);
+    }
+  }
+  let bins = bin_entries.into_bin_files(snapshot);
+  for (bin_name, script_path) in bins {
+    commands.insert(
+      bin_name.clone(),
+      Rc::new(crate::task_runner::NodeModulesFileRunCommand {
+        command_name: bin_name,
+        path: script_path,
+      }),
+    );
+  }
+
+  Ok(commands)
+}
+
+// resolves the custom commands from the dependencies of a package
+// and adds them to the existing custom commands.
+// note that this will overwrite any existing custom commands.
+fn resolve_custom_commands_from_deps(
+  baseline: crate::task_runner::TaskCustomCommands,
+  package: &NpmResolutionPackage,
+  snapshot: &NpmResolutionSnapshot,
+  get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf,
+) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
+  resolve_custom_commands_from_packages(
+    baseline,
+    snapshot,
+    package
+      .dependencies
+      .values()
+      .map(|id| snapshot.package_from_id(id).unwrap()),
+    get_package_path,
+  )
+}
diff --git a/cli/npm/managed/resolvers/global.rs b/cli/npm/managed/resolvers/global.rs
index 7f8f285f3..187e6b277 100644
--- a/cli/npm/managed/resolvers/global.rs
+++ b/cli/npm/managed/resolvers/global.rs
@@ -2,16 +2,19 @@
 
 //! Code for global npm cache resolution.
 
+use std::borrow::Cow;
 use std::path::Path;
 use std::path::PathBuf;
 use std::sync::Arc;
 
+use crate::colors;
 use async_trait::async_trait;
 use deno_ast::ModuleSpecifier;
 use deno_core::error::AnyError;
 use deno_core::url::Url;
 use deno_npm::NpmPackageCacheFolderId;
 use deno_npm::NpmPackageId;
+use deno_npm::NpmResolutionPackage;
 use deno_npm::NpmSystemInfo;
 use deno_runtime::deno_fs::FileSystem;
 use deno_runtime::deno_node::NodePermissions;
@@ -19,10 +22,14 @@
 use node_resolver::errors::PackageFolderResolveError;
 use node_resolver::errors::PackageNotFoundError;
 use node_resolver::errors::ReferrerNotFoundError;
 
+use crate::args::LifecycleScriptsConfig;
+use crate::cache::FastInsecureHasher;
+
 use super::super::cache::NpmCache;
 use super::super::cache::TarballCache;
 use super::super::resolution::NpmResolution;
 use super::common::cache_packages;
+use super::common::lifecycle_scripts::LifecycleScriptsStrategy;
 use super::common::NpmPackageFsResolver;
 use super::common::RegistryReadPermissionChecker;
@@ -34,6 +41,7 @@ pub struct GlobalNpmPackageResolver {
   resolution: Arc<NpmResolution>,
   system_info: NpmSystemInfo,
   registry_read_permission_checker: RegistryReadPermissionChecker,
+  lifecycle_scripts: LifecycleScriptsConfig,
 }
 
 impl GlobalNpmPackageResolver {
@@ -43,6 +51,7 @@ impl GlobalNpmPackageResolver {
     tarball_cache: Arc<TarballCache>,
     resolution: Arc<NpmResolution>,
     system_info: NpmSystemInfo,
+    lifecycle_scripts: LifecycleScriptsConfig,
   ) -> Self {
     Self {
       registry_read_permission_checker: RegistryReadPermissionChecker::new(
@@ -53,6 +62,7 @@ impl GlobalNpmPackageResolver {
       tarball_cache,
       resolution,
       system_info,
+      lifecycle_scripts,
     }
   }
 }
@@ -149,8 +159,7 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
     let package_partitions = self
       .resolution
       .all_system_packages_partitioned(&self.system_info);
-
-    cache_packages(package_partitions.packages, &self.tarball_cache).await?;
+    cache_packages(&package_partitions.packages, &self.tarball_cache).await?;
 
     // create the copy package folders
     for copy in package_partitions.copy_packages {
@@ -159,6 +168,18 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
         .ensure_copy_package(&copy.get_package_cache_folder_id())?;
     }
 
+    let mut lifecycle_scripts =
+      super::common::lifecycle_scripts::LifecycleScripts::new(
+        &self.lifecycle_scripts,
+        GlobalLifecycleScripts::new(self, &self.lifecycle_scripts.root_dir),
+      );
+    for package in &package_partitions.packages {
+      let package_folder = self.cache.package_folder_for_nv(&package.id.nv);
+      lifecycle_scripts.add(package, Cow::Borrowed(&package_folder));
+    }
+
+    lifecycle_scripts.warn_not_run_scripts()?;
+
     Ok(())
   }
 
@@ -172,3 +193,78 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
       .ensure_registry_read_permission(permissions, path)
   }
 }
+
+struct GlobalLifecycleScripts<'a> {
+  resolver: &'a
GlobalNpmPackageResolver, + path_hash: u64, +} + +impl<'a> GlobalLifecycleScripts<'a> { + fn new(resolver: &'a GlobalNpmPackageResolver, root_dir: &Path) -> Self { + let mut hasher = FastInsecureHasher::new_without_deno_version(); + hasher.write(root_dir.to_string_lossy().as_bytes()); + let path_hash = hasher.finish(); + Self { + resolver, + path_hash, + } + } + + fn warned_scripts_file(&self, package: &NpmResolutionPackage) -> PathBuf { + self + .package_path(package) + .join(format!(".scripts-warned-{}", self.path_hash)) + } +} + +impl<'a> super::common::lifecycle_scripts::LifecycleScriptsStrategy + for GlobalLifecycleScripts<'a> +{ + fn can_run_scripts(&self) -> bool { + false + } + fn package_path(&self, package: &NpmResolutionPackage) -> PathBuf { + self.resolver.cache.package_folder_for_nv(&package.id.nv) + } + + fn warn_on_scripts_not_run( + &self, + packages: &[(&NpmResolutionPackage, PathBuf)], + ) -> std::result::Result<(), deno_core::anyhow::Error> { + log::warn!("{} The following packages contained npm lifecycle scripts ({}) that were not executed:", colors::yellow("Warning"), colors::gray("preinstall/install/postinstall")); + for (package, _) in packages { + log::warn!("┠─ {}", colors::gray(format!("npm:{}", package.id.nv))); + } + log::warn!("┃"); + log::warn!( + "┠─ {}", + colors::italic("This may cause the packages to not work correctly.") + ); + log::warn!("┠─ {}", colors::italic("Lifecycle scripts are only supported when using a `node_modules` directory.")); + log::warn!( + "┠─ {}", + colors::italic("Enable it in your deno config file:") + ); + log::warn!("┖─ {}", colors::bold("\"nodeModulesDir\": \"auto\"")); + + for (package, _) in packages { + std::fs::write(self.warned_scripts_file(package), "")?; + } + Ok(()) + } + + fn did_run_scripts( + &self, + _package: &NpmResolutionPackage, + ) -> std::result::Result<(), deno_core::anyhow::Error> { + Ok(()) + } + + fn has_warned(&self, package: &NpmResolutionPackage) -> bool { + self.warned_scripts_file(package).exists() + } + + fn has_run(&self, _package: &NpmResolutionPackage) -> bool { + false + } +} diff --git a/cli/npm/managed/resolvers/local.rs b/cli/npm/managed/resolvers/local.rs index c582c369e..5a90f252d 100644 --- a/cli/npm/managed/resolvers/local.rs +++ b/cli/npm/managed/resolvers/local.rs @@ -2,8 +2,6 @@ //! Code for local node_modules resolution. 
-mod bin_entries; - use std::borrow::Cow; use std::cell::RefCell; use std::cmp::Ordering; @@ -18,11 +16,9 @@ use std::rc::Rc; use std::sync::Arc; use crate::args::LifecycleScriptsConfig; -use crate::args::PackagesAllowedScripts; use crate::colors; use async_trait::async_trait; use deno_ast::ModuleSpecifier; -use deno_core::anyhow; use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::futures::stream::FuturesUnordered; @@ -272,77 +268,10 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver { } } -// take in all (non copy) packages from snapshot, -// and resolve the set of available binaries to create -// custom commands available to the task runner -fn resolve_baseline_custom_commands( - snapshot: &NpmResolutionSnapshot, - packages: &[NpmResolutionPackage], - local_registry_dir: &Path, -) -> Result { - let mut custom_commands = crate::task_runner::TaskCustomCommands::new(); - custom_commands - .insert("npx".to_string(), Rc::new(crate::task_runner::NpxCommand)); - - custom_commands - .insert("npm".to_string(), Rc::new(crate::task_runner::NpmCommand)); - - custom_commands - .insert("node".to_string(), Rc::new(crate::task_runner::NodeCommand)); - - custom_commands.insert( - "node-gyp".to_string(), - Rc::new(crate::task_runner::NodeGypCommand), - ); - - // TODO: this recreates the bin entries which could be redoing some work, but the ones - // we compute earlier in `sync_resolution_with_fs` may not be exhaustive (because we skip - // doing it for packages that are set up already. - // realistically, scripts won't be run very often so it probably isn't too big of an issue. - resolve_custom_commands_from_packages( - custom_commands, - snapshot, - packages, - local_registry_dir, - ) -} - -// resolves the custom commands from an iterator of packages -// and adds them to the existing custom commands. -// note that this will overwrite any existing custom commands -fn resolve_custom_commands_from_packages< - 'a, - P: IntoIterator, ->( - mut commands: crate::task_runner::TaskCustomCommands, - snapshot: &'a NpmResolutionSnapshot, - packages: P, - local_registry_dir: &Path, -) -> Result { - let mut bin_entries = bin_entries::BinEntries::new(); - for package in packages { - let package_path = - local_node_modules_package_path(local_registry_dir, package); - - if package.bin.is_some() { - bin_entries.add(package.clone(), package_path); - } - } - let bins = bin_entries.into_bin_files(snapshot); - for (bin_name, script_path) in bins { - commands.insert( - bin_name.clone(), - Rc::new(crate::task_runner::NodeModulesFileRunCommand { - command_name: bin_name, - path: script_path, - }), - ); - } - - Ok(commands) -} - -fn local_node_modules_package_path( +/// `node_modules/.deno//node_modules/` +/// +/// Where the actual package is stored. +fn local_node_modules_package_contents_path( local_registry_dir: &Path, package: &NpmResolutionPackage, ) -> PathBuf { @@ -354,62 +283,6 @@ fn local_node_modules_package_path( .join(&package.id.nv.name) } -// resolves the custom commands from the dependencies of a package -// and adds them to the existing custom commands. -// note that this will overwrite any existing custom commands. 
-fn resolve_custom_commands_from_deps( - baseline: crate::task_runner::TaskCustomCommands, - package: &NpmResolutionPackage, - snapshot: &NpmResolutionSnapshot, - local_registry_dir: &Path, -) -> Result { - resolve_custom_commands_from_packages( - baseline, - snapshot, - package - .dependencies - .values() - .map(|id| snapshot.package_from_id(id).unwrap()), - local_registry_dir, - ) -} - -fn can_run_scripts( - allow_scripts: &PackagesAllowedScripts, - package_nv: &PackageNv, -) -> bool { - match allow_scripts { - PackagesAllowedScripts::All => true, - // TODO: make this more correct - PackagesAllowedScripts::Some(allow_list) => allow_list.iter().any(|s| { - let s = s.strip_prefix("npm:").unwrap_or(s); - s == package_nv.name || s == package_nv.to_string() - }), - PackagesAllowedScripts::None => false, - } -} - -// npm defaults to running `node-gyp rebuild` if there is a `binding.gyp` file -// but it always fails if the package excludes the `binding.gyp` file when they publish. -// (for example, `fsevents` hits this) -fn is_broken_default_install_script(script: &str, package_path: &Path) -> bool { - script == "node-gyp rebuild" && !package_path.join("binding.gyp").exists() -} - -fn has_lifecycle_scripts( - package: &NpmResolutionPackage, - package_path: &Path, -) -> bool { - if let Some(install) = package.scripts.get("install") { - // default script - if !is_broken_default_install_script(install, package_path) { - return true; - } - } - package.scripts.contains_key("preinstall") - || package.scripts.contains_key("postinstall") -} - /// Creates a pnpm style folder structure. #[allow(clippy::too_many_arguments)] async fn sync_resolution_with_fs( @@ -460,9 +333,15 @@ async fn sync_resolution_with_fs( let mut cache_futures = FuturesUnordered::new(); let mut newest_packages_by_name: HashMap<&String, &NpmResolutionPackage> = HashMap::with_capacity(package_partitions.packages.len()); - let bin_entries = Rc::new(RefCell::new(bin_entries::BinEntries::new())); - let mut packages_with_scripts = Vec::with_capacity(2); - let mut packages_with_scripts_not_run = Vec::new(); + let bin_entries = + Rc::new(RefCell::new(super::common::bin_entries::BinEntries::new())); + let mut lifecycle_scripts = + super::common::lifecycle_scripts::LifecycleScripts::new( + lifecycle_scripts, + LocalLifecycleScripts { + deno_local_registry_dir: &deno_local_registry_dir, + }, + ); let packages_with_deprecation_warnings = Arc::new(Mutex::new(Vec::new())); for package in &package_partitions.packages { if let Some(current_pkg) = @@ -518,9 +397,7 @@ async fn sync_resolution_with_fs( .await??; if package.bin.is_some() { - bin_entries_to_setup - .borrow_mut() - .add(package.clone(), package_path); + bin_entries_to_setup.borrow_mut().add(package, package_path); } if let Some(deprecated) = &package.deprecated { @@ -538,21 +415,7 @@ async fn sync_resolution_with_fs( let sub_node_modules = folder_path.join("node_modules"); let package_path = join_package_name(&sub_node_modules, &package.id.nv.name); - if has_lifecycle_scripts(package, &package_path) { - let scripts_run = folder_path.join(".scripts-run"); - let has_warned = folder_path.join(".scripts-warned"); - if can_run_scripts(&lifecycle_scripts.allowed, &package.id.nv) { - if !scripts_run.exists() { - packages_with_scripts.push(( - package.clone(), - package_path, - scripts_run, - )); - } - } else if !scripts_run.exists() && !has_warned.exists() { - packages_with_scripts_not_run.push((has_warned, package.id.nv.clone())); - } - } + lifecycle_scripts.add(package, package_path.into()); 
} while let Some(result) = cache_futures.next().await { @@ -789,74 +652,12 @@ async fn sync_resolution_with_fs( } } - if !packages_with_scripts.is_empty() { - // get custom commands for each bin available in the node_modules dir (essentially - // the scripts that are in `node_modules/.bin`) - let base = resolve_baseline_custom_commands( - snapshot, - &package_partitions.packages, - &deno_local_registry_dir, - )?; - let init_cwd = lifecycle_scripts.initial_cwd.as_deref().unwrap(); - let process_state = crate::npm::managed::npm_process_state( - snapshot.as_valid_serialized(), - Some(root_node_modules_dir_path), - ); - - let mut env_vars = crate::task_runner::real_env_vars(); - env_vars.insert( - crate::args::NPM_RESOLUTION_STATE_ENV_VAR_NAME.to_string(), - process_state, - ); - for (package, package_path, scripts_run_path) in packages_with_scripts { - // add custom commands for binaries from the package's dependencies. this will take precedence over the - // baseline commands, so if the package relies on a bin that conflicts with one higher in the dependency tree, the - // correct bin will be used. - let custom_commands = resolve_custom_commands_from_deps( - base.clone(), - &package, - snapshot, - &deno_local_registry_dir, - )?; - for script_name in ["preinstall", "install", "postinstall"] { - if let Some(script) = package.scripts.get(script_name) { - if script_name == "install" - && is_broken_default_install_script(script, &package_path) - { - continue; - } - let exit_code = - crate::task_runner::run_task(crate::task_runner::RunTaskOptions { - task_name: script_name, - script, - cwd: &package_path, - env_vars: env_vars.clone(), - custom_commands: custom_commands.clone(), - init_cwd, - argv: &[], - root_node_modules_dir: Some(root_node_modules_dir_path), - }) - .await?; - if exit_code != 0 { - anyhow::bail!( - "script '{}' in '{}' failed with exit code {}", - script_name, - package.id.nv, - exit_code, - ); - } - } - } - fs::write(scripts_run_path, "")?; - } - } - { let packages_with_deprecation_warnings = packages_with_deprecation_warnings.lock(); if !packages_with_deprecation_warnings.is_empty() { log::warn!( - "{} Following packages are deprecated:", + "{} The following packages are deprecated:", colors::yellow("Warning") ); let len = packages_with_deprecation_warnings.len(); @@ -870,7 +671,7 @@ async fn sync_resolution_with_fs( ); } else { log::warn!( - "┗─ {}", + "┖─ {}", colors::gray(format!("npm:{:?} ({})", package_id, msg)) ); } @@ -878,42 +679,111 @@ async fn sync_resolution_with_fs( } } - if !packages_with_scripts_not_run.is_empty() { - log::warn!("{} Following packages contained npm lifecycle scripts ({}) that were not executed:", colors::yellow("Warning"), colors::gray("preinstall/install/postinstall")); + lifecycle_scripts + .finish( + snapshot, + &package_partitions.packages, + Some(root_node_modules_dir_path), + ) + .await?; - for (_, package_nv) in packages_with_scripts_not_run.iter() { - log::warn!("┠─ {}", colors::gray(format!("npm:{package_nv}"))); - } + setup_cache.save(); + drop(single_process_lock); + drop(pb_clear_guard); - log::warn!("┃"); - log::warn!( - "┠─ {}", - colors::italic("This may cause the packages to not work correctly.") - ); - log::warn!("┗─ {}", colors::italic("To run lifecycle scripts, use the `--allow-scripts` flag with `deno install`:")); - let packages_comma_separated = packages_with_scripts_not_run - .iter() - .map(|(_, p)| format!("npm:{p}")) - .collect::>() - .join(","); - log::warn!( - " {}", - colors::bold(format!( - "deno install 
--allow-scripts={}", - packages_comma_separated - )) - ); + Ok(()) +} + +/// `node_modules/.deno//` +fn local_node_modules_package_folder( + local_registry_dir: &Path, + package: &NpmResolutionPackage, +) -> PathBuf { + local_registry_dir.join(get_package_folder_id_folder_name( + &package.get_package_cache_folder_id(), + )) +} - for (scripts_warned_path, _) in packages_with_scripts_not_run { - let _ignore_err = fs::write(scripts_warned_path, ""); +struct LocalLifecycleScripts<'a> { + deno_local_registry_dir: &'a Path, +} + +impl<'a> LocalLifecycleScripts<'a> { + /// `node_modules/.deno//.scripts-run` + fn ran_scripts_file(&self, package: &NpmResolutionPackage) -> PathBuf { + local_node_modules_package_folder(self.deno_local_registry_dir, package) + .join(".scripts-run") + } + + /// `node_modules/.deno//.scripts-warned` + fn warned_scripts_file(&self, package: &NpmResolutionPackage) -> PathBuf { + local_node_modules_package_folder(self.deno_local_registry_dir, package) + .join(".scripts-warned") + } +} + +impl<'a> super::common::lifecycle_scripts::LifecycleScriptsStrategy + for LocalLifecycleScripts<'a> +{ + fn package_path(&self, package: &NpmResolutionPackage) -> PathBuf { + local_node_modules_package_contents_path( + self.deno_local_registry_dir, + package, + ) + } + + fn did_run_scripts( + &self, + package: &NpmResolutionPackage, + ) -> std::result::Result<(), deno_core::anyhow::Error> { + std::fs::write(self.ran_scripts_file(package), "")?; + Ok(()) + } + + fn warn_on_scripts_not_run( + &self, + packages: &[(&NpmResolutionPackage, std::path::PathBuf)], + ) -> Result<(), AnyError> { + if !packages.is_empty() { + log::warn!("{} The following packages contained npm lifecycle scripts ({}) that were not executed:", colors::yellow("Warning"), colors::gray("preinstall/install/postinstall")); + + for (package, _) in packages { + log::warn!("┠─ {}", colors::gray(format!("npm:{}", package.id.nv))); + } + + log::warn!("┃"); + log::warn!( + "┠─ {}", + colors::italic("This may cause the packages to not work correctly.") + ); + log::warn!("┖─ {}", colors::italic("To run lifecycle scripts, use the `--allow-scripts` flag with `deno install`:")); + let packages_comma_separated = packages + .iter() + .map(|(p, _)| format!("npm:{}", p.id.nv)) + .collect::>() + .join(","); + log::warn!( + " {}", + colors::bold(format!( + "deno install --allow-scripts={}", + packages_comma_separated + )) + ); + + for (package, _) in packages { + let _ignore_err = fs::write(self.warned_scripts_file(package), ""); + } } + Ok(()) } - setup_cache.save(); - drop(single_process_lock); - drop(pb_clear_guard); + fn has_warned(&self, package: &NpmResolutionPackage) -> bool { + self.warned_scripts_file(package).exists() + } - Ok(()) + fn has_run(&self, package: &NpmResolutionPackage) -> bool { + self.ran_scripts_file(package).exists() + } } // Uses BTreeMap to preserve the ordering of the elements in memory, to ensure diff --git a/cli/npm/managed/resolvers/local/bin_entries.rs b/cli/npm/managed/resolvers/local/bin_entries.rs deleted file mode 100644 index 980a2653b..000000000 --- a/cli/npm/managed/resolvers/local/bin_entries.rs +++ /dev/null @@ -1,333 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
- -use crate::npm::managed::NpmResolutionPackage; -use deno_core::anyhow::Context; -use deno_core::error::AnyError; -use deno_npm::resolution::NpmResolutionSnapshot; -use deno_npm::NpmPackageId; -use std::collections::HashMap; -use std::collections::HashSet; -use std::collections::VecDeque; -use std::path::Path; -use std::path::PathBuf; - -#[derive(Default)] -pub(super) struct BinEntries { - /// Packages that have colliding bin names - collisions: HashSet, - seen_names: HashMap, - /// The bin entries - entries: Vec<(NpmResolutionPackage, PathBuf)>, -} - -/// Returns the name of the default binary for the given package. -/// This is the package name without the organization (`@org/`), if any. -fn default_bin_name(package: &NpmResolutionPackage) -> &str { - package - .id - .nv - .name - .rsplit_once('/') - .map_or(package.id.nv.name.as_str(), |(_, name)| name) -} - -impl BinEntries { - pub(super) fn new() -> Self { - Self::default() - } - - /// Add a new bin entry (package with a bin field) - pub(super) fn add( - &mut self, - package: NpmResolutionPackage, - package_path: PathBuf, - ) { - // check for a new collision, if we haven't already - // found one - match package.bin.as_ref().unwrap() { - deno_npm::registry::NpmPackageVersionBinEntry::String(_) => { - let bin_name = default_bin_name(&package); - - if let Some(other) = self - .seen_names - .insert(bin_name.to_string(), package.id.clone()) - { - self.collisions.insert(package.id.clone()); - self.collisions.insert(other); - } - } - deno_npm::registry::NpmPackageVersionBinEntry::Map(entries) => { - for name in entries.keys() { - if let Some(other) = - self.seen_names.insert(name.to_string(), package.id.clone()) - { - self.collisions.insert(package.id.clone()); - self.collisions.insert(other); - } - } - } - } - - self.entries.push((package, package_path)); - } - - fn for_each_entry( - &mut self, - snapshot: &NpmResolutionSnapshot, - mut f: impl FnMut( - &NpmResolutionPackage, - &Path, - &str, // bin name - &str, // bin script - ) -> Result<(), AnyError>, - ) -> Result<(), AnyError> { - if !self.collisions.is_empty() { - // walking the dependency tree to find out the depth of each package - // is sort of expensive, so we only do it if there's a collision - sort_by_depth(snapshot, &mut self.entries, &mut self.collisions); - } - - let mut seen = HashSet::new(); - - for (package, package_path) in &self.entries { - if let Some(bin_entries) = &package.bin { - match bin_entries { - deno_npm::registry::NpmPackageVersionBinEntry::String(script) => { - let name = default_bin_name(package); - if !seen.insert(name) { - // we already set up a bin entry with this name - continue; - } - f(package, package_path, name, script)?; - } - deno_npm::registry::NpmPackageVersionBinEntry::Map(entries) => { - for (name, script) in entries { - if !seen.insert(name) { - // we already set up a bin entry with this name - continue; - } - f(package, package_path, name, script)?; - } - } - } - } - } - - Ok(()) - } - - /// Collect the bin entries into a vec of (name, script path) - pub(super) fn into_bin_files( - mut self, - snapshot: &NpmResolutionSnapshot, - ) -> Vec<(String, PathBuf)> { - let mut bins = Vec::new(); - self - .for_each_entry(snapshot, |_, package_path, name, script| { - bins.push((name.to_string(), package_path.join(script))); - Ok(()) - }) - .unwrap(); - bins - } - - /// Finish setting up the bin entries, writing the necessary files - /// to disk. 
- pub(super) fn finish( - mut self, - snapshot: &NpmResolutionSnapshot, - bin_node_modules_dir_path: &Path, - ) -> Result<(), AnyError> { - if !self.entries.is_empty() && !bin_node_modules_dir_path.exists() { - std::fs::create_dir_all(bin_node_modules_dir_path).with_context( - || format!("Creating '{}'", bin_node_modules_dir_path.display()), - )?; - } - - self.for_each_entry(snapshot, |package, package_path, name, script| { - set_up_bin_entry( - package, - name, - script, - package_path, - bin_node_modules_dir_path, - ) - })?; - - Ok(()) - } -} - -// walk the dependency tree to find out the depth of each package -// that has a bin entry, then sort them by depth -fn sort_by_depth( - snapshot: &NpmResolutionSnapshot, - bin_entries: &mut [(NpmResolutionPackage, PathBuf)], - collisions: &mut HashSet, -) { - enum Entry<'a> { - Pkg(&'a NpmPackageId), - IncreaseDepth, - } - - let mut seen = HashSet::new(); - let mut depths: HashMap<&NpmPackageId, u64> = - HashMap::with_capacity(collisions.len()); - - let mut queue = VecDeque::new(); - queue.extend(snapshot.top_level_packages().map(Entry::Pkg)); - seen.extend(snapshot.top_level_packages()); - queue.push_back(Entry::IncreaseDepth); - - let mut current_depth = 0u64; - - while let Some(entry) = queue.pop_front() { - if collisions.is_empty() { - break; - } - let id = match entry { - Entry::Pkg(id) => id, - Entry::IncreaseDepth => { - current_depth += 1; - if queue.is_empty() { - break; - } - queue.push_back(Entry::IncreaseDepth); - continue; - } - }; - if let Some(package) = snapshot.package_from_id(id) { - if collisions.remove(&package.id) { - depths.insert(&package.id, current_depth); - } - for dep in package.dependencies.values() { - if seen.insert(dep) { - queue.push_back(Entry::Pkg(dep)); - } - } - } - } - - bin_entries.sort_by(|(a, _), (b, _)| { - depths - .get(&a.id) - .unwrap_or(&u64::MAX) - .cmp(depths.get(&b.id).unwrap_or(&u64::MAX)) - .then_with(|| a.id.nv.cmp(&b.id.nv).reverse()) - }); -} - -pub(super) fn set_up_bin_entry( - package: &NpmResolutionPackage, - bin_name: &str, - #[allow(unused_variables)] bin_script: &str, - #[allow(unused_variables)] package_path: &Path, - bin_node_modules_dir_path: &Path, -) -> Result<(), AnyError> { - #[cfg(windows)] - { - set_up_bin_shim(package, bin_name, bin_node_modules_dir_path)?; - } - #[cfg(unix)] - { - symlink_bin_entry( - package, - bin_name, - bin_script, - package_path, - bin_node_modules_dir_path, - )?; - } - Ok(()) -} - -#[cfg(windows)] -fn set_up_bin_shim( - package: &NpmResolutionPackage, - bin_name: &str, - bin_node_modules_dir_path: &Path, -) -> Result<(), AnyError> { - use std::fs; - let mut cmd_shim = bin_node_modules_dir_path.join(bin_name); - - cmd_shim.set_extension("cmd"); - let shim = format!("@deno run -A npm:{}/{bin_name} %*", package.id.nv); - fs::write(&cmd_shim, shim).with_context(|| { - format!("Can't set up '{}' bin at {}", bin_name, cmd_shim.display()) - })?; - - Ok(()) -} - -#[cfg(unix)] -fn symlink_bin_entry( - _package: &NpmResolutionPackage, - bin_name: &str, - bin_script: &str, - package_path: &Path, - bin_node_modules_dir_path: &Path, -) -> Result<(), AnyError> { - use std::io; - use std::os::unix::fs::symlink; - let link = bin_node_modules_dir_path.join(bin_name); - let original = package_path.join(bin_script); - - use std::os::unix::fs::PermissionsExt; - let mut perms = match std::fs::metadata(&original) { - Ok(metadata) => metadata.permissions(), - Err(err) => { - if err.kind() == io::ErrorKind::NotFound { - log::warn!( - "{} Trying to set up '{}' bin for \"{}\", 
but the entry point \"{}\" doesn't exist.", - deno_terminal::colors::yellow("Warning"), - bin_name, - package_path.display(), - original.display() - ); - return Ok(()); - } - return Err(err).with_context(|| { - format!("Can't set up '{}' bin at {}", bin_name, original.display()) - }); - } - }; - if perms.mode() & 0o111 == 0 { - // if the original file is not executable, make it executable - perms.set_mode(perms.mode() | 0o111); - std::fs::set_permissions(&original, perms).with_context(|| { - format!("Setting permissions on '{}'", original.display()) - })?; - } - let original_relative = - crate::util::path::relative_path(bin_node_modules_dir_path, &original) - .unwrap_or(original); - - if let Err(err) = symlink(&original_relative, &link) { - if err.kind() == io::ErrorKind::AlreadyExists { - // remove and retry - std::fs::remove_file(&link).with_context(|| { - format!( - "Failed to remove existing bin symlink at {}", - link.display() - ) - })?; - symlink(&original_relative, &link).with_context(|| { - format!( - "Can't set up '{}' bin at {}", - bin_name, - original_relative.display() - ) - })?; - return Ok(()); - } - return Err(err).with_context(|| { - format!( - "Can't set up '{}' bin at {}", - bin_name, - original_relative.display() - ) - }); - } - - Ok(()) -} diff --git a/cli/npm/managed/resolvers/mod.rs b/cli/npm/managed/resolvers/mod.rs index f5d9e4b05..234a6e4db 100644 --- a/cli/npm/managed/resolvers/mod.rs +++ b/cli/npm/managed/resolvers/mod.rs @@ -54,6 +54,7 @@ pub fn create_npm_fs_resolver( tarball_cache, resolution, system_info, + lifecycle_scripts, )), } } -- cgit v1.2.3