author    David Sherret <dsherret@users.noreply.github.com>    2024-07-03 20:54:33 -0400
committer GitHub <noreply@github.com>    2024-07-04 00:54:33 +0000
commit    147411e64b22fe74cb258125acab83f9182c9f81 (patch)
tree      a1f63dcbf0404c20534986b10f02b649df5a3ad5 /cli/tools
parent    dd6d19e12051fac2ea5639f621501f4710a1b8e1 (diff)
feat: npm workspace and better Deno workspace support (#24334)
Adds much better support for the unstable Deno workspaces feature, as well as support for npm workspaces. npm workspace support is still limited in that packages are only installed into the root node_modules folder; it will get smarter over time about when to add node_modules folders within individual packages.

This includes a breaking change in config file resolution: searching for config files now stops at the first package.json found, unless that package.json is part of a workspace. To keep the previous behaviour, the root deno.json needs to be updated to be a workspace by adding `"workspace": ["./path-to-pkg-json-folder-goes-here"]`. See details in https://github.com/denoland/deno_config/pull/66

Closes #24340
Closes #24159
Closes #24161
Closes #22020
Closes #18546
Closes #16106
Closes #24160
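For illustration, a minimal sketch of such a root deno.json, assuming hypothetical member folders ./packages/foo and ./packages/bar (the `"workspace"` field and its array-of-paths shape come from the commit message above; the member paths are placeholders):

```jsonc
// deno.json at the repository root (member paths are illustrative)
{
  "workspace": [
    // each entry points at a folder containing a package.json
    // (npm workspace member) or a deno.json (Deno workspace member)
    "./packages/foo",
    "./packages/bar"
  ]
}
```

With a root config like this, config file resolution continues past each member's package.json up to the workspace root instead of stopping at the first package.json it finds.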
Diffstat (limited to 'cli/tools')
-rw-r--r--  cli/tools/bench/mod.rs                93
-rw-r--r--  cli/tools/check.rs                     2
-rw-r--r--  cli/tools/compile.rs                 113
-rw-r--r--  cli/tools/doc.rs                      51
-rw-r--r--  cli/tools/fmt.rs                     471
-rw-r--r--  cli/tools/info.rs                     23
-rw-r--r--  cli/tools/lint/mod.rs                429
-rw-r--r--  cli/tools/registry/mod.rs            111
-rw-r--r--  cli/tools/registry/pm.rs              25
-rw-r--r--  cli/tools/registry/publish_order.rs   21
-rw-r--r--  cli/tools/registry/unfurl.rs          70
-rw-r--r--  cli/tools/task.rs                    319
-rw-r--r--  cli/tools/test/mod.rs                118
-rw-r--r--  cli/tools/vendor/build.rs             12
-rw-r--r--  cli/tools/vendor/import_map.rs        16
-rw-r--r--  cli/tools/vendor/mod.rs               25
-rw-r--r--  cli/tools/vendor/test.rs              18
17 files changed, 1162 insertions, 755 deletions
diff --git a/cli/tools/bench/mod.rs b/cli/tools/bench/mod.rs
index 0378d6ae2..d801b908c 100644
--- a/cli/tools/bench/mod.rs
+++ b/cli/tools/bench/mod.rs
@@ -407,7 +407,8 @@ pub async fn run_benchmarks(
bench_flags: BenchFlags,
) -> Result<(), AnyError> {
let cli_options = CliOptions::from_flags(flags)?;
- let bench_options = cli_options.resolve_bench_options(bench_flags)?;
+ let workspace_bench_options =
+ cli_options.resolve_workspace_bench_options(&bench_flags);
let factory = CliFactory::from_cli_options(Arc::new(cli_options));
let cli_options = factory.cli_options();
// Various bench files should not share the same permissions in terms of
@@ -416,11 +417,21 @@ pub async fn run_benchmarks(
let permissions =
Permissions::from_options(&cli_options.permissions_options()?)?;
- let specifiers = collect_specifiers(
- bench_options.files,
- cli_options.vendor_dir_path().map(ToOwned::to_owned),
- is_supported_bench_path,
- )?;
+ let members_with_bench_options =
+ cli_options.resolve_bench_options_for_members(&bench_flags)?;
+ let specifiers = members_with_bench_options
+ .iter()
+ .map(|(_, bench_options)| {
+ collect_specifiers(
+ bench_options.files.clone(),
+ cli_options.vendor_dir_path().map(ToOwned::to_owned),
+ is_supported_bench_path,
+ )
+ })
+ .collect::<Result<Vec<_>, _>>()?
+ .into_iter()
+ .flatten()
+ .collect::<Vec<_>>();
if specifiers.is_empty() {
return Err(generic_error("No bench modules found"));
@@ -429,7 +440,7 @@ pub async fn run_benchmarks(
let main_graph_container = factory.main_module_graph_container().await?;
main_graph_container.check_specifiers(&specifiers).await?;
- if bench_options.no_run {
+ if workspace_bench_options.no_run {
return Ok(());
}
@@ -441,8 +452,8 @@ pub async fn run_benchmarks(
&permissions,
specifiers,
BenchSpecifierOptions {
- filter: TestFilter::from_flag(&bench_options.filter),
- json: bench_options.json,
+ filter: TestFilter::from_flag(&workspace_bench_options.filter),
+ json: workspace_bench_options.json,
log_level,
},
)
@@ -472,24 +483,40 @@ pub async fn run_benchmarks_with_watch(
let factory = CliFactoryBuilder::new()
.build_from_flags_for_watcher(flags, watcher_communicator.clone())?;
let cli_options = factory.cli_options();
- let bench_options = cli_options.resolve_bench_options(bench_flags)?;
+ let workspace_bench_options =
+ cli_options.resolve_workspace_bench_options(&bench_flags);
let _ = watcher_communicator.watch_paths(cli_options.watch_paths());
- if let Some(set) = &bench_options.files.include {
- let watch_paths = set.base_paths();
- if !watch_paths.is_empty() {
- let _ = watcher_communicator.watch_paths(watch_paths);
- }
- }
let graph_kind = cli_options.type_check_mode().as_graph_kind();
let module_graph_creator = factory.module_graph_creator().await?;
-
- let bench_modules = collect_specifiers(
- bench_options.files.clone(),
- cli_options.vendor_dir_path().map(ToOwned::to_owned),
- is_supported_bench_path,
- )?;
+ let members_with_bench_options =
+ cli_options.resolve_bench_options_for_members(&bench_flags)?;
+ let watch_paths = members_with_bench_options
+ .iter()
+ .filter_map(|(_, bench_options)| {
+ bench_options
+ .files
+ .include
+ .as_ref()
+ .map(|set| set.base_paths())
+ })
+ .flatten()
+ .collect::<Vec<_>>();
+ let _ = watcher_communicator.watch_paths(watch_paths);
+ let collected_bench_modules = members_with_bench_options
+ .iter()
+ .map(|(_, bench_options)| {
+ collect_specifiers(
+ bench_options.files.clone(),
+ cli_options.vendor_dir_path().map(ToOwned::to_owned),
+ is_supported_bench_path,
+ )
+ })
+ .collect::<Result<Vec<_>, _>>()?
+ .into_iter()
+ .flatten()
+ .collect::<Vec<_>>();
// Various bench files should not share the same permissions in terms of
// `PermissionsContainer` - otherwise granting/revoking permissions in one
@@ -498,7 +525,7 @@ pub async fn run_benchmarks_with_watch(
Permissions::from_options(&cli_options.permissions_options()?)?;
let graph = module_graph_creator
- .create_graph(graph_kind, bench_modules)
+ .create_graph(graph_kind, collected_bench_modules.clone())
.await?;
module_graph_creator.graph_valid(&graph)?;
let bench_modules = &graph.roots;
@@ -524,16 +551,10 @@ pub async fn run_benchmarks_with_watch(
let worker_factory =
Arc::new(factory.create_cli_main_worker_factory().await?);
- // todo(dsherret): why are we collecting specifiers twice in a row?
- // Seems like a perf bug.
- let specifiers = collect_specifiers(
- bench_options.files,
- cli_options.vendor_dir_path().map(ToOwned::to_owned),
- is_supported_bench_path,
- )?
- .into_iter()
- .filter(|specifier| bench_modules_to_reload.contains(specifier))
- .collect::<Vec<ModuleSpecifier>>();
+ let specifiers = collected_bench_modules
+ .into_iter()
+ .filter(|specifier| bench_modules_to_reload.contains(specifier))
+ .collect::<Vec<ModuleSpecifier>>();
factory
.main_module_graph_container()
@@ -541,7 +562,7 @@ pub async fn run_benchmarks_with_watch(
.check_specifiers(&specifiers)
.await?;
- if bench_options.no_run {
+ if workspace_bench_options.no_run {
return Ok(());
}
@@ -551,8 +572,8 @@ pub async fn run_benchmarks_with_watch(
&permissions,
specifiers,
BenchSpecifierOptions {
- filter: TestFilter::from_flag(&bench_options.filter),
- json: bench_options.json,
+ filter: TestFilter::from_flag(&workspace_bench_options.filter),
+ json: workspace_bench_options.json,
log_level,
},
)
diff --git a/cli/tools/check.rs b/cli/tools/check.rs
index 6eb7a071c..4ec677f8f 100644
--- a/cli/tools/check.rs
+++ b/cli/tools/check.rs
@@ -183,7 +183,7 @@ impl TypeChecker {
self.module_graph_builder.build_fast_check_graph(
&mut graph,
BuildFastCheckGraphOptions {
- workspace_fast_check: false,
+ workspace_fast_check: deno_graph::WorkspaceFastCheckOption::Disabled,
},
)?;
}
diff --git a/cli/tools/compile.rs b/cli/tools/compile.rs
index b7aa94691..e395c351b 100644
--- a/cli/tools/compile.rs
+++ b/cli/tools/compile.rs
@@ -5,6 +5,7 @@ use crate::args::Flags;
use crate::factory::CliFactory;
use crate::http_util::HttpClientProvider;
use crate::standalone::is_standalone_binary;
+use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::generic_error;
@@ -12,6 +13,7 @@ use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_graph::GraphKind;
use deno_terminal::colors;
+use eszip::EszipRelativeFileBaseUrl;
use rand::Rng;
use std::path::Path;
use std::path::PathBuf;
@@ -82,12 +84,24 @@ pub async fn compile(
ts_config_for_emit.ts_config,
)?;
let parser = parsed_source_cache.as_capturing_parser();
+ let root_dir_url = resolve_root_dir_from_specifiers(
+ cli_options.workspace.root_folder().0,
+ graph.specifiers().map(|(s, _)| s).chain(
+ cli_options
+ .node_modules_dir_path()
+ .and_then(|p| ModuleSpecifier::from_directory_path(p).ok())
+ .iter(),
+ ),
+ );
+ log::debug!("Binary root dir: {}", root_dir_url);
+ let root_dir_url = EszipRelativeFileBaseUrl::new(&root_dir_url);
let eszip = eszip::EszipV2::from_graph(eszip::FromGraphOptions {
graph,
parser,
transpile_options,
emit_options,
- relative_file_base: None,
+ // make all the modules relative to the root folder
+ relative_file_base: Some(root_dir_url),
})?;
log::info!(
@@ -116,6 +130,7 @@ pub async fn compile(
.write_bin(
&mut file,
eszip,
+ root_dir_url,
&module_specifier,
&compile_flags,
cli_options,
@@ -268,6 +283,68 @@ fn get_os_specific_filepath(
}
}
+fn resolve_root_dir_from_specifiers<'a>(
+ starting_dir: &ModuleSpecifier,
+ specifiers: impl Iterator<Item = &'a ModuleSpecifier>,
+) -> ModuleSpecifier {
+ fn select_common_root<'a>(a: &'a str, b: &'a str) -> &'a str {
+ let min_length = a.len().min(b.len());
+
+ let mut last_slash = 0;
+ for i in 0..min_length {
+ if a.as_bytes()[i] == b.as_bytes()[i] && a.as_bytes()[i] == b'/' {
+ last_slash = i;
+ } else if a.as_bytes()[i] != b.as_bytes()[i] {
+ break;
+ }
+ }
+
+ // Return the common root path up to the last common slash.
+ // This returns a slice of the original string 'a', up to and including the last matching '/'.
+ let common = &a[..=last_slash];
+ if cfg!(windows) && common == "file:///" {
+ a
+ } else {
+ common
+ }
+ }
+
+ fn is_file_system_root(url: &str) -> bool {
+ let Some(path) = url.strip_prefix("file:///") else {
+ return false;
+ };
+ if cfg!(windows) {
+ let Some((_drive, path)) = path.split_once('/') else {
+ return true;
+ };
+ path.is_empty()
+ } else {
+ path.is_empty()
+ }
+ }
+
+ let mut found_dir = starting_dir.as_str();
+ if !is_file_system_root(found_dir) {
+ for specifier in specifiers {
+ if specifier.scheme() == "file" {
+ found_dir = select_common_root(found_dir, specifier.as_str());
+ }
+ }
+ }
+ let found_dir = if is_file_system_root(found_dir) {
+ found_dir
+ } else {
+ // include the parent dir name because it helps create some context
+ found_dir
+ .strip_suffix('/')
+ .unwrap_or(found_dir)
+ .rfind('/')
+ .map(|i| &found_dir[..i + 1])
+ .unwrap_or(found_dir)
+ };
+ ModuleSpecifier::parse(found_dir).unwrap()
+}
+
#[cfg(test)]
mod test {
pub use super::*;
@@ -342,4 +419,38 @@ mod test {
run_test("C:\\my-exe.0.1.2", Some("windows"), "C:\\my-exe.0.1.2.exe");
run_test("my-exe-0.1.2", Some("linux"), "my-exe-0.1.2");
}
+
+ #[test]
+ fn test_resolve_root_dir_from_specifiers() {
+ fn resolve(start: &str, specifiers: &[&str]) -> String {
+ let specifiers = specifiers
+ .iter()
+ .map(|s| ModuleSpecifier::parse(s).unwrap())
+ .collect::<Vec<_>>();
+ resolve_root_dir_from_specifiers(
+ &ModuleSpecifier::parse(start).unwrap(),
+ specifiers.iter(),
+ )
+ .to_string()
+ }
+
+ assert_eq!(resolve("file:///a/b/c", &["file:///a/b/c/d"]), "file:///a/");
+ assert_eq!(
+ resolve("file:///a/b/c/", &["file:///a/b/c/d"]),
+ "file:///a/b/"
+ );
+ assert_eq!(
+ resolve("file:///a/b/c/", &["file:///a/b/c/d", "file:///a/b/c/e"]),
+ "file:///a/b/"
+ );
+ assert_eq!(resolve("file:///", &["file:///a/b/c/d"]), "file:///");
+ if cfg!(windows) {
+ assert_eq!(resolve("file:///c:/", &["file:///c:/test"]), "file:///c:/");
+ // this will ignore the other one because it's on a separate drive
+ assert_eq!(
+ resolve("file:///c:/a/b/c/", &["file:///v:/a/b/c/d"]),
+ "file:///c:/a/b/"
+ );
+ }
+ }
}
diff --git a/cli/tools/doc.rs b/cli/tools/doc.rs
index f123fc55a..79765a91d 100644
--- a/cli/tools/doc.rs
+++ b/cli/tools/doc.rs
@@ -187,31 +187,32 @@ pub async fn doc(flags: Flags, doc_flags: DocFlags) -> Result<(), AnyError> {
Default::default()
};
- let rewrite_map =
- if let Some(config_file) = cli_options.maybe_config_file().clone() {
- let config = config_file.to_exports_config()?;
-
- let rewrite_map = config
- .clone()
- .into_map()
- .into_keys()
- .map(|key| {
- Ok((
- config.get_resolved(&key)?.unwrap(),
- key
- .strip_prefix('.')
- .unwrap_or(&key)
- .strip_prefix('/')
- .unwrap_or(&key)
- .to_owned(),
- ))
- })
- .collect::<Result<IndexMap<_, _>, AnyError>>()?;
-
- Some(rewrite_map)
- } else {
- None
- };
+ let rewrite_map = if let Some(config_file) =
+ cli_options.workspace.resolve_start_ctx().maybe_deno_json()
+ {
+ let config = config_file.to_exports_config()?;
+
+ let rewrite_map = config
+ .clone()
+ .into_map()
+ .into_keys()
+ .map(|key| {
+ Ok((
+ config.get_resolved(&key)?.unwrap(),
+ key
+ .strip_prefix('.')
+ .unwrap_or(&key)
+ .strip_prefix('/')
+ .unwrap_or(&key)
+ .to_owned(),
+ ))
+ })
+ .collect::<Result<IndexMap<_, _>, AnyError>>()?;
+
+ Some(rewrite_map)
+ } else {
+ None
+ };
generate_docs_directory(
doc_nodes_by_url,
diff --git a/cli/tools/fmt.rs b/cli/tools/fmt.rs
index b37a8e06b..c16be9fb2 100644
--- a/cli/tools/fmt.rs
+++ b/cli/tools/fmt.rs
@@ -13,6 +13,7 @@ use crate::args::FmtFlags;
use crate::args::FmtOptions;
use crate::args::FmtOptionsConfig;
use crate::args::ProseWrap;
+use crate::cache::Caches;
use crate::colors;
use crate::factory::CliFactory;
use crate::util::diff::diff;
@@ -20,6 +21,7 @@ use crate::util::file_watcher;
use crate::util::fs::canonicalize_path;
use crate::util::fs::FileCollector;
use crate::util::path::get_extension;
+use async_trait::async_trait;
use deno_ast::ParsedSource;
use deno_config::glob::FilePatterns;
use deno_core::anyhow::anyhow;
@@ -50,8 +52,11 @@ use crate::cache::IncrementalCache;
pub async fn format(flags: Flags, fmt_flags: FmtFlags) -> Result<(), AnyError> {
if fmt_flags.is_stdin() {
let cli_options = CliOptions::from_flags(flags)?;
- let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
+ let start_ctx = cli_options.workspace.resolve_start_ctx();
+ let fmt_options =
+ cli_options.resolve_fmt_options(&fmt_flags, &start_ctx)?;
return format_stdin(
+ &fmt_flags,
fmt_options,
cli_options
.ext_flag()
@@ -70,42 +75,42 @@ pub async fn format(flags: Flags, fmt_flags: FmtFlags) -> Result<(), AnyError> {
Ok(async move {
let factory = CliFactory::from_flags(flags)?;
let cli_options = factory.cli_options();
- let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
- let files = collect_fmt_files(cli_options, fmt_options.files.clone())
- .and_then(|files| {
- if files.is_empty() {
- Err(generic_error("No target files found."))
+ let caches = factory.caches()?;
+ let mut paths_with_options_batches =
+ resolve_paths_with_options_batches(cli_options, &fmt_flags)?;
+
+ for paths_with_options in &mut paths_with_options_batches {
+ let _ = watcher_communicator
+ .watch_paths(paths_with_options.paths.clone());
+ let files = std::mem::take(&mut paths_with_options.paths);
+ paths_with_options.paths = if let Some(paths) = &changed_paths {
+ if fmt_flags.check {
+ // check all files on any changed (https://github.com/denoland/deno/issues/12446)
+ files
+ .iter()
+ .any(|path| {
+ canonicalize_path(path)
+ .map(|path| paths.contains(&path))
+ .unwrap_or(false)
+ })
+ .then_some(files)
+ .unwrap_or_else(|| [].to_vec())
} else {
- Ok(files)
+ files
+ .into_iter()
+ .filter(|path| {
+ canonicalize_path(path)
+ .map(|path| paths.contains(&path))
+ .unwrap_or(false)
+ })
+ .collect::<Vec<_>>()
}
- })?;
- let _ = watcher_communicator.watch_paths(files.clone());
- let refmt_files = if let Some(paths) = changed_paths {
- if fmt_options.check {
- // check all files on any changed (https://github.com/denoland/deno/issues/12446)
- files
- .iter()
- .any(|path| {
- canonicalize_path(path)
- .map(|path| paths.contains(&path))
- .unwrap_or(false)
- })
- .then_some(files)
- .unwrap_or_else(|| [].to_vec())
} else {
files
- .into_iter()
- .filter(|path| {
- canonicalize_path(path)
- .map(|path| paths.contains(&path))
- .unwrap_or(false)
- })
- .collect::<Vec<_>>()
- }
- } else {
- files
- };
- format_files(factory, fmt_options, refmt_files).await?;
+ };
+ }
+
+ format_files(caches, &fmt_flags, paths_with_options_batches).await?;
Ok(())
})
@@ -114,43 +119,77 @@ pub async fn format(flags: Flags, fmt_flags: FmtFlags) -> Result<(), AnyError> {
.await?;
} else {
let factory = CliFactory::from_flags(flags)?;
+ let caches = factory.caches()?;
let cli_options = factory.cli_options();
- let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
- let files = collect_fmt_files(cli_options, fmt_options.files.clone())
- .and_then(|files| {
- if files.is_empty() {
- Err(generic_error("No target files found."))
- } else {
- Ok(files)
- }
- })?;
- format_files(factory, fmt_options, files).await?;
+ let paths_with_options_batches =
+ resolve_paths_with_options_batches(cli_options, &fmt_flags)?;
+ format_files(caches, &fmt_flags, paths_with_options_batches).await?;
}
Ok(())
}
-async fn format_files(
- factory: CliFactory,
- fmt_options: FmtOptions,
+struct PathsWithOptions {
+ base: PathBuf,
paths: Vec<PathBuf>,
+ options: FmtOptions,
+}
+
+fn resolve_paths_with_options_batches(
+ cli_options: &CliOptions,
+ fmt_flags: &FmtFlags,
+) -> Result<Vec<PathsWithOptions>, AnyError> {
+ let members_fmt_options =
+ cli_options.resolve_fmt_options_for_members(fmt_flags)?;
+ let mut paths_with_options_batches =
+ Vec::with_capacity(members_fmt_options.len());
+ for member_fmt_options in members_fmt_options {
+ let files =
+ collect_fmt_files(cli_options, member_fmt_options.files.clone())?;
+ if !files.is_empty() {
+ paths_with_options_batches.push(PathsWithOptions {
+ base: member_fmt_options.files.base.clone(),
+ paths: files,
+ options: member_fmt_options,
+ });
+ }
+ }
+ if paths_with_options_batches.is_empty() {
+ return Err(generic_error("No target files found."));
+ }
+ Ok(paths_with_options_batches)
+}
+
+async fn format_files(
+ caches: &Arc<Caches>,
+ fmt_flags: &FmtFlags,
+ paths_with_options_batches: Vec<PathsWithOptions>,
) -> Result<(), AnyError> {
- let caches = factory.caches()?;
- let check = fmt_options.check;
- let incremental_cache = Arc::new(IncrementalCache::new(
- caches.fmt_incremental_cache_db(),
- &fmt_options.options,
- &paths,
- ));
- if check {
- check_source_files(paths, fmt_options.options, incremental_cache.clone())
- .await?;
+ let formatter: Box<dyn Formatter> = if fmt_flags.check {
+ Box::new(CheckFormatter::default())
} else {
- format_source_files(paths, fmt_options.options, incremental_cache.clone())
+ Box::new(RealFormatter::default())
+ };
+ for paths_with_options in paths_with_options_batches {
+ log::debug!(
+ "Formatting {} file(s) in {}",
+ paths_with_options.paths.len(),
+ paths_with_options.base.display()
+ );
+ let fmt_options = paths_with_options.options;
+ let paths = paths_with_options.paths;
+ let incremental_cache = Arc::new(IncrementalCache::new(
+ caches.fmt_incremental_cache_db(),
+ &fmt_options.options,
+ &paths,
+ ));
+ formatter
+ .handle_files(paths, fmt_options.options, incremental_cache.clone())
.await?;
+ incremental_cache.wait_completion().await;
}
- incremental_cache.wait_completion().await;
- Ok(())
+
+ formatter.finish()
}
fn collect_fmt_files(
@@ -274,156 +313,190 @@ pub fn format_parsed_source(
)
}
-async fn check_source_files(
- paths: Vec<PathBuf>,
- fmt_options: FmtOptionsConfig,
- incremental_cache: Arc<IncrementalCache>,
-) -> Result<(), AnyError> {
- let not_formatted_files_count = Arc::new(AtomicUsize::new(0));
- let checked_files_count = Arc::new(AtomicUsize::new(0));
-
- // prevent threads outputting at the same time
- let output_lock = Arc::new(Mutex::new(0));
-
- run_parallelized(paths, {
- let not_formatted_files_count = not_formatted_files_count.clone();
- let checked_files_count = checked_files_count.clone();
- move |file_path| {
- checked_files_count.fetch_add(1, Ordering::Relaxed);
- let file_text = read_file_contents(&file_path)?.text;
-
- // skip checking the file if we know it's formatted
- if incremental_cache.is_file_same(&file_path, &file_text) {
- return Ok(());
- }
+#[async_trait]
+trait Formatter {
+ async fn handle_files(
+ &self,
+ paths: Vec<PathBuf>,
+ fmt_options: FmtOptionsConfig,
+ incremental_cache: Arc<IncrementalCache>,
+ ) -> Result<(), AnyError>;
- match format_file(&file_path, &file_text, &fmt_options) {
- Ok(Some(formatted_text)) => {
- not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
- let _g = output_lock.lock();
- let diff = diff(&file_text, &formatted_text);
- info!("");
- info!("{} {}:", colors::bold("from"), file_path.display());
- info!("{}", diff);
- }
- Ok(None) => {
- // When checking formatting, only update the incremental cache when
- // the file is the same since we don't bother checking for stable
- // formatting here. Additionally, ensure this is done during check
- // so that CIs that cache the DENO_DIR will get the benefit of
- // incremental formatting
- incremental_cache.update_file(&file_path, &file_text);
+ fn finish(&self) -> Result<(), AnyError>;
+}
+
+#[derive(Default)]
+struct CheckFormatter {
+ not_formatted_files_count: Arc<AtomicUsize>,
+ checked_files_count: Arc<AtomicUsize>,
+}
+
+#[async_trait]
+impl Formatter for CheckFormatter {
+ async fn handle_files(
+ &self,
+ paths: Vec<PathBuf>,
+ fmt_options: FmtOptionsConfig,
+ incremental_cache: Arc<IncrementalCache>,
+ ) -> Result<(), AnyError> {
+ // prevent threads outputting at the same time
+ let output_lock = Arc::new(Mutex::new(0));
+
+ run_parallelized(paths, {
+ let not_formatted_files_count = self.not_formatted_files_count.clone();
+ let checked_files_count = self.checked_files_count.clone();
+ move |file_path| {
+ checked_files_count.fetch_add(1, Ordering::Relaxed);
+ let file_text = read_file_contents(&file_path)?.text;
+
+ // skip checking the file if we know it's formatted
+ if incremental_cache.is_file_same(&file_path, &file_text) {
+ return Ok(());
}
- Err(e) => {
- not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
- let _g = output_lock.lock();
- warn!("Error checking: {}", file_path.to_string_lossy());
- warn!(
- "{}",
- format!("{e}")
- .split('\n')
- .map(|l| {
- if l.trim().is_empty() {
- String::new()
- } else {
- format!(" {l}")
- }
- })
- .collect::<Vec<_>>()
- .join("\n")
- );
+
+ match format_file(&file_path, &file_text, &fmt_options) {
+ Ok(Some(formatted_text)) => {
+ not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
+ let _g = output_lock.lock();
+ let diff = diff(&file_text, &formatted_text);
+ info!("");
+ info!("{} {}:", colors::bold("from"), file_path.display());
+ info!("{}", diff);
+ }
+ Ok(None) => {
+ // When checking formatting, only update the incremental cache when
+ // the file is the same since we don't bother checking for stable
+ // formatting here. Additionally, ensure this is done during check
+ // so that CIs that cache the DENO_DIR will get the benefit of
+ // incremental formatting
+ incremental_cache.update_file(&file_path, &file_text);
+ }
+ Err(e) => {
+ not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
+ let _g = output_lock.lock();
+ warn!("Error checking: {}", file_path.to_string_lossy());
+ warn!(
+ "{}",
+ format!("{e}")
+ .split('\n')
+ .map(|l| {
+ if l.trim().is_empty() {
+ String::new()
+ } else {
+ format!(" {l}")
+ }
+ })
+ .collect::<Vec<_>>()
+ .join("\n")
+ );
+ }
}
+ Ok(())
}
+ })
+ .await?;
+
+ Ok(())
+ }
+
+ fn finish(&self) -> Result<(), AnyError> {
+ let not_formatted_files_count =
+ self.not_formatted_files_count.load(Ordering::Relaxed);
+ let checked_files_count = self.checked_files_count.load(Ordering::Relaxed);
+ let checked_files_str =
+ format!("{} {}", checked_files_count, files_str(checked_files_count));
+ if not_formatted_files_count == 0 {
+ info!("Checked {}", checked_files_str);
Ok(())
+ } else {
+ let not_formatted_files_str = files_str(not_formatted_files_count);
+ Err(generic_error(format!(
+ "Found {not_formatted_files_count} not formatted {not_formatted_files_str} in {checked_files_str}",
+ )))
}
- })
- .await?;
-
- let not_formatted_files_count =
- not_formatted_files_count.load(Ordering::Relaxed);
- let checked_files_count = checked_files_count.load(Ordering::Relaxed);
- let checked_files_str =
- format!("{} {}", checked_files_count, files_str(checked_files_count));
- if not_formatted_files_count == 0 {
- info!("Checked {}", checked_files_str);
- Ok(())
- } else {
- let not_formatted_files_str = files_str(not_formatted_files_count);
- Err(generic_error(format!(
- "Found {not_formatted_files_count} not formatted {not_formatted_files_str} in {checked_files_str}",
- )))
}
}
-async fn format_source_files(
- paths: Vec<PathBuf>,
- fmt_options: FmtOptionsConfig,
- incremental_cache: Arc<IncrementalCache>,
-) -> Result<(), AnyError> {
- let formatted_files_count = Arc::new(AtomicUsize::new(0));
- let checked_files_count = Arc::new(AtomicUsize::new(0));
- let output_lock = Arc::new(Mutex::new(0)); // prevent threads outputting at the same time
-
- run_parallelized(paths, {
- let formatted_files_count = formatted_files_count.clone();
- let checked_files_count = checked_files_count.clone();
- move |file_path| {
- checked_files_count.fetch_add(1, Ordering::Relaxed);
- let file_contents = read_file_contents(&file_path)?;
-
- // skip formatting the file if we know it's formatted
- if incremental_cache.is_file_same(&file_path, &file_contents.text) {
- return Ok(());
- }
+#[derive(Default)]
+struct RealFormatter {
+ formatted_files_count: Arc<AtomicUsize>,
+ checked_files_count: Arc<AtomicUsize>,
+}
- match format_ensure_stable(
- &file_path,
- &file_contents.text,
- &fmt_options,
- format_file,
- ) {
- Ok(Some(formatted_text)) => {
- incremental_cache.update_file(&file_path, &formatted_text);
- write_file_contents(
- &file_path,
- FileContents {
- had_bom: file_contents.had_bom,
- text: formatted_text,
- },
- )?;
- formatted_files_count.fetch_add(1, Ordering::Relaxed);
- let _g = output_lock.lock();
- info!("{}", file_path.to_string_lossy());
- }
- Ok(None) => {
- incremental_cache.update_file(&file_path, &file_contents.text);
+#[async_trait]
+impl Formatter for RealFormatter {
+ async fn handle_files(
+ &self,
+ paths: Vec<PathBuf>,
+ fmt_options: FmtOptionsConfig,
+ incremental_cache: Arc<IncrementalCache>,
+ ) -> Result<(), AnyError> {
+ let output_lock = Arc::new(Mutex::new(0)); // prevent threads outputting at the same time
+
+ run_parallelized(paths, {
+ let formatted_files_count = self.formatted_files_count.clone();
+ let checked_files_count = self.checked_files_count.clone();
+ move |file_path| {
+ checked_files_count.fetch_add(1, Ordering::Relaxed);
+ let file_contents = read_file_contents(&file_path)?;
+
+ // skip formatting the file if we know it's formatted
+ if incremental_cache.is_file_same(&file_path, &file_contents.text) {
+ return Ok(());
}
- Err(e) => {
- let _g = output_lock.lock();
- log::error!("Error formatting: {}", file_path.to_string_lossy());
- log::error!(" {e}");
+
+ match format_ensure_stable(
+ &file_path,
+ &file_contents.text,
+ &fmt_options,
+ format_file,
+ ) {
+ Ok(Some(formatted_text)) => {
+ incremental_cache.update_file(&file_path, &formatted_text);
+ write_file_contents(
+ &file_path,
+ FileContents {
+ had_bom: file_contents.had_bom,
+ text: formatted_text,
+ },
+ )?;
+ formatted_files_count.fetch_add(1, Ordering::Relaxed);
+ let _g = output_lock.lock();
+ info!("{}", file_path.to_string_lossy());
+ }
+ Ok(None) => {
+ incremental_cache.update_file(&file_path, &file_contents.text);
+ }
+ Err(e) => {
+ let _g = output_lock.lock();
+ log::error!("Error formatting: {}", file_path.to_string_lossy());
+ log::error!(" {e}");
+ }
}
+ Ok(())
}
- Ok(())
- }
- })
- .await?;
-
- let formatted_files_count = formatted_files_count.load(Ordering::Relaxed);
- debug!(
- "Formatted {} {}",
- formatted_files_count,
- files_str(formatted_files_count),
- );
-
- let checked_files_count = checked_files_count.load(Ordering::Relaxed);
- info!(
- "Checked {} {}",
- checked_files_count,
- files_str(checked_files_count)
- );
+ })
+ .await?;
+ Ok(())
+ }
- Ok(())
+ fn finish(&self) -> Result<(), AnyError> {
+ let formatted_files_count =
+ self.formatted_files_count.load(Ordering::Relaxed);
+ debug!(
+ "Formatted {} {}",
+ formatted_files_count,
+ files_str(formatted_files_count),
+ );
+
+ let checked_files_count = self.checked_files_count.load(Ordering::Relaxed);
+ info!(
+ "Checked {} {}",
+ checked_files_count,
+ files_str(checked_files_count)
+ );
+ Ok(())
+ }
}
/// When storing any formatted text in the incremental cache, we want
@@ -491,14 +564,18 @@ fn format_ensure_stable(
/// Format stdin and write result to stdout.
/// Treats input as set by `--ext` flag.
/// Compatible with `--check` flag.
-fn format_stdin(fmt_options: FmtOptions, ext: &str) -> Result<(), AnyError> {
+fn format_stdin(
+ fmt_flags: &FmtFlags,
+ fmt_options: FmtOptions,
+ ext: &str,
+) -> Result<(), AnyError> {
let mut source = String::new();
if stdin().read_to_string(&mut source).is_err() {
bail!("Failed to read from stdin");
}
let file_path = PathBuf::from(format!("_stdin.{ext}"));
let formatted_text = format_file(&file_path, &source, &fmt_options.options)?;
- if fmt_options.check {
+ if fmt_flags.check {
#[allow(clippy::print_stdout)]
if formatted_text.is_some() {
println!("Not formatted stdin");
diff --git a/cli/tools/info.rs b/cli/tools/info.rs
index 76951b13d..18a4bed57 100644
--- a/cli/tools/info.rs
+++ b/cli/tools/info.rs
@@ -42,19 +42,20 @@ pub async fn info(flags: Flags, info_flags: InfoFlags) -> Result<(), AnyError> {
let module_graph_creator = factory.module_graph_creator().await?;
let npm_resolver = factory.npm_resolver().await?;
let maybe_lockfile = factory.maybe_lockfile();
- let maybe_imports_map = factory.maybe_import_map().await?;
-
- let maybe_import_specifier = if let Some(imports_map) = maybe_imports_map {
- if let Ok(imports_specifier) =
- imports_map.resolve(&specifier, imports_map.base_url())
- {
- Some(imports_specifier)
+ let resolver = factory.workspace_resolver().await?;
+
+ let maybe_import_specifier =
+ if let Some(import_map) = resolver.maybe_import_map() {
+ if let Ok(imports_specifier) =
+ import_map.resolve(&specifier, import_map.base_url())
+ {
+ Some(imports_specifier)
+ } else {
+ None
+ }
} else {
None
- }
- } else {
- None
- };
+ };
let specifier = match maybe_import_specifier {
Some(specifier) => specifier,
diff --git a/cli/tools/lint/mod.rs b/cli/tools/lint/mod.rs
index 0d9868cf2..e3f2844a7 100644
--- a/cli/tools/lint/mod.rs
+++ b/cli/tools/lint/mod.rs
@@ -9,13 +9,21 @@ use deno_ast::ParsedSource;
use deno_ast::SourceRange;
use deno_ast::SourceTextInfo;
use deno_config::glob::FilePatterns;
+use deno_config::workspace::Workspace;
+use deno_config::workspace::WorkspaceMemberContext;
+use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
+use deno_core::futures::future::LocalBoxFuture;
+use deno_core::futures::FutureExt;
use deno_core::parking_lot::Mutex;
use deno_core::serde_json;
+use deno_core::unsync::future::LocalFutureExt;
+use deno_core::unsync::future::SharedLocal;
use deno_graph::FastCheckDiagnostic;
+use deno_graph::ModuleGraph;
use deno_lint::diagnostic::LintDiagnostic;
use deno_lint::linter::LintConfig;
use deno_lint::linter::LintFileOptions;
@@ -33,6 +41,7 @@ use std::io::stdin;
use std::io::Read;
use std::path::Path;
use std::path::PathBuf;
+use std::rc::Rc;
use std::sync::Arc;
use crate::args::CliOptions;
@@ -41,9 +50,12 @@ use crate::args::LintFlags;
use crate::args::LintOptions;
use crate::args::LintReporterKind;
use crate::args::LintRulesConfig;
+use crate::args::WorkspaceLintOptions;
+use crate::cache::Caches;
use crate::cache::IncrementalCache;
use crate::colors;
use crate::factory::CliFactory;
+use crate::graph_util::ModuleGraphCreator;
use crate::tools::fmt::run_parallelized;
use crate::util::file_watcher;
use crate::util::fs::canonicalize_path;
@@ -79,35 +91,49 @@ pub async fn lint(flags: Flags, lint_flags: LintFlags) -> Result<(), AnyError> {
Ok(async move {
let factory = CliFactory::from_flags(flags)?;
let cli_options = factory.cli_options();
- let lint_options = cli_options.resolve_lint_options(lint_flags)?;
let lint_config = cli_options.resolve_lint_config()?;
- let files =
- collect_lint_files(cli_options, lint_options.files.clone())
- .and_then(|files| {
- if files.is_empty() {
- Err(generic_error("No target files found."))
- } else {
- Ok(files)
- }
- })?;
- _ = watcher_communicator.watch_paths(files.clone());
-
- let lint_paths = if let Some(paths) = changed_paths {
- // lint all files on any changed (https://github.com/denoland/deno/issues/12446)
- files
- .iter()
- .any(|path| {
- canonicalize_path(path)
- .map(|p| paths.contains(&p))
- .unwrap_or(false)
- })
- .then_some(files)
- .unwrap_or_else(|| [].to_vec())
- } else {
- files
- };
-
- lint_files(factory, lint_options, lint_config, lint_paths).await?;
+ let mut paths_with_options_batches =
+ resolve_paths_with_options_batches(cli_options, &lint_flags)?;
+ for paths_with_options in &mut paths_with_options_batches {
+ _ = watcher_communicator
+ .watch_paths(paths_with_options.paths.clone());
+
+ let files = std::mem::take(&mut paths_with_options.paths);
+ paths_with_options.paths = if let Some(paths) = &changed_paths {
+ // lint all files on any changed (https://github.com/denoland/deno/issues/12446)
+ files
+ .iter()
+ .any(|path| {
+ canonicalize_path(path)
+ .map(|p| paths.contains(&p))
+ .unwrap_or(false)
+ })
+ .then_some(files)
+ .unwrap_or_else(|| [].to_vec())
+ } else {
+ files
+ };
+ }
+
+ let mut linter = WorkspaceLinter::new(
+ factory.caches()?.clone(),
+ factory.module_graph_creator().await?.clone(),
+ cli_options.workspace.clone(),
+ &cli_options.resolve_workspace_lint_options(&lint_flags)?,
+ );
+ for paths_with_options in paths_with_options_batches {
+ linter
+ .lint_files(
+ paths_with_options.options,
+ lint_config.clone(),
+ paths_with_options.ctx,
+ paths_with_options.paths,
+ )
+ .await?;
+ }
+
+ linter.finish();
+
Ok(())
})
},
@@ -117,15 +143,19 @@ pub async fn lint(flags: Flags, lint_flags: LintFlags) -> Result<(), AnyError> {
let factory = CliFactory::from_flags(flags)?;
let cli_options = factory.cli_options();
let is_stdin = lint_flags.is_stdin();
- let lint_options = cli_options.resolve_lint_options(lint_flags)?;
let lint_config = cli_options.resolve_lint_config()?;
- let files = &lint_options.files;
+ let workspace_lint_options =
+ cli_options.resolve_workspace_lint_options(&lint_flags)?;
let success = if is_stdin {
- let reporter_kind = lint_options.reporter_kind;
- let reporter_lock = Arc::new(Mutex::new(create_reporter(reporter_kind)));
+ let start_ctx = cli_options.workspace.resolve_start_ctx();
+ let reporter_lock = Arc::new(Mutex::new(create_reporter(
+ workspace_lint_options.reporter_kind,
+ )));
+ let lint_options =
+ cli_options.resolve_lint_options(lint_flags, &start_ctx)?;
let lint_rules = get_config_rules_err_empty(
lint_options.rules,
- cli_options.maybe_config_file().as_ref(),
+ start_ctx.maybe_deno_json().map(|c| c.as_ref()),
)?;
let file_path = cli_options.initial_cwd().join(STDIN_FILE_NAME);
let r = lint_stdin(&file_path, lint_rules.rules, lint_config);
@@ -137,16 +167,25 @@ pub async fn lint(flags: Flags, lint_flags: LintFlags) -> Result<(), AnyError> {
reporter_lock.lock().close(1);
success
} else {
- let target_files = collect_lint_files(cli_options, files.clone())
- .and_then(|files| {
- if files.is_empty() {
- Err(generic_error("No target files found."))
- } else {
- Ok(files)
- }
- })?;
- debug!("Found {} files", target_files.len());
- lint_files(factory, lint_options, lint_config, target_files).await?
+ let mut linter = WorkspaceLinter::new(
+ factory.caches()?.clone(),
+ factory.module_graph_creator().await?.clone(),
+ cli_options.workspace.clone(),
+ &workspace_lint_options,
+ );
+ let paths_with_options_batches =
+ resolve_paths_with_options_batches(cli_options, &lint_flags)?;
+ for paths_with_options in paths_with_options_batches {
+ linter
+ .lint_files(
+ paths_with_options.options,
+ lint_config.clone(),
+ paths_with_options.ctx,
+ paths_with_options.paths,
+ )
+ .await?;
+ }
+ linter.finish()
};
if !success {
std::process::exit(1);
@@ -156,121 +195,202 @@ pub async fn lint(flags: Flags, lint_flags: LintFlags) -> Result<(), AnyError> {
Ok(())
}
-async fn lint_files(
- factory: CliFactory,
- lint_options: LintOptions,
- lint_config: LintConfig,
+struct PathsWithOptions {
+ ctx: WorkspaceMemberContext,
paths: Vec<PathBuf>,
-) -> Result<bool, AnyError> {
- let caches = factory.caches()?;
- let maybe_config_file = factory.cli_options().maybe_config_file().as_ref();
- let lint_rules =
- get_config_rules_err_empty(lint_options.rules, maybe_config_file)?;
- let incremental_cache = Arc::new(IncrementalCache::new(
- caches.lint_incremental_cache_db(),
- &lint_rules.incremental_cache_state(),
- &paths,
- ));
- let target_files_len = paths.len();
- let reporter_kind = lint_options.reporter_kind;
- // todo(dsherret): abstract away this lock behind a performant interface
- let reporter_lock =
- Arc::new(Mutex::new(create_reporter(reporter_kind.clone())));
- let has_error = Arc::new(AtomicFlag::default());
-
- let mut futures = Vec::with_capacity(2);
- if lint_rules.no_slow_types {
- if let Some(config_file) = maybe_config_file {
- let members = config_file.to_workspace_members()?;
- let has_error = has_error.clone();
- let reporter_lock = reporter_lock.clone();
- let module_graph_creator = factory.module_graph_creator().await?.clone();
- let path_urls = paths
- .iter()
- .filter_map(|p| ModuleSpecifier::from_file_path(p).ok())
- .collect::<HashSet<_>>();
- futures.push(deno_core::unsync::spawn(async move {
- let graph = module_graph_creator
- .create_and_validate_publish_graph(&members, true)
- .await?;
- // todo(dsherret): this isn't exactly correct as linting isn't properly
- // setup to handle workspaces. Iterating over the workspace members
- // should be done at a higher level because it also needs to take into
- // account the config per workspace member.
- for member in &members {
- let export_urls = member.config_file.resolve_export_value_urls()?;
- if !export_urls.iter().any(|url| path_urls.contains(url)) {
- continue; // entrypoint is not specified, so skip
+ options: LintOptions,
+}
+
+fn resolve_paths_with_options_batches(
+ cli_options: &CliOptions,
+ lint_flags: &LintFlags,
+) -> Result<Vec<PathsWithOptions>, AnyError> {
+ let members_lint_options =
+ cli_options.resolve_lint_options_for_members(lint_flags)?;
+ let mut paths_with_options_batches =
+ Vec::with_capacity(members_lint_options.len());
+ for (ctx, lint_options) in members_lint_options {
+ let files = collect_lint_files(cli_options, lint_options.files.clone())?;
+ if !files.is_empty() {
+ paths_with_options_batches.push(PathsWithOptions {
+ ctx,
+ paths: files,
+ options: lint_options,
+ });
+ }
+ }
+ if paths_with_options_batches.is_empty() {
+ return Err(generic_error("No target files found."));
+ }
+ Ok(paths_with_options_batches)
+}
+
+type WorkspaceModuleGraphFuture =
+ SharedLocal<LocalBoxFuture<'static, Result<Rc<ModuleGraph>, Rc<AnyError>>>>;
+
+struct WorkspaceLinter {
+ caches: Arc<Caches>,
+ module_graph_creator: Arc<ModuleGraphCreator>,
+ workspace: Arc<Workspace>,
+ reporter_lock: Arc<Mutex<Box<dyn LintReporter + Send>>>,
+ workspace_module_graph: Option<WorkspaceModuleGraphFuture>,
+ has_error: Arc<AtomicFlag>,
+ file_count: usize,
+}
+
+impl WorkspaceLinter {
+ pub fn new(
+ caches: Arc<Caches>,
+ module_graph_creator: Arc<ModuleGraphCreator>,
+ workspace: Arc<Workspace>,
+ workspace_options: &WorkspaceLintOptions,
+ ) -> Self {
+ let reporter_lock =
+ Arc::new(Mutex::new(create_reporter(workspace_options.reporter_kind)));
+ Self {
+ caches,
+ module_graph_creator,
+ workspace,
+ reporter_lock,
+ workspace_module_graph: None,
+ has_error: Default::default(),
+ file_count: 0,
+ }
+ }
+
+ pub async fn lint_files(
+ &mut self,
+ lint_options: LintOptions,
+ lint_config: LintConfig,
+ member_ctx: WorkspaceMemberContext,
+ paths: Vec<PathBuf>,
+ ) -> Result<(), AnyError> {
+ self.file_count += paths.len();
+
+ let lint_rules = get_config_rules_err_empty(
+ lint_options.rules,
+ member_ctx.maybe_deno_json().map(|c| c.as_ref()),
+ )?;
+ let incremental_cache = Arc::new(IncrementalCache::new(
+ self.caches.lint_incremental_cache_db(),
+ &lint_rules.incremental_cache_state(),
+ &paths,
+ ));
+
+ let mut futures = Vec::with_capacity(2);
+ if lint_rules.no_slow_types {
+ if self.workspace_module_graph.is_none() {
+ let module_graph_creator = self.module_graph_creator.clone();
+ let packages = self.workspace.jsr_packages_for_publish();
+ self.workspace_module_graph = Some(
+ async move {
+ module_graph_creator
+ .create_and_validate_publish_graph(&packages, true)
+ .await
+ .map(Rc::new)
+ .map_err(Rc::new)
}
- let diagnostics = no_slow_types::collect_no_slow_type_diagnostics(
- &export_urls,
- &graph,
- );
- if !diagnostics.is_empty() {
- has_error.raise();
- let mut reporter = reporter_lock.lock();
- for diagnostic in &diagnostics {
- reporter
- .visit_diagnostic(LintOrCliDiagnostic::FastCheck(diagnostic));
+ .boxed_local()
+ .shared_local(),
+ );
+ }
+ let workspace_module_graph_future =
+ self.workspace_module_graph.as_ref().unwrap().clone();
+ let publish_config = member_ctx.maybe_package_config();
+ if let Some(publish_config) = publish_config {
+ let has_error = self.has_error.clone();
+ let reporter_lock = self.reporter_lock.clone();
+ let path_urls = paths
+ .iter()
+ .filter_map(|p| ModuleSpecifier::from_file_path(p).ok())
+ .collect::<HashSet<_>>();
+ futures.push(
+ async move {
+ let graph = workspace_module_graph_future
+ .await
+ .map_err(|err| anyhow!("{:#}", err))?;
+ let export_urls =
+ publish_config.config_file.resolve_export_value_urls()?;
+ if !export_urls.iter().any(|url| path_urls.contains(url)) {
+ return Ok(()); // entrypoint is not specified, so skip
}
+ let diagnostics = no_slow_types::collect_no_slow_type_diagnostics(
+ &export_urls,
+ &graph,
+ );
+ if !diagnostics.is_empty() {
+ has_error.raise();
+ let mut reporter = reporter_lock.lock();
+ for diagnostic in &diagnostics {
+ reporter
+ .visit_diagnostic(LintOrCliDiagnostic::FastCheck(diagnostic));
+ }
+ }
+ Ok(())
}
- }
- Ok(())
- }));
- }
- }
-
- futures.push({
- let has_error = has_error.clone();
- let linter = create_linter(lint_rules.rules);
- let reporter_lock = reporter_lock.clone();
- let incremental_cache = incremental_cache.clone();
- let lint_config = lint_config.clone();
- let fix = lint_options.fix;
- deno_core::unsync::spawn(async move {
- run_parallelized(paths, {
- move |file_path| {
- let file_text = deno_ast::strip_bom(fs::read_to_string(&file_path)?);
-
- // don't bother rechecking this file if it didn't have any diagnostics before
- if incremental_cache.is_file_same(&file_path, &file_text) {
- return Ok(());
- }
+ .boxed_local(),
+ );
+ }
+ }
- let r = lint_file(&linter, &file_path, file_text, lint_config, fix);
- if let Ok((file_source, file_diagnostics)) = &r {
- if file_diagnostics.is_empty() {
- // update the incremental cache if there were no diagnostics
- incremental_cache.update_file(
- &file_path,
- // ensure the returned text is used here as it may have been modified via --fix
- file_source.text(),
- )
+ futures.push({
+ let has_error = self.has_error.clone();
+ let linter = create_linter(lint_rules.rules);
+ let reporter_lock = self.reporter_lock.clone();
+ let incremental_cache = incremental_cache.clone();
+ let lint_config = lint_config.clone();
+ let fix = lint_options.fix;
+ async move {
+ run_parallelized(paths, {
+ move |file_path| {
+ let file_text =
+ deno_ast::strip_bom(fs::read_to_string(&file_path)?);
+
+ // don't bother rechecking this file if it didn't have any diagnostics before
+ if incremental_cache.is_file_same(&file_path, &file_text) {
+ return Ok(());
}
- }
- let success = handle_lint_result(
- &file_path.to_string_lossy(),
- r,
- reporter_lock.clone(),
- );
- if !success {
- has_error.raise();
- }
+ let r = lint_file(&linter, &file_path, file_text, lint_config, fix);
+ if let Ok((file_source, file_diagnostics)) = &r {
+ if file_diagnostics.is_empty() {
+ // update the incremental cache if there were no diagnostics
+ incremental_cache.update_file(
+ &file_path,
+ // ensure the returned text is used here as it may have been modified via --fix
+ file_source.text(),
+ )
+ }
+ }
- Ok(())
- }
- })
- .await
- })
- });
+ let success = handle_lint_result(
+ &file_path.to_string_lossy(),
+ r,
+ reporter_lock.clone(),
+ );
+ if !success {
+ has_error.raise();
+ }
- deno_core::futures::future::try_join_all(futures).await?;
+ Ok(())
+ }
+ })
+ .await
+ }
+ .boxed_local()
+ });
- incremental_cache.wait_completion().await;
- reporter_lock.lock().close(target_files_len);
+ deno_core::futures::future::try_join_all(futures).await?;
- Ok(!has_error.is_raised())
+ incremental_cache.wait_completion().await;
+ Ok(())
+ }
+
+ pub fn finish(self) -> bool {
+ debug!("Found {} files", self.file_count);
+ self.reporter_lock.lock().close(self.file_count);
+ !self.has_error.is_raised() // success
+ }
}
fn collect_lint_files(
@@ -692,9 +812,8 @@ impl LintReporter for PrettyLintReporter {
}
match check_count {
- n if n <= 1 => info!("Checked {} file", n),
- n if n > 1 => info!("Checked {} files", n),
- _ => unreachable!(),
+ 1 => info!("Checked 1 file"),
+ n => info!("Checked {} files", n),
}
}
}
@@ -744,9 +863,8 @@ impl LintReporter for CompactLintReporter {
}
match check_count {
- n if n <= 1 => info!("Checked {} file", n),
- n if n > 1 => info!("Checked {} files", n),
- _ => unreachable!(),
+ 1 => info!("Checked 1 file"),
+ n => info!("Checked {} files", n),
}
}
}
@@ -910,9 +1028,8 @@ pub fn get_configured_rules(
maybe_config_file: Option<&deno_config::ConfigFile>,
) -> ConfiguredRules {
const NO_SLOW_TYPES_NAME: &str = "no-slow-types";
- let implicit_no_slow_types = maybe_config_file
- .map(|c| c.is_package() || c.json.workspace.is_some())
- .unwrap_or(false);
+ let implicit_no_slow_types =
+ maybe_config_file.map(|c| c.is_package()).unwrap_or(false);
let no_slow_types = implicit_no_slow_types
&& !rules
.exclude
diff --git a/cli/tools/registry/mod.rs b/cli/tools/registry/mod.rs
index d300e5eaf..134a973f7 100644
--- a/cli/tools/registry/mod.rs
+++ b/cli/tools/registry/mod.rs
@@ -11,9 +11,8 @@ use std::sync::Arc;
use base64::prelude::BASE64_STANDARD;
use base64::Engine;
use deno_ast::ModuleSpecifier;
-use deno_config::glob::FilePatterns;
-use deno_config::ConfigFile;
-use deno_config::WorkspaceMemberConfig;
+use deno_config::workspace::JsrPackageConfig;
+use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
@@ -27,7 +26,6 @@ use deno_core::serde_json::Value;
use deno_runtime::deno_fetch::reqwest;
use deno_runtime::deno_fs::FileSystem;
use deno_terminal::colors;
-use import_map::ImportMap;
use lsp_types::Url;
use serde::Deserialize;
use serde::Serialize;
@@ -44,7 +42,6 @@ use crate::cache::ParsedSourceCache;
use crate::factory::CliFactory;
use crate::graph_util::ModuleGraphCreator;
use crate::http_util::HttpClient;
-use crate::resolver::MappedSpecifierResolver;
use crate::resolver::SloppyImportsResolver;
use crate::tools::check::CheckOptions;
use crate::tools::lint::no_slow_types;
@@ -84,27 +81,28 @@ pub async fn publish(
let auth_method =
get_auth_method(publish_flags.token, publish_flags.dry_run)?;
- let import_map = cli_factory
- .maybe_import_map()
- .await?
- .clone()
- .unwrap_or_else(|| {
- Arc::new(ImportMap::new(Url::parse("file:///dev/null").unwrap()))
- });
+ let workspace_resolver = cli_factory.workspace_resolver().await?.clone();
let directory_path = cli_factory.cli_options().initial_cwd();
-
- let mapped_resolver = Arc::new(MappedSpecifierResolver::new(
- Some(import_map),
- cli_factory.package_json_deps_provider().clone(),
- ));
let cli_options = cli_factory.cli_options();
- let Some(config_file) = cli_options.maybe_config_file() else {
- bail!(
- "Couldn't find a deno.json, deno.jsonc, jsr.json or jsr.jsonc configuration file in {}.",
- directory_path.display()
- );
- };
+ let publish_configs = cli_options.workspace.jsr_packages_for_publish();
+ if publish_configs.is_empty() {
+ match cli_options.workspace.resolve_start_ctx().maybe_deno_json() {
+ Some(deno_json) => {
+ debug_assert!(!deno_json.is_package());
+ bail!(
+ "Missing 'name', 'version' and 'exports' field in '{}'.",
+ deno_json.specifier
+ );
+ }
+ None => {
+ bail!(
+ "Couldn't find a deno.json, deno.jsonc, jsr.json or jsr.jsonc configuration file in {}.",
+ directory_path.display()
+ );
+ }
+ }
+ }
let diagnostics_collector = PublishDiagnosticsCollector::default();
let publish_preparer = PublishPreparer::new(
@@ -114,14 +112,14 @@ pub async fn publish(
cli_factory.type_checker().await?.clone(),
cli_factory.fs().clone(),
cli_factory.cli_options().clone(),
- mapped_resolver,
+ workspace_resolver,
);
let prepared_data = publish_preparer
.prepare_packages_for_publishing(
publish_flags.allow_slow_types,
&diagnostics_collector,
- config_file.clone(),
+ publish_configs,
)
.await?;
@@ -193,8 +191,8 @@ struct PublishPreparer {
source_cache: Arc<ParsedSourceCache>,
type_checker: Arc<TypeChecker>,
cli_options: Arc<CliOptions>,
- mapped_resolver: Arc<MappedSpecifierResolver>,
sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
+ workspace_resolver: Arc<WorkspaceResolver>,
}
impl PublishPreparer {
@@ -205,7 +203,7 @@ impl PublishPreparer {
type_checker: Arc<TypeChecker>,
fs: Arc<dyn FileSystem>,
cli_options: Arc<CliOptions>,
- mapped_resolver: Arc<MappedSpecifierResolver>,
+ workspace_resolver: Arc<WorkspaceResolver>,
) -> Self {
let sloppy_imports_resolver = if cli_options.unstable_sloppy_imports() {
Some(Arc::new(SloppyImportsResolver::new(fs.clone())))
@@ -218,8 +216,8 @@ impl PublishPreparer {
source_cache,
type_checker,
cli_options,
- mapped_resolver,
sloppy_imports_resolver,
+ workspace_resolver,
}
}
@@ -227,11 +225,9 @@ impl PublishPreparer {
&self,
allow_slow_types: bool,
diagnostics_collector: &PublishDiagnosticsCollector,
- deno_json: ConfigFile,
+ publish_configs: Vec<JsrPackageConfig>,
) -> Result<PreparePackagesData, AnyError> {
- let members = deno_json.to_workspace_members()?;
-
- if members.len() > 1 {
+ if publish_configs.len() > 1 {
log::info!("Publishing a workspace...");
}
@@ -240,31 +236,24 @@ impl PublishPreparer {
.build_and_check_graph_for_publish(
allow_slow_types,
diagnostics_collector,
- &members,
+ &publish_configs,
)
.await?;
- let mut package_by_name = HashMap::with_capacity(members.len());
+ let mut package_by_name = HashMap::with_capacity(publish_configs.len());
let publish_order_graph =
- publish_order::build_publish_order_graph(&graph, &members)?;
+ publish_order::build_publish_order_graph(&graph, &publish_configs)?;
- let results = members
+ let results = publish_configs
.into_iter()
.map(|member| {
let graph = graph.clone();
async move {
let package = self
- .prepare_publish(
- &member.package_name,
- &member.config_file,
- graph,
- diagnostics_collector,
- )
+ .prepare_publish(&member, graph, diagnostics_collector)
.await
- .with_context(|| {
- format!("Failed preparing '{}'.", member.package_name)
- })?;
- Ok::<_, AnyError>((member.package_name, package))
+ .with_context(|| format!("Failed preparing '{}'.", member.name))?;
+ Ok::<_, AnyError>((member.name, package))
}
.boxed()
})
@@ -284,12 +273,15 @@ impl PublishPreparer {
&self,
allow_slow_types: bool,
diagnostics_collector: &PublishDiagnosticsCollector,
- packages: &[WorkspaceMemberConfig],
+ package_configs: &[JsrPackageConfig],
) -> Result<Arc<deno_graph::ModuleGraph>, deno_core::anyhow::Error> {
let build_fast_check_graph = !allow_slow_types;
let graph = self
.module_graph_creator
- .create_and_validate_publish_graph(packages, build_fast_check_graph)
+ .create_and_validate_publish_graph(
+ package_configs,
+ build_fast_check_graph,
+ )
.await?;
// todo(dsherret): move to lint rule
@@ -335,7 +327,7 @@ impl PublishPreparer {
} else {
log::info!("Checking for slow types in the public API...");
let mut any_pkg_had_diagnostics = false;
- for package in packages {
+ for package in package_configs {
let export_urls = package.config_file.resolve_export_value_urls()?;
let diagnostics =
no_slow_types::collect_no_slow_type_diagnostics(&export_urls, &graph);
@@ -389,14 +381,14 @@ impl PublishPreparer {
#[allow(clippy::too_many_arguments)]
async fn prepare_publish(
&self,
- package_name: &str,
- deno_json: &ConfigFile,
+ package: &JsrPackageConfig,
graph: Arc<deno_graph::ModuleGraph>,
diagnostics_collector: &PublishDiagnosticsCollector,
) -> Result<Rc<PreparedPublishPackage>, AnyError> {
static SUGGESTED_ENTRYPOINTS: [&str; 4] =
["mod.ts", "mod.js", "index.ts", "index.js"];
+ let deno_json = &package.config_file;
let config_path = deno_json.specifier.to_file_path().unwrap();
let root_dir = config_path.parent().unwrap().to_path_buf();
let Some(version) = deno_json.json.version.clone() else {
@@ -418,32 +410,29 @@ impl PublishPreparer {
"version": "{}",
"exports": "{}"
}}"#,
- package_name,
+ package.name,
version,
suggested_entrypoint.unwrap_or("<path_to_entrypoint>")
);
bail!(
"You did not specify an entrypoint to \"{}\" package in {}. Add `exports` mapping in the configuration file, eg:\n{}",
- package_name,
+ package.name,
deno_json.specifier,
exports_content
);
}
- let Some(name_no_at) = package_name.strip_prefix('@') else {
+ let Some(name_no_at) = package.name.strip_prefix('@') else {
bail!("Invalid package name, use '@<scope_name>/<package_name> format");
};
let Some((scope, name_no_scope)) = name_no_at.split_once('/') else {
bail!("Invalid package name, use '@<scope_name>/<package_name> format");
};
- let file_patterns = deno_json
- .to_publish_config()?
- .map(|c| c.files)
- .unwrap_or_else(|| FilePatterns::new_with_base(root_dir.to_path_buf()));
+ let file_patterns = package.member_ctx.to_publish_config()?.files;
let tarball = deno_core::unsync::spawn_blocking({
let diagnostics_collector = diagnostics_collector.clone();
- let mapped_resolver = self.mapped_resolver.clone();
+ let workspace_resolver = self.workspace_resolver.clone();
let sloppy_imports_resolver = self.sloppy_imports_resolver.clone();
let cli_options = self.cli_options.clone();
let source_cache = self.source_cache.clone();
@@ -451,8 +440,8 @@ impl PublishPreparer {
move || {
let bare_node_builtins = cli_options.unstable_bare_node_builtins();
let unfurler = SpecifierUnfurler::new(
- &mapped_resolver,
sloppy_imports_resolver.as_deref(),
+ &workspace_resolver,
bare_node_builtins,
);
let root_specifier =
@@ -482,7 +471,7 @@ impl PublishPreparer {
})
.await??;
- log::debug!("Tarball size ({}): {}", package_name, tarball.bytes.len());
+ log::debug!("Tarball size ({}): {}", package.name, tarball.bytes.len());
Ok(Rc::new(PreparedPublishPackage {
scope: scope.to_string(),
diff --git a/cli/tools/registry/pm.rs b/cli/tools/registry/pm.rs
index 4fdc02550..e3e2f1b55 100644
--- a/cli/tools/registry/pm.rs
+++ b/cli/tools/registry/pm.rs
@@ -49,7 +49,7 @@ impl DenoConfigFormat {
}
enum DenoOrPackageJson {
- Deno(deno_config::ConfigFile, DenoConfigFormat),
+ Deno(Arc<deno_config::ConfigFile>, DenoConfigFormat),
Npm(Arc<deno_node::PackageJson>, Option<FmtOptionsConfig>),
}
@@ -87,7 +87,6 @@ impl DenoOrPackageJson {
DenoOrPackageJson::Deno(deno, ..) => deno
.to_fmt_config()
.ok()
- .flatten()
.map(|f| f.options)
.unwrap_or_default(),
DenoOrPackageJson::Npm(_, config) => config.clone().unwrap_or_default(),
@@ -122,9 +121,10 @@ impl DenoOrPackageJson {
/// the new config
fn from_flags(flags: Flags) -> Result<(Self, CliFactory), AnyError> {
let factory = CliFactory::from_flags(flags.clone())?;
- let options = factory.cli_options().clone();
+ let options = factory.cli_options();
+ let start_ctx = options.workspace.resolve_start_ctx();
- match (options.maybe_config_file(), options.maybe_package_json()) {
+ match (start_ctx.maybe_deno_json(), start_ctx.maybe_pkg_json()) {
// when both are present, for now,
// default to deno.json
(Some(deno), Some(_) | None) => Ok((
@@ -141,20 +141,17 @@ impl DenoOrPackageJson {
std::fs::write(options.initial_cwd().join("deno.json"), "{}\n")
.context("Failed to create deno.json file")?;
log::info!("Created deno.json configuration file.");
- let new_factory = CliFactory::from_flags(flags.clone())?;
- let new_options = new_factory.cli_options().clone();
+ let factory = CliFactory::from_flags(flags.clone())?;
+ let options = factory.cli_options().clone();
+ let start_ctx = options.workspace.resolve_start_ctx();
Ok((
DenoOrPackageJson::Deno(
- new_options
- .maybe_config_file()
- .as_ref()
- .ok_or_else(|| {
- anyhow!("config not found, but it was just created")
- })?
- .clone(),
+ start_ctx.maybe_deno_json().cloned().ok_or_else(|| {
+ anyhow!("config not found, but it was just created")
+ })?,
DenoConfigFormat::Json,
),
- new_factory,
+ factory,
))
}
}
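
For context, the config selection in `from_flags` above follows a fixed preference: deno.json wins whenever it exists, package.json is only used on its own, and when neither is found an empty deno.json is written and the CliFactory is rebuilt so that config discovery picks up the new file. A reduced sketch with illustrative names (not the real types):

    #[derive(Debug, PartialEq)]
    enum ConfigChoice {
        DenoJson,
        PackageJson,
        CreateDenoJson, // write "{}\n", then rebuild the factory to re-discover
    }

    fn pick_config(has_deno_json: bool, has_pkg_json: bool) -> ConfigChoice {
        match (has_deno_json, has_pkg_json) {
            (true, _) => ConfigChoice::DenoJson, // preferred even when both exist
            (false, true) => ConfigChoice::PackageJson,
            (false, false) => ConfigChoice::CreateDenoJson,
        }
    }

Rebuilding the factory instead of mutating the already-resolved options keeps discovery as the single source of truth for which config file is active.
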
diff --git a/cli/tools/registry/publish_order.rs b/cli/tools/registry/publish_order.rs
index ad0f72272..ad77a56bb 100644
--- a/cli/tools/registry/publish_order.rs
+++ b/cli/tools/registry/publish_order.rs
@@ -5,7 +5,7 @@ use std::collections::HashSet;
use std::collections::VecDeque;
use deno_ast::ModuleSpecifier;
-use deno_config::WorkspaceMemberConfig;
+use deno_config::workspace::JsrPackageConfig;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_graph::ModuleGraph;
@@ -114,7 +114,7 @@ impl PublishOrderGraph {
pub fn build_publish_order_graph(
graph: &ModuleGraph,
- roots: &[WorkspaceMemberConfig],
+ roots: &[JsrPackageConfig],
) -> Result<PublishOrderGraph, AnyError> {
let packages = build_pkg_deps(graph, roots)?;
Ok(build_publish_order_graph_from_pkgs_deps(packages))
@@ -122,18 +122,23 @@ pub fn build_publish_order_graph(
fn build_pkg_deps(
graph: &deno_graph::ModuleGraph,
- roots: &[WorkspaceMemberConfig],
+ roots: &[JsrPackageConfig],
) -> Result<HashMap<String, HashSet<String>>, AnyError> {
let mut members = HashMap::with_capacity(roots.len());
let mut seen_modules = HashSet::with_capacity(graph.modules().count());
let roots = roots
.iter()
- .map(|r| (ModuleSpecifier::from_file_path(&r.dir_path).unwrap(), r))
+ .map(|r| {
+ (
+ ModuleSpecifier::from_directory_path(r.config_file.dir_path()).unwrap(),
+ r,
+ )
+ })
.collect::<Vec<_>>();
- for (root_dir_url, root) in &roots {
+ for (root_dir_url, pkg_config) in &roots {
let mut deps = HashSet::new();
let mut pending = VecDeque::new();
- pending.extend(root.config_file.resolve_export_value_urls()?);
+ pending.extend(pkg_config.config_file.resolve_export_value_urls()?);
while let Some(specifier) = pending.pop_front() {
let Some(module) = graph.get(&specifier).and_then(|m| m.js()) else {
continue;
@@ -168,12 +173,12 @@ fn build_pkg_deps(
specifier.as_str().starts_with(dir_url.as_str())
});
if let Some(root) = found_root {
- deps.insert(root.1.package_name.clone());
+ deps.insert(root.1.name.clone());
}
}
}
}
- members.insert(root.package_name.clone(), deps);
+ members.insert(pkg_config.name.clone(), deps);
}
Ok(members)
}
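
The map returned by `build_pkg_deps` (package name → names of its in-workspace dependencies) is what the publish order is derived from. One standard way to linearize such a map — and a plausible reading of what `build_publish_order_graph_from_pkgs_deps` must do — is Kahn's algorithm; the sketch below is illustrative, not the actual implementation:

    use std::collections::{HashMap, HashSet, VecDeque};

    fn publish_order(
        mut deps: HashMap<String, HashSet<String>>,
    ) -> Result<Vec<String>, String> {
        // Start with packages that depend on nothing else in the workspace.
        let mut ready: VecDeque<String> = deps
            .iter()
            .filter(|(_, d)| d.is_empty())
            .map(|(name, _)| name.clone())
            .collect();
        let mut order = Vec::with_capacity(deps.len());
        while let Some(name) = ready.pop_front() {
            deps.remove(&name);
            order.push(name.clone());
            // Publishing `name` may unblock packages that depended on it.
            for (other, d) in deps.iter_mut() {
                if d.remove(&name) && d.is_empty() {
                    ready.push_back(other.clone());
                }
            }
        }
        if deps.is_empty() {
            Ok(order)
        } else {
            Err("cycle between workspace packages".to_string())
        }
    }

    fn main() {
        let mut deps = HashMap::new();
        deps.insert("app".to_string(), HashSet::from(["lib".to_string()]));
        deps.insert("lib".to_string(), HashSet::new());
        assert_eq!(publish_order(deps).unwrap(), vec!["lib", "app"]);
    }
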
diff --git a/cli/tools/registry/unfurl.rs b/cli/tools/registry/unfurl.rs
index 36bff64bb..147b59f30 100644
--- a/cli/tools/registry/unfurl.rs
+++ b/cli/tools/registry/unfurl.rs
@@ -3,6 +3,9 @@
use deno_ast::ParsedSource;
use deno_ast::SourceRange;
use deno_ast::SourceTextInfo;
+use deno_config::package_json::PackageJsonDepValue;
+use deno_config::workspace::MappedResolution;
+use deno_config::workspace::WorkspaceResolver;
use deno_core::ModuleSpecifier;
use deno_graph::DependencyDescriptor;
use deno_graph::DynamicTemplatePart;
@@ -10,7 +13,6 @@ use deno_graph::ParserModuleAnalyzer;
use deno_graph::TypeScriptReference;
use deno_runtime::deno_node::is_builtin_node_module;
-use crate::resolver::MappedSpecifierResolver;
use crate::resolver::SloppyImportsResolver;
#[derive(Debug, Clone)]
@@ -39,20 +41,20 @@ impl SpecifierUnfurlerDiagnostic {
}
pub struct SpecifierUnfurler<'a> {
- mapped_resolver: &'a MappedSpecifierResolver,
sloppy_imports_resolver: Option<&'a SloppyImportsResolver>,
+ workspace_resolver: &'a WorkspaceResolver,
bare_node_builtins: bool,
}
impl<'a> SpecifierUnfurler<'a> {
pub fn new(
- mapped_resolver: &'a MappedSpecifierResolver,
sloppy_imports_resolver: Option<&'a SloppyImportsResolver>,
+ workspace_resolver: &'a WorkspaceResolver,
bare_node_builtins: bool,
) -> Self {
Self {
- mapped_resolver,
sloppy_imports_resolver,
+ workspace_resolver,
bare_node_builtins,
}
}
@@ -62,12 +64,46 @@ impl<'a> SpecifierUnfurler<'a> {
referrer: &ModuleSpecifier,
specifier: &str,
) -> Option<String> {
- let resolved =
- if let Ok(resolved) = self.mapped_resolver.resolve(specifier, referrer) {
- resolved.into_specifier()
- } else {
- None
- };
+ let resolved = if let Ok(resolved) =
+ self.workspace_resolver.resolve(specifier, referrer)
+ {
+ match resolved {
+ MappedResolution::Normal(specifier)
+ | MappedResolution::ImportMap(specifier) => Some(specifier),
+ MappedResolution::PackageJson {
+ sub_path,
+ dep_result,
+ ..
+ } => match dep_result {
+ Ok(dep) => match dep {
+ PackageJsonDepValue::Req(req) => ModuleSpecifier::parse(&format!(
+ "npm:{}{}",
+ req,
+ sub_path
+ .as_ref()
+ .map(|s| format!("/{}", s))
+ .unwrap_or_default()
+ ))
+ .ok(),
+ PackageJsonDepValue::Workspace(_) => {
+ log::warn!(
+ "package.json workspace entries are not implemented yet for publishing."
+ );
+ None
+ }
+ },
+ Err(err) => {
+ log::warn!(
+ "Ignoring failed to resolve package.json dependency. {:#}",
+ err
+ );
+ None
+ }
+ },
+ }
+ } else {
+ None
+ };
let resolved = match resolved {
Some(resolved) => resolved,
None if self.bare_node_builtins && is_builtin_node_module(specifier) => {
@@ -305,8 +341,6 @@ fn to_range(
mod tests {
use std::sync::Arc;
- use crate::args::PackageJsonDepsProvider;
-
use super::*;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
@@ -355,19 +389,17 @@ mod tests {
}
}),
);
- let mapped_resolver = MappedSpecifierResolver::new(
- Some(Arc::new(import_map)),
- Arc::new(PackageJsonDepsProvider::new(Some(
- package_json.resolve_local_package_json_version_reqs(),
- ))),
+ let workspace_resolver = WorkspaceResolver::new_raw(
+ Some(import_map),
+ vec![Arc::new(package_json)],
+ deno_config::workspace::PackageJsonDepResolution::Enabled,
);
-
let fs = Arc::new(RealFs);
let sloppy_imports_resolver = SloppyImportsResolver::new(fs);
let unfurler = SpecifierUnfurler::new(
- &mapped_resolver,
Some(&sloppy_imports_resolver),
+ &workspace_resolver,
true,
);
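
Once a package.json dependency resolves, the unfurling above boils down to string formatting. A std-only sketch, with `req` standing in for deno_semver's package requirement rendered as `name@range`:

    fn unfurl_npm_specifier(req: &str, sub_path: Option<&str>) -> String {
        format!(
            "npm:{}{}",
            req,
            sub_path.map(|s| format!("/{}", s)).unwrap_or_default()
        )
    }

    fn main() {
        assert_eq!(unfurl_npm_specifier("chalk@^5.0.0", None), "npm:chalk@^5.0.0");
        assert_eq!(
            unfurl_npm_specifier("chalk@^5.0.0", Some("ansi-styles")),
            "npm:chalk@^5.0.0/ansi-styles"
        );
    }

Workspace-valued dependencies (`PackageJsonDepValue::Workspace`) are deliberately left unfurled for now, as the warning above notes.
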
diff --git a/cli/tools/task.rs b/cli/tools/task.rs
index a44dc8dbb..2905134f4 100644
--- a/cli/tools/task.rs
+++ b/cli/tools/task.rs
@@ -8,24 +8,30 @@ use crate::npm::CliNpmResolver;
use crate::npm::InnerCliNpmResolverRef;
use crate::npm::ManagedCliNpmResolver;
use crate::util::fs::canonicalize_path;
+use deno_config::workspace::TaskOrScript;
+use deno_config::workspace::Workspace;
+use deno_config::workspace::WorkspaceTasksConfig;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures;
use deno_core::futures::future::LocalBoxFuture;
+use deno_core::normalize_path;
use deno_runtime::deno_node::NodeResolver;
use deno_semver::package::PackageNv;
use deno_task_shell::ExecutableCommand;
use deno_task_shell::ExecuteResult;
use deno_task_shell::ShellCommand;
use deno_task_shell::ShellCommandContext;
-use indexmap::IndexMap;
use lazy_regex::Lazy;
use regex::Regex;
+use std::borrow::Cow;
use std::collections::HashMap;
+use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
+use std::sync::Arc;
use tokio::task::LocalSet;
// WARNING: Do not depend on this env var in user code. It's not stable API.
@@ -38,146 +44,124 @@ pub async fn execute_script(
) -> Result<i32, AnyError> {
let factory = CliFactory::from_flags(flags)?;
let cli_options = factory.cli_options();
- let tasks_config = cli_options.resolve_tasks_config()?;
- let maybe_package_json = cli_options.maybe_package_json();
- let package_json_scripts = maybe_package_json
- .as_ref()
- .and_then(|p| p.scripts.clone())
- .unwrap_or_default();
+ let start_ctx = cli_options.workspace.resolve_start_ctx();
+ if !start_ctx.has_deno_or_pkg_json() {
+ bail!("deno task couldn't find deno.json(c). See https://deno.land/manual@v{}/getting_started/configuration_file", env!("CARGO_PKG_VERSION"))
+ }
+ let force_use_pkg_json = std::env::var_os(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME)
+ .map(|v| {
+ // always remove so sub processes don't inherit this env var
+ std::env::remove_var(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME);
+ v == "1"
+ })
+ .unwrap_or(false);
+ let tasks_config = start_ctx.to_tasks_config()?;
+ let tasks_config = if force_use_pkg_json {
+ tasks_config.with_only_pkg_json()
+ } else {
+ tasks_config
+ };
let task_name = match &task_flags.task {
Some(task) => task,
None => {
print_available_tasks(
&mut std::io::stdout(),
+ &cli_options.workspace,
&tasks_config,
- &package_json_scripts,
)?;
return Ok(1);
}
};
+
let npm_resolver = factory.npm_resolver().await?;
let node_resolver = factory.node_resolver().await?;
let env_vars = real_env_vars();
- let force_use_pkg_json = std::env::var_os(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME)
- .map(|v| {
- // always remove so sub processes don't inherit this env var
- std::env::remove_var(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME);
- v == "1"
- })
- .unwrap_or(false);
-
- if let Some(
- deno_config::Task::Definition(script)
- | deno_config::Task::Commented {
- definition: script, ..
- },
- ) = tasks_config.get(task_name).filter(|_| !force_use_pkg_json)
- {
- let config_file_url = cli_options.maybe_config_file_specifier().unwrap();
- let config_file_path = if config_file_url.scheme() == "file" {
- config_file_url.to_file_path().unwrap()
- } else {
- bail!("Only local configuration files are supported")
- };
- let cwd = match task_flags.cwd {
- Some(path) => canonicalize_path(&PathBuf::from(path))
- .context("failed canonicalizing --cwd")?,
- None => config_file_path.parent().unwrap().to_owned(),
- };
-
- let custom_commands =
- resolve_custom_commands(npm_resolver.as_ref(), node_resolver)?;
- run_task(RunTaskOptions {
- task_name,
- script,
- cwd: &cwd,
- init_cwd: cli_options.initial_cwd(),
- env_vars,
- argv: cli_options.argv(),
- custom_commands,
- root_node_modules_dir: npm_resolver
- .root_node_modules_path()
- .map(|p| p.as_path()),
- })
- .await
- } else if package_json_scripts.contains_key(task_name) {
- let package_json_deps_provider = factory.package_json_deps_provider();
-
- if let Some(package_deps) = package_json_deps_provider.deps() {
- for (key, value) in package_deps {
- if let Err(err) = value {
- log::info!(
- "{} Ignoring dependency '{}' in package.json because its version requirement failed to parse: {:#}",
- colors::yellow("Warning"),
- key,
- err,
- );
- }
- }
- }
-
- // ensure the npm packages are installed if using a node_modules
- // directory and managed resolver
- if cli_options.has_node_modules_dir() {
- if let Some(npm_resolver) = npm_resolver.as_managed() {
- npm_resolver.ensure_top_level_package_json_install().await?;
- }
- }
- let cwd = match task_flags.cwd {
- Some(path) => canonicalize_path(&PathBuf::from(path))?,
- None => maybe_package_json
- .as_ref()
- .unwrap()
- .path
- .parent()
- .unwrap()
- .to_owned(),
- };
+ match tasks_config.task(task_name) {
+ Some((dir_url, task_or_script)) => match task_or_script {
+ TaskOrScript::Task(_tasks, script) => {
+ let cwd = match task_flags.cwd {
+ Some(path) => canonicalize_path(&PathBuf::from(path))
+ .context("failed canonicalizing --cwd")?,
+ None => normalize_path(dir_url.to_file_path().unwrap()),
+ };
- // At this point we already checked if the task name exists in package.json.
- // We can therefore check for "pre" and "post" scripts too, since we're only
- // dealing with package.json here and not deno.json
- let task_names = vec![
- format!("pre{}", task_name),
- task_name.clone(),
- format!("post{}", task_name),
- ];
- let custom_commands =
- resolve_custom_commands(npm_resolver.as_ref(), node_resolver)?;
- for task_name in &task_names {
- if let Some(script) = package_json_scripts.get(task_name) {
- let exit_code = run_task(RunTaskOptions {
+ let custom_commands =
+ resolve_custom_commands(npm_resolver.as_ref(), node_resolver)?;
+ run_task(RunTaskOptions {
task_name,
script,
cwd: &cwd,
init_cwd: cli_options.initial_cwd(),
- env_vars: env_vars.clone(),
+ env_vars,
argv: cli_options.argv(),
- custom_commands: custom_commands.clone(),
+ custom_commands,
root_node_modules_dir: npm_resolver
.root_node_modules_path()
.map(|p| p.as_path()),
})
- .await?;
- if exit_code > 0 {
- return Ok(exit_code);
- }
+ .await
}
- }
+ TaskOrScript::Script(scripts, _script) => {
+ // ensure the npm packages are installed if using a node_modules
+ // directory and managed resolver
+ if cli_options.has_node_modules_dir() {
+ if let Some(npm_resolver) = npm_resolver.as_managed() {
+ npm_resolver.ensure_top_level_package_json_install().await?;
+ }
+ }
- Ok(0)
- } else {
- log::error!("Task not found: {task_name}");
- if log::log_enabled!(log::Level::Error) {
- print_available_tasks(
- &mut std::io::stderr(),
- &tasks_config,
- &package_json_scripts,
- )?;
+ let cwd = match task_flags.cwd {
+ Some(path) => canonicalize_path(&PathBuf::from(path))?,
+ None => normalize_path(dir_url.to_file_path().unwrap()),
+ };
+
+ // At this point we already checked if the task name exists in package.json.
+ // We can therefore check for "pre" and "post" scripts too, since we're only
+ // dealing with package.json here and not deno.json
+ let task_names = vec![
+ format!("pre{}", task_name),
+ task_name.clone(),
+ format!("post{}", task_name),
+ ];
+ let custom_commands =
+ resolve_custom_commands(npm_resolver.as_ref(), node_resolver)?;
+ for task_name in &task_names {
+ if let Some(script) = scripts.get(task_name) {
+ let exit_code = run_task(RunTaskOptions {
+ task_name,
+ script,
+ cwd: &cwd,
+ init_cwd: cli_options.initial_cwd(),
+ env_vars: env_vars.clone(),
+ argv: cli_options.argv(),
+ custom_commands: custom_commands.clone(),
+ root_node_modules_dir: npm_resolver
+ .root_node_modules_path()
+ .map(|p| p.as_path()),
+ })
+ .await?;
+ if exit_code > 0 {
+ return Ok(exit_code);
+ }
+ }
+ }
+
+ Ok(0)
+ }
+ },
+ None => {
+ log::error!("Task not found: {task_name}");
+ if log::log_enabled!(log::Level::Error) {
+ print_available_tasks(
+ &mut std::io::stderr(),
+ &cli_options.workspace,
+ &tasks_config,
+ )?;
+ }
+ Ok(1)
}
- Ok(1)
}
}
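
The package.json script branch retained above implements npm-style lifecycle expansion: for a script named `build`, `prebuild` and `postbuild` are also consulted, in that order, and the first non-zero exit code short-circuits. A reduced sketch, with a closure standing in for `run_task`:

    use std::collections::HashMap;

    fn run_with_pre_post(
        scripts: &HashMap<String, String>,
        task_name: &str,
        run: impl Fn(&str) -> i32,
    ) -> i32 {
        for name in [
            format!("pre{}", task_name),
            task_name.to_string(),
            format!("post{}", task_name),
        ] {
            if let Some(script) = scripts.get(&name) {
                let exit_code = run(script);
                if exit_code > 0 {
                    return exit_code; // stop at the first failing script
                }
            }
        }
        0
    }

As the comment in the diff notes, this expansion only applies to package.json scripts, never to deno.json tasks.
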
@@ -282,53 +266,92 @@ fn real_env_vars() -> HashMap<String, String> {
fn print_available_tasks(
writer: &mut dyn std::io::Write,
- tasks_config: &IndexMap<String, deno_config::Task>,
- package_json_scripts: &IndexMap<String, String>,
+ workspace: &Arc<Workspace>,
+ tasks_config: &WorkspaceTasksConfig,
) -> Result<(), std::io::Error> {
writeln!(writer, "{}", colors::green("Available tasks:"))?;
+ let is_cwd_root_dir = tasks_config.root.is_none();
- if tasks_config.is_empty() && package_json_scripts.is_empty() {
+ if tasks_config.is_empty() {
writeln!(
writer,
" {}",
colors::red("No tasks found in configuration file")
)?;
} else {
- for (is_deno, (key, task)) in tasks_config
- .iter()
- .map(|(k, t)| (true, (k, t.clone())))
- .chain(
- package_json_scripts
- .iter()
- .filter(|(key, _)| !tasks_config.contains_key(*key))
- .map(|(k, v)| (false, (k, deno_config::Task::Definition(v.clone())))),
- )
- {
- writeln!(
- writer,
- "- {}{}",
- colors::cyan(key),
- if is_deno {
- "".to_string()
- } else {
- format!(" {}", colors::italic_gray("(package.json)"))
- }
- )?;
- let definition = match &task {
- deno_config::Task::Definition(definition) => definition,
- deno_config::Task::Commented { definition, .. } => definition,
+ let mut seen_task_names =
+ HashSet::with_capacity(tasks_config.tasks_count());
+ for maybe_config in [&tasks_config.member, &tasks_config.root] {
+ let Some(config) = maybe_config else {
+ continue;
};
- if let deno_config::Task::Commented { comments, .. } = &task {
- let slash_slash = colors::italic_gray("//");
- for comment in comments {
- writeln!(
- writer,
- " {slash_slash} {}",
- colors::italic_gray(comment)
- )?;
+ for (is_root, is_deno, (key, task)) in config
+ .deno_json
+ .as_ref()
+ .map(|config| {
+ let is_root = !is_cwd_root_dir
+ && config.folder_url == *workspace.root_folder().0.as_ref();
+ config
+ .tasks
+ .iter()
+ .map(move |(k, t)| (is_root, true, (k, Cow::Borrowed(t))))
+ })
+ .into_iter()
+ .flatten()
+ .chain(
+ config
+ .package_json
+ .as_ref()
+ .map(|config| {
+ let is_root = !is_cwd_root_dir
+ && config.folder_url == *workspace.root_folder().0.as_ref();
+ config.tasks.iter().map(move |(k, v)| {
+ (
+ is_root,
+ false,
+ (k, Cow::Owned(deno_config::Task::Definition(v.clone()))),
+ )
+ })
+ })
+ .into_iter()
+ .flatten(),
+ )
+ {
+ if !seen_task_names.insert(key) {
+ continue; // already seen
+ }
+ writeln!(
+ writer,
+ "- {}{}",
+ colors::cyan(key),
+ if is_root {
+ if is_deno {
+ format!(" {}", colors::italic_gray("(workspace)"))
+ } else {
+ format!(" {}", colors::italic_gray("(workspace package.json)"))
+ }
+ } else if is_deno {
+ "".to_string()
+ } else {
+ format!(" {}", colors::italic_gray("(package.json)"))
+ }
+ )?;
+ let definition = match task.as_ref() {
+ deno_config::Task::Definition(definition) => definition,
+ deno_config::Task::Commented { definition, .. } => definition,
+ };
+ if let deno_config::Task::Commented { comments, .. } = task.as_ref() {
+ let slash_slash = colors::italic_gray("//");
+ for comment in comments {
+ writeln!(
+ writer,
+ " {slash_slash} {}",
+ colors::italic_gray(comment)
+ )?;
+ }
}
+ writeln!(writer, " {definition}")?;
}
- writeln!(writer, " {definition}")?;
}
}
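
The new listing logic walks the member config before the root config, and `seen_task_names` makes a member task shadow a same-named workspace task rather than printing it twice. The shadowing rule in isolation, using plain tuples instead of the real config types:

    use std::collections::HashSet;

    /// Returns (name, definition, is_root); member tasks shadow root ones.
    fn visible_tasks<'a>(
        member: &[(&'a str, &'a str)],
        root: &[(&'a str, &'a str)],
    ) -> Vec<(&'a str, &'a str, bool)> {
        let mut seen = HashSet::new();
        let mut out = Vec::new();
        for (is_root, tasks) in [(false, member), (true, root)] {
            for (name, def) in tasks {
                if seen.insert(*name) {
                    out.push((*name, *def, is_root));
                }
            }
        }
        out
    }
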
diff --git a/cli/tools/test/mod.rs b/cli/tools/test/mod.rs
index 88b539470..7042a82b9 100644
--- a/cli/tools/test/mod.rs
+++ b/cli/tools/test/mod.rs
@@ -1705,11 +1705,17 @@ fn collect_specifiers_with_test_mode(
async fn fetch_specifiers_with_test_mode(
cli_options: &CliOptions,
file_fetcher: &FileFetcher,
- files: FilePatterns,
+ member_patterns: impl Iterator<Item = FilePatterns>,
doc: &bool,
) -> Result<Vec<(ModuleSpecifier, TestMode)>, AnyError> {
- let mut specifiers_with_mode =
- collect_specifiers_with_test_mode(cli_options, files, doc)?;
+ let mut specifiers_with_mode = member_patterns
+ .map(|files| {
+ collect_specifiers_with_test_mode(cli_options, files.clone(), doc)
+ })
+ .collect::<Result<Vec<_>, _>>()?
+ .into_iter()
+ .flatten()
+ .collect::<Vec<_>>();
for (specifier, mode) in &mut specifiers_with_mode {
let file = file_fetcher
@@ -1731,7 +1737,8 @@ pub async fn run_tests(
) -> Result<(), AnyError> {
let factory = CliFactory::from_flags(flags)?;
let cli_options = factory.cli_options();
- let test_options = cli_options.resolve_test_options(test_flags)?;
+ let workspace_test_options =
+ cli_options.resolve_workspace_test_options(&test_flags);
let file_fetcher = factory.file_fetcher()?;
// Various test files should not share the same permissions in terms of
// `PermissionsContainer` - otherwise granting/revoking permissions in one
@@ -1740,15 +1747,17 @@ pub async fn run_tests(
Permissions::from_options(&cli_options.permissions_options()?)?;
let log_level = cli_options.log_level();
+ let members_with_test_options =
+ cli_options.resolve_test_options_for_members(&test_flags)?;
let specifiers_with_mode = fetch_specifiers_with_test_mode(
cli_options,
file_fetcher,
- test_options.files.clone(),
- &test_options.doc,
+ members_with_test_options.into_iter().map(|(_, v)| v.files),
+ &workspace_test_options.doc,
)
.await?;
- if !test_options.allow_none && specifiers_with_mode.is_empty() {
+ if !workspace_test_options.allow_none && specifiers_with_mode.is_empty() {
return Err(generic_error("No test modules found"));
}
@@ -1761,7 +1770,7 @@ pub async fn run_tests(
)
.await?;
- if test_options.no_run {
+ if workspace_test_options.no_run {
return Ok(());
}
@@ -1787,16 +1796,16 @@ pub async fn run_tests(
))
},
)?,
- concurrent_jobs: test_options.concurrent_jobs,
- fail_fast: test_options.fail_fast,
+ concurrent_jobs: workspace_test_options.concurrent_jobs,
+ fail_fast: workspace_test_options.fail_fast,
log_level,
- filter: test_options.filter.is_some(),
- reporter: test_options.reporter,
- junit_path: test_options.junit_path,
+ filter: workspace_test_options.filter.is_some(),
+ reporter: workspace_test_options.reporter,
+ junit_path: workspace_test_options.junit_path,
specifier: TestSpecifierOptions {
- filter: TestFilter::from_flag(&test_options.filter),
- shuffle: test_options.shuffle,
- trace_leaks: test_options.trace_leaks,
+ filter: TestFilter::from_flag(&workspace_test_options.filter),
+ shuffle: workspace_test_options.shuffle,
+ trace_leaks: workspace_test_options.trace_leaks,
},
},
)
@@ -1838,34 +1847,47 @@ pub async fn run_tests_with_watch(
let factory = CliFactoryBuilder::new()
.build_from_flags_for_watcher(flags, watcher_communicator.clone())?;
let cli_options = factory.cli_options();
- let test_options = cli_options.resolve_test_options(test_flags)?;
+ let workspace_test_options =
+ cli_options.resolve_workspace_test_options(&test_flags);
let _ = watcher_communicator.watch_paths(cli_options.watch_paths());
- if let Some(set) = &test_options.files.include {
- let watch_paths = set.base_paths();
- if !watch_paths.is_empty() {
- let _ = watcher_communicator.watch_paths(watch_paths);
- }
- }
-
let graph_kind = cli_options.type_check_mode().as_graph_kind();
let log_level = cli_options.log_level();
let cli_options = cli_options.clone();
let module_graph_creator = factory.module_graph_creator().await?;
let file_fetcher = factory.file_fetcher()?;
- let test_modules = if test_options.doc {
- collect_specifiers(
- test_options.files.clone(),
- cli_options.vendor_dir_path().map(ToOwned::to_owned),
- |e| is_supported_test_ext(e.path),
- )
- } else {
- collect_specifiers(
- test_options.files.clone(),
- cli_options.vendor_dir_path().map(ToOwned::to_owned),
- is_supported_test_path_predicate,
- )
- }?;
+ let members_with_test_options =
+ cli_options.resolve_test_options_for_members(&test_flags)?;
+ let watch_paths = members_with_test_options
+ .iter()
+ .filter_map(|(_, test_options)| {
+ test_options
+ .files
+ .include
+ .as_ref()
+ .map(|set| set.base_paths())
+ })
+ .flatten()
+ .collect::<Vec<_>>();
+ let _ = watcher_communicator.watch_paths(watch_paths);
+ let test_modules = members_with_test_options
+ .iter()
+ .map(|(_, test_options)| {
+ collect_specifiers(
+ test_options.files.clone(),
+ cli_options.vendor_dir_path().map(ToOwned::to_owned),
+ if workspace_test_options.doc {
+ Box::new(|e: WalkEntry| is_supported_test_ext(e.path))
+ as Box<dyn Fn(WalkEntry) -> bool>
+ } else {
+ Box::new(is_supported_test_path_predicate)
+ },
+ )
+ })
+ .collect::<Result<Vec<_>, _>>()?
+ .into_iter()
+ .flatten()
+ .collect::<Vec<_>>();
let permissions =
Permissions::from_options(&cli_options.permissions_options()?)?;
@@ -1898,8 +1920,8 @@ pub async fn run_tests_with_watch(
let specifiers_with_mode = fetch_specifiers_with_test_mode(
&cli_options,
file_fetcher,
- test_options.files.clone(),
- &test_options.doc,
+ members_with_test_options.into_iter().map(|(_, v)| v.files),
+ &workspace_test_options.doc,
)
.await?
.into_iter()
@@ -1915,7 +1937,7 @@ pub async fn run_tests_with_watch(
)
.await?;
- if test_options.no_run {
+ if workspace_test_options.no_run {
return Ok(());
}
@@ -1938,16 +1960,16 @@ pub async fn run_tests_with_watch(
))
},
)?,
- concurrent_jobs: test_options.concurrent_jobs,
- fail_fast: test_options.fail_fast,
+ concurrent_jobs: workspace_test_options.concurrent_jobs,
+ fail_fast: workspace_test_options.fail_fast,
log_level,
- filter: test_options.filter.is_some(),
- reporter: test_options.reporter,
- junit_path: test_options.junit_path,
+ filter: workspace_test_options.filter.is_some(),
+ reporter: workspace_test_options.reporter,
+ junit_path: workspace_test_options.junit_path,
specifier: TestSpecifierOptions {
- filter: TestFilter::from_flag(&test_options.filter),
- shuffle: test_options.shuffle,
- trace_leaks: test_options.trace_leaks,
+ filter: TestFilter::from_flag(&workspace_test_options.filter),
+ shuffle: workspace_test_options.shuffle,
+ trace_leaks: workspace_test_options.trace_leaks,
},
},
)
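
The collect-then-flatten idiom used for the per-member collection here (and in the analogous bench changes) is worth seeing in isolation: collecting into `Result<Vec<_>, _>` is what lets `?` fail fast on the first member's error before the flatten merges the remaining results. A generic sketch:

    fn flatten_member_results<T, E>(
        members: impl Iterator<Item = Result<Vec<T>, E>>,
    ) -> Result<Vec<T>, E> {
        // Fail on the first Err; otherwise merge the per-member Vecs.
        Ok(
            members
                .collect::<Result<Vec<_>, E>>()?
                .into_iter()
                .flatten()
                .collect(),
        )
    }
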
diff --git a/cli/tools/vendor/build.rs b/cli/tools/vendor/build.rs
index 5aef63192..a4424e3f3 100644
--- a/cli/tools/vendor/build.rs
+++ b/cli/tools/vendor/build.rs
@@ -81,8 +81,8 @@ pub async fn build<
build_graph,
parsed_source_cache,
output_dir,
- maybe_original_import_map: original_import_map,
- maybe_jsx_import_source: jsx_import_source,
+ maybe_original_import_map,
+ maybe_jsx_import_source,
resolver,
environment,
} = input;
@@ -90,12 +90,12 @@ pub async fn build<
let output_dir_specifier =
ModuleSpecifier::from_directory_path(output_dir).unwrap();
- if let Some(original_im) = &original_import_map {
+ if let Some(original_im) = &maybe_original_import_map {
validate_original_import_map(original_im, &output_dir_specifier)?;
}
// add the jsx import source to the entry points to ensure it is always vendored
- if let Some(jsx_import_source) = jsx_import_source {
+ if let Some(jsx_import_source) = maybe_jsx_import_source {
if let Some(specifier_text) = jsx_import_source.maybe_specifier_text() {
if let Ok(specifier) = resolver.resolve(
&specifier_text,
@@ -171,8 +171,8 @@ pub async fn build<
graph: &graph,
modules: &all_modules,
mappings: &mappings,
- original_import_map,
- jsx_import_source,
+ maybe_original_import_map,
+ maybe_jsx_import_source,
resolver,
parsed_source_cache,
})?;
diff --git a/cli/tools/vendor/import_map.rs b/cli/tools/vendor/import_map.rs
index 68f2530d7..644e84a7b 100644
--- a/cli/tools/vendor/import_map.rs
+++ b/cli/tools/vendor/import_map.rs
@@ -59,7 +59,7 @@ impl<'a> ImportMapBuilder<'a> {
pub fn into_import_map(
self,
- original_import_map: Option<&ImportMap>,
+ maybe_original_import_map: Option<&ImportMap>,
) -> ImportMap {
fn get_local_imports(
new_relative_path: &str,
@@ -99,7 +99,7 @@ impl<'a> ImportMapBuilder<'a> {
let mut import_map = ImportMap::new(self.base_dir.clone());
- if let Some(original_im) = original_import_map {
+ if let Some(original_im) = maybe_original_import_map {
let original_base_dir = ModuleSpecifier::from_directory_path(
original_im
.base_url()
@@ -183,8 +183,8 @@ pub struct BuildImportMapInput<'a> {
pub modules: &'a [&'a Module],
pub graph: &'a ModuleGraph,
pub mappings: &'a Mappings,
- pub original_import_map: Option<&'a ImportMap>,
- pub jsx_import_source: Option<&'a JsxImportSourceConfig>,
+ pub maybe_original_import_map: Option<&'a ImportMap>,
+ pub maybe_jsx_import_source: Option<&'a JsxImportSourceConfig>,
pub resolver: &'a dyn deno_graph::source::Resolver,
pub parsed_source_cache: &'a ParsedSourceCache,
}
@@ -197,8 +197,8 @@ pub fn build_import_map(
modules,
graph,
mappings,
- original_import_map,
- jsx_import_source,
+ maybe_original_import_map,
+ maybe_jsx_import_source,
resolver,
parsed_source_cache,
} = input;
@@ -212,7 +212,7 @@ pub fn build_import_map(
}
// add the jsx import source to the destination import map, if mapped in the original import map
- if let Some(jsx_import_source) = jsx_import_source {
+ if let Some(jsx_import_source) = maybe_jsx_import_source {
if let Some(specifier_text) = jsx_import_source.maybe_specifier_text() {
if let Ok(resolved_url) = resolver.resolve(
&specifier_text,
@@ -228,7 +228,7 @@ pub fn build_import_map(
}
}
- Ok(builder.into_import_map(original_import_map).to_json())
+ Ok(builder.into_import_map(maybe_original_import_map).to_json())
}
fn visit_modules(
diff --git a/cli/tools/vendor/mod.rs b/cli/tools/vendor/mod.rs
index a8d8000d8..2dfa71c44 100644
--- a/cli/tools/vendor/mod.rs
+++ b/cli/tools/vendor/mod.rs
@@ -48,10 +48,17 @@ pub async fn vendor(
validate_options(&mut cli_options, &output_dir)?;
let factory = CliFactory::from_cli_options(Arc::new(cli_options));
let cli_options = factory.cli_options();
+ if cli_options.workspace.config_folders().len() > 1 {
+ bail!("deno vendor is not supported in a workspace. Set `\"vendor\": true` in the workspace deno.json file instead");
+ }
let entry_points =
resolve_entry_points(&vendor_flags, cli_options.initial_cwd())?;
- let jsx_import_source = cli_options.to_maybe_jsx_import_source_config()?;
+ let jsx_import_source =
+ cli_options.workspace.to_maybe_jsx_import_source_config()?;
let module_graph_creator = factory.module_graph_creator().await?.clone();
+ let workspace_resolver = factory.workspace_resolver().await?;
+ let root_folder = cli_options.workspace.root_folder().1;
+ let maybe_config_file = root_folder.deno_json.as_ref();
let output = build::build(build::BuildInput {
entry_points,
build_graph: move |entry_points| {
@@ -64,7 +71,7 @@ pub async fn vendor(
},
parsed_source_cache: factory.parsed_source_cache(),
output_dir: &output_dir,
- maybe_original_import_map: factory.maybe_import_map().await?.as_deref(),
+ maybe_original_import_map: workspace_resolver.maybe_import_map(),
maybe_jsx_import_source: jsx_import_source.as_ref(),
resolver: factory.resolver().await?.as_graph_resolver(),
environment: &build::RealVendorEnvironment,
@@ -91,7 +98,7 @@ pub async fn vendor(
let try_add_import_map = vendored_count > 0;
let modified_result = maybe_update_config_file(
&output_dir,
- cli_options,
+ maybe_config_file,
try_add_import_map,
try_add_node_modules_dir,
);
@@ -100,8 +107,9 @@ pub async fn vendor(
if modified_result.added_node_modules_dir {
let node_modules_path =
cli_options.node_modules_dir_path().cloned().or_else(|| {
- cli_options
- .maybe_config_file_specifier()
+ maybe_config_file
+ .as_ref()
+ .map(|d| &d.specifier)
.filter(|c| c.scheme() == "file")
.and_then(|c| c.to_file_path().ok())
.map(|config_path| config_path.parent().unwrap().join("node_modules"))
@@ -176,7 +184,7 @@ fn validate_options(
let import_map_specifier = options
.resolve_specified_import_map_specifier()?
.or_else(|| {
- let config_file = options.maybe_config_file().as_ref()?;
+ let config_file = options.workspace.root_folder().1.deno_json.as_ref()?;
config_file
.to_import_map_specifier()
.ok()
@@ -229,12 +237,12 @@ fn validate_options(
fn maybe_update_config_file(
output_dir: &Path,
- options: &CliOptions,
+ maybe_config_file: Option<&Arc<ConfigFile>>,
try_add_import_map: bool,
try_add_node_modules_dir: bool,
) -> ModifiedResult {
assert!(output_dir.is_absolute());
- let config_file = match options.maybe_config_file() {
+ let config_file = match maybe_config_file {
Some(config_file) => config_file,
None => return ModifiedResult::default(),
};
@@ -245,7 +253,6 @@ fn maybe_update_config_file(
let fmt_config_options = config_file
.to_fmt_config()
.ok()
- .flatten()
.map(|config| config.options)
.unwrap_or_default();
let result = update_config_file(
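
The node_modules fallback above prefers an explicitly resolved path and otherwise derives one beside the local config file. A std-only sketch, with `config_path` standing in for the config file's `file:` specifier converted to a path:

    use std::path::{Path, PathBuf};

    fn node_modules_dir(
        configured: Option<PathBuf>,
        config_path: Option<&Path>,
    ) -> Option<PathBuf> {
        configured.or_else(|| {
            config_path
                .and_then(|p| p.parent())
                .map(|dir| dir.join("node_modules"))
        })
    }
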
diff --git a/cli/tools/vendor/test.rs b/cli/tools/vendor/test.rs
index 830d5f8f0..ac07c47d1 100644
--- a/cli/tools/vendor/test.rs
+++ b/cli/tools/vendor/test.rs
@@ -8,6 +8,7 @@ use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
+use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
@@ -182,7 +183,7 @@ pub struct VendorOutput {
pub struct VendorTestBuilder {
entry_points: Vec<ModuleSpecifier>,
loader: TestLoader,
- original_import_map: Option<ImportMap>,
+ maybe_original_import_map: Option<ImportMap>,
environment: TestVendorEnvironment,
jsx_import_source_config: Option<JsxImportSourceConfig>,
}
@@ -207,7 +208,7 @@ impl VendorTestBuilder {
&mut self,
import_map: ImportMap,
) -> &mut Self {
- self.original_import_map = Some(import_map);
+ self.maybe_original_import_map = Some(import_map);
self
}
@@ -234,7 +235,7 @@ impl VendorTestBuilder {
let parsed_source_cache = ParsedSourceCache::default();
let resolver = Arc::new(build_resolver(
self.jsx_import_source_config.clone(),
- self.original_import_map.clone(),
+ self.maybe_original_import_map.clone(),
));
super::build::build(super::build::BuildInput {
entry_points,
@@ -257,7 +258,7 @@ impl VendorTestBuilder {
},
parsed_source_cache: &parsed_source_cache,
output_dir: &output_dir,
- maybe_original_import_map: self.original_import_map.as_ref(),
+ maybe_original_import_map: self.maybe_original_import_map.as_ref(),
maybe_jsx_import_source: self.jsx_import_source_config.as_ref(),
resolver: resolver.as_graph_resolver(),
environment: &self.environment,
@@ -287,15 +288,18 @@ impl VendorTestBuilder {
fn build_resolver(
maybe_jsx_import_source_config: Option<JsxImportSourceConfig>,
- original_import_map: Option<ImportMap>,
+ maybe_original_import_map: Option<ImportMap>,
) -> CliGraphResolver {
CliGraphResolver::new(CliGraphResolverOptions {
node_resolver: None,
npm_resolver: None,
sloppy_imports_resolver: None,
- package_json_deps_provider: Default::default(),
+ workspace_resolver: Arc::new(WorkspaceResolver::new_raw(
+ maybe_original_import_map,
+ Vec::new(),
+ deno_config::workspace::PackageJsonDepResolution::Enabled,
+ )),
maybe_jsx_import_source_config,
- maybe_import_map: original_import_map.map(Arc::new),
maybe_vendor_dir: None,
bare_node_builtins_enabled: false,
})