Diffstat (limited to 'cli')
-rw-r--r--  cli/Cargo.toml                        |    2
-rw-r--r--  cli/args/flags.rs                     |  160
-rw-r--r--  cli/args/import_map.rs                |  118
-rw-r--r--  cli/args/mod.rs                       | 1027
-rw-r--r--  cli/args/package_json.rs              |  128
-rw-r--r--  cli/factory.rs                        |  205
-rw-r--r--  cli/graph_container.rs                |    2
-rw-r--r--  cli/graph_util.rs                     |   38
-rw-r--r--  cli/lsp/config.rs                     |   34
-rw-r--r--  cli/lsp/diagnostics.rs                |    2
-rw-r--r--  cli/lsp/documents.rs                  |    4
-rw-r--r--  cli/lsp/language_server.rs            |   26
-rw-r--r--  cli/lsp/resolver.rs                   |   34
-rw-r--r--  cli/lsp/tsc.rs                        |    2
-rw-r--r--  cli/module_loader.rs                  |   12
-rw-r--r--  cli/npm/byonm.rs                      |   20
-rw-r--r--  cli/npm/managed/mod.rs                |   23
-rw-r--r--  cli/npm/managed/resolvers/local.rs    |   22
-rw-r--r--  cli/npm/managed/resolvers/mod.rs      |    4
-rw-r--r--  cli/resolver.rs                       |  341
-rw-r--r--  cli/schemas/config-file.v1.json       |    2
-rw-r--r--  cli/standalone/binary.rs              |  313
-rw-r--r--  cli/standalone/mod.rs                 |  449
-rw-r--r--  cli/standalone/virtual_fs.rs          |   41
-rw-r--r--  cli/tools/bench/mod.rs                |   93
-rw-r--r--  cli/tools/check.rs                    |    2
-rw-r--r--  cli/tools/compile.rs                  |  113
-rw-r--r--  cli/tools/doc.rs                      |   51
-rw-r--r--  cli/tools/fmt.rs                      |  471
-rw-r--r--  cli/tools/info.rs                     |   23
-rw-r--r--  cli/tools/lint/mod.rs                 |  429
-rw-r--r--  cli/tools/registry/mod.rs             |  111
-rw-r--r--  cli/tools/registry/pm.rs              |   25
-rw-r--r--  cli/tools/registry/publish_order.rs   |   21
-rw-r--r--  cli/tools/registry/unfurl.rs          |   70
-rw-r--r--  cli/tools/task.rs                     |  319
-rw-r--r--  cli/tools/test/mod.rs                 |  118
-rw-r--r--  cli/tools/vendor/build.rs             |   12
-rw-r--r--  cli/tools/vendor/import_map.rs        |   16
-rw-r--r--  cli/tools/vendor/mod.rs               |   25
-rw-r--r--  cli/tools/vendor/test.rs              |   18
-rw-r--r--  cli/util/collections.rs               |   38
-rw-r--r--  cli/util/file_watcher.rs              |    3
-rw-r--r--  cli/util/mod.rs                       |    1
-rw-r--r--  cli/worker.rs                         |   26
45 files changed, 2737 insertions(+), 2257 deletions(-)
diff --git a/cli/Cargo.toml b/cli/Cargo.toml
index ff82fc3cc..31232e093 100644
--- a/cli/Cargo.toml
+++ b/cli/Cargo.toml
@@ -65,7 +65,7 @@ winres.workspace = true
[dependencies]
deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
deno_cache_dir = { workspace = true }
-deno_config = { workspace = true, features = ["deno_json", "package_json"] }
+deno_config = { workspace = true, features = ["workspace"] }
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = { version = "=0.141.0", features = ["html", "syntect"] }
deno_emit = "=0.43.0"
diff --git a/cli/args/flags.rs b/cli/args/flags.rs
index 5f58911c2..56fb4f09d 100644
--- a/cli/args/flags.rs
+++ b/cli/args/flags.rs
@@ -9,11 +9,13 @@ use clap::ArgMatches;
use clap::ColorChoice;
use clap::Command;
use clap::ValueHint;
+use deno_config::glob::FilePatterns;
use deno_config::glob::PathOrPatternSet;
use deno_config::ConfigFlag;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
+use deno_core::normalize_path;
use deno_core::resolve_url_or_path;
use deno_core::url::Url;
use deno_graph::GraphKind;
@@ -34,6 +36,7 @@ use std::path::PathBuf;
use std::str::FromStr;
use crate::args::resolve_no_prompt;
+use crate::util::collections::CheckedSet;
use crate::util::fs::canonicalize_path;
use super::flags_net;
@@ -45,6 +48,29 @@ pub struct FileFlags {
pub include: Vec<String>,
}
+impl FileFlags {
+ pub fn as_file_patterns(
+ &self,
+ base: &Path,
+ ) -> Result<FilePatterns, AnyError> {
+ Ok(FilePatterns {
+ include: if self.include.is_empty() {
+ None
+ } else {
+ Some(PathOrPatternSet::from_include_relative_path_or_patterns(
+ base,
+ &self.include,
+ )?)
+ },
+ exclude: PathOrPatternSet::from_exclude_relative_path_or_patterns(
+ base,
+ &self.ignore,
+ )?,
+ base: base.to_path_buf(),
+ })
+ }
+}
+
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct AddFlags {
pub packages: Vec<String>,
@@ -156,7 +182,7 @@ pub struct EvalFlags {
pub code: String,
}
-#[derive(Clone, Debug, Eq, PartialEq)]
+#[derive(Clone, Default, Debug, Eq, PartialEq)]
pub struct FmtFlags {
pub check: bool,
pub files: FileFlags,
@@ -235,7 +261,7 @@ pub struct UninstallFlags {
pub kind: UninstallKind,
}
-#[derive(Clone, Debug, Eq, PartialEq)]
+#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct LintFlags {
pub files: FileFlags,
pub rules: bool,
@@ -323,7 +349,7 @@ pub struct TaskFlags {
pub task: Option<String>,
}
-#[derive(Clone, Debug, Default, Eq, PartialEq)]
+#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
pub enum TestReporterConfig {
#[default]
Pretty,
@@ -838,30 +864,54 @@ impl Flags {
args
}
- /// Extract path arguments for config search paths.
- /// If it returns Some(vec), the config should be discovered
- /// from the passed `current_dir` after trying to discover from each entry in
- /// the returned vector.
- /// If it returns None, the config file shouldn't be discovered at all.
+ /// Extract the directory paths the config file should be discovered from.
+ ///
+ /// Returns `None` if the config file should not be auto-discovered.
pub fn config_path_args(&self, current_dir: &Path) -> Option<Vec<PathBuf>> {
- use DenoSubcommand::*;
+ fn resolve_multiple_files(
+ files: &[String],
+ current_dir: &Path,
+ ) -> Vec<PathBuf> {
+ let mut seen = CheckedSet::with_capacity(files.len());
+ let result = files
+ .iter()
+ .filter_map(|p| {
+ let path = normalize_path(current_dir.join(p).parent()?);
+ if seen.insert(&path) {
+ Some(path)
+ } else {
+ None
+ }
+ })
+ .collect::<Vec<_>>();
+ if result.is_empty() {
+ vec![current_dir.to_path_buf()]
+ } else {
+ result
+ }
+ }
+ use DenoSubcommand::*;
match &self.subcommand {
Fmt(FmtFlags { files, .. }) => {
- Some(files.include.iter().map(|p| current_dir.join(p)).collect())
+ Some(resolve_multiple_files(&files.include, current_dir))
}
Lint(LintFlags { files, .. }) => {
- Some(files.include.iter().map(|p| current_dir.join(p)).collect())
+ Some(resolve_multiple_files(&files.include, current_dir))
}
- Run(RunFlags { script, .. }) => {
+ Run(RunFlags { script, .. })
+ | Compile(CompileFlags {
+ source_file: script,
+ ..
+ }) => {
if let Ok(module_specifier) = resolve_url_or_path(script, current_dir) {
if module_specifier.scheme() == "file"
|| module_specifier.scheme() == "npm"
{
if let Ok(p) = module_specifier.to_file_path() {
- Some(vec![p])
+ Some(vec![p.parent().unwrap().to_path_buf()])
} else {
- Some(vec![])
+ Some(vec![current_dir.to_path_buf()])
}
} else {
// When the entrypoint doesn't have file: scheme (it's the remote
@@ -869,7 +919,7 @@ impl Flags {
None
}
} else {
- Some(vec![])
+ Some(vec![current_dir.to_path_buf()])
}
}
Task(TaskFlags {
@@ -880,57 +930,10 @@ impl Flags {
// `--cwd` when specified
match canonicalize_path(&PathBuf::from(path)) {
Ok(path) => Some(vec![path]),
- Err(_) => Some(vec![]),
- }
- }
- _ => Some(vec![]),
- }
- }
-
- /// Extract path argument for `package.json` search paths.
- /// If it returns Some(path), the `package.json` should be discovered
- /// from the `path` dir.
- /// If it returns None, the `package.json` file shouldn't be discovered at
- /// all.
- pub fn package_json_search_dir(&self, current_dir: &Path) -> Option<PathBuf> {
- use DenoSubcommand::*;
-
- match &self.subcommand {
- Run(RunFlags { script, .. }) | Serve(ServeFlags { script, .. }) => {
- let module_specifier = resolve_url_or_path(script, current_dir).ok()?;
- if module_specifier.scheme() == "file" {
- let p = module_specifier
- .to_file_path()
- .unwrap()
- .parent()?
- .to_owned();
- Some(p)
- } else if module_specifier.scheme() == "npm" {
- Some(current_dir.to_path_buf())
- } else {
- None
- }
- }
- Task(TaskFlags { cwd: Some(cwd), .. }) => {
- resolve_url_or_path(cwd, current_dir)
- .ok()?
- .to_file_path()
- .ok()
- }
- Task(_) | Check(_) | Coverage(_) | Cache(_) | Info(_) | Eval(_)
- | Test(_) | Bench(_) | Repl(_) | Compile(_) | Publish(_) => {
- Some(current_dir.to_path_buf())
- }
- Add(_) | Bundle(_) | Completions(_) | Doc(_) | Fmt(_) | Init(_)
- | Uninstall(_) | Jupyter(_) | Lsp | Lint(_) | Types | Upgrade(_)
- | Vendor(_) => None,
- Install(_) => {
- if *DENO_FUTURE {
- Some(current_dir.to_path_buf())
- } else {
- None
+ Err(_) => Some(vec![current_dir.to_path_buf()]),
}
}
+ _ => Some(vec![current_dir.to_path_buf()]),
}
}
@@ -9271,7 +9274,15 @@ mod tests {
fn test_config_path_args() {
let flags = flags_from_vec(svec!["deno", "run", "foo.js"]).unwrap();
let cwd = std::env::current_dir().unwrap();
- assert_eq!(flags.config_path_args(&cwd), Some(vec![cwd.join("foo.js")]));
+
+ assert_eq!(flags.config_path_args(&cwd), Some(vec![cwd.clone()]));
+
+ let flags = flags_from_vec(svec!["deno", "run", "sub_dir/foo.js"]).unwrap();
+ let cwd = std::env::current_dir().unwrap();
+ assert_eq!(
+ flags.config_path_args(&cwd),
+ Some(vec![cwd.join("sub_dir").clone()])
+ );
let flags =
flags_from_vec(svec!["deno", "run", "https://example.com/foo.js"])
@@ -9279,20 +9290,27 @@ mod tests {
assert_eq!(flags.config_path_args(&cwd), None);
let flags =
- flags_from_vec(svec!["deno", "lint", "dir/a.js", "dir/b.js"]).unwrap();
+ flags_from_vec(svec!["deno", "lint", "dir/a/a.js", "dir/b/b.js"])
+ .unwrap();
assert_eq!(
flags.config_path_args(&cwd),
- Some(vec![cwd.join("dir/a.js"), cwd.join("dir/b.js")])
+ Some(vec![cwd.join("dir/a/"), cwd.join("dir/b/")])
);
let flags = flags_from_vec(svec!["deno", "lint"]).unwrap();
- assert!(flags.config_path_args(&cwd).unwrap().is_empty());
+ assert_eq!(flags.config_path_args(&cwd), Some(vec![cwd.clone()]));
- let flags =
- flags_from_vec(svec!["deno", "fmt", "dir/a.js", "dir/b.js"]).unwrap();
+ let flags = flags_from_vec(svec![
+ "deno",
+ "fmt",
+ "dir/a/a.js",
+ "dir/a/a2.js",
+ "dir/b.js"
+ ])
+ .unwrap();
assert_eq!(
flags.config_path_args(&cwd),
- Some(vec![cwd.join("dir/a.js"), cwd.join("dir/b.js")])
+ Some(vec![cwd.join("dir/a/"), cwd.join("dir/")])
);
}
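
[Editor's note] The reworked `config_path_args` above deduplicates the parent directories of the file arguments and falls back to the cwd. A minimal standalone sketch of that logic, using `std::collections::HashSet` in place of the internal `CheckedSet` and plain path joining in place of `normalize_path`:

use std::collections::HashSet;
use std::path::{Path, PathBuf};

/// Collect the unique parent directories of the given file arguments,
/// falling back to `current_dir` when no usable directory is found.
fn resolve_multiple_files(files: &[String], current_dir: &Path) -> Vec<PathBuf> {
    let mut seen: HashSet<PathBuf> = HashSet::with_capacity(files.len());
    let result: Vec<PathBuf> = files
        .iter()
        .filter_map(|p| {
            // Parent directory of each file argument, joined onto the cwd.
            let path = current_dir.join(p).parent()?.to_path_buf();
            seen.insert(path.clone()).then_some(path)
        })
        .collect();
    if result.is_empty() {
        // No usable directories: fall back to the current directory.
        vec![current_dir.to_path_buf()]
    } else {
        result
    }
}

fn main() {
    let cwd = Path::new("/work");
    let files = vec![
        "dir/a/a.js".to_string(),
        "dir/a/a2.js".to_string(),
        "dir/b.js".to_string(),
    ];
    // Prints ["/work/dir/a", "/work/dir"], matching the expectations in
    // the updated test_config_path_args test above.
    println!("{:?}", resolve_multiple_files(&files, cwd));
}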
diff --git a/cli/args/import_map.rs b/cli/args/import_map.rs
index 2dc5a21d1..7a16ab215 100644
--- a/cli/args/import_map.rs
+++ b/cli/args/import_map.rs
@@ -1,127 +1,25 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_runtime::deno_permissions::PermissionsContainer;
-use import_map::ImportMap;
-use import_map::ImportMapDiagnostic;
-use log::warn;
-use super::ConfigFile;
use crate::file_fetcher::FileFetcher;
-pub async fn resolve_import_map(
- specified_specifier: Option<&Url>,
- maybe_config_file: Option<&ConfigFile>,
+pub async fn resolve_import_map_value_from_specifier(
+ specifier: &Url,
file_fetcher: &FileFetcher,
-) -> Result<Option<ImportMap>, AnyError> {
- if let Some(specifier) = specified_specifier {
- resolve_import_map_from_specifier(specifier.clone(), file_fetcher)
- .await
- .with_context(|| format!("Unable to load '{}' import map", specifier))
- .map(Some)
- } else if let Some(config_file) = maybe_config_file {
- let maybe_url_and_value = config_file
- .to_import_map_value(|specifier| {
- let specifier = specifier.clone();
- async move {
- let file = file_fetcher
- .fetch(&specifier, &PermissionsContainer::allow_all())
- .await?
- .into_text_decoded()?;
- Ok(file.source.to_string())
- }
- })
- .await
- .with_context(|| {
- format!(
- "Unable to resolve import map in '{}'",
- config_file.specifier
- )
- })?;
- match maybe_url_and_value {
- Some((url, value)) => {
- import_map_from_value(url.into_owned(), value).map(Some)
- }
- None => Ok(None),
- }
- } else {
- Ok(None)
- }
-}
-
-async fn resolve_import_map_from_specifier(
- specifier: Url,
- file_fetcher: &FileFetcher,
-) -> Result<ImportMap, AnyError> {
- let value: serde_json::Value = if specifier.scheme() == "data" {
+) -> Result<serde_json::Value, AnyError> {
+ if specifier.scheme() == "data" {
let data_url_text =
- deno_graph::source::RawDataUrl::parse(&specifier)?.decode()?;
- serde_json::from_str(&data_url_text)?
+ deno_graph::source::RawDataUrl::parse(specifier)?.decode()?;
+ Ok(serde_json::from_str(&data_url_text)?)
} else {
let file = file_fetcher
- .fetch(&specifier, &PermissionsContainer::allow_all())
+ .fetch(specifier, &PermissionsContainer::allow_all())
.await?
.into_text_decoded()?;
- serde_json::from_str(&file.source)?
- };
- import_map_from_value(specifier, value)
-}
-
-pub fn import_map_from_value(
- specifier: Url,
- json_value: serde_json::Value,
-) -> Result<ImportMap, AnyError> {
- debug_assert!(
- !specifier.as_str().contains("../"),
- "Import map specifier incorrectly contained ../: {}",
- specifier.as_str()
- );
- let result = import_map::parse_from_value(specifier, json_value)?;
- print_import_map_diagnostics(&result.diagnostics);
- Ok(result.import_map)
-}
-
-fn print_import_map_diagnostics(diagnostics: &[ImportMapDiagnostic]) {
- if !diagnostics.is_empty() {
- warn!(
- "Import map diagnostics:\n{}",
- diagnostics
- .iter()
- .map(|d| format!(" - {d}"))
- .collect::<Vec<_>>()
- .join("\n")
- );
+ Ok(serde_json::from_str(&file.source)?)
}
}
-
-pub fn enhance_import_map_value_with_workspace_members(
- mut import_map_value: serde_json::Value,
- workspace_members: &[deno_config::WorkspaceMemberConfig],
-) -> serde_json::Value {
- let mut imports =
- if let Some(imports) = import_map_value.get("imports").as_ref() {
- imports.as_object().unwrap().clone()
- } else {
- serde_json::Map::new()
- };
-
- for workspace_member in workspace_members {
- let name = &workspace_member.package_name;
- let version = &workspace_member.package_version;
- // Don't override existings, explicit imports
- if imports.contains_key(name) {
- continue;
- }
-
- imports.insert(
- name.to_string(),
- serde_json::Value::String(format!("jsr:{}@^{}", name, version)),
- );
- }
-
- import_map_value["imports"] = serde_json::Value::Object(imports);
- ::import_map::ext::expand_import_map_value(import_map_value)
-}
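
[Editor's note] The new `resolve_import_map_value_from_specifier` returns the raw JSON value and leaves `ImportMap` construction to the workspace resolver. A simplified sketch of its `data:` URL branch, assuming a plain (non-percent-encoded, non-base64) JSON payload rather than the full `deno_graph::source::RawDataUrl` handling; `serde_json = "1"` is the only Cargo dependency:

use serde_json::Value;

/// Simplified stand-in for the data: URL branch of
/// resolve_import_map_value_from_specifier. Every other scheme goes
/// through the FileFetcher in the real code.
fn import_map_value_from_data_url(url: &str) -> Result<Value, Box<dyn std::error::Error>> {
    // Assumes a plain-text payload; real data: URLs may be
    // percent-encoded or base64-encoded, which RawDataUrl handles.
    let payload = url
        .strip_prefix("data:application/json,")
        .ok_or("unsupported data URL")?;
    Ok(serde_json::from_str(payload)?)
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let url = r#"data:application/json,{"imports":{"std/":"https://deno.land/std/"}}"#;
    let value = import_map_value_from_data_url(url)?;
    assert_eq!(value["imports"]["std/"], "https://deno.land/std/");
    Ok(())
}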
diff --git a/cli/args/mod.rs b/cli/args/mod.rs
index bf52c460f..f747271b8 100644
--- a/cli/args/mod.rs
+++ b/cli/args/mod.rs
@@ -5,21 +5,30 @@ mod flags;
mod flags_net;
mod import_map;
mod lockfile;
-pub mod package_json;
+mod package_json;
-pub use self::import_map::resolve_import_map;
-use ::import_map::ImportMap;
use deno_ast::SourceMapOption;
-use deno_config::package_json::PackageJsonDeps;
+use deno_config::workspace::CreateResolverOptions;
+use deno_config::workspace::PackageJsonDepResolution;
+use deno_config::workspace::Workspace;
+use deno_config::workspace::WorkspaceDiscoverOptions;
+use deno_config::workspace::WorkspaceDiscoverStart;
+use deno_config::workspace::WorkspaceMemberContext;
+use deno_config::workspace::WorkspaceResolver;
+use deno_config::WorkspaceLintConfig;
+use deno_core::normalize_path;
use deno_core::resolve_url_or_path;
use deno_graph::GraphKind;
use deno_npm::npm_rc::NpmRc;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmSystemInfo;
+use deno_runtime::deno_fs::DenoConfigFsAdapter;
+use deno_runtime::deno_fs::RealFs;
+use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_semver::npm::NpmPackageReqReference;
-use indexmap::IndexMap;
+use import_map::resolve_import_map_value_from_specifier;
pub use deno_config::glob::FilePatterns;
pub use deno_config::BenchConfig;
@@ -32,10 +41,9 @@ pub use deno_config::TsConfig;
pub use deno_config::TsConfigForEmit;
pub use deno_config::TsConfigType;
pub use deno_config::TsTypeLib;
-pub use deno_config::WorkspaceConfig;
pub use flags::*;
pub use lockfile::CliLockfile;
-pub use package_json::PackageJsonDepsProvider;
+pub use package_json::PackageJsonInstallDepsProvider;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
@@ -68,7 +76,6 @@ use std::path::PathBuf;
use std::sync::Arc;
use thiserror::Error;
-use crate::args::import_map::enhance_import_map_value_with_workspace_members;
use crate::cache;
use crate::file_fetcher::FileFetcher;
use crate::util::fs::canonicalize_path_maybe_not_exists;
@@ -243,37 +250,45 @@ impl CacheSetting {
}
}
-#[derive(Clone, Debug, Eq, PartialEq)]
-pub struct BenchOptions {
- pub files: FilePatterns,
+pub struct WorkspaceBenchOptions {
pub filter: Option<String>,
pub json: bool,
pub no_run: bool,
}
+impl WorkspaceBenchOptions {
+ pub fn resolve(bench_flags: &BenchFlags) -> Self {
+ Self {
+ filter: bench_flags.filter.clone(),
+ json: bench_flags.json,
+ no_run: bench_flags.no_run,
+ }
+ }
+}
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct BenchOptions {
+ pub files: FilePatterns,
+}
+
impl BenchOptions {
pub fn resolve(
- maybe_bench_config: Option<BenchConfig>,
- maybe_bench_flags: Option<BenchFlags>,
- initial_cwd: &Path,
+ bench_config: BenchConfig,
+ bench_flags: &BenchFlags,
+ maybe_flags_base: Option<&Path>,
) -> Result<Self, AnyError> {
- let bench_flags = maybe_bench_flags.unwrap_or_default();
Ok(Self {
files: resolve_files(
- maybe_bench_config.map(|c| c.files),
- Some(bench_flags.files),
- initial_cwd,
+ bench_config.files,
+ &bench_flags.files,
+ maybe_flags_base,
)?,
- filter: bench_flags.filter,
- json: bench_flags.json,
- no_run: bench_flags.no_run,
})
}
}
#[derive(Clone, Debug)]
pub struct FmtOptions {
- pub check: bool,
pub options: FmtOptionsConfig,
pub files: FilePatterns,
}
@@ -287,79 +302,66 @@ impl Default for FmtOptions {
impl FmtOptions {
pub fn new_with_base(base: PathBuf) -> Self {
Self {
- check: false,
options: FmtOptionsConfig::default(),
files: FilePatterns::new_with_base(base),
}
}
pub fn resolve(
- maybe_fmt_config: Option<FmtConfig>,
- maybe_fmt_flags: Option<FmtFlags>,
- initial_cwd: &Path,
+ fmt_config: FmtConfig,
+ fmt_flags: &FmtFlags,
+ maybe_flags_base: Option<&Path>,
) -> Result<Self, AnyError> {
- let (maybe_config_options, maybe_config_files) =
- maybe_fmt_config.map(|c| (c.options, c.files)).unzip();
-
Ok(Self {
- check: maybe_fmt_flags.as_ref().map(|f| f.check).unwrap_or(false),
- options: resolve_fmt_options(
- maybe_fmt_flags.as_ref(),
- maybe_config_options,
- ),
+ options: resolve_fmt_options(fmt_flags, fmt_config.options),
files: resolve_files(
- maybe_config_files,
- maybe_fmt_flags.map(|f| f.files),
- initial_cwd,
+ fmt_config.files,
+ &fmt_flags.files,
+ maybe_flags_base,
)?,
})
}
}
fn resolve_fmt_options(
- fmt_flags: Option<&FmtFlags>,
- options: Option<FmtOptionsConfig>,
+ fmt_flags: &FmtFlags,
+ mut options: FmtOptionsConfig,
) -> FmtOptionsConfig {
- let mut options = options.unwrap_or_default();
-
- if let Some(fmt_flags) = fmt_flags {
- if let Some(use_tabs) = fmt_flags.use_tabs {
- options.use_tabs = Some(use_tabs);
- }
+ if let Some(use_tabs) = fmt_flags.use_tabs {
+ options.use_tabs = Some(use_tabs);
+ }
- if let Some(line_width) = fmt_flags.line_width {
- options.line_width = Some(line_width.get());
- }
+ if let Some(line_width) = fmt_flags.line_width {
+ options.line_width = Some(line_width.get());
+ }
- if let Some(indent_width) = fmt_flags.indent_width {
- options.indent_width = Some(indent_width.get());
- }
+ if let Some(indent_width) = fmt_flags.indent_width {
+ options.indent_width = Some(indent_width.get());
+ }
- if let Some(single_quote) = fmt_flags.single_quote {
- options.single_quote = Some(single_quote);
- }
+ if let Some(single_quote) = fmt_flags.single_quote {
+ options.single_quote = Some(single_quote);
+ }
- if let Some(prose_wrap) = &fmt_flags.prose_wrap {
- options.prose_wrap = Some(match prose_wrap.as_str() {
- "always" => ProseWrap::Always,
- "never" => ProseWrap::Never,
- "preserve" => ProseWrap::Preserve,
- // validators in `flags.rs` makes other values unreachable
- _ => unreachable!(),
- });
- }
+ if let Some(prose_wrap) = &fmt_flags.prose_wrap {
+ options.prose_wrap = Some(match prose_wrap.as_str() {
+ "always" => ProseWrap::Always,
+ "never" => ProseWrap::Never,
+ "preserve" => ProseWrap::Preserve,
+ // validators in `flags.rs` makes other values unreachable
+ _ => unreachable!(),
+ });
+ }
- if let Some(no_semis) = &fmt_flags.no_semicolons {
- options.semi_colons = Some(!no_semis);
- }
+ if let Some(no_semis) = &fmt_flags.no_semicolons {
+ options.semi_colons = Some(!no_semis);
}
options
}
-#[derive(Clone)]
-pub struct TestOptions {
- pub files: FilePatterns,
+#[derive(Clone, Debug)]
+pub struct WorkspaceTestOptions {
pub doc: bool,
pub no_run: bool,
pub fail_fast: Option<NonZeroUsize>,
@@ -372,37 +374,47 @@ pub struct TestOptions {
pub junit_path: Option<String>,
}
-impl TestOptions {
- pub fn resolve(
- maybe_test_config: Option<TestConfig>,
- maybe_test_flags: Option<TestFlags>,
- initial_cwd: &Path,
- ) -> Result<Self, AnyError> {
- let test_flags = maybe_test_flags.unwrap_or_default();
-
- Ok(Self {
- files: resolve_files(
- maybe_test_config.map(|c| c.files),
- Some(test_flags.files),
- initial_cwd,
- )?,
+impl WorkspaceTestOptions {
+ pub fn resolve(test_flags: &TestFlags) -> Self {
+ Self {
allow_none: test_flags.allow_none,
concurrent_jobs: test_flags
.concurrent_jobs
.unwrap_or_else(|| NonZeroUsize::new(1).unwrap()),
doc: test_flags.doc,
fail_fast: test_flags.fail_fast,
- filter: test_flags.filter,
+ filter: test_flags.filter.clone(),
no_run: test_flags.no_run,
shuffle: test_flags.shuffle,
trace_leaks: test_flags.trace_leaks,
reporter: test_flags.reporter,
- junit_path: test_flags.junit_path,
+ junit_path: test_flags.junit_path.clone(),
+ }
+ }
+}
+
+#[derive(Debug, Clone)]
+pub struct TestOptions {
+ pub files: FilePatterns,
+}
+
+impl TestOptions {
+ pub fn resolve(
+ test_config: TestConfig,
+ test_flags: TestFlags,
+ maybe_flags_base: Option<&Path>,
+ ) -> Result<Self, AnyError> {
+ Ok(Self {
+ files: resolve_files(
+ test_config.files,
+ &test_flags.files,
+ maybe_flags_base,
+ )?,
})
}
}
-#[derive(Clone, Default, Debug)]
+#[derive(Clone, Copy, Default, Debug)]
pub enum LintReporterKind {
#[default]
Pretty,
@@ -411,10 +423,45 @@ pub enum LintReporterKind {
}
#[derive(Clone, Debug)]
+pub struct WorkspaceLintOptions {
+ pub reporter_kind: LintReporterKind,
+}
+
+impl WorkspaceLintOptions {
+ pub fn resolve(
+ lint_config: &WorkspaceLintConfig,
+ lint_flags: &LintFlags,
+ ) -> Result<Self, AnyError> {
+ let mut maybe_reporter_kind = if lint_flags.json {
+ Some(LintReporterKind::Json)
+ } else if lint_flags.compact {
+ Some(LintReporterKind::Compact)
+ } else {
+ None
+ };
+
+ if maybe_reporter_kind.is_none() {
+ // Flag not set, so try to get lint reporter from the config file.
+ maybe_reporter_kind = match lint_config.report.as_deref() {
+ Some("json") => Some(LintReporterKind::Json),
+ Some("compact") => Some(LintReporterKind::Compact),
+ Some("pretty") => Some(LintReporterKind::Pretty),
+ Some(_) => {
+ bail!("Invalid lint report type in config file")
+ }
+ None => None,
+ }
+ }
+ Ok(Self {
+ reporter_kind: maybe_reporter_kind.unwrap_or_default(),
+ })
+ }
+}
+
+#[derive(Clone, Debug)]
pub struct LintOptions {
pub rules: LintRulesConfig,
pub files: FilePatterns,
- pub reporter_kind: LintReporterKind,
pub fix: bool,
}
@@ -429,99 +476,51 @@ impl LintOptions {
Self {
rules: Default::default(),
files: FilePatterns::new_with_base(base),
- reporter_kind: Default::default(),
fix: false,
}
}
pub fn resolve(
- maybe_lint_config: Option<LintConfig>,
- maybe_lint_flags: Option<LintFlags>,
- initial_cwd: &Path,
+ lint_config: LintConfig,
+ lint_flags: LintFlags,
+ maybe_flags_base: Option<&Path>,
) -> Result<Self, AnyError> {
- let fix = maybe_lint_flags.as_ref().map(|f| f.fix).unwrap_or(false);
- let mut maybe_reporter_kind =
- maybe_lint_flags.as_ref().and_then(|lint_flags| {
- if lint_flags.json {
- Some(LintReporterKind::Json)
- } else if lint_flags.compact {
- Some(LintReporterKind::Compact)
- } else {
- None
- }
- });
-
- if maybe_reporter_kind.is_none() {
- // Flag not set, so try to get lint reporter from the config file.
- if let Some(lint_config) = &maybe_lint_config {
- maybe_reporter_kind = match lint_config.report.as_deref() {
- Some("json") => Some(LintReporterKind::Json),
- Some("compact") => Some(LintReporterKind::Compact),
- Some("pretty") => Some(LintReporterKind::Pretty),
- Some(_) => {
- bail!("Invalid lint report type in config file")
- }
- None => None,
- }
- }
- }
-
- let (
- maybe_file_flags,
- maybe_rules_tags,
- maybe_rules_include,
- maybe_rules_exclude,
- ) = maybe_lint_flags
- .map(|f| {
- (
- f.files,
- f.maybe_rules_tags,
- f.maybe_rules_include,
- f.maybe_rules_exclude,
- )
- })
- .unwrap_or_default();
-
- let (maybe_config_files, maybe_config_rules) =
- maybe_lint_config.map(|c| (c.files, c.rules)).unzip();
Ok(Self {
- reporter_kind: maybe_reporter_kind.unwrap_or_default(),
files: resolve_files(
- maybe_config_files,
- Some(maybe_file_flags),
- initial_cwd,
+ lint_config.files,
+ &lint_flags.files,
+ maybe_flags_base,
)?,
rules: resolve_lint_rules_options(
- maybe_config_rules,
- maybe_rules_tags,
- maybe_rules_include,
- maybe_rules_exclude,
+ lint_config.rules,
+ lint_flags.maybe_rules_tags,
+ lint_flags.maybe_rules_include,
+ lint_flags.maybe_rules_exclude,
),
- fix,
+ fix: lint_flags.fix,
})
}
}
fn resolve_lint_rules_options(
- maybe_lint_rules_config: Option<LintRulesConfig>,
+ config_rules: LintRulesConfig,
mut maybe_rules_tags: Option<Vec<String>>,
mut maybe_rules_include: Option<Vec<String>>,
mut maybe_rules_exclude: Option<Vec<String>>,
) -> LintRulesConfig {
- if let Some(config_rules) = maybe_lint_rules_config {
- // Try to get configured rules. CLI flags take precedence
- // over config file, i.e. if there's `rules.include` in config file
- // and `--rules-include` CLI flag, only the flag value is taken into account.
- if maybe_rules_include.is_none() {
- maybe_rules_include = config_rules.include;
- }
- if maybe_rules_exclude.is_none() {
- maybe_rules_exclude = config_rules.exclude;
- }
- if maybe_rules_tags.is_none() {
- maybe_rules_tags = config_rules.tags;
- }
+ // Try to get configured rules. CLI flags take precedence
+ // over config file, i.e. if there's `rules.include` in config file
+ // and `--rules-include` CLI flag, only the flag value is taken into account.
+ if maybe_rules_include.is_none() {
+ maybe_rules_include = config_rules.include;
+ }
+ if maybe_rules_exclude.is_none() {
+ maybe_rules_exclude = config_rules.exclude;
+ }
+ if maybe_rules_tags.is_none() {
+ maybe_rules_tags = config_rules.tags;
}
+
LintRulesConfig {
exclude: maybe_rules_exclude,
include: maybe_rules_include,
@@ -529,24 +528,6 @@ fn resolve_lint_rules_options(
}
}
-/// Discover `package.json` file. If `maybe_stop_at` is provided, we will stop
-/// crawling up the directory tree at that path.
-fn discover_package_json(
- flags: &Flags,
- maybe_stop_at: Option<PathBuf>,
- current_dir: &Path,
-) -> Result<Option<Arc<PackageJson>>, AnyError> {
- // TODO(bartlomieju): discover for all subcommands, but print warnings that
- // `package.json` is ignored in bundle/compile/etc.
-
- if let Some(package_json_dir) = flags.package_json_search_dir(current_dir) {
- return package_json::discover_from(&package_json_dir, maybe_stop_at);
- }
-
- log::debug!("No package.json file found");
- Ok(None)
-}
-
/// Discover `.npmrc` file - currently we only support it next to `package.json`
/// or next to `deno.json`.
///
@@ -798,12 +779,10 @@ pub struct CliOptions {
initial_cwd: PathBuf,
maybe_node_modules_folder: Option<PathBuf>,
maybe_vendor_folder: Option<PathBuf>,
- maybe_config_file: Option<ConfigFile>,
- maybe_package_json: Option<Arc<PackageJson>>,
npmrc: Arc<ResolvedNpmRc>,
maybe_lockfile: Option<Arc<CliLockfile>>,
overrides: CliOptionOverrides,
- maybe_workspace_config: Option<WorkspaceConfig>,
+ pub workspace: Arc<Workspace>,
pub disable_deprecated_api_warning: bool,
pub verbose_deprecated_api_warning: bool,
}
@@ -812,10 +791,9 @@ impl CliOptions {
pub fn new(
flags: Flags,
initial_cwd: PathBuf,
- maybe_config_file: Option<ConfigFile>,
maybe_lockfile: Option<Arc<CliLockfile>>,
- maybe_package_json: Option<Arc<PackageJson>>,
npmrc: Arc<ResolvedNpmRc>,
+ workspace: Arc<Workspace>,
force_global_cache: bool,
) -> Result<Self, AnyError> {
if let Some(insecure_allowlist) =
@@ -836,24 +814,23 @@ impl CliOptions {
}
let maybe_lockfile = maybe_lockfile.filter(|_| !force_global_cache);
+ let root_folder = workspace.root_folder().1;
let maybe_node_modules_folder = resolve_node_modules_folder(
&initial_cwd,
&flags,
- maybe_config_file.as_ref(),
- maybe_package_json.as_deref(),
+ root_folder.deno_json.as_deref(),
+ root_folder.pkg_json.as_deref(),
)
.with_context(|| "Resolving node_modules folder.")?;
let maybe_vendor_folder = if force_global_cache {
None
} else {
- resolve_vendor_folder(&initial_cwd, &flags, maybe_config_file.as_ref())
+ resolve_vendor_folder(
+ &initial_cwd,
+ &flags,
+ root_folder.deno_json.as_deref(),
+ )
};
- let maybe_workspace_config =
- if let Some(config_file) = maybe_config_file.as_ref() {
- config_file.to_workspace_config()?
- } else {
- None
- };
if let Some(env_file_name) = &flags.env_file {
match from_filename(env_file_name) {
@@ -879,14 +856,12 @@ impl CliOptions {
Ok(Self {
flags,
initial_cwd,
- maybe_config_file,
maybe_lockfile,
- maybe_package_json,
npmrc,
maybe_node_modules_folder,
maybe_vendor_folder,
overrides: Default::default(),
- maybe_workspace_config,
+ workspace,
disable_deprecated_api_warning,
verbose_deprecated_api_warning,
})
@@ -895,50 +870,71 @@ impl CliOptions {
pub fn from_flags(flags: Flags) -> Result<Self, AnyError> {
let initial_cwd =
std::env::current_dir().with_context(|| "Failed getting cwd.")?;
- let additional_config_file_names =
- if matches!(flags.subcommand, DenoSubcommand::Publish(..)) {
- Some(vec!["jsr.json", "jsr.jsonc"])
- } else {
- None
+ let config_fs_adapter = DenoConfigFsAdapter::new(&RealFs);
+ let resolve_workspace_discover_options = || {
+ let additional_config_file_names: &'static [&'static str] =
+ if matches!(flags.subcommand, DenoSubcommand::Publish(..)) {
+ &["jsr.json", "jsr.jsonc"]
+ } else {
+ &[]
+ };
+ let config_parse_options = deno_config::ConfigParseOptions {
+ include_task_comments: matches!(
+ flags.subcommand,
+ DenoSubcommand::Task(..)
+ ),
};
- let parse_options = deno_config::ParseOptions {
- include_task_comments: matches!(
- flags.subcommand,
- DenoSubcommand::Task(..)
- ),
+ let discover_pkg_json = flags.config_flag
+ != deno_config::ConfigFlag::Disabled
+ && !flags.no_npm
+ && !has_flag_env_var("DENO_NO_PACKAGE_JSON");
+ if !discover_pkg_json {
+ log::debug!("package.json auto-discovery is disabled");
+ }
+ WorkspaceDiscoverOptions {
+ fs: &config_fs_adapter,
+ pkg_json_cache: Some(
+ &deno_runtime::deno_node::PackageJsonThreadLocalCache,
+ ),
+ config_parse_options,
+ additional_config_file_names,
+ discover_pkg_json,
+ }
};
- let maybe_config_file = ConfigFile::discover(
- &flags.config_flag,
- flags.config_path_args(&initial_cwd),
- &initial_cwd,
- additional_config_file_names,
- &parse_options,
- )?;
- let mut maybe_package_json = None;
- if flags.config_flag == deno_config::ConfigFlag::Disabled
- || flags.no_npm
- || has_flag_env_var("DENO_NO_PACKAGE_JSON")
- {
- log::debug!("package.json auto-discovery is disabled")
- } else if let Some(config_file) = &maybe_config_file {
- let specifier = config_file.specifier.clone();
- if specifier.scheme() == "file" {
- let maybe_stop_at = specifier
- .to_file_path()
- .unwrap()
- .parent()
- .map(|p| p.to_path_buf());
-
- maybe_package_json =
- discover_package_json(&flags, maybe_stop_at, &initial_cwd)?;
+ let workspace = match &flags.config_flag {
+ deno_config::ConfigFlag::Discover => {
+ if let Some(start_dirs) = flags.config_path_args(&initial_cwd) {
+ Workspace::discover(
+ WorkspaceDiscoverStart::Dirs(&start_dirs),
+ &resolve_workspace_discover_options(),
+ )?
+ } else {
+ Workspace::empty(Arc::new(
+ ModuleSpecifier::from_directory_path(&initial_cwd).unwrap(),
+ ))
+ }
}
- } else {
- maybe_package_json = discover_package_json(&flags, None, &initial_cwd)?;
+ deno_config::ConfigFlag::Path(path) => {
+ let config_path = normalize_path(initial_cwd.join(path));
+ Workspace::discover(
+ WorkspaceDiscoverStart::ConfigFile(&config_path),
+ &resolve_workspace_discover_options(),
+ )?
+ }
+ deno_config::ConfigFlag::Disabled => Workspace::empty(Arc::new(
+ ModuleSpecifier::from_directory_path(&initial_cwd).unwrap(),
+ )),
+ };
+
+ for diagnostic in workspace.diagnostics() {
+ log::warn!("{}", colors::yellow(diagnostic));
}
+
+ let root_folder = workspace.root_folder().1;
let (npmrc, _) = discover_npmrc(
- maybe_package_json.as_ref().map(|p| p.path.clone()),
- maybe_config_file.as_ref().and_then(|cf| {
+ root_folder.pkg_json.as_ref().map(|p| p.path.clone()),
+ root_folder.deno_json.as_ref().and_then(|cf| {
if cf.specifier.scheme() == "file" {
Some(cf.specifier.to_file_path().unwrap())
} else {
@@ -949,16 +945,18 @@ impl CliOptions {
let maybe_lock_file = CliLockfile::discover(
&flags,
- maybe_config_file.as_ref(),
- maybe_package_json.as_deref(),
+ root_folder.deno_json.as_deref(),
+ root_folder.pkg_json.as_deref(),
)?;
+
+ log::debug!("Finished config loading.");
+
Self::new(
flags,
initial_cwd,
- maybe_config_file,
maybe_lock_file.map(Arc::new),
- maybe_package_json,
npmrc,
+ Arc::new(workspace),
false,
)
}
@@ -968,10 +966,6 @@ impl CliOptions {
&self.initial_cwd
}
- pub fn maybe_config_file_specifier(&self) -> Option<ModuleSpecifier> {
- self.maybe_config_file.as_ref().map(|f| f.specifier.clone())
- }
-
pub fn graph_kind(&self) -> GraphKind {
match self.sub_command() {
DenoSubcommand::Cache(_) => GraphKind::All,
@@ -1057,70 +1051,78 @@ impl CliOptions {
Some(maybe_url) => Ok(maybe_url),
None => resolve_import_map_specifier(
self.flags.import_map_path.as_deref(),
- self.maybe_config_file.as_ref(),
+ self.workspace.root_folder().1.deno_json.as_deref(),
&self.initial_cwd,
),
}
}
- pub async fn resolve_import_map(
+ pub async fn create_workspace_resolver(
&self,
file_fetcher: &FileFetcher,
- ) -> Result<Option<ImportMap>, AnyError> {
- if let Some(workspace_config) = self.maybe_workspace_config.as_ref() {
- let root_config_file = self.maybe_config_file.as_ref().unwrap();
- let base_import_map_config = ::import_map::ext::ImportMapConfig {
- base_url: root_config_file.specifier.clone(),
- import_map_value: root_config_file.to_import_map_value_from_imports(),
- };
- let children_configs = workspace_config
- .members
- .iter()
- .map(|member| ::import_map::ext::ImportMapConfig {
- base_url: member.config_file.specifier.clone(),
- import_map_value: member
- .config_file
- .to_import_map_value_from_imports(),
- })
- .collect();
-
- let (import_map_url, import_map) =
- ::import_map::ext::create_synthetic_import_map(
- base_import_map_config,
- children_configs,
- );
- let import_map = enhance_import_map_value_with_workspace_members(
- import_map,
- &workspace_config.members,
- );
- log::debug!(
- "Workspace config generated this import map {}",
- serde_json::to_string_pretty(&import_map).unwrap()
- );
- let maybe_import_map_result =
- import_map::import_map_from_value(import_map_url, import_map).map(Some);
-
- return maybe_import_map_result;
- }
-
- if self
+ ) -> Result<WorkspaceResolver, AnyError> {
+ let overrode_no_import_map = self
.overrides
.import_map_specifier
.as_ref()
.map(|s| s.is_none())
- == Some(true)
- {
- // overrode to not use an import map
- return Ok(None);
- }
-
- let import_map_specifier = self.resolve_specified_import_map_specifier()?;
- resolve_import_map(
- import_map_specifier.as_ref(),
- self.maybe_config_file().as_ref(),
- file_fetcher,
+ == Some(true);
+ let cli_arg_specified_import_map = if overrode_no_import_map {
+ // use a fake empty import map
+ Some(deno_config::workspace::SpecifiedImportMap {
+ base_url: self
+ .workspace
+ .root_folder()
+ .0
+ .join("import_map.json")
+ .unwrap(),
+ value: serde_json::Value::Object(Default::default()),
+ })
+ } else {
+ let maybe_import_map_specifier =
+ self.resolve_specified_import_map_specifier()?;
+ match maybe_import_map_specifier {
+ Some(specifier) => {
+ let value =
+ resolve_import_map_value_from_specifier(&specifier, file_fetcher)
+ .await
+ .with_context(|| {
+ format!("Unable to load '{}' import map", specifier)
+ })?;
+ Some(deno_config::workspace::SpecifiedImportMap {
+ base_url: specifier,
+ value,
+ })
+ }
+ None => None,
+ }
+ };
+ Ok(
+ self
+ .workspace
+ .create_resolver(
+ CreateResolverOptions {
+ // todo(dsherret): this should be false for nodeModulesDir: true
+ pkg_json_dep_resolution: if self.use_byonm() {
+ PackageJsonDepResolution::Disabled
+ } else {
+ PackageJsonDepResolution::Enabled
+ },
+ specified_import_map: cli_arg_specified_import_map,
+ },
+ |specifier| {
+ let specifier = specifier.clone();
+ async move {
+ let file = file_fetcher
+ .fetch(&specifier, &PermissionsContainer::allow_all())
+ .await?
+ .into_text_decoded()?;
+ Ok(file.source.to_string())
+ }
+ },
+ )
+ .await?,
)
- .await
}
pub fn node_ipc_fd(&self) -> Option<i64> {
@@ -1155,22 +1157,18 @@ impl CliOptions {
}
pub fn resolve_main_module(&self) -> Result<ModuleSpecifier, AnyError> {
- match &self.flags.subcommand {
+ let main_module = match &self.flags.subcommand {
DenoSubcommand::Bundle(bundle_flags) => {
- resolve_url_or_path(&bundle_flags.source_file, self.initial_cwd())
- .map_err(AnyError::from)
+ resolve_url_or_path(&bundle_flags.source_file, self.initial_cwd())?
}
DenoSubcommand::Compile(compile_flags) => {
- resolve_url_or_path(&compile_flags.source_file, self.initial_cwd())
- .map_err(AnyError::from)
+ resolve_url_or_path(&compile_flags.source_file, self.initial_cwd())?
}
DenoSubcommand::Eval(_) => {
- resolve_url_or_path("./$deno$eval", self.initial_cwd())
- .map_err(AnyError::from)
+ resolve_url_or_path("./$deno$eval", self.initial_cwd())?
}
DenoSubcommand::Repl(_) => {
- resolve_url_or_path("./$deno$repl.ts", self.initial_cwd())
- .map_err(AnyError::from)
+ resolve_url_or_path("./$deno$repl.ts", self.initial_cwd())?
}
DenoSubcommand::Run(run_flags) => {
if run_flags.is_stdin() {
@@ -1179,25 +1177,24 @@ impl CliOptions {
.and_then(|cwd| {
resolve_url_or_path("./$deno$stdin.ts", &cwd)
.map_err(AnyError::from)
- })
+ })?
} else if run_flags.watch.is_some() {
- resolve_url_or_path(&run_flags.script, self.initial_cwd())
- .map_err(AnyError::from)
+ resolve_url_or_path(&run_flags.script, self.initial_cwd())?
} else if NpmPackageReqReference::from_str(&run_flags.script).is_ok() {
- ModuleSpecifier::parse(&run_flags.script).map_err(AnyError::from)
+ ModuleSpecifier::parse(&run_flags.script)?
} else {
- resolve_url_or_path(&run_flags.script, self.initial_cwd())
- .map_err(AnyError::from)
+ resolve_url_or_path(&run_flags.script, self.initial_cwd())?
}
}
DenoSubcommand::Serve(run_flags) => {
- resolve_url_or_path(&run_flags.script, self.initial_cwd())
- .map_err(AnyError::from)
+ resolve_url_or_path(&run_flags.script, self.initial_cwd())?
}
_ => {
bail!("No main module.")
}
- }
+ };
+
+ Ok(main_module)
}
pub fn resolve_file_header_overrides(
@@ -1266,11 +1263,9 @@ impl CliOptions {
initial_cwd: self.initial_cwd.clone(),
maybe_node_modules_folder: Some(path),
maybe_vendor_folder: self.maybe_vendor_folder.clone(),
- maybe_config_file: self.maybe_config_file.clone(),
- maybe_package_json: self.maybe_package_json.clone(),
npmrc: self.npmrc.clone(),
maybe_lockfile: self.maybe_lockfile.clone(),
- maybe_workspace_config: self.maybe_workspace_config.clone(),
+ workspace: self.workspace.clone(),
overrides: self.overrides.clone(),
disable_deprecated_api_warning: self.disable_deprecated_api_warning,
verbose_deprecated_api_warning: self.verbose_deprecated_api_warning,
@@ -1278,12 +1273,10 @@ impl CliOptions {
}
pub fn node_modules_dir_enablement(&self) -> Option<bool> {
- self.flags.node_modules_dir.or_else(|| {
- self
- .maybe_config_file
- .as_ref()
- .and_then(|c| c.json.node_modules_dir)
- })
+ self
+ .flags
+ .node_modules_dir
+ .or_else(|| self.workspace.node_modules_dir())
}
pub fn vendor_dir_path(&self) -> Option<&PathBuf> {
@@ -1304,10 +1297,7 @@ impl CliOptions {
&self,
config_type: TsConfigType,
) -> Result<TsConfigForEmit, AnyError> {
- let result = deno_config::get_ts_config_for_emit(
- config_type,
- self.maybe_config_file.as_ref(),
- );
+ let result = self.workspace.resolve_ts_config_for_emit(config_type);
match result {
Ok(mut ts_config_for_emit) => {
@@ -1346,101 +1336,83 @@ impl CliOptions {
self.maybe_lockfile.clone()
}
- pub fn resolve_tasks_config(
- &self,
- ) -> Result<IndexMap<String, deno_config::Task>, AnyError> {
- if let Some(config_file) = &self.maybe_config_file {
- config_file.resolve_tasks_config()
- } else if self.maybe_package_json.is_some() {
- Ok(Default::default())
- } else {
- bail!("deno task couldn't find deno.json(c). See https://deno.land/manual@v{}/getting_started/configuration_file", env!("CARGO_PKG_VERSION"))
- }
- }
-
- /// Return the JSX import source configuration.
- pub fn to_maybe_jsx_import_source_config(
- &self,
- ) -> Result<Option<JsxImportSourceConfig>, AnyError> {
- match self.maybe_config_file.as_ref() {
- Some(config) => config.to_maybe_jsx_import_source_config(),
- None => Ok(None),
- }
- }
-
/// Return any imports that should be brought into the scope of the module
/// graph.
pub fn to_maybe_imports(
&self,
) -> Result<Vec<deno_graph::ReferrerImports>, AnyError> {
- if let Some(config_file) = &self.maybe_config_file {
- config_file.to_maybe_imports().map(|maybe_imports| {
- maybe_imports
- .into_iter()
- .map(|(referrer, imports)| deno_graph::ReferrerImports {
- referrer,
- imports,
- })
- .collect()
- })
- } else {
- Ok(Vec::new())
- }
- }
-
- pub fn maybe_config_file(&self) -> &Option<ConfigFile> {
- &self.maybe_config_file
- }
-
- pub fn maybe_workspace_config(&self) -> &Option<WorkspaceConfig> {
- &self.maybe_workspace_config
- }
-
- pub fn maybe_package_json(&self) -> Option<&Arc<PackageJson>> {
- self.maybe_package_json.as_ref()
+ self.workspace.to_maybe_imports().map(|maybe_imports| {
+ maybe_imports
+ .into_iter()
+ .map(|(referrer, imports)| deno_graph::ReferrerImports {
+ referrer,
+ imports,
+ })
+ .collect()
+ })
}
pub fn npmrc(&self) -> &Arc<ResolvedNpmRc> {
&self.npmrc
}
- pub fn maybe_package_json_deps(&self) -> Option<PackageJsonDeps> {
- if matches!(
- self.flags.subcommand,
- DenoSubcommand::Task(TaskFlags { task: None, .. })
- ) {
- // don't have any package json dependencies for deno task with no args
- None
- } else {
- self
- .maybe_package_json()
- .as_ref()
- .map(|p| p.resolve_local_package_json_version_reqs())
+ pub fn resolve_fmt_options_for_members(
+ &self,
+ fmt_flags: &FmtFlags,
+ ) -> Result<Vec<FmtOptions>, AnyError> {
+ let cli_arg_patterns =
+ fmt_flags.files.as_file_patterns(self.initial_cwd())?;
+ let member_ctxs =
+ self.workspace.resolve_ctxs_from_patterns(&cli_arg_patterns);
+ let mut result = Vec::with_capacity(member_ctxs.len());
+ for member_ctx in &member_ctxs {
+ let options = self.resolve_fmt_options(fmt_flags, member_ctx)?;
+ result.push(options);
}
+ Ok(result)
}
pub fn resolve_fmt_options(
&self,
- fmt_flags: FmtFlags,
+ fmt_flags: &FmtFlags,
+ ctx: &WorkspaceMemberContext,
) -> Result<FmtOptions, AnyError> {
- let maybe_fmt_config = if let Some(config_file) = &self.maybe_config_file {
- config_file.to_fmt_config()?
- } else {
- None
- };
- FmtOptions::resolve(maybe_fmt_config, Some(fmt_flags), &self.initial_cwd)
+ let fmt_config = ctx.to_fmt_config()?;
+ FmtOptions::resolve(fmt_config, fmt_flags, Some(&self.initial_cwd))
+ }
+
+ pub fn resolve_workspace_lint_options(
+ &self,
+ lint_flags: &LintFlags,
+ ) -> Result<WorkspaceLintOptions, AnyError> {
+ let lint_config = self.workspace.to_lint_config()?;
+ WorkspaceLintOptions::resolve(&lint_config, lint_flags)
+ }
+
+ pub fn resolve_lint_options_for_members(
+ &self,
+ lint_flags: &LintFlags,
+ ) -> Result<Vec<(WorkspaceMemberContext, LintOptions)>, AnyError> {
+ let cli_arg_patterns =
+ lint_flags.files.as_file_patterns(self.initial_cwd())?;
+ let member_ctxs =
+ self.workspace.resolve_ctxs_from_patterns(&cli_arg_patterns);
+ let mut result = Vec::with_capacity(member_ctxs.len());
+ for member_ctx in member_ctxs {
+ let options =
+ self.resolve_lint_options(lint_flags.clone(), &member_ctx)?;
+ result.push((member_ctx, options));
+ }
+ Ok(result)
}
pub fn resolve_lint_options(
&self,
lint_flags: LintFlags,
+ ctx: &WorkspaceMemberContext,
) -> Result<LintOptions, AnyError> {
- let maybe_lint_config = if let Some(config_file) = &self.maybe_config_file {
- config_file.to_lint_config()?
- } else {
- None
- };
- LintOptions::resolve(maybe_lint_config, Some(lint_flags), &self.initial_cwd)
+ let lint_config = ctx.to_lint_config()?;
+ LintOptions::resolve(lint_config, lint_flags, Some(&self.initial_cwd))
}
pub fn resolve_lint_config(
@@ -1464,104 +1436,80 @@ impl CliOptions {
})
}
- pub fn resolve_config_excludes(&self) -> Result<PathOrPatternSet, AnyError> {
- let maybe_config_files = if let Some(config_file) = &self.maybe_config_file
- {
- Some(config_file.to_files_config()?)
- } else {
- None
- };
- Ok(maybe_config_files.map(|f| f.exclude).unwrap_or_default())
+ pub fn resolve_workspace_test_options(
+ &self,
+ test_flags: &TestFlags,
+ ) -> WorkspaceTestOptions {
+ WorkspaceTestOptions::resolve(test_flags)
+ }
+
+ pub fn resolve_test_options_for_members(
+ &self,
+ test_flags: &TestFlags,
+ ) -> Result<Vec<(WorkspaceMemberContext, TestOptions)>, AnyError> {
+ let cli_arg_patterns =
+ test_flags.files.as_file_patterns(self.initial_cwd())?;
+ let member_ctxs =
+ self.workspace.resolve_ctxs_from_patterns(&cli_arg_patterns);
+ let mut result = Vec::with_capacity(member_ctxs.len());
+ for member_ctx in member_ctxs {
+ let options =
+ self.resolve_test_options(test_flags.clone(), &member_ctx)?;
+ result.push((member_ctx, options));
+ }
+ Ok(result)
+ }
+
+ pub fn resolve_workspace_bench_options(
+ &self,
+ bench_flags: &BenchFlags,
+ ) -> WorkspaceBenchOptions {
+ WorkspaceBenchOptions::resolve(bench_flags)
}
pub fn resolve_test_options(
&self,
test_flags: TestFlags,
+ ctx: &WorkspaceMemberContext,
) -> Result<TestOptions, AnyError> {
- let maybe_test_config = if let Some(config_file) = &self.maybe_config_file {
- config_file.to_test_config()?
- } else {
- None
- };
- TestOptions::resolve(maybe_test_config, Some(test_flags), &self.initial_cwd)
+ let test_config = ctx.to_test_config()?;
+ TestOptions::resolve(test_config, test_flags, Some(&self.initial_cwd))
+ }
+
+ pub fn resolve_bench_options_for_members(
+ &self,
+ bench_flags: &BenchFlags,
+ ) -> Result<Vec<(WorkspaceMemberContext, BenchOptions)>, AnyError> {
+ let cli_arg_patterns =
+ bench_flags.files.as_file_patterns(self.initial_cwd())?;
+ let member_ctxs =
+ self.workspace.resolve_ctxs_from_patterns(&cli_arg_patterns);
+ let mut result = Vec::with_capacity(member_ctxs.len());
+ for member_ctx in member_ctxs {
+ let options = self.resolve_bench_options(bench_flags, &member_ctx)?;
+ result.push((member_ctx, options));
+ }
+ Ok(result)
}
pub fn resolve_bench_options(
&self,
- bench_flags: BenchFlags,
+ bench_flags: &BenchFlags,
+ ctx: &WorkspaceMemberContext,
) -> Result<BenchOptions, AnyError> {
- let maybe_bench_config = if let Some(config_file) = &self.maybe_config_file
- {
- config_file.to_bench_config()?
- } else {
- None
- };
- BenchOptions::resolve(
- maybe_bench_config,
- Some(bench_flags),
- &self.initial_cwd,
- )
+ let bench_config = ctx.to_bench_config()?;
+ BenchOptions::resolve(bench_config, bench_flags, Some(&self.initial_cwd))
}
pub fn resolve_deno_graph_workspace_members(
&self,
) -> Result<Vec<deno_graph::WorkspaceMember>, AnyError> {
- fn workspace_config_to_workspace_members(
- workspace_config: &deno_config::WorkspaceConfig,
- ) -> Result<Vec<deno_graph::WorkspaceMember>, AnyError> {
- workspace_config
- .members
- .iter()
- .map(|member| {
- config_to_workspace_member(&member.config_file).with_context(|| {
- format!(
- "Failed to resolve configuration for '{}' workspace member at '{}'",
- member.member_name,
- member.config_file.specifier.as_str()
- )
- })
- })
- .collect()
- }
-
- fn config_to_workspace_member(
- config: &ConfigFile,
- ) -> Result<deno_graph::WorkspaceMember, AnyError> {
- let nv = deno_semver::package::PackageNv {
- name: match &config.json.name {
- Some(name) => name.clone(),
- None => bail!("Missing 'name' field in config file."),
- },
- version: match &config.json.version {
- Some(name) => deno_semver::Version::parse_standard(name)?,
- None => bail!("Missing 'version' field in config file."),
- },
- };
- Ok(deno_graph::WorkspaceMember {
- base: config.specifier.join("./").unwrap(),
- nv,
- exports: config.to_exports_config()?.into_map(),
- })
- }
-
- let maybe_workspace_config = self.maybe_workspace_config();
- if let Some(wc) = maybe_workspace_config {
- workspace_config_to_workspace_members(wc)
- } else {
- Ok(
- self
- .maybe_config_file()
- .as_ref()
- .and_then(|c| match config_to_workspace_member(c) {
- Ok(m) => Some(vec![m]),
- Err(e) => {
- log::debug!("Deno config was not a package: {:#}", e);
- None
- }
- })
- .unwrap_or_default(),
- )
- }
+ self
+ .workspace
+ .jsr_packages()
+ .into_iter()
+ .map(|pkg| config_to_deno_graph_workspace_member(&pkg.config_file))
+ .collect::<Result<Vec<_>, _>>()
}
/// Vector of user script CLI arguments.
@@ -1578,11 +1526,7 @@ impl CliOptions {
}
pub fn check_js(&self) -> bool {
- self
- .maybe_config_file
- .as_ref()
- .map(|cf| cf.get_check_js())
- .unwrap_or(false)
+ self.workspace.check_js()
}
pub fn coverage_dir(&self) -> Option<String> {
@@ -1729,17 +1673,17 @@ impl CliOptions {
pub fn unstable_bare_node_builtins(&self) -> bool {
self.flags.unstable_config.bare_node_builtins
- || self
- .maybe_config_file()
- .as_ref()
- .map(|c| c.has_unstable("bare-node-builtins"))
- .unwrap_or(false)
+ || self.workspace.has_unstable("bare-node-builtins")
}
pub fn use_byonm(&self) -> bool {
if self.enable_future_features()
&& self.node_modules_dir_enablement().is_none()
- && self.maybe_package_json.is_some()
+ && self
+ .workspace
+ .config_folders()
+ .values()
+ .any(|f| f.pkg_json.is_some())
{
return true;
}
@@ -1750,28 +1694,16 @@ impl CliOptions {
.as_ref()
.map(|s| matches!(s.kind, NpmProcessStateKind::Byonm))
.unwrap_or(false)
- || self
- .maybe_config_file()
- .as_ref()
- .map(|c| c.has_unstable("byonm"))
- .unwrap_or(false)
+ || self.workspace.has_unstable("byonm")
}
pub fn unstable_sloppy_imports(&self) -> bool {
self.flags.unstable_config.sloppy_imports
- || self
- .maybe_config_file()
- .as_ref()
- .map(|c| c.has_unstable("sloppy-imports"))
- .unwrap_or(false)
+ || self.workspace.has_unstable("sloppy-imports")
}
pub fn unstable_features(&self) -> Vec<String> {
- let mut from_config_file = self
- .maybe_config_file()
- .as_ref()
- .map(|c| c.json.unstable.clone())
- .unwrap_or_default();
+ let mut from_config_file = self.workspace.unstable_features().to_vec();
self
.flags
@@ -1824,12 +1756,18 @@ impl CliOptions {
{
full_paths.push(import_map_path);
}
- if let Some(specifier) = self.maybe_config_file_specifier() {
- if specifier.scheme() == "file" {
- if let Ok(path) = specifier.to_file_path() {
- full_paths.push(path);
+
+ for (_, folder) in self.workspace.config_folders() {
+ if let Some(deno_json) = &folder.deno_json {
+ if deno_json.specifier.scheme() == "file" {
+ if let Ok(path) = deno_json.specifier.to_file_path() {
+ full_paths.push(path);
+ }
}
}
+ if let Some(pkg_json) = &folder.pkg_json {
+ full_paths.push(pkg_json.path.clone());
+ }
}
full_paths
}
@@ -1938,8 +1876,9 @@ impl StorageKeyResolver {
// otherwise we will use the path to the config file or None to
// fall back to using the main module's path
options
- .maybe_config_file
- .as_ref()
+ .workspace
+ .resolve_start_ctx()
+ .maybe_deno_json()
.map(|config_file| Some(config_file.specifier.to_string()))
})
}
@@ -1967,29 +1906,25 @@ impl StorageKeyResolver {
/// over config file, i.e. if there's `files.ignore` in config file
/// and `--ignore` CLI flag, only the flag value is taken into account.
fn resolve_files(
- maybe_files_config: Option<FilePatterns>,
- maybe_file_flags: Option<FileFlags>,
- initial_cwd: &Path,
+ mut files_config: FilePatterns,
+ file_flags: &FileFlags,
+ maybe_flags_base: Option<&Path>,
) -> Result<FilePatterns, AnyError> {
- let mut maybe_files_config = maybe_files_config
- .unwrap_or_else(|| FilePatterns::new_with_base(initial_cwd.to_path_buf()));
- if let Some(file_flags) = maybe_file_flags {
- if !file_flags.include.is_empty() {
- maybe_files_config.include =
- Some(PathOrPatternSet::from_include_relative_path_or_patterns(
- initial_cwd,
- &file_flags.include,
- )?);
- }
- if !file_flags.ignore.is_empty() {
- maybe_files_config.exclude =
- PathOrPatternSet::from_exclude_relative_path_or_patterns(
- initial_cwd,
- &file_flags.ignore,
- )?;
- }
+ if !file_flags.include.is_empty() {
+ files_config.include =
+ Some(PathOrPatternSet::from_include_relative_path_or_patterns(
+ maybe_flags_base.unwrap_or(&files_config.base),
+ &file_flags.include,
+ )?);
+ }
+ if !file_flags.ignore.is_empty() {
+ files_config.exclude =
+ PathOrPatternSet::from_exclude_relative_path_or_patterns(
+ maybe_flags_base.unwrap_or(&files_config.base),
+ &file_flags.ignore,
+ )?;
}
- Ok(maybe_files_config)
+ Ok(files_config)
}
/// Resolves the no_prompt value based on the cli flags and environment.
@@ -2009,6 +1944,26 @@ pub fn npm_pkg_req_ref_to_binary_command(
binary_name.to_string()
}
+pub fn config_to_deno_graph_workspace_member(
+ config: &ConfigFile,
+) -> Result<deno_graph::WorkspaceMember, AnyError> {
+ let nv = deno_semver::package::PackageNv {
+ name: match &config.json.name {
+ Some(name) => name.clone(),
+ None => bail!("Missing 'name' field in config file."),
+ },
+ version: match &config.json.version {
+ Some(name) => deno_semver::Version::parse_standard(name)?,
+ None => bail!("Missing 'version' field in config file."),
+ },
+ };
+ Ok(deno_graph::WorkspaceMember {
+ base: config.specifier.join("./").unwrap(),
+ nv,
+ exports: config.to_exports_config()?.into_map(),
+ })
+}
+
#[cfg(test)]
mod test {
use crate::util::fs::FileCollector;
@@ -2027,7 +1982,7 @@ mod test {
let config_file = ConfigFile::new(
config_text,
config_specifier,
- &deno_config::ParseOptions::default(),
+ &deno_config::ConfigParseOptions::default(),
)
.unwrap();
let actual = resolve_import_map_specifier(
@@ -2051,7 +2006,7 @@ mod test {
let config_file = ConfigFile::new(
config_text,
config_specifier,
- &deno_config::ParseOptions::default(),
+ &deno_config::ConfigParseOptions::default(),
)
.unwrap();
let actual = resolve_import_map_specifier(
@@ -2130,7 +2085,7 @@ mod test {
assert!(error.to_string().starts_with("Failed to expand glob"));
let resolved_files = resolve_files(
- Some(FilePatterns {
+ FilePatterns {
base: temp_dir_path.to_path_buf(),
include: Some(
PathOrPatternSet::from_include_relative_path_or_patterns(
@@ -2149,9 +2104,9 @@ mod test {
&["nested/**/*bazz.ts".to_string()],
)
.unwrap(),
- }),
- None,
- temp_dir_path,
+ },
+ &Default::default(),
+ Some(temp_dir_path),
)
.unwrap();
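
[Editor's note] Throughout the new `resolve_files`, CLI flags override the config file per field: a non-empty `--include` or `--ignore` replaces the corresponding config pattern set independently of the other. A self-contained sketch of that precedence rule with simplified stand-in types (the real code builds `PathOrPatternSet`s relative to `maybe_flags_base`):

use std::path::PathBuf;

// Simplified stand-ins for deno_config's FilePatterns/PathOrPatternSet,
// just to illustrate the precedence rule in resolve_files.
#[derive(Debug, Default, Clone)]
struct Patterns(Vec<String>);

#[derive(Debug, Clone)]
struct FilePatterns {
    base: PathBuf,
    include: Option<Patterns>,
    exclude: Patterns,
}

/// CLI flags win over the config file, per field: a non-empty `--ignore`
/// replaces the config's exclude set without touching its include set,
/// and vice versa.
fn resolve_files(
    mut config: FilePatterns,
    flag_include: &[String],
    flag_ignore: &[String],
) -> FilePatterns {
    if !flag_include.is_empty() {
        config.include = Some(Patterns(flag_include.to_vec()));
    }
    if !flag_ignore.is_empty() {
        config.exclude = Patterns(flag_ignore.to_vec());
    }
    config
}

fn main() {
    let from_config = FilePatterns {
        base: PathBuf::from("/project"),
        include: Some(Patterns(vec!["src/".into()])),
        exclude: Patterns(vec!["dist/".into()]),
    };
    // `deno lint --ignore=vendor/` keeps the config's include list
    // but replaces its exclude list.
    let resolved = resolve_files(from_config, &[], &["vendor/".to_string()]);
    println!("{resolved:?}");
}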
diff --git a/cli/args/package_json.rs b/cli/args/package_json.rs
index b6ccb33a4..eb1c41c5d 100644
--- a/cli/args/package_json.rs
+++ b/cli/args/package_json.rs
@@ -1,77 +1,87 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
-use deno_config::package_json::PackageJsonDeps;
-use deno_core::anyhow::bail;
-use deno_core::error::AnyError;
-use deno_runtime::deno_fs::RealFs;
-use deno_runtime::deno_node::load_pkg_json;
-use deno_runtime::deno_node::PackageJson;
+use deno_config::package_json::PackageJsonDepValue;
+use deno_config::workspace::Workspace;
use deno_semver::package::PackageReq;
-#[derive(Debug, Default)]
-pub struct PackageJsonDepsProvider(Option<PackageJsonDeps>);
-
-impl PackageJsonDepsProvider {
- pub fn new(deps: Option<PackageJsonDeps>) -> Self {
- Self(deps)
- }
-
- pub fn deps(&self) -> Option<&PackageJsonDeps> {
- self.0.as_ref()
- }
-
- pub fn reqs(&self) -> Option<Vec<&PackageReq>> {
- match &self.0 {
- Some(deps) => {
- let mut package_reqs = deps
- .values()
- .filter_map(|r| r.as_ref().ok())
- .collect::<Vec<_>>();
- package_reqs.sort(); // deterministic resolution
- Some(package_reqs)
- }
- None => None,
- }
- }
+#[derive(Debug)]
+pub struct InstallNpmWorkspacePkg {
+ pub alias: String,
+ pub pkg_dir: PathBuf,
}
-/// Attempts to discover the package.json file, maybe stopping when it
-/// reaches the specified `maybe_stop_at` directory.
-pub fn discover_from(
- start: &Path,
- maybe_stop_at: Option<PathBuf>,
-) -> Result<Option<Arc<PackageJson>>, AnyError> {
- const PACKAGE_JSON_NAME: &str = "package.json";
+// todo(#24419): this is not correct, but it's good enough for now.
+// We need deno_npm to be able to understand workspace packages and
+// then have a way to properly lay them out on the file system
+#[derive(Debug, Default)]
+pub struct PackageJsonInstallDepsProvider {
+ remote_pkg_reqs: Vec<PackageReq>,
+ workspace_pkgs: Vec<InstallNpmWorkspacePkg>,
+}
- // note: ancestors() includes the `start` path
- for ancestor in start.ancestors() {
- let path = ancestor.join(PACKAGE_JSON_NAME);
+impl PackageJsonInstallDepsProvider {
+ pub fn empty() -> Self {
+ Self::default()
+ }
- let package_json = match load_pkg_json(&RealFs, &path) {
- Ok(Some(package_json)) => package_json,
- Ok(None) => {
- if let Some(stop_at) = maybe_stop_at.as_ref() {
- if ancestor == stop_at {
- break;
+ pub fn from_workspace(workspace: &Arc<Workspace>) -> Self {
+ let mut workspace_pkgs = Vec::new();
+ let mut remote_pkg_reqs = Vec::new();
+ let workspace_npm_pkgs = workspace.npm_packages();
+ for pkg_json in workspace.package_jsons() {
+ let deps = pkg_json.resolve_local_package_json_deps();
+ let mut pkg_reqs = Vec::with_capacity(deps.len());
+ for (alias, dep) in deps {
+ let Ok(dep) = dep else {
+ continue;
+ };
+ match dep {
+ PackageJsonDepValue::Req(pkg_req) => {
+ if let Some(pkg) = workspace_npm_pkgs
+ .iter()
+ .find(|pkg| pkg.matches_req(&pkg_req))
+ {
+ workspace_pkgs.push(InstallNpmWorkspacePkg {
+ alias,
+ pkg_dir: pkg.pkg_json.dir_path().to_path_buf(),
+ });
+ } else {
+ pkg_reqs.push(pkg_req)
+ }
+ }
+ PackageJsonDepValue::Workspace(version_req) => {
+ if let Some(pkg) = workspace_npm_pkgs.iter().find(|pkg| {
+ pkg.matches_name_and_version_req(&alias, &version_req)
+ }) {
+ workspace_pkgs.push(InstallNpmWorkspacePkg {
+ alias,
+ pkg_dir: pkg.pkg_json.dir_path().to_path_buf(),
+ });
+ }
}
}
- continue;
}
- Err(err) => bail!(
- "Error loading package.json at {}. {:#}",
- path.display(),
- err
- ),
- };
+ // sort within each package
+ pkg_reqs.sort();
- log::debug!("package.json file found at '{}'", path.display());
- return Ok(Some(package_json));
+ remote_pkg_reqs.extend(pkg_reqs);
+ }
+ remote_pkg_reqs.shrink_to_fit();
+ workspace_pkgs.shrink_to_fit();
+ Self {
+ remote_pkg_reqs,
+ workspace_pkgs,
+ }
+ }
+
+ pub fn remote_pkg_reqs(&self) -> &Vec<PackageReq> {
+ &self.remote_pkg_reqs
}
- log::debug!("No package.json file found");
- Ok(None)
+ pub fn workspace_pkgs(&self) -> &Vec<InstallNpmWorkspacePkg> {
+ &self.workspace_pkgs
+ }
}
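The new PackageJsonInstallDepsProvider above partitions every package.json dependency into either a local workspace link or a remote npm requirement. A minimal sketch of that partitioning, using simplified stand-in types rather than the actual deno_config API (DepValue and WorkspacePkg here are hypothetical):

use std::path::PathBuf;

// Hypothetical simplified types for illustration; the real code works with
// PackageJsonDepValue and the workspace's npm package metadata.
enum DepValue {
    // (package name, full requirement text), e.g. ("chalk", "chalk@^5")
    Req { name: String, raw: String },
    // a "workspace:*"-style dep on the aliased package
    Workspace,
}

struct WorkspacePkg {
    name: String,
    dir: PathBuf,
}

fn partition(
    deps: Vec<(String, DepValue)>,
    workspace: &[WorkspacePkg],
) -> (Vec<String>, Vec<(String, PathBuf)>) {
    let mut remote = Vec::new();
    let mut local = Vec::new();
    for (alias, dep) in deps {
        match dep {
            // a plain requirement stays remote unless a workspace member satisfies it
            DepValue::Req { name, raw } => {
                match workspace.iter().find(|p| p.name == name) {
                    Some(pkg) => local.push((alias, pkg.dir.clone())),
                    None => remote.push(raw),
                }
            }
            // "workspace:" deps must resolve to a member, otherwise they are dropped
            DepValue::Workspace => {
                if let Some(pkg) = workspace.iter().find(|p| p.name == alias) {
                    local.push((alias, pkg.dir.clone()));
                }
            }
        }
    }
    remote.sort(); // deterministic resolution, mirroring the sort in the diff
    (remote, local)
}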
diff --git a/cli/factory.rs b/cli/factory.rs
index 56a28b6d9..62ab251f1 100644
--- a/cli/factory.rs
+++ b/cli/factory.rs
@@ -1,11 +1,10 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-use crate::args::deno_json::deno_json_deps;
use crate::args::CliLockfile;
use crate::args::CliOptions;
use crate::args::DenoSubcommand;
use crate::args::Flags;
-use crate::args::PackageJsonDepsProvider;
+use crate::args::PackageJsonInstallDepsProvider;
use crate::args::StorageKeyResolver;
use crate::args::TsConfigType;
use crate::cache::Caches;
@@ -52,8 +51,12 @@ use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
use crate::worker::CliMainWorkerFactory;
use crate::worker::CliMainWorkerOptions;
+use std::collections::BTreeSet;
use std::path::PathBuf;
+use deno_config::package_json::PackageJsonDepValue;
+use deno_config::workspace::WorkspaceResolver;
+use deno_config::ConfigFile;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::FeatureChecker;
@@ -62,10 +65,10 @@ use deno_lockfile::WorkspaceMemberConfig;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::analyze::NodeCodeTranslator;
use deno_runtime::deno_node::NodeResolver;
+use deno_runtime::deno_node::PackageJson;
use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_runtime::deno_web::BlobStore;
use deno_runtime::inspector_server::InspectorServer;
-use import_map::ImportMap;
use log::warn;
use std::future::Future;
use std::sync::Arc;
@@ -156,7 +159,6 @@ struct CliFactoryServices {
fs: Deferred<Arc<dyn deno_fs::FileSystem>>,
main_graph_container: Deferred<Arc<MainModuleGraphContainer>>,
lockfile: Deferred<Option<Arc<CliLockfile>>>,
- maybe_import_map: Deferred<Option<Arc<ImportMap>>>,
maybe_inspector_server: Deferred<Option<Arc<InspectorServer>>>,
root_cert_store_provider: Deferred<Arc<dyn RootCertStoreProvider>>,
blob_store: Deferred<Arc<BlobStore>>,
@@ -170,13 +172,13 @@ struct CliFactoryServices {
node_code_translator: Deferred<Arc<CliNodeCodeTranslator>>,
node_resolver: Deferred<Arc<NodeResolver>>,
npm_resolver: Deferred<Arc<dyn CliNpmResolver>>,
- package_json_deps_provider: Deferred<Arc<PackageJsonDepsProvider>>,
text_only_progress_bar: Deferred<ProgressBar>,
type_checker: Deferred<Arc<TypeChecker>>,
cjs_resolutions: Deferred<Arc<CjsResolutionStore>>,
cli_node_resolver: Deferred<Arc<CliNodeResolver>>,
feature_checker: Deferred<Arc<FeatureChecker>>,
code_cache: Deferred<Arc<CodeCache>>,
+ workspace_resolver: Deferred<Arc<WorkspaceResolver>>,
}
pub struct CliFactory {
@@ -304,19 +306,33 @@ impl CliFactory {
}
pub fn maybe_lockfile(&self) -> &Option<Arc<CliLockfile>> {
- fn check_no_npm(lockfile: &CliLockfile, options: &CliOptions) -> bool {
- if options.no_npm() {
- return true;
- }
- // Deno doesn't yet understand npm workspaces and the package.json resolution
- // may be in a different folder than the deno.json/lockfile. So for now, ignore
- // any package.jsons that are in different folders
- options
- .maybe_package_json()
- .map(|package_json| {
- package_json.path.parent() != lockfile.filename.parent()
+ fn pkg_json_deps(maybe_pkg_json: Option<&PackageJson>) -> BTreeSet<String> {
+ let Some(pkg_json) = maybe_pkg_json else {
+ return Default::default();
+ };
+ pkg_json
+ .resolve_local_package_json_deps()
+ .values()
+ .filter_map(|dep| dep.as_ref().ok())
+ .filter_map(|dep| match dep {
+ PackageJsonDepValue::Req(req) => Some(req),
+ PackageJsonDepValue::Workspace(_) => None,
+ })
+ .map(|r| format!("npm:{}", r))
+ .collect()
+ }
+
+ fn deno_json_deps(
+ maybe_deno_json: Option<&ConfigFile>,
+ ) -> BTreeSet<String> {
+ maybe_deno_json
+ .map(|c| {
+ crate::args::deno_json::deno_json_deps(c)
+ .into_iter()
+ .map(|req| req.to_string())
+ .collect()
})
- .unwrap_or(false)
+ .unwrap_or_default()
}
self.services.lockfile.get_or_init(|| {
@@ -324,67 +340,52 @@ impl CliFactory {
// initialize the lockfile with the workspace's configuration
if let Some(lockfile) = &maybe_lockfile {
- let no_npm = check_no_npm(lockfile, &self.options);
- let package_json_deps = (!no_npm)
- .then(|| {
- self
- .package_json_deps_provider()
- .reqs()
- .map(|reqs| {
- reqs.into_iter().map(|s| format!("npm:{}", s)).collect()
- })
- .unwrap_or_default()
- })
- .unwrap_or_default();
- let config = match self.options.maybe_workspace_config() {
- Some(workspace_config) => deno_lockfile::WorkspaceConfig {
- root: WorkspaceMemberConfig {
- package_json_deps,
- dependencies: deno_json_deps(
- self.options.maybe_config_file().as_ref().unwrap(),
- )
- .into_iter()
- .map(|req| req.to_string())
- .collect(),
- },
- members: workspace_config
- .members
- .iter()
- .map(|member| {
- (
- member.package_name.clone(),
- WorkspaceMemberConfig {
- package_json_deps: Default::default(),
- dependencies: deno_json_deps(&member.config_file)
- .into_iter()
- .map(|req| req.to_string())
- .collect(),
- },
- )
- })
- .collect(),
- },
- None => deno_lockfile::WorkspaceConfig {
- root: WorkspaceMemberConfig {
- package_json_deps,
- dependencies: self
- .options
- .maybe_config_file()
- .as_ref()
- .map(|config| {
- deno_json_deps(config)
- .into_iter()
- .map(|req| req.to_string())
- .collect()
- })
- .unwrap_or_default(),
- },
- members: Default::default(),
+ let (root_url, root_folder) = self.options.workspace.root_folder();
+ let config = deno_lockfile::WorkspaceConfig {
+ root: WorkspaceMemberConfig {
+ package_json_deps: pkg_json_deps(root_folder.pkg_json.as_deref()),
+ dependencies: deno_json_deps(root_folder.deno_json.as_deref()),
},
+ members: self
+ .options
+ .workspace
+ .config_folders()
+ .iter()
+ .filter(|(folder_url, _)| *folder_url != root_url)
+ .filter_map(|(folder_url, folder)| {
+ Some((
+ {
+ // this should never be None here, but if it is,
+ // just skip the member rather than failing
+ let mut relative_path = root_url.make_relative(folder_url)?;
+ if relative_path.ends_with('/') {
+ // make it slightly cleaner by removing the trailing slash
+ relative_path.pop();
+ }
+ relative_path
+ },
+ {
+ let config = WorkspaceMemberConfig {
+ package_json_deps: pkg_json_deps(
+ folder.pkg_json.as_deref(),
+ ),
+ dependencies: deno_json_deps(folder.deno_json.as_deref()),
+ };
+ if config.package_json_deps.is_empty()
+ && config.dependencies.is_empty()
+ {
+ // exclude empty workspace members
+ return None;
+ }
+ config
+ },
+ ))
+ })
+ .collect(),
};
lockfile.set_workspace_config(
deno_lockfile::SetWorkspaceConfigOptions {
- no_npm,
+ no_npm: self.options.no_npm(),
no_config: self.options.no_config(),
config,
},
@@ -437,8 +438,9 @@ impl CliFactory {
cache_setting: self.options.cache_setting(),
text_only_progress_bar: self.text_only_progress_bar().clone(),
maybe_node_modules_path: self.options.node_modules_dir_path().cloned(),
- package_json_deps_provider:
- self.package_json_deps_provider().clone(),
+ package_json_deps_provider: Arc::new(PackageJsonInstallDepsProvider::from_workspace(
+ &self.options.workspace,
+ )),
npm_system_info: self.options.npm_system_info(),
npmrc: self.options.npmrc().clone()
})
@@ -447,28 +449,29 @@ impl CliFactory {
.await
}
- pub fn package_json_deps_provider(&self) -> &Arc<PackageJsonDepsProvider> {
- self.services.package_json_deps_provider.get_or_init(|| {
- Arc::new(PackageJsonDepsProvider::new(
- self.options.maybe_package_json_deps(),
- ))
- })
- }
-
- pub async fn maybe_import_map(
+ pub async fn workspace_resolver(
&self,
- ) -> Result<&Option<Arc<ImportMap>>, AnyError> {
+ ) -> Result<&Arc<WorkspaceResolver>, AnyError> {
self
.services
- .maybe_import_map
+ .workspace_resolver
.get_or_try_init_async(async {
- Ok(
- self
- .options
- .resolve_import_map(self.file_fetcher()?)
- .await?
- .map(Arc::new),
- )
+ let resolver = self
+ .options
+ .create_workspace_resolver(self.file_fetcher()?)
+ .await?;
+ if !resolver.diagnostics().is_empty() {
+ warn!(
+ "Import map diagnostics:\n{}",
+ resolver
+ .diagnostics()
+ .iter()
+ .map(|d| format!(" - {d}"))
+ .collect::<Vec<_>>()
+ .join("\n")
+ );
+ }
+ Ok(Arc::new(resolver))
})
.await
}
@@ -491,17 +494,15 @@ impl CliFactory {
} else {
Some(self.npm_resolver().await?.clone())
},
- package_json_deps_provider: self
- .package_json_deps_provider()
- .clone(),
+ workspace_resolver: self.workspace_resolver().await?.clone(),
+ bare_node_builtins_enabled: self
+ .options
+ .unstable_bare_node_builtins(),
maybe_jsx_import_source_config: self
.options
+ .workspace
.to_maybe_jsx_import_source_config()?,
- maybe_import_map: self.maybe_import_map().await?.clone(),
maybe_vendor_dir: self.options.vendor_dir_path(),
- bare_node_builtins_enabled: self
- .options
- .unstable_bare_node_builtins(),
})))
}
.boxed_local(),
@@ -759,7 +760,6 @@ impl CliFactory {
self.http_client_provider(),
self.npm_resolver().await?.as_ref(),
self.options.npm_system_info(),
- self.package_json_deps_provider(),
))
}
@@ -885,7 +885,6 @@ impl CliFactory {
.unsafely_ignore_certificate_errors()
.clone(),
unstable: self.options.legacy_unstable_flag(),
- maybe_root_package_json_deps: self.options.maybe_package_json_deps(),
create_hmr_runner,
create_coverage_collector,
})
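For the members map built above, each workspace folder is keyed by its path relative to the workspace root URL. A small sketch of that key derivation, using the url crate's Url::make_relative (available in the CLI through deno_core's url re-export):

use deno_core::url::Url; // or `url::Url` outside the Deno tree

fn member_key(root: &Url, folder: &Url) -> Option<String> {
    // returns None when the folder cannot be expressed relative to the root,
    // which the diff treats as "ignore this member"
    let mut rel = root.make_relative(folder)?;
    if rel.ends_with('/') {
        rel.pop(); // drop the trailing slash for a cleaner key
    }
    Some(rel)
}

// member_key of file:///proj/ and file:///proj/member/ yields "member"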
diff --git a/cli/graph_container.rs b/cli/graph_container.rs
index 40ccda9b2..d439f9360 100644
--- a/cli/graph_container.rs
+++ b/cli/graph_container.rs
@@ -98,7 +98,7 @@ impl MainModuleGraphContainer {
&self,
files: &[String],
) -> Result<Vec<ModuleSpecifier>, AnyError> {
- let excludes = self.cli_options.resolve_config_excludes()?;
+ let excludes = self.cli_options.workspace.resolve_config_excludes()?;
Ok(
files
.iter()
diff --git a/cli/graph_util.rs b/cli/graph_util.rs
index f1e98e7c6..2f9ee8d93 100644
--- a/cli/graph_util.rs
+++ b/cli/graph_util.rs
@@ -1,5 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+use crate::args::config_to_deno_graph_workspace_member;
use crate::args::jsr_url;
use crate::args::CliLockfile;
use crate::args::CliOptions;
@@ -18,12 +19,13 @@ use crate::tools::check;
use crate::tools::check::TypeChecker;
use crate::util::file_watcher::WatcherCommunicator;
use crate::util::fs::canonicalize_path;
+use deno_config::workspace::JsrPackageConfig;
use deno_emit::LoaderChecksum;
use deno_graph::JsrLoadError;
use deno_graph::ModuleLoadError;
+use deno_graph::WorkspaceFastCheckOption;
use deno_runtime::fs_util::specifier_to_file_path;
-use deno_config::WorkspaceMemberConfig;
use deno_core::anyhow::bail;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
@@ -240,12 +242,12 @@ impl ModuleGraphCreator {
pub async fn create_and_validate_publish_graph(
&self,
- packages: &[WorkspaceMemberConfig],
+ package_configs: &[JsrPackageConfig],
build_fast_check_graph: bool,
) -> Result<ModuleGraph, AnyError> {
let mut roots = Vec::new();
- for package in packages {
- roots.extend(package.config_file.resolve_export_value_urls()?);
+ for package_config in package_configs {
+ roots.extend(package_config.config_file.resolve_export_value_urls()?);
}
let mut graph = self
.create_graph_with_options(CreateGraphOptions {
@@ -260,10 +262,16 @@ impl ModuleGraphCreator {
self.type_check_graph(graph.clone()).await?;
}
if build_fast_check_graph {
+ let fast_check_workspace_members = package_configs
+ .iter()
+ .map(|p| config_to_deno_graph_workspace_member(&p.config_file))
+ .collect::<Result<Vec<_>, _>>()?;
self.module_graph_builder.build_fast_check_graph(
&mut graph,
BuildFastCheckGraphOptions {
- workspace_fast_check: true,
+ workspace_fast_check: WorkspaceFastCheckOption::Enabled(
+ &fast_check_workspace_members,
+ ),
},
)?;
}
@@ -340,10 +348,10 @@ impl ModuleGraphCreator {
}
}
-pub struct BuildFastCheckGraphOptions {
+pub struct BuildFastCheckGraphOptions<'a> {
/// Whether to do fast check on workspace members. This
/// is mostly only useful when publishing.
- pub workspace_fast_check: bool,
+ pub workspace_fast_check: deno_graph::WorkspaceFastCheckOption<'a>,
}
pub struct ModuleGraphBuilder {
@@ -622,7 +630,10 @@ impl ModuleGraphBuilder {
}
log::debug!("Building fast check graph");
- let fast_check_cache = if !options.workspace_fast_check {
+ let fast_check_cache = if matches!(
+ options.workspace_fast_check,
+ deno_graph::WorkspaceFastCheckOption::Disabled
+ ) {
Some(cache::FastCheckCache::new(self.caches.fast_check_db()))
} else {
None
@@ -631,11 +642,6 @@ impl ModuleGraphBuilder {
let cli_resolver = &self.resolver;
let graph_resolver = cli_resolver.as_graph_resolver();
let graph_npm_resolver = cli_resolver.create_graph_npm_resolver();
- let workspace_members = if options.workspace_fast_check {
- Some(self.options.resolve_deno_graph_workspace_members()?)
- } else {
- None
- };
graph.build_fast_check_type_graph(
deno_graph::BuildFastCheckTypeGraphOptions {
@@ -645,11 +651,7 @@ impl ModuleGraphBuilder {
module_parser: Some(&parser),
resolver: Some(graph_resolver),
npm_resolver: Some(&graph_npm_resolver),
- workspace_fast_check: if let Some(members) = &workspace_members {
- deno_graph::WorkspaceFastCheckOption::Enabled(members)
- } else {
- deno_graph::WorkspaceFastCheckOption::Disabled
- },
+ workspace_fast_check: options.workspace_fast_check,
},
);
Ok(())
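The change above threads deno_graph::WorkspaceFastCheckOption through instead of a bool, so the fast check cache is only consulted when workspace fast check is disabled (normal type checking); publishing rebuilds fast check output for the workspace members from scratch. A sketch of that control flow with a stand-in enum:

// stand-in for deno_graph::WorkspaceFastCheckOption, for illustration only
enum FastCheckOption<'a> {
    Disabled,
    Enabled(&'a [String]), // workspace members in the real type
}

fn should_use_fast_check_cache(opt: &FastCheckOption) -> bool {
    matches!(opt, FastCheckOption::Disabled)
}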
diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs
index e1f3e3207..4b96511c0 100644
--- a/cli/lsp/config.rs
+++ b/cli/lsp/config.rs
@@ -16,7 +16,6 @@ use crate::util::fs::canonicalize_path_maybe_not_exists;
use deno_ast::MediaType;
use deno_config::FmtOptionsConfig;
use deno_config::TsConfig;
-use deno_core::anyhow::anyhow;
use deno_core::normalize_path;
use deno_core::serde::de::DeserializeOwned;
use deno_core::serde::Deserialize;
@@ -27,6 +26,8 @@ use deno_core::serde_json::Value;
use deno_core::ModuleSpecifier;
use deno_lint::linter::LintConfig;
use deno_npm::npm_rc::ResolvedNpmRc;
+use deno_runtime::deno_fs::DenoConfigFsAdapter;
+use deno_runtime::deno_fs::RealFs;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::fs_util::specifier_to_file_path;
@@ -935,7 +936,7 @@ impl Config {
pub fn specifier_enabled(&self, specifier: &ModuleSpecifier) -> bool {
let config_file = self.tree.config_file_for_specifier(specifier);
if let Some(cf) = config_file {
- if let Ok(files) = cf.to_files_config() {
+ if let Ok(files) = cf.to_exclude_files_config() {
if !files.matches_specifier(specifier) {
return false;
}
@@ -952,7 +953,7 @@ impl Config {
specifier: &ModuleSpecifier,
) -> bool {
if let Some(cf) = self.tree.config_file_for_specifier(specifier) {
- if let Some(options) = cf.to_test_config().ok().flatten() {
+ if let Ok(options) = cf.to_test_config() {
if !options.files.matches_specifier(specifier) {
return false;
}
@@ -1135,8 +1136,9 @@ impl ConfigData {
) -> Self {
if let Some(specifier) = config_file_specifier {
match ConfigFile::from_specifier(
+ &DenoConfigFsAdapter::new(&RealFs),
specifier.clone(),
- &deno_config::ParseOptions::default(),
+ &deno_config::ConfigParseOptions::default(),
) {
Ok(config_file) => {
lsp_log!(
@@ -1230,13 +1232,7 @@ impl ConfigData {
.and_then(|config_file| {
config_file
.to_fmt_config()
- .and_then(|o| {
- let base_path = config_file
- .specifier
- .to_file_path()
- .map_err(|_| anyhow!("Invalid base path."))?;
- FmtOptions::resolve(o, None, &base_path)
- })
+ .and_then(|o| FmtOptions::resolve(o, &Default::default(), None))
.inspect_err(|err| {
lsp_warn!(" Couldn't read formatter configuration: {}", err)
})
@@ -1264,13 +1260,7 @@ impl ConfigData {
.and_then(|config_file| {
config_file
.to_lint_config()
- .and_then(|o| {
- let base_path = config_file
- .specifier
- .to_file_path()
- .map_err(|_| anyhow!("Invalid base path."))?;
- LintOptions::resolve(o, None, &base_path)
- })
+ .and_then(|o| LintOptions::resolve(o, Default::default(), None))
.inspect_err(|err| {
lsp_warn!(" Couldn't read lint configuration: {}", err)
})
@@ -2115,7 +2105,7 @@ mod tests {
ConfigFile::new(
"{}",
root_uri.join("deno.json").unwrap(),
- &deno_config::ParseOptions::default(),
+ &deno_config::ConfigParseOptions::default(),
)
.unwrap(),
)
@@ -2173,7 +2163,7 @@ mod tests {
})
.to_string(),
root_uri.join("deno.json").unwrap(),
- &deno_config::ParseOptions::default(),
+ &deno_config::ConfigParseOptions::default(),
)
.unwrap(),
)
@@ -2199,7 +2189,7 @@ mod tests {
})
.to_string(),
root_uri.join("deno.json").unwrap(),
- &deno_config::ParseOptions::default(),
+ &deno_config::ConfigParseOptions::default(),
)
.unwrap(),
)
@@ -2217,7 +2207,7 @@ mod tests {
})
.to_string(),
root_uri.join("deno.json").unwrap(),
- &deno_config::ParseOptions::default(),
+ &deno_config::ConfigParseOptions::default(),
)
.unwrap(),
)
diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs
index 27983867a..9b500567d 100644
--- a/cli/lsp/diagnostics.rs
+++ b/cli/lsp/diagnostics.rs
@@ -1655,7 +1655,7 @@ mod tests {
let config_file = ConfigFile::new(
json_string,
base_url,
- &deno_config::ParseOptions::default(),
+ &deno_config::ConfigParseOptions::default(),
)
.unwrap();
config.tree.inject_config_file(config_file).await;
diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs
index 0d9cd4fbb..48cfebfcc 100644
--- a/cli/lsp/documents.rs
+++ b/cli/lsp/documents.rs
@@ -1751,7 +1751,7 @@ console.log(b, "hello deno");
})
.to_string(),
config.root_uri().unwrap().join("deno.json").unwrap(),
- &deno_config::ParseOptions::default(),
+ &deno_config::ConfigParseOptions::default(),
)
.unwrap(),
)
@@ -1795,7 +1795,7 @@ console.log(b, "hello deno");
})
.to_string(),
config.root_uri().unwrap().join("deno.json").unwrap(),
- &deno_config::ParseOptions::default(),
+ &deno_config::ConfigParseOptions::default(),
)
.unwrap(),
)
diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs
index 25782b95c..cfc58439d 100644
--- a/cli/lsp/language_server.rs
+++ b/cli/lsp/language_server.rs
@@ -2,6 +2,8 @@
use base64::Engine;
use deno_ast::MediaType;
+use deno_config::workspace::Workspace;
+use deno_config::workspace::WorkspaceDiscoverOptions;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::resolve_url;
@@ -13,6 +15,7 @@ use deno_core::url;
use deno_core::ModuleSpecifier;
use deno_graph::GraphKind;
use deno_graph::Resolution;
+use deno_runtime::deno_fs::DenoConfigFsAdapter;
use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_semver::jsr::JsrPackageReqReference;
@@ -3549,6 +3552,24 @@ impl Inner {
}
let workspace_settings = self.config.workspace_settings();
+ let initial_cwd = config_data
+ .and_then(|d| d.scope.to_file_path().ok())
+ .unwrap_or_else(|| self.initial_cwd.clone());
+ // todo: we need a way to convert config data to a Workspace
+ let workspace = Arc::new(Workspace::discover(
+ deno_config::workspace::WorkspaceDiscoverStart::Dirs(&[
+ initial_cwd.clone()
+ ]),
+ &WorkspaceDiscoverOptions {
+ fs: &DenoConfigFsAdapter::new(&deno_runtime::deno_fs::RealFs),
+ pkg_json_cache: None,
+ config_parse_options: deno_config::ConfigParseOptions {
+ include_task_comments: false,
+ },
+ additional_config_file_names: &[],
+ discover_pkg_json: true,
+ },
+ )?);
let cli_options = CliOptions::new(
Flags {
cache_path: Some(self.cache.deno_dir().root.clone()),
@@ -3572,13 +3593,12 @@ impl Inner {
type_check_mode: crate::args::TypeCheckMode::Local,
..Default::default()
},
- self.initial_cwd.clone(),
- config_data.and_then(|d| d.config_file.as_deref().cloned()),
+ initial_cwd,
config_data.and_then(|d| d.lockfile.clone()),
- config_data.and_then(|d| d.package_json.clone()),
config_data
.and_then(|d| d.npmrc.clone())
.unwrap_or_else(create_default_npmrc),
+ workspace,
force_global_cache,
)?;
diff --git a/cli/lsp/resolver.rs b/cli/lsp/resolver.rs
index 5cf7f82b1..18d22afad 100644
--- a/cli/lsp/resolver.rs
+++ b/cli/lsp/resolver.rs
@@ -1,9 +1,9 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::args::create_default_npmrc;
-use crate::args::package_json;
use crate::args::CacheSetting;
use crate::args::CliLockfile;
+use crate::args::PackageJsonInstallDepsProvider;
use crate::graph_util::CliJsrUrlProvider;
use crate::http_util::HttpClientProvider;
use crate::lsp::config::Config;
@@ -26,6 +26,8 @@ use crate::util::progress_bar::ProgressBarStyle;
use dashmap::DashMap;
use deno_ast::MediaType;
use deno_cache_dir::HttpCache;
+use deno_config::workspace::PackageJsonDepResolution;
+use deno_config::workspace::WorkspaceResolver;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_graph::source::Resolver;
@@ -43,7 +45,6 @@ use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use indexmap::IndexMap;
-use package_json::PackageJsonDepsProvider;
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
@@ -460,13 +461,10 @@ async fn create_npm_resolver(
text_only_progress_bar: ProgressBar::new(ProgressBarStyle::TextOnly),
maybe_node_modules_path: config_data
.and_then(|d| d.node_modules_dir.clone()),
- package_json_deps_provider: Arc::new(PackageJsonDepsProvider::new(
- config_data
- .and_then(|d| d.package_json.as_ref())
- .map(|package_json| {
- package_json.resolve_local_package_json_version_reqs()
- }),
- )),
+ // only used for top-level installs, so an empty provider is fine here
+ package_json_deps_provider: Arc::new(
+ PackageJsonInstallDepsProvider::empty(),
+ ),
npmrc: config_data
.and_then(|d| d.npmrc.clone())
.unwrap_or_else(create_default_npmrc),
@@ -504,16 +502,22 @@ fn create_graph_resolver(
Arc::new(CliGraphResolver::new(CliGraphResolverOptions {
node_resolver: node_resolver.cloned(),
npm_resolver: npm_resolver.cloned(),
- package_json_deps_provider: Arc::new(PackageJsonDepsProvider::new(
+ workspace_resolver: Arc::new(WorkspaceResolver::new_raw(
+ config_data.and_then(|d| d.import_map.as_ref().map(|i| (**i).clone())),
config_data
- .and_then(|d| d.package_json.as_ref())
- .map(|package_json| {
- package_json.resolve_local_package_json_version_reqs()
- }),
+ .and_then(|d| d.package_json.clone())
+ .into_iter()
+ .collect(),
+ if config_data.map(|d| d.byonm).unwrap_or(false) {
+ PackageJsonDepResolution::Disabled
+ } else {
+ // todo(dsherret): this should also be disabled when using
+ // auto-install with a node_modules directory
+ PackageJsonDepResolution::Enabled
+ },
)),
maybe_jsx_import_source_config: config_file
.and_then(|cf| cf.to_maybe_jsx_import_source_config().ok().flatten()),
- maybe_import_map: config_data.and_then(|d| d.import_map.clone()),
maybe_vendor_dir: config_data.and_then(|d| d.vendor_dir.as_ref()),
bare_node_builtins_enabled: config_file
.map(|cf| cf.has_unstable("bare-node-builtins"))
diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs
index bab9766eb..cc88a0811 100644
--- a/cli/lsp/tsc.rs
+++ b/cli/lsp/tsc.rs
@@ -5405,7 +5405,7 @@ mod tests {
})
.to_string(),
resolve_url("file:///deno.json").unwrap(),
- &deno_config::ParseOptions::default(),
+ &deno_config::ConfigParseOptions::default(),
)
.unwrap(),
)
diff --git a/cli/module_loader.rs b/cli/module_loader.rs
index 0e81736e5..4786b742f 100644
--- a/cli/module_loader.rs
+++ b/cli/module_loader.rs
@@ -81,7 +81,8 @@ pub async fn load_top_level_deps(factory: &CliFactory) -> Result<(), AnyError> {
}
}
// cache as many entries in the import map as we can
- if let Some(import_map) = factory.maybe_import_map().await? {
+ let resolver = factory.workspace_resolver().await?;
+ if let Some(import_map) = resolver.maybe_import_map() {
let roots = import_map
.imports()
.entries()
@@ -510,7 +511,7 @@ impl<TGraphContainer: ModuleGraphContainer>
.as_managed()
.unwrap() // byonm won't create a Module::Npm
.resolve_pkg_folder_from_deno_module(module.nv_reference.nv())?;
- let maybe_resolution = self
+ self
.shared
.node_resolver
.resolve_package_sub_path_from_deno_module(
@@ -521,11 +522,8 @@ impl<TGraphContainer: ModuleGraphContainer>
)
.with_context(|| {
format!("Could not resolve '{}'.", module.nv_reference)
- })?;
- match maybe_resolution {
- Some(res) => res.into_url(),
- None => return Err(generic_error("not found")),
- }
+ })?
+ .into_url()
}
Some(Module::Node(module)) => module.specifier.clone(),
Some(Module::Js(module)) => module.specifier.clone(),
diff --git a/cli/npm/byonm.rs b/cli/npm/byonm.rs
index 0d4b9d4d4..bbd5da8ec 100644
--- a/cli/npm/byonm.rs
+++ b/cli/npm/byonm.rs
@@ -6,6 +6,7 @@ use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
+use deno_config::package_json::PackageJsonDepValue;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::serde_json;
@@ -87,13 +88,22 @@ impl ByonmCliNpmResolver {
req: &PackageReq,
pkg_json: &PackageJson,
) -> Option<String> {
- let deps = pkg_json.resolve_local_package_json_version_reqs();
+ let deps = pkg_json.resolve_local_package_json_deps();
for (key, value) in deps {
if let Ok(value) = value {
- if value.name == req.name
- && value.version_req.intersects(&req.version_req)
- {
- return Some(key);
+ match value {
+ PackageJsonDepValue::Req(dep_req) => {
+ if dep_req.name == req.name
+ && dep_req.version_req.intersects(&req.version_req)
+ {
+ return Some(key);
+ }
+ }
+ PackageJsonDepValue::Workspace(_workspace) => {
+ if key == req.name && req.version_req.tag() == Some("workspace") {
+ return Some(key);
+ }
+ }
}
}
}
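The byonm lookup above now distinguishes the two dep kinds: a plain requirement matches when the names agree and the version requirements intersect, while a "workspace:" dep matches when the request targets the alias with the literal `workspace` tag (e.g. npm:foo@workspace). A simplified sketch with stand-in types (the real code uses PackageReq and semver intersection):

struct Req {
    name: String,
    tag: Option<String>, // Some("workspace") for npm:foo@workspace
}

enum Dep {
    Req { name: String }, // version requirement intersection elided
    Workspace,
}

fn alias_for(req: &Req, deps: &[(String, Dep)]) -> Option<String> {
    for (alias, dep) in deps {
        let hit = match dep {
            // the real code also requires version_req.intersects(&req.version_req)
            Dep::Req { name } => *name == req.name,
            Dep::Workspace => {
                *alias == req.name && req.tag.as_deref() == Some("workspace")
            }
        };
        if hit {
            return Some(alias.clone());
        }
    }
    None
}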
diff --git a/cli/npm/managed/mod.rs b/cli/npm/managed/mod.rs
index 393fc8632..467703b05 100644
--- a/cli/npm/managed/mod.rs
+++ b/cli/npm/managed/mod.rs
@@ -29,7 +29,7 @@ use resolution::AddPkgReqsResult;
use crate::args::CliLockfile;
use crate::args::NpmProcessState;
use crate::args::NpmProcessStateKind;
-use crate::args::PackageJsonDepsProvider;
+use crate::args::PackageJsonInstallDepsProvider;
use crate::cache::FastInsecureHasher;
use crate::http_util::HttpClientProvider;
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
@@ -66,7 +66,7 @@ pub struct CliNpmResolverManagedCreateOptions {
pub text_only_progress_bar: crate::util::progress_bar::ProgressBar,
pub maybe_node_modules_path: Option<PathBuf>,
pub npm_system_info: NpmSystemInfo,
- pub package_json_deps_provider: Arc<PackageJsonDepsProvider>,
+ pub package_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
pub npmrc: Arc<ResolvedNpmRc>,
}
@@ -131,7 +131,7 @@ fn create_inner(
npm_api: Arc<CliNpmRegistryApi>,
npm_cache: Arc<NpmCache>,
npm_rc: Arc<ResolvedNpmRc>,
- package_json_deps_provider: Arc<PackageJsonDepsProvider>,
+ package_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
text_only_progress_bar: crate::util::progress_bar::ProgressBar,
node_modules_dir_path: Option<PathBuf>,
npm_system_info: NpmSystemInfo,
@@ -152,6 +152,7 @@ fn create_inner(
let fs_resolver = create_npm_fs_resolver(
fs.clone(),
npm_cache.clone(),
+ &package_json_deps_provider,
&text_only_progress_bar,
resolution.clone(),
tarball_cache.clone(),
@@ -249,7 +250,7 @@ pub struct ManagedCliNpmResolver {
maybe_lockfile: Option<Arc<CliLockfile>>,
npm_api: Arc<CliNpmRegistryApi>,
npm_cache: Arc<NpmCache>,
- package_json_deps_provider: Arc<PackageJsonDepsProvider>,
+ package_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
resolution: Arc<NpmResolution>,
tarball_cache: Arc<TarballCache>,
text_only_progress_bar: ProgressBar,
@@ -273,7 +274,7 @@ impl ManagedCliNpmResolver {
maybe_lockfile: Option<Arc<CliLockfile>>,
npm_api: Arc<CliNpmRegistryApi>,
npm_cache: Arc<NpmCache>,
- package_json_deps_provider: Arc<PackageJsonDepsProvider>,
+ package_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
resolution: Arc<NpmResolution>,
tarball_cache: Arc<TarballCache>,
text_only_progress_bar: ProgressBar,
@@ -459,12 +460,14 @@ impl ManagedCliNpmResolver {
pub async fn ensure_top_level_package_json_install(
&self,
) -> Result<bool, AnyError> {
- let Some(reqs) = self.package_json_deps_provider.reqs() else {
- return Ok(false);
- };
if !self.top_level_install_flag.raise() {
return Ok(false); // already did this
}
+ let reqs = self.package_json_deps_provider.remote_pkg_reqs();
+ if reqs.is_empty() {
+ return Ok(false);
+ }
+
// check if something needs resolving before bothering to load all
// the package information (which is slow)
if reqs
@@ -477,8 +480,7 @@ impl ManagedCliNpmResolver {
return Ok(false); // everything is already resolvable
}
- let reqs = reqs.into_iter().cloned().collect::<Vec<_>>();
- self.add_package_reqs(&reqs).await.map(|_| true)
+ self.add_package_reqs(reqs).await.map(|_| true)
}
pub async fn cache_package_info(
@@ -563,6 +565,7 @@ impl CliNpmResolver for ManagedCliNpmResolver {
create_npm_fs_resolver(
self.fs.clone(),
self.npm_cache.clone(),
+ &self.package_json_deps_provider,
&self.text_only_progress_bar,
npm_resolution.clone(),
self.tarball_cache.clone(),
diff --git a/cli/npm/managed/resolvers/local.rs b/cli/npm/managed/resolvers/local.rs
index d338720b6..e8fffa0cd 100644
--- a/cli/npm/managed/resolvers/local.rs
+++ b/cli/npm/managed/resolvers/local.rs
@@ -15,6 +15,7 @@ use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
+use crate::args::PackageJsonInstallDepsProvider;
use crate::cache::CACHE_PERM;
use crate::npm::cache_dir::mixed_case_package_name_decode;
use crate::util::fs::atomic_write_file_with_retries;
@@ -57,6 +58,7 @@ use super::common::RegistryReadPermissionChecker;
pub struct LocalNpmPackageResolver {
cache: Arc<NpmCache>,
fs: Arc<dyn deno_fs::FileSystem>,
+ pkg_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
progress_bar: ProgressBar,
resolution: Arc<NpmResolution>,
tarball_cache: Arc<TarballCache>,
@@ -67,9 +69,11 @@ pub struct LocalNpmPackageResolver {
}
impl LocalNpmPackageResolver {
+ #[allow(clippy::too_many_arguments)]
pub fn new(
cache: Arc<NpmCache>,
fs: Arc<dyn deno_fs::FileSystem>,
+ pkg_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
progress_bar: ProgressBar,
resolution: Arc<NpmResolution>,
tarball_cache: Arc<TarballCache>,
@@ -79,6 +83,7 @@ impl LocalNpmPackageResolver {
Self {
cache,
fs: fs.clone(),
+ pkg_json_deps_provider,
progress_bar,
resolution,
tarball_cache,
@@ -221,6 +226,7 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
sync_resolution_with_fs(
&self.resolution.snapshot(),
&self.cache,
+ &self.pkg_json_deps_provider,
&self.progress_bar,
&self.tarball_cache,
&self.root_node_modules_path,
@@ -244,12 +250,13 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
async fn sync_resolution_with_fs(
snapshot: &NpmResolutionSnapshot,
cache: &Arc<NpmCache>,
+ pkg_json_deps_provider: &PackageJsonInstallDepsProvider,
progress_bar: &ProgressBar,
tarball_cache: &Arc<TarballCache>,
root_node_modules_dir_path: &Path,
system_info: &NpmSystemInfo,
) -> Result<(), AnyError> {
- if snapshot.is_empty() {
+ if snapshot.is_empty() && pkg_json_deps_provider.workspace_pkgs().is_empty() {
return Ok(()); // don't create the directory
}
@@ -475,6 +482,19 @@ async fn sync_resolution_with_fs(
bin_entries.finish(snapshot, &bin_node_modules_dir_path)?;
}
+ // 7. Create symlinks for the workspace packages
+ {
+ // todo(#24419): this is not exactly correct because it should
+ // lay workspace packages out correctly (potentially in subdirectories),
+ // but this is good enough for a first pass
+ for workspace in pkg_json_deps_provider.workspace_pkgs() {
+ symlink_package_dir(
+ &workspace.pkg_dir,
+ &root_node_modules_dir_path.join(&workspace.alias),
+ )?;
+ }
+ }
+
setup_cache.save();
drop(single_process_lock);
drop(pb_clear_guard);
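Step 7 above links each workspace package into the root node_modules directory under its alias. A minimal Unix-only sketch of that linking (the real code goes through a platform-aware symlink_package_dir helper):

#[cfg(unix)]
fn link_workspace_pkgs(
    root_node_modules: &std::path::Path,
    pkgs: &[(String, std::path::PathBuf)], // (alias, package directory)
) -> std::io::Result<()> {
    for (alias, dir) in pkgs {
        let dest = root_node_modules.join(alias);
        match std::os::unix::fs::symlink(dir, &dest) {
            Ok(()) => {}
            // tolerate a link left behind by a previous sync
            Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => {}
            Err(e) => return Err(e),
        }
    }
    Ok(())
}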
diff --git a/cli/npm/managed/resolvers/mod.rs b/cli/npm/managed/resolvers/mod.rs
index 2d812a2be..a7f545916 100644
--- a/cli/npm/managed/resolvers/mod.rs
+++ b/cli/npm/managed/resolvers/mod.rs
@@ -10,6 +10,7 @@ use std::sync::Arc;
use deno_npm::NpmSystemInfo;
use deno_runtime::deno_fs::FileSystem;
+use crate::args::PackageJsonInstallDepsProvider;
use crate::util::progress_bar::ProgressBar;
pub use self::common::NpmPackageFsResolver;
@@ -21,9 +22,11 @@ use super::cache::NpmCache;
use super::cache::TarballCache;
use super::resolution::NpmResolution;
+#[allow(clippy::too_many_arguments)]
pub fn create_npm_fs_resolver(
fs: Arc<dyn FileSystem>,
npm_cache: Arc<NpmCache>,
+ pkg_json_deps_provider: &Arc<PackageJsonInstallDepsProvider>,
progress_bar: &ProgressBar,
resolution: Arc<NpmResolution>,
tarball_cache: Arc<TarballCache>,
@@ -34,6 +37,7 @@ pub fn create_npm_fs_resolver(
Some(node_modules_folder) => Arc::new(LocalNpmPackageResolver::new(
npm_cache,
fs,
+ pkg_json_deps_provider.clone(),
progress_bar.clone(),
resolution,
tarball_cache,
diff --git a/cli/resolver.rs b/cli/resolver.rs
index 9305cd1c9..26cf16ba9 100644
--- a/cli/resolver.rs
+++ b/cli/resolver.rs
@@ -4,7 +4,10 @@ use async_trait::async_trait;
use dashmap::DashMap;
use dashmap::DashSet;
use deno_ast::MediaType;
-use deno_config::package_json::PackageJsonDeps;
+use deno_config::package_json::PackageJsonDepValue;
+use deno_config::workspace::MappedResolution;
+use deno_config::workspace::MappedResolutionError;
+use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
@@ -30,14 +33,12 @@ use deno_runtime::deno_node::PackageJson;
use deno_runtime::fs_util::specifier_to_file_path;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq;
-use import_map::ImportMap;
use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use crate::args::JsxImportSourceConfig;
-use crate::args::PackageJsonDepsProvider;
use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
use crate::colors;
use crate::node::CliNodeCodeTranslator;
@@ -128,15 +129,31 @@ impl CliNodeResolver {
referrer: &ModuleSpecifier,
mode: NodeResolutionMode,
) -> Result<NodeResolution, AnyError> {
- let package_folder = self
- .npm_resolver
- .resolve_pkg_folder_from_deno_module_req(req_ref.req(), referrer)?;
- let maybe_resolution = self.resolve_package_sub_path_from_deno_module(
- &package_folder,
+ self.resolve_req_with_sub_path(
+ req_ref.req(),
req_ref.sub_path(),
referrer,
mode,
- )?;
+ )
+ }
+
+ pub fn resolve_req_with_sub_path(
+ &self,
+ req: &PackageReq,
+ sub_path: Option<&str>,
+ referrer: &ModuleSpecifier,
+ mode: NodeResolutionMode,
+ ) -> Result<NodeResolution, AnyError> {
+ let package_folder = self
+ .npm_resolver
+ .resolve_pkg_folder_from_deno_module_req(req, referrer)?;
+ let maybe_resolution = self
+ .maybe_resolve_package_sub_path_from_deno_module(
+ &package_folder,
+ sub_path,
+ referrer,
+ mode,
+ )?;
match maybe_resolution {
Some(resolution) => Ok(resolution),
None => {
@@ -150,8 +167,9 @@ impl CliNodeResolver {
}
}
Err(anyhow!(
- "Failed resolving package subpath for '{}' in '{}'.",
- req_ref,
+ "Failed resolving '{}{}' in '{}'.",
+ req,
+ sub_path.map(|s| format!("/{}", s)).unwrap_or_default(),
package_folder.display()
))
}
@@ -164,6 +182,31 @@ impl CliNodeResolver {
sub_path: Option<&str>,
referrer: &ModuleSpecifier,
mode: NodeResolutionMode,
+ ) -> Result<NodeResolution, AnyError> {
+ self
+ .maybe_resolve_package_sub_path_from_deno_module(
+ package_folder,
+ sub_path,
+ referrer,
+ mode,
+ )?
+ .ok_or_else(|| {
+ anyhow!(
+ "Failed resolving '{}' in '{}'.",
+ sub_path
+ .map(|s| format!("/{}", s))
+ .unwrap_or_else(|| ".".to_string()),
+ package_folder.display(),
+ )
+ })
+ }
+
+ pub fn maybe_resolve_package_sub_path_from_deno_module(
+ &self,
+ package_folder: &Path,
+ sub_path: Option<&str>,
+ referrer: &ModuleSpecifier,
+ mode: NodeResolutionMode,
) -> Result<Option<NodeResolution>, AnyError> {
self.handle_node_resolve_result(
self.node_resolver.resolve_package_subpath_from_deno_module(
@@ -350,120 +393,39 @@ impl CjsResolutionStore {
}
}
-/// Result of checking if a specifier is mapped via
-/// an import map or package.json.
-pub enum MappedResolution {
- None,
- PackageJson(ModuleSpecifier),
- ImportMap(ModuleSpecifier),
-}
-
-impl MappedResolution {
- pub fn into_specifier(self) -> Option<ModuleSpecifier> {
- match self {
- MappedResolution::None => Option::None,
- MappedResolution::PackageJson(specifier) => Some(specifier),
- MappedResolution::ImportMap(specifier) => Some(specifier),
- }
- }
-}
-
-/// Resolver for specifiers that could be mapped via an
-/// import map or package.json.
-#[derive(Debug)]
-pub struct MappedSpecifierResolver {
- maybe_import_map: Option<Arc<ImportMap>>,
- package_json_deps_provider: Arc<PackageJsonDepsProvider>,
-}
-
-impl MappedSpecifierResolver {
- pub fn new(
- maybe_import_map: Option<Arc<ImportMap>>,
- package_json_deps_provider: Arc<PackageJsonDepsProvider>,
- ) -> Self {
- Self {
- maybe_import_map,
- package_json_deps_provider,
- }
- }
-
- pub fn resolve(
- &self,
- specifier: &str,
- referrer: &ModuleSpecifier,
- ) -> Result<MappedResolution, AnyError> {
- // attempt to resolve with the import map first
- let maybe_import_map_err = match self
- .maybe_import_map
- .as_ref()
- .map(|import_map| import_map.resolve(specifier, referrer))
- {
- Some(Ok(value)) => return Ok(MappedResolution::ImportMap(value)),
- Some(Err(err)) => Some(err),
- None => None,
- };
-
- // then with package.json
- if let Some(deps) = self.package_json_deps_provider.deps() {
- if let Some(specifier) = resolve_package_json_dep(specifier, deps)? {
- return Ok(MappedResolution::PackageJson(specifier));
- }
- }
-
- // otherwise, surface the import map error or try resolving when has no import map
- if let Some(err) = maybe_import_map_err {
- Err(err.into())
- } else {
- Ok(MappedResolution::None)
- }
- }
-}
-
/// A resolver that takes care of resolution, taking into account loaded
/// import map, JSX settings.
#[derive(Debug)]
pub struct CliGraphResolver {
+ node_resolver: Option<Arc<CliNodeResolver>>,
+ npm_resolver: Option<Arc<dyn CliNpmResolver>>,
sloppy_imports_resolver: Option<SloppyImportsResolver>,
- mapped_specifier_resolver: MappedSpecifierResolver,
+ workspace_resolver: Arc<WorkspaceResolver>,
maybe_default_jsx_import_source: Option<String>,
maybe_default_jsx_import_source_types: Option<String>,
maybe_jsx_import_source_module: Option<String>,
maybe_vendor_specifier: Option<ModuleSpecifier>,
- node_resolver: Option<Arc<CliNodeResolver>>,
- npm_resolver: Option<Arc<dyn CliNpmResolver>>,
found_package_json_dep_flag: AtomicFlag,
bare_node_builtins_enabled: bool,
}
pub struct CliGraphResolverOptions<'a> {
- pub sloppy_imports_resolver: Option<SloppyImportsResolver>,
pub node_resolver: Option<Arc<CliNodeResolver>>,
pub npm_resolver: Option<Arc<dyn CliNpmResolver>>,
- pub package_json_deps_provider: Arc<PackageJsonDepsProvider>,
+ pub sloppy_imports_resolver: Option<SloppyImportsResolver>,
+ pub workspace_resolver: Arc<WorkspaceResolver>,
+ pub bare_node_builtins_enabled: bool,
pub maybe_jsx_import_source_config: Option<JsxImportSourceConfig>,
- pub maybe_import_map: Option<Arc<ImportMap>>,
pub maybe_vendor_dir: Option<&'a PathBuf>,
- pub bare_node_builtins_enabled: bool,
}
impl CliGraphResolver {
pub fn new(options: CliGraphResolverOptions) -> Self {
- let is_byonm = options
- .npm_resolver
- .as_ref()
- .map(|n| n.as_byonm().is_some())
- .unwrap_or(false);
Self {
+ node_resolver: options.node_resolver,
+ npm_resolver: options.npm_resolver,
sloppy_imports_resolver: options.sloppy_imports_resolver,
- mapped_specifier_resolver: MappedSpecifierResolver::new(
- options.maybe_import_map,
- if is_byonm {
- // don't resolve from the root package.json deps for byonm
- Arc::new(PackageJsonDepsProvider::new(None))
- } else {
- options.package_json_deps_provider
- },
- ),
+ workspace_resolver: options.workspace_resolver,
maybe_default_jsx_import_source: options
.maybe_jsx_import_source_config
.as_ref()
@@ -478,8 +440,6 @@ impl CliGraphResolver {
maybe_vendor_specifier: options
.maybe_vendor_dir
.and_then(|v| ModuleSpecifier::from_directory_path(v).ok()),
- node_resolver: options.node_resolver,
- npm_resolver: options.npm_resolver,
found_package_json_dep_flag: Default::default(),
bare_node_builtins_enabled: options.bare_node_builtins_enabled,
}
@@ -497,6 +457,7 @@ impl CliGraphResolver {
}
}
+ // todo(dsherret): if we returned structured errors from the NodeResolver we wouldn't need this
fn check_surface_byonm_node_error(
&self,
specifier: &str,
@@ -561,22 +522,92 @@ impl Resolver for CliGraphResolver {
let referrer = &referrer_range.specifier;
let result: Result<_, ResolveError> = self
- .mapped_specifier_resolver
+ .workspace_resolver
.resolve(specifier, referrer)
- .map_err(|err| err.into())
- .and_then(|resolution| match resolution {
- MappedResolution::ImportMap(specifier) => Ok(specifier),
- MappedResolution::PackageJson(specifier) => {
+ .map_err(|err| match err {
+ MappedResolutionError::Specifier(err) => ResolveError::Specifier(err),
+ MappedResolutionError::ImportMap(err) => {
+ ResolveError::Other(err.into())
+ }
+ });
+ let result = match result {
+ Ok(resolution) => match resolution {
+ MappedResolution::Normal(specifier)
+ | MappedResolution::ImportMap(specifier) => Ok(specifier),
+ // todo(dsherret): for byonm it should do resolution solely based on
+ // the referrer and not the package.json
+ MappedResolution::PackageJson {
+ dep_result,
+ alias,
+ sub_path,
+ ..
+ } => {
// found a specifier in the package.json, so mark that
// we need to do an "npm install" later
self.found_package_json_dep_flag.raise();
- Ok(specifier)
+
+ dep_result
+ .as_ref()
+ .map_err(|e| ResolveError::Other(e.clone().into()))
+ .and_then(|dep| match dep {
+ PackageJsonDepValue::Req(req) => {
+ ModuleSpecifier::parse(&format!(
+ "npm:{}{}",
+ req,
+ sub_path.map(|s| format!("/{}", s)).unwrap_or_default()
+ ))
+ .map_err(|e| ResolveError::Other(e.into()))
+ }
+ PackageJsonDepValue::Workspace(version_req) => self
+ .workspace_resolver
+ .resolve_workspace_pkg_json_folder_for_pkg_json_dep(
+ alias,
+ version_req,
+ )
+ .map_err(|e| ResolveError::Other(e.into()))
+ .and_then(|pkg_folder| {
+ Ok(
+ self
+ .node_resolver
+ .as_ref()
+ .unwrap()
+ .resolve_package_sub_path_from_deno_module(
+ pkg_folder,
+ sub_path.as_deref(),
+ referrer,
+ to_node_mode(mode),
+ )?
+ .into_url(),
+ )
+ }),
+ })
}
- MappedResolution::None => {
- deno_graph::resolve_import(specifier, &referrer_range.specifier)
- .map_err(|err| err.into())
+ },
+ Err(err) => Err(err),
+ };
+
+ // check if it's an npm specifier that resolves to a workspace member
+ if let Some(node_resolver) = &self.node_resolver {
+ if let Ok(specifier) = &result {
+ if let Ok(req_ref) = NpmPackageReqReference::from_specifier(specifier) {
+ if let Some(pkg_folder) = self
+ .workspace_resolver
+ .resolve_workspace_pkg_json_folder_for_npm_specifier(req_ref.req())
+ {
+ return Ok(
+ node_resolver
+ .resolve_package_sub_path_from_deno_module(
+ pkg_folder,
+ req_ref.sub_path(),
+ referrer,
+ to_node_mode(mode),
+ )?
+ .into_url(),
+ );
+ }
}
- });
+ }
+ }
// do sloppy imports resolution if enabled
let result =
@@ -733,28 +764,6 @@ fn sloppy_imports_resolve(
resolution.into_specifier().into_owned()
}
-fn resolve_package_json_dep(
- specifier: &str,
- deps: &PackageJsonDeps,
-) -> Result<Option<ModuleSpecifier>, AnyError> {
- for (bare_specifier, req_result) in deps {
- if specifier.starts_with(bare_specifier) {
- let path = &specifier[bare_specifier.len()..];
- if path.is_empty() || path.starts_with('/') {
- let req = req_result.as_ref().map_err(|err| {
- anyhow!(
- "Parsing version constraints in the application-level package.json is more strict at the moment.\n\n{:#}",
- err.clone()
- )
- })?;
- return Ok(Some(ModuleSpecifier::parse(&format!("npm:{req}{path}"))?));
- }
- }
- }
-
- Ok(None)
-}
-
#[derive(Debug)]
pub struct WorkerCliNpmGraphResolver<'a> {
npm_resolver: Option<&'a Arc<dyn CliNpmResolver>>,
@@ -1266,73 +1275,11 @@ impl SloppyImportsResolver {
#[cfg(test)]
mod test {
- use std::collections::BTreeMap;
-
use test_util::TestContext;
use super::*;
#[test]
- fn test_resolve_package_json_dep() {
- fn resolve(
- specifier: &str,
- deps: &BTreeMap<String, PackageReq>,
- ) -> Result<Option<String>, String> {
- let deps = deps
- .iter()
- .map(|(key, value)| (key.to_string(), Ok(value.clone())))
- .collect();
- resolve_package_json_dep(specifier, &deps)
- .map(|s| s.map(|s| s.to_string()))
- .map_err(|err| err.to_string())
- }
-
- let deps = BTreeMap::from([
- (
- "package".to_string(),
- PackageReq::from_str("package@1.0").unwrap(),
- ),
- (
- "package-alias".to_string(),
- PackageReq::from_str("package@^1.2").unwrap(),
- ),
- (
- "@deno/test".to_string(),
- PackageReq::from_str("@deno/test@~0.2").unwrap(),
- ),
- ]);
-
- assert_eq!(
- resolve("package", &deps).unwrap(),
- Some("npm:package@1.0".to_string()),
- );
- assert_eq!(
- resolve("package/some_path.ts", &deps).unwrap(),
- Some("npm:package@1.0/some_path.ts".to_string()),
- );
-
- assert_eq!(
- resolve("@deno/test", &deps).unwrap(),
- Some("npm:@deno/test@~0.2".to_string()),
- );
- assert_eq!(
- resolve("@deno/test/some_path.ts", &deps).unwrap(),
- Some("npm:@deno/test@~0.2/some_path.ts".to_string()),
- );
- // matches the start, but doesn't have the same length or a path
- assert_eq!(resolve("@deno/testing", &deps).unwrap(), None,);
-
- // alias
- assert_eq!(
- resolve("package-alias", &deps).unwrap(),
- Some("npm:package@^1.2".to_string()),
- );
-
- // non-existent bare specifier
- assert_eq!(resolve("non-existent", &deps).unwrap(), None);
- }
-
- #[test]
fn test_unstable_sloppy_imports() {
fn resolve(specifier: &ModuleSpecifier) -> SloppyImportsResolution {
SloppyImportsResolver::new(Arc::new(deno_fs::RealFs))
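The resolver changes above replace MappedSpecifierResolver with the workspace resolver: a package.json "req" dependency is turned into an npm: specifier (keeping any sub path), while a "workspace:" dependency resolves through the node resolver to the member folder. A sketch of the npm: specifier construction, with a simplified stand-in for the deno_config result type:

// stand-in for the PackageJson branch of MappedResolution
struct PkgJsonDep {
    req: String,              // e.g. "chalk@^5"
    sub_path: Option<String>, // e.g. "source/index.js"
}

fn to_npm_specifier(dep: &PkgJsonDep) -> String {
    format!(
        "npm:{}{}",
        dep.req,
        dep.sub_path
            .as_deref()
            .map(|s| format!("/{s}"))
            .unwrap_or_default()
    )
}

// to_npm_specifier of chalk@^5 plus "source/index.js"
// yields "npm:chalk@^5/source/index.js"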
diff --git a/cli/schemas/config-file.v1.json b/cli/schemas/config-file.v1.json
index bfcae271b..84e65fc77 100644
--- a/cli/schemas/config-file.v1.json
+++ b/cli/schemas/config-file.v1.json
@@ -604,7 +604,7 @@
}
]
},
- "workspaces": {
+ "workspace": {
"type": "array",
"items": {
"type": "string"
diff --git a/cli/standalone/binary.rs b/cli/standalone/binary.rs
index 98af5fa77..bf035577c 100644
--- a/cli/standalone/binary.rs
+++ b/cli/standalone/binary.rs
@@ -2,6 +2,7 @@
use std::borrow::Cow;
use std::collections::BTreeMap;
+use std::collections::VecDeque;
use std::env::current_exe;
use std::ffi::OsString;
use std::fs;
@@ -15,8 +16,8 @@ use std::path::PathBuf;
use std::process::Command;
use deno_ast::ModuleSpecifier;
-use deno_config::package_json::PackageJsonDepValueParseError;
-use deno_config::package_json::PackageJsonDeps;
+use deno_config::workspace::PackageJsonDepResolution;
+use deno_config::workspace::Workspace;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
@@ -26,9 +27,12 @@ use deno_core::futures::AsyncSeekExt;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_npm::NpmSystemInfo;
+use deno_runtime::deno_node::PackageJson;
use deno_semver::npm::NpmVersionReqParseError;
use deno_semver::package::PackageReq;
use deno_semver::VersionReqSpecifierParseError;
+use eszip::EszipRelativeFileBaseUrl;
+use indexmap::IndexMap;
use log::Level;
use serde::Deserialize;
use serde::Serialize;
@@ -36,7 +40,7 @@ use serde::Serialize;
use crate::args::CaData;
use crate::args::CliOptions;
use crate::args::CompileFlags;
-use crate::args::PackageJsonDepsProvider;
+use crate::args::PackageJsonInstallDepsProvider;
use crate::args::PermissionFlags;
use crate::args::UnstableConfig;
use crate::cache::DenoDir;
@@ -44,6 +48,8 @@ use crate::file_fetcher::FileFetcher;
use crate::http_util::HttpClientProvider;
use crate::npm::CliNpmResolver;
use crate::npm::InnerCliNpmResolverRef;
+use crate::standalone::virtual_fs::VfsEntry;
+use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
@@ -54,82 +60,31 @@ use super::virtual_fs::VirtualDirectory;
const MAGIC_TRAILER: &[u8; 8] = b"d3n0l4nd";
-#[derive(Serialize, Deserialize)]
-enum SerializablePackageJsonDepValueParseError {
- VersionReq(String),
- Unsupported { scheme: String },
-}
-
-impl SerializablePackageJsonDepValueParseError {
- pub fn from_err(err: PackageJsonDepValueParseError) -> Self {
- match err {
- PackageJsonDepValueParseError::VersionReq(err) => {
- Self::VersionReq(err.source.to_string())
- }
- PackageJsonDepValueParseError::Unsupported { scheme } => {
- Self::Unsupported { scheme }
- }
- }
- }
-
- pub fn into_err(self) -> PackageJsonDepValueParseError {
- match self {
- SerializablePackageJsonDepValueParseError::VersionReq(source) => {
- PackageJsonDepValueParseError::VersionReq(NpmVersionReqParseError {
- source: monch::ParseErrorFailureError::new(source),
- })
- }
- SerializablePackageJsonDepValueParseError::Unsupported { scheme } => {
- PackageJsonDepValueParseError::Unsupported { scheme }
- }
- }
- }
-}
-
-#[derive(Serialize, Deserialize)]
-pub struct SerializablePackageJsonDeps(
- BTreeMap<
- String,
- Result<PackageReq, SerializablePackageJsonDepValueParseError>,
- >,
-);
-
-impl SerializablePackageJsonDeps {
- pub fn from_deps(deps: PackageJsonDeps) -> Self {
- Self(
- deps
- .into_iter()
- .map(|(name, req)| {
- let res =
- req.map_err(SerializablePackageJsonDepValueParseError::from_err);
- (name, res)
- })
- .collect(),
- )
- }
-
- pub fn into_deps(self) -> PackageJsonDeps {
- self
- .0
- .into_iter()
- .map(|(name, res)| (name, res.map_err(|err| err.into_err())))
- .collect()
- }
-}
-
#[derive(Deserialize, Serialize)]
pub enum NodeModules {
Managed {
- /// Whether this uses a node_modules directory (true) or the global cache (false).
- node_modules_dir: bool,
- package_json_deps: Option<SerializablePackageJsonDeps>,
+ /// Relative path for the node_modules directory in the vfs.
+ node_modules_dir: Option<String>,
},
Byonm {
- package_json_deps: Option<SerializablePackageJsonDeps>,
+ root_node_modules_dir: String,
},
}
#[derive(Deserialize, Serialize)]
+pub struct SerializedWorkspaceResolverImportMap {
+ pub specifier: String,
+ pub json: String,
+}
+
+#[derive(Deserialize, Serialize)]
+pub struct SerializedWorkspaceResolver {
+ pub import_map: Option<SerializedWorkspaceResolverImportMap>,
+ pub package_jsons: BTreeMap<String, serde_json::Value>,
+ pub pkg_json_resolution: PackageJsonDepResolution,
+}
+
+#[derive(Deserialize, Serialize)]
pub struct Metadata {
pub argv: Vec<String>,
pub seed: Option<u64>,
@@ -140,8 +95,8 @@ pub struct Metadata {
pub ca_stores: Option<Vec<String>>,
pub ca_data: Option<Vec<u8>>,
pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
- pub maybe_import_map: Option<(Url, String)>,
- pub entrypoint: ModuleSpecifier,
+ pub workspace_resolver: SerializedWorkspaceResolver,
+ pub entrypoint_key: String,
pub node_modules: Option<NodeModules>,
pub disable_deprecated_api_warning: bool,
pub unstable_config: UnstableConfig,
@@ -415,13 +370,13 @@ pub fn unpack_into_dir(
fs::remove_file(&archive_path)?;
Ok(exe_path)
}
+
pub struct DenoCompileBinaryWriter<'a> {
deno_dir: &'a DenoDir,
file_fetcher: &'a FileFetcher,
http_client_provider: &'a HttpClientProvider,
npm_resolver: &'a dyn CliNpmResolver,
npm_system_info: NpmSystemInfo,
- package_json_deps_provider: &'a PackageJsonDepsProvider,
}
impl<'a> DenoCompileBinaryWriter<'a> {
@@ -432,7 +387,6 @@ impl<'a> DenoCompileBinaryWriter<'a> {
http_client_provider: &'a HttpClientProvider,
npm_resolver: &'a dyn CliNpmResolver,
npm_system_info: NpmSystemInfo,
- package_json_deps_provider: &'a PackageJsonDepsProvider,
) -> Self {
Self {
deno_dir,
@@ -440,7 +394,6 @@ impl<'a> DenoCompileBinaryWriter<'a> {
http_client_provider,
npm_resolver,
npm_system_info,
- package_json_deps_provider,
}
}
@@ -448,7 +401,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
&self,
writer: &mut impl Write,
eszip: eszip::EszipV2,
- module_specifier: &ModuleSpecifier,
+ root_dir_url: EszipRelativeFileBaseUrl<'_>,
+ entrypoint: &ModuleSpecifier,
compile_flags: &CompileFlags,
cli_options: &CliOptions,
) -> Result<(), AnyError> {
@@ -465,13 +419,13 @@ impl<'a> DenoCompileBinaryWriter<'a> {
}
set_windows_binary_to_gui(&mut original_binary)?;
}
-
self
.write_standalone_binary(
writer,
original_binary,
eszip,
- module_specifier,
+ root_dir_url,
+ entrypoint,
cli_options,
compile_flags,
)
@@ -557,11 +511,13 @@ impl<'a> DenoCompileBinaryWriter<'a> {
/// This functions creates a standalone deno binary by appending a bundle
/// and magic trailer to the currently executing binary.
+ #[allow(clippy::too_many_arguments)]
async fn write_standalone_binary(
&self,
writer: &mut impl Write,
original_bin: Vec<u8>,
mut eszip: eszip::EszipV2,
+ root_dir_url: EszipRelativeFileBaseUrl<'_>,
entrypoint: &ModuleSpecifier,
cli_options: &CliOptions,
compile_flags: &CompileFlags,
@@ -574,48 +530,60 @@ impl<'a> DenoCompileBinaryWriter<'a> {
Some(CaData::Bytes(bytes)) => Some(bytes.clone()),
None => None,
};
- let maybe_import_map = cli_options
- .resolve_import_map(self.file_fetcher)
- .await?
- .map(|import_map| (import_map.base_url().clone(), import_map.to_json()));
- let (npm_vfs, npm_files, node_modules) =
- match self.npm_resolver.as_inner() {
- InnerCliNpmResolverRef::Managed(managed) => {
- let snapshot =
- managed.serialized_valid_snapshot_for_system(&self.npm_system_info);
- if !snapshot.as_serialized().packages.is_empty() {
- let (root_dir, files) = self.build_vfs()?.into_dir_and_files();
- eszip.add_npm_snapshot(snapshot);
- (
- Some(root_dir),
- files,
- Some(NodeModules::Managed {
- node_modules_dir: self
- .npm_resolver
- .root_node_modules_path()
- .is_some(),
- package_json_deps: self.package_json_deps_provider.deps().map(
- |deps| SerializablePackageJsonDeps::from_deps(deps.clone()),
- ),
- }),
- )
- } else {
- (None, Vec::new(), None)
- }
- }
- InnerCliNpmResolverRef::Byonm(_) => {
- let (root_dir, files) = self.build_vfs()?.into_dir_and_files();
+ let workspace_resolver = cli_options
+ .create_workspace_resolver(self.file_fetcher)
+ .await?;
+ let root_path = root_dir_url.inner().to_file_path().unwrap();
+ let (npm_vfs, npm_files, node_modules) = match self.npm_resolver.as_inner()
+ {
+ InnerCliNpmResolverRef::Managed(managed) => {
+ let snapshot =
+ managed.serialized_valid_snapshot_for_system(&self.npm_system_info);
+ if !snapshot.as_serialized().packages.is_empty() {
+ let (root_dir, files) = self
+ .build_vfs(&root_path, cli_options)?
+ .into_dir_and_files();
+ eszip.add_npm_snapshot(snapshot);
(
Some(root_dir),
files,
- Some(NodeModules::Byonm {
- package_json_deps: self.package_json_deps_provider.deps().map(
- |deps| SerializablePackageJsonDeps::from_deps(deps.clone()),
+ Some(NodeModules::Managed {
+ node_modules_dir: self.npm_resolver.root_node_modules_path().map(
+ |path| {
+ root_dir_url
+ .specifier_key(
+ &ModuleSpecifier::from_directory_path(path).unwrap(),
+ )
+ .into_owned()
+ },
),
}),
)
+ } else {
+ (None, Vec::new(), None)
}
- };
+ }
+ InnerCliNpmResolverRef::Byonm(resolver) => {
+ let (root_dir, files) = self
+ .build_vfs(&root_path, cli_options)?
+ .into_dir_and_files();
+ (
+ Some(root_dir),
+ files,
+ Some(NodeModules::Byonm {
+ root_node_modules_dir: root_dir_url
+ .specifier_key(
+ &ModuleSpecifier::from_directory_path(
+ // will always be set for byonm
+ resolver.root_node_modules_path().unwrap(),
+ )
+ .unwrap(),
+ )
+ .into_owned(),
+ }),
+ )
+ }
+ };
let metadata = Metadata {
argv: compile_flags.args.clone(),
@@ -629,8 +597,32 @@ impl<'a> DenoCompileBinaryWriter<'a> {
log_level: cli_options.log_level(),
ca_stores: cli_options.ca_stores().clone(),
ca_data,
- entrypoint: entrypoint.clone(),
- maybe_import_map,
+ entrypoint_key: root_dir_url.specifier_key(entrypoint).into_owned(),
+ workspace_resolver: SerializedWorkspaceResolver {
+ import_map: workspace_resolver.maybe_import_map().map(|i| {
+ SerializedWorkspaceResolverImportMap {
+ specifier: if i.base_url().scheme() == "file" {
+ root_dir_url.specifier_key(i.base_url()).into_owned()
+ } else {
+ // just make a remote url local
+ "deno.json".to_string()
+ },
+ json: i.to_json(),
+ }
+ }),
+ package_jsons: workspace_resolver
+ .package_jsons()
+ .map(|pkg_json| {
+ (
+ root_dir_url
+ .specifier_key(&pkg_json.specifier())
+ .into_owned(),
+ serde_json::to_value(pkg_json).unwrap(),
+ )
+ })
+ .collect(),
+ pkg_json_resolution: workspace_resolver.pkg_json_dep_resolution(),
+ },
node_modules,
disable_deprecated_api_warning: cli_options
.disable_deprecated_api_warning,
@@ -653,7 +645,11 @@ impl<'a> DenoCompileBinaryWriter<'a> {
)
}
- fn build_vfs(&self) -> Result<VfsBuilder, AnyError> {
+ fn build_vfs(
+ &self,
+ root_path: &Path,
+ cli_options: &CliOptions,
+ ) -> Result<VfsBuilder, AnyError> {
fn maybe_warn_different_system(system_info: &NpmSystemInfo) {
if system_info != &NpmSystemInfo::default() {
log::warn!("{} The node_modules directory may be incompatible with the target system.", crate::colors::yellow("Warning"));
@@ -664,7 +660,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
InnerCliNpmResolverRef::Managed(npm_resolver) => {
if let Some(node_modules_path) = npm_resolver.root_node_modules_path() {
maybe_warn_different_system(&self.npm_system_info);
- let mut builder = VfsBuilder::new(node_modules_path.clone())?;
+ let mut builder = VfsBuilder::new(root_path.to_path_buf())?;
builder.add_dir_recursive(node_modules_path)?;
Ok(builder)
} else {
@@ -678,23 +674,82 @@ impl<'a> DenoCompileBinaryWriter<'a> {
npm_resolver.resolve_pkg_folder_from_pkg_id(&package.id)?;
builder.add_dir_recursive(&folder)?;
}
- // overwrite the root directory's name to obscure the user's registry url
- builder.set_root_dir_name("node_modules".to_string());
+
+      // Flatten all the registry folders into a single "node_modules/localhost" folder
+      // that denort will use when loading the npm cache. This avoids exposing the
+      // user's private registry information and means we don't have to bother
+      // serializing all the different registry configs into the binary.
+ builder.with_root_dir(|root_dir| {
+ root_dir.name = "node_modules".to_string();
+ let mut new_entries = Vec::with_capacity(root_dir.entries.len());
+ let mut localhost_entries = IndexMap::new();
+ for entry in std::mem::take(&mut root_dir.entries) {
+ match entry {
+ VfsEntry::Dir(dir) => {
+ for entry in dir.entries {
+ log::debug!(
+ "Flattening {} into node_modules",
+ entry.name()
+ );
+ if let Some(existing) =
+ localhost_entries.insert(entry.name().to_string(), entry)
+ {
+ panic!(
+ "Unhandled scenario where a duplicate entry was found: {:?}",
+ existing
+ );
+ }
+ }
+ }
+ VfsEntry::File(_) | VfsEntry::Symlink(_) => {
+ new_entries.push(entry);
+ }
+ }
+ }
+ new_entries.push(VfsEntry::Dir(VirtualDirectory {
+ name: "localhost".to_string(),
+ entries: localhost_entries.into_iter().map(|(_, v)| v).collect(),
+ }));
+ // needs to be sorted by name
+ new_entries.sort_by(|a, b| a.name().cmp(b.name()));
+ root_dir.entries = new_entries;
+ });
+
Ok(builder)
}
}
- InnerCliNpmResolverRef::Byonm(npm_resolver) => {
+ InnerCliNpmResolverRef::Byonm(_) => {
maybe_warn_different_system(&self.npm_system_info);
- // the root_node_modules directory will always exist for byonm
- let node_modules_path = npm_resolver.root_node_modules_path().unwrap();
- let parent_path = node_modules_path.parent().unwrap();
- let mut builder = VfsBuilder::new(parent_path.to_path_buf())?;
- let package_json_path = parent_path.join("package.json");
- if package_json_path.exists() {
- builder.add_file_at_path(&package_json_path)?;
+ let mut builder = VfsBuilder::new(root_path.to_path_buf())?;
+ for pkg_json in cli_options.workspace.package_jsons() {
+ builder.add_file_at_path(&pkg_json.path)?;
}
- if node_modules_path.exists() {
- builder.add_dir_recursive(node_modules_path)?;
+ // traverse and add all the node_modules directories in the workspace
+ let mut pending_dirs = VecDeque::new();
+ pending_dirs.push_back(
+ cli_options
+ .workspace
+ .root_folder()
+ .0
+ .to_file_path()
+ .unwrap(),
+ );
+ while let Some(pending_dir) = pending_dirs.pop_front() {
+ let entries = fs::read_dir(&pending_dir).with_context(|| {
+ format!("Failed reading: {}", pending_dir.display())
+ })?;
+ for entry in entries {
+ let entry = entry?;
+ let path = entry.path();
+ if !path.is_dir() {
+ continue;
+ }
+ if path.ends_with("node_modules") {
+ builder.add_dir_recursive(&path)?;
+ } else {
+ pending_dirs.push_back(path);
+ }
+ }
}
Ok(builder)
}
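
The with_root_dir closure above rewrites the VFS root in place. As a rough stand-alone sketch of that transform, with simplified Entry/Dir types standing in for the real VfsEntry/VirtualDirectory and a BTreeMap standing in for IndexMap (the real code panics on a duplicate name; this sketch just overwrites):

    use std::collections::BTreeMap;

    enum Entry {
        Dir(Dir),
        File(String),
    }

    struct Dir {
        name: String,
        entries: Vec<Entry>,
    }

    impl Entry {
        fn name(&self) -> &str {
            match self {
                Entry::Dir(d) => &d.name,
                Entry::File(name) => name,
            }
        }
    }

    /// Moves the children of every top-level registry directory into a
    /// single "localhost" directory, keeps loose files where they are,
    /// then re-sorts by name (the VFS relies on sorted entries for
    /// binary search).
    fn flatten_registries(root: &mut Dir) {
        root.name = "node_modules".to_string();
        let mut new_entries = Vec::new();
        let mut localhost: BTreeMap<String, Entry> = BTreeMap::new();
        for entry in std::mem::take(&mut root.entries) {
            match entry {
                Entry::Dir(dir) => {
                    for child in dir.entries {
                        localhost.insert(child.name().to_string(), child);
                    }
                }
                file => new_entries.push(file),
            }
        }
        new_entries.push(Entry::Dir(Dir {
            name: "localhost".to_string(),
            entries: localhost.into_values().collect(),
        }));
        new_entries.sort_by(|a, b| a.name().cmp(b.name()));
        root.entries = new_entries;
    }
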
diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs
index 24ba7c9db..cbd14db4f 100644
--- a/cli/standalone/mod.rs
+++ b/cli/standalone/mod.rs
@@ -10,7 +10,7 @@ use crate::args::get_root_cert_store;
use crate::args::npm_pkg_req_ref_to_binary_command;
use crate::args::CaData;
use crate::args::CacheSetting;
-use crate::args::PackageJsonDepsProvider;
+use crate::args::PackageJsonInstallDepsProvider;
use crate::args::StorageKeyResolver;
use crate::cache::Caches;
use crate::cache::DenoDirProvider;
@@ -25,7 +25,6 @@ use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::NpmCacheDir;
use crate::resolver::CjsResolutionStore;
use crate::resolver::CliNodeResolver;
-use crate::resolver::MappedSpecifierResolver;
use crate::resolver::NpmModuleLoader;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
@@ -35,6 +34,10 @@ use crate::worker::CliMainWorkerOptions;
use crate::worker::ModuleLoaderAndSourceMapGetter;
use crate::worker::ModuleLoaderFactory;
use deno_ast::MediaType;
+use deno_config::package_json::PackageJsonDepValue;
+use deno_config::workspace::MappedResolution;
+use deno_config::workspace::MappedResolutionError;
+use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::Context;
use deno_core::error::generic_error;
use deno_core::error::type_error;
@@ -48,6 +51,7 @@ use deno_core::ModuleSpecifier;
use deno_core::ModuleType;
use deno_core::RequestedModuleType;
use deno_core::ResolutionKind;
+use deno_npm::npm_rc::ResolvedNpmRc;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::analyze::NodeCodeTranslator;
use deno_runtime::deno_node::NodeResolutionMode;
@@ -59,7 +63,9 @@ use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_runtime::WorkerExecutionMode;
use deno_runtime::WorkerLogLevel;
use deno_semver::npm::NpmPackageReqReference;
+use eszip::EszipRelativeFileBaseUrl;
use import_map::parse_from_json;
+use std::borrow::Cow;
use std::rc::Rc;
use std::sync::Arc;
@@ -75,9 +81,43 @@ use self::binary::load_npm_vfs;
use self::binary::Metadata;
use self::file_system::DenoCompileFileSystem;
-struct SharedModuleLoaderState {
+struct WorkspaceEszipModule {
+ specifier: ModuleSpecifier,
+ inner: eszip::Module,
+}
+
+struct WorkspaceEszip {
eszip: eszip::EszipV2,
- mapped_specifier_resolver: MappedSpecifierResolver,
+ root_dir_url: ModuleSpecifier,
+}
+
+impl WorkspaceEszip {
+ pub fn get_module(
+ &self,
+ specifier: &ModuleSpecifier,
+ ) -> Option<WorkspaceEszipModule> {
+ if specifier.scheme() == "file" {
+ let specifier_key = EszipRelativeFileBaseUrl::new(&self.root_dir_url)
+ .specifier_key(specifier);
+ let module = self.eszip.get_module(&specifier_key)?;
+ let specifier = self.root_dir_url.join(&module.specifier).unwrap();
+ Some(WorkspaceEszipModule {
+ specifier,
+ inner: module,
+ })
+ } else {
+ let module = self.eszip.get_module(specifier.as_str())?;
+ Some(WorkspaceEszipModule {
+ specifier: ModuleSpecifier::parse(&module.specifier).unwrap(),
+ inner: module,
+ })
+ }
+ }
+}
+
+struct SharedModuleLoaderState {
+ eszip: WorkspaceEszip,
+ workspace_resolver: WorkspaceResolver,
node_resolver: Arc<CliNodeResolver>,
npm_module_loader: Arc<NpmModuleLoader>,
}
@@ -122,44 +162,92 @@ impl ModuleLoader for EmbeddedModuleLoader {
};
}
- let maybe_mapped = self
- .shared
- .mapped_specifier_resolver
- .resolve(specifier, &referrer)?
- .into_specifier();
-
- // npm specifier
- let specifier_text = maybe_mapped
- .as_ref()
- .map(|r| r.as_str())
- .unwrap_or(specifier);
- if let Ok(reference) = NpmPackageReqReference::from_str(specifier_text) {
- return self
- .shared
- .node_resolver
- .resolve_req_reference(
- &reference,
- &referrer,
- NodeResolutionMode::Execution,
- )
- .map(|res| res.into_url());
- }
+ let mapped_resolution =
+ self.shared.workspace_resolver.resolve(specifier, &referrer);
- let specifier = match maybe_mapped {
- Some(resolved) => resolved,
- None => deno_core::resolve_import(specifier, referrer.as_str())?,
- };
+ match mapped_resolution {
+ Ok(MappedResolution::PackageJson {
+ dep_result,
+ sub_path,
+ alias,
+ ..
+ }) => match dep_result.as_ref().map_err(|e| AnyError::from(e.clone()))? {
+ PackageJsonDepValue::Req(req) => self
+ .shared
+ .node_resolver
+ .resolve_req_with_sub_path(
+ req,
+ sub_path.as_deref(),
+ &referrer,
+ NodeResolutionMode::Execution,
+ )
+ .map(|res| res.into_url()),
+ PackageJsonDepValue::Workspace(version_req) => {
+ let pkg_folder = self
+ .shared
+ .workspace_resolver
+ .resolve_workspace_pkg_json_folder_for_pkg_json_dep(
+ alias,
+ version_req,
+ )?;
+ Ok(
+ self
+ .shared
+ .node_resolver
+ .resolve_package_sub_path_from_deno_module(
+ pkg_folder,
+ sub_path.as_deref(),
+ &referrer,
+ NodeResolutionMode::Execution,
+ )?
+ .into_url(),
+ )
+ }
+ },
+ Ok(MappedResolution::Normal(specifier))
+ | Ok(MappedResolution::ImportMap(specifier)) => {
+ if let Ok(reference) =
+ NpmPackageReqReference::from_specifier(&specifier)
+ {
+ return self
+ .shared
+ .node_resolver
+ .resolve_req_reference(
+ &reference,
+ &referrer,
+ NodeResolutionMode::Execution,
+ )
+ .map(|res| res.into_url());
+ }
+
+ if specifier.scheme() == "jsr" {
+ if let Some(module) = self.shared.eszip.get_module(&specifier) {
+ return Ok(module.specifier);
+ }
+ }
- if specifier.scheme() == "jsr" {
- if let Some(module) = self.shared.eszip.get_module(specifier.as_str()) {
- return Ok(ModuleSpecifier::parse(&module.specifier).unwrap());
+ self
+ .shared
+ .node_resolver
+ .handle_if_in_node_modules(specifier)
}
+ Err(err)
+ if err.is_unmapped_bare_specifier() && referrer.scheme() == "file" =>
+ {
+ // todo(dsherret): return a better error from node resolution so that
+ // we can more easily tell whether to surface it or not
+ let node_result = self.shared.node_resolver.resolve(
+ specifier,
+ &referrer,
+ NodeResolutionMode::Execution,
+ );
+ if let Ok(Some(res)) = node_result {
+ return Ok(res.into_url());
+ }
+ Err(err.into())
+ }
+ Err(err) => Err(err.into()),
}
-
- self
- .shared
- .node_resolver
- .handle_if_in_node_modules(specifier)
}
fn load(
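
The new resolve() above folds several strategies into one match. A hedged sketch of the precedence it implements; Resolution, workspace_resolve, and node_resolve_req are illustrative stand-ins, not the real deno_config or node resolver API:

    enum Resolution {
        PackageJsonDep { req: String, sub_path: Option<String> },
        Mapped(String),       // import map hit or plain relative resolution
        UnmappedBare(String), // bare specifier with no mapping
    }

    fn resolve(specifier: &str, referrer: &str) -> Result<String, String> {
        match workspace_resolve(specifier, referrer) {
            Resolution::PackageJsonDep { req, sub_path } => {
                // 1. package.json deps win: go through the node resolver
                node_resolve_req(&req, sub_path.as_deref(), referrer)
            }
            Resolution::Mapped(url) => {
                // 2. npm: specifiers get special handling (the real code
                //    also checks the eszip for jsr: specifiers) before
                //    falling through to node_modules-aware handling
                if url.starts_with("npm:") {
                    return node_resolve_req(&url, None, referrer);
                }
                Ok(url)
            }
            Resolution::UnmappedBare(spec) => {
                // 3. last resort for file: referrers: try bare node
                //    resolution (e.g. a package in a byonm node_modules)
                node_resolve_req(&spec, None, referrer)
                    .map_err(|_| format!("unmapped bare specifier: {spec}"))
            }
        }
    }

    // stubs so the sketch compiles on its own; the real implementations
    // live in WorkspaceResolver and CliNodeResolver
    fn workspace_resolve(_s: &str, _r: &str) -> Resolution {
        Resolution::Mapped("file:///main.ts".to_string())
    }
    fn node_resolve_req(
        req: &str,
        _sub_path: Option<&str>,
        _referrer: &str,
    ) -> Result<String, String> {
        Ok(format!("file:///node_modules/{req}/index.js"))
    }
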
@@ -215,27 +303,23 @@ impl ModuleLoader for EmbeddedModuleLoader {
);
}
- let Some(module) =
- self.shared.eszip.get_module(original_specifier.as_str())
- else {
+ let Some(module) = self.shared.eszip.get_module(original_specifier) else {
return deno_core::ModuleLoadResponse::Sync(Err(type_error(format!(
"Module not found: {}",
original_specifier
))));
};
let original_specifier = original_specifier.clone();
- let found_specifier =
- ModuleSpecifier::parse(&module.specifier).expect("invalid url in eszip");
deno_core::ModuleLoadResponse::Async(
async move {
- let code = module.source().await.ok_or_else(|| {
+ let code = module.inner.source().await.ok_or_else(|| {
type_error(format!("Module not found: {}", original_specifier))
})?;
let code = arc_u8_to_arc_str(code)
.map_err(|_| type_error("Module source is not utf-8"))?;
Ok(deno_core::ModuleSource::new_with_redirect(
- match module.kind {
+ match module.inner.kind {
eszip::ModuleKind::JavaScript => ModuleType::JavaScript,
eszip::ModuleKind::Json => ModuleType::Json,
eszip::ModuleKind::Jsonc => {
@@ -247,7 +331,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
},
ModuleSourceCode::String(code.into()),
&original_specifier,
- &found_specifier,
+ &module.specifier,
None,
))
}
@@ -324,10 +408,10 @@ pub async fn run(
mut eszip: eszip::EszipV2,
metadata: Metadata,
) -> Result<i32, AnyError> {
- let main_module = &metadata.entrypoint;
let current_exe_path = std::env::current_exe().unwrap();
let current_exe_name =
current_exe_path.file_name().unwrap().to_string_lossy();
+ let maybe_cwd = std::env::current_dir().ok();
let deno_dir_provider = Arc::new(DenoDirProvider::new(None));
let root_cert_store_provider = Arc::new(StandaloneRootCertStoreProvider {
ca_stores: metadata.ca_stores,
@@ -341,119 +425,109 @@ pub async fn run(
));
// use a dummy npm registry url
let npm_registry_url = ModuleSpecifier::parse("https://localhost/").unwrap();
- let root_path = std::env::temp_dir()
- .join(format!("deno-compile-{}", current_exe_name))
- .join("node_modules");
- let npm_cache_dir =
- NpmCacheDir::new(root_path.clone(), vec![npm_registry_url.clone()]);
+ let root_path =
+ std::env::temp_dir().join(format!("deno-compile-{}", current_exe_name));
+ let root_dir_url = ModuleSpecifier::from_directory_path(&root_path).unwrap();
+ let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap();
+ let root_node_modules_path = root_path.join("node_modules");
+ let npm_cache_dir = NpmCacheDir::new(
+ root_node_modules_path.clone(),
+ vec![npm_registry_url.clone()],
+ );
let npm_global_cache_dir = npm_cache_dir.get_cache_location();
let cache_setting = CacheSetting::Only;
- let (package_json_deps_provider, fs, npm_resolver, maybe_vfs_root) =
- match metadata.node_modules {
- Some(binary::NodeModules::Managed {
- node_modules_dir,
- package_json_deps,
- }) => {
- // this will always have a snapshot
- let snapshot = eszip.take_npm_snapshot().unwrap();
- let vfs_root_dir_path = if node_modules_dir {
- root_path
- } else {
- npm_cache_dir.root_dir().to_owned()
- };
- let vfs = load_npm_vfs(vfs_root_dir_path.clone())
- .context("Failed to load npm vfs.")?;
- let maybe_node_modules_path = if node_modules_dir {
- Some(vfs.root().to_path_buf())
- } else {
- None
- };
- let package_json_deps_provider =
- Arc::new(PackageJsonDepsProvider::new(
- package_json_deps.map(|serialized| serialized.into_deps()),
- ));
- let fs = Arc::new(DenoCompileFileSystem::new(vfs))
- as Arc<dyn deno_fs::FileSystem>;
- let npm_resolver =
- create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
- CliNpmResolverManagedCreateOptions {
- snapshot: CliNpmResolverManagedSnapshotOption::Specified(Some(
- snapshot,
- )),
- maybe_lockfile: None,
- fs: fs.clone(),
- http_client_provider: http_client_provider.clone(),
- npm_global_cache_dir,
- cache_setting,
- text_only_progress_bar: progress_bar,
- maybe_node_modules_path,
- package_json_deps_provider: package_json_deps_provider.clone(),
- npm_system_info: Default::default(),
- // Packages from different registries are already inlined in the ESZip,
- // so no need to create actual `.npmrc` configuration.
- npmrc: create_default_npmrc(),
- },
- ))
- .await?;
- (
- package_json_deps_provider,
- fs,
- npm_resolver,
- Some(vfs_root_dir_path),
- )
- }
- Some(binary::NodeModules::Byonm { package_json_deps }) => {
- let vfs_root_dir_path = root_path;
- let vfs = load_npm_vfs(vfs_root_dir_path.clone())
- .context("Failed to load npm vfs.")?;
- let node_modules_path = vfs.root().join("node_modules");
- let package_json_deps_provider =
- Arc::new(PackageJsonDepsProvider::new(
- package_json_deps.map(|serialized| serialized.into_deps()),
- ));
- let fs = Arc::new(DenoCompileFileSystem::new(vfs))
- as Arc<dyn deno_fs::FileSystem>;
- let npm_resolver =
- create_cli_npm_resolver(CliNpmResolverCreateOptions::Byonm(
- CliNpmResolverByonmCreateOptions {
- fs: fs.clone(),
- root_node_modules_dir: node_modules_path,
- },
- ))
- .await?;
- (
- package_json_deps_provider,
- fs,
- npm_resolver,
- Some(vfs_root_dir_path),
- )
- }
- None => {
- let package_json_deps_provider =
- Arc::new(PackageJsonDepsProvider::new(None));
- let fs = Arc::new(deno_fs::RealFs) as Arc<dyn deno_fs::FileSystem>;
- let npm_resolver =
- create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
- CliNpmResolverManagedCreateOptions {
- snapshot: CliNpmResolverManagedSnapshotOption::Specified(None),
- maybe_lockfile: None,
- fs: fs.clone(),
- http_client_provider: http_client_provider.clone(),
- npm_global_cache_dir,
- cache_setting,
- text_only_progress_bar: progress_bar,
- maybe_node_modules_path: None,
- package_json_deps_provider: package_json_deps_provider.clone(),
- npm_system_info: Default::default(),
- // Packages from different registries are already inlined in the ESZip,
- // so no need to create actual `.npmrc` configuration.
- npmrc: create_default_npmrc(),
- },
- ))
- .await?;
- (package_json_deps_provider, fs, npm_resolver, None)
- }
- };
+ let (fs, npm_resolver, maybe_vfs_root) = match metadata.node_modules {
+ Some(binary::NodeModules::Managed { node_modules_dir }) => {
+ // this will always have a snapshot
+ let snapshot = eszip.take_npm_snapshot().unwrap();
+ let vfs_root_dir_path = if node_modules_dir.is_some() {
+ root_path.clone()
+ } else {
+ npm_cache_dir.root_dir().to_owned()
+ };
+ let vfs = load_npm_vfs(vfs_root_dir_path.clone())
+ .context("Failed to load npm vfs.")?;
+ let maybe_node_modules_path = node_modules_dir
+ .map(|node_modules_dir| vfs_root_dir_path.join(node_modules_dir));
+ let fs = Arc::new(DenoCompileFileSystem::new(vfs))
+ as Arc<dyn deno_fs::FileSystem>;
+ let npm_resolver =
+ create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
+ CliNpmResolverManagedCreateOptions {
+ snapshot: CliNpmResolverManagedSnapshotOption::Specified(Some(
+ snapshot,
+ )),
+ maybe_lockfile: None,
+ fs: fs.clone(),
+ http_client_provider: http_client_provider.clone(),
+ npm_global_cache_dir,
+ cache_setting,
+ text_only_progress_bar: progress_bar,
+ maybe_node_modules_path,
+ npm_system_info: Default::default(),
+ package_json_deps_provider: Arc::new(
+ // this is only used for installing packages, which isn't necessary with deno compile
+ PackageJsonInstallDepsProvider::empty(),
+ ),
+ // create an npmrc that uses the fake npm_registry_url to resolve packages
+ npmrc: Arc::new(ResolvedNpmRc {
+ default_config: deno_npm::npm_rc::RegistryConfigWithUrl {
+ registry_url: npm_registry_url.clone(),
+ config: Default::default(),
+ },
+ scopes: Default::default(),
+ registry_configs: Default::default(),
+ }),
+ },
+ ))
+ .await?;
+ (fs, npm_resolver, Some(vfs_root_dir_path))
+ }
+ Some(binary::NodeModules::Byonm {
+ root_node_modules_dir,
+ }) => {
+ let vfs_root_dir_path = root_path.clone();
+ let vfs = load_npm_vfs(vfs_root_dir_path.clone())
+ .context("Failed to load vfs.")?;
+ let root_node_modules_dir = vfs.root().join(root_node_modules_dir);
+ let fs = Arc::new(DenoCompileFileSystem::new(vfs))
+ as Arc<dyn deno_fs::FileSystem>;
+ let npm_resolver = create_cli_npm_resolver(
+ CliNpmResolverCreateOptions::Byonm(CliNpmResolverByonmCreateOptions {
+ fs: fs.clone(),
+ root_node_modules_dir,
+ }),
+ )
+ .await?;
+ (fs, npm_resolver, Some(vfs_root_dir_path))
+ }
+ None => {
+ let fs = Arc::new(deno_fs::RealFs) as Arc<dyn deno_fs::FileSystem>;
+ let npm_resolver =
+ create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
+ CliNpmResolverManagedCreateOptions {
+ snapshot: CliNpmResolverManagedSnapshotOption::Specified(None),
+ maybe_lockfile: None,
+ fs: fs.clone(),
+ http_client_provider: http_client_provider.clone(),
+ npm_global_cache_dir,
+ cache_setting,
+ text_only_progress_bar: progress_bar,
+ maybe_node_modules_path: None,
+ npm_system_info: Default::default(),
+ package_json_deps_provider: Arc::new(
+ // this is only used for installing packages, which isn't necessary with deno compile
+ PackageJsonInstallDepsProvider::empty(),
+ ),
+ // Packages from different registries are already inlined in the ESZip,
+ // so no need to create actual `.npmrc` configuration.
+ npmrc: create_default_npmrc(),
+ },
+ ))
+ .await?;
+ (fs, npm_resolver, None)
+ }
+ };
let has_node_modules_dir = npm_resolver.root_node_modules_path().is_some();
let node_resolver = Arc::new(NodeResolver::new(
@@ -471,9 +545,42 @@ pub async fn run(
node_resolver.clone(),
npm_resolver.clone().into_npm_resolver(),
));
- let maybe_import_map = metadata.maybe_import_map.map(|(base, source)| {
- Arc::new(parse_from_json(base, &source).unwrap().import_map)
- });
+ let workspace_resolver = {
+ let import_map = match metadata.workspace_resolver.import_map {
+ Some(import_map) => Some(
+ import_map::parse_from_json_with_options(
+ root_dir_url.join(&import_map.specifier).unwrap(),
+ &import_map.json,
+ import_map::ImportMapOptions {
+ address_hook: None,
+ expand_imports: true,
+ },
+ )?
+ .import_map,
+ ),
+ None => None,
+ };
+ let pkg_jsons = metadata
+ .workspace_resolver
+ .package_jsons
+ .into_iter()
+ .map(|(relative_path, json)| {
+ let path = root_dir_url
+ .join(&relative_path)
+ .unwrap()
+ .to_file_path()
+ .unwrap();
+ let pkg_json =
+ deno_config::package_json::PackageJson::load_from_value(path, json);
+ Arc::new(pkg_json)
+ })
+ .collect();
+ WorkspaceResolver::new_raw(
+ import_map,
+ pkg_jsons,
+ metadata.workspace_resolver.pkg_json_resolution,
+ )
+ };
let cli_node_resolver = Arc::new(CliNodeResolver::new(
Some(cjs_resolutions.clone()),
fs.clone(),
@@ -482,11 +589,11 @@ pub async fn run(
));
let module_loader_factory = StandaloneModuleLoaderFactory {
shared: Arc::new(SharedModuleLoaderState {
- eszip,
- mapped_specifier_resolver: MappedSpecifierResolver::new(
- maybe_import_map.clone(),
- package_json_deps_provider.clone(),
- ),
+ eszip: WorkspaceEszip {
+ eszip,
+ root_dir_url,
+ },
+ workspace_resolver,
node_resolver: cli_node_resolver.clone(),
npm_module_loader: Arc::new(NpmModuleLoader::new(
cjs_resolutions,
@@ -498,7 +605,6 @@ pub async fn run(
};
let permissions = {
- let maybe_cwd = std::env::current_dir().ok();
let mut permissions =
metadata.permissions.to_options(maybe_cwd.as_deref())?;
// if running with an npm vfs, grant read access to it
@@ -561,7 +667,7 @@ pub async fn run(
is_npm_main: main_module.scheme() == "npm",
skip_op_registration: true,
location: metadata.location,
- argv0: NpmPackageReqReference::from_specifier(main_module)
+ argv0: NpmPackageReqReference::from_specifier(&main_module)
.ok()
.map(|req_ref| npm_pkg_req_ref_to_binary_command(&req_ref))
.or(std::env::args().next()),
@@ -571,7 +677,6 @@ pub async fn run(
unsafely_ignore_certificate_errors: metadata
.unsafely_ignore_certificate_errors,
unstable: metadata.unstable_config.legacy_flag_enabled,
- maybe_root_package_json_deps: package_json_deps_provider.deps().cloned(),
create_hmr_runner: None,
create_coverage_collector: None,
},
@@ -592,11 +697,7 @@ pub async fn run(
deno_core::JsRuntime::init_platform(None);
let mut worker = worker_factory
- .create_main_worker(
- WorkerExecutionMode::Run,
- main_module.clone(),
- permissions,
- )
+ .create_main_worker(WorkerExecutionMode::Run, main_module, permissions)
.await?;
let exit_code = worker.run().await?;
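
The entrypoint is now stored as a key relative to the build-time root and re-rooted under a temp dir at startup. A minimal sketch of that round trip, assuming unix paths and using the url crate directly; make_relative plays the role of specifier_key here, while the real code goes through EszipRelativeFileBaseUrl:

    use url::Url;

    fn main() {
        // compile time: store the entrypoint as a key relative to the root dir
        let build_root = Url::parse("file:///home/user/project/").unwrap();
        let entrypoint =
            Url::parse("file:///home/user/project/src/main.ts").unwrap();
        let key = build_root.make_relative(&entrypoint).unwrap();
        assert_eq!(key, "src/main.ts");

        // run time: join the same key onto a machine-local temp root
        let run_root =
            Url::from_directory_path("/tmp/deno-compile-my-exe/").unwrap();
        let main_module = run_root.join(&key).unwrap();
        assert_eq!(
            main_module.as_str(),
            "file:///tmp/deno-compile-my-exe/src/main.ts"
        );
    }
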
diff --git a/cli/standalone/virtual_fs.rs b/cli/standalone/virtual_fs.rs
index 3e6823d50..ee91b9f7f 100644
--- a/cli/standalone/virtual_fs.rs
+++ b/cli/standalone/virtual_fs.rs
@@ -12,6 +12,7 @@ use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
+use deno_core::anyhow::anyhow;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
@@ -55,9 +56,8 @@ impl VfsBuilder {
root_dir: VirtualDirectory {
name: root_path
.file_stem()
- .unwrap()
- .to_string_lossy()
- .into_owned(),
+ .map(|s| s.to_string_lossy().into_owned())
+ .unwrap_or("root".to_string()),
entries: Vec::new(),
},
root_path,
@@ -67,13 +67,19 @@ impl VfsBuilder {
})
}
- pub fn set_root_dir_name(&mut self, name: String) {
- self.root_dir.name = name;
+ pub fn with_root_dir<R>(
+ &mut self,
+ with_root: impl FnOnce(&mut VirtualDirectory) -> R,
+ ) -> R {
+ with_root(&mut self.root_dir)
}
pub fn add_dir_recursive(&mut self, path: &Path) -> Result<(), AnyError> {
- let path = canonicalize_path(path)?;
- self.add_dir_recursive_internal(&path)
+ let target_path = canonicalize_path(path)?;
+ if path != target_path {
+ self.add_symlink(path, &target_path)?;
+ }
+ self.add_dir_recursive_internal(&target_path)
}
fn add_dir_recursive_internal(
@@ -92,7 +98,7 @@ impl VfsBuilder {
if file_type.is_dir() {
self.add_dir_recursive_internal(&path)?;
} else if file_type.is_file() {
- self.add_file_at_path(&path)?;
+ self.add_file_at_path_not_symlink(&path)?;
} else if file_type.is_symlink() {
match util::fs::canonicalize_path(&path) {
Ok(target) => {
@@ -175,6 +181,17 @@ impl VfsBuilder {
}
pub fn add_file_at_path(&mut self, path: &Path) -> Result<(), AnyError> {
+ let target_path = canonicalize_path(path)?;
+ if target_path != path {
+ self.add_symlink(path, &target_path)?;
+ }
+ self.add_file_at_path_not_symlink(&target_path)
+ }
+
+ pub fn add_file_at_path_not_symlink(
+ &mut self,
+ path: &Path,
+ ) -> Result<(), AnyError> {
let file_bytes = std::fs::read(path)
.with_context(|| format!("Reading {}", path.display()))?;
self.add_file(path, file_bytes)
@@ -195,7 +212,9 @@ impl VfsBuilder {
let name = path.file_name().unwrap().to_string_lossy();
let data_len = data.len();
match dir.entries.binary_search_by(|e| e.name().cmp(&name)) {
- Ok(_) => unreachable!(),
+ Ok(_) => {
+ // already added, just ignore
+ }
Err(insert_index) => {
dir.entries.insert(
insert_index,
@@ -228,6 +247,10 @@ impl VfsBuilder {
target.display()
);
let dest = self.path_relative_root(target)?;
+ if dest == self.path_relative_root(path)? {
+ // it's the same, ignore
+ return Ok(());
+ }
let dir = self.add_dir(path.parent().unwrap())?;
let name = path.file_name().unwrap().to_string_lossy();
match dir.entries.binary_search_by(|e| e.name().cmp(&name)) {
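
add_dir_recursive and add_file_at_path now canonicalize first and record a symlink when the requested path differs from its canonical target. A simplified sketch of that pattern with a stand-in Builder type (the real VfsBuilder also handles symlinks discovered during the walk itself):

    use std::fs;
    use std::io;
    use std::path::{Path, PathBuf};

    #[derive(Default)]
    struct Builder {
        files: Vec<PathBuf>,
        symlinks: Vec<(PathBuf, PathBuf)>, // (link path, canonical target)
    }

    impl Builder {
        fn add_dir_recursive(&mut self, path: &Path) -> io::Result<()> {
            let target = fs::canonicalize(path)?;
            if path != target.as_path() {
                // record the link so lookups through the original path work
                self.symlinks.push((path.to_path_buf(), target.clone()));
            }
            self.add_dir_inner(&target)
        }

        fn add_dir_inner(&mut self, dir: &Path) -> io::Result<()> {
            for entry in fs::read_dir(dir)? {
                let path = entry?.path();
                if path.is_dir() {
                    self.add_dir_inner(&path)?;
                } else {
                    self.files.push(path);
                }
            }
            Ok(())
        }
    }
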
diff --git a/cli/tools/bench/mod.rs b/cli/tools/bench/mod.rs
index 0378d6ae2..d801b908c 100644
--- a/cli/tools/bench/mod.rs
+++ b/cli/tools/bench/mod.rs
@@ -407,7 +407,8 @@ pub async fn run_benchmarks(
bench_flags: BenchFlags,
) -> Result<(), AnyError> {
let cli_options = CliOptions::from_flags(flags)?;
- let bench_options = cli_options.resolve_bench_options(bench_flags)?;
+ let workspace_bench_options =
+ cli_options.resolve_workspace_bench_options(&bench_flags);
let factory = CliFactory::from_cli_options(Arc::new(cli_options));
let cli_options = factory.cli_options();
// Various bench files should not share the same permissions in terms of
@@ -416,11 +417,21 @@ pub async fn run_benchmarks(
let permissions =
Permissions::from_options(&cli_options.permissions_options()?)?;
- let specifiers = collect_specifiers(
- bench_options.files,
- cli_options.vendor_dir_path().map(ToOwned::to_owned),
- is_supported_bench_path,
- )?;
+ let members_with_bench_options =
+ cli_options.resolve_bench_options_for_members(&bench_flags)?;
+ let specifiers = members_with_bench_options
+ .iter()
+ .map(|(_, bench_options)| {
+ collect_specifiers(
+ bench_options.files.clone(),
+ cli_options.vendor_dir_path().map(ToOwned::to_owned),
+ is_supported_bench_path,
+ )
+ })
+ .collect::<Result<Vec<_>, _>>()?
+ .into_iter()
+ .flatten()
+ .collect::<Vec<_>>();
if specifiers.is_empty() {
return Err(generic_error("No bench modules found"));
@@ -429,7 +440,7 @@ pub async fn run_benchmarks(
let main_graph_container = factory.main_module_graph_container().await?;
main_graph_container.check_specifiers(&specifiers).await?;
- if bench_options.no_run {
+ if workspace_bench_options.no_run {
return Ok(());
}
@@ -441,8 +452,8 @@ pub async fn run_benchmarks(
&permissions,
specifiers,
BenchSpecifierOptions {
- filter: TestFilter::from_flag(&bench_options.filter),
- json: bench_options.json,
+ filter: TestFilter::from_flag(&workspace_bench_options.filter),
+ json: workspace_bench_options.json,
log_level,
},
)
@@ -472,24 +483,40 @@ pub async fn run_benchmarks_with_watch(
let factory = CliFactoryBuilder::new()
.build_from_flags_for_watcher(flags, watcher_communicator.clone())?;
let cli_options = factory.cli_options();
- let bench_options = cli_options.resolve_bench_options(bench_flags)?;
+ let workspace_bench_options =
+ cli_options.resolve_workspace_bench_options(&bench_flags);
let _ = watcher_communicator.watch_paths(cli_options.watch_paths());
- if let Some(set) = &bench_options.files.include {
- let watch_paths = set.base_paths();
- if !watch_paths.is_empty() {
- let _ = watcher_communicator.watch_paths(watch_paths);
- }
- }
let graph_kind = cli_options.type_check_mode().as_graph_kind();
let module_graph_creator = factory.module_graph_creator().await?;
-
- let bench_modules = collect_specifiers(
- bench_options.files.clone(),
- cli_options.vendor_dir_path().map(ToOwned::to_owned),
- is_supported_bench_path,
- )?;
+ let members_with_bench_options =
+ cli_options.resolve_bench_options_for_members(&bench_flags)?;
+ let watch_paths = members_with_bench_options
+ .iter()
+ .filter_map(|(_, bench_options)| {
+ bench_options
+ .files
+ .include
+ .as_ref()
+ .map(|set| set.base_paths())
+ })
+ .flatten()
+ .collect::<Vec<_>>();
+ let _ = watcher_communicator.watch_paths(watch_paths);
+ let collected_bench_modules = members_with_bench_options
+ .iter()
+ .map(|(_, bench_options)| {
+ collect_specifiers(
+ bench_options.files.clone(),
+ cli_options.vendor_dir_path().map(ToOwned::to_owned),
+ is_supported_bench_path,
+ )
+ })
+ .collect::<Result<Vec<_>, _>>()?
+ .into_iter()
+ .flatten()
+ .collect::<Vec<_>>();
// Various bench files should not share the same permissions in terms of
// `PermissionsContainer` - otherwise granting/revoking permissions in one
@@ -498,7 +525,7 @@ pub async fn run_benchmarks_with_watch(
Permissions::from_options(&cli_options.permissions_options()?)?;
let graph = module_graph_creator
- .create_graph(graph_kind, bench_modules)
+ .create_graph(graph_kind, collected_bench_modules.clone())
.await?;
module_graph_creator.graph_valid(&graph)?;
let bench_modules = &graph.roots;
@@ -524,16 +551,10 @@ pub async fn run_benchmarks_with_watch(
let worker_factory =
Arc::new(factory.create_cli_main_worker_factory().await?);
- // todo(dsherret): why are we collecting specifiers twice in a row?
- // Seems like a perf bug.
- let specifiers = collect_specifiers(
- bench_options.files,
- cli_options.vendor_dir_path().map(ToOwned::to_owned),
- is_supported_bench_path,
- )?
- .into_iter()
- .filter(|specifier| bench_modules_to_reload.contains(specifier))
- .collect::<Vec<ModuleSpecifier>>();
+ let specifiers = collected_bench_modules
+ .into_iter()
+ .filter(|specifier| bench_modules_to_reload.contains(specifier))
+ .collect::<Vec<ModuleSpecifier>>();
factory
.main_module_graph_container()
@@ -541,7 +562,7 @@ pub async fn run_benchmarks_with_watch(
.check_specifiers(&specifiers)
.await?;
- if bench_options.no_run {
+ if workspace_bench_options.no_run {
return Ok(());
}
@@ -551,8 +572,8 @@ pub async fn run_benchmarks_with_watch(
&permissions,
specifiers,
BenchSpecifierOptions {
- filter: TestFilter::from_flag(&bench_options.filter),
- json: bench_options.json,
+ filter: TestFilter::from_flag(&workspace_bench_options.filter),
+ json: workspace_bench_options.json,
log_level,
},
)
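
Bench specifiers are now collected once per workspace member and flattened into a single list. A small sketch of that collect-then-flatten shape; MemberOptions and collect are stand-ins for the real per-member options and collect_specifiers:

    struct MemberOptions {
        include: Vec<String>,
    }

    // stand-in for collect_specifiers(): expand one member's includes to files
    fn collect(opts: &MemberOptions) -> Result<Vec<String>, String> {
        Ok(opts.include.iter().map(|p| format!("file:///{p}")).collect())
    }

    fn collect_all(members: &[MemberOptions]) -> Result<Vec<String>, String> {
        let per_member = members
            .iter()
            .map(collect)
            .collect::<Result<Vec<_>, _>>()?; // fail fast on the first error
        Ok(per_member.into_iter().flatten().collect())
    }
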
diff --git a/cli/tools/check.rs b/cli/tools/check.rs
index 6eb7a071c..4ec677f8f 100644
--- a/cli/tools/check.rs
+++ b/cli/tools/check.rs
@@ -183,7 +183,7 @@ impl TypeChecker {
self.module_graph_builder.build_fast_check_graph(
&mut graph,
BuildFastCheckGraphOptions {
- workspace_fast_check: false,
+ workspace_fast_check: deno_graph::WorkspaceFastCheckOption::Disabled,
},
)?;
}
diff --git a/cli/tools/compile.rs b/cli/tools/compile.rs
index b7aa94691..e395c351b 100644
--- a/cli/tools/compile.rs
+++ b/cli/tools/compile.rs
@@ -5,6 +5,7 @@ use crate::args::Flags;
use crate::factory::CliFactory;
use crate::http_util::HttpClientProvider;
use crate::standalone::is_standalone_binary;
+use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::generic_error;
@@ -12,6 +13,7 @@ use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_graph::GraphKind;
use deno_terminal::colors;
+use eszip::EszipRelativeFileBaseUrl;
use rand::Rng;
use std::path::Path;
use std::path::PathBuf;
@@ -82,12 +84,24 @@ pub async fn compile(
ts_config_for_emit.ts_config,
)?;
let parser = parsed_source_cache.as_capturing_parser();
+ let root_dir_url = resolve_root_dir_from_specifiers(
+ cli_options.workspace.root_folder().0,
+ graph.specifiers().map(|(s, _)| s).chain(
+ cli_options
+ .node_modules_dir_path()
+ .and_then(|p| ModuleSpecifier::from_directory_path(p).ok())
+ .iter(),
+ ),
+ );
+ log::debug!("Binary root dir: {}", root_dir_url);
+ let root_dir_url = EszipRelativeFileBaseUrl::new(&root_dir_url);
let eszip = eszip::EszipV2::from_graph(eszip::FromGraphOptions {
graph,
parser,
transpile_options,
emit_options,
- relative_file_base: None,
+ // make all the modules relative to the root folder
+ relative_file_base: Some(root_dir_url),
})?;
log::info!(
@@ -116,6 +130,7 @@ pub async fn compile(
.write_bin(
&mut file,
eszip,
+ root_dir_url,
&module_specifier,
&compile_flags,
cli_options,
@@ -268,6 +283,68 @@ fn get_os_specific_filepath(
}
}
+fn resolve_root_dir_from_specifiers<'a>(
+ starting_dir: &ModuleSpecifier,
+ specifiers: impl Iterator<Item = &'a ModuleSpecifier>,
+) -> ModuleSpecifier {
+ fn select_common_root<'a>(a: &'a str, b: &'a str) -> &'a str {
+ let min_length = a.len().min(b.len());
+
+ let mut last_slash = 0;
+ for i in 0..min_length {
+ if a.as_bytes()[i] == b.as_bytes()[i] && a.as_bytes()[i] == b'/' {
+ last_slash = i;
+ } else if a.as_bytes()[i] != b.as_bytes()[i] {
+ break;
+ }
+ }
+
+ // Return the common root path up to the last common slash.
+    // Return the common root path: a slice of 'a' up to and including the
+    // last slash shared by both strings.
+ if cfg!(windows) && common == "file:///" {
+ a
+ } else {
+ common
+ }
+ }
+
+ fn is_file_system_root(url: &str) -> bool {
+ let Some(path) = url.strip_prefix("file:///") else {
+ return false;
+ };
+ if cfg!(windows) {
+ let Some((_drive, path)) = path.split_once('/') else {
+ return true;
+ };
+ path.is_empty()
+ } else {
+ path.is_empty()
+ }
+ }
+
+ let mut found_dir = starting_dir.as_str();
+ if !is_file_system_root(found_dir) {
+ for specifier in specifiers {
+ if specifier.scheme() == "file" {
+ found_dir = select_common_root(found_dir, specifier.as_str());
+ }
+ }
+ }
+ let found_dir = if is_file_system_root(found_dir) {
+ found_dir
+ } else {
+ // include the parent dir name because it helps create some context
+ found_dir
+ .strip_suffix('/')
+ .unwrap_or(found_dir)
+ .rfind('/')
+ .map(|i| &found_dir[..i + 1])
+ .unwrap_or(found_dir)
+ };
+ ModuleSpecifier::parse(found_dir).unwrap()
+}
+
#[cfg(test)]
mod test {
pub use super::*;
@@ -342,4 +419,38 @@ mod test {
run_test("C:\\my-exe.0.1.2", Some("windows"), "C:\\my-exe.0.1.2.exe");
run_test("my-exe-0.1.2", Some("linux"), "my-exe-0.1.2");
}
+
+ #[test]
+ fn test_resolve_root_dir_from_specifiers() {
+ fn resolve(start: &str, specifiers: &[&str]) -> String {
+ let specifiers = specifiers
+ .iter()
+ .map(|s| ModuleSpecifier::parse(s).unwrap())
+ .collect::<Vec<_>>();
+ resolve_root_dir_from_specifiers(
+ &ModuleSpecifier::parse(start).unwrap(),
+ specifiers.iter(),
+ )
+ .to_string()
+ }
+
+ assert_eq!(resolve("file:///a/b/c", &["file:///a/b/c/d"]), "file:///a/");
+ assert_eq!(
+ resolve("file:///a/b/c/", &["file:///a/b/c/d"]),
+ "file:///a/b/"
+ );
+ assert_eq!(
+ resolve("file:///a/b/c/", &["file:///a/b/c/d", "file:///a/b/c/e"]),
+ "file:///a/b/"
+ );
+ assert_eq!(resolve("file:///", &["file:///a/b/c/d"]), "file:///");
+ if cfg!(windows) {
+ assert_eq!(resolve("file:///c:/", &["file:///c:/test"]), "file:///c:/");
+ // this will ignore the other one because it's on a separate drive
+ assert_eq!(
+ resolve("file:///c:/a/b/c/", &["file:///v:/a/b/c/d"]),
+ "file:///c:/a/b/"
+ );
+ }
+ }
}
diff --git a/cli/tools/doc.rs b/cli/tools/doc.rs
index f123fc55a..79765a91d 100644
--- a/cli/tools/doc.rs
+++ b/cli/tools/doc.rs
@@ -187,31 +187,32 @@ pub async fn doc(flags: Flags, doc_flags: DocFlags) -> Result<(), AnyError> {
Default::default()
};
- let rewrite_map =
- if let Some(config_file) = cli_options.maybe_config_file().clone() {
- let config = config_file.to_exports_config()?;
-
- let rewrite_map = config
- .clone()
- .into_map()
- .into_keys()
- .map(|key| {
- Ok((
- config.get_resolved(&key)?.unwrap(),
- key
- .strip_prefix('.')
- .unwrap_or(&key)
- .strip_prefix('/')
- .unwrap_or(&key)
- .to_owned(),
- ))
- })
- .collect::<Result<IndexMap<_, _>, AnyError>>()?;
-
- Some(rewrite_map)
- } else {
- None
- };
+ let rewrite_map = if let Some(config_file) =
+ cli_options.workspace.resolve_start_ctx().maybe_deno_json()
+ {
+ let config = config_file.to_exports_config()?;
+
+ let rewrite_map = config
+ .clone()
+ .into_map()
+ .into_keys()
+ .map(|key| {
+ Ok((
+ config.get_resolved(&key)?.unwrap(),
+ key
+ .strip_prefix('.')
+ .unwrap_or(&key)
+ .strip_prefix('/')
+ .unwrap_or(&key)
+ .to_owned(),
+ ))
+ })
+ .collect::<Result<IndexMap<_, _>, AnyError>>()?;
+
+ Some(rewrite_map)
+ } else {
+ None
+ };
generate_docs_directory(
doc_nodes_by_url,
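
The rewrite map above normalizes exports-config keys by stripping their leading "." and "/". A compact sketch of that key normalization, with plain string maps standing in for the exports config:

    use std::collections::BTreeMap;

    fn rewrite_map(
        exports: &BTreeMap<String, String>, // key -> resolved specifier
    ) -> BTreeMap<String, String> {
        exports
            .iter()
            .map(|(key, resolved)| {
                let key = key.as_str();
                // "./mod.ts" becomes "mod.ts"; note that a bare "." key
                // falls back to itself, matching the unwrap_or chain above
                let name = key
                    .strip_prefix('.')
                    .unwrap_or(key)
                    .strip_prefix('/')
                    .unwrap_or(key);
                (resolved.clone(), name.to_string())
            })
            .collect()
    }
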
diff --git a/cli/tools/fmt.rs b/cli/tools/fmt.rs
index b37a8e06b..c16be9fb2 100644
--- a/cli/tools/fmt.rs
+++ b/cli/tools/fmt.rs
@@ -13,6 +13,7 @@ use crate::args::FmtFlags;
use crate::args::FmtOptions;
use crate::args::FmtOptionsConfig;
use crate::args::ProseWrap;
+use crate::cache::Caches;
use crate::colors;
use crate::factory::CliFactory;
use crate::util::diff::diff;
@@ -20,6 +21,7 @@ use crate::util::file_watcher;
use crate::util::fs::canonicalize_path;
use crate::util::fs::FileCollector;
use crate::util::path::get_extension;
+use async_trait::async_trait;
use deno_ast::ParsedSource;
use deno_config::glob::FilePatterns;
use deno_core::anyhow::anyhow;
@@ -50,8 +52,11 @@ use crate::cache::IncrementalCache;
pub async fn format(flags: Flags, fmt_flags: FmtFlags) -> Result<(), AnyError> {
if fmt_flags.is_stdin() {
let cli_options = CliOptions::from_flags(flags)?;
- let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
+ let start_ctx = cli_options.workspace.resolve_start_ctx();
+ let fmt_options =
+ cli_options.resolve_fmt_options(&fmt_flags, &start_ctx)?;
return format_stdin(
+ &fmt_flags,
fmt_options,
cli_options
.ext_flag()
@@ -70,42 +75,42 @@ pub async fn format(flags: Flags, fmt_flags: FmtFlags) -> Result<(), AnyError> {
Ok(async move {
let factory = CliFactory::from_flags(flags)?;
let cli_options = factory.cli_options();
- let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
- let files = collect_fmt_files(cli_options, fmt_options.files.clone())
- .and_then(|files| {
- if files.is_empty() {
- Err(generic_error("No target files found."))
+ let caches = factory.caches()?;
+ let mut paths_with_options_batches =
+ resolve_paths_with_options_batches(cli_options, &fmt_flags)?;
+
+ for paths_with_options in &mut paths_with_options_batches {
+ let _ = watcher_communicator
+ .watch_paths(paths_with_options.paths.clone());
+ let files = std::mem::take(&mut paths_with_options.paths);
+ paths_with_options.paths = if let Some(paths) = &changed_paths {
+ if fmt_flags.check {
+          // check all files if any file changed (https://github.com/denoland/deno/issues/12446)
+ files
+ .iter()
+ .any(|path| {
+ canonicalize_path(path)
+ .map(|path| paths.contains(&path))
+ .unwrap_or(false)
+ })
+ .then_some(files)
+ .unwrap_or_else(|| [].to_vec())
} else {
- Ok(files)
+ files
+ .into_iter()
+ .filter(|path| {
+ canonicalize_path(path)
+ .map(|path| paths.contains(&path))
+ .unwrap_or(false)
+ })
+ .collect::<Vec<_>>()
}
- })?;
- let _ = watcher_communicator.watch_paths(files.clone());
- let refmt_files = if let Some(paths) = changed_paths {
- if fmt_options.check {
- // check all files on any changed (https://github.com/denoland/deno/issues/12446)
- files
- .iter()
- .any(|path| {
- canonicalize_path(path)
- .map(|path| paths.contains(&path))
- .unwrap_or(false)
- })
- .then_some(files)
- .unwrap_or_else(|| [].to_vec())
} else {
files
- .into_iter()
- .filter(|path| {
- canonicalize_path(path)
- .map(|path| paths.contains(&path))
- .unwrap_or(false)
- })
- .collect::<Vec<_>>()
- }
- } else {
- files
- };
- format_files(factory, fmt_options, refmt_files).await?;
+ };
+ }
+
+ format_files(caches, &fmt_flags, paths_with_options_batches).await?;
Ok(())
})
@@ -114,43 +119,77 @@ pub async fn format(flags: Flags, fmt_flags: FmtFlags) -> Result<(), AnyError> {
.await?;
} else {
let factory = CliFactory::from_flags(flags)?;
+ let caches = factory.caches()?;
let cli_options = factory.cli_options();
- let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
- let files = collect_fmt_files(cli_options, fmt_options.files.clone())
- .and_then(|files| {
- if files.is_empty() {
- Err(generic_error("No target files found."))
- } else {
- Ok(files)
- }
- })?;
- format_files(factory, fmt_options, files).await?;
+ let paths_with_options_batches =
+ resolve_paths_with_options_batches(cli_options, &fmt_flags)?;
+ format_files(caches, &fmt_flags, paths_with_options_batches).await?;
}
Ok(())
}
-async fn format_files(
- factory: CliFactory,
- fmt_options: FmtOptions,
+struct PathsWithOptions {
+ base: PathBuf,
paths: Vec<PathBuf>,
+ options: FmtOptions,
+}
+
+fn resolve_paths_with_options_batches(
+ cli_options: &CliOptions,
+ fmt_flags: &FmtFlags,
+) -> Result<Vec<PathsWithOptions>, AnyError> {
+ let members_fmt_options =
+ cli_options.resolve_fmt_options_for_members(fmt_flags)?;
+ let mut paths_with_options_batches =
+ Vec::with_capacity(members_fmt_options.len());
+ for member_fmt_options in members_fmt_options {
+ let files =
+ collect_fmt_files(cli_options, member_fmt_options.files.clone())?;
+ if !files.is_empty() {
+ paths_with_options_batches.push(PathsWithOptions {
+ base: member_fmt_options.files.base.clone(),
+ paths: files,
+ options: member_fmt_options,
+ });
+ }
+ }
+ if paths_with_options_batches.is_empty() {
+ return Err(generic_error("No target files found."));
+ }
+ Ok(paths_with_options_batches)
+}
+
+async fn format_files(
+ caches: &Arc<Caches>,
+ fmt_flags: &FmtFlags,
+ paths_with_options_batches: Vec<PathsWithOptions>,
) -> Result<(), AnyError> {
- let caches = factory.caches()?;
- let check = fmt_options.check;
- let incremental_cache = Arc::new(IncrementalCache::new(
- caches.fmt_incremental_cache_db(),
- &fmt_options.options,
- &paths,
- ));
- if check {
- check_source_files(paths, fmt_options.options, incremental_cache.clone())
- .await?;
+ let formatter: Box<dyn Formatter> = if fmt_flags.check {
+ Box::new(CheckFormatter::default())
} else {
- format_source_files(paths, fmt_options.options, incremental_cache.clone())
+ Box::new(RealFormatter::default())
+ };
+ for paths_with_options in paths_with_options_batches {
+ log::debug!(
+ "Formatting {} file(s) in {}",
+ paths_with_options.paths.len(),
+ paths_with_options.base.display()
+ );
+ let fmt_options = paths_with_options.options;
+ let paths = paths_with_options.paths;
+ let incremental_cache = Arc::new(IncrementalCache::new(
+ caches.fmt_incremental_cache_db(),
+ &fmt_options.options,
+ &paths,
+ ));
+ formatter
+ .handle_files(paths, fmt_options.options, incremental_cache.clone())
.await?;
+ incremental_cache.wait_completion().await;
}
- incremental_cache.wait_completion().await;
- Ok(())
+
+ formatter.finish()
}
fn collect_fmt_files(
@@ -274,156 +313,190 @@ pub fn format_parsed_source(
)
}
-async fn check_source_files(
- paths: Vec<PathBuf>,
- fmt_options: FmtOptionsConfig,
- incremental_cache: Arc<IncrementalCache>,
-) -> Result<(), AnyError> {
- let not_formatted_files_count = Arc::new(AtomicUsize::new(0));
- let checked_files_count = Arc::new(AtomicUsize::new(0));
-
- // prevent threads outputting at the same time
- let output_lock = Arc::new(Mutex::new(0));
-
- run_parallelized(paths, {
- let not_formatted_files_count = not_formatted_files_count.clone();
- let checked_files_count = checked_files_count.clone();
- move |file_path| {
- checked_files_count.fetch_add(1, Ordering::Relaxed);
- let file_text = read_file_contents(&file_path)?.text;
-
- // skip checking the file if we know it's formatted
- if incremental_cache.is_file_same(&file_path, &file_text) {
- return Ok(());
- }
+#[async_trait]
+trait Formatter {
+ async fn handle_files(
+ &self,
+ paths: Vec<PathBuf>,
+ fmt_options: FmtOptionsConfig,
+ incremental_cache: Arc<IncrementalCache>,
+ ) -> Result<(), AnyError>;
- match format_file(&file_path, &file_text, &fmt_options) {
- Ok(Some(formatted_text)) => {
- not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
- let _g = output_lock.lock();
- let diff = diff(&file_text, &formatted_text);
- info!("");
- info!("{} {}:", colors::bold("from"), file_path.display());
- info!("{}", diff);
- }
- Ok(None) => {
- // When checking formatting, only update the incremental cache when
- // the file is the same since we don't bother checking for stable
- // formatting here. Additionally, ensure this is done during check
- // so that CIs that cache the DENO_DIR will get the benefit of
- // incremental formatting
- incremental_cache.update_file(&file_path, &file_text);
+ fn finish(&self) -> Result<(), AnyError>;
+}
+
+#[derive(Default)]
+struct CheckFormatter {
+ not_formatted_files_count: Arc<AtomicUsize>,
+ checked_files_count: Arc<AtomicUsize>,
+}
+
+#[async_trait]
+impl Formatter for CheckFormatter {
+ async fn handle_files(
+ &self,
+ paths: Vec<PathBuf>,
+ fmt_options: FmtOptionsConfig,
+ incremental_cache: Arc<IncrementalCache>,
+ ) -> Result<(), AnyError> {
+ // prevent threads outputting at the same time
+ let output_lock = Arc::new(Mutex::new(0));
+
+ run_parallelized(paths, {
+ let not_formatted_files_count = self.not_formatted_files_count.clone();
+ let checked_files_count = self.checked_files_count.clone();
+ move |file_path| {
+ checked_files_count.fetch_add(1, Ordering::Relaxed);
+ let file_text = read_file_contents(&file_path)?.text;
+
+ // skip checking the file if we know it's formatted
+ if incremental_cache.is_file_same(&file_path, &file_text) {
+ return Ok(());
}
- Err(e) => {
- not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
- let _g = output_lock.lock();
- warn!("Error checking: {}", file_path.to_string_lossy());
- warn!(
- "{}",
- format!("{e}")
- .split('\n')
- .map(|l| {
- if l.trim().is_empty() {
- String::new()
- } else {
- format!(" {l}")
- }
- })
- .collect::<Vec<_>>()
- .join("\n")
- );
+
+ match format_file(&file_path, &file_text, &fmt_options) {
+ Ok(Some(formatted_text)) => {
+ not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
+ let _g = output_lock.lock();
+ let diff = diff(&file_text, &formatted_text);
+ info!("");
+ info!("{} {}:", colors::bold("from"), file_path.display());
+ info!("{}", diff);
+ }
+ Ok(None) => {
+ // When checking formatting, only update the incremental cache when
+ // the file is the same since we don't bother checking for stable
+ // formatting here. Additionally, ensure this is done during check
+ // so that CIs that cache the DENO_DIR will get the benefit of
+ // incremental formatting
+ incremental_cache.update_file(&file_path, &file_text);
+ }
+ Err(e) => {
+ not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
+ let _g = output_lock.lock();
+ warn!("Error checking: {}", file_path.to_string_lossy());
+ warn!(
+ "{}",
+ format!("{e}")
+ .split('\n')
+ .map(|l| {
+ if l.trim().is_empty() {
+ String::new()
+ } else {
+ format!(" {l}")
+ }
+ })
+ .collect::<Vec<_>>()
+ .join("\n")
+ );
+ }
}
+ Ok(())
}
+ })
+ .await?;
+
+ Ok(())
+ }
+
+ fn finish(&self) -> Result<(), AnyError> {
+ let not_formatted_files_count =
+ self.not_formatted_files_count.load(Ordering::Relaxed);
+ let checked_files_count = self.checked_files_count.load(Ordering::Relaxed);
+ let checked_files_str =
+ format!("{} {}", checked_files_count, files_str(checked_files_count));
+ if not_formatted_files_count == 0 {
+ info!("Checked {}", checked_files_str);
Ok(())
+ } else {
+ let not_formatted_files_str = files_str(not_formatted_files_count);
+ Err(generic_error(format!(
+ "Found {not_formatted_files_count} not formatted {not_formatted_files_str} in {checked_files_str}",
+ )))
}
- })
- .await?;
-
- let not_formatted_files_count =
- not_formatted_files_count.load(Ordering::Relaxed);
- let checked_files_count = checked_files_count.load(Ordering::Relaxed);
- let checked_files_str =
- format!("{} {}", checked_files_count, files_str(checked_files_count));
- if not_formatted_files_count == 0 {
- info!("Checked {}", checked_files_str);
- Ok(())
- } else {
- let not_formatted_files_str = files_str(not_formatted_files_count);
- Err(generic_error(format!(
- "Found {not_formatted_files_count} not formatted {not_formatted_files_str} in {checked_files_str}",
- )))
}
}
-async fn format_source_files(
- paths: Vec<PathBuf>,
- fmt_options: FmtOptionsConfig,
- incremental_cache: Arc<IncrementalCache>,
-) -> Result<(), AnyError> {
- let formatted_files_count = Arc::new(AtomicUsize::new(0));
- let checked_files_count = Arc::new(AtomicUsize::new(0));
- let output_lock = Arc::new(Mutex::new(0)); // prevent threads outputting at the same time
-
- run_parallelized(paths, {
- let formatted_files_count = formatted_files_count.clone();
- let checked_files_count = checked_files_count.clone();
- move |file_path| {
- checked_files_count.fetch_add(1, Ordering::Relaxed);
- let file_contents = read_file_contents(&file_path)?;
-
- // skip formatting the file if we know it's formatted
- if incremental_cache.is_file_same(&file_path, &file_contents.text) {
- return Ok(());
- }
+#[derive(Default)]
+struct RealFormatter {
+ formatted_files_count: Arc<AtomicUsize>,
+ checked_files_count: Arc<AtomicUsize>,
+}
- match format_ensure_stable(
- &file_path,
- &file_contents.text,
- &fmt_options,
- format_file,
- ) {
- Ok(Some(formatted_text)) => {
- incremental_cache.update_file(&file_path, &formatted_text);
- write_file_contents(
- &file_path,
- FileContents {
- had_bom: file_contents.had_bom,
- text: formatted_text,
- },
- )?;
- formatted_files_count.fetch_add(1, Ordering::Relaxed);
- let _g = output_lock.lock();
- info!("{}", file_path.to_string_lossy());
- }
- Ok(None) => {
- incremental_cache.update_file(&file_path, &file_contents.text);
+#[async_trait]
+impl Formatter for RealFormatter {
+ async fn handle_files(
+ &self,
+ paths: Vec<PathBuf>,
+ fmt_options: FmtOptionsConfig,
+ incremental_cache: Arc<IncrementalCache>,
+ ) -> Result<(), AnyError> {
+ let output_lock = Arc::new(Mutex::new(0)); // prevent threads outputting at the same time
+
+ run_parallelized(paths, {
+ let formatted_files_count = self.formatted_files_count.clone();
+ let checked_files_count = self.checked_files_count.clone();
+ move |file_path| {
+ checked_files_count.fetch_add(1, Ordering::Relaxed);
+ let file_contents = read_file_contents(&file_path)?;
+
+ // skip formatting the file if we know it's formatted
+ if incremental_cache.is_file_same(&file_path, &file_contents.text) {
+ return Ok(());
}
- Err(e) => {
- let _g = output_lock.lock();
- log::error!("Error formatting: {}", file_path.to_string_lossy());
- log::error!(" {e}");
+
+ match format_ensure_stable(
+ &file_path,
+ &file_contents.text,
+ &fmt_options,
+ format_file,
+ ) {
+ Ok(Some(formatted_text)) => {
+ incremental_cache.update_file(&file_path, &formatted_text);
+ write_file_contents(
+ &file_path,
+ FileContents {
+ had_bom: file_contents.had_bom,
+ text: formatted_text,
+ },
+ )?;
+ formatted_files_count.fetch_add(1, Ordering::Relaxed);
+ let _g = output_lock.lock();
+ info!("{}", file_path.to_string_lossy());
+ }
+ Ok(None) => {
+ incremental_cache.update_file(&file_path, &file_contents.text);
+ }
+ Err(e) => {
+ let _g = output_lock.lock();
+ log::error!("Error formatting: {}", file_path.to_string_lossy());
+ log::error!(" {e}");
+ }
}
+ Ok(())
}
- Ok(())
- }
- })
- .await?;
-
- let formatted_files_count = formatted_files_count.load(Ordering::Relaxed);
- debug!(
- "Formatted {} {}",
- formatted_files_count,
- files_str(formatted_files_count),
- );
-
- let checked_files_count = checked_files_count.load(Ordering::Relaxed);
- info!(
- "Checked {} {}",
- checked_files_count,
- files_str(checked_files_count)
- );
+ })
+ .await?;
+ Ok(())
+ }
- Ok(())
+ fn finish(&self) -> Result<(), AnyError> {
+ let formatted_files_count =
+ self.formatted_files_count.load(Ordering::Relaxed);
+ debug!(
+ "Formatted {} {}",
+ formatted_files_count,
+ files_str(formatted_files_count),
+ );
+
+ let checked_files_count = self.checked_files_count.load(Ordering::Relaxed);
+ info!(
+ "Checked {} {}",
+ checked_files_count,
+ files_str(checked_files_count)
+ );
+ Ok(())
+ }
}
/// When storing any formatted text in the incremental cache, we want
@@ -491,14 +564,18 @@ fn format_ensure_stable(
/// Format stdin and write result to stdout.
/// Treats input as set by `--ext` flag.
/// Compatible with `--check` flag.
-fn format_stdin(fmt_options: FmtOptions, ext: &str) -> Result<(), AnyError> {
+fn format_stdin(
+ fmt_flags: &FmtFlags,
+ fmt_options: FmtOptions,
+ ext: &str,
+) -> Result<(), AnyError> {
let mut source = String::new();
if stdin().read_to_string(&mut source).is_err() {
bail!("Failed to read from stdin");
}
let file_path = PathBuf::from(format!("_stdin.{ext}"));
let formatted_text = format_file(&file_path, &source, &fmt_options.options)?;
- if fmt_options.check {
+ if fmt_flags.check {
#[allow(clippy::print_stdout)]
if formatted_text.is_some() {
println!("Not formatted stdin");
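
fmt now picks one Formatter strategy up front and reuses it across every batch, deferring the summary to finish(). A trimmed-down sketch of that split with simplified signatures (the real trait also threads FmtOptionsConfig and the incremental cache through; this assumes the async-trait crate, which the diff already imports):

    use async_trait::async_trait;
    use std::path::PathBuf;

    #[async_trait(?Send)]
    trait Formatter {
        async fn handle_files(&self, paths: Vec<PathBuf>) -> Result<(), String>;
        fn finish(&self) -> Result<(), String>;
    }

    struct CheckFormatter;
    struct RealFormatter;

    #[async_trait(?Send)]
    impl Formatter for CheckFormatter {
        async fn handle_files(&self, paths: Vec<PathBuf>) -> Result<(), String> {
            // diff and count, but never write
            println!("would check {} files", paths.len());
            Ok(())
        }
        fn finish(&self) -> Result<(), String> {
            // returns Err (non-zero exit) if anything was unformatted
            Ok(())
        }
    }

    #[async_trait(?Send)]
    impl Formatter for RealFormatter {
        async fn handle_files(&self, paths: Vec<PathBuf>) -> Result<(), String> {
            println!("would format {} files", paths.len());
            Ok(())
        }
        fn finish(&self) -> Result<(), String> {
            Ok(())
        }
    }

    async fn run(check: bool, batches: Vec<Vec<PathBuf>>) -> Result<(), String> {
        let formatter: Box<dyn Formatter> = if check {
            Box::new(CheckFormatter)
        } else {
            Box::new(RealFormatter)
        };
        for batch in batches {
            formatter.handle_files(batch).await?;
        }
        // summary/exit decision is deferred until all batches have run
        formatter.finish()
    }
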
diff --git a/cli/tools/info.rs b/cli/tools/info.rs
index 76951b13d..18a4bed57 100644
--- a/cli/tools/info.rs
+++ b/cli/tools/info.rs
@@ -42,19 +42,20 @@ pub async fn info(flags: Flags, info_flags: InfoFlags) -> Result<(), AnyError> {
let module_graph_creator = factory.module_graph_creator().await?;
let npm_resolver = factory.npm_resolver().await?;
let maybe_lockfile = factory.maybe_lockfile();
- let maybe_imports_map = factory.maybe_import_map().await?;
-
- let maybe_import_specifier = if let Some(imports_map) = maybe_imports_map {
- if let Ok(imports_specifier) =
- imports_map.resolve(&specifier, imports_map.base_url())
- {
- Some(imports_specifier)
+ let resolver = factory.workspace_resolver().await?;
+
+ let maybe_import_specifier =
+ if let Some(import_map) = resolver.maybe_import_map() {
+ if let Ok(imports_specifier) =
+ import_map.resolve(&specifier, import_map.base_url())
+ {
+ Some(imports_specifier)
+ } else {
+ None
+ }
} else {
None
- }
- } else {
- None
- };
+ };
let specifier = match maybe_import_specifier {
Some(specifier) => specifier,
diff --git a/cli/tools/lint/mod.rs b/cli/tools/lint/mod.rs
index 0d9868cf2..e3f2844a7 100644
--- a/cli/tools/lint/mod.rs
+++ b/cli/tools/lint/mod.rs
@@ -9,13 +9,21 @@ use deno_ast::ParsedSource;
use deno_ast::SourceRange;
use deno_ast::SourceTextInfo;
use deno_config::glob::FilePatterns;
+use deno_config::workspace::Workspace;
+use deno_config::workspace::WorkspaceMemberContext;
+use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
+use deno_core::futures::future::LocalBoxFuture;
+use deno_core::futures::FutureExt;
use deno_core::parking_lot::Mutex;
use deno_core::serde_json;
+use deno_core::unsync::future::LocalFutureExt;
+use deno_core::unsync::future::SharedLocal;
use deno_graph::FastCheckDiagnostic;
+use deno_graph::ModuleGraph;
use deno_lint::diagnostic::LintDiagnostic;
use deno_lint::linter::LintConfig;
use deno_lint::linter::LintFileOptions;
@@ -33,6 +41,7 @@ use std::io::stdin;
use std::io::Read;
use std::path::Path;
use std::path::PathBuf;
+use std::rc::Rc;
use std::sync::Arc;
use crate::args::CliOptions;
@@ -41,9 +50,12 @@ use crate::args::LintFlags;
use crate::args::LintOptions;
use crate::args::LintReporterKind;
use crate::args::LintRulesConfig;
+use crate::args::WorkspaceLintOptions;
+use crate::cache::Caches;
use crate::cache::IncrementalCache;
use crate::colors;
use crate::factory::CliFactory;
+use crate::graph_util::ModuleGraphCreator;
use crate::tools::fmt::run_parallelized;
use crate::util::file_watcher;
use crate::util::fs::canonicalize_path;
@@ -79,35 +91,49 @@ pub async fn lint(flags: Flags, lint_flags: LintFlags) -> Result<(), AnyError> {
Ok(async move {
let factory = CliFactory::from_flags(flags)?;
let cli_options = factory.cli_options();
- let lint_options = cli_options.resolve_lint_options(lint_flags)?;
let lint_config = cli_options.resolve_lint_config()?;
- let files =
- collect_lint_files(cli_options, lint_options.files.clone())
- .and_then(|files| {
- if files.is_empty() {
- Err(generic_error("No target files found."))
- } else {
- Ok(files)
- }
- })?;
- _ = watcher_communicator.watch_paths(files.clone());
-
- let lint_paths = if let Some(paths) = changed_paths {
- // lint all files on any changed (https://github.com/denoland/deno/issues/12446)
- files
- .iter()
- .any(|path| {
- canonicalize_path(path)
- .map(|p| paths.contains(&p))
- .unwrap_or(false)
- })
- .then_some(files)
- .unwrap_or_else(|| [].to_vec())
- } else {
- files
- };
-
- lint_files(factory, lint_options, lint_config, lint_paths).await?;
+ let mut paths_with_options_batches =
+ resolve_paths_with_options_batches(cli_options, &lint_flags)?;
+ for paths_with_options in &mut paths_with_options_batches {
+ _ = watcher_communicator
+ .watch_paths(paths_with_options.paths.clone());
+
+ let files = std::mem::take(&mut paths_with_options.paths);
+ paths_with_options.paths = if let Some(paths) = &changed_paths {
+          // lint all files when any file changes (https://github.com/denoland/deno/issues/12446)
+ files
+ .iter()
+ .any(|path| {
+ canonicalize_path(path)
+ .map(|p| paths.contains(&p))
+ .unwrap_or(false)
+ })
+ .then_some(files)
+ .unwrap_or_else(|| [].to_vec())
+ } else {
+ files
+ };
+ }
+
+ let mut linter = WorkspaceLinter::new(
+ factory.caches()?.clone(),
+ factory.module_graph_creator().await?.clone(),
+ cli_options.workspace.clone(),
+ &cli_options.resolve_workspace_lint_options(&lint_flags)?,
+ );
+ for paths_with_options in paths_with_options_batches {
+ linter
+ .lint_files(
+ paths_with_options.options,
+ lint_config.clone(),
+ paths_with_options.ctx,
+ paths_with_options.paths,
+ )
+ .await?;
+ }
+
+ linter.finish();
+
Ok(())
})
},
@@ -117,15 +143,19 @@ pub async fn lint(flags: Flags, lint_flags: LintFlags) -> Result<(), AnyError> {
let factory = CliFactory::from_flags(flags)?;
let cli_options = factory.cli_options();
let is_stdin = lint_flags.is_stdin();
- let lint_options = cli_options.resolve_lint_options(lint_flags)?;
let lint_config = cli_options.resolve_lint_config()?;
- let files = &lint_options.files;
+ let workspace_lint_options =
+ cli_options.resolve_workspace_lint_options(&lint_flags)?;
let success = if is_stdin {
- let reporter_kind = lint_options.reporter_kind;
- let reporter_lock = Arc::new(Mutex::new(create_reporter(reporter_kind)));
+ let start_ctx = cli_options.workspace.resolve_start_ctx();
+ let reporter_lock = Arc::new(Mutex::new(create_reporter(
+ workspace_lint_options.reporter_kind,
+ )));
+ let lint_options =
+ cli_options.resolve_lint_options(lint_flags, &start_ctx)?;
let lint_rules = get_config_rules_err_empty(
lint_options.rules,
- cli_options.maybe_config_file().as_ref(),
+ start_ctx.maybe_deno_json().map(|c| c.as_ref()),
)?;
let file_path = cli_options.initial_cwd().join(STDIN_FILE_NAME);
let r = lint_stdin(&file_path, lint_rules.rules, lint_config);
@@ -137,16 +167,25 @@ pub async fn lint(flags: Flags, lint_flags: LintFlags) -> Result<(), AnyError> {
reporter_lock.lock().close(1);
success
} else {
- let target_files = collect_lint_files(cli_options, files.clone())
- .and_then(|files| {
- if files.is_empty() {
- Err(generic_error("No target files found."))
- } else {
- Ok(files)
- }
- })?;
- debug!("Found {} files", target_files.len());
- lint_files(factory, lint_options, lint_config, target_files).await?
+ let mut linter = WorkspaceLinter::new(
+ factory.caches()?.clone(),
+ factory.module_graph_creator().await?.clone(),
+ cli_options.workspace.clone(),
+ &workspace_lint_options,
+ );
+ let paths_with_options_batches =
+ resolve_paths_with_options_batches(cli_options, &lint_flags)?;
+ for paths_with_options in paths_with_options_batches {
+ linter
+ .lint_files(
+ paths_with_options.options,
+ lint_config.clone(),
+ paths_with_options.ctx,
+ paths_with_options.paths,
+ )
+ .await?;
+ }
+ linter.finish()
};
if !success {
std::process::exit(1);
@@ -156,121 +195,202 @@ pub async fn lint(flags: Flags, lint_flags: LintFlags) -> Result<(), AnyError> {
Ok(())
}
-async fn lint_files(
- factory: CliFactory,
- lint_options: LintOptions,
- lint_config: LintConfig,
+struct PathsWithOptions {
+ ctx: WorkspaceMemberContext,
paths: Vec<PathBuf>,
-) -> Result<bool, AnyError> {
- let caches = factory.caches()?;
- let maybe_config_file = factory.cli_options().maybe_config_file().as_ref();
- let lint_rules =
- get_config_rules_err_empty(lint_options.rules, maybe_config_file)?;
- let incremental_cache = Arc::new(IncrementalCache::new(
- caches.lint_incremental_cache_db(),
- &lint_rules.incremental_cache_state(),
- &paths,
- ));
- let target_files_len = paths.len();
- let reporter_kind = lint_options.reporter_kind;
- // todo(dsherret): abstract away this lock behind a performant interface
- let reporter_lock =
- Arc::new(Mutex::new(create_reporter(reporter_kind.clone())));
- let has_error = Arc::new(AtomicFlag::default());
-
- let mut futures = Vec::with_capacity(2);
- if lint_rules.no_slow_types {
- if let Some(config_file) = maybe_config_file {
- let members = config_file.to_workspace_members()?;
- let has_error = has_error.clone();
- let reporter_lock = reporter_lock.clone();
- let module_graph_creator = factory.module_graph_creator().await?.clone();
- let path_urls = paths
- .iter()
- .filter_map(|p| ModuleSpecifier::from_file_path(p).ok())
- .collect::<HashSet<_>>();
- futures.push(deno_core::unsync::spawn(async move {
- let graph = module_graph_creator
- .create_and_validate_publish_graph(&members, true)
- .await?;
- // todo(dsherret): this isn't exactly correct as linting isn't properly
- // setup to handle workspaces. Iterating over the workspace members
- // should be done at a higher level because it also needs to take into
- // account the config per workspace member.
- for member in &members {
- let export_urls = member.config_file.resolve_export_value_urls()?;
- if !export_urls.iter().any(|url| path_urls.contains(url)) {
- continue; // entrypoint is not specified, so skip
+ options: LintOptions,
+}
+
+fn resolve_paths_with_options_batches(
+ cli_options: &CliOptions,
+ lint_flags: &LintFlags,
+) -> Result<Vec<PathsWithOptions>, AnyError> {
+ let members_lint_options =
+ cli_options.resolve_lint_options_for_members(lint_flags)?;
+ let mut paths_with_options_batches =
+ Vec::with_capacity(members_lint_options.len());
+ for (ctx, lint_options) in members_lint_options {
+ let files = collect_lint_files(cli_options, lint_options.files.clone())?;
+ if !files.is_empty() {
+ paths_with_options_batches.push(PathsWithOptions {
+ ctx,
+ paths: files,
+ options: lint_options,
+ });
+ }
+ }
+ if paths_with_options_batches.is_empty() {
+ return Err(generic_error("No target files found."));
+ }
+ Ok(paths_with_options_batches)
+}
+
+type WorkspaceModuleGraphFuture =
+ SharedLocal<LocalBoxFuture<'static, Result<Rc<ModuleGraph>, Rc<AnyError>>>>;
+
+struct WorkspaceLinter {
+ caches: Arc<Caches>,
+ module_graph_creator: Arc<ModuleGraphCreator>,
+ workspace: Arc<Workspace>,
+ reporter_lock: Arc<Mutex<Box<dyn LintReporter + Send>>>,
+ workspace_module_graph: Option<WorkspaceModuleGraphFuture>,
+ has_error: Arc<AtomicFlag>,
+ file_count: usize,
+}
+
+impl WorkspaceLinter {
+ pub fn new(
+ caches: Arc<Caches>,
+ module_graph_creator: Arc<ModuleGraphCreator>,
+ workspace: Arc<Workspace>,
+ workspace_options: &WorkspaceLintOptions,
+ ) -> Self {
+ let reporter_lock =
+ Arc::new(Mutex::new(create_reporter(workspace_options.reporter_kind)));
+ Self {
+ caches,
+ module_graph_creator,
+ workspace,
+ reporter_lock,
+ workspace_module_graph: None,
+ has_error: Default::default(),
+ file_count: 0,
+ }
+ }
+
+ pub async fn lint_files(
+ &mut self,
+ lint_options: LintOptions,
+ lint_config: LintConfig,
+ member_ctx: WorkspaceMemberContext,
+ paths: Vec<PathBuf>,
+ ) -> Result<(), AnyError> {
+ self.file_count += paths.len();
+
+ let lint_rules = get_config_rules_err_empty(
+ lint_options.rules,
+ member_ctx.maybe_deno_json().map(|c| c.as_ref()),
+ )?;
+ let incremental_cache = Arc::new(IncrementalCache::new(
+ self.caches.lint_incremental_cache_db(),
+ &lint_rules.incremental_cache_state(),
+ &paths,
+ ));
+
+ let mut futures = Vec::with_capacity(2);
+ if lint_rules.no_slow_types {
+ if self.workspace_module_graph.is_none() {
+ let module_graph_creator = self.module_graph_creator.clone();
+ let packages = self.workspace.jsr_packages_for_publish();
+ self.workspace_module_graph = Some(
+ async move {
+ module_graph_creator
+ .create_and_validate_publish_graph(&packages, true)
+ .await
+ .map(Rc::new)
+ .map_err(Rc::new)
}
- let diagnostics = no_slow_types::collect_no_slow_type_diagnostics(
- &export_urls,
- &graph,
- );
- if !diagnostics.is_empty() {
- has_error.raise();
- let mut reporter = reporter_lock.lock();
- for diagnostic in &diagnostics {
- reporter
- .visit_diagnostic(LintOrCliDiagnostic::FastCheck(diagnostic));
+ .boxed_local()
+ .shared_local(),
+ );
+ }
+ let workspace_module_graph_future =
+ self.workspace_module_graph.as_ref().unwrap().clone();
+ let publish_config = member_ctx.maybe_package_config();
+ if let Some(publish_config) = publish_config {
+ let has_error = self.has_error.clone();
+ let reporter_lock = self.reporter_lock.clone();
+ let path_urls = paths
+ .iter()
+ .filter_map(|p| ModuleSpecifier::from_file_path(p).ok())
+ .collect::<HashSet<_>>();
+ futures.push(
+ async move {
+ let graph = workspace_module_graph_future
+ .await
+ .map_err(|err| anyhow!("{:#}", err))?;
+ let export_urls =
+ publish_config.config_file.resolve_export_value_urls()?;
+ if !export_urls.iter().any(|url| path_urls.contains(url)) {
+ return Ok(()); // entrypoint is not specified, so skip
}
+ let diagnostics = no_slow_types::collect_no_slow_type_diagnostics(
+ &export_urls,
+ &graph,
+ );
+ if !diagnostics.is_empty() {
+ has_error.raise();
+ let mut reporter = reporter_lock.lock();
+ for diagnostic in &diagnostics {
+ reporter
+ .visit_diagnostic(LintOrCliDiagnostic::FastCheck(diagnostic));
+ }
+ }
+ Ok(())
}
- }
- Ok(())
- }));
- }
- }
-
- futures.push({
- let has_error = has_error.clone();
- let linter = create_linter(lint_rules.rules);
- let reporter_lock = reporter_lock.clone();
- let incremental_cache = incremental_cache.clone();
- let lint_config = lint_config.clone();
- let fix = lint_options.fix;
- deno_core::unsync::spawn(async move {
- run_parallelized(paths, {
- move |file_path| {
- let file_text = deno_ast::strip_bom(fs::read_to_string(&file_path)?);
-
- // don't bother rechecking this file if it didn't have any diagnostics before
- if incremental_cache.is_file_same(&file_path, &file_text) {
- return Ok(());
- }
+ .boxed_local(),
+ );
+ }
+ }
- let r = lint_file(&linter, &file_path, file_text, lint_config, fix);
- if let Ok((file_source, file_diagnostics)) = &r {
- if file_diagnostics.is_empty() {
- // update the incremental cache if there were no diagnostics
- incremental_cache.update_file(
- &file_path,
- // ensure the returned text is used here as it may have been modified via --fix
- file_source.text(),
- )
+ futures.push({
+ let has_error = self.has_error.clone();
+ let linter = create_linter(lint_rules.rules);
+ let reporter_lock = self.reporter_lock.clone();
+ let incremental_cache = incremental_cache.clone();
+ let lint_config = lint_config.clone();
+ let fix = lint_options.fix;
+ async move {
+ run_parallelized(paths, {
+ move |file_path| {
+ let file_text =
+ deno_ast::strip_bom(fs::read_to_string(&file_path)?);
+
+ // don't bother rechecking this file if it didn't have any diagnostics before
+ if incremental_cache.is_file_same(&file_path, &file_text) {
+ return Ok(());
}
- }
- let success = handle_lint_result(
- &file_path.to_string_lossy(),
- r,
- reporter_lock.clone(),
- );
- if !success {
- has_error.raise();
- }
+ let r = lint_file(&linter, &file_path, file_text, lint_config, fix);
+ if let Ok((file_source, file_diagnostics)) = &r {
+ if file_diagnostics.is_empty() {
+ // update the incremental cache if there were no diagnostics
+ incremental_cache.update_file(
+ &file_path,
+ // ensure the returned text is used here as it may have been modified via --fix
+ file_source.text(),
+ )
+ }
+ }
- Ok(())
- }
- })
- .await
- })
- });
+ let success = handle_lint_result(
+ &file_path.to_string_lossy(),
+ r,
+ reporter_lock.clone(),
+ );
+ if !success {
+ has_error.raise();
+ }
- deno_core::futures::future::try_join_all(futures).await?;
+ Ok(())
+ }
+ })
+ .await
+ }
+ .boxed_local()
+ });
- incremental_cache.wait_completion().await;
- reporter_lock.lock().close(target_files_len);
+ deno_core::futures::future::try_join_all(futures).await?;
- Ok(!has_error.is_raised())
+ incremental_cache.wait_completion().await;
+ Ok(())
+ }
+
+ pub fn finish(self) -> bool {
+ debug!("Found {} files", self.file_count);
+ self.reporter_lock.lock().close(self.file_count);
+ !self.has_error.is_raised() // success
+ }
}
fn collect_lint_files(
@@ -692,9 +812,8 @@ impl LintReporter for PrettyLintReporter {
}
match check_count {
- n if n <= 1 => info!("Checked {} file", n),
- n if n > 1 => info!("Checked {} files", n),
- _ => unreachable!(),
+ 1 => info!("Checked 1 file"),
+ n => info!("Checked {} files", n),
}
}
}
@@ -744,9 +863,8 @@ impl LintReporter for CompactLintReporter {
}
match check_count {
- n if n <= 1 => info!("Checked {} file", n),
- n if n > 1 => info!("Checked {} files", n),
- _ => unreachable!(),
+ 1 => info!("Checked 1 file"),
+ n => info!("Checked {} files", n),
}
}
}
@@ -910,9 +1028,8 @@ pub fn get_configured_rules(
maybe_config_file: Option<&deno_config::ConfigFile>,
) -> ConfiguredRules {
const NO_SLOW_TYPES_NAME: &str = "no-slow-types";
- let implicit_no_slow_types = maybe_config_file
- .map(|c| c.is_package() || c.json.workspace.is_some())
- .unwrap_or(false);
+ let implicit_no_slow_types =
+ maybe_config_file.map(|c| c.is_package()).unwrap_or(false);
let no_slow_types = implicit_no_slow_types
&& !rules
.exclude
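
The new `WorkspaceLinter` builds the workspace module graph at most once, even though several member batches may need it for the no-slow-types check: the first batch kicks off the build and stores a shared, clonable future; later batches clone and await the same one. `deno_core::unsync::future::SharedLocal` plays the role that `futures::future::Shared` plays in this sketch (assumptions: `futures = "0.3"`; a `Vec<u8>` stands in for the module graph):

```rust
use std::rc::Rc;
use futures::future::{FutureExt, LocalBoxFuture, Shared};

// A shared future's output must be Clone, hence the Rc wrappers
// (mirroring the Rc<ModuleGraph> / Rc<AnyError> above).
type SharedGraph =
    Shared<LocalBoxFuture<'static, Result<Rc<Vec<u8>>, Rc<String>>>>;

struct Linter {
    graph: Option<SharedGraph>,
}

impl Linter {
    fn graph_future(&mut self) -> SharedGraph {
        self.graph
            .get_or_insert_with(|| {
                async {
                    // stand-in for create_and_validate_publish_graph(..)
                    Ok(Rc::new(vec![1, 2, 3]))
                }
                .boxed_local()
                .shared()
            })
            .clone()
    }
}

fn main() {
    let mut linter = Linter { graph: None };
    let (a, b) = (linter.graph_future(), linter.graph_future());
    let (a, b) = futures::executor::block_on(async { (a.await, b.await) });
    // Both awaits see the same Rc: the graph was built exactly once.
    assert!(Rc::ptr_eq(&a.unwrap(), &b.unwrap()));
}
```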
diff --git a/cli/tools/registry/mod.rs b/cli/tools/registry/mod.rs
index d300e5eaf..134a973f7 100644
--- a/cli/tools/registry/mod.rs
+++ b/cli/tools/registry/mod.rs
@@ -11,9 +11,8 @@ use std::sync::Arc;
use base64::prelude::BASE64_STANDARD;
use base64::Engine;
use deno_ast::ModuleSpecifier;
-use deno_config::glob::FilePatterns;
-use deno_config::ConfigFile;
-use deno_config::WorkspaceMemberConfig;
+use deno_config::workspace::JsrPackageConfig;
+use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
@@ -27,7 +26,6 @@ use deno_core::serde_json::Value;
use deno_runtime::deno_fetch::reqwest;
use deno_runtime::deno_fs::FileSystem;
use deno_terminal::colors;
-use import_map::ImportMap;
use lsp_types::Url;
use serde::Deserialize;
use serde::Serialize;
@@ -44,7 +42,6 @@ use crate::cache::ParsedSourceCache;
use crate::factory::CliFactory;
use crate::graph_util::ModuleGraphCreator;
use crate::http_util::HttpClient;
-use crate::resolver::MappedSpecifierResolver;
use crate::resolver::SloppyImportsResolver;
use crate::tools::check::CheckOptions;
use crate::tools::lint::no_slow_types;
@@ -84,27 +81,28 @@ pub async fn publish(
let auth_method =
get_auth_method(publish_flags.token, publish_flags.dry_run)?;
- let import_map = cli_factory
- .maybe_import_map()
- .await?
- .clone()
- .unwrap_or_else(|| {
- Arc::new(ImportMap::new(Url::parse("file:///dev/null").unwrap()))
- });
+ let workspace_resolver = cli_factory.workspace_resolver().await?.clone();
let directory_path = cli_factory.cli_options().initial_cwd();
-
- let mapped_resolver = Arc::new(MappedSpecifierResolver::new(
- Some(import_map),
- cli_factory.package_json_deps_provider().clone(),
- ));
let cli_options = cli_factory.cli_options();
- let Some(config_file) = cli_options.maybe_config_file() else {
- bail!(
- "Couldn't find a deno.json, deno.jsonc, jsr.json or jsr.jsonc configuration file in {}.",
- directory_path.display()
- );
- };
+ let publish_configs = cli_options.workspace.jsr_packages_for_publish();
+ if publish_configs.is_empty() {
+ match cli_options.workspace.resolve_start_ctx().maybe_deno_json() {
+ Some(deno_json) => {
+ debug_assert!(!deno_json.is_package());
+ bail!(
+            "Missing 'name', 'version' and 'exports' fields in '{}'.",
+ deno_json.specifier
+ );
+ }
+ None => {
+ bail!(
+ "Couldn't find a deno.json, deno.jsonc, jsr.json or jsr.jsonc configuration file in {}.",
+ directory_path.display()
+ );
+ }
+ }
+ }
let diagnostics_collector = PublishDiagnosticsCollector::default();
let publish_preparer = PublishPreparer::new(
@@ -114,14 +112,14 @@ pub async fn publish(
cli_factory.type_checker().await?.clone(),
cli_factory.fs().clone(),
cli_factory.cli_options().clone(),
- mapped_resolver,
+ workspace_resolver,
);
let prepared_data = publish_preparer
.prepare_packages_for_publishing(
publish_flags.allow_slow_types,
&diagnostics_collector,
- config_file.clone(),
+ publish_configs,
)
.await?;
@@ -193,8 +191,8 @@ struct PublishPreparer {
source_cache: Arc<ParsedSourceCache>,
type_checker: Arc<TypeChecker>,
cli_options: Arc<CliOptions>,
- mapped_resolver: Arc<MappedSpecifierResolver>,
sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
+ workspace_resolver: Arc<WorkspaceResolver>,
}
impl PublishPreparer {
@@ -205,7 +203,7 @@ impl PublishPreparer {
type_checker: Arc<TypeChecker>,
fs: Arc<dyn FileSystem>,
cli_options: Arc<CliOptions>,
- mapped_resolver: Arc<MappedSpecifierResolver>,
+ workspace_resolver: Arc<WorkspaceResolver>,
) -> Self {
let sloppy_imports_resolver = if cli_options.unstable_sloppy_imports() {
Some(Arc::new(SloppyImportsResolver::new(fs.clone())))
@@ -218,8 +216,8 @@ impl PublishPreparer {
source_cache,
type_checker,
cli_options,
- mapped_resolver,
sloppy_imports_resolver,
+ workspace_resolver,
}
}
@@ -227,11 +225,9 @@ impl PublishPreparer {
&self,
allow_slow_types: bool,
diagnostics_collector: &PublishDiagnosticsCollector,
- deno_json: ConfigFile,
+ publish_configs: Vec<JsrPackageConfig>,
) -> Result<PreparePackagesData, AnyError> {
- let members = deno_json.to_workspace_members()?;
-
- if members.len() > 1 {
+ if publish_configs.len() > 1 {
log::info!("Publishing a workspace...");
}
@@ -240,31 +236,24 @@ impl PublishPreparer {
.build_and_check_graph_for_publish(
allow_slow_types,
diagnostics_collector,
- &members,
+ &publish_configs,
)
.await?;
- let mut package_by_name = HashMap::with_capacity(members.len());
+ let mut package_by_name = HashMap::with_capacity(publish_configs.len());
let publish_order_graph =
- publish_order::build_publish_order_graph(&graph, &members)?;
+ publish_order::build_publish_order_graph(&graph, &publish_configs)?;
- let results = members
+ let results = publish_configs
.into_iter()
.map(|member| {
let graph = graph.clone();
async move {
let package = self
- .prepare_publish(
- &member.package_name,
- &member.config_file,
- graph,
- diagnostics_collector,
- )
+ .prepare_publish(&member, graph, diagnostics_collector)
.await
- .with_context(|| {
- format!("Failed preparing '{}'.", member.package_name)
- })?;
- Ok::<_, AnyError>((member.package_name, package))
+ .with_context(|| format!("Failed preparing '{}'.", member.name))?;
+ Ok::<_, AnyError>((member.name, package))
}
.boxed()
})
@@ -284,12 +273,15 @@ impl PublishPreparer {
&self,
allow_slow_types: bool,
diagnostics_collector: &PublishDiagnosticsCollector,
- packages: &[WorkspaceMemberConfig],
+ package_configs: &[JsrPackageConfig],
) -> Result<Arc<deno_graph::ModuleGraph>, deno_core::anyhow::Error> {
let build_fast_check_graph = !allow_slow_types;
let graph = self
.module_graph_creator
- .create_and_validate_publish_graph(packages, build_fast_check_graph)
+ .create_and_validate_publish_graph(
+ package_configs,
+ build_fast_check_graph,
+ )
.await?;
// todo(dsherret): move to lint rule
@@ -335,7 +327,7 @@ impl PublishPreparer {
} else {
log::info!("Checking for slow types in the public API...");
let mut any_pkg_had_diagnostics = false;
- for package in packages {
+ for package in package_configs {
let export_urls = package.config_file.resolve_export_value_urls()?;
let diagnostics =
no_slow_types::collect_no_slow_type_diagnostics(&export_urls, &graph);
@@ -389,14 +381,14 @@ impl PublishPreparer {
#[allow(clippy::too_many_arguments)]
async fn prepare_publish(
&self,
- package_name: &str,
- deno_json: &ConfigFile,
+ package: &JsrPackageConfig,
graph: Arc<deno_graph::ModuleGraph>,
diagnostics_collector: &PublishDiagnosticsCollector,
) -> Result<Rc<PreparedPublishPackage>, AnyError> {
static SUGGESTED_ENTRYPOINTS: [&str; 4] =
["mod.ts", "mod.js", "index.ts", "index.js"];
+ let deno_json = &package.config_file;
let config_path = deno_json.specifier.to_file_path().unwrap();
let root_dir = config_path.parent().unwrap().to_path_buf();
let Some(version) = deno_json.json.version.clone() else {
@@ -418,32 +410,29 @@ impl PublishPreparer {
"version": "{}",
"exports": "{}"
}}"#,
- package_name,
+ package.name,
version,
suggested_entrypoint.unwrap_or("<path_to_entrypoint>")
);
bail!(
"You did not specify an entrypoint to \"{}\" package in {}. Add `exports` mapping in the configuration file, eg:\n{}",
- package_name,
+ package.name,
deno_json.specifier,
exports_content
);
}
- let Some(name_no_at) = package_name.strip_prefix('@') else {
+ let Some(name_no_at) = package.name.strip_prefix('@') else {
bail!("Invalid package name, use '@<scope_name>/<package_name> format");
};
let Some((scope, name_no_scope)) = name_no_at.split_once('/') else {
bail!("Invalid package name, use '@<scope_name>/<package_name> format");
};
- let file_patterns = deno_json
- .to_publish_config()?
- .map(|c| c.files)
- .unwrap_or_else(|| FilePatterns::new_with_base(root_dir.to_path_buf()));
+ let file_patterns = package.member_ctx.to_publish_config()?.files;
let tarball = deno_core::unsync::spawn_blocking({
let diagnostics_collector = diagnostics_collector.clone();
- let mapped_resolver = self.mapped_resolver.clone();
+ let workspace_resolver = self.workspace_resolver.clone();
let sloppy_imports_resolver = self.sloppy_imports_resolver.clone();
let cli_options = self.cli_options.clone();
let source_cache = self.source_cache.clone();
@@ -451,8 +440,8 @@ impl PublishPreparer {
move || {
let bare_node_builtins = cli_options.unstable_bare_node_builtins();
let unfurler = SpecifierUnfurler::new(
- &mapped_resolver,
sloppy_imports_resolver.as_deref(),
+ &workspace_resolver,
bare_node_builtins,
);
let root_specifier =
@@ -482,7 +471,7 @@ impl PublishPreparer {
})
.await??;
- log::debug!("Tarball size ({}): {}", package_name, tarball.bytes.len());
+ log::debug!("Tarball size ({}): {}", package.name, tarball.bytes.len());
Ok(Rc::new(PreparedPublishPackage {
scope: scope.to_string(),
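
Publishing now iterates over `JsrPackageConfig`s, but the per-package name validation is the same scope/name split as before. A self-contained sketch of that check (the error string mirrors the `bail!`s above):

```rust
fn split_package_name(name: &str) -> Result<(&str, &str), String> {
    const ERR: &str =
        "Invalid package name, use '@<scope_name>/<package_name>' format";
    // "@std/path" -> ("std", "path"); anything else is rejected
    let name_no_at = name.strip_prefix('@').ok_or_else(|| ERR.to_string())?;
    name_no_at.split_once('/').ok_or_else(|| ERR.to_string())
}

fn main() {
    assert_eq!(split_package_name("@std/path"), Ok(("std", "path")));
    assert!(split_package_name("std/path").is_err());
    assert!(split_package_name("@std").is_err());
}
```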
diff --git a/cli/tools/registry/pm.rs b/cli/tools/registry/pm.rs
index 4fdc02550..e3e2f1b55 100644
--- a/cli/tools/registry/pm.rs
+++ b/cli/tools/registry/pm.rs
@@ -49,7 +49,7 @@ impl DenoConfigFormat {
}
enum DenoOrPackageJson {
- Deno(deno_config::ConfigFile, DenoConfigFormat),
+ Deno(Arc<deno_config::ConfigFile>, DenoConfigFormat),
Npm(Arc<deno_node::PackageJson>, Option<FmtOptionsConfig>),
}
@@ -87,7 +87,6 @@ impl DenoOrPackageJson {
DenoOrPackageJson::Deno(deno, ..) => deno
.to_fmt_config()
.ok()
- .flatten()
.map(|f| f.options)
.unwrap_or_default(),
DenoOrPackageJson::Npm(_, config) => config.clone().unwrap_or_default(),
@@ -122,9 +121,10 @@ impl DenoOrPackageJson {
/// the new config
fn from_flags(flags: Flags) -> Result<(Self, CliFactory), AnyError> {
let factory = CliFactory::from_flags(flags.clone())?;
- let options = factory.cli_options().clone();
+ let options = factory.cli_options();
+ let start_ctx = options.workspace.resolve_start_ctx();
- match (options.maybe_config_file(), options.maybe_package_json()) {
+ match (start_ctx.maybe_deno_json(), start_ctx.maybe_pkg_json()) {
// when both are present, for now,
// default to deno.json
(Some(deno), Some(_) | None) => Ok((
@@ -141,20 +141,17 @@ impl DenoOrPackageJson {
std::fs::write(options.initial_cwd().join("deno.json"), "{}\n")
.context("Failed to create deno.json file")?;
log::info!("Created deno.json configuration file.");
- let new_factory = CliFactory::from_flags(flags.clone())?;
- let new_options = new_factory.cli_options().clone();
+ let factory = CliFactory::from_flags(flags.clone())?;
+ let options = factory.cli_options().clone();
+ let start_ctx = options.workspace.resolve_start_ctx();
Ok((
DenoOrPackageJson::Deno(
- new_options
- .maybe_config_file()
- .as_ref()
- .ok_or_else(|| {
- anyhow!("config not found, but it was just created")
- })?
- .clone(),
+ start_ctx.maybe_deno_json().cloned().ok_or_else(|| {
+ anyhow!("config not found, but it was just created")
+ })?,
DenoConfigFormat::Json,
),
- new_factory,
+ factory,
))
}
}
diff --git a/cli/tools/registry/publish_order.rs b/cli/tools/registry/publish_order.rs
index ad0f72272..ad77a56bb 100644
--- a/cli/tools/registry/publish_order.rs
+++ b/cli/tools/registry/publish_order.rs
@@ -5,7 +5,7 @@ use std::collections::HashSet;
use std::collections::VecDeque;
use deno_ast::ModuleSpecifier;
-use deno_config::WorkspaceMemberConfig;
+use deno_config::workspace::JsrPackageConfig;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_graph::ModuleGraph;
@@ -114,7 +114,7 @@ impl PublishOrderGraph {
pub fn build_publish_order_graph(
graph: &ModuleGraph,
- roots: &[WorkspaceMemberConfig],
+ roots: &[JsrPackageConfig],
) -> Result<PublishOrderGraph, AnyError> {
let packages = build_pkg_deps(graph, roots)?;
Ok(build_publish_order_graph_from_pkgs_deps(packages))
@@ -122,18 +122,23 @@ pub fn build_publish_order_graph(
fn build_pkg_deps(
graph: &deno_graph::ModuleGraph,
- roots: &[WorkspaceMemberConfig],
+ roots: &[JsrPackageConfig],
) -> Result<HashMap<String, HashSet<String>>, AnyError> {
let mut members = HashMap::with_capacity(roots.len());
let mut seen_modules = HashSet::with_capacity(graph.modules().count());
let roots = roots
.iter()
- .map(|r| (ModuleSpecifier::from_file_path(&r.dir_path).unwrap(), r))
+ .map(|r| {
+ (
+ ModuleSpecifier::from_directory_path(r.config_file.dir_path()).unwrap(),
+ r,
+ )
+ })
.collect::<Vec<_>>();
- for (root_dir_url, root) in &roots {
+ for (root_dir_url, pkg_config) in &roots {
let mut deps = HashSet::new();
let mut pending = VecDeque::new();
- pending.extend(root.config_file.resolve_export_value_urls()?);
+ pending.extend(pkg_config.config_file.resolve_export_value_urls()?);
while let Some(specifier) = pending.pop_front() {
let Some(module) = graph.get(&specifier).and_then(|m| m.js()) else {
continue;
@@ -168,12 +173,12 @@ fn build_pkg_deps(
specifier.as_str().starts_with(dir_url.as_str())
});
if let Some(root) = found_root {
- deps.insert(root.1.package_name.clone());
+ deps.insert(root.1.name.clone());
}
}
}
}
- members.insert(root.package_name.clone(), deps);
+ members.insert(pkg_config.name.clone(), deps);
}
Ok(members)
}
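
`build_pkg_deps` now keys workspace-internal dependencies off each `JsrPackageConfig`'s directory URL, but the resulting name-to-dependencies map still feeds an ordinary dependency-order computation. A minimal sketch of that ordering (a plain Kahn-style topological sort; the real `PublishOrderGraph` additionally tracks in-flight packages and bails on cycles):

```rust
use std::collections::{HashMap, HashSet, VecDeque};

fn publish_order(mut deps: HashMap<String, HashSet<String>>) -> Vec<String> {
    let mut order = Vec::with_capacity(deps.len());
    // packages whose dependencies are all published are ready to go
    let mut ready: VecDeque<String> = deps
        .iter()
        .filter(|(_, d)| d.is_empty())
        .map(|(name, _)| name.clone())
        .collect();
    while let Some(pkg) = ready.pop_front() {
        deps.remove(&pkg);
        for (other, d) in deps.iter_mut() {
            // "publishing" pkg may unblock packages that depended on it
            if d.remove(&pkg) && d.is_empty() {
                ready.push_back(other.clone());
            }
        }
        order.push(pkg);
    }
    order
}

fn main() {
    let deps = HashMap::from([
        ("a".to_string(), HashSet::from(["b".to_string()])),
        ("b".to_string(), HashSet::new()),
    ]);
    assert_eq!(publish_order(deps), vec!["b", "a"]);
}
```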
diff --git a/cli/tools/registry/unfurl.rs b/cli/tools/registry/unfurl.rs
index 36bff64bb..147b59f30 100644
--- a/cli/tools/registry/unfurl.rs
+++ b/cli/tools/registry/unfurl.rs
@@ -3,6 +3,9 @@
use deno_ast::ParsedSource;
use deno_ast::SourceRange;
use deno_ast::SourceTextInfo;
+use deno_config::package_json::PackageJsonDepValue;
+use deno_config::workspace::MappedResolution;
+use deno_config::workspace::WorkspaceResolver;
use deno_core::ModuleSpecifier;
use deno_graph::DependencyDescriptor;
use deno_graph::DynamicTemplatePart;
@@ -10,7 +13,6 @@ use deno_graph::ParserModuleAnalyzer;
use deno_graph::TypeScriptReference;
use deno_runtime::deno_node::is_builtin_node_module;
-use crate::resolver::MappedSpecifierResolver;
use crate::resolver::SloppyImportsResolver;
#[derive(Debug, Clone)]
@@ -39,20 +41,20 @@ impl SpecifierUnfurlerDiagnostic {
}
pub struct SpecifierUnfurler<'a> {
- mapped_resolver: &'a MappedSpecifierResolver,
sloppy_imports_resolver: Option<&'a SloppyImportsResolver>,
+ workspace_resolver: &'a WorkspaceResolver,
bare_node_builtins: bool,
}
impl<'a> SpecifierUnfurler<'a> {
pub fn new(
- mapped_resolver: &'a MappedSpecifierResolver,
sloppy_imports_resolver: Option<&'a SloppyImportsResolver>,
+ workspace_resolver: &'a WorkspaceResolver,
bare_node_builtins: bool,
) -> Self {
Self {
- mapped_resolver,
sloppy_imports_resolver,
+ workspace_resolver,
bare_node_builtins,
}
}
@@ -62,12 +64,46 @@ impl<'a> SpecifierUnfurler<'a> {
referrer: &ModuleSpecifier,
specifier: &str,
) -> Option<String> {
- let resolved =
- if let Ok(resolved) = self.mapped_resolver.resolve(specifier, referrer) {
- resolved.into_specifier()
- } else {
- None
- };
+ let resolved = if let Ok(resolved) =
+ self.workspace_resolver.resolve(specifier, referrer)
+ {
+ match resolved {
+ MappedResolution::Normal(specifier)
+ | MappedResolution::ImportMap(specifier) => Some(specifier),
+ MappedResolution::PackageJson {
+ sub_path,
+ dep_result,
+ ..
+ } => match dep_result {
+ Ok(dep) => match dep {
+ PackageJsonDepValue::Req(req) => ModuleSpecifier::parse(&format!(
+ "npm:{}{}",
+ req,
+ sub_path
+ .as_ref()
+ .map(|s| format!("/{}", s))
+ .unwrap_or_default()
+ ))
+ .ok(),
+ PackageJsonDepValue::Workspace(_) => {
+ log::warn!(
+ "package.json workspace entries are not implemented yet for publishing."
+ );
+ None
+ }
+ },
+ Err(err) => {
+ log::warn!(
+              "Ignoring package.json dependency that failed to resolve. {:#}",
+ err
+ );
+ None
+ }
+ },
+ }
+ } else {
+ None
+ };
let resolved = match resolved {
Some(resolved) => resolved,
None if self.bare_node_builtins && is_builtin_node_module(specifier) => {
@@ -305,8 +341,6 @@ fn to_range(
mod tests {
use std::sync::Arc;
- use crate::args::PackageJsonDepsProvider;
-
use super::*;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
@@ -355,19 +389,17 @@ mod tests {
}
}),
);
- let mapped_resolver = MappedSpecifierResolver::new(
- Some(Arc::new(import_map)),
- Arc::new(PackageJsonDepsProvider::new(Some(
- package_json.resolve_local_package_json_version_reqs(),
- ))),
+ let workspace_resolver = WorkspaceResolver::new_raw(
+ Some(import_map),
+ vec![Arc::new(package_json)],
+ deno_config::workspace::PackageJsonDepResolution::Enabled,
);
-
let fs = Arc::new(RealFs);
let sloppy_imports_resolver = SloppyImportsResolver::new(fs);
let unfurler = SpecifierUnfurler::new(
- &mapped_resolver,
Some(&sloppy_imports_resolver),
+ &workspace_resolver,
true,
);
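
For `MappedResolution::PackageJson`, the unfurler rewrites the dependency to a bare `npm:` specifier, appending the subpath when present. The string shape it produces, extracted into a sketch (the example values are hypothetical):

```rust
// "req" is the name@version-requirement text from package.json,
// e.g. "chalk@^5".
fn to_npm_specifier(req: &str, sub_path: Option<&str>) -> String {
    format!(
        "npm:{}{}",
        req,
        sub_path.map(|s| format!("/{}", s)).unwrap_or_default()
    )
}

fn main() {
    assert_eq!(to_npm_specifier("chalk@^5", None), "npm:chalk@^5");
    assert_eq!(
        to_npm_specifier("preact@^10", Some("hooks")),
        "npm:preact@^10/hooks"
    );
}
```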
diff --git a/cli/tools/task.rs b/cli/tools/task.rs
index a44dc8dbb..2905134f4 100644
--- a/cli/tools/task.rs
+++ b/cli/tools/task.rs
@@ -8,24 +8,30 @@ use crate::npm::CliNpmResolver;
use crate::npm::InnerCliNpmResolverRef;
use crate::npm::ManagedCliNpmResolver;
use crate::util::fs::canonicalize_path;
+use deno_config::workspace::TaskOrScript;
+use deno_config::workspace::Workspace;
+use deno_config::workspace::WorkspaceTasksConfig;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures;
use deno_core::futures::future::LocalBoxFuture;
+use deno_core::normalize_path;
use deno_runtime::deno_node::NodeResolver;
use deno_semver::package::PackageNv;
use deno_task_shell::ExecutableCommand;
use deno_task_shell::ExecuteResult;
use deno_task_shell::ShellCommand;
use deno_task_shell::ShellCommandContext;
-use indexmap::IndexMap;
use lazy_regex::Lazy;
use regex::Regex;
+use std::borrow::Cow;
use std::collections::HashMap;
+use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
+use std::sync::Arc;
use tokio::task::LocalSet;
// WARNING: Do not depend on this env var in user code. It's not stable API.
@@ -38,146 +44,124 @@ pub async fn execute_script(
) -> Result<i32, AnyError> {
let factory = CliFactory::from_flags(flags)?;
let cli_options = factory.cli_options();
- let tasks_config = cli_options.resolve_tasks_config()?;
- let maybe_package_json = cli_options.maybe_package_json();
- let package_json_scripts = maybe_package_json
- .as_ref()
- .and_then(|p| p.scripts.clone())
- .unwrap_or_default();
+ let start_ctx = cli_options.workspace.resolve_start_ctx();
+ if !start_ctx.has_deno_or_pkg_json() {
+ bail!("deno task couldn't find deno.json(c). See https://deno.land/manual@v{}/getting_started/configuration_file", env!("CARGO_PKG_VERSION"))
+ }
+ let force_use_pkg_json = std::env::var_os(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME)
+ .map(|v| {
+      // always remove so subprocesses don't inherit this env var
+ std::env::remove_var(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME);
+ v == "1"
+ })
+ .unwrap_or(false);
+ let tasks_config = start_ctx.to_tasks_config()?;
+ let tasks_config = if force_use_pkg_json {
+ tasks_config.with_only_pkg_json()
+ } else {
+ tasks_config
+ };
let task_name = match &task_flags.task {
Some(task) => task,
None => {
print_available_tasks(
&mut std::io::stdout(),
+ &cli_options.workspace,
&tasks_config,
- &package_json_scripts,
)?;
return Ok(1);
}
};
+
let npm_resolver = factory.npm_resolver().await?;
let node_resolver = factory.node_resolver().await?;
let env_vars = real_env_vars();
- let force_use_pkg_json = std::env::var_os(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME)
- .map(|v| {
- // always remove so sub processes don't inherit this env var
- std::env::remove_var(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME);
- v == "1"
- })
- .unwrap_or(false);
-
- if let Some(
- deno_config::Task::Definition(script)
- | deno_config::Task::Commented {
- definition: script, ..
- },
- ) = tasks_config.get(task_name).filter(|_| !force_use_pkg_json)
- {
- let config_file_url = cli_options.maybe_config_file_specifier().unwrap();
- let config_file_path = if config_file_url.scheme() == "file" {
- config_file_url.to_file_path().unwrap()
- } else {
- bail!("Only local configuration files are supported")
- };
- let cwd = match task_flags.cwd {
- Some(path) => canonicalize_path(&PathBuf::from(path))
- .context("failed canonicalizing --cwd")?,
- None => config_file_path.parent().unwrap().to_owned(),
- };
-
- let custom_commands =
- resolve_custom_commands(npm_resolver.as_ref(), node_resolver)?;
- run_task(RunTaskOptions {
- task_name,
- script,
- cwd: &cwd,
- init_cwd: cli_options.initial_cwd(),
- env_vars,
- argv: cli_options.argv(),
- custom_commands,
- root_node_modules_dir: npm_resolver
- .root_node_modules_path()
- .map(|p| p.as_path()),
- })
- .await
- } else if package_json_scripts.contains_key(task_name) {
- let package_json_deps_provider = factory.package_json_deps_provider();
-
- if let Some(package_deps) = package_json_deps_provider.deps() {
- for (key, value) in package_deps {
- if let Err(err) = value {
- log::info!(
- "{} Ignoring dependency '{}' in package.json because its version requirement failed to parse: {:#}",
- colors::yellow("Warning"),
- key,
- err,
- );
- }
- }
- }
-
- // ensure the npm packages are installed if using a node_modules
- // directory and managed resolver
- if cli_options.has_node_modules_dir() {
- if let Some(npm_resolver) = npm_resolver.as_managed() {
- npm_resolver.ensure_top_level_package_json_install().await?;
- }
- }
- let cwd = match task_flags.cwd {
- Some(path) => canonicalize_path(&PathBuf::from(path))?,
- None => maybe_package_json
- .as_ref()
- .unwrap()
- .path
- .parent()
- .unwrap()
- .to_owned(),
- };
+ match tasks_config.task(task_name) {
+ Some((dir_url, task_or_script)) => match task_or_script {
+ TaskOrScript::Task(_tasks, script) => {
+ let cwd = match task_flags.cwd {
+ Some(path) => canonicalize_path(&PathBuf::from(path))
+ .context("failed canonicalizing --cwd")?,
+ None => normalize_path(dir_url.to_file_path().unwrap()),
+ };
- // At this point we already checked if the task name exists in package.json.
- // We can therefore check for "pre" and "post" scripts too, since we're only
- // dealing with package.json here and not deno.json
- let task_names = vec![
- format!("pre{}", task_name),
- task_name.clone(),
- format!("post{}", task_name),
- ];
- let custom_commands =
- resolve_custom_commands(npm_resolver.as_ref(), node_resolver)?;
- for task_name in &task_names {
- if let Some(script) = package_json_scripts.get(task_name) {
- let exit_code = run_task(RunTaskOptions {
+ let custom_commands =
+ resolve_custom_commands(npm_resolver.as_ref(), node_resolver)?;
+ run_task(RunTaskOptions {
task_name,
script,
cwd: &cwd,
init_cwd: cli_options.initial_cwd(),
- env_vars: env_vars.clone(),
+ env_vars,
argv: cli_options.argv(),
- custom_commands: custom_commands.clone(),
+ custom_commands,
root_node_modules_dir: npm_resolver
.root_node_modules_path()
.map(|p| p.as_path()),
})
- .await?;
- if exit_code > 0 {
- return Ok(exit_code);
- }
+ .await
}
- }
+ TaskOrScript::Script(scripts, _script) => {
+ // ensure the npm packages are installed if using a node_modules
+ // directory and managed resolver
+ if cli_options.has_node_modules_dir() {
+ if let Some(npm_resolver) = npm_resolver.as_managed() {
+ npm_resolver.ensure_top_level_package_json_install().await?;
+ }
+ }
- Ok(0)
- } else {
- log::error!("Task not found: {task_name}");
- if log::log_enabled!(log::Level::Error) {
- print_available_tasks(
- &mut std::io::stderr(),
- &tasks_config,
- &package_json_scripts,
- )?;
+ let cwd = match task_flags.cwd {
+ Some(path) => canonicalize_path(&PathBuf::from(path))?,
+ None => normalize_path(dir_url.to_file_path().unwrap()),
+ };
+
+        // At this point we already checked that the task name exists in package.json.
+        // We can therefore check for "pre" and "post" scripts too, since we're only
+        // dealing with package.json here and not deno.json.
+ let task_names = vec![
+ format!("pre{}", task_name),
+ task_name.clone(),
+ format!("post{}", task_name),
+ ];
+ let custom_commands =
+ resolve_custom_commands(npm_resolver.as_ref(), node_resolver)?;
+ for task_name in &task_names {
+ if let Some(script) = scripts.get(task_name) {
+ let exit_code = run_task(RunTaskOptions {
+ task_name,
+ script,
+ cwd: &cwd,
+ init_cwd: cli_options.initial_cwd(),
+ env_vars: env_vars.clone(),
+ argv: cli_options.argv(),
+ custom_commands: custom_commands.clone(),
+ root_node_modules_dir: npm_resolver
+ .root_node_modules_path()
+ .map(|p| p.as_path()),
+ })
+ .await?;
+ if exit_code > 0 {
+ return Ok(exit_code);
+ }
+ }
+ }
+
+ Ok(0)
+ }
+ },
+ None => {
+ log::error!("Task not found: {task_name}");
+ if log::log_enabled!(log::Level::Error) {
+ print_available_tasks(
+ &mut std::io::stderr(),
+ &cli_options.workspace,
+ &tasks_config,
+ )?;
+ }
+ Ok(1)
}
- Ok(1)
}
}
@@ -282,53 +266,92 @@ fn real_env_vars() -> HashMap<String, String> {
fn print_available_tasks(
writer: &mut dyn std::io::Write,
- tasks_config: &IndexMap<String, deno_config::Task>,
- package_json_scripts: &IndexMap<String, String>,
+ workspace: &Arc<Workspace>,
+ tasks_config: &WorkspaceTasksConfig,
) -> Result<(), std::io::Error> {
writeln!(writer, "{}", colors::green("Available tasks:"))?;
+ let is_cwd_root_dir = tasks_config.root.is_none();
- if tasks_config.is_empty() && package_json_scripts.is_empty() {
+ if tasks_config.is_empty() {
writeln!(
writer,
" {}",
colors::red("No tasks found in configuration file")
)?;
} else {
- for (is_deno, (key, task)) in tasks_config
- .iter()
- .map(|(k, t)| (true, (k, t.clone())))
- .chain(
- package_json_scripts
- .iter()
- .filter(|(key, _)| !tasks_config.contains_key(*key))
- .map(|(k, v)| (false, (k, deno_config::Task::Definition(v.clone())))),
- )
- {
- writeln!(
- writer,
- "- {}{}",
- colors::cyan(key),
- if is_deno {
- "".to_string()
- } else {
- format!(" {}", colors::italic_gray("(package.json)"))
- }
- )?;
- let definition = match &task {
- deno_config::Task::Definition(definition) => definition,
- deno_config::Task::Commented { definition, .. } => definition,
+ let mut seen_task_names =
+ HashSet::with_capacity(tasks_config.tasks_count());
+ for maybe_config in [&tasks_config.member, &tasks_config.root] {
+ let Some(config) = maybe_config else {
+ continue;
};
- if let deno_config::Task::Commented { comments, .. } = &task {
- let slash_slash = colors::italic_gray("//");
- for comment in comments {
- writeln!(
- writer,
- " {slash_slash} {}",
- colors::italic_gray(comment)
- )?;
+ for (is_root, is_deno, (key, task)) in config
+ .deno_json
+ .as_ref()
+ .map(|config| {
+ let is_root = !is_cwd_root_dir
+ && config.folder_url == *workspace.root_folder().0.as_ref();
+ config
+ .tasks
+ .iter()
+ .map(move |(k, t)| (is_root, true, (k, Cow::Borrowed(t))))
+ })
+ .into_iter()
+ .flatten()
+ .chain(
+ config
+ .package_json
+ .as_ref()
+ .map(|config| {
+ let is_root = !is_cwd_root_dir
+ && config.folder_url == *workspace.root_folder().0.as_ref();
+ config.tasks.iter().map(move |(k, v)| {
+ (
+ is_root,
+ false,
+ (k, Cow::Owned(deno_config::Task::Definition(v.clone()))),
+ )
+ })
+ })
+ .into_iter()
+ .flatten(),
+ )
+ {
+ if !seen_task_names.insert(key) {
+ continue; // already seen
+ }
+ writeln!(
+ writer,
+ "- {}{}",
+ colors::cyan(key),
+ if is_root {
+ if is_deno {
+ format!(" {}", colors::italic_gray("(workspace)"))
+ } else {
+ format!(" {}", colors::italic_gray("(workspace package.json)"))
+ }
+ } else if is_deno {
+ "".to_string()
+ } else {
+ format!(" {}", colors::italic_gray("(package.json)"))
+ }
+ )?;
+ let definition = match task.as_ref() {
+ deno_config::Task::Definition(definition) => definition,
+ deno_config::Task::Commented { definition, .. } => definition,
+ };
+ if let deno_config::Task::Commented { comments, .. } = task.as_ref() {
+ let slash_slash = colors::italic_gray("//");
+ for comment in comments {
+ writeln!(
+ writer,
+ " {slash_slash} {}",
+ colors::italic_gray(comment)
+ )?;
+ }
}
+ writeln!(writer, " {definition}")?;
}
- writeln!(writer, " {definition}")?;
}
}
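
The `TaskOrScript::Script` arm keeps npm's pre/post convention: for a package.json script named `test`, it runs `pretest`, `test`, and `posttest` in that order, skipping names that don't exist and stopping at the first non-zero exit code. A compact sketch of that expansion:

```rust
use std::collections::HashMap;

fn expand_script_names(task_name: &str) -> Vec<String> {
    vec![
        format!("pre{task_name}"),
        task_name.to_string(),
        format!("post{task_name}"),
    ]
}

fn run_scripts(scripts: &HashMap<String, String>, task_name: &str) -> i32 {
    for name in expand_script_names(task_name) {
        if let Some(script) = scripts.get(&name) {
            // stand-in for run_task(..); a non-zero exit code would be
            // returned immediately, as in the loop above
            println!("running {name}: {script}");
        }
    }
    0
}

fn main() {
    let scripts = HashMap::from([
        ("pretest".to_string(), "echo build".to_string()),
        ("test".to_string(), "echo test".to_string()),
    ]);
    run_scripts(&scripts, "test"); // runs pretest, then test
}
```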
diff --git a/cli/tools/test/mod.rs b/cli/tools/test/mod.rs
index 88b539470..7042a82b9 100644
--- a/cli/tools/test/mod.rs
+++ b/cli/tools/test/mod.rs
@@ -1705,11 +1705,17 @@ fn collect_specifiers_with_test_mode(
async fn fetch_specifiers_with_test_mode(
cli_options: &CliOptions,
file_fetcher: &FileFetcher,
- files: FilePatterns,
+ member_patterns: impl Iterator<Item = FilePatterns>,
doc: &bool,
) -> Result<Vec<(ModuleSpecifier, TestMode)>, AnyError> {
- let mut specifiers_with_mode =
- collect_specifiers_with_test_mode(cli_options, files, doc)?;
+ let mut specifiers_with_mode = member_patterns
+ .map(|files| {
+ collect_specifiers_with_test_mode(cli_options, files.clone(), doc)
+ })
+ .collect::<Result<Vec<_>, _>>()?
+ .into_iter()
+ .flatten()
+ .collect::<Vec<_>>();
for (specifier, mode) in &mut specifiers_with_mode {
let file = file_fetcher
@@ -1731,7 +1737,8 @@ pub async fn run_tests(
) -> Result<(), AnyError> {
let factory = CliFactory::from_flags(flags)?;
let cli_options = factory.cli_options();
- let test_options = cli_options.resolve_test_options(test_flags)?;
+ let workspace_test_options =
+ cli_options.resolve_workspace_test_options(&test_flags);
let file_fetcher = factory.file_fetcher()?;
// Various test files should not share the same permissions in terms of
// `PermissionsContainer` - otherwise granting/revoking permissions in one
@@ -1740,15 +1747,17 @@ pub async fn run_tests(
Permissions::from_options(&cli_options.permissions_options()?)?;
let log_level = cli_options.log_level();
+ let members_with_test_options =
+ cli_options.resolve_test_options_for_members(&test_flags)?;
let specifiers_with_mode = fetch_specifiers_with_test_mode(
cli_options,
file_fetcher,
- test_options.files.clone(),
- &test_options.doc,
+ members_with_test_options.into_iter().map(|(_, v)| v.files),
+ &workspace_test_options.doc,
)
.await?;
- if !test_options.allow_none && specifiers_with_mode.is_empty() {
+ if !workspace_test_options.allow_none && specifiers_with_mode.is_empty() {
return Err(generic_error("No test modules found"));
}
@@ -1761,7 +1770,7 @@ pub async fn run_tests(
)
.await?;
- if test_options.no_run {
+ if workspace_test_options.no_run {
return Ok(());
}
@@ -1787,16 +1796,16 @@ pub async fn run_tests(
))
},
)?,
- concurrent_jobs: test_options.concurrent_jobs,
- fail_fast: test_options.fail_fast,
+ concurrent_jobs: workspace_test_options.concurrent_jobs,
+ fail_fast: workspace_test_options.fail_fast,
log_level,
- filter: test_options.filter.is_some(),
- reporter: test_options.reporter,
- junit_path: test_options.junit_path,
+ filter: workspace_test_options.filter.is_some(),
+ reporter: workspace_test_options.reporter,
+ junit_path: workspace_test_options.junit_path,
specifier: TestSpecifierOptions {
- filter: TestFilter::from_flag(&test_options.filter),
- shuffle: test_options.shuffle,
- trace_leaks: test_options.trace_leaks,
+ filter: TestFilter::from_flag(&workspace_test_options.filter),
+ shuffle: workspace_test_options.shuffle,
+ trace_leaks: workspace_test_options.trace_leaks,
},
},
)
@@ -1838,34 +1847,47 @@ pub async fn run_tests_with_watch(
let factory = CliFactoryBuilder::new()
.build_from_flags_for_watcher(flags, watcher_communicator.clone())?;
let cli_options = factory.cli_options();
- let test_options = cli_options.resolve_test_options(test_flags)?;
+ let workspace_test_options =
+ cli_options.resolve_workspace_test_options(&test_flags);
let _ = watcher_communicator.watch_paths(cli_options.watch_paths());
- if let Some(set) = &test_options.files.include {
- let watch_paths = set.base_paths();
- if !watch_paths.is_empty() {
- let _ = watcher_communicator.watch_paths(watch_paths);
- }
- }
-
let graph_kind = cli_options.type_check_mode().as_graph_kind();
let log_level = cli_options.log_level();
let cli_options = cli_options.clone();
let module_graph_creator = factory.module_graph_creator().await?;
let file_fetcher = factory.file_fetcher()?;
- let test_modules = if test_options.doc {
- collect_specifiers(
- test_options.files.clone(),
- cli_options.vendor_dir_path().map(ToOwned::to_owned),
- |e| is_supported_test_ext(e.path),
- )
- } else {
- collect_specifiers(
- test_options.files.clone(),
- cli_options.vendor_dir_path().map(ToOwned::to_owned),
- is_supported_test_path_predicate,
- )
- }?;
+ let members_with_test_options =
+ cli_options.resolve_test_options_for_members(&test_flags)?;
+ let watch_paths = members_with_test_options
+ .iter()
+ .filter_map(|(_, test_options)| {
+ test_options
+ .files
+ .include
+ .as_ref()
+ .map(|set| set.base_paths())
+ })
+ .flatten()
+ .collect::<Vec<_>>();
+ let _ = watcher_communicator.watch_paths(watch_paths);
+ let test_modules = members_with_test_options
+ .iter()
+ .map(|(_, test_options)| {
+ collect_specifiers(
+ test_options.files.clone(),
+ cli_options.vendor_dir_path().map(ToOwned::to_owned),
+ if workspace_test_options.doc {
+ Box::new(|e: WalkEntry| is_supported_test_ext(e.path))
+ as Box<dyn Fn(WalkEntry) -> bool>
+ } else {
+ Box::new(is_supported_test_path_predicate)
+ },
+ )
+ })
+ .collect::<Result<Vec<_>, _>>()?
+ .into_iter()
+ .flatten()
+ .collect::<Vec<_>>();
let permissions =
Permissions::from_options(&cli_options.permissions_options()?)?;
@@ -1898,8 +1920,8 @@ pub async fn run_tests_with_watch(
let specifiers_with_mode = fetch_specifiers_with_test_mode(
&cli_options,
file_fetcher,
- test_options.files.clone(),
- &test_options.doc,
+ members_with_test_options.into_iter().map(|(_, v)| v.files),
+ &workspace_test_options.doc,
)
.await?
.into_iter()
@@ -1915,7 +1937,7 @@ pub async fn run_tests_with_watch(
)
.await?;
- if test_options.no_run {
+ if workspace_test_options.no_run {
return Ok(());
}
@@ -1938,16 +1960,16 @@ pub async fn run_tests_with_watch(
))
},
)?,
- concurrent_jobs: test_options.concurrent_jobs,
- fail_fast: test_options.fail_fast,
+ concurrent_jobs: workspace_test_options.concurrent_jobs,
+ fail_fast: workspace_test_options.fail_fast,
log_level,
- filter: test_options.filter.is_some(),
- reporter: test_options.reporter,
- junit_path: test_options.junit_path,
+ filter: workspace_test_options.filter.is_some(),
+ reporter: workspace_test_options.reporter,
+ junit_path: workspace_test_options.junit_path,
specifier: TestSpecifierOptions {
- filter: TestFilter::from_flag(&test_options.filter),
- shuffle: test_options.shuffle,
- trace_leaks: test_options.trace_leaks,
+ filter: TestFilter::from_flag(&workspace_test_options.filter),
+ shuffle: workspace_test_options.shuffle,
+ trace_leaks: workspace_test_options.trace_leaks,
},
},
)
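
Both test paths now collect specifiers per workspace member and flatten the results, propagating the first error. That collect-then-flatten shape, in isolation:

```rust
fn collect_all<T, E>(
    members: impl Iterator<Item = Result<Vec<T>, E>>,
) -> Result<Vec<T>, E> {
    // Result<Vec<Vec<T>>, E> -> Result<Vec<T>, E>: fail on the first
    // member error, otherwise concatenate every member's items
    Ok(
        members
            .collect::<Result<Vec<_>, E>>()?
            .into_iter()
            .flatten()
            .collect(),
    )
}

fn main() {
    let parts = vec![Ok::<_, ()>(vec![1, 2]), Ok(vec![3])];
    assert_eq!(collect_all(parts.into_iter()), Ok(vec![1, 2, 3]));
}
```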
diff --git a/cli/tools/vendor/build.rs b/cli/tools/vendor/build.rs
index 5aef63192..a4424e3f3 100644
--- a/cli/tools/vendor/build.rs
+++ b/cli/tools/vendor/build.rs
@@ -81,8 +81,8 @@ pub async fn build<
build_graph,
parsed_source_cache,
output_dir,
- maybe_original_import_map: original_import_map,
- maybe_jsx_import_source: jsx_import_source,
+ maybe_original_import_map,
+ maybe_jsx_import_source,
resolver,
environment,
} = input;
@@ -90,12 +90,12 @@ pub async fn build<
let output_dir_specifier =
ModuleSpecifier::from_directory_path(output_dir).unwrap();
- if let Some(original_im) = &original_import_map {
+ if let Some(original_im) = &maybe_original_import_map {
validate_original_import_map(original_im, &output_dir_specifier)?;
}
// add the jsx import source to the entry points to ensure it is always vendored
- if let Some(jsx_import_source) = jsx_import_source {
+ if let Some(jsx_import_source) = maybe_jsx_import_source {
if let Some(specifier_text) = jsx_import_source.maybe_specifier_text() {
if let Ok(specifier) = resolver.resolve(
&specifier_text,
@@ -171,8 +171,8 @@ pub async fn build<
graph: &graph,
modules: &all_modules,
mappings: &mappings,
- original_import_map,
- jsx_import_source,
+ maybe_original_import_map,
+ maybe_jsx_import_source,
resolver,
parsed_source_cache,
})?;
diff --git a/cli/tools/vendor/import_map.rs b/cli/tools/vendor/import_map.rs
index 68f2530d7..644e84a7b 100644
--- a/cli/tools/vendor/import_map.rs
+++ b/cli/tools/vendor/import_map.rs
@@ -59,7 +59,7 @@ impl<'a> ImportMapBuilder<'a> {
pub fn into_import_map(
self,
- original_import_map: Option<&ImportMap>,
+ maybe_original_import_map: Option<&ImportMap>,
) -> ImportMap {
fn get_local_imports(
new_relative_path: &str,
@@ -99,7 +99,7 @@ impl<'a> ImportMapBuilder<'a> {
let mut import_map = ImportMap::new(self.base_dir.clone());
- if let Some(original_im) = original_import_map {
+ if let Some(original_im) = maybe_original_import_map {
let original_base_dir = ModuleSpecifier::from_directory_path(
original_im
.base_url()
@@ -183,8 +183,8 @@ pub struct BuildImportMapInput<'a> {
pub modules: &'a [&'a Module],
pub graph: &'a ModuleGraph,
pub mappings: &'a Mappings,
- pub original_import_map: Option<&'a ImportMap>,
- pub jsx_import_source: Option<&'a JsxImportSourceConfig>,
+ pub maybe_original_import_map: Option<&'a ImportMap>,
+ pub maybe_jsx_import_source: Option<&'a JsxImportSourceConfig>,
pub resolver: &'a dyn deno_graph::source::Resolver,
pub parsed_source_cache: &'a ParsedSourceCache,
}
@@ -197,8 +197,8 @@ pub fn build_import_map(
modules,
graph,
mappings,
- original_import_map,
- jsx_import_source,
+ maybe_original_import_map,
+ maybe_jsx_import_source,
resolver,
parsed_source_cache,
} = input;
@@ -212,7 +212,7 @@ pub fn build_import_map(
}
// add the jsx import source to the destination import map, if mapped in the original import map
- if let Some(jsx_import_source) = jsx_import_source {
+ if let Some(jsx_import_source) = maybe_jsx_import_source {
if let Some(specifier_text) = jsx_import_source.maybe_specifier_text() {
if let Ok(resolved_url) = resolver.resolve(
&specifier_text,
@@ -228,7 +228,7 @@ pub fn build_import_map(
}
}
- Ok(builder.into_import_map(original_import_map).to_json())
+ Ok(builder.into_import_map(maybe_original_import_map).to_json())
}
fn visit_modules(
diff --git a/cli/tools/vendor/mod.rs b/cli/tools/vendor/mod.rs
index a8d8000d8..2dfa71c44 100644
--- a/cli/tools/vendor/mod.rs
+++ b/cli/tools/vendor/mod.rs
@@ -48,10 +48,17 @@ pub async fn vendor(
validate_options(&mut cli_options, &output_dir)?;
let factory = CliFactory::from_cli_options(Arc::new(cli_options));
let cli_options = factory.cli_options();
+ if cli_options.workspace.config_folders().len() > 1 {
+ bail!("deno vendor is not supported in a workspace. Set `\"vendor\": true` in the workspace deno.json file instead");
+ }
let entry_points =
resolve_entry_points(&vendor_flags, cli_options.initial_cwd())?;
- let jsx_import_source = cli_options.to_maybe_jsx_import_source_config()?;
+ let jsx_import_source =
+ cli_options.workspace.to_maybe_jsx_import_source_config()?;
let module_graph_creator = factory.module_graph_creator().await?.clone();
+ let workspace_resolver = factory.workspace_resolver().await?;
+ let root_folder = cli_options.workspace.root_folder().1;
+ let maybe_config_file = root_folder.deno_json.as_ref();
let output = build::build(build::BuildInput {
entry_points,
build_graph: move |entry_points| {
@@ -64,7 +71,7 @@ pub async fn vendor(
},
parsed_source_cache: factory.parsed_source_cache(),
output_dir: &output_dir,
- maybe_original_import_map: factory.maybe_import_map().await?.as_deref(),
+ maybe_original_import_map: workspace_resolver.maybe_import_map(),
maybe_jsx_import_source: jsx_import_source.as_ref(),
resolver: factory.resolver().await?.as_graph_resolver(),
environment: &build::RealVendorEnvironment,
@@ -91,7 +98,7 @@ pub async fn vendor(
let try_add_import_map = vendored_count > 0;
let modified_result = maybe_update_config_file(
&output_dir,
- cli_options,
+ maybe_config_file,
try_add_import_map,
try_add_node_modules_dir,
);
@@ -100,8 +107,9 @@ pub async fn vendor(
if modified_result.added_node_modules_dir {
let node_modules_path =
cli_options.node_modules_dir_path().cloned().or_else(|| {
- cli_options
- .maybe_config_file_specifier()
+ maybe_config_file
+ .as_ref()
+ .map(|d| &d.specifier)
.filter(|c| c.scheme() == "file")
.and_then(|c| c.to_file_path().ok())
.map(|config_path| config_path.parent().unwrap().join("node_modules"))
@@ -176,7 +184,7 @@ fn validate_options(
let import_map_specifier = options
.resolve_specified_import_map_specifier()?
.or_else(|| {
- let config_file = options.maybe_config_file().as_ref()?;
+ let config_file = options.workspace.root_folder().1.deno_json.as_ref()?;
config_file
.to_import_map_specifier()
.ok()
@@ -229,12 +237,12 @@ fn validate_options(
fn maybe_update_config_file(
output_dir: &Path,
- options: &CliOptions,
+ maybe_config_file: Option<&Arc<ConfigFile>>,
try_add_import_map: bool,
try_add_node_modules_dir: bool,
) -> ModifiedResult {
assert!(output_dir.is_absolute());
- let config_file = match options.maybe_config_file() {
+ let config_file = match maybe_config_file {
Some(config_file) => config_file,
None => return ModifiedResult::default(),
};
@@ -245,7 +253,6 @@ fn maybe_update_config_file(
let fmt_config_options = config_file
.to_fmt_config()
.ok()
- .flatten()
.map(|config| config.options)
.unwrap_or_default();
let result = update_config_file(
diff --git a/cli/tools/vendor/test.rs b/cli/tools/vendor/test.rs
index 830d5f8f0..ac07c47d1 100644
--- a/cli/tools/vendor/test.rs
+++ b/cli/tools/vendor/test.rs
@@ -8,6 +8,7 @@ use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
+use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
@@ -182,7 +183,7 @@ pub struct VendorOutput {
pub struct VendorTestBuilder {
entry_points: Vec<ModuleSpecifier>,
loader: TestLoader,
- original_import_map: Option<ImportMap>,
+ maybe_original_import_map: Option<ImportMap>,
environment: TestVendorEnvironment,
jsx_import_source_config: Option<JsxImportSourceConfig>,
}
@@ -207,7 +208,7 @@ impl VendorTestBuilder {
&mut self,
import_map: ImportMap,
) -> &mut Self {
- self.original_import_map = Some(import_map);
+ self.maybe_original_import_map = Some(import_map);
self
}
@@ -234,7 +235,7 @@ impl VendorTestBuilder {
let parsed_source_cache = ParsedSourceCache::default();
let resolver = Arc::new(build_resolver(
self.jsx_import_source_config.clone(),
- self.original_import_map.clone(),
+ self.maybe_original_import_map.clone(),
));
super::build::build(super::build::BuildInput {
entry_points,
@@ -257,7 +258,7 @@ impl VendorTestBuilder {
},
parsed_source_cache: &parsed_source_cache,
output_dir: &output_dir,
- maybe_original_import_map: self.original_import_map.as_ref(),
+ maybe_original_import_map: self.maybe_original_import_map.as_ref(),
maybe_jsx_import_source: self.jsx_import_source_config.as_ref(),
resolver: resolver.as_graph_resolver(),
environment: &self.environment,
@@ -287,15 +288,18 @@ impl VendorTestBuilder {
fn build_resolver(
maybe_jsx_import_source_config: Option<JsxImportSourceConfig>,
- original_import_map: Option<ImportMap>,
+ maybe_original_import_map: Option<ImportMap>,
) -> CliGraphResolver {
CliGraphResolver::new(CliGraphResolverOptions {
node_resolver: None,
npm_resolver: None,
sloppy_imports_resolver: None,
- package_json_deps_provider: Default::default(),
+ workspace_resolver: Arc::new(WorkspaceResolver::new_raw(
+ maybe_original_import_map,
+ Vec::new(),
+ deno_config::workspace::PackageJsonDepResolution::Enabled,
+ )),
maybe_jsx_import_source_config,
- maybe_import_map: original_import_map.map(Arc::new),
maybe_vendor_dir: None,
bare_node_builtins_enabled: false,
})
diff --git a/cli/util/collections.rs b/cli/util/collections.rs
new file mode 100644
index 000000000..21f73024b
--- /dev/null
+++ b/cli/util/collections.rs
@@ -0,0 +1,38 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use std::marker::PhantomData;
+
+pub struct CheckedSet<T: std::hash::Hash + ?Sized> {
+ _kind: PhantomData<T>,
+ checked: std::collections::HashSet<u64>,
+}
+
+impl<T: std::hash::Hash + ?Sized> Default for CheckedSet<T> {
+ fn default() -> Self {
+ Self {
+ _kind: Default::default(),
+ checked: Default::default(),
+ }
+ }
+}
+
+impl<T: std::hash::Hash + ?Sized> CheckedSet<T> {
+ pub fn with_capacity(capacity: usize) -> Self {
+ Self {
+ _kind: PhantomData,
+ checked: std::collections::HashSet::with_capacity(capacity),
+ }
+ }
+
+ pub fn insert(&mut self, value: &T) -> bool {
+ self.checked.insert(self.get_hash(value))
+ }
+
+ fn get_hash(&self, value: &T) -> u64 {
+ use std::collections::hash_map::DefaultHasher;
+ use std::hash::Hasher;
+ let mut hasher = DefaultHasher::new();
+ value.hash(&mut hasher);
+ hasher.finish()
+ }
+}
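
`CheckedSet` stores only the 64-bit hash of each inserted value, so it can deduplicate unsized keys like `Path` without cloning or owning them; the trade-off is that a hash collision can, very rarely, treat a new value as already seen, which is acceptable for the CLI's "have we visited this yet?" checks. A hypothetical usage sketch, assuming the `CheckedSet` above is in scope:

```rust
use std::path::Path;

fn count_unique(paths: &[&Path]) -> usize {
    let mut seen = CheckedSet::<Path>::with_capacity(paths.len());
    // insert() returns true only the first time a value (hash) is seen
    paths.iter().filter(|&&p| seen.insert(p)).count()
}

fn main() {
    let a = Path::new("/tmp/a");
    assert_eq!(count_unique(&[a, a, Path::new("/tmp/b")]), 2);
}
```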
diff --git a/cli/util/file_watcher.rs b/cli/util/file_watcher.rs
index b2628760b..176ca43f0 100644
--- a/cli/util/file_watcher.rs
+++ b/cli/util/file_watcher.rs
@@ -163,6 +163,9 @@ pub struct WatcherCommunicator {
impl WatcherCommunicator {
pub fn watch_paths(&self, paths: Vec<PathBuf>) -> Result<(), AnyError> {
+ if paths.is_empty() {
+ return Ok(());
+ }
self.paths_to_watch_tx.send(paths).map_err(AnyError::from)
}
diff --git a/cli/util/mod.rs b/cli/util/mod.rs
index 89df7bb98..69cdc77c3 100644
--- a/cli/util/mod.rs
+++ b/cli/util/mod.rs
@@ -2,6 +2,7 @@
// Note: Only add code in this folder that has no application specific logic
pub mod checksum;
+pub mod collections;
pub mod console;
pub mod diff;
pub mod display;
diff --git a/cli/worker.rs b/cli/worker.rs
index 00a20ab4d..987d65192 100644
--- a/cli/worker.rs
+++ b/cli/worker.rs
@@ -6,7 +6,6 @@ use std::rc::Rc;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
-use deno_config::package_json::PackageJsonDeps;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
@@ -41,7 +40,6 @@ use deno_runtime::BootstrapOptions;
use deno_runtime::WorkerExecutionMode;
use deno_runtime::WorkerLogLevel;
use deno_semver::npm::NpmPackageReqReference;
-use deno_semver::package::PackageReqReference;
use deno_terminal::colors;
use tokio::select;
@@ -117,7 +115,6 @@ pub struct CliMainWorkerOptions {
pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
pub unstable: bool,
pub skip_op_registration: bool,
- pub maybe_root_package_json_deps: Option<PackageJsonDeps>,
pub create_hmr_runner: Option<CreateHmrRunnerCb>,
pub create_coverage_collector: Option<CreateCoverageCollectorCb>,
}
@@ -479,29 +476,6 @@ impl CliMainWorkerFactory {
let (main_module, is_main_cjs) = if let Ok(package_ref) =
NpmPackageReqReference::from_specifier(&main_module)
{
- let package_ref = if package_ref.req().version_req.version_text() == "*" {
- // When using the wildcard version, select the same version used in the
- // package.json deps in order to prevent adding new dependency version
- shared
- .options
- .maybe_root_package_json_deps
- .as_ref()
- .and_then(|deps| {
- deps
- .values()
- .filter_map(|v| v.as_ref().ok())
- .find(|dep| dep.name == package_ref.req().name)
- .map(|dep| {
- NpmPackageReqReference::new(PackageReqReference {
- req: dep.clone(),
- sub_path: package_ref.sub_path().map(|s| s.to_string()),
- })
- })
- })
- .unwrap_or(package_ref)
- } else {
- package_ref
- };
if let Some(npm_resolver) = shared.npm_resolver.as_managed() {
npm_resolver
.add_package_reqs(&[package_ref.req().clone()])