author    haturau <135221985+haturatu@users.noreply.github.com>  2024-11-20 01:20:47 +0900
committer GitHub <noreply@github.com>                            2024-11-20 01:20:47 +0900
commit    85719a67e59c7aa45bead26e4942d7df8b1b42d4 (patch)
tree      face0aecaac53e93ce2f23b53c48859bcf1a36ec /cli/tools
parent    67697bc2e4a62a9670699fd18ad0dd8efc5bd955 (diff)
parent    186b52731c6bb326c4d32905c5e732d082e83465 (diff)
Merge branch 'denoland:main' into main
Diffstat (limited to 'cli/tools')
-rw-r--r--  cli/tools/bench/mod.rs                            |   3
-rw-r--r--  cli/tools/check.rs                                |  11
-rw-r--r--  cli/tools/compile.rs                              |  30
-rw-r--r--  cli/tools/coverage/mod.rs                         |  49
-rw-r--r--  cli/tools/doc.rs                                  |   6
-rw-r--r--  cli/tools/fmt.rs                                  |  69
-rw-r--r--  cli/tools/info.rs                                 | 102
-rw-r--r--  cli/tools/init/mod.rs                             |  41
-rw-r--r--  cli/tools/installer.rs                            |  46
-rw-r--r--  cli/tools/jupyter/install.rs                      |   6
-rw-r--r--  cli/tools/jupyter/mod.rs                          |   2
-rw-r--r--  cli/tools/jupyter/resources/deno-logo-32x32.png   | bin 1029 -> 1386 bytes
-rw-r--r--  cli/tools/jupyter/resources/deno-logo-64x64.png   | bin 2066 -> 2913 bytes
-rw-r--r--  cli/tools/jupyter/resources/deno-logo-svg.svg     |  18
-rw-r--r--  cli/tools/jupyter/server.rs                       |  14
-rw-r--r--  cli/tools/lint/mod.rs                             |   6
-rw-r--r--  cli/tools/lint/rules/no_sloppy_imports.rs         |   1
-rw-r--r--  cli/tools/registry/mod.rs                         |   1
-rw-r--r--  cli/tools/registry/pm.rs                          | 944
-rw-r--r--  cli/tools/registry/pm/cache_deps.rs               |  17
-rw-r--r--  cli/tools/registry/tar.rs                         |   2
-rw-r--r--  cli/tools/repl/session.rs                         |  23
-rw-r--r--  cli/tools/run/hmr.rs                              |  18
-rw-r--r--  cli/tools/run/mod.rs                              |  21
-rw-r--r--  cli/tools/serve.rs                                |  14
-rw-r--r--  cli/tools/task.rs                                 | 631
-rw-r--r--  cli/tools/test/mod.rs                             |   5
-rw-r--r--  cli/tools/upgrade.rs                              |  36
28 files changed, 1250 insertions, 866 deletions
diff --git a/cli/tools/bench/mod.rs b/cli/tools/bench/mod.rs
index be5d0ad0e..1d49fa061 100644
--- a/cli/tools/bench/mod.rs
+++ b/cli/tools/bench/mod.rs
@@ -193,7 +193,7 @@ async fn bench_specifier_inner(
.await?;
// We execute the main module as a side module so that import.meta.main is not set.
- worker.execute_side_module_possibly_with_npm().await?;
+ worker.execute_side_module().await?;
let mut worker = worker.into_main_worker();
@@ -486,6 +486,7 @@ pub async fn run_benchmarks_with_watch(
),
move |flags, watcher_communicator, changed_paths| {
let bench_flags = bench_flags.clone();
+ watcher_communicator.show_path_changed(changed_paths.clone());
Ok(async move {
let factory = CliFactory::from_flags_for_watcher(
flags,
diff --git a/cli/tools/check.rs b/cli/tools/check.rs
index 7edb392d4..d88027888 100644
--- a/cli/tools/check.rs
+++ b/cli/tools/check.rs
@@ -32,6 +32,7 @@ use crate::graph_util::ModuleGraphBuilder;
use crate::npm::CliNpmResolver;
use crate::tsc;
use crate::tsc::Diagnostics;
+use crate::tsc::TypeCheckingCjsTracker;
use crate::util::extract;
use crate::util::path::to_percent_decoded_str;
@@ -99,6 +100,7 @@ pub struct CheckOptions {
pub struct TypeChecker {
caches: Arc<Caches>,
+ cjs_tracker: Arc<TypeCheckingCjsTracker>,
cli_options: Arc<CliOptions>,
module_graph_builder: Arc<ModuleGraphBuilder>,
node_resolver: Arc<NodeResolver>,
@@ -108,6 +110,7 @@ pub struct TypeChecker {
impl TypeChecker {
pub fn new(
caches: Arc<Caches>,
+ cjs_tracker: Arc<TypeCheckingCjsTracker>,
cli_options: Arc<CliOptions>,
module_graph_builder: Arc<ModuleGraphBuilder>,
node_resolver: Arc<NodeResolver>,
@@ -115,6 +118,7 @@ impl TypeChecker {
) -> Self {
Self {
caches,
+ cjs_tracker,
cli_options,
module_graph_builder,
node_resolver,
@@ -244,6 +248,7 @@ impl TypeChecker {
graph: graph.clone(),
hash_data,
maybe_npm: Some(tsc::RequestNpmState {
+ cjs_tracker: self.cjs_tracker.clone(),
node_resolver: self.node_resolver.clone(),
npm_resolver: self.npm_resolver.clone(),
}),
@@ -346,7 +351,7 @@ fn get_check_hash(
}
}
MediaType::Json
- | MediaType::TsBuildInfo
+ | MediaType::Css
| MediaType::SourceMap
| MediaType::Wasm
| MediaType::Unknown => continue,
@@ -428,7 +433,7 @@ fn get_tsc_roots(
}
MediaType::Json
| MediaType::Wasm
- | MediaType::TsBuildInfo
+ | MediaType::Css
| MediaType::SourceMap
| MediaType::Unknown => None,
},
@@ -536,7 +541,7 @@ fn has_ts_check(media_type: MediaType, file_text: &str) -> bool {
| MediaType::Tsx
| MediaType::Json
| MediaType::Wasm
- | MediaType::TsBuildInfo
+ | MediaType::Css
| MediaType::SourceMap
| MediaType::Unknown => false,
}
diff --git a/cli/tools/compile.rs b/cli/tools/compile.rs
index c1f98bc08..b3e999337 100644
--- a/cli/tools/compile.rs
+++ b/cli/tools/compile.rs
@@ -5,6 +5,7 @@ use crate::args::CompileFlags;
use crate::args::Flags;
use crate::factory::CliFactory;
use crate::http_util::HttpClientProvider;
+use crate::standalone::binary::StandaloneRelativeFileBaseUrl;
use crate::standalone::is_standalone_binary;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
@@ -14,7 +15,6 @@ use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_graph::GraphKind;
use deno_terminal::colors;
-use eszip::EszipRelativeFileBaseUrl;
use rand::Rng;
use std::path::Path;
use std::path::PathBuf;
@@ -29,7 +29,6 @@ pub async fn compile(
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
let module_graph_creator = factory.module_graph_creator().await?;
- let parsed_source_cache = factory.parsed_source_cache();
let binary_writer = factory.create_compile_binary_writer().await?;
let http_client = factory.http_client_provider();
let module_specifier = cli_options.resolve_main_module()?;
@@ -70,7 +69,7 @@ pub async fn compile(
let graph = if cli_options.type_check_mode().is_true() {
// In this case, the previous graph creation did type checking, which will
// create a module graph with types information in it. We don't want to
- // store that in the eszip so create a code only module graph from scratch.
+ // store that in the binary so create a code only module graph from scratch.
module_graph_creator
.create_graph(GraphKind::CodeOnly, module_roots)
.await?
@@ -81,11 +80,6 @@ pub async fn compile(
let ts_config_for_emit = cli_options
.resolve_ts_config_for_emit(deno_config::deno_json::TsConfigType::Emit)?;
check_warn_tsconfig(&ts_config_for_emit);
- let (transpile_options, emit_options) =
- crate::args::ts_config_to_transpile_and_emit_options(
- ts_config_for_emit.ts_config,
- )?;
- let parser = parsed_source_cache.as_capturing_parser();
let root_dir_url = resolve_root_dir_from_specifiers(
cli_options.workspace().root_dir(),
graph.specifiers().map(|(s, _)| s).chain(
@@ -96,17 +90,6 @@ pub async fn compile(
),
);
log::debug!("Binary root dir: {}", root_dir_url);
- let root_dir_url = EszipRelativeFileBaseUrl::new(&root_dir_url);
- let eszip = eszip::EszipV2::from_graph(eszip::FromGraphOptions {
- graph,
- parser,
- transpile_options,
- emit_options,
- // make all the modules relative to the root folder
- relative_file_base: Some(root_dir_url),
- npm_packages: None,
- })?;
-
log::info!(
"{} {} to {}",
colors::green("Compile"),
@@ -133,15 +116,18 @@ pub async fn compile(
let write_result = binary_writer
.write_bin(
file,
- eszip,
- root_dir_url,
+ &graph,
+ StandaloneRelativeFileBaseUrl::from(&root_dir_url),
module_specifier,
&compile_flags,
cli_options,
)
.await
.with_context(|| {
- format!("Writing temporary file '{}'", temp_path.display())
+ format!(
+ "Writing deno compile executable to temporary file '{}'",
+ temp_path.display()
+ )
});
// set it as executable
diff --git a/cli/tools/coverage/mod.rs b/cli/tools/coverage/mod.rs
index 260c0c842..2a554c133 100644
--- a/cli/tools/coverage/mod.rs
+++ b/cli/tools/coverage/mod.rs
@@ -6,12 +6,12 @@ use crate::args::FileFlags;
use crate::args::Flags;
use crate::cdp;
use crate::factory::CliFactory;
-use crate::npm::CliNpmResolver;
use crate::tools::fmt::format_json;
use crate::tools::test::is_supported_test_path;
use crate::util::text_encoding::source_map_from_code;
use deno_ast::MediaType;
+use deno_ast::ModuleKind;
use deno_ast::ModuleSpecifier;
use deno_config::glob::FileCollector;
use deno_config::glob::FilePatterns;
@@ -25,6 +25,7 @@ use deno_core::serde_json;
use deno_core::sourcemap::SourceMap;
use deno_core::url::Url;
use deno_core::LocalInspectorSession;
+use node_resolver::InNpmPackageChecker;
use regex::Regex;
use std::fs;
use std::fs::File;
@@ -327,6 +328,7 @@ fn generate_coverage_report(
coverage_report.found_lines =
if let Some(source_map) = maybe_source_map.as_ref() {
+ let script_source_lines = script_source.lines().collect::<Vec<_>>();
let mut found_lines = line_counts
.iter()
.enumerate()
@@ -334,7 +336,23 @@ fn generate_coverage_report(
// get all the mappings from this destination line to a different src line
let mut results = source_map
.tokens()
- .filter(move |token| token.get_dst_line() as usize == index)
+ .filter(|token| {
+ let dst_line = token.get_dst_line() as usize;
+ dst_line == index && {
+ let dst_col = token.get_dst_col() as usize;
+ let content = script_source_lines
+ .get(dst_line)
+ .and_then(|line| {
+ line.get(dst_col..std::cmp::min(dst_col + 2, line.len()))
+ })
+ .unwrap_or("");
+
+ !content.is_empty()
+ && content != "/*"
+ && content != "*/"
+ && content != "//"
+ }
+ })
.map(move |token| (token.get_src_line() as usize, *count))
.collect::<Vec<_>>();
// only keep the results that point at different src lines
@@ -444,7 +462,7 @@ fn filter_coverages(
coverages: Vec<cdp::ScriptCoverage>,
include: Vec<String>,
exclude: Vec<String>,
- npm_resolver: &dyn CliNpmResolver,
+ in_npm_pkg_checker: &dyn InNpmPackageChecker,
) -> Vec<cdp::ScriptCoverage> {
let include: Vec<Regex> =
include.iter().map(|e| Regex::new(e).unwrap()).collect();
@@ -462,13 +480,13 @@ fn filter_coverages(
.filter(|e| {
let is_internal = e.url.starts_with("ext:")
|| e.url.ends_with("__anonymous__")
- || e.url.ends_with("$deno$test.js")
+ || e.url.ends_with("$deno$test.mjs")
|| e.url.ends_with(".snap")
|| is_supported_test_path(Path::new(e.url.as_str()))
|| doc_test_re.is_match(e.url.as_str())
|| Url::parse(&e.url)
.ok()
- .map(|url| npm_resolver.in_npm_package(&url))
+ .map(|url| in_npm_pkg_checker.in_npm_package(&url))
.unwrap_or(false);
let is_included = include.iter().any(|p| p.is_match(&e.url));
@@ -479,7 +497,7 @@ fn filter_coverages(
.collect::<Vec<cdp::ScriptCoverage>>()
}
-pub async fn cover_files(
+pub fn cover_files(
flags: Arc<Flags>,
coverage_flags: CoverageFlags,
) -> Result<(), AnyError> {
@@ -489,9 +507,10 @@ pub async fn cover_files(
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
- let npm_resolver = factory.npm_resolver().await?;
+ let in_npm_pkg_checker = factory.in_npm_pkg_checker()?;
let file_fetcher = factory.file_fetcher()?;
let emitter = factory.emitter()?;
+ let cjs_tracker = factory.cjs_tracker()?;
assert!(!coverage_flags.files.include.is_empty());
@@ -511,7 +530,7 @@ pub async fn cover_files(
script_coverages,
coverage_flags.include,
coverage_flags.exclude,
- npm_resolver.as_ref(),
+ in_npm_pkg_checker.as_ref(),
);
if script_coverages.is_empty() {
return Err(generic_error("No covered files included in the report"));
@@ -568,16 +587,21 @@ pub async fn cover_files(
let transpiled_code = match file.media_type {
MediaType::JavaScript
| MediaType::Unknown
+ | MediaType::Css
+ | MediaType::Wasm
| MediaType::Cjs
| MediaType::Mjs
| MediaType::Json => None,
- MediaType::Dts | MediaType::Dmts | MediaType::Dcts => Some(Vec::new()),
+ MediaType::Dts | MediaType::Dmts | MediaType::Dcts => Some(String::new()),
MediaType::TypeScript
| MediaType::Jsx
| MediaType::Mts
| MediaType::Cts
| MediaType::Tsx => {
- Some(match emitter.maybe_cached_emit(&file.specifier, &file.source) {
+ let module_kind = ModuleKind::from_is_cjs(
+ cjs_tracker.is_maybe_cjs(&file.specifier, file.media_type)?,
+ );
+ Some(match emitter.maybe_cached_emit(&file.specifier, module_kind, &file.source) {
Some(code) => code,
None => {
return Err(anyhow!(
@@ -588,13 +612,12 @@ pub async fn cover_files(
}
})
}
- MediaType::Wasm | MediaType::TsBuildInfo | MediaType::SourceMap => {
+ MediaType::SourceMap => {
unreachable!()
}
};
let runtime_code: String = match transpiled_code {
- Some(code) => String::from_utf8(code)
- .with_context(|| format!("Failed decoding {}", file.specifier))?,
+ Some(code) => code,
None => original_source.to_string(),
};
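
Note on the coverage change above: the source-map token filter now peeks at the two characters of the generated source under each mapping and skips mappings that land on comment delimiters or past the end of the line. A standalone sketch of that predicate, with a helper name of my own choosing (not part of the diff):

// Returns true when a source-map token at `dst_col` of the generated line
// should count toward line coverage; empty slices and the comment markers
// "/*", "*/" and "//" are ignored, mirroring the filter added in
// generate_coverage_report above.
fn token_counts_for_coverage(generated_line: &str, dst_col: usize) -> bool {
    let end = std::cmp::min(dst_col + 2, generated_line.len());
    let content = generated_line.get(dst_col..end).unwrap_or("");
    !content.is_empty() && content != "/*" && content != "*/" && content != "//"
}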
diff --git a/cli/tools/doc.rs b/cli/tools/doc.rs
index 5e18546a2..e33da4efb 100644
--- a/cli/tools/doc.rs
+++ b/cli/tools/doc.rs
@@ -22,9 +22,9 @@ use deno_core::serde_json;
use deno_doc as doc;
use deno_doc::html::UrlResolveKind;
use deno_graph::source::NullFileSystem;
+use deno_graph::EsParser;
use deno_graph::GraphKind;
use deno_graph::ModuleAnalyzer;
-use deno_graph::ModuleParser;
use deno_graph::ModuleSpecifier;
use doc::html::ShortPath;
use doc::DocDiagnostic;
@@ -37,7 +37,7 @@ const JSON_SCHEMA_VERSION: u8 = 1;
async fn generate_doc_nodes_for_builtin_types(
doc_flags: DocFlags,
- parser: &dyn ModuleParser,
+ parser: &dyn EsParser,
analyzer: &dyn ModuleAnalyzer,
) -> Result<IndexMap<ModuleSpecifier, Vec<doc::DocNode>>, AnyError> {
let source_file_specifier =
@@ -96,7 +96,7 @@ pub async fn doc(
let module_info_cache = factory.module_info_cache()?;
let parsed_source_cache = factory.parsed_source_cache();
let capturing_parser = parsed_source_cache.as_capturing_parser();
- let analyzer = module_info_cache.as_module_analyzer(parsed_source_cache);
+ let analyzer = module_info_cache.as_module_analyzer();
let doc_nodes_by_url = match doc_flags.source_files {
DocSourceFileFlag::Builtin => {
diff --git a/cli/tools/fmt.rs b/cli/tools/fmt.rs
index 8a4bc4e6c..d40abd5f5 100644
--- a/cli/tools/fmt.rs
+++ b/cli/tools/fmt.rs
@@ -83,6 +83,7 @@ pub async fn format(
file_watcher::PrintConfig::new("Fmt", !watch_flags.no_clear_screen),
move |flags, watcher_communicator, changed_paths| {
let fmt_flags = fmt_flags.clone();
+ watcher_communicator.show_path_changed(changed_paths.clone());
Ok(async move {
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
@@ -227,6 +228,7 @@ fn collect_fmt_files(
})
.ignore_git_folder()
.ignore_node_modules()
+ .use_gitignore()
.set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned))
.collect_file_patterns(&deno_config::fs::RealDenoConfigFs, files)
}
@@ -353,6 +355,21 @@ fn format_yaml(
file_text: &str,
fmt_options: &FmtOptionsConfig,
) -> Result<Option<String>, AnyError> {
+ let ignore_file = file_text
+ .lines()
+ .take_while(|line| line.starts_with('#'))
+ .any(|line| {
+ line
+ .strip_prefix('#')
+ .unwrap()
+ .trim()
+ .starts_with("deno-fmt-ignore-file")
+ });
+
+ if ignore_file {
+ return Ok(None);
+ }
+
let formatted_str =
pretty_yaml::format_text(file_text, &get_resolved_yaml_config(fmt_options))
.map_err(AnyError::from)?;
@@ -775,28 +792,26 @@ fn format_ensure_stable(
return Ok(Some(current_text));
}
Err(err) => {
- panic!(
+ bail!(
concat!(
"Formatting succeeded initially, but failed when ensuring a ",
"stable format. This indicates a bug in the formatter where ",
"the text it produces is not syntactically correct. As a temporary ",
- "workaround you can ignore this file ({}).\n\n{:#}"
+ "workaround you can ignore this file.\n\n{:#}"
),
- file_path.display(),
err,
)
}
}
count += 1;
if count == 5 {
- panic!(
+ bail!(
concat!(
"Formatting not stable. Bailed after {} tries. This indicates a bug ",
- "in the formatter where it formats the file ({}) differently each time. As a ",
+ "in the formatter where it formats the file differently each time. As a ",
"temporary workaround you can ignore this file."
),
count,
- file_path.display(),
)
}
}
@@ -978,6 +993,7 @@ fn get_resolved_malva_config(
single_line_top_level_declarations: false,
selector_override_comment_directive: "deno-fmt-selector-override".into(),
ignore_comment_directive: "deno-fmt-ignore".into(),
+ ignore_file_comment_directive: "deno-fmt-ignore-file".into(),
};
FormatOptions {
@@ -1016,7 +1032,7 @@ fn get_resolved_markup_fmt_config(
max_attrs_per_line: None,
prefer_attrs_single_line: false,
html_normal_self_closing: None,
- html_void_self_closing: Some(true),
+ html_void_self_closing: None,
component_self_closing: None,
svg_self_closing: None,
mathml_self_closing: None,
@@ -1036,6 +1052,7 @@ fn get_resolved_markup_fmt_config(
svelte_directive_shorthand: Some(true),
astro_attr_shorthand: Some(true),
ignore_comment_directive: "deno-fmt-ignore".into(),
+ ignore_file_comment_directive: "deno-fmt-ignore-file".into(),
};
FormatOptions {
@@ -1198,6 +1215,8 @@ fn is_supported_ext_fmt(path: &Path) -> bool {
#[cfg(test)]
mod test {
+ use test_util::assert_starts_with;
+
use super::*;
#[test]
@@ -1253,12 +1272,16 @@ mod test {
}
#[test]
- #[should_panic(expected = "Formatting not stable. Bailed after 5 tries.")]
fn test_format_ensure_stable_unstable_format() {
- format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| {
- Ok(Some(format!("1{file_text}")))
- })
- .unwrap();
+ let err =
+ format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| {
+ Ok(Some(format!("1{file_text}")))
+ })
+ .unwrap_err();
+ assert_starts_with!(
+ err.to_string(),
+ "Formatting not stable. Bailed after 5 tries."
+ );
}
#[test]
@@ -1272,16 +1295,20 @@ mod test {
}
#[test]
- #[should_panic(expected = "Formatting succeeded initially, but failed when")]
fn test_format_ensure_stable_error_second() {
- format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| {
- if file_text == "1" {
- Ok(Some("11".to_string()))
- } else {
- bail!("Error formatting.")
- }
- })
- .unwrap();
+ let err =
+ format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| {
+ if file_text == "1" {
+ Ok(Some("11".to_string()))
+ } else {
+ bail!("Error formatting.")
+ }
+ })
+ .unwrap_err();
+ assert_starts_with!(
+ err.to_string(),
+ "Formatting succeeded initially, but failed when"
+ );
}
#[test]
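
For the format_yaml change above: YAML files can now opt out of formatting entirely, and only the leading run of `#` comment lines is scanned for the directive. A sketch of that check pulled out into a free function (the function name is illustrative, not from the diff):

// True when the leading '#' comment block contains a `deno-fmt-ignore-file`
// directive; scanning stops at the first non-comment line, so the directive
// must appear at the top of the file.
fn has_ignore_file_directive(file_text: &str) -> bool {
    file_text
        .lines()
        .take_while(|line| line.starts_with('#'))
        .any(|line| {
            line
                .strip_prefix('#')
                .unwrap()
                .trim()
                .starts_with("deno-fmt-ignore-file")
        })
}

A file starting with `# deno-fmt-ignore-file` is returned unformatted (`Ok(None)`), matching the behavior the malva and markup_fmt configurations gain through their new `ignore_file_comment_directive` options.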
diff --git a/cli/tools/info.rs b/cli/tools/info.rs
index a6b390b81..c138d03d4 100644
--- a/cli/tools/info.rs
+++ b/cli/tools/info.rs
@@ -11,12 +11,14 @@ use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_core::serde_json;
+use deno_core::url;
use deno_graph::Dependency;
use deno_graph::GraphKind;
use deno_graph::Module;
use deno_graph::ModuleError;
use deno_graph::ModuleGraph;
use deno_graph::Resolution;
+use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
@@ -47,20 +49,23 @@ pub async fn info(
let module_graph_creator = factory.module_graph_creator().await?;
let npm_resolver = factory.npm_resolver().await?;
let maybe_lockfile = cli_options.maybe_lockfile();
+ let npmrc = cli_options.npmrc();
let resolver = factory.workspace_resolver().await?;
- let maybe_import_specifier =
- if let Some(import_map) = resolver.maybe_import_map() {
- if let Ok(imports_specifier) =
- import_map.resolve(&specifier, import_map.base_url())
- {
- Some(imports_specifier)
- } else {
- None
- }
+ let cwd_url =
+ url::Url::from_directory_path(cli_options.initial_cwd()).unwrap();
+
+ let maybe_import_specifier = if let Some(import_map) =
+ resolver.maybe_import_map()
+ {
+ if let Ok(imports_specifier) = import_map.resolve(&specifier, &cwd_url) {
+ Some(imports_specifier)
} else {
None
- };
+ }
+ } else {
+ None
+ };
let specifier = match maybe_import_specifier {
Some(specifier) => specifier,
@@ -88,7 +93,8 @@ pub async fn info(
JSON_SCHEMA_VERSION.into(),
);
}
- add_npm_packages_to_json(&mut json_graph, npm_resolver.as_ref());
+
+ add_npm_packages_to_json(&mut json_graph, npm_resolver.as_ref(), npmrc);
display::write_json_to_stdout(&json_graph)?;
} else {
let mut output = String::new();
@@ -192,6 +198,7 @@ fn print_cache_info(
fn add_npm_packages_to_json(
json: &mut serde_json::Value,
npm_resolver: &dyn CliNpmResolver,
+ npmrc: &ResolvedNpmRc,
) {
let Some(npm_resolver) = npm_resolver.as_managed() else {
return; // does not include byonm to deno info's output
@@ -202,45 +209,28 @@ fn add_npm_packages_to_json(
let json = json.as_object_mut().unwrap();
let modules = json.get_mut("modules").and_then(|m| m.as_array_mut());
if let Some(modules) = modules {
- if modules.len() == 1
- && modules[0].get("kind").and_then(|k| k.as_str()) == Some("npm")
- {
- // If there is only one module and it's "external", then that means
- // someone provided an npm specifier as a cli argument. In this case,
- // we want to show which npm package the cli argument resolved to.
- let module = &mut modules[0];
- let maybe_package = module
- .get("specifier")
- .and_then(|k| k.as_str())
- .and_then(|specifier| NpmPackageNvReference::from_str(specifier).ok())
- .and_then(|package_ref| {
- snapshot
- .resolve_package_from_deno_module(package_ref.nv())
- .ok()
- });
- if let Some(pkg) = maybe_package {
- if let Some(module) = module.as_object_mut() {
- module
- .insert("npmPackage".to_string(), pkg.id.as_serialized().into());
- }
- }
- } else {
- // Filter out npm package references from the modules and instead
- // have them only listed as dependencies. This is done because various
- // npm specifiers modules in the graph are really just unresolved
- // references. So there could be listed multiple npm specifiers
- // that would resolve to a single npm package.
- for i in (0..modules.len()).rev() {
- if matches!(
- modules[i].get("kind").and_then(|k| k.as_str()),
- Some("npm") | Some("external")
- ) {
- modules.remove(i);
+ for module in modules.iter_mut() {
+ if matches!(module.get("kind").and_then(|k| k.as_str()), Some("npm")) {
+ // If there is only one module and it's "external", then that means
+ // someone provided an npm specifier as a cli argument. In this case,
+ // we want to show which npm package the cli argument resolved to.
+ let maybe_package = module
+ .get("specifier")
+ .and_then(|k| k.as_str())
+ .and_then(|specifier| NpmPackageNvReference::from_str(specifier).ok())
+ .and_then(|package_ref| {
+ snapshot
+ .resolve_package_from_deno_module(package_ref.nv())
+ .ok()
+ });
+ if let Some(pkg) = maybe_package {
+ if let Some(module) = module.as_object_mut() {
+ module
+ .insert("npmPackage".to_string(), pkg.id.as_serialized().into());
+ }
}
}
- }
- for module in modules.iter_mut() {
let dependencies = module
.get_mut("dependencies")
.and_then(|d| d.as_array_mut());
@@ -272,7 +262,7 @@ fn add_npm_packages_to_json(
let mut json_packages = serde_json::Map::with_capacity(sorted_packages.len());
for pkg in sorted_packages {
let mut kv = serde_json::Map::new();
- kv.insert("name".to_string(), pkg.id.nv.name.to_string().into());
+ kv.insert("name".to_string(), pkg.id.nv.name.clone().into());
kv.insert("version".to_string(), pkg.id.nv.version.to_string().into());
let mut deps = pkg.dependencies.values().collect::<Vec<_>>();
deps.sort();
@@ -281,6 +271,8 @@ fn add_npm_packages_to_json(
.map(|id| serde_json::Value::String(id.as_serialized()))
.collect::<Vec<_>>();
kv.insert("dependencies".to_string(), deps.into());
+ let registry_url = npmrc.get_registry_url(&pkg.id.nv.name);
+ kv.insert("registryUrl".to_string(), registry_url.to_string().into());
json_packages.insert(pkg.id.as_serialized(), kv.into());
}
@@ -545,7 +537,7 @@ impl<'a> GraphDisplayContext<'a> {
fn build_module_info(&mut self, module: &Module, type_dep: bool) -> TreeNode {
enum PackageOrSpecifier {
- Package(NpmResolutionPackage),
+ Package(Box<NpmResolutionPackage>),
Specifier(ModuleSpecifier),
}
@@ -553,7 +545,7 @@ impl<'a> GraphDisplayContext<'a> {
let package_or_specifier = match module.npm() {
Some(npm) => match self.npm_info.resolve_package(npm.nv_reference.nv()) {
- Some(package) => Package(package.clone()),
+ Some(package) => Package(Box::new(package.clone())),
None => Specifier(module.specifier().clone()), // should never happen
},
None => Specifier(module.specifier().clone()),
@@ -660,10 +652,12 @@ impl<'a> GraphDisplayContext<'a> {
let message = match err {
HttpsChecksumIntegrity(_) => "(checksum integrity error)",
Decode(_) => "(loading decode error)",
- Loader(err) => match deno_core::error::get_custom_error_class(err) {
- Some("NotCapable") => "(not capable, requires --allow-import)",
- _ => "(loading error)",
- },
+ Loader(err) => {
+ match deno_runtime::errors::get_error_class_name(err) {
+ Some("NotCapable") => "(not capable, requires --allow-import)",
+ _ => "(loading error)",
+ }
+ }
Jsr(_) => "(loading error)",
NodeUnknownBuiltinModule(_) => "(unknown node built-in error)",
Npm(_) => "(npm loading error)",
diff --git a/cli/tools/init/mod.rs b/cli/tools/init/mod.rs
index 2d6a894e1..4e4a686c5 100644
--- a/cli/tools/init/mod.rs
+++ b/cli/tools/init/mod.rs
@@ -24,32 +24,29 @@ pub fn init_project(init_flags: InitFlags) -> Result<(), AnyError> {
create_file(
&dir,
"main.ts",
- r#"import { type Route, route, serveDir } from "@std/http";
+ r#"import { serveDir } from "@std/http";
-const routes: Route[] = [
- {
- pattern: new URLPattern({ pathname: "/" }),
- handler: () => new Response("Home page"),
- },
- {
- pattern: new URLPattern({ pathname: "/users/:id" }),
- handler: (_req, _info, params) => new Response(params?.pathname.groups.id),
- },
- {
- pattern: new URLPattern({ pathname: "/static/*" }),
- handler: (req) => serveDir(req),
- },
-];
-
-function defaultHandler(_req: Request) {
- return new Response("Not found", { status: 404 });
-}
-
-const handler = route(routes, defaultHandler);
+const userPagePattern = new URLPattern({ pathname: "/users/:id" });
+const staticPathPattern = new URLPattern({ pathname: "/static/*" });
export default {
fetch(req) {
- return handler(req);
+ const url = new URL(req.url);
+
+ if (url.pathname === "/") {
+ return new Response("Home page");
+ }
+
+ const userPageMatch = userPagePattern.exec(url);
+ if (userPageMatch) {
+ return new Response(userPageMatch.pathname.groups.id);
+ }
+
+ if (staticPathPattern.test(url)) {
+ return serveDir(req);
+ }
+
+ return new Response("Not found", { status: 404 });
},
} satisfies Deno.ServeDefaultExport;
"#,
diff --git a/cli/tools/installer.rs b/cli/tools/installer.rs
index ed86e86c7..fe477a8e6 100644
--- a/cli/tools/installer.rs
+++ b/cli/tools/installer.rs
@@ -3,6 +3,7 @@
use crate::args::resolve_no_prompt;
use crate::args::AddFlags;
use crate::args::CaData;
+use crate::args::CacheSetting;
use crate::args::ConfigFlag;
use crate::args::Flags;
use crate::args::InstallFlags;
@@ -13,8 +14,11 @@ use crate::args::TypeCheckMode;
use crate::args::UninstallFlags;
use crate::args::UninstallKind;
use crate::factory::CliFactory;
+use crate::file_fetcher::FileFetcher;
use crate::graph_container::ModuleGraphContainer;
use crate::http_util::HttpClientProvider;
+use crate::jsr::JsrFetchResolver;
+use crate::npm::NpmFetchResolver;
use crate::util::fs::canonicalize_path_maybe_not_exists;
use deno_core::anyhow::bail;
@@ -354,12 +358,51 @@ async fn install_global(
) -> Result<(), AnyError> {
// ensure the module is cached
let factory = CliFactory::from_flags(flags.clone());
+
+ let http_client = factory.http_client_provider();
+ let deps_http_cache = factory.global_http_cache()?;
+ let mut deps_file_fetcher = FileFetcher::new(
+ deps_http_cache.clone(),
+ CacheSetting::ReloadAll,
+ true,
+ http_client.clone(),
+ Default::default(),
+ None,
+ );
+
+ let npmrc = factory.cli_options().unwrap().npmrc();
+
+ deps_file_fetcher.set_download_log_level(log::Level::Trace);
+ let deps_file_fetcher = Arc::new(deps_file_fetcher);
+ let jsr_resolver = Arc::new(JsrFetchResolver::new(deps_file_fetcher.clone()));
+ let npm_resolver = Arc::new(NpmFetchResolver::new(
+ deps_file_fetcher.clone(),
+ npmrc.clone(),
+ ));
+
+ let entry_text = install_flags_global.module_url.as_str();
+ let req = super::registry::AddRmPackageReq::parse(entry_text);
+
+ // found a package requirement but missing the prefix
+ if let Ok(Err(package_req)) = req {
+ if jsr_resolver.req_to_nv(&package_req).await.is_some() {
+ bail!(
+ "{entry_text} is missing a prefix. Did you mean `{}`?",
+ crate::colors::yellow(format!("deno install -g jsr:{package_req}"))
+ );
+ } else if npm_resolver.req_to_nv(&package_req).await.is_some() {
+ bail!(
+ "{entry_text} is missing a prefix. Did you mean `{}`?",
+ crate::colors::yellow(format!("deno install -g npm:{package_req}"))
+ );
+ }
+ }
+
factory
.main_module_graph_container()
.await?
.load_and_type_check_files(&[install_flags_global.module_url.clone()])
.await?;
- let http_client = factory.http_client_provider();
// create the install shim
create_install_shim(http_client, &flags, install_flags_global).await
@@ -1396,6 +1439,7 @@ mod tests {
.env_clear()
// use the deno binary in the target directory
.env("PATH", test_util::target_dir())
+ .env("RUST_BACKTRACE", "1")
.spawn()
.unwrap()
.wait()
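
Usage note on the installer change above (not part of the diff): judging from the `Ok(Err(package_req))` pattern, `AddRmPackageReq::parse` returns a nested `Result`, where the inner `Err` carries a bare requirement such as a package name without its `jsr:`/`npm:` prefix. When that bare requirement resolves on JSR or npm, installation now bails with a "missing a prefix" hint instead of failing later with a confusing module-load error. A sketch of the three cases a caller can distinguish, assuming that return shape:

// Shape inferred from `if let Ok(Err(package_req)) = req` in the diff above.
match super::registry::AddRmPackageReq::parse(entry_text) {
    Ok(Ok(_prefixed_req)) => { /* fully qualified jsr:/npm: requirement */ }
    Ok(Err(_bare_req)) => { /* valid requirement, but missing a prefix */ }
    Err(_) => { /* not a package requirement; treat it as a path or URL */ }
}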
diff --git a/cli/tools/jupyter/install.rs b/cli/tools/jupyter/install.rs
index b0ddc948d..aeff89ccf 100644
--- a/cli/tools/jupyter/install.rs
+++ b/cli/tools/jupyter/install.rs
@@ -58,9 +58,9 @@ pub fn install() -> Result<(), AnyError> {
let f = std::fs::File::create(kernel_json_path)?;
serde_json::to_writer_pretty(f, &json_data)?;
- install_icon(&user_data_dir, "logo-32x32.png", DENO_ICON_32)?;
- install_icon(&user_data_dir, "logo-64x64.png", DENO_ICON_64)?;
- install_icon(&user_data_dir, "logo-svg.svg", DENO_ICON_SVG)?;
+ install_icon(&kernel_dir, "logo-32x32.png", DENO_ICON_32)?;
+ install_icon(&kernel_dir, "logo-64x64.png", DENO_ICON_64)?;
+ install_icon(&kernel_dir, "logo-svg.svg", DENO_ICON_SVG)?;
log::info!("✅ Deno kernelspec installed successfully.");
Ok(())
diff --git a/cli/tools/jupyter/mod.rs b/cli/tools/jupyter/mod.rs
index 0ffd0da1e..732f95c49 100644
--- a/cli/tools/jupyter/mod.rs
+++ b/cli/tools/jupyter/mod.rs
@@ -61,7 +61,7 @@ pub async fn kernel(
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
let main_module =
- resolve_url_or_path("./$deno$jupyter.ts", cli_options.initial_cwd())
+ resolve_url_or_path("./$deno$jupyter.mts", cli_options.initial_cwd())
.unwrap();
// TODO(bartlomieju): should we run with all permissions?
let permissions =
diff --git a/cli/tools/jupyter/resources/deno-logo-32x32.png b/cli/tools/jupyter/resources/deno-logo-32x32.png
index 97871a02e..d59f251a2 100644
--- a/cli/tools/jupyter/resources/deno-logo-32x32.png
+++ b/cli/tools/jupyter/resources/deno-logo-32x32.png
Binary files differ
diff --git a/cli/tools/jupyter/resources/deno-logo-64x64.png b/cli/tools/jupyter/resources/deno-logo-64x64.png
index 1b9444ef6..37e98abaf 100644
--- a/cli/tools/jupyter/resources/deno-logo-64x64.png
+++ b/cli/tools/jupyter/resources/deno-logo-64x64.png
Binary files differ
diff --git a/cli/tools/jupyter/resources/deno-logo-svg.svg b/cli/tools/jupyter/resources/deno-logo-svg.svg
index d7bb9ef80..fbc22cd91 100644
--- a/cli/tools/jupyter/resources/deno-logo-svg.svg
+++ b/cli/tools/jupyter/resources/deno-logo-svg.svg
@@ -1 +1,17 @@
-<svg viewBox="0 0 30 30" fill="none" xmlns="http://www.w3.org/2000/svg"><g clip-path="url(#clip0_29_599)"><path d="M15 0C23.2843 0 30 6.71572 30 15C30 23.2843 23.2843 30 15 30C6.71572 30 0 23.2843 0 15C0 6.71572 6.71572 0 15 0Z" fill="currentColor"></path><path d="M14.6635 22.3394C14.2788 22.2357 13.8831 22.4584 13.7705 22.8381L13.7655 22.8558L12.7694 26.5472L12.7649 26.565C12.6711 26.9498 12.9011 27.3414 13.2858 27.4451C13.6704 27.549 14.0661 27.3263 14.1787 26.9465L14.1837 26.9289L15.1797 23.2375L15.1843 23.2196C15.1911 23.1919 15.1962 23.164 15.1997 23.1362L15.2026 23.1084L15.179 22.9888L15.1445 22.8166L15.1227 22.7091C15.076 22.619 15.0111 22.5396 14.932 22.4759C14.853 22.4123 14.7615 22.3658 14.6635 22.3394ZM7.7224 18.5379C7.70424 18.5741 7.68883 18.6123 7.67658 18.6522L7.66967 18.6763L6.67358 22.3677L6.669 22.3856C6.57525 22.7704 6.80524 23.1619 7.1899 23.2657C7.57451 23.3695 7.97026 23.1469 8.08287 22.7671L8.08779 22.7494L8.99096 19.4023C8.51793 19.1518 8.09336 18.8628 7.7224 18.5379ZM5.34707 14.2929C4.9624 14.1891 4.56666 14.4117 4.4541 14.7915L4.44912 14.8092L3.45303 18.5006L3.44846 18.5184C3.35471 18.9032 3.58469 19.2947 3.96936 19.3985C4.35397 19.5023 4.74971 19.2797 4.86232 18.8999L4.86725 18.8822L5.86334 15.1908L5.86791 15.173C5.96166 14.7882 5.73174 14.3967 5.34707 14.2929ZM27.682 13.4546C27.2973 13.3508 26.9015 13.5734 26.789 13.9532L26.784 13.9709L25.7879 17.6623L25.7833 17.6801C25.6896 18.0649 25.9196 18.4564 26.3042 18.5602C26.6889 18.664 27.0846 18.4414 27.1972 18.0616L27.2021 18.0439L28.1982 14.3525L28.2028 14.3347C28.2965 13.9499 28.0666 13.5584 27.682 13.4546ZM3.17781 8.52527C2.34361 10.0444 1.81243 11.7112 1.61377 13.4329C1.7088 13.5412 1.83381 13.619 1.97301 13.6563C2.35768 13.7602 2.75342 13.5375 2.86598 13.1577L2.87096 13.1401L3.86705 9.44865L3.87162 9.43084C3.96537 9.04599 3.73539 8.65447 3.35072 8.5507C3.2943 8.53547 3.23623 8.52694 3.17781 8.52527ZM25.159 8.5507C24.7744 8.44687 24.3786 8.66953 24.266 9.04933L24.2611 9.06697L23.265 12.7584L23.2604 12.7762C23.1667 13.161 23.3966 13.5526 23.7813 13.6563C24.1659 13.7602 24.5617 13.5375 24.6743 13.1577L24.6792 13.1401L25.6753 9.44865L25.6799 9.43084C25.7736 9.04599 25.5436 8.65447 25.159 8.5507Z" fill="white"></path><path d="M7.51285 5.04065C7.12824 4.93682 6.73249 5.15948 6.61988 5.53929L6.61495 5.55692L5.61886 9.24833L5.61429 9.26614C5.52054 9.65098 5.75052 10.0425 6.13519 10.1463C6.5198 10.2501 6.91554 10.0274 7.02816 9.64764L7.03308 9.63001L8.02917 5.9386L8.03374 5.92079C8.12749 5.53595 7.89751 5.14442 7.51285 5.04065ZM20.3116 5.73845C19.9269 5.63462 19.5312 5.85727 19.4186 6.23708L19.4136 6.25471L18.7443 8.73499C19.1779 8.94915 19.5917 9.20126 19.9809 9.48839L20.0453 9.53643L20.8279 6.63639L20.8324 6.61858C20.9262 6.23374 20.6963 5.84221 20.3116 5.73845ZM13.7968 1.57642C13.3296 1.61771 12.8647 1.68338 12.4043 1.77317L12.3066 1.79263L11.3782 5.23419L11.3736 5.252C11.2799 5.63684 11.5099 6.02837 11.8945 6.13214C12.2792 6.23596 12.6749 6.01331 12.7875 5.6335L12.7924 5.61587L13.7885 1.92446L13.7931 1.90665C13.8196 1.79831 13.8209 1.68533 13.7968 1.57642ZM22.9626 4.1263L22.7669 4.85169L22.7623 4.86944C22.6686 5.25429 22.8986 5.64581 23.2832 5.74958C23.6678 5.85341 24.0636 5.63075 24.1762 5.25095L24.1811 5.23331L24.2025 5.15462C23.8362 4.81205 23.4511 4.49009 23.0491 4.19022L22.9626 4.1263ZM17.1672 1.69677L16.8139 3.00593L16.8094 3.02374C16.7156 3.40858 16.9456 3.80011 17.3303 3.90388C17.7149 4.0077 18.1106 3.78505 18.2233 3.40524L18.2282 3.38761L18.6 2.00966C18.1624 1.88867 17.719 1.79001 17.2714 1.71405L17.1672 
1.69677Z" fill="white"></path><path d="M9.69085 24.6253C9.80341 24.2455 10.1992 24.0229 10.5838 24.1266C10.9685 24.2303 11.1984 24.6219 11.1047 25.0068L11.1001 25.0246L10.3872 27.6664L10.2876 27.6297C9.85836 27.4694 9.43765 27.2873 9.0271 27.0839L9.68587 24.6429L9.69085 24.6253Z" fill="white"></path><path d="M14.4141 8.49082C10.0522 8.49082 6.65918 11.2368 6.65918 14.6517C6.65918 17.8769 9.78123 19.9362 14.6211 19.8331C15.0327 19.8243 15.1517 20.1008 15.2856 20.4734C15.4196 20.846 15.7796 22.8097 16.0665 24.3117C16.3233 25.656 16.5842 27.0052 16.7834 28.3596C19.9439 27.9418 22.8663 26.3807 25.0076 24.0261L22.7237 15.5088C22.1544 13.4518 21.489 11.5564 19.7283 10.1794C18.3118 9.07166 16.5122 8.49082 14.4141 8.49082Z" fill="white"></path><path d="M15.3516 10.957C15.8694 10.957 16.2891 11.3767 16.2891 11.8945C16.2891 12.4123 15.8694 12.832 15.3516 12.832C14.8338 12.832 14.4141 12.4123 14.4141 11.8945C14.4141 11.3767 14.8338 10.957 15.3516 10.957Z" fill="currentColor"></path></g><defs><clipPath id="clip0_29_599"><rect width="30" height="30" fill="white"></rect></clipPath></defs></svg>
+<svg width="100%" height="100%" viewBox="0 0 441 441" version="1.1" xmlns="http://www.w3.org/2000/svg"
+ xmlns:xlink="http://www.w3.org/1999/xlink" xml:space="preserve" xmlns:serif="http://www.serif.com/"
+ style="fill-rule:evenodd;clip-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:2;">
+ <g transform="matrix(1.02631,-2.08167e-17,2.08167e-17,1.02631,-0.525826,-0.525138)">
+ <path
+ d="M37.965,296.635C26.441,271.766 20.009,244.065 20.009,214.873C20.009,207.318 20.439,199.863 21.278,192.531C22.129,185.123 23.39,177.852 25.036,170.742C34.286,130.852 55.801,95.64 85.384,69.301C110.233,47.207 140.674,31.444 174.043,24.299C187.212,21.486 200.872,20.006 214.875,20.006C219.783,20.011 224.727,20.2 229.701,20.579C253.285,22.38 275.571,28.317 295.904,37.625C312.305,45.143 327.486,54.87 341.064,66.426C375.17,95.48 398.957,135.953 406.867,181.369C408.757,192.255 409.742,203.45 409.742,214.873C409.738,219.789 409.548,224.74 409.168,229.721C407.731,248.545 403.659,266.542 397.34,283.379C388.521,306.83 375.308,328.136 358.706,346.294C337.113,368.342 309.673,378.152 286.755,377.744C270.09,377.447 253.784,370.816 242.516,361.114C226.42,347.253 219.918,331.409 217.69,313.729C217.136,309.334 217.461,297.358 219.748,289.066C221.453,282.885 225.777,270.948 232.1,265.727C224.703,262.541 215.183,255.604 212.182,252.274C211.445,251.455 211.54,250.174 212.2,249.292C212.861,248.41 214.02,248.062 215.057,248.435C221.416,250.618 229.161,252.771 237.327,254.137C248.067,255.932 261.424,258.194 274.955,258.859C307.946,260.479 342.407,245.67 353.103,216.207C363.798,186.744 359.649,157.602 321.279,140.121C282.909,122.64 265.185,101.856 234.183,89.32C213.934,81.131 191.396,85.992 168.257,98.78C105.931,133.223 50.092,242.048 75.833,342.873C76.201,344.252 75.58,345.705 74.328,346.392C73.156,347.036 71.713,346.852 70.741,345.962C63.25,337.731 56.454,328.857 50.445,319.433C45.796,312.139 41.623,304.524 37.965,296.635Z" />
+ </g>
+ <g transform="matrix(0.0920293,0.00428099,-0.00428099,0.0920293,-28.1272,-500.301)">
+ <path
+ d="M3053.7,5296.9C4371.65,5296.9 5441.66,6366.91 5441.66,7684.86C5441.66,9002.81 4371.65,10072.8 3053.7,10072.8C1735.75,10072.8 665.74,9002.81 665.74,7684.86C665.74,6366.91 1735.75,5296.9 3053.7,5296.9ZM3745.03,8143.22C3594.12,8142.82 3444.31,8124.57 3323.87,8110.15C3232.29,8099.18 3144.99,8079.23 3073.1,8058.23C3061.36,8054.62 3048.65,8059.09 3041.75,8069.24C3034.86,8079.4 3034.46,8093.71 3043.09,8102.44C3078.21,8137.94 3187.74,8210.21 3271.7,8241.83C3204.04,8303.2 3162.1,8438.28 3146.33,8507.94C3125.17,8601.4 3127.75,8734.83 3136.19,8783.45C3170.14,8979.04 3250.69,9151.99 3436.99,9297.9C3567.4,9400.03 3752.28,9465.38 3937.88,9460.06C4194.01,9452.71 4495.48,9328.51 4724.65,9070.17C5023.25,8710.58 5208.52,8252.45 5223.47,7749.5C5259.08,6551.9 4315.7,5550.69 3118.1,5515.08C1920.51,5479.47 919.301,6422.86 883.689,7620.45C865.246,8240.66 1109.37,8808.21 1515.43,9216.2C1526.73,9227.39 1544.21,9229.43 1557.78,9221.14C1571.35,9212.85 1577.51,9196.36 1572.7,9181.2C1234.07,8072.55 1799.11,6832.64 2474.84,6417.1C2725.71,6262.82 2973.99,6197.06 3203.56,6277.7C3555.04,6401.15 3763.03,6623.26 4199.06,6797.93C4635.09,6972.59 4696.35,7294.74 4592.58,7628.14C4488.81,7961.54 4113,8144.17 3745.03,8143.22ZM2917.17,6442.51C2777.75,6459.97 2693.93,6637.44 2687.08,6749.42C2680.18,6861.39 2744.03,7042.7 2926.19,7030.63C3139.52,7016.49 3195.89,6830.7 3164.24,6654.94C3140.48,6522.94 3033.73,6427.9 2917.17,6442.51Z"
+ style="fill:white;" />
+ </g>
+ <g transform="matrix(7.12289,0.543899,-0.543899,7.12289,-4867.49,-1040.55)">
+ <path
+ d="M721.316,105.751C722.813,105.518 724.225,106.703 724.582,108.395C725.058,110.649 724.402,113.065 721.658,113.329C719.314,113.555 718.422,111.242 718.468,109.796C718.513,108.35 719.525,106.03 721.316,105.751Z" />
+ </g>
+</svg>
diff --git a/cli/tools/jupyter/server.rs b/cli/tools/jupyter/server.rs
index 0cd80f7dd..5680ed4c1 100644
--- a/cli/tools/jupyter/server.rs
+++ b/cli/tools/jupyter/server.rs
@@ -329,7 +329,12 @@ impl JupyterServer {
})
.collect();
- (candidates, cursor_pos - prop_name.len())
+ if prop_name.len() > cursor_pos {
+ // TODO(bartlomieju): most likely not correct, but better than panicking because of sub with overflow
+ (candidates, cursor_pos)
+ } else {
+ (candidates, cursor_pos - prop_name.len())
+ }
} else {
// combine results of declarations and globalThis properties
let mut candidates = get_expression_property_names(
@@ -349,7 +354,12 @@ impl JupyterServer {
candidates.sort();
candidates.dedup(); // make sure to sort first
- (candidates, cursor_pos - expr.len())
+ if expr.len() > cursor_pos {
+ // TODO(bartlomieju): most likely not correct, but better than panicking because of sub with overflow
+ (candidates, cursor_pos)
+ } else {
+ (candidates, cursor_pos - expr.len())
+ }
};
connection
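
On the jupyter/server.rs change above: the completion offset was previously computed as `cursor_pos - prop_name.len()` (or `- expr.len()`) on `usize` values, which panics on underflow in debug builds whenever the matched prefix is longer than the cursor position; the new branches fall back to `cursor_pos` in that case. For reference, `usize::checked_sub` expresses the same boundary the guard is checking (shown only to illustrate the failure case; the diff keeps an explicit comparison so the TODO can flag the fallback):

// checked_sub returns None exactly when the old code would have underflowed.
assert_eq!(2usize.checked_sub(5), None);
assert_eq!(7usize.checked_sub(5), Some(2));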
diff --git a/cli/tools/lint/mod.rs b/cli/tools/lint/mod.rs
index e096b486e..fcefb4587 100644
--- a/cli/tools/lint/mod.rs
+++ b/cli/tools/lint/mod.rs
@@ -63,7 +63,7 @@ pub use rules::LintRuleProvider;
const JSON_SCHEMA_VERSION: u8 = 1;
-static STDIN_FILE_NAME: &str = "$deno$stdin.ts";
+static STDIN_FILE_NAME: &str = "$deno$stdin.mts";
pub async fn lint(
flags: Arc<Flags>,
@@ -80,6 +80,7 @@ pub async fn lint(
file_watcher::PrintConfig::new("Lint", !watch_flags.no_clear_screen),
move |flags, watcher_communicator, changed_paths| {
let lint_flags = lint_flags.clone();
+ watcher_communicator.show_path_changed(changed_paths.clone());
Ok(async move {
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
@@ -191,7 +192,7 @@ pub async fn lint(
linter.finish()
};
if !success {
- std::process::exit(1);
+ deno_runtime::exit(1);
}
}
@@ -435,6 +436,7 @@ fn collect_lint_files(
})
.ignore_git_folder()
.ignore_node_modules()
+ .use_gitignore()
.set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned))
.collect_file_patterns(&deno_config::fs::RealDenoConfigFs, files)
}
diff --git a/cli/tools/lint/rules/no_sloppy_imports.rs b/cli/tools/lint/rules/no_sloppy_imports.rs
index 2f6087588..94bf9a7c6 100644
--- a/cli/tools/lint/rules/no_sloppy_imports.rs
+++ b/cli/tools/lint/rules/no_sloppy_imports.rs
@@ -87,6 +87,7 @@ impl LintRule for NoSloppyImportsRule {
captures: Default::default(),
};
+ // fill this and capture the sloppy imports in the resolver
deno_graph::parse_module_from_ast(deno_graph::ParseModuleFromAstOptions {
graph_kind: deno_graph::GraphKind::All,
specifier: context.specifier().clone(),
diff --git a/cli/tools/registry/mod.rs b/cli/tools/registry/mod.rs
index 4098d62e3..12289c581 100644
--- a/cli/tools/registry/mod.rs
+++ b/cli/tools/registry/mod.rs
@@ -69,6 +69,7 @@ pub use pm::add;
pub use pm::cache_top_level_deps;
pub use pm::remove;
pub use pm::AddCommandName;
+pub use pm::AddRmPackageReq;
use publish_order::PublishOrderGraph;
use unfurl::SpecifierUnfurler;
diff --git a/cli/tools/registry/pm.rs b/cli/tools/registry/pm.rs
index 5dc042620..c1ea2c75e 100644
--- a/cli/tools/registry/pm.rs
+++ b/cli/tools/registry/pm.rs
@@ -1,32 +1,25 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-mod cache_deps;
-
-pub use cache_deps::cache_top_level_deps;
-use deno_semver::jsr::JsrPackageReqReference;
-use deno_semver::npm::NpmPackageReqReference;
-use deno_semver::VersionReq;
-
-use std::borrow::Cow;
+use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
-use deno_ast::TextChange;
-use deno_config::deno_json::FmtOptionsConfig;
-use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::futures::StreamExt;
-use deno_core::serde_json;
-use deno_core::ModuleSpecifier;
-use deno_runtime::deno_node;
+use deno_path_util::url_to_file_path;
+use deno_semver::jsr::JsrPackageReqReference;
+use deno_semver::npm::NpmPackageReqReference;
+use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
-use indexmap::IndexMap;
-use jsonc_parser::ast::ObjectProp;
-use jsonc_parser::ast::Value;
-use yoke::Yoke;
+use deno_semver::Version;
+use deno_semver::VersionReq;
+use jsonc_parser::cst::CstObject;
+use jsonc_parser::cst::CstObjectProp;
+use jsonc_parser::cst::CstRootNode;
+use jsonc_parser::json;
use crate::args::AddFlags;
use crate::args::CacheSetting;
@@ -38,236 +31,181 @@ use crate::file_fetcher::FileFetcher;
use crate::jsr::JsrFetchResolver;
use crate::npm::NpmFetchResolver;
-enum DenoConfigFormat {
- Json,
- Jsonc,
-}
+mod cache_deps;
-impl DenoConfigFormat {
- fn from_specifier(spec: &ModuleSpecifier) -> Result<Self, AnyError> {
- let file_name = spec
- .path_segments()
- .ok_or_else(|| anyhow!("Empty path in deno config specifier: {spec}"))?
- .last()
- .unwrap();
- match file_name {
- "deno.json" => Ok(Self::Json),
- "deno.jsonc" => Ok(Self::Jsonc),
- _ => bail!("Unsupported deno config file: {file_name}"),
- }
- }
-}
+pub use cache_deps::cache_top_level_deps;
-struct DenoConfig {
- config: Arc<deno_config::deno_json::ConfigFile>,
- format: DenoConfigFormat,
- imports: IndexMap<String, String>,
+#[derive(Debug, Copy, Clone)]
+enum ConfigKind {
+ DenoJson,
+ PackageJson,
}
-fn deno_json_imports(
- config: &deno_config::deno_json::ConfigFile,
-) -> Result<IndexMap<String, String>, AnyError> {
- Ok(
- config
- .json
- .imports
- .clone()
- .map(|imports| {
- serde_json::from_value(imports)
- .map_err(|err| anyhow!("Malformed \"imports\" configuration: {err}"))
- })
- .transpose()?
- .unwrap_or_default(),
- )
+struct ConfigUpdater {
+ kind: ConfigKind,
+ cst: CstRootNode,
+ root_object: CstObject,
+ path: PathBuf,
+ modified: bool,
}
-impl DenoConfig {
- fn from_options(options: &CliOptions) -> Result<Option<Self>, AnyError> {
- let start_dir = &options.start_dir;
- if let Some(config) = start_dir.maybe_deno_json() {
- Ok(Some(Self {
- imports: deno_json_imports(config)?,
- config: config.clone(),
- format: DenoConfigFormat::from_specifier(&config.specifier)?,
- }))
- } else {
- Ok(None)
- }
- }
- fn add(&mut self, selected: SelectedPackage) {
- self.imports.insert(
- selected.import_name,
- format!("{}@{}", selected.package_name, selected.version_req),
- );
+impl ConfigUpdater {
+ fn new(
+ kind: ConfigKind,
+ config_file_path: PathBuf,
+ ) -> Result<Self, AnyError> {
+ let config_file_contents = std::fs::read_to_string(&config_file_path)
+ .with_context(|| {
+ format!("Reading config file '{}'", config_file_path.display())
+ })?;
+ let cst = CstRootNode::parse(&config_file_contents, &Default::default())
+ .with_context(|| {
+ format!("Parsing config file '{}'", config_file_path.display())
+ })?;
+ let root_object = cst.object_value_or_set();
+ Ok(Self {
+ kind,
+ cst,
+ root_object,
+ path: config_file_path,
+ modified: false,
+ })
}
- fn remove(&mut self, package: &str) -> bool {
- self.imports.shift_remove(package).is_some()
+ fn display_path(&self) -> String {
+ deno_path_util::url_from_file_path(&self.path)
+ .map(|u| u.to_string())
+ .unwrap_or_else(|_| self.path.display().to_string())
}
- fn take_import_fields(
- &mut self,
- ) -> Vec<(&'static str, IndexMap<String, String>)> {
- vec![("imports", std::mem::take(&mut self.imports))]
+ fn obj(&self) -> &CstObject {
+ &self.root_object
}
-}
-impl NpmConfig {
- fn from_options(options: &CliOptions) -> Result<Option<Self>, AnyError> {
- let start_dir = &options.start_dir;
- if let Some(pkg_json) = start_dir.maybe_pkg_json() {
- Ok(Some(Self {
- dependencies: pkg_json.dependencies.clone().unwrap_or_default(),
- dev_dependencies: pkg_json.dev_dependencies.clone().unwrap_or_default(),
- config: pkg_json.clone(),
- fmt_options: None,
- }))
- } else {
- Ok(None)
- }
+ fn contents(&self) -> String {
+ self.cst.to_string()
}
fn add(&mut self, selected: SelectedPackage, dev: bool) {
- let (name, version) = package_json_dependency_entry(selected);
- if dev {
- self.dependencies.swap_remove(&name);
- self.dev_dependencies.insert(name, version);
- } else {
- self.dev_dependencies.swap_remove(&name);
- self.dependencies.insert(name, version);
+ fn insert_index(object: &CstObject, searching_name: &str) -> usize {
+ object
+ .properties()
+ .into_iter()
+ .take_while(|prop| {
+ let prop_name =
+ prop.name().and_then(|name| name.decoded_value().ok());
+ match prop_name {
+ Some(current_name) => {
+ searching_name.cmp(&current_name) == std::cmp::Ordering::Greater
+ }
+ None => true,
+ }
+ })
+ .count()
}
- }
- fn remove(&mut self, package: &str) -> bool {
- let in_deps = self.dependencies.shift_remove(package).is_some();
- let in_dev_deps = self.dev_dependencies.shift_remove(package).is_some();
- in_deps || in_dev_deps
- }
-
- fn take_import_fields(
- &mut self,
- ) -> Vec<(&'static str, IndexMap<String, String>)> {
- vec![
- ("dependencies", std::mem::take(&mut self.dependencies)),
- (
- "devDependencies",
- std::mem::take(&mut self.dev_dependencies),
- ),
- ]
- }
-}
-
-struct NpmConfig {
- config: Arc<deno_node::PackageJson>,
- fmt_options: Option<FmtOptionsConfig>,
- dependencies: IndexMap<String, String>,
- dev_dependencies: IndexMap<String, String>,
-}
-
-enum DenoOrPackageJson {
- Deno(DenoConfig),
- Npm(NpmConfig),
-}
-
-impl From<DenoConfig> for DenoOrPackageJson {
- fn from(config: DenoConfig) -> Self {
- Self::Deno(config)
- }
-}
-
-impl From<NpmConfig> for DenoOrPackageJson {
- fn from(config: NpmConfig) -> Self {
- Self::Npm(config)
- }
-}
+ match self.kind {
+ ConfigKind::DenoJson => {
+ let imports = self.root_object.object_value_or_set("imports");
+ let value =
+ format!("{}@{}", selected.package_name, selected.version_req);
+ if let Some(prop) = imports.get(&selected.import_name) {
+ prop.set_value(json!(value));
+ } else {
+ let index = insert_index(&imports, &selected.import_name);
+ imports.insert(index, &selected.import_name, json!(value));
+ }
+ }
+ ConfigKind::PackageJson => {
+ let deps_prop = self.root_object.get("dependencies");
+ let dev_deps_prop = self.root_object.get("devDependencies");
+
+ let dependencies = if dev {
+ self
+ .root_object
+ .object_value("devDependencies")
+ .unwrap_or_else(|| {
+ let index = deps_prop
+ .as_ref()
+ .map(|p| p.property_index() + 1)
+ .unwrap_or_else(|| self.root_object.properties().len());
+ self
+ .root_object
+ .insert(index, "devDependencies", json!({}))
+ .object_value_or_set()
+ })
+ } else {
+ self
+ .root_object
+ .object_value("dependencies")
+ .unwrap_or_else(|| {
+ let index = dev_deps_prop
+ .as_ref()
+ .map(|p| p.property_index())
+ .unwrap_or_else(|| self.root_object.properties().len());
+ self
+ .root_object
+ .insert(index, "dependencies", json!({}))
+ .object_value_or_set()
+ })
+ };
+ let other_dependencies = if dev {
+ deps_prop.and_then(|p| p.value().and_then(|v| v.as_object()))
+ } else {
+ dev_deps_prop.and_then(|p| p.value().and_then(|v| v.as_object()))
+ };
-/// Wrapper around `jsonc_parser::ast::Object` that can be stored in a `Yoke`
-#[derive(yoke::Yokeable)]
-struct JsoncObjectView<'a>(jsonc_parser::ast::Object<'a>);
+ let (alias, value) = package_json_dependency_entry(selected);
-struct ConfigUpdater {
- config: DenoOrPackageJson,
- // the `Yoke` is so we can carry the parsed object (which borrows from
- // the source) along with the source itself
- ast: Yoke<JsoncObjectView<'static>, String>,
- path: PathBuf,
- modified: bool,
-}
+ if let Some(other) = other_dependencies {
+ if let Some(prop) = other.get(&alias) {
+ remove_prop_and_maybe_parent_prop(prop);
+ }
+ }
-impl ConfigUpdater {
- fn obj(&self) -> &jsonc_parser::ast::Object<'_> {
- &self.ast.get().0
- }
- fn contents(&self) -> &str {
- self.ast.backing_cart()
- }
- async fn maybe_new(
- config: Option<impl Into<DenoOrPackageJson>>,
- ) -> Result<Option<Self>, AnyError> {
- if let Some(config) = config {
- Ok(Some(Self::new(config.into()).await?))
- } else {
- Ok(None)
- }
- }
- async fn new(config: DenoOrPackageJson) -> Result<Self, AnyError> {
- let specifier = config.specifier();
- if specifier.scheme() != "file" {
- bail!("Can't update a remote configuration file");
- }
- let config_file_path = specifier.to_file_path().map_err(|_| {
- anyhow!("Specifier {specifier:?} is an invalid file path")
- })?;
- let config_file_contents = {
- let contents = tokio::fs::read_to_string(&config_file_path)
- .await
- .with_context(|| {
- format!("Reading config file at: {}", config_file_path.display())
- })?;
- if contents.trim().is_empty() {
- "{}\n".into()
- } else {
- contents
+ if let Some(prop) = dependencies.get(&alias) {
+ prop.set_value(json!(value));
+ } else {
+ let index = insert_index(&dependencies, &alias);
+ dependencies.insert(index, &alias, json!(value));
+ }
}
- };
- let ast = Yoke::try_attach_to_cart(config_file_contents, |contents| {
- let ast = jsonc_parser::parse_to_ast(
- contents,
- &Default::default(),
- &Default::default(),
- )
- .with_context(|| {
- format!("Failed to parse config file at {}", specifier)
- })?;
- let obj = match ast.value {
- Some(Value::Object(obj)) => obj,
- _ => bail!(
- "Failed to update config file at {}, expected an object",
- specifier
- ),
- };
- Ok(JsoncObjectView(obj))
- })?;
- Ok(Self {
- config,
- ast,
- path: config_file_path,
- modified: false,
- })
- }
-
- fn add(&mut self, selected: SelectedPackage, dev: bool) {
- match &mut self.config {
- DenoOrPackageJson::Deno(deno) => deno.add(selected),
- DenoOrPackageJson::Npm(npm) => npm.add(selected, dev),
}
+
self.modified = true;
}
fn remove(&mut self, package: &str) -> bool {
- let removed = match &mut self.config {
- DenoOrPackageJson::Deno(deno) => deno.remove(package),
- DenoOrPackageJson::Npm(npm) => npm.remove(package),
+ let removed = match self.kind {
+ ConfigKind::DenoJson => {
+ if let Some(prop) = self
+ .root_object
+ .object_value("imports")
+ .and_then(|i| i.get(package))
+ {
+ remove_prop_and_maybe_parent_prop(prop);
+ true
+ } else {
+ false
+ }
+ }
+ ConfigKind::PackageJson => {
+ let deps = [
+ self
+ .root_object
+ .object_value("dependencies")
+ .and_then(|deps| deps.get(package)),
+ self
+ .root_object
+ .object_value("devDependencies")
+ .and_then(|deps| deps.get(package)),
+ ];
+ let removed = deps.iter().any(|d| d.is_some());
+ for dep in deps.into_iter().flatten() {
+ remove_prop_and_maybe_parent_prop(dep);
+ }
+ removed
+ }
};
if removed {
self.modified = true;
@@ -275,76 +213,28 @@ impl ConfigUpdater {
removed
}
- async fn commit(mut self) -> Result<(), AnyError> {
+ fn commit(&self) -> Result<(), AnyError> {
if !self.modified {
return Ok(());
}
- let import_fields = self.config.take_import_fields();
-
- let fmt_config_options = self.config.fmt_options();
-
- let new_text = update_config_file_content(
- self.obj(),
- self.contents(),
- fmt_config_options,
- import_fields.into_iter().map(|(k, v)| {
- (
- k,
- if v.is_empty() {
- None
- } else {
- Some(generate_imports(v.into_iter().collect()))
- },
- )
- }),
- self.config.file_name(),
- );
-
- tokio::fs::write(&self.path, new_text).await?;
+ let new_text = self.contents();
+ std::fs::write(&self.path, new_text).with_context(|| {
+ format!("failed writing to '{}'", self.path.display())
+ })?;
Ok(())
}
}
-impl DenoOrPackageJson {
- fn specifier(&self) -> Cow<ModuleSpecifier> {
- match self {
- Self::Deno(d, ..) => Cow::Borrowed(&d.config.specifier),
- Self::Npm(n, ..) => Cow::Owned(n.config.specifier()),
- }
- }
-
- fn fmt_options(&self) -> FmtOptionsConfig {
- match self {
- DenoOrPackageJson::Deno(deno, ..) => deno
- .config
- .to_fmt_config()
- .ok()
- .map(|f| f.options)
- .unwrap_or_default(),
- DenoOrPackageJson::Npm(config) => {
- config.fmt_options.clone().unwrap_or_default()
- }
- }
- }
-
- fn take_import_fields(
- &mut self,
- ) -> Vec<(&'static str, IndexMap<String, String>)> {
- match self {
- Self::Deno(d) => d.take_import_fields(),
- Self::Npm(n) => n.take_import_fields(),
- }
- }
-
- fn file_name(&self) -> &'static str {
- match self {
- DenoOrPackageJson::Deno(config) => match config.format {
- DenoConfigFormat::Json => "deno.json",
- DenoConfigFormat::Jsonc => "deno.jsonc",
- },
- DenoOrPackageJson::Npm(..) => "package.json",
- }
+fn remove_prop_and_maybe_parent_prop(prop: CstObjectProp) {
+ let parent = prop.parent().unwrap().as_object().unwrap();
+ prop.remove();
+ if parent.properties().is_empty() {
+ let parent_property = parent.parent().unwrap();
+ let root_object = parent_property.parent().unwrap().as_object().unwrap();
+ // remove the property
+ parent_property.remove();
+ root_object.ensure_multiline();
}
}
@@ -363,7 +253,14 @@ fn package_json_dependency_entry(
selected: SelectedPackage,
) -> (String, String) {
if let Some(npm_package) = selected.package_name.strip_prefix("npm:") {
- (npm_package.into(), selected.version_req)
+ if selected.import_name == npm_package {
+ (npm_package.into(), selected.version_req)
+ } else {
+ (
+ selected.import_name,
+ format!("npm:{}@{}", npm_package, selected.version_req),
+ )
+ }
} else if let Some(jsr_package) = selected.package_name.strip_prefix("jsr:") {
let jsr_package = jsr_package.strip_prefix('@').unwrap_or(jsr_package);
let scope_replaced = jsr_package.replace('/', "__");
@@ -393,21 +290,45 @@ impl std::fmt::Display for AddCommandName {
fn load_configs(
flags: &Arc<Flags>,
-) -> Result<(CliFactory, Option<NpmConfig>, Option<DenoConfig>), AnyError> {
+ has_jsr_specifiers: impl FnOnce() -> bool,
+) -> Result<(CliFactory, Option<ConfigUpdater>, Option<ConfigUpdater>), AnyError>
+{
let cli_factory = CliFactory::from_flags(flags.clone());
let options = cli_factory.cli_options()?;
- let npm_config = NpmConfig::from_options(options)?;
- let (cli_factory, deno_config) = match DenoConfig::from_options(options)? {
+ let start_dir = &options.start_dir;
+ let npm_config = match start_dir.maybe_pkg_json() {
+ Some(pkg_json) => Some(ConfigUpdater::new(
+ ConfigKind::PackageJson,
+ pkg_json.path.clone(),
+ )?),
+ None => None,
+ };
+ let deno_config = match start_dir.maybe_deno_json() {
+ Some(deno_json) => Some(ConfigUpdater::new(
+ ConfigKind::DenoJson,
+ url_to_file_path(&deno_json.specifier)?,
+ )?),
+ None => None,
+ };
+
+ let (cli_factory, deno_config) = match deno_config {
Some(config) => (cli_factory, Some(config)),
- None if npm_config.is_some() => (cli_factory, None),
- None => {
+ None if npm_config.is_some() && !has_jsr_specifiers() => {
+ (cli_factory, None)
+ }
+ _ => {
let factory = create_deno_json(flags, options)?;
let options = factory.cli_options()?.clone();
+ let deno_json = options
+ .start_dir
+ .maybe_deno_json()
+ .expect("Just created deno.json");
(
factory,
- Some(
- DenoConfig::from_options(&options)?.expect("Just created deno.json"),
- ),
+ Some(ConfigUpdater::new(
+ ConfigKind::DenoJson,
+ url_to_file_path(&deno_json.specifier)?,
+ )?),
)
}
};
@@ -415,18 +336,26 @@ fn load_configs(
Ok((cli_factory, npm_config, deno_config))
}
+fn path_distance(a: &Path, b: &Path) -> usize {
+ let diff = pathdiff::diff_paths(a, b);
+ let Some(diff) = diff else {
+ return usize::MAX;
+ };
+ diff.components().count()
+}
+
pub async fn add(
flags: Arc<Flags>,
add_flags: AddFlags,
cmd_name: AddCommandName,
) -> Result<(), AnyError> {
- let (cli_factory, npm_config, deno_config) = load_configs(&flags)?;
- let mut npm_config = ConfigUpdater::maybe_new(npm_config).await?;
- let mut deno_config = ConfigUpdater::maybe_new(deno_config).await?;
+ let (cli_factory, mut npm_config, mut deno_config) =
+ load_configs(&flags, || {
+ add_flags.packages.iter().any(|s| s.starts_with("jsr:"))
+ })?;
if let Some(deno) = &deno_config {
- let specifier = deno.config.specifier();
- if deno.obj().get_string("importMap").is_some() {
+ if deno.obj().get("importMap").is_some() {
bail!(
concat!(
"`deno {}` is not supported when configuration file contains an \"importMap\" field. ",
@@ -434,11 +363,26 @@ pub async fn add(
" at {}",
),
cmd_name,
- specifier
+ deno.display_path(),
);
}
}
+ let start_dir = cli_factory.cli_options()?.start_dir.dir_path();
+
+  // Only prefer to add npm deps to `package.json` if there isn't a closer deno.json.
+  // Example: if deno.json is in the CWD and package.json is in the parent, we should
+  // add npm deps to deno.json, since it's closer.
+ let prefer_npm_config = match (npm_config.as_ref(), deno_config.as_ref()) {
+ (Some(npm), Some(deno)) => {
+ let npm_distance = path_distance(&npm.path, &start_dir);
+ let deno_distance = path_distance(&deno.path, &start_dir);
+ npm_distance <= deno_distance
+ }
+ (Some(_), None) => true,
+ (None, _) => false,
+ };
+
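
A small sketch of how those distances come out for the situation described in the comment above (paths are invented; `pathdiff::diff_paths` is the same call the helper above makes):

use std::path::Path;

// Component count of the relative path from `b` to `a`, as in path_distance above.
fn path_distance(a: &Path, b: &Path) -> usize {
    pathdiff::diff_paths(a, b)
        .map(|diff| diff.components().count())
        .unwrap_or(usize::MAX)
}

fn main() {
    let start_dir = Path::new("/repo/app");
    let deno = path_distance(Path::new("/repo/app/deno.json"), start_dir); // "deno.json" -> 1
    let npm = path_distance(Path::new("/repo/package.json"), start_dir);   // "../package.json" -> 2
    // npm_distance <= deno_distance is false, so npm deps get added to deno.json here.
    assert!(npm > deno);
}
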
let http_client = cli_factory.http_client_provider();
let deps_http_cache = cli_factory.global_http_cache()?;
let mut deps_file_fetcher = FileFetcher::new(
@@ -449,16 +393,20 @@ pub async fn add(
Default::default(),
None,
);
+
+ let npmrc = cli_factory.cli_options().unwrap().npmrc();
+
deps_file_fetcher.set_download_log_level(log::Level::Trace);
let deps_file_fetcher = Arc::new(deps_file_fetcher);
let jsr_resolver = Arc::new(JsrFetchResolver::new(deps_file_fetcher.clone()));
- let npm_resolver = Arc::new(NpmFetchResolver::new(deps_file_fetcher));
+ let npm_resolver =
+ Arc::new(NpmFetchResolver::new(deps_file_fetcher, npmrc.clone()));
let mut selected_packages = Vec::with_capacity(add_flags.packages.len());
let mut package_reqs = Vec::with_capacity(add_flags.packages.len());
for entry_text in add_flags.packages.iter() {
- let req = AddPackageReq::parse(entry_text).with_context(|| {
+ let req = AddRmPackageReq::parse(entry_text).with_context(|| {
format!("Failed to parse package required: {}", entry_text)
})?;
@@ -509,15 +457,32 @@ pub async fn add(
match package_and_version {
PackageAndVersion::NotFound {
package: package_name,
- found_npm_package,
+ help,
package_req,
- } => {
- if found_npm_package {
- bail!("{} was not found, but a matching npm package exists. Did you mean `{}`?", crate::colors::red(package_name), crate::colors::yellow(format!("deno {cmd_name} npm:{package_req}")));
- } else {
- bail!("{} was not found.", crate::colors::red(package_name));
+ } => match help {
+ Some(NotFoundHelp::NpmPackage) => {
+ bail!(
+ "{} was not found, but a matching npm package exists. Did you mean `{}`?",
+ crate::colors::red(package_name),
+ crate::colors::yellow(format!("deno {cmd_name} npm:{package_req}"))
+ );
}
- }
+ Some(NotFoundHelp::JsrPackage) => {
+ bail!(
+ "{} was not found, but a matching jsr package exists. Did you mean `{}`?",
+ crate::colors::red(package_name),
+ crate::colors::yellow(format!("deno {cmd_name} jsr:{package_req}"))
+ )
+ }
+ Some(NotFoundHelp::PreReleaseVersion(version)) => {
+ bail!(
+ "{} has only pre-release versions available. Try specifying a version: `{}`",
+ crate::colors::red(&package_name),
+ crate::colors::yellow(format!("deno {cmd_name} {package_name}@^{version}"))
+ )
+ }
+ None => bail!("{} was not found.", crate::colors::red(package_name)),
+ },
PackageAndVersion::Selected(selected) => {
selected_packages.push(selected);
}
@@ -533,7 +498,7 @@ pub async fn add(
selected_package.selected_version
);
- if selected_package.package_name.starts_with("npm:") {
+ if selected_package.package_name.starts_with("npm:") && prefer_npm_config {
if let Some(npm) = &mut npm_config {
npm.add(selected_package, dev);
} else {
@@ -546,18 +511,11 @@ pub async fn add(
}
}
- let mut commit_futures = vec![];
if let Some(npm) = npm_config {
- commit_futures.push(npm.commit());
+ npm.commit()?;
}
if let Some(deno) = deno_config {
- commit_futures.push(deno.commit());
- }
- let commit_futures =
- deno_core::futures::future::join_all(commit_futures).await;
-
- for result in commit_futures {
- result.context("Failed to update configuration file")?;
+ deno.commit()?;
}
npm_install_after_modification(flags, Some(jsr_resolver)).await?;
@@ -572,87 +530,161 @@ struct SelectedPackage {
selected_version: String,
}
+enum NotFoundHelp {
+ NpmPackage,
+ JsrPackage,
+ PreReleaseVersion(Version),
+}
+
enum PackageAndVersion {
NotFound {
package: String,
- found_npm_package: bool,
package_req: PackageReq,
+ help: Option<NotFoundHelp>,
},
Selected(SelectedPackage),
}
+fn best_version<'a>(
+ versions: impl Iterator<Item = &'a Version>,
+) -> Option<&'a Version> {
+ let mut maybe_best_version: Option<&Version> = None;
+ for version in versions {
+ let is_best_version = maybe_best_version
+ .as_ref()
+ .map(|best_version| (*best_version).cmp(version).is_lt())
+ .unwrap_or(true);
+ if is_best_version {
+ maybe_best_version = Some(version);
+ }
+ }
+ maybe_best_version
+}
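
Since versions are totally ordered, `best_version` behaves like `Iterator::max` over the surviving candidates. A tiny illustration, with the `semver` crate standing in for `deno_semver` (version strings invented):

use semver::Version;

fn main() {
    // Pre-releases still compare; the highest one is what feeds the
    // "has only pre-release versions" hint shown earlier.
    let versions: Vec<Version> = ["0.9.0", "1.0.0-rc.1", "1.0.0-rc.2"]
        .iter()
        .map(|v| Version::parse(v).unwrap())
        .collect();
    let best = versions.iter().max().unwrap();
    assert_eq!(best.to_string(), "1.0.0-rc.2");
}
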
+
+trait PackageInfoProvider {
+ const SPECIFIER_PREFIX: &str;
+ /// The help to return if a package is found by this provider
+ const HELP: NotFoundHelp;
+ async fn req_to_nv(&self, req: &PackageReq) -> Option<PackageNv>;
+ async fn latest_version<'a>(&self, req: &PackageReq) -> Option<Version>;
+}
+
+impl PackageInfoProvider for Arc<JsrFetchResolver> {
+ const HELP: NotFoundHelp = NotFoundHelp::JsrPackage;
+ const SPECIFIER_PREFIX: &str = "jsr";
+ async fn req_to_nv(&self, req: &PackageReq) -> Option<PackageNv> {
+ (**self).req_to_nv(req).await
+ }
+
+ async fn latest_version<'a>(&self, req: &PackageReq) -> Option<Version> {
+ let info = self.package_info(&req.name).await?;
+ best_version(
+ info
+ .versions
+ .iter()
+ .filter(|(_, version_info)| !version_info.yanked)
+ .map(|(version, _)| version),
+ )
+ .cloned()
+ }
+}
+
+impl PackageInfoProvider for Arc<NpmFetchResolver> {
+ const HELP: NotFoundHelp = NotFoundHelp::NpmPackage;
+ const SPECIFIER_PREFIX: &str = "npm";
+ async fn req_to_nv(&self, req: &PackageReq) -> Option<PackageNv> {
+ (**self).req_to_nv(req).await
+ }
+
+ async fn latest_version<'a>(&self, req: &PackageReq) -> Option<Version> {
+ let info = self.package_info(&req.name).await?;
+ best_version(info.versions.keys()).cloned()
+ }
+}
+
async fn find_package_and_select_version_for_req(
jsr_resolver: Arc<JsrFetchResolver>,
npm_resolver: Arc<NpmFetchResolver>,
- add_package_req: AddPackageReq,
+ add_package_req: AddRmPackageReq,
) -> Result<PackageAndVersion, AnyError> {
- match add_package_req.value {
- AddPackageReqValue::Jsr(req) => {
- let jsr_prefixed_name = format!("jsr:{}", &req.name);
- let Some(nv) = jsr_resolver.req_to_nv(&req).await else {
- if npm_resolver.req_to_nv(&req).await.is_some() {
+ async fn select<T: PackageInfoProvider, S: PackageInfoProvider>(
+ main_resolver: T,
+ fallback_resolver: S,
+ add_package_req: AddRmPackageReq,
+ ) -> Result<PackageAndVersion, AnyError> {
+ let req = match &add_package_req.value {
+ AddRmPackageReqValue::Jsr(req) => req,
+ AddRmPackageReqValue::Npm(req) => req,
+ };
+ let prefixed_name = format!("{}:{}", T::SPECIFIER_PREFIX, req.name);
+ let help_if_found_in_fallback = S::HELP;
+ let Some(nv) = main_resolver.req_to_nv(req).await else {
+ if fallback_resolver.req_to_nv(req).await.is_some() {
+ // it's in the other registry
+ return Ok(PackageAndVersion::NotFound {
+ package: prefixed_name,
+ help: Some(help_if_found_in_fallback),
+ package_req: req.clone(),
+ });
+ }
+ if req.version_req.version_text() == "*" {
+ if let Some(pre_release_version) =
+ main_resolver.latest_version(req).await
+ {
return Ok(PackageAndVersion::NotFound {
- package: jsr_prefixed_name,
- found_npm_package: true,
- package_req: req,
+ package: prefixed_name,
+ package_req: req.clone(),
+ help: Some(NotFoundHelp::PreReleaseVersion(
+ pre_release_version.clone(),
+ )),
});
}
+ }
- return Ok(PackageAndVersion::NotFound {
- package: jsr_prefixed_name,
- found_npm_package: false,
- package_req: req,
- });
- };
- let range_symbol = if req.version_req.version_text().starts_with('~') {
- '~'
- } else {
- '^'
- };
- Ok(PackageAndVersion::Selected(SelectedPackage {
- import_name: add_package_req.alias,
- package_name: jsr_prefixed_name,
- version_req: format!("{}{}", range_symbol, &nv.version),
- selected_version: nv.version.to_string(),
- }))
+ return Ok(PackageAndVersion::NotFound {
+ package: prefixed_name,
+ help: None,
+ package_req: req.clone(),
+ });
+ };
+ let range_symbol = if req.version_req.version_text().starts_with('~') {
+ "~"
+ } else if req.version_req.version_text() == nv.version.to_string() {
+ ""
+ } else {
+ "^"
+ };
+ Ok(PackageAndVersion::Selected(SelectedPackage {
+ import_name: add_package_req.alias,
+ package_name: prefixed_name,
+ version_req: format!("{}{}", range_symbol, &nv.version),
+ selected_version: nv.version.to_string(),
+ }))
+ }
+
+ match &add_package_req.value {
+ AddRmPackageReqValue::Jsr(_) => {
+ select(jsr_resolver, npm_resolver, add_package_req).await
}
- AddPackageReqValue::Npm(req) => {
- let npm_prefixed_name = format!("npm:{}", &req.name);
- let Some(nv) = npm_resolver.req_to_nv(&req).await else {
- return Ok(PackageAndVersion::NotFound {
- package: npm_prefixed_name,
- found_npm_package: false,
- package_req: req,
- });
- };
- let range_symbol = if req.version_req.version_text().starts_with('~') {
- '~'
- } else {
- '^'
- };
- Ok(PackageAndVersion::Selected(SelectedPackage {
- import_name: add_package_req.alias,
- package_name: npm_prefixed_name,
- version_req: format!("{}{}", range_symbol, &nv.version),
- selected_version: nv.version.to_string(),
- }))
+ AddRmPackageReqValue::Npm(_) => {
+ select(npm_resolver, jsr_resolver, add_package_req).await
}
}
}
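
The requirement that gets written out is just the resolved version with a prefix derived from what the user typed: `~` requests keep `~`, exact requests stay pinned, everything else gets `^`. A sketch of only that choice (requested texts and versions invented):

// Mirrors the range_symbol selection above, outside of the resolver plumbing.
fn version_req_for(requested_text: &str, resolved: &str) -> String {
    let range_symbol = if requested_text.starts_with('~') {
        "~"
    } else if requested_text == resolved {
        ""
    } else {
        "^"
    };
    format!("{range_symbol}{resolved}")
}

fn main() {
    assert_eq!(version_req_for("*", "5.3.0"), "^5.3.0");     // no version given
    assert_eq!(version_req_for("~5.3", "5.3.0"), "~5.3.0");  // `deno add npm:pkg@~5.3`
    assert_eq!(version_req_for("5.3.0", "5.3.0"), "5.3.0");  // exact pin stays exact
}
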
#[derive(Debug, PartialEq, Eq)]
-enum AddPackageReqValue {
+enum AddRmPackageReqValue {
Jsr(PackageReq),
Npm(PackageReq),
}
#[derive(Debug, PartialEq, Eq)]
-struct AddPackageReq {
+pub struct AddRmPackageReq {
alias: String,
- value: AddPackageReqValue,
+ value: AddRmPackageReqValue,
}
-impl AddPackageReq {
+impl AddRmPackageReq {
pub fn parse(entry_text: &str) -> Result<Result<Self, PackageReq>, AnyError> {
enum Prefix {
Jsr,
@@ -707,9 +739,9 @@ impl AddPackageReq {
let req_ref =
JsrPackageReqReference::from_str(&format!("jsr:{}", entry_text))?;
let package_req = req_ref.into_inner().req;
- Ok(Ok(AddPackageReq {
+ Ok(Ok(AddRmPackageReq {
alias: maybe_alias.unwrap_or_else(|| package_req.name.to_string()),
- value: AddPackageReqValue::Jsr(package_req),
+ value: AddRmPackageReqValue::Jsr(package_req),
}))
}
Prefix::Npm => {
@@ -727,49 +759,48 @@ impl AddPackageReq {
deno_semver::RangeSetOrTag::Tag("latest".into()),
);
}
- Ok(Ok(AddPackageReq {
+ Ok(Ok(AddRmPackageReq {
alias: maybe_alias.unwrap_or_else(|| package_req.name.to_string()),
- value: AddPackageReqValue::Npm(package_req),
+ value: AddRmPackageReqValue::Npm(package_req),
}))
}
}
}
}
-fn generate_imports(mut packages_to_version: Vec<(String, String)>) -> String {
- packages_to_version.sort_by(|(k1, _), (k2, _)| k1.cmp(k2));
- let mut contents = vec![];
- let len = packages_to_version.len();
- for (index, (package, version)) in packages_to_version.iter().enumerate() {
- // TODO(bartlomieju): fix it, once we start support specifying version on the cli
- contents.push(format!("\"{}\": \"{}\"", package, version));
- if index != len - 1 {
- contents.push(",".to_string());
- }
- }
- contents.join("\n")
-}
-
pub async fn remove(
flags: Arc<Flags>,
remove_flags: RemoveFlags,
) -> Result<(), AnyError> {
- let (_, npm_config, deno_config) = load_configs(&flags)?;
+ let (_, npm_config, deno_config) = load_configs(&flags, || false)?;
- let mut configs = [
- ConfigUpdater::maybe_new(npm_config).await?,
- ConfigUpdater::maybe_new(deno_config).await?,
- ];
+ let mut configs = [npm_config, deno_config];
let mut removed_packages = vec![];
for package in &remove_flags.packages {
- let mut removed = false;
+ let req = AddRmPackageReq::parse(package).with_context(|| {
+ format!("Failed to parse package required: {}", package)
+ })?;
+ let mut parsed_pkg_name = None;
for config in configs.iter_mut().flatten() {
- removed |= config.remove(package);
+ match &req {
+ Ok(rm_pkg) => {
+ if config.remove(&rm_pkg.alias) && parsed_pkg_name.is_none() {
+ parsed_pkg_name = Some(rm_pkg.alias.clone());
+ }
+ }
+ Err(pkg) => {
+ // An alias or a package name without registry/version
+ // constraints. Try to remove the package anyway.
+ if config.remove(&pkg.name) && parsed_pkg_name.is_none() {
+ parsed_pkg_name = Some(pkg.name.clone());
+ }
+ }
+ }
}
- if removed {
- removed_packages.push(package.clone());
+ if let Some(pkg) = parsed_pkg_name {
+ removed_packages.push(pkg);
}
}
@@ -780,7 +811,7 @@ pub async fn remove(
log::info!("Removed {}", crate::colors::green(package));
}
for config in configs.into_iter().flatten() {
- config.commit().await?;
+ config.commit()?;
}
npm_install_after_modification(flags, None).await?;
@@ -807,88 +838,11 @@ async fn npm_install_after_modification(
// npm install
cache_deps::cache_top_level_deps(&cli_factory, jsr_resolver).await?;
- Ok(())
-}
-
-fn update_config_file_content<
- I: IntoIterator<Item = (&'static str, Option<String>)>,
->(
- obj: &jsonc_parser::ast::Object,
- config_file_contents: &str,
- fmt_options: FmtOptionsConfig,
- entries: I,
- file_name: &str,
-) -> String {
- let mut text_changes = vec![];
- for (key, value) in entries {
- match obj.properties.iter().enumerate().find_map(|(idx, k)| {
- if k.name.as_str() == key {
- Some((idx, k))
- } else {
- None
- }
- }) {
- Some((
- idx,
- ObjectProp {
- value: Value::Object(lit),
- range,
- ..
- },
- )) => {
- if let Some(value) = value {
- text_changes.push(TextChange {
- range: (lit.range.start + 1)..(lit.range.end - 1),
- new_text: value,
- })
- } else {
- text_changes.push(TextChange {
- // remove field entirely, making sure to
- // remove the comma if it's not the last field
- range: range.start..(if idx == obj.properties.len() - 1 {
- range.end
- } else {
- obj.properties[idx + 1].range.start
- }),
- new_text: "".to_string(),
- })
- }
- }
-
- // need to add field
- None => {
- if let Some(value) = value {
- let insert_position = obj.range.end - 1;
- text_changes.push(TextChange {
- range: insert_position..insert_position,
- // NOTE(bartlomieju): adding `\n` here to force the formatter to always
- // produce a config file that is multiline, like so:
- // ```
- // {
- // "imports": {
- // "<package_name>": "<registry>:<package_name>@<semver>"
- // }
- // }
- new_text: format!("\"{key}\": {{\n {value} }}"),
- })
- }
- }
- // we verified the shape of `imports`/`dependencies` above
- Some(_) => unreachable!(),
- }
+ if let Some(lockfile) = cli_factory.cli_options()?.maybe_lockfile() {
+ lockfile.write_if_changed()?;
}
- let new_text =
- deno_ast::apply_text_changes(config_file_contents, text_changes);
-
- crate::tools::fmt::format_json(
- &PathBuf::from(file_name),
- &new_text,
- &fmt_options,
- )
- .ok()
- .map(|formatted_text| formatted_text.unwrap_or_else(|| new_text.clone()))
- .unwrap_or(new_text)
+ Ok(())
}
#[cfg(test)]
@@ -898,48 +852,52 @@ mod test {
#[test]
fn test_parse_add_package_req() {
assert_eq!(
- AddPackageReq::parse("jsr:foo").unwrap().unwrap(),
- AddPackageReq {
+ AddRmPackageReq::parse("jsr:foo").unwrap().unwrap(),
+ AddRmPackageReq {
alias: "foo".to_string(),
- value: AddPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap())
+ value: AddRmPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap())
}
);
assert_eq!(
- AddPackageReq::parse("alias@jsr:foo").unwrap().unwrap(),
- AddPackageReq {
+ AddRmPackageReq::parse("alias@jsr:foo").unwrap().unwrap(),
+ AddRmPackageReq {
alias: "alias".to_string(),
- value: AddPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap())
+ value: AddRmPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap())
}
);
assert_eq!(
- AddPackageReq::parse("@alias/pkg@npm:foo").unwrap().unwrap(),
- AddPackageReq {
+ AddRmPackageReq::parse("@alias/pkg@npm:foo")
+ .unwrap()
+ .unwrap(),
+ AddRmPackageReq {
alias: "@alias/pkg".to_string(),
- value: AddPackageReqValue::Npm(
+ value: AddRmPackageReqValue::Npm(
PackageReq::from_str("foo@latest").unwrap()
)
}
);
assert_eq!(
- AddPackageReq::parse("@alias/pkg@jsr:foo").unwrap().unwrap(),
- AddPackageReq {
+ AddRmPackageReq::parse("@alias/pkg@jsr:foo")
+ .unwrap()
+ .unwrap(),
+ AddRmPackageReq {
alias: "@alias/pkg".to_string(),
- value: AddPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap())
+ value: AddRmPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap())
}
);
assert_eq!(
- AddPackageReq::parse("alias@jsr:foo@^1.5.0")
+ AddRmPackageReq::parse("alias@jsr:foo@^1.5.0")
.unwrap()
.unwrap(),
- AddPackageReq {
+ AddRmPackageReq {
alias: "alias".to_string(),
- value: AddPackageReqValue::Jsr(
+ value: AddRmPackageReqValue::Jsr(
PackageReq::from_str("foo@^1.5.0").unwrap()
)
}
);
assert_eq!(
- AddPackageReq::parse("@scope/pkg@tag")
+ AddRmPackageReq::parse("@scope/pkg@tag")
.unwrap()
.unwrap_err()
.to_string(),
diff --git a/cli/tools/registry/pm/cache_deps.rs b/cli/tools/registry/pm/cache_deps.rs
index c8258e600..d3c8da868 100644
--- a/cli/tools/registry/pm/cache_deps.rs
+++ b/cli/tools/registry/pm/cache_deps.rs
@@ -44,7 +44,11 @@ pub async fn cache_top_level_deps(
let mut seen_reqs = std::collections::HashSet::new();
- for entry in import_map.imports().entries() {
+ for entry in import_map.imports().entries().chain(
+ import_map
+ .scopes()
+ .flat_map(|scope| scope.imports.entries()),
+ ) {
let Some(specifier) = entry.value else {
continue;
};
@@ -75,6 +79,13 @@ pub async fn cache_top_level_deps(
if entry.key.ends_with('/') && specifier.as_str().ends_with('/') {
continue;
}
+ if specifier.scheme() == "file" {
+ if let Ok(path) = specifier.to_file_path() {
+ if !path.is_file() {
+ continue;
+ }
+ }
+ }
roots.push(specifier.clone());
}
}
@@ -82,10 +93,6 @@ pub async fn cache_top_level_deps(
while let Some(info_future) = info_futures.next().await {
if let Some((specifier, info)) = info_future {
- if info.export(".").is_some() {
- roots.push(specifier.clone());
- continue;
- }
let exports = info.exports();
for (k, _) in exports {
if let Ok(spec) = specifier.join(k) {
diff --git a/cli/tools/registry/tar.rs b/cli/tools/registry/tar.rs
index aca125e00..6d1801ce6 100644
--- a/cli/tools/registry/tar.rs
+++ b/cli/tools/registry/tar.rs
@@ -120,7 +120,7 @@ fn resolve_content_maybe_unfurling(
| MediaType::Unknown
| MediaType::Json
| MediaType::Wasm
- | MediaType::TsBuildInfo => {
+ | MediaType::Css => {
// not unfurlable data
return Ok(data);
}
diff --git a/cli/tools/repl/session.rs b/cli/tools/repl/session.rs
index 484664dae..8e05c4abb 100644
--- a/cli/tools/repl/session.rs
+++ b/cli/tools/repl/session.rs
@@ -7,7 +7,7 @@ use crate::cdp;
use crate::colors;
use crate::lsp::ReplLanguageServer;
use crate::npm::CliNpmResolver;
-use crate::resolver::CliGraphResolver;
+use crate::resolver::CliResolver;
use crate::tools::test::report_tests;
use crate::tools::test::reporters::PrettyTestReporter;
use crate::tools::test::reporters::TestReporter;
@@ -25,6 +25,7 @@ use deno_ast::swc::visit::noop_visit_type;
use deno_ast::swc::visit::Visit;
use deno_ast::swc::visit::VisitWith;
use deno_ast::ImportsNotUsedAsValues;
+use deno_ast::ModuleKind;
use deno_ast::ModuleSpecifier;
use deno_ast::ParseDiagnosticsError;
use deno_ast::ParsedSource;
@@ -43,12 +44,12 @@ use deno_core::url::Url;
use deno_core::LocalInspectorSession;
use deno_core::PollEventLoopOptions;
use deno_graph::source::ResolutionMode;
-use deno_graph::source::Resolver;
use deno_graph::Position;
use deno_graph::PositionRange;
use deno_graph::SpecifierWithRange;
use deno_runtime::worker::MainWorker;
use deno_semver::npm::NpmPackageReqReference;
+use node_resolver::NodeModuleKind;
use once_cell::sync::Lazy;
use regex::Match;
use regex::Regex;
@@ -179,7 +180,7 @@ struct ReplJsxState {
pub struct ReplSession {
npm_resolver: Arc<dyn CliNpmResolver>,
- resolver: Arc<CliGraphResolver>,
+ resolver: Arc<CliResolver>,
pub worker: MainWorker,
session: LocalInspectorSession,
pub context_id: u64,
@@ -198,7 +199,7 @@ impl ReplSession {
pub async fn initialize(
cli_options: &CliOptions,
npm_resolver: Arc<dyn CliNpmResolver>,
- resolver: Arc<CliGraphResolver>,
+ resolver: Arc<CliResolver>,
mut worker: MainWorker,
main_module: ModuleSpecifier,
test_event_receiver: TestEventReceiver,
@@ -244,7 +245,7 @@ impl ReplSession {
assert_ne!(context_id, 0);
let referrer =
- deno_core::resolve_path("./$deno$repl.ts", cli_options.initial_cwd())
+ deno_core::resolve_path("./$deno$repl.mts", cli_options.initial_cwd())
.unwrap();
let cwd_url =
@@ -641,6 +642,10 @@ impl ReplSession {
jsx_fragment_factory: self.jsx.frag_factory.clone(),
jsx_import_source: self.jsx.import_source.clone(),
var_decl_imports: true,
+ verbatim_module_syntax: false,
+ },
+ &deno_ast::TranspileModuleOptions {
+ module_kind: Some(ModuleKind::Esm),
},
&deno_ast::EmitOptions {
source_map: deno_ast::SourceMapOption::None,
@@ -651,7 +656,6 @@ impl ReplSession {
},
)?
.into_source()
- .into_string()?
.text;
let value = self
@@ -708,7 +712,12 @@ impl ReplSession {
.flat_map(|i| {
self
.resolver
- .resolve(i, &referrer_range, ResolutionMode::Execution)
+ .resolve(
+ i,
+ &referrer_range,
+ NodeModuleKind::Esm,
+ ResolutionMode::Execution,
+ )
.ok()
.or_else(|| ModuleSpecifier::parse(i).ok())
})
diff --git a/cli/tools/run/hmr.rs b/cli/tools/run/hmr.rs
index 6ccf8e344..373c207d6 100644
--- a/cli/tools/run/hmr.rs
+++ b/cli/tools/run/hmr.rs
@@ -1,9 +1,9 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-use crate::cdp;
-use crate::emit::Emitter;
-use crate::util::file_watcher::WatcherCommunicator;
-use crate::util::file_watcher::WatcherRestartMode;
+use std::collections::HashMap;
+use std::path::PathBuf;
+use std::sync::Arc;
+
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::futures::StreamExt;
@@ -12,11 +12,13 @@ use deno_core::serde_json::{self};
use deno_core::url::Url;
use deno_core::LocalInspectorSession;
use deno_terminal::colors;
-use std::collections::HashMap;
-use std::path::PathBuf;
-use std::sync::Arc;
use tokio::select;
+use crate::cdp;
+use crate::emit::Emitter;
+use crate::util::file_watcher::WatcherCommunicator;
+use crate::util::file_watcher::WatcherRestartMode;
+
fn explain(status: &cdp::Status) -> &'static str {
match status {
cdp::Status::Ok => "OK",
@@ -139,7 +141,7 @@ impl crate::worker::HmrRunner for HmrRunner {
};
let source_code = self.emitter.load_and_emit_for_hmr(
- &module_url
+ &module_url,
).await?;
let mut tries = 1;
diff --git a/cli/tools/run/mod.rs b/cli/tools/run/mod.rs
index 152e2650b..8fab544ec 100644
--- a/cli/tools/run/mod.rs
+++ b/cli/tools/run/mod.rs
@@ -30,6 +30,16 @@ To grant permissions, set them before the script argument. For example:
}
}
+fn set_npm_user_agent() {
+ static ONCE: std::sync::Once = std::sync::Once::new();
+ ONCE.call_once(|| {
+ std::env::set_var(
+ crate::npm::NPM_CONFIG_USER_AGENT_ENV_VAR,
+ crate::npm::get_npm_config_user_agent(),
+ );
+ });
+}
+
pub async fn run_script(
mode: WorkerExecutionMode,
flags: Arc<Flags>,
@@ -58,6 +68,10 @@ pub async fn run_script(
let main_module = cli_options.resolve_main_module()?;
+ if main_module.scheme() == "npm" {
+ set_npm_user_agent();
+ }
+
maybe_npm_install(&factory).await?;
let worker_factory = factory.create_cli_main_worker_factory().await?;
@@ -110,7 +124,8 @@ async fn run_with_watch(
!watch_flags.no_clear_screen,
),
WatcherRestartMode::Automatic,
- move |flags, watcher_communicator, _changed_paths| {
+ move |flags, watcher_communicator, changed_paths| {
+ watcher_communicator.show_path_changed(changed_paths.clone());
Ok(async move {
let factory = CliFactory::from_flags_for_watcher(
flags,
@@ -119,6 +134,10 @@ async fn run_with_watch(
let cli_options = factory.cli_options()?;
let main_module = cli_options.resolve_main_module()?;
+ if main_module.scheme() == "npm" {
+ set_npm_user_agent();
+ }
+
maybe_npm_install(&factory).await?;
let _ = watcher_communicator.watch_paths(cli_options.watch_paths());
diff --git a/cli/tools/serve.rs b/cli/tools/serve.rs
index 4ce1cad6f..d7989140a 100644
--- a/cli/tools/serve.rs
+++ b/cli/tools/serve.rs
@@ -44,12 +44,15 @@ pub async fn serve(
maybe_npm_install(&factory).await?;
let worker_factory = factory.create_cli_main_worker_factory().await?;
-
+ let hmr = serve_flags
+ .watch
+ .map(|watch_flags| watch_flags.hmr)
+ .unwrap_or(false);
do_serve(
worker_factory,
main_module.clone(),
serve_flags.worker_count,
- false,
+ hmr,
)
.await
}
@@ -109,8 +112,6 @@ async fn do_serve(
}
}
Ok(exit_code)
-
- // main.await?
}
async fn run_worker(
@@ -119,7 +120,7 @@ async fn run_worker(
main_module: ModuleSpecifier,
hmr: bool,
) -> Result<i32, AnyError> {
- let mut worker = worker_factory
+ let mut worker: crate::worker::CliMainWorker = worker_factory
.create_main_worker(
deno_runtime::WorkerExecutionMode::Serve {
is_main: false,
@@ -150,7 +151,8 @@ async fn serve_with_watch(
!watch_flags.no_clear_screen,
),
WatcherRestartMode::Automatic,
- move |flags, watcher_communicator, _changed_paths| {
+ move |flags, watcher_communicator, changed_paths| {
+ watcher_communicator.show_path_changed(changed_paths.clone());
Ok(async move {
let factory = CliFactory::from_flags_for_watcher(
flags,
diff --git a/cli/tools/task.rs b/cli/tools/task.rs
index 502b09d2c..682dbf814 100644
--- a/cli/tools/task.rs
+++ b/cli/tools/task.rs
@@ -1,14 +1,14 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-use std::borrow::Cow;
use std::collections::HashMap;
use std::collections::HashSet;
+use std::num::NonZeroUsize;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
-use deno_config::deno_json::Task;
+use deno_config::workspace::TaskDefinition;
use deno_config::workspace::TaskOrScript;
use deno_config::workspace::WorkspaceDirectory;
use deno_config::workspace::WorkspaceTasksConfig;
@@ -16,8 +16,15 @@ use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
+use deno_core::futures::future::LocalBoxFuture;
+use deno_core::futures::stream::futures_unordered;
+use deno_core::futures::FutureExt;
+use deno_core::futures::StreamExt;
+use deno_core::url::Url;
use deno_path_util::normalize_path;
+use deno_runtime::deno_node::NodeResolver;
use deno_task_shell::ShellCommand;
+use indexmap::IndexMap;
use crate::args::CliOptions;
use crate::args::Flags;
@@ -48,155 +55,376 @@ pub async fn execute_script(
v == "1"
})
.unwrap_or(false);
- let tasks_config = start_dir.to_tasks_config()?;
- let tasks_config = if force_use_pkg_json {
- tasks_config.with_only_pkg_json()
- } else {
- tasks_config
- };
+ let mut tasks_config = start_dir.to_tasks_config()?;
+ if force_use_pkg_json {
+ tasks_config = tasks_config.with_only_pkg_json()
+ }
- let task_name = match &task_flags.task {
- Some(task) => task,
- None => {
- print_available_tasks(
- &mut std::io::stdout(),
- &cli_options.start_dir,
- &tasks_config,
- )?;
- return Ok(0);
- }
+ let Some(task_name) = &task_flags.task else {
+ print_available_tasks(
+ &mut std::io::stdout(),
+ &cli_options.start_dir,
+ &tasks_config,
+ )?;
+ return Ok(0);
};
let npm_resolver = factory.npm_resolver().await?;
let node_resolver = factory.node_resolver().await?;
let env_vars = task_runner::real_env_vars();
- match tasks_config.task(task_name) {
- Some((dir_url, task_or_script)) => match task_or_script {
- TaskOrScript::Task(_tasks, script) => {
- let cwd = match task_flags.cwd {
- Some(path) => canonicalize_path(&PathBuf::from(path))
- .context("failed canonicalizing --cwd")?,
- None => normalize_path(dir_url.to_file_path().unwrap()),
- };
-
- let custom_commands = task_runner::resolve_custom_commands(
- npm_resolver.as_ref(),
- node_resolver,
- )?;
- run_task(RunTaskOptions {
- task_name,
- script,
- cwd: &cwd,
- env_vars,
- custom_commands,
- npm_resolver: npm_resolver.as_ref(),
- cli_options,
- })
- .await
- }
- TaskOrScript::Script(scripts, _script) => {
- // ensure the npm packages are installed if using a managed resolver
- if let Some(npm_resolver) = npm_resolver.as_managed() {
- npm_resolver.ensure_top_level_package_json_install().await?;
+ let no_of_concurrent_tasks = if let Ok(value) = std::env::var("DENO_JOBS") {
+ value.parse::<NonZeroUsize>().ok()
+ } else {
+ std::thread::available_parallelism().ok()
+ }
+ .unwrap_or_else(|| NonZeroUsize::new(2).unwrap());
+
+ let task_runner = TaskRunner {
+ tasks_config,
+ task_flags: &task_flags,
+ npm_resolver: npm_resolver.as_ref(),
+ node_resolver: node_resolver.as_ref(),
+ env_vars,
+ cli_options,
+ concurrency: no_of_concurrent_tasks.into(),
+ };
+
+ task_runner.run_task(task_name).await
+}
+
+struct RunSingleOptions<'a> {
+ task_name: &'a str,
+ script: &'a str,
+ cwd: &'a Path,
+ custom_commands: HashMap<String, Rc<dyn ShellCommand>>,
+}
+
+struct TaskRunner<'a> {
+ tasks_config: WorkspaceTasksConfig,
+ task_flags: &'a TaskFlags,
+ npm_resolver: &'a dyn CliNpmResolver,
+ node_resolver: &'a NodeResolver,
+ env_vars: HashMap<String, String>,
+ cli_options: &'a CliOptions,
+ concurrency: usize,
+}
+
+impl<'a> TaskRunner<'a> {
+ pub async fn run_task(
+ &self,
+ task_name: &str,
+ ) -> Result<i32, deno_core::anyhow::Error> {
+ match sort_tasks_topo(task_name, &self.tasks_config) {
+ Ok(sorted) => self.run_tasks_in_parallel(sorted).await,
+ Err(err) => match err {
+ TaskError::NotFound(name) => {
+ if self.task_flags.is_run {
+ return Err(anyhow!("Task not found: {}", name));
+ }
+
+ log::error!("Task not found: {}", name);
+ if log::log_enabled!(log::Level::Error) {
+ self.print_available_tasks()?;
+ }
+ Ok(1)
+ }
+ TaskError::TaskDepCycle { path } => {
+ log::error!("Task cycle detected: {}", path.join(" -> "));
+ Ok(1)
}
+ },
+ }
+ }
+
+ pub fn print_available_tasks(&self) -> Result<(), std::io::Error> {
+ print_available_tasks(
+ &mut std::io::stderr(),
+ &self.cli_options.start_dir,
+ &self.tasks_config,
+ )
+ }
- let cwd = match task_flags.cwd {
- Some(path) => canonicalize_path(&PathBuf::from(path))?,
- None => normalize_path(dir_url.to_file_path().unwrap()),
- };
-
- // At this point we already checked if the task name exists in package.json.
- // We can therefore check for "pre" and "post" scripts too, since we're only
- // dealing with package.json here and not deno.json
- let task_names = vec![
- format!("pre{}", task_name),
- task_name.clone(),
- format!("post{}", task_name),
- ];
- let custom_commands = task_runner::resolve_custom_commands(
- npm_resolver.as_ref(),
- node_resolver,
- )?;
- for task_name in &task_names {
- if let Some(script) = scripts.get(task_name) {
- let exit_code = run_task(RunTaskOptions {
- task_name,
- script,
- cwd: &cwd,
- env_vars: env_vars.clone(),
- custom_commands: custom_commands.clone(),
- npm_resolver: npm_resolver.as_ref(),
- cli_options,
- })
- .await?;
- if exit_code > 0 {
- return Ok(exit_code);
+ async fn run_tasks_in_parallel(
+ &self,
+ task_names: Vec<String>,
+ ) -> Result<i32, deno_core::anyhow::Error> {
+ struct PendingTasksContext {
+ completed: HashSet<String>,
+ running: HashSet<String>,
+ task_names: Vec<String>,
+ }
+
+ impl PendingTasksContext {
+ fn has_remaining_tasks(&self) -> bool {
+ self.completed.len() < self.task_names.len()
+ }
+
+ fn mark_complete(&mut self, task_name: String) {
+ self.running.remove(&task_name);
+ self.completed.insert(task_name);
+ }
+
+ fn get_next_task<'a>(
+ &mut self,
+ runner: &'a TaskRunner<'a>,
+ ) -> Option<LocalBoxFuture<'a, Result<(i32, String), AnyError>>> {
+ for name in &self.task_names {
+ if self.completed.contains(name) || self.running.contains(name) {
+ continue;
+ }
+
+ let should_run = if let Ok((_, def)) = runner.get_task(name) {
+ match def {
+ TaskOrScript::Task(_, def) => def
+ .dependencies
+ .iter()
+ .all(|dep| self.completed.contains(dep)),
+ TaskOrScript::Script(_, _) => true,
}
+ } else {
+ false
+ };
+
+ if !should_run {
+ continue;
}
+
+ self.running.insert(name.clone());
+ let name = name.clone();
+ return Some(
+ async move {
+ runner
+ .run_task_no_dependencies(&name)
+ .await
+ .map(|exit_code| (exit_code, name))
+ }
+ .boxed_local(),
+ );
}
+ None
+ }
+ }
- Ok(0)
+ let mut context = PendingTasksContext {
+ completed: HashSet::with_capacity(task_names.len()),
+ running: HashSet::with_capacity(self.concurrency),
+ task_names,
+ };
+
+ let mut queue = futures_unordered::FuturesUnordered::new();
+
+ while context.has_remaining_tasks() {
+ while queue.len() < self.concurrency {
+ if let Some(task) = context.get_next_task(self) {
+ queue.push(task);
+ } else {
+ break;
+ }
}
- },
- None => {
- if task_flags.is_run {
- return Err(anyhow!("Task not found: {}", task_name));
+
+      // If the queue yields nothing here, no task is running and none can be scheduled.
+ let Some(result) = queue.next().await else {
+ debug_assert_eq!(context.task_names.len(), 0);
+ break;
+ };
+
+ let (exit_code, name) = result?;
+ if exit_code > 0 {
+ return Ok(exit_code);
}
- log::error!("Task not found: {}", task_name);
- if log::log_enabled!(log::Level::Error) {
- print_available_tasks(
- &mut std::io::stderr(),
- &cli_options.start_dir,
- &tasks_config,
- )?;
+
+ context.mark_complete(name);
+ }
+
+ Ok(0)
+ }
+
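
The loop above is a refill pattern: keep at most `concurrency` task futures in a `FuturesUnordered` and, each time one finishes, try to schedule more whose dependencies have completed. Stripped of the dependency bookkeeping, the pattern looks roughly like this (job names and the `futures` executor are stand-ins):

use futures::stream::{FuturesUnordered, StreamExt};

async fn run_all(jobs: Vec<&str>, concurrency: usize) {
    let mut pending = jobs.into_iter();
    let mut queue = FuturesUnordered::new();
    loop {
        // Refill up to the concurrency limit.
        while queue.len() < concurrency {
            match pending.next() {
                Some(name) => queue.push(async move {
                    // Stand-in for actually running one task.
                    name
                }),
                None => break,
            }
        }
        // Wait for the next finished job; None means everything is done.
        match queue.next().await {
            Some(done) => println!("finished {done}"),
            None => break,
        }
    }
}

fn main() {
    futures::executor::block_on(run_all(vec!["lint", "test", "build"], 2));
}
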
+ fn get_task(
+ &self,
+ task_name: &str,
+ ) -> Result<(&Url, TaskOrScript), TaskError> {
+ let Some(result) = self.tasks_config.task(task_name) else {
+ return Err(TaskError::NotFound(task_name.to_string()));
+ };
+
+ Ok(result)
+ }
+
+ async fn run_task_no_dependencies(
+ &self,
+ task_name: &String,
+ ) -> Result<i32, deno_core::anyhow::Error> {
+ let (dir_url, task_or_script) = self.get_task(task_name.as_str()).unwrap();
+
+ match task_or_script {
+ TaskOrScript::Task(_tasks, definition) => {
+ self.run_deno_task(dir_url, task_name, definition).await
+ }
+ TaskOrScript::Script(scripts, _script) => {
+ self.run_npm_script(dir_url, task_name, scripts).await
}
- Ok(1)
}
}
-}
-struct RunTaskOptions<'a> {
- task_name: &'a str,
- script: &'a str,
- cwd: &'a Path,
- env_vars: HashMap<String, String>,
- custom_commands: HashMap<String, Rc<dyn ShellCommand>>,
- npm_resolver: &'a dyn CliNpmResolver,
- cli_options: &'a CliOptions,
-}
+ async fn run_deno_task(
+ &self,
+ dir_url: &Url,
+ task_name: &String,
+ definition: &TaskDefinition,
+ ) -> Result<i32, deno_core::anyhow::Error> {
+ let cwd = match &self.task_flags.cwd {
+ Some(path) => canonicalize_path(&PathBuf::from(path))
+ .context("failed canonicalizing --cwd")?,
+ None => normalize_path(dir_url.to_file_path().unwrap()),
+ };
-async fn run_task(opts: RunTaskOptions<'_>) -> Result<i32, AnyError> {
- let RunTaskOptions {
- task_name,
- script,
- cwd,
- env_vars,
- custom_commands,
- npm_resolver,
- cli_options,
- } = opts;
+ let custom_commands = task_runner::resolve_custom_commands(
+ self.npm_resolver,
+ self.node_resolver,
+ )?;
+ self
+ .run_single(RunSingleOptions {
+ task_name,
+ script: &definition.command,
+ cwd: &cwd,
+ custom_commands,
+ })
+ .await
+ }
- output_task(
- opts.task_name,
- &task_runner::get_script_with_args(script, cli_options.argv()),
- );
+ async fn run_npm_script(
+ &self,
+ dir_url: &Url,
+ task_name: &String,
+ scripts: &IndexMap<String, String>,
+ ) -> Result<i32, deno_core::anyhow::Error> {
+ // ensure the npm packages are installed if using a managed resolver
+ if let Some(npm_resolver) = self.npm_resolver.as_managed() {
+ npm_resolver.ensure_top_level_package_json_install().await?;
+ }
+
+ let cwd = match &self.task_flags.cwd {
+ Some(path) => canonicalize_path(&PathBuf::from(path))?,
+ None => normalize_path(dir_url.to_file_path().unwrap()),
+ };
- Ok(
- task_runner::run_task(task_runner::RunTaskOptions {
+ // At this point we already checked if the task name exists in package.json.
+ // We can therefore check for "pre" and "post" scripts too, since we're only
+ // dealing with package.json here and not deno.json
+ let task_names = vec![
+ format!("pre{}", task_name),
+ task_name.clone(),
+ format!("post{}", task_name),
+ ];
+ let custom_commands = task_runner::resolve_custom_commands(
+ self.npm_resolver,
+ self.node_resolver,
+ )?;
+ for task_name in &task_names {
+ if let Some(script) = scripts.get(task_name) {
+ let exit_code = self
+ .run_single(RunSingleOptions {
+ task_name,
+ script,
+ cwd: &cwd,
+ custom_commands: custom_commands.clone(),
+ })
+ .await?;
+ if exit_code > 0 {
+ return Ok(exit_code);
+ }
+ }
+ }
+
+ Ok(0)
+ }
+
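
Because this branch only deals with package.json scripts, it quietly follows npm's pre/post convention; the lookup order for a script name is simply the following (hypothetical helper, mirroring the `task_names` vec above):

fn with_pre_post(task: &str) -> [String; 3] {
    [format!("pre{task}"), task.to_string(), format!("post{task}")]
}

fn main() {
    // `deno task build` against package.json runs whichever of these exist, in order.
    assert_eq!(with_pre_post("build"), ["prebuild", "build", "postbuild"]);
}
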
+ async fn run_single(
+ &self,
+ opts: RunSingleOptions<'_>,
+ ) -> Result<i32, AnyError> {
+ let RunSingleOptions {
task_name,
script,
cwd,
- env_vars,
custom_commands,
- init_cwd: opts.cli_options.initial_cwd(),
- argv: cli_options.argv(),
- root_node_modules_dir: npm_resolver.root_node_modules_path(),
- stdio: None,
- })
- .await?
- .exit_code,
- )
+ } = opts;
+
+ output_task(
+ opts.task_name,
+ &task_runner::get_script_with_args(script, self.cli_options.argv()),
+ );
+
+ Ok(
+ task_runner::run_task(task_runner::RunTaskOptions {
+ task_name,
+ script,
+ cwd,
+ env_vars: self.env_vars.clone(),
+ custom_commands,
+ init_cwd: self.cli_options.initial_cwd(),
+ argv: self.cli_options.argv(),
+ root_node_modules_dir: self.npm_resolver.root_node_modules_path(),
+ stdio: None,
+ })
+ .await?
+ .exit_code,
+ )
+ }
+}
+
+#[derive(Debug)]
+enum TaskError {
+ NotFound(String),
+ TaskDepCycle { path: Vec<String> },
+}
+
+fn sort_tasks_topo(
+ name: &str,
+ task_config: &WorkspaceTasksConfig,
+) -> Result<Vec<String>, TaskError> {
+ fn sort_visit<'a>(
+ name: &'a str,
+ sorted: &mut Vec<String>,
+ mut path: Vec<&'a str>,
+ tasks_config: &'a WorkspaceTasksConfig,
+ ) -> Result<(), TaskError> {
+ // Already sorted
+ if sorted.iter().any(|sorted_name| sorted_name == name) {
+ return Ok(());
+ }
+
+ // Graph has a cycle
+ if path.contains(&name) {
+ path.push(name);
+ return Err(TaskError::TaskDepCycle {
+ path: path.iter().map(|s| s.to_string()).collect(),
+ });
+ }
+
+ let Some(def) = tasks_config.task(name) else {
+ return Err(TaskError::NotFound(name.to_string()));
+ };
+
+ if let TaskOrScript::Task(_, actual_def) = def.1 {
+ for dep in &actual_def.dependencies {
+ let mut path = path.clone();
+ path.push(name);
+ sort_visit(dep, sorted, path, tasks_config)?
+ }
+ }
+
+ sorted.push(name.to_string());
+
+ Ok(())
+ }
+
+ let mut sorted: Vec<String> = vec![];
+
+ sort_visit(name, &mut sorted, Vec::new(), task_config)?;
+
+ Ok(sorted)
}
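
`sort_tasks_topo` above is a post-order depth-first walk in which the current path doubles as the cycle detector. The same idea over a plain dependency map (task names invented):

use std::collections::BTreeMap;

// Post-order DFS: dependencies are pushed before the task that needs them,
// and revisiting a name already on the current path means a cycle.
fn topo(
    name: &str,
    deps: &BTreeMap<&str, Vec<&str>>,
    path: &mut Vec<String>,
    out: &mut Vec<String>,
) -> Result<(), String> {
    if out.iter().any(|n| n == name) {
        return Ok(()); // already sorted
    }
    if path.iter().any(|n| n == name) {
        return Err(format!("cycle: {} -> {}", path.join(" -> "), name));
    }
    path.push(name.to_string());
    for dep in deps.get(name).into_iter().flatten() {
        topo(dep, deps, path, out)?;
    }
    path.pop();
    out.push(name.to_string());
    Ok(())
}

fn main() {
    let deps = BTreeMap::from([
        ("build", vec!["codegen"]),
        ("codegen", vec![]),
        ("test", vec!["build"]),
    ]);
    let mut out = Vec::new();
    topo("test", &deps, &mut Vec::new(), &mut out).unwrap();
    assert_eq!(out, ["codegen", "build", "test"]);
}
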
fn output_task(task_name: &str, script: &str) {
@@ -222,80 +450,97 @@ fn print_available_tasks(
" {}",
colors::red("No tasks found in configuration file")
)?;
- } else {
- let mut seen_task_names =
- HashSet::with_capacity(tasks_config.tasks_count());
- for maybe_config in [&tasks_config.member, &tasks_config.root] {
- let Some(config) = maybe_config else {
- continue;
- };
- for (is_root, is_deno, (key, task)) in config
- .deno_json
- .as_ref()
- .map(|config| {
- let is_root = !is_cwd_root_dir
- && config.folder_url
- == *workspace_dir.workspace.root_dir().as_ref();
- config
- .tasks
- .iter()
- .map(move |(k, t)| (is_root, true, (k, Cow::Borrowed(t))))
- })
- .into_iter()
- .flatten()
- .chain(
- config
- .package_json
- .as_ref()
- .map(|config| {
- let is_root = !is_cwd_root_dir
- && config.folder_url
- == *workspace_dir.workspace.root_dir().as_ref();
- config.tasks.iter().map(move |(k, v)| {
- (is_root, false, (k, Cow::Owned(Task::Definition(v.clone()))))
- })
- })
- .into_iter()
- .flatten(),
- )
- {
- if !seen_task_names.insert(key) {
+ return Ok(());
+ }
+
+ struct AvailableTaskDescription {
+ is_root: bool,
+ is_deno: bool,
+ name: String,
+ task: TaskDefinition,
+ }
+ let mut seen_task_names = HashSet::with_capacity(tasks_config.tasks_count());
+ let mut task_descriptions = Vec::with_capacity(tasks_config.tasks_count());
+
+ for maybe_config in [&tasks_config.member, &tasks_config.root] {
+ let Some(config) = maybe_config else {
+ continue;
+ };
+
+ if let Some(config) = config.deno_json.as_ref() {
+ let is_root = !is_cwd_root_dir
+ && config.folder_url == *workspace_dir.workspace.root_dir().as_ref();
+
+ for (name, definition) in &config.tasks {
+ if !seen_task_names.insert(name) {
continue; // already seen
}
- writeln!(
- writer,
- "- {}{}",
- colors::cyan(key),
- if is_root {
- if is_deno {
- format!(" {}", colors::italic_gray("(workspace)"))
- } else {
- format!(" {}", colors::italic_gray("(workspace package.json)"))
- }
- } else if is_deno {
- "".to_string()
- } else {
- format!(" {}", colors::italic_gray("(package.json)"))
- }
- )?;
- let definition = match task.as_ref() {
- Task::Definition(definition) => definition,
- Task::Commented { definition, .. } => definition,
- };
- if let Task::Commented { comments, .. } = task.as_ref() {
- let slash_slash = colors::italic_gray("//");
- for comment in comments {
- writeln!(
- writer,
- " {slash_slash} {}",
- colors::italic_gray(comment)
- )?;
- }
+ task_descriptions.push(AvailableTaskDescription {
+ is_root,
+ is_deno: true,
+ name: name.to_string(),
+ task: definition.clone(),
+ });
+ }
+ }
+
+ if let Some(config) = config.package_json.as_ref() {
+ let is_root = !is_cwd_root_dir
+ && config.folder_url == *workspace_dir.workspace.root_dir().as_ref();
+ for (name, script) in &config.tasks {
+ if !seen_task_names.insert(name) {
+ continue; // already seen
}
- writeln!(writer, " {definition}")?;
+
+ task_descriptions.push(AvailableTaskDescription {
+ is_root,
+ is_deno: false,
+ name: name.to_string(),
+ task: deno_config::deno_json::TaskDefinition {
+ command: script.to_string(),
+ dependencies: vec![],
+ description: None,
+ },
+ });
}
}
}
+ for desc in task_descriptions {
+ writeln!(
+ writer,
+ "- {}{}",
+ colors::cyan(desc.name),
+ if desc.is_root {
+ if desc.is_deno {
+ format!(" {}", colors::italic_gray("(workspace)"))
+ } else {
+ format!(" {}", colors::italic_gray("(workspace package.json)"))
+ }
+ } else if desc.is_deno {
+ "".to_string()
+ } else {
+ format!(" {}", colors::italic_gray("(package.json)"))
+ }
+ )?;
+ if let Some(description) = &desc.task.description {
+ let slash_slash = colors::italic_gray("//");
+ writeln!(
+ writer,
+ " {slash_slash} {}",
+ colors::italic_gray(description)
+ )?;
+ }
+ writeln!(writer, " {}", desc.task.command)?;
+ if !desc.task.dependencies.is_empty() {
+ writeln!(
+ writer,
+ " {} {}",
+ colors::gray("depends on:"),
+ colors::cyan(desc.task.dependencies.join(", "))
+ )?;
+ }
+ }
+
Ok(())
}
diff --git a/cli/tools/test/mod.rs b/cli/tools/test/mod.rs
index e81abad0b..6357ebcae 100644
--- a/cli/tools/test/mod.rs
+++ b/cli/tools/test/mod.rs
@@ -631,7 +631,7 @@ async fn configure_main_worker(
"Deno[Deno.internal].core.setLeakTracingEnabled(true);",
)?;
}
- let res = worker.execute_side_module_possibly_with_npm().await;
+ let res = worker.execute_side_module().await;
let mut worker = worker.into_main_worker();
match res {
Ok(()) => Ok(()),
@@ -1357,6 +1357,7 @@ pub async fn report_tests(
if let Err(err) = reporter.flush_report(&elapsed, &tests, &test_steps) {
eprint!("Test reporter failed to flush: {}", err)
}
+ #[allow(clippy::disallowed_methods)]
std::process::exit(130);
}
}
@@ -1642,6 +1643,7 @@ pub async fn run_tests_with_watch(
loop {
signal::ctrl_c().await.unwrap();
if !HAS_TEST_RUN_SIGINT_HANDLER.load(Ordering::Relaxed) {
+ #[allow(clippy::disallowed_methods)]
std::process::exit(130);
}
}
@@ -1659,6 +1661,7 @@ pub async fn run_tests_with_watch(
),
move |flags, watcher_communicator, changed_paths| {
let test_flags = test_flags.clone();
+ watcher_communicator.show_path_changed(changed_paths.clone());
Ok(async move {
let factory = CliFactory::from_flags_for_watcher(
flags,
diff --git a/cli/tools/upgrade.rs b/cli/tools/upgrade.rs
index 7f21e6649..cb85859f7 100644
--- a/cli/tools/upgrade.rs
+++ b/cli/tools/upgrade.rs
@@ -540,7 +540,7 @@ pub async fn upgrade(
let Some(archive_data) = download_package(&client, download_url).await?
else {
log::error!("Download could not be found, aborting");
- std::process::exit(1)
+ deno_runtime::exit(1)
};
log::info!(
@@ -579,6 +579,10 @@ pub async fn upgrade(
let output_exe_path =
full_path_output_flag.as_ref().unwrap_or(&current_exe_path);
+
+ #[cfg(windows)]
+ kill_running_deno_lsp_processes();
+
let output_result = if *output_exe_path == current_exe_path {
replace_exe(&new_exe_path, output_exe_path)
} else {
@@ -913,7 +917,7 @@ async fn download_package(
// text above which will stay alive after the progress bars are complete
let progress = progress_bar.update("");
let maybe_bytes = client
- .download_with_progress(download_url.clone(), None, &progress)
+ .download_with_progress_and_retries(download_url.clone(), None, &progress)
.await
.with_context(|| format!("Failed downloading {download_url}. The version you requested may not have been built for the current architecture."))?;
Ok(maybe_bytes)
@@ -966,6 +970,34 @@ fn check_windows_access_denied_error(
})
}
+#[cfg(windows)]
+fn kill_running_deno_lsp_processes() {
+ // limit this to `deno lsp` invocations to avoid killing important programs someone might be running
+ let is_debug = log::log_enabled!(log::Level::Debug);
+ let get_pipe = || {
+ if is_debug {
+ std::process::Stdio::inherit()
+ } else {
+ std::process::Stdio::null()
+ }
+ };
+ let _ = Command::new("powershell.exe")
+ .args([
+ "-Command",
+ r#"Get-WmiObject Win32_Process | Where-Object {
+ $_.Name -eq 'deno.exe' -and
+ $_.CommandLine -match '^(?:\"[^\"]+\"|\S+)\s+lsp\b'
+} | ForEach-Object {
+ if ($_.Terminate()) {
+ Write-Host 'Terminated:' $_.ProcessId
+ }
+}"#,
+ ])
+ .stdout(get_pipe())
+ .stderr(get_pipe())
+ .output();
+}
+
fn set_exe_permissions(
current_exe_path: &Path,
output_exe_path: &Path,