-rw-r--r--  cli/tools/registry/diagnostics.rs                                  |  43
-rw-r--r--  cli/tools/registry/graph.rs                                        | 290
-rw-r--r--  cli/tools/registry/mod.rs                                          | 822
-rw-r--r--  tests/specs/publish/banned_triple_slash_directives/__test__.jsonc  |   5
-rw-r--r--  tests/specs/publish/banned_triple_slash_directives/deno.json       |   5
-rw-r--r--  tests/specs/publish/banned_triple_slash_directives/mod.ts          |   2
-rw-r--r--  tests/specs/publish/banned_triple_slash_directives/publish.out     |  26
7 files changed, 681 insertions, 512 deletions
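
As a reading aid before the raw diff: the core of this change is a new check in cli/tools/registry/graph.rs that rejects triple-slash directives which modify the global type environment (lib="..." and no-default-lib="true"). The standalone Rust sketch below reuses the regex literal from that hunk verbatim, but everything around it is illustrative only: it assumes the plain regex crate rather than the lazy_regex! macro used in the diff, and hard-coded comment texts rather than swc comments. swc stores a /// line comment's text without the leading //, which is why the pattern begins with ^/.

// Illustrative harness only; the regex literal is copied from
// check_for_banned_triple_slash_directives in graph.rs below.
fn main() {
  let triple_slash_re = regex::Regex::new(
    r#"^/\s+<reference\s+(no-default-lib\s*=\s*"true"|lib\s*=\s*("[^"]+"|'[^']+'))\s*/>\s*$"#,
  )
  .unwrap();

  // Comment texts as swc reports them, with the leading `//` already stripped.
  let banned = [
    r#"/ <reference lib="deno.ns" />"#,
    r#"/ <reference no-default-lib="true" />"#,
  ];
  // Directives that only reference types or files are not matched by this pattern.
  let not_flagged = [
    r#"/ <reference types="./mod.d.ts" />"#,
    r#"/ <reference path="./other.ts" />"#,
  ];

  for text in banned {
    assert!(triple_slash_re.is_match(text), "should be flagged: {text}");
  }
  for text in not_flagged {
    assert!(!triple_slash_re.is_match(text), "should not be flagged: {text}");
  }
  println!("only lib and no-default-lib directives are flagged");
}

In other words, only directives that change which globals or libs a consumer sees are banned; types and path references pass through this check untouched.
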
diff --git a/cli/tools/registry/diagnostics.rs b/cli/tools/registry/diagnostics.rs
index 49f8de045..1c3a3bd58 100644
--- a/cli/tools/registry/diagnostics.rs
+++ b/cli/tools/registry/diagnostics.rs
@@ -15,6 +15,7 @@ use deno_ast::diagnostics::DiagnosticSourcePos;
 use deno_ast::diagnostics::DiagnosticSourceRange;
 use deno_ast::swc::common::util::take::Take;
 use deno_ast::SourcePos;
+use deno_ast::SourceRange;
 use deno_ast::SourceRanged;
 use deno_ast::SourceTextInfo;
 use deno_core::anyhow::anyhow;
@@ -115,6 +116,11 @@ pub enum PublishDiagnostic {
     text_info: SourceTextInfo,
     referrer: deno_graph::Range,
   },
+  BannedTripleSlashDirectives {
+    specifier: Url,
+    text_info: SourceTextInfo,
+    range: SourceRange,
+  },
 }
 
 impl PublishDiagnostic {
@@ -162,6 +168,7 @@ impl Diagnostic for PublishDiagnostic {
       UnsupportedJsxTsx { .. } => DiagnosticLevel::Warning,
       ExcludedModule { .. } => DiagnosticLevel::Error,
       MissingConstraint { .. } => DiagnosticLevel::Error,
+      BannedTripleSlashDirectives { .. } => DiagnosticLevel::Error,
     }
   }
 
@@ -177,6 +184,9 @@
       UnsupportedJsxTsx { .. } => Cow::Borrowed("unsupported-jsx-tsx"),
       ExcludedModule { .. } => Cow::Borrowed("excluded-module"),
       MissingConstraint { .. } => Cow::Borrowed("missing-constraint"),
+      BannedTripleSlashDirectives { .. } => {
+        Cow::Borrowed("banned-triple-slash-directives")
+      }
     }
   }
 
@@ -196,6 +206,7 @@
       UnsupportedJsxTsx { .. } => Cow::Borrowed("JSX and TSX files are currently not supported"),
       ExcludedModule { .. } => Cow::Borrowed("module in package's module graph was excluded from publishing"),
       MissingConstraint { specifier, .. } => Cow::Owned(format!("specifier '{}' is missing a version constraint", specifier)),
+      BannedTripleSlashDirectives { .. } => Cow::Borrowed("triple slash directives that modify globals are not allowed"),
     }
   }
 
@@ -253,6 +264,15 @@
         text_info,
         ..
       } => from_referrer_range(referrer, text_info),
+      BannedTripleSlashDirectives {
+        specifier,
+        range,
+        text_info,
+      } => DiagnosticLocation::ModulePosition {
+        specifier: Cow::Borrowed(specifier),
+        source_pos: DiagnosticSourcePos::SourcePos(range.start),
+        text_info: Cow::Borrowed(text_info),
+      },
     }
   }
 
@@ -319,6 +339,19 @@
         text_info,
         ..
       } => from_range(text_info, referrer),
+      BannedTripleSlashDirectives {
+        range, text_info, ..
+      } => Some(DiagnosticSnippet {
+        source: Cow::Borrowed(text_info),
+        highlight: DiagnosticSnippetHighlight {
+          style: DiagnosticSnippetHighlightStyle::Error,
+          range: DiagnosticSourceRange {
+            start: DiagnosticSourcePos::SourcePos(range.start),
+            end: DiagnosticSourcePos::SourcePos(range.end),
+          },
+          description: Some("the triple slash directive".into()),
+        },
+      }),
     }
   }
 
@@ -348,6 +381,9 @@
           "specify a version constraint for the specifier in the import map"
         }))
       },
+      BannedTripleSlashDirectives { .. } => Some(
+        Cow::Borrowed("remove the triple slash directive"),
+      ),
     }
   }
 
@@ -420,6 +456,10 @@
         ),
         Cow::Borrowed("major version if one is published in the future and potentially break"),
       ]),
+      BannedTripleSlashDirectives { .. } => Cow::Borrowed(&[
+        Cow::Borrowed("instead instruct the user of your package to specify these directives"),
+        Cow::Borrowed("or set their 'lib' compiler option appropriately"),
+      ]),
     }
   }
 
@@ -449,6 +489,9 @@
      MissingConstraint { ..
} => { Some(Cow::Borrowed("https://jsr.io/go/missing-constraint")) } + BannedTripleSlashDirectives { .. } => Some(Cow::Borrowed( + "https://jsr.io/go/banned-triple-slash-directives", + )), } } } diff --git a/cli/tools/registry/graph.rs b/cli/tools/registry/graph.rs index 7e3239ced..d1356df9e 100644 --- a/cli/tools/registry/graph.rs +++ b/cli/tools/registry/graph.rs @@ -3,7 +3,11 @@ use std::collections::HashSet; use std::sync::Arc; +use deno_ast::swc::common::comments::CommentKind; +use deno_ast::ParsedSource; +use deno_ast::SourceRangedForSpanned; use deno_ast::SourceTextInfo; +use deno_core::error::AnyError; use deno_graph::ModuleEntryRef; use deno_graph::ModuleGraph; use deno_graph::ResolutionResolved; @@ -12,137 +16,189 @@ use deno_semver::jsr::JsrPackageReqReference; use deno_semver::npm::NpmPackageReqReference; use lsp_types::Url; +use crate::cache::ParsedSourceCache; + use super::diagnostics::PublishDiagnostic; use super::diagnostics::PublishDiagnosticsCollector; -pub fn collect_invalid_external_imports( - graph: &ModuleGraph, - diagnostics_collector: &PublishDiagnosticsCollector, -) { - let mut visited = HashSet::new(); - let mut skip_specifiers: HashSet<Url> = HashSet::new(); - - let mut collect_if_invalid = - |skip_specifiers: &mut HashSet<Url>, - source_text: &Arc<str>, - specifier_text: &str, - resolution: &ResolutionResolved| { - if visited.insert(resolution.specifier.clone()) { - match resolution.specifier.scheme() { - "file" | "data" | "node" => {} - "jsr" => { - skip_specifiers.insert(resolution.specifier.clone()); - - // check for a missing version constraint - if let Ok(jsr_req_ref) = - JsrPackageReqReference::from_specifier(&resolution.specifier) - { - if jsr_req_ref.req().version_req.version_text() == "*" { - let maybe_version = graph - .packages - .mappings() - .find(|(req, _)| *req == jsr_req_ref.req()) - .map(|(_, nv)| nv.version.clone()); - diagnostics_collector.push( - PublishDiagnostic::MissingConstraint { - specifier: resolution.specifier.clone(), - specifier_text: specifier_text.to_string(), - resolved_version: maybe_version, - text_info: SourceTextInfo::new(source_text.clone()), - referrer: resolution.range.clone(), - }, - ); +pub struct GraphDiagnosticsCollector { + parsed_source_cache: Arc<ParsedSourceCache>, +} + +impl GraphDiagnosticsCollector { + pub fn new(parsed_source_cache: Arc<ParsedSourceCache>) -> Self { + Self { + parsed_source_cache, + } + } + + pub fn collect_diagnostics_for_graph( + &self, + graph: &ModuleGraph, + diagnostics_collector: &PublishDiagnosticsCollector, + ) -> Result<(), AnyError> { + let mut visited = HashSet::new(); + let mut skip_specifiers: HashSet<Url> = HashSet::new(); + + let mut collect_if_invalid = + |skip_specifiers: &mut HashSet<Url>, + source_text: &Arc<str>, + specifier_text: &str, + resolution: &ResolutionResolved| { + if visited.insert(resolution.specifier.clone()) { + match resolution.specifier.scheme() { + "file" | "data" | "node" => {} + "jsr" => { + skip_specifiers.insert(resolution.specifier.clone()); + + // check for a missing version constraint + if let Ok(jsr_req_ref) = + JsrPackageReqReference::from_specifier(&resolution.specifier) + { + if jsr_req_ref.req().version_req.version_text() == "*" { + let maybe_version = graph + .packages + .mappings() + .find(|(req, _)| *req == jsr_req_ref.req()) + .map(|(_, nv)| nv.version.clone()); + diagnostics_collector.push( + PublishDiagnostic::MissingConstraint { + specifier: resolution.specifier.clone(), + specifier_text: specifier_text.to_string(), + resolved_version: 
maybe_version, + text_info: SourceTextInfo::new(source_text.clone()), + referrer: resolution.range.clone(), + }, + ); + } } } - } - "npm" => { - skip_specifiers.insert(resolution.specifier.clone()); - - // check for a missing version constraint - if let Ok(jsr_req_ref) = - NpmPackageReqReference::from_specifier(&resolution.specifier) - { - if jsr_req_ref.req().version_req.version_text() == "*" { - let maybe_version = graph - .get(&resolution.specifier) - .and_then(|m| m.npm()) - .map(|n| n.nv_reference.nv().version.clone()); - diagnostics_collector.push( - PublishDiagnostic::MissingConstraint { - specifier: resolution.specifier.clone(), - specifier_text: specifier_text.to_string(), - resolved_version: maybe_version, - text_info: SourceTextInfo::new(source_text.clone()), - referrer: resolution.range.clone(), - }, - ); + "npm" => { + skip_specifiers.insert(resolution.specifier.clone()); + + // check for a missing version constraint + if let Ok(jsr_req_ref) = + NpmPackageReqReference::from_specifier(&resolution.specifier) + { + if jsr_req_ref.req().version_req.version_text() == "*" { + let maybe_version = graph + .get(&resolution.specifier) + .and_then(|m| m.npm()) + .map(|n| n.nv_reference.nv().version.clone()); + diagnostics_collector.push( + PublishDiagnostic::MissingConstraint { + specifier: resolution.specifier.clone(), + specifier_text: specifier_text.to_string(), + resolved_version: maybe_version, + text_info: SourceTextInfo::new(source_text.clone()), + referrer: resolution.range.clone(), + }, + ); + } } } - } - "http" | "https" => { - skip_specifiers.insert(resolution.specifier.clone()); - diagnostics_collector.push( - PublishDiagnostic::InvalidExternalImport { - kind: format!("non-JSR '{}'", resolution.specifier.scheme()), - text_info: SourceTextInfo::new(source_text.clone()), - imported: resolution.specifier.clone(), - referrer: resolution.range.clone(), - }, - ); - } - _ => { - skip_specifiers.insert(resolution.specifier.clone()); - diagnostics_collector.push( - PublishDiagnostic::InvalidExternalImport { - kind: format!("'{}'", resolution.specifier.scheme()), - text_info: SourceTextInfo::new(source_text.clone()), - imported: resolution.specifier.clone(), - referrer: resolution.range.clone(), - }, - ); + "http" | "https" => { + skip_specifiers.insert(resolution.specifier.clone()); + diagnostics_collector.push( + PublishDiagnostic::InvalidExternalImport { + kind: format!("non-JSR '{}'", resolution.specifier.scheme()), + text_info: SourceTextInfo::new(source_text.clone()), + imported: resolution.specifier.clone(), + referrer: resolution.range.clone(), + }, + ); + } + _ => { + skip_specifiers.insert(resolution.specifier.clone()); + diagnostics_collector.push( + PublishDiagnostic::InvalidExternalImport { + kind: format!("'{}'", resolution.specifier.scheme()), + text_info: SourceTextInfo::new(source_text.clone()), + imported: resolution.specifier.clone(), + referrer: resolution.range.clone(), + }, + ); + } } } - } + }; + + let options = WalkOptions { + check_js: true, + follow_dynamic: true, + // search the entire graph and not just the fast check subset + prefer_fast_check_graph: false, + follow_type_only: true, }; + let mut iter = graph.walk(&graph.roots, options); + while let Some((specifier, entry)) = iter.next() { + if skip_specifiers.contains(specifier) { + iter.skip_previous_dependencies(); + continue; + } - let options = WalkOptions { - check_js: true, - follow_dynamic: true, - // this being disabled will cause it to follow everything in the graph - prefer_fast_check_graph: 
false, - follow_type_only: true, - }; - let mut iter = graph.walk(&graph.roots, options); - while let Some((specifier, entry)) = iter.next() { - if skip_specifiers.contains(specifier) { - iter.skip_previous_dependencies(); - continue; - } + let ModuleEntryRef::Module(module) = entry else { + continue; + }; + let Some(module) = module.js() else { + continue; + }; - let ModuleEntryRef::Module(module) = entry else { - continue; - }; - let Some(module) = module.js() else { - continue; - }; + let parsed_source = self + .parsed_source_cache + .get_parsed_source_from_js_module(module)?; + check_for_banned_triple_slash_directives( + &parsed_source, + diagnostics_collector, + ); - for (specifier_text, dep) in &module.dependencies { - if let Some(resolved) = dep.maybe_code.ok() { - collect_if_invalid( - &mut skip_specifiers, - &module.source, - specifier_text, - resolved, - ); - } - if let Some(resolved) = dep.maybe_type.ok() { - collect_if_invalid( - &mut skip_specifiers, - &module.source, - specifier_text, - resolved, - ); + for (specifier_text, dep) in &module.dependencies { + if let Some(resolved) = dep.maybe_code.ok() { + collect_if_invalid( + &mut skip_specifiers, + &module.source, + specifier_text, + resolved, + ); + } + if let Some(resolved) = dep.maybe_type.ok() { + collect_if_invalid( + &mut skip_specifiers, + &module.source, + specifier_text, + resolved, + ); + } } } + + Ok(()) + } +} + +fn check_for_banned_triple_slash_directives( + parsed_source: &ParsedSource, + diagnostics_collector: &PublishDiagnosticsCollector, +) { + let triple_slash_re = lazy_regex::regex!( + r#"^/\s+<reference\s+(no-default-lib\s*=\s*"true"|lib\s*=\s*("[^"]+"|'[^']+'))\s*/>\s*$"# + ); + + let Some(comments) = parsed_source.get_leading_comments() else { + return; + }; + for comment in comments { + if comment.kind != CommentKind::Line { + continue; + } + if triple_slash_re.is_match(&comment.text) { + diagnostics_collector.push( + PublishDiagnostic::BannedTripleSlashDirectives { + specifier: parsed_source.specifier().clone(), + range: comment.range(), + text_info: parsed_source.text_info().clone(), + }, + ); + } } } diff --git a/cli/tools/registry/mod.rs b/cli/tools/registry/mod.rs index 495f24588..23e8f4313 100644 --- a/cli/tools/registry/mod.rs +++ b/cli/tools/registry/mod.rs @@ -23,6 +23,7 @@ use deno_core::serde_json::json; use deno_core::serde_json::Value; use deno_core::unsync::JoinSet; use deno_runtime::deno_fetch::reqwest; +use deno_runtime::deno_fs::FileSystem; use deno_terminal::colors; use import_map::ImportMap; use lsp_types::Url; @@ -47,7 +48,6 @@ use crate::tools::check::CheckOptions; use crate::tools::lint::no_slow_types; use crate::tools::registry::diagnostics::PublishDiagnostic; use crate::tools::registry::diagnostics::PublishDiagnosticsCollector; -use crate::tools::registry::graph::collect_invalid_external_imports; use crate::util::display::human_size; mod api; @@ -69,13 +69,100 @@ use unfurl::SpecifierUnfurler; use super::check::TypeChecker; +use self::graph::GraphDiagnosticsCollector; use self::paths::CollectedPublishPath; use self::tar::PublishableTarball; -#[allow(clippy::print_stderr)] -fn ring_bell() { - // ASCII code for the bell character. - eprint!("\x07"); +pub async fn publish( + flags: Flags, + publish_flags: PublishFlags, +) -> Result<(), AnyError> { + let cli_factory = CliFactory::from_flags(flags)?; + + let auth_method = + get_auth_method(publish_flags.token, publish_flags.dry_run)?; + + let import_map = cli_factory + .maybe_import_map() + .await? 
+ .clone() + .unwrap_or_else(|| { + Arc::new(ImportMap::new(Url::parse("file:///dev/null").unwrap())) + }); + + let directory_path = cli_factory.cli_options().initial_cwd(); + + let mapped_resolver = Arc::new(MappedSpecifierResolver::new( + Some(import_map), + cli_factory.package_json_deps_provider().clone(), + )); + let cli_options = cli_factory.cli_options(); + let Some(config_file) = cli_options.maybe_config_file() else { + bail!( + "Couldn't find a deno.json, deno.jsonc, jsr.json or jsr.jsonc configuration file in {}.", + directory_path.display() + ); + }; + + let diagnostics_collector = PublishDiagnosticsCollector::default(); + let publish_preparer = PublishPreparer::new( + GraphDiagnosticsCollector::new(cli_factory.parsed_source_cache().clone()), + cli_factory.module_graph_creator().await?.clone(), + cli_factory.parsed_source_cache().clone(), + cli_factory.type_checker().await?.clone(), + cli_factory.fs().clone(), + cli_factory.cli_options().clone(), + mapped_resolver, + ); + + let prepared_data = publish_preparer + .prepare_packages_for_publishing( + publish_flags.allow_slow_types, + &diagnostics_collector, + config_file.clone(), + ) + .await?; + + diagnostics_collector.print_and_error()?; + + if prepared_data.package_by_name.is_empty() { + bail!("No packages to publish"); + } + + if std::env::var("DENO_TESTING_DISABLE_GIT_CHECK") + .ok() + .is_none() + && !publish_flags.allow_dirty + && check_if_git_repo_dirty(cli_options.initial_cwd()).await + { + bail!("Aborting due to uncommitted changes. Check in source code or run with --allow-dirty"); + } + + if publish_flags.dry_run { + for (_, package) in prepared_data.package_by_name { + log::info!( + "{} of {} with files:", + colors::green_bold("Simulating publish"), + colors::gray(package.display_name()), + ); + for file in &package.tarball.files { + log::info!(" {} ({})", file.specifier, human_size(file.size as f64),); + } + } + log::warn!("{} Aborting due to --dry-run", colors::yellow("Warning")); + return Ok(()); + } + + perform_publish( + cli_factory.http_client(), + prepared_data.publish_order_graph, + prepared_data.package_by_name, + auth_method, + !publish_flags.no_provenance, + ) + .await?; + + Ok(()) } struct PreparedPublishPackage { @@ -93,157 +180,333 @@ impl PreparedPublishPackage { } } -static SUGGESTED_ENTRYPOINTS: [&str; 4] = - ["mod.ts", "mod.js", "index.ts", "index.js"]; +struct PreparePackagesData { + publish_order_graph: PublishOrderGraph, + package_by_name: HashMap<String, Rc<PreparedPublishPackage>>, +} -#[allow(clippy::too_many_arguments)] -async fn prepare_publish( - package_name: &str, - deno_json: &ConfigFile, +struct PublishPreparer { + graph_diagnostics_collector: GraphDiagnosticsCollector, + module_graph_creator: Arc<ModuleGraphCreator>, source_cache: Arc<ParsedSourceCache>, - graph: Arc<deno_graph::ModuleGraph>, + type_checker: Arc<TypeChecker>, cli_options: Arc<CliOptions>, mapped_resolver: Arc<MappedSpecifierResolver>, - sloppy_imports_resolver: Option<SloppyImportsResolver>, - diagnostics_collector: &PublishDiagnosticsCollector, -) -> Result<Rc<PreparedPublishPackage>, AnyError> { - let config_path = deno_json.specifier.to_file_path().unwrap(); - let root_dir = config_path.parent().unwrap().to_path_buf(); - let Some(version) = deno_json.json.version.clone() else { - bail!("{} is missing 'version' field", deno_json.specifier); - }; - if deno_json.json.exports.is_none() { - let mut suggested_entrypoint = None; + sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>, +} - for entrypoint in 
SUGGESTED_ENTRYPOINTS { - if root_dir.join(entrypoint).exists() { - suggested_entrypoint = Some(entrypoint); - break; +impl PublishPreparer { + pub fn new( + graph_diagnostics_collector: GraphDiagnosticsCollector, + module_graph_creator: Arc<ModuleGraphCreator>, + source_cache: Arc<ParsedSourceCache>, + type_checker: Arc<TypeChecker>, + fs: Arc<dyn FileSystem>, + cli_options: Arc<CliOptions>, + mapped_resolver: Arc<MappedSpecifierResolver>, + ) -> Self { + let sloppy_imports_resolver = if cli_options.unstable_sloppy_imports() { + Some(Arc::new(SloppyImportsResolver::new(fs.clone()))) + } else { + None + }; + Self { + graph_diagnostics_collector, + module_graph_creator, + source_cache, + type_checker, + cli_options, + mapped_resolver, + sloppy_imports_resolver, + } + } + + pub async fn prepare_packages_for_publishing( + &self, + allow_slow_types: bool, + diagnostics_collector: &PublishDiagnosticsCollector, + deno_json: ConfigFile, + ) -> Result<PreparePackagesData, AnyError> { + let members = deno_json.to_workspace_members()?; + + if members.len() > 1 { + log::info!("Publishing a workspace..."); + } + + // create the module graph + let graph = self + .build_and_check_graph_for_publish( + allow_slow_types, + diagnostics_collector, + &members, + ) + .await?; + + let mut package_by_name = HashMap::with_capacity(members.len()); + let publish_order_graph = + publish_order::build_publish_order_graph(&graph, &members)?; + + let results = members + .into_iter() + .map(|member| { + let graph = graph.clone(); + async move { + let package = self + .prepare_publish( + &member.package_name, + &member.config_file, + graph, + diagnostics_collector, + ) + .await + .with_context(|| { + format!("Failed preparing '{}'.", member.package_name) + })?; + Ok::<_, AnyError>((member.package_name, package)) + } + .boxed() + }) + .collect::<Vec<_>>(); + let results = deno_core::futures::future::join_all(results).await; + for result in results { + let (package_name, package) = result?; + package_by_name.insert(package_name, package); + } + Ok(PreparePackagesData { + publish_order_graph, + package_by_name, + }) + } + + async fn build_and_check_graph_for_publish( + &self, + allow_slow_types: bool, + diagnostics_collector: &PublishDiagnosticsCollector, + packages: &[WorkspaceMemberConfig], + ) -> Result<Arc<deno_graph::ModuleGraph>, deno_core::anyhow::Error> { + let build_fast_check_graph = !allow_slow_types; + let graph = self + .module_graph_creator + .create_and_validate_publish_graph(packages, build_fast_check_graph) + .await?; + + // todo(dsherret): move to lint rule + self + .graph_diagnostics_collector + .collect_diagnostics_for_graph(&graph, diagnostics_collector)?; + + if allow_slow_types { + log::info!( + concat!( + "{} Publishing a library with slow types is not recommended. ", + "This may lead to poor type checking performance for users of ", + "your package, may affect the quality of automatic documentation ", + "generation, and your package will not be shipped with a .d.ts ", + "file for Node.js users." 
+ ), + colors::yellow("Warning"), + ); + Ok(Arc::new(graph)) + } else if std::env::var("DENO_INTERNAL_FAST_CHECK_OVERWRITE").as_deref() + == Ok("1") + { + if check_if_git_repo_dirty(self.cli_options.initial_cwd()).await { + bail!("When using DENO_INTERNAL_FAST_CHECK_OVERWRITE, the git repo must be in a clean state."); + } + + for module in graph.modules() { + if module.specifier().scheme() != "file" { + continue; + } + let Some(js) = module.js() else { + continue; + }; + if let Some(module) = js.fast_check_module() { + std::fs::write( + js.specifier.to_file_path().unwrap(), + module.source.as_ref(), + )?; + } + } + + bail!("Exiting due to DENO_INTERNAL_FAST_CHECK_OVERWRITE") + } else { + log::info!("Checking for slow types in the public API..."); + let mut any_pkg_had_diagnostics = false; + for package in packages { + let export_urls = package.config_file.resolve_export_value_urls()?; + let diagnostics = + no_slow_types::collect_no_slow_type_diagnostics(&export_urls, &graph); + if !diagnostics.is_empty() { + any_pkg_had_diagnostics = true; + for diagnostic in diagnostics { + diagnostics_collector + .push(PublishDiagnostic::FastCheck(diagnostic)); + } + } + } + + if any_pkg_had_diagnostics { + Ok(Arc::new(graph)) + } else { + // fast check passed, type check the output as a temporary measure + // until we know that it's reliable and stable + let (graph, check_diagnostics) = self + .type_checker + .check_diagnostics( + graph, + CheckOptions { + build_fast_check_graph: false, // already built + lib: self.cli_options.ts_type_lib_window(), + log_ignored_options: false, + reload: self.cli_options.reload_flag(), + type_check_mode: self.cli_options.type_check_mode(), + }, + ) + .await?; + // ignore unused parameter diagnostics that may occur due to fast check + // not having function body implementations + let check_diagnostics = + check_diagnostics.filter(|d| d.include_when_remote()); + if !check_diagnostics.is_empty() { + bail!( + concat!( + "Failed ensuring public API type output is valid.\n\n", + "{:#}\n\n", + "You may have discovered a bug in Deno. Please open an issue at: ", + "https://github.com/denoland/deno/issues/" + ), + check_diagnostics + ); + } + Ok(graph) } } + } + + #[allow(clippy::too_many_arguments)] + async fn prepare_publish( + &self, + package_name: &str, + deno_json: &ConfigFile, + graph: Arc<deno_graph::ModuleGraph>, + diagnostics_collector: &PublishDiagnosticsCollector, + ) -> Result<Rc<PreparedPublishPackage>, AnyError> { + static SUGGESTED_ENTRYPOINTS: [&str; 4] = + ["mod.ts", "mod.js", "index.ts", "index.js"]; + + let config_path = deno_json.specifier.to_file_path().unwrap(); + let root_dir = config_path.parent().unwrap().to_path_buf(); + let Some(version) = deno_json.json.version.clone() else { + bail!("{} is missing 'version' field", deno_json.specifier); + }; + if deno_json.json.exports.is_none() { + let mut suggested_entrypoint = None; - let exports_content = format!( - r#"{{ + for entrypoint in SUGGESTED_ENTRYPOINTS { + if root_dir.join(entrypoint).exists() { + suggested_entrypoint = Some(entrypoint); + break; + } + } + + let exports_content = format!( + r#"{{ "name": "{}", "version": "{}", "exports": "{}" }}"#, - package_name, - version, - suggested_entrypoint.unwrap_or("<path_to_entrypoint>") - ); + package_name, + version, + suggested_entrypoint.unwrap_or("<path_to_entrypoint>") + ); - bail!( + bail!( "You did not specify an entrypoint to \"{}\" package in {}. 
Add `exports` mapping in the configuration file, eg:\n{}", package_name, deno_json.specifier, exports_content ); - } - let Some(name_no_at) = package_name.strip_prefix('@') else { - bail!("Invalid package name, use '@<scope_name>/<package_name> format"); - }; - let Some((scope, name_no_scope)) = name_no_at.split_once('/') else { - bail!("Invalid package name, use '@<scope_name>/<package_name> format"); - }; - let file_patterns = deno_json - .to_publish_config()? - .map(|c| c.files) - .unwrap_or_else(|| FilePatterns::new_with_base(root_dir.to_path_buf())); - - let tarball = deno_core::unsync::spawn_blocking({ - let diagnostics_collector = diagnostics_collector.clone(); - let config_path = config_path.clone(); - move || { - let bare_node_builtins = cli_options.unstable_bare_node_builtins(); - let unfurler = SpecifierUnfurler::new( - &mapped_resolver, - sloppy_imports_resolver.as_ref(), - bare_node_builtins, - ); - let root_specifier = - ModuleSpecifier::from_directory_path(&root_dir).unwrap(); - let publish_paths = - paths::collect_publish_paths(paths::CollectPublishPathsOptions { - root_dir: &root_dir, - cli_options: &cli_options, - diagnostics_collector: &diagnostics_collector, - file_patterns, - force_include_paths: vec![config_path], - })?; - collect_excluded_module_diagnostics( - &root_specifier, - &graph, - &publish_paths, - &diagnostics_collector, - ); - tar::create_gzipped_tarball( - &publish_paths, - LazyGraphSourceParser::new(&source_cache, &graph), - &diagnostics_collector, - &unfurler, - ) - .context("Failed to create a tarball") } - }) - .await??; - - log::debug!("Tarball size ({}): {}", package_name, tarball.bytes.len()); - - Ok(Rc::new(PreparedPublishPackage { - scope: scope.to_string(), - package: name_no_scope.to_string(), - version: version.to_string(), - tarball, - exports: match &deno_json.json.exports { - Some(Value::Object(exports)) => exports - .into_iter() - .map(|(k, v)| (k.to_string(), v.as_str().unwrap().to_string())) - .collect(), - Some(Value::String(exports)) => { - let mut map = HashMap::new(); - map.insert(".".to_string(), exports.to_string()); - map - } - _ => HashMap::new(), - }, - // the config file is always at the root of a publishing dir, - // so getting the file name is always correct - config: config_path - .file_name() - .unwrap() - .to_string_lossy() - .to_string(), - })) -} - -fn collect_excluded_module_diagnostics( - root: &ModuleSpecifier, - graph: &deno_graph::ModuleGraph, - publish_paths: &[CollectedPublishPath], - diagnostics_collector: &PublishDiagnosticsCollector, -) { - let publish_specifiers = publish_paths - .iter() - .map(|path| &path.specifier) - .collect::<HashSet<_>>(); - let graph_specifiers = graph - .modules() - .filter_map(|m| match m { - deno_graph::Module::Js(_) | deno_graph::Module::Json(_) => { - Some(m.specifier()) + let Some(name_no_at) = package_name.strip_prefix('@') else { + bail!("Invalid package name, use '@<scope_name>/<package_name> format"); + }; + let Some((scope, name_no_scope)) = name_no_at.split_once('/') else { + bail!("Invalid package name, use '@<scope_name>/<package_name> format"); + }; + let file_patterns = deno_json + .to_publish_config()? 
+ .map(|c| c.files) + .unwrap_or_else(|| FilePatterns::new_with_base(root_dir.to_path_buf())); + + let tarball = deno_core::unsync::spawn_blocking({ + let diagnostics_collector = diagnostics_collector.clone(); + let mapped_resolver = self.mapped_resolver.clone(); + let sloppy_imports_resolver = self.sloppy_imports_resolver.clone(); + let cli_options = self.cli_options.clone(); + let source_cache = self.source_cache.clone(); + let config_path = config_path.clone(); + move || { + let bare_node_builtins = cli_options.unstable_bare_node_builtins(); + let unfurler = SpecifierUnfurler::new( + &mapped_resolver, + sloppy_imports_resolver.as_deref(), + bare_node_builtins, + ); + let root_specifier = + ModuleSpecifier::from_directory_path(&root_dir).unwrap(); + let publish_paths = + paths::collect_publish_paths(paths::CollectPublishPathsOptions { + root_dir: &root_dir, + cli_options: &cli_options, + diagnostics_collector: &diagnostics_collector, + file_patterns, + force_include_paths: vec![config_path], + })?; + collect_excluded_module_diagnostics( + &root_specifier, + &graph, + &publish_paths, + &diagnostics_collector, + ); + tar::create_gzipped_tarball( + &publish_paths, + LazyGraphSourceParser::new(&source_cache, &graph), + &diagnostics_collector, + &unfurler, + ) + .context("Failed to create a tarball") } - deno_graph::Module::Npm(_) - | deno_graph::Module::Node(_) - | deno_graph::Module::External(_) => None, }) - .filter(|s| s.as_str().starts_with(root.as_str())); - for specifier in graph_specifiers { - if !publish_specifiers.contains(specifier) { - diagnostics_collector.push(PublishDiagnostic::ExcludedModule { - specifier: specifier.clone(), - }); - } + .await??; + + log::debug!("Tarball size ({}): {}", package_name, tarball.bytes.len()); + + Ok(Rc::new(PreparedPublishPackage { + scope: scope.to_string(), + package: name_no_scope.to_string(), + version: version.to_string(), + tarball, + exports: match &deno_json.json.exports { + Some(Value::Object(exports)) => exports + .into_iter() + .map(|(k, v)| (k.to_string(), v.as_str().unwrap().to_string())) + .collect(), + Some(Value::String(exports)) => { + let mut map = HashMap::new(); + map.insert(".".to_string(), exports.to_string()); + map + } + _ => HashMap::new(), + }, + // the config file is always at the root of a publishing dir, + // so getting the file name is always correct + config: config_path + .file_name() + .unwrap() + .to_string_lossy() + .to_string(), + })) } } @@ -785,271 +1048,34 @@ async fn publish_package( Ok(()) } -struct PreparePackagesData { - publish_order_graph: PublishOrderGraph, - package_by_name: HashMap<String, Rc<PreparedPublishPackage>>, -} - -async fn prepare_packages_for_publishing( - cli_factory: &CliFactory, - allow_slow_types: bool, +fn collect_excluded_module_diagnostics( + root: &ModuleSpecifier, + graph: &deno_graph::ModuleGraph, + publish_paths: &[CollectedPublishPath], diagnostics_collector: &PublishDiagnosticsCollector, - deno_json: ConfigFile, - mapped_resolver: Arc<MappedSpecifierResolver>, -) -> Result<PreparePackagesData, AnyError> { - let members = deno_json.to_workspace_members()?; - let module_graph_creator = cli_factory.module_graph_creator().await?.as_ref(); - let source_cache = cli_factory.parsed_source_cache(); - let type_checker = cli_factory.type_checker().await?; - let fs = cli_factory.fs(); - let cli_options = cli_factory.cli_options(); - - if members.len() > 1 { - log::info!("Publishing a workspace..."); - } - - // create the module graph - let graph = build_and_check_graph_for_publish( - 
module_graph_creator, - type_checker, - cli_options, - allow_slow_types, - diagnostics_collector, - &members, - ) - .await?; - - let mut package_by_name = HashMap::with_capacity(members.len()); - let publish_order_graph = - publish_order::build_publish_order_graph(&graph, &members)?; - - let results = members - .into_iter() - .map(|member| { - let mapped_resolver = mapped_resolver.clone(); - let sloppy_imports_resolver = if cli_options.unstable_sloppy_imports() { - Some(SloppyImportsResolver::new(fs.clone())) - } else { - None - }; - let graph = graph.clone(); - let cli_options = cli_options.clone(); - async move { - let package = prepare_publish( - &member.package_name, - &member.config_file, - source_cache.clone(), - graph, - cli_options, - mapped_resolver, - sloppy_imports_resolver, - diagnostics_collector, - ) - .await - .with_context(|| { - format!("Failed preparing '{}'.", member.package_name) - })?; - Ok::<_, AnyError>((member.package_name, package)) +) { + let publish_specifiers = publish_paths + .iter() + .map(|path| &path.specifier) + .collect::<HashSet<_>>(); + let graph_specifiers = graph + .modules() + .filter_map(|m| match m { + deno_graph::Module::Js(_) | deno_graph::Module::Json(_) => { + Some(m.specifier()) } - .boxed() + deno_graph::Module::Npm(_) + | deno_graph::Module::Node(_) + | deno_graph::Module::External(_) => None, }) - .collect::<Vec<_>>(); - let results = deno_core::futures::future::join_all(results).await; - for result in results { - let (package_name, package) = result?; - package_by_name.insert(package_name, package); - } - Ok(PreparePackagesData { - publish_order_graph, - package_by_name, - }) -} - -async fn build_and_check_graph_for_publish( - module_graph_creator: &ModuleGraphCreator, - type_checker: &TypeChecker, - cli_options: &CliOptions, - allow_slow_types: bool, - diagnostics_collector: &PublishDiagnosticsCollector, - packages: &[WorkspaceMemberConfig], -) -> Result<Arc<deno_graph::ModuleGraph>, deno_core::anyhow::Error> { - let build_fast_check_graph = !allow_slow_types; - let graph = module_graph_creator - .create_and_validate_publish_graph(packages, build_fast_check_graph) - .await?; - - // todo(dsherret): move to lint rule - collect_invalid_external_imports(&graph, diagnostics_collector); - - if allow_slow_types { - log::info!( - concat!( - "{} Publishing a library with slow types is not recommended. ", - "This may lead to poor type checking performance for users of ", - "your package, may affect the quality of automatic documentation ", - "generation, and your package will not be shipped with a .d.ts ", - "file for Node.js users." 
- ), - colors::yellow("Warning"), - ); - Ok(Arc::new(graph)) - } else if std::env::var("DENO_INTERNAL_FAST_CHECK_OVERWRITE").as_deref() - == Ok("1") - { - if check_if_git_repo_dirty(cli_options.initial_cwd()).await { - bail!("When using DENO_INTERNAL_FAST_CHECK_OVERWRITE, the git repo must be in a clean state."); - } - - for module in graph.modules() { - if module.specifier().scheme() != "file" { - continue; - } - let Some(js) = module.js() else { - continue; - }; - if let Some(module) = js.fast_check_module() { - std::fs::write( - js.specifier.to_file_path().unwrap(), - module.source.as_ref(), - )?; - } - } - - bail!("Exiting due to DENO_INTERNAL_FAST_CHECK_OVERWRITE") - } else { - log::info!("Checking for slow types in the public API..."); - let mut any_pkg_had_diagnostics = false; - for package in packages { - let export_urls = package.config_file.resolve_export_value_urls()?; - let diagnostics = - no_slow_types::collect_no_slow_type_diagnostics(&export_urls, &graph); - if !diagnostics.is_empty() { - any_pkg_had_diagnostics = true; - for diagnostic in diagnostics { - diagnostics_collector.push(PublishDiagnostic::FastCheck(diagnostic)); - } - } - } - - if any_pkg_had_diagnostics { - Ok(Arc::new(graph)) - } else { - // fast check passed, type check the output as a temporary measure - // until we know that it's reliable and stable - let (graph, check_diagnostics) = type_checker - .check_diagnostics( - graph, - CheckOptions { - build_fast_check_graph: false, // already built - lib: cli_options.ts_type_lib_window(), - log_ignored_options: false, - reload: cli_options.reload_flag(), - type_check_mode: cli_options.type_check_mode(), - }, - ) - .await?; - // ignore unused parameter diagnostics that may occur due to fast check - // not having function body implementations - let check_diagnostics = - check_diagnostics.filter(|d| d.include_when_remote()); - if !check_diagnostics.is_empty() { - bail!( - concat!( - "Failed ensuring public API type output is valid.\n\n", - "{:#}\n\n", - "You may have discovered a bug in Deno. Please open an issue at: ", - "https://github.com/denoland/deno/issues/" - ), - check_diagnostics - ); - } - Ok(graph) - } - } -} - -pub async fn publish( - flags: Flags, - publish_flags: PublishFlags, -) -> Result<(), AnyError> { - let cli_factory = CliFactory::from_flags(flags)?; - - let auth_method = - get_auth_method(publish_flags.token, publish_flags.dry_run)?; - - let import_map = cli_factory - .maybe_import_map() - .await? 
- .clone() - .unwrap_or_else(|| { - Arc::new(ImportMap::new(Url::parse("file:///dev/null").unwrap())) - }); - - let directory_path = cli_factory.cli_options().initial_cwd(); - - let mapped_resolver = Arc::new(MappedSpecifierResolver::new( - Some(import_map), - cli_factory.package_json_deps_provider().clone(), - )); - let cli_options = cli_factory.cli_options(); - let Some(config_file) = cli_options.maybe_config_file() else { - bail!( - "Couldn't find a deno.json, deno.jsonc, jsr.json or jsr.jsonc configuration file in {}.", - directory_path.display() - ); - }; - - let diagnostics_collector = PublishDiagnosticsCollector::default(); - - let prepared_data = prepare_packages_for_publishing( - &cli_factory, - publish_flags.allow_slow_types, - &diagnostics_collector, - config_file.clone(), - mapped_resolver, - ) - .await?; - - diagnostics_collector.print_and_error()?; - - if prepared_data.package_by_name.is_empty() { - bail!("No packages to publish"); - } - - if std::env::var("DENO_TESTING_DISABLE_GIT_CHECK") - .ok() - .is_none() - && !publish_flags.allow_dirty - && check_if_git_repo_dirty(cli_options.initial_cwd()).await - { - bail!("Aborting due to uncommitted changes. Check in source code or run with --allow-dirty"); - } - - if publish_flags.dry_run { - for (_, package) in prepared_data.package_by_name { - log::info!( - "{} of {} with files:", - colors::green_bold("Simulating publish"), - colors::gray(package.display_name()), - ); - for file in &package.tarball.files { - log::info!(" {} ({})", file.specifier, human_size(file.size as f64),); - } + .filter(|s| s.as_str().starts_with(root.as_str())); + for specifier in graph_specifiers { + if !publish_specifiers.contains(specifier) { + diagnostics_collector.push(PublishDiagnostic::ExcludedModule { + specifier: specifier.clone(), + }); } - log::warn!("{} Aborting due to --dry-run", colors::yellow("Warning")); - return Ok(()); } - - perform_publish( - cli_factory.http_client(), - prepared_data.publish_order_graph, - prepared_data.package_by_name, - auth_method, - !publish_flags.no_provenance, - ) - .await?; - - Ok(()) } #[derive(Deserialize)] @@ -1147,6 +1173,12 @@ async fn check_if_git_repo_dirty(cwd: &Path) -> bool { !output_str.trim().is_empty() } +#[allow(clippy::print_stderr)] +fn ring_bell() { + // ASCII code for the bell character. 
+  eprint!("\x07");
+}
+
 #[cfg(test)]
 mod tests {
   use super::tar::PublishableTarball;
diff --git a/tests/specs/publish/banned_triple_slash_directives/__test__.jsonc b/tests/specs/publish/banned_triple_slash_directives/__test__.jsonc
new file mode 100644
index 000000000..06a91f5b6
--- /dev/null
+++ b/tests/specs/publish/banned_triple_slash_directives/__test__.jsonc
@@ -0,0 +1,5 @@
+{
+  "args": "publish --dry-run",
+  "output": "publish.out",
+  "exitCode": 1
+}
diff --git a/tests/specs/publish/banned_triple_slash_directives/deno.json b/tests/specs/publish/banned_triple_slash_directives/deno.json
new file mode 100644
index 000000000..fe4300ad6
--- /dev/null
+++ b/tests/specs/publish/banned_triple_slash_directives/deno.json
@@ -0,0 +1,5 @@
+{
+  "name": "@scope/pkg",
+  "version": "1.0.0",
+  "exports": "./mod.ts"
+}
diff --git a/tests/specs/publish/banned_triple_slash_directives/mod.ts b/tests/specs/publish/banned_triple_slash_directives/mod.ts
new file mode 100644
index 000000000..a5bd87ef7
--- /dev/null
+++ b/tests/specs/publish/banned_triple_slash_directives/mod.ts
@@ -0,0 +1,2 @@
+/// <reference lib="deno.ns" />
+/// <reference no-default-lib="true" />
diff --git a/tests/specs/publish/banned_triple_slash_directives/publish.out b/tests/specs/publish/banned_triple_slash_directives/publish.out
new file mode 100644
index 000000000..a67736bc2
--- /dev/null
+++ b/tests/specs/publish/banned_triple_slash_directives/publish.out
@@ -0,0 +1,26 @@
+Check file:///[WILDLINE]/mod.ts
+Checking for slow types in the public API...
+Check file:///[WILDLINE]/mod.ts
+error[banned-triple-slash-directives]: triple slash directives that modify globals are not allowed
+ --> [WILDLINE]mod.ts:1:1
+  |
+1 | /// <reference lib="deno.ns" />
+  | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the triple slash directive
+  = hint: remove the triple slash directive
+
+  info: instead instruct the user of your package to specify these directives
+  info: or set their 'lib' compiler option appropriately
+  docs: https://jsr.io/go/banned-triple-slash-directives
+
+error[banned-triple-slash-directives]: triple slash directives that modify globals are not allowed
+ --> [WILDLINE]:2:1
+  |
+2 | /// <reference no-default-lib="true" />
+  | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the triple slash directive
+  = hint: remove the triple slash directive
+
+  info: instead instruct the user of your package to specify these directives
+  info: or set their 'lib' compiler option appropriately
+  docs: https://jsr.io/go/banned-triple-slash-directives
+
+error: Found 2 problems
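
For quick reference, the user-facing strings of the new diagnostic (its code, message, hint, and docs URL) are spread across several diagnostics.rs hunks above; the sketch below gathers them in one place and prints them roughly the way publish.out shows them. The enum and helper here are simplified, hypothetical stand-ins, not the real PublishDiagnostic type, which also carries a specifier, text_info, and source range and implements deno_ast's Diagnostic trait.

// Simplified stand-in; the string values are taken verbatim from the diff above.
enum PublishDiagnostic {
  BannedTripleSlashDirectives, // real variant fields (specifier, text_info, range) omitted
}

fn describe(d: &PublishDiagnostic) -> [&'static str; 4] {
  match d {
    PublishDiagnostic::BannedTripleSlashDirectives => [
      "banned-triple-slash-directives",                              // code
      "triple slash directives that modify globals are not allowed", // message
      "remove the triple slash directive",                           // hint
      "https://jsr.io/go/banned-triple-slash-directives",            // docs URL
    ],
  }
}

fn main() {
  let [code, message, hint, docs] =
    describe(&PublishDiagnostic::BannedTripleSlashDirectives);
  println!("error[{code}]: {message}");
  println!("  = hint: {hint}");
  println!("  docs: {docs}");
}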