author    David Sherret <dsherret@users.noreply.github.com>  2024-03-07 20:16:32 -0500
committer GitHub <noreply@github.com>  2024-03-07 20:16:32 -0500
commit    2dfc0aca7c6a04d54fe6f9a73be70fc4c591d552 (patch)
tree      58fb01c46364e4888097e7135b2f829f38ce990c /cli/tools
parent    2ed984ba3aa638c3f088ac1edc5c779c7d9195d1 (diff)
fix(publish): make include and exclude work (#22720)
1. Stops `deno publish` from using custom include/exclude behaviour that differed from the other sub commands.
2. Takes ancestor directories into account when resolving gitignore files.
3. Backwards compatible change that adds the ability to unexclude an excluded path by using a negated glob at a more specific level, for all sub commands (see https://github.com/denoland/deno_config/pull/44).
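To illustrate point 3, here is a hypothetical deno.json (semantics assumed from the linked deno_config PR, which is not part of this commit): a path excluded at the top level can be re-included for a single sub command by negating it in that sub command's more specific exclude.

```jsonc
{
  // excluded for every sub command (fmt, lint, test, publish, ...)
  "exclude": ["dist/"],
  "publish": {
    // negated glob at a more specific level: un-excludes dist/ for
    // `deno publish` only
    "exclude": ["!dist/"]
  }
}
```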
Diffstat (limited to 'cli/tools')
-rw-r--r--  cli/tools/bench/mod.rs     |  27
-rw-r--r--  cli/tools/coverage/mod.rs  |  11
-rw-r--r--  cli/tools/doc.rs           |  12
-rw-r--r--  cli/tools/fmt.rs           |   2
-rw-r--r--  cli/tools/lint/mod.rs      |   2
-rw-r--r--  cli/tools/registry/tar.rs  | 179
-rw-r--r--  cli/tools/test/mod.rs      |  35
7 files changed, 110 insertions, 158 deletions
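The hunks below switch every file-collection predicate from a two-argument `(path, patterns)` closure to a single `WalkEntry` argument, and route explicit includes through `matches_pattern_or_exact_path`. Those definitions live in `cli/util/fs.rs` and `cli/util/path.rs`, outside this diffstat, so the following is only a minimal, hypothetical sketch of the shapes the hunks rely on, with field and helper names inferred from how they are used here (the real entry also exposes a `file_type`, and the real pattern set supports globs and remote URLs, not just exact paths):

```rust
use std::path::{Path, PathBuf};

/// Stand-in for deno_config::glob::PathOrPatternSet (exact paths only here).
struct PathOrPatternSet(Vec<PathBuf>);

/// Stand-in for deno_config::glob::FilePatterns (only the field used below).
struct FilePatterns {
    include: Option<PathOrPatternSet>,
}

/// Assumed shape of the entry handed to each file-collection predicate.
struct WalkEntry<'a> {
    path: &'a Path,
    patterns: &'a FilePatterns,
}

/// Simplified version of the helper the bench/test predicates now call:
/// true when the path is explicitly listed in the include set.
fn matches_pattern_or_exact_path(include: &PathOrPatternSet, path: &Path) -> bool {
    include.0.iter().any(|p| p.as_path() == path)
}

/// The predicate shape shared by the collectors after this change: a cheap
/// extension check first, then an escape hatch for explicitly included paths.
fn is_supported_path(entry: WalkEntry) -> bool {
    let is_script = matches!(
        entry.path.extension().and_then(|e| e.to_str()),
        Some("ts" | "js" | "mts" | "mjs")
    );
    if !is_script {
        false
    } else if let Some(include) = &entry.patterns.include {
        matches_pattern_or_exact_path(include, entry.path)
    } else {
        false
    }
}

fn main() {
    let patterns = FilePatterns {
        include: Some(PathOrPatternSet(vec![PathBuf::from("scripts/check.ts")])),
    };
    let entry = WalkEntry {
        path: Path::new("scripts/check.ts"),
        patterns: &patterns,
    };
    // Accepted because the user listed the path explicitly, even though it
    // does not follow the usual test/bench file naming conventions.
    println!("{}", is_supported_path(entry));
}
```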
diff --git a/cli/tools/bench/mod.rs b/cli/tools/bench/mod.rs
index 43b7103cd..b554f7349 100644
--- a/cli/tools/bench/mod.rs
+++ b/cli/tools/bench/mod.rs
@@ -14,12 +14,12 @@ use crate::tools::test::format_test_error;
use crate::tools::test::TestFilter;
use crate::util::file_watcher;
use crate::util::fs::collect_specifiers;
+use crate::util::fs::WalkEntry;
use crate::util::path::is_script_ext;
+use crate::util::path::matches_pattern_or_exact_path;
use crate::version::get_user_agent;
use crate::worker::CliMainWorkerFactory;
-use deno_config::glob::FilePatterns;
-use deno_config::glob::PathOrPattern;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::error::JsError;
@@ -394,25 +394,16 @@ async fn bench_specifiers(
}
/// Checks if the path has a basename and extension Deno supports for benches.
-fn is_supported_bench_path(path: &Path, patterns: &FilePatterns) -> bool {
- if !is_script_ext(path) {
+fn is_supported_bench_path(entry: WalkEntry) -> bool {
+ if !is_script_ext(entry.path) {
false
- } else if has_supported_bench_path_name(path) {
+ } else if has_supported_bench_path_name(entry.path) {
true
- } else {
+ } else if let Some(include) = &entry.patterns.include {
// allow someone to explicitly specify a path
- let matches_exact_path_or_pattern = patterns
- .include
- .as_ref()
- .map(|p| {
- p.inner().iter().any(|p| match p {
- PathOrPattern::Path(p) => p == path,
- PathOrPattern::RemoteUrl(_) => true,
- PathOrPattern::Pattern(p) => p.matches_path(path),
- })
- })
- .unwrap_or(false);
- matches_exact_path_or_pattern
+ matches_pattern_or_exact_path(include, entry.path)
+ } else {
+ false
}
}
diff --git a/cli/tools/coverage/mod.rs b/cli/tools/coverage/mod.rs
index 5cc705741..66c0923de 100644
--- a/cli/tools/coverage/mod.rs
+++ b/cli/tools/coverage/mod.rs
@@ -388,23 +388,20 @@ fn collect_coverages(
initial_cwd.to_path_buf(),
)])
} else {
- PathOrPatternSet::from_relative_path_or_patterns(
+ PathOrPatternSet::from_include_relative_path_or_patterns(
initial_cwd,
&files.include,
)?
}
}),
- exclude: PathOrPatternSet::from_relative_path_or_patterns(
+ exclude: PathOrPatternSet::from_exclude_relative_path_or_patterns(
initial_cwd,
&files.ignore,
)
.context("Invalid ignore pattern.")?,
};
- let file_paths = FileCollector::new(|file_path, _| {
- file_path
- .extension()
- .map(|ext| ext == "json")
- .unwrap_or(false)
+ let file_paths = FileCollector::new(|e| {
+ e.path.extension().map(|ext| ext == "json").unwrap_or(false)
})
.ignore_git_folder()
.ignore_node_modules()
diff --git a/cli/tools/doc.rs b/cli/tools/doc.rs
index 0b7b26e31..013a407aa 100644
--- a/cli/tools/doc.rs
+++ b/cli/tools/doc.rs
@@ -96,13 +96,15 @@ pub async fn doc(flags: Flags, doc_flags: DocFlags) -> Result<(), AnyError> {
let module_specifiers = collect_specifiers(
FilePatterns {
base: cli_options.initial_cwd().to_path_buf(),
- include: Some(PathOrPatternSet::from_relative_path_or_patterns(
- cli_options.initial_cwd(),
- source_files,
- )?),
+ include: Some(
+ PathOrPatternSet::from_include_relative_path_or_patterns(
+ cli_options.initial_cwd(),
+ source_files,
+ )?,
+ ),
exclude: Default::default(),
},
- |_, _| true,
+ |_| true,
)?;
let graph = module_graph_creator
.create_graph(GraphKind::TypesOnly, module_specifiers.clone())
diff --git a/cli/tools/fmt.rs b/cli/tools/fmt.rs
index 86fc9700e..0f6afb232 100644
--- a/cli/tools/fmt.rs
+++ b/cli/tools/fmt.rs
@@ -154,7 +154,7 @@ async fn format_files(
}
fn collect_fmt_files(files: FilePatterns) -> Result<Vec<PathBuf>, AnyError> {
- FileCollector::new(|path, _| is_supported_ext_fmt(path))
+ FileCollector::new(|e| is_supported_ext_fmt(e.path))
.ignore_git_folder()
.ignore_node_modules()
.ignore_vendor_folder()
diff --git a/cli/tools/lint/mod.rs b/cli/tools/lint/mod.rs
index ee7350fb4..1b81fca5a 100644
--- a/cli/tools/lint/mod.rs
+++ b/cli/tools/lint/mod.rs
@@ -263,7 +263,7 @@ async fn lint_files(
}
fn collect_lint_files(files: FilePatterns) -> Result<Vec<PathBuf>, AnyError> {
- FileCollector::new(|path, _| is_script_ext(path))
+ FileCollector::new(|e| is_script_ext(e.path))
.ignore_git_folder()
.ignore_node_modules()
.ignore_vendor_folder()
diff --git a/cli/tools/registry/tar.rs b/cli/tools/registry/tar.rs
index d24d8abaa..0da410764 100644
--- a/cli/tools/registry/tar.rs
+++ b/cli/tools/registry/tar.rs
@@ -2,13 +2,11 @@
use bytes::Bytes;
use deno_ast::MediaType;
+use deno_ast::ModuleSpecifier;
use deno_config::glob::FilePatterns;
-use deno_config::glob::PathOrPattern;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::url::Url;
-use ignore::overrides::OverrideBuilder;
-use ignore::WalkBuilder;
use sha2::Digest;
use std::collections::HashSet;
use std::fmt::Write as FmtWrite;
@@ -18,6 +16,7 @@ use tar::Header;
use crate::cache::LazyGraphSourceParser;
use crate::tools::registry::paths::PackagePath;
+use crate::util::fs::FileCollector;
use super::diagnostics::PublishDiagnostic;
use super::diagnostics::PublishDiagnosticsCollector;
@@ -45,75 +44,60 @@ pub fn create_gzipped_tarball(
unfurler: &SpecifierUnfurler,
file_patterns: Option<FilePatterns>,
) -> Result<PublishableTarball, AnyError> {
+ let file_patterns = file_patterns
+ .unwrap_or_else(|| FilePatterns::new_with_base(dir.to_path_buf()));
let mut tar = TarGzArchive::new();
let mut files = vec![];
- let mut paths = HashSet::new();
-
- let mut ob = OverrideBuilder::new(dir);
- ob.add("!.git")?.add("!node_modules")?.add("!.DS_Store")?;
-
- for pattern in file_patterns.as_ref().iter().flat_map(|p| p.include.iter()) {
- for path_or_pat in pattern.inner() {
- match path_or_pat {
- PathOrPattern::Path(p) => ob.add(p.to_str().unwrap())?,
- PathOrPattern::Pattern(p) => ob.add(p.as_str())?,
- PathOrPattern::RemoteUrl(_) => continue,
- };
+ let iter_paths = FileCollector::new(|e| {
+ if !e.file_type.is_file() {
+ if let Ok(specifier) = ModuleSpecifier::from_file_path(e.path) {
+ diagnostics_collector.push(PublishDiagnostic::UnsupportedFileType {
+ specifier,
+ kind: if e.file_type.is_symlink() {
+ "symlink".to_owned()
+ } else {
+ format!("{:?}", e.file_type)
+ },
+ });
+ }
+ return false;
}
- }
-
- let overrides = ob.build()?;
-
- let iterator = WalkBuilder::new(dir)
- .follow_links(false)
- .require_git(false)
- .git_ignore(true)
- .git_global(true)
- .git_exclude(true)
- .overrides(overrides)
- .filter_entry(move |entry| {
- let matches_pattern = file_patterns
- .as_ref()
- .map(|p| p.matches_path(entry.path()))
- .unwrap_or(true);
- matches_pattern
- })
- .build();
+ e.path.file_name().map(|s| s != ".DS_Store").unwrap_or(true)
+ })
+ .ignore_git_folder()
+ .ignore_node_modules()
+ .ignore_vendor_folder()
+ .use_gitignore()
+ .collect_file_patterns(file_patterns)?;
- for entry in iterator {
- let entry = entry?;
+ let mut paths = HashSet::with_capacity(iter_paths.len());
- let path = entry.path();
- let Some(file_type) = entry.file_type() else {
- // entry doesn’t have a file type if it corresponds to stdin.
+ for path in iter_paths {
+ let Ok(specifier) = Url::from_file_path(&path) else {
+ diagnostics_collector
+ .to_owned()
+ .push(PublishDiagnostic::InvalidPath {
+ path: path.to_path_buf(),
+ message: "unable to convert path to url".to_string(),
+ });
continue;
};
- let Ok(specifier) = Url::from_file_path(path) else {
+ let Ok(relative_path) = path.strip_prefix(dir) else {
diagnostics_collector
.to_owned()
.push(PublishDiagnostic::InvalidPath {
path: path.to_path_buf(),
- message: "unable to convert path to url".to_string(),
+ message: "path is not in publish directory".to_string(),
});
continue;
};
- if file_type.is_file() {
- let Ok(relative_path) = path.strip_prefix(dir) else {
- diagnostics_collector
- .to_owned()
- .push(PublishDiagnostic::InvalidPath {
- path: path.to_path_buf(),
- message: "path is not in publish directory".to_string(),
- });
- continue;
- };
-
- let path_str = relative_path.components().fold(
- "".to_string(),
- |mut path, component| {
+ let path_str =
+ relative_path
+ .components()
+ .fold("".to_string(), |mut path, component| {
path.push('/');
match component {
std::path::Component::Normal(normal) => {
@@ -124,66 +108,55 @@ pub fn create_gzipped_tarball(
_ => unreachable!(),
}
path
- },
- );
+ });
- match PackagePath::new(path_str.clone()) {
- Ok(package_path) => {
- if !paths.insert(package_path) {
- diagnostics_collector.to_owned().push(
- PublishDiagnostic::DuplicatePath {
- path: path.to_path_buf(),
- },
- );
- }
- }
- Err(err) => {
+ match PackagePath::new(path_str.clone()) {
+ Ok(package_path) => {
+ if !paths.insert(package_path) {
diagnostics_collector.to_owned().push(
- PublishDiagnostic::InvalidPath {
+ PublishDiagnostic::DuplicatePath {
path: path.to_path_buf(),
- message: err.to_string(),
},
);
}
}
-
- let content = resolve_content_maybe_unfurling(
- path,
- &specifier,
- unfurler,
- source_parser,
- diagnostics_collector,
- )?;
-
- let media_type = MediaType::from_specifier(&specifier);
- if matches!(media_type, MediaType::Jsx | MediaType::Tsx) {
- diagnostics_collector.push(PublishDiagnostic::UnsupportedJsxTsx {
- specifier: specifier.clone(),
- });
+ Err(err) => {
+ diagnostics_collector
+ .to_owned()
+ .push(PublishDiagnostic::InvalidPath {
+ path: path.to_path_buf(),
+ message: err.to_string(),
+ });
}
+ }
+
+ let content = resolve_content_maybe_unfurling(
+ &path,
+ &specifier,
+ unfurler,
+ source_parser,
+ diagnostics_collector,
+ )?;
- files.push(PublishableTarballFile {
- path_str: path_str.clone(),
+ let media_type = MediaType::from_specifier(&specifier);
+ if matches!(media_type, MediaType::Jsx | MediaType::Tsx) {
+ diagnostics_collector.push(PublishDiagnostic::UnsupportedJsxTsx {
specifier: specifier.clone(),
- // This hash string matches the checksum computed by registry
- hash: format!("sha256-{:x}", sha2::Sha256::digest(&content)),
- size: content.len(),
- });
- tar
- .add_file(format!(".{}", path_str), &content)
- .with_context(|| {
- format!("Unable to add file to tarball '{}'", entry.path().display())
- })?;
- } else if !file_type.is_dir() {
- diagnostics_collector.push(PublishDiagnostic::UnsupportedFileType {
- specifier,
- kind: if file_type.is_symlink() {
- "symlink".to_owned()
- } else {
- format!("{file_type:?}")
- },
});
}
+
+ files.push(PublishableTarballFile {
+ path_str: path_str.clone(),
+ specifier: specifier.clone(),
+ // This hash string matches the checksum computed by registry
+ hash: format!("sha256-{:x}", sha2::Sha256::digest(&content)),
+ size: content.len(),
+ });
+ tar
+ .add_file(format!(".{}", path_str), &content)
+ .with_context(|| {
+ format!("Unable to add file to tarball '{}'", path.display())
+ })?;
}
let v = tar.finish().context("Unable to finish tarball")?;
diff --git a/cli/tools/test/mod.rs b/cli/tools/test/mod.rs
index 4f500df3d..1970012a1 100644
--- a/cli/tools/test/mod.rs
+++ b/cli/tools/test/mod.rs
@@ -15,16 +15,17 @@ use crate::module_loader::ModuleLoadPreparer;
use crate::ops;
use crate::util::file_watcher;
use crate::util::fs::collect_specifiers;
+use crate::util::fs::WalkEntry;
use crate::util::path::get_extension;
use crate::util::path::is_script_ext;
use crate::util::path::mapped_specifier_for_tsc;
+use crate::util::path::matches_pattern_or_exact_path;
use crate::worker::CliMainWorkerFactory;
use deno_ast::swc::common::comments::CommentKind;
use deno_ast::MediaType;
use deno_ast::SourceRangedForSpanned;
use deno_config::glob::FilePatterns;
-use deno_config::glob::PathOrPattern;
use deno_core::anyhow;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context as _;
@@ -1350,28 +1351,16 @@ pub async fn report_tests(
(Ok(()), receiver)
}
-fn is_supported_test_path_predicate(
- path: &Path,
- patterns: &FilePatterns,
-) -> bool {
- if !is_script_ext(path) {
+fn is_supported_test_path_predicate(entry: WalkEntry) -> bool {
+ if !is_script_ext(entry.path) {
false
- } else if has_supported_test_path_name(path) {
+ } else if has_supported_test_path_name(entry.path) {
true
- } else {
+ } else if let Some(include) = &entry.patterns.include {
// allow someone to explicitly specify a path
- let matches_exact_path_or_pattern = patterns
- .include
- .as_ref()
- .map(|p| {
- p.inner().iter().any(|p| match p {
- PathOrPattern::Path(p) => p == path,
- PathOrPattern::RemoteUrl(_) => true,
- PathOrPattern::Pattern(p) => p.matches_path(path),
- })
- })
- .unwrap_or(false);
- matches_exact_path_or_pattern
+ matches_pattern_or_exact_path(include, entry.path)
+ } else {
+ false
}
}
@@ -1432,7 +1421,7 @@ fn collect_specifiers_with_test_mode(
collect_specifiers(files.clone(), is_supported_test_path_predicate)?;
if *include_inline {
- return collect_specifiers(files, |p, _| is_supported_test_ext(p)).map(
+ return collect_specifiers(files, |e| is_supported_test_ext(e.path)).map(
|specifiers| {
specifiers
.into_iter()
@@ -1608,8 +1597,8 @@ pub async fn run_tests_with_watch(
let module_graph_creator = factory.module_graph_creator().await?;
let file_fetcher = factory.file_fetcher()?;
let test_modules = if test_options.doc {
- collect_specifiers(test_options.files.clone(), |p, _| {
- is_supported_test_ext(p)
+ collect_specifiers(test_options.files.clone(), |e| {
+ is_supported_test_ext(e.path)
})
} else {
collect_specifiers(