author     David Sherret <dsherret@users.noreply.github.com>  2024-03-07 20:16:32 -0500
committer  GitHub <noreply@github.com>                        2024-03-07 20:16:32 -0500
commit     2dfc0aca7c6a04d54fe6f9a73be70fc4c591d552 (patch)
tree       58fb01c46364e4888097e7135b2f829f38ce990c /cli
parent     2ed984ba3aa638c3f088ac1edc5c779c7d9195d1 (diff)
fix(publish): make include and exclude work (#22720)
1. Stops `deno publish` from using custom include/exclude behaviour that differed from the other subcommands.
2. Takes ancestor directories into account when resolving gitignores.
3. Backwards-compatible change that adds the ability to un-exclude an excluded path by using a negated glob at a more specific level, for all subcommands (see https://github.com/denoland/deno_config/pull/44).
Diffstat (limited to 'cli')
-rw-r--r--  cli/Cargo.toml             |   2
-rw-r--r--  cli/args/mod.rs            |  14
-rw-r--r--  cli/lsp/config.rs          |  13
-rw-r--r--  cli/lsp/documents.rs       |  20
-rw-r--r--  cli/tools/bench/mod.rs     |  27
-rw-r--r--  cli/tools/coverage/mod.rs  |  11
-rw-r--r--  cli/tools/doc.rs           |  12
-rw-r--r--  cli/tools/fmt.rs           |   2
-rw-r--r--  cli/tools/lint/mod.rs      |   2
-rw-r--r--  cli/tools/registry/tar.rs  | 179
-rw-r--r--  cli/tools/test/mod.rs      |  35
-rw-r--r--  cli/util/fs.rs             | 104
-rw-r--r--  cli/util/gitignore.rs      | 151
-rw-r--r--  cli/util/mod.rs            |   1
-rw-r--r--  cli/util/path.rs           |  35
15 files changed, 409 insertions, 199 deletions
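
For context, a minimal sketch (not part of this commit) of the un-exclude behaviour described in point 3 of the commit message, written against the deno_config 0.14 glob API that this change adopts. The base directory, file names, and expected results are illustrative assumptions based on the description, not a verified run.

use deno_config::glob::{FilePatterns, FilePatternsMatch, PathKind, PathOrPatternSet};
use std::path::Path;

fn main() {
    let base = Path::new("/project");
    // Exclude the whole "fixtures" directory, then opt one file back in with a
    // negated entry at a more specific level.
    let exclude = PathOrPatternSet::from_exclude_relative_path_or_patterns(
        base,
        &["fixtures".to_string(), "!fixtures/keep.ts".to_string()],
    )
    .unwrap();
    let patterns = FilePatterns {
        base: base.to_path_buf(),
        include: None,
        exclude,
    };
    // Expected (per the description above): false, because this file is still excluded.
    println!(
        "gen.ts included: {}",
        patterns.matches_path(Path::new("/project/fixtures/gen.ts"), PathKind::File)
    );
    // Expected (per the description above): true, because the negated entry
    // opts this file back out of the exclude.
    let keep = patterns
        .matches_path_detail(Path::new("/project/fixtures/keep.ts"), PathKind::File);
    println!(
        "keep.ts opted back in: {}",
        matches!(keep, FilePatternsMatch::PassedOptedOutExclude)
    );
}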
diff --git a/cli/Cargo.toml b/cli/Cargo.toml
index d11ef8849..11fdcc123 100644
--- a/cli/Cargo.toml
+++ b/cli/Cargo.toml
@@ -64,7 +64,7 @@ winres.workspace = true
[dependencies]
deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
deno_cache_dir = { workspace = true }
-deno_config = "=0.12.0"
+deno_config = "=0.14.1"
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = { version = "=0.113.1", features = ["html"] }
deno_emit = "=0.38.2"
diff --git a/cli/args/mod.rs b/cli/args/mod.rs
index acdc96526..d72b41947 100644
--- a/cli/args/mod.rs
+++ b/cli/args/mod.rs
@@ -1258,7 +1258,7 @@ impl CliOptions {
pub fn resolve_config_excludes(&self) -> Result<PathOrPatternSet, AnyError> {
let maybe_config_files = if let Some(config_file) = &self.maybe_config_file
{
- config_file.to_files_config()?
+ Some(config_file.to_files_config()?)
} else {
None
};
@@ -1750,14 +1750,14 @@ fn resolve_files(
if let Some(file_flags) = maybe_file_flags {
if !file_flags.include.is_empty() {
maybe_files_config.include =
- Some(PathOrPatternSet::from_relative_path_or_patterns(
+ Some(PathOrPatternSet::from_include_relative_path_or_patterns(
initial_cwd,
&file_flags.include,
)?);
}
if !file_flags.ignore.is_empty() {
maybe_files_config.exclude =
- PathOrPatternSet::from_relative_path_or_patterns(
+ PathOrPatternSet::from_exclude_relative_path_or_patterns(
initial_cwd,
&file_flags.ignore,
)?;
@@ -1886,7 +1886,7 @@ mod test {
temp_dir.write("pages/[id].ts", "");
let temp_dir_path = temp_dir.path().as_path();
- let error = PathOrPatternSet::from_relative_path_or_patterns(
+ let error = PathOrPatternSet::from_include_relative_path_or_patterns(
temp_dir_path,
&["data/**********.ts".to_string()],
)
@@ -1897,7 +1897,7 @@ mod test {
Some(FilePatterns {
base: temp_dir_path.to_path_buf(),
include: Some(
- PathOrPatternSet::from_relative_path_or_patterns(
+ PathOrPatternSet::from_include_relative_path_or_patterns(
temp_dir_path,
&[
"data/test1.?s".to_string(),
@@ -1908,7 +1908,7 @@ mod test {
)
.unwrap(),
),
- exclude: PathOrPatternSet::from_relative_path_or_patterns(
+ exclude: PathOrPatternSet::from_exclude_relative_path_or_patterns(
temp_dir_path,
&["nested/**/*bazz.ts".to_string()],
)
@@ -1919,7 +1919,7 @@ mod test {
)
.unwrap();
- let mut files = FileCollector::new(|_, _| true)
+ let mut files = FileCollector::new(|_| true)
.ignore_git_folder()
.ignore_node_modules()
.ignore_vendor_folder()
diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs
index 3d24c8c20..120828a79 100644
--- a/cli/lsp/config.rs
+++ b/cli/lsp/config.rs
@@ -1083,7 +1083,7 @@ impl Config {
pub fn get_disabled_paths(&self) -> PathOrPatternSet {
let mut path_or_patterns = vec![];
if let Some(cf) = self.maybe_config_file() {
- if let Some(files) = cf.to_files_config().ok().flatten() {
+ if let Ok(files) = cf.to_files_config() {
for path in files.exclude.into_path_or_patterns() {
path_or_patterns.push(path);
}
@@ -1095,7 +1095,14 @@ impl Config {
continue;
};
let settings = self.workspace_settings_for_specifier(workspace_uri);
- if settings.enable.unwrap_or_else(|| self.has_config_file()) {
+ let is_enabled = settings
+ .enable_paths
+ .as_ref()
+ .map(|p| !p.is_empty())
+ .unwrap_or_else(|| {
+ settings.enable.unwrap_or_else(|| self.has_config_file())
+ });
+ if is_enabled {
for path in &settings.disable_paths {
path_or_patterns.push(PathOrPattern::Path(workspace_path.join(path)));
}
@@ -1177,7 +1184,7 @@ fn specifier_enabled(
workspace_folders: &[(Url, lsp::WorkspaceFolder)],
) -> bool {
if let Some(cf) = config_file {
- if let Some(files) = cf.to_files_config().ok().flatten() {
+ if let Ok(files) = cf.to_files_config() {
if !files.matches_specifier(specifier) {
return false;
}
diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs
index b825bc020..7912dad78 100644
--- a/cli/lsp/documents.rs
+++ b/cli/lsp/documents.rs
@@ -1341,11 +1341,12 @@ impl Documents {
.inner()
.iter()
.map(|p| match p {
- PathOrPattern::Path(p) => {
- Cow::Owned(p.to_string_lossy().to_string())
+ PathOrPattern::Path(p) => p.to_string_lossy(),
+ PathOrPattern::NegatedPath(p) => {
+ Cow::Owned(format!("!{}", p.to_string_lossy()))
}
PathOrPattern::RemoteUrl(p) => Cow::Borrowed(p.as_str()),
- PathOrPattern::Pattern(p) => Cow::Borrowed(p.as_str()),
+ PathOrPattern::Pattern(p) => p.as_str(),
})
.collect::<Vec<_>>();
// ensure these are sorted so the hashing is deterministic
@@ -2061,8 +2062,13 @@ impl Iterator for PreloadDocumentFinder {
if let Ok(entry) = entry {
let path = entry.path();
if let Ok(file_type) = entry.file_type() {
- if file_patterns.matches_path(&path) {
- if file_type.is_dir() && is_discoverable_dir(&path) {
+ let is_dir = file_type.is_dir();
+ let path_kind = match is_dir {
+ true => deno_config::glob::PathKind::Directory,
+ false => deno_config::glob::PathKind::File,
+ };
+ if file_patterns.matches_path(&path, path_kind) {
+ if is_dir && is_discoverable_dir(&path) {
self.pending_entries.push_back(PendingEntry::Dir(
path.to_path_buf(),
file_patterns.clone(),
@@ -2354,7 +2360,7 @@ console.log(b, "hello deno");
file_patterns: FilePatterns {
base: temp_dir.path().to_path_buf(),
include: Some(
- PathOrPatternSet::from_relative_path_or_patterns(
+ PathOrPatternSet::from_include_relative_path_or_patterns(
temp_dir.path().as_path(),
&[
"root1".to_string(),
@@ -2415,7 +2421,7 @@ console.log(b, "hello deno");
file_patterns: FilePatterns {
base: temp_dir.path().to_path_buf(),
include: Default::default(),
- exclude: PathOrPatternSet::from_relative_path_or_patterns(
+ exclude: PathOrPatternSet::from_exclude_relative_path_or_patterns(
temp_dir.path().as_path(),
&[
"root1".to_string(),
diff --git a/cli/tools/bench/mod.rs b/cli/tools/bench/mod.rs
index 43b7103cd..b554f7349 100644
--- a/cli/tools/bench/mod.rs
+++ b/cli/tools/bench/mod.rs
@@ -14,12 +14,12 @@ use crate::tools::test::format_test_error;
use crate::tools::test::TestFilter;
use crate::util::file_watcher;
use crate::util::fs::collect_specifiers;
+use crate::util::fs::WalkEntry;
use crate::util::path::is_script_ext;
+use crate::util::path::matches_pattern_or_exact_path;
use crate::version::get_user_agent;
use crate::worker::CliMainWorkerFactory;
-use deno_config::glob::FilePatterns;
-use deno_config::glob::PathOrPattern;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::error::JsError;
@@ -394,25 +394,16 @@ async fn bench_specifiers(
}
/// Checks if the path has a basename and extension Deno supports for benches.
-fn is_supported_bench_path(path: &Path, patterns: &FilePatterns) -> bool {
- if !is_script_ext(path) {
+fn is_supported_bench_path(entry: WalkEntry) -> bool {
+ if !is_script_ext(entry.path) {
false
- } else if has_supported_bench_path_name(path) {
+ } else if has_supported_bench_path_name(entry.path) {
true
- } else {
+ } else if let Some(include) = &entry.patterns.include {
// allow someone to explicitly specify a path
- let matches_exact_path_or_pattern = patterns
- .include
- .as_ref()
- .map(|p| {
- p.inner().iter().any(|p| match p {
- PathOrPattern::Path(p) => p == path,
- PathOrPattern::RemoteUrl(_) => true,
- PathOrPattern::Pattern(p) => p.matches_path(path),
- })
- })
- .unwrap_or(false);
- matches_exact_path_or_pattern
+ matches_pattern_or_exact_path(include, entry.path)
+ } else {
+ false
}
}
diff --git a/cli/tools/coverage/mod.rs b/cli/tools/coverage/mod.rs
index 5cc705741..66c0923de 100644
--- a/cli/tools/coverage/mod.rs
+++ b/cli/tools/coverage/mod.rs
@@ -388,23 +388,20 @@ fn collect_coverages(
initial_cwd.to_path_buf(),
)])
} else {
- PathOrPatternSet::from_relative_path_or_patterns(
+ PathOrPatternSet::from_include_relative_path_or_patterns(
initial_cwd,
&files.include,
)?
}
}),
- exclude: PathOrPatternSet::from_relative_path_or_patterns(
+ exclude: PathOrPatternSet::from_exclude_relative_path_or_patterns(
initial_cwd,
&files.ignore,
)
.context("Invalid ignore pattern.")?,
};
- let file_paths = FileCollector::new(|file_path, _| {
- file_path
- .extension()
- .map(|ext| ext == "json")
- .unwrap_or(false)
+ let file_paths = FileCollector::new(|e| {
+ e.path.extension().map(|ext| ext == "json").unwrap_or(false)
})
.ignore_git_folder()
.ignore_node_modules()
diff --git a/cli/tools/doc.rs b/cli/tools/doc.rs
index 0b7b26e31..013a407aa 100644
--- a/cli/tools/doc.rs
+++ b/cli/tools/doc.rs
@@ -96,13 +96,15 @@ pub async fn doc(flags: Flags, doc_flags: DocFlags) -> Result<(), AnyError> {
let module_specifiers = collect_specifiers(
FilePatterns {
base: cli_options.initial_cwd().to_path_buf(),
- include: Some(PathOrPatternSet::from_relative_path_or_patterns(
- cli_options.initial_cwd(),
- source_files,
- )?),
+ include: Some(
+ PathOrPatternSet::from_include_relative_path_or_patterns(
+ cli_options.initial_cwd(),
+ source_files,
+ )?,
+ ),
exclude: Default::default(),
},
- |_, _| true,
+ |_| true,
)?;
let graph = module_graph_creator
.create_graph(GraphKind::TypesOnly, module_specifiers.clone())
diff --git a/cli/tools/fmt.rs b/cli/tools/fmt.rs
index 86fc9700e..0f6afb232 100644
--- a/cli/tools/fmt.rs
+++ b/cli/tools/fmt.rs
@@ -154,7 +154,7 @@ async fn format_files(
}
fn collect_fmt_files(files: FilePatterns) -> Result<Vec<PathBuf>, AnyError> {
- FileCollector::new(|path, _| is_supported_ext_fmt(path))
+ FileCollector::new(|e| is_supported_ext_fmt(e.path))
.ignore_git_folder()
.ignore_node_modules()
.ignore_vendor_folder()
diff --git a/cli/tools/lint/mod.rs b/cli/tools/lint/mod.rs
index ee7350fb4..1b81fca5a 100644
--- a/cli/tools/lint/mod.rs
+++ b/cli/tools/lint/mod.rs
@@ -263,7 +263,7 @@ async fn lint_files(
}
fn collect_lint_files(files: FilePatterns) -> Result<Vec<PathBuf>, AnyError> {
- FileCollector::new(|path, _| is_script_ext(path))
+ FileCollector::new(|e| is_script_ext(e.path))
.ignore_git_folder()
.ignore_node_modules()
.ignore_vendor_folder()
diff --git a/cli/tools/registry/tar.rs b/cli/tools/registry/tar.rs
index d24d8abaa..0da410764 100644
--- a/cli/tools/registry/tar.rs
+++ b/cli/tools/registry/tar.rs
@@ -2,13 +2,11 @@
use bytes::Bytes;
use deno_ast::MediaType;
+use deno_ast::ModuleSpecifier;
use deno_config::glob::FilePatterns;
-use deno_config::glob::PathOrPattern;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::url::Url;
-use ignore::overrides::OverrideBuilder;
-use ignore::WalkBuilder;
use sha2::Digest;
use std::collections::HashSet;
use std::fmt::Write as FmtWrite;
@@ -18,6 +16,7 @@ use tar::Header;
use crate::cache::LazyGraphSourceParser;
use crate::tools::registry::paths::PackagePath;
+use crate::util::fs::FileCollector;
use super::diagnostics::PublishDiagnostic;
use super::diagnostics::PublishDiagnosticsCollector;
@@ -45,75 +44,60 @@ pub fn create_gzipped_tarball(
unfurler: &SpecifierUnfurler,
file_patterns: Option<FilePatterns>,
) -> Result<PublishableTarball, AnyError> {
+ let file_patterns = file_patterns
+ .unwrap_or_else(|| FilePatterns::new_with_base(dir.to_path_buf()));
let mut tar = TarGzArchive::new();
let mut files = vec![];
- let mut paths = HashSet::new();
-
- let mut ob = OverrideBuilder::new(dir);
- ob.add("!.git")?.add("!node_modules")?.add("!.DS_Store")?;
-
- for pattern in file_patterns.as_ref().iter().flat_map(|p| p.include.iter()) {
- for path_or_pat in pattern.inner() {
- match path_or_pat {
- PathOrPattern::Path(p) => ob.add(p.to_str().unwrap())?,
- PathOrPattern::Pattern(p) => ob.add(p.as_str())?,
- PathOrPattern::RemoteUrl(_) => continue,
- };
+ let iter_paths = FileCollector::new(|e| {
+ if !e.file_type.is_file() {
+ if let Ok(specifier) = ModuleSpecifier::from_file_path(e.path) {
+ diagnostics_collector.push(PublishDiagnostic::UnsupportedFileType {
+ specifier,
+ kind: if e.file_type.is_symlink() {
+ "symlink".to_owned()
+ } else {
+ format!("{:?}", e.file_type)
+ },
+ });
+ }
+ return false;
}
- }
-
- let overrides = ob.build()?;
-
- let iterator = WalkBuilder::new(dir)
- .follow_links(false)
- .require_git(false)
- .git_ignore(true)
- .git_global(true)
- .git_exclude(true)
- .overrides(overrides)
- .filter_entry(move |entry| {
- let matches_pattern = file_patterns
- .as_ref()
- .map(|p| p.matches_path(entry.path()))
- .unwrap_or(true);
- matches_pattern
- })
- .build();
+ e.path.file_name().map(|s| s != ".DS_Store").unwrap_or(true)
+ })
+ .ignore_git_folder()
+ .ignore_node_modules()
+ .ignore_vendor_folder()
+ .use_gitignore()
+ .collect_file_patterns(file_patterns)?;
- for entry in iterator {
- let entry = entry?;
+ let mut paths = HashSet::with_capacity(iter_paths.len());
- let path = entry.path();
- let Some(file_type) = entry.file_type() else {
- // entry doesn’t have a file type if it corresponds to stdin.
+ for path in iter_paths {
+ let Ok(specifier) = Url::from_file_path(&path) else {
+ diagnostics_collector
+ .to_owned()
+ .push(PublishDiagnostic::InvalidPath {
+ path: path.to_path_buf(),
+ message: "unable to convert path to url".to_string(),
+ });
continue;
};
- let Ok(specifier) = Url::from_file_path(path) else {
+ let Ok(relative_path) = path.strip_prefix(dir) else {
diagnostics_collector
.to_owned()
.push(PublishDiagnostic::InvalidPath {
path: path.to_path_buf(),
- message: "unable to convert path to url".to_string(),
+ message: "path is not in publish directory".to_string(),
});
continue;
};
- if file_type.is_file() {
- let Ok(relative_path) = path.strip_prefix(dir) else {
- diagnostics_collector
- .to_owned()
- .push(PublishDiagnostic::InvalidPath {
- path: path.to_path_buf(),
- message: "path is not in publish directory".to_string(),
- });
- continue;
- };
-
- let path_str = relative_path.components().fold(
- "".to_string(),
- |mut path, component| {
+ let path_str =
+ relative_path
+ .components()
+ .fold("".to_string(), |mut path, component| {
path.push('/');
match component {
std::path::Component::Normal(normal) => {
@@ -124,66 +108,55 @@ pub fn create_gzipped_tarball(
_ => unreachable!(),
}
path
- },
- );
+ });
- match PackagePath::new(path_str.clone()) {
- Ok(package_path) => {
- if !paths.insert(package_path) {
- diagnostics_collector.to_owned().push(
- PublishDiagnostic::DuplicatePath {
- path: path.to_path_buf(),
- },
- );
- }
- }
- Err(err) => {
+ match PackagePath::new(path_str.clone()) {
+ Ok(package_path) => {
+ if !paths.insert(package_path) {
diagnostics_collector.to_owned().push(
- PublishDiagnostic::InvalidPath {
+ PublishDiagnostic::DuplicatePath {
path: path.to_path_buf(),
- message: err.to_string(),
},
);
}
}
-
- let content = resolve_content_maybe_unfurling(
- path,
- &specifier,
- unfurler,
- source_parser,
- diagnostics_collector,
- )?;
-
- let media_type = MediaType::from_specifier(&specifier);
- if matches!(media_type, MediaType::Jsx | MediaType::Tsx) {
- diagnostics_collector.push(PublishDiagnostic::UnsupportedJsxTsx {
- specifier: specifier.clone(),
- });
+ Err(err) => {
+ diagnostics_collector
+ .to_owned()
+ .push(PublishDiagnostic::InvalidPath {
+ path: path.to_path_buf(),
+ message: err.to_string(),
+ });
}
+ }
+
+ let content = resolve_content_maybe_unfurling(
+ &path,
+ &specifier,
+ unfurler,
+ source_parser,
+ diagnostics_collector,
+ )?;
- files.push(PublishableTarballFile {
- path_str: path_str.clone(),
+ let media_type = MediaType::from_specifier(&specifier);
+ if matches!(media_type, MediaType::Jsx | MediaType::Tsx) {
+ diagnostics_collector.push(PublishDiagnostic::UnsupportedJsxTsx {
specifier: specifier.clone(),
- // This hash string matches the checksum computed by registry
- hash: format!("sha256-{:x}", sha2::Sha256::digest(&content)),
- size: content.len(),
- });
- tar
- .add_file(format!(".{}", path_str), &content)
- .with_context(|| {
- format!("Unable to add file to tarball '{}'", entry.path().display())
- })?;
- } else if !file_type.is_dir() {
- diagnostics_collector.push(PublishDiagnostic::UnsupportedFileType {
- specifier,
- kind: if file_type.is_symlink() {
- "symlink".to_owned()
- } else {
- format!("{file_type:?}")
- },
});
}
+
+ files.push(PublishableTarballFile {
+ path_str: path_str.clone(),
+ specifier: specifier.clone(),
+ // This hash string matches the checksum computed by registry
+ hash: format!("sha256-{:x}", sha2::Sha256::digest(&content)),
+ size: content.len(),
+ });
+ tar
+ .add_file(format!(".{}", path_str), &content)
+ .with_context(|| {
+ format!("Unable to add file to tarball '{}'", path.display())
+ })?;
}
let v = tar.finish().context("Unable to finish tarball")?;
diff --git a/cli/tools/test/mod.rs b/cli/tools/test/mod.rs
index 4f500df3d..1970012a1 100644
--- a/cli/tools/test/mod.rs
+++ b/cli/tools/test/mod.rs
@@ -15,16 +15,17 @@ use crate::module_loader::ModuleLoadPreparer;
use crate::ops;
use crate::util::file_watcher;
use crate::util::fs::collect_specifiers;
+use crate::util::fs::WalkEntry;
use crate::util::path::get_extension;
use crate::util::path::is_script_ext;
use crate::util::path::mapped_specifier_for_tsc;
+use crate::util::path::matches_pattern_or_exact_path;
use crate::worker::CliMainWorkerFactory;
use deno_ast::swc::common::comments::CommentKind;
use deno_ast::MediaType;
use deno_ast::SourceRangedForSpanned;
use deno_config::glob::FilePatterns;
-use deno_config::glob::PathOrPattern;
use deno_core::anyhow;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context as _;
@@ -1350,28 +1351,16 @@ pub async fn report_tests(
(Ok(()), receiver)
}
-fn is_supported_test_path_predicate(
- path: &Path,
- patterns: &FilePatterns,
-) -> bool {
- if !is_script_ext(path) {
+fn is_supported_test_path_predicate(entry: WalkEntry) -> bool {
+ if !is_script_ext(entry.path) {
false
- } else if has_supported_test_path_name(path) {
+ } else if has_supported_test_path_name(entry.path) {
true
- } else {
+ } else if let Some(include) = &entry.patterns.include {
// allow someone to explicitly specify a path
- let matches_exact_path_or_pattern = patterns
- .include
- .as_ref()
- .map(|p| {
- p.inner().iter().any(|p| match p {
- PathOrPattern::Path(p) => p == path,
- PathOrPattern::RemoteUrl(_) => true,
- PathOrPattern::Pattern(p) => p.matches_path(path),
- })
- })
- .unwrap_or(false);
- matches_exact_path_or_pattern
+ matches_pattern_or_exact_path(include, entry.path)
+ } else {
+ false
}
}
@@ -1432,7 +1421,7 @@ fn collect_specifiers_with_test_mode(
collect_specifiers(files.clone(), is_supported_test_path_predicate)?;
if *include_inline {
- return collect_specifiers(files, |p, _| is_supported_test_ext(p)).map(
+ return collect_specifiers(files, |e| is_supported_test_ext(e.path)).map(
|specifiers| {
specifiers
.into_iter()
@@ -1608,8 +1597,8 @@ pub async fn run_tests_with_watch(
let module_graph_creator = factory.module_graph_creator().await?;
let file_fetcher = factory.file_fetcher()?;
let test_modules = if test_options.doc {
- collect_specifiers(test_options.files.clone(), |p, _| {
- is_supported_test_ext(p)
+ collect_specifiers(test_options.files.clone(), |e| {
+ is_supported_test_ext(e.path)
})
} else {
collect_specifiers(
diff --git a/cli/util/fs.rs b/cli/util/fs.rs
index c81686f95..f6354097a 100644
--- a/cli/util/fs.rs
+++ b/cli/util/fs.rs
@@ -3,6 +3,7 @@
use std::collections::HashSet;
use std::env::current_dir;
use std::fmt::Write as FmtWrite;
+use std::fs::FileType;
use std::fs::OpenOptions;
use std::io::Error;
use std::io::ErrorKind;
@@ -26,6 +27,8 @@ use deno_runtime::deno_crypto::rand;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::PathClean;
+use crate::util::gitignore::DirGitIgnores;
+use crate::util::gitignore::GitIgnoreTree;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
use crate::util::progress_bar::ProgressMessagePrompt;
@@ -244,22 +247,31 @@ pub fn resolve_from_cwd(path: &Path) -> Result<PathBuf, AnyError> {
Ok(normalize_path(resolved_path))
}
+#[derive(Debug, Clone)]
+pub struct WalkEntry<'a> {
+ pub path: &'a Path,
+ pub file_type: &'a FileType,
+ pub patterns: &'a FilePatterns,
+}
+
/// Collects file paths that satisfy the given predicate, by recursively walking `files`.
/// If the walker visits a path that is listed in `ignore`, it skips descending into the directory.
-pub struct FileCollector<TFilter: Fn(&Path, &FilePatterns) -> bool> {
+pub struct FileCollector<TFilter: Fn(WalkEntry) -> bool> {
file_filter: TFilter,
ignore_git_folder: bool,
ignore_node_modules: bool,
ignore_vendor_folder: bool,
+ use_gitignore: bool,
}
-impl<TFilter: Fn(&Path, &FilePatterns) -> bool> FileCollector<TFilter> {
+impl<TFilter: Fn(WalkEntry) -> bool> FileCollector<TFilter> {
pub fn new(file_filter: TFilter) -> Self {
Self {
file_filter,
ignore_git_folder: false,
ignore_node_modules: false,
ignore_vendor_folder: false,
+ use_gitignore: false,
}
}
@@ -278,10 +290,46 @@ impl<TFilter: Fn(&Path, &FilePatterns) -> bool> FileCollector<TFilter> {
self
}
+ pub fn use_gitignore(mut self) -> Self {
+ self.use_gitignore = true;
+ self
+ }
+
pub fn collect_file_patterns(
&self,
file_patterns: FilePatterns,
) -> Result<Vec<PathBuf>, AnyError> {
+ fn is_pattern_matched(
+ maybe_git_ignore: Option<&DirGitIgnores>,
+ path: &Path,
+ is_dir: bool,
+ file_patterns: &FilePatterns,
+ ) -> bool {
+ use deno_config::glob::FilePatternsMatch;
+
+ let path_kind = match is_dir {
+ true => deno_config::glob::PathKind::Directory,
+ false => deno_config::glob::PathKind::File,
+ };
+ match file_patterns.matches_path_detail(path, path_kind) {
+ FilePatternsMatch::Passed => {
+ // check gitignore
+ let is_gitignored = maybe_git_ignore
+ .as_ref()
+ .map(|git_ignore| git_ignore.is_ignored(path, is_dir))
+ .unwrap_or(false);
+ !is_gitignored
+ }
+ FilePatternsMatch::PassedOptedOutExclude => true,
+ FilePatternsMatch::Excluded => false,
+ }
+ }
+
+ let mut maybe_git_ignores = if self.use_gitignore {
+ Some(GitIgnoreTree::new(Arc::new(deno_runtime::deno_fs::RealFs)))
+ } else {
+ None
+ };
let mut target_files = Vec::new();
let mut visited_paths = HashSet::new();
let file_patterns_by_base = file_patterns.split_by_base();
@@ -299,20 +347,23 @@ impl<TFilter: Fn(&Path, &FilePatterns) -> bool> FileCollector<TFilter> {
};
let file_type = e.file_type();
let is_dir = file_type.is_dir();
- let c = e.path().to_path_buf();
- if file_patterns.exclude.matches_path(&c)
- || !is_dir
- && !file_patterns
- .include
- .as_ref()
- .map(|i| i.matches_path(&c))
- .unwrap_or(true)
- {
+ let path = e.path().to_path_buf();
+ let maybe_gitignore =
+ maybe_git_ignores.as_mut().and_then(|git_ignores| {
+ let dir_path = if is_dir { &path } else { path.parent()? };
+ git_ignores.get_resolved_git_ignore(dir_path)
+ });
+ if !is_pattern_matched(
+ maybe_gitignore.as_deref(),
+ &path,
+ is_dir,
+ &file_patterns,
+ ) {
if is_dir {
iterator.skip_current_dir();
}
} else if is_dir {
- let should_ignore_dir = c
+ let should_ignore_dir = path
.file_name()
.map(|dir_name| {
let dir_name = dir_name.to_string_lossy().to_lowercase();
@@ -323,17 +374,20 @@ impl<TFilter: Fn(&Path, &FilePatterns) -> bool> FileCollector<TFilter> {
_ => false,
};
// allow the user to opt out of ignoring by explicitly specifying the dir
- file != c && is_ignored_file
+ file != path && is_ignored_file
})
.unwrap_or(false)
- || !visited_paths.insert(c.clone());
+ || !visited_paths.insert(path.clone());
if should_ignore_dir {
iterator.skip_current_dir();
}
- } else if (self.file_filter)(&c, &file_patterns)
- && visited_paths.insert(c.clone())
+ } else if (self.file_filter)(WalkEntry {
+ path: &path,
+ file_type: &file_type,
+ patterns: &file_patterns,
+ }) && visited_paths.insert(path.clone())
{
- target_files.push(c);
+ target_files.push(path);
}
}
}
@@ -346,7 +400,7 @@ impl<TFilter: Fn(&Path, &FilePatterns) -> bool> FileCollector<TFilter> {
/// Note: This ignores all .git and node_modules folders.
pub fn collect_specifiers(
mut files: FilePatterns,
- predicate: impl Fn(&Path, &FilePatterns) -> bool,
+ predicate: impl Fn(WalkEntry) -> bool,
) -> Result<Vec<ModuleSpecifier>, AnyError> {
let mut prepared = vec![];
@@ -365,6 +419,10 @@ pub fn collect_specifiers(
prepared.push(url);
}
}
+ PathOrPattern::NegatedPath(path) => {
+ // add it back
+ result.push(PathOrPattern::NegatedPath(path));
+ }
PathOrPattern::RemoteUrl(remote_url) => {
prepared.push(remote_url);
}
@@ -819,9 +877,9 @@ mod tests {
ignore_dir_path.to_path_buf(),
)]),
};
- let file_collector = FileCollector::new(|path, _| {
+ let file_collector = FileCollector::new(|e| {
// exclude dotfiles
- path
+ e.path
.file_name()
.and_then(|f| f.to_str())
.map(|f| !f.starts_with('.'))
@@ -943,9 +1001,9 @@ mod tests {
let ignore_dir_files = ["g.d.ts", ".gitignore"];
create_files(&ignore_dir_path, &ignore_dir_files);
- let predicate = |path: &Path, _: &FilePatterns| {
+ let predicate = |e: WalkEntry| {
// exclude dotfiles
- path
+ e.path
.file_name()
.and_then(|f| f.to_str())
.map(|f| !f.starts_with('.'))
@@ -956,7 +1014,7 @@ mod tests {
FilePatterns {
base: root_dir_path.to_path_buf(),
include: Some(
- PathOrPatternSet::from_relative_path_or_patterns(
+ PathOrPatternSet::from_include_relative_path_or_patterns(
root_dir_path.as_path(),
&[
"http://localhost:8080".to_string(),
diff --git a/cli/util/gitignore.rs b/cli/util/gitignore.rs
new file mode 100644
index 000000000..da9065494
--- /dev/null
+++ b/cli/util/gitignore.rs
@@ -0,0 +1,151 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use std::collections::HashMap;
+use std::path::Path;
+use std::path::PathBuf;
+use std::rc::Rc;
+use std::sync::Arc;
+
+/// Resolved gitignore for a directory.
+pub struct DirGitIgnores {
+ current: Option<Rc<ignore::gitignore::Gitignore>>,
+ parent: Option<Rc<DirGitIgnores>>,
+}
+
+impl DirGitIgnores {
+ pub fn is_ignored(&self, path: &Path, is_dir: bool) -> bool {
+ let mut is_ignored = false;
+ if let Some(parent) = &self.parent {
+ is_ignored = parent.is_ignored(path, is_dir);
+ }
+ if let Some(current) = &self.current {
+ match current.matched(path, is_dir) {
+ ignore::Match::None => {}
+ ignore::Match::Ignore(_) => {
+ is_ignored = true;
+ }
+ ignore::Match::Whitelist(_) => {
+ is_ignored = false;
+ }
+ }
+ }
+ is_ignored
+ }
+}
+
+/// Resolves gitignores in a directory tree taking into account
+/// ancestor gitignores that may be found in a directory.
+pub struct GitIgnoreTree {
+ fs: Arc<dyn deno_runtime::deno_fs::FileSystem>,
+ ignores: HashMap<PathBuf, Option<Rc<DirGitIgnores>>>,
+}
+
+impl GitIgnoreTree {
+ pub fn new(fs: Arc<dyn deno_runtime::deno_fs::FileSystem>) -> Self {
+ Self {
+ fs,
+ ignores: Default::default(),
+ }
+ }
+
+ pub fn get_resolved_git_ignore(
+ &mut self,
+ dir_path: &Path,
+ ) -> Option<Rc<DirGitIgnores>> {
+ self.get_resolved_git_ignore_inner(dir_path, None)
+ }
+
+ fn get_resolved_git_ignore_inner(
+ &mut self,
+ dir_path: &Path,
+ maybe_parent: Option<&Path>,
+ ) -> Option<Rc<DirGitIgnores>> {
+ let maybe_resolved = self.ignores.get(dir_path).cloned();
+ if let Some(resolved) = maybe_resolved {
+ resolved
+ } else {
+ let resolved = self.resolve_gitignore_in_dir(dir_path, maybe_parent);
+ self.ignores.insert(dir_path.to_owned(), resolved.clone());
+ resolved
+ }
+ }
+
+ fn resolve_gitignore_in_dir(
+ &mut self,
+ dir_path: &Path,
+ maybe_parent: Option<&Path>,
+ ) -> Option<Rc<DirGitIgnores>> {
+ if let Some(parent) = maybe_parent {
+ // stop searching if the parent dir had a .git directory in it
+ if self.fs.exists_sync(&parent.join(".git")) {
+ return None;
+ }
+ }
+
+ let parent = dir_path.parent().and_then(|parent| {
+ self.get_resolved_git_ignore_inner(parent, Some(dir_path))
+ });
+ let current = self
+ .fs
+ .read_text_file_sync(&dir_path.join(".gitignore"))
+ .ok()
+ .and_then(|text| {
+ let mut builder = ignore::gitignore::GitignoreBuilder::new(dir_path);
+ for line in text.lines() {
+ builder.add_line(None, line).ok()?;
+ }
+ let gitignore = builder.build().ok()?;
+ Some(Rc::new(gitignore))
+ });
+ if parent.is_none() && current.is_none() {
+ None
+ } else {
+ Some(Rc::new(DirGitIgnores { current, parent }))
+ }
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use deno_runtime::deno_fs::InMemoryFs;
+
+ use super::*;
+
+ #[test]
+ fn git_ignore_tree() {
+ let fs = InMemoryFs::default();
+ fs.setup_text_files(vec![
+ ("/.gitignore".into(), "file.txt".into()),
+ ("/sub_dir/.gitignore".into(), "data.txt".into()),
+ (
+ "/sub_dir/sub_dir/.gitignore".into(),
+ "!file.txt\nignore.txt".into(),
+ ),
+ ]);
+ let mut ignore_tree = GitIgnoreTree::new(Arc::new(fs));
+ let mut run_test = |path: &str, expected: bool| {
+ let path = PathBuf::from(path);
+ let gitignore = ignore_tree
+ .get_resolved_git_ignore(path.parent().unwrap())
+ .unwrap();
+ assert_eq!(
+ gitignore.is_ignored(&path, /* is_dir */ false),
+ expected,
+ "Path: {}",
+ path.display()
+ );
+ };
+ run_test("/file.txt", true);
+ run_test("/other.txt", false);
+ run_test("/data.txt", false);
+ run_test("/sub_dir/file.txt", true);
+ run_test("/sub_dir/other.txt", false);
+ run_test("/sub_dir/data.txt", true);
+ run_test("/sub_dir/sub_dir/file.txt", false); // unignored up here
+ run_test("/sub_dir/sub_dir/sub_dir/file.txt", false);
+ run_test("/sub_dir/sub_dir/sub_dir/ignore.txt", true);
+ run_test("/sub_dir/sub_dir/ignore.txt", true);
+ run_test("/sub_dir/ignore.txt", false);
+ run_test("/ignore.txt", false);
+ }
+}
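
A minimal usage sketch (not part of this commit) of the GitIgnoreTree above against the real filesystem, mirroring how cli/util/fs.rs drives it; the path and helper name are illustrative.

use std::path::Path;
use std::sync::Arc;

use crate::util::gitignore::GitIgnoreTree;

fn is_gitignored(tree: &mut GitIgnoreTree, path: &Path, is_dir: bool) -> bool {
    // Resolve the .gitignore chain for the containing directory, then test
    // the path against it (ancestor gitignores are consulted first).
    let dir = if is_dir { path } else { path.parent().unwrap_or(path) };
    tree
        .get_resolved_git_ignore(dir)
        .map(|ignores| ignores.is_ignored(path, is_dir))
        .unwrap_or(false)
}

fn example() {
    let mut tree = GitIgnoreTree::new(Arc::new(deno_runtime::deno_fs::RealFs));
    println!(
        "ignored: {}",
        is_gitignored(&mut tree, Path::new("/project/target/out.txt"), false)
    );
}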
diff --git a/cli/util/mod.rs b/cli/util/mod.rs
index a6f72bc04..7e0e1bd37 100644
--- a/cli/util/mod.rs
+++ b/cli/util/mod.rs
@@ -8,6 +8,7 @@ pub mod display;
pub mod draw_thread;
pub mod file_watcher;
pub mod fs;
+pub mod gitignore;
pub mod logger;
pub mod path;
pub mod progress_bar;
diff --git a/cli/util/path.rs b/cli/util/path.rs
index 496b37c5e..fed74cb06 100644
--- a/cli/util/path.rs
+++ b/cli/util/path.rs
@@ -6,6 +6,9 @@ use std::path::PathBuf;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
+use deno_config::glob::PathGlobMatch;
+use deno_config::glob::PathOrPattern;
+use deno_config::glob::PathOrPatternSet;
use deno_core::error::uri_error;
use deno_core::error::AnyError;
@@ -244,6 +247,38 @@ pub fn root_url_to_safe_local_dirname(root: &ModuleSpecifier) -> PathBuf {
result
}
+/// Slightly different behaviour than the default matching
+/// where an exact path needs to be matched to be opted-in
+/// rather than just a partial directory match.
+///
+/// This is used by the test and bench filtering.
+pub fn matches_pattern_or_exact_path(
+ path_or_pattern_set: &PathOrPatternSet,
+ path: &Path,
+) -> bool {
+ for p in path_or_pattern_set.inner().iter().rev() {
+ match p {
+ PathOrPattern::Path(p) => {
+ if p == path {
+ return true;
+ }
+ }
+ PathOrPattern::NegatedPath(p) => {
+ if path.starts_with(p) {
+ return false;
+ }
+ }
+ PathOrPattern::RemoteUrl(_) => {}
+ PathOrPattern::Pattern(p) => match p.matches_path(path) {
+ PathGlobMatch::Matched => return true,
+ PathGlobMatch::MatchedNegated => return false,
+ PathGlobMatch::NotMatched => {}
+ },
+ }
+ }
+ false
+}
+
#[cfg(test)]
mod test {
use super::*;
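
To illustrate the doc comment on matches_pattern_or_exact_path, a minimal sketch (not part of this commit); the base directory and file names are made up.

use crate::util::path::matches_pattern_or_exact_path;
use deno_config::glob::PathOrPatternSet;
use std::path::Path;

fn example() {
    let base = Path::new("/project");
    // e.g. `deno test subdir/util.ts`: the file has no test-like name, but it
    // was listed explicitly, so the exact-path rule opts it in.
    let include = PathOrPatternSet::from_include_relative_path_or_patterns(
        base,
        &["subdir/util.ts".to_string()],
    )
    .unwrap();
    assert!(matches_pattern_or_exact_path(
        &include,
        Path::new("/project/subdir/util.ts")
    ));
    // A sibling file that was not listed (and matches no pattern) is not
    // opted in; merely sitting inside an included directory is not enough here.
    assert!(!matches_pattern_or_exact_path(
        &include,
        Path::new("/project/subdir/other.ts")
    ));
}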