summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--Cargo.lock4
-rw-r--r--cli/Cargo.toml2
-rw-r--r--cli/args/mod.rs14
-rw-r--r--cli/lsp/config.rs13
-rw-r--r--cli/lsp/documents.rs20
-rw-r--r--cli/tools/bench/mod.rs27
-rw-r--r--cli/tools/coverage/mod.rs11
-rw-r--r--cli/tools/doc.rs12
-rw-r--r--cli/tools/fmt.rs2
-rw-r--r--cli/tools/lint/mod.rs2
-rw-r--r--cli/tools/registry/tar.rs179
-rw-r--r--cli/tools/test/mod.rs35
-rw-r--r--cli/util/fs.rs104
-rw-r--r--cli/util/gitignore.rs151
-rw-r--r--cli/util/mod.rs1
-rw-r--r--cli/util/path.rs35
-rw-r--r--ext/fs/in_memory_fs.rs425
-rw-r--r--ext/fs/lib.rs2
-rw-r--r--tests/integration/bench_tests.rs31
-rw-r--r--tests/integration/fmt_tests.rs27
-rw-r--r--tests/integration/lint_tests.rs25
-rw-r--r--tests/integration/lsp_tests.rs4
-rw-r--r--tests/integration/publish_tests.rs81
-rw-r--r--tests/integration/test_tests.rs30
24 files changed, 1016 insertions, 221 deletions
diff --git a/Cargo.lock b/Cargo.lock
index e36b3482f..8cd654696 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1232,9 +1232,9 @@ dependencies = [
[[package]]
name = "deno_config"
-version = "0.12.0"
+version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ebbc05e20df2d5b8562205f9b0c296bc528e833b0de126d489781952e13d939f"
+checksum = "61c801e30b12aa3f15f59d4d4947621eef34d6798a93f6a5037c0efa26f87a8b"
dependencies = [
"anyhow",
"glob",
diff --git a/cli/Cargo.toml b/cli/Cargo.toml
index d11ef8849..11fdcc123 100644
--- a/cli/Cargo.toml
+++ b/cli/Cargo.toml
@@ -64,7 +64,7 @@ winres.workspace = true
[dependencies]
deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
deno_cache_dir = { workspace = true }
-deno_config = "=0.12.0"
+deno_config = "=0.14.1"
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = { version = "=0.113.1", features = ["html"] }
deno_emit = "=0.38.2"
diff --git a/cli/args/mod.rs b/cli/args/mod.rs
index acdc96526..d72b41947 100644
--- a/cli/args/mod.rs
+++ b/cli/args/mod.rs
@@ -1258,7 +1258,7 @@ impl CliOptions {
pub fn resolve_config_excludes(&self) -> Result<PathOrPatternSet, AnyError> {
let maybe_config_files = if let Some(config_file) = &self.maybe_config_file
{
- config_file.to_files_config()?
+ Some(config_file.to_files_config()?)
} else {
None
};
@@ -1750,14 +1750,14 @@ fn resolve_files(
if let Some(file_flags) = maybe_file_flags {
if !file_flags.include.is_empty() {
maybe_files_config.include =
- Some(PathOrPatternSet::from_relative_path_or_patterns(
+ Some(PathOrPatternSet::from_include_relative_path_or_patterns(
initial_cwd,
&file_flags.include,
)?);
}
if !file_flags.ignore.is_empty() {
maybe_files_config.exclude =
- PathOrPatternSet::from_relative_path_or_patterns(
+ PathOrPatternSet::from_exclude_relative_path_or_patterns(
initial_cwd,
&file_flags.ignore,
)?;
@@ -1886,7 +1886,7 @@ mod test {
temp_dir.write("pages/[id].ts", "");
let temp_dir_path = temp_dir.path().as_path();
- let error = PathOrPatternSet::from_relative_path_or_patterns(
+ let error = PathOrPatternSet::from_include_relative_path_or_patterns(
temp_dir_path,
&["data/**********.ts".to_string()],
)
@@ -1897,7 +1897,7 @@ mod test {
Some(FilePatterns {
base: temp_dir_path.to_path_buf(),
include: Some(
- PathOrPatternSet::from_relative_path_or_patterns(
+ PathOrPatternSet::from_include_relative_path_or_patterns(
temp_dir_path,
&[
"data/test1.?s".to_string(),
@@ -1908,7 +1908,7 @@ mod test {
)
.unwrap(),
),
- exclude: PathOrPatternSet::from_relative_path_or_patterns(
+ exclude: PathOrPatternSet::from_exclude_relative_path_or_patterns(
temp_dir_path,
&["nested/**/*bazz.ts".to_string()],
)
@@ -1919,7 +1919,7 @@ mod test {
)
.unwrap();
- let mut files = FileCollector::new(|_, _| true)
+ let mut files = FileCollector::new(|_| true)
.ignore_git_folder()
.ignore_node_modules()
.ignore_vendor_folder()
diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs
index 3d24c8c20..120828a79 100644
--- a/cli/lsp/config.rs
+++ b/cli/lsp/config.rs
@@ -1083,7 +1083,7 @@ impl Config {
pub fn get_disabled_paths(&self) -> PathOrPatternSet {
let mut path_or_patterns = vec![];
if let Some(cf) = self.maybe_config_file() {
- if let Some(files) = cf.to_files_config().ok().flatten() {
+ if let Ok(files) = cf.to_files_config() {
for path in files.exclude.into_path_or_patterns() {
path_or_patterns.push(path);
}
@@ -1095,7 +1095,14 @@ impl Config {
continue;
};
let settings = self.workspace_settings_for_specifier(workspace_uri);
- if settings.enable.unwrap_or_else(|| self.has_config_file()) {
+ let is_enabled = settings
+ .enable_paths
+ .as_ref()
+ .map(|p| !p.is_empty())
+ .unwrap_or_else(|| {
+ settings.enable.unwrap_or_else(|| self.has_config_file())
+ });
+ if is_enabled {
for path in &settings.disable_paths {
path_or_patterns.push(PathOrPattern::Path(workspace_path.join(path)));
}
@@ -1177,7 +1184,7 @@ fn specifier_enabled(
workspace_folders: &[(Url, lsp::WorkspaceFolder)],
) -> bool {
if let Some(cf) = config_file {
- if let Some(files) = cf.to_files_config().ok().flatten() {
+ if let Ok(files) = cf.to_files_config() {
if !files.matches_specifier(specifier) {
return false;
}
diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs
index b825bc020..7912dad78 100644
--- a/cli/lsp/documents.rs
+++ b/cli/lsp/documents.rs
@@ -1341,11 +1341,12 @@ impl Documents {
.inner()
.iter()
.map(|p| match p {
- PathOrPattern::Path(p) => {
- Cow::Owned(p.to_string_lossy().to_string())
+ PathOrPattern::Path(p) => p.to_string_lossy(),
+ PathOrPattern::NegatedPath(p) => {
+ Cow::Owned(format!("!{}", p.to_string_lossy()))
}
PathOrPattern::RemoteUrl(p) => Cow::Borrowed(p.as_str()),
- PathOrPattern::Pattern(p) => Cow::Borrowed(p.as_str()),
+ PathOrPattern::Pattern(p) => p.as_str(),
})
.collect::<Vec<_>>();
// ensure these are sorted so the hashing is deterministic
@@ -2061,8 +2062,13 @@ impl Iterator for PreloadDocumentFinder {
if let Ok(entry) = entry {
let path = entry.path();
if let Ok(file_type) = entry.file_type() {
- if file_patterns.matches_path(&path) {
- if file_type.is_dir() && is_discoverable_dir(&path) {
+ let is_dir = file_type.is_dir();
+ let path_kind = match is_dir {
+ true => deno_config::glob::PathKind::Directory,
+ false => deno_config::glob::PathKind::File,
+ };
+ if file_patterns.matches_path(&path, path_kind) {
+ if is_dir && is_discoverable_dir(&path) {
self.pending_entries.push_back(PendingEntry::Dir(
path.to_path_buf(),
file_patterns.clone(),
@@ -2354,7 +2360,7 @@ console.log(b, "hello deno");
file_patterns: FilePatterns {
base: temp_dir.path().to_path_buf(),
include: Some(
- PathOrPatternSet::from_relative_path_or_patterns(
+ PathOrPatternSet::from_include_relative_path_or_patterns(
temp_dir.path().as_path(),
&[
"root1".to_string(),
@@ -2415,7 +2421,7 @@ console.log(b, "hello deno");
file_patterns: FilePatterns {
base: temp_dir.path().to_path_buf(),
include: Default::default(),
- exclude: PathOrPatternSet::from_relative_path_or_patterns(
+ exclude: PathOrPatternSet::from_exclude_relative_path_or_patterns(
temp_dir.path().as_path(),
&[
"root1".to_string(),
diff --git a/cli/tools/bench/mod.rs b/cli/tools/bench/mod.rs
index 43b7103cd..b554f7349 100644
--- a/cli/tools/bench/mod.rs
+++ b/cli/tools/bench/mod.rs
@@ -14,12 +14,12 @@ use crate::tools::test::format_test_error;
use crate::tools::test::TestFilter;
use crate::util::file_watcher;
use crate::util::fs::collect_specifiers;
+use crate::util::fs::WalkEntry;
use crate::util::path::is_script_ext;
+use crate::util::path::matches_pattern_or_exact_path;
use crate::version::get_user_agent;
use crate::worker::CliMainWorkerFactory;
-use deno_config::glob::FilePatterns;
-use deno_config::glob::PathOrPattern;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::error::JsError;
@@ -394,25 +394,16 @@ async fn bench_specifiers(
}
/// Checks if the path has a basename and extension Deno supports for benches.
-fn is_supported_bench_path(path: &Path, patterns: &FilePatterns) -> bool {
- if !is_script_ext(path) {
+fn is_supported_bench_path(entry: WalkEntry) -> bool {
+ if !is_script_ext(entry.path) {
false
- } else if has_supported_bench_path_name(path) {
+ } else if has_supported_bench_path_name(entry.path) {
true
- } else {
+ } else if let Some(include) = &entry.patterns.include {
// allow someone to explicitly specify a path
- let matches_exact_path_or_pattern = patterns
- .include
- .as_ref()
- .map(|p| {
- p.inner().iter().any(|p| match p {
- PathOrPattern::Path(p) => p == path,
- PathOrPattern::RemoteUrl(_) => true,
- PathOrPattern::Pattern(p) => p.matches_path(path),
- })
- })
- .unwrap_or(false);
- matches_exact_path_or_pattern
+ matches_pattern_or_exact_path(include, entry.path)
+ } else {
+ false
}
}
diff --git a/cli/tools/coverage/mod.rs b/cli/tools/coverage/mod.rs
index 5cc705741..66c0923de 100644
--- a/cli/tools/coverage/mod.rs
+++ b/cli/tools/coverage/mod.rs
@@ -388,23 +388,20 @@ fn collect_coverages(
initial_cwd.to_path_buf(),
)])
} else {
- PathOrPatternSet::from_relative_path_or_patterns(
+ PathOrPatternSet::from_include_relative_path_or_patterns(
initial_cwd,
&files.include,
)?
}
}),
- exclude: PathOrPatternSet::from_relative_path_or_patterns(
+ exclude: PathOrPatternSet::from_exclude_relative_path_or_patterns(
initial_cwd,
&files.ignore,
)
.context("Invalid ignore pattern.")?,
};
- let file_paths = FileCollector::new(|file_path, _| {
- file_path
- .extension()
- .map(|ext| ext == "json")
- .unwrap_or(false)
+ let file_paths = FileCollector::new(|e| {
+ e.path.extension().map(|ext| ext == "json").unwrap_or(false)
})
.ignore_git_folder()
.ignore_node_modules()
diff --git a/cli/tools/doc.rs b/cli/tools/doc.rs
index 0b7b26e31..013a407aa 100644
--- a/cli/tools/doc.rs
+++ b/cli/tools/doc.rs
@@ -96,13 +96,15 @@ pub async fn doc(flags: Flags, doc_flags: DocFlags) -> Result<(), AnyError> {
let module_specifiers = collect_specifiers(
FilePatterns {
base: cli_options.initial_cwd().to_path_buf(),
- include: Some(PathOrPatternSet::from_relative_path_or_patterns(
- cli_options.initial_cwd(),
- source_files,
- )?),
+ include: Some(
+ PathOrPatternSet::from_include_relative_path_or_patterns(
+ cli_options.initial_cwd(),
+ source_files,
+ )?,
+ ),
exclude: Default::default(),
},
- |_, _| true,
+ |_| true,
)?;
let graph = module_graph_creator
.create_graph(GraphKind::TypesOnly, module_specifiers.clone())
diff --git a/cli/tools/fmt.rs b/cli/tools/fmt.rs
index 86fc9700e..0f6afb232 100644
--- a/cli/tools/fmt.rs
+++ b/cli/tools/fmt.rs
@@ -154,7 +154,7 @@ async fn format_files(
}
fn collect_fmt_files(files: FilePatterns) -> Result<Vec<PathBuf>, AnyError> {
- FileCollector::new(|path, _| is_supported_ext_fmt(path))
+ FileCollector::new(|e| is_supported_ext_fmt(e.path))
.ignore_git_folder()
.ignore_node_modules()
.ignore_vendor_folder()
diff --git a/cli/tools/lint/mod.rs b/cli/tools/lint/mod.rs
index ee7350fb4..1b81fca5a 100644
--- a/cli/tools/lint/mod.rs
+++ b/cli/tools/lint/mod.rs
@@ -263,7 +263,7 @@ async fn lint_files(
}
fn collect_lint_files(files: FilePatterns) -> Result<Vec<PathBuf>, AnyError> {
- FileCollector::new(|path, _| is_script_ext(path))
+ FileCollector::new(|e| is_script_ext(e.path))
.ignore_git_folder()
.ignore_node_modules()
.ignore_vendor_folder()
diff --git a/cli/tools/registry/tar.rs b/cli/tools/registry/tar.rs
index d24d8abaa..0da410764 100644
--- a/cli/tools/registry/tar.rs
+++ b/cli/tools/registry/tar.rs
@@ -2,13 +2,11 @@
use bytes::Bytes;
use deno_ast::MediaType;
+use deno_ast::ModuleSpecifier;
use deno_config::glob::FilePatterns;
-use deno_config::glob::PathOrPattern;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::url::Url;
-use ignore::overrides::OverrideBuilder;
-use ignore::WalkBuilder;
use sha2::Digest;
use std::collections::HashSet;
use std::fmt::Write as FmtWrite;
@@ -18,6 +16,7 @@ use tar::Header;
use crate::cache::LazyGraphSourceParser;
use crate::tools::registry::paths::PackagePath;
+use crate::util::fs::FileCollector;
use super::diagnostics::PublishDiagnostic;
use super::diagnostics::PublishDiagnosticsCollector;
@@ -45,75 +44,60 @@ pub fn create_gzipped_tarball(
unfurler: &SpecifierUnfurler,
file_patterns: Option<FilePatterns>,
) -> Result<PublishableTarball, AnyError> {
+ let file_patterns = file_patterns
+ .unwrap_or_else(|| FilePatterns::new_with_base(dir.to_path_buf()));
let mut tar = TarGzArchive::new();
let mut files = vec![];
- let mut paths = HashSet::new();
-
- let mut ob = OverrideBuilder::new(dir);
- ob.add("!.git")?.add("!node_modules")?.add("!.DS_Store")?;
-
- for pattern in file_patterns.as_ref().iter().flat_map(|p| p.include.iter()) {
- for path_or_pat in pattern.inner() {
- match path_or_pat {
- PathOrPattern::Path(p) => ob.add(p.to_str().unwrap())?,
- PathOrPattern::Pattern(p) => ob.add(p.as_str())?,
- PathOrPattern::RemoteUrl(_) => continue,
- };
+ let iter_paths = FileCollector::new(|e| {
+ if !e.file_type.is_file() {
+ if let Ok(specifier) = ModuleSpecifier::from_file_path(e.path) {
+ diagnostics_collector.push(PublishDiagnostic::UnsupportedFileType {
+ specifier,
+ kind: if e.file_type.is_symlink() {
+ "symlink".to_owned()
+ } else {
+ format!("{:?}", e.file_type)
+ },
+ });
+ }
+ return false;
}
- }
-
- let overrides = ob.build()?;
-
- let iterator = WalkBuilder::new(dir)
- .follow_links(false)
- .require_git(false)
- .git_ignore(true)
- .git_global(true)
- .git_exclude(true)
- .overrides(overrides)
- .filter_entry(move |entry| {
- let matches_pattern = file_patterns
- .as_ref()
- .map(|p| p.matches_path(entry.path()))
- .unwrap_or(true);
- matches_pattern
- })
- .build();
+ e.path.file_name().map(|s| s != ".DS_Store").unwrap_or(true)
+ })
+ .ignore_git_folder()
+ .ignore_node_modules()
+ .ignore_vendor_folder()
+ .use_gitignore()
+ .collect_file_patterns(file_patterns)?;
- for entry in iterator {
- let entry = entry?;
+ let mut paths = HashSet::with_capacity(iter_paths.len());
- let path = entry.path();
- let Some(file_type) = entry.file_type() else {
- // entry doesn’t have a file type if it corresponds to stdin.
+ for path in iter_paths {
+ let Ok(specifier) = Url::from_file_path(&path) else {
+ diagnostics_collector
+ .to_owned()
+ .push(PublishDiagnostic::InvalidPath {
+ path: path.to_path_buf(),
+ message: "unable to convert path to url".to_string(),
+ });
continue;
};
- let Ok(specifier) = Url::from_file_path(path) else {
+ let Ok(relative_path) = path.strip_prefix(dir) else {
diagnostics_collector
.to_owned()
.push(PublishDiagnostic::InvalidPath {
path: path.to_path_buf(),
- message: "unable to convert path to url".to_string(),
+ message: "path is not in publish directory".to_string(),
});
continue;
};
- if file_type.is_file() {
- let Ok(relative_path) = path.strip_prefix(dir) else {
- diagnostics_collector
- .to_owned()
- .push(PublishDiagnostic::InvalidPath {
- path: path.to_path_buf(),
- message: "path is not in publish directory".to_string(),
- });
- continue;
- };
-
- let path_str = relative_path.components().fold(
- "".to_string(),
- |mut path, component| {
+ let path_str =
+ relative_path
+ .components()
+ .fold("".to_string(), |mut path, component| {
path.push('/');
match component {
std::path::Component::Normal(normal) => {
@@ -124,66 +108,55 @@ pub fn create_gzipped_tarball(
_ => unreachable!(),
}
path
- },
- );
+ });
- match PackagePath::new(path_str.clone()) {
- Ok(package_path) => {
- if !paths.insert(package_path) {
- diagnostics_collector.to_owned().push(
- PublishDiagnostic::DuplicatePath {
- path: path.to_path_buf(),
- },
- );
- }
- }
- Err(err) => {
+ match PackagePath::new(path_str.clone()) {
+ Ok(package_path) => {
+ if !paths.insert(package_path) {
diagnostics_collector.to_owned().push(
- PublishDiagnostic::InvalidPath {
+ PublishDiagnostic::DuplicatePath {
path: path.to_path_buf(),
- message: err.to_string(),
},
);
}
}
-
- let content = resolve_content_maybe_unfurling(
- path,
- &specifier,
- unfurler,
- source_parser,
- diagnostics_collector,
- )?;
-
- let media_type = MediaType::from_specifier(&specifier);
- if matches!(media_type, MediaType::Jsx | MediaType::Tsx) {
- diagnostics_collector.push(PublishDiagnostic::UnsupportedJsxTsx {
- specifier: specifier.clone(),
- });
+ Err(err) => {
+ diagnostics_collector
+ .to_owned()
+ .push(PublishDiagnostic::InvalidPath {
+ path: path.to_path_buf(),
+ message: err.to_string(),
+ });
}
+ }
+
+ let content = resolve_content_maybe_unfurling(
+ &path,
+ &specifier,
+ unfurler,
+ source_parser,
+ diagnostics_collector,
+ )?;
- files.push(PublishableTarballFile {
- path_str: path_str.clone(),
+ let media_type = MediaType::from_specifier(&specifier);
+ if matches!(media_type, MediaType::Jsx | MediaType::Tsx) {
+ diagnostics_collector.push(PublishDiagnostic::UnsupportedJsxTsx {
specifier: specifier.clone(),
- // This hash string matches the checksum computed by registry
- hash: format!("sha256-{:x}", sha2::Sha256::digest(&content)),
- size: content.len(),
- });
- tar
- .add_file(format!(".{}", path_str), &content)
- .with_context(|| {
- format!("Unable to add file to tarball '{}'", entry.path().display())
- })?;
- } else if !file_type.is_dir() {
- diagnostics_collector.push(PublishDiagnostic::UnsupportedFileType {
- specifier,
- kind: if file_type.is_symlink() {
- "symlink".to_owned()
- } else {
- format!("{file_type:?}")
- },
});
}
+
+ files.push(PublishableTarballFile {
+ path_str: path_str.clone(),
+ specifier: specifier.clone(),
+ // This hash string matches the checksum computed by registry
+ hash: format!("sha256-{:x}", sha2::Sha256::digest(&content)),
+ size: content.len(),
+ });
+ tar
+ .add_file(format!(".{}", path_str), &content)
+ .with_context(|| {
+ format!("Unable to add file to tarball '{}'", path.display())
+ })?;
}
let v = tar.finish().context("Unable to finish tarball")?;
diff --git a/cli/tools/test/mod.rs b/cli/tools/test/mod.rs
index 4f500df3d..1970012a1 100644
--- a/cli/tools/test/mod.rs
+++ b/cli/tools/test/mod.rs
@@ -15,16 +15,17 @@ use crate::module_loader::ModuleLoadPreparer;
use crate::ops;
use crate::util::file_watcher;
use crate::util::fs::collect_specifiers;
+use crate::util::fs::WalkEntry;
use crate::util::path::get_extension;
use crate::util::path::is_script_ext;
use crate::util::path::mapped_specifier_for_tsc;
+use crate::util::path::matches_pattern_or_exact_path;
use crate::worker::CliMainWorkerFactory;
use deno_ast::swc::common::comments::CommentKind;
use deno_ast::MediaType;
use deno_ast::SourceRangedForSpanned;
use deno_config::glob::FilePatterns;
-use deno_config::glob::PathOrPattern;
use deno_core::anyhow;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context as _;
@@ -1350,28 +1351,16 @@ pub async fn report_tests(
(Ok(()), receiver)
}
-fn is_supported_test_path_predicate(
- path: &Path,
- patterns: &FilePatterns,
-) -> bool {
- if !is_script_ext(path) {
+fn is_supported_test_path_predicate(entry: WalkEntry) -> bool {
+ if !is_script_ext(entry.path) {
false
- } else if has_supported_test_path_name(path) {
+ } else if has_supported_test_path_name(entry.path) {
true
- } else {
+ } else if let Some(include) = &entry.patterns.include {
// allow someone to explicitly specify a path
- let matches_exact_path_or_pattern = patterns
- .include
- .as_ref()
- .map(|p| {
- p.inner().iter().any(|p| match p {
- PathOrPattern::Path(p) => p == path,
- PathOrPattern::RemoteUrl(_) => true,
- PathOrPattern::Pattern(p) => p.matches_path(path),
- })
- })
- .unwrap_or(false);
- matches_exact_path_or_pattern
+ matches_pattern_or_exact_path(include, entry.path)
+ } else {
+ false
}
}
@@ -1432,7 +1421,7 @@ fn collect_specifiers_with_test_mode(
collect_specifiers(files.clone(), is_supported_test_path_predicate)?;
if *include_inline {
- return collect_specifiers(files, |p, _| is_supported_test_ext(p)).map(
+ return collect_specifiers(files, |e| is_supported_test_ext(e.path)).map(
|specifiers| {
specifiers
.into_iter()
@@ -1608,8 +1597,8 @@ pub async fn run_tests_with_watch(
let module_graph_creator = factory.module_graph_creator().await?;
let file_fetcher = factory.file_fetcher()?;
let test_modules = if test_options.doc {
- collect_specifiers(test_options.files.clone(), |p, _| {
- is_supported_test_ext(p)
+ collect_specifiers(test_options.files.clone(), |e| {
+ is_supported_test_ext(e.path)
})
} else {
collect_specifiers(
diff --git a/cli/util/fs.rs b/cli/util/fs.rs
index c81686f95..f6354097a 100644
--- a/cli/util/fs.rs
+++ b/cli/util/fs.rs
@@ -3,6 +3,7 @@
use std::collections::HashSet;
use std::env::current_dir;
use std::fmt::Write as FmtWrite;
+use std::fs::FileType;
use std::fs::OpenOptions;
use std::io::Error;
use std::io::ErrorKind;
@@ -26,6 +27,8 @@ use deno_runtime::deno_crypto::rand;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::PathClean;
+use crate::util::gitignore::DirGitIgnores;
+use crate::util::gitignore::GitIgnoreTree;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
use crate::util::progress_bar::ProgressMessagePrompt;
@@ -244,22 +247,31 @@ pub fn resolve_from_cwd(path: &Path) -> Result<PathBuf, AnyError> {
Ok(normalize_path(resolved_path))
}
+#[derive(Debug, Clone)]
+pub struct WalkEntry<'a> {
+ pub path: &'a Path,
+ pub file_type: &'a FileType,
+ pub patterns: &'a FilePatterns,
+}
+
/// Collects file paths that satisfy the given predicate, by recursively walking `files`.
/// If the walker visits a path that is listed in `ignore`, it skips descending into the directory.
-pub struct FileCollector<TFilter: Fn(&Path, &FilePatterns) -> bool> {
+pub struct FileCollector<TFilter: Fn(WalkEntry) -> bool> {
file_filter: TFilter,
ignore_git_folder: bool,
ignore_node_modules: bool,
ignore_vendor_folder: bool,
+ use_gitignore: bool,
}
-impl<TFilter: Fn(&Path, &FilePatterns) -> bool> FileCollector<TFilter> {
+impl<TFilter: Fn(WalkEntry) -> bool> FileCollector<TFilter> {
pub fn new(file_filter: TFilter) -> Self {
Self {
file_filter,
ignore_git_folder: false,
ignore_node_modules: false,
ignore_vendor_folder: false,
+ use_gitignore: false,
}
}
@@ -278,10 +290,46 @@ impl<TFilter: Fn(&Path, &FilePatterns) -> bool> FileCollector<TFilter> {
self
}
+ pub fn use_gitignore(mut self) -> Self {
+ self.use_gitignore = true;
+ self
+ }
+
pub fn collect_file_patterns(
&self,
file_patterns: FilePatterns,
) -> Result<Vec<PathBuf>, AnyError> {
+ fn is_pattern_matched(
+ maybe_git_ignore: Option<&DirGitIgnores>,
+ path: &Path,
+ is_dir: bool,
+ file_patterns: &FilePatterns,
+ ) -> bool {
+ use deno_config::glob::FilePatternsMatch;
+
+ let path_kind = match is_dir {
+ true => deno_config::glob::PathKind::Directory,
+ false => deno_config::glob::PathKind::File,
+ };
+ match file_patterns.matches_path_detail(path, path_kind) {
+ FilePatternsMatch::Passed => {
+ // check gitignore
+ let is_gitignored = maybe_git_ignore
+ .as_ref()
+ .map(|git_ignore| git_ignore.is_ignored(path, is_dir))
+ .unwrap_or(false);
+ !is_gitignored
+ }
+ FilePatternsMatch::PassedOptedOutExclude => true,
+ FilePatternsMatch::Excluded => false,
+ }
+ }
+
+ let mut maybe_git_ignores = if self.use_gitignore {
+ Some(GitIgnoreTree::new(Arc::new(deno_runtime::deno_fs::RealFs)))
+ } else {
+ None
+ };
let mut target_files = Vec::new();
let mut visited_paths = HashSet::new();
let file_patterns_by_base = file_patterns.split_by_base();
@@ -299,20 +347,23 @@ impl<TFilter: Fn(&Path, &FilePatterns) -> bool> FileCollector<TFilter> {
};
let file_type = e.file_type();
let is_dir = file_type.is_dir();
- let c = e.path().to_path_buf();
- if file_patterns.exclude.matches_path(&c)
- || !is_dir
- && !file_patterns
- .include
- .as_ref()
- .map(|i| i.matches_path(&c))
- .unwrap_or(true)
- {
+ let path = e.path().to_path_buf();
+ let maybe_gitignore =
+ maybe_git_ignores.as_mut().and_then(|git_ignores| {
+ let dir_path = if is_dir { &path } else { path.parent()? };
+ git_ignores.get_resolved_git_ignore(dir_path)
+ });
+ if !is_pattern_matched(
+ maybe_gitignore.as_deref(),
+ &path,
+ is_dir,
+ &file_patterns,
+ ) {
if is_dir {
iterator.skip_current_dir();
}
} else if is_dir {
- let should_ignore_dir = c
+ let should_ignore_dir = path
.file_name()
.map(|dir_name| {
let dir_name = dir_name.to_string_lossy().to_lowercase();
@@ -323,17 +374,20 @@ impl<TFilter: Fn(&Path, &FilePatterns) -> bool> FileCollector<TFilter> {
_ => false,
};
// allow the user to opt out of ignoring by explicitly specifying the dir
- file != c && is_ignored_file
+ file != path && is_ignored_file
})
.unwrap_or(false)
- || !visited_paths.insert(c.clone());
+ || !visited_paths.insert(path.clone());
if should_ignore_dir {
iterator.skip_current_dir();
}
- } else if (self.file_filter)(&c, &file_patterns)
- && visited_paths.insert(c.clone())
+ } else if (self.file_filter)(WalkEntry {
+ path: &path,
+ file_type: &file_type,
+ patterns: &file_patterns,
+ }) && visited_paths.insert(path.clone())
{
- target_files.push(c);
+ target_files.push(path);
}
}
}
@@ -346,7 +400,7 @@ impl<TFilter: Fn(&Path, &FilePatterns) -> bool> FileCollector<TFilter> {
/// Note: This ignores all .git and node_modules folders.
pub fn collect_specifiers(
mut files: FilePatterns,
- predicate: impl Fn(&Path, &FilePatterns) -> bool,
+ predicate: impl Fn(WalkEntry) -> bool,
) -> Result<Vec<ModuleSpecifier>, AnyError> {
let mut prepared = vec![];
@@ -365,6 +419,10 @@ pub fn collect_specifiers(
prepared.push(url);
}
}
+ PathOrPattern::NegatedPath(path) => {
+ // add it back
+ result.push(PathOrPattern::NegatedPath(path));
+ }
PathOrPattern::RemoteUrl(remote_url) => {
prepared.push(remote_url);
}
@@ -819,9 +877,9 @@ mod tests {
ignore_dir_path.to_path_buf(),
)]),
};
- let file_collector = FileCollector::new(|path, _| {
+ let file_collector = FileCollector::new(|e| {
// exclude dotfiles
- path
+ e.path
.file_name()
.and_then(|f| f.to_str())
.map(|f| !f.starts_with('.'))
@@ -943,9 +1001,9 @@ mod tests {
let ignore_dir_files = ["g.d.ts", ".gitignore"];
create_files(&ignore_dir_path, &ignore_dir_files);
- let predicate = |path: &Path, _: &FilePatterns| {
+ let predicate = |e: WalkEntry| {
// exclude dotfiles
- path
+ e.path
.file_name()
.and_then(|f| f.to_str())
.map(|f| !f.starts_with('.'))
@@ -956,7 +1014,7 @@ mod tests {
FilePatterns {
base: root_dir_path.to_path_buf(),
include: Some(
- PathOrPatternSet::from_relative_path_or_patterns(
+ PathOrPatternSet::from_include_relative_path_or_patterns(
root_dir_path.as_path(),
&[
"http://localhost:8080".to_string(),
diff --git a/cli/util/gitignore.rs b/cli/util/gitignore.rs
new file mode 100644
index 000000000..da9065494
--- /dev/null
+++ b/cli/util/gitignore.rs
@@ -0,0 +1,151 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use std::collections::HashMap;
+use std::path::Path;
+use std::path::PathBuf;
+use std::rc::Rc;
+use std::sync::Arc;
+
+/// Resolved gitignore for a directory.
+pub struct DirGitIgnores {
+ current: Option<Rc<ignore::gitignore::Gitignore>>,
+ parent: Option<Rc<DirGitIgnores>>,
+}
+
+impl DirGitIgnores {
+ pub fn is_ignored(&self, path: &Path, is_dir: bool) -> bool {
+ let mut is_ignored = false;
+ if let Some(parent) = &self.parent {
+ is_ignored = parent.is_ignored(path, is_dir);
+ }
+ if let Some(current) = &self.current {
+ match current.matched(path, is_dir) {
+ ignore::Match::None => {}
+ ignore::Match::Ignore(_) => {
+ is_ignored = true;
+ }
+ ignore::Match::Whitelist(_) => {
+ is_ignored = false;
+ }
+ }
+ }
+ is_ignored
+ }
+}
+
+/// Resolves gitignores in a directory tree taking into account
+/// ancestor gitignores that may be found in a directory.
+pub struct GitIgnoreTree {
+ fs: Arc<dyn deno_runtime::deno_fs::FileSystem>,
+ ignores: HashMap<PathBuf, Option<Rc<DirGitIgnores>>>,
+}
+
+impl GitIgnoreTree {
+ pub fn new(fs: Arc<dyn deno_runtime::deno_fs::FileSystem>) -> Self {
+ Self {
+ fs,
+ ignores: Default::default(),
+ }
+ }
+
+ pub fn get_resolved_git_ignore(
+ &mut self,
+ dir_path: &Path,
+ ) -> Option<Rc<DirGitIgnores>> {
+ self.get_resolved_git_ignore_inner(dir_path, None)
+ }
+
+ fn get_resolved_git_ignore_inner(
+ &mut self,
+ dir_path: &Path,
+ maybe_parent: Option<&Path>,
+ ) -> Option<Rc<DirGitIgnores>> {
+ let maybe_resolved = self.ignores.get(dir_path).cloned();
+ if let Some(resolved) = maybe_resolved {
+ resolved
+ } else {
+ let resolved = self.resolve_gitignore_in_dir(dir_path, maybe_parent);
+ self.ignores.insert(dir_path.to_owned(), resolved.clone());
+ resolved
+ }
+ }
+
+ fn resolve_gitignore_in_dir(
+ &mut self,
+ dir_path: &Path,
+ maybe_parent: Option<&Path>,
+ ) -> Option<Rc<DirGitIgnores>> {
+ if let Some(parent) = maybe_parent {
+ // stop searching if the parent dir had a .git directory in it
+ if self.fs.exists_sync(&parent.join(".git")) {
+ return None;
+ }
+ }
+
+ let parent = dir_path.parent().and_then(|parent| {
+ self.get_resolved_git_ignore_inner(parent, Some(dir_path))
+ });
+ let current = self
+ .fs
+ .read_text_file_sync(&dir_path.join(".gitignore"))
+ .ok()
+ .and_then(|text| {
+ let mut builder = ignore::gitignore::GitignoreBuilder::new(dir_path);
+ for line in text.lines() {
+ builder.add_line(None, line).ok()?;
+ }
+ let gitignore = builder.build().ok()?;
+ Some(Rc::new(gitignore))
+ });
+ if parent.is_none() && current.is_none() {
+ None
+ } else {
+ Some(Rc::new(DirGitIgnores { current, parent }))
+ }
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use deno_runtime::deno_fs::InMemoryFs;
+
+ use super::*;
+
+ #[test]
+ fn git_ignore_tree() {
+ let fs = InMemoryFs::default();
+ fs.setup_text_files(vec![
+ ("/.gitignore".into(), "file.txt".into()),
+ ("/sub_dir/.gitignore".into(), "data.txt".into()),
+ (
+ "/sub_dir/sub_dir/.gitignore".into(),
+ "!file.txt\nignore.txt".into(),
+ ),
+ ]);
+ let mut ignore_tree = GitIgnoreTree::new(Arc::new(fs));
+ let mut run_test = |path: &str, expected: bool| {
+ let path = PathBuf::from(path);
+ let gitignore = ignore_tree
+ .get_resolved_git_ignore(path.parent().unwrap())
+ .unwrap();
+ assert_eq!(
+ gitignore.is_ignored(&path, /* is_dir */ false),
+ expected,
+ "Path: {}",
+ path.display()
+ );
+ };
+ run_test("/file.txt", true);
+ run_test("/other.txt", false);
+ run_test("/data.txt", false);
+ run_test("/sub_dir/file.txt", true);
+ run_test("/sub_dir/other.txt", false);
+ run_test("/sub_dir/data.txt", true);
+ run_test("/sub_dir/sub_dir/file.txt", false); // unignored up here
+ run_test("/sub_dir/sub_dir/sub_dir/file.txt", false);
+ run_test("/sub_dir/sub_dir/sub_dir/ignore.txt", true);
+ run_test("/sub_dir/sub_dir/ignore.txt", true);
+ run_test("/sub_dir/ignore.txt", false);
+ run_test("/ignore.txt", false);
+ }
+}
diff --git a/cli/util/mod.rs b/cli/util/mod.rs
index a6f72bc04..7e0e1bd37 100644
--- a/cli/util/mod.rs
+++ b/cli/util/mod.rs
@@ -8,6 +8,7 @@ pub mod display;
pub mod draw_thread;
pub mod file_watcher;
pub mod fs;
+pub mod gitignore;
pub mod logger;
pub mod path;
pub mod progress_bar;
diff --git a/cli/util/path.rs b/cli/util/path.rs
index 496b37c5e..fed74cb06 100644
--- a/cli/util/path.rs
+++ b/cli/util/path.rs
@@ -6,6 +6,9 @@ use std::path::PathBuf;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
+use deno_config::glob::PathGlobMatch;
+use deno_config::glob::PathOrPattern;
+use deno_config::glob::PathOrPatternSet;
use deno_core::error::uri_error;
use deno_core::error::AnyError;
@@ -244,6 +247,38 @@ pub fn root_url_to_safe_local_dirname(root: &ModuleSpecifier) -> PathBuf {
result
}
+/// Slightly different behaviour than the default matching
+/// where an exact path needs to be matched to be opted-in
+/// rather than just a partial directory match.
+///
+/// This is used by the test and bench filtering.
+pub fn matches_pattern_or_exact_path(
+ path_or_pattern_set: &PathOrPatternSet,
+ path: &Path,
+) -> bool {
+ for p in path_or_pattern_set.inner().iter().rev() {
+ match p {
+ PathOrPattern::Path(p) => {
+ if p == path {
+ return true;
+ }
+ }
+ PathOrPattern::NegatedPath(p) => {
+ if path.starts_with(p) {
+ return false;
+ }
+ }
+ PathOrPattern::RemoteUrl(_) => {}
+ PathOrPattern::Pattern(p) => match p.matches_path(path) {
+ PathGlobMatch::Matched => return true,
+ PathGlobMatch::MatchedNegated => return false,
+ PathGlobMatch::NotMatched => {}
+ },
+ }
+ }
+ false
+}
+
#[cfg(test)]
mod test {
use super::*;
diff --git a/ext/fs/in_memory_fs.rs b/ext/fs/in_memory_fs.rs
new file mode 100644
index 000000000..fdd0ad7e7
--- /dev/null
+++ b/ext/fs/in_memory_fs.rs
@@ -0,0 +1,425 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+// Allow using Arc for this module.
+#![allow(clippy::disallowed_types)]
+
+use std::collections::hash_map::Entry;
+use std::collections::HashMap;
+use std::io::Error;
+use std::io::ErrorKind;
+use std::path::Path;
+use std::path::PathBuf;
+use std::rc::Rc;
+use std::sync::Arc;
+
+use deno_core::normalize_path;
+use deno_core::parking_lot::Mutex;
+use deno_io::fs::File;
+use deno_io::fs::FsError;
+use deno_io::fs::FsResult;
+use deno_io::fs::FsStat;
+
+use crate::interface::FsDirEntry;
+use crate::interface::FsFileType;
+use crate::FileSystem;
+use crate::OpenOptions;
+
+#[derive(Debug)]
+enum PathEntry {
+ Dir,
+ File(Vec<u8>),
+}
+
+/// A very basic in-memory file system useful for swapping
+/// in place of a RealFs for testing purposes.
+///
+/// Please extend this as you need more functionality.
+#[derive(Debug, Default)]
+pub struct InMemoryFs {
+ entries: Mutex<HashMap<PathBuf, Arc<PathEntry>>>,
+}
+
+impl InMemoryFs {
+ pub fn setup_text_files(&self, files: Vec<(String, String)>) {
+ for (path, text) in files {
+ let path = PathBuf::from(path);
+ self.mkdir_sync(path.parent().unwrap(), true, 0).unwrap();
+ self
+ .write_file_sync(
+ &path,
+ OpenOptions::write(true, false, false, None),
+ &text.into_bytes(),
+ )
+ .unwrap();
+ }
+ }
+
+ fn get_entry(&self, path: &Path) -> Option<Arc<PathEntry>> {
+ let path = normalize_path(path);
+ self.entries.lock().get(&path).cloned()
+ }
+}
+
+#[async_trait::async_trait(?Send)]
+impl FileSystem for InMemoryFs {
+ fn cwd(&self) -> FsResult<PathBuf> {
+ Err(FsError::NotSupported)
+ }
+
+ fn tmp_dir(&self) -> FsResult<PathBuf> {
+ Err(FsError::NotSupported)
+ }
+
+ fn chdir(&self, _path: &Path) -> FsResult<()> {
+ Err(FsError::NotSupported)
+ }
+
+ fn umask(&self, _mask: Option<u32>) -> FsResult<u32> {
+ Err(FsError::NotSupported)
+ }
+
+ fn open_sync(
+ &self,
+ _path: &Path,
+ _options: OpenOptions,
+ ) -> FsResult<Rc<dyn File>> {
+ Err(FsError::NotSupported)
+ }
+ async fn open_async(
+ &self,
+ path: PathBuf,
+ options: OpenOptions,
+ ) -> FsResult<Rc<dyn File>> {
+ self.open_sync(&path, options)
+ }
+
+ fn mkdir_sync(
+ &self,
+ path: &Path,
+ recursive: bool,
+ _mode: u32,
+ ) -> FsResult<()> {
+ let path = normalize_path(path);
+
+ if let Some(parent) = path.parent() {
+ let entry = self.entries.lock().get(parent).cloned();
+ match entry {
+ Some(entry) => match &*entry {
+ PathEntry::File(_) => {
+ return Err(FsError::Io(Error::new(
+ ErrorKind::InvalidInput,
+ "Parent is a file",
+ )))
+ }
+ PathEntry::Dir => {}
+ },
+ None => {
+ if recursive {
+ self.mkdir_sync(parent, true, 0)?;
+ } else {
+ return Err(FsError::Io(Error::new(
+ ErrorKind::NotFound,
+ "Not found",
+ )));
+ }
+ }
+ }
+ }
+
+ let entry = self.entries.lock().get(&path).cloned();
+ match entry {
+ Some(entry) => match &*entry {
+ PathEntry::File(_) => Err(FsError::Io(Error::new(
+ ErrorKind::InvalidInput,
+ "Is a file",
+ ))),
+ PathEntry::Dir => Ok(()),
+ },
+ None => {
+ self.entries.lock().insert(path, Arc::new(PathEntry::Dir));
+ Ok(())
+ }
+ }
+ }
+ async fn mkdir_async(
+ &self,
+ path: PathBuf,
+ recursive: bool,
+ mode: u32,
+ ) -> FsResult<()> {
+ self.mkdir_sync(&path, recursive, mode)
+ }
+
+ fn chmod_sync(&self, _path: &Path, _mode: u32) -> FsResult<()> {
+ Err(FsError::NotSupported)
+ }
+ async fn chmod_async(&self, path: PathBuf, mode: u32) -> FsResult<()> {
+ self.chmod_sync(&path, mode)
+ }
+
+ fn chown_sync(
+ &self,
+ _path: &Path,
+ _uid: Option<u32>,
+ _gid: Option<u32>,
+ ) -> FsResult<()> {
+ Err(FsError::NotSupported)
+ }
+ async fn chown_async(
+ &self,
+ path: PathBuf,
+ uid: Option<u32>,
+ gid: Option<u32>,
+ ) -> FsResult<()> {
+ self.chown_sync(&path, uid, gid)
+ }
+
+ fn remove_sync(&self, _path: &Path, _recursive: bool) -> FsResult<()> {
+ Err(FsError::NotSupported)
+ }
+ async fn remove_async(&self, path: PathBuf, recursive: bool) -> FsResult<()> {
+ self.remove_sync(&path, recursive)
+ }
+
+ fn copy_file_sync(&self, _from: &Path, _to: &Path) -> FsResult<()> {
+ Err(FsError::NotSupported)
+ }
+ async fn copy_file_async(&self, from: PathBuf, to: PathBuf) -> FsResult<()> {
+ self.copy_file_sync(&from, &to)
+ }
+
+ fn cp_sync(&self, _from: &Path, _to: &Path) -> FsResult<()> {
+ Err(FsError::NotSupported)
+ }
+ async fn cp_async(&self, from: PathBuf, to: PathBuf) -> FsResult<()> {
+ self.cp_sync(&from, &to)
+ }
+
+ fn stat_sync(&self, path: &Path) -> FsResult<FsStat> {
+ let entry = self.get_entry(path);
+ match entry {
+ Some(entry) => match &*entry {
+ PathEntry::Dir => Ok(FsStat {
+ is_file: false,
+ is_directory: true,
+ is_symlink: false,
+ size: 0,
+ mtime: None,
+ atime: None,
+ birthtime: None,
+ dev: 0,
+ ino: 0,
+ mode: 0,
+ nlink: 0,
+ uid: 0,
+ gid: 0,
+ rdev: 0,
+ blksize: 0,
+ blocks: 0,
+ is_block_device: false,
+ is_char_device: false,
+ is_fifo: false,
+ is_socket: false,
+ }),
+ PathEntry::File(data) => Ok(FsStat {
+ is_file: true,
+ is_directory: false,
+ is_symlink: false,
+ size: data.len() as u64,
+ mtime: None,
+ atime: None,
+ birthtime: None,
+ dev: 0,
+ ino: 0,
+ mode: 0,
+ nlink: 0,
+ uid: 0,
+ gid: 0,
+ rdev: 0,
+ blksize: 0,
+ blocks: 0,
+ is_block_device: false,
+ is_char_device: false,
+ is_fifo: false,
+ is_socket: false,
+ }),
+ },
+ None => Err(FsError::Io(Error::new(ErrorKind::NotFound, "Not found"))),
+ }
+ }
+ async fn stat_async(&self, path: PathBuf) -> FsResult<FsStat> {
+ self.stat_sync(&path)
+ }
+
+ fn lstat_sync(&self, _path: &Path) -> FsResult<FsStat> {
+ Err(FsError::NotSupported)
+ }
+ async fn lstat_async(&self, path: PathBuf) -> FsResult<FsStat> {
+ self.lstat_sync(&path)
+ }
+
+ fn realpath_sync(&self, _path: &Path) -> FsResult<PathBuf> {
+ Err(FsError::NotSupported)
+ }
+ async fn realpath_async(&self, path: PathBuf) -> FsResult<PathBuf> {
+ self.realpath_sync(&path)
+ }
+
+ fn read_dir_sync(&self, _path: &Path) -> FsResult<Vec<FsDirEntry>> {
+ Err(FsError::NotSupported)
+ }
+ async fn read_dir_async(&self, path: PathBuf) -> FsResult<Vec<FsDirEntry>> {
+ self.read_dir_sync(&path)
+ }
+
+ fn rename_sync(&self, _oldpath: &Path, _newpath: &Path) -> FsResult<()> {
+ Err(FsError::NotSupported)
+ }
+ async fn rename_async(
+ &self,
+ oldpath: PathBuf,
+ newpath: PathBuf,
+ ) -> FsResult<()> {
+ self.rename_sync(&oldpath, &newpath)
+ }
+
+ fn link_sync(&self, _oldpath: &Path, _newpath: &Path) -> FsResult<()> {
+ Err(FsError::NotSupported)
+ }
+ async fn link_async(
+ &self,
+ oldpath: PathBuf,
+ newpath: PathBuf,
+ ) -> FsResult<()> {
+ self.link_sync(&oldpath, &newpath)
+ }
+
+ fn symlink_sync(
+ &self,
+ _oldpath: &Path,
+ _newpath: &Path,
+ _file_type: Option<FsFileType>,
+ ) -> FsResult<()> {
+ Err(FsError::NotSupported)
+ }
+ async fn symlink_async(
+ &self,
+ oldpath: PathBuf,
+ newpath: PathBuf,
+ file_type: Option<FsFileType>,
+ ) -> FsResult<()> {
+ self.symlink_sync(&oldpath, &newpath, file_type)
+ }
+
+ fn read_link_sync(&self, _path: &Path) -> FsResult<PathBuf> {
+ Err(FsError::NotSupported)
+ }
+ async fn read_link_async(&self, path: PathBuf) -> FsResult<PathBuf> {
+ self.read_link_sync(&path)
+ }
+
+ fn truncate_sync(&self, _path: &Path, _len: u64) -> FsResult<()> {
+ Err(FsError::NotSupported)
+ }
+ async fn truncate_async(&self, path: PathBuf, len: u64) -> FsResult<()> {
+ self.truncate_sync(&path, len)
+ }
+
+ fn utime_sync(
+ &self,
+ _path: &Path,
+ _atime_secs: i64,
+ _atime_nanos: u32,
+ _mtime_secs: i64,
+ _mtime_nanos: u32,
+ ) -> FsResult<()> {
+ Err(FsError::NotSupported)
+ }
+ async fn utime_async(
+ &self,
+ path: PathBuf,
+ atime_secs: i64,
+ atime_nanos: u32,
+ mtime_secs: i64,
+ mtime_nanos: u32,
+ ) -> FsResult<()> {
+ self.utime_sync(&path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)
+ }
+
+ fn write_file_sync(
+ &self,
+ path: &Path,
+ options: OpenOptions,
+ data: &[u8],
+ ) -> FsResult<()> {
+ let path = normalize_path(path);
+ let has_parent_dir = path
+ .parent()
+ .and_then(|parent| self.get_entry(parent))
+ .map(|e| matches!(*e, PathEntry::Dir))
+ .unwrap_or(false);
+ if !has_parent_dir {
+ return Err(FsError::Io(Error::new(
+ ErrorKind::NotFound,
+ "Parent directory does not exist",
+ )));
+ }
+ let mut entries = self.entries.lock();
+ let entry = entries.entry(path.clone());
+ match entry {
+ Entry::Occupied(mut entry) => {
+ if let PathEntry::File(existing_data) = &**entry.get() {
+ if options.create_new {
+ return Err(FsError::Io(Error::new(
+ ErrorKind::AlreadyExists,
+ "File already exists",
+ )));
+ }
+ if options.append {
+ let mut new_data = existing_data.clone();
+ new_data.extend_from_slice(data);
+ entry.insert(Arc::new(PathEntry::File(new_data)));
+ } else {
+ entry.insert(Arc::new(PathEntry::File(data.to_vec())));
+ }
+ Ok(())
+ } else {
+ Err(FsError::Io(Error::new(
+ ErrorKind::InvalidInput,
+ "Not a file",
+ )))
+ }
+ }
+ Entry::Vacant(entry) => {
+ entry.insert(Arc::new(PathEntry::File(data.to_vec())));
+ Ok(())
+ }
+ }
+ }
+
+ async fn write_file_async(
+ &self,
+ path: PathBuf,
+ options: OpenOptions,
+ data: Vec<u8>,
+ ) -> FsResult<()> {
+ self.write_file_sync(&path, options, &data)
+ }
+
+ fn read_file_sync(&self, path: &Path) -> FsResult<Vec<u8>> {
+ let entry = self.get_entry(path);
+ match entry {
+ Some(entry) => match &*entry {
+ PathEntry::File(data) => Ok(data.clone()),
+ PathEntry::Dir => Err(FsError::Io(Error::new(
+ ErrorKind::InvalidInput,
+ "Is a directory",
+ ))),
+ },
+ None => Err(FsError::Io(Error::new(ErrorKind::NotFound, "Not found"))),
+ }
+ }
+ async fn read_file_async(&self, path: PathBuf) -> FsResult<Vec<u8>> {
+ self.read_file_sync(&path)
+ }
+}
diff --git a/ext/fs/lib.rs b/ext/fs/lib.rs
index c31cdd85d..05b119e2e 100644
--- a/ext/fs/lib.rs
+++ b/ext/fs/lib.rs
@@ -1,10 +1,12 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+mod in_memory_fs;
mod interface;
mod ops;
mod std_fs;
pub mod sync;
+pub use crate::in_memory_fs::InMemoryFs;
pub use crate::interface::FileSystem;
pub use crate::interface::FileSystemRc;
pub use crate::interface::FsDirEntry;
diff --git a/tests/integration/bench_tests.rs b/tests/integration/bench_tests.rs
index 8621679dc..e0d3f8724 100644
--- a/tests/integration/bench_tests.rs
+++ b/tests/integration/bench_tests.rs
@@ -1,5 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+use deno_core::serde_json::json;
use deno_core::url::Url;
use test_util as util;
use test_util::itest;
@@ -8,6 +9,7 @@ use util::assert_contains;
use util::assert_not_contains;
use util::env_vars_for_npm_tests;
use util::TestContext;
+use util::TestContextBuilder;
itest!(overloads {
args: "bench bench/overloads.ts",
@@ -285,3 +287,32 @@ fn conditionally_loads_type_graph() {
.run();
assert_not_contains!(output.combined_output(), "type_reference.d.ts");
}
+
+#[test]
+fn opt_out_top_level_exclude_via_bench_unexclude() {
+ let context = TestContextBuilder::new().use_temp_cwd().build();
+ let temp_dir = context.temp_dir().path();
+ temp_dir.join("deno.json").write_json(&json!({
+ "bench": {
+ "exclude": [ "!excluded.bench.ts" ]
+ },
+ "exclude": [ "excluded.bench.ts", "actually_excluded.bench.ts" ]
+ }));
+
+ temp_dir
+ .join("main.bench.ts")
+ .write("Deno.bench('test1', () => {});");
+ temp_dir
+ .join("excluded.bench.ts")
+ .write("Deno.bench('test2', () => {});");
+ temp_dir
+ .join("actually_excluded.bench.ts")
+ .write("Deno.bench('test3', () => {});");
+
+ let output = context.new_command().arg("bench").run();
+ output.assert_exit_code(0);
+ let output = output.combined_output();
+ assert_contains!(output, "main.bench.ts");
+ assert_contains!(output, "excluded.bench.ts");
+ assert_not_contains!(output, "actually_excluded.bench.ts");
+}
diff --git a/tests/integration/fmt_tests.rs b/tests/integration/fmt_tests.rs
index 6588ae10a..417454888 100644
--- a/tests/integration/fmt_tests.rs
+++ b/tests/integration/fmt_tests.rs
@@ -1,8 +1,10 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+use deno_core::serde_json::json;
use test_util as util;
use test_util::itest;
use util::assert_contains;
+use util::assert_not_contains;
use util::PathRef;
use util::TestContext;
use util::TestContextBuilder;
@@ -351,3 +353,28 @@ fn fmt_with_glob_config_and_flags() {
assert_contains!(output, "Found 2 not formatted files in 2 files");
}
+
+#[test]
+fn opt_out_top_level_exclude_via_fmt_unexclude() {
+ let context = TestContextBuilder::new().use_temp_cwd().build();
+ let temp_dir = context.temp_dir().path();
+ temp_dir.join("deno.json").write_json(&json!({
+ "fmt": {
+ "exclude": [ "!excluded.ts" ]
+ },
+ "exclude": [ "excluded.ts", "actually_excluded.ts" ]
+ }));
+
+ temp_dir.join("main.ts").write("const a = 1;");
+ temp_dir.join("excluded.ts").write("const a = 2;");
+ temp_dir
+ .join("actually_excluded.ts")
+ .write("const a = 2;");
+
+ let output = context.new_command().arg("fmt").run();
+ output.assert_exit_code(0);
+ let output = output.combined_output();
+ assert_contains!(output, "main.ts");
+ assert_contains!(output, "excluded.ts");
+ assert_not_contains!(output, "actually_excluded.ts");
+}
diff --git a/tests/integration/lint_tests.rs b/tests/integration/lint_tests.rs
index ae0414262..a55fb1ef4 100644
--- a/tests/integration/lint_tests.rs
+++ b/tests/integration/lint_tests.rs
@@ -1,6 +1,8 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+use deno_core::serde_json::json;
use test_util::assert_contains;
+use test_util::assert_not_contains;
use test_util::itest;
use test_util::TestContextBuilder;
@@ -252,3 +254,26 @@ itest!(no_slow_types_workspace {
cwd: Some("lint/no_slow_types_workspace"),
exit_code: 1,
});
+
+#[test]
+fn opt_out_top_level_exclude_via_lint_unexclude() {
+ let context = TestContextBuilder::new().use_temp_cwd().build();
+ let temp_dir = context.temp_dir().path();
+ temp_dir.join("deno.json").write_json(&json!({
+ "lint": {
+ "exclude": [ "!excluded.ts" ]
+ },
+ "exclude": [ "excluded.ts", "actually_excluded.ts" ]
+ }));
+
+ temp_dir.join("main.ts").write("const a = 1;");
+ temp_dir.join("excluded.ts").write("const a = 2;");
+ temp_dir.join("actually_excluded.ts").write("const a = 2;");
+
+ let output = context.new_command().arg("lint").run();
+ output.assert_exit_code(1);
+ let output = output.combined_output();
+ assert_contains!(output, "main.ts");
+ assert_contains!(output, "excluded.ts");
+ assert_not_contains!(output, "actually_excluded.ts");
+}
diff --git a/tests/integration/lsp_tests.rs b/tests/integration/lsp_tests.rs
index 8165cc86a..a7193ff59 100644
--- a/tests/integration/lsp_tests.rs
+++ b/tests/integration/lsp_tests.rs
@@ -1652,11 +1652,9 @@ fn lsp_workspace_disable_enable_paths() {
.unwrap()
},
name: "project".to_string(),
- }])
- .set_deno_enable(false);
+ }]);
},
json!({ "deno": {
- "enable": false,
"disablePaths": ["./worker/node.ts"],
"enablePaths": ["./worker"],
} }),
diff --git a/tests/integration/publish_tests.rs b/tests/integration/publish_tests.rs
index 2c3bf9ff6..4d2db8635 100644
--- a/tests/integration/publish_tests.rs
+++ b/tests/integration/publish_tests.rs
@@ -408,7 +408,7 @@ fn ignores_directories() {
}
#[test]
-fn includes_directories_with_gitignore() {
+fn not_include_gitignored_file_even_if_matched_in_include() {
let context = publish_context_builder().build();
let temp_dir = context.temp_dir().path();
temp_dir.join("deno.json").write_json(&json!({
@@ -416,23 +416,75 @@ fn includes_directories_with_gitignore() {
"version": "1.0.0",
"exports": "./main.ts",
"publish": {
- "include": [ "deno.json", "main.ts" ]
+      // won't match the gitignored file because it needs to be
+      // unexcluded via a negated glob in "exclude"
+ "include": [ "deno.json", "*.ts" ]
}
}));
- temp_dir.join(".gitignore").write("main.ts");
+ temp_dir.join(".gitignore").write("ignored.ts");
temp_dir.join("main.ts").write("");
temp_dir.join("ignored.ts").write("");
- let output = context
- .new_command()
- .arg("publish")
- .arg("--token")
- .arg("sadfasdf")
- .run();
+ let output = context.new_command().arg("publish").arg("--dry-run").run();
output.assert_exit_code(0);
let output = output.combined_output();
assert_contains!(output, "main.ts");
+ // it's gitignored
+ assert_not_contains!(output, "ignored.ts");
+}
+
+#[test]
+fn includes_directories_with_gitignore_when_unexcluded() {
+ let context = publish_context_builder().build();
+ let temp_dir = context.temp_dir().path();
+ temp_dir.join("deno.json").write_json(&json!({
+ "name": "@foo/bar",
+ "version": "1.0.0",
+ "exports": "./main.ts",
+ "publish": {
+ "include": [ "deno.json", "*.ts" ],
+ "exclude": [ "!ignored.ts" ]
+ }
+ }));
+
+ temp_dir.join(".gitignore").write("ignored.ts");
+ temp_dir.join("main.ts").write("");
+ temp_dir.join("ignored.ts").write("");
+
+ let output = context.new_command().arg("publish").arg("--dry-run").run();
+ output.assert_exit_code(0);
+ let output = output.combined_output();
+ assert_contains!(output, "main.ts");
+ assert_contains!(output, "ignored.ts");
+}
+
+#[test]
+fn includes_unexcluded_sub_dir() {
+ let context = publish_context_builder().build();
+ let temp_dir = context.temp_dir().path();
+ temp_dir.join("deno.json").write_json(&json!({
+ "name": "@foo/bar",
+ "version": "1.0.0",
+ "exports": "./included1.ts",
+ "publish": {
+ "exclude": [
+ "ignored",
+ "!ignored/unexcluded",
+ ]
+ }
+ }));
+
+ temp_dir.join("included1.ts").write("");
+ temp_dir.join("ignored/unexcluded").create_dir_all();
+ temp_dir.join("ignored/ignored.ts").write("");
+ temp_dir.join("ignored/unexcluded/included2.ts").write("");
+
+ let output = context.new_command().arg("publish").arg("--dry-run").run();
+ output.assert_exit_code(0);
+ let output = output.combined_output();
+ assert_contains!(output, "included1.ts");
+ assert_contains!(output, "included2.ts");
assert_not_contains!(output, "ignored.ts");
}
@@ -465,7 +517,7 @@ fn includes_directories() {
}
#[test]
-fn includes_dotenv() {
+fn not_includes_gitignored_dotenv() {
let context = publish_context_builder().build();
let temp_dir = context.temp_dir().path();
temp_dir.join("deno.json").write_json(&json!({
@@ -476,14 +528,9 @@ fn includes_dotenv() {
temp_dir.join("main.ts").write("");
temp_dir.join(".env").write("FOO=BAR");
+ temp_dir.join(".gitignore").write(".env");
- let output = context
- .new_command()
- .arg("publish")
- .arg("--token")
- .arg("sadfasdf")
- .arg("--dry-run")
- .run();
+ let output = context.new_command().arg("publish").arg("--dry-run").run();
output.assert_exit_code(0);
let output = output.combined_output();
assert_contains!(output, "main.ts");
diff --git a/tests/integration/test_tests.rs b/tests/integration/test_tests.rs
index cd85fd102..d5768b5ba 100644
--- a/tests/integration/test_tests.rs
+++ b/tests/integration/test_tests.rs
@@ -1,5 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+use deno_core::serde_json::json;
use deno_core::url::Url;
use test_util as util;
use test_util::itest;
@@ -668,3 +669,32 @@ itest!(test_include_relative_pattern_dot_slash {
output: "test/relative_pattern_dot_slash/output.out",
cwd: Some("test/relative_pattern_dot_slash"),
});
+
+#[test]
+fn opt_out_top_level_exclude_via_test_unexclude() {
+ let context = TestContextBuilder::new().use_temp_cwd().build();
+ let temp_dir = context.temp_dir().path();
+ temp_dir.join("deno.json").write_json(&json!({
+ "test": {
+ "exclude": [ "!excluded.test.ts" ]
+ },
+ "exclude": [ "excluded.test.ts", "actually_excluded.test.ts" ]
+ }));
+
+ temp_dir
+ .join("main.test.ts")
+ .write("Deno.test('test1', () => {});");
+ temp_dir
+ .join("excluded.test.ts")
+ .write("Deno.test('test2', () => {});");
+ temp_dir
+ .join("actually_excluded.test.ts")
+ .write("Deno.test('test3', () => {});");
+
+ let output = context.new_command().arg("test").run();
+ output.assert_exit_code(0);
+ let output = output.combined_output();
+ assert_contains!(output, "main.test.ts");
+ assert_contains!(output, "excluded.test.ts");
+ assert_not_contains!(output, "actually_excluded.test.ts");
+}