Diffstat (limited to 'cli')
-rw-r--r--  cli/config_file.rs                              174
-rw-r--r--  cli/fs_util.rs                                   62
-rw-r--r--  cli/lsp/diagnostics.rs                           91
-rw-r--r--  cli/lsp/language_server.rs                       40
-rw-r--r--  cli/main.rs                                        8
-rw-r--r--  cli/proc_state.rs                                  2
-rw-r--r--  cli/tests/integration/lsp_tests.rs               97
-rw-r--r--  cli/tests/testdata/lsp/deno.fmt.exclude.jsonc    16
-rw-r--r--  cli/tests/testdata/lsp/deno.lint.exclude.jsonc   18
-rw-r--r--  cli/tools/fmt.rs                                 15
-rw-r--r--  cli/tools/lint.rs                                16
11 files changed, 391 insertions, 148 deletions
diff --git a/cli/config_file.rs b/cli/config_file.rs
index 879db3451..363a215eb 100644
--- a/cli/config_file.rs
+++ b/cli/config_file.rs
@@ -1,8 +1,11 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
use crate::fs_util::canonicalize_path;
+use crate::fs_util::specifier_parent;
+use crate::fs_util::specifier_to_file_path;
use deno_core::anyhow::anyhow;
+use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
@@ -17,7 +20,6 @@ use std::collections::BTreeMap;
use std::collections::HashMap;
use std::fmt;
use std::path::Path;
-use std::path::PathBuf;
pub(crate) type MaybeImportsResult =
Result<Option<Vec<(ModuleSpecifier, Vec<String>)>>, AnyError>;
@@ -54,15 +56,15 @@ pub struct CompilerOptions {
#[derive(Debug, Clone, PartialEq)]
pub struct IgnoredCompilerOptions {
pub items: Vec<String>,
- pub maybe_path: Option<PathBuf>,
+ pub maybe_specifier: Option<ModuleSpecifier>,
}
impl fmt::Display for IgnoredCompilerOptions {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut codes = self.items.clone();
codes.sort();
- if let Some(path) = &self.maybe_path {
- write!(f, "Unsupported compiler options in \"{}\".\n The following options were ignored:\n {}", path.to_string_lossy(), codes.join(", "))
+ if let Some(specifier) = &self.maybe_specifier {
+ write!(f, "Unsupported compiler options in \"{}\".\n The following options were ignored:\n {}", specifier, codes.join(", "))
} else {
write!(f, "Unsupported compiler options provided.\n The following options were ignored:\n {}", codes.join(", "))
}
@@ -169,7 +171,7 @@ pub fn json_merge(a: &mut Value, b: &Value) {
fn parse_compiler_options(
compiler_options: &HashMap<String, Value>,
- maybe_path: Option<PathBuf>,
+ maybe_specifier: Option<ModuleSpecifier>,
is_runtime: bool,
) -> Result<(Value, Option<IgnoredCompilerOptions>), AnyError> {
let mut filtered: HashMap<String, Value> = HashMap::new();
@@ -187,7 +189,10 @@ fn parse_compiler_options(
}
let value = serde_json::to_value(filtered)?;
let maybe_ignored_options = if !items.is_empty() {
- Some(IgnoredCompilerOptions { items, maybe_path })
+ Some(IgnoredCompilerOptions {
+ items,
+ maybe_specifier,
+ })
} else {
None
};
@@ -292,27 +297,53 @@ struct SerializedFilesConfig {
}
impl SerializedFilesConfig {
- pub fn into_resolved(self, config_file_path: &Path) -> FilesConfig {
- let config_dir = config_file_path.parent().unwrap();
- FilesConfig {
+ pub fn into_resolved(
+ self,
+ config_file_specifier: &ModuleSpecifier,
+ ) -> Result<FilesConfig, AnyError> {
+ let config_dir = specifier_parent(config_file_specifier);
+ Ok(FilesConfig {
include: self
.include
.into_iter()
- .map(|p| config_dir.join(p))
- .collect(),
+ .map(|p| config_dir.join(&p))
+ .collect::<Result<Vec<ModuleSpecifier>, _>>()?,
exclude: self
.exclude
.into_iter()
- .map(|p| config_dir.join(p))
- .collect(),
- }
+ .map(|p| config_dir.join(&p))
+ .collect::<Result<Vec<ModuleSpecifier>, _>>()?,
+ })
}
}
#[derive(Clone, Debug, Default)]
pub struct FilesConfig {
- pub include: Vec<PathBuf>,
- pub exclude: Vec<PathBuf>,
+ pub include: Vec<ModuleSpecifier>,
+ pub exclude: Vec<ModuleSpecifier>,
+}
+
+impl FilesConfig {
+ /// Returns whether the provided specifier is allowed based on the include
+ /// and exclude lists in the configuration file.
+ pub fn matches_specifier(&self, specifier: &ModuleSpecifier) -> bool {
+ // Skip files that are in the exclude list.
+ let specifier_text = specifier.as_str();
+ if self
+ .exclude
+ .iter()
+ .any(|i| specifier_text.starts_with(i.as_str()))
+ {
+ return false;
+ }
+
+ // Ignore files not in the include list, unless the list is empty.
+ self.include.is_empty()
+ || self
+ .include
+ .iter()
+ .any(|i| specifier_text.starts_with(i.as_str()))
+ }
}
#[derive(Clone, Debug, Default, Deserialize)]
@@ -323,11 +354,14 @@ struct SerializedLintConfig {
}
impl SerializedLintConfig {
- pub fn into_resolved(self, config_file_path: &Path) -> LintConfig {
- LintConfig {
+ pub fn into_resolved(
+ self,
+ config_file_specifier: &ModuleSpecifier,
+ ) -> Result<LintConfig, AnyError> {
+ Ok(LintConfig {
rules: self.rules,
- files: self.files.into_resolved(config_file_path),
- }
+ files: self.files.into_resolved(config_file_specifier)?,
+ })
}
}
@@ -363,11 +397,14 @@ struct SerializedFmtConfig {
}
impl SerializedFmtConfig {
- pub fn into_resolved(self, config_file_path: &Path) -> FmtConfig {
- FmtConfig {
+ pub fn into_resolved(
+ self,
+ config_file_specifier: &ModuleSpecifier,
+ ) -> Result<FmtConfig, AnyError> {
+ Ok(FmtConfig {
options: self.options,
- files: self.files.into_resolved(config_file_path),
- }
+ files: self.files.into_resolved(config_file_specifier)?,
+ })
}
}
@@ -387,7 +424,7 @@ pub struct ConfigFileJson {
#[derive(Clone, Debug)]
pub struct ConfigFile {
- pub path: PathBuf,
+ pub specifier: ModuleSpecifier,
pub json: ConfigFileJson,
}
@@ -409,24 +446,46 @@ impl ConfigFile {
),
)
})?;
- let config_text = std::fs::read_to_string(config_path.clone())?;
- Self::new(&config_text, &config_path)
+ let config_specifier = ModuleSpecifier::from_file_path(&config_path)
+ .map_err(|_| {
+ anyhow!(
+ "Could not convert path to specifier. Path: {}",
+ config_path.display()
+ )
+ })?;
+ Self::from_specifier(&config_specifier)
+ }
+
+ pub fn from_specifier(specifier: &ModuleSpecifier) -> Result<Self, AnyError> {
+ let config_path = specifier_to_file_path(specifier)?;
+ let config_text = match std::fs::read_to_string(&config_path) {
+ Ok(text) => text,
+ Err(err) => bail!(
+ "Error reading config file {}: {}",
+ specifier,
+ err.to_string()
+ ),
+ };
+ Self::new(&config_text, specifier)
}
- pub fn new(text: &str, path: &Path) -> Result<Self, AnyError> {
+ pub fn new(
+ text: &str,
+ specifier: &ModuleSpecifier,
+ ) -> Result<Self, AnyError> {
let jsonc = match jsonc_parser::parse_to_serde_value(text) {
Ok(None) => json!({}),
Ok(Some(value)) if value.is_object() => value,
Ok(Some(_)) => {
return Err(anyhow!(
"config file JSON {:?} should be an object",
- path.to_str().unwrap()
+ specifier,
))
}
Err(e) => {
return Err(anyhow!(
"Unable to parse config file JSON {:?} because of {}",
- path.to_str().unwrap(),
+ specifier,
e.to_string()
))
}
@@ -434,7 +493,7 @@ impl ConfigFile {
let json: ConfigFileJson = serde_json::from_value(jsonc)?;
Ok(Self {
- path: path.to_owned(),
+ specifier: specifier.to_owned(),
json,
})
}
@@ -448,7 +507,7 @@ impl ConfigFile {
let options: HashMap<String, Value> =
serde_json::from_value(compiler_options)
.context("compilerOptions should be an object")?;
- parse_compiler_options(&options, Some(self.path.to_owned()), false)
+ parse_compiler_options(&options, Some(self.specifier.to_owned()), false)
} else {
Ok((json!({}), None))
}
@@ -458,7 +517,7 @@ impl ConfigFile {
if let Some(config) = self.json.lint.clone() {
let lint_config: SerializedLintConfig = serde_json::from_value(config)
.context("Failed to parse \"lint\" configuration")?;
- Ok(Some(lint_config.into_resolved(&self.path)))
+ Ok(Some(lint_config.into_resolved(&self.specifier)?))
} else {
Ok(None)
}
@@ -476,8 +535,6 @@ impl ConfigFile {
};
let compiler_options: CompilerOptions =
serde_json::from_value(compiler_options_value.clone())?;
- let referrer = ModuleSpecifier::from_file_path(&self.path)
- .map_err(|_| custom_error("TypeError", "bad config file specifier"))?;
if let Some(types) = compiler_options.types {
imports.extend(types);
}
@@ -493,6 +550,7 @@ impl ConfigFile {
));
}
if !imports.is_empty() {
+ let referrer = self.specifier.clone();
Ok(Some(vec![(referrer, imports)]))
} else {
Ok(None)
@@ -516,7 +574,7 @@ impl ConfigFile {
if let Some(config) = self.json.fmt.clone() {
let fmt_config: SerializedFmtConfig = serde_json::from_value(config)
.context("Failed to parse \"fmt\" configuration")?;
- Ok(Some(fmt_config.into_resolved(&self.path)))
+ Ok(Some(fmt_config.into_resolved(&self.specifier)?))
} else {
Ok(None)
}
@@ -603,9 +661,9 @@ mod tests {
}
}
}"#;
- let config_dir = PathBuf::from("/deno");
- let config_path = config_dir.join("tsconfig.json");
- let config_file = ConfigFile::new(config_text, &config_path).unwrap();
+ let config_dir = ModuleSpecifier::parse("file:///deno/").unwrap();
+ let config_specifier = config_dir.join("tsconfig.json").unwrap();
+ let config_file = ConfigFile::new(config_text, &config_specifier).unwrap();
let (options_value, ignored) =
config_file.to_compiler_options().expect("error parsing");
assert!(options_value.is_object());
@@ -616,7 +674,7 @@ mod tests {
ignored,
Some(IgnoredCompilerOptions {
items: vec!["build".to_string()],
- maybe_path: Some(config_path),
+ maybe_specifier: Some(config_specifier),
}),
);
@@ -624,10 +682,13 @@ mod tests {
.to_lint_config()
.expect("error parsing lint object")
.expect("lint object should be defined");
- assert_eq!(lint_config.files.include, vec![config_dir.join("src")]);
+ assert_eq!(
+ lint_config.files.include,
+ vec![config_dir.join("src/").unwrap()]
+ );
assert_eq!(
lint_config.files.exclude,
- vec![config_dir.join("src/testdata/")]
+ vec![config_dir.join("src/testdata/").unwrap()]
);
assert_eq!(
lint_config.rules.include,
@@ -643,10 +704,13 @@ mod tests {
.to_fmt_config()
.expect("error parsing fmt object")
.expect("fmt object should be defined");
- assert_eq!(fmt_config.files.include, vec![config_dir.join("src")]);
+ assert_eq!(
+ fmt_config.files.include,
+ vec![config_dir.join("src/").unwrap()]
+ );
assert_eq!(
fmt_config.files.exclude,
- vec![config_dir.join("src/testdata/")]
+ vec![config_dir.join("src/testdata/").unwrap()]
);
assert_eq!(fmt_config.options.use_tabs, Some(true));
assert_eq!(fmt_config.options.line_width, Some(80));
@@ -657,8 +721,9 @@ mod tests {
#[test]
fn test_parse_config_with_empty_file() {
let config_text = "";
- let config_path = PathBuf::from("/deno/tsconfig.json");
- let config_file = ConfigFile::new(config_text, &config_path).unwrap();
+ let config_specifier =
+ ModuleSpecifier::parse("file:///deno/tsconfig.json").unwrap();
+ let config_file = ConfigFile::new(config_text, &config_specifier).unwrap();
let (options_value, _) =
config_file.to_compiler_options().expect("error parsing");
assert!(options_value.is_object());
@@ -667,8 +732,9 @@ mod tests {
#[test]
fn test_parse_config_with_commented_file() {
let config_text = r#"//{"foo":"bar"}"#;
- let config_path = PathBuf::from("/deno/tsconfig.json");
- let config_file = ConfigFile::new(config_text, &config_path).unwrap();
+ let config_specifier =
+ ModuleSpecifier::parse("file:///deno/tsconfig.json").unwrap();
+ let config_file = ConfigFile::new(config_text, &config_specifier).unwrap();
let (options_value, _) =
config_file.to_compiler_options().expect("error parsing");
assert!(options_value.is_object());
@@ -677,17 +743,19 @@ mod tests {
#[test]
fn test_parse_config_with_invalid_file() {
let config_text = "{foo:bar}";
- let config_path = PathBuf::from("/deno/tsconfig.json");
+ let config_specifier =
+ ModuleSpecifier::parse("file:///deno/tsconfig.json").unwrap();
// Emit error: Unable to parse config file JSON "<config_path>" because of Unexpected token on line 1 column 6.
- assert!(ConfigFile::new(config_text, &config_path).is_err());
+ assert!(ConfigFile::new(config_text, &config_specifier).is_err());
}
#[test]
fn test_parse_config_with_not_object_file() {
let config_text = "[]";
- let config_path = PathBuf::from("/deno/tsconfig.json");
+ let config_specifier =
+ ModuleSpecifier::parse("file:///deno/tsconfig.json").unwrap();
// Emit error: config file JSON "<config_path>" should be an object
- assert!(ConfigFile::new(config_text, &config_path).is_err());
+ assert!(ConfigFile::new(config_text, &config_specifier).is_err());
}
#[test]
@@ -717,7 +785,7 @@ mod tests {
maybe_ignored_options,
Some(IgnoredCompilerOptions {
items: vec!["build".to_string()],
- maybe_path: None
+ maybe_specifier: None
})
);
}
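
For reference, a standalone sketch (not part of the patch) of the prefix-based include/exclude matching that the new FilesConfig::matches_specifier performs: exclusions are checked first, and an empty include list allows everything that was not excluded. Url stands in for ModuleSpecifier, which deno_core re-exports from the url crate; the specifiers below are illustrative.

// Standalone sketch of FilesConfig::matches_specifier's matching logic.
use url::Url;

fn matches_specifier(include: &[Url], exclude: &[Url], specifier: &Url) -> bool {
  let text = specifier.as_str();
  // Excluded specifiers are rejected first.
  if exclude.iter().any(|e| text.starts_with(e.as_str())) {
    return false;
  }
  // An empty include list allows everything that was not excluded.
  include.is_empty() || include.iter().any(|i| text.starts_with(i.as_str()))
}

fn main() {
  let include = vec![Url::parse("file:///deno/src/").unwrap()];
  let exclude = vec![Url::parse("file:///deno/src/testdata/").unwrap()];
  assert!(matches_specifier(
    &include,
    &exclude,
    &Url::parse("file:///deno/src/main.ts").unwrap()
  ));
  assert!(!matches_specifier(
    &include,
    &exclude,
    &Url::parse("file:///deno/src/testdata/fixture.ts").unwrap()
  ));
}
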
diff --git a/cli/fs_util.rs b/cli/fs_util.rs
index 54bb8f3c6..20b20cccd 100644
--- a/cli/fs_util.rs
+++ b/cli/fs_util.rs
@@ -350,6 +350,37 @@ pub fn specifier_to_file_path(
}
}
+/// Ensures a specifier that will definitely be a directory has a trailing slash.
+pub fn ensure_directory_specifier(
+ mut specifier: ModuleSpecifier,
+) -> ModuleSpecifier {
+ let path = specifier.path();
+ if !path.ends_with('/') {
+ let new_path = format!("{}/", path);
+ specifier.set_path(&new_path);
+ }
+ specifier
+}
+
+/// Gets the parent of this module specifier.
+pub fn specifier_parent(specifier: &ModuleSpecifier) -> ModuleSpecifier {
+ let mut specifier = specifier.clone();
+ // don't use specifier.segments() because it will strip the leading slash
+ let mut segments = specifier.path().split('/').collect::<Vec<_>>();
+ if segments.iter().all(|s| s.is_empty()) {
+ return specifier;
+ }
+ if let Some(last) = segments.last() {
+ if last.is_empty() {
+ segments.pop();
+ }
+ segments.pop();
+ let new_path = format!("{}/", segments.join("/"));
+ specifier.set_path(&new_path);
+ }
+ specifier
+}
+
#[cfg(test)]
mod tests {
use super::*;
@@ -688,4 +719,35 @@ mod tests {
assert_eq!(result, PathBuf::from(expected_path));
}
}
+
+ #[test]
+ fn test_ensure_directory_specifier() {
+ run_test("file:///", "file:///");
+ run_test("file:///test", "file:///test/");
+ run_test("file:///test/", "file:///test/");
+ run_test("file:///test/other", "file:///test/other/");
+ run_test("file:///test/other/", "file:///test/other/");
+
+ fn run_test(specifier: &str, expected: &str) {
+ let result =
+ ensure_directory_specifier(ModuleSpecifier::parse(specifier).unwrap());
+ assert_eq!(result.to_string(), expected);
+ }
+ }
+
+ #[test]
+ fn test_specifier_parent() {
+ run_test("file:///", "file:///");
+ run_test("file:///test", "file:///");
+ run_test("file:///test/", "file:///");
+ run_test("file:///test/other", "file:///test/");
+ run_test("file:///test/other.txt", "file:///test/");
+ run_test("file:///test/other/", "file:///test/");
+
+ fn run_test(specifier: &str, expected: &str) {
+ let result =
+ specifier_parent(&ModuleSpecifier::parse(specifier).unwrap());
+ assert_eq!(result.to_string(), expected);
+ }
+ }
}
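
A minimal illustration (not part of the patch) of why ensure_directory_specifier matters: URL resolution replaces the last path segment of a base that lacks a trailing slash, so a root URI must be slash-terminated before relative config paths are joined against it. The workspace path here is made up.

// Why root URIs are slash-terminated before joining relative paths.
use url::Url;

fn main() {
  let without_slash = Url::parse("file:///workspace").unwrap();
  let with_slash = Url::parse("file:///workspace/").unwrap();
  // "workspace" is treated as a file name and replaced by the join.
  assert_eq!(
    without_slash.join("deno.jsonc").unwrap().as_str(),
    "file:///deno.jsonc"
  );
  // With the trailing slash the path resolves inside the directory.
  assert_eq!(
    with_slash.join("deno.jsonc").unwrap().as_str(),
    "file:///workspace/deno.jsonc"
  );
}
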
diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs
index 8a0d50ef1..c20307bb3 100644
--- a/cli/lsp/diagnostics.rs
+++ b/cli/lsp/diagnostics.rs
@@ -2,13 +2,11 @@
use super::analysis;
use super::documents;
-use super::documents::Document;
use super::documents::Documents;
use super::language_server;
use super::tsc;
use crate::diagnostics;
-use crate::fs_util::specifier_to_file_path;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
@@ -302,59 +300,14 @@ fn ts_json_to_diagnostics(
.collect()
}
-// Filters documents according to the `include` and the `exclude` lists (from `StateSnapshot::maybe_lint_config`).
-// If a document is in the `exclude` list - then it be removed.
-// If the `include` list is not empty, and a document is not in - then it be removed too.
-fn filter_lint_documents(
- snapshot: &language_server::StateSnapshot,
- documents: &mut Vec<Document>,
-) {
- let lint_config = match &snapshot.maybe_lint_config {
- Some(config) => config,
- None => return,
- };
-
- documents.retain(|doc| {
- let path = if let Ok(file_path) = specifier_to_file_path(doc.specifier()) {
- file_path
- } else {
- return false;
- };
-
- // Skip files which is in the exclude list.
- if lint_config
- .files
- .exclude
- .iter()
- .any(|i| path.starts_with(i))
- {
- return false;
- }
-
- // Early return if the include list is empty.
- if lint_config.files.include.is_empty() {
- return true;
- }
-
- // Ignore files not in the include list.
- lint_config
- .files
- .include
- .iter()
- .any(|i| path.starts_with(i))
- });
-}
-
async fn generate_lint_diagnostics(
snapshot: &language_server::StateSnapshot,
collection: Arc<Mutex<DiagnosticCollection>>,
) -> Result<DiagnosticVec, AnyError> {
- let mut documents = snapshot.documents.documents(true, true);
+ let documents = snapshot.documents.documents(true, true);
let workspace_settings = snapshot.config.settings.workspace.clone();
let maybe_lint_config = snapshot.maybe_lint_config.clone();
- filter_lint_documents(snapshot, &mut documents);
-
tokio::task::spawn(async move {
let mut diagnostics_vec = Vec::new();
if workspace_settings.lint {
@@ -365,25 +318,35 @@ async fn generate_lint_diagnostics(
.await
.get_version(document.specifier(), &DiagnosticSource::DenoLint);
if version != current_version {
- let diagnostics = match document.maybe_parsed_source() {
- Some(Ok(parsed_source)) => {
- if let Ok(references) = analysis::get_lint_references(
- &parsed_source,
- maybe_lint_config.as_ref(),
- ) {
- references
- .into_iter()
- .map(|r| r.to_diagnostic())
- .collect::<Vec<_>>()
- } else {
+ let is_allowed = match &maybe_lint_config {
+ Some(lint_config) => {
+ lint_config.files.matches_specifier(document.specifier())
+ }
+ None => true,
+ };
+ let diagnostics = if is_allowed {
+ match document.maybe_parsed_source() {
+ Some(Ok(parsed_source)) => {
+ if let Ok(references) = analysis::get_lint_references(
+ &parsed_source,
+ maybe_lint_config.as_ref(),
+ ) {
+ references
+ .into_iter()
+ .map(|r| r.to_diagnostic())
+ .collect::<Vec<_>>()
+ } else {
+ Vec::new()
+ }
+ }
+ Some(Err(_)) => Vec::new(),
+ None => {
+ error!("Missing file contents for: {}", document.specifier());
Vec::new()
}
}
- Some(Err(_)) => Vec::new(),
- None => {
- error!("Missing file contents for: {}", document.specifier());
- Vec::new()
- }
+ } else {
+ Vec::new()
};
diagnostics_vec.push((
document.specifier().clone(),
diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs
index a2faf8a83..e7d0dae34 100644
--- a/cli/lsp/language_server.rs
+++ b/cli/lsp/language_server.rs
@@ -60,7 +60,6 @@ use crate::config_file::TsConfig;
use crate::deno_dir;
use crate::file_fetcher::get_source_from_data_url;
use crate::fs_util;
-use crate::fs_util::specifier_to_file_path;
use crate::logger;
use crate::tools::fmt::format_file;
use crate::tools::fmt::format_parsed_source;
@@ -304,9 +303,7 @@ impl Inner {
let config_url = if let Ok(url) = Url::from_file_path(config_str) {
Ok(url)
} else if let Some(root_uri) = maybe_root_uri {
- let root_path = specifier_to_file_path(&root_uri)?;
- let config_path = root_path.join(config_str);
- Url::from_file_path(config_path).map_err(|_| {
+ root_uri.join(config_str).map_err(|_| {
anyhow!("Bad file path for configuration file: \"{}\"", config_str)
})
} else {
@@ -317,13 +314,7 @@ impl Inner {
}?;
info!(" Resolved configuration file: \"{}\"", config_url);
- let config_file = {
- let buffer = specifier_to_file_path(&config_url)?;
- let path = buffer
- .to_str()
- .ok_or_else(|| anyhow!("Bad uri: \"{}\"", config_url))?;
- ConfigFile::read(path)?
- };
+ let config_file = ConfigFile::from_specifier(&config_url)?;
return Ok(Some((config_file, config_url)));
}
}
@@ -401,7 +392,7 @@ impl Inner {
let cache_url = if let Ok(url) = Url::from_file_path(cache_str) {
Ok(url)
} else if let Some(root_uri) = &maybe_root_uri {
- let root_path = specifier_to_file_path(root_uri)?;
+ let root_path = fs_util::specifier_to_file_path(root_uri)?;
let cache_path = root_path.join(cache_str);
Url::from_file_path(cache_path).map_err(|_| {
anyhow!("Bad file path for import path: {:?}", cache_str)
@@ -412,7 +403,7 @@ impl Inner {
cache_str
))
}?;
- let cache_path = specifier_to_file_path(&cache_url)?;
+ let cache_path = fs_util::specifier_to_file_path(&cache_url)?;
info!(
" Resolved cache path: \"{}\"",
cache_path.to_string_lossy()
@@ -457,7 +448,7 @@ impl Inner {
anyhow!("Bad data url for import map: {:?}", import_map_str)
})
} else if let Some(root_uri) = &maybe_root_uri {
- let root_path = specifier_to_file_path(root_uri)?;
+ let root_path = fs_util::specifier_to_file_path(root_uri)?;
let import_map_path = root_path.join(import_map_str);
Url::from_file_path(import_map_path).map_err(|_| {
anyhow!("Bad file path for import map: {:?}", import_map_str)
@@ -472,7 +463,7 @@ impl Inner {
let import_map_json = if import_map_url.scheme() == "data" {
get_source_from_data_url(&import_map_url)?.0
} else {
- let import_map_path = specifier_to_file_path(&import_map_url)?;
+ let import_map_path = fs_util::specifier_to_file_path(&import_map_url)?;
info!(
" Resolved import map: \"{}\"",
import_map_path.to_string_lossy()
@@ -660,7 +651,12 @@ impl Inner {
{
let config = &mut self.config;
- config.root_uri = params.root_uri;
+ // sometimes this root uri may not have a trailing slash, so ensure it has one
+ config.root_uri = params
+ .root_uri
+ .map(|s| self.url_map.normalize_url(&s))
+ .map(fs_util::ensure_directory_specifier);
+
if let Some(value) = params.initialization_options {
config.set_workspace_settings(value).map_err(|err| {
error!("Cannot set workspace settings: {}", err);
@@ -1019,12 +1015,16 @@ impl Inner {
};
let mark = self.performance.mark("formatting", Some(&params));
let file_path =
- specifier_to_file_path(&params.text_document.uri).map_err(|err| {
+ fs_util::specifier_to_file_path(&specifier).map_err(|err| {
error!("{}", err);
LspError::invalid_request()
})?;
let fmt_options = if let Some(fmt_config) = self.maybe_fmt_config.as_ref() {
+ // skip formatting any files ignored by the config file
+ if !fmt_config.files.matches_specifier(&specifier) {
+ return Ok(None);
+ }
fmt_config.options.clone()
} else {
Default::default()
@@ -1907,7 +1907,7 @@ impl Inner {
.config
.root_uri
.as_ref()
- .and_then(|uri| specifier_to_file_path(uri).ok());
+ .and_then(|uri| fs_util::specifier_to_file_path(uri).ok());
let mut resolved_items = Vec::<CallHierarchyIncomingCall>::new();
for item in incoming_calls.iter() {
if let Some(resolved) = item
@@ -1956,7 +1956,7 @@ impl Inner {
.config
.root_uri
.as_ref()
- .and_then(|uri| specifier_to_file_path(uri).ok());
+ .and_then(|uri| fs_util::specifier_to_file_path(uri).ok());
let mut resolved_items = Vec::<CallHierarchyOutgoingCall>::new();
for item in outgoing_calls.iter() {
if let Some(resolved) = item
@@ -2012,7 +2012,7 @@ impl Inner {
.config
.root_uri
.as_ref()
- .and_then(|uri| specifier_to_file_path(uri).ok());
+ .and_then(|uri| fs_util::specifier_to_file_path(uri).ok());
let mut resolved_items = Vec::<CallHierarchyItem>::new();
match one_or_many {
tsc::OneOrMany::One(item) => {
diff --git a/cli/main.rs b/cli/main.rs
index 87fe28461..e2ef26cd3 100644
--- a/cli/main.rs
+++ b/cli/main.rs
@@ -217,7 +217,7 @@ pub fn create_main_worker(
}
} else if let Some(config_file) = &ps.maybe_config_file {
// otherwise we will use the path to the config file
- config_file.path.to_str().map(|s| s.to_string())
+ Some(config_file.specifier.to_string())
} else {
// otherwise we will use the path to the main module
Some(main_module.to_string())
@@ -724,10 +724,8 @@ async fn create_graph_and_maybe_check(
if let Some(ignored_options) = maybe_ignored_options {
eprintln!("{}", ignored_options);
}
- let maybe_config_specifier = ps
- .maybe_config_file
- .as_ref()
- .map(|cf| ModuleSpecifier::from_file_path(&cf.path).unwrap());
+ let maybe_config_specifier =
+ ps.maybe_config_file.as_ref().map(|cf| cf.specifier.clone());
let check_result = emit::check_and_maybe_emit(
graph.clone(),
&mut cache,
diff --git a/cli/proc_state.rs b/cli/proc_state.rs
index 7615b9d46..97f24bc90 100644
--- a/cli/proc_state.rs
+++ b/cli/proc_state.rs
@@ -500,7 +500,7 @@ impl ProcState {
let maybe_config_specifier = self
.maybe_config_file
.as_ref()
- .map(|cf| ModuleSpecifier::from_file_path(&cf.path).unwrap());
+ .map(|cf| cf.specifier.clone());
let options = emit::CheckOptions {
debug: self.flags.log_level == Some(log::Level::Debug),
emit_with_diagnostics: false,
diff --git a/cli/tests/integration/lsp_tests.rs b/cli/tests/integration/lsp_tests.rs
index 3d55ac28c..796130083 100644
--- a/cli/tests/integration/lsp_tests.rs
+++ b/cli/tests/integration/lsp_tests.rs
@@ -1,5 +1,6 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+use deno_ast::ModuleSpecifier;
use deno_core::serde::Deserialize;
use deno_core::serde::Serialize;
use deno_core::serde_json;
@@ -1421,6 +1422,61 @@ fn lsp_format_mbc() {
}
#[test]
+fn lsp_format_exclude_with_config() {
+ let temp_dir = TempDir::new().unwrap();
+ let mut params: lsp::InitializeParams =
+ serde_json::from_value(load_fixture("initialize_params.json")).unwrap();
+ let deno_fmt_jsonc =
+ serde_json::to_vec_pretty(&load_fixture("deno.fmt.exclude.jsonc")).unwrap();
+ fs::write(temp_dir.path().join("deno.fmt.jsonc"), deno_fmt_jsonc).unwrap();
+
+ params.root_uri = Some(Url::from_file_path(temp_dir.path()).unwrap());
+ if let Some(Value::Object(mut map)) = params.initialization_options {
+ map.insert("config".to_string(), json!("./deno.fmt.jsonc"));
+ params.initialization_options = Some(Value::Object(map));
+ }
+
+ let deno_exe = deno_exe_path();
+ let mut client = LspClient::new(&deno_exe).unwrap();
+ client
+ .write_request::<_, _, Value>("initialize", params)
+ .unwrap();
+
+ let file_uri =
+ ModuleSpecifier::from_file_path(temp_dir.path().join("ignored.ts"))
+ .unwrap()
+ .to_string();
+ did_open(
+ &mut client,
+ json!({
+ "textDocument": {
+ "uri": file_uri,
+ "languageId": "typescript",
+ "version": 1,
+ "text": "function myFunc(){}"
+ }
+ }),
+ );
+ let (maybe_res, maybe_err) = client
+ .write_request(
+ "textDocument/formatting",
+ json!({
+ "textDocument": {
+ "uri": file_uri
+ },
+ "options": {
+ "tabSize": 2,
+ "insertSpaces": true
+ }
+ }),
+ )
+ .unwrap();
+ assert!(maybe_err.is_none());
+ assert_eq!(maybe_res, Some(json!(null)));
+ shutdown(&mut client);
+}
+
+#[test]
fn lsp_large_doc_changes() {
let mut client = init("initialize_params.json");
did_open(&mut client, load_fixture("did_open_params_large.json"));
@@ -4048,6 +4104,47 @@ fn lsp_lint_with_config() {
}
#[test]
+fn lsp_lint_exclude_with_config() {
+ let temp_dir = TempDir::new().unwrap();
+ let mut params: lsp::InitializeParams =
+ serde_json::from_value(load_fixture("initialize_params.json")).unwrap();
+ let deno_lint_jsonc =
+ serde_json::to_vec_pretty(&load_fixture("deno.lint.exclude.jsonc"))
+ .unwrap();
+ fs::write(temp_dir.path().join("deno.lint.jsonc"), deno_lint_jsonc).unwrap();
+
+ params.root_uri = Some(Url::from_file_path(temp_dir.path()).unwrap());
+ if let Some(Value::Object(mut map)) = params.initialization_options {
+ map.insert("config".to_string(), json!("./deno.lint.jsonc"));
+ params.initialization_options = Some(Value::Object(map));
+ }
+
+ let deno_exe = deno_exe_path();
+ let mut client = LspClient::new(&deno_exe).unwrap();
+ client
+ .write_request::<_, _, Value>("initialize", params)
+ .unwrap();
+
+ let diagnostics = did_open(
+ &mut client,
+ json!({
+ "textDocument": {
+ "uri": ModuleSpecifier::from_file_path(temp_dir.path().join("ignored.ts")).unwrap().to_string(),
+ "languageId": "typescript",
+ "version": 1,
+ "text": "// TODO: fixme\nexport async function non_camel_case() {\nconsole.log(\"finished!\")\n}"
+ }
+ }),
+ );
+ let diagnostics = diagnostics
+ .into_iter()
+ .flat_map(|x| x.diagnostics)
+ .collect::<Vec<_>>();
+ assert_eq!(diagnostics, Vec::new());
+ shutdown(&mut client);
+}
+
+#[test]
fn lsp_jsx_import_source_pragma() {
let _g = http_server();
let mut client = init("initialize_params.json");
diff --git a/cli/tests/testdata/lsp/deno.fmt.exclude.jsonc b/cli/tests/testdata/lsp/deno.fmt.exclude.jsonc
new file mode 100644
index 000000000..246a40316
--- /dev/null
+++ b/cli/tests/testdata/lsp/deno.fmt.exclude.jsonc
@@ -0,0 +1,16 @@
+{
+ "fmt": {
+ "files": {
+ "exclude": [
+ "ignored.ts"
+ ]
+ },
+ "options": {
+ "useTabs": true,
+ "lineWidth": 40,
+ "indentWidth": 8,
+ "singleQuote": true,
+ "proseWrap": "always"
+ }
+ }
+}
diff --git a/cli/tests/testdata/lsp/deno.lint.exclude.jsonc b/cli/tests/testdata/lsp/deno.lint.exclude.jsonc
new file mode 100644
index 000000000..89f6108ec
--- /dev/null
+++ b/cli/tests/testdata/lsp/deno.lint.exclude.jsonc
@@ -0,0 +1,18 @@
+{
+ "lint": {
+ "files": {
+ "exclude": [
+ "ignored.ts"
+ ]
+ },
+ "rules": {
+ "exclude": [
+ "camelcase"
+ ],
+ "include": [
+ "ban-untagged-todo"
+ ],
+ "tags": []
+ }
+ }
+}
diff --git a/cli/tools/fmt.rs b/cli/tools/fmt.rs
index ae9d68b29..004b390d3 100644
--- a/cli/tools/fmt.rs
+++ b/cli/tools/fmt.rs
@@ -15,6 +15,7 @@ use crate::diff::diff;
use crate::file_watcher;
use crate::file_watcher::ResolutionResult;
use crate::flags::FmtFlags;
+use crate::fs_util::specifier_to_file_path;
use crate::fs_util::{collect_files, get_extension, is_supported_ext_fmt};
use crate::text_encoding;
use deno_ast::ParsedSource;
@@ -55,11 +56,21 @@ pub async fn format(
if let Some(fmt_config) = maybe_fmt_config.as_ref() {
if include_files.is_empty() {
- include_files = fmt_config.files.include.clone();
+ include_files = fmt_config
+ .files
+ .include
+ .iter()
+ .filter_map(|s| specifier_to_file_path(s).ok())
+ .collect::<Vec<_>>();
}
if exclude_files.is_empty() {
- exclude_files = fmt_config.files.exclude.clone();
+ exclude_files = fmt_config
+ .files
+ .exclude
+ .iter()
+ .filter_map(|s| specifier_to_file_path(s).ok())
+ .collect::<Vec<_>>();
}
}
diff --git a/cli/tools/lint.rs b/cli/tools/lint.rs
index 2c68a0dac..b7ae88e4f 100644
--- a/cli/tools/lint.rs
+++ b/cli/tools/lint.rs
@@ -10,7 +10,7 @@ use crate::config_file::LintConfig;
use crate::file_watcher::ResolutionResult;
use crate::flags::LintFlags;
use crate::fmt_errors;
-use crate::fs_util::{collect_files, is_supported_ext};
+use crate::fs_util::{collect_files, is_supported_ext, specifier_to_file_path};
use crate::tools::fmt::run_parallelized;
use crate::{colors, file_watcher};
use deno_ast::MediaType;
@@ -71,11 +71,21 @@ pub async fn lint(
if let Some(lint_config) = maybe_lint_config.as_ref() {
if include_files.is_empty() {
- include_files = lint_config.files.include.clone();
+ include_files = lint_config
+ .files
+ .include
+ .iter()
+ .filter_map(|s| specifier_to_file_path(s).ok())
+ .collect::<Vec<_>>();
}
if exclude_files.is_empty() {
- exclude_files = lint_config.files.exclude.clone();
+ exclude_files = lint_config
+ .files
+ .exclude
+ .iter()
+ .filter_map(|s| specifier_to_file_path(s).ok())
+ .collect::<Vec<_>>();
}
}
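
Since fmt.rs and lint.rs still collect files by path, the config entries (now stored as file: URLs) are mapped back to PathBufs before collection. A rough standalone sketch of that conversion, using Url::to_file_path in place of the cli's specifier_to_file_path helper; the paths are illustrative and assume a Unix-style filesystem.

// Mapping config file: URLs back to paths for path-based file collection.
use std::path::PathBuf;
use url::Url;

fn main() {
  let exclude = vec![Url::parse("file:///deno/src/testdata/").unwrap()];
  let exclude_files: Vec<PathBuf> = exclude
    .iter()
    .filter_map(|s| s.to_file_path().ok())
    .collect();
  println!("excluded paths: {:?}", exclude_files);
}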