Diffstat (limited to 'cli/lsp')
-rw-r--r-- | cli/lsp/config.rs          | 105
-rw-r--r-- | cli/lsp/documents.rs       | 259
-rw-r--r-- | cli/lsp/language_server.rs |   1
-rw-r--r-- | cli/lsp/tsc.rs             |  32
4 files changed, 363 insertions, 34 deletions
diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs
index 60b975a44..418ffdc48 100644
--- a/cli/lsp/config.rs
+++ b/cli/lsp/config.rs
@@ -485,6 +485,31 @@ impl Config {
       .unwrap_or_else(|| self.settings.workspace.enable)
   }
 
+  /// Gets the root directories or file paths based on the workspace config.
+  pub fn enabled_root_urls(&self) -> Vec<Url> {
+    let mut urls: Vec<Url> = Vec::new();
+
+    if !self.settings.workspace.enable && self.enabled_paths.is_empty() {
+      // do not return any urls when disabled
+      return urls;
+    }
+
+    for (workspace, enabled_paths) in &self.enabled_paths {
+      if !enabled_paths.is_empty() {
+        urls.extend(enabled_paths.iter().cloned());
+      } else {
+        urls.push(workspace.clone());
+      }
+    }
+    if urls.is_empty() {
+      if let Some(root_dir) = &self.root_uri {
+        urls.push(root_dir.clone())
+      }
+    }
+    sort_and_remove_non_leaf_urls(&mut urls);
+    urls
+  }
+
   pub fn specifier_code_lens_test(&self, specifier: &ModuleSpecifier) -> bool {
     let value = self
       .settings
@@ -621,6 +646,21 @@ impl Config {
   }
 }
 
+/// Removes any URLs that are a descendant of another URL in the collection.
+fn sort_and_remove_non_leaf_urls(dirs: &mut Vec<Url>) {
+  if dirs.is_empty() {
+    return;
+  }
+
+  dirs.sort();
+  for i in (0..dirs.len() - 1).rev() {
+    let prev = &dirs[i + 1];
+    if prev.as_str().starts_with(dirs[i].as_str()) {
+      dirs.remove(i + 1);
+    }
+  }
+}
+
 #[cfg(test)]
 mod tests {
   use super::*;
@@ -785,4 +825,69 @@ mod tests {
       WorkspaceSettings::default()
     );
   }
+
+  #[test]
+  fn test_sort_and_remove_non_leaf_urls() {
+    fn run_test(dirs: Vec<&str>, expected_output: Vec<&str>) {
+      let mut dirs = dirs
+        .into_iter()
+        .map(|dir| Url::parse(dir).unwrap())
+        .collect();
+      sort_and_remove_non_leaf_urls(&mut dirs);
+      let dirs: Vec<_> = dirs.iter().map(|dir| dir.as_str()).collect();
+      assert_eq!(dirs, expected_output);
+    }
+
+    run_test(
+      vec![
+        "file:///test/asdf/test/asdf/",
+        "file:///test/asdf/",
+        "file:///test/asdf/",
+        "file:///testing/456/893/",
+        "file:///testing/456/893/test/",
+      ],
+      vec!["file:///test/asdf/", "file:///testing/456/893/"],
+    );
+    run_test(vec![], vec![]);
+  }
+
+  #[test]
+  fn config_enabled_root_urls() {
+    let mut config = Config::new();
+    let root_dir = Url::parse("file:///example/").unwrap();
+    config.root_uri = Some(root_dir.clone());
+    config.settings.workspace.enable = false;
+    config.settings.workspace.enable_paths = Vec::new();
+    assert_eq!(config.enabled_root_urls(), vec![]);
+
+    config.settings.workspace.enable = true;
+    assert_eq!(config.enabled_root_urls(), vec![root_dir]);
+
+    config.settings.workspace.enable = false;
+    let root_dir1 = Url::parse("file:///root1/").unwrap();
+    let root_dir2 = Url::parse("file:///root2/").unwrap();
+    let root_dir3 = Url::parse("file:///root3/").unwrap();
+    config.enabled_paths = HashMap::from([
+      (
+        root_dir1.clone(),
+        vec![
+          root_dir1.join("sub_dir").unwrap(),
+          root_dir1.join("sub_dir/other").unwrap(),
+          root_dir1.join("test.ts").unwrap(),
+        ],
+      ),
+      (root_dir2.clone(), vec![root_dir2.join("other.ts").unwrap()]),
+      (root_dir3.clone(), vec![]),
+    ]);
+
+    assert_eq!(
+      config.enabled_root_urls(),
+      vec![
+        root_dir1.join("sub_dir").unwrap(),
+        root_dir1.join("test.ts").unwrap(),
+        root_dir2.join("other.ts").unwrap(),
+        root_dir3
+      ]
+    );
+  }
 }
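Note: the pruning in `sort_and_remove_non_leaf_urls` relies on lexicographic ordering of URL strings: after sorting, any descendant URL sorts immediately after one of its ancestors, so a single reverse pass comparing neighbors finds every redundant entry (duplicates included, since a string starts with itself). A minimal standalone sketch of the same idea, assuming only the `url` crate; `keep_only_ancestor_urls` is an illustrative name, not the function from this patch:

```rust
use url::Url;

/// Sorts the URLs and drops any entry whose string starts with an
/// earlier URL in sorted order (i.e. keeps only the shallowest roots).
fn keep_only_ancestor_urls(urls: &mut Vec<Url>) {
    urls.sort();
    // After sorting, descendants directly follow their ancestors, so a
    // reverse neighbor comparison is sufficient to remove all of them.
    for i in (0..urls.len().saturating_sub(1)).rev() {
        if urls[i + 1].as_str().starts_with(urls[i].as_str()) {
            urls.remove(i + 1);
        }
    }
}

fn main() {
    let mut urls = vec![
        Url::parse("file:///a/b/").unwrap(),
        Url::parse("file:///a/").unwrap(),
        Url::parse("file:///c/").unwrap(),
    ];
    keep_only_ancestor_urls(&mut urls);
    // Only "file:///a/" and "file:///c/" remain.
    assert_eq!(
        urls.iter().map(Url::as_str).collect::<Vec<_>>(),
        vec!["file:///a/", "file:///c/"]
    );
}
```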
diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs
index aa47faf62..0c27893a7 100644
--- a/cli/lsp/documents.rs
+++ b/cli/lsp/documents.rs
@@ -43,11 +43,13 @@ use deno_runtime::deno_node::NodeResolutionMode;
 use deno_runtime::deno_node::PackageJson;
 use deno_runtime::permissions::PermissionsContainer;
 use indexmap::IndexMap;
+use lsp::Url;
 use once_cell::sync::Lazy;
 use std::collections::HashMap;
 use std::collections::HashSet;
 use std::collections::VecDeque;
 use std::fs;
+use std::fs::ReadDir;
 use std::ops::Range;
 use std::path::Path;
 use std::path::PathBuf;
@@ -775,18 +777,6 @@ impl FileSystemDocuments {
     self.docs.insert(specifier.clone(), doc.clone());
     Some(doc)
   }
-
-  pub fn refresh_dependencies(
-    &mut self,
-    resolver: &dyn deno_graph::source::Resolver,
-  ) {
-    for doc in self.docs.values_mut() {
-      if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) {
-        *doc = new_doc;
-      }
-    }
-    self.dirty = true;
-  }
 }
 
 fn get_document_path(
@@ -1166,6 +1156,7 @@ impl Documents {
   pub fn update_config(
     &mut self,
+    root_urls: Vec<Url>,
     maybe_import_map: Option<Arc<import_map::ImportMap>>,
     maybe_config_file: Option<&ConfigFile>,
     maybe_package_json: Option<&PackageJson>,
@@ -1173,11 +1164,18 @@ impl Documents {
     npm_resolution: NpmResolution,
   ) {
     fn calculate_resolver_config_hash(
+      root_urls: &[Url],
       maybe_import_map: Option<&import_map::ImportMap>,
       maybe_jsx_config: Option<&JsxImportSourceConfig>,
       maybe_package_json_deps: Option<&PackageJsonDeps>,
     ) -> u64 {
       let mut hasher = FastInsecureHasher::default();
+      hasher.write_hashable(&{
+        // ensure these are sorted (they should be, but this is a safeguard)
+        let mut root_urls = root_urls.to_vec();
+        root_urls.sort_unstable();
+        root_urls
+      });
       if let Some(import_map) = maybe_import_map {
         hasher.write_str(&import_map.to_json());
         hasher.write_str(import_map.base_url().as_str());
@@ -1193,6 +1191,7 @@ impl Documents {
     let maybe_jsx_config =
       maybe_config_file.and_then(|cf| cf.to_maybe_jsx_import_source_config());
     let new_resolver_config_hash = calculate_resolver_config_hash(
+      &root_urls,
       maybe_import_map.as_deref(),
       maybe_jsx_config.as_ref(),
       maybe_package_json_deps.as_ref(),
@@ -1232,21 +1231,51 @@ impl Documents {
     // only refresh the dependencies if the underlying configuration has changed
     if self.resolver_config_hash != new_resolver_config_hash {
-      self.refresh_dependencies();
+      self.refresh_dependencies(root_urls);
       self.resolver_config_hash = new_resolver_config_hash;
     }
 
     self.dirty = true;
   }
 
-  fn refresh_dependencies(&mut self) {
+  fn refresh_dependencies(&mut self, root_urls: Vec<Url>) {
     let resolver = self.resolver.as_graph_resolver();
     for doc in self.open_docs.values_mut() {
       if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) {
         *doc = new_doc;
       }
     }
-    self.file_system_docs.lock().refresh_dependencies(resolver);
+
+    // update the file system documents
+    let mut fs_docs = self.file_system_docs.lock();
+    let mut not_found_docs =
+      fs_docs.docs.keys().cloned().collect::<HashSet<_>>();
+    let open_docs = &mut self.open_docs;
+
+    for specifier in PreloadDocumentFinder::from_root_urls(&root_urls) {
+      // mark this document as having been found
+      not_found_docs.remove(&specifier);
+
+      if !open_docs.contains_key(&specifier)
+        && !fs_docs.docs.contains_key(&specifier)
+      {
+        fs_docs.refresh_document(&self.cache, resolver, &specifier);
+      } else {
+        // update the existing entry to have the new resolver
+        if let Some(doc) = fs_docs.docs.get_mut(&specifier) {
+          if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) {
+            *doc = new_doc;
+          }
+        }
+      }
+    }
+
+    // clean up and remove any documents that weren't found
+    for uri in not_found_docs {
+      fs_docs.docs.remove(&uri);
+    }
+
+    fs_docs.dirty = true;
   }
 
   /// Iterate through the documents, building a map where the key is a unique
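Note: by folding the (sorted) root URLs into the resolver config hash, enabling or disabling a workspace folder now invalidates the cached resolver state and triggers a dependency refresh, exactly as an import map or JSX config change would. A rough sketch of this hash-gated invalidation pattern, using std's `DefaultHasher` in place of Deno's internal `FastInsecureHasher`; the struct and fields here are illustrative, not the actual `Documents` type:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

struct Documents {
    resolver_config_hash: u64,
    refresh_count: u32,
}

impl Documents {
    /// Recomputes the config hash and only performs the expensive refresh
    /// when some input (root URLs, import map JSON, ...) actually changed.
    fn update_config(&mut self, root_urls: &[String], import_map_json: Option<&str>) {
        let mut hasher = DefaultHasher::new();
        // Hash a sorted copy so equality does not depend on input order.
        let mut sorted_roots = root_urls.to_vec();
        sorted_roots.sort_unstable();
        sorted_roots.hash(&mut hasher);
        import_map_json.hash(&mut hasher);
        let new_hash = hasher.finish();

        if self.resolver_config_hash != new_hash {
            self.refresh_dependencies();
            self.resolver_config_hash = new_hash;
        }
    }

    fn refresh_dependencies(&mut self) {
        // expensive in the real LSP: re-resolve every open and cached document
        self.refresh_count += 1;
    }
}

fn main() {
    let mut docs = Documents { resolver_config_hash: 0, refresh_count: 0 };
    docs.update_config(&["file:///b/".to_string(), "file:///a/".to_string()], None);
    // same roots in a different order -> same hash, no second refresh
    docs.update_config(&["file:///a/".to_string(), "file:///b/".to_string()], None);
    assert_eq!(docs.refresh_count, 1);
}
```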
@@ -1478,12 +1507,150 @@ fn analyze_module(
   }
 }
 
+/// Iterator that finds documents that can be preloaded into
+/// the LSP on startup.
+struct PreloadDocumentFinder {
+  pending_dirs: Vec<PathBuf>,
+  pending_files: Vec<PathBuf>,
+  current_entries: Option<ReadDir>,
+}
+
+impl PreloadDocumentFinder {
+  pub fn from_root_urls(root_urls: &Vec<Url>) -> Self {
+    let mut finder = PreloadDocumentFinder {
+      pending_dirs: Default::default(),
+      pending_files: Default::default(),
+      current_entries: Default::default(),
+    };
+    for root_url in root_urls {
+      if let Ok(path) = root_url.to_file_path() {
+        if path.is_dir() {
+          finder.pending_dirs.push(path);
+        } else {
+          finder.pending_files.push(path);
+        }
+      }
+    }
+    finder
+  }
+
+  fn read_next_file_entry(&mut self) -> Option<ModuleSpecifier> {
+    fn is_discoverable_dir(dir_path: &Path) -> bool {
+      if let Some(dir_name) = dir_path.file_name() {
+        let dir_name = dir_name.to_string_lossy().to_lowercase();
+        !matches!(dir_name.as_str(), "node_modules" | ".git")
+      } else {
+        false
+      }
+    }
+
+    if let Some(mut entries) = self.current_entries.take() {
+      while let Some(entry) = entries.next() {
+        if let Ok(entry) = entry {
+          let path = entry.path();
+          if let Ok(file_type) = entry.file_type() {
+            if file_type.is_dir() && is_discoverable_dir(&path) {
+              self.pending_dirs.push(path);
+            } else if file_type.is_file() {
+              if let Some(specifier) = Self::get_valid_specifier(&path) {
+                // restore the next entries for next time
+                self.current_entries = Some(entries);
+                return Some(specifier);
+              }
+            }
+          }
+        }
+      }
+    }
+
+    None
+  }
+
+  fn get_valid_specifier(path: &Path) -> Option<ModuleSpecifier> {
+    fn is_discoverable_file(file_path: &Path) -> bool {
+      // Don't auto-discover minified files as they are likely to be very large
+      // and likely not to have dependencies on code outside them that would
+      // be useful in the LSP
+      if let Some(file_name) = file_path.file_name() {
+        let file_name = file_name.to_string_lossy().to_lowercase();
+        !file_name.as_str().contains(".min.")
+      } else {
+        false
+      }
+    }
+
+    fn is_discoverable_media_type(media_type: MediaType) -> bool {
+      match media_type {
+        MediaType::JavaScript
+        | MediaType::Jsx
+        | MediaType::Mjs
+        | MediaType::Cjs
+        | MediaType::TypeScript
+        | MediaType::Mts
+        | MediaType::Cts
+        | MediaType::Dts
+        | MediaType::Dmts
+        | MediaType::Dcts
+        | MediaType::Tsx => true,
+        MediaType::Json // ignore because json never depends on other files
+        | MediaType::Wasm
+        | MediaType::SourceMap
+        | MediaType::TsBuildInfo
+        | MediaType::Unknown => false,
+      }
+    }
+
+    let media_type = MediaType::from_path(path);
+    if is_discoverable_media_type(media_type) && is_discoverable_file(path) {
+      if let Ok(specifier) = ModuleSpecifier::from_file_path(path) {
+        return Some(specifier);
+      }
+    }
+    None
+  }
+
+  fn queue_next_file_entries(&mut self) {
+    debug_assert!(self.current_entries.is_none());
+    while let Some(dir_path) = self.pending_dirs.pop() {
+      if let Ok(entries) = fs::read_dir(&dir_path) {
+        self.current_entries = Some(entries);
+        break;
+      }
+    }
+  }
+}
+
+impl Iterator for PreloadDocumentFinder {
+  type Item = ModuleSpecifier;
+
+  fn next(&mut self) -> Option<Self::Item> {
+    // drain the pending files
+    while let Some(path) = self.pending_files.pop() {
+      if let Some(specifier) = Self::get_valid_specifier(&path) {
+        return Some(specifier);
+      }
+    }
+
+    // then go through the current entries and directories
+    while !self.pending_dirs.is_empty() || self.current_entries.is_some() {
+      match self.read_next_file_entry() {
+        Some(entry) => return Some(entry),
+        None => {
+          self.queue_next_file_entries();
+        }
+      }
+    }
+    None
+  }
+}
+
 #[cfg(test)]
 mod tests {
   use crate::npm::NpmResolution;
 
   use super::*;
   use import_map::ImportMap;
+  use pretty_assertions::assert_eq;
   use test_util::TempDir;
 
   fn setup(temp_dir: &TempDir) -> (Documents, PathBuf) {
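Note: the finder is written as a lazy `Iterator` rather than an eager walk: it holds at most one open `ReadDir` handle, pushes subdirectories onto an explicit stack, and yields each matching file as soon as it is seen, so callers can stop early without scanning the whole tree. A stripped-down sketch of the same pattern over plain `PathBuf`s, with no media-type filtering; `WalkFiles` is a made-up name for illustration, not part of the patch:

```rust
use std::fs::{self, ReadDir};
use std::path::PathBuf;

/// Depth-first lazy file walker: one open directory handle at a time,
/// pending directories kept on an explicit stack.
struct WalkFiles {
    pending_dirs: Vec<PathBuf>,
    current: Option<ReadDir>,
}

impl WalkFiles {
    fn new(root: PathBuf) -> Self {
        WalkFiles { pending_dirs: vec![root], current: None }
    }
}

impl Iterator for WalkFiles {
    type Item = PathBuf;

    fn next(&mut self) -> Option<PathBuf> {
        loop {
            // Open the next directory if none is in progress.
            if self.current.is_none() {
                let dir = self.pending_dirs.pop()?; // done when stack is empty
                self.current = fs::read_dir(&dir).ok();
                continue; // read_dir may have failed; try the next dir
            }
            let entries = self.current.as_mut().unwrap();
            match entries.next() {
                Some(Ok(entry)) => {
                    let path = entry.path();
                    match entry.file_type() {
                        Ok(t) if t.is_dir() => self.pending_dirs.push(path),
                        Ok(t) if t.is_file() => return Some(path),
                        _ => {}
                    }
                }
                Some(Err(_)) => {}           // skip unreadable entries
                None => self.current = None, // directory exhausted
            }
        }
    }
}

fn main() {
    // Early termination: only the first five files are ever read.
    for path in WalkFiles::new(PathBuf::from(".")).take(5) {
        println!("{}", path.display());
    }
}
```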
@@ -1616,6 +1783,7 @@ console.log(b, "hello deno");
       .unwrap();
 
     documents.update_config(
+      vec![],
       Some(Arc::new(import_map)),
       None,
       None,
@@ -1655,6 +1823,7 @@ console.log(b, "hello deno");
       .unwrap();
 
     documents.update_config(
+      vec![],
       Some(Arc::new(import_map)),
       None,
       None,
@@ -1676,4 +1845,64 @@ console.log(b, "hello deno");
       );
     }
   }
+
+  #[test]
+  pub fn test_pre_load_document_finder() {
+    let temp_dir = TempDir::new();
+    temp_dir.create_dir_all("root1/node_modules/");
+    temp_dir.write("root1/node_modules/mod.ts", ""); // no, node_modules
+
+    temp_dir.create_dir_all("root1/sub_dir");
+    temp_dir.create_dir_all("root1/.git");
+    temp_dir.create_dir_all("root1/file.ts"); // no, directory
+    temp_dir.write("root1/mod1.ts", ""); // yes
+    temp_dir.write("root1/mod2.js", ""); // yes
+    temp_dir.write("root1/mod3.tsx", ""); // yes
+    temp_dir.write("root1/mod4.d.ts", ""); // yes
+    temp_dir.write("root1/mod5.jsx", ""); // yes
+    temp_dir.write("root1/mod6.mjs", ""); // yes
+    temp_dir.write("root1/mod7.mts", ""); // yes
+    temp_dir.write("root1/mod8.d.mts", ""); // yes
+    temp_dir.write("root1/other.json", ""); // no, json
+    temp_dir.write("root1/other.txt", ""); // no, text file
+    temp_dir.write("root1/other.wasm", ""); // no, don't load wasm
+    temp_dir.write("root1/sub_dir/mod.ts", ""); // yes
+    temp_dir.write("root1/sub_dir/data.min.ts", ""); // no, minified file
+    temp_dir.write("root1/.git/main.ts", ""); // no, .git folder
+
+    temp_dir.create_dir_all("root2/folder");
+    temp_dir.write("root2/file1.ts", ""); // yes, provided
+    temp_dir.write("root2/file2.ts", ""); // no, not provided
+    temp_dir.write("root2/folder/main.ts", ""); // yes, provided
+
+    temp_dir.create_dir_all("root3/");
+    temp_dir.write("root3/mod.ts", ""); // no, not provided
+
+    let mut urls = PreloadDocumentFinder::from_root_urls(&vec![
+      temp_dir.uri().join("root1/").unwrap(),
+      temp_dir.uri().join("root2/file1.ts").unwrap(),
+      temp_dir.uri().join("root2/folder/").unwrap(),
+    ])
+    .collect::<Vec<_>>();
+
+    // order doesn't matter
+    urls.sort();
+
+    assert_eq!(
+      urls,
+      vec![
+        temp_dir.uri().join("root1/mod1.ts").unwrap(),
+        temp_dir.uri().join("root1/mod2.js").unwrap(),
+        temp_dir.uri().join("root1/mod3.tsx").unwrap(),
+        temp_dir.uri().join("root1/mod4.d.ts").unwrap(),
+        temp_dir.uri().join("root1/mod5.jsx").unwrap(),
+        temp_dir.uri().join("root1/mod6.mjs").unwrap(),
+        temp_dir.uri().join("root1/mod7.mts").unwrap(),
+        temp_dir.uri().join("root1/mod8.d.mts").unwrap(),
+        temp_dir.uri().join("root1/sub_dir/mod.ts").unwrap(),
+        temp_dir.uri().join("root2/file1.ts").unwrap(),
+        temp_dir.uri().join("root2/folder/main.ts").unwrap(),
+      ]
+    );
+  }
 }
diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs
index 164c9734f..754b5c95c 100644
--- a/cli/lsp/language_server.rs
+++ b/cli/lsp/language_server.rs
@@ -1138,6 +1138,7 @@ impl Inner {
 
   fn refresh_documents_config(&mut self) {
     self.documents.update_config(
+      self.config.enabled_root_urls(),
       self.maybe_import_map.clone(),
       self.maybe_config_file.as_ref(),
       self.maybe_package_json.as_ref(),
diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs
index e846cc496..f02668910 100644
--- a/cli/lsp/tsc.rs
+++ b/cli/lsp/tsc.rs
@@ -2803,9 +2803,9 @@ fn op_respond(state: &mut OpState, args: Response) -> bool {
 fn op_script_names(state: &mut OpState) -> Vec<String> {
   let state = state.borrow_mut::<State>();
   let documents = &state.state_snapshot.documents;
-  let open_docs = documents.documents(DocumentsFilter::OpenDiagnosable);
-  let mut result = Vec::new();
+  let all_docs = documents.documents(DocumentsFilter::AllDiagnosable);
   let mut seen = HashSet::new();
+  let mut result = Vec::new();
 
   if documents.has_injected_types_node_package() {
     // ensure this is first so it resolves the node types first
@@ -2822,23 +2822,17 @@ fn op_script_names(state: &mut OpState) -> Vec<String> {
   }
 
   // finally include the documents and all their dependencies
-  for doc in &open_docs {
-    let specifier = doc.specifier();
-    if seen.insert(specifier.as_str()) {
-      result.push(specifier.to_string());
-    }
-  }
-
-  // and then all their dependencies (do this after to avoid exists calls)
-  for doc in &open_docs {
-    for dep in doc.dependencies().values() {
-      if let Some(specifier) = dep.get_type().or_else(|| dep.get_code()) {
-        if seen.insert(specifier.as_str()) {
-          // only include dependencies we know to exist otherwise typescript will error
-          if documents.exists(specifier) {
-            result.push(specifier.to_string());
-          }
-        }
+  for doc in &all_docs {
+    let specifiers = std::iter::once(doc.specifier()).chain(
+      doc
+        .dependencies()
+        .values()
+        .filter_map(|dep| dep.get_type().or_else(|| dep.get_code())),
+    );
+    for specifier in specifiers {
+      if seen.insert(specifier.as_str()) && documents.exists(specifier) {
+        // only include dependencies we know to exist otherwise typescript will error
+        result.push(specifier.to_string());
+      }
     }
   }
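Note: the rewrite folds the two passes over open documents into one pass over all documents by chaining each document's own specifier with its dependency specifiers and deduplicating through a single `HashSet`. The shape of that pattern in isolation, with illustrative types; the `exists` closure stands in for the documents-cache check:

```rust
use std::collections::HashSet;

struct Doc {
    specifier: String,
    deps: Vec<String>,
}

/// Collects each document's specifier followed by its dependencies,
/// keeping only the first occurrence and only entries that pass `exists`.
fn script_names(docs: &[Doc], exists: impl Fn(&str) -> bool) -> Vec<String> {
    let mut seen = HashSet::new();
    let mut result = Vec::new();
    for doc in docs {
        // the document itself first, then everything it references
        let specifiers = std::iter::once(doc.specifier.as_str())
            .chain(doc.deps.iter().map(String::as_str));
        for specifier in specifiers {
            // `insert` returns false for duplicates; the short-circuit keeps
            // the potentially expensive existence check off the duplicate path
            if seen.insert(specifier) && exists(specifier) {
                result.push(specifier.to_string());
            }
        }
    }
    result
}

fn main() {
    let docs = vec![
        Doc { specifier: "file:///a.ts".into(), deps: vec!["file:///b.ts".into()] },
        Doc { specifier: "file:///b.ts".into(), deps: vec!["file:///missing.ts".into()] },
    ];
    let names = script_names(&docs, |s| s != "file:///missing.ts");
    assert_eq!(names, vec!["file:///a.ts", "file:///b.ts"]);
}
```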