-rw-r--r--  cli/lsp/documents.rs    | 126
-rw-r--r--  cli/tests/lsp_tests.rs  | 135
-rw-r--r--  test_util/src/lsp.rs    |   8
3 files changed, 207 insertions(+), 62 deletions(-)
diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs
index ce8fead0d..1fd9b541d 100644
--- a/cli/lsp/documents.rs
+++ b/cli/lsp/documents.rs
@@ -37,6 +37,7 @@ use once_cell::sync::Lazy;
 use std::collections::BTreeMap;
 use std::collections::HashMap;
 use std::collections::HashSet;
+use std::collections::VecDeque;
 use std::fs;
 use std::ops::Range;
 use std::path::Path;
@@ -633,6 +634,23 @@ struct FileSystemDocuments {
 }

 impl FileSystemDocuments {
+  pub fn get<'a>(
+    &mut self,
+    cache: &HttpCache,
+    maybe_resolver: Option<&dyn deno_graph::source::Resolver>,
+    specifier: &ModuleSpecifier,
+  ) -> Option<Document> {
+    let fs_version = get_document_path(cache, specifier)
+      .and_then(|path| calculate_fs_version(&path));
+    let file_system_doc = self.docs.get(specifier);
+    if file_system_doc.map(|d| d.fs_version().to_string()) != fs_version {
+      // attempt to update the file on the file system
+      self.refresh_document(cache, maybe_resolver, specifier)
+    } else {
+      file_system_doc.cloned()
+    }
+  }
+
   /// Adds or updates a document by reading the document from the file system
   /// returning the document.
   fn refresh_document(
@@ -709,7 +727,7 @@ pub struct Documents {
   /// settings.
   maybe_resolver: Option<CliResolver>,
   /// The npm package requirements.
-  npm_reqs: HashSet<NpmPackageReq>,
+  npm_reqs: Arc<HashSet<NpmPackageReq>>,
   /// Resolves a specifier to its final redirected to specifier.
   specifier_resolver: Arc<SpecifierResolver>,
 }
@@ -724,7 +742,7 @@ impl Documents {
       file_system_docs: Default::default(),
       imports: Default::default(),
       maybe_resolver: None,
-      npm_reqs: HashSet::new(),
+      npm_reqs: Default::default(),
       specifier_resolver: Arc::new(SpecifierResolver::new(location)),
     }
   }
@@ -862,7 +880,7 @@ impl Documents {
   /// Returns a collection of npm package requirements.
   pub fn npm_package_reqs(&mut self) -> HashSet<NpmPackageReq> {
     self.calculate_dependents_if_dirty();
-    self.npm_reqs.clone()
+    (*self.npm_reqs).clone()
   }

   /// Return a document for the specifier.
@@ -872,19 +890,7 @@
       Some(document.clone())
     } else {
       let mut file_system_docs = self.file_system_docs.lock();
-      let fs_version = get_document_path(&self.cache, &specifier)
-        .and_then(|path| calculate_fs_version(&path));
-      let file_system_doc = file_system_docs.docs.get(&specifier);
-      if file_system_doc.map(|d| d.fs_version().to_string()) != fs_version {
-        // attempt to update the file on the file system
-        file_system_docs.refresh_document(
-          &self.cache,
-          self.get_maybe_resolver(),
-          &specifier,
-        )
-      } else {
-        file_system_doc.cloned()
-      }
+      file_system_docs.get(&self.cache, self.get_maybe_resolver(), &specifier)
     }
   }

@@ -1075,46 +1081,74 @@ impl Documents {
   /// document and the value is a set of specifiers that depend on that
   /// document.
   fn calculate_dependents_if_dirty(&mut self) {
-    let mut file_system_docs = self.file_system_docs.lock();
-    if !file_system_docs.dirty && !self.dirty {
-      return;
+    #[derive(Default)]
+    struct DocAnalyzer {
+      dependents_map: HashMap<ModuleSpecifier, HashSet<ModuleSpecifier>>,
+      analyzed_specifiers: HashSet<ModuleSpecifier>,
+      pending_specifiers: VecDeque<ModuleSpecifier>,
+      npm_reqs: HashSet<NpmPackageReq>,
     }

-    let mut dependents_map: HashMap<ModuleSpecifier, HashSet<ModuleSpecifier>> =
-      HashMap::new();
-    // favour documents that are open in case a document exists in both collections
-    let documents = file_system_docs.docs.iter().chain(self.open_docs.iter());
-    for (specifier, doc) in documents {
-      for dependency in doc.dependencies().values() {
-        if let Some(dep) = dependency.get_code() {
-          dependents_map
-            .entry(dep.clone())
-            .or_default()
-            .insert(specifier.clone());
-        }
-        if let Some(dep) = dependency.get_type() {
-          dependents_map
-            .entry(dep.clone())
-            .or_default()
-            .insert(specifier.clone());
+    impl DocAnalyzer {
+      fn add(&mut self, dep: &ModuleSpecifier, specifier: &ModuleSpecifier) {
+        if !self.analyzed_specifiers.contains(dep) {
+          self.analyzed_specifiers.insert(dep.clone());
+          // perf: ensure this is not added to unless this specifier has never
+          // been analyzed in order to not cause an extra file system lookup
+          self.pending_specifiers.push_back(dep.clone());
+          if let Ok(reference) = NpmPackageReference::from_specifier(dep) {
+            self.npm_reqs.insert(reference.req);
+          }
         }
-      }
-      if let Resolved::Ok { specifier: dep, .. } = doc.maybe_types_dependency()
-      {
-        dependents_map
+
+        self
+          .dependents_map
           .entry(dep.clone())
           .or_default()
           .insert(specifier.clone());
       }
+
+      fn analyze_doc(&mut self, specifier: &ModuleSpecifier, doc: &Document) {
+        self.analyzed_specifiers.insert(specifier.clone());
+        for dependency in doc.dependencies().values() {
+          if let Some(dep) = dependency.get_code() {
+            self.add(dep, specifier);
+          }
+          if let Some(dep) = dependency.get_type() {
+            self.add(dep, specifier);
+          }
+        }
+        if let Resolved::Ok { specifier: dep, .. } =
+          doc.maybe_types_dependency()
+        {
+          self.add(&dep, specifier);
+        }
+      }
+    }
+
+    let mut file_system_docs = self.file_system_docs.lock();
+    if !file_system_docs.dirty && !self.dirty {
+      return;
+    }
+
+    let mut doc_analyzer = DocAnalyzer::default();
+    // favor documents that are open in case a document exists in both collections
+    let documents = file_system_docs.docs.iter().chain(self.open_docs.iter());
+    for (specifier, doc) in documents {
+      doc_analyzer.analyze_doc(specifier, doc);
     }
-    let mut npm_reqs = HashSet::new();
-    for specifier in dependents_map.keys() {
-      if let Ok(reference) = NpmPackageReference::from_specifier(specifier) {
-        npm_reqs.insert(reference.req);
+
+    let maybe_resolver = self.get_maybe_resolver();
+    while let Some(specifier) = doc_analyzer.pending_specifiers.pop_front() {
+      if let Some(doc) =
+        file_system_docs.get(&self.cache, maybe_resolver, &specifier)
+      {
+        doc_analyzer.analyze_doc(&specifier, &doc);
      }
    }
-    self.dependents_map = Arc::new(dependents_map);
-    self.npm_reqs = npm_reqs;
+
+    self.dependents_map = Arc::new(doc_analyzer.dependents_map);
+    self.npm_reqs = Arc::new(doc_analyzer.npm_reqs);
     self.dirty = false;
     file_system_docs.dirty = false;
   }
diff --git a/cli/tests/lsp_tests.rs b/cli/tests/lsp_tests.rs
index 541ab8e8d..a28b1757a 100644
--- a/cli/tests/lsp_tests.rs
+++ b/cli/tests/lsp_tests.rs
@@ -12,7 +12,10 @@ mod lsp {
   use pretty_assertions::assert_eq;
   use std::collections::HashSet;
   use std::fs;
+  use std::process::Stdio;
+  use test_util::deno_cmd_with_deno_dir;
   use test_util::deno_exe_path;
+  use test_util::env_vars_for_npm_tests;
   use test_util::http_server;
   use test_util::lsp::LspClient;
   use test_util::testdata_path;
@@ -3321,13 +3324,13 @@
   fn lsp_code_actions_deno_cache() {
     let mut session = TestSession::from_file("initialize_params.json");
     let diagnostics = session.did_open(json!({
-      "textDocument": {
-        "uri": "file:///a/file.ts",
-        "languageId": "typescript",
-        "version": 1,
-        "text": "import * as a from \"https://deno.land/x/a/mod.ts\";\n\nconsole.log(a);\n"
-      }
-    }));
+        "textDocument": {
+          "uri": "file:///a/file.ts",
+          "languageId": "typescript",
+          "version": 1,
+          "text": "import * as a from \"https://deno.land/x/a/mod.ts\";\n\nconsole.log(a);\n"
+        }
+      }));
     assert_eq!(
       diagnostics.with_source("deno"),
       load_fixture_as("diagnostics_deno_deps.json")
@@ -3352,13 +3355,13 @@
   fn lsp_code_actions_deno_cache_npm() {
     let mut session = TestSession::from_file("initialize_params.json");
     let diagnostics = session.did_open(json!({
-      "textDocument": {
-        "uri": "file:///a/file.ts",
-        "languageId": "typescript",
-        "version": 1,
-        "text": "import chalk from \"npm:chalk\";\n\nconsole.log(chalk.green);\n"
-      }
-    }));
+        "textDocument": {
+          "uri": "file:///a/file.ts",
+          "languageId": "typescript",
+          "version": 1,
+          "text": "import chalk from \"npm:chalk\";\n\nconsole.log(chalk.green);\n"
+        }
+      }));
     assert_eq!(
       diagnostics.with_source("deno"),
       load_fixture_as("code_actions/cache_npm/diagnostics.json")
@@ -4243,6 +4246,110 @@
   }

   #[test]
+  fn lsp_npm_specifier_unopened_file() {
+    let _g = http_server();
+    let mut client = init("initialize_params.json");
+
+    // create other.ts, which re-exports an npm specifier
+    client.deno_dir().write(
+      "other.ts",
+      "export { default as chalk } from 'npm:chalk@5';",
+    );
+
+    // cache the other.ts file to the DENO_DIR
+    let deno = deno_cmd_with_deno_dir(client.deno_dir())
+      .current_dir(client.deno_dir().path())
+      .arg("cache")
+      .arg("--quiet")
+      .arg("other.ts")
+      .envs(env_vars_for_npm_tests())
+      .stdout(Stdio::piped())
+      .stderr(Stdio::piped())
+      .spawn()
+      .unwrap();
+    let output = deno.wait_with_output().unwrap();
+    assert!(output.status.success());
+    assert_eq!(output.status.code(), Some(0));
+
+    let stdout = String::from_utf8(output.stdout).unwrap();
+    assert!(stdout.is_empty());
+    let stderr = String::from_utf8(output.stderr).unwrap();
+    assert!(stderr.is_empty());
+
+    // open main.ts, which imports other.ts (unopened)
+    let main_url =
+      ModuleSpecifier::from_file_path(client.deno_dir().path().join("main.ts"))
+        .unwrap();
+    did_open(
+      &mut client,
+      json!({
+        "textDocument": {
+          "uri": main_url,
+          "languageId": "typescript",
+          "version": 1,
+          "text": "import { chalk } from './other.ts';\n\n",
+        }
+      }),
+    );
+
+    client
+      .write_notification(
+        "textDocument/didChange",
+        json!({
+          "textDocument": {
+            "uri": main_url,
+            "version": 2
+          },
+          "contentChanges": [
+            {
+              "range": {
+                "start": {
+                  "line": 2,
+                  "character": 0
+                },
+                "end": {
+                  "line": 2,
+                  "character": 0
+                }
+              },
+              "text": "chalk."
+            }
+          ]
+        }),
+      )
+      .unwrap();
+    read_diagnostics(&mut client);
+
+    // now ensure completions work
+    let (maybe_res, maybe_err) = client
+      .write_request(
+        "textDocument/completion",
+        json!({
+          "textDocument": {
+            "uri": main_url
+          },
+          "position": {
+            "line": 2,
+            "character": 6
+          },
+          "context": {
+            "triggerKind": 2,
+            "triggerCharacter": "."
+          }
+        }),
+      )
+      .unwrap();
+    assert!(maybe_err.is_none());
+    if let Some(lsp::CompletionResponse::List(list)) = maybe_res {
+      assert!(!list.is_incomplete);
+      assert_eq!(list.items.len(), 63);
+      assert!(list.items.iter().any(|i| i.label == "ansi256"));
+    } else {
+      panic!("unexpected response");
+    }
+  }
+
+  #[test]
   fn lsp_completions_registry() {
     let _g = http_server();
     let mut client = init("initialize_params_registry.json");
diff --git a/test_util/src/lsp.rs b/test_util/src/lsp.rs
index c0fd8ff1b..b2c28268e 100644
--- a/test_util/src/lsp.rs
+++ b/test_util/src/lsp.rs
@@ -159,7 +159,7 @@ pub struct LspClient {
   request_id: u64,
   start: Instant,
   writer: io::BufWriter<ChildStdin>,
-  _temp_deno_dir: TempDir, // directory will be deleted on drop
+  deno_dir: TempDir,
 }

 impl Drop for LspClient {
@@ -255,10 +255,14 @@ impl LspClient {
       request_id: 1,
       start: Instant::now(),
       writer,
-      _temp_deno_dir: deno_dir,
+      deno_dir,
     })
   }

+  pub fn deno_dir(&self) -> &TempDir {
+    &self.deno_dir
+  }
+
   pub fn duration(&self) -> Duration {
     self.start.elapsed()
   }
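
The heart of the documents.rs change is a queue-driven dependency walk: every open or cached document seeds a queue, and each dependency is pulled off the queue and analyzed at most once, so unopened files (like other.ts in the new test) are read from disk a single time while npm requirements are collected along the way. A minimal standalone sketch of that traversal pattern, using plain String specifiers and a caller-supplied deps_of callback instead of the real Document/ModuleSpecifier machinery (all names here are illustrative, not Deno APIs):

use std::collections::HashMap;
use std::collections::HashSet;
use std::collections::VecDeque;

// Sketch of the queue-driven walk: `deps_of` stands in for analyzing a
// document and returning the specifiers it depends on.
fn collect_dependents(
  roots: &[String],
  deps_of: impl Fn(&str) -> Vec<String>,
) -> HashMap<String, HashSet<String>> {
  let mut dependents_map: HashMap<String, HashSet<String>> = HashMap::new();
  let mut analyzed: HashSet<String> = HashSet::new();
  let mut pending: VecDeque<String> = VecDeque::new();

  // Seed the queue with the root documents.
  for root in roots {
    if analyzed.insert(root.clone()) {
      pending.push_back(root.clone());
    }
  }

  // Drain the queue; each specifier is analyzed at most once.
  while let Some(specifier) = pending.pop_front() {
    for dep in deps_of(&specifier) {
      // Record the reverse edge: `dep` is depended on by `specifier`.
      dependents_map
        .entry(dep.clone())
        .or_default()
        .insert(specifier.clone());
      // Enqueue a dependency only the first time it is seen, so it is
      // loaded from disk at most once.
      if analyzed.insert(dep.clone()) {
        pending.push_back(dep);
      }
    }
  }

  dependents_map
}

fn main() {
  // main.ts -> other.ts -> npm:chalk@5, mirroring the new LSP test.
  let deps_of = |s: &str| match s {
    "file:///main.ts" => vec!["file:///other.ts".to_string()],
    "file:///other.ts" => vec!["npm:chalk@5".to_string()],
    _ => vec![],
  };
  let map = collect_dependents(&["file:///main.ts".to_string()], deps_of);
  assert!(map["npm:chalk@5"].contains("file:///other.ts"));
}

An explicit worklist with a "seen" set, rather than recursion, keeps the analyze-once guarantee visible and avoids re-entering the file-system cache while it is already locked, which mirrors how DocAnalyzer defers pending specifiers until after the open documents have been scanned.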
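The new FileSystemDocuments::get hinges on a cheap freshness check: a version stamp derived from the file on disk is compared against the stamp stored with the cached document, and the entry is only re-read when the two differ. A rough sketch of that pattern, assuming a hypothetical mtime-based fs_version helper and a simplified cache keyed by path (the real code uses calculate_fs_version, HttpCache, and resolver plumbing instead):

use std::collections::HashMap;
use std::fs;
use std::path::Path;
use std::path::PathBuf;

// Hypothetical stand-in for calculate_fs_version: derive a version stamp
// from the file's modification time.
fn fs_version(path: &Path) -> Option<String> {
  let metadata = fs::metadata(path).ok()?;
  let modified = metadata.modified().ok()?;
  Some(format!("{:?}", modified))
}

struct CachedDoc {
  fs_version: String,
  text: String,
}

#[derive(Default)]
struct DocCache {
  docs: HashMap<PathBuf, CachedDoc>,
}

impl DocCache {
  // Return the cached entry only while its version stamp still matches the
  // file on disk; otherwise re-read the file and replace the entry.
  fn get(&mut self, path: &Path) -> Option<&CachedDoc> {
    let current = fs_version(path);
    let cached = self.docs.get(path).map(|d| d.fs_version.clone());
    if cached != current {
      let text = fs::read_to_string(path).ok()?;
      self.docs.insert(
        path.to_path_buf(),
        CachedDoc {
          fs_version: current?,
          text,
        },
      );
    }
    self.docs.get(path)
  }
}

fn main() {
  let mut cache = DocCache::default();
  // First call reads from disk; later calls reuse the entry until the
  // file's modification time changes.
  if let Some(doc) = cache.get(Path::new("other.ts")) {
    println!("{} bytes cached", doc.text.len());
  }
}

Centralizing this check in a single get method is what lets both the completion path and the new dependency walk share one freshness rule instead of duplicating the version comparison at each call site.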