author     Kitson Kelly <me@kitsonkelly.com>  2021-01-22 21:03:16 +1100
committer  GitHub <noreply@github.com>        2021-01-22 21:03:16 +1100
commit     1a9209d1e3ed297c96a698550ab833c54c02a4ee (patch)
tree       21be94f78196af33dd4a59c40fbfe2e7fa744922
parent     ffa920e4b9594f201756f9eeca542e5dfb8576d1 (diff)
fix(lsp): handle mbc documents properly (#9151)
Co-authored-by: Ryan Dahl <ry@tinyclouds.org>
-rw-r--r--  Cargo.lock  7
-rw-r--r--  cli/Cargo.toml  1
-rw-r--r--  cli/lsp/diagnostics.rs  179
-rw-r--r--  cli/lsp/documents.rs  315
-rw-r--r--  cli/lsp/language_server.rs  600
-rw-r--r--  cli/lsp/memory_cache.rs  121
-rw-r--r--  cli/lsp/mod.rs  2
-rw-r--r--  cli/lsp/sources.rs  26
-rw-r--r--  cli/lsp/text.rs  617
-rw-r--r--  cli/lsp/tsc.rs  341
-rw-r--r--  cli/tests/lsp/did_change_notification_large.json  25
-rw-r--r--  cli/tests/lsp/did_change_notification_mbc.json  25
-rw-r--r--  cli/tests/lsp/did_open_notification_large.json  12
-rw-r--r--  cli/tests/lsp/did_open_notification_mbc.json  12
-rw-r--r--  cli/tests/lsp/hover_request_mbc.json  14
-rw-r--r--  cli/tests/lsp/rename_request.json  4
-rw-r--r--  cli/tsc/99_main_compiler.js  24
-rw-r--r--  cli/tsc/compiler.d.ts  2
18 files changed, 1478 insertions, 849 deletions
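
The patch teaches the language server to track document positions in the protocol's own terms: LSP positions count UTF-16 code units, Rust string APIs index UTF-8 bytes, and the pre-patch code counted Unicode scalar values with `chars()`, which matches neither once a document contains a multi-byte character ("mbc"). A minimal sketch, separate from the patch, of how the three counts diverge:

fn main() {
  // "Hello 🦕" is 7 Unicode scalar values, 10 UTF-8 bytes and 8 UTF-16 code
  // units (the emoji is a surrogate pair), so byte offsets, char counts and
  // LSP character offsets all disagree past the emoji.
  let s = "Hello 🦕";
  assert_eq!(s.len(), 10); // UTF-8 bytes
  assert_eq!(s.chars().count(), 7); // Unicode scalar values
  assert_eq!(s.encode_utf16().count(), 8); // UTF-16 code units, what LSP uses
}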
diff --git a/Cargo.lock b/Cargo.lock
index ab09055d5..0e6248fc3 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -453,6 +453,7 @@ dependencies = [
"tempfile",
"termcolor",
"test_util",
+ "text-size",
"tokio",
"tokio-rustls",
"tower-test",
@@ -2949,6 +2950,12 @@ dependencies = [
]
[[package]]
+name = "text-size"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a"
+
+[[package]]
name = "textwrap"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/cli/Cargo.toml b/cli/Cargo.toml
index d4e3083dd..8f0fc7235 100644
--- a/cli/Cargo.toml
+++ b/cli/Cargo.toml
@@ -74,6 +74,7 @@ swc_common = { version = "0.10.8", features = ["sourcemap"] }
swc_ecmascript = { version = "0.17.1", features = ["codegen", "dep_graph", "parser", "proposal", "react", "transforms", "typescript", "visit"] }
tempfile = "3.1.0"
termcolor = "1.1.2"
+text-size = "1.1.0"
tokio = { version = "1.0.1", features = ["full"] }
tokio-rustls = "0.22.0"
uuid = { version = "0.8.2", features = ["v4"] }
diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs
index 6632620ec..36e7b093c 100644
--- a/cli/lsp/diagnostics.rs
+++ b/cli/lsp/diagnostics.rs
@@ -4,7 +4,6 @@ use super::analysis::get_lint_references;
use super::analysis::references_to_diagnostics;
use super::analysis::ResolvedDependency;
use super::language_server::StateSnapshot;
-use super::memory_cache::FileId;
use super::tsc;
use crate::diagnostics;
@@ -13,7 +12,7 @@ use crate::media_type::MediaType;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::serde_json;
-use deno_core::serde_json::Value;
+use deno_core::ModuleSpecifier;
use lspower::lsp_types;
use std::collections::HashMap;
use std::collections::HashSet;
@@ -28,43 +27,47 @@ pub enum DiagnosticSource {
#[derive(Debug, Default, Clone)]
pub struct DiagnosticCollection {
- map: HashMap<(FileId, DiagnosticSource), Vec<lsp_types::Diagnostic>>,
- versions: HashMap<FileId, i32>,
- changes: HashSet<FileId>,
+ map: HashMap<(ModuleSpecifier, DiagnosticSource), Vec<lsp_types::Diagnostic>>,
+ versions: HashMap<ModuleSpecifier, i32>,
+ changes: HashSet<ModuleSpecifier>,
}
impl DiagnosticCollection {
pub fn set(
&mut self,
- file_id: FileId,
+ specifier: ModuleSpecifier,
source: DiagnosticSource,
version: Option<i32>,
diagnostics: Vec<lsp_types::Diagnostic>,
) {
- self.map.insert((file_id, source), diagnostics);
+ self.map.insert((specifier.clone(), source), diagnostics);
if let Some(version) = version {
- self.versions.insert(file_id, version);
+ self.versions.insert(specifier.clone(), version);
}
- self.changes.insert(file_id);
+ self.changes.insert(specifier);
}
pub fn diagnostics_for(
&self,
- file_id: FileId,
- source: DiagnosticSource,
+ specifier: &ModuleSpecifier,
+ source: &DiagnosticSource,
) -> impl Iterator<Item = &lsp_types::Diagnostic> {
- self.map.get(&(file_id, source)).into_iter().flatten()
+ self
+ .map
+ .get(&(specifier.clone(), source.clone()))
+ .into_iter()
+ .flatten()
}
- pub fn get_version(&self, file_id: &FileId) -> Option<i32> {
- self.versions.get(file_id).cloned()
+ pub fn get_version(&self, specifier: &ModuleSpecifier) -> Option<i32> {
+ self.versions.get(specifier).cloned()
}
- pub fn invalidate(&mut self, file_id: &FileId) {
- self.versions.remove(file_id);
+ pub fn invalidate(&mut self, specifier: &ModuleSpecifier) {
+ self.versions.remove(specifier);
}
- pub fn take_changes(&mut self) -> Option<HashSet<FileId>> {
+ pub fn take_changes(&mut self) -> Option<HashSet<ModuleSpecifier>> {
if self.changes.is_empty() {
return None;
}
@@ -72,7 +75,8 @@ impl DiagnosticCollection {
}
}
-pub type DiagnosticVec = Vec<(FileId, Option<i32>, Vec<lsp_types::Diagnostic>)>;
+pub type DiagnosticVec =
+ Vec<(ModuleSpecifier, Option<i32>, Vec<lsp_types::Diagnostic>)>;
pub async fn generate_lint_diagnostics(
state_snapshot: StateSnapshot,
@@ -81,25 +85,24 @@ pub async fn generate_lint_diagnostics(
tokio::task::spawn_blocking(move || {
let mut diagnostic_list = Vec::new();
- let file_cache = state_snapshot.file_cache.lock().unwrap();
- for (specifier, doc_data) in state_snapshot.doc_data.iter() {
- let file_id = file_cache.lookup(specifier).unwrap();
- let version = doc_data.version;
- let current_version = diagnostic_collection.get_version(&file_id);
+ let documents = state_snapshot.documents.lock().unwrap();
+ for specifier in documents.open_specifiers() {
+ let version = documents.version(specifier);
+ let current_version = diagnostic_collection.get_version(specifier);
if version != current_version {
let media_type = MediaType::from(specifier);
- if let Ok(source_code) = file_cache.get_contents(file_id) {
+ if let Ok(Some(source_code)) = documents.content(specifier) {
if let Ok(references) =
get_lint_references(specifier, &media_type, &source_code)
{
if !references.is_empty() {
diagnostic_list.push((
- file_id,
+ specifier.clone(),
version,
references_to_diagnostics(references),
));
} else {
- diagnostic_list.push((file_id, version, Vec::new()));
+ diagnostic_list.push((specifier.clone(), version, Vec::new()));
}
}
} else {
@@ -154,7 +157,7 @@ fn to_lsp_range(
}
}
-type TsDiagnostics = Vec<diagnostics::Diagnostic>;
+type TsDiagnostics = HashMap<String, Vec<diagnostics::Diagnostic>>;
fn get_diagnostic_message(diagnostic: &diagnostics::Diagnostic) -> String {
if let Some(message) = diagnostic.message_text.clone() {
@@ -197,65 +200,70 @@ fn to_lsp_related_information(
}
fn ts_json_to_diagnostics(
- value: Value,
-) -> Result<Vec<lsp_types::Diagnostic>, AnyError> {
- let ts_diagnostics: TsDiagnostics = serde_json::from_value(value)?;
- Ok(
- ts_diagnostics
- .iter()
- .filter_map(|d| {
- if let (Some(start), Some(end)) = (&d.start, &d.end) {
- Some(lsp_types::Diagnostic {
- range: to_lsp_range(start, end),
- severity: Some((&d.category).into()),
- code: Some(lsp_types::NumberOrString::Number(d.code as i32)),
- code_description: None,
- source: Some("deno-ts".to_string()),
- message: get_diagnostic_message(d),
- related_information: to_lsp_related_information(
- &d.related_information,
- ),
- tags: match d.code {
- // These are codes that indicate the variable is unused.
- 6133 | 6192 | 6196 => {
- Some(vec![lsp_types::DiagnosticTag::Unnecessary])
- }
- _ => None,
- },
- data: None,
- })
- } else {
- None
- }
- })
- .collect(),
- )
+ diagnostics: &[diagnostics::Diagnostic],
+) -> Vec<lsp_types::Diagnostic> {
+ diagnostics
+ .iter()
+ .filter_map(|d| {
+ if let (Some(start), Some(end)) = (&d.start, &d.end) {
+ Some(lsp_types::Diagnostic {
+ range: to_lsp_range(start, end),
+ severity: Some((&d.category).into()),
+ code: Some(lsp_types::NumberOrString::Number(d.code as i32)),
+ code_description: None,
+ source: Some("deno-ts".to_string()),
+ message: get_diagnostic_message(d),
+ related_information: to_lsp_related_information(
+ &d.related_information,
+ ),
+ tags: match d.code {
+ // These are codes that indicate the variable is unused.
+ 6133 | 6192 | 6196 => {
+ Some(vec![lsp_types::DiagnosticTag::Unnecessary])
+ }
+ _ => None,
+ },
+ data: None,
+ })
+ } else {
+ None
+ }
+ })
+ .collect()
}
pub async fn generate_ts_diagnostics(
- ts_server: &tsc::TsServer,
- diagnostic_collection: &DiagnosticCollection,
state_snapshot: StateSnapshot,
+ diagnostic_collection: DiagnosticCollection,
+ ts_server: &tsc::TsServer,
) -> Result<DiagnosticVec, AnyError> {
let mut diagnostics = Vec::new();
- let state_snapshot_ = state_snapshot.clone();
- for (specifier, doc_data) in state_snapshot_.doc_data.iter() {
- let file_id = {
- // TODO(lucacasonato): this is highly inefficient
- let file_cache = state_snapshot_.file_cache.lock().unwrap();
- file_cache.lookup(specifier).unwrap()
- };
- let version = doc_data.version;
- let current_version = diagnostic_collection.get_version(&file_id);
- if version != current_version {
- let req = tsc::RequestMethod::GetDiagnostics(specifier.clone());
- let ts_diagnostics = ts_json_to_diagnostics(
- ts_server.request(state_snapshot.clone(), req).await?,
- )?;
- diagnostics.push((file_id, version, ts_diagnostics));
+ let mut specifiers = Vec::new();
+ {
+ let documents = state_snapshot.documents.lock().unwrap();
+ for specifier in documents.open_specifiers() {
+ let version = documents.version(specifier);
+ let current_version = diagnostic_collection.get_version(specifier);
+ if version != current_version {
+ specifiers.push(specifier.clone());
+ }
+ }
+ }
+ if !specifiers.is_empty() {
+ let req = tsc::RequestMethod::GetDiagnostics(specifiers);
+ let res = ts_server.request(state_snapshot.clone(), req).await?;
+ let ts_diagnostic_map: TsDiagnostics = serde_json::from_value(res)?;
+ for (specifier_str, ts_diagnostics) in ts_diagnostic_map.iter() {
+ let specifier = ModuleSpecifier::resolve_url(specifier_str)?;
+ let version =
+ state_snapshot.documents.lock().unwrap().version(&specifier);
+ diagnostics.push((
+ specifier,
+ version,
+ ts_json_to_diagnostics(ts_diagnostics),
+ ));
}
}
-
Ok(diagnostics)
}
@@ -266,19 +274,18 @@ pub async fn generate_dependency_diagnostics(
tokio::task::spawn_blocking(move || {
let mut diagnostics = Vec::new();
- let file_cache = state_snapshot.file_cache.lock().unwrap();
let mut sources = if let Ok(sources) = state_snapshot.sources.lock() {
sources
} else {
return Err(custom_error("Deadlock", "deadlock locking sources"));
};
- for (specifier, doc_data) in state_snapshot.doc_data.iter() {
- let file_id = file_cache.lookup(specifier).unwrap();
- let version = doc_data.version;
- let current_version = diagnostic_collection.get_version(&file_id);
+ let documents = state_snapshot.documents.lock().unwrap();
+ for specifier in documents.open_specifiers() {
+ let version = documents.version(specifier);
+ let current_version = diagnostic_collection.get_version(specifier);
if version != current_version {
let mut diagnostic_list = Vec::new();
- if let Some(dependencies) = &doc_data.dependencies {
+ if let Some(dependencies) = documents.dependencies(specifier) {
for (_, dependency) in dependencies.iter() {
if let (Some(code), Some(range)) = (
&dependency.maybe_code,
@@ -299,7 +306,7 @@ pub async fn generate_dependency_diagnostics(
})
}
ResolvedDependency::Resolved(specifier) => {
- if !(state_snapshot.doc_data.contains_key(&specifier) || sources.contains(&specifier)) {
+ if !(documents.contains(&specifier) || sources.contains(&specifier)) {
let is_local = specifier.as_url().scheme() == "file";
diagnostic_list.push(lsp_types::Diagnostic {
range: *range,
@@ -322,7 +329,7 @@ pub async fn generate_dependency_diagnostics(
}
}
}
- diagnostics.push((file_id, version, diagnostic_list))
+ diagnostics.push((specifier.clone(), version, diagnostic_list))
}
}
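
The reworked `generate_ts_diagnostics` above batches every dirty specifier into a single `GetDiagnostics` request and deserializes the response as `HashMap<String, Vec<diagnostics::Diagnostic>>`, keyed by specifier string. A simplified, self-contained sketch of handling that response shape (`Diag` is a stand-in for illustration, not the real `diagnostics::Diagnostic`):

use std::collections::HashMap;

use serde::Deserialize;
use serde_json::json;

// Stand-in for diagnostics::Diagnostic, reduced to one field for the sketch.
#[derive(Debug, Deserialize)]
struct Diag {
  code: u32,
}

fn main() {
  // Hypothetical batched response: one entry per requested specifier.
  let res = json!({
    "file:///a/b.ts": [{ "code": 6133 }],
    "file:///a/c.ts": []
  });
  let map: HashMap<String, Vec<Diag>> = serde_json::from_value(res).unwrap();
  for (specifier, diags) in &map {
    println!("{}: {:?}", specifier, diags);
  }
}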
diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs
new file mode 100644
index 000000000..2f355c4d9
--- /dev/null
+++ b/cli/lsp/documents.rs
@@ -0,0 +1,315 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+use super::analysis;
+use super::text::LineIndex;
+
+use crate::import_map::ImportMap;
+use crate::media_type::MediaType;
+
+use deno_core::error::custom_error;
+use deno_core::error::AnyError;
+use deno_core::error::Context;
+use deno_core::ModuleSpecifier;
+use lspower::lsp_types::TextDocumentContentChangeEvent;
+use std::collections::HashMap;
+use std::ops::Range;
+
+#[derive(Debug, PartialEq, Eq)]
+enum IndexValid {
+ All,
+ UpTo(u32),
+}
+
+impl IndexValid {
+ fn covers(&self, line: u32) -> bool {
+ match *self {
+ IndexValid::UpTo(to) => to > line,
+ IndexValid::All => true,
+ }
+ }
+}
+
+#[derive(Debug, Clone, Default)]
+pub struct DocumentData {
+ bytes: Option<Vec<u8>>,
+ line_index: Option<LineIndex>,
+ dependencies: Option<HashMap<String, analysis::Dependency>>,
+ version: Option<i32>,
+}
+
+impl DocumentData {
+ pub fn apply_content_changes(
+ &mut self,
+ content_changes: Vec<TextDocumentContentChangeEvent>,
+ ) -> Result<(), AnyError> {
+ if self.bytes.is_none() {
+ return Ok(());
+ }
+ let content = &mut String::from_utf8(self.bytes.clone().unwrap())
+ .context("unable to parse bytes to string")?;
+ let mut line_index = if let Some(line_index) = &self.line_index {
+ line_index.clone()
+ } else {
+ LineIndex::new(&content)
+ };
+ let mut index_valid = IndexValid::All;
+ for change in content_changes {
+ if let Some(range) = change.range {
+ if !index_valid.covers(range.start.line) {
+ line_index = LineIndex::new(&content);
+ }
+ index_valid = IndexValid::UpTo(range.start.line);
+ let range = line_index.get_text_range(range)?;
+ content.replace_range(Range::<usize>::from(range), &change.text);
+ } else {
+ *content = change.text;
+ index_valid = IndexValid::UpTo(0);
+ }
+ }
+ self.bytes = Some(content.as_bytes().to_owned());
+ self.line_index = if index_valid == IndexValid::All {
+ Some(line_index)
+ } else {
+ Some(LineIndex::new(&content))
+ };
+ Ok(())
+ }
+
+ pub fn content(&self) -> Result<Option<String>, AnyError> {
+ if let Some(bytes) = self.bytes.clone() {
+ Ok(Some(
+ String::from_utf8(bytes).context("cannot decode bytes to string")?,
+ ))
+ } else {
+ Ok(None)
+ }
+ }
+}
+
+#[derive(Debug, Clone, Default)]
+pub struct DocumentCache {
+ docs: HashMap<ModuleSpecifier, DocumentData>,
+}
+
+impl DocumentCache {
+ pub fn analyze_dependencies(
+ &mut self,
+ specifier: &ModuleSpecifier,
+ maybe_import_map: &Option<ImportMap>,
+ ) -> Result<(), AnyError> {
+ if !self.contains(specifier) {
+ return Err(custom_error(
+ "NotFound",
+ format!(
+ "The specifier (\"{}\") does not exist in the document cache.",
+ specifier
+ ),
+ ));
+ }
+
+ let doc = self.docs.get_mut(specifier).unwrap();
+ if let Some(source) = &doc.content()? {
+ if let Some((dependencies, _)) = analysis::analyze_dependencies(
+ specifier,
+ source,
+ &MediaType::from(specifier),
+ maybe_import_map,
+ ) {
+ doc.dependencies = Some(dependencies);
+ } else {
+ doc.dependencies = None;
+ }
+ } else {
+ doc.dependencies = None;
+ }
+
+ Ok(())
+ }
+
+ pub fn change(
+ &mut self,
+ specifier: &ModuleSpecifier,
+ version: i32,
+ content_changes: Vec<TextDocumentContentChangeEvent>,
+ ) -> Result<(), AnyError> {
+ if !self.contains(specifier) {
+ return Err(custom_error(
+ "NotFound",
+ format!(
+ "The specifier (\"{}\") does not exist in the document cache.",
+ specifier
+ ),
+ ));
+ }
+
+ let doc = self.docs.get_mut(specifier).unwrap();
+ doc.apply_content_changes(content_changes)?;
+ doc.version = Some(version);
+ Ok(())
+ }
+
+ pub fn close(&mut self, specifier: &ModuleSpecifier) {
+ if let Some(mut doc) = self.docs.get_mut(specifier) {
+ doc.version = None;
+ doc.dependencies = None;
+ }
+ }
+
+ pub fn contains(&self, specifier: &ModuleSpecifier) -> bool {
+ self.docs.contains_key(specifier)
+ }
+
+ pub fn content(
+ &self,
+ specifier: &ModuleSpecifier,
+ ) -> Result<Option<String>, AnyError> {
+ if let Some(doc) = self.docs.get(specifier) {
+ doc.content()
+ } else {
+ Ok(None)
+ }
+ }
+
+ pub fn dependencies(
+ &self,
+ specifier: &ModuleSpecifier,
+ ) -> Option<HashMap<String, analysis::Dependency>> {
+ let doc = self.docs.get(specifier)?;
+ doc.dependencies.clone()
+ }
+
+ pub fn len(&self) -> usize {
+ self.docs.iter().count()
+ }
+
+ pub fn line_index(&self, specifier: &ModuleSpecifier) -> Option<LineIndex> {
+ let doc = self.docs.get(specifier)?;
+ doc.line_index.clone()
+ }
+
+ pub fn open(
+ &mut self,
+ specifier: ModuleSpecifier,
+ version: i32,
+ text: String,
+ ) {
+ self.docs.insert(
+ specifier,
+ DocumentData {
+ bytes: Some(text.as_bytes().to_owned()),
+ version: Some(version),
+ line_index: Some(LineIndex::new(&text)),
+ ..Default::default()
+ },
+ );
+ }
+
+ pub fn open_specifiers(&self) -> Vec<&ModuleSpecifier> {
+ self
+ .docs
+ .iter()
+ .filter_map(|(key, data)| {
+ if data.version.is_some() {
+ Some(key)
+ } else {
+ None
+ }
+ })
+ .collect()
+ }
+
+ pub fn version(&self, specifier: &ModuleSpecifier) -> Option<i32> {
+ self.docs.get(specifier).and_then(|doc| doc.version)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use lspower::lsp_types;
+
+ #[test]
+ fn test_document_cache_contains() {
+ let mut document_cache = DocumentCache::default();
+ let specifier = ModuleSpecifier::resolve_url("file:///a/b.ts").unwrap();
+ let missing_specifier =
+ ModuleSpecifier::resolve_url("file:///a/c.ts").unwrap();
+ document_cache.open(
+ specifier.clone(),
+ 1,
+ "console.log(\"Hello Deno\");\n".to_owned(),
+ );
+ assert!(document_cache.contains(&specifier));
+ assert!(!document_cache.contains(&missing_specifier));
+ }
+
+ #[test]
+ fn test_document_cache_change() {
+ let mut document_cache = DocumentCache::default();
+ let specifier = ModuleSpecifier::resolve_url("file:///a/b.ts").unwrap();
+ document_cache.open(
+ specifier.clone(),
+ 1,
+ "console.log(\"Hello deno\");\n".to_owned(),
+ );
+ document_cache
+ .change(
+ &specifier,
+ 2,
+ vec![lsp_types::TextDocumentContentChangeEvent {
+ range: Some(lsp_types::Range {
+ start: lsp_types::Position {
+ line: 0,
+ character: 19,
+ },
+ end: lsp_types::Position {
+ line: 0,
+ character: 20,
+ },
+ }),
+ range_length: Some(1),
+ text: "D".to_string(),
+ }],
+ )
+ .expect("failed to make changes");
+ let actual = document_cache
+ .content(&specifier)
+ .expect("failed to get content");
+ assert_eq!(actual, Some("console.log(\"Hello Deno\");\n".to_string()));
+ }
+
+ #[test]
+ fn test_document_cache_change_utf16() {
+ let mut document_cache = DocumentCache::default();
+ let specifier = ModuleSpecifier::resolve_url("file:///a/b.ts").unwrap();
+ document_cache.open(
+ specifier.clone(),
+ 1,
+      "console.log(\"Hello 🦕\");\n".to_owned(),
+ );
+ document_cache
+ .change(
+ &specifier,
+ 2,
+ vec![lsp_types::TextDocumentContentChangeEvent {
+ range: Some(lsp_types::Range {
+ start: lsp_types::Position {
+ line: 0,
+ character: 19,
+ },
+ end: lsp_types::Position {
+ line: 0,
+ character: 21,
+ },
+ }),
+ range_length: Some(2),
+ text: "Deno".to_string(),
+ }],
+ )
+ .expect("failed to make changes");
+ let actual = document_cache
+ .content(&specifier)
+ .expect("failed to get content");
+ assert_eq!(actual, Some("console.log(\"Hello Deno\");\n".to_string()));
+ }
+}
diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs
index 6f7f436b9..316aabf91 100644
--- a/cli/lsp/language_server.rs
+++ b/cli/lsp/language_server.rs
@@ -23,33 +23,31 @@ use tokio::fs;
use crate::deno_dir;
use crate::import_map::ImportMap;
-use crate::media_type::MediaType;
use crate::tsc_config::parse_config;
use crate::tsc_config::TsConfig;
-use super::analysis;
use super::capabilities;
use super::config::Config;
use super::diagnostics;
use super::diagnostics::DiagnosticCollection;
use super::diagnostics::DiagnosticSource;
-use super::memory_cache::MemoryCache;
+use super::documents::DocumentCache;
use super::sources;
use super::sources::Sources;
use super::text;
-use super::text::apply_content_changes;
+use super::text::LineIndex;
use super::tsc;
+use super::tsc::AssetDocument;
use super::tsc::TsServer;
use super::utils;
#[derive(Debug, Clone)]
pub struct LanguageServer {
- assets: Arc<Mutex<HashMap<ModuleSpecifier, Option<String>>>>,
+ assets: Arc<Mutex<HashMap<ModuleSpecifier, Option<AssetDocument>>>>,
client: Client,
ts_server: TsServer,
config: Arc<Mutex<Config>>,
- doc_data: Arc<Mutex<HashMap<ModuleSpecifier, DocumentData>>>,
- file_cache: Arc<Mutex<MemoryCache>>,
+ documents: Arc<Mutex<DocumentCache>>,
sources: Arc<Mutex<Sources>>,
diagnostics: Arc<Mutex<DiagnosticCollection>>,
maybe_config_uri: Arc<Mutex<Option<Url>>>,
@@ -59,9 +57,8 @@ pub struct LanguageServer {
#[derive(Debug, Clone, Default)]
pub struct StateSnapshot {
- pub assets: Arc<Mutex<HashMap<ModuleSpecifier, Option<String>>>>,
- pub doc_data: HashMap<ModuleSpecifier, DocumentData>,
- pub file_cache: Arc<Mutex<MemoryCache>>,
+ pub assets: Arc<Mutex<HashMap<ModuleSpecifier, Option<AssetDocument>>>>,
+ pub documents: Arc<Mutex<DocumentCache>>,
pub sources: Arc<Mutex<Sources>>,
}
@@ -78,8 +75,7 @@ impl LanguageServer {
client,
ts_server: TsServer::new(),
config: Default::default(),
- doc_data: Default::default(),
- file_cache: Default::default(),
+ documents: Default::default(),
sources,
diagnostics: Default::default(),
maybe_config_uri: Default::default(),
@@ -93,34 +89,65 @@ impl LanguageServer {
config.settings.enable
}
+ /// Searches assets, open documents and external sources for a line_index,
+ /// which might be performed asynchronously, hydrating in memory caches for
+ /// subsequent requests.
pub async fn get_line_index(
&self,
specifier: ModuleSpecifier,
- ) -> Result<Vec<u32>, AnyError> {
- let line_index = if specifier.as_url().scheme() == "asset" {
- let state_snapshot = self.snapshot();
- if let Some(source) =
- tsc::get_asset(&specifier, &self.ts_server, &state_snapshot).await?
- {
- text::index_lines(&source)
+ ) -> Result<LineIndex, AnyError> {
+ if specifier.as_url().scheme() == "asset" {
+ let maybe_asset =
+ { self.assets.lock().unwrap().get(&specifier).cloned() };
+ if let Some(maybe_asset) = maybe_asset {
+ if let Some(asset) = maybe_asset {
+ Ok(asset.line_index)
+ } else {
+ Err(anyhow!("asset is missing: {}", specifier))
+ }
} else {
- return Err(anyhow!("asset source missing: {}", specifier));
+ let state_snapshot = self.snapshot();
+ if let Some(asset) =
+ tsc::get_asset(&specifier, &self.ts_server, &state_snapshot).await?
+ {
+ Ok(asset.line_index)
+ } else {
+ Err(anyhow!("asset is missing: {}", specifier))
+ }
}
+ } else if let Some(line_index) =
+ self.documents.lock().unwrap().line_index(&specifier)
+ {
+ Ok(line_index)
+ } else if let Some(line_index) =
+ self.sources.lock().unwrap().get_line_index(&specifier)
+ {
+ Ok(line_index)
} else {
- let file_cache = self.file_cache.lock().unwrap();
- if let Some(file_id) = file_cache.lookup(&specifier) {
- let file_text = file_cache.get_contents(file_id)?;
- text::index_lines(&file_text)
+ Err(anyhow!("Unable to find line index for: {}", specifier))
+ }
+ }
+
+ /// Only searches already cached assets and documents for a line index. If
+ /// the line index cannot be found, `None` is returned.
+ pub fn get_line_index_sync(
+ &self,
+ specifier: &ModuleSpecifier,
+ ) -> Option<LineIndex> {
+ if specifier.as_url().scheme() == "asset" {
+ if let Some(Some(asset)) = self.assets.lock().unwrap().get(specifier) {
+ Some(asset.line_index.clone())
} else {
- let mut sources = self.sources.lock().unwrap();
- if let Some(line_index) = sources.get_line_index(&specifier) {
- line_index
- } else {
- return Err(anyhow!("source for specifier not found: {}", specifier));
- }
+ None
}
- };
- Ok(line_index)
+ } else {
+ let documents = self.documents.lock().unwrap();
+ if documents.contains(specifier) {
+ documents.line_index(specifier)
+ } else {
+ self.sources.lock().unwrap().get_line_index(specifier)
+ }
+ }
}
async fn prepare_diagnostics(&self) -> Result<(), AnyError> {
@@ -130,6 +157,7 @@ impl LanguageServer {
};
let lint = async {
+ let mut disturbed = false;
if lint_enabled {
let diagnostic_collection = self.diagnostics.lock().unwrap().clone();
let diagnostics = diagnostics::generate_lint_diagnostics(
@@ -137,59 +165,50 @@ impl LanguageServer {
diagnostic_collection,
)
.await;
+ disturbed = !diagnostics.is_empty();
{
let mut diagnostics_collection = self.diagnostics.lock().unwrap();
- for (file_id, version, diagnostics) in diagnostics {
+ for (specifier, version, diagnostics) in diagnostics {
diagnostics_collection.set(
- file_id,
+ specifier,
DiagnosticSource::Lint,
version,
diagnostics,
);
}
}
- self.publish_diagnostics().await?
};
-
- Ok::<(), AnyError>(())
+ Ok::<bool, AnyError>(disturbed)
};
let ts = async {
+ let mut disturbed = false;
if enabled {
- let diagnostics = {
- let diagnostic_collection = self.diagnostics.lock().unwrap().clone();
- match diagnostics::generate_ts_diagnostics(
- &self.ts_server,
- &diagnostic_collection,
- self.snapshot(),
- )
- .await
- {
- Ok(diagnostics) => diagnostics,
- Err(err) => {
- error!("Error processing TypeScript diagnostics:\n{}", err);
- vec![]
- }
- }
- };
+ let diagnostics_collection = self.diagnostics.lock().unwrap().clone();
+ let diagnostics = diagnostics::generate_ts_diagnostics(
+ self.snapshot(),
+ diagnostics_collection,
+ &self.ts_server,
+ )
+ .await?;
+ disturbed = !diagnostics.is_empty();
{
let mut diagnostics_collection = self.diagnostics.lock().unwrap();
- for (file_id, version, diagnostics) in diagnostics {
+ for (specifier, version, diagnostics) in diagnostics {
diagnostics_collection.set(
- file_id,
+ specifier,
DiagnosticSource::TypeScript,
version,
diagnostics,
);
}
- };
- self.publish_diagnostics().await?
- }
-
- Ok::<(), AnyError>(())
+ }
+ };
+ Ok::<bool, AnyError>(disturbed)
};
let deps = async {
+ let mut disturbed = false;
if enabled {
let diagnostics_collection = self.diagnostics.lock().unwrap().clone();
let diagnostics = diagnostics::generate_dependency_diagnostics(
@@ -197,27 +216,26 @@ impl LanguageServer {
diagnostics_collection,
)
.await?;
+ disturbed = !diagnostics.is_empty();
{
let mut diagnostics_collection = self.diagnostics.lock().unwrap();
- for (file_id, version, diagnostics) in diagnostics {
+ for (specifier, version, diagnostics) in diagnostics {
diagnostics_collection.set(
- file_id,
+ specifier,
DiagnosticSource::Deno,
version,
diagnostics,
);
}
}
- self.publish_diagnostics().await?
};
-
- Ok::<(), AnyError>(())
+ Ok::<bool, AnyError>(disturbed)
};
let (lint_res, ts_res, deps_res) = tokio::join!(lint, ts, deps);
- lint_res?;
- ts_res?;
- deps_res?;
+ if lint_res? || ts_res? || deps_res? {
+ self.publish_diagnostics().await?;
+ }
Ok(())
}
@@ -230,7 +248,7 @@ impl LanguageServer {
};
if let Some(diagnostic_changes) = maybe_changes {
let settings = self.config.lock().unwrap().settings.clone();
- for file_id in diagnostic_changes {
+ for specifier in diagnostic_changes {
// TODO(@kitsonk) not totally happy with the way we collect and store
// different types of diagnostics and offer them up to the client, we
// do need to send "empty" vectors though when a particular feature is
@@ -238,7 +256,7 @@ impl LanguageServer {
// diagnostics
let mut diagnostics: Vec<Diagnostic> = if settings.lint {
diagnostics_collection
- .diagnostics_for(file_id, DiagnosticSource::Lint)
+ .diagnostics_for(&specifier, &DiagnosticSource::Lint)
.cloned()
.collect()
} else {
@@ -247,27 +265,17 @@ impl LanguageServer {
if self.enabled() {
diagnostics.extend(
diagnostics_collection
- .diagnostics_for(file_id, DiagnosticSource::TypeScript)
+ .diagnostics_for(&specifier, &DiagnosticSource::TypeScript)
.cloned(),
);
diagnostics.extend(
diagnostics_collection
- .diagnostics_for(file_id, DiagnosticSource::Deno)
+ .diagnostics_for(&specifier, &DiagnosticSource::Deno)
.cloned(),
);
}
- let specifier = {
- let file_cache = self.file_cache.lock().unwrap();
- file_cache.get_specifier(file_id).clone()
- };
let uri = specifier.as_url().clone();
- let version = if let Some(doc_data) =
- self.doc_data.lock().unwrap().get(&specifier)
- {
- doc_data.version
- } else {
- None
- };
+ let version = self.documents.lock().unwrap().version(&specifier);
self
.client
.publish_diagnostics(uri, diagnostics, version)
@@ -281,8 +289,7 @@ impl LanguageServer {
pub fn snapshot(&self) -> StateSnapshot {
StateSnapshot {
assets: self.assets.clone(),
- doc_data: self.doc_data.lock().unwrap().clone(),
- file_cache: self.file_cache.clone(),
+ documents: self.documents.clone(),
sources: self.sources.clone(),
}
}
@@ -507,61 +514,48 @@ impl lspower::LanguageServer for LanguageServer {
return;
}
let specifier = utils::normalize_url(params.text_document.uri);
- let maybe_import_map = self.maybe_import_map.lock().unwrap().clone();
- if self
- .doc_data
+ self.documents.lock().unwrap().open(
+ specifier.clone(),
+ params.text_document.version,
+ params.text_document.text,
+ );
+ if let Err(err) = self
+ .documents
.lock()
.unwrap()
- .insert(
- specifier.clone(),
- DocumentData::new(
- specifier.clone(),
- params.text_document.version,
- &params.text_document.text,
- maybe_import_map,
- ),
- )
- .is_some()
+ .analyze_dependencies(&specifier, &self.maybe_import_map.lock().unwrap())
{
- error!("duplicate DidOpenTextDocument: {}", specifier);
+ error!("{}", err);
}
- self
- .file_cache
- .lock()
- .unwrap()
- .set_contents(specifier, Some(params.text_document.text.into_bytes()));
- // TODO(@lucacasonato): error handling
- self.prepare_diagnostics().await.unwrap();
+ // TODO(@kitsonk): how to better lazily do this?
+ if let Err(err) = self.prepare_diagnostics().await {
+ error!("{}", err);
+ }
}
async fn did_change(&self, params: DidChangeTextDocumentParams) {
let specifier = utils::normalize_url(params.text_document.uri);
- let mut content = {
- let file_cache = self.file_cache.lock().unwrap();
- let file_id = file_cache.lookup(&specifier).unwrap();
- file_cache.get_contents(file_id).unwrap()
- };
- apply_content_changes(&mut content, params.content_changes);
- {
- let mut doc_data = self.doc_data.lock().unwrap();
- let doc_data = doc_data.get_mut(&specifier).unwrap();
- let maybe_import_map = self.maybe_import_map.lock().unwrap();
- doc_data.update(
- params.text_document.version,
- &content,
- &maybe_import_map,
- );
+ if let Err(err) = self.documents.lock().unwrap().change(
+ &specifier,
+ params.text_document.version,
+ params.content_changes,
+ ) {
+ error!("{}", err);
}
-
- self
- .file_cache
+ if let Err(err) = self
+ .documents
.lock()
.unwrap()
- .set_contents(specifier, Some(content.into_bytes()));
+ .analyze_dependencies(&specifier, &self.maybe_import_map.lock().unwrap())
+ {
+ error!("{}", err);
+ }
- // TODO(@lucacasonato): error handling
- self.prepare_diagnostics().await.unwrap();
+ // TODO(@kitsonk): how to better lazily do this?
+ if let Err(err) = self.prepare_diagnostics().await {
+ error!("{}", err);
+ }
}
async fn did_close(&self, params: DidCloseTextDocumentParams) {
@@ -572,12 +566,12 @@ impl lspower::LanguageServer for LanguageServer {
return;
}
let specifier = utils::normalize_url(params.text_document.uri);
- if self.doc_data.lock().unwrap().remove(&specifier).is_none() {
- error!("orphaned document: {}", specifier);
+ self.documents.lock().unwrap().close(&specifier);
+
+ // TODO(@kitsonk): how to better lazily do this?
+ if let Err(err) = self.prepare_diagnostics().await {
+ error!("{}", err);
}
- // TODO(@kitsonk) should we do garbage collection on the diagnostics?
- // TODO(@lucacasonato): error handling
- self.prepare_diagnostics().await.unwrap();
}
async fn did_save(&self, _params: DidSaveTextDocumentParams) {
@@ -673,12 +667,17 @@ impl lspower::LanguageServer for LanguageServer {
params: DocumentFormattingParams,
) -> LspResult<Option<Vec<TextEdit>>> {
let specifier = utils::normalize_url(params.text_document.uri.clone());
- let file_text = {
- let file_cache = self.file_cache.lock().unwrap();
- let file_id = file_cache.lookup(&specifier).unwrap();
- // TODO(lucacasonato): handle error properly
- file_cache.get_contents(file_id).unwrap()
- };
+ let file_text = self
+ .documents
+ .lock()
+ .unwrap()
+ .content(&specifier)
+ .map_err(|_| {
+ LspError::invalid_params(
+ "The specified file could not be found in memory.",
+ )
+ })?
+ .unwrap();
let file_path =
if let Ok(file_path) = params.text_document.uri.to_file_path() {
@@ -723,14 +722,18 @@ impl lspower::LanguageServer for LanguageServer {
let specifier = utils::normalize_url(
params.text_document_position_params.text_document.uri,
);
- // TODO(lucacasonato): handle error correctly
- let line_index = self.get_line_index(specifier.clone()).await.unwrap();
+ let line_index =
+ if let Some(line_index) = self.get_line_index_sync(&specifier) {
+ line_index
+ } else {
+ return Err(LspError::invalid_params(format!(
+ "An unexpected specifier ({}) was provided.",
+ specifier
+ )));
+ };
let req = tsc::RequestMethod::GetQuickInfo((
specifier,
- text::to_char_pos(
- &line_index,
- params.text_document_position_params.position,
- ),
+ line_index.offset_tsc(params.text_document_position_params.position)?,
));
// TODO(lucacasonato): handle error correctly
let res = self.ts_server.request(self.snapshot(), req).await.unwrap();
@@ -738,7 +741,8 @@ impl lspower::LanguageServer for LanguageServer {
let maybe_quick_info: Option<tsc::QuickInfo> =
serde_json::from_value(res).unwrap();
if let Some(quick_info) = maybe_quick_info {
- Ok(Some(quick_info.to_hover(&line_index)))
+ let hover = quick_info.to_hover(&line_index);
+ Ok(Some(hover))
} else {
Ok(None)
}
@@ -754,15 +758,19 @@ impl lspower::LanguageServer for LanguageServer {
let specifier = utils::normalize_url(
params.text_document_position_params.text_document.uri,
);
- // TODO(lucacasonato): handle error correctly
- let line_index = self.get_line_index(specifier.clone()).await.unwrap();
+ let line_index =
+ if let Some(line_index) = self.get_line_index_sync(&specifier) {
+ line_index
+ } else {
+ return Err(LspError::invalid_params(format!(
+ "An unexpected specifier ({}) was provided.",
+ specifier
+ )));
+ };
let files_to_search = vec![specifier.clone()];
let req = tsc::RequestMethod::GetDocumentHighlights((
specifier,
- text::to_char_pos(
- &line_index,
- params.text_document_position_params.position,
- ),
+ line_index.offset_tsc(params.text_document_position_params.position)?,
files_to_search,
));
// TODO(lucacasonato): handle error correctly
@@ -793,11 +801,18 @@ impl lspower::LanguageServer for LanguageServer {
}
let specifier =
utils::normalize_url(params.text_document_position.text_document.uri);
- // TODO(lucacasonato): handle error correctly
- let line_index = self.get_line_index(specifier.clone()).await.unwrap();
+ let line_index =
+ if let Some(line_index) = self.get_line_index_sync(&specifier) {
+ line_index
+ } else {
+ return Err(LspError::invalid_params(format!(
+ "An unexpected specifier ({}) was provided.",
+ specifier
+ )));
+ };
let req = tsc::RequestMethod::GetReferences((
specifier,
- text::to_char_pos(&line_index, params.text_document_position.position),
+ line_index.offset_tsc(params.text_document_position.position)?,
));
// TODO(lucacasonato): handle error correctly
let res = self.ts_server.request(self.snapshot(), req).await.unwrap();
@@ -836,14 +851,18 @@ impl lspower::LanguageServer for LanguageServer {
let specifier = utils::normalize_url(
params.text_document_position_params.text_document.uri,
);
- // TODO(lucacasonato): handle error correctly
- let line_index = self.get_line_index(specifier.clone()).await.unwrap();
+ let line_index =
+ if let Some(line_index) = self.get_line_index_sync(&specifier) {
+ line_index
+ } else {
+ return Err(LspError::invalid_params(format!(
+ "An unexpected specifier ({}) was provided.",
+ specifier
+ )));
+ };
let req = tsc::RequestMethod::GetDefinition((
specifier,
- text::to_char_pos(
- &line_index,
- params.text_document_position_params.position,
- ),
+ line_index.offset_tsc(params.text_document_position_params.position)?,
));
// TODO(lucacasonato): handle error correctly
let res = self.ts_server.request(self.snapshot(), req).await.unwrap();
@@ -872,10 +891,18 @@ impl lspower::LanguageServer for LanguageServer {
let specifier =
utils::normalize_url(params.text_document_position.text_document.uri);
// TODO(lucacasonato): handle error correctly
- let line_index = self.get_line_index(specifier.clone()).await.unwrap();
+ let line_index =
+ if let Some(line_index) = self.get_line_index_sync(&specifier) {
+ line_index
+ } else {
+ return Err(LspError::invalid_params(format!(
+ "An unexpected specifier ({}) was provided.",
+ specifier
+ )));
+ };
let req = tsc::RequestMethod::GetCompletions((
specifier,
- text::to_char_pos(&line_index, params.text_document_position.position),
+ line_index.offset_tsc(params.text_document_position.position)?,
tsc::UserPreferences {
// TODO(lucacasonato): enable this. see https://github.com/denoland/deno/pull/8651
include_completions_with_insert_text: Some(false),
@@ -906,20 +933,18 @@ impl lspower::LanguageServer for LanguageServer {
params.text_document_position_params.text_document.uri,
);
let line_index =
- self
- .get_line_index(specifier.clone())
- .await
- .map_err(|err| {
- error!("Failed to get line_index {:#?}", err);
- LspError::internal_error()
- })?;
+ if let Some(line_index) = self.get_line_index_sync(&specifier) {
+ line_index
+ } else {
+ return Err(LspError::invalid_params(format!(
+ "An unexpected specifier ({}) was provided.",
+ specifier
+ )));
+ };
let req = tsc::RequestMethod::GetImplementation((
specifier,
- text::to_char_pos(
- &line_index,
- params.text_document_position_params.position,
- ),
+ line_index.offset_tsc(params.text_document_position_params.position)?,
));
let res =
self
@@ -965,36 +990,36 @@ impl lspower::LanguageServer for LanguageServer {
if !self.enabled() {
return Ok(None);
}
-
- let snapshot = self.snapshot();
let specifier =
utils::normalize_url(params.text_document_position.text_document.uri);
let line_index =
- self
- .get_line_index(specifier.clone())
- .await
- .map_err(|err| {
- error!("Failed to get line_index {:#?}", err);
- LspError::internal_error()
- })?;
+ if let Some(line_index) = self.get_line_index_sync(&specifier) {
+ line_index
+ } else {
+ return Err(LspError::invalid_params(format!(
+ "An unexpected specifier ({}) was provided.",
+ specifier
+ )));
+ };
let req = tsc::RequestMethod::FindRenameLocations((
specifier,
- text::to_char_pos(&line_index, params.text_document_position.position),
+ line_index.offset_tsc(params.text_document_position.position)?,
true,
true,
false,
));
- let res = self
- .ts_server
- .request(snapshot.clone(), req)
- .await
- .map_err(|err| {
- error!("Failed to request to tsserver {:#?}", err);
- LspError::invalid_request()
- })?;
+ let res =
+ self
+ .ts_server
+ .request(self.snapshot(), req)
+ .await
+ .map_err(|err| {
+ error!("Failed to request to tsserver {:#?}", err);
+ LspError::invalid_request()
+ })?;
let maybe_locations = serde_json::from_value::<
Option<Vec<tsc::RenameLocation>>,
@@ -1007,26 +1032,22 @@ impl lspower::LanguageServer for LanguageServer {
LspError::internal_error()
})?;
- match maybe_locations {
- Some(locations) => {
- let rename_locations = tsc::RenameLocations { locations };
- let workpace_edits = rename_locations
- .into_workspace_edit(
- snapshot,
- |s| self.get_line_index(s),
- &params.new_name,
- )
- .await
- .map_err(|err| {
- error!(
- "Failed to convert tsc::RenameLocations to WorkspaceEdit {:#?}",
- err
- );
- LspError::internal_error()
- })?;
- Ok(Some(workpace_edits))
- }
- None => Ok(None),
+ if let Some(locations) = maybe_locations {
+ let rename_locations = tsc::RenameLocations { locations };
+ let workspace_edits = rename_locations
+ .into_workspace_edit(
+ &params.new_name,
+ |s| self.get_line_index(s),
+ |s| self.documents.lock().unwrap().version(&s),
+ )
+ .await
+ .map_err(|err| {
+ error!("Failed to get workspace edits: {:#?}", err);
+ LspError::internal_error()
+ })?;
+ Ok(Some(workspace_edits))
+ } else {
+ Ok(None)
}
}
@@ -1090,12 +1111,8 @@ impl LanguageServer {
error!("{}", err);
LspError::internal_error()
})?;
- {
- let file_cache = self.file_cache.lock().unwrap();
- if let Some(file_id) = file_cache.lookup(&specifier) {
- let mut diagnostics_collection = self.diagnostics.lock().unwrap();
- diagnostics_collection.invalidate(&file_id);
- }
+ if self.documents.lock().unwrap().contains(&specifier) {
+ self.diagnostics.lock().unwrap().invalidate(&specifier);
}
self.prepare_diagnostics().await.map_err(|err| {
error!("{}", err);
@@ -1111,28 +1128,38 @@ impl LanguageServer {
let specifier = utils::normalize_url(params.text_document.uri);
let url = specifier.as_url();
let contents = if url.as_str() == "deno:/status.md" {
- let file_cache = self.file_cache.lock().unwrap();
+ let documents = self.documents.lock().unwrap();
Some(format!(
r#"# Deno Language Server Status
- Documents in memory: {}
"#,
- file_cache.len()
+ documents.len()
))
} else {
match url.scheme() {
"asset" => {
- let state_snapshot = self.snapshot();
- if let Some(text) =
- tsc::get_asset(&specifier, &self.ts_server, &state_snapshot)
- .await
- .map_err(|_| LspError::internal_error())?
- {
- Some(text)
+ let maybe_asset =
+ { self.assets.lock().unwrap().get(&specifier).cloned() };
+ if let Some(maybe_asset) = maybe_asset {
+ if let Some(asset) = maybe_asset {
+ Some(asset.text)
+ } else {
+ None
+ }
} else {
- error!("Missing asset: {}", specifier);
- None
+ let state_snapshot = self.snapshot();
+ if let Some(asset) =
+ tsc::get_asset(&specifier, &self.ts_server, &state_snapshot)
+ .await
+ .map_err(|_| LspError::internal_error())?
+ {
+ Some(asset.text)
+ } else {
+ error!("Missing asset: {}", specifier);
+ None
+ }
}
}
_ => {
@@ -1150,59 +1177,6 @@ impl LanguageServer {
}
}
-#[derive(Debug, Clone)]
-pub struct DocumentData {
- pub dependencies: Option<HashMap<String, analysis::Dependency>>,
- pub version: Option<i32>,
- specifier: ModuleSpecifier,
-}
-
-impl DocumentData {
- pub fn new(
- specifier: ModuleSpecifier,
- version: i32,
- source: &str,
- maybe_import_map: Option<ImportMap>,
- ) -> Self {
- let dependencies = if let Some((dependencies, _)) =
- analysis::analyze_dependencies(
- &specifier,
- source,
- &MediaType::from(&specifier),
- &maybe_import_map,
- ) {
- Some(dependencies)
- } else {
- None
- };
- Self {
- dependencies,
- version: Some(version),
- specifier,
- }
- }
-
- pub fn update(
- &mut self,
- version: i32,
- source: &str,
- maybe_import_map: &Option<ImportMap>,
- ) {
- self.dependencies = if let Some((dependencies, _)) =
- analysis::analyze_dependencies(
- &self.specifier,
- source,
- &MediaType::from(&self.specifier),
- maybe_import_map,
- ) {
- Some(dependencies)
- } else {
- None
- };
- self.version = Some(version)
- }
-}
-
#[cfg(test)]
mod tests {
use super::*;
@@ -1211,6 +1185,7 @@ mod tests {
use lspower::LspService;
use std::fs;
use std::task::Poll;
+ use std::time::Instant;
use tower_test::mock::Spawn;
enum LspResponse {
@@ -1410,6 +1385,69 @@ mod tests {
]);
harness.run().await;
}
+
+ #[tokio::test]
+ async fn test_hover_change_mbc() {
+ let mut harness = LspTestHarness::new(vec![
+ ("initialize_request.json", LspResponse::RequestAny),
+ ("initialized_notification.json", LspResponse::None),
+ ("did_open_notification_mbc.json", LspResponse::None),
+ ("did_change_notification_mbc.json", LspResponse::None),
+ (
+ "hover_request_mbc.json",
+ LspResponse::Request(
+ 2,
+ json!({
+ "contents": [
+ {
+ "language": "typescript",
+            "value": "const b: \"😃\"",
+ },
+ "",
+ ],
+ "range": {
+ "start": {
+ "line": 2,
+ "character": 13,
+ },
+ "end": {
+ "line": 2,
+ "character": 14,
+ },
+ }
+ }),
+ ),
+ ),
+ (
+ "shutdown_request.json",
+ LspResponse::Request(3, json!(null)),
+ ),
+ ("exit_notification.json", LspResponse::None),
+ ]);
+ harness.run().await;
+ }
+
+ #[tokio::test]
+ async fn test_large_doc_change() {
+ let mut harness = LspTestHarness::new(vec![
+ ("initialize_request.json", LspResponse::RequestAny),
+ ("initialized_notification.json", LspResponse::None),
+ ("did_open_notification_large.json", LspResponse::None),
+ ("did_change_notification_large.json", LspResponse::None),
+ (
+ "shutdown_request.json",
+ LspResponse::Request(3, json!(null)),
+ ),
+ ("exit_notification.json", LspResponse::None),
+ ]);
+ let time = Instant::now();
+ harness.run().await;
+ assert!(
+ time.elapsed().as_millis() <= 10000,
+ "the execution time exceeded 10000ms"
+ );
+ }
+
#[tokio::test]
async fn test_rename() {
let mut harness = LspTestHarness::new(vec![
diff --git a/cli/lsp/memory_cache.rs b/cli/lsp/memory_cache.rs
deleted file mode 100644
index 9de6e8615..000000000
--- a/cli/lsp/memory_cache.rs
+++ /dev/null
@@ -1,121 +0,0 @@
-// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
-
-use deno_core::error::AnyError;
-use deno_core::ModuleSpecifier;
-use std::collections::HashMap;
-use std::fmt;
-
-#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
-pub struct FileId(pub u32);
-
-#[derive(Eq, PartialEq, Copy, Clone, Debug)]
-pub enum ChangeKind {
- Create,
- Modify,
- Delete,
-}
-
-pub struct ChangedFile {
- pub change_kind: ChangeKind,
- pub file_id: FileId,
-}
-
-#[derive(Default)]
-struct SpecifierInterner {
- map: HashMap<ModuleSpecifier, FileId>,
- vec: Vec<ModuleSpecifier>,
-}
-
-impl SpecifierInterner {
- pub fn get(&self, specifier: &ModuleSpecifier) -> Option<FileId> {
- self.map.get(specifier).copied()
- }
-
- pub fn intern(&mut self, specifier: ModuleSpecifier) -> FileId {
- if let Some(id) = self.get(&specifier) {
- return id;
- }
- let id = FileId(self.vec.len() as u32);
- self.map.insert(specifier.clone(), id);
- self.vec.push(specifier);
- id
- }
-
- pub fn lookup(&self, id: FileId) -> &ModuleSpecifier {
- &self.vec[id.0 as usize]
- }
-}
-
-#[derive(Default)]
-pub struct MemoryCache {
- data: Vec<Option<Vec<u8>>>,
- interner: SpecifierInterner,
- changes: Vec<ChangedFile>,
-}
-
-impl MemoryCache {
- fn alloc_file_id(&mut self, specifier: ModuleSpecifier) -> FileId {
- let file_id = self.interner.intern(specifier);
- let idx = file_id.0 as usize;
- let len = self.data.len().max(idx + 1);
- self.data.resize_with(len, || None);
- file_id
- }
-
- fn get(&self, file_id: FileId) -> &Option<Vec<u8>> {
- &self.data[file_id.0 as usize]
- }
-
- pub fn get_contents(&self, file_id: FileId) -> Result<String, AnyError> {
- String::from_utf8(self.get(file_id).as_deref().unwrap().to_vec())
- .map_err(|err| err.into())
- }
-
- fn get_mut(&mut self, file_id: FileId) -> &mut Option<Vec<u8>> {
- &mut self.data[file_id.0 as usize]
- }
-
- pub fn get_specifier(&self, file_id: FileId) -> &ModuleSpecifier {
- self.interner.lookup(file_id)
- }
-
- pub fn len(&self) -> usize {
- self.data.len()
- }
-
- pub fn lookup(&self, specifier: &ModuleSpecifier) -> Option<FileId> {
- self
- .interner
- .get(specifier)
- .filter(|&it| self.get(it).is_some())
- }
-
- pub fn set_contents(
- &mut self,
- specifier: ModuleSpecifier,
- contents: Option<Vec<u8>>,
- ) {
- let file_id = self.alloc_file_id(specifier);
- let change_kind = match (self.get(file_id), &contents) {
- (None, None) => return,
- (None, Some(_)) => ChangeKind::Create,
- (Some(_), None) => ChangeKind::Delete,
- (Some(old), Some(new)) if old == new => return,
- (Some(_), Some(_)) => ChangeKind::Modify,
- };
-
- *self.get_mut(file_id) = contents;
- self.changes.push(ChangedFile {
- file_id,
- change_kind,
- })
- }
-}
-
-impl fmt::Debug for MemoryCache {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_struct("MemoryCache")
- .field("no_files", &self.data.len())
- .finish()
- }
-}
diff --git a/cli/lsp/mod.rs b/cli/lsp/mod.rs
index aaee17c64..91880fc85 100644
--- a/cli/lsp/mod.rs
+++ b/cli/lsp/mod.rs
@@ -7,8 +7,8 @@ mod analysis;
mod capabilities;
mod config;
mod diagnostics;
+mod documents;
mod language_server;
-mod memory_cache;
mod sources;
mod text;
mod tsc;
diff --git a/cli/lsp/sources.rs b/cli/lsp/sources.rs
index 845f89af0..fac1120fb 100644
--- a/cli/lsp/sources.rs
+++ b/cli/lsp/sources.rs
@@ -1,7 +1,7 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
use super::analysis;
-use super::text;
+use super::text::LineIndex;
use crate::file_fetcher::get_source_from_bytes;
use crate::file_fetcher::map_content_type;
@@ -43,6 +43,7 @@ pub async fn cache(
#[derive(Debug, Clone, Default)]
struct Metadata {
dependencies: Option<HashMap<String, analysis::Dependency>>,
+ line_index: LineIndex,
maybe_types: Option<analysis::ResolvedDependency>,
media_type: MediaType,
source: String,
@@ -75,19 +76,26 @@ impl Sources {
false
}
- pub fn get_length(&mut self, specifier: &ModuleSpecifier) -> Option<usize> {
+ /// Provides the length of the source content, calculated in a way that should
+ /// match the behavior of JavaScript, where strings are stored effectively as
+ /// `&[u16]` and when counting "chars" we need to represent the string as a
+ /// UTF-16 string in Rust.
+ pub fn get_length_utf16(
+ &mut self,
+ specifier: &ModuleSpecifier,
+ ) -> Option<usize> {
let specifier = self.resolve_specifier(specifier)?;
let metadata = self.get_metadata(&specifier)?;
- Some(metadata.source.chars().count())
+ Some(metadata.source.encode_utf16().count())
}
pub fn get_line_index(
&mut self,
specifier: &ModuleSpecifier,
- ) -> Option<Vec<u32>> {
+ ) -> Option<LineIndex> {
let specifier = self.resolve_specifier(specifier)?;
let metadata = self.get_metadata(&specifier)?;
- Some(text::index_lines(&metadata.source))
+ Some(metadata.line_index)
}
pub fn get_media_type(
@@ -127,8 +135,10 @@ impl Sources {
} else {
None
};
+ let line_index = LineIndex::new(&source);
let metadata = Metadata {
dependencies,
+ line_index,
maybe_types,
media_type,
source,
@@ -169,8 +179,10 @@ impl Sources {
} else {
None
};
+ let line_index = LineIndex::new(&source);
let metadata = Metadata {
dependencies,
+ line_index,
maybe_types,
media_type,
source,
@@ -388,7 +400,7 @@ mod tests {
}
#[test]
- fn test_sources_get_length() {
+ fn test_sources_get_length_utf16() {
let (mut sources, _) = setup();
let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
let tests = c.join("tests");
@@ -396,7 +408,7 @@ mod tests {
&tests.join("001_hello.js").to_string_lossy(),
)
.unwrap();
- let actual = sources.get_length(&specifier);
+ let actual = sources.get_length_utf16(&specifier);
assert!(actual.is_some());
let actual = actual.unwrap();
assert_eq!(actual, 28);
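
The cli/lsp/text.rs diff below replaces the flat `Vec<u32>` line index with a `LineIndex` that records line-start offsets in both UTF-8 and UTF-16 and locates a line with a partition-point binary search. A minimal sketch of that offset-to-position lookup, using std's `slice::partition_point` and plain `u32` offsets in place of `TextSize`:

// For "a\nb\n" the line starts are [0, 2, 4] (the example the old
// index_lines doc comment used); offset 3 therefore falls on line 1, column 1.
fn position(line_starts: &[u32], offset: u32) -> (u32, u32) {
  // Index of the first line start greater than `offset`; the line containing
  // `offset` is the one just before it.
  let line = line_starts.partition_point(|&start| start <= offset) - 1;
  (line as u32, offset - line_starts[line])
}

fn main() {
  let line_starts = [0u32, 2, 4];
  assert_eq!(position(&line_starts, 3), (1, 1));
  assert_eq!(position(&line_starts, 4), (2, 0));
}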
diff --git a/cli/lsp/text.rs b/cli/lsp/text.rs
index e871cb265..1d350c12f 100644
--- a/cli/lsp/text.rs
+++ b/cli/lsp/text.rs
@@ -1,123 +1,233 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+use deno_core::error::custom_error;
+use deno_core::error::AnyError;
use deno_core::serde_json::json;
use deno_core::serde_json::Value;
use dissimilar::diff;
use dissimilar::Chunk;
+use lspower::jsonrpc;
use lspower::lsp_types;
use lspower::lsp_types::TextEdit;
+use std::collections::HashMap;
use std::ops::Bound;
-use std::ops::Range;
use std::ops::RangeBounds;
+use text_size::TextRange;
+use text_size::TextSize;
-// TODO(@kitson) in general all of these text handling routines don't handle
-// JavaScript encoding in the same way and likely cause issues when trying to
-// arbitrate between chars and Unicode graphemes. There be dragons.
-
-/// Generate a character position for the start of each line. For example:
-///
-/// ```rust
-/// let actual = index_lines("a\nb\n");
-/// assert_eq!(actual, vec![0, 2, 4]);
-/// ```
-///
-pub fn index_lines(text: &str) -> Vec<u32> {
- let mut indexes = vec![0_u32];
- for (i, c) in text.chars().enumerate() {
- if c == '\n' {
- indexes.push((i + 1) as u32);
+fn partition_point<T, P>(slice: &[T], mut predicate: P) -> usize
+where
+ P: FnMut(&T) -> bool,
+{
+ let mut left = 0;
+ let mut right = slice.len();
+
+ while left != right {
+ let mid = left + (right - left) / 2;
+ // SAFETY:
+ // When left < right, left <= mid < right.
+ // Therefore left always increases and right always decreases,
+ // and either of them is selected.
+ // In both cases left <= right is satisfied.
+ // Therefore if left < right in a step,
+ // left <= right is satisfied in the next step.
+ // Therefore as long as left != right, 0 <= left < right <= len is satisfied
+ // and if this case 0 <= mid < len is satisfied too.
+ let value = unsafe { slice.get_unchecked(mid) };
+ if predicate(value) {
+ left = mid + 1;
+ } else {
+ right = mid;
}
}
- indexes
+
+ left
}
-enum IndexValid {
- All,
- UpTo(u32),
+#[derive(Debug, Clone, Eq, PartialEq, Hash)]
+pub struct Utf16Char {
+ pub start: TextSize,
+ pub end: TextSize,
}
-impl IndexValid {
- fn covers(&self, line: u32) -> bool {
- match *self {
- IndexValid::UpTo(to) => to > line,
- IndexValid::All => true,
+impl Utf16Char {
+ fn len(&self) -> TextSize {
+ self.end - self.start
+ }
+
+ fn len_utf16(&self) -> usize {
+ if self.len() == TextSize::from(4) {
+ 2
+ } else {
+ 1
}
}
}
-fn to_range(line_index: &[u32], range: lsp_types::Range) -> Range<usize> {
- let start =
- (line_index[range.start.line as usize] + range.start.character) as usize;
- let end =
- (line_index[range.end.line as usize] + range.end.character) as usize;
- Range { start, end }
+#[derive(Debug, Clone, Default, Eq, PartialEq)]
+pub struct LineIndex {
+ utf8_offsets: Vec<TextSize>,
+ utf16_lines: HashMap<u32, Vec<Utf16Char>>,
+ utf16_offsets: Vec<TextSize>,
}
-pub fn to_position(line_index: &[u32], char_pos: u32) -> lsp_types::Position {
- let mut line = 0_usize;
- let mut line_start = 0_u32;
- for (pos, v) in line_index.iter().enumerate() {
- if char_pos < *v {
- break;
+impl LineIndex {
+ pub fn new(text: &str) -> LineIndex {
+ let mut utf16_lines = HashMap::new();
+ let mut utf16_chars = Vec::new();
+
+ let mut utf8_offsets = vec![0.into()];
+ let mut utf16_offsets = vec![0.into()];
+ let mut curr_row = 0.into();
+ let mut curr_col = 0.into();
+ let mut curr_offset_u16 = 0.into();
+ let mut line = 0;
+ for c in text.chars() {
+ let c_len = TextSize::of(c);
+ curr_row += c_len;
+ curr_offset_u16 += TextSize::from(c.len_utf16() as u32);
+ if c == '\n' {
+ utf8_offsets.push(curr_row);
+ utf16_offsets.push(curr_offset_u16);
+
+ if !utf16_chars.is_empty() {
+ utf16_lines.insert(line, utf16_chars);
+ utf16_chars = Vec::new();
+ }
+
+ curr_col = 0.into();
+ line += 1;
+ continue;
+ }
+
+ if !c.is_ascii() {
+ utf16_chars.push(Utf16Char {
+ start: curr_col,
+ end: curr_col + c_len,
+ });
+ }
+ curr_col += c_len;
+ }
+
+ if !utf16_chars.is_empty() {
+ utf16_lines.insert(line, utf16_chars);
+ }
+
+ LineIndex {
+ utf8_offsets,
+ utf16_lines,
+ utf16_offsets,
}
- line_start = *v;
- line = pos;
}
- lsp_types::Position {
- line: line as u32,
- character: char_pos - line_start,
+ /// Convert a u16 based range to a u8 TextRange.
+ pub fn get_text_range(
+ &self,
+ range: lsp_types::Range,
+ ) -> Result<TextRange, AnyError> {
+ let start = self.offset(range.start)?;
+ let end = self.offset(range.end)?;
+ Ok(TextRange::new(start, end))
+ }
+
+ /// Return a u8 offset based on a u16 position.
+ pub fn offset(
+ &self,
+ position: lsp_types::Position,
+ ) -> Result<TextSize, AnyError> {
+ let col = self.utf16_to_utf8_col(position.line, position.character);
+ if let Some(line_offset) = self.utf8_offsets.get(position.line as usize) {
+ Ok(line_offset + col)
+ } else {
+ Err(custom_error("OutOfRange", "The position is out of range."))
+ }
}
-}
-pub fn to_char_pos(line_index: &[u32], position: lsp_types::Position) -> u32 {
- if let Some(line_start) = line_index.get(position.line as usize) {
- line_start + position.character
- } else {
- 0_u32
+ /// Convert an lsp Position into a tsc/TypeScript "position", which is really
+ /// an u16 byte offset from the start of the string represented as an u32.
+ pub fn offset_tsc(
+ &self,
+ position: lsp_types::Position,
+ ) -> jsonrpc::Result<u32> {
+ self
+ .offset_utf16(position)
+ .map(|ts| ts.into())
+ .map_err(|err| jsonrpc::Error::invalid_params(err.to_string()))
}
-}
-/// Apply a vector of document changes to the supplied string.
-pub fn apply_content_changes(
- content: &mut String,
- content_changes: Vec<lsp_types::TextDocumentContentChangeEvent>,
-) {
- let mut line_index = index_lines(&content);
- let mut index_valid = IndexValid::All;
- for change in content_changes {
- if let Some(range) = change.range {
- if !index_valid.covers(range.start.line) {
- line_index = index_lines(&content);
- }
- let range = to_range(&line_index, range);
- content.replace_range(range, &change.text);
+ fn offset_utf16(
+ &self,
+ position: lsp_types::Position,
+ ) -> Result<TextSize, AnyError> {
+ if let Some(line_offset) = self.utf16_offsets.get(position.line as usize) {
+ Ok(line_offset + TextSize::from(position.character))
} else {
- *content = change.text;
- index_valid = IndexValid::UpTo(0);
+ Err(custom_error("OutOfRange", "The position is out of range."))
}
}
+
+  /// Returns a u16 position based on a u16 offset (TypeScript reports its
+  /// offsets as u16).
+ pub fn position_tsc(&self, offset: TextSize) -> lsp_types::Position {
+ let line = partition_point(&self.utf16_offsets, |&it| it <= offset) - 1;
+ let line_start_offset = self.utf16_offsets[line];
+ let col = offset - line_start_offset;
+
+ lsp_types::Position {
+ line: line as u32,
+ character: col.into(),
+ }
+ }
+
+ /// Returns a u16 position based on a u8 offset.
+ pub fn position_utf16(&self, offset: TextSize) -> lsp_types::Position {
+ let line = partition_point(&self.utf8_offsets, |&it| it <= offset) - 1;
+ let line_start_offset = self.utf8_offsets[line];
+ let col = offset - line_start_offset;
+
+ lsp_types::Position {
+ line: line as u32,
+ character: col.into(),
+ }
+ }
+
+ fn utf16_to_utf8_col(&self, line: u32, mut col: u32) -> TextSize {
+ if let Some(utf16_chars) = self.utf16_lines.get(&line) {
+ for c in utf16_chars {
+ if col > u32::from(c.start) {
+ col += u32::from(c.len()) - c.len_utf16() as u32;
+ } else {
+ break;
+ }
+ }
+ }
+
+ col.into()
+ }
}
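
The `LineIndex` above is the core of the mbc fix: the LSP speaks in UTF-16 code units while Rust strings are UTF-8, so every incoming position has to be mapped to a byte offset before it can touch the text. A minimal sketch of that mapping, using only the `LineIndex`, `TextSize`, `lsp_types`, and `AnyError` items already in scope in this module; the `demo` function and its sample text are illustrative, not part of the patch:

fn demo() -> Result<(), AnyError> {
  let index = LineIndex::new("hello πŸ¦•!");
  // "πŸ¦•" is 2 UTF-16 code units but 4 UTF-8 bytes, so the LSP position just
  // past the emoji (character 8) maps to byte offset 10 rather than 8.
  let position = lsp_types::Position { line: 0, character: 8 };
  assert_eq!(index.offset(position)?, TextSize::from(10));
  Ok(())
}
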
/// Compare two strings and return a vector of text edit records which are
/// supported by the Language Server Protocol.
pub fn get_edits(a: &str, b: &str) -> Vec<TextEdit> {
+ if a == b {
+ return vec![];
+ }
let chunks = diff(a, b);
let mut text_edits = Vec::<TextEdit>::new();
- let line_index = index_lines(a);
+ let line_index = LineIndex::new(a);
let mut iter = chunks.iter().peekable();
- let mut a_pos = 0_u32;
+ let mut a_pos = TextSize::from(0);
loop {
let chunk = iter.next();
match chunk {
None => break,
Some(Chunk::Equal(e)) => {
- a_pos += e.chars().count() as u32;
+ a_pos += TextSize::from(e.encode_utf16().count() as u32);
}
Some(Chunk::Delete(d)) => {
- let start = to_position(&line_index, a_pos);
- a_pos += d.chars().count() as u32;
- let end = to_position(&line_index, a_pos);
+ let start = line_index.position_utf16(a_pos);
+ a_pos += TextSize::from(d.encode_utf16().count() as u32);
+ let end = line_index.position_utf16(a_pos);
let range = lsp_types::Range { start, end };
match iter.peek() {
Some(Chunk::Insert(i)) => {
@@ -134,7 +244,7 @@ pub fn get_edits(a: &str, b: &str) -> Vec<TextEdit> {
}
}
Some(Chunk::Insert(i)) => {
- let pos = to_position(&line_index, a_pos);
+ let pos = line_index.position_utf16(a_pos);
let range = lsp_types::Range {
start: pos,
end: pos,
@@ -153,6 +263,9 @@ pub fn get_edits(a: &str, b: &str) -> Vec<TextEdit> {
/// Convert a difference between two strings into a change range used by the
/// TypeScript Language Service.
pub fn get_range_change(a: &str, b: &str) -> Value {
+ if a == b {
+ return json!(null);
+ }
let chunks = diff(a, b);
let mut iter = chunks.iter().peekable();
let mut started = false;
@@ -162,12 +275,12 @@ pub fn get_range_change(a: &str, b: &str) -> Value {
let mut equal = 0;
let mut a_pos = 0;
loop {
- let chunk = iter.next();
- match chunk {
+ let diff = iter.next();
+ match diff {
None => break,
Some(Chunk::Equal(e)) => {
- a_pos += e.chars().count();
- equal += e.chars().count();
+ a_pos += e.encode_utf16().count();
+ equal += e.encode_utf16().count();
}
Some(Chunk::Delete(d)) => {
if !started {
@@ -175,7 +288,7 @@ pub fn get_range_change(a: &str, b: &str) -> Value {
started = true;
equal = 0;
}
- a_pos += d.chars().count();
+ a_pos += d.encode_utf16().count();
if started {
end = a_pos;
new_length += equal;
@@ -191,7 +304,7 @@ pub fn get_range_change(a: &str, b: &str) -> Value {
} else {
end += equal;
}
- new_length += i.chars().count() + equal;
+ new_length += i.encode_utf16().count() + equal;
equal = 0;
}
}
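
The switch from `chars()` to `encode_utf16()` above is what keeps these change ranges aligned with tsc, which measures document length in UTF-16 code units rather than Unicode scalar values. A small illustrative sketch of the difference (the function name is not from the patch):

fn utf16_vs_char_counts() {
  let s = "πŸ¦•";
  assert_eq!(s.chars().count(), 1); // one Unicode scalar value
  assert_eq!(s.encode_utf16().count(), 2); // two UTF-16 code units (a surrogate pair)
  assert_eq!(s.len(), 4); // four UTF-8 bytes
}
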
@@ -215,7 +328,7 @@ pub fn slice(s: &str, range: impl RangeBounds<usize>) -> &str {
let len = match range.end_bound() {
Bound::Included(bound) => *bound + 1,
Bound::Excluded(bound) => *bound,
- Bound::Unbounded => s.len(),
+ Bound::Unbounded => s.encode_utf16().count(),
} - start;
substring(s, start, start + len)
}
@@ -231,7 +344,7 @@ pub fn substring(s: &str, start: usize, end: usize) -> &str {
break;
}
if let Some(c) = it.next() {
- char_pos += 1;
+ char_pos += c.len_utf16();
byte_start += c.len_utf8();
} else {
break;
@@ -244,7 +357,7 @@ pub fn substring(s: &str, start: usize, end: usize) -> &str {
break;
}
if let Some(c) = it.next() {
- char_pos += 1;
+ char_pos += c.len_utf16();
byte_end += c.len_utf8();
} else {
break;
@@ -258,24 +371,194 @@ mod tests {
use super::*;
#[test]
- fn test_apply_content_changes() {
- let mut content = "a\nb\nc\nd".to_string();
- let content_changes = vec![lsp_types::TextDocumentContentChangeEvent {
- range: Some(lsp_types::Range {
- start: lsp_types::Position {
- line: 1,
- character: 0,
- },
- end: lsp_types::Position {
- line: 1,
- character: 1,
- },
- }),
- range_length: Some(1),
- text: "e".to_string(),
- }];
- apply_content_changes(&mut content, content_changes);
- assert_eq!(content, "a\ne\nc\nd");
+ fn test_line_index() {
+ let text = "hello\nworld";
+ let index = LineIndex::new(text);
+ assert_eq!(
+ index.position_utf16(0.into()),
+ lsp_types::Position {
+ line: 0,
+ character: 0
+ }
+ );
+ assert_eq!(
+ index.position_utf16(1.into()),
+ lsp_types::Position {
+ line: 0,
+ character: 1
+ }
+ );
+ assert_eq!(
+ index.position_utf16(5.into()),
+ lsp_types::Position {
+ line: 0,
+ character: 5
+ }
+ );
+ assert_eq!(
+ index.position_utf16(6.into()),
+ lsp_types::Position {
+ line: 1,
+ character: 0
+ }
+ );
+ assert_eq!(
+ index.position_utf16(7.into()),
+ lsp_types::Position {
+ line: 1,
+ character: 1
+ }
+ );
+ assert_eq!(
+ index.position_utf16(8.into()),
+ lsp_types::Position {
+ line: 1,
+ character: 2
+ }
+ );
+ assert_eq!(
+ index.position_utf16(10.into()),
+ lsp_types::Position {
+ line: 1,
+ character: 4
+ }
+ );
+ assert_eq!(
+ index.position_utf16(11.into()),
+ lsp_types::Position {
+ line: 1,
+ character: 5
+ }
+ );
+ assert_eq!(
+ index.position_utf16(12.into()),
+ lsp_types::Position {
+ line: 1,
+ character: 6
+ }
+ );
+
+ let text = "\nhello\nworld";
+ let index = LineIndex::new(text);
+ assert_eq!(
+ index.position_utf16(0.into()),
+ lsp_types::Position {
+ line: 0,
+ character: 0
+ }
+ );
+ assert_eq!(
+ index.position_utf16(1.into()),
+ lsp_types::Position {
+ line: 1,
+ character: 0
+ }
+ );
+ assert_eq!(
+ index.position_utf16(2.into()),
+ lsp_types::Position {
+ line: 1,
+ character: 1
+ }
+ );
+ assert_eq!(
+ index.position_utf16(6.into()),
+ lsp_types::Position {
+ line: 1,
+ character: 5
+ }
+ );
+ assert_eq!(
+ index.position_utf16(7.into()),
+ lsp_types::Position {
+ line: 2,
+ character: 0
+ }
+ );
+ }
+
+ #[test]
+ fn test_char_len() {
+    assert_eq!('パ'.len_utf8(), 3);
+    assert_eq!('パ'.len_utf16(), 1);
+    assert_eq!('编'.len_utf8(), 3);
+    assert_eq!('编'.len_utf16(), 1);
+ assert_eq!('πŸ¦•'.len_utf8(), 4);
+ assert_eq!('πŸ¦•'.len_utf16(), 2);
+ }
+
+ #[test]
+ fn test_empty_index() {
+ let col_index = LineIndex::new(
+ "
+const C: char = 'x';
+",
+ );
+ assert_eq!(col_index.utf16_lines.len(), 0);
+ }
+
+ #[test]
+ fn test_single_char() {
+ let col_index = LineIndex::new(
+ "
+const C: char = 'パ';
+",
+ );
+
+ assert_eq!(col_index.utf16_lines.len(), 1);
+ assert_eq!(col_index.utf16_lines[&1].len(), 1);
+ assert_eq!(
+ col_index.utf16_lines[&1][0],
+ Utf16Char {
+ start: 17.into(),
+ end: 20.into()
+ }
+ );
+
+ // UTF-16 to UTF-8, no changes
+ assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));
+
+ // UTF-16 to UTF-8
+ assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21));
+
+ let col_index = LineIndex::new("a𐐏b");
+ assert_eq!(col_index.utf16_to_utf8_col(0, 3), TextSize::from(5));
+ }
+
+ #[test]
+ fn test_string() {
+ let col_index = LineIndex::new(
+ "
+const C: char = \"パ パ\";
+",
+ );
+
+ assert_eq!(col_index.utf16_lines.len(), 1);
+ assert_eq!(col_index.utf16_lines[&1].len(), 2);
+ assert_eq!(
+ col_index.utf16_lines[&1][0],
+ Utf16Char {
+ start: 17.into(),
+ end: 20.into()
+ }
+ );
+ assert_eq!(
+ col_index.utf16_lines[&1][1],
+ Utf16Char {
+ start: 21.into(),
+ end: 24.into()
+ }
+ );
+
+ // UTF-16 to UTF-8
+ assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));
+
+    // パ UTF-8: 0xE3 0x83 0xA1, UTF-16: 0x30E1
+    assert_eq!(col_index.utf16_to_utf8_col(1, 17), TextSize::from(17)); // first パ at 17..20
+    assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextSize::from(20)); // space
+    assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21)); // second パ at 21..24
+
+ assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextSize::from(15));
}
#[test]
@@ -319,6 +602,11 @@ mod tests {
#[test]
fn test_get_range_change() {
let a = "abcdefg";
+ let b = "abcdefg";
+ let actual = get_range_change(a, b);
+ assert_eq!(actual, json!(null));
+
+ let a = "abcdefg";
let b = "abedcfg";
let actual = get_range_change(a, b);
assert_eq!(
@@ -401,108 +689,56 @@ mod tests {
"newLength": 3
})
);
- }
- #[test]
- fn test_index_lines() {
- let actual = index_lines("a\nb\r\nc");
- assert_eq!(actual, vec![0, 2, 5]);
- }
-
- #[test]
- fn test_to_position() {
- let line_index = index_lines("a\nb\r\nc\n");
- assert_eq!(
- to_position(&line_index, 6),
- lsp_types::Position {
- line: 2,
- character: 1,
- }
- );
- assert_eq!(
- to_position(&line_index, 0),
- lsp_types::Position {
- line: 0,
- character: 0,
- }
- );
+ let a = "hello πŸ¦•!";
+ let b = "hello deno!";
+ let actual = get_range_change(a, b);
assert_eq!(
- to_position(&line_index, 3),
- lsp_types::Position {
- line: 1,
- character: 1,
- }
+ actual,
+ json!({
+ "span": {
+ "start": 6,
+ "length": 2,
+ },
+ "newLength": 4
+ })
);
- }
- #[test]
- fn test_to_position_mbc() {
- let line_index = index_lines("yΜ†\nπŸ˜±πŸ¦•\n🀯\n");
- assert_eq!(
- to_position(&line_index, 0),
- lsp_types::Position {
- line: 0,
- character: 0,
- }
- );
- assert_eq!(
- to_position(&line_index, 2),
- lsp_types::Position {
- line: 0,
- character: 2,
- }
- );
- assert_eq!(
- to_position(&line_index, 3),
- lsp_types::Position {
- line: 1,
- character: 0,
- }
- );
- assert_eq!(
- to_position(&line_index, 4),
- lsp_types::Position {
- line: 1,
- character: 1,
- }
- );
- assert_eq!(
- to_position(&line_index, 5),
- lsp_types::Position {
- line: 1,
- character: 2,
- }
- );
- assert_eq!(
- to_position(&line_index, 6),
- lsp_types::Position {
- line: 2,
- character: 0,
- }
- );
- assert_eq!(
- to_position(&line_index, 7),
- lsp_types::Position {
- line: 2,
- character: 1,
- }
- );
+ let a = "hello deno!";
+ let b = "hello denoπŸ¦•!";
+ let actual = get_range_change(a, b);
assert_eq!(
- to_position(&line_index, 8),
- lsp_types::Position {
- line: 3,
- character: 0,
- }
+ actual,
+ json!({
+ "span": {
+ "start": 10,
+ "length": 0,
+ },
+ "newLength": 2
+ })
);
+
+ // TODO(@kitsonk): https://github.com/dtolnay/dissimilar/issues/5
+ // let a = r#" πŸ¦•πŸ‡ΊπŸ‡ΈπŸ‘ "#;
+ // let b = r#" πŸ‡ΊπŸ‡ΈπŸ‘ "#;
+ // let actual = get_range_change(a, b);
+ // assert_eq!(
+ // actual,
+ // json!({
+ // "span": {
+ // "start": 1,
+ // "length": 2,
+ // },
+ // "newLength": 0
+ // })
+ // );
}
#[test]
fn test_substring() {
assert_eq!(substring("Deno", 1, 3), "en");
assert_eq!(substring("y̆y̆", 2, 4), "y̆");
- // this doesn't work like JavaScript, as πŸ¦• is treated as a single char in
- // Rust, but as two chars in JavaScript.
- // assert_eq!(substring("πŸ¦•πŸ¦•", 2, 4), "πŸ¦•");
+ assert_eq!(substring("πŸ¦•πŸ¦•", 2, 4), "πŸ¦•");
}
#[test]
@@ -511,5 +747,6 @@ mod tests {
assert_eq!(slice("Deno", 1..=3), "eno");
assert_eq!(slice("Deno Land", 1..), "eno Land");
assert_eq!(slice("Deno", ..3), "Den");
+ assert_eq!(slice("Hello πŸ¦•", 6..8), "πŸ¦•");
}
}
diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs
index a09ac9588..575476e40 100644
--- a/cli/lsp/tsc.rs
+++ b/cli/lsp/tsc.rs
@@ -3,6 +3,7 @@
use super::analysis::ResolvedDependency;
use super::language_server::StateSnapshot;
use super::text;
+use super::text::LineIndex;
use super::utils;
use crate::media_type::MediaType;
@@ -32,6 +33,7 @@ use regex::Regex;
use std::borrow::Cow;
use std::collections::HashMap;
use std::thread;
+use text_size::TextSize;
use tokio::sync::mpsc;
use tokio::sync::oneshot;
@@ -80,6 +82,14 @@ impl TsServer {
}
}
+/// An lsp representation of an asset in memory that has been retrieved either
+/// from the static assets built into Rust or from the static assets built into
+/// tsc.
+#[derive(Debug, Clone)]
+pub struct AssetDocument {
+ pub text: String,
+ pub line_index: LineIndex,
+}
+
/// Optionally returns an internal asset, first checking for any static assets
/// in Rust, then checking any previously retrieved static assets from the
/// isolate, and then finally, the tsc isolate itself.
@@ -87,28 +97,41 @@ pub async fn get_asset(
specifier: &ModuleSpecifier,
ts_server: &TsServer,
state_snapshot: &StateSnapshot,
-) -> Result<Option<String>, AnyError> {
+) -> Result<Option<AssetDocument>, AnyError> {
let specifier_str = specifier.to_string().replace("asset:///", "");
- if let Some(asset_text) = tsc::get_asset(&specifier_str) {
- Ok(Some(asset_text.to_string()))
+ if let Some(text) = tsc::get_asset(&specifier_str) {
+ let maybe_asset = Some(AssetDocument {
+ line_index: LineIndex::new(text),
+ text: text.to_string(),
+ });
+ state_snapshot
+ .assets
+ .lock()
+ .unwrap()
+ .insert(specifier.clone(), maybe_asset.clone());
+ Ok(maybe_asset)
} else {
- {
- let assets = state_snapshot.assets.lock().unwrap();
- if let Some(asset) = assets.get(specifier) {
- return Ok(asset.clone());
- }
- }
- let asset: Option<String> = serde_json::from_value(
- ts_server
- .request(
- state_snapshot.clone(),
- RequestMethod::GetAsset(specifier.clone()),
- )
- .await?,
- )?;
- let mut assets = state_snapshot.assets.lock().unwrap();
- assets.insert(specifier.clone(), asset.clone());
- Ok(asset)
+ let res = ts_server
+ .request(
+ state_snapshot.clone(),
+ RequestMethod::GetAsset(specifier.clone()),
+ )
+ .await?;
+ let maybe_text: Option<String> = serde_json::from_value(res)?;
+ let maybe_asset = if let Some(text) = maybe_text {
+ Some(AssetDocument {
+ line_index: LineIndex::new(&text),
+ text,
+ })
+ } else {
+ None
+ };
+ state_snapshot
+ .assets
+ .lock()
+ .unwrap()
+ .insert(specifier.clone(), maybe_asset.clone());
+ Ok(maybe_asset)
}
}
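
Both branches above end up building the same pair: the asset text plus a `LineIndex` computed once and cached in the snapshot, so later requests against the asset do not have to re-index it. A minimal sketch of that pairing, assuming only the `AssetDocument` and `LineIndex` types from this patch (the helper name is illustrative):

fn to_asset_document(text: &str) -> AssetDocument {
  AssetDocument {
    // index the text once up front so later position conversions stay cheap
    line_index: LineIndex::new(text),
    text: text.to_string(),
  }
}
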
@@ -342,10 +365,10 @@ pub struct TextSpan {
}
impl TextSpan {
- pub fn to_range(&self, line_index: &[u32]) -> lsp_types::Range {
+ pub fn to_range(&self, line_index: &LineIndex) -> lsp_types::Range {
lsp_types::Range {
- start: text::to_position(line_index, self.start),
- end: text::to_position(line_index, self.start + self.length),
+ start: line_index.position_tsc(self.start.into()),
+ end: line_index.position_tsc(TextSize::from(self.start + self.length)),
}
}
}
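
`to_range` above only needs `position_tsc` because tsc spans and LSP positions are both UTF-16 based; the line index just splits the flat offset into a line and character. A small sketch under that assumption, with an illustrative two-line text that is not from the patch:

fn demo_span_conversion() {
  let line_index = LineIndex::new("const a = 1;\nconst b = `πŸ‘`;\n");
  // UTF-16 offset 26 falls on line 1 at character 13, just past the emoji's
  // surrogate pair (line 0 is 13 code units including the newline).
  assert_eq!(
    line_index.position_tsc(TextSize::from(26)),
    lsp_types::Position { line: 1, character: 13 }
  );
}
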
@@ -376,7 +399,7 @@ pub struct QuickInfo {
}
impl QuickInfo {
- pub fn to_hover(&self, line_index: &[u32]) -> lsp_types::Hover {
+ pub fn to_hover(&self, line_index: &LineIndex) -> lsp_types::Hover {
let mut contents = Vec::<lsp_types::MarkedString>::new();
if let Some(display_string) =
display_parts_to_string(self.display_parts.clone())
@@ -425,12 +448,12 @@ pub struct DocumentSpan {
impl DocumentSpan {
pub async fn to_link<F, Fut>(
&self,
- line_index: &[u32],
+ line_index: &LineIndex,
index_provider: F,
) -> Option<lsp_types::LocationLink>
where
F: Fn(ModuleSpecifier) -> Fut,
- Fut: Future<Output = Result<Vec<u32>, AnyError>>,
+ Fut: Future<Output = Result<LineIndex, AnyError>>,
{
let target_specifier =
ModuleSpecifier::resolve_url(&self.file_name).unwrap();
@@ -486,15 +509,16 @@ pub struct RenameLocations {
}
impl RenameLocations {
- pub async fn into_workspace_edit<F, Fut>(
+ pub async fn into_workspace_edit<F, Fut, V>(
self,
- snapshot: StateSnapshot,
- index_provider: F,
new_name: &str,
+ index_provider: F,
+ version_provider: V,
) -> Result<lsp_types::WorkspaceEdit, AnyError>
where
F: Fn(ModuleSpecifier) -> Fut,
- Fut: Future<Output = Result<Vec<u32>, AnyError>>,
+ Fut: Future<Output = Result<LineIndex, AnyError>>,
+ V: Fn(ModuleSpecifier) -> Option<i32>,
{
let mut text_document_edit_map: HashMap<Url, lsp_types::TextDocumentEdit> =
HashMap::new();
@@ -510,10 +534,7 @@ impl RenameLocations {
lsp_types::TextDocumentEdit {
text_document: lsp_types::OptionalVersionedTextDocumentIdentifier {
uri: uri.clone(),
- version: snapshot
- .doc_data
- .get(&specifier)
- .map_or_else(|| None, |data| data.version),
+ version: version_provider(specifier.clone()),
},
edits: Vec::<
lsp_types::OneOf<
@@ -592,12 +613,12 @@ pub struct DefinitionInfoAndBoundSpan {
impl DefinitionInfoAndBoundSpan {
pub async fn to_definition<F, Fut>(
&self,
- line_index: &[u32],
+ line_index: &LineIndex,
index_provider: F,
) -> Option<lsp_types::GotoDefinitionResponse>
where
F: Fn(ModuleSpecifier) -> Fut + Clone,
- Fut: Future<Output = Result<Vec<u32>, AnyError>>,
+ Fut: Future<Output = Result<LineIndex, AnyError>>,
{
if let Some(definitions) = &self.definitions {
let mut location_links = Vec::<lsp_types::LocationLink>::new();
@@ -627,7 +648,7 @@ pub struct DocumentHighlights {
impl DocumentHighlights {
pub fn to_highlight(
&self,
- line_index: &[u32],
+ line_index: &LineIndex,
) -> Vec<lsp_types::DocumentHighlight> {
self
.highlight_spans
@@ -656,7 +677,7 @@ pub struct ReferenceEntry {
}
impl ReferenceEntry {
- pub fn to_location(&self, line_index: &[u32]) -> lsp_types::Location {
+ pub fn to_location(&self, line_index: &LineIndex) -> lsp_types::Location {
let uri =
utils::normalize_file_name(&self.document_span.file_name).unwrap();
lsp_types::Location {
@@ -676,7 +697,7 @@ pub struct CompletionInfo {
impl CompletionInfo {
pub fn into_completion_response(
self,
- line_index: &[u32],
+ line_index: &LineIndex,
) -> lsp_types::CompletionResponse {
let items = self
.entries
@@ -704,7 +725,7 @@ pub struct CompletionEntry {
impl CompletionEntry {
pub fn into_completion_item(
self,
- line_index: &[u32],
+ line_index: &LineIndex,
) -> lsp_types::CompletionItem {
let mut item = lsp_types::CompletionItem {
label: self.name,
@@ -801,11 +822,13 @@ fn cache_snapshot(
.contains_key(&(specifier.clone().into(), version.clone().into()))
{
let s = ModuleSpecifier::resolve_url(&specifier)?;
- let content = {
- let file_cache = state.state_snapshot.file_cache.lock().unwrap();
- let file_id = file_cache.lookup(&s).unwrap();
- file_cache.get_contents(file_id)?
- };
+ let content = state
+ .state_snapshot
+ .documents
+ .lock()
+ .unwrap()
+ .content(&s)?
+ .unwrap();
state
.snapshots
.insert((specifier.into(), version.into()), content);
@@ -873,7 +896,7 @@ fn get_change_range(state: &mut State, args: Value) -> Result<Value, AnyError> {
"start": 0,
"length": v.old_length,
},
- "newLength": current.chars().count(),
+ "newLength": current.encode_utf16().count(),
}))
}
} else {
@@ -890,16 +913,22 @@ fn get_change_range(state: &mut State, args: Value) -> Result<Value, AnyError> {
fn get_length(state: &mut State, args: Value) -> Result<Value, AnyError> {
let v: SourceSnapshotArgs = serde_json::from_value(args)?;
let specifier = ModuleSpecifier::resolve_url(&v.specifier)?;
- if state.state_snapshot.doc_data.contains_key(&specifier) {
+ if state
+ .state_snapshot
+ .documents
+ .lock()
+ .unwrap()
+ .contains(&specifier)
+ {
cache_snapshot(state, v.specifier.clone(), v.version.clone())?;
let content = state
.snapshots
.get(&(v.specifier.into(), v.version.into()))
.unwrap();
- Ok(json!(content.chars().count()))
+ Ok(json!(content.encode_utf16().count()))
} else {
let mut sources = state.state_snapshot.sources.lock().unwrap();
- Ok(json!(sources.get_length(&specifier).unwrap()))
+ Ok(json!(sources.get_length_utf16(&specifier).unwrap()))
}
}
@@ -915,7 +944,13 @@ struct GetTextArgs {
fn get_text(state: &mut State, args: Value) -> Result<Value, AnyError> {
let v: GetTextArgs = serde_json::from_value(args)?;
let specifier = ModuleSpecifier::resolve_url(&v.specifier)?;
- let content = if state.state_snapshot.doc_data.contains_key(&specifier) {
+ let content = if state
+ .state_snapshot
+ .documents
+ .lock()
+ .unwrap()
+ .contains(&specifier)
+ {
cache_snapshot(state, v.specifier.clone(), v.version.clone())?;
state
.snapshots
@@ -939,8 +974,9 @@ fn resolve(state: &mut State, args: Value) -> Result<Value, AnyError> {
return Err(custom_error("Deadlock", "deadlock locking sources"));
};
- if let Some(doc_data) = state.state_snapshot.doc_data.get(&referrer) {
- if let Some(dependencies) = &doc_data.dependencies {
+ let documents = state.state_snapshot.documents.lock().unwrap();
+ if documents.contains(&referrer) {
+ if let Some(dependencies) = documents.dependencies(&referrer) {
for specifier in &v.specifiers {
if specifier.starts_with("asset:///") {
resolved.push(Some((
@@ -959,10 +995,7 @@ fn resolve(state: &mut State, args: Value) -> Result<Value, AnyError> {
if let ResolvedDependency::Resolved(resolved_specifier) =
resolved_import
{
- if state
- .state_snapshot
- .doc_data
- .contains_key(&resolved_specifier)
+ if documents.contains(&resolved_specifier)
|| sources.contains(&resolved_specifier)
{
let media_type = if let Some(media_type) =
@@ -1001,7 +1034,10 @@ fn resolve(state: &mut State, args: Value) -> Result<Value, AnyError> {
} else {
return Err(custom_error(
"NotFound",
- "the referring specifier is unexpectedly missing",
+ format!(
+ "the referring ({}) specifier is unexpectedly missing",
+ referrer
+ ),
));
}
@@ -1014,8 +1050,8 @@ fn respond(state: &mut State, args: Value) -> Result<Value, AnyError> {
}
fn script_names(state: &mut State, _args: Value) -> Result<Value, AnyError> {
- let script_names: Vec<&ModuleSpecifier> =
- state.state_snapshot.doc_data.keys().collect();
+ let documents = state.state_snapshot.documents.lock().unwrap();
+ let script_names = documents.open_specifiers();
Ok(json!(script_names))
}
@@ -1028,11 +1064,14 @@ struct ScriptVersionArgs {
fn script_version(state: &mut State, args: Value) -> Result<Value, AnyError> {
let v: ScriptVersionArgs = serde_json::from_value(args)?;
let specifier = ModuleSpecifier::resolve_url(&v.specifier)?;
- let maybe_doc_data = state.state_snapshot.doc_data.get(&specifier);
- if let Some(doc_data) = maybe_doc_data {
- if let Some(version) = doc_data.version {
- return Ok(json!(version.to_string()));
- }
+ if let Some(version) = state
+ .state_snapshot
+ .documents
+ .lock()
+ .unwrap()
+ .version(&specifier)
+ {
+ return Ok(json!(version.to_string()));
} else {
let mut sources = state.state_snapshot.sources.lock().unwrap();
if let Some(version) = sources.get_script_version(&specifier) {
@@ -1153,13 +1192,14 @@ pub struct UserPreferences {
}
/// Methods that are supported by the Language Service in the compiler isolate.
+#[derive(Debug)]
pub enum RequestMethod {
/// Configure the compilation settings for the server.
Configure(TsConfig),
  /// Retrieve the text of an asset that exists in memory in the isolate.
GetAsset(ModuleSpecifier),
/// Return diagnostics for given file.
- GetDiagnostics(ModuleSpecifier),
+ GetDiagnostics(Vec<ModuleSpecifier>),
/// Return quick info at position (hover information).
GetQuickInfo((ModuleSpecifier, u32)),
/// Return document highlights at position.
@@ -1189,10 +1229,10 @@ impl RequestMethod {
"method": "getAsset",
"specifier": specifier,
}),
- RequestMethod::GetDiagnostics(specifier) => json!({
+ RequestMethod::GetDiagnostics(specifiers) => json!({
"id": id,
"method": "getDiagnostics",
- "specifier": specifier,
+ "specifiers": specifiers,
}),
RequestMethod::GetQuickInfo((specifier, position)) => json!({
"id": id,
@@ -1294,30 +1334,21 @@ pub fn request(
#[cfg(test)]
mod tests {
- use super::super::memory_cache::MemoryCache;
use super::*;
- use crate::lsp::language_server::DocumentData;
- use std::collections::HashMap;
+ use crate::lsp::documents::DocumentCache;
use std::sync::Arc;
use std::sync::Mutex;
fn mock_state_snapshot(sources: Vec<(&str, &str, i32)>) -> StateSnapshot {
- let mut doc_data = HashMap::new();
- let mut file_cache = MemoryCache::default();
+ let mut documents = DocumentCache::default();
for (specifier, content, version) in sources {
let specifier = ModuleSpecifier::resolve_url(specifier)
.expect("failed to create specifier");
- doc_data.insert(
- specifier.clone(),
- DocumentData::new(specifier.clone(), version, content, None),
- );
- file_cache.set_contents(specifier, Some(content.as_bytes().to_vec()));
+ documents.open(specifier, version, content.to_string());
}
- let file_cache = Arc::new(Mutex::new(file_cache));
StateSnapshot {
assets: Default::default(),
- doc_data,
- file_cache,
+ documents: Arc::new(Mutex::new(documents)),
sources: Default::default(),
}
}
@@ -1413,29 +1444,31 @@ mod tests {
let result = request(
&mut runtime,
state_snapshot,
- RequestMethod::GetDiagnostics(specifier),
+ RequestMethod::GetDiagnostics(vec![specifier]),
);
assert!(result.is_ok());
let response = result.unwrap();
assert_eq!(
response,
- json!([
- {
- "start": {
- "line": 0,
- "character": 0,
- },
- "end": {
- "line": 0,
- "character": 7
- },
- "fileName": "file:///a.ts",
- "messageText": "Cannot find name 'console'. Do you need to change your target library? Try changing the `lib` compiler option to include 'dom'.",
- "sourceLine": "console.log(\"hello deno\");",
- "category": 1,
- "code": 2584
- }
- ])
+ json!({
+ "file:///a.ts": [
+ {
+ "start": {
+ "line": 0,
+ "character": 0,
+ },
+ "end": {
+ "line": 0,
+ "character": 7
+ },
+ "fileName": "file:///a.ts",
+ "messageText": "Cannot find name 'console'. Do you need to change your target library? Try changing the `lib` compiler option to include 'dom'.",
+ "sourceLine": "console.log(\"hello deno\");",
+ "category": 1,
+ "code": 2584
+ }
+ ]
+ })
);
}
@@ -1466,11 +1499,11 @@ mod tests {
let result = request(
&mut runtime,
state_snapshot,
- RequestMethod::GetDiagnostics(specifier),
+ RequestMethod::GetDiagnostics(vec![specifier]),
);
assert!(result.is_ok());
let response = result.unwrap();
- assert_eq!(response, json!([]));
+ assert_eq!(response, json!({ "file:///a.ts": [] }));
}
#[test]
@@ -1496,28 +1529,30 @@ mod tests {
let result = request(
&mut runtime,
state_snapshot,
- RequestMethod::GetDiagnostics(specifier),
+ RequestMethod::GetDiagnostics(vec![specifier]),
);
assert!(result.is_ok());
let response = result.unwrap();
assert_eq!(
response,
- json!([{
- "start": {
- "line": 1,
- "character": 8
- },
- "end": {
- "line": 1,
- "character": 30
- },
- "fileName": "file:///a.ts",
- "messageText": "\'A\' is declared but its value is never read.",
- "sourceLine": " import { A } from \".\";",
- "category": 2,
- "code": 6133,
- "reportsUnnecessary": true,
- }])
+ json!({
+ "file:///a.ts": [{
+ "start": {
+ "line": 1,
+ "character": 8
+ },
+ "end": {
+ "line": 1,
+ "character": 30
+ },
+ "fileName": "file:///a.ts",
+ "messageText": "\'A\' is declared but its value is never read.",
+ "sourceLine": " import { A } from \".\";",
+ "category": 2,
+ "code": 6133,
+ "reportsUnnecessary": true,
+ }]
+ })
);
}
@@ -1548,11 +1583,11 @@ mod tests {
let result = request(
&mut runtime,
state_snapshot,
- RequestMethod::GetDiagnostics(specifier),
+ RequestMethod::GetDiagnostics(vec![specifier]),
);
assert!(result.is_ok());
let response = result.unwrap();
- assert_eq!(response, json!([]));
+ assert_eq!(response, json!({ "file:///a.ts": [] }));
}
#[test]
@@ -1585,42 +1620,44 @@ mod tests {
let result = request(
&mut runtime,
state_snapshot,
- RequestMethod::GetDiagnostics(specifier),
+ RequestMethod::GetDiagnostics(vec![specifier]),
);
assert!(result.is_ok());
let response = result.unwrap();
assert_eq!(
response,
- json!([{
- "start": {
- "line": 1,
- "character": 8
- },
- "end": {
- "line": 6,
- "character": 55,
- },
- "fileName": "file:///a.ts",
- "messageText": "All imports in import declaration are unused.",
- "sourceLine": " import {",
- "category": 2,
- "code": 6192,
- "reportsUnnecessary": true
- }, {
- "start": {
- "line": 8,
- "character": 29
- },
- "end": {
- "line": 8,
- "character": 29
- },
- "fileName": "file:///a.ts",
- "messageText": "Expression expected.",
- "sourceLine": " import * as test from",
- "category": 1,
- "code": 1109
- }])
+ json!({
+ "file:///a.ts": [{
+ "start": {
+ "line": 1,
+ "character": 8
+ },
+ "end": {
+ "line": 6,
+ "character": 55,
+ },
+ "fileName": "file:///a.ts",
+ "messageText": "All imports in import declaration are unused.",
+ "sourceLine": " import {",
+ "category": 2,
+ "code": 6192,
+ "reportsUnnecessary": true
+ }, {
+ "start": {
+ "line": 8,
+ "character": 29
+ },
+ "end": {
+ "line": 8,
+ "character": 29
+ },
+ "fileName": "file:///a.ts",
+ "messageText": "Expression expected.",
+ "sourceLine": " import * as test from",
+ "category": 1,
+ "code": 1109
+ }]
+ })
);
}
@@ -1641,11 +1678,11 @@ mod tests {
let result = request(
&mut runtime,
state_snapshot,
- RequestMethod::GetDiagnostics(specifier),
+ RequestMethod::GetDiagnostics(vec![specifier]),
);
assert!(result.is_ok());
let response = result.unwrap();
- assert_eq!(response, json!([]));
+ assert_eq!(response, json!({}));
}
#[test]
diff --git a/cli/tests/lsp/did_change_notification_large.json b/cli/tests/lsp/did_change_notification_large.json
new file mode 100644
index 000000000..c4999a7c3
--- /dev/null
+++ b/cli/tests/lsp/did_change_notification_large.json
@@ -0,0 +1,25 @@
+{
+ "jsonrpc": "2.0",
+ "method": "textDocument/didChange",
+ "params": {
+ "textDocument": {
+ "uri": "file:///a/file.ts",
+ "version": 2
+ },
+ "contentChanges": [
+ {
+ "range": {
+ "start": {
+ "line": 444,
+ "character": 11
+ },
+ "end": {
+ "line": 444,
+ "character": 14
+ }
+ },
+ "text": "+++"
+ }
+ ]
+ }
+}
diff --git a/cli/tests/lsp/did_change_notification_mbc.json b/cli/tests/lsp/did_change_notification_mbc.json
new file mode 100644
index 000000000..fed742d39
--- /dev/null
+++ b/cli/tests/lsp/did_change_notification_mbc.json
@@ -0,0 +1,25 @@
+{
+ "jsonrpc": "2.0",
+ "method": "textDocument/didChange",
+ "params": {
+ "textDocument": {
+ "uri": "file:///a/file.ts",
+ "version": 2
+ },
+ "contentChanges": [
+ {
+ "range": {
+ "start": {
+ "line": 1,
+ "character": 11
+ },
+ "end": {
+ "line": 1,
+ "character": 13
+ }
+ },
+ "text": ""
+ }
+ ]
+ }
+}
diff --git a/cli/tests/lsp/did_open_notification_large.json b/cli/tests/lsp/did_open_notification_large.json
new file mode 100644
index 000000000..4a467891c
--- /dev/null
+++ b/cli/tests/lsp/did_open_notification_large.json
@@ -0,0 +1,12 @@
+{
+ "jsonrpc": "2.0",
+ "method": "textDocument/didOpen",
+ "params": {
+ "textDocument": {
+ "uri": "file:///a/file.ts",
+ "languageId": "javascript",
+ "version": 1,
+ "text": "// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.\n\n// @ts-check\n/// <reference path=\"./compiler.d.ts\" />\n// deno-lint-ignore-file no-undef\n\n// This module is the entry point for \"compiler\" isolate, ie. the one\n// that is created when Deno needs to type check TypeScript, and in some\n// instances convert TypeScript to JavaScript.\n\n// Removes the `__proto__` for security reasons. This intentionally makes\n// Deno non compliant with ECMA-262 Annex B.2.2.1\ndelete Object.prototype.__proto__;\n\n((window) => {\n /** @type {DenoCore} */\n const core = window.Deno.core;\n\n let logDebug = false;\n let logSource = \"JS\";\n\n function setLogDebug(debug, source) {\n logDebug = debug;\n if (source) {\n logSource = source;\n }\n }\n\n function debug(...args) {\n if (logDebug) {\n const stringifiedArgs = args.map((arg) =>\n typeof arg === \"string\" ? arg : JSON.stringify(arg)\n ).join(\" \");\n // adding a non-zero integer value to the end of the debug string causes\n // the message to be printed to stderr instead of stdout, which is better\n // aligned to the behaviour of debug messages\n core.print(`DEBUG ${logSource} - ${stringifiedArgs}\\n`, 1);\n }\n }\n\n function error(...args) {\n const stringifiedArgs = args.map((arg) =>\n typeof arg === \"string\" || arg instanceof Error\n ? String(arg)\n : JSON.stringify(arg)\n ).join(\" \");\n core.print(`ERROR ${logSource} = ${stringifiedArgs}\\n`, 1);\n }\n\n class AssertionError extends Error {\n constructor(msg) {\n super(msg);\n this.name = \"AssertionError\";\n }\n }\n\n function assert(cond, msg = \"Assertion failed.\") {\n if (!cond) {\n throw new AssertionError(msg);\n }\n }\n\n /** @type {Map<string, ts.SourceFile>} */\n const sourceFileCache = new Map();\n\n /** @param {ts.DiagnosticRelatedInformation} diagnostic */\n function fromRelatedInformation({\n start,\n length,\n file,\n messageText: msgText,\n ...ri\n }) {\n let messageText;\n let messageChain;\n if (typeof msgText === \"object\") {\n messageChain = msgText;\n } else {\n messageText = msgText;\n }\n if (start !== undefined && length !== undefined && file) {\n const startPos = file.getLineAndCharacterOfPosition(start);\n const sourceLine = file.getFullText().split(\"\\n\")[startPos.line];\n const fileName = file.fileName;\n return {\n start: startPos,\n end: file.getLineAndCharacterOfPosition(start + length),\n fileName,\n messageChain,\n messageText,\n sourceLine,\n ...ri,\n };\n } else {\n return {\n messageChain,\n messageText,\n ...ri,\n };\n }\n }\n\n /** @param {ts.Diagnostic[]} diagnostics */\n function fromTypeScriptDiagnostic(diagnostics) {\n return diagnostics.map(({ relatedInformation: ri, source, ...diag }) => {\n /** @type {any} */\n const value = fromRelatedInformation(diag);\n value.relatedInformation = ri\n ? ri.map(fromRelatedInformation)\n : undefined;\n value.source = source;\n return value;\n });\n }\n\n // Using incremental compile APIs requires that all\n // paths must be either relative or absolute. Since\n // analysis in Rust operates on fully resolved URLs,\n // it makes sense to use the same scheme here.\n const ASSETS = \"asset:///\";\n const CACHE = \"cache:///\";\n\n /** Diagnostics that are intentionally ignored when compiling TypeScript in\n * Deno, as they provide misleading or incorrect information. */\n const IGNORED_DIAGNOSTICS = [\n // TS1208: All files must be modules when the '--isolatedModules' flag is\n // provided. 
We can ignore because we guarantee that all files are\n // modules.\n 1208,\n // TS1375: 'await' expressions are only allowed at the top level of a file\n // when that file is a module, but this file has no imports or exports.\n // Consider adding an empty 'export {}' to make this file a module.\n 1375,\n // TS1103: 'for-await-of' statement is only allowed within an async function\n // or async generator.\n 1103,\n // TS2306: File 'file:///Users/rld/src/deno/cli/tests/subdir/amd_like.js' is\n // not a module.\n 2306,\n // TS2691: An import path cannot end with a '.ts' extension. Consider\n // importing 'bad-module' instead.\n 2691,\n // TS2792: Cannot find module. Did you mean to set the 'moduleResolution'\n // option to 'node', or to add aliases to the 'paths' option?\n 2792,\n // TS5009: Cannot find the common subdirectory path for the input files.\n 5009,\n // TS5055: Cannot write file\n // 'http://localhost:4545/cli/tests/subdir/mt_application_x_javascript.j4.js'\n // because it would overwrite input file.\n 5055,\n // TypeScript is overly opinionated that only CommonJS modules kinds can\n // support JSON imports. Allegedly this was fixed in\n // Microsoft/TypeScript#26825 but that doesn't seem to be working here,\n // so we will ignore complaints about this compiler setting.\n 5070,\n // TS7016: Could not find a declaration file for module '...'. '...'\n // implicitly has an 'any' type. This is due to `allowJs` being off by\n // default but importing of a JavaScript module.\n 7016,\n ];\n\n const SNAPSHOT_COMPILE_OPTIONS = {\n esModuleInterop: true,\n jsx: ts.JsxEmit.React,\n module: ts.ModuleKind.ESNext,\n noEmit: true,\n strict: true,\n target: ts.ScriptTarget.ESNext,\n };\n\n class ScriptSnapshot {\n /** @type {string} */\n specifier;\n /** @type {string} */\n version;\n /**\n * @param {string} specifier\n * @param {string} version \n */\n constructor(specifier, version) {\n this.specifier = specifier;\n this.version = version;\n }\n /**\n * @param {number} start \n * @param {number} end \n * @returns {string}\n */\n getText(start, end) {\n const { specifier, version } = this;\n debug(\n `snapshot.getText(${start}, ${end}) specifier: ${specifier} version: ${version}`,\n );\n return core.jsonOpSync(\"op_get_text\", { specifier, version, start, end });\n }\n /**\n * @returns {number}\n */\n getLength() {\n const { specifier, version } = this;\n debug(`snapshot.getLength() specifier: ${specifier} version: ${version}`);\n return core.jsonOpSync(\"op_get_length\", { specifier, version });\n }\n /**\n * @param {ScriptSnapshot} oldSnapshot\n * @returns {ts.TextChangeRange | undefined}\n */\n getChangeRange(oldSnapshot) {\n const { specifier, version } = this;\n const { version: oldVersion } = oldSnapshot;\n const oldLength = oldSnapshot.getLength();\n debug(\n `snapshot.getLength() specifier: ${specifier} oldVersion: ${oldVersion} version: ${version}`,\n );\n return core.jsonOpSync(\n \"op_get_change_range\",\n { specifier, oldLength, oldVersion, version },\n );\n }\n dispose() {\n const { specifier, version } = this;\n debug(`snapshot.dispose() specifier: ${specifier} version: ${version}`);\n core.jsonOpSync(\"op_dispose\", { specifier, version });\n }\n }\n\n /** @type {ts.CompilerOptions} */\n let compilationSettings = {};\n\n /** @type {ts.LanguageService} */\n let languageService;\n\n /** An object literal of the incremental compiler host, which provides the\n * specific \"bindings\" to the Deno environment that tsc needs to work.\n *\n * @type {ts.CompilerHost & 
ts.LanguageServiceHost} */\n const host = {\n fileExists(fileName) {\n debug(`host.fileExists(\"${fileName}\")`);\n return false;\n },\n readFile(specifier) {\n debug(`host.readFile(\"${specifier}\")`);\n return core.jsonOpSync(\"op_load\", { specifier }).data;\n },\n getSourceFile(\n specifier,\n languageVersion,\n _onError,\n _shouldCreateNewSourceFile,\n ) {\n debug(\n `host.getSourceFile(\"${specifier}\", ${\n ts.ScriptTarget[languageVersion]\n })`,\n );\n let sourceFile = sourceFileCache.get(specifier);\n if (sourceFile) {\n return sourceFile;\n }\n\n /** @type {{ data: string; hash?: string; scriptKind: ts.ScriptKind }} */\n const { data, hash, scriptKind } = core.jsonOpSync(\n \"op_load\",\n { specifier },\n );\n assert(\n data != null,\n `\"data\" is unexpectedly null for \"${specifier}\".`,\n );\n sourceFile = ts.createSourceFile(\n specifier,\n data,\n languageVersion,\n false,\n scriptKind,\n );\n sourceFile.moduleName = specifier;\n sourceFile.version = hash;\n sourceFileCache.set(specifier, sourceFile);\n return sourceFile;\n },\n getDefaultLibFileName() {\n return `${ASSETS}/lib.esnext.d.ts`;\n },\n getDefaultLibLocation() {\n return ASSETS;\n },\n writeFile(fileName, data, _writeByteOrderMark, _onError, sourceFiles) {\n debug(`host.writeFile(\"${fileName}\")`);\n let maybeSpecifiers;\n if (sourceFiles) {\n maybeSpecifiers = sourceFiles.map((sf) => sf.moduleName);\n }\n return core.jsonOpSync(\n \"op_emit\",\n { maybeSpecifiers, fileName, data },\n );\n },\n getCurrentDirectory() {\n return CACHE;\n },\n getCanonicalFileName(fileName) {\n return fileName;\n },\n useCaseSensitiveFileNames() {\n return true;\n },\n getNewLine() {\n return \"\\n\";\n },\n resolveModuleNames(specifiers, base) {\n debug(`host.resolveModuleNames()`);\n debug(` base: ${base}`);\n debug(` specifiers: ${specifiers.join(\", \")}`);\n /** @type {Array<[string, ts.Extension] | undefined>} */\n const resolved = core.jsonOpSync(\"op_resolve\", {\n specifiers,\n base,\n });\n if (resolved) {\n const result = resolved.map((item) => {\n if (item) {\n const [resolvedFileName, extension] = item;\n return {\n resolvedFileName,\n extension,\n isExternalLibraryImport: false,\n };\n }\n return undefined;\n });\n result.length = specifiers.length;\n return result;\n } else {\n return new Array(specifiers.length);\n }\n },\n createHash(data) {\n return core.jsonOpSync(\"op_create_hash\", { data }).hash;\n },\n\n // LanguageServiceHost\n getCompilationSettings() {\n debug(\"host.getCompilationSettings()\");\n return compilationSettings;\n },\n getScriptFileNames() {\n debug(\"host.getScriptFileNames()\");\n return core.jsonOpSync(\"op_script_names\", undefined);\n },\n getScriptVersion(specifier) {\n debug(`host.getScriptVersion(\"${specifier}\")`);\n const sourceFile = sourceFileCache.get(specifier);\n if (sourceFile) {\n return sourceFile.version ?? 
\"1\";\n }\n return core.jsonOpSync(\"op_script_version\", { specifier });\n },\n getScriptSnapshot(specifier) {\n debug(`host.getScriptSnapshot(\"${specifier}\")`);\n const sourceFile = sourceFileCache.get(specifier);\n if (sourceFile) {\n return {\n getText(start, end) {\n return sourceFile.text.substring(start, end);\n },\n getLength() {\n return sourceFile.text.length;\n },\n getChangeRange() {\n return undefined;\n },\n };\n }\n /** @type {string | undefined} */\n const version = core.jsonOpSync(\"op_script_version\", { specifier });\n if (version != null) {\n return new ScriptSnapshot(specifier, version);\n }\n return undefined;\n },\n };\n\n /** @type {Array<[string, number]>} */\n const stats = [];\n let statsStart = 0;\n\n function performanceStart() {\n stats.length = 0;\n statsStart = Date.now();\n ts.performance.enable();\n }\n\n /**\n * @param {{ program: ts.Program | ts.EmitAndSemanticDiagnosticsBuilderProgram, fileCount?: number }} options \n */\n function performanceProgram({ program, fileCount }) {\n if (program) {\n if (\"getProgram\" in program) {\n program = program.getProgram();\n }\n stats.push([\"Files\", program.getSourceFiles().length]);\n stats.push([\"Nodes\", program.getNodeCount()]);\n stats.push([\"Identifiers\", program.getIdentifierCount()]);\n stats.push([\"Symbols\", program.getSymbolCount()]);\n stats.push([\"Types\", program.getTypeCount()]);\n stats.push([\"Instantiations\", program.getInstantiationCount()]);\n } else if (fileCount != null) {\n stats.push([\"Files\", fileCount]);\n }\n const programTime = ts.performance.getDuration(\"Program\");\n const bindTime = ts.performance.getDuration(\"Bind\");\n const checkTime = ts.performance.getDuration(\"Check\");\n const emitTime = ts.performance.getDuration(\"Emit\");\n stats.push([\"Parse time\", programTime]);\n stats.push([\"Bind time\", bindTime]);\n stats.push([\"Check time\", checkTime]);\n stats.push([\"Emit time\", emitTime]);\n stats.push(\n [\"Total TS time\", programTime + bindTime + checkTime + emitTime],\n );\n }\n\n function performanceEnd() {\n const duration = Date.now() - statsStart;\n stats.push([\"Compile time\", duration]);\n return stats;\n }\n\n /**\n * @typedef {object} Request\n * @property {Record<string, any>} config\n * @property {boolean} debug\n * @property {string[]} rootNames\n */\n\n /** The API that is called by Rust when executing a request.\n * @param {Request} request\n */\n function exec({ config, debug: debugFlag, rootNames }) {\n setLogDebug(debugFlag, \"TS\");\n performanceStart();\n debug(\">>> exec start\", { rootNames });\n debug(config);\n\n const { options, errors: configFileParsingDiagnostics } = ts\n .convertCompilerOptionsFromJson(config, \"\");\n // The `allowNonTsExtensions` is a \"hidden\" compiler option used in VSCode\n // which is not allowed to be passed in JSON, we need it to allow special\n // URLs which Deno supports. 
So we need to either ignore the diagnostic, or\n // inject it ourselves.\n Object.assign(options, { allowNonTsExtensions: true });\n const program = ts.createIncrementalProgram({\n rootNames,\n options,\n host,\n configFileParsingDiagnostics,\n });\n\n const { diagnostics: emitDiagnostics } = program.emit();\n\n const diagnostics = [\n ...program.getConfigFileParsingDiagnostics(),\n ...program.getSyntacticDiagnostics(),\n ...program.getOptionsDiagnostics(),\n ...program.getGlobalDiagnostics(),\n ...program.getSemanticDiagnostics(),\n ...emitDiagnostics,\n ].filter(({ code }) => !IGNORED_DIAGNOSTICS.includes(code));\n performanceProgram({ program });\n\n core.jsonOpSync(\"op_respond\", {\n diagnostics: fromTypeScriptDiagnostic(diagnostics),\n stats: performanceEnd(),\n });\n debug(\"<<< exec stop\");\n }\n\n /**\n * @param {number} id \n * @param {any} data \n */\n function respond(id, data = null) {\n core.jsonOpSync(\"op_respond\", { id, data });\n }\n\n /**\n * @param {LanguageServerRequest} request \n */\n function serverRequest({ id, ...request }) {\n debug(`serverRequest()`, { id, ...request });\n switch (request.method) {\n case \"configure\": {\n const { options, errors } = ts\n .convertCompilerOptionsFromJson(request.compilerOptions, \"\");\n Object.assign(options, { allowNonTsExtensions: true });\n if (errors.length) {\n debug(ts.formatDiagnostics(errors, host));\n }\n compilationSettings = options;\n return respond(id, true);\n }\n case \"getAsset\": {\n const sourceFile = host.getSourceFile(\n request.specifier,\n ts.ScriptTarget.ESNext,\n );\n return respond(id, sourceFile && sourceFile.text);\n }\n case \"getDiagnostics\": {\n try {\n /** @type {Record<string, any[]>} */\n const diagnosticMap = {};\n for (const specifier of request.specifiers) {\n diagnosticMap[specifier] = fromTypeScriptDiagnostic([\n ...languageService.getSemanticDiagnostics(specifier),\n ...languageService.getSuggestionDiagnostics(specifier),\n ...languageService.getSyntacticDiagnostics(specifier),\n ].filter(({ code }) => !IGNORED_DIAGNOSTICS.includes(code)));\n }\n return respond(id, diagnosticMap);\n } catch (e) {\n if (\"stack\" in e) {\n error(e.stack);\n } else {\n error(e);\n }\n return respond(id, {});\n }\n }\n case \"getQuickInfo\": {\n return respond(\n id,\n languageService.getQuickInfoAtPosition(\n request.specifier,\n request.position,\n ),\n );\n }\n case \"getCompletions\": {\n return respond(\n id,\n languageService.getCompletionsAtPosition(\n request.specifier,\n request.position,\n request.preferences,\n ),\n );\n }\n case \"getDocumentHighlights\": {\n return respond(\n id,\n languageService.getDocumentHighlights(\n request.specifier,\n request.position,\n request.filesToSearch,\n ),\n );\n }\n case \"getReferences\": {\n return respond(\n id,\n languageService.getReferencesAtPosition(\n request.specifier,\n request.position,\n ),\n );\n }\n case \"getDefinition\": {\n return respond(\n id,\n languageService.getDefinitionAndBoundSpan(\n request.specifier,\n request.position,\n ),\n );\n }\n case \"getImplementation\": {\n return respond(\n id,\n languageService.getImplementationAtPosition(\n request.specifier,\n request.position,\n ),\n );\n }\n case \"findRenameLocations\": {\n return respond(\n id,\n languageService.findRenameLocations(\n request.specifier,\n request.position,\n request.findInStrings,\n request.findInComments,\n request.providePrefixAndSuffixTextForRename,\n ),\n );\n }\n default:\n throw new TypeError(\n // @ts-ignore exhausted case statement sets type to never\n 
`Invalid request method for request: \"${request.method}\" (${id})`,\n );\n }\n }\n\n /** @param {{ debug: boolean; }} init */\n function serverInit({ debug: debugFlag }) {\n if (hasStarted) {\n throw new Error(\"The language server has already been initialized.\");\n }\n hasStarted = true;\n languageService = ts.createLanguageService(host);\n core.ops();\n setLogDebug(debugFlag, \"TSLS\");\n debug(\"serverInit()\");\n }\n\n let hasStarted = false;\n\n /** Startup the runtime environment, setting various flags.\n * @param {{ debugFlag?: boolean; legacyFlag?: boolean; }} msg\n */\n function startup({ debugFlag = false }) {\n if (hasStarted) {\n throw new Error(\"The compiler runtime already started.\");\n }\n hasStarted = true;\n core.ops();\n setLogDebug(!!debugFlag, \"TS\");\n }\n\n // Setup the compiler runtime during the build process.\n core.ops();\n core.registerErrorClass(\"Error\", Error);\n\n // A build time only op that provides some setup information that is used to\n // ensure the snapshot is setup properly.\n /** @type {{ buildSpecifier: string; libs: string[] }} */\n const { buildSpecifier, libs } = core.jsonOpSync(\"op_build_info\", {});\n for (const lib of libs) {\n const specifier = `lib.${lib}.d.ts`;\n // we are using internal APIs here to \"inject\" our custom libraries into\n // tsc, so things like `\"lib\": [ \"deno.ns\" ]` are supported.\n if (!ts.libs.includes(lib)) {\n ts.libs.push(lib);\n ts.libMap.set(lib, `lib.${lib}.d.ts`);\n }\n // we are caching in memory common type libraries that will be re-used by\n // tsc on when the snapshot is restored\n assert(\n host.getSourceFile(`${ASSETS}${specifier}`, ts.ScriptTarget.ESNext),\n );\n }\n // this helps ensure as much as possible is in memory that is re-usable\n // before the snapshotting is done, which helps unsure fast \"startup\" for\n // subsequent uses of tsc in Deno.\n const TS_SNAPSHOT_PROGRAM = ts.createProgram({\n rootNames: [buildSpecifier],\n options: SNAPSHOT_COMPILE_OPTIONS,\n host,\n });\n ts.getPreEmitDiagnostics(TS_SNAPSHOT_PROGRAM);\n\n // exposes the two functions that are called by `tsc::exec()` when type\n // checking TypeScript.\n globalThis.startup = startup;\n globalThis.exec = exec;\n\n // exposes the functions that are called when the compiler is used as a\n // language service.\n globalThis.serverInit = serverInit;\n globalThis.serverRequest = serverRequest;\n})(this);\n"
+ }
+ }
+}
diff --git a/cli/tests/lsp/did_open_notification_mbc.json b/cli/tests/lsp/did_open_notification_mbc.json
new file mode 100644
index 000000000..d7dd9444e
--- /dev/null
+++ b/cli/tests/lsp/did_open_notification_mbc.json
@@ -0,0 +1,12 @@
+{
+ "jsonrpc": "2.0",
+ "method": "textDocument/didOpen",
+ "params": {
+ "textDocument": {
+ "uri": "file:///a/file.ts",
+ "languageId": "typescript",
+ "version": 1,
+ "text": "const a = `ηΌ–ε†™θ½―δ»ΆεΎˆιšΎ`;\nconst b = `πŸ‘πŸ¦•πŸ˜ƒ`;\nconsole.log(a, b);\n"
+ }
+ }
+}
diff --git a/cli/tests/lsp/hover_request_mbc.json b/cli/tests/lsp/hover_request_mbc.json
new file mode 100644
index 000000000..6821fb0ff
--- /dev/null
+++ b/cli/tests/lsp/hover_request_mbc.json
@@ -0,0 +1,14 @@
+{
+ "jsonrpc": "2.0",
+ "id": 2,
+ "method": "textDocument/hover",
+ "params": {
+ "textDocument": {
+ "uri": "file:///a/file.ts"
+ },
+ "position": {
+ "line": 2,
+ "character": 14
+ }
+ }
+}
diff --git a/cli/tests/lsp/rename_request.json b/cli/tests/lsp/rename_request.json
index d9efe4b3f..a317d9257 100644
--- a/cli/tests/lsp/rename_request.json
+++ b/cli/tests/lsp/rename_request.json
@@ -7,8 +7,8 @@
"uri": "file:///a/file.ts"
},
"position": {
- "line": 5,
- "character": 19
+ "line": 0,
+ "character": 4
},
"newName": "variable_modified"
}
diff --git a/cli/tsc/99_main_compiler.js b/cli/tsc/99_main_compiler.js
index 14fcfa2bc..b50e32c76 100644
--- a/cli/tsc/99_main_compiler.js
+++ b/cli/tsc/99_main_compiler.js
@@ -511,15 +511,23 @@ delete Object.prototype.__proto__;
}
case "getDiagnostics": {
try {
- const diagnostics = [
- ...languageService.getSemanticDiagnostics(request.specifier),
- ...languageService.getSuggestionDiagnostics(request.specifier),
- ...languageService.getSyntacticDiagnostics(request.specifier),
- ].filter(({ code }) => !IGNORED_DIAGNOSTICS.includes(code));
- return respond(id, fromTypeScriptDiagnostic(diagnostics));
+ /** @type {Record<string, any[]>} */
+ const diagnosticMap = {};
+ for (const specifier of request.specifiers) {
+ diagnosticMap[specifier] = fromTypeScriptDiagnostic([
+ ...languageService.getSemanticDiagnostics(specifier),
+ ...languageService.getSuggestionDiagnostics(specifier),
+ ...languageService.getSyntacticDiagnostics(specifier),
+ ].filter(({ code }) => !IGNORED_DIAGNOSTICS.includes(code)));
+ }
+ return respond(id, diagnosticMap);
} catch (e) {
- error(e);
- return respond(id, []);
+ if ("stack" in e) {
+ error(e.stack);
+ } else {
+ error(e);
+ }
+ return respond(id, {});
}
}
case "getQuickInfo": {
diff --git a/cli/tsc/compiler.d.ts b/cli/tsc/compiler.d.ts
index 7fd4ce37d..fc0a2bf83 100644
--- a/cli/tsc/compiler.d.ts
+++ b/cli/tsc/compiler.d.ts
@@ -70,7 +70,7 @@ declare global {
interface GetDiagnosticsRequest extends BaseLanguageServerRequest {
method: "getDiagnostics";
- specifier: string;
+ specifiers: string[];
}
interface GetQuickInfoRequest extends BaseLanguageServerRequest {