Diffstat (limited to 'cli/lsp')
-rw-r--r--  cli/lsp/analysis.rs        | 171
-rw-r--r--  cli/lsp/code_lens.rs       | 128
-rw-r--r--  cli/lsp/completions.rs     |  26
-rw-r--r--  cli/lsp/diagnostics.rs     |  37
-rw-r--r--  cli/lsp/document_source.rs |  76
-rw-r--r--  cli/lsp/documents.rs       | 129
-rw-r--r--  cli/lsp/language_server.rs | 150
-rw-r--r--  cli/lsp/mod.rs             |   1
-rw-r--r--  cli/lsp/sources.rs         |  95
-rw-r--r--  cli/lsp/text.rs            |  15
-rw-r--r--  cli/lsp/tsc.rs             |  35
-rw-r--r--  cli/lsp/urls.rs            |   2
12 files changed, 486 insertions(+), 379 deletions(-)
diff --git a/cli/lsp/analysis.rs b/cli/lsp/analysis.rs
index 4c5f1fea7..12b54a503 100644
--- a/cli/lsp/analysis.rs
+++ b/cli/lsp/analysis.rs
@@ -4,14 +4,22 @@ use super::language_server;
use super::tsc;
use crate::ast;
+use crate::ast::Location;
use crate::import_map::ImportMap;
use crate::lsp::documents::DocumentData;
-use crate::media_type::MediaType;
use crate::module_graph::parse_deno_types;
use crate::module_graph::parse_ts_reference;
use crate::module_graph::TypeScriptReference;
use crate::tools::lint::create_linter;
+use deno_ast::swc::ast as swc_ast;
+use deno_ast::swc::common::DUMMY_SP;
+use deno_ast::swc::visit::Node;
+use deno_ast::swc::visit::Visit;
+use deno_ast::swc::visit::VisitWith;
+use deno_ast::Diagnostic;
+use deno_ast::MediaType;
+use deno_ast::SourceTextInfo;
use deno_core::error::anyhow;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
@@ -29,11 +37,6 @@ use regex::Regex;
use std::cmp::Ordering;
use std::collections::HashMap;
use std::fmt;
-use swc_common::DUMMY_SP;
-use swc_ecmascript::ast as swc_ast;
-use swc_ecmascript::visit::Node;
-use swc_ecmascript::visit::Visit;
-use swc_ecmascript::visit::VisitWith;
lazy_static::lazy_static! {
/// Diagnostic error codes which actually are the same, and so when grouping
@@ -131,17 +134,12 @@ fn as_lsp_range(range: &deno_lint::diagnostic::Range) -> Range {
}
pub fn get_lint_references(
- specifier: &ModuleSpecifier,
- media_type: &MediaType,
- source_code: &str,
+ parsed_source: &deno_ast::ParsedSource,
) -> Result<Vec<Reference>, AnyError> {
- let syntax = ast::get_syntax(media_type);
+ let syntax = deno_ast::get_syntax(parsed_source.media_type());
let lint_rules = rules::get_recommended_rules();
let linter = create_linter(syntax, lint_rules);
- // TODO(@kitsonk) we should consider caching the swc source file versions for
- // reuse by other processes
- let (_, lint_diagnostics) =
- linter.lint(specifier.to_string(), source_code.to_string())?;
+ let lint_diagnostics = linter.lint_with_ast(parsed_source);
Ok(
lint_diagnostics
@@ -281,27 +279,34 @@ pub fn resolve_import(
pub fn parse_module(
specifier: &ModuleSpecifier,
- source: &str,
- media_type: &MediaType,
-) -> Result<ast::ParsedModule, AnyError> {
- ast::parse(&specifier.to_string(), source, media_type)
+ source: SourceTextInfo,
+ media_type: MediaType,
+) -> Result<deno_ast::ParsedSource, Diagnostic> {
+ deno_ast::parse_module(deno_ast::ParseParams {
+ specifier: specifier.as_str().to_string(),
+ source,
+ media_type,
+ // capture the tokens for linting and formatting
+ capture_tokens: true,
+ maybe_syntax: None,
+ })
}
// TODO(@kitsonk) a lot of this logic is duplicated in module_graph.rs in
// Module::parse() and should be refactored out to a common function.
pub fn analyze_dependencies(
specifier: &ModuleSpecifier,
- media_type: &MediaType,
- parsed_module: &ast::ParsedModule,
+ media_type: MediaType,
+ parsed_source: &deno_ast::ParsedSource,
maybe_import_map: &Option<ImportMap>,
) -> (HashMap<String, Dependency>, Option<ResolvedDependency>) {
let mut maybe_type = None;
let mut dependencies = HashMap::<String, Dependency>::new();
// Parse leading comments for supported triple slash references.
- for comment in parsed_module.get_leading_comments().iter() {
+ for comment in parsed_source.get_leading_comments().iter() {
if let Some((ts_reference, span)) = parse_ts_reference(comment) {
- let loc = parsed_module.get_location(span.lo);
+ let loc = parsed_source.source().line_and_column_index(span.lo);
match ts_reference {
TypeScriptReference::Path(import) => {
let dep = dependencies.entry(import.clone()).or_default();
@@ -310,20 +315,19 @@ pub fn analyze_dependencies(
dep.maybe_code = Some(resolved_import);
dep.maybe_code_specifier_range = Some(Range {
start: Position {
- line: (loc.line - 1) as u32,
- character: loc.col as u32,
+ line: loc.line_index as u32,
+ character: loc.column_index as u32,
},
end: Position {
- line: (loc.line - 1) as u32,
- character: (loc.col + import.chars().count() + 2) as u32,
+ line: loc.line_index as u32,
+ character: (loc.column_index + import.chars().count() + 2) as u32,
},
});
}
TypeScriptReference::Types(import) => {
let resolved_import =
resolve_import(&import, specifier, maybe_import_map);
- if media_type == &MediaType::JavaScript
- || media_type == &MediaType::Jsx
+ if media_type == MediaType::JavaScript || media_type == MediaType::Jsx
{
maybe_type = Some(resolved_import.clone());
}
@@ -331,12 +335,12 @@ pub fn analyze_dependencies(
dep.maybe_type = Some(resolved_import);
dep.maybe_type_specifier_range = Some(Range {
start: Position {
- line: (loc.line - 1) as u32,
- character: loc.col as u32,
+ line: loc.line_index as u32,
+ character: loc.column_index as u32,
},
end: Position {
- line: (loc.line - 1) as u32,
- character: (loc.col + import.chars().count() + 2) as u32,
+ line: loc.line_index as u32,
+ character: (loc.column_index + import.chars().count() + 2) as u32,
},
});
}
@@ -345,9 +349,9 @@ pub fn analyze_dependencies(
}
// Parse ES and type only imports
- let descriptors = parsed_module.analyze_dependencies();
+ let descriptors = deno_graph::analyze_dependencies(parsed_source);
for desc in descriptors.into_iter().filter(|desc| {
- desc.kind != swc_ecmascript::dep_graph::DependencyKind::Require
+ desc.kind != deno_ast::swc::dep_graph::DependencyKind::Require
}) {
let resolved_import =
resolve_import(&desc.specifier, specifier, maybe_import_map);
@@ -359,7 +363,7 @@ pub fn analyze_dependencies(
(
resolve_import(deno_types, specifier, maybe_import_map),
deno_types.clone(),
- parsed_module.get_location(span.lo)
+ parsed_source.source().line_and_column_index(span.lo)
)
})
} else {
@@ -368,16 +372,20 @@ pub fn analyze_dependencies(
let dep = dependencies.entry(desc.specifier.to_string()).or_default();
dep.is_dynamic = desc.is_dynamic;
- let start = parsed_module.get_location(desc.specifier_span.lo);
- let end = parsed_module.get_location(desc.specifier_span.hi);
+ let start = parsed_source
+ .source()
+ .line_and_column_index(desc.specifier_span.lo);
+ let end = parsed_source
+ .source()
+ .line_and_column_index(desc.specifier_span.hi);
let range = Range {
start: Position {
- line: (start.line - 1) as u32,
- character: start.col as u32,
+ line: start.line_index as u32,
+ character: start.column_index as u32,
},
end: Position {
- line: (end.line - 1) as u32,
- character: end.col as u32,
+ line: end.line_index as u32,
+ character: end.column_index as u32,
},
};
dep.maybe_code_specifier_range = Some(range);
@@ -388,12 +396,15 @@ pub fn analyze_dependencies(
{
dep.maybe_type_specifier_range = Some(Range {
start: Position {
- line: (loc.line - 1) as u32,
- character: (loc.col + 1) as u32,
+ line: loc.line_index as u32,
+ // +1 to skip quote
+ character: (loc.column_index + 1) as u32,
},
end: Position {
- line: (loc.line - 1) as u32,
- character: (loc.col + 1 + specifier.chars().count()) as u32,
+ line: loc.line_index as u32,
+ // +1 to skip quote
+ character: (loc.column_index + 1 + specifier.chars().count())
+ as u32,
},
});
dep.maybe_type = Some(resolved_dependency);
@@ -692,14 +703,12 @@ impl CodeActionCollection {
})
.unwrap();
- let line_content = if let Some(doc) = document {
- doc
- .content_line(diagnostic.range.start.line as usize)
- .ok()
- .flatten()
- } else {
- None
- };
+ let line_content = document.map(|d| {
+ d.source()
+ .text_info()
+ .line_text(diagnostic.range.start.line as usize)
+ .to_string()
+ });
let mut changes = HashMap::new();
changes.insert(
@@ -1021,14 +1030,14 @@ impl DependencyRanges {
struct DependencyRangeCollector<'a> {
import_ranges: DependencyRanges,
- parsed_module: &'a ast::ParsedModule,
+ parsed_source: &'a deno_ast::ParsedSource,
}
impl<'a> DependencyRangeCollector<'a> {
- pub fn new(parsed_module: &'a ast::ParsedModule) -> Self {
+ pub fn new(parsed_source: &'a deno_ast::ParsedSource) -> Self {
Self {
import_ranges: DependencyRanges::default(),
- parsed_module,
+ parsed_source,
}
}
@@ -1043,8 +1052,8 @@ impl<'a> Visit for DependencyRangeCollector<'a> {
node: &swc_ast::ImportDecl,
_parent: &dyn Node,
) {
- let start = self.parsed_module.get_location(node.src.span.lo);
- let end = self.parsed_module.get_location(node.src.span.hi);
+ let start = Location::from_pos(self.parsed_source, node.src.span.lo);
+ let end = Location::from_pos(self.parsed_source, node.src.span.hi);
self.import_ranges.0.push(DependencyRange {
range: narrow_range(get_range_from_location(&start, &end)),
specifier: node.src.value.to_string(),
@@ -1057,8 +1066,8 @@ impl<'a> Visit for DependencyRangeCollector<'a> {
_parent: &dyn Node,
) {
if let Some(src) = &node.src {
- let start = self.parsed_module.get_location(src.span.lo);
- let end = self.parsed_module.get_location(src.span.hi);
+ let start = Location::from_pos(self.parsed_source, src.span.lo);
+ let end = Location::from_pos(self.parsed_source, src.span.hi);
self.import_ranges.0.push(DependencyRange {
range: narrow_range(get_range_from_location(&start, &end)),
specifier: src.value.to_string(),
@@ -1071,8 +1080,8 @@ impl<'a> Visit for DependencyRangeCollector<'a> {
node: &swc_ast::ExportAll,
_parent: &dyn Node,
) {
- let start = self.parsed_module.get_location(node.src.span.lo);
- let end = self.parsed_module.get_location(node.src.span.hi);
+ let start = Location::from_pos(self.parsed_source, node.src.span.lo);
+ let end = Location::from_pos(self.parsed_source, node.src.span.hi);
self.import_ranges.0.push(DependencyRange {
range: narrow_range(get_range_from_location(&start, &end)),
specifier: node.src.value.to_string(),
@@ -1084,8 +1093,8 @@ impl<'a> Visit for DependencyRangeCollector<'a> {
node: &swc_ast::TsImportType,
_parent: &dyn Node,
) {
- let start = self.parsed_module.get_location(node.arg.span.lo);
- let end = self.parsed_module.get_location(node.arg.span.hi);
+ let start = Location::from_pos(self.parsed_source, node.arg.span.lo);
+ let end = Location::from_pos(self.parsed_source, node.arg.span.hi);
self.import_ranges.0.push(DependencyRange {
range: narrow_range(get_range_from_location(&start, &end)),
specifier: node.arg.value.to_string(),
@@ -1096,11 +1105,11 @@ impl<'a> Visit for DependencyRangeCollector<'a> {
/// Analyze a document for import ranges, which can then be used to identify
/// whether a particular position within the document is inside an import range.
pub fn analyze_dependency_ranges(
- parsed_module: &ast::ParsedModule,
+ parsed_source: &deno_ast::ParsedSource,
) -> Result<DependencyRanges, AnyError> {
- let mut collector = DependencyRangeCollector::new(parsed_module);
- parsed_module
- .module
+ let mut collector = DependencyRangeCollector::new(parsed_source);
+ parsed_source
+ .module()
.visit_with(&swc_ast::Invalid { span: DUMMY_SP }, &mut collector);
Ok(collector.take())
}
@@ -1202,8 +1211,13 @@ mod tests {
fn test_get_lint_references() {
let specifier = resolve_url("file:///a.ts").expect("bad specifier");
let source = "const foo = 42;";
- let actual =
- get_lint_references(&specifier, &MediaType::TypeScript, source).unwrap();
+ let parsed_module = parse_module(
+ &specifier,
+ SourceTextInfo::from_string(source.to_string()),
+ MediaType::TypeScript,
+ )
+ .unwrap();
+ let actual = get_lint_references(&parsed_module).unwrap();
assert_eq!(
actual,
@@ -1246,11 +1260,15 @@ mod tests {
// @deno-types="https://deno.land/x/types/react/index.d.ts";
import React from "https://cdn.skypack.dev/react";
"#;
- let parsed_module =
- parse_module(&specifier, source, &MediaType::TypeScript).unwrap();
+ let parsed_module = parse_module(
+ &specifier,
+ SourceTextInfo::from_string(source.to_string()),
+ MediaType::TypeScript,
+ )
+ .unwrap();
let (actual, maybe_type) = analyze_dependencies(
&specifier,
- &MediaType::TypeScript,
+ MediaType::TypeScript,
&parsed_module,
&None,
);
@@ -1338,7 +1356,12 @@ mod tests {
let source =
"import * as a from \"./b.ts\";\nexport * as a from \"./c.ts\";\n";
let media_type = MediaType::TypeScript;
- let parsed_module = parse_module(&specifier, source, &media_type).unwrap();
+ let parsed_module = parse_module(
+ &specifier,
+ SourceTextInfo::from_string(source.to_string()),
+ media_type,
+ )
+ .unwrap();
let result = analyze_dependency_ranges(&parsed_module);
assert!(result.is_ok());
let actual = result.unwrap();
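The analysis.rs changes above swap the crate-local ast::parse for deno_ast::parse_module and replace the 1-based ParsedModule::get_location with the zero-based line_and_column_index. The following is an illustrative sketch, not part of the diff, showing how the new parse_module defined above is consumed; it assumes the deno_ast version referenced by this change.

use deno_ast::MediaType;
use deno_ast::SourceTextInfo;
use deno_core::resolve_url;

fn example() {
  let specifier = resolve_url("file:///a.ts").unwrap();
  // parse_module is the helper defined above in cli/lsp/analysis.rs.
  let parsed_source = parse_module(
    &specifier,
    SourceTextInfo::from_string("import \"./b.ts\";\n".to_string()),
    MediaType::TypeScript,
  )
  .unwrap();
  // line_and_column_index is zero-based, which is why the old
  // `loc.line - 1` adjustments disappear throughout this file.
  let loc = parsed_source
    .source()
    .line_and_column_index(parsed_source.module().span.lo);
  assert_eq!((loc.line_index, loc.column_index), (0, 0));
}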
diff --git a/cli/lsp/code_lens.rs b/cli/lsp/code_lens.rs
index 0570ac703..6755f50d5 100644
--- a/cli/lsp/code_lens.rs
+++ b/cli/lsp/code_lens.rs
@@ -1,11 +1,18 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
-use super::analysis;
+use super::config::Config;
+use super::config::WorkspaceSettings;
use super::language_server;
+use super::text::LineIndex;
use super::tsc;
-use crate::ast::ParsedModule;
+use super::tsc::NavigationTree;
-use deno_core::error::anyhow;
+use deno_ast::swc::ast;
+use deno_ast::swc::common::Span;
+use deno_ast::swc::visit::Node;
+use deno_ast::swc::visit::Visit;
+use deno_ast::swc::visit::VisitWith;
+use deno_ast::ParsedSource;
use deno_core::error::AnyError;
use deno_core::resolve_url;
use deno_core::serde::Deserialize;
@@ -18,11 +25,6 @@ use regex::Regex;
use std::cell::RefCell;
use std::collections::HashSet;
use std::rc::Rc;
-use swc_common::Span;
-use swc_ecmascript::ast;
-use swc_ecmascript::visit::Node;
-use swc_ecmascript::visit::Visit;
-use swc_ecmascript::visit::VisitWith;
lazy_static::lazy_static! {
static ref ABSTRACT_MODIFIER: Regex = Regex::new(r"\babstract\b").unwrap();
@@ -44,24 +46,24 @@ pub struct CodeLensData {
pub specifier: ModuleSpecifier,
}
-fn span_to_range(span: &Span, parsed_module: &ParsedModule) -> lsp::Range {
- let start = parsed_module.get_location(span.lo);
- let end = parsed_module.get_location(span.hi);
+fn span_to_range(span: &Span, parsed_source: &ParsedSource) -> lsp::Range {
+ let start = parsed_source.source().line_and_column_index(span.lo);
+ let end = parsed_source.source().line_and_column_index(span.hi);
lsp::Range {
start: lsp::Position {
- line: (start.line - 1) as u32,
- character: start.col as u32,
+ line: start.line_index as u32,
+ character: start.column_index as u32,
},
end: lsp::Position {
- line: (end.line - 1) as u32,
- character: end.col as u32,
+ line: end.line_index as u32,
+ character: end.column_index as u32,
},
}
}
struct DenoTestCollector<'a> {
code_lenses: Vec<lsp::CodeLens>,
- parsed_module: &'a ParsedModule,
+ parsed_source: &'a ParsedSource,
specifier: ModuleSpecifier,
test_vars: HashSet<String>,
}
@@ -69,18 +71,18 @@ struct DenoTestCollector<'a> {
impl<'a> DenoTestCollector<'a> {
pub fn new(
specifier: ModuleSpecifier,
- parsed_module: &'a ParsedModule,
+ parsed_source: &'a ParsedSource,
) -> Self {
Self {
code_lenses: Vec::new(),
- parsed_module,
+ parsed_source,
specifier,
test_vars: HashSet::new(),
}
}
fn add_code_lens<N: AsRef<str>>(&mut self, name: N, span: &Span) {
- let range = span_to_range(span, self.parsed_module);
+ let range = span_to_range(span, self.parsed_source);
self.code_lenses.push(lsp::CodeLens {
range,
command: Some(lsp::Command {
@@ -370,36 +372,37 @@ pub(crate) async fn resolve_code_lens(
pub(crate) async fn collect(
specifier: &ModuleSpecifier,
- language_server: &mut language_server::Inner,
+ parsed_source: Option<&ParsedSource>,
+ config: &Config,
+ line_index: &LineIndex,
+ navigation_tree: &NavigationTree,
) -> Result<Vec<lsp::CodeLens>, AnyError> {
- let mut code_lenses = collect_test(specifier, language_server)?;
- code_lenses.extend(collect_tsc(specifier, language_server).await?);
+ let mut code_lenses = collect_test(specifier, parsed_source, config)?;
+ code_lenses.extend(
+ collect_tsc(
+ specifier,
+ &config.get_workspace_settings(),
+ line_index,
+ navigation_tree,
+ )
+ .await?,
+ );
Ok(code_lenses)
}
fn collect_test(
specifier: &ModuleSpecifier,
- language_server: &mut language_server::Inner,
+ parsed_source: Option<&ParsedSource>,
+ config: &Config,
) -> Result<Vec<lsp::CodeLens>, AnyError> {
- if language_server.config.specifier_code_lens_test(specifier) {
- let source = language_server
- .get_text_content(specifier)
- .ok_or_else(|| anyhow!("Missing text content: {}", specifier))?;
- let media_type = language_server
- .get_media_type(specifier)
- .ok_or_else(|| anyhow!("Missing media type: {}", specifier))?;
- // we swallow parsed errors, as they are meaningless here.
- // TODO(@kitsonk) consider caching previous code_lens results to return if
- // there is a parse error to avoid issues of lenses popping in and out
- if let Ok(parsed_module) =
- analysis::parse_module(specifier, &source, &media_type)
- {
+ if config.specifier_code_lens_test(specifier) {
+ if let Some(parsed_source) = parsed_source {
let mut collector =
- DenoTestCollector::new(specifier.clone(), &parsed_module);
- parsed_module.module.visit_with(
+ DenoTestCollector::new(specifier.clone(), parsed_source);
+ parsed_source.module().visit_with(
&ast::Invalid {
- span: swc_common::DUMMY_SP,
+ span: deno_ast::swc::common::DUMMY_SP,
},
&mut collector,
);
@@ -412,13 +415,10 @@ fn collect_test(
/// Return tsc navigation tree code lenses.
async fn collect_tsc(
specifier: &ModuleSpecifier,
- language_server: &mut language_server::Inner,
+ workspace_settings: &WorkspaceSettings,
+ line_index: &LineIndex,
+ navigation_tree: &NavigationTree,
) -> Result<Vec<lsp::CodeLens>, AnyError> {
- let workspace_settings = language_server.config.get_workspace_settings();
- let line_index = language_server
- .get_line_index_sync(specifier)
- .ok_or_else(|| anyhow!("Missing line index."))?;
- let navigation_tree = language_server.get_navigation_tree(specifier).await?;
let code_lenses = Rc::new(RefCell::new(Vec::new()));
navigation_tree.walk(&|i, mp| {
let mut code_lenses = code_lenses.borrow_mut();
@@ -428,7 +428,7 @@ async fn collect_tsc(
let source = CodeLensSource::Implementations;
match i.kind {
tsc::ScriptElementKind::InterfaceElement => {
- code_lenses.push(i.to_code_lens(&line_index, specifier, &source));
+ code_lenses.push(i.to_code_lens(line_index, specifier, &source));
}
tsc::ScriptElementKind::ClassElement
| tsc::ScriptElementKind::MemberFunctionElement
@@ -436,7 +436,7 @@ async fn collect_tsc(
| tsc::ScriptElementKind::MemberGetAccessorElement
| tsc::ScriptElementKind::MemberSetAccessorElement => {
if ABSTRACT_MODIFIER.is_match(&i.kind_modifiers) {
- code_lenses.push(i.to_code_lens(&line_index, specifier, &source));
+ code_lenses.push(i.to_code_lens(line_index, specifier, &source));
}
}
_ => (),
@@ -448,31 +448,31 @@ async fn collect_tsc(
let source = CodeLensSource::References;
if let Some(parent) = &mp {
if parent.kind == tsc::ScriptElementKind::EnumElement {
- code_lenses.push(i.to_code_lens(&line_index, specifier, &source));
+ code_lenses.push(i.to_code_lens(line_index, specifier, &source));
}
}
match i.kind {
tsc::ScriptElementKind::FunctionElement => {
if workspace_settings.code_lens.references_all_functions {
- code_lenses.push(i.to_code_lens(&line_index, specifier, &source));
+ code_lenses.push(i.to_code_lens(line_index, specifier, &source));
}
}
tsc::ScriptElementKind::ConstElement
| tsc::ScriptElementKind::LetElement
| tsc::ScriptElementKind::VariableElement => {
if EXPORT_MODIFIER.is_match(&i.kind_modifiers) {
- code_lenses.push(i.to_code_lens(&line_index, specifier, &source));
+ code_lenses.push(i.to_code_lens(line_index, specifier, &source));
}
}
tsc::ScriptElementKind::ClassElement => {
if i.text != "<class>" {
- code_lenses.push(i.to_code_lens(&line_index, specifier, &source));
+ code_lenses.push(i.to_code_lens(line_index, specifier, &source));
}
}
tsc::ScriptElementKind::InterfaceElement
| tsc::ScriptElementKind::TypeElement
| tsc::ScriptElementKind::EnumElement => {
- code_lenses.push(i.to_code_lens(&line_index, specifier, &source));
+ code_lenses.push(i.to_code_lens(line_index, specifier, &source));
}
tsc::ScriptElementKind::LocalFunctionElement
| tsc::ScriptElementKind::MemberGetAccessorElement
@@ -485,11 +485,8 @@ async fn collect_tsc(
tsc::ScriptElementKind::ClassElement
| tsc::ScriptElementKind::InterfaceElement
| tsc::ScriptElementKind::TypeElement => {
- code_lenses.push(i.to_code_lens(
- &line_index,
- specifier,
- &source,
- ));
+ code_lenses
+ .push(i.to_code_lens(line_index, specifier, &source));
}
_ => (),
}
@@ -505,8 +502,10 @@ async fn collect_tsc(
#[cfg(test)]
mod tests {
+ use deno_ast::MediaType;
+ use deno_ast::SourceTextInfo;
+
use super::*;
- use crate::media_type::MediaType;
#[test]
fn test_deno_test_collector() {
@@ -519,13 +518,16 @@ mod tests {
Deno.test("test b", function anotherTest() {});
"#;
- let parsed_module =
- analysis::parse_module(&specifier, source, &MediaType::TypeScript)
- .unwrap();
+ let parsed_module = crate::lsp::analysis::parse_module(
+ &specifier,
+ SourceTextInfo::from_string(source.to_string()),
+ MediaType::TypeScript,
+ )
+ .unwrap();
let mut collector = DenoTestCollector::new(specifier, &parsed_module);
- parsed_module.module.visit_with(
+ parsed_module.module().visit_with(
&ast::Invalid {
- span: swc_common::DUMMY_SP,
+ span: deno_ast::swc::common::DUMMY_SP,
},
&mut collector,
);
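code_lens.rs now takes the swc AST and visitor traits from deno_ast's re-exports instead of the standalone swc_ecmascript and swc_common crates. Below is a minimal illustrative visitor in the same style as DenoTestCollector above, using only the re-export paths shown in this diff; it is a sketch, not part of the change.

use deno_ast::swc::ast as swc_ast;
use deno_ast::swc::common::DUMMY_SP;
use deno_ast::swc::visit::Node;
use deno_ast::swc::visit::Visit;
use deno_ast::swc::visit::VisitWith;
use deno_ast::ParsedSource;

#[derive(Default)]
struct CallCounter(usize);

impl Visit for CallCounter {
  fn visit_call_expr(&mut self, _node: &swc_ast::CallExpr, _parent: &dyn Node) {
    // Count the call expression; this override does not descend into its
    // arguments, which is fine for a top-level tally.
    self.0 += 1;
  }
}

fn count_calls(parsed_source: &ParsedSource) -> usize {
  let mut counter = CallCounter::default();
  parsed_source
    .module()
    .visit_with(&swc_ast::Invalid { span: DUMMY_SP }, &mut counter);
  counter.0
}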
diff --git a/cli/lsp/completions.rs b/cli/lsp/completions.rs
index f808f9607..6e7d71009 100644
--- a/cli/lsp/completions.rs
+++ b/cli/lsp/completions.rs
@@ -403,10 +403,11 @@ mod tests {
use crate::lsp::documents::DocumentCache;
use crate::lsp::documents::LanguageId;
use crate::lsp::sources::Sources;
- use crate::media_type::MediaType;
+ use deno_ast::MediaType;
use deno_core::resolve_url;
use std::collections::HashMap;
use std::path::Path;
+ use std::sync::Arc;
use tempfile::TempDir;
fn mock_state_snapshot(
@@ -418,17 +419,28 @@ mod tests {
for (specifier, source, version, language_id) in fixtures {
let specifier =
resolve_url(specifier).expect("failed to create specifier");
- documents.open(specifier.clone(), *version, language_id.clone(), source);
+ documents.open(
+ specifier.clone(),
+ *version,
+ *language_id,
+ Arc::new(source.to_string()),
+ );
let media_type = MediaType::from(&specifier);
- let parsed_module =
- analysis::parse_module(&specifier, source, &media_type).unwrap();
+ let parsed_module = documents
+ .get(&specifier)
+ .unwrap()
+ .source()
+ .module()
+ .map(|r| r.as_ref())
+ .unwrap()
+ .unwrap();
let (deps, _) = analysis::analyze_dependencies(
&specifier,
- &media_type,
- &parsed_module,
+ media_type,
+ parsed_module,
&None,
);
- let dep_ranges = analysis::analyze_dependency_ranges(&parsed_module).ok();
+ let dep_ranges = analysis::analyze_dependency_ranges(parsed_module).ok();
documents
.set_dependencies(&specifier, Some(deps), dep_ranges)
.unwrap();
diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs
index 11a4e8364..c106c9865 100644
--- a/cli/lsp/diagnostics.rs
+++ b/cli/lsp/diagnostics.rs
@@ -7,7 +7,6 @@ use super::sources::Sources;
use super::tsc;
use crate::diagnostics;
-use crate::media_type::MediaType;
use crate::tokio_util::create_basic_runtime;
use analysis::ResolvedDependency;
@@ -327,23 +326,29 @@ async fn generate_lint_diagnostics(
.lock()
.await
.get_version(specifier, &DiagnosticSource::DenoLint);
- let media_type = MediaType::from(specifier);
if version != current_version {
- if let Ok(Some(source_code)) = documents.content(specifier) {
- if let Ok(references) = analysis::get_lint_references(
- specifier,
- &media_type,
- &source_code,
- ) {
- let diagnostics =
- references.into_iter().map(|r| r.to_diagnostic()).collect();
- diagnostics_vec.push((specifier.clone(), version, diagnostics));
- } else {
- diagnostics_vec.push((specifier.clone(), version, Vec::new()));
+ let module = documents
+ .get(specifier)
+ .map(|d| d.source().module())
+ .flatten();
+ let diagnostics = match module {
+ Some(Ok(module)) => {
+ if let Ok(references) = analysis::get_lint_references(module) {
+ references
+ .into_iter()
+ .map(|r| r.to_diagnostic())
+ .collect::<Vec<_>>()
+ } else {
+ Vec::new()
+ }
}
- } else {
- error!("Missing file contents for: {}", specifier);
- }
+ Some(Err(_)) => Vec::new(),
+ None => {
+ error!("Missing file contents for: {}", specifier);
+ Vec::new()
+ }
+ };
+ diagnostics_vec.push((specifier.clone(), version, diagnostics));
}
}
}
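The lint path above now reads the lazily parsed module from the document cache instead of re-parsing the file text on every diagnostics run. Boiled down, the new control flow looks like the sketch below; DocumentData, analysis, and lsp refer to the cli/lsp items shown elsewhere in this diff, and the snippet is illustrative rather than part of the change.

use lspower::lsp;

fn lint_document(document: &DocumentData) -> Vec<lsp::Diagnostic> {
  match document.source().module() {
    // Only a successfully parsed js/ts document is linted.
    Some(Ok(parsed_source)) => analysis::get_lint_references(parsed_source)
      .map(|refs| refs.into_iter().map(|r| r.to_diagnostic()).collect())
      .unwrap_or_default(),
    // Parse errors and non-js/ts documents simply produce no lint diagnostics.
    _ => Vec::new(),
  }
}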
diff --git a/cli/lsp/document_source.rs b/cli/lsp/document_source.rs
new file mode 100644
index 000000000..109f2c300
--- /dev/null
+++ b/cli/lsp/document_source.rs
@@ -0,0 +1,76 @@
+use deno_ast::swc::common::BytePos;
+use deno_ast::Diagnostic;
+use deno_ast::MediaType;
+use deno_ast::ParsedSource;
+use deno_ast::SourceTextInfo;
+use deno_core::ModuleSpecifier;
+use once_cell::sync::OnceCell;
+use std::sync::Arc;
+
+use super::analysis;
+use super::text::LineIndex;
+
+#[derive(Debug)]
+struct DocumentSourceInner {
+ specifier: ModuleSpecifier,
+ media_type: MediaType,
+ text_info: SourceTextInfo,
+ parsed_module: OnceCell<Result<ParsedSource, Diagnostic>>,
+ line_index: LineIndex,
+}
+
+/// Immutable information about a document.
+#[derive(Debug, Clone)]
+pub struct DocumentSource {
+ inner: Arc<DocumentSourceInner>,
+}
+
+impl DocumentSource {
+ pub fn new(
+ specifier: &ModuleSpecifier,
+ media_type: MediaType,
+ text: Arc<String>,
+ line_index: LineIndex,
+ ) -> Self {
+ Self {
+ inner: Arc::new(DocumentSourceInner {
+ specifier: specifier.clone(),
+ media_type,
+ text_info: SourceTextInfo::new(BytePos(0), text),
+ parsed_module: OnceCell::new(),
+ line_index,
+ }),
+ }
+ }
+
+ pub fn text_info(&self) -> &SourceTextInfo {
+ &self.inner.text_info
+ }
+
+ pub fn line_index(&self) -> &LineIndex {
+ &self.inner.line_index
+ }
+
+ pub fn module(&self) -> Option<&Result<ParsedSource, Diagnostic>> {
+ let is_parsable = matches!(
+ self.inner.media_type,
+ MediaType::JavaScript
+ | MediaType::Jsx
+ | MediaType::TypeScript
+ | MediaType::Tsx
+ | MediaType::Dts,
+ );
+ if is_parsable {
+ // lazily parse the module
+ Some(self.inner.parsed_module.get_or_init(|| {
+ analysis::parse_module(
+ &self.inner.specifier,
+ self.inner.text_info.clone(),
+ self.inner.media_type,
+ )
+ }))
+ } else {
+ None
+ }
+ }
+}
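DocumentSource is the new immutable view of a document's text: the OnceCell makes parsing lazy and memoized, and the Arc-backed inner makes clones cheap to share across the server. A usage sketch follows (illustrative only; ModuleSpecifier, LineIndex, and DocumentSource are the items used above).

use std::sync::Arc;
use deno_ast::MediaType;
use deno_core::ModuleSpecifier;

fn example(specifier: &ModuleSpecifier) {
  let text = Arc::new("export const a = 1;\n".to_string());
  let source = DocumentSource::new(
    specifier,
    MediaType::TypeScript,
    text.clone(),
    LineIndex::new(&text),
  );
  // The first call parses and stores the Result in the OnceCell.
  let first = source.module();
  // Clones share the same inner data, so this reuses the cached parse.
  let cloned = source.clone();
  let again = cloned.module();
  assert!(first.is_some() && again.is_some());
  // Media types outside js/ts/jsx/tsx/dts return None here instead.
}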
diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs
index 4f961715d..3855150e7 100644
--- a/cli/lsp/documents.rs
+++ b/cli/lsp/documents.rs
@@ -1,24 +1,24 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
use super::analysis;
+use super::document_source::DocumentSource;
use super::text::LineIndex;
use super::tsc;
-use crate::media_type::MediaType;
-
+use deno_ast::MediaType;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
-use deno_core::error::Context;
use deno_core::ModuleSpecifier;
use lspower::lsp;
use std::collections::HashMap;
use std::collections::HashSet;
use std::ops::Range;
use std::str::FromStr;
+use std::sync::Arc;
/// A representation of the language id sent from the LSP client, which is used
/// to determine how the document is handled within the language server.
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, PartialEq, Eq, Copy)]
pub enum LanguageId {
JavaScript,
Jsx,
@@ -81,11 +81,10 @@ impl IndexValid {
#[derive(Debug, Clone)]
pub struct DocumentData {
- bytes: Option<Vec<u8>>,
+ source: DocumentSource,
dependencies: Option<HashMap<String, analysis::Dependency>>,
dependency_ranges: Option<analysis::DependencyRanges>,
pub(crate) language_id: LanguageId,
- line_index: Option<LineIndex>,
maybe_navigation_tree: Option<tsc::NavigationTree>,
specifier: ModuleSpecifier,
version: Option<i32>,
@@ -96,14 +95,19 @@ impl DocumentData {
specifier: ModuleSpecifier,
version: i32,
language_id: LanguageId,
- source: &str,
+ source_text: Arc<String>,
) -> Self {
+ let line_index = LineIndex::new(&source_text);
Self {
- bytes: Some(source.as_bytes().to_owned()),
+ source: DocumentSource::new(
+ &specifier,
+ MediaType::from(&language_id),
+ source_text,
+ line_index,
+ ),
dependencies: None,
dependency_ranges: None,
language_id,
- line_index: Some(LineIndex::new(source)),
maybe_navigation_tree: None,
specifier,
version: Some(version),
@@ -114,59 +118,39 @@ impl DocumentData {
&mut self,
content_changes: Vec<lsp::TextDocumentContentChangeEvent>,
) -> Result<(), AnyError> {
- if self.bytes.is_none() {
- return Ok(());
- }
- let content = &mut String::from_utf8(self.bytes.clone().unwrap())
- .context("unable to parse bytes to string")?;
- let mut line_index = if let Some(line_index) = &self.line_index {
- line_index.clone()
- } else {
- LineIndex::new(content)
- };
+ let mut content = self.source.text_info().text_str().to_string();
+ let mut line_index = self.source.line_index().clone();
let mut index_valid = IndexValid::All;
for change in content_changes {
if let Some(range) = change.range {
if !index_valid.covers(range.start.line) {
- line_index = LineIndex::new(content);
+ line_index = LineIndex::new(&content);
}
index_valid = IndexValid::UpTo(range.start.line);
let range = line_index.get_text_range(range)?;
content.replace_range(Range::<usize>::from(range), &change.text);
} else {
- *content = change.text;
+ content = change.text;
index_valid = IndexValid::UpTo(0);
}
}
- self.bytes = Some(content.as_bytes().to_owned());
- self.line_index = if index_valid == IndexValid::All {
- Some(line_index)
+ let line_index = if index_valid == IndexValid::All {
+ line_index
} else {
- Some(LineIndex::new(content))
+ LineIndex::new(&content)
};
+ self.source = DocumentSource::new(
+ &self.specifier,
+ MediaType::from(&self.language_id),
+ Arc::new(content),
+ line_index,
+ );
self.maybe_navigation_tree = None;
Ok(())
}
- pub fn content(&self) -> Result<Option<String>, AnyError> {
- if let Some(bytes) = &self.bytes {
- Ok(Some(
- String::from_utf8(bytes.clone())
- .context("cannot decode bytes to string")?,
- ))
- } else {
- Ok(None)
- }
- }
-
- pub fn content_line(&self, line: usize) -> Result<Option<String>, AnyError> {
- let content = self.content().ok().flatten();
- if let Some(content) = content {
- let lines = content.lines().into_iter().collect::<Vec<&str>>();
- Ok(Some(lines[line].to_string()))
- } else {
- Ok(None)
- }
+ pub fn source(&self) -> &DocumentSource {
+ &self.source
}
/// Determines if a position within the document is within a dependency range
@@ -223,7 +207,7 @@ impl DocumentCache {
specifier: &ModuleSpecifier,
version: i32,
content_changes: Vec<lsp::TextDocumentContentChangeEvent>,
- ) -> Result<Option<String>, AnyError> {
+ ) -> Result<(), AnyError> {
if !self.contains_key(specifier) {
return Err(custom_error(
"NotFound",
@@ -237,7 +221,7 @@ impl DocumentCache {
let doc = self.docs.get_mut(specifier).unwrap();
doc.apply_content_changes(content_changes)?;
doc.version = Some(version);
- doc.content()
+ Ok(())
}
pub fn close(&mut self, specifier: &ModuleSpecifier) {
@@ -249,15 +233,15 @@ impl DocumentCache {
self.docs.contains_key(specifier)
}
- pub fn content(
- &self,
- specifier: &ModuleSpecifier,
- ) -> Result<Option<String>, AnyError> {
- if let Some(doc) = self.docs.get(specifier) {
- doc.content()
- } else {
- Ok(None)
- }
+ pub fn get(&self, specifier: &ModuleSpecifier) -> Option<&DocumentData> {
+ self.docs.get(specifier)
+ }
+
+ pub fn content(&self, specifier: &ModuleSpecifier) -> Option<Arc<String>> {
+ self
+ .docs
+ .get(specifier)
+ .map(|d| d.source().text_info().text())
}
// For a given specifier, get all open documents which directly or indirectly
@@ -282,13 +266,6 @@ impl DocumentCache {
.flatten()
}
- pub fn get_language_id(
- &self,
- specifier: &ModuleSpecifier,
- ) -> Option<LanguageId> {
- self.docs.get(specifier).map(|doc| doc.language_id.clone())
- }
-
pub fn get_navigation_tree(
&self,
specifier: &ModuleSpecifier,
@@ -349,8 +326,10 @@ impl DocumentCache {
}
pub fn line_index(&self, specifier: &ModuleSpecifier) -> Option<LineIndex> {
- let doc = self.docs.get(specifier)?;
- doc.line_index.clone()
+ self
+ .docs
+ .get(specifier)
+ .map(|d| d.source().line_index().clone())
}
pub fn open(
@@ -358,7 +337,7 @@ impl DocumentCache {
specifier: ModuleSpecifier,
version: i32,
language_id: LanguageId,
- source: &str,
+ source: Arc<String>,
) {
self.docs.insert(
specifier.clone(),
@@ -489,7 +468,7 @@ mod tests {
specifier.clone(),
1,
LanguageId::TypeScript,
- "console.log(\"Hello Deno\");\n",
+ Arc::new("console.log(\"Hello Deno\");\n".to_string()),
);
assert!(document_cache.contains_key(&specifier));
assert!(!document_cache.contains_key(&missing_specifier));
@@ -503,7 +482,7 @@ mod tests {
specifier.clone(),
1,
LanguageId::TypeScript,
- "console.log(\"Hello deno\");\n",
+ Arc::new("console.log(\"Hello deno\");\n".to_string()),
);
document_cache
.change(
@@ -527,8 +506,9 @@ mod tests {
.expect("failed to make changes");
let actual = document_cache
.content(&specifier)
- .expect("failed to get content");
- assert_eq!(actual, Some("console.log(\"Hello Deno\");\n".to_string()));
+ .expect("failed to get content")
+ .to_string();
+ assert_eq!(actual, "console.log(\"Hello Deno\");\n");
}
#[test]
@@ -539,7 +519,7 @@ mod tests {
specifier.clone(),
1,
LanguageId::TypeScript,
- "console.log(\"Hello πŸ¦•\");\n",
+ Arc::new("console.log(\"Hello πŸ¦•\");\n".to_string()),
);
document_cache
.change(
@@ -563,8 +543,9 @@ mod tests {
.expect("failed to make changes");
let actual = document_cache
.content(&specifier)
- .expect("failed to get content");
- assert_eq!(actual, Some("console.log(\"Hello Deno\");\n".to_string()));
+ .expect("failed to get content")
+ .to_string();
+ assert_eq!(actual, "console.log(\"Hello Deno\");\n");
}
#[test]
@@ -576,7 +557,7 @@ mod tests {
specifier.clone(),
1,
"typescript".parse().unwrap(),
- "console.log(\"hello world\");\n",
+ Arc::new("console.log(\"hello world\");\n".to_string()),
);
assert!(document_cache.is_diagnosable(&specifier));
let specifier = resolve_url("file:///a/file.rs").unwrap();
@@ -584,7 +565,7 @@ mod tests {
specifier.clone(),
1,
"rust".parse().unwrap(),
- "pub mod a;",
+ Arc::new("pub mod a;".to_string()),
);
assert!(!document_cache.is_diagnosable(&specifier));
let specifier =
diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs
index 8d13382a7..f2b03a0aa 100644
--- a/cli/lsp/language_server.rs
+++ b/cli/lsp/language_server.rs
@@ -58,9 +58,8 @@ use crate::deno_dir;
use crate::fs_util;
use crate::import_map::ImportMap;
use crate::logger;
-use crate::media_type::MediaType;
use crate::tools::fmt::format_file;
-use crate::tools::fmt::get_typescript_config;
+use crate::tools::fmt::format_parsed_module;
pub const REGISTRIES_PATH: &str = "registries";
const SOURCES_PATH: &str = "deps";
@@ -165,19 +164,17 @@ impl Inner {
/// Analyzes dependencies of a document that has been opened in the editor and
/// sets the dependencies property on the document.
- fn analyze_dependencies(
- &mut self,
- specifier: &ModuleSpecifier,
- media_type: &MediaType,
- source: &str,
- ) {
- if let Ok(parsed_module) =
- analysis::parse_module(specifier, source, media_type)
+ fn analyze_dependencies(&mut self, specifier: &ModuleSpecifier) {
+ if let Some(Ok(parsed_module)) = self
+ .documents
+ .get(specifier)
+ .map(|d| d.source().module())
+ .flatten()
{
let (mut deps, _) = analysis::analyze_dependencies(
specifier,
- media_type,
- &parsed_module,
+ parsed_module.media_type(),
+ parsed_module,
&self.maybe_import_map,
);
for (_, dep) in deps.iter_mut() {
@@ -188,7 +185,7 @@ impl Inner {
}
}
}
- let dep_ranges = analysis::analyze_dependency_ranges(&parsed_module).ok();
+ let dep_ranges = analysis::analyze_dependency_ranges(parsed_module).ok();
if let Err(err) =
self
.documents
@@ -202,18 +199,14 @@ impl Inner {
/// Analyzes all dependencies for all documents that have been opened in the
/// editor and sets the dependencies property on the documents.
fn analyze_dependencies_all(&mut self) {
- let docs: Vec<(ModuleSpecifier, String, MediaType)> = self
+ let specifiers = self
.documents
.docs
- .iter()
- .filter_map(|(s, doc)| {
- let source = doc.content().ok().flatten()?;
- let media_type = MediaType::from(&doc.language_id);
- Some((s.clone(), source, media_type))
- })
- .collect();
- for (specifier, source, media_type) in docs {
- self.analyze_dependencies(&specifier, &media_type, &source);
+ .keys()
+ .map(ToOwned::to_owned)
+ .collect::<Vec<_>>();
+ for specifier in specifiers {
+ self.analyze_dependencies(&specifier);
}
}
@@ -281,30 +274,19 @@ impl Inner {
pub(crate) fn get_text_content(
&self,
specifier: &ModuleSpecifier,
- ) -> Option<String> {
+ ) -> Option<Arc<String>> {
if specifier.scheme() == "asset" {
self
.assets
.get(specifier)
.map(|o| o.clone().map(|a| a.text))?
} else if self.documents.contains_key(specifier) {
- self.documents.content(specifier).unwrap()
+ self.documents.content(specifier)
} else {
self.sources.get_source(specifier)
}
}
- pub(crate) fn get_media_type(
- &self,
- specifier: &ModuleSpecifier,
- ) -> Option<MediaType> {
- if specifier.scheme() == "asset" || self.documents.contains_key(specifier) {
- Some(MediaType::from(specifier))
- } else {
- self.sources.get_media_type(specifier)
- }
- }
-
pub(crate) async fn get_navigation_tree(
&mut self,
specifier: &ModuleSpecifier,
@@ -789,20 +771,15 @@ impl Inner {
params.text_document.language_id, params.text_document.uri
);
}
- let media_type = MediaType::from(&language_id);
self.documents.open(
specifier.clone(),
params.text_document.version,
language_id,
- &params.text_document.text,
+ Arc::new(params.text_document.text),
);
if self.documents.is_diagnosable(&specifier) {
- self.analyze_dependencies(
- &specifier,
- &media_type,
- &params.text_document.text,
- );
+ self.analyze_dependencies(&specifier);
self
.diagnostics_server
.invalidate(self.documents.dependents(&specifier))
@@ -822,12 +799,9 @@ impl Inner {
params.text_document.version,
params.content_changes,
) {
- Ok(Some(source)) => {
+ Ok(()) => {
if self.documents.is_diagnosable(&specifier) {
- let media_type = MediaType::from(
- &self.documents.get_language_id(&specifier).unwrap(),
- );
- self.analyze_dependencies(&specifier, &media_type, &source);
+ self.analyze_dependencies(&specifier);
self
.diagnostics_server
.invalidate(self.documents.dependents(&specifier))
@@ -837,7 +811,6 @@ impl Inner {
}
}
}
- Ok(_) => error!("No content returned from change."),
Err(err) => error!("{}", err),
}
self.performance.measure(mark);
@@ -1021,16 +994,11 @@ impl Inner {
return Ok(None);
}
let mark = self.performance.mark("formatting", Some(&params));
- let file_text = self
- .documents
- .content(&specifier)
- .map_err(|_| {
- LspError::invalid_params(
- "The specified file could not be found in memory.",
- )
- })?
- .unwrap();
- let line_index = self.documents.line_index(&specifier);
+ let document_data = self.documents.get(&specifier).ok_or_else(|| {
+ LspError::invalid_params(
+ "The specified file could not be found in memory.",
+ )
+ })?;
let file_path =
if let Ok(file_path) = params.text_document.uri.to_file_path() {
file_path
@@ -1038,14 +1006,28 @@ impl Inner {
PathBuf::from(params.text_document.uri.path())
};
- // TODO(lucacasonato): handle error properly
+ let source = document_data.source().clone();
let text_edits = tokio::task::spawn_blocking(move || {
- let config = get_typescript_config();
- match format_file(&file_path, &file_text, config) {
+ let format_result = match source.module() {
+ Some(Ok(parsed_module)) => Ok(format_parsed_module(parsed_module)),
+ Some(Err(err)) => Err(err.to_string()),
+ None => {
+ // it's not a js/ts file, so attempt to format its contents
+ format_file(&file_path, source.text_info().text_str())
+ }
+ };
+
+ match format_result {
Ok(new_text) => {
- Some(text::get_edits(&file_text, &new_text, line_index))
+ let line_index = source.line_index();
+ Some(text::get_edits(
+ source.text_info().text_str(),
+ &new_text,
+ line_index,
+ ))
}
Err(err) => {
+ // TODO(lucacasonato): handle error properly
warn!("Format error: {}", err);
None
}
@@ -1257,7 +1239,7 @@ impl Inner {
Some("deno-lint") => code_actions
.add_deno_lint_ignore_action(
&specifier,
- self.documents.docs.get(&specifier),
+ self.documents.get(&specifier),
diagnostic,
)
.map_err(|err| {
@@ -1436,11 +1418,37 @@ impl Inner {
}
let mark = self.performance.mark("code_lens", Some(&params));
- let code_lenses =
- code_lens::collect(&specifier, self).await.map_err(|err| {
+ let navigation_tree =
+ self.get_navigation_tree(&specifier).await.map_err(|err| {
error!("Error getting code lenses for \"{}\": {}", specifier, err);
LspError::internal_error()
})?;
+ let parsed_module = self
+ .documents
+ .get(&specifier)
+ .map(|d| d.source().module())
+ .flatten()
+ .map(|m| m.as_ref().ok())
+ .flatten();
+ let line_index = self.get_line_index_sync(&specifier).ok_or_else(|| {
+ error!(
+ "Error getting code lenses for \"{}\": Missing line index",
+ specifier
+ );
+ LspError::internal_error()
+ })?;
+ let code_lenses = code_lens::collect(
+ &specifier,
+ parsed_module,
+ &self.config,
+ &line_index,
+ &navigation_tree,
+ )
+ .await
+ .map_err(|err| {
+ error!("Error getting code lenses for \"{}\": {}", specifier, err);
+ LspError::internal_error()
+ })?;
self.performance.measure(mark);
Ok(Some(code_lenses))
@@ -2606,11 +2614,7 @@ impl Inner {
// now that we have dependencies loaded, we need to re-analyze them and
// invalidate some diagnostics
if self.documents.contains_key(&referrer) {
- if let Some(source) = self.documents.content(&referrer).unwrap() {
- let media_type =
- MediaType::from(&self.documents.get_language_id(&referrer).unwrap());
- self.analyze_dependencies(&referrer, &media_type, &source);
- }
+ self.analyze_dependencies(&referrer);
self.diagnostics_server.invalidate(vec![referrer]).await;
}
@@ -2728,7 +2732,7 @@ impl Inner {
.await
.map_err(|_| LspError::internal_error())?
{
- Some(asset.text)
+ Some(asset.text.to_string())
} else {
error!("Missing asset: {}", specifier);
None
@@ -2736,7 +2740,7 @@ impl Inner {
}
_ => {
if let Some(source) = self.sources.get_source(&specifier) {
- Some(source)
+ Some(source.to_string())
} else {
error!("The cached source was not found: {}", specifier);
None
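Formatting now reuses the cached AST through format_parsed_module when the document parsed cleanly, and only falls back to the text-based format_file for files that are not js/ts. Stripped of the spawn_blocking plumbing, the dispatch above reduces to the sketch below; the Result<String, String> return shape is inferred from the surrounding match rather than confirmed, so treat it as an assumption.

use crate::tools::fmt::format_file;
use crate::tools::fmt::format_parsed_module;

fn format_document(
  source: &DocumentSource,
  file_path: &std::path::Path,
) -> Result<String, String> {
  match source.module() {
    // A successfully parsed js/ts document is formatted from its cached AST.
    Some(Ok(parsed_module)) => Ok(format_parsed_module(parsed_module)),
    // A parse error is surfaced as the formatting error.
    Some(Err(err)) => Err(err.to_string()),
    // Other file types (e.g. JSON or markdown) are formatted from their text.
    None => format_file(file_path, source.text_info().text_str()),
  }
}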
diff --git a/cli/lsp/mod.rs b/cli/lsp/mod.rs
index 0404d64e0..fda2ac82b 100644
--- a/cli/lsp/mod.rs
+++ b/cli/lsp/mod.rs
@@ -10,6 +10,7 @@ mod code_lens;
mod completions;
mod config;
mod diagnostics;
+mod document_source;
mod documents;
pub(crate) mod language_server;
mod lsp_custom;
diff --git a/cli/lsp/sources.rs b/cli/lsp/sources.rs
index a3f0ae750..6207bb3eb 100644
--- a/cli/lsp/sources.rs
+++ b/cli/lsp/sources.rs
@@ -1,6 +1,7 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
use super::analysis;
+use super::document_source::DocumentSource;
use super::text::LineIndex;
use super::tsc;
use super::urls::INVALID_SPECIFIER;
@@ -13,12 +14,12 @@ use crate::flags::Flags;
use crate::http_cache;
use crate::http_cache::HttpCache;
use crate::import_map::ImportMap;
-use crate::media_type::MediaType;
use crate::module_graph::GraphBuilder;
use crate::program_state::ProgramState;
use crate::specifier_handler::FetchHandler;
use crate::text_encoding;
+use deno_ast::MediaType;
use deno_core::error::anyhow;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
@@ -118,12 +119,11 @@ fn resolve_specifier(
struct Metadata {
dependencies: Option<HashMap<String, analysis::Dependency>>,
length_utf16: usize,
- line_index: LineIndex,
maybe_navigation_tree: Option<tsc::NavigationTree>,
maybe_types: Option<analysis::ResolvedDependency>,
maybe_warning: Option<String>,
media_type: MediaType,
- source: String,
+ source: DocumentSource,
specifier: ModuleSpecifier,
version: String,
}
@@ -133,12 +133,16 @@ impl Default for Metadata {
Self {
dependencies: None,
length_utf16: 0,
- line_index: LineIndex::default(),
maybe_navigation_tree: None,
maybe_types: None,
maybe_warning: None,
media_type: MediaType::default(),
- source: String::default(),
+ source: DocumentSource::new(
+ &INVALID_SPECIFIER,
+ MediaType::default(),
+ Arc::new(String::default()),
+ LineIndex::default(),
+ ),
specifier: INVALID_SPECIFIER.clone(),
version: String::default(),
}
@@ -148,55 +152,58 @@ impl Default for Metadata {
impl Metadata {
fn new(
specifier: &ModuleSpecifier,
- source: &str,
+ source: Arc<String>,
version: &str,
- media_type: &MediaType,
+ media_type: MediaType,
maybe_warning: Option<String>,
maybe_import_map: &Option<ImportMap>,
) -> Self {
- let (dependencies, maybe_types) = if let Ok(parsed_module) =
- analysis::parse_module(specifier, source, media_type)
- {
- let (deps, maybe_types) = analysis::analyze_dependencies(
- specifier,
- media_type,
- &parsed_module,
- maybe_import_map,
- );
- (Some(deps), maybe_types)
- } else {
- (None, None)
- };
- let line_index = LineIndex::new(source);
+ let line_index = LineIndex::new(&source);
+ let document_source =
+ DocumentSource::new(specifier, media_type, source, line_index);
+ let (dependencies, maybe_types) =
+ if let Some(Ok(parsed_module)) = document_source.module() {
+ let (deps, maybe_types) = analysis::analyze_dependencies(
+ specifier,
+ media_type,
+ parsed_module,
+ maybe_import_map,
+ );
+ (Some(deps), maybe_types)
+ } else {
+ (None, None)
+ };
Self {
dependencies,
- length_utf16: source.encode_utf16().count(),
- line_index,
+ length_utf16: document_source
+ .text_info()
+ .text_str()
+ .encode_utf16()
+ .count(),
maybe_navigation_tree: None,
maybe_types,
maybe_warning,
media_type: media_type.to_owned(),
- source: source.to_string(),
+ source: document_source,
specifier: specifier.clone(),
version: version.to_string(),
}
}
fn refresh(&mut self, maybe_import_map: &Option<ImportMap>) {
- let (dependencies, maybe_types) = if let Ok(parsed_module) =
- analysis::parse_module(&self.specifier, &self.source, &self.media_type)
- {
- let (deps, maybe_types) = analysis::analyze_dependencies(
- &self.specifier,
- &self.media_type,
- &parsed_module,
- maybe_import_map,
- );
- (Some(deps), maybe_types)
- } else {
- (None, None)
- };
+ let (dependencies, maybe_types) =
+ if let Some(Ok(parsed_module)) = self.source.module() {
+ let (deps, maybe_types) = analysis::analyze_dependencies(
+ &self.specifier,
+ self.media_type,
+ parsed_module,
+ maybe_import_map,
+ );
+ (Some(deps), maybe_types)
+ } else {
+ (None, None)
+ };
self.dependencies = dependencies;
self.maybe_types = maybe_types;
}
@@ -265,7 +272,7 @@ impl Sources {
self.0.lock().get_script_version(specifier)
}
- pub fn get_source(&self, specifier: &ModuleSpecifier) -> Option<String> {
+ pub fn get_source(&self, specifier: &ModuleSpecifier) -> Option<Arc<String>> {
self.0.lock().get_source(specifier)
}
@@ -344,7 +351,7 @@ impl Inner {
let specifier =
resolve_specifier(specifier, &mut self.redirects, &self.http_cache)?;
let metadata = self.get_metadata(&specifier)?;
- Some(metadata.line_index)
+ Some(metadata.source.line_index().clone())
}
fn get_maybe_types(
@@ -406,9 +413,9 @@ impl Inner {
};
let mut metadata = Metadata::new(
specifier,
- &source,
+ Arc::new(source),
&version,
- &media_type,
+ media_type,
maybe_warning,
&self.maybe_import_map,
);
@@ -455,11 +462,11 @@ impl Inner {
Some(metadata.version)
}
- fn get_source(&mut self, specifier: &ModuleSpecifier) -> Option<String> {
+ fn get_source(&mut self, specifier: &ModuleSpecifier) -> Option<Arc<String>> {
let specifier =
resolve_specifier(specifier, &mut self.redirects, &self.http_cache)?;
let metadata = self.get_metadata(&specifier)?;
- Some(metadata.source)
+ Some(metadata.source.text_info().text())
}
fn resolution_result(
@@ -602,7 +609,7 @@ mod tests {
resolve_path(&tests.join("001_hello.js").to_string_lossy()).unwrap();
let actual = sources.get_source(&specifier);
assert!(actual.is_some());
- let actual = actual.unwrap();
+ let actual = actual.unwrap().to_string();
assert_eq!(actual, "console.log(\"Hello World\");\n");
}
diff --git a/cli/lsp/text.rs b/cli/lsp/text.rs
index fedeabd06..0b9ae79be 100644
--- a/cli/lsp/text.rs
+++ b/cli/lsp/text.rs
@@ -210,21 +210,12 @@ impl LineIndex {
/// Compare two strings and return a vector of text edit records which are
/// supported by the Language Server Protocol.
-pub fn get_edits(
- a: &str,
- b: &str,
- maybe_line_index: Option<LineIndex>,
-) -> Vec<TextEdit> {
+pub fn get_edits(a: &str, b: &str, line_index: &LineIndex) -> Vec<TextEdit> {
if a == b {
return vec![];
}
let chunks = diff(a, b);
let mut text_edits = Vec::<TextEdit>::new();
- let line_index = if let Some(line_index) = maybe_line_index {
- line_index
- } else {
- LineIndex::new(a)
- };
let mut iter = chunks.iter().peekable();
let mut a_pos = TextSize::from(0);
loop {
@@ -575,7 +566,7 @@ const C: char = \"パ パ\";
fn test_get_edits() {
let a = "abcdefg";
let b = "a\nb\nchije\nfg\n";
- let actual = get_edits(a, b, None);
+ let actual = get_edits(a, b, &LineIndex::new(a));
assert_eq!(
actual,
vec![
@@ -613,7 +604,7 @@ const C: char = \"パ パ\";
fn test_get_edits_mbc() {
let a = "const bar = \"πŸ‘πŸ‡ΊπŸ‡ΈπŸ˜ƒ\";\nconsole.log('hello deno')\n";
let b = "const bar = \"πŸ‘πŸ‡ΊπŸ‡ΈπŸ˜ƒ\";\nconsole.log(\"hello deno\");\n";
- let actual = get_edits(a, b, None);
+ let actual = get_edits(a, b, &LineIndex::new(a));
assert_eq!(
actual,
vec![
diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs
index 0e5e3a995..61a6c9796 100644
--- a/cli/lsp/tsc.rs
+++ b/cli/lsp/tsc.rs
@@ -18,11 +18,11 @@ use super::text::LineIndex;
use super::urls::INVALID_SPECIFIER;
use crate::config_file::TsConfig;
-use crate::media_type::MediaType;
use crate::tokio_util::create_basic_runtime;
use crate::tsc;
use crate::tsc::ResolveArgs;
+use deno_ast::MediaType;
use deno_core::error::anyhow;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
@@ -45,6 +45,7 @@ use lspower::lsp;
use regex::Captures;
use regex::Regex;
use std::collections::HashSet;
+use std::sync::Arc;
use std::thread;
use std::{borrow::Cow, cmp};
use std::{collections::HashMap, path::Path};
@@ -111,7 +112,7 @@ impl TsServer {
/// from static assets built into Rust, or static assets built into tsc.
#[derive(Debug, Clone)]
pub struct AssetDocument {
- pub text: String,
+ pub text: Arc<String>,
pub length: usize,
pub line_index: LineIndex,
pub maybe_navigation_tree: Option<NavigationTree>,
@@ -121,7 +122,7 @@ impl AssetDocument {
pub fn new<T: AsRef<str>>(text: T) -> Self {
let text = text.as_ref();
Self {
- text: text.to_string(),
+ text: Arc::new(text.to_string()),
length: text.encode_utf16().count(),
line_index: LineIndex::new(text),
maybe_navigation_tree: None,
@@ -2057,7 +2058,7 @@ fn cache_snapshot(
state
.state_snapshot
.documents
- .content(specifier)?
+ .content(specifier)
.ok_or_else(|| {
anyhow!("Specifier unexpectedly doesn't have content: {}", specifier)
})?
@@ -2068,7 +2069,7 @@ fn cache_snapshot(
};
state
.snapshots
- .insert((specifier.clone(), version.into()), content);
+ .insert((specifier.clone(), version.into()), content.to_string());
}
Ok(())
}
@@ -2235,17 +2236,16 @@ fn op_get_text(
let specifier = state.normalize_specifier(args.specifier)?;
let content =
if let Some(Some(content)) = state.state_snapshot.assets.get(&specifier) {
- content.text.clone()
+ content.text.as_str()
} else {
cache_snapshot(state, &specifier, args.version.clone())?;
state
.snapshots
.get(&(specifier, args.version.into()))
.unwrap()
- .clone()
};
state.state_snapshot.performance.measure(mark);
- Ok(text::slice(&content, args.start..args.end).to_string())
+ Ok(text::slice(content, args.start..args.end).to_string())
}
fn op_load(
@@ -2259,7 +2259,7 @@ fn op_load(
let specifier = state.normalize_specifier(args.specifier)?;
let result = state.state_snapshot.sources.get_source(&specifier);
state.state_snapshot.performance.measure(mark);
- Ok(result)
+ Ok(result.map(|t| t.to_string()))
}
fn op_resolve(
@@ -2908,19 +2908,24 @@ mod tests {
for (specifier, source, version, language_id) in fixtures {
let specifier =
resolve_url(specifier).expect("failed to create specifier");
- documents.open(specifier.clone(), *version, language_id.clone(), source);
+ documents.open(
+ specifier.clone(),
+ *version,
+ *language_id,
+ Arc::new(source.to_string()),
+ );
let media_type = MediaType::from(&specifier);
- if let Ok(parsed_module) =
- analysis::parse_module(&specifier, source, &media_type)
+ if let Some(Ok(parsed_module)) =
+ documents.get(&specifier).unwrap().source().module()
{
let (deps, _) = analysis::analyze_dependencies(
&specifier,
- &media_type,
- &parsed_module,
+ media_type,
+ parsed_module,
&None,
);
let dep_ranges =
- analysis::analyze_dependency_ranges(&parsed_module).ok();
+ analysis::analyze_dependency_ranges(parsed_module).ok();
documents
.set_dependencies(&specifier, Some(deps), dep_ranges)
.unwrap();
diff --git a/cli/lsp/urls.rs b/cli/lsp/urls.rs
index 0987b51c4..8b47911f6 100644
--- a/cli/lsp/urls.rs
+++ b/cli/lsp/urls.rs
@@ -1,9 +1,9 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
use crate::file_fetcher::map_content_type;
-use crate::media_type::MediaType;
use data_url::DataUrl;
+use deno_ast::MediaType;
use deno_core::error::uri_error;
use deno_core::error::AnyError;
use deno_core::url::Position;