Diffstat (limited to 'cli/lsp')
-rw-r--r--  cli/lsp/cache.rs        | 14
-rw-r--r--  cli/lsp/documents.rs    | 25
-rw-r--r--  cli/lsp/jsr_resolver.rs | 19
-rw-r--r--  cli/lsp/registries.rs   |  1
4 files changed, 40 insertions(+), 19 deletions(-)
diff --git a/cli/lsp/cache.rs b/cli/lsp/cache.rs
index eec6433a2..e0034207d 100644
--- a/cli/lsp/cache.rs
+++ b/cli/lsp/cache.rs
@@ -11,6 +11,16 @@ use std::path::Path;
use std::sync::Arc;
use std::time::SystemTime;
+/// In the LSP, we disallow the cache from automatically copying from
+/// the global cache to the local cache for technical reasons.
+///
+/// 1. We need to verify the checksums from the lockfile are correct when
+/// moving from the global to the local cache.
+/// 2. We need to verify the checksums for JSR https specifiers match what
+/// is found in the package's manifest.
+pub const LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY: deno_cache_dir::GlobalToLocalCopy =
+ deno_cache_dir::GlobalToLocalCopy::Disallow;
+
pub fn calculate_fs_version(
cache: &Arc<dyn HttpCache>,
specifier: &ModuleSpecifier,
@@ -123,8 +133,8 @@ impl CacheMetadata {
return None;
}
let cache_key = self.cache.cache_item_key(specifier).ok()?;
- let specifier_metadata = self.cache.read_metadata(&cache_key).ok()??;
- let values = Arc::new(parse_metadata(&specifier_metadata.headers));
+ let headers = self.cache.read_headers(&cache_key).ok()??;
+ let values = Arc::new(parse_metadata(&headers));
let version = calculate_fs_version_in_cache(&self.cache, specifier);
let mut metadata_map = self.metadata.lock();
let metadata = Metadata { values, version };
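(Editor's note: a minimal sketch, not part of the commit, of how the new constant is threaded into a cache read. The helper and its exact types are assumptions modeled on the call sites in the hunks below; only the argument order and the `None` checksum slot are taken from the diff.)

// Hypothetical helper, for illustration only.
fn read_cached_bytes(
  cache: &Arc<dyn HttpCache>,
  specifier: &ModuleSpecifier,
) -> Option<Vec<u8>> {
  let key = cache.cache_item_key(specifier).ok()?;
  // `Disallow` keeps this read from promoting an entry out of the global
  // cache into a local vendor cache; the LSP only wants that copy to happen
  // after the lockfile and JSR manifest checksums have been verified.
  cache
    .read_file_bytes(&key, None, LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY)
    .ok()?
}
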
diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs
index c58a392d5..125307757 100644
--- a/cli/lsp/documents.rs
+++ b/cli/lsp/documents.rs
@@ -2,6 +2,7 @@
use super::cache::calculate_fs_version;
use super::cache::calculate_fs_version_at_path;
+use super::cache::LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY;
use super::jsr_resolver::JsrResolver;
use super::language_server::StateNpmSnapshot;
use super::text::LineIndex;
@@ -736,12 +737,7 @@ impl RedirectResolver {
) -> Option<ModuleSpecifier> {
if redirect_limit > 0 {
let cache_key = self.cache.cache_item_key(specifier).ok()?;
- let headers = self
- .cache
- .read_metadata(&cache_key)
- .ok()
- .flatten()
- .map(|m| m.headers)?;
+ let headers = self.cache.read_headers(&cache_key).ok().flatten()?;
if let Some(location) = headers.get("location") {
let redirect =
deno_core::resolve_import(location, specifier.as_str()).ok()?;
@@ -822,12 +818,14 @@ impl FileSystemDocuments {
} else {
let fs_version = calculate_fs_version(cache, specifier)?;
let cache_key = cache.cache_item_key(specifier).ok()?;
- let bytes = cache.read_file_bytes(&cache_key).ok()??;
- let specifier_metadata = cache.read_metadata(&cache_key).ok()??;
+ let bytes = cache
+ .read_file_bytes(&cache_key, None, LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY)
+ .ok()??;
+ let specifier_headers = cache.read_headers(&cache_key).ok()??;
let (_, maybe_charset) =
deno_graph::source::resolve_media_type_and_charset_from_headers(
specifier,
- Some(&specifier_metadata.headers),
+ Some(&specifier_headers),
);
let content = deno_graph::source::decode_owned_source(
specifier,
@@ -835,7 +833,7 @@ impl FileSystemDocuments {
maybe_charset,
)
.ok()?;
- let maybe_headers = Some(specifier_metadata.headers);
+ let maybe_headers = Some(specifier_headers);
Document::new(
specifier.clone(),
fs_version,
@@ -1826,8 +1824,7 @@ impl<'a> deno_graph::source::Loader for OpenDocumentsGraphLoader<'a> {
fn load(
&mut self,
specifier: &ModuleSpecifier,
- is_dynamic: bool,
- cache_setting: deno_graph::source::CacheSetting,
+ options: deno_graph::source::LoadOptions,
) -> deno_graph::source::LoadFuture {
let specifier = if self.unstable_sloppy_imports {
self
@@ -1839,9 +1836,7 @@ impl<'a> deno_graph::source::Loader for OpenDocumentsGraphLoader<'a> {
match self.load_from_docs(&specifier) {
Some(fut) => fut,
- None => self
- .inner_loader
- .load(&specifier, is_dynamic, cache_setting),
+ None => self.inner_loader.load(&specifier, options),
}
}
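(Editor's note: the `Loader` change above folds the old positional `is_dynamic`/`cache_setting` parameters into a single `deno_graph::source::LoadOptions` value. The sketch below, which is not from the commit, shows the forwarding pattern the LSP loader now relies on; the free function is hypothetical, only the trait method signature comes from the hunk.)

// Hypothetical forwarding helper, for illustration only.
fn load_through(
  inner: &mut dyn deno_graph::source::Loader,
  specifier: &ModuleSpecifier,
  options: deno_graph::source::LoadOptions,
) -> deno_graph::source::LoadFuture {
  // The options are passed along untouched, so wrappers such as
  // OpenDocumentsGraphLoader need no further signature churn when
  // LoadOptions grows a new field.
  inner.load(specifier, options)
}
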
diff --git a/cli/lsp/jsr_resolver.rs b/cli/lsp/jsr_resolver.rs
index 8243bb0f2..be7bdc0f5 100644
--- a/cli/lsp/jsr_resolver.rs
+++ b/cli/lsp/jsr_resolver.rs
@@ -15,6 +15,8 @@ use deno_semver::package::PackageReq;
use std::borrow::Cow;
use std::sync::Arc;
+use super::cache::LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY;
+
#[derive(Debug)]
pub struct JsrResolver {
nv_by_req: DashMap<PackageReq, Option<PackageNv>>,
@@ -111,7 +113,13 @@ fn read_cached_package_info(
) -> Option<JsrPackageInfo> {
let meta_url = jsr_url().join(&format!("{}/meta.json", name)).ok()?;
let meta_cache_item_key = cache.cache_item_key(&meta_url).ok()?;
- let meta_bytes = cache.read_file_bytes(&meta_cache_item_key).ok()??;
+ let meta_bytes = cache
+ .read_file_bytes(
+ &meta_cache_item_key,
+ None,
+ LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY,
+ )
+ .ok()??;
serde_json::from_slice::<JsrPackageInfo>(&meta_bytes).ok()
}
@@ -123,12 +131,19 @@ fn read_cached_package_version_info(
.join(&format!("{}/{}_meta.json", &nv.name, &nv.version))
.ok()?;
let meta_cache_item_key = cache.cache_item_key(&meta_url).ok()?;
- let meta_bytes = cache.read_file_bytes(&meta_cache_item_key).ok()??;
+ let meta_bytes = cache
+ .read_file_bytes(
+ &meta_cache_item_key,
+ None,
+ LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY,
+ )
+ .ok()??;
// This is a roundabout way of deserializing `JsrPackageVersionInfo`,
// because we only want the `exports` field and `module_graph` is large.
let mut info =
serde_json::from_slice::<serde_json::Value>(&meta_bytes).ok()?;
Some(JsrPackageVersionInfo {
+ manifest: Default::default(), // not used by the LSP (only caching checks this in deno_graph)
exports: info.as_object_mut()?.remove("exports")?,
module_graph: None,
})
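(Editor's note: the "roundabout" deserialization mentioned in the hunk above boils down to parsing into a generic `serde_json::Value` and pulling out only the `exports` field, so the large module-graph portion of the version metadata is never mapped onto a typed struct. A standalone sketch of the same technique, with a hypothetical function name:)

use serde_json::Value;

// Hypothetical helper, for illustration only.
fn exports_only(meta_bytes: &[u8]) -> Option<Value> {
  let mut info: Value = serde_json::from_slice(meta_bytes).ok()?;
  // `remove` takes ownership of just this field; the rest of the document
  // is dropped without ever being deserialized into typed structs.
  info.as_object_mut()?.remove("exports")
}
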
diff --git a/cli/lsp/registries.rs b/cli/lsp/registries.rs
index f4a64c7ee..2b0cae7d2 100644
--- a/cli/lsp/registries.rs
+++ b/cli/lsp/registries.rs
@@ -515,6 +515,7 @@ impl ModuleRegistry {
permissions: PermissionsContainer::allow_all(),
maybe_accept: Some("application/vnd.deno.reg.v2+json, application/vnd.deno.reg.v1+json;q=0.9, application/json;q=0.8"),
maybe_cache_setting: None,
+ maybe_checksum: None,
})
.await;
// if there is an error fetching, we will cache an empty file, so that