summary refs log tree commit diff
diff options
context:
space:
mode:
author Nayeem Rahman <nayeemrmn99@gmail.com> 2023-03-26 10:24:10 +0100
committer GitHub <noreply@github.com> 2023-03-26 12:24:10 +0300
commit b4c61c146a50dea0c4a53d8d505a4308ea7da279 (patch)
tree 230268db099dee2f709b6a0a5dee506a04e24096
parent 8b596cbae1392f17c9642c085b006bed6692dcdb (diff)
fix(cli): don't store blob and data urls in the module cache (#18261)
-rw-r--r-- cli/file_fetcher.rs                       82
-rw-r--r-- cli/proc_state.rs                          1
-rw-r--r-- cli/tests/integration/watcher_tests.rs    40
-rw-r--r-- cli/tools/doc.rs                           2
-rw-r--r-- cli/tools/run.rs                           2
-rw-r--r-- cli/tools/test.rs                          1
-rw-r--r-- ext/web/blob.rs                            5
7 files changed, 49 insertions, 84 deletions
diff --git a/cli/file_fetcher.rs b/cli/file_fetcher.rs
index 75c2c608f..677fa393b 100644
--- a/cli/file_fetcher.rs
+++ b/cli/file_fetcher.rs
@@ -50,10 +50,6 @@ pub const SUPPORTED_SCHEMES: [&str; 5] =
/// A structure representing a source file.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct File {
- /// The path to the local version of the source file. For local files this
- /// will be the direct path to that file. For remote files, it will be the
- /// path to the file in the HTTP cache.
- pub local: PathBuf,
/// For remote files, if there was an `X-TypeScript-Type` header, the parsed
/// out value of that header.
pub maybe_types: Option<String>,
@@ -90,13 +86,12 @@ fn fetch_local(specifier: &ModuleSpecifier) -> Result<File, AnyError> {
let local = specifier.to_file_path().map_err(|_| {
uri_error(format!("Invalid file path.\n Specifier: {specifier}"))
})?;
- let bytes = fs::read(&local)?;
+ let bytes = fs::read(local)?;
let charset = text_encoding::detect_charset(&bytes).to_string();
let source = get_source_from_bytes(bytes, Some(charset))?;
let media_type = MediaType::from_specifier(specifier);
Ok(File {
- local,
maybe_types: None,
media_type,
source: source.into(),
@@ -218,13 +213,6 @@ impl FileFetcher {
bytes: Vec<u8>,
headers: &HashMap<String, String>,
) -> Result<File, AnyError> {
- let local =
- self
- .http_cache
- .get_cache_filename(specifier)
- .ok_or_else(|| {
- generic_error("Cannot convert specifier to cached filename.")
- })?;
let maybe_content_type = headers.get("content-type");
let (media_type, maybe_charset) =
map_content_type(specifier, maybe_content_type);
@@ -238,7 +226,6 @@ impl FileFetcher {
};
Ok(File {
- local,
maybe_types,
media_type,
source: source.into(),
@@ -290,39 +277,12 @@ impl FileFetcher {
specifier: &ModuleSpecifier,
) -> Result<File, AnyError> {
debug!("FileFetcher::fetch_data_url() - specifier: {}", specifier);
- match self.fetch_cached(specifier, 0) {
- Ok(Some(file)) => return Ok(file),
- Ok(None) => {}
- Err(err) => return Err(err),
- }
-
- if self.cache_setting == CacheSetting::Only {
- return Err(custom_error(
- "NotCached",
- format!(
- "Specifier not found in cache: \"{specifier}\", --cached-only is specified."
- ),
- ));
- }
-
let (source, content_type) = get_source_from_data_url(specifier)?;
let (media_type, _) = map_content_type(specifier, Some(&content_type));
-
- let local =
- self
- .http_cache
- .get_cache_filename(specifier)
- .ok_or_else(|| {
- generic_error("Cannot convert specifier to cached filename.")
- })?;
let mut headers = HashMap::new();
headers.insert("content-type".to_string(), content_type);
- self
- .http_cache
- .set(specifier, headers.clone(), source.as_bytes())?;
Ok(File {
- local,
maybe_types: None,
media_type,
source: source.into(),
@@ -337,21 +297,6 @@ impl FileFetcher {
specifier: &ModuleSpecifier,
) -> Result<File, AnyError> {
debug!("FileFetcher::fetch_blob_url() - specifier: {}", specifier);
- match self.fetch_cached(specifier, 0) {
- Ok(Some(file)) => return Ok(file),
- Ok(None) => {}
- Err(err) => return Err(err),
- }
-
- if self.cache_setting == CacheSetting::Only {
- return Err(custom_error(
- "NotCached",
- format!(
- "Specifier not found in cache: \"{specifier}\", --cached-only is specified."
- ),
- ));
- }
-
let blob = {
let blob_store = self.blob_store.borrow();
blob_store
@@ -370,22 +315,10 @@ impl FileFetcher {
let (media_type, maybe_charset) =
map_content_type(specifier, Some(&content_type));
let source = get_source_from_bytes(bytes, maybe_charset)?;
-
- let local =
- self
- .http_cache
- .get_cache_filename(specifier)
- .ok_or_else(|| {
- generic_error("Cannot convert specifier to cached filename.")
- })?;
let mut headers = HashMap::new();
headers.insert("content-type".to_string(), content_type);
- self
- .http_cache
- .set(specifier, headers.clone(), source.as_bytes())?;
Ok(File {
- local,
maybe_types: None,
media_type,
source: source.into(),
@@ -562,17 +495,9 @@ impl FileFetcher {
// disk changing affecting things like workers and dynamic imports.
fetch_local(specifier)
} else if scheme == "data" {
- let result = self.fetch_data_url(specifier);
- if let Ok(file) = &result {
- self.cache.insert(specifier.clone(), file.clone());
- }
- result
+ self.fetch_data_url(specifier)
} else if scheme == "blob" {
- let result = self.fetch_blob_url(specifier).await;
- if let Ok(file) = &result {
- self.cache.insert(specifier.clone(), file.clone());
- }
- result
+ self.fetch_blob_url(specifier).await
} else if !self.allow_remote {
Err(custom_error(
"NoRemote",
@@ -1037,7 +962,6 @@ mod tests {
ModuleSpecifier::from_file_path(local.as_os_str().to_str().unwrap())
.unwrap();
let file = File {
- local,
maybe_types: None,
media_type: MediaType::TypeScript,
source: "some source code".into(),
diff --git a/cli/proc_state.rs b/cli/proc_state.rs
index 45f3bed5f..529b66070 100644
--- a/cli/proc_state.rs
+++ b/cli/proc_state.rs
@@ -136,6 +136,7 @@ impl ProcState {
/// Reset all runtime state to its default. This should be used on file
/// watcher restarts.
pub fn reset_for_file_watcher(&mut self) {
+ self.blob_store.clear();
self.0 = Arc::new(Inner {
dir: self.dir.clone(),
options: self.options.clone(),
diff --git a/cli/tests/integration/watcher_tests.rs b/cli/tests/integration/watcher_tests.rs
index e50ac04e7..44907b912 100644
--- a/cli/tests/integration/watcher_tests.rs
+++ b/cli/tests/integration/watcher_tests.rs
@@ -1121,6 +1121,46 @@ fn test_watch_unload_handler_error_on_drop() {
check_alive_then_kill(child);
}
+#[test]
+fn run_watch_blob_urls_reset() {
+ let _g = util::http_server();
+ let t = TempDir::new();
+ let file_to_watch = t.path().join("file_to_watch.js");
+ let file_content = r#"
+ const prevUrl = localStorage.getItem("url");
+ if (prevUrl == null) {
+ console.log("first run, storing blob url");
+ const url = URL.createObjectURL(
+ new Blob(["export {}"], { type: "application/javascript" }),
+ );
+ await import(url); // this shouldn't insert into the fs module cache
+ localStorage.setItem("url", url);
+ } else {
+ await import(prevUrl)
+ .then(() => console.log("importing old blob url incorrectly works"))
+ .catch(() => console.log("importing old blob url correctly failed"));
+ }
+ "#;
+ write(&file_to_watch, file_content).unwrap();
+ let mut child = util::deno_cmd()
+ .current_dir(util::testdata_path())
+ .arg("run")
+ .arg("--watch")
+ .arg(&file_to_watch)
+ .env("NO_COLOR", "1")
+ .stdout(std::process::Stdio::piped())
+ .stderr(std::process::Stdio::piped())
+ .spawn()
+ .unwrap();
+ let (mut stdout_lines, mut stderr_lines) = child_lines(&mut child);
+ wait_contains("first run, storing blob url", &mut stdout_lines);
+ wait_contains("finished", &mut stderr_lines);
+ write(&file_to_watch, file_content).unwrap();
+ wait_contains("importing old blob url correctly failed", &mut stdout_lines);
+ wait_contains("finished", &mut stderr_lines);
+ check_alive_then_kill(child);
+}
+
// Regression test for https://github.com/denoland/deno/issues/15465.
#[test]
fn run_watch_reload_once() {
diff --git a/cli/tools/doc.rs b/cli/tools/doc.rs
index 38553e9ff..28641677b 100644
--- a/cli/tools/doc.rs
+++ b/cli/tools/doc.rs
@@ -17,7 +17,6 @@ use deno_core::resolve_path;
use deno_core::resolve_url_or_path;
use deno_doc as doc;
use deno_graph::ModuleSpecifier;
-use std::path::PathBuf;
pub async fn print_docs(
flags: Flags,
@@ -69,7 +68,6 @@ pub async fn print_docs(
let root_specifier =
resolve_path("./$deno$doc.ts", ps.options.initial_cwd()).unwrap();
let root = File {
- local: PathBuf::from("./$deno$doc.ts"),
maybe_types: None,
media_type: MediaType::TypeScript,
source: format!("export * from \"{module_specifier}\";").into(),
diff --git a/cli/tools/run.rs b/cli/tools/run.rs
index 007e0fb2a..141be0810 100644
--- a/cli/tools/run.rs
+++ b/cli/tools/run.rs
@@ -72,7 +72,6 @@ pub async fn run_from_stdin(flags: Flags) -> Result<i32, AnyError> {
std::io::stdin().read_to_end(&mut source)?;
// Create a dummy source file.
let source_file = File {
- local: main_module.clone().to_file_path().unwrap(),
maybe_types: None,
media_type: MediaType::TypeScript,
source: String::from_utf8(source)?.into(),
@@ -144,7 +143,6 @@ pub async fn eval_command(
.into_bytes();
let file = File {
- local: main_module.clone().to_file_path().unwrap(),
maybe_types: None,
media_type: MediaType::Unknown,
source: String::from_utf8(source_code)?.into(),
diff --git a/cli/tools/test.rs b/cli/tools/test.rs
index 28364050e..02d91e2a4 100644
--- a/cli/tools/test.rs
+++ b/cli/tools/test.rs
@@ -1000,7 +1000,6 @@ fn extract_files_from_regex_blocks(
.unwrap_or(file_specifier);
Some(File {
- local: file_specifier.to_file_path().unwrap(),
maybe_types: None,
media_type: file_media_type,
source: file_source.into(),
diff --git a/ext/web/blob.rs b/ext/web/blob.rs
index 7796c18af..faa394729 100644
--- a/ext/web/blob.rs
+++ b/ext/web/blob.rs
@@ -79,6 +79,11 @@ impl BlobStore {
let mut blob_store = self.object_urls.lock();
blob_store.remove(url);
}
+
+ pub fn clear(&self) {
+ self.parts.lock().clear();
+ self.object_urls.lock().clear();
+ }
}
#[derive(Debug)]