Diffstat (limited to 'cli')
-rw-r--r--  cli/file_fetcher.rs                    | 81
-rw-r--r--  cli/lsp/registries.rs                  |  6
-rw-r--r--  cli/main.rs                            |  4
-rw-r--r--  cli/program_state.rs                   | 10
-rw-r--r--  cli/specifier_handler.rs               |  4
-rw-r--r--  cli/standalone.rs                      |  6
-rw-r--r--  cli/tests/blob_gc_finalization.js      | 11
-rw-r--r--  cli/tests/blob_gc_finalization.js.out  |  1
-rw-r--r--  cli/tests/integration/run_tests.rs     |  6
-rw-r--r--  cli/tests/unit/fetch_test.ts           |  4
-rw-r--r--  cli/tests/unit/response_test.ts        |  3
11 files changed, 82 insertions, 54 deletions
diff --git a/cli/file_fetcher.rs b/cli/file_fetcher.rs
index bd16cb1e1..a326d130e 100644
--- a/cli/file_fetcher.rs
+++ b/cli/file_fetcher.rs
@@ -19,7 +19,7 @@ use deno_core::futures;
use deno_core::futures::future::FutureExt;
use deno_core::ModuleSpecifier;
use deno_runtime::deno_fetch::reqwest;
-use deno_runtime::deno_web::BlobUrlStore;
+use deno_runtime::deno_web::BlobStore;
use deno_runtime::permissions::Permissions;
use log::debug;
use log::info;
@@ -212,7 +212,7 @@ pub struct FileFetcher {
cache_setting: CacheSetting,
http_cache: HttpCache,
http_client: reqwest::Client,
- blob_url_store: BlobUrlStore,
+ blob_store: BlobStore,
}
impl FileFetcher {
@@ -221,7 +221,7 @@ impl FileFetcher {
cache_setting: CacheSetting,
allow_remote: bool,
ca_data: Option<Vec<u8>>,
- blob_url_store: BlobUrlStore,
+ blob_store: BlobStore,
) -> Result<Self, AnyError> {
Ok(Self {
auth_tokens: AuthTokens::new(env::var(DENO_AUTH_TOKENS).ok()),
@@ -230,7 +230,7 @@ impl FileFetcher {
cache_setting,
http_cache,
http_client: create_http_client(get_user_agent(), ca_data)?,
- blob_url_store,
+ blob_store,
})
}
@@ -360,7 +360,7 @@ impl FileFetcher {
}
/// Get a blob URL.
- fn fetch_blob_url(
+ async fn fetch_blob_url(
&self,
specifier: &ModuleSpecifier,
) -> Result<File, AnyError> {
@@ -381,20 +381,24 @@ impl FileFetcher {
));
}
- let blob_url_storage = self.blob_url_store.borrow();
- let blob = blob_url_storage.get(specifier.clone())?.ok_or_else(|| {
- custom_error(
- "NotFound",
- format!("Blob URL not found: \"{}\".", specifier),
- )
- })?;
+ let blob = {
+ let blob_store = self.blob_store.borrow();
+ blob_store
+ .get_object_url(specifier.clone())?
+ .ok_or_else(|| {
+ custom_error(
+ "NotFound",
+ format!("Blob URL not found: \"{}\".", specifier),
+ )
+ })?
+ };
- let content_type = blob.media_type;
+ let content_type = blob.media_type.clone();
+ let bytes = blob.read_all().await?;
let (media_type, maybe_charset) =
map_content_type(specifier, Some(content_type.clone()));
- let source =
- strip_shebang(get_source_from_bytes(blob.data, maybe_charset)?);
+ let source = strip_shebang(get_source_from_bytes(bytes, maybe_charset)?);
let local =
self
@@ -525,7 +529,7 @@ impl FileFetcher {
}
result
} else if scheme == "blob" {
- let result = self.fetch_blob_url(specifier);
+ let result = self.fetch_blob_url(specifier).await;
if let Ok(file) = &result {
self.cache.insert(specifier.clone(), file.clone());
}
@@ -580,6 +584,7 @@ mod tests {
use deno_core::resolve_url;
use deno_core::resolve_url_or_path;
use deno_runtime::deno_web::Blob;
+ use deno_runtime::deno_web::InMemoryBlobPart;
use std::rc::Rc;
use tempfile::TempDir;
@@ -588,28 +593,28 @@ mod tests {
maybe_temp_dir: Option<Rc<TempDir>>,
) -> (FileFetcher, Rc<TempDir>) {
let (file_fetcher, temp_dir, _) =
- setup_with_blob_url_store(cache_setting, maybe_temp_dir);
+ setup_with_blob_store(cache_setting, maybe_temp_dir);
(file_fetcher, temp_dir)
}
- fn setup_with_blob_url_store(
+ fn setup_with_blob_store(
cache_setting: CacheSetting,
maybe_temp_dir: Option<Rc<TempDir>>,
- ) -> (FileFetcher, Rc<TempDir>, BlobUrlStore) {
+ ) -> (FileFetcher, Rc<TempDir>, BlobStore) {
let temp_dir = maybe_temp_dir.unwrap_or_else(|| {
Rc::new(TempDir::new().expect("failed to create temp directory"))
});
let location = temp_dir.path().join("deps");
- let blob_url_store = BlobUrlStore::default();
+ let blob_store = BlobStore::default();
let file_fetcher = FileFetcher::new(
HttpCache::new(&location),
cache_setting,
true,
None,
- blob_url_store.clone(),
+ blob_store.clone(),
)
.expect("setup failed");
- (file_fetcher, temp_dir, blob_url_store)
+ (file_fetcher, temp_dir, blob_store)
}
macro_rules! file_url {
@@ -948,16 +953,18 @@ mod tests {
#[tokio::test]
async fn test_fetch_blob_url() {
- let (file_fetcher, _, blob_url_store) =
- setup_with_blob_url_store(CacheSetting::Use, None);
+ let (file_fetcher, _, blob_store) =
+ setup_with_blob_store(CacheSetting::Use, None);
+
+ let bytes =
+ "export const a = \"a\";\n\nexport enum A {\n A,\n B,\n C,\n}\n"
+ .as_bytes()
+ .to_vec();
- let specifier = blob_url_store.insert(
+ let specifier = blob_store.insert_object_url(
Blob {
- data:
- "export const a = \"a\";\n\nexport enum A {\n A,\n B,\n C,\n}\n"
- .as_bytes()
- .to_vec(),
media_type: "application/typescript".to_string(),
+ parts: vec![Arc::new(Box::new(InMemoryBlobPart::from(bytes)))],
},
None,
);
@@ -1049,7 +1056,7 @@ mod tests {
CacheSetting::ReloadAll,
true,
None,
- BlobUrlStore::default(),
+ BlobStore::default(),
)
.expect("setup failed");
let result = file_fetcher
@@ -1076,7 +1083,7 @@ mod tests {
CacheSetting::Use,
true,
None,
- BlobUrlStore::default(),
+ BlobStore::default(),
)
.expect("could not create file fetcher");
let specifier =
@@ -1104,7 +1111,7 @@ mod tests {
CacheSetting::Use,
true,
None,
- BlobUrlStore::default(),
+ BlobStore::default(),
)
.expect("could not create file fetcher");
let result = file_fetcher_02
@@ -1265,7 +1272,7 @@ mod tests {
CacheSetting::Use,
true,
None,
- BlobUrlStore::default(),
+ BlobStore::default(),
)
.expect("could not create file fetcher");
let specifier =
@@ -1296,7 +1303,7 @@ mod tests {
CacheSetting::Use,
true,
None,
- BlobUrlStore::default(),
+ BlobStore::default(),
)
.expect("could not create file fetcher");
let result = file_fetcher_02
@@ -1406,7 +1413,7 @@ mod tests {
CacheSetting::Use,
false,
None,
- BlobUrlStore::default(),
+ BlobStore::default(),
)
.expect("could not create file fetcher");
let specifier =
@@ -1433,7 +1440,7 @@ mod tests {
CacheSetting::Only,
true,
None,
- BlobUrlStore::default(),
+ BlobStore::default(),
)
.expect("could not create file fetcher");
let file_fetcher_02 = FileFetcher::new(
@@ -1441,7 +1448,7 @@ mod tests {
CacheSetting::Use,
true,
None,
- BlobUrlStore::default(),
+ BlobStore::default(),
)
.expect("could not create file fetcher");
let specifier =
diff --git a/cli/lsp/registries.rs b/cli/lsp/registries.rs
index 83b322eab..96c24b43f 100644
--- a/cli/lsp/registries.rs
+++ b/cli/lsp/registries.rs
@@ -26,7 +26,7 @@ use deno_core::serde_json::json;
use deno_core::url::Position;
use deno_core::url::Url;
use deno_core::ModuleSpecifier;
-use deno_runtime::deno_web::BlobUrlStore;
+use deno_runtime::deno_web::BlobStore;
use deno_runtime::permissions::Permissions;
use log::error;
use lspower::lsp;
@@ -264,7 +264,7 @@ impl Default for ModuleRegistry {
cache_setting,
true,
None,
- BlobUrlStore::default(),
+ BlobStore::default(),
)
.unwrap();
@@ -283,7 +283,7 @@ impl ModuleRegistry {
CacheSetting::Use,
true,
None,
- BlobUrlStore::default(),
+ BlobStore::default(),
)
.context("Error creating file fetcher in module registry.")
.unwrap();
diff --git a/cli/main.rs b/cli/main.rs
index f1cf67ac4..381ba7214 100644
--- a/cli/main.rs
+++ b/cli/main.rs
@@ -121,7 +121,7 @@ fn create_web_worker_callback(
ts_version: version::TYPESCRIPT.to_string(),
no_color: !colors::use_color(),
get_error_class_fn: Some(&crate::errors::get_error_class_name),
- blob_url_store: program_state.blob_url_store.clone(),
+ blob_store: program_state.blob_store.clone(),
broadcast_channel: program_state.broadcast_channel.clone(),
};
@@ -207,7 +207,7 @@ pub fn create_main_worker(
.join("location_data")
.join(checksum::gen(&[loc.to_string().as_bytes()]))
}),
- blob_url_store: program_state.blob_url_store.clone(),
+ blob_store: program_state.blob_store.clone(),
broadcast_channel: program_state.broadcast_channel.clone(),
};
diff --git a/cli/program_state.rs b/cli/program_state.rs
index 3d4d67f53..becc8faa3 100644
--- a/cli/program_state.rs
+++ b/cli/program_state.rs
@@ -16,7 +16,7 @@ use crate::source_maps::SourceMapGetter;
use crate::specifier_handler::FetchHandler;
use crate::version;
use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel;
-use deno_runtime::deno_web::BlobUrlStore;
+use deno_runtime::deno_web::BlobStore;
use deno_runtime::inspector_server::InspectorServer;
use deno_runtime::permissions::Permissions;
@@ -53,7 +53,7 @@ pub struct ProgramState {
pub maybe_import_map: Option<ImportMap>,
pub maybe_inspector_server: Option<Arc<InspectorServer>>,
pub ca_data: Option<Vec<u8>>,
- pub blob_url_store: BlobUrlStore,
+ pub blob_store: BlobStore,
pub broadcast_channel: InMemoryBroadcastChannel,
}
@@ -79,7 +79,7 @@ impl ProgramState {
CacheSetting::Use
};
- let blob_url_store = BlobUrlStore::default();
+ let blob_store = BlobStore::default();
let broadcast_channel = InMemoryBroadcastChannel::default();
let file_fetcher = FileFetcher::new(
@@ -87,7 +87,7 @@ impl ProgramState {
cache_usage,
!flags.no_remote,
ca_data.clone(),
- blob_url_store.clone(),
+ blob_store.clone(),
)?;
let lockfile = if let Some(filename) = &flags.lock {
@@ -146,7 +146,7 @@ impl ProgramState {
maybe_import_map,
maybe_inspector_server,
ca_data,
- blob_url_store,
+ blob_store,
broadcast_channel,
};
Ok(Arc::new(program_state))
diff --git a/cli/specifier_handler.rs b/cli/specifier_handler.rs
index f1cda0b69..78687ba95 100644
--- a/cli/specifier_handler.rs
+++ b/cli/specifier_handler.rs
@@ -574,7 +574,7 @@ pub mod tests {
use crate::file_fetcher::CacheSetting;
use crate::http_cache::HttpCache;
use deno_core::resolve_url_or_path;
- use deno_runtime::deno_web::BlobUrlStore;
+ use deno_runtime::deno_web::BlobStore;
use tempfile::TempDir;
macro_rules! map (
@@ -599,7 +599,7 @@ pub mod tests {
CacheSetting::Use,
true,
None,
- BlobUrlStore::default(),
+ BlobStore::default(),
)
.expect("could not setup");
let disk_cache = deno_dir.gen_cache;
diff --git a/cli/standalone.rs b/cli/standalone.rs
index fa8c5a7ee..74e5de1ca 100644
--- a/cli/standalone.rs
+++ b/cli/standalone.rs
@@ -24,7 +24,7 @@ use deno_core::ModuleLoader;
use deno_core::ModuleSpecifier;
use deno_core::OpState;
use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel;
-use deno_runtime::deno_web::BlobUrlStore;
+use deno_runtime::deno_web::BlobStore;
use deno_runtime::permissions::Permissions;
use deno_runtime::permissions::PermissionsOptions;
use deno_runtime::worker::MainWorker;
@@ -213,7 +213,7 @@ pub async fn run(
let main_module = resolve_url(SPECIFIER)?;
let program_state = ProgramState::build(flags).await?;
let permissions = Permissions::from_options(&metadata.permissions);
- let blob_url_store = BlobUrlStore::default();
+ let blob_store = BlobStore::default();
let broadcast_channel = InMemoryBroadcastChannel::default();
let module_loader = Rc::new(EmbeddedModuleLoader(source_code));
let create_web_worker_cb = Arc::new(|_| {
@@ -246,7 +246,7 @@ pub async fn run(
get_error_class_fn: Some(&get_error_class_name),
location: metadata.location,
origin_storage_dir: None,
- blob_url_store,
+ blob_store,
broadcast_channel,
};
let mut worker =
diff --git a/cli/tests/blob_gc_finalization.js b/cli/tests/blob_gc_finalization.js
new file mode 100644
index 000000000..34c878513
--- /dev/null
+++ b/cli/tests/blob_gc_finalization.js
@@ -0,0 +1,11 @@
+// This test creates 1024 blobs of 128 MB each. This will only work if the blobs
+// and their backing data are GCed as expected.
+for (let i = 0; i < 1024; i++) {
+ // Create a 128MB byte array, and then a blob from it.
+ const buf = new Uint8Array(128 * 1024 * 1024);
+ new Blob([buf]);
+ // It is very important that there is a yield here, otherwise the finalizer
+ // for the blob is not called and the memory is not freed.
+ await new Promise((resolve) => setTimeout(resolve, 0));
+}
+console.log("GCed all blobs");
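
The comments in this new test describe the invariant it exercises: a blob's backing parts are reference-counted (the Arc-wrapped parts visible in the file_fetcher.rs hunk above), so once the JS Blob wrapper is finalized, the last handle to the 128 MB buffer is dropped and the memory is released. A tiny self-contained Rust illustration of that invariant, not deno_web's actual implementation:

  use std::sync::Arc;

  fn main() {
    // Stand-in for one 128 MB blob part held by the store.
    let part: Arc<Vec<u8>> = Arc::new(vec![0u8; 128 * 1024 * 1024]);

    // Stand-in for the handle owned by the JS-side Blob wrapper.
    let js_handle = Arc::clone(&part);
    assert_eq!(Arc::strong_count(&part), 2);

    // When the Blob is garbage-collected, its finalizer releases the handle...
    drop(js_handle);
    assert_eq!(Arc::strong_count(&part), 1);

    // ...and dropping the store's last reference frees the allocation.
    drop(part);
  }
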
diff --git a/cli/tests/blob_gc_finalization.js.out b/cli/tests/blob_gc_finalization.js.out
new file mode 100644
index 000000000..dcc4500f8
--- /dev/null
+++ b/cli/tests/blob_gc_finalization.js.out
@@ -0,0 +1 @@
+GCed all blobs
diff --git a/cli/tests/integration/run_tests.rs b/cli/tests/integration/run_tests.rs
index 025ee07e1..374cd473b 100644
--- a/cli/tests/integration/run_tests.rs
+++ b/cli/tests/integration/run_tests.rs
@@ -366,6 +366,12 @@ itest!(js_import_detect {
exit_code: 0,
});
+itest!(blob_gc_finalization {
+ args: "run blob_gc_finalization.js",
+ output: "blob_gc_finalization.js.out",
+ exit_code: 0,
+});
+
itest!(lock_write_requires_lock {
args: "run --lock-write some_file.ts",
output: "lock_write_requires_lock.out",
diff --git a/cli/tests/unit/fetch_test.ts b/cli/tests/unit/fetch_test.ts
index a84e18367..24a820dba 100644
--- a/cli/tests/unit/fetch_test.ts
+++ b/cli/tests/unit/fetch_test.ts
@@ -821,7 +821,9 @@ unitTest(function responseRedirect(): void {
unitTest(async function responseWithoutBody(): Promise<void> {
const response = new Response();
assertEquals(await response.arrayBuffer(), new ArrayBuffer(0));
- assertEquals(await response.blob(), new Blob([]));
+ const blob = await response.blob();
+ assertEquals(blob.size, 0);
+ assertEquals(await blob.arrayBuffer(), new ArrayBuffer(0));
assertEquals(await response.text(), "");
await assertThrowsAsync(async () => {
await response.json();
diff --git a/cli/tests/unit/response_test.ts b/cli/tests/unit/response_test.ts
index 9993eb925..7e444fd83 100644
--- a/cli/tests/unit/response_test.ts
+++ b/cli/tests/unit/response_test.ts
@@ -34,7 +34,8 @@ unitTest(async function responseBlob() {
assert(blobPromise instanceof Promise);
const blob = await blobPromise;
assert(blob instanceof Blob);
- assertEquals(blob, new Blob([new Uint8Array([1, 2, 3])]));
+ assertEquals(blob.size, 3);
+ assertEquals(await blob.arrayBuffer(), new Uint8Array([1, 2, 3]).buffer);
});
// TODO(lucacasonato): re-enable test once #10002 is fixed.