Diffstat (limited to 'ext'):
 -rw-r--r--  ext/web/14_compression.js  | 123
 -rw-r--r--  ext/web/Cargo.toml         |   1
 -rw-r--r--  ext/web/compression.rs     | 104
 -rw-r--r--  ext/web/lib.deno_web.d.ts  |  14
 -rw-r--r--  ext/web/lib.rs             |  14
 5 files changed, 256 insertions(+), 0 deletions(-)
diff --git a/ext/web/14_compression.js b/ext/web/14_compression.js
new file mode 100644
index 000000000..1a0f77e66
--- /dev/null
+++ b/ext/web/14_compression.js
@@ -0,0 +1,123 @@
+// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
+
+// @ts-check
+/// <reference path="../../core/lib.deno_core.d.ts" />
+/// <reference path="./internal.d.ts" />
+/// <reference path="./lib.deno_web.d.ts" />
+
+"use strict";
+
+((window) => {
+ const core = window.Deno.core;
+ const webidl = window.__bootstrap.webidl;
+ const { TransformStream } = window.__bootstrap.streams;
+
+ webidl.converters.CompressionFormat = webidl.createEnumConverter(
+ "CompressionFormat",
+ [
+ "deflate",
+ "gzip",
+ ],
+ );
+
+ class CompressionStream {
+ #transform;
+
+ constructor(format) {
+ const prefix = "Failed to construct 'CompressionStream'";
+ webidl.requiredArguments(arguments.length, 1, { prefix });
+ format = webidl.converters.CompressionFormat(format, {
+ prefix,
+ context: "Argument 1",
+ });
+
+ const rid = core.opSync("op_compression_new", format, false);
+
+ this.#transform = new TransformStream({
+ transform(chunk, controller) {
+ // TODO(lucacasonato): convert chunk to BufferSource
+ const output = core.opSync(
+ "op_compression_write",
+ rid,
+ chunk,
+ );
+ maybeEnqueue(controller, output);
+ },
+ flush(controller) {
+ const output = core.opSync("op_compression_finish", rid);
+ maybeEnqueue(controller, output);
+ },
+ });
+
+ this[webidl.brand] = webidl.brand;
+ }
+
+ get readable() {
+ webidl.assertBranded(this, CompressionStream);
+ return this.#transform.readable;
+ }
+
+ get writable() {
+ webidl.assertBranded(this, CompressionStream);
+ return this.#transform.writable;
+ }
+ }
+
+ webidl.configurePrototype(CompressionStream);
+
+ class DecompressionStream {
+ #transform;
+
+ constructor(format) {
+ const prefix = "Failed to construct 'DecompressionStream'";
+ webidl.requiredArguments(arguments.length, 1, { prefix });
+ format = webidl.converters.CompressionFormat(format, {
+ prefix,
+ context: "Argument 1",
+ });
+
+ const rid = core.opSync("op_compression_new", format, true);
+
+ this.#transform = new TransformStream({
+ transform(chunk, controller) {
+ // TODO(lucacasonato): convert chunk to BufferSource
+ const output = core.opSync(
+ "op_compression_write",
+ rid,
+ chunk,
+ );
+ maybeEnqueue(controller, output);
+ },
+ flush(controller) {
+ const output = core.opSync("op_compression_finish", rid);
+ maybeEnqueue(controller, output);
+ },
+ });
+
+ this[webidl.brand] = webidl.brand;
+ }
+
+ get readable() {
+ webidl.assertBranded(this, DecompressionStream);
+ return this.#transform.readable;
+ }
+
+ get writable() {
+ webidl.assertBranded(this, DecompressionStream);
+ return this.#transform.writable;
+ }
+ }
+
+ function maybeEnqueue(controller, output) {
+ if (output && output.byteLength > 0) {
+ controller.enqueue(output);
+ }
+ }
+
+ webidl.configurePrototype(DecompressionStream);
+
+ window.__bootstrap.compression = {
+ CompressionStream,
+ DecompressionStream,
+ };
+})(globalThis);
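
For context, a minimal usage sketch of the API this file wires up (not part of the commit; the input string and chunk handling are illustrative only):

    // Compress a short string with the new CompressionStream.
    const cs = new CompressionStream("gzip");
    const writer = cs.writable.getWriter();
    // Intentionally not awaited: these promises settle as the readable side is drained below.
    writer.write(new TextEncoder().encode("hello hello hello"));
    writer.close();

    const reader = cs.readable.getReader();
    const chunks = [];
    while (true) {
      const { value, done } = await reader.read();
      if (done) break;
      chunks.push(value); // each value is a Uint8Array of gzip output
    }
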
diff --git a/ext/web/Cargo.toml b/ext/web/Cargo.toml
index 31982d590..14523e206 100644
--- a/ext/web/Cargo.toml
+++ b/ext/web/Cargo.toml
@@ -18,6 +18,7 @@ async-trait = "0.1.51"
base64 = "0.13.0"
deno_core = { version = "0.116.0", path = "../../core" }
encoding_rs = "0.8.29"
+flate2 = "1"
serde = "1.0.129"
tokio = { version = "1.10.1", features = ["full"] }
uuid = { version = "0.8.2", features = ["v4", "serde"] }
diff --git a/ext/web/compression.rs b/ext/web/compression.rs
new file mode 100644
index 000000000..c84db7550
--- /dev/null
+++ b/ext/web/compression.rs
@@ -0,0 +1,104 @@
+// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
+
+use deno_core::error::AnyError;
+use deno_core::OpState;
+use deno_core::Resource;
+use deno_core::ResourceId;
+use deno_core::ZeroCopyBuf;
+use flate2::write::GzDecoder;
+use flate2::write::GzEncoder;
+use flate2::write::ZlibDecoder;
+use flate2::write::ZlibEncoder;
+use flate2::Compression;
+use std::borrow::Cow;
+use std::cell::RefCell;
+use std::io::Write;
+use std::rc::Rc;
+
+#[derive(Debug)]
+struct CompressionResource(RefCell<Inner>);
+
+#[derive(Debug)]
+enum Inner {
+ DeflateDecoder(ZlibDecoder<Vec<u8>>),
+ DeflateEncoder(ZlibEncoder<Vec<u8>>),
+ GzDecoder(GzDecoder<Vec<u8>>),
+ GzEncoder(GzEncoder<Vec<u8>>),
+}
+
+impl Resource for CompressionResource {
+ fn name(&self) -> Cow<str> {
+ "compression".into()
+ }
+}
+
+pub fn op_compression_new(
+ state: &mut OpState,
+ format: String,
+ is_decoder: bool,
+) -> Result<ResourceId, AnyError> {
+ let w = Vec::new();
+ let inner = match (format.as_str(), is_decoder) {
+ ("deflate", true) => Inner::DeflateDecoder(ZlibDecoder::new(w)),
+ ("deflate", false) => {
+ Inner::DeflateEncoder(ZlibEncoder::new(w, Compression::default()))
+ }
+ ("gzip", true) => Inner::GzDecoder(GzDecoder::new(w)),
+ ("gzip", false) => {
+ Inner::GzEncoder(GzEncoder::new(w, Compression::default()))
+ }
+ _ => unreachable!(),
+ };
+ let resource = CompressionResource(RefCell::new(inner));
+ Ok(state.resource_table.add(resource))
+}
+
+pub fn op_compression_write(
+ state: &mut OpState,
+ rid: ResourceId,
+ input: ZeroCopyBuf,
+) -> Result<ZeroCopyBuf, AnyError> {
+ let resource = state.resource_table.get::<CompressionResource>(rid)?;
+ let mut inner = resource.0.borrow_mut();
+ let out: Vec<u8> = match &mut *inner {
+ Inner::DeflateDecoder(d) => {
+ d.write_all(&input)?;
+ d.flush()?;
+ d.get_mut().drain(..)
+ }
+ Inner::DeflateEncoder(d) => {
+ d.write_all(&input)?;
+ d.flush()?;
+ d.get_mut().drain(..)
+ }
+ Inner::GzDecoder(d) => {
+ d.write_all(&input)?;
+ d.flush()?;
+ d.get_mut().drain(..)
+ }
+ Inner::GzEncoder(d) => {
+ d.write_all(&input)?;
+ d.flush()?;
+ d.get_mut().drain(..)
+ }
+ }
+ .collect();
+ Ok(out.into())
+}
+
+pub fn op_compression_finish(
+ state: &mut OpState,
+ rid: ResourceId,
+ _: (),
+) -> Result<ZeroCopyBuf, AnyError> {
+ let resource = state.resource_table.take::<CompressionResource>(rid)?;
+ let resource = Rc::try_unwrap(resource).unwrap();
+ let inner = resource.0.into_inner();
+ let out: Vec<u8> = match inner {
+ Inner::DeflateDecoder(d) => d.finish()?,
+ Inner::DeflateEncoder(d) => d.finish()?,
+ Inner::GzDecoder(d) => d.finish()?,
+ Inner::GzEncoder(d) => d.finish()?,
+ };
+ Ok(out.into())
+}
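
Taken together, the three ops form a simple create/write/finish protocol. The sketch below mirrors the call sequence 14_compression.js drives from its TransformStream callbacks; note that Deno.core ops are internal plumbing, not a public API, so this is only to illustrate the contract the Rust side implements:

    // Create a gzip encoder resource (third argument is `is_decoder`; false = compress).
    const rid = Deno.core.opSync("op_compression_new", "gzip", false);
    // Feed a chunk; the returned buffer holds whatever output the encoder has flushed so far.
    const partial = Deno.core.opSync("op_compression_write", rid, new TextEncoder().encode("abc"));
    // Finish the stream; this consumes the resource and returns the trailing bytes.
    const tail = Deno.core.opSync("op_compression_finish", rid);
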
diff --git a/ext/web/lib.deno_web.d.ts b/ext/web/lib.deno_web.d.ts
index ce20f08b1..1233b842f 100644
--- a/ext/web/lib.deno_web.d.ts
+++ b/ext/web/lib.deno_web.d.ts
@@ -809,3 +809,17 @@ declare function structuredClone(
value: any,
options?: StructuredSerializeOptions,
): any;
+
+declare class CompressionStream {
+ constructor(format: string);
+
+ readonly readable: ReadableStream<Uint8Array>;
+ readonly writable: WritableStream<Uint8Array>;
+}
+
+declare class DecompressionStream {
+ constructor(format: string);
+
+ readonly readable: ReadableStream<Uint8Array>;
+ readonly writable: WritableStream<Uint8Array>;
+}
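
These declarations pair with the implementation above. A round-trip sketch for the decompression side (`gzipBytes` is a hypothetical Uint8Array, e.g. the output collected in the CompressionStream example earlier):

    // Decompress gzip bytes back into text with DecompressionStream.
    const ds = new DecompressionStream("gzip");
    const writer = ds.writable.getWriter();
    writer.write(gzipBytes); // hypothetical input produced by any gzip compressor
    writer.close();

    const decoder = new TextDecoder();
    const reader = ds.readable.getReader();
    let text = "";
    while (true) {
      const { value, done } = await reader.read();
      if (done) break;
      text += decoder.decode(value, { stream: true });
    }
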
diff --git a/ext/web/lib.rs b/ext/web/lib.rs
index 281e55e06..b32deeb97 100644
--- a/ext/web/lib.rs
+++ b/ext/web/lib.rs
@@ -1,6 +1,7 @@
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
mod blob;
+mod compression;
mod message_port;
use deno_core::error::range_error;
@@ -66,6 +67,7 @@ pub fn init(blob_store: BlobStore, maybe_location: Option<Url>) -> Extension {
"11_blob_url.js",
"12_location.js",
"13_message_port.js",
+ "14_compression.js",
))
.ops(vec![
("op_base64_decode", op_sync(op_base64_decode)),
@@ -102,6 +104,18 @@ pub fn init(blob_store: BlobStore, maybe_location: Option<Url>) -> Extension {
"op_message_port_recv_message",
op_async(op_message_port_recv_message),
),
+ (
+ "op_compression_new",
+ op_sync(compression::op_compression_new),
+ ),
+ (
+ "op_compression_write",
+ op_sync(compression::op_compression_write),
+ ),
+ (
+ "op_compression_finish",
+ op_sync(compression::op_compression_finish),
+ ),
])
.state(move |state| {
state.put(blob_store.clone());