From a0285e2eb88f6254f6494b0ecd1878db3a3b2a58 Mon Sep 17 00:00:00 2001 From: Ryan Dahl Date: Wed, 11 Aug 2021 12:27:05 +0200 Subject: Rename extensions/ directory to ext/ (#11643) --- ext/broadcast_channel/01_broadcast_channel.js | 186 + ext/broadcast_channel/Cargo.toml | 20 + ext/broadcast_channel/README.md | 5 + .../in_memory_broadcast_channel.rs | 97 + .../lib.deno_broadcast_channel.d.ts | 55 + ext/broadcast_channel/lib.rs | 139 + ext/console/01_colors.js | 99 + ext/console/02_console.js | 2066 ++++++++ ext/console/Cargo.toml | 17 + ext/console/README.md | 5 + ext/console/internal.d.ts | 16 + ext/console/lib.deno_console.d.ts | 28 + ext/console/lib.rs | 19 + ext/crypto/00_crypto.js | 1013 ++++ ext/crypto/01_webidl.js | 188 + ext/crypto/Cargo.toml | 28 + ext/crypto/README.md | 5 + ext/crypto/key.rs | 117 + ext/crypto/lib.deno_crypto.d.ts | 155 + ext/crypto/lib.rs | 558 +++ ext/fetch/01_fetch_util.js | 22 + ext/fetch/20_headers.js | 479 ++ ext/fetch/21_formdata.js | 507 ++ ext/fetch/22_body.js | 403 ++ ext/fetch/22_http_client.js | 40 + ext/fetch/23_request.js | 484 ++ ext/fetch/23_response.js | 451 ++ ext/fetch/26_fetch.js | 542 +++ ext/fetch/Cargo.toml | 28 + ext/fetch/README.md | 5 + ext/fetch/internal.d.ts | 108 + ext/fetch/lib.deno_fetch.d.ts | 437 ++ ext/fetch/lib.rs | 567 +++ ext/ffi/00_ffi.js | 30 + ext/ffi/Cargo.toml | 20 + ext/ffi/README.md | 3 + ext/ffi/lib.rs | 397 ++ ext/http/01_http.js | 383 ++ ext/http/Cargo.toml | 25 + ext/http/README.md | 4 + ext/http/lib.deno_http.unstable.d.ts | 53 + ext/http/lib.rs | 684 +++ ext/net/01_net.js | 240 + ext/net/02_tls.js | 89 + ext/net/04_net_unstable.js | 49 + ext/net/Cargo.toml | 25 + ext/net/README.md | 30 + ext/net/io.rs | 232 + ext/net/lib.deno_net.d.ts | 150 + ext/net/lib.deno_net.unstable.d.ts | 258 + ext/net/lib.rs | 131 + ext/net/ops.rs | 795 +++ ext/net/ops_tls.rs | 1061 ++++ ext/net/ops_unix.rs | 180 + ext/net/resolve_addr.rs | 156 + ext/timers/01_timers.js | 595 +++ ext/timers/02_performance.js | 569 +++ ext/timers/Cargo.toml | 28 + ext/timers/README.md | 5 + ext/timers/benches/timers_ops.rs | 40 + ext/timers/lib.rs | 193 + ext/tls/Cargo.toml | 24 + ext/tls/README.md | 4 + ext/tls/lib.rs | 192 + ext/url/00_url.js | 623 +++ ext/url/Cargo.toml | 28 + ext/url/README.md | 5 + ext/url/benches/url_ops.rs | 27 + ext/url/internal.d.ts | 14 + ext/url/lib.deno_url.d.ts | 175 + ext/url/lib.rs | 173 + ext/web/00_infra.js | 264 + ext/web/01_dom_exception.js | 171 + ext/web/01_mimesniff.js | 211 + ext/web/02_event.js | 1294 +++++ ext/web/02_structured_clone.js | 85 + ext/web/03_abort_signal.js | 123 + ext/web/04_global_interfaces.js | 79 + ext/web/05_base64.js | 73 + ext/web/06_streams.js | 4473 +++++++++++++++++ ext/web/06_streams_types.d.ts | 55 + ext/web/08_text_encoding.js | 420 ++ ext/web/09_file.js | 569 +++ ext/web/10_filereader.js | 461 ++ ext/web/11_blob_url.js | 59 + ext/web/12_location.js | 409 ++ ext/web/13_message_port.js | 286 ++ ext/web/Cargo.toml | 26 + ext/web/README.md | 6 + ext/web/blob.rs | 265 + ext/web/internal.d.ts | 98 + ext/web/lib.deno_web.d.ts | 752 +++ ext/web/lib.rs | 390 ++ ext/web/message_port.rs | 217 + ext/webgpu/01_webgpu.js | 5140 ++++++++++++++++++++ ext/webgpu/02_idl_types.js | 1957 ++++++++ ext/webgpu/Cargo.toml | 21 + ext/webgpu/README.md | 35 + ext/webgpu/binding.rs | 335 ++ ext/webgpu/buffer.rs | 241 + ext/webgpu/bundle.rs | 454 ++ ext/webgpu/command_encoder.rs | 692 +++ ext/webgpu/compute_pass.rs | 356 ++ ext/webgpu/error.rs | 294 ++ ext/webgpu/lib.deno_webgpu.d.ts | 1131 +++++ ext/webgpu/lib.rs | 
928 ++++ ext/webgpu/pipeline.rs | 686 +++ ext/webgpu/queue.rs | 160 + ext/webgpu/render_pass.rs | 688 +++ ext/webgpu/sampler.rs | 120 + ext/webgpu/shader.rs | 70 + ext/webgpu/texture.rs | 244 + ext/webgpu/webgpu.idl | 1057 ++++ ext/webidl/00_webidl.js | 1079 ++++ ext/webidl/Cargo.toml | 17 + ext/webidl/README.md | 6 + ext/webidl/internal.d.ts | 341 ++ ext/webidl/lib.rs | 14 + ext/websocket/01_websocket.js | 538 ++ ext/websocket/02_websocketstream.js | 412 ++ ext/websocket/Cargo.toml | 24 + ext/websocket/README.md | 5 + ext/websocket/lib.deno_websocket.d.ts | 112 + ext/websocket/lib.rs | 516 ++ ext/webstorage/01_webstorage.js | 191 + ext/webstorage/Cargo.toml | 20 + ext/webstorage/README.md | 5 + ext/webstorage/lib.deno_webstorage.d.ts | 42 + ext/webstorage/lib.rs | 245 + 129 files changed, 45541 insertions(+) create mode 100644 ext/broadcast_channel/01_broadcast_channel.js create mode 100644 ext/broadcast_channel/Cargo.toml create mode 100644 ext/broadcast_channel/README.md create mode 100644 ext/broadcast_channel/in_memory_broadcast_channel.rs create mode 100644 ext/broadcast_channel/lib.deno_broadcast_channel.d.ts create mode 100644 ext/broadcast_channel/lib.rs create mode 100644 ext/console/01_colors.js create mode 100644 ext/console/02_console.js create mode 100644 ext/console/Cargo.toml create mode 100644 ext/console/README.md create mode 100644 ext/console/internal.d.ts create mode 100644 ext/console/lib.deno_console.d.ts create mode 100644 ext/console/lib.rs create mode 100644 ext/crypto/00_crypto.js create mode 100644 ext/crypto/01_webidl.js create mode 100644 ext/crypto/Cargo.toml create mode 100644 ext/crypto/README.md create mode 100644 ext/crypto/key.rs create mode 100644 ext/crypto/lib.deno_crypto.d.ts create mode 100644 ext/crypto/lib.rs create mode 100644 ext/fetch/01_fetch_util.js create mode 100644 ext/fetch/20_headers.js create mode 100644 ext/fetch/21_formdata.js create mode 100644 ext/fetch/22_body.js create mode 100644 ext/fetch/22_http_client.js create mode 100644 ext/fetch/23_request.js create mode 100644 ext/fetch/23_response.js create mode 100644 ext/fetch/26_fetch.js create mode 100644 ext/fetch/Cargo.toml create mode 100644 ext/fetch/README.md create mode 100644 ext/fetch/internal.d.ts create mode 100644 ext/fetch/lib.deno_fetch.d.ts create mode 100644 ext/fetch/lib.rs create mode 100644 ext/ffi/00_ffi.js create mode 100644 ext/ffi/Cargo.toml create mode 100644 ext/ffi/README.md create mode 100644 ext/ffi/lib.rs create mode 100644 ext/http/01_http.js create mode 100644 ext/http/Cargo.toml create mode 100644 ext/http/README.md create mode 100644 ext/http/lib.deno_http.unstable.d.ts create mode 100644 ext/http/lib.rs create mode 100644 ext/net/01_net.js create mode 100644 ext/net/02_tls.js create mode 100644 ext/net/04_net_unstable.js create mode 100644 ext/net/Cargo.toml create mode 100644 ext/net/README.md create mode 100644 ext/net/io.rs create mode 100644 ext/net/lib.deno_net.d.ts create mode 100644 ext/net/lib.deno_net.unstable.d.ts create mode 100644 ext/net/lib.rs create mode 100644 ext/net/ops.rs create mode 100644 ext/net/ops_tls.rs create mode 100644 ext/net/ops_unix.rs create mode 100644 ext/net/resolve_addr.rs create mode 100644 ext/timers/01_timers.js create mode 100644 ext/timers/02_performance.js create mode 100644 ext/timers/Cargo.toml create mode 100644 ext/timers/README.md create mode 100644 ext/timers/benches/timers_ops.rs create mode 100644 ext/timers/lib.rs create mode 100644 ext/tls/Cargo.toml create mode 100644 ext/tls/README.md create mode 
100644 ext/tls/lib.rs create mode 100644 ext/url/00_url.js create mode 100644 ext/url/Cargo.toml create mode 100644 ext/url/README.md create mode 100644 ext/url/benches/url_ops.rs create mode 100644 ext/url/internal.d.ts create mode 100644 ext/url/lib.deno_url.d.ts create mode 100644 ext/url/lib.rs create mode 100644 ext/web/00_infra.js create mode 100644 ext/web/01_dom_exception.js create mode 100644 ext/web/01_mimesniff.js create mode 100644 ext/web/02_event.js create mode 100644 ext/web/02_structured_clone.js create mode 100644 ext/web/03_abort_signal.js create mode 100644 ext/web/04_global_interfaces.js create mode 100644 ext/web/05_base64.js create mode 100644 ext/web/06_streams.js create mode 100644 ext/web/06_streams_types.d.ts create mode 100644 ext/web/08_text_encoding.js create mode 100644 ext/web/09_file.js create mode 100644 ext/web/10_filereader.js create mode 100644 ext/web/11_blob_url.js create mode 100644 ext/web/12_location.js create mode 100644 ext/web/13_message_port.js create mode 100644 ext/web/Cargo.toml create mode 100644 ext/web/README.md create mode 100644 ext/web/blob.rs create mode 100644 ext/web/internal.d.ts create mode 100644 ext/web/lib.deno_web.d.ts create mode 100644 ext/web/lib.rs create mode 100644 ext/web/message_port.rs create mode 100644 ext/webgpu/01_webgpu.js create mode 100644 ext/webgpu/02_idl_types.js create mode 100644 ext/webgpu/Cargo.toml create mode 100644 ext/webgpu/README.md create mode 100644 ext/webgpu/binding.rs create mode 100644 ext/webgpu/buffer.rs create mode 100644 ext/webgpu/bundle.rs create mode 100644 ext/webgpu/command_encoder.rs create mode 100644 ext/webgpu/compute_pass.rs create mode 100644 ext/webgpu/error.rs create mode 100644 ext/webgpu/lib.deno_webgpu.d.ts create mode 100644 ext/webgpu/lib.rs create mode 100644 ext/webgpu/pipeline.rs create mode 100644 ext/webgpu/queue.rs create mode 100644 ext/webgpu/render_pass.rs create mode 100644 ext/webgpu/sampler.rs create mode 100644 ext/webgpu/shader.rs create mode 100644 ext/webgpu/texture.rs create mode 100644 ext/webgpu/webgpu.idl create mode 100644 ext/webidl/00_webidl.js create mode 100644 ext/webidl/Cargo.toml create mode 100644 ext/webidl/README.md create mode 100644 ext/webidl/internal.d.ts create mode 100644 ext/webidl/lib.rs create mode 100644 ext/websocket/01_websocket.js create mode 100644 ext/websocket/02_websocketstream.js create mode 100644 ext/websocket/Cargo.toml create mode 100644 ext/websocket/README.md create mode 100644 ext/websocket/lib.deno_websocket.d.ts create mode 100644 ext/websocket/lib.rs create mode 100644 ext/webstorage/01_webstorage.js create mode 100644 ext/webstorage/Cargo.toml create mode 100644 ext/webstorage/README.md create mode 100644 ext/webstorage/lib.deno_webstorage.d.ts create mode 100644 ext/webstorage/lib.rs (limited to 'ext') diff --git a/ext/broadcast_channel/01_broadcast_channel.js b/ext/broadcast_channel/01_broadcast_channel.js new file mode 100644 index 000000000..42112c014 --- /dev/null +++ b/ext/broadcast_channel/01_broadcast_channel.js @@ -0,0 +1,186 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
+ +/// + +"use strict"; + +((window) => { + const core = window.Deno.core; + const webidl = window.__bootstrap.webidl; + const { setTarget } = window.__bootstrap.event; + const { DOMException } = window.__bootstrap.domException; + const { + ArrayPrototypeIndexOf, + ArrayPrototypeSplice, + ArrayPrototypePush, + Symbol, + Uint8Array, + ObjectDefineProperty, + Map, + MapPrototypeSet, + MapPrototypeGet, + FunctionPrototypeCall, + } = window.__bootstrap.primordials; + + const handlerSymbol = Symbol("eventHandlers"); + function makeWrappedHandler(handler) { + function wrappedHandler(...args) { + if (typeof wrappedHandler.handler !== "function") { + return; + } + return FunctionPrototypeCall(wrappedHandler.handler, this, ...args); + } + wrappedHandler.handler = handler; + return wrappedHandler; + } + // TODO(lucacasonato) reuse when we can reuse code between web crates + function defineEventHandler(emitter, name) { + // HTML specification section 8.1.5.1 + ObjectDefineProperty(emitter, `on${name}`, { + get() { + // TODO(bnoordhuis) The "BroadcastChannel should have an onmessage + // event" WPT test expects that .onmessage !== undefined. Returning + // null makes it pass but is perhaps not exactly in the spirit. + if (!this[handlerSymbol]) { + return null; + } + return MapPrototypeGet(this[handlerSymbol], name)?.handler ?? null; + }, + set(value) { + if (!this[handlerSymbol]) { + this[handlerSymbol] = new Map(); + } + let handlerWrapper = MapPrototypeGet(this[handlerSymbol], name); + if (handlerWrapper) { + handlerWrapper.handler = value; + } else { + handlerWrapper = makeWrappedHandler(value); + this.addEventListener(name, handlerWrapper); + } + MapPrototypeSet(this[handlerSymbol], name, handlerWrapper); + }, + configurable: true, + enumerable: true, + }); + } + + const _name = Symbol("[[name]]"); + const _closed = Symbol("[[closed]]"); + + const channels = []; + let rid = null; + + async function recv() { + while (channels.length > 0) { + const message = await core.opAsync("op_broadcast_recv", rid); + + if (message === null) { + break; + } + + const [name, data] = message; + dispatch(null, name, new Uint8Array(data)); + } + + core.close(rid); + rid = null; + } + + function dispatch(source, name, data) { + for (const channel of channels) { + if (channel === source) continue; // Don't self-send. + if (channel[_name] !== name) continue; + if (channel[_closed]) continue; + + const go = () => { + if (channel[_closed]) return; + const event = new MessageEvent("message", { + data: core.deserialize(data), // TODO(bnoordhuis) Cache immutables. + origin: "http://127.0.0.1", + }); + setTarget(event, channel); + channel.dispatchEvent(event); + }; + + defer(go); + } + } + + // Defer to avoid starving the event loop. Not using queueMicrotask() + // for that reason: it lets promises make forward progress but can + // still starve other parts of the event loop. 
+ function defer(go) { + setTimeout(go, 1); + } + + class BroadcastChannel extends EventTarget { + [_name]; + [_closed] = false; + + get name() { + return this[_name]; + } + + constructor(name) { + super(); + + const prefix = "Failed to construct 'BroadcastChannel'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + + this[_name] = webidl.converters["DOMString"](name, { + prefix, + context: "Argument 1", + }); + + this[webidl.brand] = webidl.brand; + + ArrayPrototypePush(channels, this); + + if (rid === null) { + // Create the rid immediately, otherwise there is a time window (and a + // race condition) where messages can get lost, because recv() is async. + rid = core.opSync("op_broadcast_subscribe"); + recv(); + } + } + + postMessage(message) { + webidl.assertBranded(this, BroadcastChannel); + + const prefix = "Failed to execute 'postMessage' on 'BroadcastChannel'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + + if (this[_closed]) { + throw new DOMException("Already closed", "InvalidStateError"); + } + + if (typeof message === "function" || typeof message === "symbol") { + throw new DOMException("Uncloneable value", "DataCloneError"); + } + + const data = core.serialize(message); + + // Send to other listeners in this VM. + dispatch(this, this[_name], new Uint8Array(data)); + + // Send to listeners in other VMs. + defer(() => core.opAsync("op_broadcast_send", [rid, this[_name]], data)); + } + + close() { + webidl.assertBranded(this, BroadcastChannel); + this[_closed] = true; + + const index = ArrayPrototypeIndexOf(channels, this); + if (index === -1) return; + + ArrayPrototypeSplice(channels, index, 1); + if (channels.length === 0) core.opSync("op_broadcast_unsubscribe", rid); + } + } + + defineEventHandler(BroadcastChannel.prototype, "message"); + defineEventHandler(BroadcastChannel.prototype, "messageerror"); + + window.__bootstrap.broadcastChannel = { BroadcastChannel }; +})(this); diff --git a/ext/broadcast_channel/Cargo.toml b/ext/broadcast_channel/Cargo.toml new file mode 100644 index 000000000..2f8b10bc7 --- /dev/null +++ b/ext/broadcast_channel/Cargo.toml @@ -0,0 +1,20 @@ +# Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +[package] +name = "deno_broadcast_channel" +version = "0.8.0" +authors = ["the Deno authors"] +edition = "2018" +license = "MIT" +readme = "README.md" +repository = "https://github.com/denoland/deno" +description = "Implementation of BroadcastChannel API for Deno" + +[lib] +path = "lib.rs" + +[dependencies] +async-trait = "0.1" +deno_core = { version = "0.96.0", path = "../../core" } +tokio = { version = "1.8.1", features = ["full"] } +uuid = { version = "0.8.2", features = ["v4"] } diff --git a/ext/broadcast_channel/README.md b/ext/broadcast_channel/README.md new file mode 100644 index 000000000..5b5034ef7 --- /dev/null +++ b/ext/broadcast_channel/README.md @@ -0,0 +1,5 @@ +# deno_broadcast_channel + +This crate implements the BroadcastChannel functions of Deno. + +Spec: https://html.spec.whatwg.org/multipage/web-messaging.html diff --git a/ext/broadcast_channel/in_memory_broadcast_channel.rs b/ext/broadcast_channel/in_memory_broadcast_channel.rs new file mode 100644 index 000000000..879a3dbd5 --- /dev/null +++ b/ext/broadcast_channel/in_memory_broadcast_channel.rs @@ -0,0 +1,97 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
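Before the Rust side of the extension, a minimal usage sketch of the BroadcastChannel class implemented in 01_broadcast_channel.js above (illustrative only, not part of the patch; the channel name and payload are made up, and the class is assumed to be exposed globally with --unstable, since the subscribe op below exits otherwise):

  // Two channels with the same name in one isolate reach each other through
  // dispatch(); cross-isolate delivery goes through the broadcast ops.
  const tx = new BroadcastChannel("build-status");
  const rx = new BroadcastChannel("build-status");

  rx.onmessage = (ev) => {
    console.log("got", ev.data); // a deserialized copy of the posted value
  };

  // postMessage() serializes the value, dispatches it locally, and defers an
  // op_broadcast_send call for listeners in other isolates.
  tx.postMessage({ job: 42, ok: true });

  // close() flips [[closed]] and unsubscribes once no channels remain.
  tx.close();
  rx.close();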
+
+use crate::BroadcastChannel;
+use async_trait::async_trait;
+use deno_core::error::AnyError;
+use deno_core::parking_lot::Mutex;
+use std::sync::Arc;
+use tokio::sync::broadcast;
+use tokio::sync::mpsc;
+use uuid::Uuid;
+
+#[derive(Clone)]
+pub struct InMemoryBroadcastChannel(Arc<Mutex<broadcast::Sender<Message>>>);
+
+pub struct InMemoryBroadcastChannelResource {
+  rx: tokio::sync::Mutex<(
+    broadcast::Receiver<Message>,
+    mpsc::UnboundedReceiver<()>,
+  )>,
+  cancel_tx: mpsc::UnboundedSender<()>,
+  uuid: Uuid,
+}
+
+#[derive(Clone, Debug)]
+struct Message {
+  name: Arc<String>,
+  data: Arc<Vec<u8>>,
+  uuid: Uuid,
+}
+
+impl Default for InMemoryBroadcastChannel {
+  fn default() -> Self {
+    let (tx, _) = broadcast::channel(256);
+    Self(Arc::new(Mutex::new(tx)))
+  }
+}
+
+#[async_trait]
+impl BroadcastChannel for InMemoryBroadcastChannel {
+  type Resource = InMemoryBroadcastChannelResource;
+
+  fn subscribe(&self) -> Result<Self::Resource, AnyError> {
+    let (cancel_tx, cancel_rx) = mpsc::unbounded_channel();
+    let broadcast_rx = self.0.lock().subscribe();
+    let rx = tokio::sync::Mutex::new((broadcast_rx, cancel_rx));
+    let uuid = Uuid::new_v4();
+    Ok(Self::Resource {
+      rx,
+      cancel_tx,
+      uuid,
+    })
+  }
+
+  fn unsubscribe(&self, resource: &Self::Resource) -> Result<(), AnyError> {
+    Ok(resource.cancel_tx.send(())?)
+  }
+
+  async fn send(
+    &self,
+    resource: &Self::Resource,
+    name: String,
+    data: Vec<u8>,
+  ) -> Result<(), AnyError> {
+    let name = Arc::new(name);
+    let data = Arc::new(data);
+    let uuid = resource.uuid;
+    self.0.lock().send(Message { name, data, uuid })?;
+    Ok(())
+  }
+
+  async fn recv(
+    &self,
+    resource: &Self::Resource,
+  ) -> Result<Option<crate::Message>, AnyError> {
+    let mut g = resource.rx.lock().await;
+    let (broadcast_rx, cancel_rx) = &mut *g;
+    loop {
+      let result = tokio::select! {
+        r = broadcast_rx.recv() => r,
+        _ = cancel_rx.recv() => return Ok(None),
+      };
+      use tokio::sync::broadcast::error::RecvError::*;
+      match result {
+        Err(Closed) => return Ok(None),
+        Err(Lagged(_)) => (), // Backlogged, messages dropped.
+        Ok(message) if message.uuid == resource.uuid => (), // Self-send.
+        Ok(message) => {
+          let name = String::clone(&message.name);
+          let data = Vec::clone(&message.data);
+          return Ok(Some((name, data)));
+        }
+      }
+    }
+  }
+}
+
+impl deno_core::Resource for InMemoryBroadcastChannelResource {}
diff --git a/ext/broadcast_channel/lib.deno_broadcast_channel.d.ts b/ext/broadcast_channel/lib.deno_broadcast_channel.d.ts
new file mode 100644
index 000000000..c8efef778
--- /dev/null
+++ b/ext/broadcast_channel/lib.deno_broadcast_channel.d.ts
@@ -0,0 +1,55 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+// deno-lint-ignore-file no-explicit-any
+
+///
+///
+
+interface BroadcastChannelEventMap {
+  "message": MessageEvent;
+  "messageerror": MessageEvent;
+}
+
+interface BroadcastChannel extends EventTarget {
+  /**
+   * Returns the channel name (as passed to the constructor).
+   */
+  readonly name: string;
+  onmessage: ((this: BroadcastChannel, ev: MessageEvent) => any) | null;
+  onmessageerror: ((this: BroadcastChannel, ev: MessageEvent) => any) | null;
+  /**
+   * Closes the BroadcastChannel object, opening it up to garbage collection.
+   */
+  close(): void;
+  /**
+   * Sends the given message to other BroadcastChannel objects set up for
+   * this channel. Messages can be structured objects, e.g. nested objects
+   * and arrays.
+   */
+  postMessage(message: any): void;
+  addEventListener<K extends keyof BroadcastChannelEventMap>(
+    type: K,
+    listener: (this: BroadcastChannel, ev: BroadcastChannelEventMap[K]) => any,
+    options?: boolean | AddEventListenerOptions,
+  ): void;
+  addEventListener(
+    type: string,
+    listener: EventListenerOrEventListenerObject,
+    options?: boolean | AddEventListenerOptions,
+  ): void;
+  removeEventListener<K extends keyof BroadcastChannelEventMap>(
+    type: K,
+    listener: (this: BroadcastChannel, ev: BroadcastChannelEventMap[K]) => any,
+    options?: boolean | EventListenerOptions,
+  ): void;
+  removeEventListener(
+    type: string,
+    listener: EventListenerOrEventListenerObject,
+    options?: boolean | EventListenerOptions,
+  ): void;
+}
+
+declare var BroadcastChannel: {
+  prototype: BroadcastChannel;
+  new (name: string): BroadcastChannel;
+};
diff --git a/ext/broadcast_channel/lib.rs b/ext/broadcast_channel/lib.rs
new file mode 100644
index 000000000..3f88db162
--- /dev/null
+++ b/ext/broadcast_channel/lib.rs
@@ -0,0 +1,139 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+mod in_memory_broadcast_channel;
+
+pub use in_memory_broadcast_channel::InMemoryBroadcastChannel;
+
+use async_trait::async_trait;
+use deno_core::error::bad_resource_id;
+use deno_core::error::AnyError;
+use deno_core::include_js_files;
+use deno_core::op_async;
+use deno_core::op_sync;
+use deno_core::Extension;
+use deno_core::OpState;
+use deno_core::Resource;
+use deno_core::ResourceId;
+use deno_core::ZeroCopyBuf;
+use std::cell::RefCell;
+use std::path::PathBuf;
+use std::rc::Rc;
+
+#[async_trait]
+pub trait BroadcastChannel: Clone {
+  type Resource: Resource;
+
+  fn subscribe(&self) -> Result<Self::Resource, AnyError>;
+
+  fn unsubscribe(&self, resource: &Self::Resource) -> Result<(), AnyError>;
+
+  async fn send(
+    &self,
+    resource: &Self::Resource,
+    name: String,
+    data: Vec<u8>,
+  ) -> Result<(), AnyError>;
+
+  async fn recv(
+    &self,
+    resource: &Self::Resource,
+  ) -> Result<Option<Message>, AnyError>;
+}
+
+pub type Message = (String, Vec<u8>);
+
+struct Unstable(bool); // --unstable
+
+pub fn op_broadcast_subscribe<BC: BroadcastChannel + 'static>(
+  state: &mut OpState,
+  _args: (),
+  _buf: (),
+) -> Result<ResourceId, AnyError> {
+  let unstable = state.borrow::<Unstable>().0;
+
+  if !unstable {
+    eprintln!(
+      "Unstable API 'BroadcastChannel'. The --unstable flag must be provided.",
+    );
+    std::process::exit(70);
+  }
+
+  let bc = state.borrow::<BC>();
+  let resource = bc.subscribe()?;
+  Ok(state.resource_table.add(resource))
+}
+
+pub fn op_broadcast_unsubscribe<BC: BroadcastChannel + 'static>(
+  state: &mut OpState,
+  rid: ResourceId,
+  _buf: (),
+) -> Result<(), AnyError> {
+  let resource = state
+    .resource_table
+    .get::<BC::Resource>(rid)
+    .ok_or_else(bad_resource_id)?;
+  let bc = state.borrow::<BC>();
+  bc.unsubscribe(&resource)
+}
+
+pub async fn op_broadcast_send<BC: BroadcastChannel + 'static>(
+  state: Rc<RefCell<OpState>>,
+  (rid, name): (ResourceId, String),
+  buf: ZeroCopyBuf,
+) -> Result<(), AnyError> {
+  let resource = state
+    .borrow()
+    .resource_table
+    .get::<BC::Resource>(rid)
+    .ok_or_else(bad_resource_id)?;
+  let bc = state.borrow().borrow::<BC>().clone();
+  bc.send(&resource, name, buf.to_vec()).await
+}
+
+pub async fn op_broadcast_recv<BC: BroadcastChannel + 'static>(
+  state: Rc<RefCell<OpState>>,
+  rid: ResourceId,
+  _buf: (),
+) -> Result<Option<Message>, AnyError> {
+  let resource = state
+    .borrow()
+    .resource_table
+    .get::<BC::Resource>(rid)
+    .ok_or_else(bad_resource_id)?;
+  let bc = state.borrow().borrow::<BC>().clone();
+  bc.recv(&resource).await
+}
+
+pub fn init<BC: BroadcastChannel + 'static>(
+  bc: BC,
+  unstable: bool,
+) -> Extension {
+  Extension::builder()
+    .js(include_js_files!(
+      prefix "deno:ext/broadcast_channel",
+      "01_broadcast_channel.js",
+    ))
+    .ops(vec![
+      (
+        "op_broadcast_subscribe",
+        op_sync(op_broadcast_subscribe::<BC>),
+      ),
+      (
+        "op_broadcast_unsubscribe",
+        op_sync(op_broadcast_unsubscribe::<BC>),
+      ),
+      ("op_broadcast_send", op_async(op_broadcast_send::<BC>)),
+      ("op_broadcast_recv", op_async(op_broadcast_recv::<BC>)),
+    ])
+    .state(move |state| {
+      state.put(bc.clone());
+      state.put(Unstable(unstable));
+      Ok(())
+    })
+    .build()
+}
+
+pub fn get_declaration() -> PathBuf {
+  PathBuf::from(env!("CARGO_MANIFEST_DIR"))
+    .join("lib.deno_broadcast_channel.d.ts")
+}
diff --git a/ext/console/01_colors.js b/ext/console/01_colors.js
new file mode 100644
index 000000000..3c7384a6c
--- /dev/null
+++ b/ext/console/01_colors.js
@@ -0,0 +1,99 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
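Stepping back to the ops registered in lib.rs above: 01_broadcast_channel.js reaches them through Deno.core, and a schematic round trip (illustrative only; the channel name and payload are made up) mirrors the calls the JS file itself makes:

  // Subscribe once per isolate; the rid identifies the channel resource
  // (e.g. InMemoryBroadcastChannelResource) in the resource table.
  const rid = core.opSync("op_broadcast_subscribe");

  // Publish: the name plus a serialized payload go through BroadcastChannel::send.
  const payload = core.serialize({ hello: "world" });
  await core.opAsync("op_broadcast_send", [rid, "my-channel"], payload);

  // Receive: resolves with [name, data], or null once the channel is gone.
  const message = await core.opAsync("op_broadcast_recv", rid);
  if (message !== null) {
    const [name, data] = message;
    console.log(name, core.deserialize(new Uint8Array(data)));
  }

  core.opSync("op_broadcast_unsubscribe", rid);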
+ +/// + +"use strict"; + +((window) => { + const { + RegExp, + StringPrototypeReplace, + ArrayPrototypeJoin, + } = window.__bootstrap.primordials; + + function code(open, close) { + return { + open: `\x1b[${open}m`, + close: `\x1b[${close}m`, + regexp: new RegExp(`\\x1b\\[${close}m`, "g"), + }; + } + + function run(str, code) { + return `${code.open}${ + StringPrototypeReplace(str, code.regexp, code.open) + }${code.close}`; + } + + function bold(str) { + return run(str, code(1, 22)); + } + + function italic(str) { + return run(str, code(3, 23)); + } + + function yellow(str) { + return run(str, code(33, 39)); + } + + function cyan(str) { + return run(str, code(36, 39)); + } + + function red(str) { + return run(str, code(31, 39)); + } + + function green(str) { + return run(str, code(32, 39)); + } + + function bgRed(str) { + return run(str, code(41, 49)); + } + + function white(str) { + return run(str, code(37, 39)); + } + + function gray(str) { + return run(str, code(90, 39)); + } + + function magenta(str) { + return run(str, code(35, 39)); + } + + // https://github.com/chalk/ansi-regex/blob/2b56fb0c7a07108e5b54241e8faec160d393aedb/index.js + const ANSI_PATTERN = new RegExp( + ArrayPrototypeJoin([ + "[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)", + "(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))", + ], "|"), + "g", + ); + + function stripColor(string) { + return StringPrototypeReplace(string, ANSI_PATTERN, ""); + } + + function maybeColor(fn) { + return !(globalThis.Deno?.noColor ?? false) ? fn : (s) => s; + } + + window.__bootstrap.colors = { + bold, + italic, + yellow, + cyan, + red, + green, + bgRed, + white, + gray, + magenta, + stripColor, + maybeColor, + }; +})(this); diff --git a/ext/console/02_console.js b/ext/console/02_console.js new file mode 100644 index 000000000..40ff2e25d --- /dev/null +++ b/ext/console/02_console.js @@ -0,0 +1,2066 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
+ +/// + +"use strict"; + +((window) => { + const core = window.Deno.core; + const colors = window.__bootstrap.colors; + const { + ArrayBufferIsView, + isNaN, + DataView, + Date, + DateNow, + DatePrototypeGetTime, + DatePrototypeToISOString, + Boolean, + BooleanPrototypeToString, + ObjectKeys, + ObjectCreate, + ObjectAssign, + ObjectIs, + ObjectValues, + ObjectFromEntries, + ObjectGetPrototypeOf, + ObjectGetOwnPropertyDescriptor, + ObjectGetOwnPropertySymbols, + ObjectPrototypeHasOwnProperty, + ObjectPrototypePropertyIsEnumerable, + Promise, + String, + StringPrototypeRepeat, + StringPrototypeReplace, + StringPrototypeReplaceAll, + StringPrototypeSplit, + StringPrototypeSlice, + StringPrototypeCodePointAt, + StringPrototypeCharCodeAt, + StringPrototypeNormalize, + StringPrototypeMatch, + StringPrototypePadStart, + StringPrototypeLocaleCompare, + StringPrototypeToString, + StringPrototypeTrim, + StringPrototypeIncludes, + TypeError, + NumberParseInt, + RegExp, + RegExpPrototypeTest, + RegExpPrototypeToString, + Set, + SetPrototypeEntries, + Symbol, + SymbolPrototypeToString, + SymbolToStringTag, + SymbolHasInstance, + SymbolFor, + Array, + ArrayIsArray, + ArrayPrototypeJoin, + ArrayPrototypeMap, + ArrayPrototypeReduce, + ArrayPrototypeEntries, + ArrayPrototypePush, + ArrayPrototypePop, + ArrayPrototypeSort, + ArrayPrototypeSlice, + ArrayPrototypeIncludes, + ArrayPrototypeFill, + ArrayPrototypeFilter, + ArrayPrototypeFind, + FunctionPrototypeBind, + Map, + MapPrototypeHas, + MapPrototypeGet, + MapPrototypeSet, + MapPrototypeDelete, + MapPrototypeEntries, + MapPrototypeForEach, + Error, + ErrorCaptureStackTrace, + MathAbs, + MathMax, + MathMin, + MathSqrt, + MathRound, + MathFloor, + Number, + NumberPrototypeToString, + Proxy, + ReflectGet, + ReflectGetOwnPropertyDescriptor, + ReflectGetPrototypeOf, + WeakMap, + WeakSet, + } = window.__bootstrap.primordials; + + function isInvalidDate(x) { + return isNaN(DatePrototypeGetTime(x)); + } + + function hasOwnProperty(obj, v) { + if (obj == null) { + return false; + } + return ObjectPrototypeHasOwnProperty(obj, v); + } + + function propertyIsEnumerable(obj, prop) { + if ( + obj == null || + typeof obj.propertyIsEnumerable !== "function" + ) { + return false; + } + + return ObjectPrototypePropertyIsEnumerable(obj, prop); + } + + // Copyright Joyent, Inc. and other Node contributors. MIT license. + // Forked from Node's lib/internal/cli_table.js + + function isTypedArray(x) { + return ArrayBufferIsView(x) && !(x instanceof DataView); + } + + const tableChars = { + middleMiddle: "─", + rowMiddle: "┼", + topRight: "┐", + topLeft: "┌", + leftMiddle: "├", + topMiddle: "┬", + bottomRight: "┘", + bottomLeft: "└", + bottomMiddle: "┴", + rightMiddle: "┤", + left: "│ ", + right: " │", + middle: " │ ", + }; + + function isFullWidthCodePoint(code) { + // Code points are partially derived from: + // http://www.unicode.org/Public/UNIDATA/EastAsianWidth.txt + return ( + code >= 0x1100 && + (code <= 0x115f || // Hangul Jamo + code === 0x2329 || // LEFT-POINTING ANGLE BRACKET + code === 0x232a || // RIGHT-POINTING ANGLE BRACKET + // CJK Radicals Supplement .. Enclosed CJK Letters and Months + (code >= 0x2e80 && code <= 0x3247 && code !== 0x303f) || + // Enclosed CJK Letters and Months .. CJK Unified Ideographs Extension A + (code >= 0x3250 && code <= 0x4dbf) || + // CJK Unified Ideographs .. 
Yi Radicals + (code >= 0x4e00 && code <= 0xa4c6) || + // Hangul Jamo Extended-A + (code >= 0xa960 && code <= 0xa97c) || + // Hangul Syllables + (code >= 0xac00 && code <= 0xd7a3) || + // CJK Compatibility Ideographs + (code >= 0xf900 && code <= 0xfaff) || + // Vertical Forms + (code >= 0xfe10 && code <= 0xfe19) || + // CJK Compatibility Forms .. Small Form Variants + (code >= 0xfe30 && code <= 0xfe6b) || + // Halfwidth and Fullwidth Forms + (code >= 0xff01 && code <= 0xff60) || + (code >= 0xffe0 && code <= 0xffe6) || + // Kana Supplement + (code >= 0x1b000 && code <= 0x1b001) || + // Enclosed Ideographic Supplement + (code >= 0x1f200 && code <= 0x1f251) || + // Miscellaneous Symbols and Pictographs 0x1f300 - 0x1f5ff + // Emoticons 0x1f600 - 0x1f64f + (code >= 0x1f300 && code <= 0x1f64f) || + // CJK Unified Ideographs Extension B .. Tertiary Ideographic Plane + (code >= 0x20000 && code <= 0x3fffd)) + ); + } + + function getStringWidth(str) { + str = StringPrototypeNormalize(colors.stripColor(str), "NFC"); + let width = 0; + + for (const ch of str) { + width += isFullWidthCodePoint(StringPrototypeCodePointAt(ch, 0)) ? 2 : 1; + } + + return width; + } + + function renderRow(row, columnWidths) { + let out = tableChars.left; + for (let i = 0; i < row.length; i++) { + const cell = row[i]; + const len = getStringWidth(cell); + const needed = columnWidths[i] - len; + out += `${cell}${StringPrototypeRepeat(" ", needed)}`; + if (i !== row.length - 1) { + out += tableChars.middle; + } + } + out += tableChars.right; + return out; + } + + function cliTable(head, columns) { + const rows = []; + const columnWidths = ArrayPrototypeMap(head, (h) => getStringWidth(h)); + const longestColumn = ArrayPrototypeReduce( + columns, + (n, a) => MathMax(n, a.length), + 0, + ); + + for (let i = 0; i < head.length; i++) { + const column = columns[i]; + for (let j = 0; j < longestColumn; j++) { + if (rows[j] === undefined) { + rows[j] = []; + } + const value = (rows[j][i] = hasOwnProperty(column, j) ? 
column[j] : ""); + const width = columnWidths[i] || 0; + const counted = getStringWidth(value); + columnWidths[i] = MathMax(width, counted); + } + } + + const divider = ArrayPrototypeMap( + columnWidths, + (i) => StringPrototypeRepeat(tableChars.middleMiddle, i + 2), + ); + + let result = + `${tableChars.topLeft}${ + ArrayPrototypeJoin(divider, tableChars.topMiddle) + }` + + `${tableChars.topRight}\n${renderRow(head, columnWidths)}\n` + + `${tableChars.leftMiddle}${ + ArrayPrototypeJoin(divider, tableChars.rowMiddle) + }` + + `${tableChars.rightMiddle}\n`; + + for (const row of rows) { + result += `${renderRow(row, columnWidths)}\n`; + } + + result += + `${tableChars.bottomLeft}${ + ArrayPrototypeJoin(divider, tableChars.bottomMiddle) + }` + + tableChars.bottomRight; + + return result; + } + /* End of forked part */ + + const DEFAULT_INSPECT_OPTIONS = { + depth: 4, + indentLevel: 0, + sorted: false, + trailingComma: false, + compact: true, + iterableLimit: 100, + showProxy: false, + colors: false, + getters: false, + showHidden: false, + }; + + const DEFAULT_INDENT = " "; // Default indent string + + const LINE_BREAKING_LENGTH = 80; + const MIN_GROUP_LENGTH = 6; + const STR_ABBREVIATE_SIZE = 100; + + const PROMISE_STRING_BASE_LENGTH = 12; + + class CSI { + static kClear = "\x1b[1;1H"; + static kClearScreenDown = "\x1b[0J"; + } + + function getClassInstanceName(instance) { + if (typeof instance != "object") { + return ""; + } + const constructor = instance?.constructor; + if (typeof constructor == "function") { + return constructor.name ?? ""; + } + return ""; + } + + function maybeColor(fn, inspectOptions) { + return inspectOptions.colors ? fn : (s) => s; + } + + function inspectFunction(value, level, inspectOptions) { + const cyan = maybeColor(colors.cyan, inspectOptions); + if (customInspect in value && typeof value[customInspect] === "function") { + return String(value[customInspect](inspect)); + } + // Might be Function/AsyncFunction/GeneratorFunction/AsyncGeneratorFunction + let cstrName = ObjectGetPrototypeOf(value)?.constructor?.name; + if (!cstrName) { + // If prototype is removed or broken, + // use generic 'Function' instead. + cstrName = "Function"; + } + + // Our function may have properties, so we want to format those + // as if our function was an object + // If we didn't find any properties, we will just append an + // empty suffix. + let suffix = ``; + if ( + ObjectKeys(value).length > 0 || + ObjectGetOwnPropertySymbols(value).length > 0 + ) { + const propString = inspectRawObject(value, level, inspectOptions); + // Filter out the empty string for the case we only have + // non-enumerable symbols. 
+ if ( + propString.length > 0 && + propString !== "{}" + ) { + suffix = ` ${propString}`; + } + } + + if (value.name && value.name !== "anonymous") { + // from MDN spec + return cyan(`[${cstrName}: ${value.name}]`) + suffix; + } + return cyan(`[${cstrName}]`) + suffix; + } + + function inspectIterable( + value, + level, + options, + inspectOptions, + ) { + const cyan = maybeColor(colors.cyan, inspectOptions); + if (level >= inspectOptions.depth) { + return cyan(`[${options.typeName}]`); + } + + const entries = []; + let iter; + + switch (options.typeName) { + case "Map": + iter = MapPrototypeEntries(value); + break; + case "Set": + iter = SetPrototypeEntries(value); + break; + case "Array": + iter = ArrayPrototypeEntries(value); + break; + default: + if (isTypedArray(value)) { + iter = ArrayPrototypeEntries(value); + } else { + throw new TypeError("unreachable"); + } + } + + let entriesLength = 0; + const next = () => { + return iter.next(); + }; + for (const el of iter) { + if (entriesLength < inspectOptions.iterableLimit) { + ArrayPrototypePush( + entries, + options.entryHandler( + el, + level + 1, + inspectOptions, + FunctionPrototypeBind(next, iter), + ), + ); + } + entriesLength++; + } + + if (options.sort) { + ArrayPrototypeSort(entries); + } + + if (entriesLength > inspectOptions.iterableLimit) { + const nmore = entriesLength - inspectOptions.iterableLimit; + ArrayPrototypePush(entries, `... ${nmore} more items`); + } + + const iPrefix = `${options.displayName ? options.displayName + " " : ""}`; + + const initIndentation = `\n${ + StringPrototypeRepeat(DEFAULT_INDENT, level + 1) + }`; + const entryIndentation = `,\n${ + StringPrototypeRepeat(DEFAULT_INDENT, level + 1) + }`; + const closingIndentation = `${inspectOptions.trailingComma ? "," : ""}\n${ + StringPrototypeRepeat(DEFAULT_INDENT, level) + }`; + + let iContent; + if (options.group && entries.length > MIN_GROUP_LENGTH) { + const groups = groupEntries(entries, level, value); + iContent = `${initIndentation}${ + ArrayPrototypeJoin(groups, entryIndentation) + }${closingIndentation}`; + } else { + iContent = entries.length === 0 + ? "" + : ` ${ArrayPrototypeJoin(entries, ", ")} `; + if ( + colors.stripColor(iContent).length > LINE_BREAKING_LENGTH || + !inspectOptions.compact + ) { + iContent = `${initIndentation}${ + ArrayPrototypeJoin(entries, entryIndentation) + }${closingIndentation}`; + } + } + + return `${iPrefix}${options.delims[0]}${iContent}${options.delims[1]}`; + } + + // Ported from Node.js + // Copyright Node.js contributors. All rights reserved. + function groupEntries( + entries, + level, + value, + iterableLimit = 100, + ) { + let totalLength = 0; + let maxLength = 0; + let entriesLength = entries.length; + if (iterableLimit < entriesLength) { + // This makes sure the "... n more items" part is not taken into account. + entriesLength--; + } + const separatorSpace = 2; // Add 1 for the space and 1 for the separator. + const dataLen = new Array(entriesLength); + // Calculate the total length of all output entries and the individual max + // entries length of all output entries. + // IN PROGRESS: Colors are being taken into account. 
+ for (let i = 0; i < entriesLength; i++) { + // Taking colors into account: removing the ANSI color + // codes from the string before measuring its length + const len = colors.stripColor(entries[i]).length; + dataLen[i] = len; + totalLength += len + separatorSpace; + if (maxLength < len) maxLength = len; + } + // Add two to `maxLength` as we add a single whitespace character plus a comma + // in-between two entries. + const actualMax = maxLength + separatorSpace; + // Check if at least three entries fit next to each other and prevent grouping + // of arrays that contains entries of very different length (i.e., if a single + // entry is longer than 1/5 of all other entries combined). Otherwise the + // space in-between small entries would be enormous. + if ( + actualMax * 3 + (level + 1) < LINE_BREAKING_LENGTH && + (totalLength / actualMax > 5 || maxLength <= 6) + ) { + const approxCharHeights = 2.5; + const averageBias = MathSqrt(actualMax - totalLength / entries.length); + const biasedMax = MathMax(actualMax - 3 - averageBias, 1); + // Dynamically check how many columns seem possible. + const columns = MathMin( + // Ideally a square should be drawn. We expect a character to be about 2.5 + // times as high as wide. This is the area formula to calculate a square + // which contains n rectangles of size `actualMax * approxCharHeights`. + // Divide that by `actualMax` to receive the correct number of columns. + // The added bias increases the columns for short entries. + MathRound( + MathSqrt(approxCharHeights * biasedMax * entriesLength) / biasedMax, + ), + // Do not exceed the breakLength. + MathFloor((LINE_BREAKING_LENGTH - (level + 1)) / actualMax), + // Limit the columns to a maximum of fifteen. + 15, + ); + // Return with the original output if no grouping should happen. + if (columns <= 1) { + return entries; + } + const tmp = []; + const maxLineLength = []; + for (let i = 0; i < columns; i++) { + let lineMaxLength = 0; + for (let j = i; j < entries.length; j += columns) { + if (dataLen[j] > lineMaxLength) lineMaxLength = dataLen[j]; + } + lineMaxLength += separatorSpace; + maxLineLength[i] = lineMaxLength; + } + let order = "padStart"; + if (value !== undefined) { + for (let i = 0; i < entries.length; i++) { + if ( + typeof value[i] !== "number" && + typeof value[i] !== "bigint" + ) { + order = "padEnd"; + break; + } + } + } + // Each iteration creates a single line of grouped entries. + for (let i = 0; i < entriesLength; i += columns) { + // The last lines may contain less entries than columns. 
+ const max = MathMin(i + columns, entriesLength); + let str = ""; + let j = i; + for (; j < max - 1; j++) { + const lengthOfColorCodes = entries[j].length - dataLen[j]; + const padding = maxLineLength[j - i] + lengthOfColorCodes; + str += `${entries[j]}, `[order](padding, " "); + } + if (order === "padStart") { + const lengthOfColorCodes = entries[j].length - dataLen[j]; + const padding = maxLineLength[j - i] + + lengthOfColorCodes - + separatorSpace; + str += StringPrototypePadStart(entries[j], padding, " "); + } else { + str += entries[j]; + } + ArrayPrototypePush(tmp, str); + } + if (iterableLimit < entries.length) { + ArrayPrototypePush(tmp, entries[entriesLength]); + } + entries = tmp; + } + return entries; + } + + function _inspectValue( + value, + level, + inspectOptions, + ) { + const proxyDetails = core.getProxyDetails(value); + if (proxyDetails != null && inspectOptions.showProxy) { + return inspectProxy(proxyDetails, level, inspectOptions); + } + + const green = maybeColor(colors.green, inspectOptions); + const yellow = maybeColor(colors.yellow, inspectOptions); + const gray = maybeColor(colors.gray, inspectOptions); + const cyan = maybeColor(colors.cyan, inspectOptions); + const bold = maybeColor(colors.bold, inspectOptions); + const red = maybeColor(colors.red, inspectOptions); + + switch (typeof value) { + case "string": + return green(quoteString(value)); + case "number": // Numbers are yellow + // Special handling of -0 + return yellow(ObjectIs(value, -0) ? "-0" : `${value}`); + case "boolean": // booleans are yellow + return yellow(String(value)); + case "undefined": // undefined is gray + return gray(String(value)); + case "symbol": // Symbols are green + return green(maybeQuoteSymbol(value)); + case "bigint": // Bigints are yellow + return yellow(`${value}n`); + case "function": // Function string is cyan + if (ctxHas(value)) { + // Circular string is cyan + return cyan("[Circular]"); + } + + return inspectFunction(value, level, inspectOptions); + case "object": // null is bold + if (value === null) { + return bold("null"); + } + + if (ctxHas(value)) { + // Circular string is cyan + return cyan("[Circular]"); + } + return inspectObject(value, level, inspectOptions); + default: + // Not implemented is red + return red("[Not Implemented]"); + } + } + + function inspectValue( + value, + level, + inspectOptions, + ) { + ArrayPrototypePush(CTX_STACK, value); + let x; + try { + x = _inspectValue(value, level, inspectOptions); + } finally { + ArrayPrototypePop(CTX_STACK); + } + return x; + } + + // We can match Node's quoting behavior exactly by swapping the double quote and + // single quote in this array. That would give preference to single quotes. + // However, we prefer double quotes as the default. + const QUOTES = ['"', "'", "`"]; + + /** Surround the string in quotes. + * + * The quote symbol is chosen by taking the first of the `QUOTES` array which + * does not occur in the string. If they all occur, settle with `QUOTES[0]`. + * + * Insert a backslash before any occurrence of the chosen quote symbol and + * before any backslash. + */ + function quoteString(string) { + const quote = + ArrayPrototypeFind(QUOTES, (c) => !StringPrototypeIncludes(string, c)) ?? + QUOTES[0]; + const escapePattern = new RegExp(`(?=[${quote}\\\\])`, "g"); + string = StringPrototypeReplace(string, escapePattern, "\\"); + string = replaceEscapeSequences(string); + return `${quote}${string}${quote}`; + } + + // Replace escape sequences that can modify output. 
+ function replaceEscapeSequences(string) { + return StringPrototypeReplace( + StringPrototypeReplace( + StringPrototypeReplace( + StringPrototypeReplace( + StringPrototypeReplace( + StringPrototypeReplace( + StringPrototypeReplace(string, /[\b]/g, "\\b"), + /\f/g, + "\\f", + ), + /\n/g, + "\\n", + ), + /\r/g, + "\\r", + ), + /\t/g, + "\\t", + ), + /\v/g, + "\\v", + ), + // deno-lint-ignore no-control-regex + /[\x00-\x1f\x7f-\x9f]/g, + (c) => + "\\x" + + StringPrototypePadStart( + NumberPrototypeToString(StringPrototypeCharCodeAt(c, 0), 16), + 2, + "0", + ), + ); + } + + // Surround a string with quotes when it is required (e.g the string not a valid identifier). + function maybeQuoteString(string) { + if (RegExpPrototypeTest(/^[a-zA-Z_][a-zA-Z_0-9]*$/, string)) { + return replaceEscapeSequences(string); + } + + return quoteString(string); + } + + // Surround a symbol's description in quotes when it is required (e.g the description has non printable characters). + function maybeQuoteSymbol(symbol) { + if (symbol.description === undefined) { + return SymbolPrototypeToString(symbol); + } + + if (RegExpPrototypeTest(/^[a-zA-Z_][a-zA-Z_.0-9]*$/, symbol.description)) { + return SymbolPrototypeToString(symbol); + } + + return `Symbol(${quoteString(symbol.description)})`; + } + + const CTX_STACK = []; + function ctxHas(x) { + // Only check parent contexts + return ArrayPrototypeIncludes( + ArrayPrototypeSlice(CTX_STACK, 0, CTX_STACK.length - 1), + x, + ); + } + + // Print strings when they are inside of arrays or objects with quotes + function inspectValueWithQuotes( + value, + level, + inspectOptions, + ) { + const green = maybeColor(colors.green, inspectOptions); + switch (typeof value) { + case "string": { + const trunc = value.length > STR_ABBREVIATE_SIZE + ? StringPrototypeSlice(value, 0, STR_ABBREVIATE_SIZE) + "..." + : value; + return green(quoteString(trunc)); // Quoted strings are green + } + default: + return inspectValue(value, level, inspectOptions); + } + } + + function inspectArray( + value, + level, + inspectOptions, + ) { + const gray = maybeColor(colors.gray, inspectOptions); + const options = { + typeName: "Array", + displayName: "", + delims: ["[", "]"], + entryHandler: (entry, level, inspectOptions, next) => { + const [index, val] = entry; + let i = index; + if (!ObjectPrototypeHasOwnProperty(value, i)) { + i++; + while (!ObjectPrototypeHasOwnProperty(value, i) && i < value.length) { + next(); + i++; + } + const emptyItems = i - index; + const ending = emptyItems > 1 ? 
"s" : ""; + return gray(`<${emptyItems} empty item${ending}>`); + } else { + return inspectValueWithQuotes(val, level, inspectOptions); + } + }, + group: inspectOptions.compact, + sort: false, + }; + return inspectIterable(value, level, options, inspectOptions); + } + + function inspectTypedArray( + typedArrayName, + value, + level, + inspectOptions, + ) { + const valueLength = value.length; + const options = { + typeName: typedArrayName, + displayName: `${typedArrayName}(${valueLength})`, + delims: ["[", "]"], + entryHandler: (entry, level, inspectOptions) => { + const val = entry[1]; + return inspectValueWithQuotes(val, level + 1, inspectOptions); + }, + group: inspectOptions.compact, + sort: false, + }; + return inspectIterable(value, level, options, inspectOptions); + } + + function inspectSet( + value, + level, + inspectOptions, + ) { + const options = { + typeName: "Set", + displayName: "Set", + delims: ["{", "}"], + entryHandler: (entry, level, inspectOptions) => { + const val = entry[1]; + return inspectValueWithQuotes(val, level + 1, inspectOptions); + }, + group: false, + sort: inspectOptions.sorted, + }; + return inspectIterable(value, level, options, inspectOptions); + } + + function inspectMap( + value, + level, + inspectOptions, + ) { + const options = { + typeName: "Map", + displayName: "Map", + delims: ["{", "}"], + entryHandler: (entry, level, inspectOptions) => { + const [key, val] = entry; + return `${ + inspectValueWithQuotes( + key, + level + 1, + inspectOptions, + ) + } => ${inspectValueWithQuotes(val, level + 1, inspectOptions)}`; + }, + group: false, + sort: inspectOptions.sorted, + }; + return inspectIterable( + value, + level, + options, + inspectOptions, + ); + } + + function inspectWeakSet(inspectOptions) { + const cyan = maybeColor(colors.cyan, inspectOptions); + return `WeakSet { ${cyan("[items unknown]")} }`; // as seen in Node, with cyan color + } + + function inspectWeakMap(inspectOptions) { + const cyan = maybeColor(colors.cyan, inspectOptions); + return `WeakMap { ${cyan("[items unknown]")} }`; // as seen in Node, with cyan color + } + + function inspectDate(value, inspectOptions) { + // without quotes, ISO format, in magenta like before + const magenta = maybeColor(colors.magenta, inspectOptions); + return magenta( + isInvalidDate(value) ? 
"Invalid Date" : DatePrototypeToISOString(value), + ); + } + + function inspectRegExp(value, inspectOptions) { + const red = maybeColor(colors.red, inspectOptions); + return red(RegExpPrototypeToString(value)); // RegExps are red + } + + function inspectStringObject(value, inspectOptions) { + const cyan = maybeColor(colors.cyan, inspectOptions); + return cyan(`[String: "${StringPrototypeToString(value)}"]`); // wrappers are in cyan + } + + function inspectBooleanObject(value, inspectOptions) { + const cyan = maybeColor(colors.cyan, inspectOptions); + return cyan(`[Boolean: ${BooleanPrototypeToString(value)}]`); // wrappers are in cyan + } + + function inspectNumberObject(value, inspectOptions) { + const cyan = maybeColor(colors.cyan, inspectOptions); + return cyan(`[Number: ${NumberPrototypeToString(value)}]`); // wrappers are in cyan + } + + const PromiseState = { + Pending: 0, + Fulfilled: 1, + Rejected: 2, + }; + + function inspectPromise( + value, + level, + inspectOptions, + ) { + const cyan = maybeColor(colors.cyan, inspectOptions); + const red = maybeColor(colors.red, inspectOptions); + + const [state, result] = core.getPromiseDetails(value); + + if (state === PromiseState.Pending) { + return `Promise { ${cyan("")} }`; + } + + const prefix = state === PromiseState.Fulfilled + ? "" + : `${red("")} `; + + const str = `${prefix}${ + inspectValueWithQuotes( + result, + level + 1, + inspectOptions, + ) + }`; + + if (str.length + PROMISE_STRING_BASE_LENGTH > LINE_BREAKING_LENGTH) { + return `Promise {\n${ + StringPrototypeRepeat(DEFAULT_INDENT, level + 1) + }${str}\n}`; + } + + return `Promise { ${str} }`; + } + + function inspectProxy( + targetAndHandler, + level, + inspectOptions, + ) { + return `Proxy ${inspectArray(targetAndHandler, level, inspectOptions)}`; + } + + function inspectRawObject( + value, + level, + inspectOptions, + ) { + const cyan = maybeColor(colors.cyan, inspectOptions); + + if (level >= inspectOptions.depth) { + return cyan("[Object]"); // wrappers are in cyan + } + + let baseString; + + let shouldShowDisplayName = false; + let displayName = value[ + SymbolToStringTag + ]; + if (!displayName) { + displayName = getClassInstanceName(value); + } + if ( + displayName && displayName !== "Object" && displayName !== "anonymous" + ) { + shouldShowDisplayName = true; + } + + const entries = []; + const stringKeys = ObjectKeys(value); + const symbolKeys = ObjectGetOwnPropertySymbols(value); + if (inspectOptions.sorted) { + ArrayPrototypeSort(stringKeys); + ArrayPrototypeSort( + symbolKeys, + (s1, s2) => + StringPrototypeLocaleCompare( + (s1.description ?? ""), + s2.description ?? "", + ), + ); + } + + const red = maybeColor(colors.red, inspectOptions); + + for (const key of stringKeys) { + if (inspectOptions.getters) { + let propertyValue; + let error = null; + try { + propertyValue = value[key]; + } catch (error_) { + error = error_; + } + const inspectedValue = error == null + ? 
inspectValueWithQuotes( + propertyValue, + level + 1, + inspectOptions, + ) + : red(`[Thrown ${error.name}: ${error.message}]`); + ArrayPrototypePush( + entries, + `${maybeQuoteString(key)}: ${inspectedValue}`, + ); + } else { + const descriptor = ObjectGetOwnPropertyDescriptor(value, key); + if (descriptor.get !== undefined && descriptor.set !== undefined) { + ArrayPrototypePush( + entries, + `${maybeQuoteString(key)}: [Getter/Setter]`, + ); + } else if (descriptor.get !== undefined) { + ArrayPrototypePush(entries, `${maybeQuoteString(key)}: [Getter]`); + } else { + ArrayPrototypePush( + entries, + `${maybeQuoteString(key)}: ${ + inspectValueWithQuotes(value[key], level + 1, inspectOptions) + }`, + ); + } + } + } + + for (const key of symbolKeys) { + if ( + !inspectOptions.showHidden && + !propertyIsEnumerable(value, key) + ) { + continue; + } + + if (inspectOptions.getters) { + let propertyValue; + let error; + try { + propertyValue = value[key]; + } catch (error_) { + error = error_; + } + const inspectedValue = error == null + ? inspectValueWithQuotes( + propertyValue, + level + 1, + inspectOptions, + ) + : red(`Thrown ${error.name}: ${error.message}`); + ArrayPrototypePush( + entries, + `[${maybeQuoteSymbol(key)}]: ${inspectedValue}`, + ); + } else { + const descriptor = ObjectGetOwnPropertyDescriptor(value, key); + if (descriptor.get !== undefined && descriptor.set !== undefined) { + ArrayPrototypePush( + entries, + `[${maybeQuoteSymbol(key)}]: [Getter/Setter]`, + ); + } else if (descriptor.get !== undefined) { + ArrayPrototypePush(entries, `[${maybeQuoteSymbol(key)}]: [Getter]`); + } else { + ArrayPrototypePush( + entries, + `[${maybeQuoteSymbol(key)}]: ${ + inspectValueWithQuotes(value[key], level + 1, inspectOptions) + }`, + ); + } + } + } + + // Making sure color codes are ignored when calculating the total length + const totalLength = entries.length + level + + colors.stripColor(ArrayPrototypeJoin(entries, "")).length; + + if (entries.length === 0) { + baseString = "{}"; + } else if (totalLength > LINE_BREAKING_LENGTH || !inspectOptions.compact) { + const entryIndent = StringPrototypeRepeat(DEFAULT_INDENT, level + 1); + const closingIndent = StringPrototypeRepeat(DEFAULT_INDENT, level); + baseString = `{\n${entryIndent}${ + ArrayPrototypeJoin(entries, `,\n${entryIndent}`) + }${inspectOptions.trailingComma ? "," : ""}\n${closingIndent}}`; + } else { + baseString = `{ ${ArrayPrototypeJoin(entries, ", ")} }`; + } + + if (shouldShowDisplayName) { + baseString = `${displayName} ${baseString}`; + } + + return baseString; + } + + function inspectObject( + value, + level, + inspectOptions, + ) { + if (customInspect in value && typeof value[customInspect] === "function") { + return String(value[customInspect](inspect)); + } + // This non-unique symbol is used to support op_crates, ie. + // in extensions/web we don't want to depend on public + // Symbol.for("Deno.customInspect") symbol defined in the public API. + // Internal only, shouldn't be used by users. + const privateCustomInspect = SymbolFor("Deno.privateCustomInspect"); + if ( + privateCustomInspect in value && + typeof value[privateCustomInspect] === "function" + ) { + // TODO(nayeemrmn): `inspect` is passed as an argument because custom + // inspect implementations in `extensions` need it, but may not have access + // to the `Deno` namespace in web workers. Remove when the `Deno` + // namespace is always enabled. 
+ return String(value[privateCustomInspect](inspect)); + } + if (value instanceof Error) { + return String(value.stack); + } else if (ArrayIsArray(value)) { + return inspectArray(value, level, inspectOptions); + } else if (value instanceof Number) { + return inspectNumberObject(value, inspectOptions); + } else if (value instanceof Boolean) { + return inspectBooleanObject(value, inspectOptions); + } else if (value instanceof String) { + return inspectStringObject(value, inspectOptions); + } else if (value instanceof Promise) { + return inspectPromise(value, level, inspectOptions); + } else if (value instanceof RegExp) { + return inspectRegExp(value, inspectOptions); + } else if (value instanceof Date) { + return inspectDate(value, inspectOptions); + } else if (value instanceof Set) { + return inspectSet(value, level, inspectOptions); + } else if (value instanceof Map) { + return inspectMap(value, level, inspectOptions); + } else if (value instanceof WeakSet) { + return inspectWeakSet(inspectOptions); + } else if (value instanceof WeakMap) { + return inspectWeakMap(inspectOptions); + } else if (isTypedArray(value)) { + return inspectTypedArray( + ObjectGetPrototypeOf(value).constructor.name, + value, + level, + inspectOptions, + ); + } else { + // Otherwise, default object formatting + return inspectRawObject(value, level, inspectOptions); + } + } + + const colorKeywords = new Map([ + ["black", "#000000"], + ["silver", "#c0c0c0"], + ["gray", "#808080"], + ["white", "#ffffff"], + ["maroon", "#800000"], + ["red", "#ff0000"], + ["purple", "#800080"], + ["fuchsia", "#ff00ff"], + ["green", "#008000"], + ["lime", "#00ff00"], + ["olive", "#808000"], + ["yellow", "#ffff00"], + ["navy", "#000080"], + ["blue", "#0000ff"], + ["teal", "#008080"], + ["aqua", "#00ffff"], + ["orange", "#ffa500"], + ["aliceblue", "#f0f8ff"], + ["antiquewhite", "#faebd7"], + ["aquamarine", "#7fffd4"], + ["azure", "#f0ffff"], + ["beige", "#f5f5dc"], + ["bisque", "#ffe4c4"], + ["blanchedalmond", "#ffebcd"], + ["blueviolet", "#8a2be2"], + ["brown", "#a52a2a"], + ["burlywood", "#deb887"], + ["cadetblue", "#5f9ea0"], + ["chartreuse", "#7fff00"], + ["chocolate", "#d2691e"], + ["coral", "#ff7f50"], + ["cornflowerblue", "#6495ed"], + ["cornsilk", "#fff8dc"], + ["crimson", "#dc143c"], + ["cyan", "#00ffff"], + ["darkblue", "#00008b"], + ["darkcyan", "#008b8b"], + ["darkgoldenrod", "#b8860b"], + ["darkgray", "#a9a9a9"], + ["darkgreen", "#006400"], + ["darkgrey", "#a9a9a9"], + ["darkkhaki", "#bdb76b"], + ["darkmagenta", "#8b008b"], + ["darkolivegreen", "#556b2f"], + ["darkorange", "#ff8c00"], + ["darkorchid", "#9932cc"], + ["darkred", "#8b0000"], + ["darksalmon", "#e9967a"], + ["darkseagreen", "#8fbc8f"], + ["darkslateblue", "#483d8b"], + ["darkslategray", "#2f4f4f"], + ["darkslategrey", "#2f4f4f"], + ["darkturquoise", "#00ced1"], + ["darkviolet", "#9400d3"], + ["deeppink", "#ff1493"], + ["deepskyblue", "#00bfff"], + ["dimgray", "#696969"], + ["dimgrey", "#696969"], + ["dodgerblue", "#1e90ff"], + ["firebrick", "#b22222"], + ["floralwhite", "#fffaf0"], + ["forestgreen", "#228b22"], + ["gainsboro", "#dcdcdc"], + ["ghostwhite", "#f8f8ff"], + ["gold", "#ffd700"], + ["goldenrod", "#daa520"], + ["greenyellow", "#adff2f"], + ["grey", "#808080"], + ["honeydew", "#f0fff0"], + ["hotpink", "#ff69b4"], + ["indianred", "#cd5c5c"], + ["indigo", "#4b0082"], + ["ivory", "#fffff0"], + ["khaki", "#f0e68c"], + ["lavender", "#e6e6fa"], + ["lavenderblush", "#fff0f5"], + ["lawngreen", "#7cfc00"], + ["lemonchiffon", "#fffacd"], + ["lightblue", "#add8e6"], + 
["lightcoral", "#f08080"], + ["lightcyan", "#e0ffff"], + ["lightgoldenrodyellow", "#fafad2"], + ["lightgray", "#d3d3d3"], + ["lightgreen", "#90ee90"], + ["lightgrey", "#d3d3d3"], + ["lightpink", "#ffb6c1"], + ["lightsalmon", "#ffa07a"], + ["lightseagreen", "#20b2aa"], + ["lightskyblue", "#87cefa"], + ["lightslategray", "#778899"], + ["lightslategrey", "#778899"], + ["lightsteelblue", "#b0c4de"], + ["lightyellow", "#ffffe0"], + ["limegreen", "#32cd32"], + ["linen", "#faf0e6"], + ["magenta", "#ff00ff"], + ["mediumaquamarine", "#66cdaa"], + ["mediumblue", "#0000cd"], + ["mediumorchid", "#ba55d3"], + ["mediumpurple", "#9370db"], + ["mediumseagreen", "#3cb371"], + ["mediumslateblue", "#7b68ee"], + ["mediumspringgreen", "#00fa9a"], + ["mediumturquoise", "#48d1cc"], + ["mediumvioletred", "#c71585"], + ["midnightblue", "#191970"], + ["mintcream", "#f5fffa"], + ["mistyrose", "#ffe4e1"], + ["moccasin", "#ffe4b5"], + ["navajowhite", "#ffdead"], + ["oldlace", "#fdf5e6"], + ["olivedrab", "#6b8e23"], + ["orangered", "#ff4500"], + ["orchid", "#da70d6"], + ["palegoldenrod", "#eee8aa"], + ["palegreen", "#98fb98"], + ["paleturquoise", "#afeeee"], + ["palevioletred", "#db7093"], + ["papayawhip", "#ffefd5"], + ["peachpuff", "#ffdab9"], + ["peru", "#cd853f"], + ["pink", "#ffc0cb"], + ["plum", "#dda0dd"], + ["powderblue", "#b0e0e6"], + ["rosybrown", "#bc8f8f"], + ["royalblue", "#4169e1"], + ["saddlebrown", "#8b4513"], + ["salmon", "#fa8072"], + ["sandybrown", "#f4a460"], + ["seagreen", "#2e8b57"], + ["seashell", "#fff5ee"], + ["sienna", "#a0522d"], + ["skyblue", "#87ceeb"], + ["slateblue", "#6a5acd"], + ["slategray", "#708090"], + ["slategrey", "#708090"], + ["snow", "#fffafa"], + ["springgreen", "#00ff7f"], + ["steelblue", "#4682b4"], + ["tan", "#d2b48c"], + ["thistle", "#d8bfd8"], + ["tomato", "#ff6347"], + ["turquoise", "#40e0d0"], + ["violet", "#ee82ee"], + ["wheat", "#f5deb3"], + ["whitesmoke", "#f5f5f5"], + ["yellowgreen", "#9acd32"], + ["rebeccapurple", "#663399"], + ]); + + function parseCssColor(colorString) { + if (MapPrototypeHas(colorKeywords, colorString)) { + colorString = MapPrototypeGet(colorKeywords, colorString); + } + // deno-fmt-ignore + const hashMatch = StringPrototypeMatch(colorString, /^#([\dA-Fa-f]{2})([\dA-Fa-f]{2})([\dA-Fa-f]{2})([\dA-Fa-f]{2})?$/); + if (hashMatch != null) { + return [ + Number(`0x${hashMatch[1]}`), + Number(`0x${hashMatch[2]}`), + Number(`0x${hashMatch[3]}`), + ]; + } + // deno-fmt-ignore + const smallHashMatch = StringPrototypeMatch(colorString, /^#([\dA-Fa-f])([\dA-Fa-f])([\dA-Fa-f])([\dA-Fa-f])?$/); + if (smallHashMatch != null) { + return [ + Number(`0x${smallHashMatch[1]}0`), + Number(`0x${smallHashMatch[2]}0`), + Number(`0x${smallHashMatch[3]}0`), + ]; + } + // deno-fmt-ignore + const rgbMatch = StringPrototypeMatch(colorString, /^rgba?\(\s*([+\-]?\d*\.?\d+)\s*,\s*([+\-]?\d*\.?\d+)\s*,\s*([+\-]?\d*\.?\d+)\s*(,\s*([+\-]?\d*\.?\d+)\s*)?\)$/); + if (rgbMatch != null) { + return [ + MathRound(MathMax(0, MathMin(255, Number(rgbMatch[1])))), + MathRound(MathMax(0, MathMin(255, Number(rgbMatch[2])))), + MathRound(MathMax(0, MathMin(255, Number(rgbMatch[3])))), + ]; + } + // deno-fmt-ignore + const hslMatch = StringPrototypeMatch(colorString, /^hsla?\(\s*([+\-]?\d*\.?\d+)\s*,\s*([+\-]?\d*\.?\d+)%\s*,\s*([+\-]?\d*\.?\d+)%\s*(,\s*([+\-]?\d*\.?\d+)\s*)?\)$/); + if (hslMatch != null) { + // https://www.rapidtables.com/convert/color/hsl-to-rgb.html + let h = Number(hslMatch[1]) % 360; + if (h < 0) { + h += 360; + } + const s = MathMax(0, MathMin(100, Number(hslMatch[2]))) 
/ 100; + const l = MathMax(0, MathMin(100, Number(hslMatch[3]))) / 100; + const c = (1 - MathAbs(2 * l - 1)) * s; + const x = c * (1 - MathAbs((h / 60) % 2 - 1)); + const m = l - c / 2; + let r_; + let g_; + let b_; + if (h < 60) { + [r_, g_, b_] = [c, x, 0]; + } else if (h < 120) { + [r_, g_, b_] = [x, c, 0]; + } else if (h < 180) { + [r_, g_, b_] = [0, c, x]; + } else if (h < 240) { + [r_, g_, b_] = [0, x, c]; + } else if (h < 300) { + [r_, g_, b_] = [x, 0, c]; + } else { + [r_, g_, b_] = [c, 0, x]; + } + return [ + MathRound((r_ + m) * 255), + MathRound((g_ + m) * 255), + MathRound((b_ + m) * 255), + ]; + } + return null; + } + + function getDefaultCss() { + return { + backgroundColor: null, + color: null, + fontWeight: null, + fontStyle: null, + textDecorationColor: null, + textDecorationLine: [], + }; + } + + function parseCss(cssString) { + const css = getDefaultCss(); + + const rawEntries = []; + let inValue = false; + let currentKey = null; + let parenthesesDepth = 0; + let currentPart = ""; + for (let i = 0; i < cssString.length; i++) { + const c = cssString[i]; + if (c == "(") { + parenthesesDepth++; + } else if (parenthesesDepth > 0) { + if (c == ")") { + parenthesesDepth--; + } + } else if (inValue) { + if (c == ";") { + const value = StringPrototypeTrim(currentPart); + if (value != "") { + ArrayPrototypePush(rawEntries, [currentKey, value]); + } + currentKey = null; + currentPart = ""; + inValue = false; + continue; + } + } else if (c == ":") { + currentKey = StringPrototypeTrim(currentPart); + currentPart = ""; + inValue = true; + continue; + } + currentPart += c; + } + if (inValue && parenthesesDepth == 0) { + const value = StringPrototypeTrim(currentPart); + if (value != "") { + ArrayPrototypePush(rawEntries, [currentKey, value]); + } + currentKey = null; + currentPart = ""; + } + + for (const [key, value] of rawEntries) { + if (key == "background-color") { + const color = parseCssColor(value); + if (color != null) { + css.backgroundColor = color; + } + } else if (key == "color") { + const color = parseCssColor(value); + if (color != null) { + css.color = color; + } + } else if (key == "font-weight") { + if (value == "bold") { + css.fontWeight = value; + } + } else if (key == "font-style") { + if ( + ArrayPrototypeIncludes(["italic", "oblique", "oblique 14deg"], value) + ) { + css.fontStyle = "italic"; + } + } else if (key == "text-decoration-line") { + css.textDecorationLine = []; + for (const lineType of StringPrototypeSplit(value, /\s+/g)) { + if ( + ArrayPrototypeIncludes( + ["line-through", "overline", "underline"], + lineType, + ) + ) { + ArrayPrototypePush(css.textDecorationLine, lineType); + } + } + } else if (key == "text-decoration-color") { + const color = parseCssColor(value); + if (color != null) { + css.textDecorationColor = color; + } + } else if (key == "text-decoration") { + css.textDecorationColor = null; + css.textDecorationLine = []; + for (const arg of StringPrototypeSplit(value, /\s+/g)) { + const maybeColor = parseCssColor(arg); + if (maybeColor != null) { + css.textDecorationColor = maybeColor; + } else if ( + ArrayPrototypeIncludes( + ["line-through", "overline", "underline"], + arg, + ) + ) { + ArrayPrototypePush(css.textDecorationLine, arg); + } + } + } + } + + return css; + } + + function colorEquals(color1, color2) { + return color1?.[0] == color2?.[0] && color1?.[1] == color2?.[1] && + color1?.[2] == color2?.[2]; + } + + function cssToAnsi(css, prevCss = null) { + prevCss = prevCss ?? 
getDefaultCss(); + let ansi = ""; + if (!colorEquals(css.backgroundColor, prevCss.backgroundColor)) { + if (css.backgroundColor != null) { + const [r, g, b] = css.backgroundColor; + ansi += `\x1b[48;2;${r};${g};${b}m`; + } else { + ansi += "\x1b[49m"; + } + } + if (!colorEquals(css.color, prevCss.color)) { + if (css.color != null) { + const [r, g, b] = css.color; + ansi += `\x1b[38;2;${r};${g};${b}m`; + } else { + ansi += "\x1b[39m"; + } + } + if (css.fontWeight != prevCss.fontWeight) { + if (css.fontWeight == "bold") { + ansi += `\x1b[1m`; + } else { + ansi += "\x1b[22m"; + } + } + if (css.fontStyle != prevCss.fontStyle) { + if (css.fontStyle == "italic") { + ansi += `\x1b[3m`; + } else { + ansi += "\x1b[23m"; + } + } + if (!colorEquals(css.textDecorationColor, prevCss.textDecorationColor)) { + if (css.textDecorationColor != null) { + const [r, g, b] = css.textDecorationColor; + ansi += `\x1b[58;2;${r};${g};${b}m`; + } else { + ansi += "\x1b[59m"; + } + } + if ( + ArrayPrototypeIncludes(css.textDecorationLine, "line-through") != + ArrayPrototypeIncludes(prevCss.textDecorationLine, "line-through") + ) { + if (ArrayPrototypeIncludes(css.textDecorationLine, "line-through")) { + ansi += "\x1b[9m"; + } else { + ansi += "\x1b[29m"; + } + } + if ( + ArrayPrototypeIncludes(css.textDecorationLine, "overline") != + ArrayPrototypeIncludes(prevCss.textDecorationLine, "overline") + ) { + if (ArrayPrototypeIncludes(css.textDecorationLine, "overline")) { + ansi += "\x1b[53m"; + } else { + ansi += "\x1b[55m"; + } + } + if ( + ArrayPrototypeIncludes(css.textDecorationLine, "underline") != + ArrayPrototypeIncludes(prevCss.textDecorationLine, "underline") + ) { + if (ArrayPrototypeIncludes(css.textDecorationLine, "underline")) { + ansi += "\x1b[4m"; + } else { + ansi += "\x1b[24m"; + } + } + return ansi; + } + + function inspectArgs(args, inspectOptions = {}) { + const noColor = globalThis.Deno?.noColor ?? true; + const rInspectOptions = { ...DEFAULT_INSPECT_OPTIONS, ...inspectOptions }; + const first = args[0]; + let a = 0; + let string = ""; + + if (typeof first == "string" && args.length > 1) { + a++; + // Index of the first not-yet-appended character. Use this so we only + // have to append to `string` when a substitution occurs / at the end. + let appendedChars = 0; + let usedStyle = false; + let prevCss = null; + for (let i = 0; i < first.length - 1; i++) { + if (first[i] == "%") { + const char = first[++i]; + if (a < args.length) { + let formattedArg = null; + if (char == "s") { + // Format as a string. + formattedArg = String(args[a++]); + } else if (ArrayPrototypeIncludes(["d", "i"], char)) { + // Format as an integer. + const value = args[a++]; + if (typeof value == "bigint") { + formattedArg = `${value}n`; + } else if (typeof value == "number") { + formattedArg = `${NumberParseInt(String(value))}`; + } else { + formattedArg = "NaN"; + } + } else if (char == "f") { + // Format as a floating point value. + const value = args[a++]; + if (typeof value == "number") { + formattedArg = `${value}`; + } else { + formattedArg = "NaN"; + } + } else if (ArrayPrototypeIncludes(["O", "o"], char)) { + // Format as an object. 
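A worked trace of the `%c` pipeline just defined, using only the helpers above (keyword lookup in `parseCssColor`, declaration parsing in `parseCss`, escape-code emission in `cssToAnsi`):

    const css = parseCss("color: red; font-weight: bold");
    // css.color      -> [255, 0, 0]   (resolved through the colorKeywords table)
    // css.fontWeight -> "bold"
    cssToAnsi(css);
    // -> "\x1b[38;2;255;0;0m\x1b[1m"   (24-bit foreground + bold; inspectArgs()
    //                                   appends "\x1b[0m" once a %c style was used)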
+ formattedArg = inspectValue( + args[a++], + 0, + rInspectOptions, + ); + } else if (char == "c") { + const value = args[a++]; + if (!noColor) { + const css = parseCss(value); + formattedArg = cssToAnsi(css, prevCss); + if (formattedArg != "") { + usedStyle = true; + prevCss = css; + } + } else { + formattedArg = ""; + } + } + + if (formattedArg != null) { + string += StringPrototypeSlice(first, appendedChars, i - 1) + + formattedArg; + appendedChars = i + 1; + } + } + if (char == "%") { + string += StringPrototypeSlice(first, appendedChars, i - 1) + "%"; + appendedChars = i + 1; + } + } + } + string += StringPrototypeSlice(first, appendedChars); + if (usedStyle) { + string += "\x1b[0m"; + } + } + + for (; a < args.length; a++) { + if (a > 0) { + string += " "; + } + if (typeof args[a] == "string") { + string += args[a]; + } else { + // Use default maximum depth for null or undefined arguments. + string += inspectValue(args[a], 0, rInspectOptions); + } + } + + if (rInspectOptions.indentLevel > 0) { + const groupIndent = StringPrototypeRepeat( + DEFAULT_INDENT, + rInspectOptions.indentLevel, + ); + string = groupIndent + + StringPrototypeReplaceAll(string, "\n", `\n${groupIndent}`); + } + + return string; + } + + const countMap = new Map(); + const timerMap = new Map(); + const isConsoleInstance = Symbol("isConsoleInstance"); + + function getConsoleInspectOptions() { + return { + ...DEFAULT_INSPECT_OPTIONS, + colors: !(globalThis.Deno?.noColor ?? false), + }; + } + + class Console { + #printFunc = null; + [isConsoleInstance] = false; + + constructor(printFunc) { + this.#printFunc = printFunc; + this.indentLevel = 0; + this[isConsoleInstance] = true; + + // ref https://console.spec.whatwg.org/#console-namespace + // For historical web-compatibility reasons, the namespace object for + // console must have as its [[Prototype]] an empty object, created as if + // by ObjectCreate(%ObjectPrototype%), instead of %ObjectPrototype%. 
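The specifier dispatch above implements the console format directives: string coercion for `%s`, integer truncation via `NumberParseInt` for `%d`/`%i`, plain number formatting for `%f`, and `inspectValue` for `%o`/`%O`. A short sketch of what it produces through `console.log`:

    console.log("%s is %d years old (pi ~ %f)", "Ada", 36.9, 3.14159);
    // "Ada is 36 years old (pi ~ 3.14159)"
    console.log("100%% done", "ok");
    // "100% done ok"   ("%%" prints a literal "%"; extra args are appended space-separated)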
+ const console = ObjectCreate({}, { + [SymbolToStringTag]: { + enumerable: false, + writable: false, + configurable: true, + value: "console", + }, + }); + ObjectAssign(console, this); + return console; + } + + log = (...args) => { + this.#printFunc( + inspectArgs(args, { + ...getConsoleInspectOptions(), + indentLevel: this.indentLevel, + }) + "\n", + 1, + ); + }; + + debug = (...args) => { + this.#printFunc( + inspectArgs(args, { + ...getConsoleInspectOptions(), + indentLevel: this.indentLevel, + }) + "\n", + 0, + ); + }; + + info = (...args) => { + this.#printFunc( + inspectArgs(args, { + ...getConsoleInspectOptions(), + indentLevel: this.indentLevel, + }) + "\n", + 1, + ); + }; + + dir = (obj = undefined, options = {}) => { + this.#printFunc( + inspectArgs([obj], { ...getConsoleInspectOptions(), ...options }) + + "\n", + 1, + ); + }; + + dirxml = this.dir; + + warn = (...args) => { + this.#printFunc( + inspectArgs(args, { + ...getConsoleInspectOptions(), + indentLevel: this.indentLevel, + }) + "\n", + 2, + ); + }; + + error = (...args) => { + this.#printFunc( + inspectArgs(args, { + ...getConsoleInspectOptions(), + indentLevel: this.indentLevel, + }) + "\n", + 3, + ); + }; + + assert = (condition = false, ...args) => { + if (condition) { + return; + } + + if (args.length === 0) { + this.error("Assertion failed"); + return; + } + + const [first, ...rest] = args; + + if (typeof first === "string") { + this.error(`Assertion failed: ${first}`, ...rest); + return; + } + + this.error(`Assertion failed:`, ...args); + }; + + count = (label = "default") => { + label = String(label); + + if (MapPrototypeHas(countMap, label)) { + const current = MapPrototypeGet(countMap, label) || 0; + MapPrototypeSet(countMap, label, current + 1); + } else { + MapPrototypeSet(countMap, label, 1); + } + + this.info(`${label}: ${MapPrototypeGet(countMap, label)}`); + }; + + countReset = (label = "default") => { + label = String(label); + + if (MapPrototypeHas(countMap, label)) { + MapPrototypeSet(countMap, label, 0); + } else { + this.warn(`Count for '${label}' does not exist`); + } + }; + + table = (data = undefined, properties = undefined) => { + if (properties !== undefined && !ArrayIsArray(properties)) { + throw new Error( + "The 'properties' argument must be of type Array. " + + "Received type string", + ); + } + + if (data === null || typeof data !== "object") { + return this.log(data); + } + + const stringifyValue = (value) => + inspectValueWithQuotes(value, 0, { + ...DEFAULT_INSPECT_OPTIONS, + depth: 1, + }); + const toTable = (header, body) => this.log(cliTable(header, body)); + + let resultData; + const isSet = data instanceof Set; + const isMap = data instanceof Map; + const valuesKey = "Values"; + const indexKey = isSet || isMap ? "(iter idx)" : "(idx)"; + + if (data instanceof Set) { + resultData = [...data]; + } else if (data instanceof Map) { + let idx = 0; + resultData = {}; + + MapPrototypeForEach(data, (v, k) => { + resultData[idx] = { Key: k, Values: v }; + idx++; + }); + } else { + resultData = data; + } + + const keys = ObjectKeys(resultData); + const numRows = keys.length; + + const objectValues = properties + ? 
ObjectFromEntries( + ArrayPrototypeMap( + properties, + (name) => [name, ArrayPrototypeFill(new Array(numRows), "")], + ), + ) + : {}; + const indexKeys = []; + const values = []; + + let hasPrimitives = false; + keys.forEach((k, idx) => { + const value = resultData[k]; + const primitive = value === null || + (typeof value !== "function" && typeof value !== "object"); + if (properties === undefined && primitive) { + hasPrimitives = true; + ArrayPrototypePush(values, stringifyValue(value)); + } else { + const valueObj = value || {}; + const keys = properties || ObjectKeys(valueObj); + for (const k of keys) { + if (!primitive && k in valueObj) { + if (!(k in objectValues)) { + objectValues[k] = ArrayPrototypeFill(new Array(numRows), ""); + } + objectValues[k][idx] = stringifyValue(valueObj[k]); + } + } + ArrayPrototypePush(values, ""); + } + + ArrayPrototypePush(indexKeys, k); + }); + + const headerKeys = ObjectKeys(objectValues); + const bodyValues = ObjectValues(objectValues); + const header = ArrayPrototypeFilter([ + indexKey, + ...(properties || + [...headerKeys, !isMap && hasPrimitives && valuesKey]), + ], Boolean); + const body = [indexKeys, ...bodyValues, values]; + + toTable(header, body); + }; + + time = (label = "default") => { + label = String(label); + + if (MapPrototypeHas(timerMap, label)) { + this.warn(`Timer '${label}' already exists`); + return; + } + + MapPrototypeSet(timerMap, label, DateNow()); + }; + + timeLog = (label = "default", ...args) => { + label = String(label); + + if (!MapPrototypeHas(timerMap, label)) { + this.warn(`Timer '${label}' does not exists`); + return; + } + + const startTime = MapPrototypeGet(timerMap, label); + const duration = DateNow() - startTime; + + this.info(`${label}: ${duration}ms`, ...args); + }; + + timeEnd = (label = "default") => { + label = String(label); + + if (!MapPrototypeHas(timerMap, label)) { + this.warn(`Timer '${label}' does not exists`); + return; + } + + const startTime = MapPrototypeGet(timerMap, label); + MapPrototypeDelete(timerMap, label); + const duration = DateNow() - startTime; + + this.info(`${label}: ${duration}ms`); + }; + + group = (...label) => { + if (label.length > 0) { + this.log(...label); + } + this.indentLevel += 2; + }; + + groupCollapsed = this.group; + + groupEnd = () => { + if (this.indentLevel > 0) { + this.indentLevel -= 2; + } + }; + + clear = () => { + this.indentLevel = 0; + this.#printFunc(CSI.kClear, 1); + this.#printFunc(CSI.kClearScreenDown, 1); + }; + + trace = (...args) => { + const message = inspectArgs( + args, + { ...getConsoleInspectOptions(), indentLevel: 0 }, + ); + const err = { + name: "Trace", + message, + }; + ErrorCaptureStackTrace(err, this.trace); + this.error(err.stack); + }; + + static [SymbolHasInstance](instance) { + return instance[isConsoleInstance]; + } + } + + const customInspect = SymbolFor("Deno.customInspect"); + + function inspect( + value, + inspectOptions = {}, + ) { + return inspectValue(value, 0, { + ...DEFAULT_INSPECT_OPTIONS, + ...inspectOptions, + // TODO(nayeemrmn): Indent level is not supported. + indentLevel: 0, + }); + } + + /** Creates a proxy that represents a subset of the properties + * of the original object optionally without evaluating the properties + * in order to get the values. 
*/ + function createFilteredInspectProxy({ object, keys, evaluate }) { + return new Proxy({}, { + get(_target, key) { + if (key === SymbolToStringTag) { + return object.constructor?.name; + } else if (ArrayPrototypeIncludes(keys, key)) { + return ReflectGet(object, key); + } else { + return undefined; + } + }, + getOwnPropertyDescriptor(_target, key) { + if (!ArrayPrototypeIncludes(keys, key)) { + return undefined; + } else if (evaluate) { + return getEvaluatedDescriptor(object, key); + } else { + return getDescendantPropertyDescriptor(object, key) ?? + getEvaluatedDescriptor(object, key); + } + }, + has(_target, key) { + return ArrayPrototypeIncludes(keys, key); + }, + ownKeys() { + return keys; + }, + }); + + function getDescendantPropertyDescriptor(object, key) { + let propertyDescriptor = ReflectGetOwnPropertyDescriptor(object, key); + if (!propertyDescriptor) { + const prototype = ReflectGetPrototypeOf(object); + if (prototype) { + propertyDescriptor = getDescendantPropertyDescriptor(prototype, key); + } + } + return propertyDescriptor; + } + + function getEvaluatedDescriptor(object, key) { + return { + configurable: true, + enumerable: true, + value: object[key], + }; + } + } + + // A helper function that will bind our own console implementation + // with default implementation of Console from V8. This will cause + // console messages to be piped to inspector console. + // + // We are using `Deno.core.callConsole` binding to preserve proper stack + // frames in inspector console. This has to be done because V8 considers + // the last JS stack frame as gospel for the inspector. In our case we + // specifically want the latest user stack frame to be the one that matters + // though. + // + // Inspired by: + // https://github.com/nodejs/node/blob/1317252dfe8824fd9cfee125d2aaa94004db2f3b/lib/internal/util/inspector.js#L39-L61 + function wrapConsole(consoleFromDeno, consoleFromV8) { + const callConsole = core.callConsole; + + for (const key of ObjectKeys(consoleFromV8)) { + if (ObjectPrototypeHasOwnProperty(consoleFromDeno, key)) { + consoleFromDeno[key] = FunctionPrototypeBind( + callConsole, + consoleFromDeno, + consoleFromV8[key], + consoleFromDeno[key], + ); + } + } + } + + // Expose these fields to internalObject for tests. + window.__bootstrap.internals = { + ...window.__bootstrap.internals ?? {}, + Console, + cssToAnsi, + inspectArgs, + parseCss, + parseCssColor, + }; + + window.__bootstrap.console = { + CSI, + inspectArgs, + Console, + customInspect, + inspect, + wrapConsole, + createFilteredInspectProxy, + }; +})(this); diff --git a/ext/console/Cargo.toml b/ext/console/Cargo.toml new file mode 100644 index 000000000..8955ad037 --- /dev/null +++ b/ext/console/Cargo.toml @@ -0,0 +1,17 @@ +# Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +[package] +name = "deno_console" +version = "0.14.0" +authors = ["the Deno authors"] +edition = "2018" +license = "MIT" +readme = "README.md" +repository = "https://github.com/denoland/deno" +description = "Implementation of Console API for Deno" + +[lib] +path = "lib.rs" + +[dependencies] +deno_core = { version = "0.96.0", path = "../../core" } diff --git a/ext/console/README.md b/ext/console/README.md new file mode 100644 index 000000000..2f8fb448a --- /dev/null +++ b/ext/console/README.md @@ -0,0 +1,5 @@ +# deno_console + +This crate implements the Console API. 
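`createFilteredInspectProxy()` above lets other extensions expose only a curated subset of an object's properties to the inspector, optionally without triggering getters. A hypothetical usage sketch (the `resp` object and key list are made up for illustration, not taken from this patch):

    const resp = { status: 200, url: "https://example.com/", internalBodyState: {} };
    const view = createFilteredInspectProxy({
      object: resp,
      keys: ["status", "url"],
      evaluate: true, // report plain value descriptors instead of walking the prototype chain
    });
    console.log(view); // prints roughly { status: 200, url: "https://example.com/" }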
+ +Spec: https://console.spec.whatwg.org/ diff --git a/ext/console/internal.d.ts b/ext/console/internal.d.ts new file mode 100644 index 000000000..ef7834ba6 --- /dev/null +++ b/ext/console/internal.d.ts @@ -0,0 +1,16 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +/// +/// + +declare namespace globalThis { + declare namespace __bootstrap { + declare namespace console { + declare function createFilteredInspectProxy(params: { + object: TObject; + keys: (keyof TObject)[]; + evaluate: boolean; + }): Record; + } + } +} diff --git a/ext/console/lib.deno_console.d.ts b/ext/console/lib.deno_console.d.ts new file mode 100644 index 000000000..730a5da8e --- /dev/null +++ b/ext/console/lib.deno_console.d.ts @@ -0,0 +1,28 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +// deno-lint-ignore-file no-explicit-any + +/// +/// + +declare interface Console { + assert(condition?: boolean, ...data: any[]): void; + clear(): void; + count(label?: string): void; + countReset(label?: string): void; + debug(...data: any[]): void; + dir(item?: any, options?: any): void; + dirxml(...data: any[]): void; + error(...data: any[]): void; + group(...data: any[]): void; + groupCollapsed(...data: any[]): void; + groupEnd(): void; + info(...data: any[]): void; + log(...data: any[]): void; + table(tabularData?: any, properties?: string[]): void; + time(label?: string): void; + timeEnd(label?: string): void; + timeLog(label?: string, ...data: any[]): void; + trace(...data: any[]): void; + warn(...data: any[]): void; +} diff --git a/ext/console/lib.rs b/ext/console/lib.rs new file mode 100644 index 000000000..40a428fa9 --- /dev/null +++ b/ext/console/lib.rs @@ -0,0 +1,19 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +use deno_core::include_js_files; +use deno_core::Extension; +use std::path::PathBuf; + +pub fn init() -> Extension { + Extension::builder() + .js(include_js_files!( + prefix "deno:ext/console", + "01_colors.js", + "02_console.js", + )) + .build() +} + +pub fn get_declaration() -> PathBuf { + PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("lib.deno_console.d.ts") +} diff --git a/ext/crypto/00_crypto.js b/ext/crypto/00_crypto.js new file mode 100644 index 000000000..449946295 --- /dev/null +++ b/ext/crypto/00_crypto.js @@ -0,0 +1,1013 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +// @ts-check +/// +/// +/// +/// + +"use strict"; + +((window) => { + const core = window.Deno.core; + const webidl = window.__bootstrap.webidl; + const { DOMException } = window.__bootstrap.domException; + + const { + ArrayPrototypeFind, + ArrayBufferIsView, + ArrayPrototypeIncludes, + BigInt64Array, + StringPrototypeToUpperCase, + Symbol, + SymbolFor, + SymbolToStringTag, + WeakMap, + WeakMapPrototypeGet, + WeakMapPrototypeSet, + Int8Array, + Uint8Array, + TypedArrayPrototypeSlice, + Int16Array, + Uint16Array, + Int32Array, + Uint32Array, + Uint8ClampedArray, + TypeError, + } = window.__bootstrap.primordials; + + // P-521 is not yet supported. 
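The destructuring from `window.__bootstrap.primordials` at the top of 00_crypto.js is how extension code shields itself from user tampering with built-in prototypes: the uncurried helpers are captured before user code runs. A small illustration of the difference (sketch only):

    // User code can monkey-patch globals...
    Array.prototype.includes = () => true;
    // ...but the captured primordial keeps the original behaviour, so internal
    // checks such as the key-usage validation below are unaffected:
    ArrayPrototypeIncludes(["sign", "verify"], "encrypt");        // still false
    Array.prototype.includes.call(["sign", "verify"], "encrypt"); // now (wrongly) true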
+ const supportedNamedCurves = ["P-256", "P-384"]; + const recognisedUsages = [ + "encrypt", + "decrypt", + "sign", + "verify", + "deriveKey", + "deriveBits", + "wrapKey", + "unwrapKey", + ]; + + const simpleAlgorithmDictionaries = { + RsaHashedKeyGenParams: { hash: "HashAlgorithmIdentifier" }, + EcKeyGenParams: {}, + HmacKeyGenParams: { hash: "HashAlgorithmIdentifier" }, + RsaPssParams: {}, + EcdsaParams: { hash: "HashAlgorithmIdentifier" }, + HmacImportParams: { hash: "HashAlgorithmIdentifier" }, + }; + + const supportedAlgorithms = { + "digest": { + "SHA-1": null, + "SHA-256": null, + "SHA-384": null, + "SHA-512": null, + }, + "generateKey": { + "RSASSA-PKCS1-v1_5": "RsaHashedKeyGenParams", + "RSA-PSS": "RsaHashedKeyGenParams", + "ECDSA": "EcKeyGenParams", + "HMAC": "HmacKeyGenParams", + }, + "sign": { + "RSASSA-PKCS1-v1_5": null, + "RSA-PSS": "RsaPssParams", + "ECDSA": "EcdsaParams", + "HMAC": null, + }, + "verify": { + "RSASSA-PKCS1-v1_5": null, + "RSA-PSS": "RsaPssParams", + "HMAC": null, + }, + "importKey": { + "HMAC": "HmacImportParams", + }, + }; + + // See https://www.w3.org/TR/WebCryptoAPI/#dfn-normalize-an-algorithm + function normalizeAlgorithm(algorithm, op) { + if (typeof algorithm == "string") { + return normalizeAlgorithm({ name: algorithm }, op); + } + + // 1. + const registeredAlgorithms = supportedAlgorithms[op]; + // 2. 3. + const initialAlg = webidl.converters.Algorithm(algorithm, { + prefix: "Failed to normalize algorithm", + context: "passed algorithm", + }); + // 4. + let algName = initialAlg.name; + + // 5. + let desiredType = undefined; + for (const key in registeredAlgorithms) { + if ( + StringPrototypeToUpperCase(key) === StringPrototypeToUpperCase(algName) + ) { + algName = key; + desiredType = registeredAlgorithms[key]; + } + } + if (desiredType === undefined) { + throw new DOMException( + "Unrecognized algorithm name", + "NotSupportedError", + ); + } + + // Fast path everything below if the registered dictionary is "None". + if (desiredType === null) { + return { name: algName }; + } + + const normalizedAlgorithm = webidl.converters[desiredType](algorithm, { + prefix: "Failed to normalize algorithm", + context: "passed algorithm", + }); + normalizedAlgorithm.name = algName; + + const dict = simpleAlgorithmDictionaries[desiredType]; + for (const member in dict) { + const idlType = dict[member]; + const idlValue = normalizedAlgorithm[member]; + + if (idlType === "BufferSource") { + normalizedAlgorithm[member] = new Uint8Array( + TypedArrayPrototypeSlice( + (ArrayBufferIsView(idlValue) ? idlValue.buffer : idlValue), + idlValue.byteOffset ?? 
0, + idlValue.byteLength, + ), + ); + } else if (idlType === "HashAlgorithmIdentifier") { + normalizedAlgorithm[member] = normalizeAlgorithm(idlValue, "digest"); + } else if (idlType === "AlgorithmIdentifier") { + // TODO(lucacasonato): implement + throw new TypeError("unimplemented"); + } + } + + return normalizedAlgorithm; + } + + const _handle = Symbol("[[handle]]"); + const _algorithm = Symbol("[[algorithm]]"); + const _extractable = Symbol("[[extractable]]"); + const _usages = Symbol("[[usages]]"); + const _type = Symbol("[[type]]"); + + class CryptoKey { + /** @type {string} */ + [_type]; + /** @type {boolean} */ + [_extractable]; + /** @type {object} */ + [_algorithm]; + /** @type {string[]} */ + [_usages]; + /** @type {object} */ + [_handle]; + + constructor() { + webidl.illegalConstructor(); + } + + /** @returns {string} */ + get type() { + webidl.assertBranded(this, CryptoKey); + return this[_type]; + } + + /** @returns {boolean} */ + get extractable() { + webidl.assertBranded(this, CryptoKey); + return this[_extractable]; + } + + /** @returns {string[]} */ + get usages() { + webidl.assertBranded(this, CryptoKey); + // TODO(lucacasonato): return a SameObject copy + return this[_usages]; + } + + /** @returns {object} */ + get algorithm() { + webidl.assertBranded(this, CryptoKey); + // TODO(lucacasonato): return a SameObject copy + return this[_algorithm]; + } + + get [SymbolToStringTag]() { + return "CryptoKey"; + } + + [SymbolFor("Deno.customInspect")](inspect) { + return `${this.constructor.name} ${ + inspect({ + type: this.type, + extractable: this.extractable, + algorithm: this.algorithm, + usages: this.usages, + }) + }`; + } + } + + webidl.configurePrototype(CryptoKey); + + /** + * @param {string} type + * @param {boolean} extractable + * @param {string[]} usages + * @param {object} algorithm + * @param {object} handle + * @returns + */ + function constructKey(type, extractable, usages, algorithm, handle) { + const key = webidl.createBranded(CryptoKey); + key[_type] = type; + key[_extractable] = extractable; + key[_usages] = usages; + key[_algorithm] = algorithm; + key[_handle] = handle; + return key; + } + + // https://w3c.github.io/webcrypto/#concept-usage-intersection + /** + * @param {string[]} a + * @param {string[]} b + * @returns + */ + function usageIntersection(a, b) { + return a.filter((i) => b.includes(i)); + } + + // TODO(lucacasonato): this should be moved to rust + /** @type {WeakMap} */ + const KEY_STORE = new WeakMap(); + + class SubtleCrypto { + constructor() { + webidl.illegalConstructor(); + } + + /** + * @param {string} algorithm + * @param {BufferSource} data + * @returns {Promise} + */ + async digest(algorithm, data) { + webidl.assertBranded(this, SubtleCrypto); + const prefix = "Failed to execute 'digest' on 'SubtleCrypto'"; + webidl.requiredArguments(arguments.length, 2, { prefix }); + algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + prefix, + context: "Argument 1", + }); + data = webidl.converters.BufferSource(data, { + prefix, + context: "Argument 2", + }); + + if (ArrayBufferIsView(data)) { + data = new Uint8Array(data.buffer, data.byteOffset, data.byteLength); + } else { + data = new Uint8Array(data); + } + + data = TypedArrayPrototypeSlice(data); + + algorithm = normalizeAlgorithm(algorithm, "digest"); + + const result = await core.opAsync( + "op_crypto_subtle_digest", + algorithm.name, + data, + ); + + return result.buffer; + } + + /** + * @param {string} algorithm + * @param {CryptoKey} key + * @param {BufferSource} data + * 
@returns {Promise} + */ + async sign(algorithm, key, data) { + webidl.assertBranded(this, SubtleCrypto); + const prefix = "Failed to execute 'sign' on 'SubtleCrypto'"; + webidl.requiredArguments(arguments.length, 3, { prefix }); + algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + prefix, + context: "Argument 1", + }); + key = webidl.converters.CryptoKey(key, { + prefix, + context: "Argument 2", + }); + data = webidl.converters.BufferSource(data, { + prefix, + context: "Argument 3", + }); + + // 1. + if (ArrayBufferIsView(data)) { + data = new Uint8Array(data.buffer, data.byteOffset, data.byteLength); + } else { + data = new Uint8Array(data); + } + data = TypedArrayPrototypeSlice(data); + + // 2. + const normalizedAlgorithm = normalizeAlgorithm(algorithm, "sign"); + + const handle = key[_handle]; + const keyData = WeakMapPrototypeGet(KEY_STORE, handle); + + // 8. + if (normalizedAlgorithm.name !== key[_algorithm].name) { + throw new DOMException( + "Signing algorithm doesn't match key algorithm.", + "InvalidAccessError", + ); + } + + // 9. + if (!ArrayPrototypeIncludes(key[_usages], "sign")) { + throw new DOMException( + "Key does not support the 'sign' operation.", + "InvalidAccessError", + ); + } + + switch (normalizedAlgorithm.name) { + case "RSASSA-PKCS1-v1_5": { + // 1. + if (key[_type] !== "private") { + throw new DOMException( + "Key type not supported", + "InvalidAccessError", + ); + } + + // 2. + const hashAlgorithm = key[_algorithm].hash.name; + const signature = await core.opAsync("op_crypto_sign_key", { + key: keyData, + algorithm: "RSASSA-PKCS1-v1_5", + hash: hashAlgorithm, + }, data); + + return signature.buffer; + } + case "RSA-PSS": { + // 1. + if (key[_type] !== "private") { + throw new DOMException( + "Key type not supported", + "InvalidAccessError", + ); + } + + // 2. + const hashAlgorithm = key[_algorithm].hash.name; + const signature = await core.opAsync("op_crypto_sign_key", { + key: keyData, + algorithm: "RSA-PSS", + hash: hashAlgorithm, + saltLength: normalizedAlgorithm.saltLength, + }, data); + + return signature.buffer; + } + case "ECDSA": { + // 1. + if (key[_type] !== "private") { + throw new DOMException( + "Key type not supported", + "InvalidAccessError", + ); + } + + // 2. 
+ const hashAlgorithm = normalizedAlgorithm.hash.name; + const namedCurve = key[_algorithm].namedCurve; + if (!ArrayPrototypeIncludes(supportedNamedCurves, namedCurve)) { + throw new DOMException("Curve not supported", "NotSupportedError"); + } + + const signature = await core.opAsync("op_crypto_sign_key", { + key: keyData, + algorithm: "ECDSA", + hash: hashAlgorithm, + namedCurve, + }, data); + + return signature.buffer; + } + case "HMAC": { + const hashAlgorithm = key[_algorithm].hash.name; + + const signature = await core.opAsync("op_crypto_sign_key", { + key: keyData, + algorithm: "HMAC", + hash: hashAlgorithm, + }, data); + + return signature.buffer; + } + } + + throw new TypeError("unreachable"); + } + + /** + * @param {string} format + * @param {BufferSource} keyData + * @param {string} algorithm + * @param {boolean} extractable + * @param {KeyUsages[]} keyUsages + * @returns {Promise} + */ + // deno-lint-ignore require-await + async importKey(format, keyData, algorithm, extractable, keyUsages) { + webidl.assertBranded(this, SubtleCrypto); + const prefix = "Failed to execute 'importKey' on 'SubtleCrypto'"; + webidl.requiredArguments(arguments.length, 4, { prefix }); + format = webidl.converters.KeyFormat(format, { + prefix, + context: "Argument 1", + }); + keyData = webidl.converters.BufferSource(keyData, { + prefix, + context: "Argument 2", + }); + algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + prefix, + context: "Argument 3", + }); + extractable = webidl.converters.boolean(extractable, { + prefix, + context: "Argument 4", + }); + keyUsages = webidl.converters["sequence"](keyUsages, { + prefix, + context: "Argument 5", + }); + + const normalizedAlgorithm = normalizeAlgorithm(algorithm, "importKey"); + + if ( + ArrayPrototypeFind( + keyUsages, + (u) => !ArrayPrototypeIncludes(["sign", "verify"], u), + ) !== undefined + ) { + throw new DOMException("Invalid key usages", "SyntaxError"); + } + + switch (normalizedAlgorithm.name) { + // https://w3c.github.io/webcrypto/#hmac-operations + case "HMAC": { + switch (format) { + case "raw": { + const hash = normalizedAlgorithm.hash; + // 5. + let length = keyData.byteLength * 8; + // 6. + if (length === 0) { + throw new DOMException("Key length is zero", "DataError"); + } + if (normalizeAlgorithm.length) { + // 7. 
+ if ( + normalizedAlgorithm.length > length || + normalizedAlgorithm.length <= (length - 8) + ) { + throw new DOMException( + "Key length is invalid", + "DataError", + ); + } + length = normalizeAlgorithm.length; + } + + if (keyUsages.length == 0) { + throw new DOMException("Key usage is empty", "SyntaxError"); + } + + const handle = {}; + WeakMapPrototypeSet(KEY_STORE, handle, { + type: "raw", + data: keyData, + }); + + const algorithm = { + name: "HMAC", + length, + hash, + }; + + const key = constructKey( + "secret", + true, + usageIntersection(keyUsages, recognisedUsages), + algorithm, + handle, + ); + + return key; + } + // TODO(@littledivy): jwk + default: + throw new DOMException("Not implemented", "NotSupportedError"); + } + } + // TODO(@littledivy): RSASSA-PKCS1-v1_5 + // TODO(@littledivy): RSA-PSS + // TODO(@littledivy): ECDSA + default: + throw new DOMException("Not implemented", "NotSupportedError"); + } + } + + /** + * @param {string} format + * @param {CryptoKey} key + * @returns {Promise} + */ + // deno-lint-ignore require-await + async exportKey(format, key) { + webidl.assertBranded(this, SubtleCrypto); + const prefix = "Failed to execute 'exportKey' on 'SubtleCrypto'"; + webidl.requiredArguments(arguments.length, 2, { prefix }); + format = webidl.converters.KeyFormat(format, { + prefix, + context: "Argument 1", + }); + key = webidl.converters.CryptoKey(key, { + prefix, + context: "Argument 2", + }); + + const handle = key[_handle]; + // 2. + const bits = WeakMapPrototypeGet(KEY_STORE, handle); + + switch (key[_algorithm].name) { + case "HMAC": { + if (bits == null) { + throw new DOMException("Key is not available", "OperationError"); + } + switch (format) { + // 3. + case "raw": { + for (let _i = 7 & (8 - bits.length % 8); _i > 0; _i--) { + bits.push(0); + } + // 4-5. + return bits.buffer; + } + // TODO(@littledivy): jwk + default: + throw new DOMException("Not implemented", "NotSupportedError"); + } + } + // TODO(@littledivy): RSASSA-PKCS1-v1_5 + // TODO(@littledivy): RSA-PSS + // TODO(@littledivy): ECDSA + default: + throw new DOMException("Not implemented", "NotSupportedError"); + } + } + + /** + * @param {string} algorithm + * @param {CryptoKey} key + * @param {BufferSource} signature + * @param {BufferSource} data + * @returns {Promise} + */ + async verify(algorithm, key, signature, data) { + webidl.assertBranded(this, SubtleCrypto); + const prefix = "Failed to execute 'verify' on 'SubtleCrypto'"; + webidl.requiredArguments(arguments.length, 4, { prefix }); + algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + prefix, + context: "Argument 1", + }); + key = webidl.converters.CryptoKey(key, { + prefix, + context: "Argument 2", + }); + signature = webidl.converters.BufferSource(signature, { + prefix, + context: "Argument 3", + }); + data = webidl.converters.BufferSource(data, { + prefix, + context: "Argument 4", + }); + + // 2. + if (ArrayBufferIsView(signature)) { + signature = new Uint8Array( + signature.buffer, + signature.byteOffset, + signature.byteLength, + ); + } else { + signature = new Uint8Array(signature); + } + signature = TypedArrayPrototypeSlice(signature); + + // 3. 
+ if (ArrayBufferIsView(data)) { + data = new Uint8Array(data.buffer, data.byteOffset, data.byteLength); + } else { + data = new Uint8Array(data); + } + data = TypedArrayPrototypeSlice(data); + + const normalizedAlgorithm = normalizeAlgorithm(algorithm, "verify"); + + const handle = key[_handle]; + const keyData = WeakMapPrototypeGet(KEY_STORE, handle); + + if (normalizedAlgorithm.name !== key[_algorithm].name) { + throw new DOMException( + "Verifying algorithm doesn't match key algorithm.", + "InvalidAccessError", + ); + } + + if (!ArrayPrototypeIncludes(key[_usages], "verify")) { + throw new DOMException( + "Key does not support the 'verify' operation.", + "InvalidAccessError", + ); + } + + switch (normalizedAlgorithm.name) { + case "RSASSA-PKCS1-v1_5": { + if (key[_type] !== "public") { + throw new DOMException( + "Key type not supported", + "InvalidAccessError", + ); + } + + const hashAlgorithm = key[_algorithm].hash.name; + return await core.opAsync("op_crypto_verify_key", { + key: keyData, + algorithm: "RSASSA-PKCS1-v1_5", + hash: hashAlgorithm, + signature, + }, data); + } + case "RSA-PSS": { + if (key[_type] !== "public") { + throw new DOMException( + "Key type not supported", + "InvalidAccessError", + ); + } + + const hashAlgorithm = key[_algorithm].hash.name; + const saltLength = normalizedAlgorithm.saltLength; + return await core.opAsync("op_crypto_verify_key", { + key: keyData, + algorithm: "RSA-PSS", + hash: hashAlgorithm, + saltLength, + signature, + }, data); + } + case "HMAC": { + const hash = key[_algorithm].hash.name; + return await core.opAsync("op_crypto_verify_key", { + key: keyData, + algorithm: "HMAC", + hash, + signature, + }, data); + } + } + + throw new TypeError("unreachable"); + } + + /** + * @param {string} algorithm + * @param {boolean} extractable + * @param {KeyUsage[]} keyUsages + * @returns {Promise} + */ + async generateKey(algorithm, extractable, keyUsages) { + webidl.assertBranded(this, SubtleCrypto); + const prefix = "Failed to execute 'generateKey' on 'SubtleCrypto'"; + webidl.requiredArguments(arguments.length, 3, { prefix }); + algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + prefix, + context: "Argument 1", + }); + extractable = webidl.converters["boolean"](extractable, { + prefix, + context: "Argument 2", + }); + keyUsages = webidl.converters["sequence"](keyUsages, { + prefix, + context: "Argument 3", + }); + + const usages = keyUsages; + + const normalizedAlgorithm = normalizeAlgorithm(algorithm, "generateKey"); + + // https://github.com/denoland/deno/pull/9614#issuecomment-866049433 + if (!extractable) { + throw new DOMException( + "Non-extractable keys are not supported", + "SecurityError", + ); + } + + const result = await generateKey( + normalizedAlgorithm, + extractable, + usages, + ); + + if (result instanceof CryptoKey) { + const type = result[_type]; + if ((type === "secret" || type === "private") && usages.length === 0) { + throw new DOMException("Invalid key usages", "SyntaxError"); + } + } else if (result.privateKey instanceof CryptoKey) { + if (result.privateKey[_usages].length === 0) { + throw new DOMException("Invalid key usages", "SyntaxError"); + } + } + + return result; + } + + get [SymbolToStringTag]() { + return "SubtleCrypto"; + } + } + + async function generateKey(normalizedAlgorithm, extractable, usages) { + switch (normalizedAlgorithm.name) { + case "RSASSA-PKCS1-v1_5": + case "RSA-PSS": { + // 1. 
+ if ( + ArrayPrototypeFind( + usages, + (u) => !ArrayPrototypeIncludes(["sign", "verify"], u), + ) !== undefined + ) { + throw new DOMException("Invalid key usages", "SyntaxError"); + } + + // 2. + const keyData = await core.opAsync( + "op_crypto_generate_key", + { + name: normalizedAlgorithm.name, + modulusLength: normalizedAlgorithm.modulusLength, + publicExponent: normalizedAlgorithm.publicExponent, + }, + ); + const handle = {}; + WeakMapPrototypeSet(KEY_STORE, handle, { + type: "pkcs8", + data: keyData, + }); + + // 4-8. + const algorithm = { + name: normalizedAlgorithm.name, + modulusLength: normalizedAlgorithm.modulusLength, + publicExponent: normalizedAlgorithm.publicExponent, + hash: normalizedAlgorithm.hash, + }; + + // 9-13. + const publicKey = constructKey( + "public", + true, + usageIntersection(usages, ["verify"]), + algorithm, + handle, + ); + + // 14-18. + const privateKey = constructKey( + "private", + extractable, + usageIntersection(usages, ["sign"]), + algorithm, + handle, + ); + + // 19-22. + return { publicKey, privateKey }; + } + // TODO(lucacasonato): RSA-OAEP + case "ECDSA": { + // 1. + if ( + ArrayPrototypeFind( + usages, + (u) => !ArrayPrototypeIncludes(["sign", "verify"], u), + ) !== undefined + ) { + throw new DOMException("Invalid key usages", "SyntaxError"); + } + + // 2-3. + const handle = {}; + if ( + ArrayPrototypeIncludes( + supportedNamedCurves, + normalizedAlgorithm.namedCurve, + ) + ) { + const keyData = await core.opAsync("op_crypto_generate_key", { + name: "ECDSA", + namedCurve: normalizedAlgorithm.namedCurve, + }); + WeakMapPrototypeSet(KEY_STORE, handle, { + type: "pkcs8", + data: keyData, + }); + } else { + throw new DOMException("Curve not supported", "NotSupportedError"); + } + + // 4-6. + const algorithm = { + name: "ECDSA", + namedCurve: normalizedAlgorithm.namedCurve, + }; + + // 7-11. + const publicKey = constructKey( + "public", + true, + usageIntersection(usages, ["verify"]), + algorithm, + handle, + ); + + // 12-16. + const privateKey = constructKey( + "private", + extractable, + usageIntersection(usages, ["sign"]), + algorithm, + handle, + ); + + // 17-20. + return { publicKey, privateKey }; + } + // TODO(lucacasonato): ECDH + // TODO(lucacasonato): AES-CTR + // TODO(lucacasonato): AES-CBC + // TODO(lucacasonato): AES-GCM + // TODO(lucacasonato): AES-KW + case "HMAC": { + // 1. + if ( + ArrayPrototypeFind( + usages, + (u) => !ArrayPrototypeIncludes(["sign", "verify"], u), + ) !== undefined + ) { + throw new DOMException("Invalid key usages", "SyntaxError"); + } + + // 2. + let length; + if (normalizedAlgorithm.length === undefined) { + length = null; + } else if (normalizedAlgorithm.length !== 0) { + length = normalizedAlgorithm.length; + } else { + throw new DOMException("Invalid length", "OperationError"); + } + + // 3-4. + const keyData = await core.opAsync("op_crypto_generate_key", { + name: "HMAC", + hash: normalizedAlgorithm.hash.name, + length, + }); + const handle = {}; + WeakMapPrototypeSet(KEY_STORE, handle, { type: "raw", data: keyData }); + + // 6-10. + const algorithm = { + name: "HMAC", + hash: { + name: normalizedAlgorithm.hash.name, + }, + length: keyData.byteLength * 8, + }; + + // 5, 11-13. + const key = constructKey( + "secret", + extractable, + usages, + algorithm, + handle, + ); + + // 14. 
+ return key; + } + } + } + + const subtle = webidl.createBranded(SubtleCrypto); + + class Crypto { + constructor() { + webidl.illegalConstructor(); + } + + getRandomValues(arrayBufferView) { + webidl.assertBranded(this, Crypto); + const prefix = "Failed to execute 'getRandomValues' on 'Crypto'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + arrayBufferView = webidl.converters.ArrayBufferView(arrayBufferView, { + prefix, + context: "Argument 1", + }); + if ( + !( + arrayBufferView instanceof Int8Array || + arrayBufferView instanceof Uint8Array || + arrayBufferView instanceof Uint8ClampedArray || + arrayBufferView instanceof Int16Array || + arrayBufferView instanceof Uint16Array || + arrayBufferView instanceof Int32Array || + arrayBufferView instanceof Uint32Array || + arrayBufferView instanceof BigInt64Array || + arrayBufferView instanceof BigUint64Array + ) + ) { + throw new DOMException( + "The provided ArrayBufferView is not an integer array type", + "TypeMismatchError", + ); + } + const ui8 = new Uint8Array( + arrayBufferView.buffer, + arrayBufferView.byteOffset, + arrayBufferView.byteLength, + ); + core.opSync("op_crypto_get_random_values", ui8); + return arrayBufferView; + } + + randomUUID() { + webidl.assertBranded(this, Crypto); + return core.opSync("op_crypto_random_uuid"); + } + + get subtle() { + webidl.assertBranded(this, Crypto); + return subtle; + } + + get [SymbolToStringTag]() { + return "Crypto"; + } + + [SymbolFor("Deno.customInspect")](inspect) { + return `${this.constructor.name} ${inspect({})}`; + } + } + + webidl.configurePrototype(Crypto); + + window.__bootstrap.crypto = { + SubtleCrypto, + crypto: webidl.createBranded(Crypto), + Crypto, + CryptoKey, + }; +})(this); diff --git a/ext/crypto/01_webidl.js b/ext/crypto/01_webidl.js new file mode 100644 index 000000000..7e78170b4 --- /dev/null +++ b/ext/crypto/01_webidl.js @@ -0,0 +1,188 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
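The `Crypto` surface above is intentionally small; a quick usage sketch of what it accepts:

    const buf = crypto.getRandomValues(new Uint8Array(16)); // filled in place and returned
    const id  = crypto.randomUUID();                         // random version-4 UUID string
    crypto.getRandomValues(new Float64Array(4));
    // throws a DOMException named "TypeMismatchError": only integer typed arrays pass
    // the instanceof checks above, and the Rust op later in this patch additionally
    // rejects views larger than 65536 bytes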
+ +// @ts-check +/// +/// + +"use strict"; + +((window) => { + const webidl = window.__bootstrap.webidl; + const { CryptoKey } = window.__bootstrap.crypto; + + webidl.converters.AlgorithmIdentifier = (V, opts) => { + // Union for (object or DOMString) + if (webidl.type(V) == "Object") { + return webidl.converters.object(V, opts); + } + return webidl.converters.DOMString(V, opts); + }; + + webidl.converters.KeyType = webidl.createEnumConverter("KeyType", [ + "public", + "private", + "secret", + ]); + + webidl.converters.KeyFormat = webidl.createEnumConverter("KeyFormat", [ + "raw", + "pkcs8", + "spki", + "jwk", + ]); + + webidl.converters.KeyUsage = webidl.createEnumConverter("KeyUsage", [ + "encrypt", + "decrypt", + "sign", + "verify", + "deriveKey", + "deriveBits", + "wrapKey", + "unwrapKey", + ]); + + webidl.converters["sequence"] = webidl.createSequenceConverter( + webidl.converters.KeyUsage, + ); + + webidl.converters.HashAlgorithmIdentifier = + webidl.converters.AlgorithmIdentifier; + + /** @type {__bootstrap.webidl.Dictionary} */ + const dictAlgorithm = [{ + key: "name", + converter: webidl.converters.DOMString, + required: true, + }]; + + webidl.converters.Algorithm = webidl + .createDictionaryConverter("Algorithm", dictAlgorithm); + + webidl.converters.BigInteger = webidl.converters.Uint8Array; + + /** @type {__bootstrap.webidl.Dictionary} */ + const dictRsaKeyGenParams = [ + ...dictAlgorithm, + { + key: "modulusLength", + converter: (V, opts) => + webidl.converters["unsigned long"](V, { ...opts, enforceRange: true }), + required: true, + }, + { + key: "publicExponent", + converter: webidl.converters.BigInteger, + required: true, + }, + ]; + + webidl.converters.RsaKeyGenParams = webidl + .createDictionaryConverter("RsaKeyGenParams", dictRsaKeyGenParams); + + const dictRsaHashedKeyGenParams = [ + ...dictRsaKeyGenParams, + { + key: "hash", + converter: webidl.converters.HashAlgorithmIdentifier, + required: true, + }, + ]; + + webidl.converters.RsaHashedKeyGenParams = webidl.createDictionaryConverter( + "RsaHashedKeyGenParams", + dictRsaHashedKeyGenParams, + ); + + webidl.converters.NamedCurve = webidl.converters.DOMString; + + const dictEcKeyGenParams = [ + ...dictAlgorithm, + { + key: "namedCurve", + converter: webidl.converters.NamedCurve, + required: true, + }, + ]; + + webidl.converters.EcKeyGenParams = webidl + .createDictionaryConverter("EcKeyGenParams", dictEcKeyGenParams); + + const dictHmacKeyGenParams = [ + ...dictAlgorithm, + { + key: "hash", + converter: webidl.converters.HashAlgorithmIdentifier, + required: true, + }, + { + key: "length", + converter: (V, opts) => + webidl.converters["unsigned long"](V, { ...opts, enforceRange: true }), + }, + ]; + + webidl.converters.HmacKeyGenParams = webidl + .createDictionaryConverter("HmacKeyGenParams", dictHmacKeyGenParams); + + const dictRsaPssParams = [ + ...dictAlgorithm, + { + key: "saltLength", + converter: (V, opts) => + webidl.converters["unsigned long"](V, { ...opts, enforceRange: true }), + required: true, + }, + ]; + + webidl.converters.RsaPssParams = webidl + .createDictionaryConverter("RsaPssParams", dictRsaPssParams); + + const dictEcdsaParams = [ + ...dictAlgorithm, + { + key: "hash", + converter: webidl.converters.HashAlgorithmIdentifier, + required: true, + }, + ]; + + webidl.converters["EcdsaParams"] = webidl + .createDictionaryConverter("EcdsaParams", dictEcdsaParams); + + const dictHmacImportParams = [ + ...dictAlgorithm, + { + key: "hash", + converter: webidl.converters.HashAlgorithmIdentifier, + required: true, 
+ }, + { + key: "length", + converter: (V, opts) => + webidl.converters["unsigned long"](V, { ...opts, enforceRange: true }), + }, + ]; + + webidl.converters.HmacImportParams = webidl + .createDictionaryConverter("HmacImportParams", dictHmacImportParams); + + webidl.converters.CryptoKey = webidl.createInterfaceConverter( + "CryptoKey", + CryptoKey, + ); + + const dictCryptoKeyPair = [ + { + key: "publicKey", + converter: webidl.converters.CryptoKey, + }, + { + key: "privateKey", + converter: webidl.converters.CryptoKey, + }, + ]; + + webidl.converters.CryptoKeyPair = webidl + .createDictionaryConverter("CryptoKeyPair", dictCryptoKeyPair); +})(this); diff --git a/ext/crypto/Cargo.toml b/ext/crypto/Cargo.toml new file mode 100644 index 000000000..8eb939e86 --- /dev/null +++ b/ext/crypto/Cargo.toml @@ -0,0 +1,28 @@ +# Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +[package] +name = "deno_crypto" +version = "0.28.0" +authors = ["the Deno authors"] +edition = "2018" +license = "MIT" +readme = "README.md" +repository = "https://github.com/denoland/deno" +description = "Web Cryptography API implementation for Deno" + +[lib] +path = "lib.rs" + +[dependencies] +deno_core = { version = "0.96.0", path = "../../core" } +deno_web = { version = "0.45.0", path = "../web" } +lazy_static = "1.4.0" +num-traits = "0.2.14" +rand = "0.8.4" +ring = { version = "0.16.20", features = ["std"] } +rsa = { version = "0.5.0", default-features = false, features = ["std"] } +serde = { version = "1.0.126", features = ["derive"] } +sha-1 = "0.9.7" +sha2 = "0.9.5" +tokio = { version = "1.8.1", features = ["full"] } +uuid = { version = "0.8.2", features = ["v4"] } diff --git a/ext/crypto/README.md b/ext/crypto/README.md new file mode 100644 index 000000000..be0724458 --- /dev/null +++ b/ext/crypto/README.md @@ -0,0 +1,5 @@ +# deno_crypto + +This crate implements the Web Cryptography API. + +Spec: https://www.w3.org/TR/WebCryptoAPI/ diff --git a/ext/crypto/key.rs b/ext/crypto/key.rs new file mode 100644 index 000000000..cb44812fd --- /dev/null +++ b/ext/crypto/key.rs @@ -0,0 +1,117 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
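Because `webidl.converters.AlgorithmIdentifier` (defined at the top of 01_webidl.js) accepts either a DOMString or an object, and `normalizeAlgorithm()` in 00_crypto.js folds the string form into `{ name }`, the two call shapes below end up identical (`data` here stands for any BufferSource and is not defined in the patch):

    await crypto.subtle.digest("SHA-256", data);
    await crypto.subtle.digest({ name: "SHA-256" }, data);
    // Dictionary members marked `required: true` above (e.g. EcdsaParams.hash or
    // RsaPssParams.saltLength) cause a TypeError during conversion when omitted.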
+ +use ring::agreement::Algorithm as RingAlgorithm; +use ring::digest; +use ring::hmac::Algorithm as HmacAlgorithm; +use ring::signature::EcdsaSigningAlgorithm; +use serde::Deserialize; +use serde::Serialize; + +#[derive(Serialize, Deserialize, Copy, Clone)] +#[serde(rename_all = "camelCase")] +pub enum KeyType { + Public, + Private, + Secret, +} + +#[derive(Serialize, Deserialize, Copy, Clone)] +pub enum CryptoHash { + #[serde(rename = "SHA-1")] + Sha1, + #[serde(rename = "SHA-256")] + Sha256, + #[serde(rename = "SHA-384")] + Sha384, + #[serde(rename = "SHA-512")] + Sha512, +} + +#[derive(Serialize, Deserialize, Copy, Clone)] +pub enum CryptoNamedCurve { + #[serde(rename = "P-256")] + P256, + #[serde(rename = "P-384")] + P384, +} + +impl From for &RingAlgorithm { + fn from(curve: CryptoNamedCurve) -> &'static RingAlgorithm { + match curve { + CryptoNamedCurve::P256 => &ring::agreement::ECDH_P256, + CryptoNamedCurve::P384 => &ring::agreement::ECDH_P384, + } + } +} + +impl From for &EcdsaSigningAlgorithm { + fn from(curve: CryptoNamedCurve) -> &'static EcdsaSigningAlgorithm { + match curve { + CryptoNamedCurve::P256 => { + &ring::signature::ECDSA_P256_SHA256_FIXED_SIGNING + } + CryptoNamedCurve::P384 => { + &ring::signature::ECDSA_P384_SHA384_FIXED_SIGNING + } + } + } +} + +impl From for HmacAlgorithm { + fn from(hash: CryptoHash) -> HmacAlgorithm { + match hash { + CryptoHash::Sha1 => ring::hmac::HMAC_SHA1_FOR_LEGACY_USE_ONLY, + CryptoHash::Sha256 => ring::hmac::HMAC_SHA256, + CryptoHash::Sha384 => ring::hmac::HMAC_SHA384, + CryptoHash::Sha512 => ring::hmac::HMAC_SHA512, + } + } +} + +impl From for &'static digest::Algorithm { + fn from(hash: CryptoHash) -> &'static digest::Algorithm { + match hash { + CryptoHash::Sha1 => &digest::SHA1_FOR_LEGACY_USE_ONLY, + CryptoHash::Sha256 => &digest::SHA256, + CryptoHash::Sha384 => &digest::SHA384, + CryptoHash::Sha512 => &digest::SHA512, + } + } +} + +#[derive(Serialize, Deserialize, Copy, Clone, Debug, PartialEq)] +#[serde(rename_all = "camelCase")] +pub enum KeyUsage { + Encrypt, + Decrypt, + Sign, + Verify, + DeriveKey, + DeriveBits, + WrapKey, + UnwrapKey, +} + +#[derive(Serialize, Deserialize, Clone, Copy)] +pub enum Algorithm { + #[serde(rename = "RSASSA-PKCS1-v1_5")] + RsassaPkcs1v15, + #[serde(rename = "RSA-PSS")] + RsaPss, + #[serde(rename = "RSA-OAEP")] + RsaOaep, + #[serde(rename = "ECDSA")] + Ecdsa, + #[serde(rename = "ECDH")] + Ecdh, + #[serde(rename = "AES-CTR")] + AesCtr, + #[serde(rename = "AES-CBC")] + AesCbc, + #[serde(rename = "AES-GCM")] + AesGcm, + #[serde(rename = "AES-KW")] + AesKw, + #[serde(rename = "HMAC")] + Hmac, +} diff --git a/ext/crypto/lib.deno_crypto.d.ts b/ext/crypto/lib.deno_crypto.d.ts new file mode 100644 index 000000000..b89b62f2e --- /dev/null +++ b/ext/crypto/lib.deno_crypto.d.ts @@ -0,0 +1,155 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
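The `#[serde(rename = ...)]` attributes in key.rs keep the Rust enums in sync with the exact strings the JS side sends across the op boundary. For example, the `sign()` path in 00_crypto.js issues a call shaped like the sketch below, and serde maps each string onto the matching variant:

    // Mirrors the op_crypto_sign_key invocation made from SubtleCrypto.sign() above:
    await core.opAsync("op_crypto_sign_key", {
      key: keyData,                   // { type: "raw" | "pkcs8", data: ... } -> KeyData/KeyFormat
      algorithm: "RSASSA-PKCS1-v1_5", // -> Algorithm::RsassaPkcs1v15
      hash: "SHA-256",                // -> CryptoHash::Sha256
    }, data);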
+ +/// +/// + +declare var crypto: Crypto; + +interface Algorithm { + name: string; +} + +interface KeyAlgorithm { + name: string; +} + +type AlgorithmIdentifier = string | Algorithm; +type HashAlgorithmIdentifier = AlgorithmIdentifier; +type KeyType = "private" | "public" | "secret"; +type KeyUsage = + | "decrypt" + | "deriveBits" + | "deriveKey" + | "encrypt" + | "sign" + | "unwrapKey" + | "verify" + | "wrapKey"; + +type NamedCurve = string; + +interface HmacKeyGenParams extends Algorithm { + hash: HashAlgorithmIdentifier; + length?: number; +} + +interface EcKeyGenParams extends Algorithm { + namedCurve: NamedCurve; +} + +interface EcdsaParams extends Algorithm { + hash: HashAlgorithmIdentifier; +} + +interface RsaHashedKeyGenParams extends RsaKeyGenParams { + hash: HashAlgorithmIdentifier; +} + +interface RsaKeyGenParams extends Algorithm { + modulusLength: number; + publicExponent: Uint8Array; +} + +interface RsaPssParams extends Algorithm { + saltLength: number; +} + +interface HmacImportParams extends Algorithm { + hash: HashAlgorithmIdentifier; + length?: number; +} + +/** The CryptoKey dictionary of the Web Crypto API represents a cryptographic key. */ +interface CryptoKey { + readonly algorithm: KeyAlgorithm; + readonly extractable: boolean; + readonly type: KeyType; + readonly usages: KeyUsage[]; +} + +declare var CryptoKey: { + prototype: CryptoKey; + new (): CryptoKey; +}; + +/** The CryptoKeyPair dictionary of the Web Crypto API represents a key pair for an asymmetric cryptography algorithm, also known as a public-key algorithm. */ +interface CryptoKeyPair { + privateKey: CryptoKey; + publicKey: CryptoKey; +} + +declare var CryptoKeyPair: { + prototype: CryptoKeyPair; + new (): CryptoKeyPair; +}; + +/** This Web Crypto API interface provides a number of low-level cryptographic functions. It is accessed via the Crypto.subtle properties available in a window context (via Window.crypto). */ +interface SubtleCrypto { + generateKey( + algorithm: RsaHashedKeyGenParams | EcKeyGenParams, + extractable: boolean, + keyUsages: KeyUsage[], + ): Promise; + generateKey( + algorithm: HmacKeyGenParams, + extractable: boolean, + keyUsages: KeyUsage[], + ): Promise; + generateKey( + algorithm: AlgorithmIdentifier, + extractable: boolean, + keyUsages: KeyUsage[], + ): Promise; + importKey( + format: "raw", + keyData: BufferSource, + algorithm: AlgorithmIdentifier | HmacImportParams, + extractable: boolean, + keyUsages: KeyUsage[], + ): Promise; + sign( + algorithm: AlgorithmIdentifier | RsaPssParams | EcdsaParams, + key: CryptoKey, + data: BufferSource, + ): Promise; + verify( + algorithm: AlgorithmIdentifier | RsaPssParams, + key: CryptoKey, + signature: BufferSource, + data: BufferSource, + ): Promise; + digest( + algorithm: AlgorithmIdentifier, + data: BufferSource, + ): Promise; +} + +declare interface Crypto { + readonly subtle: SubtleCrypto; + getRandomValues< + T extends + | Int8Array + | Int16Array + | Int32Array + | Uint8Array + | Uint16Array + | Uint32Array + | Uint8ClampedArray + | Float32Array + | Float64Array + | DataView + | null, + >( + array: T, + ): T; + randomUUID(): string; +} + +interface Algorithm { + name: string; +} + +declare var SubtleCrypto: { + prototype: SubtleCrypto; + new (): SubtleCrypto; +}; diff --git a/ext/crypto/lib.rs b/ext/crypto/lib.rs new file mode 100644 index 000000000..e77b34987 --- /dev/null +++ b/ext/crypto/lib.rs @@ -0,0 +1,558 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
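The overloads declared for `generateKey()` match the three families implemented in 00_crypto.js: RSA/EC key pairs, HMAC secret keys, and the generic fallback. An end-to-end HMAC round trip against this surface looks like:

    const key = await crypto.subtle.generateKey(
      { name: "HMAC", hash: "SHA-256" },
      true,                            // extractable; `false` is rejected with a SecurityError here
      ["sign", "verify"],
    );
    const data = new Uint8Array([1, 2, 3]);
    const sig  = await crypto.subtle.sign("HMAC", key, data);
    const ok   = await crypto.subtle.verify("HMAC", key, sig, data); // true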
+ +use deno_core::error::custom_error; +use deno_core::error::not_supported; +use deno_core::error::null_opbuf; +use deno_core::error::type_error; +use deno_core::error::AnyError; +use deno_core::include_js_files; +use deno_core::op_async; +use deno_core::op_sync; +use deno_core::Extension; +use deno_core::OpState; +use deno_core::ZeroCopyBuf; +use serde::Deserialize; + +use std::cell::RefCell; +use std::convert::TryInto; +use std::rc::Rc; + +use lazy_static::lazy_static; +use num_traits::cast::FromPrimitive; +use rand::rngs::OsRng; +use rand::rngs::StdRng; +use rand::thread_rng; +use rand::Rng; +use rand::SeedableRng; +use ring::digest; +use ring::hmac::Algorithm as HmacAlgorithm; +use ring::hmac::Key as HmacKey; +use ring::rand as RingRand; +use ring::rand::SecureRandom; +use ring::signature::EcdsaKeyPair; +use ring::signature::EcdsaSigningAlgorithm; +use rsa::padding::PaddingScheme; +use rsa::pkcs8::FromPrivateKey; +use rsa::pkcs8::ToPrivateKey; +use rsa::BigUint; +use rsa::PublicKey; +use rsa::RsaPrivateKey; +use rsa::RsaPublicKey; +use sha1::Sha1; +use sha2::Digest; +use sha2::Sha256; +use sha2::Sha384; +use sha2::Sha512; +use std::path::PathBuf; + +pub use rand; // Re-export rand + +mod key; + +use crate::key::Algorithm; +use crate::key::CryptoHash; +use crate::key::CryptoNamedCurve; + +// Allowlist for RSA public exponents. +lazy_static! { + static ref PUB_EXPONENT_1: BigUint = BigUint::from_u64(3).unwrap(); + static ref PUB_EXPONENT_2: BigUint = BigUint::from_u64(65537).unwrap(); +} + +pub fn init(maybe_seed: Option) -> Extension { + Extension::builder() + .js(include_js_files!( + prefix "deno:ext/crypto", + "00_crypto.js", + "01_webidl.js", + )) + .ops(vec![ + ( + "op_crypto_get_random_values", + op_sync(op_crypto_get_random_values), + ), + ("op_crypto_generate_key", op_async(op_crypto_generate_key)), + ("op_crypto_sign_key", op_async(op_crypto_sign_key)), + ("op_crypto_verify_key", op_async(op_crypto_verify_key)), + ("op_crypto_subtle_digest", op_async(op_crypto_subtle_digest)), + ("op_crypto_random_uuid", op_sync(op_crypto_random_uuid)), + ]) + .state(move |state| { + if let Some(seed) = maybe_seed { + state.put(StdRng::seed_from_u64(seed)); + } + Ok(()) + }) + .build() +} + +pub fn op_crypto_get_random_values( + state: &mut OpState, + mut zero_copy: ZeroCopyBuf, + _: (), +) -> Result<(), AnyError> { + if zero_copy.len() > 65536 { + return Err( + deno_web::DomExceptionQuotaExceededError::new(&format!("The ArrayBufferView's byte length ({}) exceeds the number of bytes of entropy available via this API (65536)", zero_copy.len())) + .into(), + ); + } + + let maybe_seeded_rng = state.try_borrow_mut::(); + if let Some(seeded_rng) = maybe_seeded_rng { + seeded_rng.fill(&mut *zero_copy); + } else { + let mut rng = thread_rng(); + rng.fill(&mut *zero_copy); + } + + Ok(()) +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct AlgorithmArg { + name: Algorithm, + modulus_length: Option, + public_exponent: Option, + named_curve: Option, + hash: Option, + length: Option, +} + +pub async fn op_crypto_generate_key( + _state: Rc>, + args: AlgorithmArg, + _: (), +) -> Result { + let algorithm = args.name; + + let key = match algorithm { + Algorithm::RsassaPkcs1v15 | Algorithm::RsaPss => { + let public_exponent = args.public_exponent.ok_or_else(not_supported)?; + let modulus_length = args.modulus_length.ok_or_else(not_supported)?; + + let exponent = BigUint::from_bytes_be(&public_exponent); + if exponent != *PUB_EXPONENT_1 && exponent != *PUB_EXPONENT_2 { + return 
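The op above fills the passed view in place, drawing from the seeded StdRng when a seed was supplied and from thread_rng otherwise, and it enforces the WebCrypto entropy quota. A sketch of the observable behaviour, assuming the Rust error type surfaces as a QuotaExceededError DOMException:

const buf = crypto.getRandomValues(new Uint8Array(32)); // filled in place, same view returned
console.log(buf.some((b) => b !== 0)); // almost certainly true

try {
  crypto.getRandomValues(new Uint8Array(65537)); // one byte over the 65536-byte quota
} catch (err) {
  console.log(err.name); // "QuotaExceededError"
}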
Err(custom_error( + "DOMExceptionOperationError", + "Bad public exponent", + )); + } + + let mut rng = OsRng; + + let private_key: RsaPrivateKey = tokio::task::spawn_blocking( + move || -> Result { + RsaPrivateKey::new_with_exp( + &mut rng, + modulus_length as usize, + &exponent, + ) + }, + ) + .await + .unwrap() + .map_err(|e| custom_error("DOMExceptionOperationError", e.to_string()))?; + + private_key.to_pkcs8_der()?.as_ref().to_vec() + } + Algorithm::Ecdsa => { + let curve: &EcdsaSigningAlgorithm = + args.named_curve.ok_or_else(not_supported)?.into(); + let rng = RingRand::SystemRandom::new(); + let private_key: Vec = tokio::task::spawn_blocking( + move || -> Result, ring::error::Unspecified> { + let pkcs8 = EcdsaKeyPair::generate_pkcs8(curve, &rng)?; + Ok(pkcs8.as_ref().to_vec()) + }, + ) + .await + .unwrap() + .map_err(|_| { + custom_error("DOMExceptionOperationError", "Key generation failed") + })?; + + private_key + } + Algorithm::Hmac => { + let hash: HmacAlgorithm = args.hash.ok_or_else(not_supported)?.into(); + + let length = if let Some(length) = args.length { + if (length % 8) != 0 { + return Err(custom_error( + "DOMExceptionOperationError", + "hmac block length must be byte aligned", + )); + } + let length = length / 8; + if length > ring::digest::MAX_BLOCK_LEN { + return Err(custom_error( + "DOMExceptionOperationError", + "hmac block length is too large", + )); + } + length + } else { + hash.digest_algorithm().block_len + }; + + let rng = RingRand::SystemRandom::new(); + let mut key_bytes = [0; ring::digest::MAX_BLOCK_LEN]; + let key_bytes = &mut key_bytes[..length]; + rng.fill(key_bytes).map_err(|_| { + custom_error("DOMExceptionOperationError", "Key generation failed") + })?; + + key_bytes.to_vec() + } + _ => return Err(not_supported()), + }; + + Ok(key.into()) +} + +#[derive(Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum KeyFormat { + Raw, + Pkcs8, +} + +#[derive(Deserialize)] +#[serde(rename_all = "lowercase")] +pub struct KeyData { + // TODO(littledivy): Kept here to be used to importKey() in future. + #[allow(dead_code)] + r#type: KeyFormat, + data: ZeroCopyBuf, +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SignArg { + key: KeyData, + algorithm: Algorithm, + salt_length: Option, + hash: Option, + named_curve: Option, +} + +pub async fn op_crypto_sign_key( + _state: Rc>, + args: SignArg, + zero_copy: Option, +) -> Result { + let zero_copy = zero_copy.ok_or_else(null_opbuf)?; + let data = &*zero_copy; + let algorithm = args.algorithm; + + let signature = match algorithm { + Algorithm::RsassaPkcs1v15 => { + let private_key = RsaPrivateKey::from_pkcs8_der(&*args.key.data)?; + let (padding, hashed) = match args + .hash + .ok_or_else(|| type_error("Missing argument hash".to_string()))? 
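For HMAC the op accepts an optional bit length that must be byte aligned and no larger than ring's maximum digest block size; without it the key defaults to the hash's block length. A hedged sketch of what that means for callers:

// 256-bit (32-byte) HMAC key; a length that is not a multiple of 8, or that
// exceeds the block-size cap, is rejected with an OperationError.
const key = await crypto.subtle.generateKey(
  { name: "HMAC", hash: "SHA-256", length: 256 },
  true,
  ["sign", "verify"],
);
// Omitting `length` falls back to the hash's block length
// (64 bytes, i.e. 512 bits, for SHA-256).
console.log(key.type); // "secret"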
+ { + CryptoHash::Sha1 => { + let mut hasher = Sha1::new(); + hasher.update(&data); + ( + PaddingScheme::PKCS1v15Sign { + hash: Some(rsa::hash::Hash::SHA1), + }, + hasher.finalize()[..].to_vec(), + ) + } + CryptoHash::Sha256 => { + let mut hasher = Sha256::new(); + hasher.update(&data); + ( + PaddingScheme::PKCS1v15Sign { + hash: Some(rsa::hash::Hash::SHA2_256), + }, + hasher.finalize()[..].to_vec(), + ) + } + CryptoHash::Sha384 => { + let mut hasher = Sha384::new(); + hasher.update(&data); + ( + PaddingScheme::PKCS1v15Sign { + hash: Some(rsa::hash::Hash::SHA2_384), + }, + hasher.finalize()[..].to_vec(), + ) + } + CryptoHash::Sha512 => { + let mut hasher = Sha512::new(); + hasher.update(&data); + ( + PaddingScheme::PKCS1v15Sign { + hash: Some(rsa::hash::Hash::SHA2_512), + }, + hasher.finalize()[..].to_vec(), + ) + } + }; + + private_key.sign(padding, &hashed)? + } + Algorithm::RsaPss => { + let private_key = RsaPrivateKey::from_pkcs8_der(&*args.key.data)?; + + let salt_len = args + .salt_length + .ok_or_else(|| type_error("Missing argument saltLength".to_string()))? + as usize; + + let rng = OsRng; + let (padding, digest_in) = match args + .hash + .ok_or_else(|| type_error("Missing argument hash".to_string()))? + { + CryptoHash::Sha1 => { + let mut hasher = Sha1::new(); + hasher.update(&data); + ( + PaddingScheme::new_pss_with_salt::(rng, salt_len), + hasher.finalize()[..].to_vec(), + ) + } + CryptoHash::Sha256 => { + let mut hasher = Sha256::new(); + hasher.update(&data); + ( + PaddingScheme::new_pss_with_salt::(rng, salt_len), + hasher.finalize()[..].to_vec(), + ) + } + CryptoHash::Sha384 => { + let mut hasher = Sha384::new(); + hasher.update(&data); + ( + PaddingScheme::new_pss_with_salt::(rng, salt_len), + hasher.finalize()[..].to_vec(), + ) + } + CryptoHash::Sha512 => { + let mut hasher = Sha512::new(); + hasher.update(&data); + ( + PaddingScheme::new_pss_with_salt::(rng, salt_len), + hasher.finalize()[..].to_vec(), + ) + } + }; + + // Sign data based on computed padding and return buffer + private_key.sign(padding, &digest_in)? + } + Algorithm::Ecdsa => { + let curve: &EcdsaSigningAlgorithm = + args.named_curve.ok_or_else(not_supported)?.try_into()?; + + let key_pair = EcdsaKeyPair::from_pkcs8(curve, &*args.key.data)?; + // We only support P256-SHA256 & P384-SHA384. These are recommended signature pairs. + // https://briansmith.org/rustdoc/ring/signature/index.html#statics + if let Some(hash) = args.hash { + match hash { + CryptoHash::Sha256 | CryptoHash::Sha384 => (), + _ => return Err(type_error("Unsupported algorithm")), + } + }; + + let rng = RingRand::SystemRandom::new(); + let signature = key_pair.sign(&rng, data)?; + + // Signature data as buffer. 
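The ECDSA path only pairs P-256 with SHA-256 and P-384 with SHA-384 (the ring *_FIXED_SIGNING algorithms) and rejects other hashes. An illustrative call, assuming the JS layer passes the key's curve through as namedCurve:

const { privateKey } = await crypto.subtle.generateKey(
  { name: "ECDSA", namedCurve: "P-256" },
  true,
  ["sign", "verify"],
);
const data = new TextEncoder().encode("payload");
const sig = await crypto.subtle.sign(
  { name: "ECDSA", hash: "SHA-256" }, // SHA-512 here would be rejected
  privateKey,
  data,
);
console.log(new Uint8Array(sig).length); // 64: raw r||s, the "fixed" encoding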
+ signature.as_ref().to_vec() + } + Algorithm::Hmac => { + let hash: HmacAlgorithm = args.hash.ok_or_else(not_supported)?.into(); + + let key = HmacKey::new(hash, &*args.key.data); + + let signature = ring::hmac::sign(&key, data); + signature.as_ref().to_vec() + } + _ => return Err(type_error("Unsupported algorithm".to_string())), + }; + + Ok(signature.into()) +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct VerifyArg { + key: KeyData, + algorithm: Algorithm, + salt_length: Option, + hash: Option, + signature: ZeroCopyBuf, +} + +pub async fn op_crypto_verify_key( + _state: Rc>, + args: VerifyArg, + zero_copy: Option, +) -> Result { + let zero_copy = zero_copy.ok_or_else(null_opbuf)?; + let data = &*zero_copy; + let algorithm = args.algorithm; + + let verification = match algorithm { + Algorithm::RsassaPkcs1v15 => { + let public_key: RsaPublicKey = + RsaPrivateKey::from_pkcs8_der(&*args.key.data)?.to_public_key(); + let (padding, hashed) = match args + .hash + .ok_or_else(|| type_error("Missing argument hash".to_string()))? + { + CryptoHash::Sha1 => { + let mut hasher = Sha1::new(); + hasher.update(&data); + ( + PaddingScheme::PKCS1v15Sign { + hash: Some(rsa::hash::Hash::SHA1), + }, + hasher.finalize()[..].to_vec(), + ) + } + CryptoHash::Sha256 => { + let mut hasher = Sha256::new(); + hasher.update(&data); + ( + PaddingScheme::PKCS1v15Sign { + hash: Some(rsa::hash::Hash::SHA2_256), + }, + hasher.finalize()[..].to_vec(), + ) + } + CryptoHash::Sha384 => { + let mut hasher = Sha384::new(); + hasher.update(&data); + ( + PaddingScheme::PKCS1v15Sign { + hash: Some(rsa::hash::Hash::SHA2_384), + }, + hasher.finalize()[..].to_vec(), + ) + } + CryptoHash::Sha512 => { + let mut hasher = Sha512::new(); + hasher.update(&data); + ( + PaddingScheme::PKCS1v15Sign { + hash: Some(rsa::hash::Hash::SHA2_512), + }, + hasher.finalize()[..].to_vec(), + ) + } + }; + + public_key + .verify(padding, &hashed, &*args.signature) + .is_ok() + } + Algorithm::RsaPss => { + let salt_len = args + .salt_length + .ok_or_else(|| type_error("Missing argument saltLength".to_string()))? + as usize; + let public_key: RsaPublicKey = + RsaPrivateKey::from_pkcs8_der(&*args.key.data)?.to_public_key(); + + let rng = OsRng; + let (padding, hashed) = match args + .hash + .ok_or_else(|| type_error("Missing argument hash".to_string()))? 
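HMAC signing simply feeds the raw key bytes and the data to ring::hmac. A round-trip sketch using the raw importKey() overload from the declaration file (the import plumbing lives in 00_crypto.js, which is not shown in this hunk):

const rawKey = crypto.getRandomValues(new Uint8Array(32));
const key = await crypto.subtle.importKey(
  "raw",
  rawKey,
  { name: "HMAC", hash: "SHA-256" },
  false,
  ["sign", "verify"],
);
const msg = new TextEncoder().encode("hello");
const mac = await crypto.subtle.sign("HMAC", key, msg);
console.log(await crypto.subtle.verify("HMAC", key, mac, msg)); // true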
+ { + CryptoHash::Sha1 => { + let mut hasher = Sha1::new(); + hasher.update(&data); + ( + PaddingScheme::new_pss_with_salt::(rng, salt_len), + hasher.finalize()[..].to_vec(), + ) + } + CryptoHash::Sha256 => { + let mut hasher = Sha256::new(); + hasher.update(&data); + ( + PaddingScheme::new_pss_with_salt::(rng, salt_len), + hasher.finalize()[..].to_vec(), + ) + } + CryptoHash::Sha384 => { + let mut hasher = Sha384::new(); + hasher.update(&data); + ( + PaddingScheme::new_pss_with_salt::(rng, salt_len), + hasher.finalize()[..].to_vec(), + ) + } + CryptoHash::Sha512 => { + let mut hasher = Sha512::new(); + hasher.update(&data); + ( + PaddingScheme::new_pss_with_salt::(rng, salt_len), + hasher.finalize()[..].to_vec(), + ) + } + }; + + public_key + .verify(padding, &hashed, &*args.signature) + .is_ok() + } + Algorithm::Hmac => { + let hash: HmacAlgorithm = args.hash.ok_or_else(not_supported)?.into(); + let key = HmacKey::new(hash, &*args.key.data); + ring::hmac::verify(&key, data, &*args.signature).is_ok() + } + _ => return Err(type_error("Unsupported algorithm".to_string())), + }; + + Ok(verification) +} + +pub fn op_crypto_random_uuid( + state: &mut OpState, + _: (), + _: (), +) -> Result { + let maybe_seeded_rng = state.try_borrow_mut::(); + let uuid = if let Some(seeded_rng) = maybe_seeded_rng { + let mut bytes = [0u8; 16]; + seeded_rng.fill(&mut bytes); + uuid::Builder::from_bytes(bytes) + .set_version(uuid::Version::Random) + .build() + } else { + uuid::Uuid::new_v4() + }; + + Ok(uuid.to_string()) +} + +pub async fn op_crypto_subtle_digest( + _state: Rc>, + algorithm: CryptoHash, + data: Option, +) -> Result { + let input = data.ok_or_else(null_opbuf)?; + let output = tokio::task::spawn_blocking(move || { + digest::digest(algorithm.into(), &input) + .as_ref() + .to_vec() + .into() + }) + .await?; + + Ok(output) +} + +pub fn get_declaration() -> PathBuf { + PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("lib.deno_crypto.d.ts") +} diff --git a/ext/fetch/01_fetch_util.js b/ext/fetch/01_fetch_util.js new file mode 100644 index 000000000..9cf19588b --- /dev/null +++ b/ext/fetch/01_fetch_util.js @@ -0,0 +1,22 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. +"use strict"; + +((window) => { + const { TypeError } = window.__bootstrap.primordials; + function requiredArguments( + name, + length, + required, + ) { + if (length < required) { + const errMsg = `${name} requires at least ${required} argument${ + required === 1 ? "" : "s" + }, but only ${length} present`; + throw new TypeError(errMsg); + } + } + + window.__bootstrap.fetchUtil = { + requiredArguments, + }; +})(this); diff --git a/ext/fetch/20_headers.js b/ext/fetch/20_headers.js new file mode 100644 index 000000000..91154d958 --- /dev/null +++ b/ext/fetch/20_headers.js @@ -0,0 +1,479 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
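digest() runs on the blocking thread pool and maps the algorithm name straight onto ring::digest, while randomUUID() draws from the seeded RNG when init() installed one (presumably wired to the CLI's --seed flag), which makes UUIDs reproducible in seeded runs. Sketch:

const bytes = new TextEncoder().encode("deno");
const hash = await crypto.subtle.digest("SHA-384", bytes);
console.log(new Uint8Array(hash).length); // 48 bytes for SHA-384

console.log(crypto.randomUUID()); // a version-4 UUID string; deterministic under a seeded RNG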
+ +// @ts-check +/// +/// +/// +/// +/// +/// +/// +"use strict"; + +((window) => { + const webidl = window.__bootstrap.webidl; + const { + HTTP_TAB_OR_SPACE_PREFIX_RE, + HTTP_TAB_OR_SPACE_SUFFIX_RE, + HTTP_WHITESPACE_PREFIX_RE, + HTTP_WHITESPACE_SUFFIX_RE, + HTTP_TOKEN_CODE_POINT_RE, + byteLowerCase, + collectSequenceOfCodepoints, + collectHttpQuotedString, + } = window.__bootstrap.infra; + const { + ArrayIsArray, + ArrayPrototypeMap, + ArrayPrototypePush, + ArrayPrototypeSort, + ArrayPrototypeJoin, + ArrayPrototypeSplice, + ArrayPrototypeFilter, + ObjectKeys, + ObjectEntries, + RegExpPrototypeTest, + Symbol, + SymbolFor, + SymbolIterator, + SymbolToStringTag, + StringPrototypeReplaceAll, + StringPrototypeIncludes, + TypeError, + } = window.__bootstrap.primordials; + + const _headerList = Symbol("header list"); + const _iterableHeaders = Symbol("iterable headers"); + const _guard = Symbol("guard"); + + /** + * @typedef Header + * @type {[string, string]} + */ + + /** + * @typedef HeaderList + * @type {Header[]} + */ + + /** + * @param {string} potentialValue + * @returns {string} + */ + function normalizeHeaderValue(potentialValue) { + potentialValue = StringPrototypeReplaceAll( + potentialValue, + HTTP_WHITESPACE_PREFIX_RE, + "", + ); + potentialValue = StringPrototypeReplaceAll( + potentialValue, + HTTP_WHITESPACE_SUFFIX_RE, + "", + ); + return potentialValue; + } + + /** + * @param {Headers} headers + * @param {HeadersInit} object + */ + function fillHeaders(headers, object) { + if (ArrayIsArray(object)) { + for (const header of object) { + if (header.length !== 2) { + throw new TypeError( + `Invalid header. Length must be 2, but is ${header.length}`, + ); + } + appendHeader(headers, header[0], header[1]); + } + } else { + for (const key of ObjectKeys(object)) { + appendHeader(headers, key, object[key]); + } + } + } + + /** + * https://fetch.spec.whatwg.org/#concept-headers-append + * @param {Headers} headers + * @param {string} name + * @param {string} value + */ + function appendHeader(headers, name, value) { + // 1. + value = normalizeHeaderValue(value); + + // 2. + if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, name)) { + throw new TypeError("Header name is not valid."); + } + if ( + StringPrototypeIncludes(value, "\x00") || + StringPrototypeIncludes(value, "\x0A") || + StringPrototypeIncludes(value, "\x0D") + ) { + throw new TypeError("Header value is not valid."); + } + + // 3. + if (headers[_guard] == "immutable") { + throw new TypeError("Headers are immutable."); + } + + // 7. + const list = headers[_headerList]; + name = byteLowerCase(name); + ArrayPrototypePush(list, [name, value]); + } + + /** + * https://fetch.spec.whatwg.org/#concept-header-list-get + * @param {HeaderList} list + * @param {string} name + */ + function getHeader(list, name) { + const lowercaseName = byteLowerCase(name); + const entries = ArrayPrototypeMap( + ArrayPrototypeFilter(list, (entry) => entry[0] === lowercaseName), + (entry) => entry[1], + ); + if (entries.length === 0) { + return null; + } else { + return ArrayPrototypeJoin(entries, "\x2C\x20"); + } + } + + /** + * https://fetch.spec.whatwg.org/#concept-header-list-get-decode-split + * @param {HeaderList} list + * @param {string} name + * @returns {string[] | null} + */ + function getDecodeSplitHeader(list, name) { + const initialValue = getHeader(list, name); + if (initialValue === null) return null; + const input = initialValue; + let position = 0; + const values = []; + let value = ""; + while (position < initialValue.length) { + // 7.1. 
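appendHeader() strips leading and trailing HTTP whitespace from the value and byte-lowercases the name, and getHeader() joins every value stored under a name with ", " (0x2C 0x20). The observable behaviour, sketched:

const h = new Headers();
h.append("Accept", "  text/html  ");    // value is normalized to "text/html"
h.append("accept", "application/json"); // names are byte-lowercased
console.log(h.get("ACCEPT"));           // "text/html, application/json"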
collect up to " or , + const res = collectSequenceOfCodepoints( + initialValue, + position, + (c) => c !== "\u0022" && c !== "\u002C", + ); + value += res.result; + position = res.position; + + if (position < initialValue.length) { + if (input[position] === "\u0022") { + const res = collectHttpQuotedString(input, position, false); + value += res.result; + position = res.position; + if (position < initialValue.length) { + continue; + } + } else { + if (input[position] !== "\u002C") throw new TypeError("Unreachable"); + position += 1; + } + } + + value = StringPrototypeReplaceAll(value, HTTP_TAB_OR_SPACE_PREFIX_RE, ""); + value = StringPrototypeReplaceAll(value, HTTP_TAB_OR_SPACE_SUFFIX_RE, ""); + + ArrayPrototypePush(values, value); + value = ""; + } + return values; + } + + class Headers { + /** @type {HeaderList} */ + [_headerList] = []; + /** @type {"immutable" | "request" | "request-no-cors" | "response" | "none"} */ + [_guard]; + + get [_iterableHeaders]() { + const list = this[_headerList]; + + // The order of steps are not similar to the ones suggested by the + // spec but produce the same result. + const headers = {}; + const cookies = []; + for (const entry of list) { + const name = entry[0]; + const value = entry[1]; + if (value === null) throw new TypeError("Unreachable"); + // The following if statement is not spec compliant. + // `set-cookie` is the only header that can not be concatentated, + // so must be given to the user as multiple headers. + // The else block of the if statement is spec compliant again. + if (name === "set-cookie") { + ArrayPrototypePush(cookies, [name, value]); + } else { + // The following code has the same behaviour as getHeader() + // at the end of loop. But it avoids looping through the entire + // list to combine multiple values with same header name. It + // instead gradually combines them as they are found. 
+ let header = headers[name]; + if (header && header.length > 0) { + header += "\x2C\x20" + value; + } else { + header = value; + } + headers[name] = header; + } + } + + return ArrayPrototypeSort( + [...ObjectEntries(headers), ...cookies], + (a, b) => { + const akey = a[0]; + const bkey = b[0]; + if (akey > bkey) return 1; + if (akey < bkey) return -1; + return 0; + }, + ); + } + + /** @param {HeadersInit} [init] */ + constructor(init = undefined) { + const prefix = "Failed to construct 'Event'"; + if (init !== undefined) { + init = webidl.converters["HeadersInit"](init, { + prefix, + context: "Argument 1", + }); + } + + this[webidl.brand] = webidl.brand; + this[_guard] = "none"; + if (init !== undefined) { + fillHeaders(this, init); + } + } + + /** + * @param {string} name + * @param {string} value + */ + append(name, value) { + webidl.assertBranded(this, Headers); + const prefix = "Failed to execute 'append' on 'Headers'"; + webidl.requiredArguments(arguments.length, 2, { prefix }); + name = webidl.converters["ByteString"](name, { + prefix, + context: "Argument 1", + }); + value = webidl.converters["ByteString"](value, { + prefix, + context: "Argument 2", + }); + appendHeader(this, name, value); + } + + /** + * @param {string} name + */ + delete(name) { + const prefix = "Failed to execute 'delete' on 'Headers'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + name = webidl.converters["ByteString"](name, { + prefix, + context: "Argument 1", + }); + + if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, name)) { + throw new TypeError("Header name is not valid."); + } + if (this[_guard] == "immutable") { + throw new TypeError("Headers are immutable."); + } + + const list = this[_headerList]; + name = byteLowerCase(name); + for (let i = 0; i < list.length; i++) { + if (list[i][0] === name) { + ArrayPrototypeSplice(list, i, 1); + i--; + } + } + } + + /** + * @param {string} name + */ + get(name) { + const prefix = "Failed to execute 'get' on 'Headers'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + name = webidl.converters["ByteString"](name, { + prefix, + context: "Argument 1", + }); + + if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, name)) { + throw new TypeError("Header name is not valid."); + } + + const list = this[_headerList]; + return getHeader(list, name); + } + + /** + * @param {string} name + */ + has(name) { + const prefix = "Failed to execute 'has' on 'Headers'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + name = webidl.converters["ByteString"](name, { + prefix, + context: "Argument 1", + }); + + if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, name)) { + throw new TypeError("Header name is not valid."); + } + + const list = this[_headerList]; + name = byteLowerCase(name); + for (let i = 0; i < list.length; i++) { + if (list[i][0] === name) { + return true; + } + } + return false; + } + + /** + * @param {string} name + * @param {string} value + */ + set(name, value) { + webidl.assertBranded(this, Headers); + const prefix = "Failed to execute 'set' on 'Headers'"; + webidl.requiredArguments(arguments.length, 2, { prefix }); + name = webidl.converters["ByteString"](name, { + prefix, + context: "Argument 1", + }); + value = webidl.converters["ByteString"](value, { + prefix, + context: "Argument 2", + }); + + value = normalizeHeaderValue(value); + + // 2. 
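The mutation methods all work against the same lower-cased header list: append() adds another entry, set() overwrites the first match and removes any remaining duplicates, and delete() removes them all. For example:

const h = new Headers([["x-demo", "1"]]);
h.append("X-Demo", "2");      // two entries for "x-demo"
h.set("x-demo", "3");         // first entry replaced, the duplicate removed
console.log(h.get("x-demo")); // "3"
h.delete("x-demo");
console.log(h.has("x-demo")); // false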
+ if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, name)) { + throw new TypeError("Header name is not valid."); + } + if ( + StringPrototypeIncludes(value, "\x00") || + StringPrototypeIncludes(value, "\x0A") || + StringPrototypeIncludes(value, "\x0D") + ) { + throw new TypeError("Header value is not valid."); + } + + if (this[_guard] == "immutable") { + throw new TypeError("Headers are immutable."); + } + + const list = this[_headerList]; + name = byteLowerCase(name); + let added = false; + for (let i = 0; i < list.length; i++) { + if (list[i][0] === name) { + if (!added) { + list[i][1] = value; + added = true; + } else { + ArrayPrototypeSplice(list, i, 1); + i--; + } + } + } + if (!added) { + ArrayPrototypePush(list, [name, value]); + } + } + + [SymbolFor("Deno.privateCustomInspect")](inspect) { + const headers = {}; + for (const header of this) { + headers[header[0]] = header[1]; + } + return `Headers ${inspect(headers)}`; + } + + get [SymbolToStringTag]() { + return "Headers"; + } + } + + webidl.mixinPairIterable("Headers", Headers, _iterableHeaders, 0, 1); + + webidl.configurePrototype(Headers); + + webidl.converters["HeadersInit"] = (V, opts) => { + // Union for (sequence> or record) + if (webidl.type(V) === "Object" && V !== null) { + if (V[SymbolIterator] !== undefined) { + return webidl.converters["sequence>"](V, opts); + } + return webidl.converters["record"](V, opts); + } + throw webidl.makeException( + TypeError, + "The provided value is not of type '(sequence> or record)'", + opts, + ); + }; + webidl.converters["Headers"] = webidl.createInterfaceConverter( + "Headers", + Headers, + ); + + /** + * @param {HeaderList} list + * @param {"immutable" | "request" | "request-no-cors" | "response" | "none"} guard + * @returns {Headers} + */ + function headersFromHeaderList(list, guard) { + const headers = webidl.createBranded(Headers); + headers[_headerList] = list; + headers[_guard] = guard; + return headers; + } + + /** + * @param {Headers} + * @returns {HeaderList} + */ + function headerListFromHeaders(headers) { + return headers[_headerList]; + } + + /** + * @param {Headers} + * @returns {"immutable" | "request" | "request-no-cors" | "response" | "none"} + */ + function guardFromHeaders(headers) { + return headers[_guard]; + } + + window.__bootstrap.headers = { + Headers, + headersFromHeaderList, + headerListFromHeaders, + fillHeaders, + getDecodeSplitHeader, + guardFromHeaders, + }; +})(this); diff --git a/ext/fetch/21_formdata.js b/ext/fetch/21_formdata.js new file mode 100644 index 000000000..25ed32c2d --- /dev/null +++ b/ext/fetch/21_formdata.js @@ -0,0 +1,507 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
+ +// @ts-check +/// +/// +/// +/// +/// +/// +/// +"use strict"; + +((window) => { + const core = window.Deno.core; + const webidl = globalThis.__bootstrap.webidl; + const { Blob, File } = globalThis.__bootstrap.file; + const { + ArrayPrototypeMap, + ArrayPrototypePush, + ArrayPrototypeSlice, + ArrayPrototypeSplice, + ArrayPrototypeFilter, + ArrayPrototypeForEach, + Map, + MapPrototypeGet, + MapPrototypeSet, + MathRandom, + Symbol, + SymbolToStringTag, + StringFromCharCode, + StringPrototypeTrim, + StringPrototypeSlice, + StringPrototypeSplit, + StringPrototypeReplace, + StringPrototypeIndexOf, + StringPrototypePadStart, + StringPrototypeCodePointAt, + StringPrototypeReplaceAll, + TypeError, + TypedArrayPrototypeSubarray, + } = window.__bootstrap.primordials; + + const entryList = Symbol("entry list"); + + /** + * @param {string} name + * @param {string | Blob} value + * @param {string | undefined} filename + * @returns {FormDataEntry} + */ + function createEntry(name, value, filename) { + if (value instanceof Blob && !(value instanceof File)) { + value = new File([value], "blob", { type: value.type }); + } + if (value instanceof File && filename !== undefined) { + value = new File([value], filename, { + type: value.type, + lastModified: value.lastModified, + }); + } + return { + name, + // @ts-expect-error because TS is not smart enough + value, + }; + } + + /** + * @typedef FormDataEntry + * @property {string} name + * @property {FormDataEntryValue} value + */ + + class FormData { + get [SymbolToStringTag]() { + return "FormData"; + } + + /** @type {FormDataEntry[]} */ + [entryList] = []; + + /** @param {void} form */ + constructor(form) { + if (form !== undefined) { + webidl.illegalConstructor(); + } + this[webidl.brand] = webidl.brand; + } + + /** + * @param {string} name + * @param {string | Blob} valueOrBlobValue + * @param {string} [filename] + * @returns {void} + */ + append(name, valueOrBlobValue, filename) { + webidl.assertBranded(this, FormData); + const prefix = "Failed to execute 'append' on 'FormData'"; + webidl.requiredArguments(arguments.length, 2, { prefix }); + + name = webidl.converters["USVString"](name, { + prefix, + context: "Argument 1", + }); + if (valueOrBlobValue instanceof Blob) { + valueOrBlobValue = webidl.converters["Blob"](valueOrBlobValue, { + prefix, + context: "Argument 2", + }); + if (filename !== undefined) { + filename = webidl.converters["USVString"](filename, { + prefix, + context: "Argument 3", + }); + } + } else { + valueOrBlobValue = webidl.converters["USVString"](valueOrBlobValue, { + prefix, + context: "Argument 2", + }); + } + + const entry = createEntry(name, valueOrBlobValue, filename); + + ArrayPrototypePush(this[entryList], entry); + } + + /** + * @param {string} name + * @returns {void} + */ + delete(name) { + webidl.assertBranded(this, FormData); + const prefix = "Failed to execute 'name' on 'FormData'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + + name = webidl.converters["USVString"](name, { + prefix, + context: "Argument 1", + }); + + const list = this[entryList]; + for (let i = 0; i < list.length; i++) { + if (list[i].name === name) { + ArrayPrototypeSplice(list, i, 1); + i--; + } + } + } + + /** + * @param {string} name + * @returns {FormDataEntryValue | null} + */ + get(name) { + webidl.assertBranded(this, FormData); + const prefix = "Failed to execute 'get' on 'FormData'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + + name = webidl.converters["USVString"](name, { + prefix, + context: "Argument 
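createEntry() upgrades a plain Blob to a File named "blob", and a third append() argument replaces that filename. Sketch:

const fd = new FormData();
fd.append("file", new Blob(["abc"], { type: "text/plain" }));
const entry = fd.get("file");
if (entry instanceof File) console.log(entry.name); // "blob"

fd.append("upload", new Blob(["abc"]), "notes.txt"); // explicit filename wins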
1", + }); + + for (const entry of this[entryList]) { + if (entry.name === name) return entry.value; + } + return null; + } + + /** + * @param {string} name + * @returns {FormDataEntryValue[]} + */ + getAll(name) { + webidl.assertBranded(this, FormData); + const prefix = "Failed to execute 'getAll' on 'FormData'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + + name = webidl.converters["USVString"](name, { + prefix, + context: "Argument 1", + }); + + const returnList = []; + for (const entry of this[entryList]) { + if (entry.name === name) ArrayPrototypePush(returnList, entry.value); + } + return returnList; + } + + /** + * @param {string} name + * @returns {boolean} + */ + has(name) { + webidl.assertBranded(this, FormData); + const prefix = "Failed to execute 'has' on 'FormData'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + + name = webidl.converters["USVString"](name, { + prefix, + context: "Argument 1", + }); + + for (const entry of this[entryList]) { + if (entry.name === name) return true; + } + return false; + } + + /** + * @param {string} name + * @param {string | Blob} valueOrBlobValue + * @param {string} [filename] + * @returns {void} + */ + set(name, valueOrBlobValue, filename) { + webidl.assertBranded(this, FormData); + const prefix = "Failed to execute 'set' on 'FormData'"; + webidl.requiredArguments(arguments.length, 2, { prefix }); + + name = webidl.converters["USVString"](name, { + prefix, + context: "Argument 1", + }); + if (valueOrBlobValue instanceof Blob) { + valueOrBlobValue = webidl.converters["Blob"](valueOrBlobValue, { + prefix, + context: "Argument 2", + }); + if (filename !== undefined) { + filename = webidl.converters["USVString"](filename, { + prefix, + context: "Argument 3", + }); + } + } else { + valueOrBlobValue = webidl.converters["USVString"](valueOrBlobValue, { + prefix, + context: "Argument 2", + }); + } + + const entry = createEntry(name, valueOrBlobValue, filename); + + const list = this[entryList]; + let added = false; + for (let i = 0; i < list.length; i++) { + if (list[i].name === name) { + if (!added) { + list[i] = entry; + added = true; + } else { + ArrayPrototypeSplice(list, i, 1); + i--; + } + } + } + if (!added) { + ArrayPrototypePush(list, entry); + } + } + } + + webidl.mixinPairIterable("FormData", FormData, entryList, "name", "value"); + + webidl.configurePrototype(FormData); + + const escape = (str, isFilename) => + StringPrototypeReplace( + StringPrototypeReplace( + StringPrototypeReplace( + (isFilename ? 
str : StringPrototypeReplace(str, /\r?\n|\r/g, "\r\n")), + /\n/g, + "%0A", + ), + /\r/g, + "%0D", + ), + /"/g, + "%22", + ); + + /** + * convert FormData to a Blob synchronous without reading all of the files + * @param {globalThis.FormData} formData + */ + function formDataToBlob(formData) { + const boundary = StringPrototypePadStart( + StringPrototypeSlice( + StringPrototypeReplaceAll(`${MathRandom()}${MathRandom()}`, ".", ""), + -28, + ), + 32, + "-", + ); + const chunks = []; + const prefix = `--${boundary}\r\nContent-Disposition: form-data; name="`; + + for (const [name, value] of formData) { + if (typeof value === "string") { + ArrayPrototypePush( + chunks, + prefix + escape(name) + '"' + CRLF + CRLF + + StringPrototypeReplace(value, /\r(?!\n)|(?} + */ + function parseContentDisposition(value) { + /** @type {Map} */ + const params = new Map(); + // Forced to do so for some Map constructor param mismatch + ArrayPrototypeForEach( + ArrayPrototypeMap( + ArrayPrototypeFilter( + ArrayPrototypeMap( + ArrayPrototypeSlice(StringPrototypeSplit(value, ";"), 1), + (s) => StringPrototypeSplit(StringPrototypeTrim(s), "="), + ), + (arr) => arr.length > 1, + ), + ([k, v]) => [k, StringPrototypeReplace(v, /^"([^"]*)"$/, "$1")], + ), + ([k, v]) => MapPrototypeSet(params, k, v), + ); + + return params; + } + + const CRLF = "\r\n"; + const LF = StringPrototypeCodePointAt(CRLF, 1); + const CR = StringPrototypeCodePointAt(CRLF, 0); + + class MultipartParser { + /** + * @param {Uint8Array} body + * @param {string | undefined} boundary + */ + constructor(body, boundary) { + if (!boundary) { + throw new TypeError("multipart/form-data must provide a boundary"); + } + + this.boundary = `--${boundary}`; + this.body = body; + this.boundaryChars = core.encode(this.boundary); + } + + /** + * @param {string} headersText + * @returns {{ headers: Headers, disposition: Map }} + */ + #parseHeaders(headersText) { + const headers = new Headers(); + const rawHeaders = StringPrototypeSplit(headersText, "\r\n"); + for (const rawHeader of rawHeaders) { + const sepIndex = StringPrototypeIndexOf(rawHeader, ":"); + if (sepIndex < 0) { + continue; // Skip this header + } + const key = StringPrototypeSlice(rawHeader, 0, sepIndex); + const value = StringPrototypeSlice(rawHeader, sepIndex + 1); + headers.set(key, value); + } + + const disposition = parseContentDisposition( + headers.get("Content-Disposition") ?? "", + ); + + return { headers, disposition }; + } + + /** + * @returns {FormData} + */ + parse() { + // Body must be at least 2 boundaries + \r\n + -- on the last boundary. 
+ if (this.body.length < (this.boundary.length * 2) + 4) { + throw new TypeError("Form data too short to be valid."); + } + + const formData = new FormData(); + let headerText = ""; + let boundaryIndex = 0; + let state = 0; + let fileStart = 0; + + for (let i = 0; i < this.body.length; i++) { + const byte = this.body[i]; + const prevByte = this.body[i - 1]; + const isNewLine = byte === LF && prevByte === CR; + + if (state === 1 || state === 2 || state == 3) { + headerText += StringFromCharCode(byte); + } + if (state === 0 && isNewLine) { + state = 1; + } else if (state === 1 && isNewLine) { + state = 2; + const headersDone = this.body[i + 1] === CR && + this.body[i + 2] === LF; + + if (headersDone) { + state = 3; + } + } else if (state === 2 && isNewLine) { + state = 3; + } else if (state === 3 && isNewLine) { + state = 4; + fileStart = i + 1; + } else if (state === 4) { + if (this.boundaryChars[boundaryIndex] !== byte) { + boundaryIndex = 0; + } else { + boundaryIndex++; + } + + if (boundaryIndex >= this.boundary.length) { + const { headers, disposition } = this.#parseHeaders(headerText); + const content = TypedArrayPrototypeSubarray( + this.body, + fileStart, + i - boundaryIndex - 1, + ); + // https://fetch.spec.whatwg.org/#ref-for-dom-body-formdata + const filename = MapPrototypeGet(disposition, "filename"); + const name = MapPrototypeGet(disposition, "name"); + + state = 5; + // Reset + boundaryIndex = 0; + headerText = ""; + + if (!name) { + continue; // Skip, unknown name + } + + if (filename) { + const blob = new Blob([content], { + type: headers.get("Content-Type") || "application/octet-stream", + }); + formData.append(name, blob, filename); + } else { + formData.append(name, core.decode(content)); + } + } + } else if (state === 5 && isNewLine) { + state = 1; + } + } + + return formData; + } + } + + /** + * @param {Uint8Array} body + * @param {string | undefined} boundary + * @returns {FormData} + */ + function parseFormData(body, boundary) { + const parser = new MultipartParser(body, boundary); + return parser.parse(); + } + + /** + * @param {FormDataEntry[]} entries + * @returns {FormData} + */ + function formDataFromEntries(entries) { + const fd = new FormData(); + fd[entryList] = entries; + return fd; + } + + webidl.converters["FormData"] = webidl + .createInterfaceConverter("FormData", FormData); + + globalThis.__bootstrap.formData = { + FormData, + formDataToBlob, + parseFormData, + formDataFromEntries, + }; +})(globalThis); diff --git a/ext/fetch/22_body.js b/ext/fetch/22_body.js new file mode 100644 index 000000000..49da149c2 --- /dev/null +++ b/ext/fetch/22_body.js @@ -0,0 +1,403 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
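Taken together, formDataToBlob() and MultipartParser give a serialize/parse round trip for multipart bodies. A sketch, under the assumption that the Response constructor added later in this patch routes a FormData body through extractBody() in 22_body.js below:

const fd = new FormData();
fd.set("name", "deno");
fd.set("file", new Blob(["hello"], { type: "text/plain" }), "hello.txt");

const res = new Response(fd);        // serialized via formDataToBlob()
const parsed = await res.formData(); // parsed back by MultipartParser
console.log(parsed.get("name"));     // "deno"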
+ +// @ts-check +/// +/// +/// +/// +/// +/// +/// +/// +/// +"use strict"; + +((window) => { + const core = window.Deno.core; + const webidl = globalThis.__bootstrap.webidl; + const { parseUrlEncoded } = globalThis.__bootstrap.url; + const { parseFormData, formDataFromEntries, formDataToBlob } = + globalThis.__bootstrap.formData; + const mimesniff = globalThis.__bootstrap.mimesniff; + const { isReadableStreamDisturbed, errorReadableStream, createProxy } = + globalThis.__bootstrap.streams; + const { + ArrayBuffer, + ArrayBufferIsView, + ArrayPrototypePush, + ArrayPrototypeMap, + JSONParse, + ObjectDefineProperties, + PromiseResolve, + TypedArrayPrototypeSet, + TypedArrayPrototypeSlice, + TypeError, + Uint8Array, + } = window.__bootstrap.primordials; + + class InnerBody { + /** @type {ReadableStream | { body: Uint8Array, consumed: boolean }} */ + streamOrStatic; + /** @type {null | Uint8Array | Blob | FormData} */ + source = null; + /** @type {null | number} */ + length = null; + + /** + * @param {ReadableStream | { body: Uint8Array, consumed: boolean }} stream + */ + constructor(stream) { + this.streamOrStatic = stream ?? + { body: new Uint8Array(), consumed: false }; + } + + get stream() { + if (!(this.streamOrStatic instanceof ReadableStream)) { + const { body, consumed } = this.streamOrStatic; + if (consumed) { + this.streamOrStatic = new ReadableStream(); + this.streamOrStatic.getReader(); + } else { + this.streamOrStatic = new ReadableStream({ + start(controller) { + controller.enqueue(body); + controller.close(); + }, + }); + } + } + return this.streamOrStatic; + } + + /** + * https://fetch.spec.whatwg.org/#body-unusable + * @returns {boolean} + */ + unusable() { + if (this.streamOrStatic instanceof ReadableStream) { + return this.streamOrStatic.locked || + isReadableStreamDisturbed(this.streamOrStatic); + } + return this.streamOrStatic.consumed; + } + + /** + * @returns {boolean} + */ + consumed() { + if (this.streamOrStatic instanceof ReadableStream) { + return isReadableStreamDisturbed(this.streamOrStatic); + } + return this.streamOrStatic.consumed; + } + + /** + * https://fetch.spec.whatwg.org/#concept-body-consume-body + * @returns {Promise} + */ + async consume() { + if (this.unusable()) throw new TypeError("Body already consumed."); + if (this.streamOrStatic instanceof ReadableStream) { + const reader = this.stream.getReader(); + /** @type {Uint8Array[]} */ + const chunks = []; + let totalLength = 0; + while (true) { + const { value: chunk, done } = await reader.read(); + if (done) break; + ArrayPrototypePush(chunks, chunk); + totalLength += chunk.byteLength; + } + const finalBuffer = new Uint8Array(totalLength); + let i = 0; + for (const chunk of chunks) { + TypedArrayPrototypeSet(finalBuffer, chunk, i); + i += chunk.byteLength; + } + return finalBuffer; + } else { + this.streamOrStatic.consumed = true; + return this.streamOrStatic.body; + } + } + + cancel(error) { + if (this.streamOrStatic instanceof ReadableStream) { + this.streamOrStatic.cancel(error); + } else { + this.streamOrStatic.consumed = true; + } + } + + error(error) { + if (this.streamOrStatic instanceof ReadableStream) { + errorReadableStream(this.streamOrStatic, error); + } else { + this.streamOrStatic.consumed = true; + } + } + + /** + * @returns {InnerBody} + */ + clone() { + const [out1, out2] = this.stream.tee(); + this.streamOrStatic = out1; + const second = new InnerBody(out2); + second.source = core.deserialize(core.serialize(this.source)); + second.length = this.length; + return second; + } + + /** + * 
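InnerBody is single-use: consume() drains the stream (or marks a static body as consumed) and any later read fails. From the caller's perspective:

const res = new Response("hello");
console.log(res.bodyUsed);     // false
console.log(await res.text()); // "hello"
console.log(res.bodyUsed);     // true
// A second read rejects with TypeError("Body already consumed.").
await res.text().catch((err) => console.log(err instanceof TypeError)); // true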
@returns {InnerBody} + */ + createProxy() { + let proxyStreamOrStatic; + if (this.streamOrStatic instanceof ReadableStream) { + proxyStreamOrStatic = createProxy(this.streamOrStatic); + } else { + proxyStreamOrStatic = { ...this.streamOrStatic }; + this.streamOrStatic.consumed = true; + } + const proxy = new InnerBody(proxyStreamOrStatic); + proxy.source = this.source; + proxy.length = this.length; + return proxy; + } + } + + /** + * @param {any} prototype + * @param {symbol} bodySymbol + * @param {symbol} mimeTypeSymbol + * @returns {void} + */ + function mixinBody(prototype, bodySymbol, mimeTypeSymbol) { + function consumeBody(object) { + if (object[bodySymbol] !== null) { + return object[bodySymbol].consume(); + } + return PromiseResolve(new Uint8Array()); + } + + /** @type {PropertyDescriptorMap} */ + const mixin = { + body: { + /** + * @returns {ReadableStream | null} + */ + get() { + webidl.assertBranded(this, prototype); + if (this[bodySymbol] === null) { + return null; + } else { + return this[bodySymbol].stream; + } + }, + configurable: true, + enumerable: true, + }, + bodyUsed: { + /** + * @returns {boolean} + */ + get() { + webidl.assertBranded(this, prototype); + if (this[bodySymbol] !== null) { + return this[bodySymbol].consumed(); + } + return false; + }, + configurable: true, + enumerable: true, + }, + arrayBuffer: { + /** @returns {Promise} */ + value: async function arrayBuffer() { + webidl.assertBranded(this, prototype); + const body = await consumeBody(this); + return packageData(body, "ArrayBuffer"); + }, + writable: true, + configurable: true, + enumerable: true, + }, + blob: { + /** @returns {Promise} */ + value: async function blob() { + webidl.assertBranded(this, prototype); + const body = await consumeBody(this); + return packageData(body, "Blob", this[mimeTypeSymbol]); + }, + writable: true, + configurable: true, + enumerable: true, + }, + formData: { + /** @returns {Promise} */ + value: async function formData() { + webidl.assertBranded(this, prototype); + const body = await consumeBody(this); + return packageData(body, "FormData", this[mimeTypeSymbol]); + }, + writable: true, + configurable: true, + enumerable: true, + }, + json: { + /** @returns {Promise} */ + value: async function json() { + webidl.assertBranded(this, prototype); + const body = await consumeBody(this); + return packageData(body, "JSON"); + }, + writable: true, + configurable: true, + enumerable: true, + }, + text: { + /** @returns {Promise} */ + value: async function text() { + webidl.assertBranded(this, prototype); + const body = await consumeBody(this); + return packageData(body, "text"); + }, + writable: true, + configurable: true, + enumerable: true, + }, + }; + return ObjectDefineProperties(prototype.prototype, mixin); + } + + /** + * https://fetch.spec.whatwg.org/#concept-body-package-data + * @param {Uint8Array} bytes + * @param {"ArrayBuffer" | "Blob" | "FormData" | "JSON" | "text"} type + * @param {MimeType | null} [mimeType] + */ + function packageData(bytes, type, mimeType) { + switch (type) { + case "ArrayBuffer": + return bytes.buffer; + case "Blob": + return new Blob([bytes], { + type: mimeType !== null ? 
mimesniff.serializeMimeType(mimeType) : "", + }); + case "FormData": { + if (mimeType !== null) { + const essence = mimesniff.essence(mimeType); + if (essence === "multipart/form-data") { + const boundary = mimeType.parameters.get("boundary"); + if (boundary === null) { + throw new TypeError( + "Missing boundary parameter in mime type of multipart formdata.", + ); + } + return parseFormData(bytes, boundary); + } else if (essence === "application/x-www-form-urlencoded") { + const entries = parseUrlEncoded(bytes); + return formDataFromEntries( + ArrayPrototypeMap( + entries, + (x) => ({ name: x[0], value: x[1] }), + ), + ); + } + throw new TypeError("Body can not be decoded as form data"); + } + throw new TypeError("Missing content type"); + } + case "JSON": + return JSONParse(core.decode(bytes)); + case "text": + return core.decode(bytes); + } + } + + /** + * @param {BodyInit} object + * @returns {{body: InnerBody, contentType: string | null}} + */ + function extractBody(object) { + /** @type {ReadableStream | { body: Uint8Array, consumed: boolean }} */ + let stream; + let source = null; + let length = null; + let contentType = null; + if (object instanceof Blob) { + stream = object.stream(); + source = object; + length = object.size; + if (object.type.length !== 0) { + contentType = object.type; + } + } else if (ArrayBufferIsView(object) || object instanceof ArrayBuffer) { + const u8 = ArrayBufferIsView(object) + ? new Uint8Array( + object.buffer, + object.byteOffset, + object.byteLength, + ) + : new Uint8Array(object); + const copy = TypedArrayPrototypeSlice(u8, 0, u8.byteLength); + source = copy; + } else if (object instanceof FormData) { + const res = formDataToBlob(object); + stream = res.stream(); + source = res; + length = res.size; + contentType = res.type; + } else if (object instanceof URLSearchParams) { + // TODO(@satyarohith): not sure what primordial here. 
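extractBody() also decides the default content type: plain strings become text/plain;charset=UTF-8, URLSearchParams becomes application/x-www-form-urlencoded;charset=UTF-8, a Blob contributes its own type, and raw buffers contribute none. The Request/Response constructors later in the patch only apply it when no Content-Type was set explicitly. For instance:

const r1 = new Request("https://example.com/", { method: "POST", body: "hi" });
console.log(r1.headers.get("content-type")); // "text/plain;charset=UTF-8"

const r2 = new Request("https://example.com/", {
  method: "POST",
  body: new URLSearchParams({ q: "deno" }),
});
console.log(r2.headers.get("content-type"));
// "application/x-www-form-urlencoded;charset=UTF-8"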
+ source = core.encode(object.toString()); + contentType = "application/x-www-form-urlencoded;charset=UTF-8"; + } else if (typeof object === "string") { + source = core.encode(object); + contentType = "text/plain;charset=UTF-8"; + } else if (object instanceof ReadableStream) { + stream = object; + if (object.locked || isReadableStreamDisturbed(object)) { + throw new TypeError("ReadableStream is locked or disturbed"); + } + } + if (source instanceof Uint8Array) { + stream = { body: source, consumed: false }; + length = source.byteLength; + } + const body = new InnerBody(stream); + body.source = source; + body.length = length; + return { body, contentType }; + } + + webidl.converters["BodyInit"] = (V, opts) => { + // Union for (ReadableStream or Blob or ArrayBufferView or ArrayBuffer or FormData or URLSearchParams or USVString) + if (V instanceof ReadableStream) { + // TODO(lucacasonato): ReadableStream is not branded + return V; + } else if (V instanceof Blob) { + return webidl.converters["Blob"](V, opts); + } else if (V instanceof FormData) { + return webidl.converters["FormData"](V, opts); + } else if (V instanceof URLSearchParams) { + // TODO(lucacasonato): URLSearchParams is not branded + return V; + } + if (typeof V === "object") { + if (V instanceof ArrayBuffer || V instanceof SharedArrayBuffer) { + return webidl.converters["ArrayBuffer"](V, opts); + } + if (ArrayBufferIsView(V)) { + return webidl.converters["ArrayBufferView"](V, opts); + } + } + return webidl.converters["USVString"](V, opts); + }; + webidl.converters["BodyInit?"] = webidl.createNullableConverter( + webidl.converters["BodyInit"], + ); + + window.__bootstrap.fetchBody = { mixinBody, InnerBody, extractBody }; +})(globalThis); diff --git a/ext/fetch/22_http_client.js b/ext/fetch/22_http_client.js new file mode 100644 index 000000000..60b069aa7 --- /dev/null +++ b/ext/fetch/22_http_client.js @@ -0,0 +1,40 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +// @ts-check +/// +/// +/// +/// +/// +/// +/// +/// +"use strict"; + +((window) => { + const core = window.Deno.core; + + /** + * @param {Deno.CreateHttpClientOptions} options + * @returns {HttpClient} + */ + function createHttpClient(options) { + return new HttpClient(core.opSync("op_create_http_client", options)); + } + + class HttpClient { + /** + * @param {number} rid + */ + constructor(rid) { + this.rid = rid; + } + close() { + core.close(this.rid); + } + } + + window.__bootstrap.fetch ??= {}; + window.__bootstrap.fetch.createHttpClient = createHttpClient; + window.__bootstrap.fetch.HttpClient = HttpClient; +})(globalThis); diff --git a/ext/fetch/23_request.js b/ext/fetch/23_request.js new file mode 100644 index 000000000..1372125c1 --- /dev/null +++ b/ext/fetch/23_request.js @@ -0,0 +1,484 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
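HttpClient is a thin wrapper around the op_create_http_client resource id, and the non-standard `client` request option (see 23_request.js below) lets fetch() reuse it. A sketch, assuming the wrapper is surfaced as Deno.createHttpClient behind the unstable flag:

const client = Deno.createHttpClient({}); // options object is runtime-specific
const res = await fetch("https://example.com/", { client });
console.log(res.status);
client.close(); // drops the underlying resource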
+ +// @ts-check +/// +/// +/// +/// +/// +/// +/// +"use strict"; + +((window) => { + const webidl = window.__bootstrap.webidl; + const consoleInternal = window.__bootstrap.console; + const { HTTP_TOKEN_CODE_POINT_RE, byteUpperCase } = window.__bootstrap.infra; + const { URL } = window.__bootstrap.url; + const { guardFromHeaders } = window.__bootstrap.headers; + const { mixinBody, extractBody } = window.__bootstrap.fetchBody; + const { getLocationHref } = window.__bootstrap.location; + const mimesniff = window.__bootstrap.mimesniff; + const { + headersFromHeaderList, + headerListFromHeaders, + fillHeaders, + getDecodeSplitHeader, + } = window.__bootstrap.headers; + const { HttpClient } = window.__bootstrap.fetch; + const abortSignal = window.__bootstrap.abortSignal; + const { + ArrayPrototypeMap, + ArrayPrototypeSlice, + ArrayPrototypeSplice, + MapPrototypeHas, + MapPrototypeGet, + MapPrototypeSet, + ObjectKeys, + RegExpPrototypeTest, + Symbol, + SymbolFor, + SymbolToStringTag, + TypeError, + } = window.__bootstrap.primordials; + + const _request = Symbol("request"); + const _headers = Symbol("headers"); + const _signal = Symbol("signal"); + const _mimeType = Symbol("mime type"); + const _body = Symbol("body"); + + /** + * @typedef InnerRequest + * @property {string} method + * @property {() => string} url + * @property {() => string} currentUrl + * @property {[string, string][]} headerList + * @property {null | typeof __window.bootstrap.fetchBody.InnerBody} body + * @property {"follow" | "error" | "manual"} redirectMode + * @property {number} redirectCount + * @property {string[]} urlList + * @property {number | null} clientRid NOTE: non standard extension for `Deno.HttpClient`. + */ + + const defaultInnerRequest = { + url() { + return this.urlList[0]; + }, + currentUrl() { + return this.urlList[this.urlList.length - 1]; + }, + redirectMode: "follow", + redirectCount: 0, + clientRid: null, + }; + + /** + * @param {string} method + * @param {string} url + * @param {[string, string][]} headerList + * @param {typeof __window.bootstrap.fetchBody.InnerBody} body + * @returns + */ + function newInnerRequest(method, url, headerList = [], body = null) { + return { + method: method, + headerList, + body, + urlList: [url], + ...defaultInnerRequest, + }; + } + + /** + * https://fetch.spec.whatwg.org/#concept-request-clone + * @param {InnerRequest} request + * @returns {InnerRequest} + */ + function cloneInnerRequest(request) { + const headerList = [ + ...ArrayPrototypeMap(request.headerList, (x) => [x[0], x[1]]), + ]; + let body = null; + if (request.body !== null) { + body = request.body.clone(); + } + + return { + method: request.method, + url() { + return this.urlList[0]; + }, + currentUrl() { + return this.urlList[this.urlList.length - 1]; + }, + headerList, + body, + redirectMode: request.redirectMode, + redirectCount: request.redirectCount, + urlList: request.urlList, + clientRid: request.clientRid, + }; + } + + /** + * @param {string} m + * @returns {boolean} + */ + function isKnownMethod(m) { + return ( + m === "DELETE" || + m === "GET" || + m === "HEAD" || + m === "OPTIONS" || + m === "POST" || + m === "PUT" + ); + } + /** + * @param {string} m + * @returns {string} + */ + function validateAndNormalizeMethod(m) { + // Fast path for well-known methods + if (isKnownMethod(m)) { + return m; + } + + // Regular path + if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, m)) { + throw new TypeError("Method is not valid."); + } + const upperCase = byteUpperCase(m); + if ( + upperCase === "CONNECT" 
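validateAndNormalizeMethod() upper-cases any token method that is not already one of the well-known verbs and rejects CONNECT, TRACE and TRACK outright:

const req = new Request("https://example.com/", { method: "post" });
console.log(req.method); // "POST"

try {
  new Request("https://example.com/", { method: "TRACE" });
} catch (err) {
  console.log(err instanceof TypeError); // true — "Method is forbidden."
}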
|| upperCase === "TRACE" || upperCase === "TRACK" + ) { + throw new TypeError("Method is forbidden."); + } + return upperCase; + } + + class Request { + /** @type {InnerRequest} */ + [_request]; + /** @type {Headers} */ + [_headers]; + /** @type {AbortSignal} */ + [_signal]; + get [_mimeType]() { + let charset = null; + let essence = null; + let mimeType = null; + const headerList = headerListFromHeaders(this[_headers]); + const values = getDecodeSplitHeader(headerList, "content-type"); + if (values === null) return null; + for (const value of values) { + const temporaryMimeType = mimesniff.parseMimeType(value); + if ( + temporaryMimeType === null || + mimesniff.essence(temporaryMimeType) == "*/*" + ) { + continue; + } + mimeType = temporaryMimeType; + if (mimesniff.essence(mimeType) !== essence) { + charset = null; + const newCharset = MapPrototypeGet(mimeType.parameters, "charset"); + if (newCharset !== undefined) { + charset = newCharset; + } + essence = mimesniff.essence(mimeType); + } else { + if ( + MapPrototypeHas(mimeType.parameters, "charset") === null && + charset !== null + ) { + MapPrototypeSet(mimeType.parameters, "charset", charset); + } + } + } + if (mimeType === null) return null; + return mimeType; + } + get [_body]() { + return this[_request].body; + } + + /** + * https://fetch.spec.whatwg.org/#dom-request + * @param {RequestInfo} input + * @param {RequestInit} init + */ + constructor(input, init = {}) { + const prefix = "Failed to construct 'Request'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + input = webidl.converters["RequestInfo"](input, { + prefix, + context: "Argument 1", + }); + init = webidl.converters["RequestInit"](init, { + prefix, + context: "Argument 2", + }); + + this[webidl.brand] = webidl.brand; + + /** @type {InnerRequest} */ + let request; + const baseURL = getLocationHref(); + + // 4. + let signal = null; + + // 5. + if (typeof input === "string") { + const parsedURL = new URL(input, baseURL); + request = newInnerRequest("GET", parsedURL.href, [], null); + } else { // 6. + if (!(input instanceof Request)) throw new TypeError("Unreachable"); + request = input[_request]; + signal = input[_signal]; + } + + // 12. + // TODO(lucacasonato): create a copy of `request` + + // 22. + if (init.redirect !== undefined) { + request.redirectMode = init.redirect; + } + + // 25. + if (init.method !== undefined) { + let method = init.method; + method = validateAndNormalizeMethod(method); + request.method = method; + } + + // 26. + if (init.signal !== undefined) { + signal = init.signal; + } + + // NOTE: non standard extension. This handles Deno.HttpClient parameter + if (init.client !== undefined) { + if (init.client !== null && !(init.client instanceof HttpClient)) { + throw webidl.makeException( + TypeError, + "`client` must be a Deno.HttpClient", + { prefix, context: "Argument 2" }, + ); + } + request.clientRid = init.client?.rid ?? null; + } + + // 27. + this[_request] = request; + + // 28. + this[_signal] = abortSignal.newSignal(); + + // 29. + if (signal !== null) { + abortSignal.follow(this[_signal], signal); + } + + // 30. + this[_headers] = headersFromHeaderList(request.headerList, "request"); + + // 32. 
+ if (ObjectKeys(init).length > 0) { + let headers = ArrayPrototypeSlice( + headerListFromHeaders(this[_headers]), + 0, + headerListFromHeaders(this[_headers]).length, + ); + if (init.headers !== undefined) { + headers = init.headers; + } + ArrayPrototypeSplice( + headerListFromHeaders(this[_headers]), + 0, + headerListFromHeaders(this[_headers]).length, + ); + fillHeaders(this[_headers], headers); + } + + // 33. + let inputBody = null; + if (input instanceof Request) { + inputBody = input[_body]; + } + + // 34. + if ( + (request.method === "GET" || request.method === "HEAD") && + ((init.body !== undefined && init.body !== null) || + inputBody !== null) + ) { + throw new TypeError("Request with GET/HEAD method cannot have body."); + } + + // 35. + let initBody = null; + + // 36. + if (init.body !== undefined && init.body !== null) { + const res = extractBody(init.body); + initBody = res.body; + if (res.contentType !== null && !this[_headers].has("content-type")) { + this[_headers].append("Content-Type", res.contentType); + } + } + + // 37. + const inputOrInitBody = initBody ?? inputBody; + + // 39. + let finalBody = inputOrInitBody; + + // 40. + if (initBody === null && inputBody !== null) { + if (input[_body] && input[_body].unusable()) { + throw new TypeError("Input request's body is unusable."); + } + finalBody = inputBody.createProxy(); + } + + // 41. + request.body = finalBody; + } + + get method() { + webidl.assertBranded(this, Request); + return this[_request].method; + } + + get url() { + webidl.assertBranded(this, Request); + return this[_request].url(); + } + + get headers() { + webidl.assertBranded(this, Request); + return this[_headers]; + } + + get redirect() { + webidl.assertBranded(this, Request); + return this[_request].redirectMode; + } + + get signal() { + webidl.assertBranded(this, Request); + return this[_signal]; + } + + clone() { + webidl.assertBranded(this, Request); + if (this[_body] && this[_body].unusable()) { + throw new TypeError("Body is unusable."); + } + const newReq = cloneInnerRequest(this[_request]); + const newSignal = abortSignal.newSignal(); + abortSignal.follow(newSignal, this[_signal]); + return fromInnerRequest( + newReq, + newSignal, + guardFromHeaders(this[_headers]), + ); + } + + get [SymbolToStringTag]() { + return "Request"; + } + + [SymbolFor("Deno.customInspect")](inspect) { + return inspect(consoleInternal.createFilteredInspectProxy({ + object: this, + evaluate: this instanceof Request, + keys: [ + "bodyUsed", + "headers", + "method", + "redirect", + "url", + ], + })); + } + } + + mixinBody(Request, _body, _mimeType); + + webidl.configurePrototype(Request); + + webidl.converters["Request"] = webidl.createInterfaceConverter( + "Request", + Request, + ); + webidl.converters["RequestInfo"] = (V, opts) => { + // Union for (Request or USVString) + if (typeof V == "object") { + if (V instanceof Request) { + return webidl.converters["Request"](V, opts); + } + } + return webidl.converters["USVString"](V, opts); + }; + webidl.converters["RequestRedirect"] = webidl.createEnumConverter( + "RequestRedirect", + [ + "follow", + "error", + "manual", + ], + ); + webidl.converters["RequestInit"] = webidl.createDictionaryConverter( + "RequestInit", + [ + { key: "method", converter: webidl.converters["ByteString"] }, + { key: "headers", converter: webidl.converters["HeadersInit"] }, + { + key: "body", + converter: webidl.createNullableConverter( + webidl.converters["BodyInit"], + ), + }, + { key: "redirect", converter: webidl.converters["RequestRedirect"] }, + 
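Two constructor rules worth calling out: a GET/HEAD request may not carry a body (step 34), and clone() refuses once the body is locked or disturbed. Sketch:

try {
  new Request("https://example.com/", { body: "nope" }); // default method is GET
} catch (err) {
  console.log(err instanceof TypeError); // true
}

const req = new Request("https://example.com/", { method: "POST", body: "hi" });
const copy = req.clone();       // allowed while the body is still unused
console.log(await copy.text()); // "hi"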
{ + key: "signal", + converter: webidl.createNullableConverter( + webidl.converters["AbortSignal"], + ), + }, + { key: "client", converter: webidl.converters.any }, + ], + ); + + /** + * @param {Request} request + * @returns {InnerRequest} + */ + function toInnerRequest(request) { + return request[_request]; + } + + /** + * @param {InnerRequest} inner + * @param {"request" | "immutable" | "request-no-cors" | "response" | "none"} guard + * @returns {Request} + */ + function fromInnerRequest(inner, signal, guard) { + const request = webidl.createBranded(Request); + request[_request] = inner; + request[_signal] = signal; + request[_headers] = headersFromHeaderList(inner.headerList, guard); + return request; + } + + window.__bootstrap.fetch ??= {}; + window.__bootstrap.fetch.Request = Request; + window.__bootstrap.fetch.toInnerRequest = toInnerRequest; + window.__bootstrap.fetch.fromInnerRequest = fromInnerRequest; + window.__bootstrap.fetch.newInnerRequest = newInnerRequest; +})(globalThis); diff --git a/ext/fetch/23_response.js b/ext/fetch/23_response.js new file mode 100644 index 000000000..0db20e90e --- /dev/null +++ b/ext/fetch/23_response.js @@ -0,0 +1,451 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +// @ts-check +/// +/// +/// +/// +/// +/// +/// +/// +"use strict"; + +((window) => { + const webidl = window.__bootstrap.webidl; + const consoleInternal = window.__bootstrap.console; + const { HTTP_TAB_OR_SPACE, regexMatcher } = window.__bootstrap.infra; + const { extractBody, mixinBody } = window.__bootstrap.fetchBody; + const { getLocationHref } = window.__bootstrap.location; + const mimesniff = window.__bootstrap.mimesniff; + const { URL } = window.__bootstrap.url; + const { + getDecodeSplitHeader, + headerListFromHeaders, + headersFromHeaderList, + guardFromHeaders, + fillHeaders, + } = window.__bootstrap.headers; + const { + ArrayPrototypeMap, + ArrayPrototypePush, + MapPrototypeHas, + MapPrototypeGet, + MapPrototypeSet, + RangeError, + RegExp, + RegExpPrototypeTest, + Symbol, + SymbolFor, + SymbolToStringTag, + TypeError, + } = window.__bootstrap.primordials; + + const VCHAR = ["\x21-\x7E"]; + const OBS_TEXT = ["\x80-\xFF"]; + + const REASON_PHRASE = [...HTTP_TAB_OR_SPACE, ...VCHAR, ...OBS_TEXT]; + const REASON_PHRASE_MATCHER = regexMatcher(REASON_PHRASE); + const REASON_PHRASE_RE = new RegExp(`^[${REASON_PHRASE_MATCHER}]*$`); + + const _response = Symbol("response"); + const _headers = Symbol("headers"); + const _mimeType = Symbol("mime type"); + const _body = Symbol("body"); + + /** + * @typedef InnerResponse + * @property {"basic" | "cors" | "default" | "error" | "opaque" | "opaqueredirect"} type + * @property {() => string | null} url + * @property {string[]} urlList + * @property {number} status + * @property {string} statusMessage + * @property {[string, string][]} headerList + * @property {null | typeof __window.bootstrap.fetchBody.InnerBody} body + * @property {boolean} aborted + * @property {string} [error] + */ + + /** + * @param {number} status + * @returns {boolean} + */ + function nullBodyStatus(status) { + return status === 101 || status === 204 || status === 205 || status === 304; + } + + /** + * @param {number} status + * @returns {boolean} + */ + function redirectStatus(status) { + return status === 301 || status === 302 || status === 303 || + status === 307 || status === 308; + } + + /** + * https://fetch.spec.whatwg.org/#concept-response-clone + * @param {InnerResponse} response + * @returns {InnerResponse} + */ + function 
cloneInnerResponse(response) { + const urlList = [...response.urlList]; + const headerList = [ + ...ArrayPrototypeMap(response.headerList, (x) => [x[0], x[1]]), + ]; + let body = null; + if (response.body !== null) { + body = response.body.clone(); + } + + return { + type: response.type, + body, + headerList, + url() { + if (this.urlList.length == 0) return null; + return this.urlList[this.urlList.length - 1]; + }, + urlList, + status: response.status, + statusMessage: response.statusMessage, + aborted: response.aborted, + }; + } + + const defaultInnerResponse = { + type: "default", + body: null, + aborted: false, + url() { + if (this.urlList.length == 0) return null; + return this.urlList[this.urlList.length - 1]; + }, + }; + + /** + * @returns {InnerResponse} + */ + function newInnerResponse(status = 200, statusMessage = "") { + return { + headerList: [], + urlList: [], + status, + statusMessage, + ...defaultInnerResponse, + }; + } + + /** + * @param {string} error + * @returns {InnerResponse} + */ + function networkError(error) { + const resp = newInnerResponse(0); + resp.type = "error"; + resp.error = error; + return resp; + } + + /** + * @returns {InnerResponse} + */ + function abortedNetworkError() { + const resp = networkError("aborted"); + resp.aborted = true; + return resp; + } + + class Response { + /** @type {InnerResponse} */ + [_response]; + /** @type {Headers} */ + [_headers]; + get [_mimeType]() { + let charset = null; + let essence = null; + let mimeType = null; + const headerList = headerListFromHeaders(this[_headers]); + const values = getDecodeSplitHeader(headerList, "content-type"); + if (values === null) return null; + for (const value of values) { + const temporaryMimeType = mimesniff.parseMimeType(value); + if ( + temporaryMimeType === null || + mimesniff.essence(temporaryMimeType) == "*/*" + ) { + continue; + } + mimeType = temporaryMimeType; + if (mimesniff.essence(mimeType) !== essence) { + charset = null; + const newCharset = MapPrototypeGet(mimeType.parameters, "charset"); + if (newCharset !== undefined) { + charset = newCharset; + } + essence = mimesniff.essence(mimeType); + } else { + if ( + MapPrototypeHas(mimeType.parameters, "charset") === null && + charset !== null + ) { + MapPrototypeSet(mimeType.parameters, "charset", charset); + } + } + } + if (mimeType === null) return null; + return mimeType; + } + get [_body]() { + return this[_response].body; + } + + /** + * @returns {Response} + */ + static error() { + const inner = newInnerResponse(0); + inner.type = "error"; + const response = webidl.createBranded(Response); + response[_response] = inner; + response[_headers] = headersFromHeaderList( + response[_response].headerList, + "immutable", + ); + return response; + } + + /** + * @param {string} url + * @param {number} status + * @returns {Response} + */ + static redirect(url, status = 302) { + const prefix = "Failed to call 'Response.redirect'"; + url = webidl.converters["USVString"](url, { + prefix, + context: "Argument 1", + }); + status = webidl.converters["unsigned short"](status, { + prefix, + context: "Argument 2", + }); + + const baseURL = getLocationHref(); + const parsedURL = new URL(url, baseURL); + if (!redirectStatus(status)) { + throw new RangeError("Invalid redirect status code."); + } + const inner = newInnerResponse(status); + inner.type = "default"; + ArrayPrototypePush(inner.headerList, ["location", parsedURL.href]); + const response = webidl.createBranded(Response); + response[_response] = inner; + response[_headers] = 
headersFromHeaderList( + response[_response].headerList, + "immutable", + ); + return response; + } + + /** + * @param {BodyInit | null} body + * @param {ResponseInit} init + */ + constructor(body = null, init = {}) { + const prefix = "Failed to construct 'Response'"; + body = webidl.converters["BodyInit?"](body, { + prefix, + context: "Argument 1", + }); + init = webidl.converters["ResponseInit"](init, { + prefix, + context: "Argument 2", + }); + + if (init.status < 200 || init.status > 599) { + throw new RangeError( + `The status provided (${init.status}) is outside the range [200, 599].`, + ); + } + + if (!RegExpPrototypeTest(REASON_PHRASE_RE, init.statusText)) { + throw new TypeError("Status text is not valid."); + } + + this[webidl.brand] = webidl.brand; + const response = newInnerResponse(init.status, init.statusText); + this[_response] = response; + this[_headers] = headersFromHeaderList(response.headerList, "response"); + if (init.headers !== undefined) { + fillHeaders(this[_headers], init.headers); + } + if (body !== null) { + if (nullBodyStatus(response.status)) { + throw new TypeError( + "Response with null body status cannot have body", + ); + } + const res = extractBody(body); + response.body = res.body; + if (res.contentType !== null && !this[_headers].has("content-type")) { + this[_headers].append("Content-Type", res.contentType); + } + } + } + + /** + * @returns {"basic" | "cors" | "default" | "error" | "opaque" | "opaqueredirect"} + */ + get type() { + webidl.assertBranded(this, Response); + return this[_response].type; + } + + /** + * @returns {string} + */ + get url() { + webidl.assertBranded(this, Response); + const url = this[_response].url(); + if (url === null) return ""; + const newUrl = new URL(url); + newUrl.hash = ""; + return newUrl.href; + } + + /** + * @returns {boolean} + */ + get redirected() { + webidl.assertBranded(this, Response); + return this[_response].urlList.length > 1; + } + + /** + * @returns {number} + */ + get status() { + webidl.assertBranded(this, Response); + return this[_response].status; + } + + /** + * @returns {boolean} + */ + get ok() { + webidl.assertBranded(this, Response); + const status = this[_response].status; + return status >= 200 && status <= 299; + } + + /** + * @returns {string} + */ + get statusText() { + webidl.assertBranded(this, Response); + return this[_response].statusMessage; + } + + /** + * @returns {Headers} + */ + get headers() { + webidl.assertBranded(this, Response); + return this[_headers]; + } + + /** + * @returns {Response} + */ + clone() { + webidl.assertBranded(this, Response); + if (this[_body] && this[_body].unusable()) { + throw new TypeError("Body is unusable."); + } + const second = webidl.createBranded(Response); + const newRes = cloneInnerResponse(this[_response]); + second[_response] = newRes; + second[_headers] = headersFromHeaderList( + newRes.headerList, + guardFromHeaders(this[_headers]), + ); + return second; + } + + get [SymbolToStringTag]() { + return "Response"; + } + + [SymbolFor("Deno.customInspect")](inspect) { + return inspect(consoleInternal.createFilteredInspectProxy({ + object: this, + evaluate: this instanceof Response, + keys: [ + "body", + "bodyUsed", + "headers", + "ok", + "redirected", + "status", + "statusText", + "url", + ], + })); + } + } + + mixinBody(Response, _body, _mimeType); + + webidl.configurePrototype(Response); + + webidl.converters["Response"] = webidl.createInterfaceConverter( + "Response", + Response, + ); + webidl.converters["ResponseInit"] = 
webidl.createDictionaryConverter( + "ResponseInit", + [{ + key: "status", + defaultValue: 200, + converter: webidl.converters["unsigned short"], + }, { + key: "statusText", + defaultValue: "", + converter: webidl.converters["ByteString"], + }, { + key: "headers", + converter: webidl.converters["HeadersInit"], + }], + ); + + /** + * @param {Response} response + * @returns {InnerResponse} + */ + function toInnerResponse(response) { + return response[_response]; + } + + /** + * @param {InnerResponse} inner + * @param {"request" | "immutable" | "request-no-cors" | "response" | "none"} guard + * @returns {Response} + */ + function fromInnerResponse(inner, guard) { + const response = webidl.createBranded(Response); + response[_response] = inner; + response[_headers] = headersFromHeaderList(inner.headerList, guard); + return response; + } + + window.__bootstrap.fetch ??= {}; + window.__bootstrap.fetch.Response = Response; + window.__bootstrap.fetch.newInnerResponse = newInnerResponse; + window.__bootstrap.fetch.toInnerResponse = toInnerResponse; + window.__bootstrap.fetch.fromInnerResponse = fromInnerResponse; + window.__bootstrap.fetch.redirectStatus = redirectStatus; + window.__bootstrap.fetch.nullBodyStatus = nullBodyStatus; + window.__bootstrap.fetch.networkError = networkError; + window.__bootstrap.fetch.abortedNetworkError = abortedNetworkError; +})(globalThis); diff --git a/ext/fetch/26_fetch.js b/ext/fetch/26_fetch.js new file mode 100644 index 000000000..f7166001e --- /dev/null +++ b/ext/fetch/26_fetch.js @@ -0,0 +1,542 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +// @ts-check +/// +/// +/// +/// +/// +/// +/// +/// +"use strict"; + +((window) => { + const core = window.Deno.core; + const webidl = window.__bootstrap.webidl; + const { errorReadableStream } = window.__bootstrap.streams; + const { InnerBody, extractBody } = window.__bootstrap.fetchBody; + const { + toInnerRequest, + toInnerResponse, + fromInnerResponse, + redirectStatus, + nullBodyStatus, + networkError, + abortedNetworkError, + } = window.__bootstrap.fetch; + const abortSignal = window.__bootstrap.abortSignal; + const { DOMException } = window.__bootstrap.domException; + const { + ArrayPrototypePush, + ArrayPrototypeSplice, + ArrayPrototypeFilter, + ArrayPrototypeIncludes, + Promise, + PromisePrototypeThen, + PromisePrototypeCatch, + StringPrototypeToLowerCase, + TypedArrayPrototypeSubarray, + TypeError, + Uint8Array, + } = window.__bootstrap.primordials; + + const REQUEST_BODY_HEADER_NAMES = [ + "content-encoding", + "content-language", + "content-location", + "content-type", + ]; + + /** + * @param {{ method: string, url: string, headers: [string, string][], clientRid: number | null, hasBody: boolean }} args + * @param {Uint8Array | null} body + * @returns {{ requestRid: number, requestBodyRid: number | null }} + */ + function opFetch(args, body) { + return core.opSync("op_fetch", args, body); + } + + /** + * @param {number} rid + * @returns {Promise<{ status: number, statusText: string, headers: [string, string][], url: string, responseRid: number }>} + */ + function opFetchSend(rid) { + return core.opAsync("op_fetch_send", rid); + } + + /** + * @param {number} rid + * @param {Uint8Array} body + * @returns {Promise} + */ + function opFetchRequestWrite(rid, body) { + return core.opAsync("op_fetch_request_write", rid, body); + } + + /** + * @param {number} rid + * @param {Uint8Array} body + * @returns {Promise} + */ + function opFetchResponseRead(rid, body) { + return 
core.opAsync("op_fetch_response_read", rid, body); + } + + // A finalization registry to clean up underlying fetch resources that are GC'ed. + const RESOURCE_REGISTRY = new FinalizationRegistry((rid) => { + try { + core.close(rid); + } catch { + // might have already been closed + } + }); + + /** + * @param {number} responseBodyRid + * @param {AbortSignal} [terminator] + * @returns {ReadableStream} + */ + function createResponseBodyStream(responseBodyRid, terminator) { + function onAbort() { + if (readable) { + errorReadableStream( + readable, + new DOMException("Ongoing fetch was aborted.", "AbortError"), + ); + } + try { + core.close(responseBodyRid); + } catch (_) { + // might have already been closed + } + } + // TODO(lucacasonato): clean up registration + terminator[abortSignal.add](onAbort); + const readable = new ReadableStream({ + type: "bytes", + async pull(controller) { + try { + // This is the largest possible size for a single packet on a TLS + // stream. + const chunk = new Uint8Array(16 * 1024 + 256); + const read = await opFetchResponseRead( + responseBodyRid, + chunk, + ); + if (read > 0) { + // We read some data. Enqueue it onto the stream. + controller.enqueue(TypedArrayPrototypeSubarray(chunk, 0, read)); + } else { + RESOURCE_REGISTRY.unregister(readable); + // We have reached the end of the body, so we close the stream. + controller.close(); + try { + core.close(responseBodyRid); + } catch (_) { + // might have already been closed + } + } + } catch (err) { + RESOURCE_REGISTRY.unregister(readable); + if (terminator.aborted) { + controller.error( + new DOMException("Ongoing fetch was aborted.", "AbortError"), + ); + } else { + // There was an error while reading a chunk of the body, so we + // error. + controller.error(err); + } + try { + core.close(responseBodyRid); + } catch (_) { + // might have already been closed + } + } + }, + cancel() { + if (!terminator.aborted) { + terminator[abortSignal.signalAbort](); + } + }, + }); + RESOURCE_REGISTRY.register(readable, responseBodyRid, readable); + return readable; + } + + /** + * @param {InnerRequest} req + * @param {boolean} recursive + * @param {AbortSignal} terminator + * @returns {Promise} + */ + async function mainFetch(req, recursive, terminator) { + /** @type {ReadableStream | Uint8Array | null} */ + let reqBody = null; + + if (req.body !== null) { + if (req.body.streamOrStatic instanceof ReadableStream) { + if (req.body.length === null || req.body.source instanceof Blob) { + reqBody = req.body.stream; + } else { + const reader = req.body.stream.getReader(); + const r1 = await reader.read(); + if (r1.done) { + reqBody = new Uint8Array(0); + } else { + reqBody = r1.value; + const r2 = await reader.read(); + if (!r2.done) throw new TypeError("Unreachable"); + } + } + } else { + req.body.streamOrStatic.consumed = true; + reqBody = req.body.streamOrStatic.body; + } + } + + const { requestRid, requestBodyRid, cancelHandleRid } = opFetch({ + method: req.method, + url: req.currentUrl(), + headers: req.headerList, + clientRid: req.clientRid, + hasBody: reqBody !== null, + bodyLength: req.body?.length, + }, reqBody instanceof Uint8Array ? 
reqBody : null); + + function onAbort() { + try { + core.close(cancelHandleRid); + } catch (_) { + // might have already been closed + } + try { + core.close(requestBodyRid); + } catch (_) { + // might have already been closed + } + } + terminator[abortSignal.add](onAbort); + + if (requestBodyRid !== null) { + if (reqBody === null || !(reqBody instanceof ReadableStream)) { + throw new TypeError("Unreachable"); + } + const reader = reqBody.getReader(); + (async () => { + while (true) { + const { value, done } = await PromisePrototypeCatch( + reader.read(), + (err) => { + if (terminator.aborted) return { done: true, value: undefined }; + throw err; + }, + ); + if (done) break; + if (!(value instanceof Uint8Array)) { + await reader.cancel("value not a Uint8Array"); + break; + } + try { + await PromisePrototypeCatch( + opFetchRequestWrite(requestBodyRid, value), + (err) => { + if (terminator.aborted) return; + throw err; + }, + ); + if (terminator.aborted) break; + } catch (err) { + await reader.cancel(err); + break; + } + } + try { + core.close(requestBodyRid); + } catch (_) { + // might have already been closed + } + })(); + } + + let resp; + try { + resp = await PromisePrototypeCatch(opFetchSend(requestRid), (err) => { + if (terminator.aborted) return; + throw err; + }); + } finally { + try { + core.close(cancelHandleRid); + } catch (_) { + // might have already been closed + } + } + if (terminator.aborted) return abortedNetworkError(); + + /** @type {InnerResponse} */ + const response = { + headerList: resp.headers, + status: resp.status, + body: null, + statusMessage: resp.statusText, + type: "basic", + url() { + if (this.urlList.length == 0) return null; + return this.urlList[this.urlList.length - 1]; + }, + urlList: req.urlList, + }; + if (redirectStatus(resp.status)) { + switch (req.redirectMode) { + case "error": + core.close(resp.responseRid); + return networkError( + "Encountered redirect while redirect mode is set to 'error'", + ); + case "follow": + core.close(resp.responseRid); + return httpRedirectFetch(req, response, terminator); + case "manual": + break; + } + } + + if (nullBodyStatus(response.status)) { + core.close(resp.responseRid); + } else { + if (req.method === "HEAD" || req.method === "CONNECT") { + response.body = null; + core.close(resp.responseRid); + } else { + response.body = new InnerBody( + createResponseBodyStream(resp.responseRid, terminator), + ); + } + } + + if (recursive) return response; + + if (response.urlList.length === 0) { + response.urlList = [...req.urlList]; + } + + return response; + } + + /** + * @param {InnerRequest} request + * @param {InnerResponse} response + * @returns {Promise} + */ + function httpRedirectFetch(request, response, terminator) { + const locationHeaders = ArrayPrototypeFilter( + response.headerList, + (entry) => entry[0] === "location", + ); + if (locationHeaders.length === 0) { + return response; + } + const locationURL = new URL( + locationHeaders[0][1], + response.url() ?? 
undefined, + ); + if (locationURL.hash === "") { + locationURL.hash = request.currentUrl().hash; + } + if (locationURL.protocol !== "https:" && locationURL.protocol !== "http:") { + return networkError("Can not redirect to a non HTTP(s) url"); + } + if (request.redirectCount === 20) { + return networkError("Maximum number of redirects (20) reached"); + } + request.redirectCount++; + if ( + response.status !== 303 && + request.body !== null && + request.body.source === null + ) { + return networkError( + "Can not redeliver a streaming request body after a redirect", + ); + } + if ( + ((response.status === 301 || response.status === 302) && + request.method === "POST") || + (response.status === 303 && + request.method !== "GET" && + request.method !== "HEAD") + ) { + request.method = "GET"; + request.body = null; + for (let i = 0; i < request.headerList.length; i++) { + if ( + ArrayPrototypeIncludes( + REQUEST_BODY_HEADER_NAMES, + request.headerList[i][0], + ) + ) { + ArrayPrototypeSplice(request.headerList, i, 1); + i--; + } + } + } + if (request.body !== null) { + const res = extractBody(request.body.source); + request.body = res.body; + } + ArrayPrototypePush(request.urlList, locationURL.href); + return mainFetch(request, true, terminator); + } + + /** + * @param {RequestInfo} input + * @param {RequestInit} init + */ + function fetch(input, init = {}) { + // 1. + const p = new Promise((resolve, reject) => { + const prefix = "Failed to call 'fetch'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + input = webidl.converters["RequestInfo"](input, { + prefix, + context: "Argument 1", + }); + init = webidl.converters["RequestInit"](init, { + prefix, + context: "Argument 2", + }); + + // 2. + const requestObject = new Request(input, init); + // 3. + const request = toInnerRequest(requestObject); + // 4. + if (requestObject.signal.aborted) { + reject(abortFetch(request, null)); + return; + } + + // 7. + let responseObject = null; + // 9. + let locallyAborted = false; + // 10. + function onabort() { + locallyAborted = true; + reject(abortFetch(request, responseObject)); + } + requestObject.signal[abortSignal.add](onabort); + + if (!requestObject.headers.has("accept")) { + ArrayPrototypePush(request.headerList, ["accept", "*/*"]); + } + + // 12. + PromisePrototypeCatch( + PromisePrototypeThen( + mainFetch(request, false, requestObject.signal), + (response) => { + // 12.1. + if (locallyAborted) return; + // 12.2. + if (response.aborted) { + reject(request, responseObject); + requestObject.signal[abortSignal.remove](onabort); + return; + } + // 12.3. + if (response.type === "error") { + const err = new TypeError( + "Fetch failed: " + (response.error ?? "unknown error"), + ); + reject(err); + requestObject.signal[abortSignal.remove](onabort); + return; + } + responseObject = fromInnerResponse(response, "immutable"); + resolve(responseObject); + requestObject.signal[abortSignal.remove](onabort); + }, + ), + (err) => { + reject(err); + requestObject.signal[abortSignal.remove](onabort); + }, + ); + }); + return p; + } + + function abortFetch(request, responseObject) { + const error = new DOMException("Ongoing fetch was aborted.", "AbortError"); + if (request.body !== null) request.body.cancel(error); + if (responseObject !== null) { + const response = toInnerResponse(responseObject); + if (response.body !== null) response.body.error(error); + } + return error; + } + + /** + * Handle the Promise argument to the WebAssembly streaming + * APIs. 
This function should be registered through + * `Deno.core.setWasmStreamingCallback`. + * + * @param {any} source The source parameter that the WebAssembly + * streaming API was called with. + * @param {number} rid An rid that can be used with + * `Deno.core.wasmStreamingFeed`. + */ + function handleWasmStreaming(source, rid) { + // This implements part of + // https://webassembly.github.io/spec/web-api/#compile-a-potential-webassembly-response + (async () => { + try { + const res = webidl.converters["Response"](await source, { + prefix: "Failed to call 'WebAssembly.compileStreaming'", + context: "Argument 1", + }); + + // 2.3. + // The spec is ambiguous here, see + // https://github.com/WebAssembly/spec/issues/1138. The WPT tests + // expect the raw value of the Content-Type attribute lowercased. + if ( + StringPrototypeToLowerCase(res.headers.get("Content-Type")) !== + "application/wasm" + ) { + throw new TypeError("Invalid WebAssembly content type."); + } + + // 2.5. + if (!res.ok) { + throw new TypeError(`HTTP status code ${res.status}`); + } + + // 2.6. + // Rather than consuming the body as an ArrayBuffer, this passes each + // chunk to the feed as soon as it's available. + if (res.body !== null) { + const reader = res.body.getReader(); + while (true) { + const { value: chunk, done } = await reader.read(); + if (done) break; + Deno.core.wasmStreamingFeed(rid, "bytes", chunk); + } + } + + // 2.7. + Deno.core.wasmStreamingFeed(rid, "finish"); + } catch (err) { + // 2.8 and 3 + Deno.core.wasmStreamingFeed(rid, "abort", err); + } + })(); + } + + window.__bootstrap.fetch ??= {}; + window.__bootstrap.fetch.fetch = fetch; + window.__bootstrap.fetch.handleWasmStreaming = handleWasmStreaming; +})(this); diff --git a/ext/fetch/Cargo.toml b/ext/fetch/Cargo.toml new file mode 100644 index 000000000..80d0cb2e1 --- /dev/null +++ b/ext/fetch/Cargo.toml @@ -0,0 +1,28 @@ +# Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +[package] +name = "deno_fetch" +version = "0.37.0" +authors = ["the Deno authors"] +edition = "2018" +license = "MIT" +readme = "README.md" +repository = "https://github.com/denoland/deno" +description = "Fetch API implementation for Deno" + +[lib] +path = "lib.rs" + +[dependencies] +bytes = "1.0.1" +data-url = "0.1.0" +deno_core = { version = "0.96.0", path = "../../core" } +deno_tls = { version = "0.1.0", path = "../tls" } +deno_web = { version = "0.45.0", path = "../web" } +http = "0.2.4" +lazy_static = "1.4.0" +reqwest = { version = "0.11.4", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli"] } +serde = { version = "1.0.126", features = ["derive"] } +tokio = { version = "1.8.1", features = ["full"] } +tokio-stream = "0.1.7" +tokio-util = "0.6.7" diff --git a/ext/fetch/README.md b/ext/fetch/README.md new file mode 100644 index 000000000..2c946197e --- /dev/null +++ b/ext/fetch/README.md @@ -0,0 +1,5 @@ +# deno_fetch + +This crate implements the Fetch API. + +Spec: https://fetch.spec.whatwg.org/ diff --git a/ext/fetch/internal.d.ts b/ext/fetch/internal.d.ts new file mode 100644 index 000000000..a84e0bcce --- /dev/null +++ b/ext/fetch/internal.d.ts @@ -0,0 +1,108 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
+ +// deno-lint-ignore-file no-explicit-any + +/// +/// + +declare namespace globalThis { + declare namespace __bootstrap { + declare var fetchUtil: { + requiredArguments(name: string, length: number, required: number): void; + }; + + declare var domIterable: { + DomIterableMixin(base: any, dataSymbol: symbol): any; + }; + + declare namespace headers { + class Headers { + } + type HeaderList = [string, string][]; + function headersFromHeaderList( + list: HeaderList, + guard: + | "immutable" + | "request" + | "request-no-cors" + | "response" + | "none", + ): Headers; + function headerListFromHeaders(headers: Headers): HeaderList; + function fillHeaders(headers: Headers, object: HeadersInit): void; + function getDecodeSplitHeader( + list: HeaderList, + name: string, + ): string[] | null; + function guardFromHeaders( + headers: Headers, + ): "immutable" | "request" | "request-no-cors" | "response" | "none"; + } + + declare namespace formData { + declare type FormData = typeof FormData; + declare function formDataToBlob( + formData: globalThis.FormData, + ): Blob; + declare function parseFormData( + body: Uint8Array, + boundary: string | undefined, + ): FormData; + declare function formDataFromEntries(entries: FormDataEntry[]): FormData; + } + + declare namespace fetchBody { + function mixinBody( + prototype: any, + bodySymbol: symbol, + mimeTypeSymbol: symbol, + ): void; + class InnerBody { + constructor(stream?: ReadableStream); + stream: ReadableStream; + source: null | Uint8Array | Blob | FormData; + length: null | number; + unusable(): boolean; + consume(): Promise; + clone(): InnerBody; + } + function extractBody(object: BodyInit): { + body: InnerBody; + contentType: string | null; + }; + } + + declare namespace fetch { + function toInnerRequest(request: Request): InnerRequest; + function fromInnerRequest( + inner: InnerRequest, + signal: AbortSignal | null, + guard: + | "request" + | "immutable" + | "request-no-cors" + | "response" + | "none", + ): Request; + function redirectStatus(status: number): boolean; + function nullBodyStatus(status: number): boolean; + function newInnerRequest( + method: string, + url: any, + headerList?: [string, string][], + body?: globalThis.__bootstrap.fetchBody.InnerBody, + ): InnerResponse; + function toInnerResponse(response: Response): InnerResponse; + function fromInnerResponse( + inner: InnerResponse, + guard: + | "request" + | "immutable" + | "request-no-cors" + | "response" + | "none", + ): Response; + function networkError(error: string): InnerResponse; + } + } +} diff --git a/ext/fetch/lib.deno_fetch.d.ts b/ext/fetch/lib.deno_fetch.d.ts new file mode 100644 index 000000000..7fe7d9453 --- /dev/null +++ b/ext/fetch/lib.deno_fetch.d.ts @@ -0,0 +1,437 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +// deno-lint-ignore-file no-explicit-any + +/// +/// + +interface DomIterable { + keys(): IterableIterator; + values(): IterableIterator; + entries(): IterableIterator<[K, V]>; + [Symbol.iterator](): IterableIterator<[K, V]>; + forEach( + callback: (value: V, key: K, parent: this) => void, + thisArg?: any, + ): void; +} + +type FormDataEntryValue = File | string; + +/** Provides a way to easily construct a set of key/value pairs representing + * form fields and their values, which can then be easily sent using the + * XMLHttpRequest.send() method. It uses the same format a form would use if the + * encoding type were set to "multipart/form-data". 
*/ +declare class FormData implements DomIterable { + // TODO(ry) FormData constructor is non-standard. + // new(form?: HTMLFormElement): FormData; + constructor(); + + append(name: string, value: string | Blob, fileName?: string): void; + delete(name: string): void; + get(name: string): FormDataEntryValue | null; + getAll(name: string): FormDataEntryValue[]; + has(name: string): boolean; + set(name: string, value: string | Blob, fileName?: string): void; + keys(): IterableIterator; + values(): IterableIterator; + entries(): IterableIterator<[string, FormDataEntryValue]>; + [Symbol.iterator](): IterableIterator<[string, FormDataEntryValue]>; + forEach( + callback: (value: FormDataEntryValue, key: string, parent: this) => void, + thisArg?: any, + ): void; +} + +interface Body { + /** A simple getter used to expose a `ReadableStream` of the body contents. */ + readonly body: ReadableStream | null; + /** Stores a `Boolean` that declares whether the body has been used in a + * response yet. + */ + readonly bodyUsed: boolean; + /** Takes a `Response` stream and reads it to completion. It returns a promise + * that resolves with an `ArrayBuffer`. + */ + arrayBuffer(): Promise; + /** Takes a `Response` stream and reads it to completion. It returns a promise + * that resolves with a `Blob`. + */ + blob(): Promise; + /** Takes a `Response` stream and reads it to completion. It returns a promise + * that resolves with a `FormData` object. + */ + formData(): Promise; + /** Takes a `Response` stream and reads it to completion. It returns a promise + * that resolves with the result of parsing the body text as JSON. + */ + json(): Promise; + /** Takes a `Response` stream and reads it to completion. It returns a promise + * that resolves with a `USVString` (text). + */ + text(): Promise; +} + +type HeadersInit = Headers | string[][] | Record; + +/** This Fetch API interface allows you to perform various actions on HTTP + * request and response headers. These actions include retrieving, setting, + * adding to, and removing. A Headers object has an associated header list, + * which is initially empty and consists of zero or more name and value pairs. + * You can add to this using methods like append() (see Examples). In all + * methods of this interface, header names are matched by case-insensitive byte + * sequence. */ +interface Headers { + append(name: string, value: string): void; + delete(name: string): void; + get(name: string): string | null; + has(name: string): boolean; + set(name: string, value: string): void; + forEach( + callbackfn: (value: string, key: string, parent: Headers) => void, + thisArg?: any, + ): void; +} + +declare class Headers implements DomIterable { + constructor(init?: HeadersInit); + + /** Appends a new value onto an existing header inside a `Headers` object, or + * adds the header if it does not already exist. + */ + append(name: string, value: string): void; + /** Deletes a header from a `Headers` object. */ + delete(name: string): void; + /** Returns an iterator allowing to go through all key/value pairs + * contained in this Headers object. The both the key and value of each pairs + * are ByteString objects. + */ + entries(): IterableIterator<[string, string]>; + /** Returns a `ByteString` sequence of all the values of a header within a + * `Headers` object with a given name. + */ + get(name: string): string | null; + /** Returns a boolean stating whether a `Headers` object contains a certain + * header. 
+ */ + has(name: string): boolean; + /** Returns an iterator allowing to go through all keys contained in + * this Headers object. The keys are ByteString objects. + */ + keys(): IterableIterator; + /** Sets a new value for an existing header inside a Headers object, or adds + * the header if it does not already exist. + */ + set(name: string, value: string): void; + /** Returns an iterator allowing to go through all values contained in + * this Headers object. The values are ByteString objects. + */ + values(): IterableIterator; + forEach( + callbackfn: (value: string, key: string, parent: this) => void, + thisArg?: any, + ): void; + /** The Symbol.iterator well-known symbol specifies the default + * iterator for this Headers object + */ + [Symbol.iterator](): IterableIterator<[string, string]>; +} + +type RequestInfo = Request | string; +type RequestCache = + | "default" + | "force-cache" + | "no-cache" + | "no-store" + | "only-if-cached" + | "reload"; +type RequestCredentials = "include" | "omit" | "same-origin"; +type RequestMode = "cors" | "navigate" | "no-cors" | "same-origin"; +type RequestRedirect = "error" | "follow" | "manual"; +type ReferrerPolicy = + | "" + | "no-referrer" + | "no-referrer-when-downgrade" + | "origin" + | "origin-when-cross-origin" + | "same-origin" + | "strict-origin" + | "strict-origin-when-cross-origin" + | "unsafe-url"; +type BodyInit = + | Blob + | BufferSource + | FormData + | URLSearchParams + | ReadableStream + | string; +type RequestDestination = + | "" + | "audio" + | "audioworklet" + | "document" + | "embed" + | "font" + | "image" + | "manifest" + | "object" + | "paintworklet" + | "report" + | "script" + | "sharedworker" + | "style" + | "track" + | "video" + | "worker" + | "xslt"; + +interface RequestInit { + /** + * A BodyInit object or null to set request's body. + */ + body?: BodyInit | null; + /** + * A string indicating how the request will interact with the browser's cache + * to set request's cache. + */ + cache?: RequestCache; + /** + * A string indicating whether credentials will be sent with the request + * always, never, or only when sent to a same-origin URL. Sets request's + * credentials. + */ + credentials?: RequestCredentials; + /** + * A Headers object, an object literal, or an array of two-item arrays to set + * request's headers. + */ + headers?: HeadersInit; + /** + * A cryptographic hash of the resource to be fetched by request. Sets + * request's integrity. + */ + integrity?: string; + /** + * A boolean to set request's keepalive. + */ + keepalive?: boolean; + /** + * A string to set request's method. + */ + method?: string; + /** + * A string to indicate whether the request will use CORS, or will be + * restricted to same-origin URLs. Sets request's mode. + */ + mode?: RequestMode; + /** + * A string indicating whether request follows redirects, results in an error + * upon encountering a redirect, or returns the redirect (in an opaque + * fashion). Sets request's redirect. + */ + redirect?: RequestRedirect; + /** + * A string whose value is a same-origin URL, "about:client", or the empty + * string, to set request's referrer. + */ + referrer?: string; + /** + * A referrer policy to set request's referrerPolicy. + */ + referrerPolicy?: ReferrerPolicy; + /** + * An AbortSignal to set request's signal. + */ + signal?: AbortSignal | null; + /** + * Can only be null. Used to disassociate request from any Window. + */ + window?: any; +} + +/** This Fetch API interface represents a resource request. 
*/ +declare class Request implements Body { + constructor(input: RequestInfo, init?: RequestInit); + + /** + * Returns the cache mode associated with request, which is a string + * indicating how the request will interact with the browser's cache when + * fetching. + */ + readonly cache: RequestCache; + /** + * Returns the credentials mode associated with request, which is a string + * indicating whether credentials will be sent with the request always, never, + * or only when sent to a same-origin URL. + */ + readonly credentials: RequestCredentials; + /** + * Returns the kind of resource requested by request, e.g., "document" or "script". + */ + readonly destination: RequestDestination; + /** + * Returns a Headers object consisting of the headers associated with request. + * Note that headers added in the network layer by the user agent will not be + * accounted for in this object, e.g., the "Host" header. + */ + readonly headers: Headers; + /** + * Returns request's subresource integrity metadata, which is a cryptographic + * hash of the resource being fetched. Its value consists of multiple hashes + * separated by whitespace. [SRI] + */ + readonly integrity: string; + /** + * Returns a boolean indicating whether or not request is for a history + * navigation (a.k.a. back-forward navigation). + */ + readonly isHistoryNavigation: boolean; + /** + * Returns a boolean indicating whether or not request is for a reload + * navigation. + */ + readonly isReloadNavigation: boolean; + /** + * Returns a boolean indicating whether or not request can outlive the global + * in which it was created. + */ + readonly keepalive: boolean; + /** + * Returns request's HTTP method, which is "GET" by default. + */ + readonly method: string; + /** + * Returns the mode associated with request, which is a string indicating + * whether the request will use CORS, or will be restricted to same-origin + * URLs. + */ + readonly mode: RequestMode; + /** + * Returns the redirect mode associated with request, which is a string + * indicating how redirects for the request will be handled during fetching. A + * request will follow redirects by default. + */ + readonly redirect: RequestRedirect; + /** + * Returns the referrer of request. Its value can be a same-origin URL if + * explicitly set in init, the empty string to indicate no referrer, and + * "about:client" when defaulting to the global's default. This is used during + * fetching to determine the value of the `Referer` header of the request + * being made. + */ + readonly referrer: string; + /** + * Returns the referrer policy associated with request. This is used during + * fetching to compute the value of the request's referrer. + */ + readonly referrerPolicy: ReferrerPolicy; + /** + * Returns the signal associated with request, which is an AbortSignal object + * indicating whether or not request has been aborted, and its abort event + * handler. + */ + readonly signal: AbortSignal; + /** + * Returns the URL of request as a string. + */ + readonly url: string; + clone(): Request; + + /** A simple getter used to expose a `ReadableStream` of the body contents. */ + readonly body: ReadableStream | null; + /** Stores a `Boolean` that declares whether the body has been used in a + * response yet. + */ + readonly bodyUsed: boolean; + /** Takes a `Response` stream and reads it to completion. It returns a promise + * that resolves with an `ArrayBuffer`. + */ + arrayBuffer(): Promise; + /** Takes a `Response` stream and reads it to completion. 
It returns a promise + * that resolves with a `Blob`. + */ + blob(): Promise; + /** Takes a `Response` stream and reads it to completion. It returns a promise + * that resolves with a `FormData` object. + */ + formData(): Promise; + /** Takes a `Response` stream and reads it to completion. It returns a promise + * that resolves with the result of parsing the body text as JSON. + */ + json(): Promise; + /** Takes a `Response` stream and reads it to completion. It returns a promise + * that resolves with a `USVString` (text). + */ + text(): Promise; +} + +interface ResponseInit { + headers?: HeadersInit; + status?: number; + statusText?: string; +} + +type ResponseType = + | "basic" + | "cors" + | "default" + | "error" + | "opaque" + | "opaqueredirect"; + +/** This Fetch API interface represents the response to a request. */ +declare class Response implements Body { + constructor(body?: BodyInit | null, init?: ResponseInit); + static error(): Response; + static redirect(url: string, status?: number): Response; + + readonly headers: Headers; + readonly ok: boolean; + readonly redirected: boolean; + readonly status: number; + readonly statusText: string; + readonly trailer: Promise; + readonly type: ResponseType; + readonly url: string; + clone(): Response; + + /** A simple getter used to expose a `ReadableStream` of the body contents. */ + readonly body: ReadableStream | null; + /** Stores a `Boolean` that declares whether the body has been used in a + * response yet. + */ + readonly bodyUsed: boolean; + /** Takes a `Response` stream and reads it to completion. It returns a promise + * that resolves with an `ArrayBuffer`. + */ + arrayBuffer(): Promise; + /** Takes a `Response` stream and reads it to completion. It returns a promise + * that resolves with a `Blob`. + */ + blob(): Promise; + /** Takes a `Response` stream and reads it to completion. It returns a promise + * that resolves with a `FormData` object. + */ + formData(): Promise; + /** Takes a `Response` stream and reads it to completion. It returns a promise + * that resolves with the result of parsing the body text as JSON. + */ + json(): Promise; + /** Takes a `Response` stream and reads it to completion. It returns a promise + * that resolves with a `USVString` (text). + */ + text(): Promise; +} + +/** Fetch a resource from the network. It returns a Promise that resolves to the + * Response to that request, whether it is successful or not. + * + * const response = await fetch("http://my.json.host/data.json"); + * console.log(response.status); // e.g. 200 + * console.log(response.statusText); // e.g. "OK" + * const jsonData = await response.json(); + */ +declare function fetch( + input: Request | URL | string, + init?: RequestInit, +): Promise; diff --git a/ext/fetch/lib.rs b/ext/fetch/lib.rs new file mode 100644 index 000000000..e89df470a --- /dev/null +++ b/ext/fetch/lib.rs @@ -0,0 +1,567 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
+ +use data_url::DataUrl; +use deno_core::error::bad_resource_id; +use deno_core::error::null_opbuf; +use deno_core::error::type_error; +use deno_core::error::AnyError; +use deno_core::futures::Future; +use deno_core::futures::Stream; +use deno_core::futures::StreamExt; +use deno_core::include_js_files; +use deno_core::op_async; +use deno_core::op_sync; +use deno_core::url::Url; +use deno_core::AsyncRefCell; +use deno_core::ByteString; +use deno_core::CancelFuture; +use deno_core::CancelHandle; +use deno_core::CancelTryFuture; +use deno_core::Canceled; +use deno_core::Extension; +use deno_core::OpState; +use deno_core::RcRef; +use deno_core::Resource; +use deno_core::ResourceId; +use deno_core::ZeroCopyBuf; +use deno_tls::create_http_client; +use deno_tls::rustls::RootCertStore; +use deno_tls::Proxy; +use deno_web::BlobStore; +use http::header::CONTENT_LENGTH; +use reqwest::header::HeaderName; +use reqwest::header::HeaderValue; +use reqwest::header::HOST; +use reqwest::Body; +use reqwest::Client; +use reqwest::Method; +use reqwest::RequestBuilder; +use reqwest::Response; +use serde::Deserialize; +use serde::Serialize; +use std::borrow::Cow; +use std::cell::RefCell; +use std::convert::From; +use std::fs::File; +use std::io::Read; +use std::path::Path; +use std::path::PathBuf; +use std::pin::Pin; +use std::rc::Rc; +use tokio::io::AsyncReadExt; +use tokio::sync::mpsc; +use tokio_stream::wrappers::ReceiverStream; +use tokio_util::io::StreamReader; + +pub use reqwest; // Re-export reqwest + +pub fn init( + user_agent: String, + root_cert_store: Option, + proxy: Option, + request_builder_hook: Option RequestBuilder>, + unsafely_ignore_certificate_errors: Option>, +) -> Extension { + Extension::builder() + .js(include_js_files!( + prefix "deno:ext/fetch", + "01_fetch_util.js", + "20_headers.js", + "21_formdata.js", + "22_body.js", + "22_http_client.js", + "23_request.js", + "23_response.js", + "26_fetch.js", + )) + .ops(vec![ + ("op_fetch", op_sync(op_fetch::
<FP>
)), + ("op_fetch_send", op_async(op_fetch_send)), + ("op_fetch_request_write", op_async(op_fetch_request_write)), + ("op_fetch_response_read", op_async(op_fetch_response_read)), + ("op_create_http_client", op_sync(op_create_http_client::
<FP>
)), + ]) + .state(move |state| { + state.put::({ + create_http_client( + user_agent.clone(), + root_cert_store.clone(), + None, + proxy.clone(), + unsafely_ignore_certificate_errors.clone(), + ) + .unwrap() + }); + state.put::(HttpClientDefaults { + user_agent: user_agent.clone(), + root_cert_store: root_cert_store.clone(), + proxy: proxy.clone(), + request_builder_hook, + unsafely_ignore_certificate_errors: unsafely_ignore_certificate_errors + .clone(), + }); + Ok(()) + }) + .build() +} + +pub struct HttpClientDefaults { + pub user_agent: String, + pub root_cert_store: Option, + pub proxy: Option, + pub request_builder_hook: Option RequestBuilder>, + pub unsafely_ignore_certificate_errors: Option>, +} + +pub trait FetchPermissions { + fn check_net_url(&mut self, _url: &Url) -> Result<(), AnyError>; + fn check_read(&mut self, _p: &Path) -> Result<(), AnyError>; +} + +/// For use with `op_fetch` when the user does not want permissions. +pub struct NoFetchPermissions; + +impl FetchPermissions for NoFetchPermissions { + fn check_net_url(&mut self, _url: &Url) -> Result<(), AnyError> { + Ok(()) + } + + fn check_read(&mut self, _p: &Path) -> Result<(), AnyError> { + Ok(()) + } +} + +pub fn get_declaration() -> PathBuf { + PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("lib.deno_fetch.d.ts") +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct FetchArgs { + method: ByteString, + url: String, + headers: Vec<(ByteString, ByteString)>, + client_rid: Option, + has_body: bool, + body_length: Option, +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +pub struct FetchReturn { + request_rid: ResourceId, + request_body_rid: Option, + cancel_handle_rid: Option, +} + +pub fn op_fetch( + state: &mut OpState, + args: FetchArgs, + data: Option, +) -> Result +where + FP: FetchPermissions + 'static, +{ + let client = if let Some(rid) = args.client_rid { + let r = state + .resource_table + .get::(rid) + .ok_or_else(bad_resource_id)?; + r.client.clone() + } else { + let client = state.borrow::(); + client.clone() + }; + + let method = Method::from_bytes(&args.method)?; + let url = Url::parse(&args.url)?; + + // Check scheme before asking for net permission + let scheme = url.scheme(); + let (request_rid, request_body_rid, cancel_handle_rid) = match scheme { + "http" | "https" => { + let permissions = state.borrow_mut::(); + permissions.check_net_url(&url)?; + + let mut request = client.request(method, url); + + let request_body_rid = if args.has_body { + match data { + None => { + // If no body is passed, we return a writer for streaming the body. + let (tx, rx) = mpsc::channel::>>(1); + + // If the size of the body is known, we include a content-length + // header explicitly. + if let Some(body_size) = args.body_length { + request = + request.header(CONTENT_LENGTH, HeaderValue::from(body_size)) + } + + request = request.body(Body::wrap_stream(ReceiverStream::new(rx))); + + let request_body_rid = + state.resource_table.add(FetchRequestBodyResource { + body: AsyncRefCell::new(tx), + cancel: CancelHandle::default(), + }); + + Some(request_body_rid) + } + Some(data) => { + // If a body is passed, we use it, and don't return a body for streaming. 
+ request = request.body(Vec::from(&*data)); + None + } + } + } else { + None + }; + + for (key, value) in args.headers { + let name = HeaderName::from_bytes(&key).unwrap(); + let v = HeaderValue::from_bytes(&value).unwrap(); + if name != HOST { + request = request.header(name, v); + } + } + + let defaults = state.borrow::(); + if let Some(request_builder_hook) = defaults.request_builder_hook { + request = request_builder_hook(request); + } + + let cancel_handle = CancelHandle::new_rc(); + let cancel_handle_ = cancel_handle.clone(); + + let fut = async move { + request + .send() + .or_cancel(cancel_handle_) + .await + .map(|res| res.map_err(|err| type_error(err.to_string()))) + }; + + let request_rid = state + .resource_table + .add(FetchRequestResource(Box::pin(fut))); + + let cancel_handle_rid = + state.resource_table.add(FetchCancelHandle(cancel_handle)); + + (request_rid, request_body_rid, Some(cancel_handle_rid)) + } + "data" => { + let data_url = DataUrl::process(url.as_str()) + .map_err(|e| type_error(format!("{:?}", e)))?; + + let (body, _) = data_url + .decode_to_vec() + .map_err(|e| type_error(format!("{:?}", e)))?; + + let response = http::Response::builder() + .status(http::StatusCode::OK) + .header(http::header::CONTENT_TYPE, data_url.mime_type().to_string()) + .body(reqwest::Body::from(body))?; + + let fut = async move { Ok(Ok(Response::from(response))) }; + + let request_rid = state + .resource_table + .add(FetchRequestResource(Box::pin(fut))); + + (request_rid, None, None) + } + "blob" => { + let blob_store = state.try_borrow::().ok_or_else(|| { + type_error("Blob URLs are not supported in this context.") + })?; + + let blob = blob_store + .get_object_url(url)? + .ok_or_else(|| type_error("Blob for the given URL not found."))?; + + if method != "GET" { + return Err(type_error("Blob URL fetch only supports GET method.")); + } + + let cancel_handle = CancelHandle::new_rc(); + let cancel_handle_ = cancel_handle.clone(); + + let fut = async move { + // TODO(lucacsonato): this should be a stream! + let chunk = match blob.read_all().or_cancel(cancel_handle_).await? 
{ + Ok(chunk) => chunk, + Err(err) => return Ok(Err(err)), + }; + + let res = http::Response::builder() + .status(http::StatusCode::OK) + .header(http::header::CONTENT_LENGTH, chunk.len()) + .header(http::header::CONTENT_TYPE, blob.media_type.clone()) + .body(reqwest::Body::from(chunk)) + .map_err(|err| type_error(err.to_string())); + + match res { + Ok(response) => Ok(Ok(Response::from(response))), + Err(err) => Ok(Err(err)), + } + }; + + let request_rid = state + .resource_table + .add(FetchRequestResource(Box::pin(fut))); + + let cancel_handle_rid = + state.resource_table.add(FetchCancelHandle(cancel_handle)); + + (request_rid, None, Some(cancel_handle_rid)) + } + _ => return Err(type_error(format!("scheme '{}' not supported", scheme))), + }; + + Ok(FetchReturn { + request_rid, + request_body_rid, + cancel_handle_rid, + }) +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +pub struct FetchResponse { + status: u16, + status_text: String, + headers: Vec<(ByteString, ByteString)>, + url: String, + response_rid: ResourceId, +} + +pub async fn op_fetch_send( + state: Rc>, + rid: ResourceId, + _: (), +) -> Result { + let request = state + .borrow_mut() + .resource_table + .take::(rid) + .ok_or_else(bad_resource_id)?; + + let request = Rc::try_unwrap(request) + .ok() + .expect("multiple op_fetch_send ongoing"); + + let res = match request.0.await { + Ok(Ok(res)) => res, + Ok(Err(err)) => return Err(type_error(err.to_string())), + Err(_) => return Err(type_error("request was cancelled")), + }; + + //debug!("Fetch response {}", url); + let status = res.status(); + let url = res.url().to_string(); + let mut res_headers = Vec::new(); + for (key, val) in res.headers().iter() { + let key_bytes: &[u8] = key.as_ref(); + res_headers.push(( + ByteString(key_bytes.to_owned()), + ByteString(val.as_bytes().to_owned()), + )); + } + + let stream: BytesStream = Box::pin(res.bytes_stream().map(|r| { + r.map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err)) + })); + let stream_reader = StreamReader::new(stream); + let rid = state + .borrow_mut() + .resource_table + .add(FetchResponseBodyResource { + reader: AsyncRefCell::new(stream_reader), + cancel: CancelHandle::default(), + }); + + Ok(FetchResponse { + status: status.as_u16(), + status_text: status.canonical_reason().unwrap_or("").to_string(), + headers: res_headers, + url, + response_rid: rid, + }) +} + +pub async fn op_fetch_request_write( + state: Rc>, + rid: ResourceId, + data: Option, +) -> Result<(), AnyError> { + let data = data.ok_or_else(null_opbuf)?; + let buf = Vec::from(&*data); + + let resource = state + .borrow() + .resource_table + .get::(rid) + .ok_or_else(bad_resource_id)?; + let body = RcRef::map(&resource, |r| &r.body).borrow_mut().await; + let cancel = RcRef::map(resource, |r| &r.cancel); + body.send(Ok(buf)).or_cancel(cancel).await?.map_err(|_| { + type_error("request body receiver not connected (request closed)") + })?; + + Ok(()) +} + +pub async fn op_fetch_response_read( + state: Rc>, + rid: ResourceId, + data: Option, +) -> Result { + let data = data.ok_or_else(null_opbuf)?; + + let resource = state + .borrow() + .resource_table + .get::(rid) + .ok_or_else(bad_resource_id)?; + let mut reader = RcRef::map(&resource, |r| &r.reader).borrow_mut().await; + let cancel = RcRef::map(resource, |r| &r.cancel); + let mut buf = data.clone(); + let read = reader.read(&mut buf).try_or_cancel(cancel).await?; + Ok(read) +} + +type CancelableResponseResult = Result, Canceled>; + +struct FetchRequestResource( + Pin>>, +); + 
+impl Resource for FetchRequestResource { + fn name(&self) -> Cow { + "fetchRequest".into() + } +} + +struct FetchCancelHandle(Rc); + +impl Resource for FetchCancelHandle { + fn name(&self) -> Cow { + "fetchCancelHandle".into() + } + + fn close(self: Rc) { + self.0.cancel() + } +} + +struct FetchRequestBodyResource { + body: AsyncRefCell>>>, + cancel: CancelHandle, +} + +impl Resource for FetchRequestBodyResource { + fn name(&self) -> Cow { + "fetchRequestBody".into() + } + + fn close(self: Rc) { + self.cancel.cancel() + } +} + +type BytesStream = + Pin> + Unpin>>; + +struct FetchResponseBodyResource { + reader: AsyncRefCell>, + cancel: CancelHandle, +} + +impl Resource for FetchResponseBodyResource { + fn name(&self) -> Cow { + "fetchResponseBody".into() + } + + fn close(self: Rc) { + self.cancel.cancel() + } +} + +struct HttpClientResource { + client: Client, +} + +impl Resource for HttpClientResource { + fn name(&self) -> Cow { + "httpClient".into() + } +} + +impl HttpClientResource { + fn new(client: Client) -> Self { + Self { client } + } +} + +#[derive(Deserialize, Default, Debug)] +#[serde(rename_all = "camelCase")] +#[serde(default)] +pub struct CreateHttpClientOptions { + ca_stores: Option>, + ca_file: Option, + ca_data: Option, + proxy: Option, +} + +pub fn op_create_http_client( + state: &mut OpState, + args: CreateHttpClientOptions, + _: (), +) -> Result +where + FP: FetchPermissions + 'static, +{ + if let Some(ca_file) = args.ca_file.clone() { + let permissions = state.borrow_mut::(); + permissions.check_read(&PathBuf::from(ca_file))?; + } + + if let Some(proxy) = args.proxy.clone() { + let permissions = state.borrow_mut::(); + let url = Url::parse(&proxy.url)?; + permissions.check_net_url(&url)?; + } + + let defaults = state.borrow::(); + let cert_data = + get_cert_data(args.ca_file.as_deref(), args.ca_data.as_deref())?; + + let client = create_http_client( + defaults.user_agent.clone(), + defaults.root_cert_store.clone(), + cert_data, + args.proxy, + defaults.unsafely_ignore_certificate_errors.clone(), + ) + .unwrap(); + + let rid = state.resource_table.add(HttpClientResource::new(client)); + Ok(rid) +} + +fn get_cert_data( + ca_file: Option<&str>, + ca_data: Option<&[u8]>, +) -> Result>, AnyError> { + if let Some(ca_data) = ca_data { + Ok(Some(ca_data.to_vec())) + } else if let Some(ca_file) = ca_file { + let mut buf = Vec::new(); + File::open(ca_file)?.read_to_end(&mut buf)?; + Ok(Some(buf)) + } else { + Ok(None) + } +} diff --git a/ext/ffi/00_ffi.js b/ext/ffi/00_ffi.js new file mode 100644 index 000000000..3c4112a47 --- /dev/null +++ b/ext/ffi/00_ffi.js @@ -0,0 +1,30 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. +"use strict"; + +((window) => { + const core = window.Deno.core; + + class DynamicLibrary { + #rid; + symbols = {}; + + constructor(path, symbols) { + this.#rid = core.opSync("op_ffi_load", { path, symbols }); + + for (const symbol in symbols) { + this.symbols[symbol] = (...parameters) => + core.opSync("op_ffi_call", { rid: this.#rid, symbol, parameters }); + } + } + + close() { + core.close(this.#rid); + } + } + + function dlopen(path, symbols) { + return new DynamicLibrary(path, symbols); + } + + window.__bootstrap.ffi = { dlopen }; +})(this); diff --git a/ext/ffi/Cargo.toml b/ext/ffi/Cargo.toml new file mode 100644 index 000000000..d9f25ad0c --- /dev/null +++ b/ext/ffi/Cargo.toml @@ -0,0 +1,20 @@ +# Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
+ +[package] +name = "deno_ffi" +version = "0.1.0" +authors = ["the Deno authors"] +edition = "2018" +license = "MIT" +readme = "README.md" +repository = "https://github.com/denoland/deno" +description = "Dynamic library ffi for deno" + +[lib] +path = "lib.rs" + +[dependencies] +deno_core = { version = "0.96.0", path = "../../core" } +dlopen = "0.1.8" +libffi = { version = "=0.0.7", package = "deno-libffi" } +serde = { version = "1.0.125", features = ["derive"] } diff --git a/ext/ffi/README.md b/ext/ffi/README.md new file mode 100644 index 000000000..cc2d81cd2 --- /dev/null +++ b/ext/ffi/README.md @@ -0,0 +1,3 @@ +# deno_ffi + +This crate implements dynamic library ffi. diff --git a/ext/ffi/lib.rs b/ext/ffi/lib.rs new file mode 100644 index 000000000..2100a4d53 --- /dev/null +++ b/ext/ffi/lib.rs @@ -0,0 +1,397 @@ +// Copyright 2021 the Deno authors. All rights reserved. MIT license. + +use deno_core::error::bad_resource_id; +use deno_core::error::AnyError; +use deno_core::include_js_files; +use deno_core::op_sync; +use deno_core::serde_json::json; +use deno_core::serde_json::Value; +use deno_core::Extension; +use deno_core::OpState; +use deno_core::Resource; +use deno_core::ResourceId; +use dlopen::raw::Library; +use libffi::middle::Arg; +use serde::Deserialize; +use std::borrow::Cow; +use std::collections::HashMap; +use std::convert::TryFrom; +use std::ffi::c_void; +use std::rc::Rc; + +pub struct Unstable(pub bool); + +fn check_unstable(state: &OpState, api_name: &str) { + let unstable = state.borrow::(); + + if !unstable.0 { + eprintln!( + "Unstable API '{}'. The --unstable flag must be provided.", + api_name + ); + std::process::exit(70); + } +} + +pub trait FfiPermissions { + fn check(&mut self, path: &str) -> Result<(), AnyError>; +} + +pub struct NoFfiPermissions; + +impl FfiPermissions for NoFfiPermissions { + fn check(&mut self, _path: &str) -> Result<(), AnyError> { + Ok(()) + } +} + +struct Symbol { + cif: libffi::middle::Cif, + ptr: libffi::middle::CodePtr, + parameter_types: Vec, + result_type: NativeType, +} + +struct DynamicLibraryResource { + lib: Library, + symbols: HashMap, +} + +impl Resource for DynamicLibraryResource { + fn name(&self) -> Cow { + "dynamicLibrary".into() + } + + fn close(self: Rc) { + drop(self) + } +} + +impl DynamicLibraryResource { + fn register( + &mut self, + symbol: String, + foreign_fn: ForeignFunction, + ) -> Result<(), AnyError> { + let fn_ptr = unsafe { self.lib.symbol::<*const c_void>(&symbol) }?; + let ptr = libffi::middle::CodePtr::from_ptr(fn_ptr as _); + let parameter_types = + foreign_fn.parameters.into_iter().map(NativeType::from); + let result_type = NativeType::from(foreign_fn.result); + let cif = libffi::middle::Cif::new( + parameter_types.clone().map(libffi::middle::Type::from), + result_type.into(), + ); + + self.symbols.insert( + symbol, + Symbol { + cif, + ptr, + parameter_types: parameter_types.collect(), + result_type, + }, + ); + + Ok(()) + } +} + +pub fn init(unstable: bool) -> Extension { + Extension::builder() + .js(include_js_files!( + prefix "deno:ext/ffi", + "00_ffi.js", + )) + .ops(vec![ + ("op_ffi_load", op_sync(op_ffi_load::

)), + ("op_ffi_call", op_sync(op_ffi_call)), + ]) + .state(move |state| { + // Stolen from deno_webgpu, is there a better option? + state.put(Unstable(unstable)); + Ok(()) + }) + .build() +} + +#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq)] +#[serde(rename_all = "lowercase")] +enum NativeType { + Void, + U8, + I8, + U16, + I16, + U32, + I32, + U64, + I64, + USize, + ISize, + F32, + F64, +} + +impl From for libffi::middle::Type { + fn from(native_type: NativeType) -> Self { + match native_type { + NativeType::Void => libffi::middle::Type::void(), + NativeType::U8 => libffi::middle::Type::u8(), + NativeType::I8 => libffi::middle::Type::i8(), + NativeType::U16 => libffi::middle::Type::u16(), + NativeType::I16 => libffi::middle::Type::i16(), + NativeType::U32 => libffi::middle::Type::u32(), + NativeType::I32 => libffi::middle::Type::i32(), + NativeType::U64 => libffi::middle::Type::u64(), + NativeType::I64 => libffi::middle::Type::i64(), + NativeType::USize => libffi::middle::Type::usize(), + NativeType::ISize => libffi::middle::Type::isize(), + NativeType::F32 => libffi::middle::Type::f32(), + NativeType::F64 => libffi::middle::Type::f64(), + } + } +} + +impl From for NativeType { + fn from(string: String) -> Self { + match string.as_str() { + "void" => NativeType::Void, + "u8" => NativeType::U8, + "i8" => NativeType::I8, + "u16" => NativeType::U16, + "i16" => NativeType::I16, + "u32" => NativeType::U32, + "i32" => NativeType::I32, + "u64" => NativeType::U64, + "i64" => NativeType::I64, + "usize" => NativeType::USize, + "isize" => NativeType::ISize, + "f32" => NativeType::F32, + "f64" => NativeType::F64, + _ => unimplemented!(), + } + } +} + +#[repr(C)] +union NativeValue { + void_value: (), + u8_value: u8, + i8_value: i8, + u16_value: u16, + i16_value: i16, + u32_value: u32, + i32_value: i32, + u64_value: u64, + i64_value: i64, + usize_value: usize, + isize_value: isize, + f32_value: f32, + f64_value: f64, +} + +impl NativeValue { + fn new(native_type: NativeType, value: Value) -> Self { + match native_type { + NativeType::Void => Self { void_value: () }, + NativeType::U8 => Self { + u8_value: value_as_uint::(value), + }, + NativeType::I8 => Self { + i8_value: value_as_int::(value), + }, + NativeType::U16 => Self { + u16_value: value_as_uint::(value), + }, + NativeType::I16 => Self { + i16_value: value_as_int::(value), + }, + NativeType::U32 => Self { + u32_value: value_as_uint::(value), + }, + NativeType::I32 => Self { + i32_value: value_as_int::(value), + }, + NativeType::U64 => Self { + u64_value: value_as_uint::(value), + }, + NativeType::I64 => Self { + i64_value: value_as_int::(value), + }, + NativeType::USize => Self { + usize_value: value_as_uint::(value), + }, + NativeType::ISize => Self { + isize_value: value_as_int::(value), + }, + NativeType::F32 => Self { + f32_value: value_as_f32(value), + }, + NativeType::F64 => Self { + f64_value: value_as_f64(value), + }, + } + } + + unsafe fn as_arg(&self, native_type: NativeType) -> Arg { + match native_type { + NativeType::Void => Arg::new(&self.void_value), + NativeType::U8 => Arg::new(&self.u8_value), + NativeType::I8 => Arg::new(&self.i8_value), + NativeType::U16 => Arg::new(&self.u16_value), + NativeType::I16 => Arg::new(&self.i16_value), + NativeType::U32 => Arg::new(&self.u32_value), + NativeType::I32 => Arg::new(&self.i32_value), + NativeType::U64 => Arg::new(&self.u64_value), + NativeType::I64 => Arg::new(&self.i64_value), + NativeType::USize => Arg::new(&self.usize_value), + NativeType::ISize => 
Arg::new(&self.isize_value), + NativeType::F32 => Arg::new(&self.f32_value), + NativeType::F64 => Arg::new(&self.f64_value), + } + } +} + +fn value_as_uint>(value: Value) -> T { + value + .as_u64() + .and_then(|v| T::try_from(v).ok()) + .expect("Expected ffi arg value to be an unsigned integer") +} + +fn value_as_int>(value: Value) -> T { + value + .as_i64() + .and_then(|v| T::try_from(v).ok()) + .expect("Expected ffi arg value to be a signed integer") +} + +fn value_as_f32(value: Value) -> f32 { + value_as_f64(value) as f32 +} + +fn value_as_f64(value: Value) -> f64 { + value + .as_f64() + .expect("Expected ffi arg value to be a float") +} + +#[derive(Deserialize, Debug)] +struct ForeignFunction { + parameters: Vec, + result: String, +} + +#[derive(Deserialize, Debug)] +struct FfiLoadArgs { + path: String, + symbols: HashMap, +} + +fn op_ffi_load( + state: &mut deno_core::OpState, + args: FfiLoadArgs, + _: (), +) -> Result +where + FP: FfiPermissions + 'static, +{ + check_unstable(state, "Deno.dlopen"); + let permissions = state.borrow_mut::(); + permissions.check(&args.path)?; + + let lib = Library::open(args.path)?; + let mut resource = DynamicLibraryResource { + lib, + symbols: HashMap::new(), + }; + + for (symbol, foreign_fn) in args.symbols { + resource.register(symbol, foreign_fn)?; + } + + Ok(state.resource_table.add(resource)) +} + +#[derive(Deserialize, Debug)] +#[serde(rename_all = "camelCase")] +struct FfiCallArgs { + rid: ResourceId, + symbol: String, + parameters: Vec, +} + +fn op_ffi_call( + state: &mut deno_core::OpState, + args: FfiCallArgs, + _: (), +) -> Result { + let resource = state + .resource_table + .get::(args.rid) + .ok_or_else(bad_resource_id)?; + + let symbol = resource + .symbols + .get(&args.symbol) + .ok_or_else(bad_resource_id)?; + + let native_values = symbol + .parameter_types + .iter() + .zip(args.parameters.into_iter()) + .map(|(&native_type, value)| NativeValue::new(native_type, value)) + .collect::>(); + + let call_args = symbol + .parameter_types + .iter() + .zip(native_values.iter()) + .map(|(&native_type, native_value)| unsafe { + native_value.as_arg(native_type) + }) + .collect::>(); + + Ok(match symbol.result_type { + NativeType::Void => { + json!(unsafe { symbol.cif.call::<()>(symbol.ptr, &call_args) }) + } + NativeType::U8 => { + json!(unsafe { symbol.cif.call::(symbol.ptr, &call_args) }) + } + NativeType::I8 => { + json!(unsafe { symbol.cif.call::(symbol.ptr, &call_args) }) + } + NativeType::U16 => { + json!(unsafe { symbol.cif.call::(symbol.ptr, &call_args) }) + } + NativeType::I16 => { + json!(unsafe { symbol.cif.call::(symbol.ptr, &call_args) }) + } + NativeType::U32 => { + json!(unsafe { symbol.cif.call::(symbol.ptr, &call_args) }) + } + NativeType::I32 => { + json!(unsafe { symbol.cif.call::(symbol.ptr, &call_args) }) + } + NativeType::U64 => { + json!(unsafe { symbol.cif.call::(symbol.ptr, &call_args) }) + } + NativeType::I64 => { + json!(unsafe { symbol.cif.call::(symbol.ptr, &call_args) }) + } + NativeType::USize => { + json!(unsafe { symbol.cif.call::(symbol.ptr, &call_args) }) + } + NativeType::ISize => { + json!(unsafe { symbol.cif.call::(symbol.ptr, &call_args) }) + } + NativeType::F32 => { + json!(unsafe { symbol.cif.call::(symbol.ptr, &call_args) }) + } + NativeType::F64 => { + json!(unsafe { symbol.cif.call::(symbol.ptr, &call_args) }) + } + }) +} diff --git a/ext/http/01_http.js b/ext/http/01_http.js new file mode 100644 index 000000000..3f8bcb3a8 --- /dev/null +++ b/ext/http/01_http.js @@ -0,0 +1,383 @@ +// Copyright 2018-2021 
the Deno authors. All rights reserved. MIT license. +"use strict"; + +((window) => { + const webidl = window.__bootstrap.webidl; + const { InnerBody } = window.__bootstrap.fetchBody; + const { setEventTargetData } = window.__bootstrap.eventTarget; + const { + Response, + fromInnerRequest, + toInnerResponse, + newInnerRequest, + newInnerResponse, + fromInnerResponse, + } = window.__bootstrap.fetch; + const core = window.Deno.core; + const { BadResource, Interrupted } = core; + const { ReadableStream } = window.__bootstrap.streams; + const abortSignal = window.__bootstrap.abortSignal; + const { WebSocket, _rid, _readyState, _eventLoop, _protocol, _server } = + window.__bootstrap.webSocket; + const { + ArrayPrototypeIncludes, + ArrayPrototypePush, + ArrayPrototypeSome, + Promise, + StringPrototypeIncludes, + StringPrototypeToLowerCase, + StringPrototypeSplit, + Symbol, + SymbolAsyncIterator, + TypedArrayPrototypeSubarray, + TypeError, + Uint8Array, + } = window.__bootstrap.primordials; + + const connErrorSymbol = Symbol("connError"); + + class HttpConn { + #rid = 0; + + constructor(rid) { + this.#rid = rid; + } + + /** @returns {number} */ + get rid() { + return this.#rid; + } + + /** @returns {Promise} */ + async nextRequest() { + let nextRequest; + try { + nextRequest = await core.opAsync( + "op_http_request_next", + this.#rid, + ); + } catch (error) { + // A connection error seen here would cause disrupted responses to throw + // a generic `BadResource` error. Instead store this error and replace + // those with it. + this[connErrorSymbol] = error; + if (error instanceof BadResource) { + return null; + } else if (error instanceof Interrupted) { + return null; + } else if ( + StringPrototypeIncludes(error.message, "connection closed") + ) { + return null; + } + throw error; + } + if (nextRequest === null) return null; + + const [ + requestRid, + responseSenderRid, + method, + headersList, + url, + ] = nextRequest; + + /** @type {ReadableStream | undefined} */ + let body = null; + if (typeof requestRid === "number") { + body = createRequestBodyStream(requestRid); + } + + const innerRequest = newInnerRequest( + method, + url, + headersList, + body !== null ? new InnerBody(body) : null, + ); + const signal = abortSignal.newSignal(); + const request = fromInnerRequest(innerRequest, signal, "immutable"); + + const respondWith = createRespondWith( + this, + responseSenderRid, + requestRid, + ); + + return { request, respondWith }; + } + + /** @returns {void} */ + close() { + core.close(this.#rid); + } + + [SymbolAsyncIterator]() { + // deno-lint-ignore no-this-alias + const httpConn = this; + return { + async next() { + const reqEvt = await httpConn.nextRequest(); + // Change with caution, current form avoids a v8 deopt + return { value: reqEvt, done: reqEvt === null }; + }, + }; + } + } + + function readRequest(requestRid, zeroCopyBuf) { + return core.opAsync( + "op_http_request_read", + requestRid, + zeroCopyBuf, + ); + } + + function createRespondWith(httpConn, responseSenderRid, requestRid) { + return async function respondWith(resp) { + if (resp instanceof Promise) { + resp = await resp; + } + + if (!(resp instanceof Response)) { + throw new TypeError( + "First argument to respondWith must be a Response or a promise resolving to a Response.", + ); + } + + const innerResp = toInnerResponse(resp); + + // If response body length is known, it will be sent synchronously in a + // single op, in other case a "response body" resource will be created and + // we'll be streaming it. 
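+      // For example (illustrative): a response built from a static body is
+      // sent in the single `op_http_response` call below, while a
+      // `ReadableStream` body makes that op return a `responseBodyRid` which
+      // is then written chunk by chunk via `op_http_response_write`.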
+ /** @type {ReadableStream | Uint8Array | null} */ + let respBody = null; + if (innerResp.body !== null) { + if (innerResp.body.unusable()) throw new TypeError("Body is unusable."); + if (innerResp.body.streamOrStatic instanceof ReadableStream) { + if ( + innerResp.body.length === null || + innerResp.body.source instanceof Blob + ) { + respBody = innerResp.body.stream; + } else { + const reader = innerResp.body.stream.getReader(); + const r1 = await reader.read(); + if (r1.done) { + respBody = new Uint8Array(0); + } else { + respBody = r1.value; + const r2 = await reader.read(); + if (!r2.done) throw new TypeError("Unreachable"); + } + } + } else { + innerResp.body.streamOrStatic.consumed = true; + respBody = innerResp.body.streamOrStatic.body; + } + } else { + respBody = new Uint8Array(0); + } + + let responseBodyRid; + try { + responseBodyRid = await core.opAsync("op_http_response", [ + responseSenderRid, + innerResp.status ?? 200, + innerResp.headerList, + ], respBody instanceof Uint8Array ? respBody : null); + } catch (error) { + const connError = httpConn[connErrorSymbol]; + if (error instanceof BadResource && connError != null) { + // deno-lint-ignore no-ex-assign + error = new connError.constructor(connError.message); + } + if (respBody !== null && respBody instanceof ReadableStream) { + await respBody.cancel(error); + } + throw error; + } + + // If `respond` returns a responseBodyRid, we should stream the body + // to that resource. + if (responseBodyRid !== null) { + try { + if (respBody === null || !(respBody instanceof ReadableStream)) { + throw new TypeError("Unreachable"); + } + const reader = respBody.getReader(); + while (true) { + const { value, done } = await reader.read(); + if (done) break; + if (!(value instanceof Uint8Array)) { + await reader.cancel(new TypeError("Value not a Uint8Array")); + break; + } + try { + await core.opAsync( + "op_http_response_write", + responseBodyRid, + value, + ); + } catch (error) { + const connError = httpConn[connErrorSymbol]; + if (error instanceof BadResource && connError != null) { + // deno-lint-ignore no-ex-assign + error = new connError.constructor(connError.message); + } + await reader.cancel(error); + throw error; + } + } + } finally { + // Once all chunks are sent, and the request body is closed, we can + // close the response body. + try { + await core.opAsync("op_http_response_close", responseBodyRid); + } catch { /* pass */ } + } + } + + const ws = resp[_ws]; + if (ws) { + if (typeof requestRid !== "number") { + throw new TypeError( + "This request can not be upgraded to a websocket connection.", + ); + } + + const wsRid = await core.opAsync( + "op_http_upgrade_websocket", + requestRid, + ); + ws[_rid] = wsRid; + ws[_protocol] = resp.headers.get("sec-websocket-protocol"); + + if (ws[_readyState] === WebSocket.CLOSING) { + await core.opAsync("op_ws_close", { rid: wsRid }); + + ws[_readyState] = WebSocket.CLOSED; + + const errEvent = new ErrorEvent("error"); + ws.dispatchEvent(errEvent); + + const event = new CloseEvent("close"); + ws.dispatchEvent(event); + + try { + core.close(wsRid); + } catch (err) { + // Ignore error if the socket has already been closed. 
+ if (!(err instanceof Deno.errors.BadResource)) throw err; + } + } else { + ws[_readyState] = WebSocket.OPEN; + const event = new Event("open"); + ws.dispatchEvent(event); + + ws[_eventLoop](); + } + } + }; + } + + function createRequestBodyStream(requestRid) { + return new ReadableStream({ + type: "bytes", + async pull(controller) { + try { + // This is the largest possible size for a single packet on a TLS + // stream. + const chunk = new Uint8Array(16 * 1024 + 256); + const read = await readRequest( + requestRid, + chunk, + ); + if (read > 0) { + // We read some data. Enqueue it onto the stream. + controller.enqueue(TypedArrayPrototypeSubarray(chunk, 0, read)); + } else { + // We have reached the end of the body, so we close the stream. + controller.close(); + core.close(requestRid); + } + } catch (err) { + // There was an error while reading a chunk of the body, so we + // error. + controller.error(err); + controller.close(); + core.close(requestRid); + } + }, + cancel() { + core.close(requestRid); + }, + }); + } + + const _ws = Symbol("[[associated_ws]]"); + + function upgradeWebSocket(request, options = {}) { + if (request.headers.get("upgrade") !== "websocket") { + throw new TypeError( + "Invalid Header: 'upgrade' header must be 'websocket'", + ); + } + + const connection = request.headers.get("connection"); + const connectionHasUpgradeOption = connection !== null && + ArrayPrototypeSome( + StringPrototypeSplit(connection, /\s*,\s*/), + (option) => StringPrototypeToLowerCase(option) === "upgrade", + ); + if (!connectionHasUpgradeOption) { + throw new TypeError( + "Invalid Header: 'connection' header must be 'Upgrade'", + ); + } + + const websocketKey = request.headers.get("sec-websocket-key"); + if (websocketKey === null) { + throw new TypeError( + "Invalid Header: 'sec-websocket-key' header must be set", + ); + } + + const accept = core.opSync("op_http_websocket_accept_header", websocketKey); + + const r = newInnerResponse(101); + r.headerList = [ + ["upgrade", "websocket"], + ["connection", "Upgrade"], + ["sec-websocket-accept", accept], + ]; + + const protocolsStr = request.headers.get("sec-websocket-protocol") || ""; + const protocols = StringPrototypeSplit(protocolsStr, ", "); + if (protocols && options.protocol) { + if (ArrayPrototypeIncludes(protocols, options.protocol)) { + ArrayPrototypePush(r.headerList, [ + "sec-websocket-protocol", + options.protocol, + ]); + } else { + throw new TypeError( + `Protocol '${options.protocol}' not in the request's protocol list (non negotiable)`, + ); + } + } + + const response = fromInnerResponse(r, "immutable"); + + const socket = webidl.createBranded(WebSocket); + setEventTargetData(socket); + socket[_server] = true; + response[_ws] = socket; + + return { response, socket }; + } + + window.__bootstrap.http = { + HttpConn, + upgradeWebSocket, + }; +})(this); diff --git a/ext/http/Cargo.toml b/ext/http/Cargo.toml new file mode 100644 index 000000000..3463735c5 --- /dev/null +++ b/ext/http/Cargo.toml @@ -0,0 +1,25 @@ +# Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
+ +[package] +name = "deno_http" +version = "0.5.0" +authors = ["the Deno authors"] +edition = "2018" +license = "MIT" +readme = "README.md" +repository = "https://github.com/denoland/deno" +description = "HTTP server implementation for Deno" + +[lib] +path = "lib.rs" + +[dependencies] +base64 = "0.13.0" +bytes = "1" +deno_core = { version = "0.96.0", path = "../../core" } +deno_websocket = { version = "0.19.0", path = "../websocket" } +hyper = { version = "0.14.9", features = ["server", "stream", "http1", "http2", "runtime"] } +ring = "0.16.20" +serde = { version = "1.0.125", features = ["derive"] } +tokio = { version = "1.8.0", features = ["full"] } +tokio-util = { version = "0.6.7", features = ["io"] } diff --git a/ext/http/README.md b/ext/http/README.md new file mode 100644 index 000000000..ab557017a --- /dev/null +++ b/ext/http/README.md @@ -0,0 +1,4 @@ +# deno_http + +This crate implements server-side HTTP based on primitives from the +[Fetch API](https://fetch.spec.whatwg.org/). diff --git a/ext/http/lib.deno_http.unstable.d.ts b/ext/http/lib.deno_http.unstable.d.ts new file mode 100644 index 000000000..5c5bf78df --- /dev/null +++ b/ext/http/lib.deno_http.unstable.d.ts @@ -0,0 +1,53 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +/// +/// + +declare namespace Deno { + export interface WebSocketUpgrade { + response: Response; + socket: WebSocket; + } + + export interface UpgradeWebSocketOptions { + protocol?: string; + } + + /** **UNSTABLE**: new API, yet to be vetted. + * + * Used to upgrade an incoming HTTP request to a WebSocket. + * + * Given a request, returns a pair of WebSocket and Response. The original + * request must be responded to with the returned response for the websocket + * upgrade to be successful. + * + * ```ts + * const conn = await Deno.connect({ port: 80, hostname: "127.0.0.1" }); + * const httpConn = Deno.serveHttp(conn); + * const e = await httpConn.nextRequest(); + * if (e) { + * const { socket, response } = Deno.upgradeWebSocket(e.request); + * socket.onopen = () => { + * socket.send("Hello World!"); + * }; + * socket.onmessage = (e) => { + * console.log(e.data); + * socket.close(); + * }; + * socket.onclose = () => console.log("WebSocket has been closed."); + * socket.onerror = (e) => console.error("WebSocket error:", e.message); + * e.respondWith(response); + * } + * ``` + * + * If the request body is disturbed (read from) before the upgrade is + * completed, upgrading fails. + * + * This operation does not yet consume the request or open the websocket. This + * only happens once the returned response has been passed to `respondWith`. + */ + export function upgradeWebSocket( + request: Request, + options?: UpgradeWebSocketOptions, + ): WebSocketUpgrade; +} diff --git a/ext/http/lib.rs b/ext/http/lib.rs new file mode 100644 index 000000000..2c858143c --- /dev/null +++ b/ext/http/lib.rs @@ -0,0 +1,684 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
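+
+// Rough op surface (see `init` below): `op_http_request_next` polls the hyper
+// connection for the next incoming request, `op_http_request_read` reads its
+// body, `op_http_response` starts a response (optionally handing back a body
+// rid), `op_http_response_write` / `op_http_response_close` stream and finish
+// that body, and `op_http_websocket_accept_header` /
+// `op_http_upgrade_websocket` handle websocket upgrades.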
+ +use deno_core::error::bad_resource_id; +use deno_core::error::null_opbuf; +use deno_core::error::type_error; +use deno_core::error::AnyError; +use deno_core::futures::future::poll_fn; +use deno_core::futures::FutureExt; +use deno_core::futures::Stream; +use deno_core::futures::StreamExt; +use deno_core::include_js_files; +use deno_core::op_async; +use deno_core::op_sync; +use deno_core::AsyncRefCell; +use deno_core::ByteString; +use deno_core::CancelHandle; +use deno_core::CancelTryFuture; +use deno_core::Extension; +use deno_core::OpState; +use deno_core::RcRef; +use deno_core::Resource; +use deno_core::ResourceId; +use deno_core::ZeroCopyBuf; +use hyper::body::HttpBody; +use hyper::http; +use hyper::server::conn::Http; +use hyper::service::Service as HyperService; +use hyper::Body; +use hyper::Request; +use hyper::Response; +use serde::Deserialize; +use serde::Serialize; +use std::borrow::Cow; +use std::cell::RefCell; +use std::future::Future; +use std::net::SocketAddr; +use std::path::PathBuf; +use std::pin::Pin; +use std::rc::Rc; +use std::task::Context; +use std::task::Poll; +use tokio::io::AsyncRead; +use tokio::io::AsyncReadExt; +use tokio::io::AsyncWrite; +use tokio::sync::oneshot; +use tokio_util::io::StreamReader; + +pub fn get_unstable_declaration() -> PathBuf { + PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("lib.deno_http.unstable.d.ts") +} + +pub fn init() -> Extension { + Extension::builder() + .js(include_js_files!( + prefix "deno:ext/http", + "01_http.js", + )) + .ops(vec![ + ("op_http_request_next", op_async(op_http_request_next)), + ("op_http_request_read", op_async(op_http_request_read)), + ("op_http_response", op_async(op_http_response)), + ("op_http_response_write", op_async(op_http_response_write)), + ("op_http_response_close", op_async(op_http_response_close)), + ( + "op_http_websocket_accept_header", + op_sync(op_http_websocket_accept_header), + ), + ( + "op_http_upgrade_websocket", + op_async(op_http_upgrade_websocket), + ), + ]) + .build() +} + +struct ServiceInner { + request: Request, + response_tx: oneshot::Sender>, +} + +#[derive(Clone, Default)] +struct Service { + inner: Rc>>, + waker: Rc, +} + +impl HyperService> for Service { + type Response = Response; + type Error = http::Error; + #[allow(clippy::type_complexity)] + type Future = + Pin>>>; + + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { + if self.inner.borrow().is_some() { + Poll::Pending + } else { + Poll::Ready(Ok(())) + } + } + + fn call(&mut self, req: Request) -> Self::Future { + let (resp_tx, resp_rx) = oneshot::channel(); + self.inner.borrow_mut().replace(ServiceInner { + request: req, + response_tx: resp_tx, + }); + + async move { Ok(resp_rx.await.unwrap()) }.boxed_local() + } +} + +type ConnFuture = Pin>>>; + +struct Conn { + scheme: &'static str, + addr: SocketAddr, + conn: Rc>, +} + +struct ConnResource { + hyper_connection: Conn, + deno_service: Service, + cancel: CancelHandle, +} + +impl ConnResource { + // TODO(ry) impl Future for ConnResource? + fn poll(&self, cx: &mut Context<'_>) -> Poll> { + self + .hyper_connection + .conn + .borrow_mut() + .poll_unpin(cx) + .map_err(AnyError::from) + } +} + +impl Resource for ConnResource { + fn name(&self) -> Cow { + "httpConnection".into() + } + + fn close(self: Rc) { + self.cancel.cancel() + } +} + +// We use a tuple instead of struct to avoid serialization overhead of the keys. 
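+// (With a tuple struct, serde encodes this as a plain JSON array, roughly
+// [requestRid, responseSenderRid, method, headers, url], rather than an
+// object repeating string keys for every request.)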
+#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +struct NextRequestResponse( + // request_rid: + Option, + // response_sender_rid: + ResourceId, + // method: + // This is a String rather than a ByteString because reqwest will only return + // the method as a str which is guaranteed to be ASCII-only. + String, + // headers: + Vec<(ByteString, ByteString)>, + // url: + String, +); + +async fn op_http_request_next( + state: Rc>, + conn_rid: ResourceId, + _: (), +) -> Result, AnyError> { + let conn_resource = state + .borrow() + .resource_table + .get::(conn_rid) + .ok_or_else(bad_resource_id)?; + + let cancel = RcRef::map(conn_resource.clone(), |r| &r.cancel); + + poll_fn(|cx| { + conn_resource.deno_service.waker.register(cx.waker()); + let connection_closed = match conn_resource.poll(cx) { + Poll::Pending => false, + Poll::Ready(Ok(())) => { + // try to close ConnResource, but don't unwrap as it might + // already be closed + let _ = state + .borrow_mut() + .resource_table + .take::(conn_rid); + true + } + Poll::Ready(Err(e)) => { + // TODO(ry) close RequestResource associated with connection + // TODO(ry) close ResponseBodyResource associated with connection + // close ConnResource + state + .borrow_mut() + .resource_table + .take::(conn_rid) + .unwrap(); + + if should_ignore_error(&e) { + true + } else { + return Poll::Ready(Err(e)); + } + } + }; + if let Some(request_resource) = + conn_resource.deno_service.inner.borrow_mut().take() + { + let tx = request_resource.response_tx; + let req = request_resource.request; + let method = req.method().to_string(); + + // We treat cookies specially, because we don't want them to get them + // mangled by the `Headers` object in JS. What we do is take all cookie + // headers and concat them into a single cookie header, seperated by + // semicolons. + let mut total_cookie_length = 0; + let mut cookies = vec![]; + + let mut headers = Vec::with_capacity(req.headers().len()); + for (name, value) in req.headers().iter() { + if name == hyper::header::COOKIE { + let bytes = value.as_bytes(); + total_cookie_length += bytes.len(); + cookies.push(bytes); + } else { + let name: &[u8] = name.as_ref(); + let value = value.as_bytes(); + headers + .push((ByteString(name.to_owned()), ByteString(value.to_owned()))); + } + } + + if !cookies.is_empty() { + let cookie_count = cookies.len(); + total_cookie_length += (cookie_count * 2) - 2; + let mut bytes = Vec::with_capacity(total_cookie_length); + for (i, cookie) in cookies.into_iter().enumerate() { + bytes.extend(cookie); + if i != cookie_count - 1 { + bytes.extend("; ".as_bytes()); + } + } + headers.push(( + ByteString("cookie".as_bytes().to_owned()), + ByteString(bytes), + )); + } + + let url = { + let scheme = &conn_resource.hyper_connection.scheme; + let host: Cow = if let Some(host) = req.uri().host() { + Cow::Borrowed(host) + } else if let Some(host) = req.headers().get("HOST") { + Cow::Borrowed(host.to_str()?) 
+ } else { + Cow::Owned(conn_resource.hyper_connection.addr.to_string()) + }; + let path = req.uri().path_and_query().map_or("/", |p| p.as_str()); + format!("{}://{}{}", scheme, host, path) + }; + + let is_websocket_request = req + .headers() + .get(hyper::header::CONNECTION) + .and_then(|v| { + v.to_str().ok().map(|s| "Upgrade".eq_ignore_ascii_case(s)) + }) + .unwrap_or(false) + && req + .headers() + .get(hyper::header::UPGRADE) + .and_then(|v| { + v.to_str().ok().map(|s| "websocket".eq_ignore_ascii_case(s)) + }) + .unwrap_or(false); + + let has_body = if let Some(exact_size) = req.size_hint().exact() { + exact_size > 0 + } else { + true + }; + + let maybe_request_rid = if is_websocket_request || has_body { + let mut state = state.borrow_mut(); + let request_rid = state.resource_table.add(RequestResource { + conn_rid, + inner: AsyncRefCell::new(RequestOrStreamReader::Request(Some(req))), + cancel: CancelHandle::default(), + }); + Some(request_rid) + } else { + None + }; + + let mut state = state.borrow_mut(); + let response_sender_rid = + state.resource_table.add(ResponseSenderResource { + sender: tx, + conn_rid, + }); + + Poll::Ready(Ok(Some(NextRequestResponse( + maybe_request_rid, + response_sender_rid, + method, + headers, + url, + )))) + } else if connection_closed { + Poll::Ready(Ok(None)) + } else { + Poll::Pending + } + }) + .try_or_cancel(cancel) + .await + .map_err(AnyError::from) +} + +fn should_ignore_error(e: &AnyError) -> bool { + if let Some(e) = e.downcast_ref::() { + use std::error::Error; + if let Some(std_err) = e.source() { + if let Some(io_err) = std_err.downcast_ref::() { + if io_err.kind() == std::io::ErrorKind::NotConnected { + return true; + } + } + } + } + false +} + +pub fn start_http( + state: &mut OpState, + io: IO, + addr: SocketAddr, + scheme: &'static str, +) -> Result { + let deno_service = Service::default(); + + let hyper_connection = Http::new() + .with_executor(LocalExecutor) + .serve_connection(io, deno_service.clone()) + .with_upgrades(); + let conn = Pin::new(Box::new(hyper_connection)); + let conn_resource = ConnResource { + hyper_connection: Conn { + scheme, + addr, + conn: Rc::new(RefCell::new(conn)), + }, + deno_service, + cancel: CancelHandle::default(), + }; + let rid = state.resource_table.add(conn_resource); + Ok(rid) +} + +// We use a tuple instead of struct to avoid serialization overhead of the keys. +#[derive(Deserialize)] +struct RespondArgs( + // rid: + u32, + // status: + u16, + // headers: + Vec<(ByteString, ByteString)>, +); + +async fn op_http_response( + state: Rc>, + args: RespondArgs, + data: Option, +) -> Result, AnyError> { + let RespondArgs(rid, status, headers) = args; + + let response_sender = state + .borrow_mut() + .resource_table + .take::(rid) + .ok_or_else(bad_resource_id)?; + let response_sender = Rc::try_unwrap(response_sender) + .ok() + .expect("multiple op_http_respond ongoing"); + + let conn_rid = response_sender.conn_rid; + + let conn_resource = state + .borrow() + .resource_table + .get::(conn_rid) + .ok_or_else(bad_resource_id)?; + + let mut builder = Response::builder().status(status); + + builder.headers_mut().unwrap().reserve(headers.len()); + for (key, value) in &headers { + builder = builder.header(key.as_ref(), value.as_ref()); + } + + let res; + let maybe_response_body_rid = if let Some(d) = data { + // If a body is passed, we use it, and don't return a body for streaming. + res = builder.body(Vec::from(&*d).into())?; + None + } else { + // If no body is passed, we return a writer for streaming the body. 
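+    // Sketch of the streaming path: `Body::channel()` yields a `Sender` that
+    // is parked in a `ResponseBodyResource` below, while the `Body` half goes
+    // into the hyper response; `op_http_response_write` later pushes chunks
+    // through that sender with `send_data`.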
+ let (sender, body) = Body::channel(); + res = builder.body(body)?; + + let response_body_rid = + state.borrow_mut().resource_table.add(ResponseBodyResource { + body: AsyncRefCell::new(sender), + conn_rid, + }); + + Some(response_body_rid) + }; + + // oneshot::Sender::send(v) returns |v| on error, not an error object. + // The only failure mode is the receiver already having dropped its end + // of the channel. + if response_sender.sender.send(res).is_err() { + return Err(type_error("internal communication error")); + } + + poll_fn(|cx| match conn_resource.poll(cx) { + Poll::Ready(x) => { + state.borrow_mut().resource_table.close(conn_rid); + Poll::Ready(x) + } + Poll::Pending => Poll::Ready(Ok(())), + }) + .await?; + + if maybe_response_body_rid.is_none() { + conn_resource.deno_service.waker.wake(); + } + Ok(maybe_response_body_rid) +} + +async fn op_http_response_close( + state: Rc>, + rid: ResourceId, + _: (), +) -> Result<(), AnyError> { + let resource = state + .borrow_mut() + .resource_table + .take::(rid) + .ok_or_else(bad_resource_id)?; + + let conn_resource = state + .borrow() + .resource_table + .get::(resource.conn_rid) + .ok_or_else(bad_resource_id)?; + drop(resource); + + let r = poll_fn(|cx| match conn_resource.poll(cx) { + Poll::Ready(x) => Poll::Ready(x), + Poll::Pending => Poll::Ready(Ok(())), + }) + .await; + conn_resource.deno_service.waker.wake(); + r +} + +async fn op_http_request_read( + state: Rc>, + rid: ResourceId, + data: Option, +) -> Result { + let mut data = data.ok_or_else(null_opbuf)?; + + let resource = state + .borrow() + .resource_table + .get::(rid as u32) + .ok_or_else(bad_resource_id)?; + + let conn_resource = state + .borrow() + .resource_table + .get::(resource.conn_rid) + .ok_or_else(bad_resource_id)?; + + let mut inner = RcRef::map(resource.clone(), |r| &r.inner) + .borrow_mut() + .await; + + if let RequestOrStreamReader::Request(req) = &mut *inner { + let req = req.take().unwrap(); + let stream: BytesStream = Box::pin(req.into_body().map(|r| { + r.map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err)) + })); + let reader = StreamReader::new(stream); + *inner = RequestOrStreamReader::StreamReader(reader); + }; + + let reader = match &mut *inner { + RequestOrStreamReader::StreamReader(reader) => reader, + _ => unreachable!(), + }; + + let cancel = RcRef::map(resource, |r| &r.cancel); + + let mut read_fut = reader.read(&mut data).try_or_cancel(cancel).boxed_local(); + + poll_fn(|cx| { + if let Poll::Ready(Err(e)) = conn_resource.poll(cx) { + // close ConnResource + // close RequestResource associated with connection + // close ResponseBodyResource associated with connection + return Poll::Ready(Err(e)); + } + + read_fut.poll_unpin(cx).map_err(AnyError::from) + }) + .await +} + +async fn op_http_response_write( + state: Rc>, + rid: ResourceId, + data: Option, +) -> Result<(), AnyError> { + let buf = data.ok_or_else(null_opbuf)?; + let resource = state + .borrow() + .resource_table + .get::(rid as u32) + .ok_or_else(bad_resource_id)?; + + let conn_resource = state + .borrow() + .resource_table + .get::(resource.conn_rid) + .ok_or_else(bad_resource_id)?; + + let mut body = RcRef::map(&resource, |r| &r.body).borrow_mut().await; + + let mut send_data_fut = body.send_data(Vec::from(&*buf).into()).boxed_local(); + + poll_fn(|cx| { + let r = send_data_fut.poll_unpin(cx).map_err(AnyError::from); + + // Poll connection so the data is flushed + if let Poll::Ready(Err(e)) = conn_resource.poll(cx) { + // close ConnResource + // close RequestResource 
associated with connection + // close ResponseBodyResource associated with connection + return Poll::Ready(Err(e)); + } + + r + }) + .await?; + + Ok(()) +} + +fn op_http_websocket_accept_header( + _: &mut OpState, + key: String, + _: (), +) -> Result { + let digest = ring::digest::digest( + &ring::digest::SHA1_FOR_LEGACY_USE_ONLY, + format!("{}258EAFA5-E914-47DA-95CA-C5AB0DC85B11", key).as_bytes(), + ); + Ok(base64::encode(digest)) +} + +async fn op_http_upgrade_websocket( + state: Rc>, + rid: ResourceId, + _: (), +) -> Result { + let req_resource = state + .borrow_mut() + .resource_table + .take::(rid) + .ok_or_else(bad_resource_id)?; + + let mut inner = RcRef::map(&req_resource, |r| &r.inner).borrow_mut().await; + + if let RequestOrStreamReader::Request(req) = inner.as_mut() { + let upgraded = hyper::upgrade::on(req.as_mut().unwrap()).await?; + let stream = + deno_websocket::tokio_tungstenite::WebSocketStream::from_raw_socket( + upgraded, + deno_websocket::tokio_tungstenite::tungstenite::protocol::Role::Server, + None, + ) + .await; + + let (ws_tx, ws_rx) = stream.split(); + let rid = + state + .borrow_mut() + .resource_table + .add(deno_websocket::WsStreamResource { + stream: deno_websocket::WebSocketStreamType::Server { + rx: AsyncRefCell::new(ws_rx), + tx: AsyncRefCell::new(ws_tx), + }, + cancel: Default::default(), + }); + + Ok(rid) + } else { + Err(bad_resource_id()) + } +} + +type BytesStream = + Pin> + Unpin>>; + +enum RequestOrStreamReader { + Request(Option>), + StreamReader(StreamReader), +} + +struct RequestResource { + conn_rid: ResourceId, + inner: AsyncRefCell, + cancel: CancelHandle, +} + +impl Resource for RequestResource { + fn name(&self) -> Cow { + "request".into() + } + + fn close(self: Rc) { + self.cancel.cancel() + } +} + +struct ResponseSenderResource { + sender: oneshot::Sender>, + conn_rid: ResourceId, +} + +impl Resource for ResponseSenderResource { + fn name(&self) -> Cow { + "responseSender".into() + } +} + +struct ResponseBodyResource { + body: AsyncRefCell, + conn_rid: ResourceId, +} + +impl Resource for ResponseBodyResource { + fn name(&self) -> Cow { + "responseBody".into() + } +} + +// Needed so hyper can use non Send futures +#[derive(Clone)] +struct LocalExecutor; + +impl hyper::rt::Executor for LocalExecutor +where + Fut: Future + 'static, + Fut::Output: 'static, +{ + fn execute(&self, fut: Fut) { + tokio::task::spawn_local(fut); + } +} diff --git a/ext/net/01_net.js b/ext/net/01_net.js new file mode 100644 index 000000000..cc10a1c0a --- /dev/null +++ b/ext/net/01_net.js @@ -0,0 +1,240 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. +"use strict"; + +((window) => { + const core = window.Deno.core; + const { BadResource } = core; + const { + PromiseResolve, + SymbolAsyncIterator, + Uint8Array, + TypedArrayPrototypeSubarray, + } = window.__bootstrap.primordials; + + async function read( + rid, + buffer, + ) { + if (buffer.length === 0) { + return 0; + } + const nread = await core.opAsync("op_net_read_async", rid, buffer); + return nread === 0 ? 
null : nread; + } + + async function write(rid, data) { + return await core.opAsync("op_net_write_async", rid, data); + } + + function shutdown(rid) { + return core.opAsync("op_net_shutdown", rid); + } + + function opAccept(rid, transport) { + return core.opAsync("op_accept", { rid, transport }); + } + + function opListen(args) { + return core.opSync("op_listen", args); + } + + function opConnect(args) { + return core.opAsync("op_connect", args); + } + + function opReceive(rid, transport, zeroCopy) { + return core.opAsync( + "op_datagram_receive", + { rid, transport }, + zeroCopy, + ); + } + + function opSend(args, zeroCopy) { + return core.opAsync("op_datagram_send", args, zeroCopy); + } + + function resolveDns(query, recordType, options) { + return core.opAsync("op_dns_resolve", { query, recordType, options }); + } + + class Conn { + #rid = 0; + #remoteAddr = null; + #localAddr = null; + constructor(rid, remoteAddr, localAddr) { + this.#rid = rid; + this.#remoteAddr = remoteAddr; + this.#localAddr = localAddr; + } + + get rid() { + return this.#rid; + } + + get remoteAddr() { + return this.#remoteAddr; + } + + get localAddr() { + return this.#localAddr; + } + + write(p) { + return write(this.rid, p); + } + + read(p) { + return read(this.rid, p); + } + + close() { + core.close(this.rid); + } + + closeWrite() { + return shutdown(this.rid); + } + } + + class Listener { + #rid = 0; + #addr = null; + + constructor(rid, addr) { + this.#rid = rid; + this.#addr = addr; + } + + get rid() { + return this.#rid; + } + + get addr() { + return this.#addr; + } + + async accept() { + const res = await opAccept(this.rid, this.addr.transport); + return new Conn(res.rid, res.remoteAddr, res.localAddr); + } + + async next() { + let conn; + try { + conn = await this.accept(); + } catch (error) { + if (error instanceof BadResource) { + return { value: undefined, done: true }; + } + throw error; + } + return { value: conn, done: false }; + } + + return(value) { + this.close(); + return PromiseResolve({ value, done: true }); + } + + close() { + core.close(this.rid); + } + + [SymbolAsyncIterator]() { + return this; + } + } + + class Datagram { + #rid = 0; + #addr = null; + + constructor(rid, addr, bufSize = 1024) { + this.#rid = rid; + this.#addr = addr; + this.bufSize = bufSize; + } + + get rid() { + return this.#rid; + } + + get addr() { + return this.#addr; + } + + async receive(p) { + const buf = p || new Uint8Array(this.bufSize); + const { size, remoteAddr } = await opReceive( + this.rid, + this.addr.transport, + buf, + ); + const sub = TypedArrayPrototypeSubarray(buf, 0, size); + return [sub, remoteAddr]; + } + + send(p, addr) { + const remote = { hostname: "127.0.0.1", ...addr }; + + const args = { ...remote, rid: this.rid }; + return opSend(args, p); + } + + close() { + core.close(this.rid); + } + + async *[SymbolAsyncIterator]() { + while (true) { + try { + yield await this.receive(); + } catch (err) { + if (err instanceof BadResource) { + break; + } + throw err; + } + } + } + } + + function listen({ hostname, ...options }) { + const res = opListen({ + transport: "tcp", + hostname: typeof hostname === "undefined" ? 
"0.0.0.0" : hostname, + ...options, + }); + + return new Listener(res.rid, res.localAddr); + } + + async function connect(options) { + let res; + + if (options.transport === "unix") { + res = await opConnect(options); + } else { + res = await opConnect({ + transport: "tcp", + hostname: "127.0.0.1", + ...options, + }); + } + + return new Conn(res.rid, res.remoteAddr, res.localAddr); + } + + window.__bootstrap.net = { + connect, + Conn, + opConnect, + listen, + opListen, + Listener, + shutdown, + Datagram, + resolveDns, + }; +})(this); diff --git a/ext/net/02_tls.js b/ext/net/02_tls.js new file mode 100644 index 000000000..343ec2e4f --- /dev/null +++ b/ext/net/02_tls.js @@ -0,0 +1,89 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. +"use strict"; + +((window) => { + const core = window.Deno.core; + const { Listener, Conn } = window.__bootstrap.net; + + function opConnectTls( + args, + ) { + return core.opAsync("op_connect_tls", args); + } + + function opAcceptTLS(rid) { + return core.opAsync("op_accept_tls", rid); + } + + function opListenTls(args) { + return core.opSync("op_listen_tls", args); + } + + function opStartTls(args) { + return core.opAsync("op_start_tls", args); + } + + async function connectTls({ + port, + hostname = "127.0.0.1", + transport = "tcp", + certFile = undefined, + certChain = undefined, + privateKey = undefined, + }) { + const res = await opConnectTls({ + port, + hostname, + transport, + certFile, + certChain, + privateKey, + }); + return new Conn(res.rid, res.remoteAddr, res.localAddr); + } + + class TLSListener extends Listener { + async accept() { + const res = await opAcceptTLS(this.rid); + return new Conn(res.rid, res.remoteAddr, res.localAddr); + } + } + + function listenTls({ + port, + certFile, + keyFile, + hostname = "0.0.0.0", + transport = "tcp", + alpnProtocols, + }) { + const res = opListenTls({ + port, + certFile, + keyFile, + hostname, + transport, + alpnProtocols, + }); + return new TLSListener(res.rid, res.localAddr); + } + + async function startTls( + conn, + { hostname = "127.0.0.1", certFile } = {}, + ) { + const res = await opStartTls({ + rid: conn.rid, + hostname, + certFile, + }); + return new Conn(res.rid, res.remoteAddr, res.localAddr); + } + + window.__bootstrap.tls = { + startTls, + listenTls, + connectTls, + TLSListener, + }; +})(this); diff --git a/ext/net/04_net_unstable.js b/ext/net/04_net_unstable.js new file mode 100644 index 000000000..ca265bfaa --- /dev/null +++ b/ext/net/04_net_unstable.js @@ -0,0 +1,49 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
+"use strict"; + +((window) => { + const net = window.__bootstrap.net; + + function listen(options) { + if (options.transport === "unix") { + const res = net.opListen(options); + return new net.Listener(res.rid, res.localAddr); + } else { + return net.listen(options); + } + } + + function listenDatagram( + options, + ) { + let res; + if (options.transport === "unixpacket") { + res = net.opListen(options); + } else { + res = net.opListen({ + transport: "udp", + hostname: "127.0.0.1", + ...options, + }); + } + + return new net.Datagram(res.rid, res.localAddr); + } + + async function connect( + options, + ) { + if (options.transport === "unix") { + const res = await net.opConnect(options); + return new net.Conn(res.rid, res.remoteAddr, res.localAddr); + } else { + return net.connect(options); + } + } + + window.__bootstrap.netUnstable = { + connect, + listenDatagram, + listen, + }; +})(this); diff --git a/ext/net/Cargo.toml b/ext/net/Cargo.toml new file mode 100644 index 000000000..09daf0e48 --- /dev/null +++ b/ext/net/Cargo.toml @@ -0,0 +1,25 @@ +# Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +[package] +name = "deno_net" +version = "0.5.0" +authors = ["the Deno authors"] +edition = "2018" +license = "MIT" +readme = "README.md" +repository = "https://github.com/denoland/deno" +description = "Networking for Deno" + +[lib] +path = "lib.rs" + +[dependencies] +deno_core = { version = "0.96.0", path = "../../core" } +deno_tls = { version = "0.1.0", path = "../tls" } + +lazy_static = "1.4.0" +log = "0.4.14" +serde = { version = "1.0.126", features = ["derive"] } +tokio = { version = "1.8.1", features = ["full"] } +trust-dns-proto = "0.20.3" +trust-dns-resolver = { version = "0.20.3", features = ["tokio-runtime", "serde-config"] } diff --git a/ext/net/README.md b/ext/net/README.md new file mode 100644 index 000000000..cdd8923e1 --- /dev/null +++ b/ext/net/README.md @@ -0,0 +1,30 @@ +# deno_net + +This crate implements networking APIs. + +This crate depends on following extensions: + +- "deno_web" +- "deno_fetch" + +Following ops are provided: + +- "op_net_read_async" +- "op_net_write_async" +- "op_net_shutdown" +- "op_accept" +- "op_connect" +- "op_listen" +- "op_datagram_receive" +- "op_datagram_send" +- "op_dns_resolve" +- "op_start_tls" +- "op_connect_tls" +- "op_listen_tls" +- "op_accept_tls" +- "op_http_start" +- "op_http_request_next" +- "op_http_request_read" +- "op_http_response" +- "op_http_response_write" +- "op_http_response_close" diff --git a/ext/net/io.rs b/ext/net/io.rs new file mode 100644 index 000000000..fc10d7e99 --- /dev/null +++ b/ext/net/io.rs @@ -0,0 +1,232 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
+ +use crate::ops_tls as tls; +use deno_core::error::null_opbuf; +use deno_core::error::AnyError; +use deno_core::error::{bad_resource_id, not_supported}; +use deno_core::op_async; +use deno_core::AsyncMutFuture; +use deno_core::AsyncRefCell; +use deno_core::CancelHandle; +use deno_core::CancelTryFuture; +use deno_core::OpPair; +use deno_core::OpState; +use deno_core::RcRef; +use deno_core::Resource; +use deno_core::ResourceId; +use deno_core::ZeroCopyBuf; +use std::borrow::Cow; +use std::cell::RefCell; +use std::rc::Rc; +use tokio::io::AsyncRead; +use tokio::io::AsyncReadExt; +use tokio::io::AsyncWrite; +use tokio::io::AsyncWriteExt; +use tokio::net::tcp; + +#[cfg(unix)] +use tokio::net::unix; + +pub fn init() -> Vec { + vec![ + ("op_net_read_async", op_async(op_read_async)), + ("op_net_write_async", op_async(op_write_async)), + ("op_net_shutdown", op_async(op_shutdown)), + ] +} + +/// A full duplex resource has a read and write ends that are completely +/// independent, like TCP/Unix sockets and TLS streams. +#[derive(Debug)] +pub struct FullDuplexResource { + rd: AsyncRefCell, + wr: AsyncRefCell, + // When a full-duplex resource is closed, all pending 'read' ops are + // canceled, while 'write' ops are allowed to complete. Therefore only + // 'read' futures should be attached to this cancel handle. + cancel_handle: CancelHandle, +} + +impl FullDuplexResource +where + R: AsyncRead + Unpin + 'static, + W: AsyncWrite + Unpin + 'static, +{ + pub fn new((rd, wr): (R, W)) -> Self { + Self { + rd: rd.into(), + wr: wr.into(), + cancel_handle: Default::default(), + } + } + + pub fn into_inner(self) -> (R, W) { + (self.rd.into_inner(), self.wr.into_inner()) + } + + pub fn rd_borrow_mut(self: &Rc) -> AsyncMutFuture { + RcRef::map(self, |r| &r.rd).borrow_mut() + } + + pub fn wr_borrow_mut(self: &Rc) -> AsyncMutFuture { + RcRef::map(self, |r| &r.wr).borrow_mut() + } + + pub fn cancel_handle(self: &Rc) -> RcRef { + RcRef::map(self, |r| &r.cancel_handle) + } + + pub fn cancel_read_ops(&self) { + self.cancel_handle.cancel() + } + + pub async fn read( + self: &Rc, + buf: &mut [u8], + ) -> Result { + let mut rd = self.rd_borrow_mut().await; + let nread = rd.read(buf).try_or_cancel(self.cancel_handle()).await?; + Ok(nread) + } + + pub async fn write(self: &Rc, buf: &[u8]) -> Result { + let mut wr = self.wr_borrow_mut().await; + let nwritten = wr.write(buf).await?; + Ok(nwritten) + } + + pub async fn shutdown(self: &Rc) -> Result<(), AnyError> { + let mut wr = self.wr_borrow_mut().await; + wr.shutdown().await?; + Ok(()) + } +} + +pub type TcpStreamResource = + FullDuplexResource; + +impl Resource for TcpStreamResource { + fn name(&self) -> Cow { + "tcpStream".into() + } + + fn close(self: Rc) { + self.cancel_read_ops(); + } +} + +pub type TlsStreamResource = FullDuplexResource; + +impl Resource for TlsStreamResource { + fn name(&self) -> Cow { + "tlsStream".into() + } + + fn close(self: Rc) { + self.cancel_read_ops(); + } +} + +#[cfg(unix)] +pub type UnixStreamResource = + FullDuplexResource; + +#[cfg(not(unix))] +pub struct UnixStreamResource; + +#[cfg(not(unix))] +impl UnixStreamResource { + pub async fn read( + self: &Rc, + _buf: &mut [u8], + ) -> Result { + unreachable!() + } + pub async fn write(self: &Rc, _buf: &[u8]) -> Result { + unreachable!() + } + pub async fn shutdown(self: &Rc) -> Result<(), AnyError> { + unreachable!() + } + pub fn cancel_read_ops(&self) { + unreachable!() + } +} + +impl Resource for UnixStreamResource { + fn name(&self) -> Cow { + "unixStream".into() + } + + fn close(self: 
Rc) { + self.cancel_read_ops(); + } +} + +async fn op_read_async( + state: Rc>, + rid: ResourceId, + buf: Option, +) -> Result { + let buf = &mut buf.ok_or_else(null_opbuf)?; + let resource = state + .borrow() + .resource_table + .get_any(rid) + .ok_or_else(bad_resource_id)?; + let nread = if let Some(s) = resource.downcast_rc::() { + s.read(buf).await? + } else if let Some(s) = resource.downcast_rc::() { + s.read(buf).await? + } else if let Some(s) = resource.downcast_rc::() { + s.read(buf).await? + } else { + return Err(not_supported()); + }; + Ok(nread as u32) +} + +async fn op_write_async( + state: Rc>, + rid: ResourceId, + buf: Option, +) -> Result { + let buf = &buf.ok_or_else(null_opbuf)?; + let resource = state + .borrow() + .resource_table + .get_any(rid) + .ok_or_else(bad_resource_id)?; + let nwritten = if let Some(s) = resource.downcast_rc::() { + s.write(buf).await? + } else if let Some(s) = resource.downcast_rc::() { + s.write(buf).await? + } else if let Some(s) = resource.downcast_rc::() { + s.write(buf).await? + } else { + return Err(not_supported()); + }; + Ok(nwritten as u32) +} + +async fn op_shutdown( + state: Rc>, + rid: ResourceId, + _: (), +) -> Result<(), AnyError> { + let resource = state + .borrow() + .resource_table + .get_any(rid) + .ok_or_else(bad_resource_id)?; + if let Some(s) = resource.downcast_rc::() { + s.shutdown().await?; + } else if let Some(s) = resource.downcast_rc::() { + s.shutdown().await?; + } else if let Some(s) = resource.downcast_rc::() { + s.shutdown().await?; + } else { + return Err(not_supported()); + } + Ok(()) +} diff --git a/ext/net/lib.deno_net.d.ts b/ext/net/lib.deno_net.d.ts new file mode 100644 index 000000000..d35e01e31 --- /dev/null +++ b/ext/net/lib.deno_net.d.ts @@ -0,0 +1,150 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +/// +/// + +declare namespace Deno { + export interface NetAddr { + transport: "tcp" | "udp"; + hostname: string; + port: number; + } + + export interface UnixAddr { + transport: "unix" | "unixpacket"; + path: string; + } + + export type Addr = NetAddr | UnixAddr; + + /** A generic network listener for stream-oriented protocols. */ + export interface Listener extends AsyncIterable { + /** Waits for and resolves to the next connection to the `Listener`. */ + accept(): Promise; + /** Close closes the listener. Any pending accept promises will be rejected + * with errors. */ + close(): void; + /** Return the address of the `Listener`. */ + readonly addr: Addr; + + /** Return the rid of the `Listener`. */ + readonly rid: number; + + [Symbol.asyncIterator](): AsyncIterableIterator; + } + + export interface Conn extends Reader, Writer, Closer { + /** The local address of the connection. */ + readonly localAddr: Addr; + /** The remote address of the connection. */ + readonly remoteAddr: Addr; + /** The resource ID of the connection. */ + readonly rid: number; + /** Shuts down (`shutdown(2)`) the write side of the connection. Most + * callers should just use `close()`. */ + closeWrite(): Promise; + } + + export interface ListenOptions { + /** The port to listen on. */ + port: number; + /** A literal IP address or host name that can be resolved to an IP address. + * If not specified, defaults to `0.0.0.0`. */ + hostname?: string; + } + + /** Listen announces on the local transport address. 
+ * + * ```ts + * const listener1 = Deno.listen({ port: 80 }) + * const listener2 = Deno.listen({ hostname: "192.0.2.1", port: 80 }) + * const listener3 = Deno.listen({ hostname: "[2001:db8::1]", port: 80 }); + * const listener4 = Deno.listen({ hostname: "golang.org", port: 80, transport: "tcp" }); + * ``` + * + * Requires `allow-net` permission. */ + export function listen( + options: ListenOptions & { transport?: "tcp" }, + ): Listener; + + export interface ListenTlsOptions extends ListenOptions { + /** Path to a file containing a PEM formatted CA certificate. Requires + * `--allow-read`. */ + certFile: string; + /** Server public key file. Requires `--allow-read`.*/ + keyFile: string; + + transport?: "tcp"; + } + + /** Listen announces on the local transport address over TLS (transport layer + * security). + * + * ```ts + * const lstnr = Deno.listenTls({ port: 443, certFile: "./server.crt", keyFile: "./server.key" }); + * ``` + * + * Requires `allow-net` permission. */ + export function listenTls(options: ListenTlsOptions): Listener; + + export interface ConnectOptions { + /** The port to connect to. */ + port: number; + /** A literal IP address or host name that can be resolved to an IP address. + * If not specified, defaults to `127.0.0.1`. */ + hostname?: string; + transport?: "tcp"; + } + + /** + * Connects to the hostname (default is "127.0.0.1") and port on the named + * transport (default is "tcp"), and resolves to the connection (`Conn`). + * + * ```ts + * const conn1 = await Deno.connect({ port: 80 }); + * const conn2 = await Deno.connect({ hostname: "192.0.2.1", port: 80 }); + * const conn3 = await Deno.connect({ hostname: "[2001:db8::1]", port: 80 }); + * const conn4 = await Deno.connect({ hostname: "golang.org", port: 80, transport: "tcp" }); + * ``` + * + * Requires `allow-net` permission for "tcp". */ + export function connect(options: ConnectOptions): Promise; + + export interface ConnectTlsOptions { + /** The port to connect to. */ + port: number; + /** A literal IP address or host name that can be resolved to an IP address. + * If not specified, defaults to `127.0.0.1`. */ + hostname?: string; + /** Server certificate file. */ + certFile?: string; + } + + /** Establishes a secure connection over TLS (transport layer security) using + * an optional cert file, hostname (default is "127.0.0.1") and port. The + * cert file is optional and if not included Mozilla's root certificates will + * be used (see also https://github.com/ctz/webpki-roots for specifics) + * + * ```ts + * const conn1 = await Deno.connectTls({ port: 80 }); + * const conn2 = await Deno.connectTls({ certFile: "./certs/my_custom_root_CA.pem", hostname: "192.0.2.1", port: 80 }); + * const conn3 = await Deno.connectTls({ hostname: "[2001:db8::1]", port: 80 }); + * const conn4 = await Deno.connectTls({ certFile: "./certs/my_custom_root_CA.pem", hostname: "golang.org", port: 80}); + * ``` + * + * Requires `allow-net` permission. + */ + export function connectTls(options: ConnectTlsOptions): Promise; + + /** Shutdown socket send operations. + * + * Matches behavior of POSIX shutdown(3). 
+ * + * ```ts + * const listener = Deno.listen({ port: 80 }); + * const conn = await listener.accept(); + * Deno.shutdown(conn.rid); + * ``` + */ + export function shutdown(rid: number): Promise; +} diff --git a/ext/net/lib.deno_net.unstable.d.ts b/ext/net/lib.deno_net.unstable.d.ts new file mode 100644 index 000000000..145f232c0 --- /dev/null +++ b/ext/net/lib.deno_net.unstable.d.ts @@ -0,0 +1,258 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +/// +/// + +declare namespace Deno { + /** The type of the resource record. + * Only the listed types are supported currently. */ + export type RecordType = + | "A" + | "AAAA" + | "ANAME" + | "CNAME" + | "MX" + | "PTR" + | "SRV" + | "TXT"; + + export interface ResolveDnsOptions { + /** The name server to be used for lookups. + * If not specified, defaults to the system configuration e.g. `/etc/resolv.conf` on Unix. */ + nameServer?: { + /** The IP address of the name server */ + ipAddr: string; + /** The port number the query will be sent to. + * If not specified, defaults to 53. */ + port?: number; + }; + } + + /** If `resolveDns` is called with "MX" record type specified, it will return an array of this interface. */ + export interface MXRecord { + preference: number; + exchange: string; + } + + /** If `resolveDns` is called with "SRV" record type specified, it will return an array of this interface. */ + export interface SRVRecord { + priority: number; + weight: number; + port: number; + target: string; + } + + export function resolveDns( + query: string, + recordType: "A" | "AAAA" | "ANAME" | "CNAME" | "PTR", + options?: ResolveDnsOptions, + ): Promise; + + export function resolveDns( + query: string, + recordType: "MX", + options?: ResolveDnsOptions, + ): Promise; + + export function resolveDns( + query: string, + recordType: "SRV", + options?: ResolveDnsOptions, + ): Promise; + + export function resolveDns( + query: string, + recordType: "TXT", + options?: ResolveDnsOptions, + ): Promise; + + /** ** UNSTABLE**: new API, yet to be vetted. +* +* Performs DNS resolution against the given query, returning resolved records. +* Fails in the cases such as: +* - the query is in invalid format +* - the options have an invalid parameter, e.g. `nameServer.port` is beyond the range of 16-bit unsigned integer +* - timed out +* +* ```ts +* const a = await Deno.resolveDns("example.com", "A"); +* +* const aaaa = await Deno.resolveDns("example.com", "AAAA", { +* nameServer: { ipAddr: "8.8.8.8", port: 1234 }, +* }); +* ``` +* +* Requires `allow-net` permission. + */ + export function resolveDns( + query: string, + recordType: RecordType, + options?: ResolveDnsOptions, + ): Promise; + + /** **UNSTABLE**: new API, yet to be vetted. +* +* A generic transport listener for message-oriented protocols. */ + export interface DatagramConn extends AsyncIterable<[Uint8Array, Addr]> { + /** **UNSTABLE**: new API, yet to be vetted. + * + * Waits for and resolves to the next message to the `UDPConn`. */ + receive(p?: Uint8Array): Promise<[Uint8Array, Addr]>; + /** UNSTABLE: new API, yet to be vetted. + * + * Sends a message to the target. */ + send(p: Uint8Array, addr: Addr): Promise; + /** UNSTABLE: new API, yet to be vetted. + * + * Close closes the socket. Any pending message promises will be rejected + * with errors. */ + close(): void; + /** Return the address of the `UDPConn`. 
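The overloads above give `resolveDns` a return type that depends on the record type. A hedged usage sketch (the domains are placeholders; at the time of this change the API is unstable, so it needs `--unstable` plus `--allow-net`):

```ts
// MX and SRV answers come back as structured records, per the interfaces above.
const mx = await Deno.resolveDns("example.com", "MX");
for (const { preference, exchange } of mx) {
  console.log(preference, exchange);
}
const srv = await Deno.resolveDns("_sip._tcp.example.com", "SRV");
console.log(srv[0]?.target, srv[0]?.port);
```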
*/ + readonly addr: Addr; + [Symbol.asyncIterator](): AsyncIterableIterator<[Uint8Array, Addr]>; + } + + export interface UnixListenOptions { + /** A Path to the Unix Socket. */ + path: string; + } + + /** **UNSTABLE**: new API, yet to be vetted. +* +* Listen announces on the local transport address. +* +* ```ts +* const listener = Deno.listen({ path: "/foo/bar.sock", transport: "unix" }) +* ``` +* +* Requires `allow-read` and `allow-write` permission. */ + export function listen( + options: UnixListenOptions & { transport: "unix" }, + ): Listener; + + /** **UNSTABLE**: new API, yet to be vetted +* +* Listen announces on the local transport address. +* +* ```ts +* const listener1 = Deno.listenDatagram({ +* port: 80, +* transport: "udp" +* }); +* const listener2 = Deno.listenDatagram({ +* hostname: "golang.org", +* port: 80, +* transport: "udp" +* }); +* ``` +* +* Requires `allow-net` permission. */ + export function listenDatagram( + options: ListenOptions & { transport: "udp" }, + ): DatagramConn; + + /** **UNSTABLE**: new API, yet to be vetted +* +* Listen announces on the local transport address. +* +* ```ts +* const listener = Deno.listenDatagram({ +* path: "/foo/bar.sock", +* transport: "unixpacket" +* }); +* ``` +* +* Requires `allow-read` and `allow-write` permission. */ + export function listenDatagram( + options: UnixListenOptions & { transport: "unixpacket" }, + ): DatagramConn; + + export interface UnixConnectOptions { + transport: "unix"; + path: string; + } + + /** **UNSTABLE**: The unix socket transport is unstable as a new API yet to +* be vetted. The TCP transport is considered stable. +* +* Connects to the hostname (default is "127.0.0.1") and port on the named +* transport (default is "tcp"), and resolves to the connection (`Conn`). +* +* ```ts +* const conn1 = await Deno.connect({ port: 80 }); +* const conn2 = await Deno.connect({ hostname: "192.0.2.1", port: 80 }); +* const conn3 = await Deno.connect({ hostname: "[2001:db8::1]", port: 80 }); +* const conn4 = await Deno.connect({ hostname: "golang.org", port: 80, transport: "tcp" }); +* const conn5 = await Deno.connect({ path: "/foo/bar.sock", transport: "unix" }); +* ``` +* +* Requires `allow-net` permission for "tcp" and `allow-read` for "unix". */ + export function connect( + options: ConnectOptions | UnixConnectOptions, + ): Promise; + + export interface ConnectTlsClientCertOptions { + /** PEM formatted client certificate chain. */ + certChain: string; + /** PEM formatted (RSA or PKCS8) private key of client certificate. */ + privateKey: string; + } + + /** **UNSTABLE** New API, yet to be vetted. + * + * Create a TLS connection with an attached client certificate. + * + * ```ts + * const conn = await Deno.connectTls({ + * hostname: "deno.land", + * port: 443, + * certChain: "---- BEGIN CERTIFICATE ----\n ...", + * privateKey: "---- BEGIN PRIVATE KEY ----\n ...", + * }); + * ``` + * + * Requires `allow-net` permission. + */ + export function connectTls( + options: ConnectTlsOptions & ConnectTlsClientCertOptions, + ): Promise; + + export interface StartTlsOptions { + /** A literal IP address or host name that can be resolved to an IP address. + * If not specified, defaults to `127.0.0.1`. */ + hostname?: string; + /** Server certificate file. */ + certFile?: string; + } + + /** **UNSTABLE**: new API, yet to be vetted. +* +* Start TLS handshake from an existing connection using +* an optional cert file, hostname (default is "127.0.0.1"). 
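A sketch of how the `DatagramConn` surface above can be exercised end to end; the ports are arbitrary and, this being unstable API, it needs `--unstable` plus `--allow-net`:

```ts
const a = Deno.listenDatagram({ hostname: "127.0.0.1", port: 10000, transport: "udp" });
const b = Deno.listenDatagram({ hostname: "127.0.0.1", port: 10001, transport: "udp" });

await b.send(new TextEncoder().encode("ping"), a.addr); // send to the other socket's address
const [data, remote] = await a.receive();               // [payload, sender address]
console.log(new TextDecoder().decode(data), remote);

a.close();
b.close();
```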
The +* cert file is optional and if not included Mozilla's root certificates will +* be used (see also https://github.com/ctz/webpki-roots for specifics) +* Using this function requires that the other end of the connection is +* prepared for TLS handshake. +* +* ```ts +* const conn = await Deno.connect({ port: 80, hostname: "127.0.0.1" }); +* const tlsConn = await Deno.startTls(conn, { certFile: "./certs/my_custom_root_CA.pem", hostname: "localhost" }); +* ``` +* +* Requires `allow-net` permission. + */ + export function startTls( + conn: Conn, + options?: StartTlsOptions, + ): Promise; + + export interface ListenTlsOptions { + /** **UNSTABLE**: new API, yet to be vetted. + * + * Application-Layer Protocol Negotiation (ALPN) protocols to announce to + * the client. If not specified, no ALPN extension will be included in the + * TLS handshake. + */ + alpnProtocols?: string[]; + } +} diff --git a/ext/net/lib.rs b/ext/net/lib.rs new file mode 100644 index 000000000..3764433e3 --- /dev/null +++ b/ext/net/lib.rs @@ -0,0 +1,131 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +pub mod io; +pub mod ops; +pub mod ops_tls; +#[cfg(unix)] +pub mod ops_unix; +pub mod resolve_addr; + +use deno_core::error::AnyError; +use deno_core::include_js_files; +use deno_core::Extension; +use deno_core::OpState; +use deno_tls::rustls::RootCertStore; +use std::cell::RefCell; +use std::path::Path; +use std::path::PathBuf; +use std::rc::Rc; + +pub trait NetPermissions { + fn check_net>( + &mut self, + _host: &(T, Option), + ) -> Result<(), AnyError>; + fn check_read(&mut self, _p: &Path) -> Result<(), AnyError>; + fn check_write(&mut self, _p: &Path) -> Result<(), AnyError>; +} + +/// For use with this crate when the user does not want permission checks. +pub struct NoNetPermissions; + +impl NetPermissions for NoNetPermissions { + fn check_net>( + &mut self, + _host: &(T, Option), + ) -> Result<(), AnyError> { + Ok(()) + } + + fn check_read(&mut self, _p: &Path) -> Result<(), AnyError> { + Ok(()) + } + + fn check_write(&mut self, _p: &Path) -> Result<(), AnyError> { + Ok(()) + } +} + +/// `UnstableChecker` is a struct so it can be placed inside `GothamState`; +/// using type alias for a bool could work, but there's a high chance +/// that there might be another type alias pointing to a bool, which +/// would override previously used alias. +pub struct UnstableChecker { + pub unstable: bool, +} + +impl UnstableChecker { + /// Quits the process if the --unstable flag was not provided. + /// + /// This is intentionally a non-recoverable check so that people cannot probe + /// for unstable APIs from stable programs. + // NOTE(bartlomieju): keep in sync with `cli/program_state.rs` + pub fn check_unstable(&self, api_name: &str) { + if !self.unstable { + eprintln!( + "Unstable API '{}'. The --unstable flag must be provided.", + api_name + ); + std::process::exit(70); + } + } +} +/// Helper for checking unstable features. Used for sync ops. +pub fn check_unstable(state: &OpState, api_name: &str) { + state.borrow::().check_unstable(api_name) +} + +/// Helper for checking unstable features. Used for async ops. 
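The unstable `alpnProtocols` field declared above feeds protocol names into the TLS handshake of a listener. A sketch of how it might be used (paths, port, and protocol list are placeholders; requires `--allow-net`, `--allow-read`, and `--unstable` for the ALPN option):

```ts
const tlsListener = Deno.listenTls({
  port: 8443,
  certFile: "./server.crt",
  keyFile: "./server.key",
  alpnProtocols: ["h2", "http/1.1"], // announced to clients during the handshake
});
for await (const conn of tlsListener) {
  // Handle each TLS connection; this sketch just closes it.
  conn.close();
}
```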
+pub fn check_unstable2(state: &Rc>, api_name: &str) { + let state = state.borrow(); + state.borrow::().check_unstable(api_name) +} + +pub fn get_declaration() -> PathBuf { + PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("lib.deno_net.d.ts") +} + +pub fn get_unstable_declaration() -> PathBuf { + PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("lib.deno_net.unstable.d.ts") +} + +#[derive(Clone)] +pub struct DefaultTlsOptions { + pub root_cert_store: Option, +} + +/// `UnsafelyIgnoreCertificateErrors` is a wrapper struct so it can be placed inside `GothamState`; +/// using type alias for a `Option>` could work, but there's a high chance +/// that there might be another type alias pointing to a `Option>`, which +/// would override previously used alias. +pub struct UnsafelyIgnoreCertificateErrors(Option>); + +pub fn init( + root_cert_store: Option, + unstable: bool, + unsafely_ignore_certificate_errors: Option>, +) -> Extension { + let mut ops_to_register = vec![]; + ops_to_register.extend(io::init()); + ops_to_register.extend(ops::init::

<P>()); + ops_to_register.extend(ops_tls::init::<P>
()); + Extension::builder() + .js(include_js_files!( + prefix "deno:ext/net", + "01_net.js", + "02_tls.js", + "04_net_unstable.js", + )) + .ops(ops_to_register) + .state(move |state| { + state.put(DefaultTlsOptions { + root_cert_store: root_cert_store.clone(), + }); + state.put(UnstableChecker { unstable }); + state.put(UnsafelyIgnoreCertificateErrors( + unsafely_ignore_certificate_errors.clone(), + )); + Ok(()) + }) + .build() +} diff --git a/ext/net/ops.rs b/ext/net/ops.rs new file mode 100644 index 000000000..a0fc2179e --- /dev/null +++ b/ext/net/ops.rs @@ -0,0 +1,795 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +use crate::io::TcpStreamResource; +use crate::resolve_addr::resolve_addr; +use crate::resolve_addr::resolve_addr_sync; +use crate::NetPermissions; +use deno_core::error::bad_resource; +use deno_core::error::custom_error; +use deno_core::error::generic_error; +use deno_core::error::null_opbuf; +use deno_core::error::type_error; +use deno_core::error::AnyError; +use deno_core::op_async; +use deno_core::op_sync; +use deno_core::AsyncRefCell; +use deno_core::CancelHandle; +use deno_core::CancelTryFuture; +use deno_core::OpPair; +use deno_core::OpState; +use deno_core::RcRef; +use deno_core::Resource; +use deno_core::ResourceId; +use deno_core::ZeroCopyBuf; +use log::debug; +use serde::Deserialize; +use serde::Serialize; +use std::borrow::Cow; +use std::cell::RefCell; +use std::net::SocketAddr; +use std::rc::Rc; +use tokio::net::TcpListener; +use tokio::net::TcpStream; +use tokio::net::UdpSocket; +use trust_dns_proto::rr::record_data::RData; +use trust_dns_proto::rr::record_type::RecordType; +use trust_dns_resolver::config::NameServerConfigGroup; +use trust_dns_resolver::config::ResolverConfig; +use trust_dns_resolver::config::ResolverOpts; +use trust_dns_resolver::system_conf; +use trust_dns_resolver::AsyncResolver; + +#[cfg(unix)] +use super::ops_unix as net_unix; +#[cfg(unix)] +use crate::io::UnixStreamResource; +#[cfg(unix)] +use std::path::Path; + +pub fn init() -> Vec { + vec![ + ("op_accept", op_async(op_accept)), + ("op_connect", op_async(op_connect::

<P>)), + ("op_listen", op_sync(op_listen::<P>)), + ("op_datagram_receive", op_async(op_datagram_receive)), + ("op_datagram_send", op_async(op_datagram_send::<P>)), + ("op_dns_resolve", op_async(op_dns_resolve::<P>
)), + ] +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +pub struct OpConn { + pub rid: ResourceId, + pub remote_addr: Option, + pub local_addr: Option, +} + +#[derive(Serialize)] +#[serde(tag = "transport", rename_all = "lowercase")] +pub enum OpAddr { + Tcp(IpAddr), + Udp(IpAddr), + #[cfg(unix)] + Unix(net_unix::UnixAddr), + #[cfg(unix)] + UnixPacket(net_unix::UnixAddr), +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +/// A received datagram packet (from udp or unixpacket) +pub struct OpPacket { + pub size: usize, + pub remote_addr: OpAddr, +} + +#[derive(Serialize)] +pub struct IpAddr { + pub hostname: String, + pub port: u16, +} + +#[derive(Deserialize)] +pub(crate) struct AcceptArgs { + pub rid: ResourceId, + pub transport: String, +} + +async fn accept_tcp( + state: Rc>, + args: AcceptArgs, + _: (), +) -> Result { + let rid = args.rid; + + let resource = state + .borrow() + .resource_table + .get::(rid) + .ok_or_else(|| bad_resource("Listener has been closed"))?; + let listener = RcRef::map(&resource, |r| &r.listener) + .try_borrow_mut() + .ok_or_else(|| custom_error("Busy", "Another accept task is ongoing"))?; + let cancel = RcRef::map(resource, |r| &r.cancel); + let (tcp_stream, _socket_addr) = + listener.accept().try_or_cancel(cancel).await.map_err(|e| { + // FIXME(bartlomieju): compatibility with current JS implementation + if let std::io::ErrorKind::Interrupted = e.kind() { + bad_resource("Listener has been closed") + } else { + e.into() + } + })?; + let local_addr = tcp_stream.local_addr()?; + let remote_addr = tcp_stream.peer_addr()?; + + let mut state = state.borrow_mut(); + let rid = state + .resource_table + .add(TcpStreamResource::new(tcp_stream.into_split())); + Ok(OpConn { + rid, + local_addr: Some(OpAddr::Tcp(IpAddr { + hostname: local_addr.ip().to_string(), + port: local_addr.port(), + })), + remote_addr: Some(OpAddr::Tcp(IpAddr { + hostname: remote_addr.ip().to_string(), + port: remote_addr.port(), + })), + }) +} + +async fn op_accept( + state: Rc>, + args: AcceptArgs, + _: (), +) -> Result { + match args.transport.as_str() { + "tcp" => accept_tcp(state, args, ()).await, + #[cfg(unix)] + "unix" => net_unix::accept_unix(state, args, ()).await, + other => Err(bad_transport(other)), + } +} + +fn bad_transport(transport: &str) -> AnyError { + generic_error(format!("Unsupported transport protocol {}", transport)) +} + +#[derive(Deserialize)] +pub(crate) struct ReceiveArgs { + pub rid: ResourceId, + pub transport: String, +} + +async fn receive_udp( + state: Rc>, + args: ReceiveArgs, + zero_copy: Option, +) -> Result { + let zero_copy = zero_copy.ok_or_else(null_opbuf)?; + let mut zero_copy = zero_copy.clone(); + + let rid = args.rid; + + let resource = state + .borrow_mut() + .resource_table + .get::(rid) + .ok_or_else(|| bad_resource("Socket has been closed"))?; + let socket = RcRef::map(&resource, |r| &r.socket).borrow().await; + let cancel_handle = RcRef::map(&resource, |r| &r.cancel); + let (size, remote_addr) = socket + .recv_from(&mut zero_copy) + .try_or_cancel(cancel_handle) + .await?; + Ok(OpPacket { + size, + remote_addr: OpAddr::Udp(IpAddr { + hostname: remote_addr.ip().to_string(), + port: remote_addr.port(), + }), + }) +} + +async fn op_datagram_receive( + state: Rc>, + args: ReceiveArgs, + zero_copy: Option, +) -> Result { + match args.transport.as_str() { + "udp" => receive_udp(state, args, zero_copy).await, + #[cfg(unix)] + "unixpacket" => net_unix::receive_unix_packet(state, args, zero_copy).await, + other => 
Err(bad_transport(other)), + } +} + +#[derive(Deserialize)] +struct SendArgs { + rid: ResourceId, + transport: String, + #[serde(flatten)] + transport_args: ArgsEnum, +} + +async fn op_datagram_send( + state: Rc>, + args: SendArgs, + zero_copy: Option, +) -> Result +where + NP: NetPermissions + 'static, +{ + let zero_copy = zero_copy.ok_or_else(null_opbuf)?; + let zero_copy = zero_copy.clone(); + + match args { + SendArgs { + rid, + transport, + transport_args: ArgsEnum::Ip(args), + } if transport == "udp" => { + { + let mut s = state.borrow_mut(); + s.borrow_mut::() + .check_net(&(&args.hostname, Some(args.port)))?; + } + let addr = resolve_addr(&args.hostname, args.port) + .await? + .next() + .ok_or_else(|| generic_error("No resolved address found"))?; + + let resource = state + .borrow_mut() + .resource_table + .get::(rid) + .ok_or_else(|| bad_resource("Socket has been closed"))?; + let socket = RcRef::map(&resource, |r| &r.socket).borrow().await; + let byte_length = socket.send_to(&zero_copy, &addr).await?; + Ok(byte_length) + } + #[cfg(unix)] + SendArgs { + rid, + transport, + transport_args: ArgsEnum::Unix(args), + } if transport == "unixpacket" => { + let address_path = Path::new(&args.path); + { + let mut s = state.borrow_mut(); + s.borrow_mut::().check_write(address_path)?; + } + let resource = state + .borrow() + .resource_table + .get::(rid) + .ok_or_else(|| { + custom_error("NotConnected", "Socket has been closed") + })?; + let socket = RcRef::map(&resource, |r| &r.socket) + .try_borrow_mut() + .ok_or_else(|| custom_error("Busy", "Socket already in use"))?; + let byte_length = socket.send_to(&zero_copy, address_path).await?; + Ok(byte_length) + } + _ => Err(type_error("Wrong argument format!")), + } +} + +#[derive(Deserialize)] +struct ConnectArgs { + transport: String, + #[serde(flatten)] + transport_args: ArgsEnum, +} + +async fn op_connect( + state: Rc>, + args: ConnectArgs, + _: (), +) -> Result +where + NP: NetPermissions + 'static, +{ + match args { + ConnectArgs { + transport, + transport_args: ArgsEnum::Ip(args), + } if transport == "tcp" => { + { + let mut state_ = state.borrow_mut(); + state_ + .borrow_mut::() + .check_net(&(&args.hostname, Some(args.port)))?; + } + let addr = resolve_addr(&args.hostname, args.port) + .await? 
+ .next() + .ok_or_else(|| generic_error("No resolved address found"))?; + let tcp_stream = TcpStream::connect(&addr).await?; + let local_addr = tcp_stream.local_addr()?; + let remote_addr = tcp_stream.peer_addr()?; + + let mut state_ = state.borrow_mut(); + let rid = state_ + .resource_table + .add(TcpStreamResource::new(tcp_stream.into_split())); + Ok(OpConn { + rid, + local_addr: Some(OpAddr::Tcp(IpAddr { + hostname: local_addr.ip().to_string(), + port: local_addr.port(), + })), + remote_addr: Some(OpAddr::Tcp(IpAddr { + hostname: remote_addr.ip().to_string(), + port: remote_addr.port(), + })), + }) + } + #[cfg(unix)] + ConnectArgs { + transport, + transport_args: ArgsEnum::Unix(args), + } if transport == "unix" => { + let address_path = Path::new(&args.path); + super::check_unstable2(&state, "Deno.connect"); + { + let mut state_ = state.borrow_mut(); + state_.borrow_mut::().check_read(address_path)?; + state_.borrow_mut::().check_write(address_path)?; + } + let path = args.path; + let unix_stream = net_unix::UnixStream::connect(Path::new(&path)).await?; + let local_addr = unix_stream.local_addr()?; + let remote_addr = unix_stream.peer_addr()?; + + let mut state_ = state.borrow_mut(); + let resource = UnixStreamResource::new(unix_stream.into_split()); + let rid = state_.resource_table.add(resource); + Ok(OpConn { + rid, + local_addr: Some(OpAddr::Unix(net_unix::UnixAddr { + path: local_addr.as_pathname().and_then(net_unix::pathstring), + })), + remote_addr: Some(OpAddr::Unix(net_unix::UnixAddr { + path: remote_addr.as_pathname().and_then(net_unix::pathstring), + })), + }) + } + _ => Err(type_error("Wrong argument format!")), + } +} + +pub struct TcpListenerResource { + pub listener: AsyncRefCell, + pub cancel: CancelHandle, +} + +impl Resource for TcpListenerResource { + fn name(&self) -> Cow { + "tcpListener".into() + } + + fn close(self: Rc) { + self.cancel.cancel(); + } +} + +struct UdpSocketResource { + socket: AsyncRefCell, + cancel: CancelHandle, +} + +impl Resource for UdpSocketResource { + fn name(&self) -> Cow { + "udpSocket".into() + } + + fn close(self: Rc) { + self.cancel.cancel() + } +} + +#[derive(Deserialize)] +struct IpListenArgs { + hostname: String, + port: u16, +} + +#[derive(Deserialize)] +#[serde(untagged)] +enum ArgsEnum { + Ip(IpListenArgs), + #[cfg(unix)] + Unix(net_unix::UnixListenArgs), +} + +#[derive(Deserialize)] +struct ListenArgs { + transport: String, + #[serde(flatten)] + transport_args: ArgsEnum, +} + +fn listen_tcp( + state: &mut OpState, + addr: SocketAddr, +) -> Result<(u32, SocketAddr), AnyError> { + let std_listener = std::net::TcpListener::bind(&addr)?; + std_listener.set_nonblocking(true)?; + let listener = TcpListener::from_std(std_listener)?; + let local_addr = listener.local_addr()?; + let listener_resource = TcpListenerResource { + listener: AsyncRefCell::new(listener), + cancel: Default::default(), + }; + let rid = state.resource_table.add(listener_resource); + + Ok((rid, local_addr)) +} + +fn listen_udp( + state: &mut OpState, + addr: SocketAddr, +) -> Result<(u32, SocketAddr), AnyError> { + let std_socket = std::net::UdpSocket::bind(&addr)?; + std_socket.set_nonblocking(true)?; + let socket = UdpSocket::from_std(std_socket)?; + let local_addr = socket.local_addr()?; + let socket_resource = UdpSocketResource { + socket: AsyncRefCell::new(socket), + cancel: Default::default(), + }; + let rid = state.resource_table.add(socket_resource); + + Ok((rid, local_addr)) +} + +fn op_listen( + state: &mut OpState, + args: ListenArgs, + _: (), +) -> 
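On the JavaScript side, the `OpConn` value built by `op_connect` and `op_listen` above is what backs `Conn.localAddr` / `Conn.remoteAddr` and `Listener.addr`. A small sketch (port 4500 is arbitrary; requires `--allow-net`):

```ts
const listener = Deno.listen({ hostname: "127.0.0.1", port: 4500 });
console.log(listener.addr); // { transport: "tcp", hostname: "127.0.0.1", port: 4500 }

const pending = Deno.connect({ hostname: "127.0.0.1", port: 4500 });
const serverSide = await listener.accept();
const clientSide = await pending;

console.log(serverSide.remoteAddr); // the client's ephemeral address
console.log(clientSide.localAddr);  // the same address, seen from the client

clientSide.close();
serverSide.close();
listener.close();
```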
Result +where + NP: NetPermissions + 'static, +{ + match args { + ListenArgs { + transport, + transport_args: ArgsEnum::Ip(args), + } => { + { + if transport == "udp" { + super::check_unstable(state, "Deno.listenDatagram"); + } + state + .borrow_mut::() + .check_net(&(&args.hostname, Some(args.port)))?; + } + let addr = resolve_addr_sync(&args.hostname, args.port)? + .next() + .ok_or_else(|| generic_error("No resolved address found"))?; + let (rid, local_addr) = if transport == "tcp" { + listen_tcp(state, addr)? + } else { + listen_udp(state, addr)? + }; + debug!( + "New listener {} {}:{}", + rid, + local_addr.ip().to_string(), + local_addr.port() + ); + let ip_addr = IpAddr { + hostname: local_addr.ip().to_string(), + port: local_addr.port(), + }; + Ok(OpConn { + rid, + local_addr: Some(match transport.as_str() { + "udp" => OpAddr::Udp(ip_addr), + "tcp" => OpAddr::Tcp(ip_addr), + // NOTE: This could be unreachable!() + other => return Err(bad_transport(other)), + }), + remote_addr: None, + }) + } + #[cfg(unix)] + ListenArgs { + transport, + transport_args: ArgsEnum::Unix(args), + } if transport == "unix" || transport == "unixpacket" => { + let address_path = Path::new(&args.path); + { + if transport == "unix" { + super::check_unstable(state, "Deno.listen"); + } + if transport == "unixpacket" { + super::check_unstable(state, "Deno.listenDatagram"); + } + let permissions = state.borrow_mut::(); + permissions.check_read(address_path)?; + permissions.check_write(address_path)?; + } + let (rid, local_addr) = if transport == "unix" { + net_unix::listen_unix(state, address_path)? + } else { + net_unix::listen_unix_packet(state, address_path)? + }; + debug!( + "New listener {} {}", + rid, + local_addr.as_pathname().unwrap().display(), + ); + let unix_addr = net_unix::UnixAddr { + path: local_addr.as_pathname().and_then(net_unix::pathstring), + }; + + Ok(OpConn { + rid, + local_addr: Some(match transport.as_str() { + "unix" => OpAddr::Unix(unix_addr), + "unixpacket" => OpAddr::UnixPacket(unix_addr), + other => return Err(bad_transport(other)), + }), + remote_addr: None, + }) + } + #[cfg(unix)] + _ => Err(type_error("Wrong argument format!")), + } +} + +#[derive(Serialize, PartialEq, Debug)] +#[serde(untagged)] +enum DnsReturnRecord { + A(String), + Aaaa(String), + Aname(String), + Cname(String), + Mx { + preference: u16, + exchange: String, + }, + Ptr(String), + Srv { + priority: u16, + weight: u16, + port: u16, + target: String, + }, + Txt(Vec), +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ResolveAddrArgs { + query: String, + record_type: RecordType, + options: Option, +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ResolveDnsOption { + name_server: Option, +} + +fn default_port() -> u16 { + 53 +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct NameServer { + ip_addr: String, + #[serde(default = "default_port")] + port: u16, +} + +async fn op_dns_resolve( + state: Rc>, + args: ResolveAddrArgs, + _: (), +) -> Result, AnyError> +where + NP: NetPermissions + 'static, +{ + let ResolveAddrArgs { + query, + record_type, + options, + } = args; + + let (config, opts) = if let Some(name_server) = + options.as_ref().and_then(|o| o.name_server.as_ref()) + { + let group = NameServerConfigGroup::from_ips_clear( + &[name_server.ip_addr.parse()?], + name_server.port, + true, + ); + ( + ResolverConfig::from_parts(None, vec![], group), + ResolverOpts::default(), + ) + } else { + system_conf::read_system_conf()? 
+ }; + + { + let mut s = state.borrow_mut(); + let perm = s.borrow_mut::(); + + // Checks permission against the name servers which will be actually queried. + for ns in config.name_servers() { + let socker_addr = &ns.socket_addr; + let ip = socker_addr.ip().to_string(); + let port = socker_addr.port(); + perm.check_net(&(ip, Some(port)))?; + } + } + + let resolver = AsyncResolver::tokio(config, opts)?; + + let results = resolver + .lookup(query, record_type, Default::default()) + .await + .map_err(|e| generic_error(format!("{}", e)))? + .iter() + .filter_map(rdata_to_return_record(record_type)) + .collect(); + + Ok(results) +} + +fn rdata_to_return_record( + ty: RecordType, +) -> impl Fn(&RData) -> Option { + use RecordType::*; + move |r: &RData| -> Option { + match ty { + A => r.as_a().map(ToString::to_string).map(DnsReturnRecord::A), + AAAA => r + .as_aaaa() + .map(ToString::to_string) + .map(DnsReturnRecord::Aaaa), + ANAME => r + .as_aname() + .map(ToString::to_string) + .map(DnsReturnRecord::Aname), + CNAME => r + .as_cname() + .map(ToString::to_string) + .map(DnsReturnRecord::Cname), + MX => r.as_mx().map(|mx| DnsReturnRecord::Mx { + preference: mx.preference(), + exchange: mx.exchange().to_string(), + }), + PTR => r + .as_ptr() + .map(ToString::to_string) + .map(DnsReturnRecord::Ptr), + SRV => r.as_srv().map(|srv| DnsReturnRecord::Srv { + priority: srv.priority(), + weight: srv.weight(), + port: srv.port(), + target: srv.target().to_string(), + }), + TXT => r.as_txt().map(|txt| { + let texts: Vec = txt + .iter() + .map(|bytes| { + // Tries to parse these bytes as Latin-1 + bytes.iter().map(|&b| b as char).collect::() + }) + .collect(); + DnsReturnRecord::Txt(texts) + }), + // TODO(magurotuna): Other record types are not supported + _ => todo!(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::net::Ipv4Addr; + use std::net::Ipv6Addr; + use trust_dns_proto::rr::rdata::mx::MX; + use trust_dns_proto::rr::rdata::srv::SRV; + use trust_dns_proto::rr::rdata::txt::TXT; + use trust_dns_proto::rr::record_data::RData; + use trust_dns_proto::rr::Name; + + #[test] + fn rdata_to_return_record_a() { + let func = rdata_to_return_record(RecordType::A); + let rdata = RData::A(Ipv4Addr::new(127, 0, 0, 1)); + assert_eq!( + func(&rdata), + Some(DnsReturnRecord::A("127.0.0.1".to_string())) + ); + } + + #[test] + fn rdata_to_return_record_aaaa() { + let func = rdata_to_return_record(RecordType::AAAA); + let rdata = RData::AAAA(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)); + assert_eq!(func(&rdata), Some(DnsReturnRecord::Aaaa("::1".to_string()))); + } + + #[test] + fn rdata_to_return_record_aname() { + let func = rdata_to_return_record(RecordType::ANAME); + let rdata = RData::ANAME(Name::new()); + assert_eq!(func(&rdata), Some(DnsReturnRecord::Aname("".to_string()))); + } + + #[test] + fn rdata_to_return_record_cname() { + let func = rdata_to_return_record(RecordType::CNAME); + let rdata = RData::CNAME(Name::new()); + assert_eq!(func(&rdata), Some(DnsReturnRecord::Cname("".to_string()))); + } + + #[test] + fn rdata_to_return_record_mx() { + let func = rdata_to_return_record(RecordType::MX); + let rdata = RData::MX(MX::new(10, Name::new())); + assert_eq!( + func(&rdata), + Some(DnsReturnRecord::Mx { + preference: 10, + exchange: "".to_string() + }) + ); + } + + #[test] + fn rdata_to_return_record_ptr() { + let func = rdata_to_return_record(RecordType::PTR); + let rdata = RData::PTR(Name::new()); + assert_eq!(func(&rdata), Some(DnsReturnRecord::Ptr("".to_string()))); + } + + #[test] + fn 
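One consequence of the byte-to-char conversion in `rdata_to_return_record` above: TXT character-strings are decoded byte by byte as if they were Latin-1, so UTF-8 payloads are not reinterpreted (the unit test at the end of this file shows `0xe3 0x81 0x82` surviving as three separate chars). A hedged sketch of what that looks like from JavaScript; the domain and name server are placeholders:

```ts
const txt = await Deno.resolveDns("example.com", "TXT", {
  nameServer: { ipAddr: "127.0.0.1", port: 8053 }, // hypothetical local resolver
});
for (const record of txt) {
  // Each answer is an array of character-strings; join to get the full record.
  console.log(record.join(""));
}
```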
rdata_to_return_record_srv() { + let func = rdata_to_return_record(RecordType::SRV); + let rdata = RData::SRV(SRV::new(1, 2, 3, Name::new())); + assert_eq!( + func(&rdata), + Some(DnsReturnRecord::Srv { + priority: 1, + weight: 2, + port: 3, + target: "".to_string() + }) + ); + } + + #[test] + fn rdata_to_return_record_txt() { + let func = rdata_to_return_record(RecordType::TXT); + let rdata = RData::TXT(TXT::from_bytes(vec![ + "foo".as_bytes(), + "bar".as_bytes(), + &[0xa3], // "£" in Latin-1 + &[0xe3, 0x81, 0x82], // "あ" in UTF-8 + ])); + assert_eq!( + func(&rdata), + Some(DnsReturnRecord::Txt(vec![ + "foo".to_string(), + "bar".to_string(), + "£".to_string(), + "ã\u{81}\u{82}".to_string(), + ])) + ); + } +} diff --git a/ext/net/ops_tls.rs b/ext/net/ops_tls.rs new file mode 100644 index 000000000..14a135d7d --- /dev/null +++ b/ext/net/ops_tls.rs @@ -0,0 +1,1061 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +use crate::io::TcpStreamResource; +use crate::io::TlsStreamResource; +use crate::ops::IpAddr; +use crate::ops::OpAddr; +use crate::ops::OpConn; +use crate::resolve_addr::resolve_addr; +use crate::resolve_addr::resolve_addr_sync; +use crate::DefaultTlsOptions; +use crate::NetPermissions; +use crate::UnsafelyIgnoreCertificateErrors; +use deno_core::error::bad_resource; +use deno_core::error::bad_resource_id; +use deno_core::error::custom_error; +use deno_core::error::generic_error; +use deno_core::error::invalid_hostname; +use deno_core::error::type_error; +use deno_core::error::AnyError; +use deno_core::futures::future::poll_fn; +use deno_core::futures::ready; +use deno_core::futures::task::noop_waker_ref; +use deno_core::futures::task::AtomicWaker; +use deno_core::futures::task::Context; +use deno_core::futures::task::Poll; +use deno_core::futures::task::RawWaker; +use deno_core::futures::task::RawWakerVTable; +use deno_core::futures::task::Waker; +use deno_core::op_async; +use deno_core::op_sync; +use deno_core::parking_lot::Mutex; +use deno_core::AsyncRefCell; +use deno_core::CancelHandle; +use deno_core::CancelTryFuture; +use deno_core::OpPair; +use deno_core::OpState; +use deno_core::RcRef; +use deno_core::Resource; +use deno_core::ResourceId; +use deno_tls::create_client_config; +use deno_tls::rustls::internal::pemfile::certs; +use deno_tls::rustls::internal::pemfile::pkcs8_private_keys; +use deno_tls::rustls::internal::pemfile::rsa_private_keys; +use deno_tls::rustls::Certificate; +use deno_tls::rustls::ClientConfig; +use deno_tls::rustls::ClientSession; +use deno_tls::rustls::NoClientAuth; +use deno_tls::rustls::PrivateKey; +use deno_tls::rustls::ServerConfig; +use deno_tls::rustls::ServerSession; +use deno_tls::rustls::Session; +use deno_tls::webpki::DNSNameRef; +use io::Error; +use io::Read; +use io::Write; +use serde::Deserialize; +use std::borrow::Cow; +use std::cell::RefCell; +use std::convert::From; +use std::fs::File; +use std::io; +use std::io::BufRead; +use std::io::BufReader; +use std::io::ErrorKind; +use std::ops::Deref; +use std::ops::DerefMut; +use std::path::Path; +use std::pin::Pin; +use std::rc::Rc; +use std::sync::Arc; +use std::sync::Weak; +use tokio::io::AsyncRead; +use tokio::io::AsyncWrite; +use tokio::io::ReadBuf; +use tokio::net::TcpListener; +use tokio::net::TcpStream; +use tokio::task::spawn_local; + +#[derive(Debug)] +enum TlsSession { + Client(ClientSession), + Server(ServerSession), +} + +impl Deref for TlsSession { + type Target = dyn Session; + + fn deref(&self) -> &Self::Target { + match self { + 
TlsSession::Client(client_session) => client_session, + TlsSession::Server(server_session) => server_session, + } + } +} + +impl DerefMut for TlsSession { + fn deref_mut(&mut self) -> &mut Self::Target { + match self { + TlsSession::Client(client_session) => client_session, + TlsSession::Server(server_session) => server_session, + } + } +} + +impl From for TlsSession { + fn from(client_session: ClientSession) -> Self { + TlsSession::Client(client_session) + } +} + +impl From for TlsSession { + fn from(server_session: ServerSession) -> Self { + TlsSession::Server(server_session) + } +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +enum Flow { + Read, + Write, +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +enum State { + StreamOpen, + StreamClosed, + TlsClosing, + TlsClosed, + TcpClosed, +} + +#[derive(Debug)] +pub struct TlsStream(Option); + +impl TlsStream { + fn new(tcp: TcpStream, tls: TlsSession) -> Self { + let inner = TlsStreamInner { + tcp, + tls, + rd_state: State::StreamOpen, + wr_state: State::StreamOpen, + }; + Self(Some(inner)) + } + + pub fn new_client_side( + tcp: TcpStream, + tls_config: &Arc, + hostname: DNSNameRef, + ) -> Self { + let tls = TlsSession::Client(ClientSession::new(tls_config, hostname)); + Self::new(tcp, tls) + } + + pub fn new_server_side( + tcp: TcpStream, + tls_config: &Arc, + ) -> Self { + let tls = TlsSession::Server(ServerSession::new(tls_config)); + Self::new(tcp, tls) + } + + pub async fn handshake(&mut self) -> io::Result<()> { + poll_fn(|cx| self.inner_mut().poll_io(cx, Flow::Write)).await + } + + fn into_split(self) -> (ReadHalf, WriteHalf) { + let shared = Shared::new(self); + let rd = ReadHalf { + shared: shared.clone(), + }; + let wr = WriteHalf { shared }; + (rd, wr) + } + + /// Tokio-rustls compatibility: returns a reference to the underlying TCP + /// stream, and a reference to the Rustls `Session` object. + pub fn get_ref(&self) -> (&TcpStream, &dyn Session) { + let inner = self.0.as_ref().unwrap(); + (&inner.tcp, &*inner.tls) + } + + fn inner_mut(&mut self) -> &mut TlsStreamInner { + self.0.as_mut().unwrap() + } +} + +impl AsyncRead for TlsStream { + fn poll_read( + mut self: Pin<&mut Self>, + cx: &mut Context<'_>, + buf: &mut ReadBuf<'_>, + ) -> Poll> { + self.inner_mut().poll_read(cx, buf) + } +} + +impl AsyncWrite for TlsStream { + fn poll_write( + mut self: Pin<&mut Self>, + cx: &mut Context<'_>, + buf: &[u8], + ) -> Poll> { + self.inner_mut().poll_write(cx, buf) + } + + fn poll_flush( + mut self: Pin<&mut Self>, + cx: &mut Context<'_>, + ) -> Poll> { + self.inner_mut().poll_io(cx, Flow::Write) + // The underlying TCP stream does not need to be flushed. + } + + fn poll_shutdown( + mut self: Pin<&mut Self>, + cx: &mut Context<'_>, + ) -> Poll> { + self.inner_mut().poll_shutdown(cx) + } +} + +impl Drop for TlsStream { + fn drop(&mut self) { + let mut inner = self.0.take().unwrap(); + + let mut cx = Context::from_waker(noop_waker_ref()); + let use_linger_task = inner.poll_close(&mut cx).is_pending(); + + if use_linger_task { + spawn_local(poll_fn(move |cx| inner.poll_close(cx))); + } else if cfg!(debug_assertions) { + spawn_local(async {}); // Spawn dummy task to detect missing LocalSet. 
+ } + } +} + +#[derive(Debug)] +pub struct TlsStreamInner { + tls: TlsSession, + tcp: TcpStream, + rd_state: State, + wr_state: State, +} + +impl TlsStreamInner { + fn poll_io( + &mut self, + cx: &mut Context<'_>, + flow: Flow, + ) -> Poll> { + loop { + let wr_ready = loop { + match self.wr_state { + _ if self.tls.is_handshaking() && !self.tls.wants_write() => { + break true; + } + _ if self.tls.is_handshaking() => {} + State::StreamOpen if !self.tls.wants_write() => break true, + State::StreamClosed => { + // Rustls will enqueue the 'CloseNotify' alert and send it after + // flusing the data that is already in the queue. + self.tls.send_close_notify(); + self.wr_state = State::TlsClosing; + continue; + } + State::TlsClosing if !self.tls.wants_write() => { + self.wr_state = State::TlsClosed; + continue; + } + // If a 'CloseNotify' alert sent by the remote end has been received, + // shut down the underlying TCP socket. Otherwise, consider polling + // done for the moment. + State::TlsClosed if self.rd_state < State::TlsClosed => break true, + State::TlsClosed + if Pin::new(&mut self.tcp).poll_shutdown(cx)?.is_pending() => + { + break false; + } + State::TlsClosed => { + self.wr_state = State::TcpClosed; + continue; + } + State::TcpClosed => break true, + _ => {} + } + + // Poll whether there is space in the socket send buffer so we can flush + // the remaining outgoing ciphertext. + if self.tcp.poll_write_ready(cx)?.is_pending() { + break false; + } + + // Write ciphertext to the TCP socket. + let mut wrapped_tcp = ImplementWriteTrait(&mut self.tcp); + match self.tls.write_tls(&mut wrapped_tcp) { + Ok(0) => unreachable!(), + Ok(_) => {} + Err(err) if err.kind() == ErrorKind::WouldBlock => {} + Err(err) => return Poll::Ready(Err(err)), + } + }; + + let rd_ready = loop { + match self.rd_state { + State::TcpClosed if self.tls.is_handshaking() => { + let err = Error::new(ErrorKind::UnexpectedEof, "tls handshake eof"); + return Poll::Ready(Err(err)); + } + _ if self.tls.is_handshaking() && !self.tls.wants_read() => { + break true; + } + _ if self.tls.is_handshaking() => {} + State::StreamOpen if !self.tls.wants_read() => break true, + State::StreamOpen => {} + State::StreamClosed if !self.tls.wants_read() => { + // Rustls has more incoming cleartext buffered up, but the TLS + // session is closing so this data will never be processed by the + // application layer. Just like what would happen if this were a raw + // TCP stream, don't gracefully end the TLS session, but abort it. + return Poll::Ready(Err(Error::from(ErrorKind::ConnectionReset))); + } + State::StreamClosed => {} + State::TlsClosed if self.wr_state == State::TcpClosed => { + // Wait for the remote end to gracefully close the TCP connection. + // TODO(piscisaureus): this is unnecessary; remove when stable. + } + _ => break true, + } + + if self.rd_state < State::TlsClosed { + // Do a zero-length plaintext read so we can detect the arrival of + // 'CloseNotify' messages, even if only the write half is open. + // Actually reading data from the socket is done in `poll_read()`. + match self.tls.read(&mut []) { + Ok(0) => {} + Err(err) if err.kind() == ErrorKind::ConnectionAborted => { + // `Session::read()` returns `ConnectionAborted` when a + // 'CloseNotify' alert has been received, which indicates that + // the remote peer wants to gracefully end the TLS session. 
+ self.rd_state = State::TlsClosed; + continue; + } + Err(err) => return Poll::Ready(Err(err)), + _ => unreachable!(), + } + } + + // Poll whether more ciphertext is available in the socket receive + // buffer. + if self.tcp.poll_read_ready(cx)?.is_pending() { + break false; + } + + // Receive ciphertext from the socket. + let mut wrapped_tcp = ImplementReadTrait(&mut self.tcp); + match self.tls.read_tls(&mut wrapped_tcp) { + Ok(0) => self.rd_state = State::TcpClosed, + Ok(_) => self + .tls + .process_new_packets() + .map_err(|err| Error::new(ErrorKind::InvalidData, err))?, + Err(err) if err.kind() == ErrorKind::WouldBlock => {} + Err(err) => return Poll::Ready(Err(err)), + } + }; + + if wr_ready { + if self.rd_state >= State::TlsClosed + && self.wr_state >= State::TlsClosed + && self.wr_state < State::TcpClosed + { + continue; + } + if self.tls.wants_write() { + continue; + } + } + + let io_ready = match flow { + _ if self.tls.is_handshaking() => false, + Flow::Read => rd_ready, + Flow::Write => wr_ready, + }; + return match io_ready { + false => Poll::Pending, + true => Poll::Ready(Ok(())), + }; + } + } + + fn poll_read( + &mut self, + cx: &mut Context<'_>, + buf: &mut ReadBuf<'_>, + ) -> Poll> { + ready!(self.poll_io(cx, Flow::Read))?; + + if self.rd_state == State::StreamOpen { + let buf_slice = + unsafe { &mut *(buf.unfilled_mut() as *mut [_] as *mut [u8]) }; + let bytes_read = self.tls.read(buf_slice)?; + assert_ne!(bytes_read, 0); + unsafe { buf.assume_init(bytes_read) }; + buf.advance(bytes_read); + } + + Poll::Ready(Ok(())) + } + + fn poll_write( + &mut self, + cx: &mut Context<'_>, + buf: &[u8], + ) -> Poll> { + if buf.is_empty() { + // Tokio-rustls compatibility: a zero byte write always succeeds. + Poll::Ready(Ok(0)) + } else if self.wr_state == State::StreamOpen { + // Flush Rustls' ciphertext send queue. + ready!(self.poll_io(cx, Flow::Write))?; + + // Copy data from `buf` to the Rustls cleartext send queue. + let bytes_written = self.tls.write(buf)?; + assert_ne!(bytes_written, 0); + + // Try to flush as much ciphertext as possible. However, since we just + // handed off at least some bytes to rustls, so we can't return + // `Poll::Pending()` any more: this would tell the caller that it should + // try to send those bytes again. + let _ = self.poll_io(cx, Flow::Write)?; + + Poll::Ready(Ok(bytes_written)) + } else { + // Return error if stream has been shut down for writing. + Poll::Ready(Err(ErrorKind::BrokenPipe.into())) + } + } + + fn poll_shutdown(&mut self, cx: &mut Context<'_>) -> Poll> { + if self.wr_state == State::StreamOpen { + self.wr_state = State::StreamClosed; + } + + ready!(self.poll_io(cx, Flow::Write))?; + + // At minimum, a TLS 'CloseNotify' alert should have been sent. + assert!(self.wr_state >= State::TlsClosed); + // If we received a TLS 'CloseNotify' alert from the remote end + // already, the TCP socket should be shut down at this point. + assert!( + self.rd_state < State::TlsClosed || self.wr_state == State::TcpClosed + ); + + Poll::Ready(Ok(())) + } + + fn poll_close(&mut self, cx: &mut Context<'_>) -> Poll> { + if self.rd_state == State::StreamOpen { + self.rd_state = State::StreamClosed; + } + + // Send TLS 'CloseNotify' alert. + ready!(self.poll_shutdown(cx))?; + // Wait for 'CloseNotify', shut down TCP stream, wait for TCP FIN packet. 
+ ready!(self.poll_io(cx, Flow::Read))?; + + assert_eq!(self.rd_state, State::TcpClosed); + assert_eq!(self.wr_state, State::TcpClosed); + + Poll::Ready(Ok(())) + } +} + +#[derive(Debug)] +pub struct ReadHalf { + shared: Arc, +} + +impl ReadHalf { + pub fn reunite(self, wr: WriteHalf) -> TlsStream { + assert!(Arc::ptr_eq(&self.shared, &wr.shared)); + drop(wr); // Drop `wr`, so only one strong reference to `shared` remains. + + Arc::try_unwrap(self.shared) + .unwrap_or_else(|_| panic!("Arc::::try_unwrap() failed")) + .tls_stream + .into_inner() + } +} + +impl AsyncRead for ReadHalf { + fn poll_read( + self: Pin<&mut Self>, + cx: &mut Context<'_>, + buf: &mut ReadBuf<'_>, + ) -> Poll> { + self + .shared + .poll_with_shared_waker(cx, Flow::Read, move |tls, cx| { + tls.poll_read(cx, buf) + }) + } +} + +#[derive(Debug)] +pub struct WriteHalf { + shared: Arc, +} + +impl AsyncWrite for WriteHalf { + fn poll_write( + self: Pin<&mut Self>, + cx: &mut Context<'_>, + buf: &[u8], + ) -> Poll> { + self + .shared + .poll_with_shared_waker(cx, Flow::Write, move |tls, cx| { + tls.poll_write(cx, buf) + }) + } + + fn poll_flush( + self: Pin<&mut Self>, + cx: &mut Context<'_>, + ) -> Poll> { + self + .shared + .poll_with_shared_waker(cx, Flow::Write, |tls, cx| tls.poll_flush(cx)) + } + + fn poll_shutdown( + self: Pin<&mut Self>, + cx: &mut Context<'_>, + ) -> Poll> { + self + .shared + .poll_with_shared_waker(cx, Flow::Write, |tls, cx| tls.poll_shutdown(cx)) + } +} + +#[derive(Debug)] +struct Shared { + tls_stream: Mutex, + rd_waker: AtomicWaker, + wr_waker: AtomicWaker, +} + +impl Shared { + fn new(tls_stream: TlsStream) -> Arc { + let self_ = Self { + tls_stream: Mutex::new(tls_stream), + rd_waker: AtomicWaker::new(), + wr_waker: AtomicWaker::new(), + }; + Arc::new(self_) + } + + fn poll_with_shared_waker( + self: &Arc, + cx: &mut Context<'_>, + flow: Flow, + mut f: impl FnMut(Pin<&mut TlsStream>, &mut Context<'_>) -> R, + ) -> R { + match flow { + Flow::Read => self.rd_waker.register(cx.waker()), + Flow::Write => self.wr_waker.register(cx.waker()), + } + + let shared_waker = self.new_shared_waker(); + let mut cx = Context::from_waker(&shared_waker); + + let mut tls_stream = self.tls_stream.lock(); + f(Pin::new(&mut tls_stream), &mut cx) + } + + const SHARED_WAKER_VTABLE: RawWakerVTable = RawWakerVTable::new( + Self::clone_shared_waker, + Self::wake_shared_waker, + Self::wake_shared_waker_by_ref, + Self::drop_shared_waker, + ); + + fn new_shared_waker(self: &Arc) -> Waker { + let self_weak = Arc::downgrade(self); + let self_ptr = self_weak.into_raw() as *const (); + let raw_waker = RawWaker::new(self_ptr, &Self::SHARED_WAKER_VTABLE); + unsafe { Waker::from_raw(raw_waker) } + } + + fn clone_shared_waker(self_ptr: *const ()) -> RawWaker { + let self_weak = unsafe { Weak::from_raw(self_ptr as *const Self) }; + let ptr1 = self_weak.clone().into_raw(); + let ptr2 = self_weak.into_raw(); + assert!(ptr1 == ptr2); + RawWaker::new(self_ptr, &Self::SHARED_WAKER_VTABLE) + } + + fn wake_shared_waker(self_ptr: *const ()) { + Self::wake_shared_waker_by_ref(self_ptr); + Self::drop_shared_waker(self_ptr); + } + + fn wake_shared_waker_by_ref(self_ptr: *const ()) { + let self_weak = unsafe { Weak::from_raw(self_ptr as *const Self) }; + if let Some(self_arc) = Weak::upgrade(&self_weak) { + self_arc.rd_waker.wake(); + self_arc.wr_waker.wake(); + } + self_weak.into_raw(); + } + + fn drop_shared_waker(self_ptr: *const ()) { + let _ = unsafe { Weak::from_raw(self_ptr as *const Self) }; + } +} + +struct ImplementReadTrait<'a, 
T>(&'a mut T); + +impl Read for ImplementReadTrait<'_, TcpStream> { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + self.0.try_read(buf) + } +} + +struct ImplementWriteTrait<'a, T>(&'a mut T); + +impl Write for ImplementWriteTrait<'_, TcpStream> { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.0.try_write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + Ok(()) + } +} + +pub fn init() -> Vec { + vec![ + ("op_start_tls", op_async(op_start_tls::

<P>)), + ("op_connect_tls", op_async(op_connect_tls::<P>)), + ("op_listen_tls", op_sync(op_listen_tls::<P>
)), + ("op_accept_tls", op_async(op_accept_tls)), + ] +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ConnectTlsArgs { + transport: String, + hostname: String, + port: u16, + cert_file: Option, + cert_chain: Option, + private_key: Option, +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct StartTlsArgs { + rid: ResourceId, + cert_file: Option, + hostname: String, +} + +async fn op_start_tls( + state: Rc>, + args: StartTlsArgs, + _: (), +) -> Result +where + NP: NetPermissions + 'static, +{ + let rid = args.rid; + let hostname = match &*args.hostname { + "" => "localhost", + n => n, + }; + let cert_file = args.cert_file.as_deref(); + { + super::check_unstable2(&state, "Deno.startTls"); + let mut s = state.borrow_mut(); + let permissions = s.borrow_mut::(); + permissions.check_net(&(hostname, Some(0)))?; + if let Some(path) = cert_file { + permissions.check_read(Path::new(path))?; + } + } + + let ca_data = match cert_file { + Some(path) => { + let mut buf = Vec::new(); + File::open(path)?.read_to_end(&mut buf)?; + Some(buf) + } + _ => None, + }; + + let hostname_dns = DNSNameRef::try_from_ascii_str(hostname) + .map_err(|_| invalid_hostname(hostname))?; + + let unsafely_ignore_certificate_errors = state + .borrow() + .borrow::() + .0 + .clone(); + + // TODO(@justinmchase): Ideally the certificate store is created once + // and not cloned. The store should be wrapped in Arc to reduce + // copying memory unnecessarily. + let root_cert_store = state + .borrow() + .borrow::() + .root_cert_store + .clone(); + let resource_rc = state + .borrow_mut() + .resource_table + .take::(rid) + .ok_or_else(bad_resource_id)?; + let resource = Rc::try_unwrap(resource_rc) + .expect("Only a single use of this resource should happen"); + let (read_half, write_half) = resource.into_inner(); + let tcp_stream = read_half.reunite(write_half)?; + + let local_addr = tcp_stream.local_addr()?; + let remote_addr = tcp_stream.peer_addr()?; + + let tls_config = Arc::new(create_client_config( + root_cert_store, + ca_data, + unsafely_ignore_certificate_errors, + )?); + let tls_stream = + TlsStream::new_client_side(tcp_stream, &tls_config, hostname_dns); + + let rid = { + let mut state_ = state.borrow_mut(); + state_ + .resource_table + .add(TlsStreamResource::new(tls_stream.into_split())) + }; + + Ok(OpConn { + rid, + local_addr: Some(OpAddr::Tcp(IpAddr { + hostname: local_addr.ip().to_string(), + port: local_addr.port(), + })), + remote_addr: Some(OpAddr::Tcp(IpAddr { + hostname: remote_addr.ip().to_string(), + port: remote_addr.port(), + })), + }) +} + +async fn op_connect_tls( + state: Rc>, + args: ConnectTlsArgs, + _: (), +) -> Result +where + NP: NetPermissions + 'static, +{ + assert_eq!(args.transport, "tcp"); + let hostname = match &*args.hostname { + "" => "localhost", + n => n, + }; + let port = args.port; + let cert_file = args.cert_file.as_deref(); + let unsafely_ignore_certificate_errors = state + .borrow() + .borrow::() + .0 + .clone(); + + if args.cert_chain.is_some() { + super::check_unstable2(&state, "ConnectTlsOptions.certChain"); + } + if args.private_key.is_some() { + super::check_unstable2(&state, "ConnectTlsOptions.privateKey"); + } + + { + let mut s = state.borrow_mut(); + let permissions = s.borrow_mut::(); + permissions.check_net(&(hostname, Some(port)))?; + if let Some(path) = cert_file { + permissions.check_read(Path::new(path))?; + } + } + + let ca_data = match cert_file { + Some(path) => { + let mut buf = Vec::new(); + 
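A detail visible in `op_start_tls` above: the TCP resource is taken out of the resource table (`take` plus `Rc::try_unwrap`), so once `Deno.startTls(conn, ...)` resolves the original connection is consumed and only the returned TLS connection should be used. A sketch under that assumption (hostname and port are placeholders; unstable, so it needs `--unstable --allow-net`):

```ts
const tcpConn = await Deno.connect({ hostname: "smtp.example.com", port: 587 });
// ... negotiate STARTTLS (or similar) at the application level here ...
const tlsConn = await Deno.startTls(tcpConn, { hostname: "smtp.example.com" });
// Use only tlsConn from here on; tcpConn's underlying resource has been consumed.
await tlsConn.write(new TextEncoder().encode("EHLO example\r\n"));
tlsConn.close();
```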
File::open(path)?.read_to_end(&mut buf)?; + Some(buf) + } + _ => None, + }; + + let root_cert_store = state + .borrow() + .borrow::() + .root_cert_store + .clone(); + let hostname_dns = DNSNameRef::try_from_ascii_str(hostname) + .map_err(|_| invalid_hostname(hostname))?; + + let connect_addr = resolve_addr(hostname, port) + .await? + .next() + .ok_or_else(|| generic_error("No resolved address found"))?; + let tcp_stream = TcpStream::connect(connect_addr).await?; + let local_addr = tcp_stream.local_addr()?; + let remote_addr = tcp_stream.peer_addr()?; + let mut tls_config = create_client_config( + root_cert_store, + ca_data, + unsafely_ignore_certificate_errors, + )?; + + if args.cert_chain.is_some() || args.private_key.is_some() { + let cert_chain = args + .cert_chain + .ok_or_else(|| type_error("No certificate chain provided"))?; + let private_key = args + .private_key + .ok_or_else(|| type_error("No private key provided"))?; + + // The `remove` is safe because load_private_keys checks that there is at least one key. + let private_key = load_private_keys(private_key.as_bytes())?.remove(0); + + tls_config.set_single_client_cert( + load_certs(&mut cert_chain.as_bytes())?, + private_key, + )?; + } + + let tls_config = Arc::new(tls_config); + + let tls_stream = + TlsStream::new_client_side(tcp_stream, &tls_config, hostname_dns); + + let rid = { + let mut state_ = state.borrow_mut(); + state_ + .resource_table + .add(TlsStreamResource::new(tls_stream.into_split())) + }; + + Ok(OpConn { + rid, + local_addr: Some(OpAddr::Tcp(IpAddr { + hostname: local_addr.ip().to_string(), + port: local_addr.port(), + })), + remote_addr: Some(OpAddr::Tcp(IpAddr { + hostname: remote_addr.ip().to_string(), + port: remote_addr.port(), + })), + }) +} + +fn load_certs(reader: &mut dyn BufRead) -> Result, AnyError> { + let certs = certs(reader) + .map_err(|_| custom_error("InvalidData", "Unable to decode certificate"))?; + + if certs.is_empty() { + let e = custom_error("InvalidData", "No certificates found in cert file"); + return Err(e); + } + + Ok(certs) +} + +fn load_certs_from_file(path: &str) -> Result, AnyError> { + let cert_file = File::open(path)?; + let reader = &mut BufReader::new(cert_file); + load_certs(reader) +} + +fn key_decode_err() -> AnyError { + custom_error("InvalidData", "Unable to decode key") +} + +fn key_not_found_err() -> AnyError { + custom_error("InvalidData", "No keys found in key file") +} + +/// Starts with -----BEGIN RSA PRIVATE KEY----- +fn load_rsa_keys(mut bytes: &[u8]) -> Result, AnyError> { + let keys = rsa_private_keys(&mut bytes).map_err(|_| key_decode_err())?; + Ok(keys) +} + +/// Starts with -----BEGIN PRIVATE KEY----- +fn load_pkcs8_keys(mut bytes: &[u8]) -> Result, AnyError> { + let keys = pkcs8_private_keys(&mut bytes).map_err(|_| key_decode_err())?; + Ok(keys) +} + +fn load_private_keys(bytes: &[u8]) -> Result, AnyError> { + let mut keys = load_rsa_keys(bytes)?; + + if keys.is_empty() { + keys = load_pkcs8_keys(bytes)?; + } + + if keys.is_empty() { + return Err(key_not_found_err()); + } + + Ok(keys) +} + +fn load_private_keys_from_file( + path: &str, +) -> Result, AnyError> { + let key_bytes = std::fs::read(path)?; + load_private_keys(&key_bytes) +} + +pub struct TlsListenerResource { + tcp_listener: AsyncRefCell, + tls_config: Arc, + cancel_handle: CancelHandle, +} + +impl Resource for TlsListenerResource { + fn name(&self) -> Cow { + "tlsListener".into() + } + + fn close(self: Rc) { + self.cancel_handle.cancel(); + } +} + +#[derive(Deserialize)] +#[serde(rename_all = 
"camelCase")] +pub struct ListenTlsArgs { + transport: String, + hostname: String, + port: u16, + cert_file: String, + key_file: String, + alpn_protocols: Option>, +} + +fn op_listen_tls( + state: &mut OpState, + args: ListenTlsArgs, + _: (), +) -> Result +where + NP: NetPermissions + 'static, +{ + assert_eq!(args.transport, "tcp"); + let hostname = &*args.hostname; + let port = args.port; + let cert_file = &*args.cert_file; + let key_file = &*args.key_file; + + { + let permissions = state.borrow_mut::(); + permissions.check_net(&(hostname, Some(port)))?; + permissions.check_read(Path::new(cert_file))?; + permissions.check_read(Path::new(key_file))?; + } + + let mut tls_config = ServerConfig::new(NoClientAuth::new()); + if let Some(alpn_protocols) = args.alpn_protocols { + super::check_unstable(state, "Deno.listenTls#alpn_protocols"); + tls_config.alpn_protocols = + alpn_protocols.into_iter().map(|s| s.into_bytes()).collect(); + } + tls_config + .set_single_cert( + load_certs_from_file(cert_file)?, + load_private_keys_from_file(key_file)?.remove(0), + ) + .expect("invalid key or certificate"); + + let bind_addr = resolve_addr_sync(hostname, port)? + .next() + .ok_or_else(|| generic_error("No resolved address found"))?; + let std_listener = std::net::TcpListener::bind(bind_addr)?; + std_listener.set_nonblocking(true)?; + let tcp_listener = TcpListener::from_std(std_listener)?; + let local_addr = tcp_listener.local_addr()?; + + let tls_listener_resource = TlsListenerResource { + tcp_listener: AsyncRefCell::new(tcp_listener), + tls_config: Arc::new(tls_config), + cancel_handle: Default::default(), + }; + + let rid = state.resource_table.add(tls_listener_resource); + + Ok(OpConn { + rid, + local_addr: Some(OpAddr::Tcp(IpAddr { + hostname: local_addr.ip().to_string(), + port: local_addr.port(), + })), + remote_addr: None, + }) +} + +async fn op_accept_tls( + state: Rc>, + rid: ResourceId, + _: (), +) -> Result { + let resource = state + .borrow() + .resource_table + .get::(rid) + .ok_or_else(|| bad_resource("Listener has been closed"))?; + + let cancel_handle = RcRef::map(&resource, |r| &r.cancel_handle); + let tcp_listener = RcRef::map(&resource, |r| &r.tcp_listener) + .try_borrow_mut() + .ok_or_else(|| custom_error("Busy", "Another accept task is ongoing"))?; + + let (tcp_stream, remote_addr) = + match tcp_listener.accept().try_or_cancel(&cancel_handle).await { + Ok(tuple) => tuple, + Err(err) if err.kind() == ErrorKind::Interrupted => { + // FIXME(bartlomieju): compatibility with current JS implementation. + return Err(bad_resource("Listener has been closed")); + } + Err(err) => return Err(err.into()), + }; + + let local_addr = tcp_stream.local_addr()?; + + let tls_stream = TlsStream::new_server_side(tcp_stream, &resource.tls_config); + + let rid = { + let mut state_ = state.borrow_mut(); + state_ + .resource_table + .add(TlsStreamResource::new(tls_stream.into_split())) + }; + + Ok(OpConn { + rid, + local_addr: Some(OpAddr::Tcp(IpAddr { + hostname: local_addr.ip().to_string(), + port: local_addr.port(), + })), + remote_addr: Some(OpAddr::Tcp(IpAddr { + hostname: remote_addr.ip().to_string(), + port: remote_addr.port(), + })), + }) +} diff --git a/ext/net/ops_unix.rs b/ext/net/ops_unix.rs new file mode 100644 index 000000000..9dfcc231e --- /dev/null +++ b/ext/net/ops_unix.rs @@ -0,0 +1,180 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
+ +use crate::io::UnixStreamResource; +use crate::ops::AcceptArgs; +use crate::ops::OpAddr; +use crate::ops::OpConn; +use crate::ops::OpPacket; +use crate::ops::ReceiveArgs; +use deno_core::error::bad_resource; +use deno_core::error::custom_error; +use deno_core::error::null_opbuf; +use deno_core::error::AnyError; +use deno_core::AsyncRefCell; +use deno_core::CancelHandle; +use deno_core::CancelTryFuture; +use deno_core::OpState; +use deno_core::RcRef; +use deno_core::Resource; +use deno_core::ZeroCopyBuf; +use serde::Deserialize; +use serde::Serialize; +use std::borrow::Cow; +use std::cell::RefCell; +use std::fs::remove_file; +use std::path::Path; +use std::rc::Rc; +use tokio::net::UnixDatagram; +use tokio::net::UnixListener; +pub use tokio::net::UnixStream; + +/// A utility function to map OsStrings to Strings +pub fn into_string(s: std::ffi::OsString) -> Result { + s.into_string().map_err(|s| { + let message = format!("File name or path {:?} is not valid UTF-8", s); + custom_error("InvalidData", message) + }) +} + +struct UnixListenerResource { + listener: AsyncRefCell, + cancel: CancelHandle, +} + +impl Resource for UnixListenerResource { + fn name(&self) -> Cow { + "unixListener".into() + } + + fn close(self: Rc) { + self.cancel.cancel(); + } +} + +pub struct UnixDatagramResource { + pub socket: AsyncRefCell, + pub cancel: CancelHandle, +} + +impl Resource for UnixDatagramResource { + fn name(&self) -> Cow { + "unixDatagram".into() + } + + fn close(self: Rc) { + self.cancel.cancel(); + } +} + +#[derive(Serialize)] +pub struct UnixAddr { + pub path: Option, +} + +#[derive(Deserialize)] +pub struct UnixListenArgs { + pub path: String, +} + +pub(crate) async fn accept_unix( + state: Rc>, + args: AcceptArgs, + _: (), +) -> Result { + let rid = args.rid; + + let resource = state + .borrow() + .resource_table + .get::(rid) + .ok_or_else(|| bad_resource("Listener has been closed"))?; + let listener = RcRef::map(&resource, |r| &r.listener) + .try_borrow_mut() + .ok_or_else(|| custom_error("Busy", "Listener already in use"))?; + let cancel = RcRef::map(resource, |r| &r.cancel); + let (unix_stream, _socket_addr) = + listener.accept().try_or_cancel(cancel).await?; + + let local_addr = unix_stream.local_addr()?; + let remote_addr = unix_stream.peer_addr()?; + let resource = UnixStreamResource::new(unix_stream.into_split()); + let mut state = state.borrow_mut(); + let rid = state.resource_table.add(resource); + Ok(OpConn { + rid, + local_addr: Some(OpAddr::Unix(UnixAddr { + path: local_addr.as_pathname().and_then(pathstring), + })), + remote_addr: Some(OpAddr::Unix(UnixAddr { + path: remote_addr.as_pathname().and_then(pathstring), + })), + }) +} + +pub(crate) async fn receive_unix_packet( + state: Rc>, + args: ReceiveArgs, + buf: Option, +) -> Result { + let mut buf = buf.ok_or_else(null_opbuf)?; + + let rid = args.rid; + + let resource = state + .borrow() + .resource_table + .get::(rid) + .ok_or_else(|| bad_resource("Socket has been closed"))?; + let socket = RcRef::map(&resource, |r| &r.socket) + .try_borrow_mut() + .ok_or_else(|| custom_error("Busy", "Socket already in use"))?; + let cancel = RcRef::map(resource, |r| &r.cancel); + let (size, remote_addr) = + socket.recv_from(&mut buf).try_or_cancel(cancel).await?; + Ok(OpPacket { + size, + remote_addr: OpAddr::UnixPacket(UnixAddr { + path: remote_addr.as_pathname().and_then(pathstring), + }), + }) +} + +pub fn listen_unix( + state: &mut OpState, + addr: &Path, +) -> Result<(u32, tokio::net::unix::SocketAddr), AnyError> { + if addr.exists() { + 
remove_file(&addr).unwrap(); + } + let listener = UnixListener::bind(&addr)?; + let local_addr = listener.local_addr()?; + let listener_resource = UnixListenerResource { + listener: AsyncRefCell::new(listener), + cancel: Default::default(), + }; + let rid = state.resource_table.add(listener_resource); + + Ok((rid, local_addr)) +} + +pub fn listen_unix_packet( + state: &mut OpState, + addr: &Path, +) -> Result<(u32, tokio::net::unix::SocketAddr), AnyError> { + if addr.exists() { + remove_file(&addr).unwrap(); + } + let socket = UnixDatagram::bind(&addr)?; + let local_addr = socket.local_addr()?; + let datagram_resource = UnixDatagramResource { + socket: AsyncRefCell::new(socket), + cancel: Default::default(), + }; + let rid = state.resource_table.add(datagram_resource); + + Ok((rid, local_addr)) +} + +pub fn pathstring(pathname: &Path) -> Option { + into_string(pathname.into()).ok() +} diff --git a/ext/net/resolve_addr.rs b/ext/net/resolve_addr.rs new file mode 100644 index 000000000..ebf1374d1 --- /dev/null +++ b/ext/net/resolve_addr.rs @@ -0,0 +1,156 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +use deno_core::error::AnyError; +use std::net::SocketAddr; +use std::net::ToSocketAddrs; +use tokio::net::lookup_host; + +/// Resolve network address *asynchronously*. +pub async fn resolve_addr( + hostname: &str, + port: u16, +) -> Result + '_, AnyError> { + let addr_port_pair = make_addr_port_pair(hostname, port); + let result = lookup_host(addr_port_pair).await?; + Ok(result) +} + +/// Resolve network address *synchronously*. +pub fn resolve_addr_sync( + hostname: &str, + port: u16, +) -> Result, AnyError> { + let addr_port_pair = make_addr_port_pair(hostname, port); + let result = addr_port_pair.to_socket_addrs()?; + Ok(result) +} + +fn make_addr_port_pair(hostname: &str, port: u16) -> (&str, u16) { + // Default to localhost if given just the port. Example: ":80" + if hostname.is_empty() { + return ("0.0.0.0", port); + } + + // If this looks like an ipv6 IP address. Example: "[2001:db8::1]" + // Then we remove the brackets. 
+ let addr = hostname.trim_start_matches('[').trim_end_matches(']'); + (addr, port) +} + +#[cfg(test)] +mod tests { + use super::*; + use std::net::Ipv4Addr; + use std::net::Ipv6Addr; + use std::net::SocketAddrV4; + use std::net::SocketAddrV6; + + #[tokio::test] + async fn resolve_addr1() { + let expected = vec![SocketAddr::V4(SocketAddrV4::new( + Ipv4Addr::new(127, 0, 0, 1), + 80, + ))]; + let actual = resolve_addr("127.0.0.1", 80) + .await + .unwrap() + .collect::>(); + assert_eq!(actual, expected); + } + + #[tokio::test] + async fn resolve_addr2() { + let expected = vec![SocketAddr::V4(SocketAddrV4::new( + Ipv4Addr::new(0, 0, 0, 0), + 80, + ))]; + let actual = resolve_addr("", 80).await.unwrap().collect::>(); + assert_eq!(actual, expected); + } + + #[tokio::test] + async fn resolve_addr3() { + let expected = vec![SocketAddr::V4(SocketAddrV4::new( + Ipv4Addr::new(192, 0, 2, 1), + 25, + ))]; + let actual = resolve_addr("192.0.2.1", 25) + .await + .unwrap() + .collect::>(); + assert_eq!(actual, expected); + } + + #[tokio::test] + async fn resolve_addr_ipv6() { + let expected = vec![SocketAddr::V6(SocketAddrV6::new( + Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0, 1), + 8080, + 0, + 0, + ))]; + let actual = resolve_addr("[2001:db8::1]", 8080) + .await + .unwrap() + .collect::>(); + assert_eq!(actual, expected); + } + + #[tokio::test] + async fn resolve_addr_err() { + assert!(resolve_addr("INVALID ADDR", 1234).await.is_err()); + } + + #[test] + fn resolve_addr_sync1() { + let expected = vec![SocketAddr::V4(SocketAddrV4::new( + Ipv4Addr::new(127, 0, 0, 1), + 80, + ))]; + let actual = resolve_addr_sync("127.0.0.1", 80) + .unwrap() + .collect::>(); + assert_eq!(actual, expected); + } + + #[test] + fn resolve_addr_sync2() { + let expected = vec![SocketAddr::V4(SocketAddrV4::new( + Ipv4Addr::new(0, 0, 0, 0), + 80, + ))]; + let actual = resolve_addr_sync("", 80).unwrap().collect::>(); + assert_eq!(actual, expected); + } + + #[test] + fn resolve_addr_sync3() { + let expected = vec![SocketAddr::V4(SocketAddrV4::new( + Ipv4Addr::new(192, 0, 2, 1), + 25, + ))]; + let actual = resolve_addr_sync("192.0.2.1", 25) + .unwrap() + .collect::>(); + assert_eq!(actual, expected); + } + + #[test] + fn resolve_addr_sync_ipv6() { + let expected = vec![SocketAddr::V6(SocketAddrV6::new( + Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0, 1), + 8080, + 0, + 0, + ))]; + let actual = resolve_addr_sync("[2001:db8::1]", 8080) + .unwrap() + .collect::>(); + assert_eq!(actual, expected); + } + + #[test] + fn resolve_addr_sync_err() { + assert!(resolve_addr_sync("INVALID ADDR", 1234).is_err()); + } +} diff --git a/ext/timers/01_timers.js b/ext/timers/01_timers.js new file mode 100644 index 000000000..a00d5d9b9 --- /dev/null +++ b/ext/timers/01_timers.js @@ -0,0 +1,595 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
+"use strict"; + +((window) => { + const core = window.Deno.core; + const { + ArrayPrototypeIndexOf, + ArrayPrototypePush, + ArrayPrototypeShift, + ArrayPrototypeSplice, + DateNow, + Error, + FunctionPrototypeBind, + Map, + MapPrototypeDelete, + MapPrototypeGet, + MapPrototypeHas, + MapPrototypeSet, + MathMax, + Number, + String, + TypeError, + } = window.__bootstrap.primordials; + + // Shamelessly cribbed from extensions/fetch/11_streams.js + class AssertionError extends Error { + constructor(msg) { + super(msg); + this.name = "AssertionError"; + } + } + + /** + * @param {unknown} cond + * @param {string=} msg + * @returns {asserts cond} + */ + function assert(cond, msg = "Assertion failed.") { + if (!cond) { + throw new AssertionError(msg); + } + } + + function opStopGlobalTimer() { + core.opSync("op_global_timer_stop"); + } + + function opStartGlobalTimer(timeout) { + return core.opSync("op_global_timer_start", timeout); + } + + async function opWaitGlobalTimer() { + await core.opAsync("op_global_timer"); + } + + function opNow() { + return core.opSync("op_now"); + } + + function sleepSync(millis = 0) { + return core.opSync("op_sleep_sync", millis); + } + + // Derived from https://github.com/vadimg/js_bintrees. MIT Licensed. + + class RBNode { + constructor(data) { + this.data = data; + this.left = null; + this.right = null; + this.red = true; + } + + getChild(dir) { + return dir ? this.right : this.left; + } + + setChild(dir, val) { + if (dir) { + this.right = val; + } else { + this.left = val; + } + } + } + + class RBTree { + #comparator = null; + #root = null; + + constructor(comparator) { + this.#comparator = comparator; + this.#root = null; + } + + /** Returns `null` if tree is empty. */ + min() { + let res = this.#root; + if (res === null) { + return null; + } + while (res.left !== null) { + res = res.left; + } + return res.data; + } + + /** Returns node `data` if found, `null` otherwise. */ + find(data) { + let res = this.#root; + while (res !== null) { + const c = this.#comparator(data, res.data); + if (c === 0) { + return res.data; + } else { + res = res.getChild(c > 0); + } + } + return null; + } + + /** returns `true` if inserted, `false` if duplicate. 
*/ + insert(data) { + let ret = false; + + if (this.#root === null) { + // empty tree + this.#root = new RBNode(data); + ret = true; + } else { + const head = new RBNode(null); // fake tree root + + let dir = 0; + let last = 0; + + // setup + let gp = null; // grandparent + let ggp = head; // grand-grand-parent + let p = null; // parent + let node = this.#root; + ggp.right = this.#root; + + // search down + while (true) { + if (node === null) { + // insert new node at the bottom + node = new RBNode(data); + p.setChild(dir, node); + ret = true; + } else if (isRed(node.left) && isRed(node.right)) { + // color flip + node.red = true; + node.left.red = false; + node.right.red = false; + } + + // fix red violation + if (isRed(node) && isRed(p)) { + const dir2 = ggp.right === gp; + + assert(gp); + if (node === p.getChild(last)) { + ggp.setChild(dir2, singleRotate(gp, !last)); + } else { + ggp.setChild(dir2, doubleRotate(gp, !last)); + } + } + + const cmp = this.#comparator(node.data, data); + + // stop if found + if (cmp === 0) { + break; + } + + last = dir; + dir = Number(cmp < 0); // Fix type + + // update helpers + if (gp !== null) { + ggp = gp; + } + gp = p; + p = node; + node = node.getChild(dir); + } + + // update root + this.#root = head.right; + } + + // make root black + this.#root.red = false; + + return ret; + } + + /** Returns `true` if removed, `false` if not found. */ + remove(data) { + if (this.#root === null) { + return false; + } + + const head = new RBNode(null); // fake tree root + let node = head; + node.right = this.#root; + let p = null; // parent + let gp = null; // grand parent + let found = null; // found item + let dir = 1; + + while (node.getChild(dir) !== null) { + const last = dir; + + // update helpers + gp = p; + p = node; + node = node.getChild(dir); + + const cmp = this.#comparator(data, node.data); + + dir = cmp > 0; + + // save found node + if (cmp === 0) { + found = node; + } + + // push the red node down + if (!isRed(node) && !isRed(node.getChild(dir))) { + if (isRed(node.getChild(!dir))) { + const sr = singleRotate(node, dir); + p.setChild(last, sr); + p = sr; + } else if (!isRed(node.getChild(!dir))) { + const sibling = p.getChild(!last); + if (sibling !== null) { + if ( + !isRed(sibling.getChild(!last)) && + !isRed(sibling.getChild(last)) + ) { + // color flip + p.red = false; + sibling.red = true; + node.red = true; + } else { + assert(gp); + const dir2 = gp.right === p; + + if (isRed(sibling.getChild(last))) { + gp.setChild(dir2, doubleRotate(p, last)); + } else if (isRed(sibling.getChild(!last))) { + gp.setChild(dir2, singleRotate(p, last)); + } + + // ensure correct coloring + const gpc = gp.getChild(dir2); + assert(gpc); + gpc.red = true; + node.red = true; + assert(gpc.left); + gpc.left.red = false; + assert(gpc.right); + gpc.right.red = false; + } + } + } + } + } + + // replace and remove if found + if (found !== null) { + found.data = node.data; + assert(p); + p.setChild(p.right === node, node.getChild(node.left === null)); + } + + // update root and make it black + this.#root = head.right; + if (this.#root !== null) { + this.#root.red = false; + } + + return found !== null; + } + } + + function isRed(node) { + return node !== null && node.red; + } + + function singleRotate(root, dir) { + const save = root.getChild(!dir); + assert(save); + + root.setChild(!dir, save.getChild(dir)); + save.setChild(dir, root); + + root.red = true; + save.red = false; + + return save; + } + + function doubleRotate(root, dir) { + root.setChild(!dir, 
singleRotate(root.getChild(!dir), !dir)); + return singleRotate(root, dir); + } + + const { console } = globalThis; + + // Timeout values > TIMEOUT_MAX are set to 1. + const TIMEOUT_MAX = 2 ** 31 - 1; + + let globalTimeoutDue = null; + + let nextTimerId = 1; + const idMap = new Map(); + const dueTree = new RBTree((a, b) => a.due - b.due); + + function clearGlobalTimeout() { + globalTimeoutDue = null; + opStopGlobalTimer(); + } + + let pendingEvents = 0; + const pendingFireTimers = []; + + /** Process and run a single ready timer macrotask. + * This function should be registered through Deno.core.setMacrotaskCallback. + * Returns true when all ready macrotasks have been processed, false if more + * ready ones are available. The Isolate future would rely on the return value + * to repeatedly invoke this function until depletion. Multiple invocations + * of this function one at a time ensures newly ready microtasks are processed + * before next macrotask timer callback is invoked. */ + function handleTimerMacrotask() { + if (pendingFireTimers.length > 0) { + fire(ArrayPrototypeShift(pendingFireTimers)); + return pendingFireTimers.length === 0; + } + return true; + } + + async function setGlobalTimeout(due, now) { + // Since JS and Rust don't use the same clock, pass the time to rust as a + // relative time value. On the Rust side we'll turn that into an absolute + // value again. + const timeout = due - now; + assert(timeout >= 0); + // Send message to the backend. + globalTimeoutDue = due; + pendingEvents++; + // FIXME(bartlomieju): this is problematic, because `clearGlobalTimeout` + // is synchronous. That means that timer is cancelled, but this promise is still pending + // until next turn of event loop. This leads to "leaking of async ops" in tests; + // because `clearTimeout/clearInterval` might be the last statement in test function + // `opSanitizer` will immediately complain that there is pending op going on, unless + // some timeout/defer is put in place to allow promise resolution. + // Ideally `clearGlobalTimeout` doesn't return until this op is resolved, but + // I'm not if that's possible. + opStartGlobalTimer(timeout); + await opWaitGlobalTimer(); + pendingEvents--; + prepareReadyTimers(); + } + + function prepareReadyTimers() { + const now = DateNow(); + // Bail out if we're not expecting the global timer to fire. + if (globalTimeoutDue === null || pendingEvents > 0) { + return; + } + // After firing the timers that are due now, this will hold the first timer + // list that hasn't fired yet. + let nextDueNode; + while ((nextDueNode = dueTree.min()) !== null && nextDueNode.due <= now) { + dueTree.remove(nextDueNode); + // Fire all the timers in the list. + for (const timer of nextDueNode.timers) { + // With the list dropped, the timer is no longer scheduled. + timer.scheduled = false; + // Place the callback to pending timers to fire. + ArrayPrototypePush(pendingFireTimers, timer); + } + } + setOrClearGlobalTimeout(nextDueNode && nextDueNode.due, now); + } + + function setOrClearGlobalTimeout(due, now) { + if (due == null) { + clearGlobalTimeout(); + } else { + setGlobalTimeout(due, now); + } + } + + function schedule(timer, now) { + assert(!timer.scheduled); + assert(now <= timer.due); + // Find or create the list of timers that will fire at point-in-time `due`. 
+ const maybeNewDueNode = { due: timer.due, timers: [] }; + let dueNode = dueTree.find(maybeNewDueNode); + if (dueNode === null) { + dueTree.insert(maybeNewDueNode); + dueNode = maybeNewDueNode; + } + // Append the newly scheduled timer to the list and mark it as scheduled. + ArrayPrototypePush(dueNode.timers, timer); + timer.scheduled = true; + // If the new timer is scheduled to fire before any timer that existed before, + // update the global timeout to reflect this. + if (globalTimeoutDue === null || globalTimeoutDue > timer.due) { + setOrClearGlobalTimeout(timer.due, now); + } + } + + function unschedule(timer) { + // Check if our timer is pending scheduling or pending firing. + // If either is true, they are not in tree, and their idMap entry + // will be deleted soon. Remove it from queue. + let index = -1; + if ((index = ArrayPrototypeIndexOf(pendingFireTimers, timer)) >= 0) { + ArrayPrototypeSplice(pendingFireTimers, index); + return; + } + // If timer is not in the 2 pending queues and is unscheduled, + // it is not in the tree. + if (!timer.scheduled) { + return; + } + const searchKey = { due: timer.due, timers: [] }; + // Find the list of timers that will fire at point-in-time `due`. + const list = dueTree.find(searchKey).timers; + if (list.length === 1) { + // Time timer is the only one in the list. Remove the entire list. + assert(list[0] === timer); + dueTree.remove(searchKey); + // If the unscheduled timer was 'next up', find when the next timer that + // still exists is due, and update the global alarm accordingly. + if (timer.due === globalTimeoutDue) { + const nextDueNode = dueTree.min(); + setOrClearGlobalTimeout( + nextDueNode && nextDueNode.due, + DateNow(), + ); + } + } else { + // Multiple timers that are due at the same point in time. + // Remove this timer from the list. + const index = ArrayPrototypeIndexOf(list, timer); + assert(index > -1); + ArrayPrototypeSplice(list, index, 1); + } + } + + function fire(timer) { + // If the timer isn't found in the ID map, that means it has been cancelled + // between the timer firing and the promise callback (this function). + if (!MapPrototypeHas(idMap, timer.id)) { + return; + } + // Reschedule the timer if it is a repeating one, otherwise drop it. + if (!timer.repeat) { + // One-shot timer: remove the timer from this id-to-timer map. + MapPrototypeDelete(idMap, timer.id); + } else { + // Interval timer: compute when timer was supposed to fire next. + // However make sure to never schedule the next interval in the past. + const now = DateNow(); + timer.due = MathMax(now, timer.due + timer.delay); + schedule(timer, now); + } + // Call the user callback. Intermediate assignment is to avoid leaking `this` + // to it, while also keeping the stack trace neat when it shows up in there. + const callback = timer.callback; + if ("function" === typeof callback) { + callback(); + } else { + (0, eval)(callback); + } + } + + function checkThis(thisArg) { + if (thisArg !== null && thisArg !== undefined && thisArg !== globalThis) { + throw new TypeError("Illegal invocation"); + } + } + + function setTimer( + cb, + delay, + args, + repeat, + ) { + // If the callack is a function, bind `args` to the callback and bind `this` to globalThis(global). 
+ // otherwise call `String` on it, and `eval` it on calls; do not pass variardic args to the string + let callback; + + if ("function" === typeof cb) { + callback = FunctionPrototypeBind(cb, globalThis, ...args); + } else { + callback = String(cb); + args = []; // args are ignored + } + // In the browser, the delay value must be coercible to an integer between 0 + // and INT32_MAX. Any other value will cause the timer to fire immediately. + // We emulate this behavior. + const now = DateNow(); + if (delay > TIMEOUT_MAX) { + console.warn( + `${delay} does not fit into` + + " a 32-bit signed integer." + + "\nTimeout duration was set to 1.", + ); + delay = 1; + } + delay = MathMax(0, delay | 0); + + // Create a new, unscheduled timer object. + const timer = { + id: nextTimerId++, + callback, + args, + delay, + due: now + delay, + repeat, + scheduled: false, + }; + // Register the timer's existence in the id-to-timer map. + MapPrototypeSet(idMap, timer.id, timer); + // Schedule the timer in the due table. + schedule(timer, now); + return timer.id; + } + + function setTimeout( + cb, + delay = 0, + ...args + ) { + delay >>>= 0; + checkThis(this); + return setTimer(cb, delay, args, false); + } + + function setInterval( + cb, + delay = 0, + ...args + ) { + delay >>>= 0; + checkThis(this); + return setTimer(cb, delay, args, true); + } + + function clearTimer(id) { + id >>>= 0; + const timer = MapPrototypeGet(idMap, id); + if (timer === undefined) { + // Timer doesn't exist any more or never existed. This is not an error. + return; + } + // Unschedule the timer if it is currently scheduled, and forget about it. + unschedule(timer); + MapPrototypeDelete(idMap, timer.id); + } + + function clearTimeout(id = 0) { + id >>>= 0; + if (id === 0) { + return; + } + clearTimer(id); + } + + function clearInterval(id = 0) { + id >>>= 0; + if (id === 0) { + return; + } + clearTimer(id); + } + + window.__bootstrap.timers = { + clearInterval, + setInterval, + clearTimeout, + setTimeout, + handleTimerMacrotask, + opStopGlobalTimer, + opStartGlobalTimer, + opNow, + sleepSync, + }; +})(this); diff --git a/ext/timers/02_performance.js b/ext/timers/02_performance.js new file mode 100644 index 000000000..f752ba933 --- /dev/null +++ b/ext/timers/02_performance.js @@ -0,0 +1,569 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
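To make the semantics of the timers implementation above concrete, a short user-level sketch (standard web timer behaviour; illustrative only, not part of the patch):

  // Delays that do not fit into a 32-bit signed integer are clamped to 1 ms,
  // with a console warning (see the TIMEOUT_MAX check in setTimer above).
  const id = setTimeout(() => {}, 2 ** 31);
  clearTimeout(id);

  // A non-function callback is converted with String() and eval'd when it
  // fires; any extra arguments are ignored in that case.
  setTimeout("console.log('evaluated as a string')", 10);

  // Timer ids start at 1, so clearTimeout(0) and clearInterval(0) are no-ops.
  clearTimeout(0);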
+"use strict"; + +((window) => { + const { + ArrayPrototypeFilter, + ArrayPrototypeFind, + ArrayPrototypePush, + ArrayPrototypeReverse, + ArrayPrototypeSlice, + ObjectKeys, + Symbol, + SymbolFor, + SymbolToStringTag, + TypeError, + } = window.__bootstrap.primordials; + + const { webidl, structuredClone } = window.__bootstrap; + const consoleInternal = window.__bootstrap.console; + const { opNow } = window.__bootstrap.timers; + const { DOMException } = window.__bootstrap.domException; + + const illegalConstructorKey = Symbol("illegalConstructorKey"); + const customInspect = SymbolFor("Deno.customInspect"); + let performanceEntries = []; + + webidl.converters["PerformanceMarkOptions"] = webidl + .createDictionaryConverter( + "PerformanceMarkOptions", + [ + { + key: "detail", + converter: webidl.converters.any, + }, + { + key: "startTime", + converter: webidl.converters.DOMHighResTimeStamp, + }, + ], + ); + + webidl.converters["DOMString or DOMHighResTimeStamp"] = (V, opts) => { + if (webidl.type(V) === "Number" && V !== null) { + return webidl.converters.DOMHighResTimeStamp(V, opts); + } + return webidl.converters.DOMString(V, opts); + }; + + webidl.converters["PerformanceMeasureOptions"] = webidl + .createDictionaryConverter( + "PerformanceMeasureOptions", + [ + { + key: "detail", + converter: webidl.converters.any, + }, + { + key: "start", + converter: webidl.converters["DOMString or DOMHighResTimeStamp"], + }, + { + key: "duration", + converter: webidl.converters.DOMHighResTimeStamp, + }, + { + key: "end", + converter: webidl.converters["DOMString or DOMHighResTimeStamp"], + }, + ], + ); + + webidl.converters["DOMString or PerformanceMeasureOptions"] = (V, opts) => { + if (webidl.type(V) === "Object" && V !== null) { + return webidl.converters["PerformanceMeasureOptions"](V, opts); + } + return webidl.converters.DOMString(V, opts); + }; + + function findMostRecent( + name, + type, + ) { + return ArrayPrototypeFind( + ArrayPrototypeReverse(ArrayPrototypeSlice(performanceEntries)), + (entry) => entry.name === name && entry.entryType === type, + ); + } + + function convertMarkToTimestamp(mark) { + if (typeof mark === "string") { + const entry = findMostRecent(mark, "mark"); + if (!entry) { + throw new DOMException( + `Cannot find mark: "${mark}".`, + "SyntaxError", + ); + } + return entry.startTime; + } + if (mark < 0) { + throw new TypeError("Mark cannot be negative."); + } + return mark; + } + + function filterByNameType( + name, + type, + ) { + return ArrayPrototypeFilter( + performanceEntries, + (entry) => + (name ? entry.name === name : true) && + (type ? 
entry.entryType === type : true), + ); + } + + const now = opNow; + + const _name = Symbol("[[name]]"); + const _entryType = Symbol("[[entryType]]"); + const _startTime = Symbol("[[startTime]]"); + const _duration = Symbol("[[duration]]"); + class PerformanceEntry { + [_name] = ""; + [_entryType] = ""; + [_startTime] = 0; + [_duration] = 0; + + get name() { + webidl.assertBranded(this, PerformanceEntry); + return this[_name]; + } + + get entryType() { + webidl.assertBranded(this, PerformanceEntry); + return this[_entryType]; + } + + get startTime() { + webidl.assertBranded(this, PerformanceEntry); + return this[_startTime]; + } + + get duration() { + webidl.assertBranded(this, PerformanceEntry); + return this[_duration]; + } + + constructor( + name = null, + entryType = null, + startTime = null, + duration = null, + key = undefined, + ) { + if (key !== illegalConstructorKey) { + webidl.illegalConstructor(); + } + this[webidl.brand] = webidl.brand; + + this[_name] = name; + this[_entryType] = entryType; + this[_startTime] = startTime; + this[_duration] = duration; + } + + toJSON() { + webidl.assertBranded(this, PerformanceEntry); + return { + name: this[_name], + entryType: this[_entryType], + startTime: this[_startTime], + duration: this[_duration], + }; + } + + [customInspect](inspect) { + return inspect(consoleInternal.createFilteredInspectProxy({ + object: this, + evaluate: this instanceof PerformanceEntry, + keys: [ + "name", + "entryType", + "startTime", + "duration", + ], + })); + } + } + webidl.configurePrototype(PerformanceEntry); + + const _detail = Symbol("[[detail]]"); + class PerformanceMark extends PerformanceEntry { + [SymbolToStringTag] = "PerformanceMark"; + + [_detail] = null; + + get detail() { + webidl.assertBranded(this, PerformanceMark); + return this[_detail]; + } + + get entryType() { + webidl.assertBranded(this, PerformanceMark); + return "mark"; + } + + constructor( + name, + options = {}, + ) { + const prefix = "Failed to construct 'PerformanceMark'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + + name = webidl.converters.DOMString(name, { + prefix, + context: "Argument 1", + }); + + options = webidl.converters.PerformanceMarkOptions(options, { + prefix, + context: "Argument 2", + }); + + const { detail = null, startTime = now() } = options; + + super(name, "mark", startTime, 0, illegalConstructorKey); + this[webidl.brand] = webidl.brand; + if (startTime < 0) { + throw new TypeError("startTime cannot be negative"); + } + this[_detail] = structuredClone(detail); + } + + toJSON() { + webidl.assertBranded(this, PerformanceMark); + return { + name: this.name, + entryType: this.entryType, + startTime: this.startTime, + duration: this.duration, + detail: this.detail, + }; + } + + [customInspect](inspect) { + return inspect(consoleInternal.createFilteredInspectProxy({ + object: this, + evaluate: this instanceof PerformanceMark, + keys: [ + "name", + "entryType", + "startTime", + "duration", + "detail", + ], + })); + } + } + webidl.configurePrototype(PerformanceMark); + + class PerformanceMeasure extends PerformanceEntry { + [SymbolToStringTag] = "PerformanceMeasure"; + + [_detail] = null; + + get detail() { + webidl.assertBranded(this, PerformanceMeasure); + return this[_detail]; + } + + get entryType() { + webidl.assertBranded(this, PerformanceMeasure); + return "measure"; + } + + constructor( + name = null, + startTime = null, + duration = null, + detail = null, + key = undefined, + ) { + if (key !== illegalConstructorKey) { + 
webidl.illegalConstructor(); + } + + super(name, "measure", startTime, duration, key); + this[webidl.brand] = webidl.brand; + this[_detail] = structuredClone(detail); + } + + toJSON() { + webidl.assertBranded(this, PerformanceMeasure); + return { + name: this.name, + entryType: this.entryType, + startTime: this.startTime, + duration: this.duration, + detail: this.detail, + }; + } + + [customInspect](inspect) { + return inspect(consoleInternal.createFilteredInspectProxy({ + object: this, + evaluate: this instanceof PerformanceMeasure, + keys: [ + "name", + "entryType", + "startTime", + "duration", + "detail", + ], + })); + } + } + webidl.configurePrototype(PerformanceMeasure); + + class Performance { + constructor() { + webidl.illegalConstructor(); + } + + clearMarks(markName = undefined) { + webidl.assertBranded(this, Performance); + if (markName !== undefined) { + markName = webidl.converters.DOMString(markName, { + prefix: "Failed to execute 'clearMarks' on 'Performance'", + context: "Argument 1", + }); + + performanceEntries = ArrayPrototypeFilter( + performanceEntries, + (entry) => !(entry.name === markName && entry.entryType === "mark"), + ); + } else { + performanceEntries = ArrayPrototypeFilter( + performanceEntries, + (entry) => entry.entryType !== "mark", + ); + } + } + + clearMeasures(measureName = undefined) { + webidl.assertBranded(this, Performance); + if (measureName !== undefined) { + measureName = webidl.converters.DOMString(measureName, { + prefix: "Failed to execute 'clearMeasures' on 'Performance'", + context: "Argument 1", + }); + + performanceEntries = ArrayPrototypeFilter( + performanceEntries, + (entry) => + !(entry.name === measureName && entry.entryType === "measure"), + ); + } else { + performanceEntries = ArrayPrototypeFilter( + performanceEntries, + (entry) => entry.entryType !== "measure", + ); + } + } + + getEntries() { + webidl.assertBranded(this, Performance); + return filterByNameType(); + } + + getEntriesByName( + name, + type = undefined, + ) { + webidl.assertBranded(this, Performance); + const prefix = "Failed to execute 'getEntriesByName' on 'Performance'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + + name = webidl.converters.DOMString(name, { + prefix, + context: "Argument 1", + }); + + if (type !== undefined) { + type = webidl.converters.DOMString(type, { + prefix, + context: "Argument 2", + }); + } + + return filterByNameType(name, type); + } + + getEntriesByType(type) { + webidl.assertBranded(this, Performance); + const prefix = "Failed to execute 'getEntriesByName' on 'Performance'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + + type = webidl.converters.DOMString(type, { + prefix, + context: "Argument 1", + }); + + return filterByNameType(undefined, type); + } + + mark( + markName, + markOptions = {}, + ) { + webidl.assertBranded(this, Performance); + const prefix = "Failed to execute 'mark' on 'Performance'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + + markName = webidl.converters.DOMString(markName, { + prefix, + context: "Argument 1", + }); + + markOptions = webidl.converters.PerformanceMarkOptions(markOptions, { + prefix, + context: "Argument 2", + }); + + // 3.1.1.1 If the global object is a Window object and markName uses the + // same name as a read only attribute in the PerformanceTiming interface, + // throw a SyntaxError. 
- not implemented + const entry = new PerformanceMark(markName, markOptions); + // 3.1.1.7 Queue entry - not implemented + ArrayPrototypePush(performanceEntries, entry); + return entry; + } + + measure( + measureName, + startOrMeasureOptions = {}, + endMark = undefined, + ) { + webidl.assertBranded(this, Performance); + const prefix = "Failed to execute 'measure' on 'Performance'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + + measureName = webidl.converters.DOMString(measureName, { + prefix, + context: "Argument 1", + }); + + startOrMeasureOptions = webidl.converters + ["DOMString or PerformanceMeasureOptions"](startOrMeasureOptions, { + prefix, + context: "Argument 2", + }); + + if (endMark !== undefined) { + endMark = webidl.converters.DOMString(endMark, { + prefix, + context: "Argument 3", + }); + } + + if ( + startOrMeasureOptions && typeof startOrMeasureOptions === "object" && + ObjectKeys(startOrMeasureOptions).length > 0 + ) { + if (endMark) { + throw new TypeError("Options cannot be passed with endMark."); + } + if ( + !("start" in startOrMeasureOptions) && + !("end" in startOrMeasureOptions) + ) { + throw new TypeError( + "A start or end mark must be supplied in options.", + ); + } + if ( + "start" in startOrMeasureOptions && + "duration" in startOrMeasureOptions && + "end" in startOrMeasureOptions + ) { + throw new TypeError( + "Cannot specify start, end, and duration together in options.", + ); + } + } + let endTime; + if (endMark) { + endTime = convertMarkToTimestamp(endMark); + } else if ( + typeof startOrMeasureOptions === "object" && + "end" in startOrMeasureOptions + ) { + endTime = convertMarkToTimestamp(startOrMeasureOptions.end); + } else if ( + typeof startOrMeasureOptions === "object" && + "start" in startOrMeasureOptions && + "duration" in startOrMeasureOptions + ) { + const start = convertMarkToTimestamp(startOrMeasureOptions.start); + const duration = convertMarkToTimestamp(startOrMeasureOptions.duration); + endTime = start + duration; + } else { + endTime = now(); + } + let startTime; + if ( + typeof startOrMeasureOptions === "object" && + "start" in startOrMeasureOptions + ) { + startTime = convertMarkToTimestamp(startOrMeasureOptions.start); + } else if ( + typeof startOrMeasureOptions === "object" && + "end" in startOrMeasureOptions && + "duration" in startOrMeasureOptions + ) { + const end = convertMarkToTimestamp(startOrMeasureOptions.end); + const duration = convertMarkToTimestamp(startOrMeasureOptions.duration); + startTime = end - duration; + } else if (typeof startOrMeasureOptions === "string") { + startTime = convertMarkToTimestamp(startOrMeasureOptions); + } else { + startTime = 0; + } + const entry = new PerformanceMeasure( + measureName, + startTime, + endTime - startTime, + typeof startOrMeasureOptions === "object" + ? startOrMeasureOptions.detail ?? 
null + : null, + illegalConstructorKey, + ); + ArrayPrototypePush(performanceEntries, entry); + return entry; + } + + now() { + webidl.assertBranded(this, Performance); + return now(); + } + + toJSON() { + webidl.assertBranded(this, Performance); + return {}; + } + + [customInspect](inspect) { + return inspect(consoleInternal.createFilteredInspectProxy({ + object: this, + evaluate: this instanceof Performance, + keys: [], + })); + } + + get [SymbolToStringTag]() { + return "Performance"; + } + } + webidl.configurePrototype(Performance); + + window.__bootstrap.performance = { + PerformanceEntry, + PerformanceMark, + PerformanceMeasure, + Performance, + performance: webidl.createBranded(Performance), + }; +})(this); diff --git a/ext/timers/Cargo.toml b/ext/timers/Cargo.toml new file mode 100644 index 000000000..eeaef5749 --- /dev/null +++ b/ext/timers/Cargo.toml @@ -0,0 +1,28 @@ +# Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +[package] +name = "deno_timers" +version = "0.12.0" +authors = ["the Deno authors"] +edition = "2018" +license = "MIT" +readme = "README.md" +repository = "https://github.com/denoland/deno" +description = "Timers API implementation for Deno" + +[lib] +path = "lib.rs" + +[dependencies] +deno_core = { version = "0.96.0", path = "../../core" } +tokio = { version = "1.8.1", features = ["full"] } + +[dev-dependencies] +deno_bench_util = { version = "0.8.0", path = "../../bench_util" } +deno_url = { version = "0.14.0", path = "../url" } +deno_web = { version = "0.45.0", path = "../web" } +deno_webidl = { version = "0.14.0", path = "../webidl" } + +[[bench]] +name = "timers_ops" +harness = false diff --git a/ext/timers/README.md b/ext/timers/README.md new file mode 100644 index 000000000..5a2a8e516 --- /dev/null +++ b/ext/timers/README.md @@ -0,0 +1,5 @@ +# deno_timers + +This crate implements the timers API. + +Spec: https://html.spec.whatwg.org/multipage/timers-and-user-prompts.html#timers diff --git a/ext/timers/benches/timers_ops.rs b/ext/timers/benches/timers_ops.rs new file mode 100644 index 000000000..269d9627d --- /dev/null +++ b/ext/timers/benches/timers_ops.rs @@ -0,0 +1,40 @@ +use deno_core::Extension; + +use deno_bench_util::bench_or_profile; +use deno_bench_util::bencher::{benchmark_group, Bencher}; +use deno_bench_util::{bench_js_async, bench_js_sync}; +use deno_web::BlobStore; + +fn setup() -> Vec { + vec![ + deno_webidl::init(), + deno_url::init(), + deno_web::init(BlobStore::default(), None), + deno_timers::init::(), + Extension::builder() + .js(vec![ + ("setup", + Box::new(|| Ok(r#" + const { opNow, setTimeout, handleTimerMacrotask } = globalThis.__bootstrap.timers; + Deno.core.setMacrotaskCallback(handleTimerMacrotask); + "#.to_owned())), + ), + ]) + .state(|state| { + state.put(deno_timers::NoTimersPermission{}); + Ok(()) + }) + .build() + ] +} + +fn bench_op_now(b: &mut Bencher) { + bench_js_sync(b, r#"opNow();"#, setup); +} + +fn bench_set_timeout(b: &mut Bencher) { + bench_js_async(b, r#"setTimeout(() => {}, 0);"#, setup); +} + +benchmark_group!(benches, bench_op_now, bench_set_timeout,); +bench_or_profile!(benches); diff --git a/ext/timers/lib.rs b/ext/timers/lib.rs new file mode 100644 index 000000000..2b9948d1f --- /dev/null +++ b/ext/timers/lib.rs @@ -0,0 +1,193 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +//! This module helps deno implement timers. +//! +//! As an optimization, we want to avoid an expensive calls into rust for every +//! setTimeout in JavaScript. 
Thus in //js/timers.ts a data structure is +//! implemented that calls into Rust for only the smallest timeout. Thus we +//! only need to be able to start, cancel and await a single timer (or Delay, as Tokio +//! calls it) for an entire Isolate. This is what is implemented here. + +use deno_core::error::AnyError; +use deno_core::futures; +use deno_core::futures::channel::oneshot; +use deno_core::futures::FutureExt; +use deno_core::futures::TryFutureExt; +use deno_core::include_js_files; +use deno_core::op_async; +use deno_core::op_sync; +use deno_core::Extension; +use deno_core::OpState; +use std::cell::RefCell; +use std::future::Future; +use std::pin::Pin; +use std::rc::Rc; +use std::thread::sleep; +use std::time::Duration; +use std::time::Instant; + +pub trait TimersPermission { + fn allow_hrtime(&mut self) -> bool; + fn check_unstable(&self, state: &OpState, api_name: &'static str); +} + +pub struct NoTimersPermission; + +impl TimersPermission for NoTimersPermission { + fn allow_hrtime(&mut self) -> bool { + false + } + fn check_unstable(&self, _: &OpState, _: &'static str) {} +} + +pub fn init() -> Extension { + Extension::builder() + .js(include_js_files!( + prefix "deno:ext/timers", + "01_timers.js", + "02_performance.js", + )) + .ops(vec![ + ("op_global_timer_stop", op_sync(op_global_timer_stop)), + ("op_global_timer_start", op_sync(op_global_timer_start)), + ("op_global_timer", op_async(op_global_timer)), + ("op_now", op_sync(op_now::

)), + ("op_sleep_sync", op_sync(op_sleep_sync::

)), + ]) + .state(|state| { + state.put(GlobalTimer::default()); + state.put(StartTime::now()); + Ok(()) + }) + .build() +} + +pub type StartTime = Instant; + +type TimerFuture = Pin>>>; + +#[derive(Default)] +pub struct GlobalTimer { + tx: Option>, + pub future: Option, +} + +impl GlobalTimer { + pub fn cancel(&mut self) { + if let Some(tx) = self.tx.take() { + tx.send(()).ok(); + } + } + + pub fn new_timeout(&mut self, deadline: Instant) { + if self.tx.is_some() { + self.cancel(); + } + assert!(self.tx.is_none()); + self.future.take(); + + let (tx, rx) = oneshot::channel(); + self.tx = Some(tx); + + let delay = tokio::time::sleep_until(deadline.into()).boxed_local(); + let rx = rx + .map_err(|err| panic!("Unexpected error in receiving channel {:?}", err)); + + let fut = futures::future::select(delay, rx) + .then(|_| futures::future::ok(())) + .boxed_local(); + self.future = Some(fut); + } +} + +pub fn op_global_timer_stop( + state: &mut OpState, + _args: (), + _: (), +) -> Result<(), AnyError> { + let global_timer = state.borrow_mut::(); + global_timer.cancel(); + Ok(()) +} + +// Set up a timer that will be later awaited by JS promise. +// It's a separate op, because canceling a timeout immediately +// after setting it caused a race condition (because Tokio timeout) +// might have been registered after next event loop tick. +// +// See https://github.com/denoland/deno/issues/7599 for more +// details. +pub fn op_global_timer_start( + state: &mut OpState, + timeout: u64, + _: (), +) -> Result<(), AnyError> { + // According to spec, minimum allowed timeout is 4 ms. + // https://html.spec.whatwg.org/multipage/timers-and-user-prompts.html#timers + // TODO(#10974) Per spec this is actually a little more complicated than this. + // The minimum timeout depends on the nesting level of the timeout. + let timeout = std::cmp::max(timeout, 4); + + let deadline = Instant::now() + Duration::from_millis(timeout); + let global_timer = state.borrow_mut::(); + global_timer.new_timeout(deadline); + Ok(()) +} + +pub async fn op_global_timer( + state: Rc>, + _args: (), + _: (), +) -> Result<(), AnyError> { + let maybe_timer_fut = { + let mut s = state.borrow_mut(); + let global_timer = s.borrow_mut::(); + global_timer.future.take() + }; + if let Some(timer_fut) = maybe_timer_fut { + let _ = timer_fut.await; + } + Ok(()) +} + +// Returns a milliseconds and nanoseconds subsec +// since the start time of the deno runtime. +// If the High precision flag is not set, the +// nanoseconds are rounded on 2ms. 
+pub fn op_now( + state: &mut OpState, + _argument: (), + _: (), +) -> Result +where + TP: TimersPermission + 'static, +{ + let start_time = state.borrow::(); + let seconds = start_time.elapsed().as_secs(); + let mut subsec_nanos = start_time.elapsed().subsec_nanos() as f64; + let reduced_time_precision = 2_000_000.0; // 2ms in nanoseconds + + // If the permission is not enabled + // Round the nano result on 2 milliseconds + // see: https://developer.mozilla.org/en-US/docs/Web/API/DOMHighResTimeStamp#Reduced_time_precision + if !state.borrow_mut::().allow_hrtime() { + subsec_nanos -= subsec_nanos % reduced_time_precision; + } + + let result = (seconds * 1_000) as f64 + (subsec_nanos / 1_000_000.0); + + Ok(result) +} + +pub fn op_sleep_sync( + state: &mut OpState, + millis: u64, + _: (), +) -> Result<(), AnyError> +where + TP: TimersPermission + 'static, +{ + state.borrow::().check_unstable(state, "Deno.sleepSync"); + sleep(Duration::from_millis(millis)); + Ok(()) +} diff --git a/ext/tls/Cargo.toml b/ext/tls/Cargo.toml new file mode 100644 index 000000000..42060f418 --- /dev/null +++ b/ext/tls/Cargo.toml @@ -0,0 +1,24 @@ +# Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +[package] +name = "deno_tls" +version = "0.1.1" +authors = ["the Deno authors"] +edition = "2018" +license = "MIT" +readme = "README.md" +repository = "https://github.com/denoland/deno" +description = "TLS for Deno" + +[lib] +path = "lib.rs" + +[dependencies] +deno_core = { version = "0.96.0", path = "../../core" } +lazy_static = "1.4.0" +reqwest = { version = "0.11.4", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli"] } +rustls = { version = "0.19.1", features = ["dangerous_configuration"] } +rustls-native-certs = "0.5.0" +serde = { version = "1.0.126", features = ["derive"] } +webpki = "0.21.4" +webpki-roots = "0.21.1" diff --git a/ext/tls/README.md b/ext/tls/README.md new file mode 100644 index 000000000..c3a8eb839 --- /dev/null +++ b/ext/tls/README.md @@ -0,0 +1,4 @@ +# deno_tls + +This crate implements common utilities for TLS handling in other Deno +extensions. diff --git a/ext/tls/lib.rs b/ext/tls/lib.rs new file mode 100644 index 000000000..8f56f0ffd --- /dev/null +++ b/ext/tls/lib.rs @@ -0,0 +1,192 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +pub use reqwest; +pub use rustls; +pub use rustls_native_certs; +pub use webpki; +pub use webpki_roots; + +use deno_core::error::anyhow; +use deno_core::error::generic_error; +use deno_core::error::AnyError; +use deno_core::parking_lot::Mutex; +use deno_core::Extension; + +use reqwest::header::HeaderMap; +use reqwest::header::USER_AGENT; +use reqwest::redirect::Policy; +use reqwest::Client; +use rustls::internal::msgs::handshake::DigitallySignedStruct; +use rustls::Certificate; +use rustls::ClientConfig; +use rustls::HandshakeSignatureValid; +use rustls::RootCertStore; +use rustls::ServerCertVerified; +use rustls::ServerCertVerifier; +use rustls::StoresClientSessions; +use rustls::TLSError; +use rustls::WebPKIVerifier; +use serde::Deserialize; +use std::collections::HashMap; +use std::io::BufReader; +use std::io::Cursor; +use std::sync::Arc; +use webpki::DNSNameRef; + +/// This extension has no runtime apis, it only exports some shared native functions. 
+pub fn init() -> Extension { + Extension::builder().build() +} + +pub struct NoCertificateVerification(pub Vec); + +impl ServerCertVerifier for NoCertificateVerification { + fn verify_server_cert( + &self, + roots: &RootCertStore, + presented_certs: &[Certificate], + dns_name_ref: DNSNameRef<'_>, + ocsp: &[u8], + ) -> Result { + let dns_name: &str = dns_name_ref.into(); + let dns_name: String = dns_name.to_owned(); + if self.0.is_empty() || self.0.contains(&dns_name) { + Ok(ServerCertVerified::assertion()) + } else { + WebPKIVerifier::new().verify_server_cert( + roots, + presented_certs, + dns_name_ref, + ocsp, + ) + } + } + + fn verify_tls12_signature( + &self, + _message: &[u8], + _cert: &Certificate, + _dss: &DigitallySignedStruct, + ) -> Result { + Ok(HandshakeSignatureValid::assertion()) + } + + fn verify_tls13_signature( + &self, + _message: &[u8], + _cert: &Certificate, + _dss: &DigitallySignedStruct, + ) -> Result { + Ok(HandshakeSignatureValid::assertion()) + } +} + +#[derive(Deserialize, Default, Debug, Clone)] +#[serde(rename_all = "camelCase")] +#[serde(default)] +pub struct Proxy { + pub url: String, + pub basic_auth: Option, +} + +#[derive(Deserialize, Default, Debug, Clone)] +#[serde(default)] +pub struct BasicAuth { + pub username: String, + pub password: String, +} + +lazy_static::lazy_static! { + static ref CLIENT_SESSION_MEMORY_CACHE: Arc = + Arc::new(ClientSessionMemoryCache::default()); +} + +#[derive(Default)] +struct ClientSessionMemoryCache(Mutex, Vec>>); + +impl StoresClientSessions for ClientSessionMemoryCache { + fn get(&self, key: &[u8]) -> Option> { + self.0.lock().get(key).cloned() + } + + fn put(&self, key: Vec, value: Vec) -> bool { + let mut sessions = self.0.lock(); + // TODO(bnoordhuis) Evict sessions LRU-style instead of arbitrarily. + while sessions.len() >= 1024 { + let key = sessions.keys().next().unwrap().clone(); + sessions.remove(&key); + } + sessions.insert(key, value); + true + } +} + +pub fn create_default_root_cert_store() -> RootCertStore { + let mut root_cert_store = RootCertStore::empty(); + // TODO(@justinmchase): Consider also loading the system keychain here + root_cert_store.add_server_trust_anchors(&webpki_roots::TLS_SERVER_ROOTS); + root_cert_store +} + +pub fn create_client_config( + root_cert_store: Option, + ca_data: Option>, + unsafely_ignore_certificate_errors: Option>, +) -> Result { + let mut tls_config = ClientConfig::new(); + tls_config.set_persistence(CLIENT_SESSION_MEMORY_CACHE.clone()); + tls_config.root_store = + root_cert_store.unwrap_or_else(create_default_root_cert_store); + + // If a custom cert is specified, add it to the store + if let Some(cert) = ca_data { + let reader = &mut BufReader::new(Cursor::new(cert)); + // This function does not return specific errors, if it fails give a generic message. + if let Err(_err) = tls_config.root_store.add_pem_file(reader) { + return Err(anyhow!("Unable to add pem file to certificate store")); + } + } + + if let Some(ic_allowlist) = unsafely_ignore_certificate_errors { + tls_config.dangerous().set_certificate_verifier(Arc::new( + NoCertificateVerification(ic_allowlist), + )); + } + + Ok(tls_config) +} + +/// Create new instance of async reqwest::Client. This client supports +/// proxies and doesn't follow redirects. 
+pub fn create_http_client( + user_agent: String, + root_cert_store: Option, + ca_data: Option>, + proxy: Option, + unsafely_ignore_certificate_errors: Option>, +) -> Result { + let tls_config = create_client_config( + root_cert_store, + ca_data, + unsafely_ignore_certificate_errors, + )?; + let mut headers = HeaderMap::new(); + headers.insert(USER_AGENT, user_agent.parse().unwrap()); + let mut builder = Client::builder() + .redirect(Policy::none()) + .default_headers(headers) + .use_preconfigured_tls(tls_config); + + if let Some(proxy) = proxy { + let mut reqwest_proxy = reqwest::Proxy::all(&proxy.url)?; + if let Some(basic_auth) = &proxy.basic_auth { + reqwest_proxy = + reqwest_proxy.basic_auth(&basic_auth.username, &basic_auth.password); + } + builder = builder.proxy(reqwest_proxy); + } + + builder + .build() + .map_err(|e| generic_error(format!("Unable to build http client: {}", e))) +} diff --git a/ext/url/00_url.js b/ext/url/00_url.js new file mode 100644 index 000000000..f3c12d0c2 --- /dev/null +++ b/ext/url/00_url.js @@ -0,0 +1,623 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +// @ts-check +/// +/// +/// + +"use strict"; + +((window) => { + const core = window.Deno.core; + const webidl = window.__bootstrap.webidl; + const { + ArrayIsArray, + ArrayPrototypeMap, + ArrayPrototypePush, + ArrayPrototypeSome, + ArrayPrototypeSort, + ArrayPrototypeSplice, + ObjectKeys, + StringPrototypeSlice, + Symbol, + SymbolFor, + SymbolIterator, + SymbolToStringTag, + TypeError, + } = window.__bootstrap.primordials; + + const _list = Symbol("list"); + const _urlObject = Symbol("url object"); + + class URLSearchParams { + [_list]; + [_urlObject] = null; + + /** + * @param {string | [string][] | Record} init + */ + constructor(init = "") { + const prefix = "Failed to construct 'URL'"; + init = webidl.converters + ["sequence> or record or USVString"]( + init, + { prefix, context: "Argument 1" }, + ); + this[webidl.brand] = webidl.brand; + + if (typeof init === "string") { + // Overload: USVString + // If init is a string and starts with U+003F (?), + // remove the first code point from init. 
+ if (init[0] == "?") { + init = StringPrototypeSlice(init, 1); + } + this[_list] = core.opSync("op_url_parse_search_params", init); + } else if (ArrayIsArray(init)) { + // Overload: sequence> + this[_list] = ArrayPrototypeMap(init, (pair, i) => { + if (pair.length !== 2) { + throw new TypeError( + `${prefix}: Item ${i + + 0} in the parameter list does have length 2 exactly.`, + ); + } + return [pair[0], pair[1]]; + }); + } else { + // Overload: record + this[_list] = ArrayPrototypeMap( + ObjectKeys(init), + (key) => [key, init[key]], + ); + } + } + + #updateUrlSearch() { + const url = this[_urlObject]; + if (url === null) { + return; + } + const parts = core.opSync("op_url_parse", { + href: url.href, + setSearch: this.toString(), + }); + url[_url] = parts; + } + + /** + * @param {string} name + * @param {string} value + */ + append(name, value) { + webidl.assertBranded(this, URLSearchParams); + const prefix = "Failed to execute 'append' on 'URLSearchParams'"; + webidl.requiredArguments(arguments.length, 2, { prefix }); + name = webidl.converters.USVString(name, { + prefix, + context: "Argument 1", + }); + value = webidl.converters.USVString(value, { + prefix, + context: "Argument 2", + }); + ArrayPrototypePush(this[_list], [name, value]); + this.#updateUrlSearch(); + } + + /** + * @param {string} name + */ + delete(name) { + webidl.assertBranded(this, URLSearchParams); + const prefix = "Failed to execute 'append' on 'URLSearchParams'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + name = webidl.converters.USVString(name, { + prefix, + context: "Argument 1", + }); + const list = this[_list]; + let i = 0; + while (i < list.length) { + if (list[i][0] === name) { + ArrayPrototypeSplice(list, i, 1); + } else { + i++; + } + } + this.#updateUrlSearch(); + } + + /** + * @param {string} name + * @returns {string[]} + */ + getAll(name) { + webidl.assertBranded(this, URLSearchParams); + const prefix = "Failed to execute 'getAll' on 'URLSearchParams'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + name = webidl.converters.USVString(name, { + prefix, + context: "Argument 1", + }); + const values = []; + for (const entry of this[_list]) { + if (entry[0] === name) { + ArrayPrototypePush(values, entry[1]); + } + } + return values; + } + + /** + * @param {string} name + * @return {string | null} + */ + get(name) { + webidl.assertBranded(this, URLSearchParams); + const prefix = "Failed to execute 'get' on 'URLSearchParams'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + name = webidl.converters.USVString(name, { + prefix, + context: "Argument 1", + }); + for (const entry of this[_list]) { + if (entry[0] === name) { + return entry[1]; + } + } + return null; + } + + /** + * @param {string} name + * @return {boolean} + */ + has(name) { + webidl.assertBranded(this, URLSearchParams); + const prefix = "Failed to execute 'has' on 'URLSearchParams'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + name = webidl.converters.USVString(name, { + prefix, + context: "Argument 1", + }); + return ArrayPrototypeSome(this[_list], (entry) => entry[0] === name); + } + + /** + * @param {string} name + * @param {string} value + */ + set(name, value) { + webidl.assertBranded(this, URLSearchParams); + const prefix = "Failed to execute 'set' on 'URLSearchParams'"; + webidl.requiredArguments(arguments.length, 2, { prefix }); + name = webidl.converters.USVString(name, { + prefix, + context: "Argument 1", + }); + value = webidl.converters.USVString(value, { + prefix, + 
context: "Argument 2", + }); + + const list = this[_list]; + + // If there are any name-value pairs whose name is name, in list, + // set the value of the first such name-value pair to value + // and remove the others. + let found = false; + let i = 0; + while (i < list.length) { + if (list[i][0] === name) { + if (!found) { + list[i][1] = value; + found = true; + i++; + } else { + ArrayPrototypeSplice(list, i, 1); + } + } else { + i++; + } + } + + // Otherwise, append a new name-value pair whose name is name + // and value is value, to list. + if (!found) { + ArrayPrototypePush(list, [name, value]); + } + + this.#updateUrlSearch(); + } + + sort() { + webidl.assertBranded(this, URLSearchParams); + ArrayPrototypeSort( + this[_list], + (a, b) => (a[0] === b[0] ? 0 : a[0] > b[0] ? 1 : -1), + ); + this.#updateUrlSearch(); + } + + /** + * @return {string} + */ + toString() { + webidl.assertBranded(this, URLSearchParams); + return core.opSync("op_url_stringify_search_params", this[_list]); + } + + get [SymbolToStringTag]() { + return "URLSearchParams"; + } + } + + webidl.mixinPairIterable("URLSearchParams", URLSearchParams, _list, 0, 1); + + webidl.configurePrototype(URLSearchParams); + + const _url = Symbol("url"); + + class URL { + [_url]; + #queryObject = null; + + /** + * @param {string} url + * @param {string} base + */ + constructor(url, base = undefined) { + const prefix = "Failed to construct 'URL'"; + url = webidl.converters.USVString(url, { prefix, context: "Argument 1" }); + if (base !== undefined) { + base = webidl.converters.USVString(base, { + prefix, + context: "Argument 2", + }); + } + this[webidl.brand] = webidl.brand; + + const parts = core.opSync("op_url_parse", { href: url, baseHref: base }); + this[_url] = parts; + } + + [SymbolFor("Deno.privateCustomInspect")](inspect) { + const object = { + href: this.href, + origin: this.origin, + protocol: this.protocol, + username: this.username, + password: this.password, + host: this.host, + hostname: this.hostname, + port: this.port, + pathname: this.pathname, + hash: this.hash, + search: this.search, + }; + return `${this.constructor.name} ${inspect(object)}`; + } + + #updateSearchParams() { + if (this.#queryObject !== null) { + const params = this.#queryObject[_list]; + const newParams = core.opSync( + "op_url_parse_search_params", + StringPrototypeSlice(this.search, 1), + ); + ArrayPrototypeSplice(params, 0, params.length, ...newParams); + } + } + + /** @return {string} */ + get hash() { + webidl.assertBranded(this, URL); + return this[_url].hash; + } + + /** @param {string} value */ + set hash(value) { + webidl.assertBranded(this, URL); + const prefix = "Failed to set 'hash' on 'URL'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + value = webidl.converters.USVString(value, { + prefix, + context: "Argument 1", + }); + try { + this[_url] = core.opSync("op_url_parse", { + href: this[_url].href, + setHash: value, + }); + } catch { + /* pass */ + } + } + + /** @return {string} */ + get host() { + webidl.assertBranded(this, URL); + return this[_url].host; + } + + /** @param {string} value */ + set host(value) { + webidl.assertBranded(this, URL); + const prefix = "Failed to set 'host' on 'URL'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + value = webidl.converters.USVString(value, { + prefix, + context: "Argument 1", + }); + try { + this[_url] = core.opSync("op_url_parse", { + href: this[_url].href, + setHost: value, + }); + } catch { + /* pass */ + } + } + + /** @return {string} */ + get hostname() { 
+ webidl.assertBranded(this, URL); + return this[_url].hostname; + } + + /** @param {string} value */ + set hostname(value) { + webidl.assertBranded(this, URL); + const prefix = "Failed to set 'hostname' on 'URL'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + value = webidl.converters.USVString(value, { + prefix, + context: "Argument 1", + }); + try { + this[_url] = core.opSync("op_url_parse", { + href: this[_url].href, + setHostname: value, + }); + } catch { + /* pass */ + } + } + + /** @return {string} */ + get href() { + webidl.assertBranded(this, URL); + return this[_url].href; + } + + /** @param {string} value */ + set href(value) { + webidl.assertBranded(this, URL); + const prefix = "Failed to set 'href' on 'URL'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + value = webidl.converters.USVString(value, { + prefix, + context: "Argument 1", + }); + this[_url] = core.opSync("op_url_parse", { + href: value, + }); + this.#updateSearchParams(); + } + + /** @return {string} */ + get origin() { + webidl.assertBranded(this, URL); + return this[_url].origin; + } + + /** @return {string} */ + get password() { + webidl.assertBranded(this, URL); + return this[_url].password; + } + + /** @param {string} value */ + set password(value) { + webidl.assertBranded(this, URL); + const prefix = "Failed to set 'password' on 'URL'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + value = webidl.converters.USVString(value, { + prefix, + context: "Argument 1", + }); + try { + this[_url] = core.opSync("op_url_parse", { + href: this[_url].href, + setPassword: value, + }); + } catch { + /* pass */ + } + } + + /** @return {string} */ + get pathname() { + webidl.assertBranded(this, URL); + return this[_url].pathname; + } + + /** @param {string} value */ + set pathname(value) { + webidl.assertBranded(this, URL); + const prefix = "Failed to set 'pathname' on 'URL'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + value = webidl.converters.USVString(value, { + prefix, + context: "Argument 1", + }); + try { + this[_url] = core.opSync("op_url_parse", { + href: this[_url].href, + setPathname: value, + }); + } catch { + /* pass */ + } + } + + /** @return {string} */ + get port() { + webidl.assertBranded(this, URL); + return this[_url].port; + } + + /** @param {string} value */ + set port(value) { + webidl.assertBranded(this, URL); + const prefix = "Failed to set 'port' on 'URL'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + value = webidl.converters.USVString(value, { + prefix, + context: "Argument 1", + }); + try { + this[_url] = core.opSync("op_url_parse", { + href: this[_url].href, + setPort: value, + }); + } catch { + /* pass */ + } + } + + /** @return {string} */ + get protocol() { + webidl.assertBranded(this, URL); + return this[_url].protocol; + } + + /** @param {string} value */ + set protocol(value) { + webidl.assertBranded(this, URL); + const prefix = "Failed to set 'protocol' on 'URL'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + value = webidl.converters.USVString(value, { + prefix, + context: "Argument 1", + }); + try { + this[_url] = core.opSync("op_url_parse", { + href: this[_url].href, + setProtocol: value, + }); + } catch { + /* pass */ + } + } + + /** @return {string} */ + get search() { + webidl.assertBranded(this, URL); + return this[_url].search; + } + + /** @param {string} value */ + set search(value) { + webidl.assertBranded(this, URL); + const prefix = "Failed to set 'search' on 'URL'"; + 
webidl.requiredArguments(arguments.length, 1, { prefix }); + value = webidl.converters.USVString(value, { + prefix, + context: "Argument 1", + }); + try { + this[_url] = core.opSync("op_url_parse", { + href: this[_url].href, + setSearch: value, + }); + this.#updateSearchParams(); + } catch { + /* pass */ + } + } + + /** @return {string} */ + get username() { + webidl.assertBranded(this, URL); + return this[_url].username; + } + + /** @param {string} value */ + set username(value) { + webidl.assertBranded(this, URL); + const prefix = "Failed to set 'username' on 'URL'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + value = webidl.converters.USVString(value, { + prefix, + context: "Argument 1", + }); + try { + this[_url] = core.opSync("op_url_parse", { + href: this[_url].href, + setUsername: value, + }); + } catch { + /* pass */ + } + } + + /** @return {string} */ + get searchParams() { + if (this.#queryObject == null) { + this.#queryObject = new URLSearchParams(this.search); + this.#queryObject[_urlObject] = this; + } + return this.#queryObject; + } + + /** @return {string} */ + toString() { + webidl.assertBranded(this, URL); + return this[_url].href; + } + + /** @return {string} */ + toJSON() { + webidl.assertBranded(this, URL); + return this[_url].href; + } + + get [SymbolToStringTag]() { + return "URL"; + } + } + + webidl.configurePrototype(URL); + + /** + * This function implements application/x-www-form-urlencoded parsing. + * https://url.spec.whatwg.org/#concept-urlencoded-parser + * @param {Uint8Array} bytes + * @returns {[string, string][]} + */ + function parseUrlEncoded(bytes) { + return core.opSync("op_url_parse_search_params", null, bytes); + } + + webidl + .converters[ + "sequence> or record or USVString" + ] = (V, opts) => { + // Union for (sequence> or record or USVString) + if (webidl.type(V) === "Object" && V !== null) { + if (V[SymbolIterator] !== undefined) { + return webidl.converters["sequence>"](V, opts); + } + return webidl.converters["record"](V, opts); + } + return webidl.converters.USVString(V, opts); + }; + + window.__bootstrap.url = { + URL, + URLSearchParams, + parseUrlEncoded, + }; +})(this); diff --git a/ext/url/Cargo.toml b/ext/url/Cargo.toml new file mode 100644 index 000000000..a76dac2e6 --- /dev/null +++ b/ext/url/Cargo.toml @@ -0,0 +1,28 @@ +# Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +[package] +name = "deno_url" +version = "0.14.0" +authors = ["the Deno authors"] +edition = "2018" +license = "MIT" +readme = "README.md" +repository = "https://github.com/denoland/deno" +description = "URL API implementation for Deno" + +[lib] +path = "lib.rs" + +[dependencies] +deno_core = { version = "0.96.0", path = "../../core" } +idna = "0.2.3" +percent-encoding = "2.1.0" +serde = { version = "1.0.126", features = ["derive"] } + +[dev-dependencies] +deno_bench_util = { version = "0.8.0", path = "../../bench_util" } +deno_webidl = { version = "0.14.0", path = "../webidl" } + +[[bench]] +name = "url_ops" +harness = false diff --git a/ext/url/README.md b/ext/url/README.md new file mode 100644 index 000000000..991dd8b20 --- /dev/null +++ b/ext/url/README.md @@ -0,0 +1,5 @@ +# deno_url + +This crate implements the URL API for Deno. 
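+
+A minimal usage sketch of the `URL` and `URLSearchParams` globals this crate
+backs (values are illustrative only):
+
+```ts
+const url = new URL("/path?a=1", "https://example.com");
+url.searchParams.append("b", "2");
+console.log(url.href); // "https://example.com/path?a=1&b=2"
+```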
+ +Spec: https://url.spec.whatwg.org/ diff --git a/ext/url/benches/url_ops.rs b/ext/url/benches/url_ops.rs new file mode 100644 index 000000000..ed27b6f80 --- /dev/null +++ b/ext/url/benches/url_ops.rs @@ -0,0 +1,27 @@ +use deno_bench_util::bench_js_sync; +use deno_bench_util::bench_or_profile; +use deno_bench_util::bencher::{benchmark_group, Bencher}; + +use deno_core::Extension; + +fn setup() -> Vec<Extension> { + vec![ + deno_webidl::init(), + deno_url::init(), + Extension::builder() + .js(vec![( + "setup", + Box::new(|| { + Ok(r#"const { URL } = globalThis.__bootstrap.url;"#.to_owned()) + }), + )]) + .build(), + ] +} + +fn bench_url_parse(b: &mut Bencher) { + bench_js_sync(b, r#"new URL(`http://www.google.com/`);"#, setup); +} + +benchmark_group!(benches, bench_url_parse,); +bench_or_profile!(benches); diff --git a/ext/url/internal.d.ts b/ext/url/internal.d.ts new file mode 100644 index 000000000..ec2c2688c --- /dev/null +++ b/ext/url/internal.d.ts @@ -0,0 +1,14 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +/// +/// + +declare namespace globalThis { + declare namespace __bootstrap { + declare var url: { + URL: typeof URL; + URLSearchParams: typeof URLSearchParams; + parseUrlEncoded(bytes: Uint8Array): [string, string][]; + }; + } +} diff --git a/ext/url/lib.deno_url.d.ts b/ext/url/lib.deno_url.d.ts new file mode 100644 index 000000000..3f9745352 --- /dev/null +++ b/ext/url/lib.deno_url.d.ts @@ -0,0 +1,175 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +// deno-lint-ignore-file no-explicit-any + +/// +/// + +declare class URLSearchParams { + constructor( + init?: string[][] | Record<string, string> | string | URLSearchParams, + ); + static toString(): string; + + /** Appends a specified key/value pair as a new search parameter. + * + * ```ts + * let searchParams = new URLSearchParams(); + * searchParams.append('name', 'first'); + * searchParams.append('name', 'second'); + * ``` + */ + append(name: string, value: string): void; + + /** Deletes the given search parameter and its associated value, + * from the list of all search parameters. + * + * ```ts + * let searchParams = new URLSearchParams([['name', 'value']]); + * searchParams.delete('name'); + * ``` + */ + delete(name: string): void; + + /** Returns all the values associated with a given search parameter + * as an array. + * + * ```ts + * searchParams.getAll('name'); + * ``` + */ + getAll(name: string): string[]; + + /** Returns the first value associated to the given search parameter. + * + * ```ts + * searchParams.get('name'); + * ``` + */ + get(name: string): string | null; + + /** Returns a Boolean that indicates whether a parameter with the + * specified name exists. + * + * ```ts + * searchParams.has('name'); + * ``` + */ + has(name: string): boolean; + + /** Sets the value associated with a given search parameter to the + * given value. If there were several matching values, this method + * deletes the others. If the search parameter doesn't exist, this + * method creates it. + * + * ```ts + * searchParams.set('name', 'value'); + * ``` + */ + set(name: string, value: string): void; + + /** Sort all key/value pairs contained in this object in place and + * return undefined. The sort order is according to Unicode code + * points of the keys. + * + * ```ts + * searchParams.sort(); + * ``` + */ + sort(): void; + + /** Calls a function for each element contained in this object in + * place and return undefined.
Optionally accepts an object to use + * as this when executing callback as second argument. + * + * ```ts + * const params = new URLSearchParams([["a", "b"], ["c", "d"]]); + * params.forEach((value, key, parent) => { + * console.log(value, key, parent); + * }); + * ``` + * + */ + forEach( + callbackfn: (value: string, key: string, parent: this) => void, + thisArg?: any, + ): void; + + /** Returns an iterator allowing to go through all keys contained + * in this object. + * + * ```ts + * const params = new URLSearchParams([["a", "b"], ["c", "d"]]); + * for (const key of params.keys()) { + * console.log(key); + * } + * ``` + */ + keys(): IterableIterator<string>; + + /** Returns an iterator allowing to go through all values contained + * in this object. + * + * ```ts + * const params = new URLSearchParams([["a", "b"], ["c", "d"]]); + * for (const value of params.values()) { + * console.log(value); + * } + * ``` + */ + values(): IterableIterator<string>; + + /** Returns an iterator allowing to go through all key/value + * pairs contained in this object. + * + * ```ts + * const params = new URLSearchParams([["a", "b"], ["c", "d"]]); + * for (const [key, value] of params.entries()) { + * console.log(key, value); + * } + * ``` + */ + entries(): IterableIterator<[string, string]>; + + /** Returns an iterator allowing to go through all key/value + * pairs contained in this object. + * + * ```ts + * const params = new URLSearchParams([["a", "b"], ["c", "d"]]); + * for (const [key, value] of params) { + * console.log(key, value); + * } + * ``` + */ + [Symbol.iterator](): IterableIterator<[string, string]>; + + /** Returns a query string suitable for use in a URL. + * + * ```ts + * searchParams.toString(); + * ``` + */ + toString(): string; +} + +/** The URL interface represents an object providing static methods used for creating object URLs. */ +declare class URL { + constructor(url: string, base?: string | URL); + static createObjectURL(blob: Blob): string; + static revokeObjectURL(url: string): void; + + hash: string; + host: string; + hostname: string; + href: string; + toString(): string; + readonly origin: string; + password: string; + pathname: string; + port: string; + protocol: string; + search: string; + readonly searchParams: URLSearchParams; + username: string; + toJSON(): string; +} diff --git a/ext/url/lib.rs b/ext/url/lib.rs new file mode 100644 index 000000000..8ccc59eb8 --- /dev/null +++ b/ext/url/lib.rs @@ -0,0 +1,173 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +use deno_core::error::generic_error; +use deno_core::error::type_error; +use deno_core::error::uri_error; +use deno_core::error::AnyError; +use deno_core::include_js_files; +use deno_core::op_sync; +use deno_core::url::form_urlencoded; +use deno_core::url::quirks; +use deno_core::url::Url; +use deno_core::Extension; +use deno_core::ZeroCopyBuf; +use serde::Deserialize; +use serde::Serialize; +use std::panic::catch_unwind; +use std::path::PathBuf; + +pub fn init() -> Extension { + Extension::builder() + .js(include_js_files!( + prefix "deno:ext/url", + "00_url.js", + )) + .ops(vec![ + ("op_url_parse", op_sync(op_url_parse)), + ( + "op_url_parse_search_params", + op_sync(op_url_parse_search_params), + ), + ( + "op_url_stringify_search_params", + op_sync(op_url_stringify_search_params), + ), + ]) + .build() +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct UrlParseArgs { + href: String, + base_href: Option<String>, + // If one of the following are present, this is a setter call.
Apply the + // proper `Url::set_*()` method after (re)parsing `href`. + set_hash: Option<String>, + set_host: Option<String>, + set_hostname: Option<String>, + set_password: Option<String>, + set_pathname: Option<String>, + set_port: Option<String>, + set_protocol: Option<String>, + set_search: Option<String>, + set_username: Option<String>, +} + +#[derive(Serialize)] +pub struct UrlParts { + href: String, + hash: String, + host: String, + hostname: String, + origin: String, + password: String, + pathname: String, + port: String, + protocol: String, + search: String, + username: String, +} + +/// Parse `UrlParseArgs::href` with an optional `UrlParseArgs::base_href`, or an +/// optional part to "set" after parsing. Return `UrlParts`. +pub fn op_url_parse( + _state: &mut deno_core::OpState, + args: UrlParseArgs, + _: (), +) -> Result<UrlParts, AnyError> { + let base_url = args + .base_href + .as_ref() + .map(|b| Url::parse(b).map_err(|_| type_error("Invalid base URL"))) + .transpose()?; + let mut url = Url::options() + .base_url(base_url.as_ref()) + .parse(&args.href) + .map_err(|_| type_error("Invalid URL"))?; + + if let Some(hash) = args.set_hash.as_ref() { + quirks::set_hash(&mut url, hash); + } else if let Some(host) = args.set_host.as_ref() { + quirks::set_host(&mut url, host).map_err(|_| uri_error("Invalid host"))?; + } else if let Some(hostname) = args.set_hostname.as_ref() { + quirks::set_hostname(&mut url, hostname) + .map_err(|_| uri_error("Invalid hostname"))?; + } else if let Some(password) = args.set_password.as_ref() { + quirks::set_password(&mut url, password) + .map_err(|_| uri_error("Invalid password"))?; + } else if let Some(pathname) = args.set_pathname.as_ref() { + quirks::set_pathname(&mut url, pathname); + } else if let Some(port) = args.set_port.as_ref() { + quirks::set_port(&mut url, port).map_err(|_| uri_error("Invalid port"))?; + } else if let Some(protocol) = args.set_protocol.as_ref() { + quirks::set_protocol(&mut url, protocol) + .map_err(|_| uri_error("Invalid protocol"))?; + } else if let Some(search) = args.set_search.as_ref() { + quirks::set_search(&mut url, search); + } else if let Some(username) = args.set_username.as_ref() { + quirks::set_username(&mut url, username) + .map_err(|_| uri_error("Invalid username"))?; + } + + // TODO(nayeemrmn): Panic that occurs in rust-url for the `non-spec:` + // url-constructor wpt tests: https://github.com/servo/rust-url/issues/670.
+ let username = catch_unwind(|| quirks::username(&url)).map_err(|_| { + generic_error(format!( + "Internal error while parsing \"{}\"{}, \ + see https://github.com/servo/rust-url/issues/670", + args.href, + args + .base_href + .map(|b| format!(" against \"{}\"", b)) + .unwrap_or_default() + )) + })?; + Ok(UrlParts { + href: quirks::href(&url).to_string(), + hash: quirks::hash(&url).to_string(), + host: quirks::host(&url).to_string(), + hostname: quirks::hostname(&url).to_string(), + origin: quirks::origin(&url), + password: quirks::password(&url).to_string(), + pathname: quirks::pathname(&url).to_string(), + port: quirks::port(&url).to_string(), + protocol: quirks::protocol(&url).to_string(), + search: quirks::search(&url).to_string(), + username: username.to_string(), + }) +} + +pub fn op_url_parse_search_params( + _state: &mut deno_core::OpState, + args: Option<String>, + zero_copy: Option<ZeroCopyBuf>, +) -> Result<Vec<(String, String)>, AnyError> { + let params = match (args, zero_copy) { + (None, Some(zero_copy)) => form_urlencoded::parse(&zero_copy) + .into_iter() + .map(|(k, v)| (k.as_ref().to_owned(), v.as_ref().to_owned())) + .collect(), + (Some(args), None) => form_urlencoded::parse(args.as_bytes()) + .into_iter() + .map(|(k, v)| (k.as_ref().to_owned(), v.as_ref().to_owned())) + .collect(), + _ => return Err(type_error("invalid parameters")), + }; + Ok(params) +} + +pub fn op_url_stringify_search_params( + _state: &mut deno_core::OpState, + args: Vec<(String, String)>, + _: (), +) -> Result<String, AnyError> { + let search = form_urlencoded::Serializer::new(String::new()) + .extend_pairs(args) + .finish(); + Ok(search) +} + +pub fn get_declaration() -> PathBuf { + PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("lib.deno_url.d.ts") +} diff --git a/ext/web/00_infra.js b/ext/web/00_infra.js new file mode 100644 index 000000000..7c065bcd3 --- /dev/null +++ b/ext/web/00_infra.js @@ -0,0 +1,264 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+ +// @ts-check +/// +/// +/// +/// + +"use strict"; + +((window) => { + const core = Deno.core; + const { + RegExp, + ArrayPrototypeMap, + StringPrototypeCharCodeAt, + NumberPrototypeToString, + StringPrototypePadStart, + TypeError, + ArrayPrototypeJoin, + StringPrototypeCharAt, + StringPrototypeSlice, + String, + StringPrototypeReplace, + StringPrototypeToUpperCase, + StringPrototypeToLowerCase, + StringPrototypeSubstring, + } = window.__bootstrap.primordials; + + const ASCII_DIGIT = ["\u0030-\u0039"]; + const ASCII_UPPER_ALPHA = ["\u0041-\u005A"]; + const ASCII_LOWER_ALPHA = ["\u0061-\u007A"]; + const ASCII_ALPHA = [...ASCII_UPPER_ALPHA, ...ASCII_LOWER_ALPHA]; + const ASCII_ALPHANUMERIC = [...ASCII_DIGIT, ...ASCII_ALPHA]; + + const HTTP_TAB_OR_SPACE = ["\u0009", "\u0020"]; + const HTTP_WHITESPACE = ["\u000A", "\u000D", ...HTTP_TAB_OR_SPACE]; + + const HTTP_TOKEN_CODE_POINT = [ + "\u0021", + "\u0023", + "\u0024", + "\u0025", + "\u0026", + "\u0027", + "\u002A", + "\u002B", + "\u002D", + "\u002E", + "\u005E", + "\u005F", + "\u0060", + "\u007C", + "\u007E", + ...ASCII_ALPHANUMERIC, + ]; + const HTTP_TOKEN_CODE_POINT_RE = new RegExp( + `^[${regexMatcher(HTTP_TOKEN_CODE_POINT)}]+$`, + ); + const HTTP_QUOTED_STRING_TOKEN_POINT = [ + "\u0009", + "\u0020-\u007E", + "\u0080-\u00FF", + ]; + const HTTP_QUOTED_STRING_TOKEN_POINT_RE = new RegExp( + `^[${regexMatcher(HTTP_QUOTED_STRING_TOKEN_POINT)}]+$`, + ); + const HTTP_TAB_OR_SPACE_MATCHER = regexMatcher(HTTP_TAB_OR_SPACE); + const HTTP_TAB_OR_SPACE_PREFIX_RE = new RegExp( + `^[${HTTP_TAB_OR_SPACE_MATCHER}]+`, + "g", + ); + const HTTP_TAB_OR_SPACE_SUFFIX_RE = new RegExp( + `[${HTTP_TAB_OR_SPACE_MATCHER}]+$`, + "g", + ); + const HTTP_WHITESPACE_MATCHER = regexMatcher(HTTP_WHITESPACE); + const HTTP_WHITESPACE_PREFIX_RE = new RegExp( + `^[${HTTP_WHITESPACE_MATCHER}]+`, + "g", + ); + const HTTP_WHITESPACE_SUFFIX_RE = new RegExp( + `[${HTTP_WHITESPACE_MATCHER}]+$`, + "g", + ); + + /** + * Turn a string of chars into a regex safe matcher. 
+ * @param {string[]} chars + * @returns {string} + */ + function regexMatcher(chars) { + const matchers = ArrayPrototypeMap(chars, (char) => { + if (char.length === 1) { + const a = StringPrototypePadStart( + NumberPrototypeToString(StringPrototypeCharCodeAt(char, 0), 16), + 4, + "0", + ); + return `\\u${a}`; + } else if (char.length === 3 && char[1] === "-") { + const a = StringPrototypePadStart( + NumberPrototypeToString(StringPrototypeCharCodeAt(char, 0), 16), + 4, + "0", + ); + const b = StringPrototypePadStart( + NumberPrototypeToString(StringPrototypeCharCodeAt(char, 2), 16), + 4, + "0", + ); + return `\\u${a}-\\u${b}`; + } else { + throw TypeError("unreachable"); + } + }); + return ArrayPrototypeJoin(matchers, ""); + } + + /** + * https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points + * @param {string} input + * @param {number} position + * @param {(char: string) => boolean} condition + * @returns {{result: string, position: number}} + */ + function collectSequenceOfCodepoints(input, position, condition) { + const start = position; + for ( + let c = StringPrototypeCharAt(input, position); + position < input.length && condition(c); + c = StringPrototypeCharAt(input, ++position) + ); + return { result: StringPrototypeSlice(input, start, position), position }; + } + + /** + * @param {string} s + * @returns {string} + */ + function byteUpperCase(s) { + return StringPrototypeReplace( + String(s), + /[a-z]/g, + function byteUpperCaseReplace(c) { + return StringPrototypeToUpperCase(c); + }, + ); + } + + /** + * @param {string} s + * @returns {string} + */ + function byteLowerCase(s) { + return StringPrototypeReplace( + String(s), + /[A-Z]/g, + function byteUpperCaseReplace(c) { + return StringPrototypeToLowerCase(c); + }, + ); + } + + /** + * https://fetch.spec.whatwg.org/#collect-an-http-quoted-string + * @param {string} input + * @param {number} position + * @param {boolean} extractValue + * @returns {{result: string, position: number}} + */ + function collectHttpQuotedString(input, position, extractValue) { + // 1. + const positionStart = position; + // 2. + let value = ""; + // 3. + if (input[position] !== "\u0022") throw new TypeError('must be "'); + // 4. + position++; + // 5. + while (true) { + // 5.1. + const res = collectSequenceOfCodepoints( + input, + position, + (c) => c !== "\u0022" && c !== "\u005C", + ); + value += res.result; + position = res.position; + // 5.2. + if (position >= input.length) break; + // 5.3. + const quoteOrBackslash = input[position]; + // 5.4. + position++; + // 5.5. + if (quoteOrBackslash === "\u005C") { + // 5.5.1. + if (position >= input.length) { + value += "\u005C"; + break; + } + // 5.5.2. + value += input[position]; + // 5.5.3. + position++; + } else { // 5.6. + // 5.6.1 + if (quoteOrBackslash !== "\u0022") throw new TypeError('must be "'); + // 5.6.2 + break; + } + } + // 6. + if (extractValue) return { result: value, position }; + // 7. 
+ return { + result: StringPrototypeSubstring(input, positionStart, position + 1), + position, + }; + } + + /** + * @param {Uint8Array} data + * @returns {string} + */ + function forgivingBase64Encode(data) { + return core.opSync("op_base64_encode", data); + } + + /** + * @param {string} data + * @returns {Uint8Array} + */ + function forgivingBase64Decode(data) { + return core.opSync("op_base64_decode", data); + } + + window.__bootstrap.infra = { + collectSequenceOfCodepoints, + ASCII_DIGIT, + ASCII_UPPER_ALPHA, + ASCII_LOWER_ALPHA, + ASCII_ALPHA, + ASCII_ALPHANUMERIC, + HTTP_TAB_OR_SPACE, + HTTP_WHITESPACE, + HTTP_TOKEN_CODE_POINT, + HTTP_TOKEN_CODE_POINT_RE, + HTTP_QUOTED_STRING_TOKEN_POINT, + HTTP_QUOTED_STRING_TOKEN_POINT_RE, + HTTP_TAB_OR_SPACE_PREFIX_RE, + HTTP_TAB_OR_SPACE_SUFFIX_RE, + HTTP_WHITESPACE_PREFIX_RE, + HTTP_WHITESPACE_SUFFIX_RE, + regexMatcher, + byteUpperCase, + byteLowerCase, + collectHttpQuotedString, + forgivingBase64Encode, + forgivingBase64Decode, + }; +})(globalThis); diff --git a/ext/web/01_dom_exception.js b/ext/web/01_dom_exception.js new file mode 100644 index 000000000..c6f60ae2f --- /dev/null +++ b/ext/web/01_dom_exception.js @@ -0,0 +1,171 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +// @ts-check +/// +/// +/// +/// +/// + +"use strict"; + +((window) => { + const { + ErrorPrototype, + ObjectDefineProperty, + ObjectEntries, + ObjectSetPrototypeOf, + SymbolFor, + SymbolToStringTag, + } = window.__bootstrap.primordials; + const webidl = window.__bootstrap.webidl; + const consoleInternal = window.__bootstrap.console; + + // Defined in WebIDL 4.3. + // https://heycam.github.io/webidl/#idl-DOMException + const INDEX_SIZE_ERR = 1; + const DOMSTRING_SIZE_ERR = 2; + const HIERARCHY_REQUEST_ERR = 3; + const WRONG_DOCUMENT_ERR = 4; + const INVALID_CHARACTER_ERR = 5; + const NO_DATA_ALLOWED_ERR = 6; + const NO_MODIFICATION_ALLOWED_ERR = 7; + const NOT_FOUND_ERR = 8; + const NOT_SUPPORTED_ERR = 9; + const INUSE_ATTRIBUTE_ERR = 10; + const INVALID_STATE_ERR = 11; + const SYNTAX_ERR = 12; + const INVALID_MODIFICATION_ERR = 13; + const NAMESPACE_ERR = 14; + const INVALID_ACCESS_ERR = 15; + const VALIDATION_ERR = 16; + const TYPE_MISMATCH_ERR = 17; + const SECURITY_ERR = 18; + const NETWORK_ERR = 19; + const ABORT_ERR = 20; + const URL_MISMATCH_ERR = 21; + const QUOTA_EXCEEDED_ERR = 22; + const TIMEOUT_ERR = 23; + const INVALID_NODE_TYPE_ERR = 24; + const DATA_CLONE_ERR = 25; + + // Defined in WebIDL 2.8.1. + // https://heycam.github.io/webidl/#dfn-error-names-table + /** @type {Record} */ + const nameToCodeMapping = { + IndexSizeError: INDEX_SIZE_ERR, + HierarchyRequestError: HIERARCHY_REQUEST_ERR, + WrongDocumentError: WRONG_DOCUMENT_ERR, + InvalidCharacterError: INVALID_CHARACTER_ERR, + NoModificationAllowedError: NO_MODIFICATION_ALLOWED_ERR, + NotFoundError: NOT_FOUND_ERR, + NotSupportedError: NOT_SUPPORTED_ERR, + InUseAttributeError: INUSE_ATTRIBUTE_ERR, + InvalidStateError: INVALID_STATE_ERR, + SyntaxError: SYNTAX_ERR, + InvalidModificationError: INVALID_MODIFICATION_ERR, + NamespaceError: NAMESPACE_ERR, + InvalidAccessError: INVALID_ACCESS_ERR, + TypeMismatchError: TYPE_MISMATCH_ERR, + SecurityError: SECURITY_ERR, + NetworkError: NETWORK_ERR, + AbortError: ABORT_ERR, + URLMismatchError: URL_MISMATCH_ERR, + QuotaExceededError: QUOTA_EXCEEDED_ERR, + TimeoutError: TIMEOUT_ERR, + InvalidNodeTypeError: INVALID_NODE_TYPE_ERR, + DataCloneError: DATA_CLONE_ERR, + }; + + // Defined in WebIDL 4.3. 
+ // https://heycam.github.io/webidl/#idl-DOMException + class DOMException { + #message = ""; + #name = ""; + #code = 0; + + constructor(message = "", name = "Error") { + this.#message = webidl.converters.DOMString(message, { + prefix: "Failed to construct 'DOMException'", + context: "Argument 1", + }); + this.#name = webidl.converters.DOMString(name, { + prefix: "Failed to construct 'DOMException'", + context: "Argument 2", + }); + this.#code = nameToCodeMapping[this.#name] ?? 0; + } + + get message() { + return this.#message; + } + + get name() { + return this.#name; + } + + get code() { + return this.#code; + } + + get [SymbolToStringTag]() { + return "DOMException"; + } + + [SymbolFor("Deno.customInspect")](inspect) { + if (this instanceof DOMException) { + return `DOMException: ${this.#message}`; + } else { + return inspect(consoleInternal.createFilteredInspectProxy({ + object: this, + evaluate: false, + keys: [ + "message", + "name", + "code", + ], + })); + } + } + } + + ObjectSetPrototypeOf(DOMException.prototype, ErrorPrototype); + + webidl.configurePrototype(DOMException); + + for ( + const [key, value] of ObjectEntries({ + INDEX_SIZE_ERR, + DOMSTRING_SIZE_ERR, + HIERARCHY_REQUEST_ERR, + WRONG_DOCUMENT_ERR, + INVALID_CHARACTER_ERR, + NO_DATA_ALLOWED_ERR, + NO_MODIFICATION_ALLOWED_ERR, + NOT_FOUND_ERR, + NOT_SUPPORTED_ERR, + INUSE_ATTRIBUTE_ERR, + INVALID_STATE_ERR, + SYNTAX_ERR, + INVALID_MODIFICATION_ERR, + NAMESPACE_ERR, + INVALID_ACCESS_ERR, + VALIDATION_ERR, + TYPE_MISMATCH_ERR, + SECURITY_ERR, + NETWORK_ERR, + ABORT_ERR, + URL_MISMATCH_ERR, + QUOTA_EXCEEDED_ERR, + TIMEOUT_ERR, + INVALID_NODE_TYPE_ERR, + DATA_CLONE_ERR, + }) + ) { + const desc = { value, enumerable: true }; + ObjectDefineProperty(DOMException, key, desc); + ObjectDefineProperty(DOMException.prototype, key, desc); + } + + window.__bootstrap.domException = { DOMException }; +})(this); diff --git a/ext/web/01_mimesniff.js b/ext/web/01_mimesniff.js new file mode 100644 index 000000000..360d1ffe4 --- /dev/null +++ b/ext/web/01_mimesniff.js @@ -0,0 +1,211 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +// @ts-check +/// +/// +/// +/// + +"use strict"; + +((window) => { + const { + ArrayPrototypeIncludes, + Map, + MapPrototypeHas, + MapPrototypeSet, + RegExpPrototypeTest, + StringPrototypeReplaceAll, + StringPrototypeToLowerCase, + } = window.__bootstrap.primordials; + const { + collectSequenceOfCodepoints, + HTTP_WHITESPACE, + HTTP_WHITESPACE_PREFIX_RE, + HTTP_WHITESPACE_SUFFIX_RE, + HTTP_QUOTED_STRING_TOKEN_POINT_RE, + HTTP_TOKEN_CODE_POINT_RE, + collectHttpQuotedString, + } = window.__bootstrap.infra; + + /** + * @typedef MimeType + * @property {string} type + * @property {string} subtype + * @property {Map} parameters + */ + + /** + * @param {string} input + * @returns {MimeType | null} + */ + function parseMimeType(input) { + // 1. + input = StringPrototypeReplaceAll(input, HTTP_WHITESPACE_PREFIX_RE, ""); + input = StringPrototypeReplaceAll(input, HTTP_WHITESPACE_SUFFIX_RE, ""); + + // 2. + let position = 0; + const endOfInput = input.length; + + // 3. + const res1 = collectSequenceOfCodepoints( + input, + position, + (c) => c != "\u002F", + ); + const type = res1.result; + position = res1.position; + + // 4. + if (type === "" || !RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, type)) { + return null; + } + + // 5. + if (position >= endOfInput) return null; + + // 6. + position++; + + // 7. 
+ const res2 = collectSequenceOfCodepoints( + input, + position, + (c) => c != "\u003B", + ); + let subtype = res2.result; + position = res2.position; + + // 8. + subtype = StringPrototypeReplaceAll(subtype, HTTP_WHITESPACE_SUFFIX_RE, ""); + + // 9. + if ( + subtype === "" || !RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, subtype) + ) { + return null; + } + + // 10. + const mimeType = { + type: StringPrototypeToLowerCase(type), + subtype: StringPrototypeToLowerCase(subtype), + /** @type {Map} */ + parameters: new Map(), + }; + + // 11. + while (position < endOfInput) { + // 11.1. + position++; + + // 11.2. + const res1 = collectSequenceOfCodepoints( + input, + position, + (c) => ArrayPrototypeIncludes(HTTP_WHITESPACE, c), + ); + position = res1.position; + + // 11.3. + const res2 = collectSequenceOfCodepoints( + input, + position, + (c) => c !== "\u003B" && c !== "\u003D", + ); + let parameterName = res2.result; + position = res2.position; + + // 11.4. + parameterName = StringPrototypeToLowerCase(parameterName); + + // 11.5. + if (position < endOfInput) { + if (input[position] == "\u003B") continue; + position++; + } + + // 11.6. + if (position >= endOfInput) break; + + // 11.7. + let parameterValue = null; + + // 11.8. + if (input[position] === "\u0022") { + // 11.8.1. + const res = collectHttpQuotedString(input, position, true); + parameterValue = res.result; + position = res.position; + + // 11.8.2. + position++; + } else { // 11.9. + // 11.9.1. + const res = collectSequenceOfCodepoints( + input, + position, + (c) => c !== "\u003B", + ); + parameterValue = res.result; + position = res.position; + + // 11.9.2. + parameterValue = StringPrototypeReplaceAll( + parameterValue, + HTTP_WHITESPACE_SUFFIX_RE, + "", + ); + + // 11.9.3. + if (parameterValue === "") continue; + } + + // 11.10. + if ( + parameterName !== "" && + RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, parameterName) && + RegExpPrototypeTest( + HTTP_QUOTED_STRING_TOKEN_POINT_RE, + parameterValue, + ) && + !MapPrototypeHas(mimeType.parameters, parameterName) + ) { + MapPrototypeSet(mimeType.parameters, parameterName, parameterValue); + } + } + + // 12. + return mimeType; + } + + /** + * @param {MimeType} mimeType + * @returns {string} + */ + function essence(mimeType) { + return `${mimeType.type}/${mimeType.subtype}`; + } + + /** + * @param {MimeType} mimeType + * @returns {string} + */ + function serializeMimeType(mimeType) { + let serialization = essence(mimeType); + for (const param of mimeType.parameters) { + serialization += `;${param[0]}=`; + let value = param[1]; + if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, value)) { + value = StringPrototypeReplaceAll(value, "\\", "\\\\"); + value = StringPrototypeReplaceAll(value, '"', '\\"'); + value = `"${value}"`; + } + serialization += value; + } + return serialization; + } + + window.__bootstrap.mimesniff = { parseMimeType, essence, serializeMimeType }; +})(this); diff --git a/ext/web/02_event.js b/ext/web/02_event.js new file mode 100644 index 000000000..4cca20e00 --- /dev/null +++ b/ext/web/02_event.js @@ -0,0 +1,1294 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +// This module follows most of the WHATWG Living Standard for the DOM logic. +// Many parts of the DOM are not implemented in Deno, but the logic for those +// parts still exists. This means you will observe a lot of strange structures +// and impossible logic branches based on what Deno currently supports. 
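+//
+// A minimal usage sketch of the Event/EventTarget surface implemented below
+// (illustrative only):
+//
+//   const target = new EventTarget();
+//   target.addEventListener("ping", (event) => console.log(event.type));
+//   target.dispatchEvent(new Event("ping"));
+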
+"use strict"; + +((window) => { + const webidl = window.__bootstrap.webidl; + const { DOMException } = window.__bootstrap.domException; + const consoleInternal = window.__bootstrap.console; + const { + ArrayPrototypeFilter, + ArrayPrototypeIncludes, + ArrayPrototypeIndexOf, + ArrayPrototypeMap, + ArrayPrototypePush, + ArrayPrototypeSlice, + ArrayPrototypeSplice, + ArrayPrototypeUnshift, + Boolean, + DateNow, + Error, + FunctionPrototypeCall, + Map, + MapPrototypeGet, + MapPrototypeSet, + ObjectCreate, + ObjectDefineProperty, + ObjectGetOwnPropertyDescriptor, + ReflectDefineProperty, + Symbol, + SymbolFor, + SymbolToStringTag, + TypeError, + WeakMap, + WeakMapPrototypeGet, + WeakMapPrototypeSet, + } = window.__bootstrap.primordials; + + // accessors for non runtime visible data + + function getDispatched(event) { + return Boolean(event[_dispatched]); + } + + function getPath(event) { + return event[_path] ?? []; + } + + function getStopImmediatePropagation(event) { + return Boolean(event[_stopImmediatePropagationFlag]); + } + + function setCurrentTarget( + event, + value, + ) { + event[_attributes].currentTarget = value; + } + + function setIsTrusted(event, value) { + event[_isTrusted] = value; + } + + function setDispatched(event, value) { + event[_dispatched] = value; + } + + function setEventPhase(event, value) { + event[_attributes].eventPhase = value; + } + + function setInPassiveListener(event, value) { + event[_inPassiveListener] = value; + } + + function setPath(event, value) { + event[_path] = value; + } + + function setRelatedTarget( + event, + value, + ) { + event[_attributes].relatedTarget = value; + } + + function setTarget(event, value) { + event[_attributes].target = value; + } + + function setStopImmediatePropagation( + event, + value, + ) { + event[_stopImmediatePropagationFlag] = value; + } + + // Type guards that widen the event type + + function hasRelatedTarget( + event, + ) { + return "relatedTarget" in event; + } + + const isTrusted = ObjectGetOwnPropertyDescriptor({ + get isTrusted() { + return this[_isTrusted]; + }, + }, "isTrusted").get; + + const eventInitConverter = webidl.createDictionaryConverter("EventInit", [{ + key: "bubbles", + defaultValue: false, + converter: webidl.converters.boolean, + }, { + key: "cancelable", + defaultValue: false, + converter: webidl.converters.boolean, + }, { + key: "composed", + defaultValue: false, + converter: webidl.converters.boolean, + }]); + + const _attributes = Symbol("[[attributes]]"); + const _canceledFlag = Symbol("[[canceledFlag]]"); + const _stopPropagationFlag = Symbol("[[stopPropagationFlag]]"); + const _stopImmediatePropagationFlag = Symbol( + "[[stopImmediatePropagationFlag]]", + ); + const _inPassiveListener = Symbol("[[inPassiveListener]]"); + const _dispatched = Symbol("[[dispatched]]"); + const _isTrusted = Symbol("[[isTrusted]]"); + const _path = Symbol("[[path]]"); + + class Event { + [_attributes] = {}; + [_canceledFlag] = false; + [_stopPropagationFlag] = false; + [_stopImmediatePropagationFlag] = false; + [_inPassiveListener] = false; + [_dispatched] = false; + [_isTrusted] = false; + [_path] = []; + + constructor(type, eventInitDict = {}) { + webidl.requiredArguments(arguments.length, 1, { + prefix: "Failed to construct 'Event'", + }); + type = webidl.converters.DOMString(type, { + prefix: "Failed to construct 'Event'", + context: "Argument 1", + }); + const eventInit = eventInitConverter(eventInitDict, { + prefix: "Failed to construct 'Event'", + context: "Argument 2", + }); + this[_attributes] = { + 
type, + ...eventInit, + currentTarget: null, + eventPhase: Event.NONE, + target: null, + timeStamp: DateNow(), + }; + ReflectDefineProperty(this, "isTrusted", { + enumerable: true, + get: isTrusted, + }); + } + + [SymbolFor("Deno.privateCustomInspect")](inspect) { + return inspect(consoleInternal.createFilteredInspectProxy({ + object: this, + evaluate: this instanceof Event, + keys: EVENT_PROPS, + })); + } + + get type() { + return this[_attributes].type; + } + + get target() { + return this[_attributes].target; + } + + get srcElement() { + return null; + } + + set srcElement(_) { + // this member is deprecated + } + + get currentTarget() { + return this[_attributes].currentTarget; + } + + composedPath() { + const path = this[_path]; + if (path.length === 0) { + return []; + } + + if (!this.currentTarget) { + throw new Error("assertion error"); + } + const composedPath = [ + { + item: this.currentTarget, + itemInShadowTree: false, + relatedTarget: null, + rootOfClosedTree: false, + slotInClosedTree: false, + target: null, + touchTargetList: [], + }, + ]; + + let currentTargetIndex = 0; + let currentTargetHiddenSubtreeLevel = 0; + + for (let index = path.length - 1; index >= 0; index--) { + const { item, rootOfClosedTree, slotInClosedTree } = path[index]; + + if (rootOfClosedTree) { + currentTargetHiddenSubtreeLevel++; + } + + if (item === this.currentTarget) { + currentTargetIndex = index; + break; + } + + if (slotInClosedTree) { + currentTargetHiddenSubtreeLevel--; + } + } + + let currentHiddenLevel = currentTargetHiddenSubtreeLevel; + let maxHiddenLevel = currentTargetHiddenSubtreeLevel; + + for (let i = currentTargetIndex - 1; i >= 0; i--) { + const { item, rootOfClosedTree, slotInClosedTree } = path[i]; + + if (rootOfClosedTree) { + currentHiddenLevel++; + } + + if (currentHiddenLevel <= maxHiddenLevel) { + ArrayPrototypeUnshift(composedPath, { + item, + itemInShadowTree: false, + relatedTarget: null, + rootOfClosedTree: false, + slotInClosedTree: false, + target: null, + touchTargetList: [], + }); + } + + if (slotInClosedTree) { + currentHiddenLevel--; + + if (currentHiddenLevel < maxHiddenLevel) { + maxHiddenLevel = currentHiddenLevel; + } + } + } + + currentHiddenLevel = currentTargetHiddenSubtreeLevel; + maxHiddenLevel = currentTargetHiddenSubtreeLevel; + + for (let index = currentTargetIndex + 1; index < path.length; index++) { + const { item, rootOfClosedTree, slotInClosedTree } = path[index]; + + if (slotInClosedTree) { + currentHiddenLevel++; + } + + if (currentHiddenLevel <= maxHiddenLevel) { + ArrayPrototypePush(composedPath, { + item, + itemInShadowTree: false, + relatedTarget: null, + rootOfClosedTree: false, + slotInClosedTree: false, + target: null, + touchTargetList: [], + }); + } + + if (rootOfClosedTree) { + currentHiddenLevel--; + + if (currentHiddenLevel < maxHiddenLevel) { + maxHiddenLevel = currentHiddenLevel; + } + } + } + return ArrayPrototypeMap(composedPath, (p) => p.item); + } + + get NONE() { + return Event.NONE; + } + + get CAPTURING_PHASE() { + return Event.CAPTURING_PHASE; + } + + get AT_TARGET() { + return Event.AT_TARGET; + } + + get BUBBLING_PHASE() { + return Event.BUBBLING_PHASE; + } + + static get NONE() { + return 0; + } + + static get CAPTURING_PHASE() { + return 1; + } + + static get AT_TARGET() { + return 2; + } + + static get BUBBLING_PHASE() { + return 3; + } + + get eventPhase() { + return this[_attributes].eventPhase; + } + + stopPropagation() { + this[_stopPropagationFlag] = true; + } + + get cancelBubble() { + return 
this[_stopPropagationFlag]; + } + + set cancelBubble(value) { + this[_stopPropagationFlag] = webidl.converters.boolean(value); + } + + stopImmediatePropagation() { + this[_stopPropagationFlag] = true; + this[_stopImmediatePropagationFlag] = true; + } + + get bubbles() { + return this[_attributes].bubbles; + } + + get cancelable() { + return this[_attributes].cancelable; + } + + get returnValue() { + return !this[_canceledFlag]; + } + + set returnValue(value) { + if (!webidl.converters.boolean(value)) { + this[_canceledFlag] = true; + } + } + + preventDefault() { + if (this[_attributes].cancelable && !this[_inPassiveListener]) { + this[_canceledFlag] = true; + } + } + + get defaultPrevented() { + return this[_canceledFlag]; + } + + get composed() { + return this[_attributes].composed; + } + + get initialized() { + return true; + } + + get timeStamp() { + return this[_attributes].timeStamp; + } + } + + function defineEnumerableProps( + Ctor, + props, + ) { + for (const prop of props) { + ReflectDefineProperty(Ctor.prototype, prop, { enumerable: true }); + } + } + + const EVENT_PROPS = [ + "bubbles", + "cancelable", + "composed", + "currentTarget", + "defaultPrevented", + "eventPhase", + "srcElement", + "target", + "returnValue", + "timeStamp", + "type", + ]; + + defineEnumerableProps(Event, EVENT_PROPS); + + // This is currently the only node type we are using, so instead of implementing + // the whole of the Node interface at the moment, this just gives us the one + // value to power the standards based logic + const DOCUMENT_FRAGMENT_NODE = 11; + + // DOM Logic Helper functions and type guards + + /** Get the parent node, for event targets that have a parent. + * + * Ref: https://dom.spec.whatwg.org/#get-the-parent */ + function getParent(eventTarget) { + return isNode(eventTarget) ? eventTarget.parentNode : null; + } + + function getRoot(eventTarget) { + return isNode(eventTarget) + ? eventTarget.getRootNode({ composed: true }) + : null; + } + + function isNode( + eventTarget, + ) { + return Boolean(eventTarget && "nodeType" in eventTarget); + } + + // https://dom.spec.whatwg.org/#concept-shadow-including-inclusive-ancestor + function isShadowInclusiveAncestor( + ancestor, + node, + ) { + while (isNode(node)) { + if (node === ancestor) { + return true; + } + + if (isShadowRoot(node)) { + node = node && getHost(node); + } else { + node = getParent(node); + } + } + + return false; + } + + function isShadowRoot(nodeImpl) { + return Boolean( + nodeImpl && + isNode(nodeImpl) && + nodeImpl.nodeType === DOCUMENT_FRAGMENT_NODE && + getHost(nodeImpl) != null, + ); + } + + function isSlotable( + nodeImpl, + ) { + return Boolean(isNode(nodeImpl) && "assignedSlot" in nodeImpl); + } + + // DOM Logic functions + + /** Append a path item to an event's path. 
+ * + * Ref: https://dom.spec.whatwg.org/#concept-event-path-append + */ + function appendToEventPath( + eventImpl, + target, + targetOverride, + relatedTarget, + touchTargets, + slotInClosedTree, + ) { + const itemInShadowTree = isNode(target) && isShadowRoot(getRoot(target)); + const rootOfClosedTree = isShadowRoot(target) && + getMode(target) === "closed"; + + ArrayPrototypePush(getPath(eventImpl), { + item: target, + itemInShadowTree, + target: targetOverride, + relatedTarget, + touchTargetList: touchTargets, + rootOfClosedTree, + slotInClosedTree, + }); + } + + function dispatch( + targetImpl, + eventImpl, + targetOverride, + ) { + let clearTargets = false; + let activationTarget = null; + + setDispatched(eventImpl, true); + + targetOverride = targetOverride ?? targetImpl; + const eventRelatedTarget = hasRelatedTarget(eventImpl) + ? eventImpl.relatedTarget + : null; + let relatedTarget = retarget(eventRelatedTarget, targetImpl); + + if (targetImpl !== relatedTarget || targetImpl === eventRelatedTarget) { + const touchTargets = []; + + appendToEventPath( + eventImpl, + targetImpl, + targetOverride, + relatedTarget, + touchTargets, + false, + ); + + const isActivationEvent = eventImpl.type === "click"; + + if (isActivationEvent && getHasActivationBehavior(targetImpl)) { + activationTarget = targetImpl; + } + + let slotInClosedTree = false; + let slotable = isSlotable(targetImpl) && getAssignedSlot(targetImpl) + ? targetImpl + : null; + let parent = getParent(targetImpl); + + // Populate event path + // https://dom.spec.whatwg.org/#event-path + while (parent !== null) { + if (slotable !== null) { + slotable = null; + + const parentRoot = getRoot(parent); + if ( + isShadowRoot(parentRoot) && + parentRoot && + getMode(parentRoot) === "closed" + ) { + slotInClosedTree = true; + } + } + + relatedTarget = retarget(eventRelatedTarget, parent); + + if ( + isNode(parent) && + isShadowInclusiveAncestor(getRoot(targetImpl), parent) + ) { + appendToEventPath( + eventImpl, + parent, + null, + relatedTarget, + touchTargets, + slotInClosedTree, + ); + } else if (parent === relatedTarget) { + parent = null; + } else { + targetImpl = parent; + + if ( + isActivationEvent && + activationTarget === null && + getHasActivationBehavior(targetImpl) + ) { + activationTarget = targetImpl; + } + + appendToEventPath( + eventImpl, + parent, + targetImpl, + relatedTarget, + touchTargets, + slotInClosedTree, + ); + } + + if (parent !== null) { + parent = getParent(parent); + } + + slotInClosedTree = false; + } + + let clearTargetsTupleIndex = -1; + const path = getPath(eventImpl); + for ( + let i = path.length - 1; + i >= 0 && clearTargetsTupleIndex === -1; + i-- + ) { + if (path[i].target !== null) { + clearTargetsTupleIndex = i; + } + } + const clearTargetsTuple = path[clearTargetsTupleIndex]; + + clearTargets = (isNode(clearTargetsTuple.target) && + isShadowRoot(getRoot(clearTargetsTuple.target))) || + (isNode(clearTargetsTuple.relatedTarget) && + isShadowRoot(getRoot(clearTargetsTuple.relatedTarget))); + + setEventPhase(eventImpl, Event.CAPTURING_PHASE); + + for (let i = path.length - 1; i >= 0; --i) { + const tuple = path[i]; + + if (tuple.target === null) { + invokeEventListeners(tuple, eventImpl); + } + } + + for (let i = 0; i < path.length; i++) { + const tuple = path[i]; + + if (tuple.target !== null) { + setEventPhase(eventImpl, Event.AT_TARGET); + } else { + setEventPhase(eventImpl, Event.BUBBLING_PHASE); + } + + if ( + (eventImpl.eventPhase === Event.BUBBLING_PHASE && + eventImpl.bubbles) || + 
eventImpl.eventPhase === Event.AT_TARGET + ) { + invokeEventListeners(tuple, eventImpl); + } + } + } + + setEventPhase(eventImpl, Event.NONE); + setCurrentTarget(eventImpl, null); + setPath(eventImpl, []); + setDispatched(eventImpl, false); + eventImpl.cancelBubble = false; + setStopImmediatePropagation(eventImpl, false); + + if (clearTargets) { + setTarget(eventImpl, null); + setRelatedTarget(eventImpl, null); + } + + // TODO(bartlomieju): invoke activation targets if HTML nodes will be implemented + // if (activationTarget !== null) { + // if (!eventImpl.defaultPrevented) { + // activationTarget._activationBehavior(); + // } + // } + + return !eventImpl.defaultPrevented; + } + + /** Inner invoking of the event listeners where the resolved listeners are + * called. + * + * Ref: https://dom.spec.whatwg.org/#concept-event-listener-inner-invoke */ + function innerInvokeEventListeners( + eventImpl, + targetListeners, + ) { + let found = false; + + const { type } = eventImpl; + + if (!targetListeners || !targetListeners[type]) { + return found; + } + + // Copy event listeners before iterating since the list can be modified during the iteration. + const handlers = ArrayPrototypeSlice(targetListeners[type]); + + for (let i = 0; i < handlers.length; i++) { + const listener = handlers[i]; + + let capture, once, passive; + if (typeof listener.options === "boolean") { + capture = listener.options; + once = false; + passive = false; + } else { + capture = listener.options.capture; + once = listener.options.once; + passive = listener.options.passive; + } + + // Check if the event listener has been removed since the listeners has been cloned. + if (!ArrayPrototypeIncludes(targetListeners[type], listener)) { + continue; + } + + found = true; + + if ( + (eventImpl.eventPhase === Event.CAPTURING_PHASE && !capture) || + (eventImpl.eventPhase === Event.BUBBLING_PHASE && capture) + ) { + continue; + } + + if (once) { + ArrayPrototypeSplice( + targetListeners[type], + ArrayPrototypeIndexOf(targetListeners[type], listener), + 1, + ); + } + + if (passive) { + setInPassiveListener(eventImpl, true); + } + + if (typeof listener.callback === "object") { + if (typeof listener.callback.handleEvent === "function") { + listener.callback.handleEvent(eventImpl); + } + } else { + FunctionPrototypeCall( + listener.callback, + eventImpl.currentTarget, + eventImpl, + ); + } + + setInPassiveListener(eventImpl, false); + + if (getStopImmediatePropagation(eventImpl)) { + return found; + } + } + + return found; + } + + /** Invokes the listeners on a given event path with the supplied event. 
+ * + * Ref: https://dom.spec.whatwg.org/#concept-event-listener-invoke */ + function invokeEventListeners(tuple, eventImpl) { + const path = getPath(eventImpl); + const tupleIndex = ArrayPrototypeIndexOf(path, tuple); + for (let i = tupleIndex; i >= 0; i--) { + const t = path[i]; + if (t.target) { + setTarget(eventImpl, t.target); + break; + } + } + + setRelatedTarget(eventImpl, tuple.relatedTarget); + + if (eventImpl.cancelBubble) { + return; + } + + setCurrentTarget(eventImpl, tuple.item); + + innerInvokeEventListeners(eventImpl, getListeners(tuple.item)); + } + + function normalizeAddEventHandlerOptions( + options, + ) { + if (typeof options === "boolean" || typeof options === "undefined") { + return { + capture: Boolean(options), + once: false, + passive: false, + }; + } else { + return options; + } + } + + function normalizeEventHandlerOptions( + options, + ) { + if (typeof options === "boolean" || typeof options === "undefined") { + return { + capture: Boolean(options), + }; + } else { + return options; + } + } + + /** Retarget the target following the spec logic. + * + * Ref: https://dom.spec.whatwg.org/#retarget */ + function retarget(a, b) { + while (true) { + if (!isNode(a)) { + return a; + } + + const aRoot = a.getRootNode(); + + if (aRoot) { + if ( + !isShadowRoot(aRoot) || + (isNode(b) && isShadowInclusiveAncestor(aRoot, b)) + ) { + return a; + } + + a = getHost(aRoot); + } + } + } + + // Accessors for non-public data + + const eventTargetData = new WeakMap(); + + function setEventTargetData(value) { + WeakMapPrototypeSet(eventTargetData, value, getDefaultTargetData()); + } + + function getAssignedSlot(target) { + return Boolean(WeakMapPrototypeGet(eventTargetData, target)?.assignedSlot); + } + + function getHasActivationBehavior(target) { + return Boolean( + WeakMapPrototypeGet(eventTargetData, target)?.hasActivationBehavior, + ); + } + + function getHost(target) { + return WeakMapPrototypeGet(eventTargetData, target)?.host ?? null; + } + + function getListeners(target) { + return WeakMapPrototypeGet(eventTargetData, target)?.listeners ?? {}; + } + + function getMode(target) { + return WeakMapPrototypeGet(eventTargetData, target)?.mode ?? null; + } + + function getDefaultTargetData() { + return { + assignedSlot: false, + hasActivationBehavior: false, + host: null, + listeners: ObjectCreate(null), + mode: "", + }; + } + + class EventTarget { + constructor() { + WeakMapPrototypeSet(eventTargetData, this, getDefaultTargetData()); + } + + addEventListener( + type, + callback, + options, + ) { + webidl.requiredArguments(arguments.length, 2, { + prefix: "Failed to execute 'addEventListener' on 'EventTarget'", + }); + if (callback === null) { + return; + } + + options = normalizeAddEventHandlerOptions(options); + const { listeners } = WeakMapPrototypeGet( + eventTargetData, + this ?? globalThis, + ); + + if (!(type in listeners)) { + listeners[type] = []; + } + + for (const listener of listeners[type]) { + if ( + ((typeof listener.options === "boolean" && + listener.options === options.capture) || + (typeof listener.options === "object" && + listener.options.capture === options.capture)) && + listener.callback === callback + ) { + return; + } + } + if (options?.signal) { + const signal = options?.signal; + if (signal.aborted) { + // If signal is not null and its aborted flag is set, then return. + return; + } else { + // If listener’s signal is not null, then add the following abort + // abort steps to it: Remove an event listener. 
+ signal.addEventListener("abort", () => { + this.removeEventListener(type, callback, options); + }); + } + } else if (options?.signal === null) { + throw new TypeError("signal must be non-null"); + } + + ArrayPrototypePush(listeners[type], { callback, options }); + } + + removeEventListener( + type, + callback, + options, + ) { + webidl.requiredArguments(arguments.length, 2, { + prefix: "Failed to execute 'removeEventListener' on 'EventTarget'", + }); + + const listeners = + WeakMapPrototypeGet(eventTargetData, this ?? globalThis).listeners; + if (callback !== null && type in listeners) { + listeners[type] = ArrayPrototypeFilter( + listeners[type], + (listener) => listener.callback !== callback, + ); + } else if (callback === null || !listeners[type]) { + return; + } + + options = normalizeEventHandlerOptions(options); + + for (let i = 0; i < listeners[type].length; ++i) { + const listener = listeners[type][i]; + if ( + ((typeof listener.options === "boolean" && + listener.options === options.capture) || + (typeof listener.options === "object" && + listener.options.capture === options.capture)) && + listener.callback === callback + ) { + ArrayPrototypeSplice(listeners[type], i, 1); + break; + } + } + } + + dispatchEvent(event) { + webidl.requiredArguments(arguments.length, 1, { + prefix: "Failed to execute 'dispatchEvent' on 'EventTarget'", + }); + const self = this ?? globalThis; + + const listeners = WeakMapPrototypeGet(eventTargetData, self).listeners; + if (!(event.type in listeners)) { + setTarget(event, this); + return true; + } + + if (getDispatched(event)) { + throw new DOMException("Invalid event state.", "InvalidStateError"); + } + + if (event.eventPhase !== Event.NONE) { + throw new DOMException("Invalid event state.", "InvalidStateError"); + } + + return dispatch(self, event); + } + + get [SymbolToStringTag]() { + return "EventTarget"; + } + + getParent(_event) { + return null; + } + } + + defineEnumerableProps(EventTarget, [ + "addEventListener", + "removeEventListener", + "dispatchEvent", + ]); + + class ErrorEvent extends Event { + #message = ""; + #filename = ""; + #lineno = ""; + #colno = ""; + #error = ""; + + get message() { + return this.#message; + } + get filename() { + return this.#filename; + } + get lineno() { + return this.#lineno; + } + get colno() { + return this.#colno; + } + get error() { + return this.#error; + } + + constructor( + type, + { + bubbles, + cancelable, + composed, + message = "", + filename = "", + lineno = 0, + colno = 0, + error = null, + } = {}, + ) { + super(type, { + bubbles: bubbles, + cancelable: cancelable, + composed: composed, + }); + + this.#message = message; + this.#filename = filename; + this.#lineno = lineno; + this.#colno = colno; + this.#error = error; + } + + get [SymbolToStringTag]() { + return "ErrorEvent"; + } + + [SymbolFor("Deno.privateCustomInspect")](inspect) { + return inspect(consoleInternal.createFilteredInspectProxy({ + object: this, + evaluate: this instanceof ErrorEvent, + keys: [ + ...EVENT_PROPS, + "message", + "filename", + "lineno", + "colno", + "error", + ], + })); + } + } + + defineEnumerableProps(ErrorEvent, [ + "message", + "filename", + "lineno", + "colno", + "error", + ]); + + class CloseEvent extends Event { + #wasClean = ""; + #code = ""; + #reason = ""; + + get wasClean() { + return this.#wasClean; + } + get code() { + return this.#code; + } + get reason() { + return this.#reason; + } + + constructor(type, { + bubbles, + cancelable, + composed, + wasClean = false, + code = 0, + reason = "", + } = {}) 
{ + super(type, { + bubbles: bubbles, + cancelable: cancelable, + composed: composed, + }); + + this.#wasClean = wasClean; + this.#code = code; + this.#reason = reason; + } + + [SymbolFor("Deno.privateCustomInspect")](inspect) { + return inspect(consoleInternal.createFilteredInspectProxy({ + object: this, + evaluate: this instanceof CloseEvent, + keys: [ + ...EVENT_PROPS, + "wasClean", + "code", + "reason", + ], + })); + } + } + + class MessageEvent extends Event { + get source() { + return null; + } + + constructor(type, eventInitDict) { + super(type, { + bubbles: eventInitDict?.bubbles ?? false, + cancelable: eventInitDict?.cancelable ?? false, + composed: eventInitDict?.composed ?? false, + }); + + this.data = eventInitDict?.data ?? null; + this.ports = eventInitDict?.ports ?? []; + this.origin = eventInitDict?.origin ?? ""; + this.lastEventId = eventInitDict?.lastEventId ?? ""; + } + + [SymbolFor("Deno.privateCustomInspect")](inspect) { + return inspect(consoleInternal.createFilteredInspectProxy({ + object: this, + evaluate: this instanceof MessageEvent, + keys: [ + ...EVENT_PROPS, + "data", + "origin", + "lastEventId", + ], + })); + } + } + + class CustomEvent extends Event { + #detail = null; + + constructor(type, eventInitDict = {}) { + super(type, eventInitDict); + webidl.requiredArguments(arguments.length, 1, { + prefix: "Failed to construct 'CustomEvent'", + }); + const { detail } = eventInitDict; + this.#detail = detail; + } + + get detail() { + return this.#detail; + } + + get [SymbolToStringTag]() { + return "CustomEvent"; + } + + [SymbolFor("Deno.privateCustomInspect")](inspect) { + return inspect(consoleInternal.createFilteredInspectProxy({ + object: this, + evaluate: this instanceof CustomEvent, + keys: [ + ...EVENT_PROPS, + "detail", + ], + })); + } + } + + ReflectDefineProperty(CustomEvent.prototype, "detail", { + enumerable: true, + }); + + // ProgressEvent could also be used in other DOM progress event emits. + // Current use is for FileReader. + class ProgressEvent extends Event { + constructor(type, eventInitDict = {}) { + super(type, eventInitDict); + + this.lengthComputable = eventInitDict?.lengthComputable ?? false; + this.loaded = eventInitDict?.loaded ?? 0; + this.total = eventInitDict?.total ?? 
0; + } + + [SymbolFor("Deno.privateCustomInspect")](inspect) { + return inspect(consoleInternal.createFilteredInspectProxy({ + object: this, + evaluate: this instanceof ProgressEvent, + keys: [ + ...EVENT_PROPS, + "lengthComputable", + "loaded", + "total", + ], + })); + } + } + + const _eventHandlers = Symbol("eventHandlers"); + + function makeWrappedHandler(handler) { + function wrappedHandler(...args) { + if (typeof wrappedHandler.handler !== "function") { + return; + } + return FunctionPrototypeCall(wrappedHandler.handler, this, ...args); + } + wrappedHandler.handler = handler; + return wrappedHandler; + } + + // TODO(benjamingr) reuse this here and websocket where possible + function defineEventHandler(emitter, name, init) { + // HTML specification section 8.1.5.1 + ObjectDefineProperty(emitter, `on${name}`, { + get() { + const map = this[_eventHandlers]; + + if (!map) return undefined; + return MapPrototypeGet(map, name)?.handler; + }, + set(value) { + if (!this[_eventHandlers]) { + this[_eventHandlers] = new Map(); + } + let handlerWrapper = MapPrototypeGet(this[_eventHandlers], name); + if (handlerWrapper) { + handlerWrapper.handler = value; + } else { + handlerWrapper = makeWrappedHandler(value); + this.addEventListener(name, handlerWrapper); + init?.(this); + } + MapPrototypeSet(this[_eventHandlers], name, handlerWrapper); + }, + configurable: true, + enumerable: true, + }); + } + + window.Event = Event; + window.EventTarget = EventTarget; + window.ErrorEvent = ErrorEvent; + window.CloseEvent = CloseEvent; + window.MessageEvent = MessageEvent; + window.CustomEvent = CustomEvent; + window.ProgressEvent = ProgressEvent; + window.dispatchEvent = EventTarget.prototype.dispatchEvent; + window.addEventListener = EventTarget.prototype.addEventListener; + window.removeEventListener = EventTarget.prototype.removeEventListener; + window.__bootstrap.eventTarget = { + EventTarget, + setEventTargetData, + }; + window.__bootstrap.event = { + setIsTrusted, + setTarget, + defineEventHandler, + }; +})(this); diff --git a/ext/web/02_structured_clone.js b/ext/web/02_structured_clone.js new file mode 100644 index 000000000..4845c6508 --- /dev/null +++ b/ext/web/02_structured_clone.js @@ -0,0 +1,85 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +// @ts-check +/// +/// +/// +/// + +"use strict"; + +((window) => { + const core = window.Deno.core; + const { DOMException } = window.__bootstrap.domException; + const { + ArrayBuffer, + ArrayBufferIsView, + DataView, + TypedArrayPrototypeSlice, + TypeError, + WeakMap, + WeakMapPrototypeSet, + } = window.__bootstrap.primordials; + + const objectCloneMemo = new WeakMap(); + + function cloneArrayBuffer( + srcBuffer, + srcByteOffset, + srcLength, + _cloneConstructor, + ) { + // this function fudges the return type but SharedArrayBuffer is disabled for a while anyway + return TypedArrayPrototypeSlice( + srcBuffer, + srcByteOffset, + srcByteOffset + srcLength, + ); + } + + /** Clone a value in a similar way to structured cloning. It is similar to a +* StructureDeserialize(StructuredSerialize(...)). */ + function structuredClone(value) { + // Performance optimization for buffers, otherwise + // `serialize/deserialize` will allocate new buffer. 
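+    /* Editorial note, not part of the original patch: the branches below are the
+       fast path. A bare ArrayBuffer is copied via cloneArrayBuffer() and memoized
+       in objectCloneMemo; an ArrayBuffer view clones its underlying buffer first
+       and then rewraps it with the same constructor, byteOffset and length.
+       Anything else falls through to core.serialize/deserialize. Roughly:
+
+         const buf = new Uint8Array([1, 2, 3]).buffer;
+         const copy = structuredClone(buf); // new ArrayBuffer with the same bytes
+         copy !== buf;                      // true
+    */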
+ if (value instanceof ArrayBuffer) { + const cloned = cloneArrayBuffer( + value, + 0, + value.byteLength, + ArrayBuffer, + ); + WeakMapPrototypeSet(objectCloneMemo, value, cloned); + return cloned; + } + if (ArrayBufferIsView(value)) { + const clonedBuffer = structuredClone(value.buffer); + // Use DataViewConstructor type purely for type-checking, can be a + // DataView or TypedArray. They use the same constructor signature, + // only DataView has a length in bytes and TypedArrays use a length in + // terms of elements, so we adjust for that. + let length; + if (value instanceof DataView) { + length = value.byteLength; + } else { + length = value.length; + } + return new (value.constructor)( + clonedBuffer, + value.byteOffset, + length, + ); + } + + try { + return core.deserialize(core.serialize(value)); + } catch (e) { + if (e instanceof TypeError) { + throw new DOMException("Uncloneable value", "DataCloneError"); + } + throw e; + } + } + + window.__bootstrap.structuredClone = structuredClone; +})(globalThis); diff --git a/ext/web/03_abort_signal.js b/ext/web/03_abort_signal.js new file mode 100644 index 000000000..d67bfef26 --- /dev/null +++ b/ext/web/03_abort_signal.js @@ -0,0 +1,123 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. +"use strict"; + +// @ts-check +/// + +((window) => { + const webidl = window.__bootstrap.webidl; + const { setIsTrusted, defineEventHandler } = window.__bootstrap.event; + const { + Boolean, + Set, + SetPrototypeAdd, + SetPrototypeClear, + SetPrototypeDelete, + Symbol, + SymbolToStringTag, + TypeError, + } = window.__bootstrap.primordials; + + const add = Symbol("add"); + const signalAbort = Symbol("signalAbort"); + const remove = Symbol("remove"); + + const illegalConstructorKey = Symbol("illegalConstructorKey"); + + class AbortSignal extends EventTarget { + #aborted = false; + #abortAlgorithms = new Set(); + + static abort() { + const signal = new AbortSignal(illegalConstructorKey); + signal[signalAbort](); + return signal; + } + + [add](algorithm) { + SetPrototypeAdd(this.#abortAlgorithms, algorithm); + } + + [signalAbort]() { + if (this.#aborted) { + return; + } + this.#aborted = true; + for (const algorithm of this.#abortAlgorithms) { + algorithm(); + } + SetPrototypeClear(this.#abortAlgorithms); + const event = new Event("abort"); + setIsTrusted(event, true); + this.dispatchEvent(event); + } + + [remove](algorithm) { + SetPrototypeDelete(this.#abortAlgorithms, algorithm); + } + + constructor(key = null) { + if (key != illegalConstructorKey) { + throw new TypeError("Illegal constructor."); + } + super(); + this[webidl.brand] = webidl.brand; + } + + get aborted() { + return Boolean(this.#aborted); + } + + get [SymbolToStringTag]() { + return "AbortSignal"; + } + } + defineEventHandler(AbortSignal.prototype, "abort"); + + webidl.configurePrototype(AbortSignal); + + class AbortController { + #signal = new AbortSignal(illegalConstructorKey); + + get signal() { + return this.#signal; + } + + abort() { + this.#signal[signalAbort](); + } + + get [SymbolToStringTag]() { + return "AbortController"; + } + } + + webidl.configurePrototype(AbortController); + + webidl.converters["AbortSignal"] = webidl.createInterfaceConverter( + "AbortSignal", + AbortSignal, + ); + + function newSignal() { + return new AbortSignal(illegalConstructorKey); + } + + function follow(followingSignal, parentSignal) { + if (parentSignal.aborted) { + followingSignal[signalAbort](); + } else { + parentSignal[add](() => followingSignal[signalAbort]()); + } + } + 
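+  /* Illustrative sketch (editorial addition, not part of the original patch):
+     typical use of the machinery above. A controller's signal starts unaborted;
+     abort() runs the registered abort algorithms once and dispatches a trusted
+     "abort" event, and the internal follow(followingSignal, parentSignal) helper
+     makes the following signal abort as soon as the parent one does.
+
+       const controller = new AbortController();
+       controller.signal.aborted;                      // false
+       controller.signal.onabort = () => console.log("aborted");
+       controller.abort();                             // logs "aborted"
+       controller.signal.aborted;                      // true
+       controller.abort();                             // no-op, already aborted
+  */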
+ window.AbortSignal = AbortSignal; + window.AbortController = AbortController; + window.__bootstrap.abortSignal = { + add, + signalAbort, + remove, + follow, + newSignal, + }; +})(this); diff --git a/ext/web/04_global_interfaces.js b/ext/web/04_global_interfaces.js new file mode 100644 index 000000000..8117bface --- /dev/null +++ b/ext/web/04_global_interfaces.js @@ -0,0 +1,79 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. +"use strict"; + +// @ts-check +/// + +((window) => { + const { EventTarget } = window; + const { + Symbol, + SymbolToStringTag, + TypeError, + } = window.__bootstrap.primordials; + + const illegalConstructorKey = Symbol("illegalConstructorKey"); + + class Window extends EventTarget { + constructor(key = null) { + if (key !== illegalConstructorKey) { + throw new TypeError("Illegal constructor."); + } + super(); + } + + get [SymbolToStringTag]() { + return "Window"; + } + } + + class WorkerGlobalScope extends EventTarget { + constructor(key = null) { + if (key != illegalConstructorKey) { + throw new TypeError("Illegal constructor."); + } + super(); + } + + get [SymbolToStringTag]() { + return "WorkerGlobalScope"; + } + } + + class DedicatedWorkerGlobalScope extends WorkerGlobalScope { + constructor(key = null) { + if (key != illegalConstructorKey) { + throw new TypeError("Illegal constructor."); + } + super(); + } + + get [SymbolToStringTag]() { + return "DedicatedWorkerGlobalScope"; + } + } + + window.__bootstrap.globalInterfaces = { + DedicatedWorkerGlobalScope, + Window, + WorkerGlobalScope, + dedicatedWorkerGlobalScopeConstructorDescriptor: { + configurable: true, + enumerable: false, + value: DedicatedWorkerGlobalScope, + writable: true, + }, + windowConstructorDescriptor: { + configurable: true, + enumerable: false, + value: Window, + writable: true, + }, + workerGlobalScopeConstructorDescriptor: { + configurable: true, + enumerable: false, + value: WorkerGlobalScope, + writable: true, + }, + }; +})(this); diff --git a/ext/web/05_base64.js b/ext/web/05_base64.js new file mode 100644 index 000000000..9c9c23b0f --- /dev/null +++ b/ext/web/05_base64.js @@ -0,0 +1,73 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
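+/* Editorial sketch, not part of the original patch: this module wires the
+   "forgiving" base64 helpers from __bootstrap.infra into the global
+   atob()/btoa() defined below. btoa() rejects code units above 0xFF with an
+   "InvalidCharacterError" DOMException; atob() decodes back to a binary string.
+
+     btoa("hello");     // "aGVsbG8="
+     atob("aGVsbG8=");  // "hello"
+     btoa("\u{1F600}"); // throws DOMException (InvalidCharacterError)
+*/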
+ +// @ts-check +/// +/// +/// +/// + +"use strict"; + +((window) => { + const webidl = window.__bootstrap.webidl; + const { + forgivingBase64Encode, + forgivingBase64Decode, + } = window.__bootstrap.infra; + const { DOMException } = window.__bootstrap.domException; + const { + ArrayPrototypeMap, + StringPrototypeCharCodeAt, + ArrayPrototypeJoin, + StringFromCharCode, + TypedArrayFrom, + Uint8Array, + } = window.__bootstrap.primordials; + + /** + * @param {string} data + * @returns {string} + */ + function atob(data) { + data = webidl.converters.DOMString(data, { + prefix: "Failed to execute 'atob'", + context: "Argument 1", + }); + + const uint8Array = forgivingBase64Decode(data); + const result = ArrayPrototypeMap( + [...uint8Array], + (byte) => StringFromCharCode(byte), + ); + return ArrayPrototypeJoin(result, ""); + } + + /** + * @param {string} data + * @returns {string} + */ + function btoa(data) { + const prefix = "Failed to execute 'btoa'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + data = webidl.converters.DOMString(data, { + prefix, + context: "Argument 1", + }); + const byteArray = ArrayPrototypeMap([...data], (char) => { + const charCode = StringPrototypeCharCodeAt(char, 0); + if (charCode > 0xff) { + throw new DOMException( + "The string to be encoded contains characters outside of the Latin1 range.", + "InvalidCharacterError", + ); + } + return charCode; + }); + return forgivingBase64Encode(TypedArrayFrom(Uint8Array, byteArray)); + } + + window.__bootstrap.base64 = { + atob, + btoa, + }; +})(globalThis); diff --git a/ext/web/06_streams.js b/ext/web/06_streams.js new file mode 100644 index 000000000..c4bfad0c8 --- /dev/null +++ b/ext/web/06_streams.js @@ -0,0 +1,4473 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +// @ts-check +/// +/// +/// +/// +"use strict"; + +((window) => { + const webidl = window.__bootstrap.webidl; + // TODO(lucacasonato): get AbortSignal from __bootstrap. 
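+  /* Editorial note, not part of the original patch: this file implements the
+     WHATWG Streams machinery (ReadableStream, WritableStream, TransformStream
+     and their controllers, readers and writers). Several BYOB-related spec
+     steps are only quoted in comments further down, i.e. byte streams here
+     currently serve default readers only. */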
+ const { + ArrayPrototypeMap, + ArrayPrototypePush, + ArrayPrototypeShift, + Error, + NumberIsInteger, + NumberIsNaN, + ObjectCreate, + ObjectDefineProperties, + ObjectDefineProperty, + ObjectGetPrototypeOf, + ObjectSetPrototypeOf, + Promise, + PromiseAll, + PromisePrototypeThen, + PromiseReject, + queueMicrotask, + RangeError, + Symbol, + SymbolAsyncIterator, + SymbolFor, + SymbolToStringTag, + TypeError, + Uint8Array, + WeakMap, + WeakMapPrototypeGet, + WeakMapPrototypeHas, + WeakMapPrototypeSet, + } = globalThis.__bootstrap.primordials; + const consoleInternal = window.__bootstrap.console; + const { DOMException } = window.__bootstrap.domException; + + class AssertionError extends Error { + constructor(msg) { + super(msg); + this.name = "AssertionError"; + } + } + + /** + * @param {unknown} cond + * @param {string=} msg + * @returns {asserts cond} + */ + function assert(cond, msg = "Assertion failed.") { + if (!cond) { + throw new AssertionError(msg); + } + } + + /** @template T */ + class Deferred { + /** @type {Promise} */ + #promise; + /** @type {(reject?: any) => void} */ + #reject; + /** @type {(value: T | PromiseLike) => void} */ + #resolve; + /** @type {"pending" | "fulfilled"} */ + #state = "pending"; + + constructor() { + this.#promise = new Promise((resolve, reject) => { + this.#resolve = resolve; + this.#reject = reject; + }); + } + + /** @returns {Promise} */ + get promise() { + return this.#promise; + } + + /** @returns {"pending" | "fulfilled"} */ + get state() { + return this.#state; + } + + /** @param {any=} reason */ + reject(reason) { + // already settled promises are a no-op + if (this.#state !== "pending") { + return; + } + this.#state = "fulfilled"; + this.#reject(reason); + } + + /** @param {T | PromiseLike} value */ + resolve(value) { + // already settled promises are a no-op + if (this.#state !== "pending") { + return; + } + this.#state = "fulfilled"; + this.#resolve(value); + } + } + + /** + * @template T + * @param {T | PromiseLike} value + * @returns {Promise} + */ + function resolvePromiseWith(value) { + return new Promise((resolve) => resolve(value)); + } + + /** @param {any} e */ + function rethrowAssertionErrorRejection(e) { + if (e && e instanceof AssertionError) { + queueMicrotask(() => { + console.error(`Internal Error: ${e.stack}`); + }); + } + } + + /** @param {Promise} promise */ + function setPromiseIsHandledToTrue(promise) { + PromisePrototypeThen(promise, undefined, rethrowAssertionErrorRejection); + } + + /** + * @template T + * @template TResult1 + * @template TResult2 + * @param {Promise} promise + * @param {(value: T) => TResult1 | PromiseLike} fulfillmentHandler + * @param {(reason: any) => TResult2 | PromiseLike=} rejectionHandler + * @returns {Promise} + */ + function transformPromiseWith(promise, fulfillmentHandler, rejectionHandler) { + return PromisePrototypeThen(promise, fulfillmentHandler, rejectionHandler); + } + + /** + * @template T + * @template TResult + * @param {Promise} promise + * @param {(value: T) => TResult | PromiseLike} onFulfilled + * @returns {void} + */ + function uponFulfillment(promise, onFulfilled) { + uponPromise(promise, onFulfilled); + } + + /** + * @template T + * @template TResult + * @param {Promise} promise + * @param {(value: T) => TResult | PromiseLike} onRejected + * @returns {void} + */ + function uponRejection(promise, onRejected) { + uponPromise(promise, undefined, onRejected); + } + + /** + * @template T + * @template TResult1 + * @template TResult2 + * @param {Promise} promise + * @param {(value: T) 
=> TResult1 | PromiseLike} onFulfilled + * @param {(reason: any) => TResult2 | PromiseLike=} onRejected + * @returns {void} + */ + function uponPromise(promise, onFulfilled, onRejected) { + PromisePrototypeThen( + PromisePrototypeThen(promise, onFulfilled, onRejected), + undefined, + rethrowAssertionErrorRejection, + ); + } + + const isFakeDetached = Symbol("<>"); + + /** + * @param {ArrayBufferLike} O + * @returns {boolean} + */ + function isDetachedBuffer(O) { + return isFakeDetached in O; + } + + /** + * @param {ArrayBufferLike} O + * @returns {ArrayBufferLike} + */ + function transferArrayBuffer(O) { + assert(!isDetachedBuffer(O)); + const transferredIshVersion = O.slice(0); + ObjectDefineProperty(O, "byteLength", { + get() { + return 0; + }, + }); + O[isFakeDetached] = true; + return transferredIshVersion; + } + + const _abortAlgorithm = Symbol("[[abortAlgorithm]]"); + const _abortSteps = Symbol("[[AbortSteps]]"); + const _autoAllocateChunkSize = Symbol("[[autoAllocateChunkSize]]"); + const _backpressure = Symbol("[[backpressure]]"); + const _backpressureChangePromise = Symbol("[[backpressureChangePromise]]"); + const _byobRequest = Symbol("[[byobRequest]]"); + const _cancelAlgorithm = Symbol("[[cancelAlgorithm]]"); + const _cancelSteps = Symbol("[[CancelSteps]]"); + const _close = Symbol("close sentinel"); + const _closeAlgorithm = Symbol("[[closeAlgorithm]]"); + const _closedPromise = Symbol("[[closedPromise]]"); + const _closeRequest = Symbol("[[closeRequest]]"); + const _closeRequested = Symbol("[[closeRequested]]"); + const _controller = Symbol("[[controller]]"); + const _detached = Symbol("[[Detached]]"); + const _disturbed = Symbol("[[disturbed]]"); + const _errorSteps = Symbol("[[ErrorSteps]]"); + const _flushAlgorithm = Symbol("[[flushAlgorithm]]"); + const _globalObject = Symbol("[[globalObject]]"); + const _highWaterMark = Symbol("[[highWaterMark]]"); + const _inFlightCloseRequest = Symbol("[[inFlightCloseRequest]]"); + const _inFlightWriteRequest = Symbol("[[inFlightWriteRequest]]"); + const _pendingAbortRequest = Symbol("[pendingAbortRequest]"); + const _preventCancel = Symbol("[[preventCancel]]"); + const _pullAgain = Symbol("[[pullAgain]]"); + const _pullAlgorithm = Symbol("[[pullAlgorithm]]"); + const _pulling = Symbol("[[pulling]]"); + const _pullSteps = Symbol("[[PullSteps]]"); + const _queue = Symbol("[[queue]]"); + const _queueTotalSize = Symbol("[[queueTotalSize]]"); + const _readable = Symbol("[[readable]]"); + const _reader = Symbol("[[reader]]"); + const _readRequests = Symbol("[[readRequests]]"); + const _readyPromise = Symbol("[[readyPromise]]"); + const _started = Symbol("[[started]]"); + const _state = Symbol("[[state]]"); + const _storedError = Symbol("[[storedError]]"); + const _strategyHWM = Symbol("[[strategyHWM]]"); + const _strategySizeAlgorithm = Symbol("[[strategySizeAlgorithm]]"); + const _stream = Symbol("[[stream]]"); + const _transformAlgorithm = Symbol("[[transformAlgorithm]]"); + const _writable = Symbol("[[writable]]"); + const _writeAlgorithm = Symbol("[[writeAlgorithm]]"); + const _writer = Symbol("[[writer]]"); + const _writeRequests = Symbol("[[writeRequests]]"); + + /** + * @template R + * @param {ReadableStream} stream + * @returns {ReadableStreamDefaultReader} + */ + function acquireReadableStreamDefaultReader(stream) { + return new ReadableStreamDefaultReader(stream); + } + + /** + * @template W + * @param {WritableStream} stream + * @returns {WritableStreamDefaultWriter} + */ + function acquireWritableStreamDefaultWriter(stream) { + 
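+  /* Editorial note, not part of the original patch: transferArrayBuffer() above
+     only emulates detaching. It copies the buffer with slice(0), then marks the
+     original with the isFakeDetached symbol and makes its byteLength getter
+     report 0, so isDetachedBuffer() treats it as detached even though nothing
+     was actually transferred by the engine. */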
return new WritableStreamDefaultWriter(stream); + } + + /** + * @template R + * @param {() => void} startAlgorithm + * @param {() => Promise} pullAlgorithm + * @param {(reason: any) => Promise} cancelAlgorithm + * @param {number=} highWaterMark + * @param {((chunk: R) => number)=} sizeAlgorithm + * @returns {ReadableStream} + */ + function createReadableStream( + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark = 1, + sizeAlgorithm = () => 1, + ) { + assert(isNonNegativeNumber(highWaterMark)); + /** @type {ReadableStream} */ + const stream = webidl.createBranded(ReadableStream); + initializeReadableStream(stream); + const controller = webidl.createBranded(ReadableStreamDefaultController); + setUpReadableStreamDefaultController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + sizeAlgorithm, + ); + return stream; + } + + /** + * @template W + * @param {(controller: WritableStreamDefaultController) => Promise} startAlgorithm + * @param {(chunk: W) => Promise} writeAlgorithm + * @param {() => Promise} closeAlgorithm + * @param {(reason: any) => Promise} abortAlgorithm + * @param {number} highWaterMark + * @param {(chunk: W) => number} sizeAlgorithm + * @returns {WritableStream} + */ + function createWritableStream( + startAlgorithm, + writeAlgorithm, + closeAlgorithm, + abortAlgorithm, + highWaterMark, + sizeAlgorithm, + ) { + assert(isNonNegativeNumber(highWaterMark)); + const stream = webidl.createBranded(WritableStream); + initializeWritableStream(stream); + const controller = webidl.createBranded(WritableStreamDefaultController); + setUpWritableStreamDefaultController( + stream, + controller, + startAlgorithm, + writeAlgorithm, + closeAlgorithm, + abortAlgorithm, + highWaterMark, + sizeAlgorithm, + ); + return stream; + } + + /** + * @template T + * @param {{ [_queue]: Array>, [_queueTotalSize]: number }} container + * @returns {T} + */ + function dequeueValue(container) { + assert(_queue in container && _queueTotalSize in container); + assert(container[_queue].length); + const valueWithSize = ArrayPrototypeShift(container[_queue]); + container[_queueTotalSize] -= valueWithSize.size; + if (container[_queueTotalSize] < 0) { + container[_queueTotalSize] = 0; + } + return valueWithSize.value; + } + + /** + * @template T + * @param {{ [_queue]: Array>, [_queueTotalSize]: number }} container + * @param {T} value + * @param {number} size + * @returns {void} + */ + function enqueueValueWithSize(container, value, size) { + assert(_queue in container && _queueTotalSize in container); + if (isNonNegativeNumber(size) === false) { + throw RangeError("chunk size isn't a positive number"); + } + if (size === Infinity) { + throw RangeError("chunk size is invalid"); + } + ArrayPrototypePush(container[_queue], { value, size }); + container[_queueTotalSize] += size; + } + + /** + * @param {QueuingStrategy} strategy + * @param {number} defaultHWM + */ + function extractHighWaterMark(strategy, defaultHWM) { + if (strategy.highWaterMark === undefined) { + return defaultHWM; + } + const highWaterMark = strategy.highWaterMark; + if (NumberIsNaN(highWaterMark) || highWaterMark < 0) { + throw RangeError( + `Expected highWaterMark to be a positive number or Infinity, got "${highWaterMark}".`, + ); + } + return highWaterMark; + } + + /** + * @template T + * @param {QueuingStrategy} strategy + * @return {(chunk: T) => number} + */ + function extractSizeAlgorithm(strategy) { + if (strategy.size === undefined) { + return () => 1; + } + return (chunk) 
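+  /* Editorial sketch, not part of the original patch: behavior of the queuing
+     strategy helpers above, as implemented in this file.
+
+       extractHighWaterMark({}, 1);                    // 1 (default used)
+       extractHighWaterMark({ highWaterMark: 0 }, 1);  // 0
+       extractHighWaterMark({ highWaterMark: -1 }, 1); // throws RangeError
+
+     enqueueValueWithSize()/dequeueValue() keep [_queueTotalSize] in step with
+     the sizes of the queued { value, size } records, clamping at 0 on dequeue. */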
=> + webidl.invokeCallbackFunction( + strategy.size, + [chunk], + undefined, + webidl.converters["unrestricted double"], + { prefix: "Failed to call `sizeAlgorithm`" }, + ); + } + + /** + * @param {ReadableStream} stream + * @returns {void} + */ + function initializeReadableStream(stream) { + stream[_state] = "readable"; + stream[_reader] = stream[_storedError] = undefined; + stream[_disturbed] = false; + } + + /** + * @template I + * @template O + * @param {TransformStream} stream + * @param {Deferred} startPromise + * @param {number} writableHighWaterMark + * @param {(chunk: I) => number} writableSizeAlgorithm + * @param {number} readableHighWaterMark + * @param {(chunk: O) => number} readableSizeAlgorithm + */ + function initializeTransformStream( + stream, + startPromise, + writableHighWaterMark, + writableSizeAlgorithm, + readableHighWaterMark, + readableSizeAlgorithm, + ) { + function startAlgorithm() { + return startPromise.promise; + } + + function writeAlgorithm(chunk) { + return transformStreamDefaultSinkWriteAlgorithm(stream, chunk); + } + + function abortAlgorithm(reason) { + return transformStreamDefaultSinkAbortAlgorithm(stream, reason); + } + + function closeAlgorithm() { + return transformStreamDefaultSinkCloseAlgorithm(stream); + } + + stream[_writable] = createWritableStream( + startAlgorithm, + writeAlgorithm, + closeAlgorithm, + abortAlgorithm, + writableHighWaterMark, + writableSizeAlgorithm, + ); + + function pullAlgorithm() { + return transformStreamDefaultSourcePullAlgorithm(stream); + } + + function cancelAlgorithm(reason) { + transformStreamErrorWritableAndUnblockWrite(stream, reason); + return resolvePromiseWith(undefined); + } + + stream[_readable] = createReadableStream( + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + readableHighWaterMark, + readableSizeAlgorithm, + ); + + stream[_backpressure] = stream[_backpressureChangePromise] = undefined; + transformStreamSetBackpressure(stream, true); + stream[_controller] = undefined; + } + + /** @param {WritableStream} stream */ + function initializeWritableStream(stream) { + stream[_state] = "writable"; + stream[_storedError] = stream[_writer] = stream[_controller] = + stream[_inFlightWriteRequest] = stream[_closeRequest] = + stream[_inFlightCloseRequest] = stream[_pendingAbortRequest] = + undefined; + stream[_writeRequests] = []; + stream[_backpressure] = false; + } + + /** + * @param {unknown} v + * @returns {v is number} + */ + function isNonNegativeNumber(v) { + if (typeof v !== "number") { + return false; + } + if (NumberIsNaN(v)) { + return false; + } + if (v < 0) { + return false; + } + return true; + } + + /** + * @param {unknown} value + * @returns {value is ReadableStream} + */ + function isReadableStream(value) { + return !(typeof value !== "object" || value === null || + !(_controller in value)); + } + + /** + * @param {ReadableStream} stream + * @returns {boolean} + */ + function isReadableStreamLocked(stream) { + if (stream[_reader] === undefined) { + return false; + } + return true; + } + + /** + * @param {unknown} value + * @returns {value is ReadableStreamDefaultReader} + */ + function isReadableStreamDefaultReader(value) { + return !(typeof value !== "object" || value === null || + !(_readRequests in value)); + } + + /** + * @param {ReadableStream} stream + * @returns {boolean} + */ + function isReadableStreamDisturbed(stream) { + assert(isReadableStream(stream)); + return stream[_disturbed]; + } + + /** + * @param {unknown} value + * @returns {value is WritableStream} + */ + function 
isWritableStream(value) { + return !(typeof value !== "object" || value === null || + !(_controller in value)); + } + + /** + * @param {WritableStream} stream + * @returns {boolean} + */ + function isWritableStreamLocked(stream) { + if (stream[_writer] === undefined) { + return false; + } + return true; + } + + /** + * @template T + * @param {{ [_queue]: Array>, [_queueTotalSize]: number }} container + * @returns {T | _close} + */ + function peekQueueValue(container) { + assert(_queue in container && _queueTotalSize in container); + assert(container[_queue].length); + const valueWithSize = container[_queue][0]; + return valueWithSize.value; + } + + /** + * @param {ReadableByteStreamController} controller + * @returns {void} + */ + function readableByteStreamControllerCallPullIfNeeded(controller) { + const shouldPull = readableByteStreamControllerShouldCallPull(controller); + if (!shouldPull) { + return; + } + if (controller[_pulling]) { + controller[_pullAgain] = true; + return; + } + assert(controller[_pullAgain] === false); + controller[_pulling] = true; + /** @type {Promise} */ + const pullPromise = controller[_pullAlgorithm](controller); + setPromiseIsHandledToTrue( + PromisePrototypeThen( + pullPromise, + () => { + controller[_pulling] = false; + if (controller[_pullAgain]) { + controller[_pullAgain] = false; + readableByteStreamControllerCallPullIfNeeded(controller); + } + }, + (e) => { + readableByteStreamControllerError(controller, e); + }, + ), + ); + } + + /** + * @param {ReadableByteStreamController} controller + * @returns {void} + */ + function readableByteStreamControllerClearAlgorithms(controller) { + controller[_pullAlgorithm] = undefined; + controller[_cancelAlgorithm] = undefined; + } + + /** + * @param {ReadableByteStreamController} controller + * @param {any} e + */ + function readableByteStreamControllerError(controller, e) { + /** @type {ReadableStream} */ + const stream = controller[_stream]; + if (stream[_state] !== "readable") { + return; + } + // 3. Perform ! ReadableByteStreamControllerClearPendingPullIntos(controller). 
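+    /* Editorial note, not part of the original patch: the spec step quoted above
+       (ClearPendingPullIntos) is left as a comment because this port does not
+       track [[pendingPullIntos]] yet (no BYOB reader support), so erroring the
+       controller only resets the queue and clears the algorithms below. */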
+ resetQueue(controller); + readableByteStreamControllerClearAlgorithms(controller); + readableStreamError(stream, e); + } + + /** + * @param {ReadableByteStreamController} controller + * @returns {void} + */ + function readableByteStreamControllerClose(controller) { + /** @type {ReadableStream} */ + const stream = controller[_stream]; + if (controller[_closeRequested] || stream[_state] !== "readable") { + return; + } + if (controller[_queueTotalSize] > 0) { + controller[_closeRequested] = true; + return; + } + // 3.13.6.4 If controller.[[pendingPullIntos]] is not empty, (BYOB Support) + readableByteStreamControllerClearAlgorithms(controller); + readableStreamClose(stream); + } + + /** + * @param {ReadableByteStreamController} controller + * @param {ArrayBufferView} chunk + */ + function readableByteStreamControllerEnqueue(controller, chunk) { + /** @type {ReadableStream} */ + const stream = controller[_stream]; + if ( + controller[_closeRequested] || + controller[_stream][_state] !== "readable" + ) { + return; + } + + const { buffer, byteOffset, byteLength } = chunk; + const transferredBuffer = transferArrayBuffer(buffer); + if (readableStreamHasDefaultReader(stream)) { + if (readableStreamGetNumReadRequests(stream) === 0) { + readableByteStreamControllerEnqueueChunkToQueue( + controller, + transferredBuffer, + byteOffset, + byteLength, + ); + } else { + assert(controller[_queue].length === 0); + const transferredView = new Uint8Array( + transferredBuffer, + byteOffset, + byteLength, + ); + readableStreamFulfillReadRequest(stream, transferredView, false); + } + // 8 Otherwise, if ! ReadableStreamHasBYOBReader(stream) is true, + } else { + assert(isReadableStreamLocked(stream) === false); + readableByteStreamControllerEnqueueChunkToQueue( + controller, + transferredBuffer, + byteOffset, + byteLength, + ); + } + readableByteStreamControllerCallPullIfNeeded(controller); + } + + /** + * @param {ReadableByteStreamController} controller + * @param {ArrayBufferLike} buffer + * @param {number} byteOffset + * @param {number} byteLength + * @returns {void} + */ + function readableByteStreamControllerEnqueueChunkToQueue( + controller, + buffer, + byteOffset, + byteLength, + ) { + ArrayPrototypePush(controller[_queue], { buffer, byteOffset, byteLength }); + controller[_queueTotalSize] += byteLength; + } + + /** + * @param {ReadableByteStreamController} controller + * @returns {number | null} + */ + function readableByteStreamControllerGetDesiredSize(controller) { + const state = controller[_stream][_state]; + if (state === "errored") { + return null; + } + if (state === "closed") { + return 0; + } + return controller[_strategyHWM] - controller[_queueTotalSize]; + } + + /** + * @param {{ [_queue]: any[], [_queueTotalSize]: number }} container + * @returns {void} + */ + function resetQueue(container) { + container[_queue] = []; + container[_queueTotalSize] = 0; + } + + /** + * @param {ReadableByteStreamController} controller + * @returns {void} + */ + function readableByteStreamControllerHandleQueueDrain(controller) { + assert(controller[_stream][_state] === "readable"); + if ( + controller[_queueTotalSize] === 0 && controller[_closeRequested] + ) { + readableByteStreamControllerClearAlgorithms(controller); + readableStreamClose(controller[_stream]); + } else { + readableByteStreamControllerCallPullIfNeeded(controller); + } + } + + /** + * @param {ReadableByteStreamController} controller + * @returns {boolean} + */ + function readableByteStreamControllerShouldCallPull(controller) { + /** @type 
{ReadableStream} */ + const stream = controller[_stream]; + if ( + stream[_state] !== "readable" || + controller[_closeRequested] || + !controller[_started] + ) { + return false; + } + if ( + readableStreamHasDefaultReader(stream) && + readableStreamGetNumReadRequests(stream) > 0 + ) { + return true; + } + // 3.13.25.6 If ! ReadableStreamHasBYOBReader(stream) is true and ! + // ReadableStreamGetNumReadIntoRequests(stream) > 0, return true. + const desiredSize = readableByteStreamControllerGetDesiredSize(controller); + assert(desiredSize !== null); + return desiredSize > 0; + } + + /** + * @template R + * @param {ReadableStream} stream + * @param {ReadRequest} readRequest + * @returns {void} + */ + function readableStreamAddReadRequest(stream, readRequest) { + assert(isReadableStreamDefaultReader(stream[_reader])); + assert(stream[_state] === "readable"); + ArrayPrototypePush(stream[_reader][_readRequests], readRequest); + } + + /** + * @template R + * @param {ReadableStream} stream + * @param {any=} reason + * @returns {Promise} + */ + function readableStreamCancel(stream, reason) { + stream[_disturbed] = true; + if (stream[_state] === "closed") { + return resolvePromiseWith(undefined); + } + if (stream[_state] === "errored") { + return PromiseReject(stream[_storedError]); + } + readableStreamClose(stream); + /** @type {Promise} */ + const sourceCancelPromise = stream[_controller][_cancelSteps](reason); + return PromisePrototypeThen(sourceCancelPromise, () => undefined); + } + + /** + * @template R + * @param {ReadableStream} stream + * @returns {void} + */ + function readableStreamClose(stream) { + assert(stream[_state] === "readable"); + stream[_state] = "closed"; + /** @type {ReadableStreamDefaultReader | undefined} */ + const reader = stream[_reader]; + if (!reader) { + return; + } + if (isReadableStreamDefaultReader(reader)) { + /** @type {Array>} */ + const readRequests = reader[_readRequests]; + for (const readRequest of readRequests) { + readRequest.closeSteps(); + } + reader[_readRequests] = []; + } + // This promise can be double resolved. 
+ // See: https://github.com/whatwg/streams/issues/1100 + reader[_closedPromise].resolve(undefined); + } + + /** @param {ReadableStreamDefaultController} controller */ + function readableStreamDefaultControllerCallPullIfNeeded(controller) { + const shouldPull = readableStreamDefaultcontrollerShouldCallPull( + controller, + ); + if (shouldPull === false) { + return; + } + if (controller[_pulling] === true) { + controller[_pullAgain] = true; + return; + } + assert(controller[_pullAgain] === false); + controller[_pulling] = true; + const pullPromise = controller[_pullAlgorithm](controller); + uponFulfillment(pullPromise, () => { + controller[_pulling] = false; + if (controller[_pullAgain] === true) { + controller[_pullAgain] = false; + readableStreamDefaultControllerCallPullIfNeeded(controller); + } + }); + uponRejection(pullPromise, (e) => { + readableStreamDefaultControllerError(controller, e); + }); + } + + /** + * @param {ReadableStreamDefaultController} controller + * @returns {boolean} + */ + function readableStreamDefaultControllerCanCloseOrEnqueue(controller) { + const state = controller[_stream][_state]; + if (controller[_closeRequested] === false && state === "readable") { + return true; + } else { + return false; + } + } + + /** @param {ReadableStreamDefaultController} controller */ + function readableStreamDefaultControllerClearAlgorithms(controller) { + controller[_pullAlgorithm] = undefined; + controller[_cancelAlgorithm] = undefined; + controller[_strategySizeAlgorithm] = undefined; + } + + /** @param {ReadableStreamDefaultController} controller */ + function readableStreamDefaultControllerClose(controller) { + if ( + readableStreamDefaultControllerCanCloseOrEnqueue(controller) === false + ) { + return; + } + const stream = controller[_stream]; + controller[_closeRequested] = true; + if (controller[_queue].length === 0) { + readableStreamDefaultControllerClearAlgorithms(controller); + readableStreamClose(stream); + } + } + + /** + * @template R + * @param {ReadableStreamDefaultController} controller + * @param {R} chunk + * @returns {void} + */ + function readableStreamDefaultControllerEnqueue(controller, chunk) { + if ( + readableStreamDefaultControllerCanCloseOrEnqueue(controller) === false + ) { + return; + } + const stream = controller[_stream]; + if ( + isReadableStreamLocked(stream) === true && + readableStreamGetNumReadRequests(stream) > 0 + ) { + readableStreamFulfillReadRequest(stream, chunk, false); + } else { + let chunkSize; + try { + chunkSize = controller[_strategySizeAlgorithm](chunk); + } catch (e) { + readableStreamDefaultControllerError(controller, e); + throw e; + } + + try { + enqueueValueWithSize(controller, chunk, chunkSize); + } catch (e) { + readableStreamDefaultControllerError(controller, e); + throw e; + } + } + readableStreamDefaultControllerCallPullIfNeeded(controller); + } + + /** + * @param {ReadableStreamDefaultController} controller + * @param {any} e + */ + function readableStreamDefaultControllerError(controller, e) { + const stream = controller[_stream]; + if (stream[_state] !== "readable") { + return; + } + resetQueue(controller); + readableStreamDefaultControllerClearAlgorithms(controller); + readableStreamError(stream, e); + } + + /** + * @param {ReadableStreamDefaultController} controller + * @returns {number | null} + */ + function readableStreamDefaultControllerGetDesiredSize(controller) { + const state = controller[_stream][_state]; + if (state === "errored") { + return null; + } + if (state === "closed") { + return 0; + } + return 
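+    /* Editorial note, not part of the original patch: the desired size computed
+       below is simply highWaterMark minus the total queued size (e.g. an HWM of
+       3 with two queued chunks of size 1 gives 1); it can go negative when the
+       queue overfills, which is why readableStreamDefaultcontrollerShouldCallPull
+       only pulls while it stays greater than 0. */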
controller[_strategyHWM] - controller[_queueTotalSize]; + } + + /** @param {ReadableStreamDefaultController} controller */ + function readableStreamDefaultcontrollerHasBackpressure(controller) { + if (readableStreamDefaultcontrollerShouldCallPull(controller) === true) { + return false; + } else { + return true; + } + } + + /** + * @param {ReadableStreamDefaultController} controller + * @returns {boolean} + */ + function readableStreamDefaultcontrollerShouldCallPull(controller) { + const stream = controller[_stream]; + if ( + readableStreamDefaultControllerCanCloseOrEnqueue(controller) === false + ) { + return false; + } + if (controller[_started] === false) { + return false; + } + if ( + isReadableStreamLocked(stream) && + readableStreamGetNumReadRequests(stream) > 0 + ) { + return true; + } + const desiredSize = readableStreamDefaultControllerGetDesiredSize( + controller, + ); + assert(desiredSize !== null); + if (desiredSize > 0) { + return true; + } + return false; + } + + /** + * @template R + * @param {ReadableStreamDefaultReader} reader + * @param {ReadRequest} readRequest + * @returns {void} + */ + function readableStreamDefaultReaderRead(reader, readRequest) { + const stream = reader[_stream]; + assert(stream); + stream[_disturbed] = true; + if (stream[_state] === "closed") { + readRequest.closeSteps(); + } else if (stream[_state] === "errored") { + readRequest.errorSteps(stream[_storedError]); + } else { + assert(stream[_state] === "readable"); + stream[_controller][_pullSteps](readRequest); + } + } + + /** + * @template R + * @param {ReadableStream} stream + * @param {any} e + */ + function readableStreamError(stream, e) { + assert(stream[_state] === "readable"); + stream[_state] = "errored"; + stream[_storedError] = e; + /** @type {ReadableStreamDefaultReader | undefined} */ + const reader = stream[_reader]; + if (reader === undefined) { + return; + } + /** @type {Deferred} */ + const closedPromise = reader[_closedPromise]; + closedPromise.reject(e); + setPromiseIsHandledToTrue(closedPromise.promise); + if (isReadableStreamDefaultReader(reader)) { + /** @type {Array>} */ + const readRequests = reader[_readRequests]; + for (const readRequest of readRequests) { + readRequest.errorSteps(e); + } + reader[_readRequests] = []; + } + // 3.5.6.8 Otherwise, support BYOB Reader + } + + /** + * @template R + * @param {ReadableStream} stream + * @param {R} chunk + * @param {boolean} done + */ + function readableStreamFulfillReadRequest(stream, chunk, done) { + assert(readableStreamHasDefaultReader(stream) === true); + /** @type {ReadableStreamDefaultReader} */ + const reader = stream[_reader]; + assert(reader[_readRequests].length); + /** @type {ReadRequest} */ + const readRequest = ArrayPrototypeShift(reader[_readRequests]); + if (done) { + readRequest.closeSteps(); + } else { + readRequest.chunkSteps(chunk); + } + } + + /** + * @param {ReadableStream} stream + * @return {number} + */ + function readableStreamGetNumReadRequests(stream) { + assert(readableStreamHasDefaultReader(stream) === true); + return stream[_reader][_readRequests].length; + } + + /** + * @param {ReadableStream} stream + * @returns {boolean} + */ + function readableStreamHasDefaultReader(stream) { + const reader = stream[_reader]; + if (reader === undefined) { + return false; + } + if (isReadableStreamDefaultReader(reader)) { + return true; + } + return false; + } + + /** + * @template T + * @param {ReadableStream} source + * @param {WritableStream} dest + * @param {boolean} preventClose + * @param {boolean} 
preventAbort + * @param {boolean} preventCancel + * @param {AbortSignal=} signal + * @returns {Promise} + */ + function readableStreamPipeTo( + source, + dest, + preventClose, + preventAbort, + preventCancel, + signal, + ) { + assert(isReadableStream(source)); + assert(isWritableStream(dest)); + assert( + typeof preventClose === "boolean" && typeof preventAbort === "boolean" && + typeof preventCancel === "boolean", + ); + assert(signal === undefined || signal instanceof AbortSignal); + assert(!isReadableStreamLocked(source)); + assert(!isWritableStreamLocked(dest)); + const reader = acquireReadableStreamDefaultReader(source); + const writer = acquireWritableStreamDefaultWriter(dest); + source[_disturbed] = true; + let shuttingDown = false; + let currentWrite = resolvePromiseWith(undefined); + /** @type {Deferred} */ + const promise = new Deferred(); + /** @type {() => void} */ + let abortAlgorithm; + if (signal) { + abortAlgorithm = () => { + const error = new DOMException("Aborted", "AbortError"); + /** @type {Array<() => Promise>} */ + const actions = []; + if (preventAbort === false) { + ArrayPrototypePush(actions, () => { + if (dest[_state] === "writable") { + return writableStreamAbort(dest, error); + } else { + return resolvePromiseWith(undefined); + } + }); + } + if (preventCancel === false) { + ArrayPrototypePush(actions, () => { + if (source[_state] === "readable") { + return readableStreamCancel(source, error); + } else { + return resolvePromiseWith(undefined); + } + }); + } + shutdownWithAction( + () => PromiseAll(ArrayPrototypeMap(actions, (action) => action())), + true, + error, + ); + }; + + if (signal.aborted) { + abortAlgorithm(); + return promise.promise; + } + // TODO(lucacasonato): use the internal API to listen for abort. + signal.addEventListener("abort", abortAlgorithm); + } + + function pipeLoop() { + return new Promise((resolveLoop, rejectLoop) => { + /** @param {boolean} done */ + function next(done) { + if (done) { + resolveLoop(); + } else { + uponPromise(pipeStep(), next, rejectLoop); + } + } + next(false); + }); + } + + /** @returns {Promise} */ + function pipeStep() { + if (shuttingDown === true) { + return resolvePromiseWith(true); + } + + return transformPromiseWith(writer[_readyPromise].promise, () => { + return new Promise((resolveRead, rejectRead) => { + readableStreamDefaultReaderRead( + reader, + { + chunkSteps(chunk) { + currentWrite = transformPromiseWith( + writableStreamDefaultWriterWrite(writer, chunk), + undefined, + () => {}, + ); + resolveRead(false); + }, + closeSteps() { + resolveRead(true); + }, + errorSteps: rejectRead, + }, + ); + }); + }); + } + + isOrBecomesErrored( + source, + reader[_closedPromise].promise, + (storedError) => { + if (preventAbort === false) { + shutdownWithAction( + () => writableStreamAbort(dest, storedError), + true, + storedError, + ); + } else { + shutdown(true, storedError); + } + }, + ); + + isOrBecomesErrored(dest, writer[_closedPromise].promise, (storedError) => { + if (preventCancel === false) { + shutdownWithAction( + () => readableStreamCancel(source, storedError), + true, + storedError, + ); + } else { + shutdown(true, storedError); + } + }); + + isOrBecomesClosed(source, reader[_closedPromise].promise, () => { + if (preventClose === false) { + shutdownWithAction(() => + writableStreamDefaultWriterCloseWithErrorPropagation(writer) + ); + } else { + shutdown(); + } + }); + + if ( + writableStreamCloseQueuedOrInFlight(dest) === true || + dest[_state] === "closed" + ) { + const destClosed = new TypeError( + 
"The destination writable stream closed before all the data could be piped to it.", + ); + if (preventCancel === false) { + shutdownWithAction( + () => readableStreamCancel(source, destClosed), + true, + destClosed, + ); + } else { + shutdown(true, destClosed); + } + } + + setPromiseIsHandledToTrue(pipeLoop()); + + return promise.promise; + + /** @returns {Promise} */ + function waitForWritesToFinish() { + const oldCurrentWrite = currentWrite; + return transformPromiseWith( + currentWrite, + () => + oldCurrentWrite !== currentWrite + ? waitForWritesToFinish() + : undefined, + ); + } + + /** + * @param {ReadableStream | WritableStream} stream + * @param {Promise} promise + * @param {(e: any) => void} action + */ + function isOrBecomesErrored(stream, promise, action) { + if (stream[_state] === "errored") { + action(stream[_storedError]); + } else { + uponRejection(promise, action); + } + } + + /** + * @param {ReadableStream} stream + * @param {Promise} promise + * @param {() => void} action + */ + function isOrBecomesClosed(stream, promise, action) { + if (stream[_state] === "closed") { + action(); + } else { + uponFulfillment(promise, action); + } + } + + /** + * @param {() => Promise} action + * @param {boolean=} originalIsError + * @param {any=} originalError + */ + function shutdownWithAction(action, originalIsError, originalError) { + function doTheRest() { + uponPromise( + action(), + () => finalize(originalIsError, originalError), + (newError) => finalize(true, newError), + ); + } + + if (shuttingDown === true) { + return; + } + shuttingDown = true; + + if ( + dest[_state] === "writable" && + writableStreamCloseQueuedOrInFlight(dest) === false + ) { + uponFulfillment(waitForWritesToFinish(), doTheRest); + } else { + doTheRest(); + } + } + + /** + * @param {boolean=} isError + * @param {any=} error + */ + function shutdown(isError, error) { + if (shuttingDown) { + return; + } + shuttingDown = true; + if ( + dest[_state] === "writable" && + writableStreamCloseQueuedOrInFlight(dest) === false + ) { + uponFulfillment( + waitForWritesToFinish(), + () => finalize(isError, error), + ); + } else { + finalize(isError, error); + } + } + + /** + * @param {boolean=} isError + * @param {any=} error + */ + function finalize(isError, error) { + writableStreamDefaultWriterRelease(writer); + readableStreamReaderGenericRelease(reader); + + if (signal !== undefined) { + // TODO(lucacasonato): use the internal API to remove the listener. 
+ signal.removeEventListener("abort", abortAlgorithm); + } + if (isError) { + promise.reject(error); + } else { + promise.resolve(undefined); + } + } + } + + /** + * @param {ReadableStreamGenericReader} reader + * @param {any} reason + * @returns {Promise} + */ + function readableStreamReaderGenericCancel(reader, reason) { + const stream = reader[_stream]; + assert(stream !== undefined); + return readableStreamCancel(stream, reason); + } + + /** + * @template R + * @param {ReadableStreamDefaultReader} reader + * @param {ReadableStream} stream + */ + function readableStreamReaderGenericInitialize(reader, stream) { + reader[_stream] = stream; + stream[_reader] = reader; + if (stream[_state] === "readable") { + reader[_closedPromise] = new Deferred(); + } else if (stream[_state] === "closed") { + reader[_closedPromise] = new Deferred(); + reader[_closedPromise].resolve(undefined); + } else { + assert(stream[_state] === "errored"); + reader[_closedPromise] = new Deferred(); + reader[_closedPromise].reject(stream[_storedError]); + setPromiseIsHandledToTrue(reader[_closedPromise].promise); + } + } + + /** + * @template R + * @param {ReadableStreamGenericReader} reader + */ + function readableStreamReaderGenericRelease(reader) { + assert(reader[_stream] !== undefined); + assert(reader[_stream][_reader] === reader); + if (reader[_stream][_state] === "readable") { + reader[_closedPromise].reject( + new TypeError( + "Reader was released and can no longer be used to monitor the stream's closedness.", + ), + ); + } else { + reader[_closedPromise] = new Deferred(); + reader[_closedPromise].reject( + new TypeError( + "Reader was released and can no longer be used to monitor the stream's closedness.", + ), + ); + } + setPromiseIsHandledToTrue(reader[_closedPromise].promise); + reader[_stream][_reader] = undefined; + reader[_stream] = undefined; + } + + /** + * @template R + * @param {ReadableStream} stream + * @param {boolean} cloneForBranch2 + * @returns {[ReadableStream, ReadableStream]} + */ + function readableStreamTee(stream, cloneForBranch2) { + assert(isReadableStream(stream)); + assert(typeof cloneForBranch2 === "boolean"); + const reader = acquireReadableStreamDefaultReader(stream); + let reading = false; + let canceled1 = false; + let canceled2 = false; + /** @type {any} */ + let reason1; + /** @type {any} */ + let reason2; + /** @type {ReadableStream} */ + // deno-lint-ignore prefer-const + let branch1; + /** @type {ReadableStream} */ + // deno-lint-ignore prefer-const + let branch2; + + /** @type {Deferred} */ + const cancelPromise = new Deferred(); + + function pullAlgorithm() { + if (reading === true) { + return resolvePromiseWith(undefined); + } + reading = true; + /** @type {ReadRequest} */ + const readRequest = { + chunkSteps(value) { + queueMicrotask(() => { + reading = false; + const value1 = value; + const value2 = value; + + // TODO(lucacasonato): respect clonedForBranch2. 
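+              /* Editorial note, not part of the original patch: until the TODO
+                 above is addressed, both branches receive the very same chunk
+                 object (value1 and value2 alias `value`), so mutating a chunk
+                 read from one branch is observable on the other; the
+                 cloneForBranch2 flag is validated but effectively ignored here. */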
+ + if (canceled1 === false) { + readableStreamDefaultControllerEnqueue( + /** @type {ReadableStreamDefaultController} */ (branch1[ + _controller + ]), + value1, + ); + } + if (canceled2 === false) { + readableStreamDefaultControllerEnqueue( + /** @type {ReadableStreamDefaultController} */ (branch2[ + _controller + ]), + value2, + ); + } + }); + }, + closeSteps() { + reading = false; + if (canceled1 === false) { + readableStreamDefaultControllerClose( + /** @type {ReadableStreamDefaultController} */ (branch1[ + _controller + ]), + ); + } + if (canceled2 === false) { + readableStreamDefaultControllerClose( + /** @type {ReadableStreamDefaultController} */ (branch2[ + _controller + ]), + ); + } + cancelPromise.resolve(undefined); + }, + errorSteps() { + reading = false; + }, + }; + readableStreamDefaultReaderRead(reader, readRequest); + return resolvePromiseWith(undefined); + } + + /** + * @param {any} reason + * @returns {Promise} + */ + function cancel1Algorithm(reason) { + canceled1 = true; + reason1 = reason; + if (canceled2 === true) { + const compositeReason = [reason1, reason2]; + const cancelResult = readableStreamCancel(stream, compositeReason); + cancelPromise.resolve(cancelResult); + } + return cancelPromise.promise; + } + + /** + * @param {any} reason + * @returns {Promise} + */ + function cancel2Algorithm(reason) { + canceled2 = true; + reason2 = reason; + if (canceled1 === true) { + const compositeReason = [reason1, reason2]; + const cancelResult = readableStreamCancel(stream, compositeReason); + cancelPromise.resolve(cancelResult); + } + return cancelPromise.promise; + } + + function startAlgorithm() {} + + branch1 = createReadableStream( + startAlgorithm, + pullAlgorithm, + cancel1Algorithm, + ); + branch2 = createReadableStream( + startAlgorithm, + pullAlgorithm, + cancel2Algorithm, + ); + + uponRejection(reader[_closedPromise].promise, (r) => { + readableStreamDefaultControllerError( + /** @type {ReadableStreamDefaultController} */ (branch1[ + _controller + ]), + r, + ); + readableStreamDefaultControllerError( + /** @type {ReadableStreamDefaultController} */ (branch2[ + _controller + ]), + r, + ); + if (canceled1 === false || canceled2 === false) { + cancelPromise.resolve(undefined); + } + }); + + return [branch1, branch2]; + } + + /** + * @param {ReadableStream} stream + * @param {ReadableByteStreamController} controller + * @param {() => void} startAlgorithm + * @param {() => Promise} pullAlgorithm + * @param {(reason: any) => Promise} cancelAlgorithm + * @param {number} highWaterMark + * @param {number | undefined} autoAllocateChunkSize + */ + function setUpReadableByteStreamController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + autoAllocateChunkSize, + ) { + assert(stream[_controller] === undefined); + if (autoAllocateChunkSize !== undefined) { + assert(NumberIsInteger(autoAllocateChunkSize)); + assert(autoAllocateChunkSize >= 0); + } + controller[_stream] = stream; + controller[_pullAgain] = controller[_pulling] = false; + controller[_byobRequest] = undefined; + resetQueue(controller); + controller[_closeRequested] = controller[_started] = false; + controller[_strategyHWM] = highWaterMark; + controller[_pullAlgorithm] = pullAlgorithm; + controller[_cancelAlgorithm] = cancelAlgorithm; + controller[_autoAllocateChunkSize] = autoAllocateChunkSize; + // 12. Set controller.[[pendingPullIntos]] to a new empty list. 
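+    /* Editorial note, not part of the original patch: as with the other BYOB
+       steps quoted in comments in this file, [[pendingPullIntos]] is not
+       materialized here; byte stream controllers in this port only serve
+       default readers, so the pending-pull-into bookkeeping is skipped. */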
+ stream[_controller] = controller; + const startResult = startAlgorithm(); + const startPromise = resolvePromiseWith(startResult); + setPromiseIsHandledToTrue( + PromisePrototypeThen( + startPromise, + () => { + controller[_started] = true; + assert(controller[_pulling] === false); + assert(controller[_pullAgain] === false); + readableByteStreamControllerCallPullIfNeeded(controller); + }, + (r) => { + readableByteStreamControllerError(controller, r); + }, + ), + ); + } + + /** + * @param {ReadableStream} stream + * @param {UnderlyingSource} underlyingSource + * @param {UnderlyingSource} underlyingSourceDict + * @param {number} highWaterMark + */ + function setUpReadableByteStreamControllerFromUnderlyingSource( + stream, + underlyingSource, + underlyingSourceDict, + highWaterMark, + ) { + const controller = webidl.createBranded(ReadableByteStreamController); + /** @type {() => void} */ + let startAlgorithm = () => undefined; + /** @type {() => Promise} */ + let pullAlgorithm = () => resolvePromiseWith(undefined); + /** @type {(reason: any) => Promise} */ + let cancelAlgorithm = (_reason) => resolvePromiseWith(undefined); + if (underlyingSourceDict.start !== undefined) { + startAlgorithm = () => + webidl.invokeCallbackFunction( + underlyingSourceDict.start, + [controller], + underlyingSource, + webidl.converters.any, + { + prefix: + "Failed to call 'startAlgorithm' on 'ReadableByteStreamController'", + }, + ); + } + if (underlyingSourceDict.pull !== undefined) { + pullAlgorithm = () => + webidl.invokeCallbackFunction( + underlyingSourceDict.pull, + [controller], + underlyingSource, + webidl.converters["Promise"], + { + prefix: + "Failed to call 'pullAlgorithm' on 'ReadableByteStreamController'", + returnsPromise: true, + }, + ); + } + if (underlyingSourceDict.cancel !== undefined) { + cancelAlgorithm = (reason) => + webidl.invokeCallbackFunction( + underlyingSourceDict.cancel, + [reason], + underlyingSource, + webidl.converters["Promise"], + { + prefix: + "Failed to call 'cancelAlgorithm' on 'ReadableByteStreamController'", + returnsPromise: true, + }, + ); + } + // 3.13.27.6 Let autoAllocateChunkSize be ? GetV(underlyingByteSource, "autoAllocateChunkSize"). 
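+    /* Editorial note, not part of the original patch: rather than reading
+       autoAllocateChunkSize off the underlying source as the quoted spec step
+       says, the value is pinned to undefined just below, i.e. auto-allocation
+       for byte streams is not supported through this setup path yet. */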
+ /** @type {undefined} */ + const autoAllocateChunkSize = undefined; + setUpReadableByteStreamController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + autoAllocateChunkSize, + ); + } + + /** + * @template R + * @param {ReadableStream} stream + * @param {ReadableStreamDefaultController} controller + * @param {(controller: ReadableStreamDefaultController) => void | Promise} startAlgorithm + * @param {(controller: ReadableStreamDefaultController) => Promise} pullAlgorithm + * @param {(reason: any) => Promise} cancelAlgorithm + * @param {number} highWaterMark + * @param {(chunk: R) => number} sizeAlgorithm + */ + function setUpReadableStreamDefaultController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + sizeAlgorithm, + ) { + assert(stream[_controller] === undefined); + controller[_stream] = stream; + resetQueue(controller); + controller[_started] = controller[_closeRequested] = + controller[_pullAgain] = controller[_pulling] = false; + controller[_strategySizeAlgorithm] = sizeAlgorithm; + controller[_strategyHWM] = highWaterMark; + controller[_pullAlgorithm] = pullAlgorithm; + controller[_cancelAlgorithm] = cancelAlgorithm; + stream[_controller] = controller; + const startResult = startAlgorithm(controller); + const startPromise = resolvePromiseWith(startResult); + uponPromise(startPromise, () => { + controller[_started] = true; + assert(controller[_pulling] === false); + assert(controller[_pullAgain] === false); + readableStreamDefaultControllerCallPullIfNeeded(controller); + }, (r) => { + readableStreamDefaultControllerError(controller, r); + }); + } + + /** + * @template R + * @param {ReadableStream} stream + * @param {UnderlyingSource} underlyingSource + * @param {UnderlyingSource} underlyingSourceDict + * @param {number} highWaterMark + * @param {(chunk: R) => number} sizeAlgorithm + */ + function setUpReadableStreamDefaultControllerFromUnderlyingSource( + stream, + underlyingSource, + underlyingSourceDict, + highWaterMark, + sizeAlgorithm, + ) { + const controller = webidl.createBranded(ReadableStreamDefaultController); + /** @type {() => Promise} */ + let startAlgorithm = () => undefined; + /** @type {() => Promise} */ + let pullAlgorithm = () => resolvePromiseWith(undefined); + /** @type {(reason?: any) => Promise} */ + let cancelAlgorithm = () => resolvePromiseWith(undefined); + if (underlyingSourceDict.start !== undefined) { + startAlgorithm = () => + webidl.invokeCallbackFunction( + underlyingSourceDict.start, + [controller], + underlyingSource, + webidl.converters.any, + { + prefix: + "Failed to call 'startAlgorithm' on 'ReadableStreamDefaultController'", + }, + ); + } + if (underlyingSourceDict.pull !== undefined) { + pullAlgorithm = () => + webidl.invokeCallbackFunction( + underlyingSourceDict.pull, + [controller], + underlyingSource, + webidl.converters["Promise"], + { + prefix: + "Failed to call 'pullAlgorithm' on 'ReadableStreamDefaultController'", + returnsPromise: true, + }, + ); + } + if (underlyingSourceDict.cancel !== undefined) { + cancelAlgorithm = (reason) => + webidl.invokeCallbackFunction( + underlyingSourceDict.cancel, + [reason], + underlyingSource, + webidl.converters["Promise"], + { + prefix: + "Failed to call 'cancelAlgorithm' on 'ReadableStreamDefaultController'", + returnsPromise: true, + }, + ); + } + setUpReadableStreamDefaultController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + sizeAlgorithm, + ); + 
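+    /* Illustrative sketch, not part of the original patch: this function appears
+       to be the plumbing behind the public ReadableStream constructor later in
+       this file. Author-supplied callbacks are wrapped through
+       webidl.invokeCallbackFunction and missing ones fall back to no-op
+       defaults, so a stream such as
+
+         new ReadableStream({
+           start(controller) { controller.enqueue("a"); },
+           pull(controller) { controller.close(); },
+         }, { highWaterMark: 1 });
+
+       ends up as the startAlgorithm/pullAlgorithm/cancelAlgorithm handed to
+       setUpReadableStreamDefaultController above. */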
} + + /** + * @template R + * @param {ReadableStreamDefaultReader} reader + * @param {ReadableStream} stream + */ + function setUpReadableStreamDefaultReader(reader, stream) { + if (isReadableStreamLocked(stream)) { + throw new TypeError("ReadableStream is locked."); + } + readableStreamReaderGenericInitialize(reader, stream); + reader[_readRequests] = []; + } + + /** + * @template O + * @param {TransformStream} stream + * @param {TransformStreamDefaultController} controller + * @param {(chunk: O, controller: TransformStreamDefaultController) => Promise} transformAlgorithm + * @param {(controller: TransformStreamDefaultController) => Promise} flushAlgorithm + */ + function setUpTransformStreamDefaultController( + stream, + controller, + transformAlgorithm, + flushAlgorithm, + ) { + assert(stream instanceof TransformStream); + assert(stream[_controller] === undefined); + controller[_stream] = stream; + stream[_controller] = controller; + controller[_transformAlgorithm] = transformAlgorithm; + controller[_flushAlgorithm] = flushAlgorithm; + } + + /** + * @template I + * @template O + * @param {TransformStream} stream + * @param {Transformer} transformer + * @param {Transformer} transformerDict + */ + function setUpTransformStreamDefaultControllerFromTransformer( + stream, + transformer, + transformerDict, + ) { + /** @type {TransformStreamDefaultController} */ + const controller = webidl.createBranded(TransformStreamDefaultController); + /** @type {(chunk: O, controller: TransformStreamDefaultController) => Promise} */ + let transformAlgorithm = (chunk) => { + try { + transformStreamDefaultControllerEnqueue(controller, chunk); + } catch (e) { + return PromiseReject(e); + } + return resolvePromiseWith(undefined); + }; + /** @type {(controller: TransformStreamDefaultController) => Promise} */ + let flushAlgorithm = () => resolvePromiseWith(undefined); + if (transformerDict.transform !== undefined) { + transformAlgorithm = (chunk, controller) => + webidl.invokeCallbackFunction( + transformerDict.transform, + [chunk, controller], + transformer, + webidl.converters["Promise"], + { + prefix: + "Failed to call 'transformAlgorithm' on 'TransformStreamDefaultController'", + returnsPromise: true, + }, + ); + } + if (transformerDict.flush !== undefined) { + flushAlgorithm = (controller) => + webidl.invokeCallbackFunction( + transformerDict.flush, + [controller], + transformer, + webidl.converters["Promise"], + { + prefix: + "Failed to call 'flushAlgorithm' on 'TransformStreamDefaultController'", + returnsPromise: true, + }, + ); + } + setUpTransformStreamDefaultController( + stream, + controller, + transformAlgorithm, + flushAlgorithm, + ); + } + + /** + * @template W + * @param {WritableStream} stream + * @param {WritableStreamDefaultController} controller + * @param {(controller: WritableStreamDefaultController) => Promise} startAlgorithm + * @param {(chunk: W, controller: WritableStreamDefaultController) => Promise} writeAlgorithm + * @param {() => Promise} closeAlgorithm + * @param {(reason?: any) => Promise} abortAlgorithm + * @param {number} highWaterMark + * @param {(chunk: W) => number} sizeAlgorithm + */ + function setUpWritableStreamDefaultController( + stream, + controller, + startAlgorithm, + writeAlgorithm, + closeAlgorithm, + abortAlgorithm, + highWaterMark, + sizeAlgorithm, + ) { + assert(isWritableStream(stream)); + assert(stream[_controller] === undefined); + controller[_stream] = stream; + stream[_controller] = controller; + resetQueue(controller); + controller[_started] = false; + 
controller[_strategySizeAlgorithm] = sizeAlgorithm; + controller[_strategyHWM] = highWaterMark; + controller[_writeAlgorithm] = writeAlgorithm; + controller[_closeAlgorithm] = closeAlgorithm; + controller[_abortAlgorithm] = abortAlgorithm; + const backpressure = writableStreamDefaultControllerGetBackpressure( + controller, + ); + writableStreamUpdateBackpressure(stream, backpressure); + const startResult = startAlgorithm(controller); + const startPromise = resolvePromiseWith(startResult); + uponPromise(startPromise, () => { + assert(stream[_state] === "writable" || stream[_state] === "erroring"); + controller[_started] = true; + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); + }, (r) => { + assert(stream[_state] === "writable" || stream[_state] === "erroring"); + controller[_started] = true; + writableStreamDealWithRejection(stream, r); + }); + } + + /** + * @template W + * @param {WritableStream} stream + * @param {UnderlyingSink} underlyingSink + * @param {UnderlyingSink} underlyingSinkDict + * @param {number} highWaterMark + * @param {(chunk: W) => number} sizeAlgorithm + */ + function setUpWritableStreamDefaultControllerFromUnderlyingSink( + stream, + underlyingSink, + underlyingSinkDict, + highWaterMark, + sizeAlgorithm, + ) { + const controller = webidl.createBranded(WritableStreamDefaultController); + /** @type {(controller: WritableStreamDefaultController) => any} */ + let startAlgorithm = () => undefined; + /** @type {(chunk: W, controller: WritableStreamDefaultController) => Promise} */ + let writeAlgorithm = () => resolvePromiseWith(undefined); + let closeAlgorithm = () => resolvePromiseWith(undefined); + /** @type {(reason?: any) => Promise} */ + let abortAlgorithm = () => resolvePromiseWith(undefined); + + if (underlyingSinkDict.start !== undefined) { + startAlgorithm = () => + webidl.invokeCallbackFunction( + underlyingSinkDict.start, + [controller], + underlyingSink, + webidl.converters.any, + { + prefix: + "Failed to call 'startAlgorithm' on 'WritableStreamDefaultController'", + }, + ); + } + if (underlyingSinkDict.write !== undefined) { + writeAlgorithm = (chunk) => + webidl.invokeCallbackFunction( + underlyingSinkDict.write, + [chunk, controller], + underlyingSink, + webidl.converters["Promise"], + { + prefix: + "Failed to call 'writeAlgorithm' on 'WritableStreamDefaultController'", + returnsPromise: true, + }, + ); + } + if (underlyingSinkDict.close !== undefined) { + closeAlgorithm = () => + webidl.invokeCallbackFunction( + underlyingSinkDict.close, + [], + underlyingSink, + webidl.converters["Promise"], + { + prefix: + "Failed to call 'closeAlgorithm' on 'WritableStreamDefaultController'", + returnsPromise: true, + }, + ); + } + if (underlyingSinkDict.abort !== undefined) { + abortAlgorithm = (reason) => + webidl.invokeCallbackFunction( + underlyingSinkDict.abort, + [reason], + underlyingSink, + webidl.converters["Promise"], + { + prefix: + "Failed to call 'abortAlgorithm' on 'WritableStreamDefaultController'", + returnsPromise: true, + }, + ); + } + setUpWritableStreamDefaultController( + stream, + controller, + startAlgorithm, + writeAlgorithm, + closeAlgorithm, + abortAlgorithm, + highWaterMark, + sizeAlgorithm, + ); + } + + /** + * @template W + * @param {WritableStreamDefaultWriter} writer + * @param {WritableStream} stream + */ + function setUpWritableStreamDefaultWriter(writer, stream) { + if (isWritableStreamLocked(stream) === true) { + throw new TypeError("The stream is already locked."); + } + writer[_stream] = stream; + stream[_writer] = 
writer; + const state = stream[_state]; + if (state === "writable") { + if ( + writableStreamCloseQueuedOrInFlight(stream) === false && + stream[_backpressure] === true + ) { + writer[_readyPromise] = new Deferred(); + } else { + writer[_readyPromise] = new Deferred(); + writer[_readyPromise].resolve(undefined); + } + writer[_closedPromise] = new Deferred(); + } else if (state === "erroring") { + writer[_readyPromise] = new Deferred(); + writer[_readyPromise].reject(stream[_storedError]); + setPromiseIsHandledToTrue(writer[_readyPromise].promise); + writer[_closedPromise] = new Deferred(); + } else if (state === "closed") { + writer[_readyPromise] = new Deferred(); + writer[_readyPromise].resolve(undefined); + writer[_closedPromise] = new Deferred(); + writer[_closedPromise].resolve(undefined); + } else { + assert(state === "errored"); + const storedError = stream[_storedError]; + writer[_readyPromise] = new Deferred(); + writer[_readyPromise].reject(storedError); + setPromiseIsHandledToTrue(writer[_readyPromise].promise); + writer[_closedPromise] = new Deferred(); + writer[_closedPromise].reject(storedError); + setPromiseIsHandledToTrue(writer[_closedPromise].promise); + } + } + + /** @param {TransformStreamDefaultController} controller */ + function transformStreamDefaultControllerClearAlgorithms(controller) { + controller[_transformAlgorithm] = undefined; + controller[_flushAlgorithm] = undefined; + } + + /** + * @template O + * @param {TransformStreamDefaultController} controller + * @param {O} chunk + */ + function transformStreamDefaultControllerEnqueue(controller, chunk) { + const stream = controller[_stream]; + const readableController = stream[_readable][_controller]; + if ( + readableStreamDefaultControllerCanCloseOrEnqueue( + /** @type {ReadableStreamDefaultController} */ (readableController), + ) === false + ) { + throw new TypeError("Readable stream is unavailable."); + } + try { + readableStreamDefaultControllerEnqueue( + /** @type {ReadableStreamDefaultController} */ (readableController), + chunk, + ); + } catch (e) { + transformStreamErrorWritableAndUnblockWrite(stream, e); + throw stream[_readable][_storedError]; + } + const backpressure = readableStreamDefaultcontrollerHasBackpressure( + /** @type {ReadableStreamDefaultController} */ (readableController), + ); + if (backpressure !== stream[_backpressure]) { + assert(backpressure === true); + transformStreamSetBackpressure(stream, true); + } + } + + /** + * @param {TransformStreamDefaultController} controller + * @param {any=} e + */ + function transformStreamDefaultControllerError(controller, e) { + transformStreamError(controller[_stream], e); + } + + /** + * @template O + * @param {TransformStreamDefaultController} controller + * @param {any} chunk + * @returns {Promise} + */ + function transformStreamDefaultControllerPerformTransform(controller, chunk) { + const transformPromise = controller[_transformAlgorithm](chunk, controller); + return transformPromiseWith(transformPromise, undefined, (r) => { + transformStreamError(controller[_stream], r); + throw r; + }); + } + + /** @param {TransformStreamDefaultController} controller */ + function transformStreamDefaultControllerTerminate(controller) { + const stream = controller[_stream]; + const readableController = stream[_readable][_controller]; + readableStreamDefaultControllerClose( + /** @type {ReadableStreamDefaultController} */ (readableController), + ); + const error = new TypeError("The stream has been terminated."); + 
transformStreamErrorWritableAndUnblockWrite(stream, error); + } + + /** + * @param {TransformStream} stream + * @param {any=} reason + * @returns {Promise} + */ + function transformStreamDefaultSinkAbortAlgorithm(stream, reason) { + transformStreamError(stream, reason); + return resolvePromiseWith(undefined); + } + + /** + * @template I + * @template O + * @param {TransformStream} stream + * @returns {Promise} + */ + function transformStreamDefaultSinkCloseAlgorithm(stream) { + const readable = stream[_readable]; + const controller = stream[_controller]; + const flushPromise = controller[_flushAlgorithm](controller); + transformStreamDefaultControllerClearAlgorithms(controller); + return transformPromiseWith(flushPromise, () => { + if (readable[_state] === "errored") { + throw readable[_storedError]; + } + readableStreamDefaultControllerClose( + /** @type {ReadableStreamDefaultController} */ (readable[_controller]), + ); + }, (r) => { + transformStreamError(stream, r); + throw readable[_storedError]; + }); + } + + /** + * @template I + * @template O + * @param {TransformStream} stream + * @param {I} chunk + * @returns {Promise} + */ + function transformStreamDefaultSinkWriteAlgorithm(stream, chunk) { + assert(stream[_writable][_state] === "writable"); + const controller = stream[_controller]; + if (stream[_backpressure] === true) { + const backpressureChangePromise = stream[_backpressureChangePromise]; + assert(backpressureChangePromise !== undefined); + return transformPromiseWith(backpressureChangePromise.promise, () => { + const writable = stream[_writable]; + const state = writable[_state]; + if (state === "erroring") { + throw writable[_storedError]; + } + assert(state === "writable"); + return transformStreamDefaultControllerPerformTransform( + controller, + chunk, + ); + }); + } + return transformStreamDefaultControllerPerformTransform(controller, chunk); + } + + /** + * @param {TransformStream} stream + * @returns {Promise} + */ + function transformStreamDefaultSourcePullAlgorithm(stream) { + assert(stream[_backpressure] === true); + assert(stream[_backpressureChangePromise] !== undefined); + transformStreamSetBackpressure(stream, false); + return stream[_backpressureChangePromise].promise; + } + + /** + * @param {TransformStream} stream + * @param {any=} e + */ + function transformStreamError(stream, e) { + readableStreamDefaultControllerError( + /** @type {ReadableStreamDefaultController} */ (stream[_readable][ + _controller + ]), + e, + ); + transformStreamErrorWritableAndUnblockWrite(stream, e); + } + + /** + * @param {TransformStream} stream + * @param {any=} e + */ + function transformStreamErrorWritableAndUnblockWrite(stream, e) { + transformStreamDefaultControllerClearAlgorithms(stream[_controller]); + writableStreamDefaultControllerErrorIfNeeded( + stream[_writable][_controller], + e, + ); + if (stream[_backpressure] === true) { + transformStreamSetBackpressure(stream, false); + } + } + + /** + * @param {TransformStream} stream + * @param {boolean} backpressure + */ + function transformStreamSetBackpressure(stream, backpressure) { + assert(stream[_backpressure] !== backpressure); + if (stream[_backpressureChangePromise] !== undefined) { + stream[_backpressureChangePromise].resolve(undefined); + } + stream[_backpressureChangePromise] = new Deferred(); + stream[_backpressure] = backpressure; + } + + /** + * @param {WritableStream} stream + * @param {any=} reason + * @returns {Promise} + */ + function writableStreamAbort(stream, reason) { + const state = stream[_state]; + if 
(state === "closed" || state === "errored") { + return resolvePromiseWith(undefined); + } + if (stream[_pendingAbortRequest] !== undefined) { + return stream[_pendingAbortRequest].deferred.promise; + } + assert(state === "writable" || state === "erroring"); + let wasAlreadyErroring = false; + if (state === "erroring") { + wasAlreadyErroring = true; + reason = undefined; + } + /** Deferred */ + const deferred = new Deferred(); + stream[_pendingAbortRequest] = { + deferred, + reason, + wasAlreadyErroring, + }; + if (wasAlreadyErroring === false) { + writableStreamStartErroring(stream, reason); + } + return deferred.promise; + } + + /** + * @param {WritableStream} stream + * @returns {Promise} + */ + function writableStreamAddWriteRequest(stream) { + assert(isWritableStreamLocked(stream) === true); + assert(stream[_state] === "writable"); + /** @type {Deferred} */ + const deferred = new Deferred(); + ArrayPrototypePush(stream[_writeRequests], deferred); + return deferred.promise; + } + + /** + * @param {WritableStream} stream + * @returns {Promise} + */ + function writableStreamClose(stream) { + const state = stream[_state]; + if (state === "closed" || state === "errored") { + return PromiseReject( + new TypeError("Writable stream is closed or errored."), + ); + } + assert(state === "writable" || state === "erroring"); + assert(writableStreamCloseQueuedOrInFlight(stream) === false); + /** @type {Deferred} */ + const deferred = new Deferred(); + stream[_closeRequest] = deferred; + const writer = stream[_writer]; + if ( + writer !== undefined && stream[_backpressure] === true && + state === "writable" + ) { + writer[_readyPromise].resolve(undefined); + } + writableStreamDefaultControllerClose(stream[_controller]); + return deferred.promise; + } + + /** + * @param {WritableStream} stream + * @returns {boolean} + */ + function writableStreamCloseQueuedOrInFlight(stream) { + if ( + stream[_closeRequest] === undefined && + stream[_inFlightCloseRequest] === undefined + ) { + return false; + } + return true; + } + + /** + * @param {WritableStream} stream + * @param {any=} error + */ + function writableStreamDealWithRejection(stream, error) { + const state = stream[_state]; + if (state === "writable") { + writableStreamStartErroring(stream, error); + return; + } + assert(state === "erroring"); + writableStreamFinishErroring(stream); + } + + /** + * @template W + * @param {WritableStreamDefaultController} controller + */ + function writableStreamDefaultControllerAdvanceQueueIfNeeded(controller) { + const stream = controller[_stream]; + if (controller[_started] === false) { + return; + } + if (stream[_inFlightWriteRequest] !== undefined) { + return; + } + const state = stream[_state]; + assert(state !== "closed" && state !== "errored"); + if (state === "erroring") { + writableStreamFinishErroring(stream); + return; + } + if (controller[_queue].length === 0) { + return; + } + const value = peekQueueValue(controller); + if (value === _close) { + writableStreamDefaultControllerProcessClose(controller); + } else { + writableStreamDefaultControllerProcessWrite(controller, value); + } + } + + function writableStreamDefaultControllerClearAlgorithms(controller) { + controller[_writeAlgorithm] = undefined; + controller[_closeAlgorithm] = undefined; + controller[_abortAlgorithm] = undefined; + controller[_strategySizeAlgorithm] = undefined; + } + + /** @param {WritableStreamDefaultController} controller */ + function writableStreamDefaultControllerClose(controller) { + enqueueValueWithSize(controller, _close, 0); 
+ writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); + } + + /** + * @param {WritableStreamDefaultController} controller + * @param {any} error + */ + function writableStreamDefaultControllerError(controller, error) { + const stream = controller[_stream]; + assert(stream[_state] === "writable"); + writableStreamDefaultControllerClearAlgorithms(controller); + writableStreamStartErroring(stream, error); + } + + /** + * @param {WritableStreamDefaultController} controller + * @param {any} error + */ + function writableStreamDefaultControllerErrorIfNeeded(controller, error) { + if (controller[_stream][_state] === "writable") { + writableStreamDefaultControllerError(controller, error); + } + } + + /** + * @param {WritableStreamDefaultController} controller + * @returns {boolean} + */ + function writableStreamDefaultControllerGetBackpressure(controller) { + const desiredSize = writableStreamDefaultControllerGetDesiredSize( + controller, + ); + return desiredSize <= 0; + } + + /** + * @template W + * @param {WritableStreamDefaultController} controller + * @param {W} chunk + * @returns {number} + */ + function writableStreamDefaultControllerGetChunkSize(controller, chunk) { + let value; + try { + value = controller[_strategySizeAlgorithm](chunk); + } catch (e) { + writableStreamDefaultControllerErrorIfNeeded(controller, e); + return 1; + } + return value; + } + + /** + * @param {WritableStreamDefaultController} controller + * @returns {number} + */ + function writableStreamDefaultControllerGetDesiredSize(controller) { + return controller[_strategyHWM] - controller[_queueTotalSize]; + } + + /** @param {WritableStreamDefaultController} controller */ + function writableStreamDefaultControllerProcessClose(controller) { + const stream = controller[_stream]; + writableStreamMarkCloseRequestInFlight(stream); + dequeueValue(controller); + assert(controller[_queue].length === 0); + const sinkClosePromise = controller[_closeAlgorithm](); + writableStreamDefaultControllerClearAlgorithms(controller); + uponPromise(sinkClosePromise, () => { + writableStreamFinishInFlightClose(stream); + }, (reason) => { + writableStreamFinishInFlightCloseWithError(stream, reason); + }); + } + + /** + * @template W + * @param {WritableStreamDefaultController} controller + * @param {W} chunk + */ + function writableStreamDefaultControllerProcessWrite(controller, chunk) { + const stream = controller[_stream]; + writableStreamMarkFirstWriteRequestInFlight(stream); + const sinkWritePromise = controller[_writeAlgorithm](chunk, controller); + uponPromise(sinkWritePromise, () => { + writableStreamFinishInFlightWrite(stream); + const state = stream[_state]; + assert(state === "writable" || state === "erroring"); + dequeueValue(controller); + if ( + writableStreamCloseQueuedOrInFlight(stream) === false && + state === "writable" + ) { + const backpressure = writableStreamDefaultControllerGetBackpressure( + controller, + ); + writableStreamUpdateBackpressure(stream, backpressure); + } + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); + }, (reason) => { + if (stream[_state] === "writable") { + writableStreamDefaultControllerClearAlgorithms(controller); + } + writableStreamFinishInFlightWriteWithError(stream, reason); + }); + } + + /** + * @template W + * @param {WritableStreamDefaultController} controller + * @param {W} chunk + * @param {number} chunkSize + */ + function writableStreamDefaultControllerWrite(controller, chunk, chunkSize) { + try { + enqueueValueWithSize(controller, chunk, chunkSize); + } catch 
(e) { + writableStreamDefaultControllerErrorIfNeeded(controller, e); + return; + } + const stream = controller[_stream]; + if ( + writableStreamCloseQueuedOrInFlight(stream) === false && + stream[_state] === "writable" + ) { + const backpressure = writableStreamDefaultControllerGetBackpressure( + controller, + ); + writableStreamUpdateBackpressure(stream, backpressure); + } + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); + } + + /** + * @param {WritableStreamDefaultWriter} writer + * @param {any=} reason + * @returns {Promise} + */ + function writableStreamDefaultWriterAbort(writer, reason) { + const stream = writer[_stream]; + assert(stream !== undefined); + return writableStreamAbort(stream, reason); + } + + /** + * @param {WritableStreamDefaultWriter} writer + * @returns {Promise} + */ + function writableStreamDefaultWriterClose(writer) { + const stream = writer[_stream]; + assert(stream !== undefined); + return writableStreamClose(stream); + } + + /** + * @param {WritableStreamDefaultWriter} writer + * @returns {Promise} + */ + function writableStreamDefaultWriterCloseWithErrorPropagation(writer) { + const stream = writer[_stream]; + assert(stream !== undefined); + const state = stream[_state]; + if ( + writableStreamCloseQueuedOrInFlight(stream) === true || state === "closed" + ) { + return resolvePromiseWith(undefined); + } + if (state === "errored") { + return PromiseReject(stream[_storedError]); + } + assert(state === "writable" || state === "erroring"); + return writableStreamDefaultWriterClose(writer); + } + + /** + * @param {WritableStreamDefaultWriter} writer + * @param {any=} error + */ + function writableStreamDefaultWriterEnsureClosedPromiseRejected( + writer, + error, + ) { + if (writer[_closedPromise].state === "pending") { + writer[_closedPromise].reject(error); + } else { + writer[_closedPromise] = new Deferred(); + writer[_closedPromise].reject(error); + } + setPromiseIsHandledToTrue(writer[_closedPromise].promise); + } + + /** + * @param {WritableStreamDefaultWriter} writer + * @param {any=} error + */ + function writableStreamDefaultWriterEnsureReadyPromiseRejected( + writer, + error, + ) { + if (writer[_readyPromise].state === "pending") { + writer[_readyPromise].reject(error); + } else { + writer[_readyPromise] = new Deferred(); + writer[_readyPromise].reject(error); + } + setPromiseIsHandledToTrue(writer[_readyPromise].promise); + } + + /** + * @param {WritableStreamDefaultWriter} writer + * @returns {number | null} + */ + function writableStreamDefaultWriterGetDesiredSize(writer) { + const stream = writer[_stream]; + const state = stream[_state]; + if (state === "errored" || state === "erroring") { + return null; + } + if (state === "closed") { + return 0; + } + return writableStreamDefaultControllerGetDesiredSize(stream[_controller]); + } + + /** @param {WritableStreamDefaultWriter} writer */ + function writableStreamDefaultWriterRelease(writer) { + const stream = writer[_stream]; + assert(stream !== undefined); + assert(stream[_writer] === writer); + const releasedError = new TypeError( + "The writer has already been released.", + ); + writableStreamDefaultWriterEnsureReadyPromiseRejected( + writer, + releasedError, + ); + writableStreamDefaultWriterEnsureClosedPromiseRejected( + writer, + releasedError, + ); + stream[_writer] = undefined; + writer[_stream] = undefined; + } + + /** + * @template W + * @param {WritableStreamDefaultWriter} writer + * @param {W} chunk + * @returns {Promise} + */ + function writableStreamDefaultWriterWrite(writer, 
chunk) { + const stream = writer[_stream]; + assert(stream !== undefined); + const controller = stream[_controller]; + const chunkSize = writableStreamDefaultControllerGetChunkSize( + controller, + chunk, + ); + if (stream !== writer[_stream]) { + return PromiseReject(new TypeError("Writer's stream is unexpected.")); + } + const state = stream[_state]; + if (state === "errored") { + return PromiseReject(stream[_storedError]); + } + if ( + writableStreamCloseQueuedOrInFlight(stream) === true || state === "closed" + ) { + return PromiseReject( + new TypeError("The stream is closing or is closed."), + ); + } + if (state === "erroring") { + return PromiseReject(stream[_storedError]); + } + assert(state === "writable"); + const promise = writableStreamAddWriteRequest(stream); + writableStreamDefaultControllerWrite(controller, chunk, chunkSize); + return promise; + } + + /** @param {WritableStream} stream */ + function writableStreamFinishErroring(stream) { + assert(stream[_state] === "erroring"); + assert(writableStreamHasOperationMarkedInFlight(stream) === false); + stream[_state] = "errored"; + stream[_controller][_errorSteps](); + const storedError = stream[_storedError]; + for (const writeRequest of stream[_writeRequests]) { + writeRequest.reject(storedError); + } + stream[_writeRequests] = []; + if (stream[_pendingAbortRequest] === undefined) { + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + return; + } + const abortRequest = stream[_pendingAbortRequest]; + stream[_pendingAbortRequest] = undefined; + if (abortRequest.wasAlreadyErroring === true) { + abortRequest.deferred.reject(storedError); + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + return; + } + const promise = stream[_controller][_abortSteps](abortRequest.reason); + uponPromise(promise, () => { + abortRequest.deferred.resolve(undefined); + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + }, (reason) => { + abortRequest.deferred.reject(reason); + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + }); + } + + /** @param {WritableStream} stream */ + function writableStreamFinishInFlightClose(stream) { + assert(stream[_inFlightCloseRequest] !== undefined); + stream[_inFlightCloseRequest].resolve(undefined); + stream[_inFlightCloseRequest] = undefined; + const state = stream[_state]; + assert(state === "writable" || state === "erroring"); + if (state === "erroring") { + stream[_storedError] = undefined; + if (stream[_pendingAbortRequest] !== undefined) { + stream[_pendingAbortRequest].deferred.resolve(undefined); + stream[_pendingAbortRequest] = undefined; + } + } + stream[_state] = "closed"; + const writer = stream[_writer]; + if (writer !== undefined) { + writer[_closedPromise].resolve(undefined); + } + assert(stream[_pendingAbortRequest] === undefined); + assert(stream[_storedError] === undefined); + } + + /** + * @param {WritableStream} stream + * @param {any=} error + */ + function writableStreamFinishInFlightCloseWithError(stream, error) { + assert(stream[_inFlightCloseRequest] !== undefined); + stream[_inFlightCloseRequest].reject(error); + stream[_inFlightCloseRequest] = undefined; + assert(stream[_state] === "writable" || stream[_state] === "erroring"); + if (stream[_pendingAbortRequest] !== undefined) { + stream[_pendingAbortRequest].deferred.reject(error); + stream[_pendingAbortRequest] = undefined; + } + writableStreamDealWithRejection(stream, error); + } + + /** @param {WritableStream} stream */ + function writableStreamFinishInFlightWrite(stream) { + 
assert(stream[_inFlightWriteRequest] !== undefined); + stream[_inFlightWriteRequest].resolve(undefined); + stream[_inFlightWriteRequest] = undefined; + } + + /** + * @param {WritableStream} stream + * @param {any=} error + */ + function writableStreamFinishInFlightWriteWithError(stream, error) { + assert(stream[_inFlightWriteRequest] !== undefined); + stream[_inFlightWriteRequest].reject(error); + stream[_inFlightWriteRequest] = undefined; + assert(stream[_state] === "writable" || stream[_state] === "erroring"); + writableStreamDealWithRejection(stream, error); + } + + /** + * @param {WritableStream} stream + * @returns {boolean} + */ + function writableStreamHasOperationMarkedInFlight(stream) { + if ( + stream[_inFlightWriteRequest] === undefined && + stream[_inFlightCloseRequest] === undefined + ) { + return false; + } + return true; + } + + /** @param {WritableStream} stream */ + function writableStreamMarkCloseRequestInFlight(stream) { + assert(stream[_inFlightCloseRequest] === undefined); + assert(stream[_closeRequest] !== undefined); + stream[_inFlightCloseRequest] = stream[_closeRequest]; + stream[_closeRequest] = undefined; + } + + /** + * @template W + * @param {WritableStream} stream + * */ + function writableStreamMarkFirstWriteRequestInFlight(stream) { + assert(stream[_inFlightWriteRequest] === undefined); + assert(stream[_writeRequests].length); + const writeRequest = stream[_writeRequests].shift(); + stream[_inFlightWriteRequest] = writeRequest; + } + + /** @param {WritableStream} stream */ + function writableStreamRejectCloseAndClosedPromiseIfNeeded(stream) { + assert(stream[_state] === "errored"); + if (stream[_closeRequest] !== undefined) { + assert(stream[_inFlightCloseRequest] === undefined); + stream[_closeRequest].reject(stream[_storedError]); + stream[_closeRequest] = undefined; + } + const writer = stream[_writer]; + if (writer !== undefined) { + writer[_closedPromise].reject(stream[_storedError]); + setPromiseIsHandledToTrue(writer[_closedPromise].promise); + } + } + + /** + * @param {WritableStream} stream + * @param {any=} reason + */ + function writableStreamStartErroring(stream, reason) { + assert(stream[_storedError] === undefined); + assert(stream[_state] === "writable"); + const controller = stream[_controller]; + assert(controller !== undefined); + stream[_state] = "erroring"; + stream[_storedError] = reason; + const writer = stream[_writer]; + if (writer !== undefined) { + writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, reason); + } + if ( + writableStreamHasOperationMarkedInFlight(stream) === false && + controller[_started] === true + ) { + writableStreamFinishErroring(stream); + } + } + + /** + * @param {WritableStream} stream + * @param {boolean} backpressure + */ + function writableStreamUpdateBackpressure(stream, backpressure) { + assert(stream[_state] === "writable"); + assert(writableStreamCloseQueuedOrInFlight(stream) === false); + const writer = stream[_writer]; + if (writer !== undefined && backpressure !== stream[_backpressure]) { + if (backpressure === true) { + writer[_readyPromise] = new Deferred(); + } else { + assert(backpressure === false); + writer[_readyPromise].resolve(undefined); + } + } + stream[_backpressure] = backpressure; + } + + /** + * @template T + * @param {T} value + * @param {boolean} done + * @returns {IteratorResult} + */ + function createIteratorResult(value, done) { + const result = ObjectCreate(null); + ObjectDefineProperties(result, { + value: { value, writable: true, enumerable: true, configurable: true 
}, + done: { + value: done, + writable: true, + enumerable: true, + configurable: true, + }, + }); + return result; + } + + /** @type {AsyncIterator} */ + const asyncIteratorPrototype = ObjectGetPrototypeOf( + ObjectGetPrototypeOf(async function* () {}).prototype, + ); + + /** @type {AsyncIterator} */ + const readableStreamAsyncIteratorPrototype = ObjectSetPrototypeOf({ + /** @returns {Promise>} */ + next() { + /** @type {ReadableStreamDefaultReader} */ + const reader = this[_reader]; + if (reader[_stream] === undefined) { + return PromiseReject( + new TypeError( + "Cannot get the next iteration result once the reader has been released.", + ), + ); + } + /** @type {Deferred>} */ + const promise = new Deferred(); + /** @type {ReadRequest} */ + const readRequest = { + chunkSteps(chunk) { + promise.resolve(createIteratorResult(chunk, false)); + }, + closeSteps() { + readableStreamReaderGenericRelease(reader); + promise.resolve(createIteratorResult(undefined, true)); + }, + errorSteps(e) { + readableStreamReaderGenericRelease(reader); + promise.reject(e); + }, + }; + readableStreamDefaultReaderRead(reader, readRequest); + return promise.promise; + }, + /** + * @param {unknown} arg + * @returns {Promise>} + */ + async return(arg) { + /** @type {ReadableStreamDefaultReader} */ + const reader = this[_reader]; + if (reader[_stream] === undefined) { + return createIteratorResult(undefined, true); + } + assert(reader[_readRequests].length === 0); + if (this[_preventCancel] === false) { + const result = readableStreamReaderGenericCancel(reader, arg); + readableStreamReaderGenericRelease(reader); + await result; + return createIteratorResult(arg, true); + } + readableStreamReaderGenericRelease(reader); + return createIteratorResult(undefined, true); + }, + }, asyncIteratorPrototype); + + class ByteLengthQueuingStrategy { + /** @param {{ highWaterMark: number }} init */ + constructor(init) { + const prefix = "Failed to construct 'ByteLengthQueuingStrategy'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + init = webidl.converters.QueuingStrategyInit(init, { + prefix, + context: "Argument 1", + }); + this[webidl.brand] = webidl.brand; + this[_globalObject] = window; + this[_highWaterMark] = init.highWaterMark; + } + + /** @returns {number} */ + get highWaterMark() { + webidl.assertBranded(this, ByteLengthQueuingStrategy); + return this[_highWaterMark]; + } + + /** @returns {(chunk: ArrayBufferView) => number} */ + get size() { + webidl.assertBranded(this, ByteLengthQueuingStrategy); + initializeByteLengthSizeFunction(this[_globalObject]); + return WeakMapPrototypeGet(byteSizeFunctionWeakMap, this[_globalObject]); + } + + [SymbolFor("Deno.customInspect")](inspect) { + return inspect(consoleInternal.createFilteredInspectProxy({ + object: this, + evaluate: this instanceof ByteLengthQueuingStrategy, + keys: [ + "highWaterMark", + "size", + ], + })); + } + + get [SymbolToStringTag]() { + return "ByteLengthQueuingStrategy"; + } + } + + webidl.configurePrototype(ByteLengthQueuingStrategy); + + /** @type {WeakMap number>} */ + const byteSizeFunctionWeakMap = new WeakMap(); + + function initializeByteLengthSizeFunction(globalObject) { + if (WeakMapPrototypeHas(byteSizeFunctionWeakMap, globalObject)) { + return; + } + const size = (chunk) => chunk.byteLength; + WeakMapPrototypeSet(byteSizeFunctionWeakMap, globalObject, size); + } + + class CountQueuingStrategy { + /** @param {{ highWaterMark: number }} init */ + constructor(init) { + const prefix = "Failed to construct 'CountQueuingStrategy'"; + 
webidl.requiredArguments(arguments.length, 1, { prefix }); + init = webidl.converters.QueuingStrategyInit(init, { + prefix, + context: "Argument 1", + }); + this[webidl.brand] = webidl.brand; + this[_globalObject] = window; + this[_highWaterMark] = init.highWaterMark; + } + + /** @returns {number} */ + get highWaterMark() { + webidl.assertBranded(this, CountQueuingStrategy); + return this[_highWaterMark]; + } + + /** @returns {(chunk: any) => 1} */ + get size() { + webidl.assertBranded(this, CountQueuingStrategy); + initializeCountSizeFunction(this[_globalObject]); + return WeakMapPrototypeGet(countSizeFunctionWeakMap, this[_globalObject]); + } + + [SymbolFor("Deno.customInspect")](inspect) { + return inspect(consoleInternal.createFilteredInspectProxy({ + object: this, + evaluate: this instanceof CountQueuingStrategy, + keys: [ + "highWaterMark", + "size", + ], + })); + } + + get [SymbolToStringTag]() { + return "CountQueuingStrategy"; + } + } + + webidl.configurePrototype(CountQueuingStrategy); + + /** @type {WeakMap 1>} */ + const countSizeFunctionWeakMap = new WeakMap(); + + /** @param {typeof globalThis} globalObject */ + function initializeCountSizeFunction(globalObject) { + if (WeakMapPrototypeHas(countSizeFunctionWeakMap, globalObject)) { + return; + } + const size = () => 1; + WeakMapPrototypeSet(countSizeFunctionWeakMap, globalObject, size); + } + + /** @template R */ + class ReadableStream { + /** @type {ReadableStreamDefaultController | ReadableByteStreamController} */ + [_controller]; + /** @type {boolean} */ + [_detached]; + /** @type {boolean} */ + [_disturbed]; + /** @type {ReadableStreamDefaultReader | undefined} */ + [_reader]; + /** @type {"readable" | "closed" | "errored"} */ + [_state]; + /** @type {any} */ + [_storedError]; + + /** + * @param {UnderlyingSource=} underlyingSource + * @param {QueuingStrategy=} strategy + */ + constructor(underlyingSource = undefined, strategy = {}) { + const prefix = "Failed to construct 'ReadableStream'"; + if (underlyingSource !== undefined) { + underlyingSource = webidl.converters.object(underlyingSource, { + prefix, + context: "Argument 1", + }); + } + strategy = webidl.converters.QueuingStrategy(strategy, { + prefix, + context: "Argument 2", + }); + this[webidl.brand] = webidl.brand; + if (underlyingSource === undefined) { + underlyingSource = null; + } + const underlyingSourceDict = webidl.converters.UnderlyingSource( + underlyingSource, + { prefix, context: "underlyingSource" }, + ); + initializeReadableStream(this); + if (underlyingSourceDict.type === "bytes") { + if (strategy.size !== undefined) { + throw new RangeError( + `${prefix}: When underlying source is "bytes", strategy.size must be undefined.`, + ); + } + const highWaterMark = extractHighWaterMark(strategy, 0); + setUpReadableByteStreamControllerFromUnderlyingSource( + // @ts-ignore cannot easily assert this is ReadableStream + this, + underlyingSource, + underlyingSourceDict, + highWaterMark, + ); + } else { + assert(!("type" in underlyingSourceDict)); + const sizeAlgorithm = extractSizeAlgorithm(strategy); + const highWaterMark = extractHighWaterMark(strategy, 1); + setUpReadableStreamDefaultControllerFromUnderlyingSource( + this, + underlyingSource, + underlyingSourceDict, + highWaterMark, + sizeAlgorithm, + ); + } + } + + /** @returns {boolean} */ + get locked() { + webidl.assertBranded(this, ReadableStream); + return isReadableStreamLocked(this); + } + + /** + * @param {any=} reason + * @returns {Promise} + */ + cancel(reason = undefined) { + try { + 
webidl.assertBranded(this, ReadableStream); + if (reason !== undefined) { + reason = webidl.converters.any(reason); + } + } catch (err) { + return PromiseReject(err); + } + if (isReadableStreamLocked(this)) { + return PromiseReject( + new TypeError("Cannot cancel a locked ReadableStream."), + ); + } + return readableStreamCancel(this, reason); + } + + /** + * @deprecated TODO(@kitsonk): Remove in Deno 1.8 + * @param {ReadableStreamIteratorOptions=} options + * @returns {AsyncIterableIterator} + */ + getIterator(options = {}) { + return this[SymbolAsyncIterator](options); + } + + /** + * @param {ReadableStreamGetReaderOptions=} options + * @returns {ReadableStreamDefaultReader} + */ + getReader(options = {}) { + webidl.assertBranded(this, ReadableStream); + const prefix = "Failed to execute 'getReader' on 'ReadableStream'"; + options = webidl.converters.ReadableStreamGetReaderOptions(options, { + prefix, + context: "Argument 1", + }); + const { mode } = options; + if (mode === undefined) { + return acquireReadableStreamDefaultReader(this); + } + // 3. Return ? AcquireReadableStreamBYOBReader(this). + throw new RangeError(`${prefix}: Unsupported mode '${mode}'`); + } + + /** + * @template T + * @param {{ readable: ReadableStream, writable: WritableStream }} transform + * @param {PipeOptions=} options + * @returns {ReadableStream} + */ + pipeThrough(transform, options = {}) { + webidl.assertBranded(this, ReadableStream); + const prefix = "Failed to execute 'pipeThrough' on 'ReadableStream'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + transform = webidl.converters.ReadableWritablePair(transform, { + prefix, + context: "Argument 1", + }); + options = webidl.converters.StreamPipeOptions(options, { + prefix, + context: "Argument 2", + }); + const { readable, writable } = transform; + const { preventClose, preventAbort, preventCancel, signal } = options; + if (isReadableStreamLocked(this)) { + throw new TypeError("ReadableStream is already locked."); + } + if (isWritableStreamLocked(writable)) { + throw new TypeError("Target WritableStream is already locked."); + } + const promise = readableStreamPipeTo( + this, + writable, + preventClose, + preventAbort, + preventCancel, + signal, + ); + setPromiseIsHandledToTrue(promise); + return readable; + } + + /** + * @param {WritableStream} destination + * @param {PipeOptions=} options + * @returns {Promise} + */ + pipeTo(destination, options = {}) { + try { + webidl.assertBranded(this, ReadableStream); + const prefix = "Failed to execute 'pipeTo' on 'ReadableStream'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + destination = webidl.converters.WritableStream(destination, { + prefix, + context: "Argument 1", + }); + options = webidl.converters.StreamPipeOptions(options, { + prefix, + context: "Argument 2", + }); + } catch (err) { + return PromiseReject(err); + } + const { preventClose, preventAbort, preventCancel, signal } = options; + if (isReadableStreamLocked(this)) { + return PromiseReject( + new TypeError("ReadableStream is already locked."), + ); + } + if (isWritableStreamLocked(destination)) { + return PromiseReject( + new TypeError("destination WritableStream is already locked."), + ); + } + return readableStreamPipeTo( + this, + destination, + preventClose, + preventAbort, + preventCancel, + signal, + ); + } + + /** @returns {[ReadableStream, ReadableStream]} */ + tee() { + webidl.assertBranded(this, ReadableStream); + return readableStreamTee(this, false); + } + + // TODO(lucacasonato): should be moved to 
webidl crate + /** + * @param {ReadableStreamIteratorOptions=} options + * @returns {AsyncIterableIterator} + */ + values(options = {}) { + webidl.assertBranded(this, ReadableStream); + const prefix = "Failed to execute 'values' on 'ReadableStream'"; + options = webidl.converters.ReadableStreamIteratorOptions(options, { + prefix, + context: "Argument 1", + }); + /** @type {AsyncIterableIterator} */ + const iterator = ObjectCreate(readableStreamAsyncIteratorPrototype); + const reader = acquireReadableStreamDefaultReader(this); + iterator[_reader] = reader; + iterator[_preventCancel] = options.preventCancel; + return iterator; + } + + [SymbolFor("Deno.privateCustomInspect")](inspect) { + return `${this.constructor.name} ${inspect({ locked: this.locked })}`; + } + + get [SymbolToStringTag]() { + return "ReadableStream"; + } + } + + // TODO(lucacasonato): should be moved to webidl crate + ReadableStream.prototype[SymbolAsyncIterator] = + ReadableStream.prototype.values; + ObjectDefineProperty(ReadableStream.prototype, SymbolAsyncIterator, { + writable: true, + enumerable: false, + configurable: true, + }); + + webidl.configurePrototype(ReadableStream); + + function errorReadableStream(stream, e) { + readableStreamDefaultControllerError(stream[_controller], e); + } + + /** @template R */ + class ReadableStreamDefaultReader { + /** @type {Deferred} */ + [_closedPromise]; + /** @type {ReadableStream | undefined} */ + [_stream]; + /** @type {ReadRequest[]} */ + [_readRequests]; + + /** @param {ReadableStream} stream */ + constructor(stream) { + const prefix = "Failed to construct 'ReadableStreamDefaultReader'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + stream = webidl.converters.ReadableStream(stream, { + prefix, + context: "Argument 1", + }); + this[webidl.brand] = webidl.brand; + setUpReadableStreamDefaultReader(this, stream); + } + + /** @returns {Promise>} */ + read() { + try { + webidl.assertBranded(this, ReadableStreamDefaultReader); + } catch (err) { + return PromiseReject(err); + } + if (this[_stream] === undefined) { + return PromiseReject( + new TypeError("Reader has no associated stream."), + ); + } + /** @type {Deferred>} */ + const promise = new Deferred(); + /** @type {ReadRequest} */ + const readRequest = { + chunkSteps(chunk) { + promise.resolve({ value: chunk, done: false }); + }, + closeSteps() { + promise.resolve({ value: undefined, done: true }); + }, + errorSteps(e) { + promise.reject(e); + }, + }; + readableStreamDefaultReaderRead(this, readRequest); + return promise.promise; + } + + /** @returns {void} */ + releaseLock() { + webidl.assertBranded(this, ReadableStreamDefaultReader); + if (this[_stream] === undefined) { + return; + } + if (this[_readRequests].length) { + throw new TypeError( + "There are pending read requests, so the reader cannot be released.", + ); + } + readableStreamReaderGenericRelease(this); + } + + get closed() { + try { + webidl.assertBranded(this, ReadableStreamDefaultReader); + } catch (err) { + return PromiseReject(err); + } + return this[_closedPromise].promise; + } + + /** + * @param {any} reason + * @returns {Promise} + */ + cancel(reason = undefined) { + try { + webidl.assertBranded(this, ReadableStreamDefaultReader); + if (reason !== undefined) { + reason = webidl.converters.any(reason); + } + } catch (err) { + return PromiseReject(err); + } + + if (this[_stream] === undefined) { + return PromiseReject( + new TypeError("Reader has no associated stream."), + ); + } + return readableStreamReaderGenericCancel(this, reason); + }
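+
+ // Informal usage sketch: assuming `stream` is an unlocked ReadableStream and
+ // the code runs inside an async function (both assumptions, not part of this
+ // module), a consumer typically drains the stream through this reader roughly
+ // like so, releasing the lock when done:
+ //
+ //   const reader = stream.getReader();
+ //   try {
+ //     while (true) {
+ //       const { value, done } = await reader.read();
+ //       if (done) break;
+ //       // use `value` here
+ //     }
+ //   } finally {
+ //     reader.releaseLock();
+ //   }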
+ + [SymbolFor("Deno.privateCustomInspect")](inspect) { + return `${this.constructor.name} ${inspect({ closed: this.closed })}`; + } + + get [SymbolToStringTag]() { + return "ReadableStreamDefaultReader"; + } + } + + webidl.configurePrototype(ReadableStreamDefaultReader); + + class ReadableByteStreamController { + /** @type {number | undefined} */ + [_autoAllocateChunkSize]; + /** @type {null} */ + [_byobRequest]; + /** @type {(reason: any) => Promise} */ + [_cancelAlgorithm]; + /** @type {boolean} */ + [_closeRequested]; + /** @type {boolean} */ + [_pullAgain]; + /** @type {(controller: this) => Promise} */ + [_pullAlgorithm]; + /** @type {boolean} */ + [_pulling]; + /** @type {ReadableByteStreamQueueEntry[]} */ + [_queue]; + /** @type {number} */ + [_queueTotalSize]; + /** @type {boolean} */ + [_started]; + /** @type {number} */ + [_strategyHWM]; + /** @type {ReadableStream} */ + [_stream]; + + constructor() { + webidl.illegalConstructor(); + } + + get byobRequest() { + webidl.assertBranded(this, ReadableByteStreamController); + return undefined; + } + + /** @returns {number | null} */ + get desiredSize() { + webidl.assertBranded(this, ReadableByteStreamController); + return readableByteStreamControllerGetDesiredSize(this); + } + + /** @returns {void} */ + close() { + webidl.assertBranded(this, ReadableByteStreamController); + if (this[_closeRequested] === true) { + throw new TypeError("Closed already requested."); + } + if (this[_stream][_state] !== "readable") { + throw new TypeError( + "ReadableByteStreamController's stream is not in a readable state.", + ); + } + readableByteStreamControllerClose(this); + } + + /** + * @param {ArrayBufferView} chunk + * @returns {void} + */ + enqueue(chunk) { + webidl.assertBranded(this, ReadableByteStreamController); + const prefix = + "Failed to execute 'enqueue' on 'ReadableByteStreamController'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + const arg1 = "Argument 1"; + chunk = webidl.converters.ArrayBufferView(chunk, { + prefix, + context: arg1, + }); + if (chunk.byteLength === 0) { + throw webidl.makeException(TypeError, "length must be non-zero", { + prefix, + context: arg1, + }); + } + if (chunk.buffer.byteLength === 0) { + throw webidl.makeException( + TypeError, + "buffer length must be non-zero", + { prefix, context: arg1 }, + ); + } + if (this[_closeRequested] === true) { + throw new TypeError( + "Cannot enqueue chunk after a close has been requested.", + ); + } + if (this[_stream][_state] !== "readable") { + throw new TypeError( + "Cannot enqueue chunk when underlying stream is not readable.", + ); + } + return readableByteStreamControllerEnqueue(this, chunk); + } + + /** + * @param {any=} e + * @returns {void} + */ + error(e = undefined) { + webidl.assertBranded(this, ReadableByteStreamController); + if (e !== undefined) { + e = webidl.converters.any(e); + } + readableByteStreamControllerError(this, e); + } + + [SymbolFor("Deno.customInspect")](inspect) { + return inspect(consoleInternal.createFilteredInspectProxy({ + object: this, + evaluate: this instanceof ReadableByteStreamController, + keys: ["desiredSize"], + })); + } + + get [SymbolToStringTag]() { + return "ReadableByteStreamController"; + } + + /** + * @param {any} reason + * @returns {Promise} + */ + [_cancelSteps](reason) { + // 4.7.4. CancelStep 1. 
If this.[[pendingPullIntos]] is not empty, + resetQueue(this); + const result = this[_cancelAlgorithm](reason); + readableByteStreamControllerClearAlgorithms(this); + return result; + } + + /** + * @param {ReadRequest} readRequest + * @returns {void} + */ + [_pullSteps](readRequest) { + /** @type {ReadableStream} */ + const stream = this[_stream]; + assert(readableStreamHasDefaultReader(stream)); + if (this[_queueTotalSize] > 0) { + assert(readableStreamGetNumReadRequests(stream) === 0); + const entry = ArrayPrototypeShift(this[_queue]); + this[_queueTotalSize] -= entry.byteLength; + readableByteStreamControllerHandleQueueDrain(this); + const view = new Uint8Array( + entry.buffer, + entry.byteOffset, + entry.byteLength, + ); + readRequest.chunkSteps(view); + return; + } + // 4. Let autoAllocateChunkSize be this.[[autoAllocateChunkSize]]. + // 5. If autoAllocateChunkSize is not undefined, + readableStreamAddReadRequest(stream, readRequest); + readableByteStreamControllerCallPullIfNeeded(this); + } + } + + webidl.configurePrototype(ReadableByteStreamController); + + /** @template R */ + class ReadableStreamDefaultController { + /** @type {(reason: any) => Promise} */ + [_cancelAlgorithm]; + /** @type {boolean} */ + [_closeRequested]; + /** @type {boolean} */ + [_pullAgain]; + /** @type {(controller: this) => Promise} */ + [_pullAlgorithm]; + /** @type {boolean} */ + [_pulling]; + /** @type {Array>} */ + [_queue]; + /** @type {number} */ + [_queueTotalSize]; + /** @type {boolean} */ + [_started]; + /** @type {number} */ + [_strategyHWM]; + /** @type {(chunk: R) => number} */ + [_strategySizeAlgorithm]; + /** @type {ReadableStream} */ + [_stream]; + + constructor() { + webidl.illegalConstructor(); + } + + /** @returns {number | null} */ + get desiredSize() { + webidl.assertBranded(this, ReadableStreamDefaultController); + return readableStreamDefaultControllerGetDesiredSize(this); + } + + /** @returns {void} */ + close() { + webidl.assertBranded(this, ReadableStreamDefaultController); + if (readableStreamDefaultControllerCanCloseOrEnqueue(this) === false) { + throw new TypeError("The stream controller cannot close or enqueue."); + } + readableStreamDefaultControllerClose(this); + } + + /** + * @param {R} chunk + * @returns {void} + */ + enqueue(chunk = undefined) { + webidl.assertBranded(this, ReadableStreamDefaultController); + if (chunk !== undefined) { + chunk = webidl.converters.any(chunk); + } + if (readableStreamDefaultControllerCanCloseOrEnqueue(this) === false) { + throw new TypeError("The stream controller cannot close or enqueue."); + } + readableStreamDefaultControllerEnqueue(this, chunk); + } + + /** + * @param {any=} e + * @returns {void} + */ + error(e = undefined) { + webidl.assertBranded(this, ReadableStreamDefaultController); + if (e !== undefined) { + e = webidl.converters.any(e); + } + readableStreamDefaultControllerError(this, e); + } + + [SymbolFor("Deno.customInspect")](inspect) { + return inspect(consoleInternal.createFilteredInspectProxy({ + object: this, + evaluate: this instanceof ReadableStreamDefaultController, + keys: ["desiredSize"], + })); + } + + get [SymbolToStringTag]() { + return "ReadableStreamDefaultController"; + } + + /** + * @param {any} reason + * @returns {Promise} + */ + [_cancelSteps](reason) { + resetQueue(this); + const result = this[_cancelAlgorithm](reason); + readableStreamDefaultControllerClearAlgorithms(this); + return result; + } + + /** + * @param {ReadRequest} readRequest + * @returns {void} + */ + [_pullSteps](readRequest) { + const stream 
= this[_stream]; + if (this[_queue].length) { + const chunk = dequeueValue(this); + if (this[_closeRequested] && this[_queue].length === 0) { + readableStreamDefaultControllerClearAlgorithms(this); + readableStreamClose(stream); + } else { + readableStreamDefaultControllerCallPullIfNeeded(this); + } + readRequest.chunkSteps(chunk); + } else { + readableStreamAddReadRequest(stream, readRequest); + readableStreamDefaultControllerCallPullIfNeeded(this); + } + } + } + + webidl.configurePrototype(ReadableStreamDefaultController); + + /** + * @template I + * @template O + */ + class TransformStream { + /** @type {boolean} */ + [_backpressure]; + /** @type {Deferred} */ + [_backpressureChangePromise]; + /** @type {TransformStreamDefaultController} */ + [_controller]; + /** @type {boolean} */ + [_detached]; + /** @type {ReadableStream} */ + [_readable]; + /** @type {WritableStream} */ + [_writable]; + + /** + * + * @param {Transformer} transformer + * @param {QueuingStrategy} writableStrategy + * @param {QueuingStrategy} readableStrategy + */ + constructor( + transformer = undefined, + writableStrategy = {}, + readableStrategy = {}, + ) { + const prefix = "Failed to construct 'TransformStream'"; + if (transformer !== undefined) { + transformer = webidl.converters.object(transformer, { + prefix, + context: "Argument 1", + }); + } + writableStrategy = webidl.converters.QueuingStrategy(writableStrategy, { + prefix, + context: "Argument 2", + }); + readableStrategy = webidl.converters.QueuingStrategy(readableStrategy, { + prefix, + context: "Argument 2", + }); + this[webidl.brand] = webidl.brand; + if (transformer === undefined) { + transformer = null; + } + const transformerDict = webidl.converters.Transformer(transformer, { + prefix, + context: "transformer", + }); + if (transformerDict.readableType !== undefined) { + throw new RangeError( + `${prefix}: readableType transformers not supported.`, + ); + } + if (transformerDict.writableType !== undefined) { + throw new RangeError( + `${prefix}: writableType transformers not supported.`, + ); + } + const readableHighWaterMark = extractHighWaterMark(readableStrategy, 0); + const readableSizeAlgorithm = extractSizeAlgorithm(readableStrategy); + const writableHighWaterMark = extractHighWaterMark(writableStrategy, 1); + const writableSizeAlgorithm = extractSizeAlgorithm(writableStrategy); + /** @type {Deferred} */ + const startPromise = new Deferred(); + initializeTransformStream( + this, + startPromise, + writableHighWaterMark, + writableSizeAlgorithm, + readableHighWaterMark, + readableSizeAlgorithm, + ); + setUpTransformStreamDefaultControllerFromTransformer( + this, + transformer, + transformerDict, + ); + if (transformerDict.start) { + startPromise.resolve( + webidl.invokeCallbackFunction( + transformerDict.start, + [this[_controller]], + transformer, + webidl.converters.any, + { + prefix: + "Failed to call 'start' on 'TransformStreamDefaultController'", + }, + ), + ); + } else { + startPromise.resolve(undefined); + } + } + + /** @returns {ReadableStream} */ + get readable() { + webidl.assertBranded(this, TransformStream); + return this[_readable]; + } + + /** @returns {WritableStream} */ + get writable() { + webidl.assertBranded(this, TransformStream); + return this[_writable]; + } + + [SymbolFor("Deno.privateCustomInspect")](inspect) { + return `${this.constructor.name} ${ + inspect({ readable: this.readable, writable: this.writable }) + }`; + } + + get [SymbolToStringTag]() { + return "TransformStream"; + } + } + + 
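+
+ // Informal usage sketch (names illustrative): the transformer callbacks
+ // converted in the constructor above become the transform/flush algorithms,
+ // so a caller-supplied `transform` runs once per chunk written to the
+ // writable side. Assuming string chunks, an upper-casing transform could be:
+ //
+ //   const upperCase = new TransformStream({
+ //     transform(chunk, controller) {
+ //       controller.enqueue(String(chunk).toUpperCase());
+ //     },
+ //   });
+ //   // source.pipeThrough(upperCase) then yields the transformed readable side.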
webidl.configurePrototype(TransformStream); + + /** @template O */ + class TransformStreamDefaultController { + /** @type {(controller: this) => Promise} */ + [_flushAlgorithm]; + /** @type {TransformStream} */ + [_stream]; + /** @type {(chunk: O, controller: this) => Promise} */ + [_transformAlgorithm]; + + constructor() { + webidl.illegalConstructor(); + } + + /** @returns {number | null} */ + get desiredSize() { + webidl.assertBranded(this, TransformStreamDefaultController); + const readableController = this[_stream][_readable][_controller]; + return readableStreamDefaultControllerGetDesiredSize( + /** @type {ReadableStreamDefaultController} */ (readableController), + ); + } + + /** + * @param {O} chunk + * @returns {void} + */ + enqueue(chunk = undefined) { + webidl.assertBranded(this, TransformStreamDefaultController); + if (chunk !== undefined) { + chunk = webidl.converters.any(chunk); + } + transformStreamDefaultControllerEnqueue(this, chunk); + } + + /** + * @param {any=} reason + * @returns {void} + */ + error(reason = undefined) { + webidl.assertBranded(this, TransformStreamDefaultController); + if (reason !== undefined) { + reason = webidl.converters.any(reason); + } + transformStreamDefaultControllerError(this, reason); + } + + /** @returns {void} */ + terminate() { + webidl.assertBranded(this, TransformStreamDefaultController); + transformStreamDefaultControllerTerminate(this); + } + + [SymbolFor("Deno.customInspect")](inspect) { + return inspect(consoleInternal.createFilteredInspectProxy({ + object: this, + evaluate: this instanceof TransformStreamDefaultController, + keys: ["desiredSize"], + })); + } + + get [SymbolToStringTag]() { + return "TransformStreamDefaultController"; + } + } + + webidl.configurePrototype(TransformStreamDefaultController); + + /** @template W */ + class WritableStream { + /** @type {boolean} */ + [_backpressure]; + /** @type {Deferred | undefined} */ + [_closeRequest]; + /** @type {WritableStreamDefaultController} */ + [_controller]; + /** @type {boolean} */ + [_detached]; + /** @type {Deferred | undefined} */ + [_inFlightWriteRequest]; + /** @type {Deferred | undefined} */ + [_inFlightCloseRequest]; + /** @type {PendingAbortRequest | undefined} */ + [_pendingAbortRequest]; + /** @type {"writable" | "closed" | "erroring" | "errored"} */ + [_state]; + /** @type {any} */ + [_storedError]; + /** @type {WritableStreamDefaultWriter} */ + [_writer]; + /** @type {Deferred[]} */ + [_writeRequests]; + + /** + * @param {UnderlyingSink=} underlyingSink + * @param {QueuingStrategy=} strategy + */ + constructor(underlyingSink = undefined, strategy = {}) { + const prefix = "Failed to construct 'WritableStream'"; + if (underlyingSink !== undefined) { + underlyingSink = webidl.converters.object(underlyingSink, { + prefix, + context: "Argument 1", + }); + } + strategy = webidl.converters.QueuingStrategy(strategy, { + prefix, + context: "Argument 2", + }); + this[webidl.brand] = webidl.brand; + if (underlyingSink === undefined) { + underlyingSink = null; + } + const underlyingSinkDict = webidl.converters.UnderlyingSink( + underlyingSink, + { prefix, context: "underlyingSink" }, + ); + if (underlyingSinkDict.type != null) { + throw new RangeError( + `${prefix}: WritableStream does not support 'type' in the underlying sink.`, + ); + } + initializeWritableStream(this); + const sizeAlgorithm = extractSizeAlgorithm(strategy); + const highWaterMark = extractHighWaterMark(strategy, 1); + setUpWritableStreamDefaultControllerFromUnderlyingSink( + this, + underlyingSink, + 
underlyingSinkDict, + highWaterMark, + sizeAlgorithm, + ); + } + + /** @returns {boolean} */ + get locked() { + webidl.assertBranded(this, WritableStream); + return isWritableStreamLocked(this); + } + + /** + * @param {any=} reason + * @returns {Promise} + */ + abort(reason = undefined) { + try { + webidl.assertBranded(this, WritableStream); + } catch (err) { + return PromiseReject(err); + } + if (reason !== undefined) { + reason = webidl.converters.any(reason); + } + if (isWritableStreamLocked(this)) { + return PromiseReject( + new TypeError( + "The writable stream is locked, therefore cannot be aborted.", + ), + ); + } + return writableStreamAbort(this, reason); + } + + /** @returns {Promise} */ + close() { + try { + webidl.assertBranded(this, WritableStream); + } catch (err) { + return PromiseReject(err); + } + if (isWritableStreamLocked(this)) { + return PromiseReject( + new TypeError( + "The writable stream is locked, therefore cannot be closed.", + ), + ); + } + if (writableStreamCloseQueuedOrInFlight(this) === true) { + return PromiseReject( + new TypeError("The writable stream is already closing."), + ); + } + return writableStreamClose(this); + } + + /** @returns {WritableStreamDefaultWriter} */ + getWriter() { + webidl.assertBranded(this, WritableStream); + return acquireWritableStreamDefaultWriter(this); + } + + [SymbolFor("Deno.privateCustomInspect")](inspect) { + return `${this.constructor.name} ${inspect({ locked: this.locked })}`; + } + + get [SymbolToStringTag]() { + return "WritableStream"; + } + } + + webidl.configurePrototype(WritableStream); + + /** @template W */ + class WritableStreamDefaultWriter { + /** @type {Deferred} */ + [_closedPromise]; + + /** @type {Deferred} */ + [_readyPromise]; + + /** @type {WritableStream} */ + [_stream]; + + /** + * @param {WritableStream} stream + */ + constructor(stream) { + const prefix = "Failed to construct 'WritableStreamDefaultWriter'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + stream = webidl.converters.WritableStream(stream, { + prefix, + context: "Argument 1", + }); + this[webidl.brand] = webidl.brand; + setUpWritableStreamDefaultWriter(this, stream); + } + + /** @returns {Promise} */ + get closed() { + try { + webidl.assertBranded(this, WritableStreamDefaultWriter); + } catch (err) { + return PromiseReject(err); + } + return this[_closedPromise].promise; + } + + /** @returns {number} */ + get desiredSize() { + webidl.assertBranded(this, WritableStreamDefaultWriter); + if (this[_stream] === undefined) { + throw new TypeError( + "A writable stream is not associated with the writer.", + ); + } + return writableStreamDefaultWriterGetDesiredSize(this); + } + + /** @returns {Promise} */ + get ready() { + try { + webidl.assertBranded(this, WritableStreamDefaultWriter); + } catch (err) { + return PromiseReject(err); + } + return this[_readyPromise].promise; + } + + /** + * @param {any} reason + * @returns {Promise} + */ + abort(reason = undefined) { + try { + webidl.assertBranded(this, WritableStreamDefaultWriter); + } catch (err) { + return PromiseReject(err); + } + if (reason !== undefined) { + reason = webidl.converters.any(reason); + } + if (this[_stream] === undefined) { + return PromiseReject( + new TypeError("A writable stream is not associated with the writer."), + ); + } + return writableStreamDefaultWriterAbort(this, reason); + } + + /** @returns {Promise} */ + close() { + try { + webidl.assertBranded(this, WritableStreamDefaultWriter); + } catch (err) { + return PromiseReject(err); + } + const stream 
= this[_stream]; + if (stream === undefined) { + return PromiseReject( + new TypeError("A writable stream is not associated with the writer."), + ); + } + if (writableStreamCloseQueuedOrInFlight(stream) === true) { + return PromiseReject( + new TypeError("The associated stream is already closing."), + ); + } + return writableStreamDefaultWriterClose(this); + } + + /** @returns {void} */ + releaseLock() { + webidl.assertBranded(this, WritableStreamDefaultWriter); + const stream = this[_stream]; + if (stream === undefined) { + return; + } + assert(stream[_writer] !== undefined); + writableStreamDefaultWriterRelease(this); + } + + /** + * @param {W} chunk + * @returns {Promise} + */ + write(chunk = undefined) { + try { + webidl.assertBranded(this, WritableStreamDefaultWriter); + if (chunk !== undefined) { + chunk = webidl.converters.any(chunk); + } + } catch (err) { + return PromiseReject(err); + } + if (this[_stream] === undefined) { + return PromiseReject( + new TypeError("A writable stream is not associate with the writer."), + ); + } + return writableStreamDefaultWriterWrite(this, chunk); + } + + [SymbolFor("Deno.customInspect")](inspect) { + return inspect(consoleInternal.createFilteredInspectProxy({ + object: this, + evaluate: this instanceof WritableStreamDefaultWriter, + keys: [ + "closed", + "desiredSize", + "ready", + ], + })); + } + + get [SymbolToStringTag]() { + return "WritableStreamDefaultWriter"; + } + } + + webidl.configurePrototype(WritableStreamDefaultWriter); + + /** @template W */ + class WritableStreamDefaultController { + /** @type {(reason?: any) => Promise} */ + [_abortAlgorithm]; + /** @type {() => Promise} */ + [_closeAlgorithm]; + /** @type {ValueWithSize[]} */ + [_queue]; + /** @type {number} */ + [_queueTotalSize]; + /** @type {boolean} */ + [_started]; + /** @type {number} */ + [_strategyHWM]; + /** @type {(chunk: W) => number} */ + [_strategySizeAlgorithm]; + /** @type {WritableStream} */ + [_stream]; + /** @type {(chunk: W, controller: this) => Promise} */ + [_writeAlgorithm]; + + constructor() { + webidl.illegalConstructor(); + } + + /** + * @param {any=} e + * @returns {void} + */ + error(e = undefined) { + webidl.assertBranded(this, WritableStreamDefaultController); + if (e !== undefined) { + e = webidl.converters.any(e); + } + const state = this[_stream][_state]; + if (state !== "writable") { + return; + } + writableStreamDefaultControllerError(this, e); + } + + [SymbolFor("Deno.customInspect")](inspect) { + return inspect(consoleInternal.createFilteredInspectProxy({ + object: this, + evaluate: this instanceof WritableStreamDefaultController, + keys: [], + })); + } + + get [SymbolToStringTag]() { + return "WritableStreamDefaultController"; + } + + /** + * @param {any=} reason + * @returns {Promise} + */ + [_abortSteps](reason) { + const result = this[_abortAlgorithm](reason); + writableStreamDefaultControllerClearAlgorithms(this); + return result; + } + + [_errorSteps]() { + resetQueue(this); + } + } + + webidl.configurePrototype(WritableStreamDefaultController); + + /** + * @param {ReadableStream} stream + */ + function createProxy(stream) { + return stream.pipeThrough(new TransformStream()); + } + + webidl.converters.ReadableStream = webidl + .createInterfaceConverter("ReadableStream", ReadableStream); + webidl.converters.WritableStream = webidl + .createInterfaceConverter("WritableStream", WritableStream); + + webidl.converters.ReadableStreamType = webidl.createEnumConverter( + "ReadableStreamType", + ["bytes"], + ); + + 
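  // Usage sketch (illustrative only; assumes the WritableStream and writer
  // defined above are exposed on the global scope, and the logging sink
  // shown here is hypothetical). Run inside an async context:
  //
  //   const stream = new WritableStream(
  //     { write(chunk) { console.log("sink received:", chunk); } },
  //     { highWaterMark: 4 },
  //   );
  //   const writer = stream.getWriter();
  //   await writer.write("a"); // resolves once the sink handles the chunk
  //   await writer.close();    // runs the sink's close step, if any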
webidl.converters.UnderlyingSource = webidl + .createDictionaryConverter("UnderlyingSource", [ + { + key: "start", + converter: webidl.converters.Function, + }, + { + key: "pull", + converter: webidl.converters.Function, + }, + { + key: "cancel", + converter: webidl.converters.Function, + }, + { + key: "type", + converter: webidl.converters.ReadableStreamType, + }, + { + key: "autoAllocateChunkSize", + converter: (V, opts) => + webidl.converters["unsigned long long"](V, { + ...opts, + enforceRange: true, + }), + }, + ]); + webidl.converters.UnderlyingSink = webidl + .createDictionaryConverter("UnderlyingSink", [ + { + key: "start", + converter: webidl.converters.Function, + }, + { + key: "write", + converter: webidl.converters.Function, + }, + { + key: "close", + converter: webidl.converters.Function, + }, + { + key: "abort", + converter: webidl.converters.Function, + }, + { + key: "type", + converter: webidl.converters.any, + }, + ]); + webidl.converters.Transformer = webidl + .createDictionaryConverter("Transformer", [ + { + key: "start", + converter: webidl.converters.Function, + }, + { + key: "transform", + converter: webidl.converters.Function, + }, + { + key: "flush", + converter: webidl.converters.Function, + }, + { + key: "readableType", + converter: webidl.converters.any, + }, + { + key: "writableType", + converter: webidl.converters.any, + }, + ]); + webidl.converters.QueuingStrategy = webidl + .createDictionaryConverter("QueuingStrategy", [ + { + key: "highWaterMark", + converter: webidl.converters["unrestricted double"], + }, + { + key: "size", + converter: webidl.converters.Function, + }, + ]); + webidl.converters.QueuingStrategyInit = webidl + .createDictionaryConverter("QueuingStrategyInit", [ + { + key: "highWaterMark", + converter: webidl.converters["unrestricted double"], + required: true, + }, + ]); + + webidl.converters.ReadableStreamIteratorOptions = webidl + .createDictionaryConverter("ReadableStreamIteratorOptions", [ + { + key: "preventCancel", + defaultValue: false, + converter: webidl.converters.boolean, + }, + ]); + + webidl.converters.ReadableStreamReaderMode = webidl + .createEnumConverter("ReadableStreamReaderMode", ["byob"]); + webidl.converters.ReadableStreamGetReaderOptions = webidl + .createDictionaryConverter("ReadableStreamGetReaderOptions", [{ + key: "mode", + converter: webidl.converters.ReadableStreamReaderMode, + }]); + + webidl.converters.ReadableWritablePair = webidl + .createDictionaryConverter("ReadableWritablePair", [ + { + key: "readable", + converter: webidl.converters.ReadableStream, + required: true, + }, + { + key: "writable", + converter: webidl.converters.WritableStream, + required: true, + }, + ]); + webidl.converters.StreamPipeOptions = webidl + .createDictionaryConverter("StreamPipeOptions", [ + { + key: "preventClose", + defaultValue: false, + converter: webidl.converters.boolean, + }, + { + key: "preventAbort", + defaultValue: false, + converter: webidl.converters.boolean, + }, + { + key: "preventCancel", + defaultValue: false, + converter: webidl.converters.boolean, + }, + { key: "signal", converter: webidl.converters.AbortSignal }, + ]); + + window.__bootstrap.streams = { + // Non-Public + isReadableStreamDisturbed, + errorReadableStream, + createProxy, + writableStreamClose, + Deferred, + // Exposed in global runtime scope + ByteLengthQueuingStrategy, + CountQueuingStrategy, + ReadableStream, + ReadableStreamDefaultReader, + TransformStream, + WritableStream, + WritableStreamDefaultWriter, + WritableStreamDefaultController, + 
ReadableByteStreamController, + ReadableStreamDefaultController, + TransformStreamDefaultController, + }; +})(this); diff --git a/ext/web/06_streams_types.d.ts b/ext/web/06_streams_types.d.ts new file mode 100644 index 000000000..61621c003 --- /dev/null +++ b/ext/web/06_streams_types.d.ts @@ -0,0 +1,55 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +// ** Internal Interfaces ** + +interface PendingAbortRequest { + deferred: Deferred; + // deno-lint-ignore no-explicit-any + reason: any; + wasAlreadyErroring: boolean; +} + +// deno-lint-ignore no-explicit-any +interface ReadRequest { + chunkSteps: (chunk: R) => void; + closeSteps: () => void; + // deno-lint-ignore no-explicit-any + errorSteps: (error: any) => void; +} + +interface ReadableByteStreamQueueEntry { + buffer: ArrayBufferLike; + byteOffset: number; + byteLength: number; +} + +interface ReadableStreamGetReaderOptions { + mode?: "byob"; +} + +interface ReadableStreamIteratorOptions { + preventCancel?: boolean; +} + +interface ValueWithSize { + value: T; + size: number; +} + +interface VoidFunction { + (): void; +} + +interface ReadableStreamGenericReader { + readonly closed: Promise; + // deno-lint-ignore no-explicit-any + cancel(reason?: any): Promise; +} + +// ** Ambient Definitions and Interfaces not provided by fetch ** + +declare function queueMicrotask(callback: VoidFunction): void; + +declare namespace Deno { + function inspect(value: unknown, options?: Record): string; +} diff --git a/ext/web/08_text_encoding.js b/ext/web/08_text_encoding.js new file mode 100644 index 000000000..9be4aa753 --- /dev/null +++ b/ext/web/08_text_encoding.js @@ -0,0 +1,420 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +// @ts-check +/// +/// +/// +/// +/// +/// +/// + +"use strict"; + +((window) => { + const core = Deno.core; + const webidl = window.__bootstrap.webidl; + const { + ArrayBufferIsView, + PromiseReject, + PromiseResolve, + StringPrototypeCharCodeAt, + StringPrototypeSlice, + SymbolToStringTag, + TypedArrayPrototypeSubarray, + TypedArrayPrototypeSlice, + Uint8Array, + } = window.__bootstrap.primordials; + + class TextDecoder { + /** @type {string} */ + #encoding; + /** @type {boolean} */ + #fatal; + /** @type {boolean} */ + #ignoreBOM; + + /** @type {number | null} */ + #rid = null; + + /** + * + * @param {string} label + * @param {TextDecoderOptions} options + */ + constructor(label = "utf-8", options = {}) { + const prefix = "Failed to construct 'TextDecoder'"; + label = webidl.converters.DOMString(label, { + prefix, + context: "Argument 1", + }); + options = webidl.converters.TextDecoderOptions(options, { + prefix, + context: "Argument 2", + }); + const encoding = core.opSync("op_encoding_normalize_label", label); + this.#encoding = encoding; + this.#fatal = options.fatal; + this.#ignoreBOM = options.ignoreBOM; + this[webidl.brand] = webidl.brand; + } + + /** @returns {string} */ + get encoding() { + webidl.assertBranded(this, TextDecoder); + return this.#encoding; + } + + /** @returns {boolean} */ + get fatal() { + webidl.assertBranded(this, TextDecoder); + return this.#fatal; + } + + /** @returns {boolean} */ + get ignoreBOM() { + webidl.assertBranded(this, TextDecoder); + return this.#ignoreBOM; + } + + /** + * @param {BufferSource} [input] + * @param {TextDecodeOptions} options + */ + decode(input = new Uint8Array(), options = {}) { + webidl.assertBranded(this, TextDecoder); + const prefix = "Failed to execute 'decode' on 'TextDecoder'"; + if (input !== 
undefined) { + input = webidl.converters.BufferSource(input, { + prefix, + context: "Argument 1", + allowShared: true, + }); + } + options = webidl.converters.TextDecodeOptions(options, { + prefix, + context: "Argument 2", + }); + + // TODO(lucacasonato): add fast path for non-streaming decoder & decode + + if (this.#rid === null) { + this.#rid = core.opSync("op_encoding_new_decoder", { + label: this.#encoding, + fatal: this.#fatal, + ignoreBom: this.#ignoreBOM, + }); + } + + try { + if (ArrayBufferIsView(input)) { + input = new Uint8Array( + input.buffer, + input.byteOffset, + input.byteLength, + ); + } else { + input = new Uint8Array(input); + } + return core.opSync("op_encoding_decode", new Uint8Array(input), { + rid: this.#rid, + stream: options.stream, + }); + } finally { + if (!options.stream) { + core.close(this.#rid); + this.#rid = null; + } + } + } + + get [SymbolToStringTag]() { + return "TextDecoder"; + } + } + + webidl.configurePrototype(TextDecoder); + + class TextEncoder { + constructor() { + this[webidl.brand] = webidl.brand; + } + + /** @returns {string} */ + get encoding() { + webidl.assertBranded(this, TextEncoder); + return "utf-8"; + } + + /** + * @param {string} input + * @returns {Uint8Array} + */ + encode(input = "") { + webidl.assertBranded(this, TextEncoder); + const prefix = "Failed to execute 'encode' on 'TextEncoder'"; + // The WebIDL type of `input` is `USVString`, but `core.encode` already + // converts lone surrogates to the replacement character. + input = webidl.converters.DOMString(input, { + prefix, + context: "Argument 1", + }); + return core.encode(input); + } + + /** + * @param {string} source + * @param {Uint8Array} destination + * @returns {TextEncoderEncodeIntoResult} + */ + encodeInto(source, destination) { + webidl.assertBranded(this, TextEncoder); + const prefix = "Failed to execute 'encodeInto' on 'TextEncoder'"; + // The WebIDL type of `source` is `USVString`, but the ops bindings + // already convert lone surrogates to the replacement character. + source = webidl.converters.DOMString(source, { + prefix, + context: "Argument 1", + }); + destination = webidl.converters.Uint8Array(destination, { + prefix, + context: "Argument 2", + allowShared: true, + }); + return core.opSync("op_encoding_encode_into", source, destination); + } + + get [SymbolToStringTag]() { + return "TextEncoder"; + } + } + + webidl.configurePrototype(TextEncoder); + + class TextDecoderStream { + /** @type {TextDecoder} */ + #decoder; + /** @type {TransformStream} */ + #transform; + + /** + * + * @param {string} label + * @param {TextDecoderOptions} options + */ + constructor(label = "utf-8", options = {}) { + const prefix = "Failed to construct 'TextDecoderStream'"; + label = webidl.converters.DOMString(label, { + prefix, + context: "Argument 1", + }); + options = webidl.converters.TextDecoderOptions(options, { + prefix, + context: "Argument 2", + }); + this.#decoder = new TextDecoder(label, options); + this.#transform = new TransformStream({ + // The transform and flush functions need access to TextDecoderStream's + // `this`, so they are defined as functions rather than methods. 
+ transform: (chunk, controller) => { + try { + chunk = webidl.converters.BufferSource(chunk, { + allowShared: true, + }); + const decoded = this.#decoder.decode(chunk, { stream: true }); + if (decoded) { + controller.enqueue(decoded); + } + return PromiseResolve(); + } catch (err) { + return PromiseReject(err); + } + }, + flush: (controller) => { + try { + const final = this.#decoder.decode(); + if (final) { + controller.enqueue(final); + } + return PromiseResolve(); + } catch (err) { + return PromiseReject(err); + } + }, + }); + this[webidl.brand] = webidl.brand; + } + + /** @returns {string} */ + get encoding() { + webidl.assertBranded(this, TextDecoderStream); + return this.#decoder.encoding; + } + + /** @returns {boolean} */ + get fatal() { + webidl.assertBranded(this, TextDecoderStream); + return this.#decoder.fatal; + } + + /** @returns {boolean} */ + get ignoreBOM() { + webidl.assertBranded(this, TextDecoderStream); + return this.#decoder.ignoreBOM; + } + + /** @returns {ReadableStream} */ + get readable() { + webidl.assertBranded(this, TextDecoderStream); + return this.#transform.readable; + } + + /** @returns {WritableStream} */ + get writable() { + webidl.assertBranded(this, TextDecoderStream); + return this.#transform.writable; + } + + get [SymbolToStringTag]() { + return "TextDecoderStream"; + } + } + + webidl.configurePrototype(TextDecoderStream); + + class TextEncoderStream { + /** @type {string | null} */ + #pendingHighSurrogate = null; + /** @type {TransformStream} */ + #transform; + + constructor() { + this.#transform = new TransformStream({ + // The transform and flush functions need access to TextEncoderStream's + // `this`, so they are defined as functions rather than methods. + transform: (chunk, controller) => { + try { + chunk = webidl.converters.DOMString(chunk); + if (this.#pendingHighSurrogate !== null) { + chunk = this.#pendingHighSurrogate + chunk; + } + const lastCodeUnit = StringPrototypeCharCodeAt( + chunk, + chunk.length - 1, + ); + if (0xD800 <= lastCodeUnit && lastCodeUnit <= 0xDBFF) { + this.#pendingHighSurrogate = StringPrototypeSlice(chunk, -1); + chunk = StringPrototypeSlice(chunk, 0, -1); + } else { + this.#pendingHighSurrogate = null; + } + if (chunk) { + controller.enqueue(core.encode(chunk)); + } + return PromiseResolve(); + } catch (err) { + return PromiseReject(err); + } + }, + flush: (controller) => { + try { + if (this.#pendingHighSurrogate !== null) { + controller.enqueue(new Uint8Array([0xEF, 0xBF, 0xBD])); + } + return PromiseResolve(); + } catch (err) { + return PromiseReject(err); + } + }, + }); + this[webidl.brand] = webidl.brand; + } + + /** @returns {string} */ + get encoding() { + webidl.assertBranded(this, TextEncoderStream); + return "utf-8"; + } + + /** @returns {ReadableStream} */ + get readable() { + webidl.assertBranded(this, TextEncoderStream); + return this.#transform.readable; + } + + /** @returns {WritableStream} */ + get writable() { + webidl.assertBranded(this, TextEncoderStream); + return this.#transform.writable; + } + + get [SymbolToStringTag]() { + return "TextEncoderStream"; + } + } + + webidl.configurePrototype(TextEncoderStream); + + webidl.converters.TextDecoderOptions = webidl.createDictionaryConverter( + "TextDecoderOptions", + [ + { + key: "fatal", + converter: webidl.converters.boolean, + defaultValue: false, + }, + { + key: "ignoreBOM", + converter: webidl.converters.boolean, + defaultValue: false, + }, + ], + ); + webidl.converters.TextDecodeOptions = webidl.createDictionaryConverter( + "TextDecodeOptions", + 
[ + { + key: "stream", + converter: webidl.converters.boolean, + defaultValue: false, + }, + ], + ); + + /** + * @param {Uint8Array} bytes + */ + function decode(bytes, encoding) { + const BOMEncoding = BOMSniff(bytes); + let start = 0; + if (BOMEncoding !== null) { + encoding = BOMEncoding; + if (BOMEncoding === "UTF-8") start = 3; + else start = 2; + } + return new TextDecoder(encoding).decode( + TypedArrayPrototypeSlice(bytes, start), + ); + } + + /** + * @param {Uint8Array} bytes + */ + function BOMSniff(bytes) { + const BOM = TypedArrayPrototypeSubarray(bytes, 0, 3); + if (BOM[0] === 0xEF && BOM[1] === 0xBB && BOM[2] === 0xBF) { + return "UTF-8"; + } + if (BOM[0] === 0xFE && BOM[1] === 0xFF) return "UTF-16BE"; + if (BOM[0] === 0xFF && BOM[1] === 0xFE) return "UTF-16LE"; + return null; + } + + window.__bootstrap.encoding = { + TextEncoder, + TextDecoder, + TextEncoderStream, + TextDecoderStream, + decode, + }; +})(this); diff --git a/ext/web/09_file.js b/ext/web/09_file.js new file mode 100644 index 000000000..516e80adf --- /dev/null +++ b/ext/web/09_file.js @@ -0,0 +1,569 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +// @ts-check +/// +/// +/// +/// +/// +/// +/// +/// +"use strict"; + +((window) => { + const core = window.Deno.core; + const webidl = window.__bootstrap.webidl; + const { + ArrayBuffer, + ArrayBufferPrototypeSlice, + ArrayBufferIsView, + ArrayPrototypePush, + Date, + DatePrototypeGetTime, + MathMax, + MathMin, + RegExpPrototypeTest, + StringPrototypeCharAt, + StringPrototypeToLowerCase, + StringPrototypeSlice, + Symbol, + SymbolFor, + TypedArrayPrototypeSet, + SymbolToStringTag, + TypeError, + Uint8Array, + } = window.__bootstrap.primordials; + const consoleInternal = window.__bootstrap.console; + + // TODO(lucacasonato): this needs to not be hardcoded and instead depend on + // host os. + const isWindows = false; + + /** + * @param {string} input + * @param {number} position + * @returns {{result: string, position: number}} + */ + function collectCodepointsNotCRLF(input, position) { + // See https://w3c.github.io/FileAPI/#convert-line-endings-to-native and + // https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points + const start = position; + for ( + let c = StringPrototypeCharAt(input, position); + position < input.length && !(c === "\r" || c === "\n"); + c = StringPrototypeCharAt(input, ++position) + ); + return { result: StringPrototypeSlice(input, start, position), position }; + } + + /** + * @param {string} s + * @returns {string} + */ + function convertLineEndingsToNative(s) { + const nativeLineEnding = isWindows ? 
"\r\n" : "\n"; + + let { result, position } = collectCodepointsNotCRLF(s, 0); + + while (position < s.length) { + const codePoint = StringPrototypeCharAt(s, position); + if (codePoint === "\r") { + result += nativeLineEnding; + position++; + if ( + position < s.length && StringPrototypeCharAt(s, position) === "\n" + ) { + position++; + } + } else if (codePoint === "\n") { + position++; + result += nativeLineEnding; + } + const { result: token, position: newPosition } = collectCodepointsNotCRLF( + s, + position, + ); + position = newPosition; + result += token; + } + + return result; + } + + /** @param {(BlobReference | Blob)[]} parts */ + async function* toIterator(parts) { + for (const part of parts) { + yield* part.stream(); + } + } + + /** @typedef {BufferSource | Blob | string} BlobPart */ + + /** + * @param {BlobPart[]} parts + * @param {string} endings + * @returns {{ parts: (BlobReference|Blob)[], size: number }} + */ + function processBlobParts(parts, endings) { + /** @type {(BlobReference|Blob)[]} */ + const processedParts = []; + let size = 0; + for (const element of parts) { + if (element instanceof ArrayBuffer) { + const chunk = new Uint8Array(ArrayBufferPrototypeSlice(element, 0)); + ArrayPrototypePush(processedParts, BlobReference.fromUint8Array(chunk)); + size += element.byteLength; + } else if (ArrayBufferIsView(element)) { + const chunk = new Uint8Array( + element.buffer, + element.byteOffset, + element.byteLength, + ); + size += element.byteLength; + ArrayPrototypePush(processedParts, BlobReference.fromUint8Array(chunk)); + } else if (element instanceof Blob) { + ArrayPrototypePush(processedParts, element); + size += element.size; + } else if (typeof element === "string") { + const chunk = core.encode( + endings == "native" ? convertLineEndingsToNative(element) : element, + ); + size += chunk.byteLength; + ArrayPrototypePush(processedParts, BlobReference.fromUint8Array(chunk)); + } else { + throw new TypeError("Unreachable code (invalid element type)"); + } + } + return { parts: processedParts, size }; + } + + /** + * @param {string} str + * @returns {string} + */ + function normalizeType(str) { + let normalizedType = str; + if (!RegExpPrototypeTest(/^[\x20-\x7E]*$/, str)) { + normalizedType = ""; + } + return StringPrototypeToLowerCase(normalizedType); + } + + /** + * Get all Parts as a flat array containing all references + * @param {Blob} blob + * @param {string[]} bag + * @returns {string[]} + */ + function getParts(blob, bag = []) { + for (const part of blob[_parts]) { + if (part instanceof Blob) { + getParts(part, bag); + } else { + ArrayPrototypePush(bag, part._id); + } + } + return bag; + } + + const _size = Symbol("Size"); + const _parts = Symbol("Parts"); + + class Blob { + #type = ""; + [_size] = 0; + [_parts]; + + /** + * @param {BlobPart[]} blobParts + * @param {BlobPropertyBag} options + */ + constructor(blobParts = [], options = {}) { + const prefix = "Failed to construct 'Blob'"; + blobParts = webidl.converters["sequence"](blobParts, { + context: "Argument 1", + prefix, + }); + options = webidl.converters["BlobPropertyBag"](options, { + context: "Argument 2", + prefix, + }); + + this[webidl.brand] = webidl.brand; + + const { parts, size } = processBlobParts( + blobParts, + options.endings, + ); + + this[_parts] = parts; + this[_size] = size; + this.#type = normalizeType(options.type); + } + + /** @returns {number} */ + get size() { + webidl.assertBranded(this, Blob); + return this[_size]; + } + + /** @returns {string} */ + get type() { + 
webidl.assertBranded(this, Blob); + return this.#type; + } + + /** + * @param {number} [start] + * @param {number} [end] + * @param {string} [contentType] + * @returns {Blob} + */ + slice(start, end, contentType) { + webidl.assertBranded(this, Blob); + const prefix = "Failed to execute 'slice' on 'Blob'"; + if (start !== undefined) { + start = webidl.converters["long long"](start, { + clamp: true, + context: "Argument 1", + prefix, + }); + } + if (end !== undefined) { + end = webidl.converters["long long"](end, { + clamp: true, + context: "Argument 2", + prefix, + }); + } + if (contentType !== undefined) { + contentType = webidl.converters["DOMString"](contentType, { + context: "Argument 3", + prefix, + }); + } + + // deno-lint-ignore no-this-alias + const O = this; + /** @type {number} */ + let relativeStart; + if (start === undefined) { + relativeStart = 0; + } else { + if (start < 0) { + relativeStart = MathMax(O.size + start, 0); + } else { + relativeStart = MathMin(start, O.size); + } + } + /** @type {number} */ + let relativeEnd; + if (end === undefined) { + relativeEnd = O.size; + } else { + if (end < 0) { + relativeEnd = MathMax(O.size + end, 0); + } else { + relativeEnd = MathMin(end, O.size); + } + } + + const span = MathMax(relativeEnd - relativeStart, 0); + const blobParts = []; + let added = 0; + + for (const part of this[_parts]) { + // don't add the overflow to new blobParts + if (added >= span) { + // Could maybe be possible to remove variable `added` + // and only use relativeEnd? + break; + } + const size = part.size; + if (relativeStart && size <= relativeStart) { + // Skip the beginning and change the relative + // start & end position as we skip the unwanted parts + relativeStart -= size; + relativeEnd -= size; + } else { + const chunk = part.slice( + relativeStart, + MathMin(part.size, relativeEnd), + ); + added += chunk.size; + relativeEnd -= part.size; + ArrayPrototypePush(blobParts, chunk); + relativeStart = 0; // All next sequential parts should start at 0 + } + } + + /** @type {string} */ + let relativeContentType; + if (contentType === undefined) { + relativeContentType = ""; + } else { + relativeContentType = normalizeType(contentType); + } + + const blob = new Blob([], { type: relativeContentType }); + blob[_parts] = blobParts; + blob[_size] = span; + return blob; + } + + /** + * @returns {ReadableStream} + */ + stream() { + webidl.assertBranded(this, Blob); + const partIterator = toIterator(this[_parts]); + const stream = new ReadableStream({ + type: "bytes", + /** @param {ReadableByteStreamController} controller */ + async pull(controller) { + while (true) { + const { value, done } = await partIterator.next(); + if (done) return controller.close(); + if (value.byteLength > 0) { + return controller.enqueue(value); + } + } + }, + }); + return stream; + } + + /** + * @returns {Promise} + */ + async text() { + webidl.assertBranded(this, Blob); + const buffer = await this.arrayBuffer(); + return core.decode(new Uint8Array(buffer)); + } + + /** + * @returns {Promise} + */ + async arrayBuffer() { + webidl.assertBranded(this, Blob); + const stream = this.stream(); + const bytes = new Uint8Array(this.size); + let offset = 0; + for await (const chunk of stream) { + TypedArrayPrototypeSet(bytes, chunk, offset); + offset += chunk.byteLength; + } + return bytes.buffer; + } + + get [SymbolToStringTag]() { + return "Blob"; + } + + [SymbolFor("Deno.customInspect")](inspect) { + return inspect(consoleInternal.createFilteredInspectProxy({ + object: this, + evaluate: this 
instanceof Blob, + keys: [ + "size", + "type", + ], + })); + } + } + + webidl.configurePrototype(Blob); + + webidl.converters["Blob"] = webidl.createInterfaceConverter("Blob", Blob); + webidl.converters["BlobPart"] = (V, opts) => { + // Union for ((ArrayBuffer or ArrayBufferView) or Blob or USVString) + if (typeof V == "object") { + if (V instanceof Blob) { + return webidl.converters["Blob"](V, opts); + } + if (V instanceof ArrayBuffer || V instanceof SharedArrayBuffer) { + return webidl.converters["ArrayBuffer"](V, opts); + } + if (ArrayBufferIsView(V)) { + return webidl.converters["ArrayBufferView"](V, opts); + } + } + return webidl.converters["USVString"](V, opts); + }; + webidl.converters["sequence"] = webidl.createSequenceConverter( + webidl.converters["BlobPart"], + ); + webidl.converters["EndingType"] = webidl.createEnumConverter("EndingType", [ + "transparent", + "native", + ]); + const blobPropertyBagDictionary = [ + { + key: "type", + converter: webidl.converters["DOMString"], + defaultValue: "", + }, + { + key: "endings", + converter: webidl.converters["EndingType"], + defaultValue: "transparent", + }, + ]; + webidl.converters["BlobPropertyBag"] = webidl.createDictionaryConverter( + "BlobPropertyBag", + blobPropertyBagDictionary, + ); + + const _Name = Symbol("[[Name]]"); + const _LastModified = Symbol("[[LastModified]]"); + + class File extends Blob { + /** @type {string} */ + [_Name]; + /** @type {number} */ + [_LastModified]; + + /** + * @param {BlobPart[]} fileBits + * @param {string} fileName + * @param {FilePropertyBag} options + */ + constructor(fileBits, fileName, options = {}) { + const prefix = "Failed to construct 'File'"; + webidl.requiredArguments(arguments.length, 2, { prefix }); + + fileBits = webidl.converters["sequence"](fileBits, { + context: "Argument 1", + prefix, + }); + fileName = webidl.converters["USVString"](fileName, { + context: "Argument 2", + prefix, + }); + options = webidl.converters["FilePropertyBag"](options, { + context: "Argument 3", + prefix, + }); + + super(fileBits, options); + + /** @type {string} */ + this[_Name] = fileName; + if (options.lastModified === undefined) { + /** @type {number} */ + this[_LastModified] = DatePrototypeGetTime(new Date()); + } else { + /** @type {number} */ + this[_LastModified] = options.lastModified; + } + } + + /** @returns {string} */ + get name() { + webidl.assertBranded(this, File); + return this[_Name]; + } + + /** @returns {number} */ + get lastModified() { + webidl.assertBranded(this, File); + return this[_LastModified]; + } + + get [SymbolToStringTag]() { + return "File"; + } + } + + webidl.configurePrototype(File); + + webidl.converters["FilePropertyBag"] = webidl.createDictionaryConverter( + "FilePropertyBag", + blobPropertyBagDictionary, + [ + { + key: "lastModified", + converter: webidl.converters["long long"], + }, + ], + ); + + // A finalization registry to deallocate a blob part when its JS reference is + // garbage collected. + const registry = new FinalizationRegistry((uuid) => { + core.opSync("op_blob_remove_part", uuid); + }); + + // TODO(lucacasonato): get a better stream from Rust in BlobReference#stream + + /** + * An opaque reference to a blob part in Rust. This could be backed by a file, + * in memory storage, or something else. + */ + class BlobReference { + /** + * Don't use directly. Use `BlobReference.fromUint8Array`. 
+ * @param {string} id + * @param {number} size + */ + constructor(id, size) { + this._id = id; + this.size = size; + registry.register(this, id); + } + + /** + * Create a new blob part from a Uint8Array. + * + * @param {Uint8Array} data + * @returns {BlobReference} + */ + static fromUint8Array(data) { + const id = core.opSync("op_blob_create_part", data); + return new BlobReference(id, data.byteLength); + } + + /** + * Create a new BlobReference by slicing this BlobReference. This is a copy + * free operation - the sliced reference will still reference the original + * underlying bytes. + * + * @param {number} start + * @param {number} end + * @returns {BlobReference} + */ + slice(start, end) { + const size = end - start; + const id = core.opSync("op_blob_slice_part", this._id, { + start, + len: size, + }); + return new BlobReference(id, size); + } + + /** + * Read the entire contents of the reference blob. + * @returns {AsyncGenerator} + */ + async *stream() { + yield core.opAsync("op_blob_read_part", this._id); + + // let position = 0; + // const end = this.size; + // while (position !== end) { + // const size = MathMin(end - position, 65536); + // const chunk = this.slice(position, position + size); + // position += chunk.size; + // yield core.opAsync("op_blob_read_part", chunk._id); + // } + } + } + + window.__bootstrap.file = { + getParts, + Blob, + File, + }; +})(this); diff --git a/ext/web/10_filereader.js b/ext/web/10_filereader.js new file mode 100644 index 000000000..13fe6af2d --- /dev/null +++ b/ext/web/10_filereader.js @@ -0,0 +1,461 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +// @ts-check +/// +/// +/// +/// +/// +/// +/// +/// + +"use strict"; + +((window) => { + const webidl = window.__bootstrap.webidl; + const { forgivingBase64Encode } = window.__bootstrap.infra; + const { decode, TextDecoder } = window.__bootstrap.encoding; + const { parseMimeType } = window.__bootstrap.mimesniff; + const { DOMException } = window.__bootstrap.domException; + const { + ArrayPrototypeJoin, + ArrayPrototypeMap, + ArrayPrototypePush, + ArrayPrototypeReduce, + FunctionPrototypeCall, + Map, + MapPrototypeGet, + MapPrototypeSet, + ObjectDefineProperty, + queueMicrotask, + StringFromCodePoint, + Symbol, + SymbolToStringTag, + TypedArrayPrototypeSet, + TypeError, + Uint8Array, + } = window.__bootstrap.primordials; + + const state = Symbol("[[state]]"); + const result = Symbol("[[result]]"); + const error = Symbol("[[error]]"); + const aborted = Symbol("[[aborted]]"); + + class FileReader extends EventTarget { + get [SymbolToStringTag]() { + return "FileReader"; + } + + /** @type {"empty" | "loading" | "done"} */ + [state] = "empty"; + /** @type {null | string | ArrayBuffer} */ + [result] = null; + /** @type {null | DOMException} */ + [error] = null; + /** @type {null | {aborted: boolean}} */ + [aborted] = null; + + /** + * @param {Blob} blob + * @param {{kind: "ArrayBuffer" | "Text" | "DataUrl" | "BinaryString", encoding?: string}} readtype + */ + #readOperation(blob, readtype) { + // 1. If fr’s state is "loading", throw an InvalidStateError DOMException. + if (this[state] === "loading") { + throw new DOMException( + "Invalid FileReader state.", + "InvalidStateError", + ); + } + // 2. Set fr’s state to "loading". + this[state] = "loading"; + // 3. Set fr’s result to null. + this[result] = null; + // 4. Set fr’s error to null. 
+ this[error] = null; + + // We set this[aborted] to a new object, and keep track of it in a + // separate variable, so if a new read operation starts while there are + // remaining tasks from a previous aborted operation, the new operation + // will run while the tasks from the previous one are still aborted. + const abortedState = this[aborted] = { aborted: false }; + + // 5. Let stream be the result of calling get stream on blob. + const stream /*: ReadableStream*/ = blob.stream(); + + // 6. Let reader be the result of getting a reader from stream. + const reader = stream.getReader(); + + // 7. Let bytes be an empty byte sequence. + /** @type {Uint8Array[]} */ + const chunks = []; + + // 8. Let chunkPromise be the result of reading a chunk from stream with reader. + let chunkPromise = reader.read(); + + // 9. Let isFirstChunk be true. + let isFirstChunk = true; + + // 10 in parallel while true + (async () => { + while (!abortedState.aborted) { + // 1. Wait for chunkPromise to be fulfilled or rejected. + try { + const chunk = await chunkPromise; + if (abortedState.aborted) return; + + // 2. If chunkPromise is fulfilled, and isFirstChunk is true, queue a task to fire a progress event called loadstart at fr. + if (isFirstChunk) { + // TODO(lucacasonato): this is wrong, should be HTML "queue a task" + queueMicrotask(() => { + if (abortedState.aborted) return; + // fire a progress event for loadstart + const ev = new ProgressEvent("loadstart", {}); + this.dispatchEvent(ev); + }); + } + // 3. Set isFirstChunk to false. + isFirstChunk = false; + + // 4. If chunkPromise is fulfilled with an object whose done property is false + // and whose value property is a Uint8Array object, run these steps: + if (!chunk.done && chunk.value instanceof Uint8Array) { + ArrayPrototypePush(chunks, chunk.value); + + // TODO(bartlomieju): (only) If roughly 50ms have passed since last progress + { + const size = ArrayPrototypeReduce( + chunks, + (p, i) => p + i.byteLength, + 0, + ); + const ev = new ProgressEvent("progress", { + loaded: size, + }); + // TODO(lucacasonato): this is wrong, should be HTML "queue a task" + queueMicrotask(() => { + if (abortedState.aborted) return; + this.dispatchEvent(ev); + }); + } + + chunkPromise = reader.read(); + } // 5 Otherwise, if chunkPromise is fulfilled with an object whose done property is true, queue a task to run the following steps and abort this algorithm: + else if (chunk.done === true) { + // TODO(lucacasonato): this is wrong, should be HTML "queue a task" + queueMicrotask(() => { + if (abortedState.aborted) return; + // 1. Set fr’s state to "done". + this[state] = "done"; + // 2. Let result be the result of package data given bytes, type, blob’s type, and encodingName. 
+ const size = ArrayPrototypeReduce( + chunks, + (p, i) => p + i.byteLength, + 0, + ); + const bytes = new Uint8Array(size); + let offs = 0; + for (const chunk of chunks) { + TypedArrayPrototypeSet(bytes, chunk, offs); + offs += chunk.byteLength; + } + switch (readtype.kind) { + case "ArrayBuffer": { + this[result] = bytes.buffer; + break; + } + case "BinaryString": + this[result] = ArrayPrototypeJoin( + ArrayPrototypeMap( + [...new Uint8Array(bytes.buffer)], + (v) => StringFromCodePoint(v), + ), + "", + ); + break; + case "Text": { + let decoder = undefined; + if (readtype.encoding) { + try { + decoder = new TextDecoder(readtype.encoding); + } catch { + // don't care about the error + } + } + if (decoder === undefined) { + const mimeType = parseMimeType(blob.type); + if (mimeType) { + const charset = MapPrototypeGet( + mimeType.parameters, + "charset", + ); + if (charset) { + try { + decoder = new TextDecoder(charset); + } catch { + // don't care about the error + } + } + } + } + if (decoder === undefined) { + decoder = new TextDecoder(); + } + this[result] = decode(bytes, decoder.encoding); + break; + } + case "DataUrl": { + const mediaType = blob.type || "application/octet-stream"; + this[result] = `data:${mediaType};base64,${ + forgivingBase64Encode(bytes) + }`; + break; + } + } + // 4.2 Fire a progress event called load at the fr. + { + const ev = new ProgressEvent("load", { + lengthComputable: true, + loaded: size, + total: size, + }); + this.dispatchEvent(ev); + } + + // 5. If fr’s state is not "loading", fire a progress event called loadend at the fr. + //Note: Event handler for the load or error events could have started another load, if that happens the loadend event for this load is not fired. + if (this[state] !== "loading") { + const ev = new ProgressEvent("loadend", { + lengthComputable: true, + loaded: size, + total: size, + }); + this.dispatchEvent(ev); + } + }); + break; + } + } catch (err) { + // TODO(lucacasonato): this is wrong, should be HTML "queue a task" + queueMicrotask(() => { + if (abortedState.aborted) return; + + // chunkPromise rejected + this[state] = "done"; + this[error] = err; + + { + const ev = new ProgressEvent("error", {}); + this.dispatchEvent(ev); + } + + //If fr’s state is not "loading", fire a progress event called loadend at fr. + //Note: Event handler for the error event could have started another load, if that happens the loadend event for this load is not fired. + if (this[state] !== "loading") { + const ev = new ProgressEvent("loadend", {}); + this.dispatchEvent(ev); + } + }); + break; + } + } + })(); + } + + constructor() { + super(); + this[webidl.brand] = webidl.brand; + } + + /** @returns {number} */ + get readyState() { + webidl.assertBranded(this, FileReader); + switch (this[state]) { + case "empty": + return FileReader.EMPTY; + case "loading": + return FileReader.LOADING; + case "done": + return FileReader.DONE; + default: + throw new TypeError("Invalid state"); + } + } + + get result() { + webidl.assertBranded(this, FileReader); + return this[result]; + } + + get error() { + webidl.assertBranded(this, FileReader); + return this[error]; + } + + abort() { + webidl.assertBranded(this, FileReader); + // If context object's state is "empty" or if context object's state is "done" set context object's result to null and terminate this algorithm. 
+ if ( + this[state] === "empty" || + this[state] === "done" + ) { + this[result] = null; + return; + } + // If context object's state is "loading" set context object's state to "done" and set context object's result to null. + if (this[state] === "loading") { + this[state] = "done"; + this[result] = null; + } + // If there are any tasks from the context object on the file reading task source in an affiliated task queue, then remove those tasks from that task queue. + // Terminate the algorithm for the read method being processed. + if (this[aborted] !== null) { + this[aborted].aborted = true; + } + + // Fire a progress event called abort at the context object. + const ev = new ProgressEvent("abort", {}); + this.dispatchEvent(ev); + + // If context object's state is not "loading", fire a progress event called loadend at the context object. + if (this[state] !== "loading") { + const ev = new ProgressEvent("loadend", {}); + this.dispatchEvent(ev); + } + } + + /** @param {Blob} blob */ + readAsArrayBuffer(blob) { + webidl.assertBranded(this, FileReader); + const prefix = "Failed to execute 'readAsArrayBuffer' on 'FileReader'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + this.#readOperation(blob, { kind: "ArrayBuffer" }); + } + + /** @param {Blob} blob */ + readAsBinaryString(blob) { + webidl.assertBranded(this, FileReader); + const prefix = "Failed to execute 'readAsBinaryString' on 'FileReader'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + // alias for readAsArrayBuffer + this.#readOperation(blob, { kind: "BinaryString" }); + } + + /** @param {Blob} blob */ + readAsDataURL(blob) { + webidl.assertBranded(this, FileReader); + const prefix = "Failed to execute 'readAsBinaryString' on 'FileReader'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + // alias for readAsArrayBuffer + this.#readOperation(blob, { kind: "DataUrl" }); + } + + /** + * @param {Blob} blob + * @param {string} [encoding] + */ + readAsText(blob, encoding) { + webidl.assertBranded(this, FileReader); + const prefix = "Failed to execute 'readAsBinaryString' on 'FileReader'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + if (encoding !== undefined) { + encoding = webidl.converters["DOMString"](encoding, { + prefix, + context: "Argument 2", + }); + } + // alias for readAsArrayBuffer + this.#readOperation(blob, { kind: "Text", encoding }); + } + } + + webidl.configurePrototype(FileReader); + + ObjectDefineProperty(FileReader, "EMPTY", { + writable: false, + enumerable: true, + configurable: false, + value: 0, + }); + ObjectDefineProperty(FileReader, "LOADING", { + writable: false, + enumerable: true, + configurable: false, + value: 1, + }); + ObjectDefineProperty(FileReader, "DONE", { + writable: false, + enumerable: true, + configurable: false, + value: 2, + }); + ObjectDefineProperty(FileReader.prototype, "EMPTY", { + writable: false, + enumerable: true, + configurable: false, + value: 0, + }); + ObjectDefineProperty(FileReader.prototype, "LOADING", { + writable: false, + enumerable: true, + configurable: false, + value: 1, + }); + ObjectDefineProperty(FileReader.prototype, "DONE", { + writable: false, + enumerable: true, + configurable: false, + value: 2, + }); + + const handlerSymbol = Symbol("eventHandlers"); + + function makeWrappedHandler(handler) { + function wrappedHandler(...args) { + if (typeof wrappedHandler.handler !== "function") { + return; + } + return FunctionPrototypeCall(wrappedHandler.handler, this, ...args); + } + wrappedHandler.handler = 
handler; + return wrappedHandler; + } + // TODO(benjamingr) reuse when we can reuse code between web crates + function defineEventHandler(emitter, name) { + // HTML specification section 8.1.5.1 + ObjectDefineProperty(emitter, `on${name}`, { + get() { + const maybeMap = this[handlerSymbol]; + if (!maybeMap) return null; + + return MapPrototypeGet(maybeMap, name)?.handler ?? null; + }, + set(value) { + if (!this[handlerSymbol]) { + this[handlerSymbol] = new Map(); + } + let handlerWrapper = MapPrototypeGet(this[handlerSymbol], name); + if (handlerWrapper) { + handlerWrapper.handler = value; + } else { + handlerWrapper = makeWrappedHandler(value); + this.addEventListener(name, handlerWrapper); + } + MapPrototypeSet(this[handlerSymbol], name, handlerWrapper); + }, + configurable: true, + enumerable: true, + }); + } + defineEventHandler(FileReader.prototype, "error"); + defineEventHandler(FileReader.prototype, "loadstart"); + defineEventHandler(FileReader.prototype, "load"); + defineEventHandler(FileReader.prototype, "loadend"); + defineEventHandler(FileReader.prototype, "progress"); + defineEventHandler(FileReader.prototype, "abort"); + + window.__bootstrap.fileReader = { + FileReader, + }; +})(this); diff --git a/ext/web/11_blob_url.js b/ext/web/11_blob_url.js new file mode 100644 index 000000000..fa0ea041c --- /dev/null +++ b/ext/web/11_blob_url.js @@ -0,0 +1,59 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +// @ts-check +/// +/// +/// +/// +/// +/// +/// +/// +/// +"use strict"; + +((window) => { + const core = Deno.core; + const webidl = window.__bootstrap.webidl; + const { getParts } = window.__bootstrap.file; + const { URL } = window.__bootstrap.url; + + /** + * @param {Blob} blob + * @returns {string} + */ + function createObjectURL(blob) { + const prefix = "Failed to execute 'createObjectURL' on 'URL'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + blob = webidl.converters["Blob"](blob, { + context: "Argument 1", + prefix, + }); + + const url = core.opSync( + "op_blob_create_object_url", + blob.type, + getParts(blob), + ); + + return url; + } + + /** + * @param {string} url + * @returns {void} + */ + function revokeObjectURL(url) { + const prefix = "Failed to execute 'revokeObjectURL' on 'URL'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + url = webidl.converters["DOMString"](url, { + context: "Argument 1", + prefix, + }); + + core.opSync("op_blob_revoke_object_url", url); + } + + URL.createObjectURL = createObjectURL; + URL.revokeObjectURL = revokeObjectURL; +})(globalThis); diff --git a/ext/web/12_location.js b/ext/web/12_location.js new file mode 100644 index 000000000..40dd545fe --- /dev/null +++ b/ext/web/12_location.js @@ -0,0 +1,409 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. +"use strict"; + +/// + +((window) => { + const { URL } = window.__bootstrap.url; + const { DOMException } = window.__bootstrap.domException; + const { + Error, + ObjectDefineProperties, + ReferenceError, + Symbol, + SymbolFor, + SymbolToStringTag, + TypeError, + WeakMap, + WeakMapPrototypeGet, + WeakMapPrototypeSet, + } = window.__bootstrap.primordials; + + const locationConstructorKey = Symbol("locationConstuctorKey"); + + // The differences between the definitions of `Location` and `WorkerLocation` + // are because of the `LegacyUnforgeable` attribute only specified upon + // `Location`'s properties. 
See: + // - https://html.spec.whatwg.org/multipage/history.html#the-location-interface + // - https://heycam.github.io/webidl/#LegacyUnforgeable + class Location { + constructor(href = null, key = null) { + if (key != locationConstructorKey) { + throw new TypeError("Illegal constructor."); + } + const url = new URL(href); + url.username = ""; + url.password = ""; + ObjectDefineProperties(this, { + hash: { + get() { + return url.hash; + }, + set() { + throw new DOMException( + `Cannot set "location.hash".`, + "NotSupportedError", + ); + }, + enumerable: true, + }, + host: { + get() { + return url.host; + }, + set() { + throw new DOMException( + `Cannot set "location.host".`, + "NotSupportedError", + ); + }, + enumerable: true, + }, + hostname: { + get() { + return url.hostname; + }, + set() { + throw new DOMException( + `Cannot set "location.hostname".`, + "NotSupportedError", + ); + }, + enumerable: true, + }, + href: { + get() { + return url.href; + }, + set() { + throw new DOMException( + `Cannot set "location.href".`, + "NotSupportedError", + ); + }, + enumerable: true, + }, + origin: { + get() { + return url.origin; + }, + enumerable: true, + }, + pathname: { + get() { + return url.pathname; + }, + set() { + throw new DOMException( + `Cannot set "location.pathname".`, + "NotSupportedError", + ); + }, + enumerable: true, + }, + port: { + get() { + return url.port; + }, + set() { + throw new DOMException( + `Cannot set "location.port".`, + "NotSupportedError", + ); + }, + enumerable: true, + }, + protocol: { + get() { + return url.protocol; + }, + set() { + throw new DOMException( + `Cannot set "location.protocol".`, + "NotSupportedError", + ); + }, + enumerable: true, + }, + search: { + get() { + return url.search; + }, + set() { + throw new DOMException( + `Cannot set "location.search".`, + "NotSupportedError", + ); + }, + enumerable: true, + }, + ancestorOrigins: { + get() { + // TODO(nayeemrmn): Replace with a `DOMStringList` instance. 
+ return { + length: 0, + item: () => null, + contains: () => false, + }; + }, + enumerable: true, + }, + assign: { + value: function assign() { + throw new DOMException( + `Cannot call "location.assign()".`, + "NotSupportedError", + ); + }, + enumerable: true, + }, + reload: { + value: function reload() { + throw new DOMException( + `Cannot call "location.reload()".`, + "NotSupportedError", + ); + }, + enumerable: true, + }, + replace: { + value: function replace() { + throw new DOMException( + `Cannot call "location.replace()".`, + "NotSupportedError", + ); + }, + enumerable: true, + }, + toString: { + value: function toString() { + return url.href; + }, + enumerable: true, + }, + [SymbolFor("Deno.privateCustomInspect")]: { + value: function (inspect) { + const object = { + hash: this.hash, + host: this.host, + hostname: this.hostname, + href: this.href, + origin: this.origin, + pathname: this.pathname, + port: this.port, + protocol: this.protocol, + search: this.search, + }; + return `${this.constructor.name} ${inspect(object)}`; + }, + }, + }); + } + } + + ObjectDefineProperties(Location.prototype, { + [SymbolToStringTag]: { + value: "Location", + configurable: true, + }, + }); + + const workerLocationUrls = new WeakMap(); + + class WorkerLocation { + constructor(href = null, key = null) { + if (key != locationConstructorKey) { + throw new TypeError("Illegal constructor."); + } + const url = new URL(href); + url.username = ""; + url.password = ""; + WeakMapPrototypeSet(workerLocationUrls, this, url); + } + } + + ObjectDefineProperties(WorkerLocation.prototype, { + hash: { + get() { + const url = WeakMapPrototypeGet(workerLocationUrls, this); + if (url == null) { + throw new TypeError("Illegal invocation."); + } + return url.hash; + }, + configurable: true, + enumerable: true, + }, + host: { + get() { + const url = WeakMapPrototypeGet(workerLocationUrls, this); + if (url == null) { + throw new TypeError("Illegal invocation."); + } + return url.host; + }, + configurable: true, + enumerable: true, + }, + hostname: { + get() { + const url = WeakMapPrototypeGet(workerLocationUrls, this); + if (url == null) { + throw new TypeError("Illegal invocation."); + } + return url.hostname; + }, + configurable: true, + enumerable: true, + }, + href: { + get() { + const url = WeakMapPrototypeGet(workerLocationUrls, this); + if (url == null) { + throw new TypeError("Illegal invocation."); + } + return url.href; + }, + configurable: true, + enumerable: true, + }, + origin: { + get() { + const url = WeakMapPrototypeGet(workerLocationUrls, this); + if (url == null) { + throw new TypeError("Illegal invocation."); + } + return url.origin; + }, + configurable: true, + enumerable: true, + }, + pathname: { + get() { + const url = WeakMapPrototypeGet(workerLocationUrls, this); + if (url == null) { + throw new TypeError("Illegal invocation."); + } + return url.pathname; + }, + configurable: true, + enumerable: true, + }, + port: { + get() { + const url = WeakMapPrototypeGet(workerLocationUrls, this); + if (url == null) { + throw new TypeError("Illegal invocation."); + } + return url.port; + }, + configurable: true, + enumerable: true, + }, + protocol: { + get() { + const url = WeakMapPrototypeGet(workerLocationUrls, this); + if (url == null) { + throw new TypeError("Illegal invocation."); + } + return url.protocol; + }, + configurable: true, + enumerable: true, + }, + search: { + get() { + const url = WeakMapPrototypeGet(workerLocationUrls, this); + if (url == null) { + throw new TypeError("Illegal invocation."); 
+ } + return url.search; + }, + configurable: true, + enumerable: true, + }, + toString: { + value: function toString() { + const url = WeakMapPrototypeGet(workerLocationUrls, this); + if (url == null) { + throw new TypeError("Illegal invocation."); + } + return url.href; + }, + configurable: true, + enumerable: true, + writable: true, + }, + [SymbolToStringTag]: { + value: "WorkerLocation", + configurable: true, + }, + [SymbolFor("Deno.privateCustomInspect")]: { + value: function (inspect) { + const object = { + hash: this.hash, + host: this.host, + hostname: this.hostname, + href: this.href, + origin: this.origin, + pathname: this.pathname, + port: this.port, + protocol: this.protocol, + search: this.search, + }; + return `${this.constructor.name} ${inspect(object)}`; + }, + }, + }); + + let location = null; + let workerLocation = null; + + function setLocationHref(href) { + location = new Location(href, locationConstructorKey); + workerLocation = new WorkerLocation(href, locationConstructorKey); + } + + window.__bootstrap.location = { + locationConstructorDescriptor: { + value: Location, + configurable: true, + writable: true, + }, + workerLocationConstructorDescriptor: { + value: WorkerLocation, + configurable: true, + writable: true, + }, + locationDescriptor: { + get() { + if (location == null) { + throw new ReferenceError( + `Access to "location", run again with --location .`, + ); + } + return location; + }, + set() { + throw new DOMException(`Cannot set "location".`, "NotSupportedError"); + }, + enumerable: true, + }, + workerLocationDescriptor: { + get() { + if (workerLocation == null) { + throw new Error( + `Assertion: "globalThis.location" must be defined in a worker.`, + ); + } + return workerLocation; + }, + configurable: true, + enumerable: true, + }, + setLocationHref, + getLocationHref() { + return location?.href; + }, + }; +})(this); diff --git a/ext/web/13_message_port.js b/ext/web/13_message_port.js new file mode 100644 index 000000000..d5014fdb9 --- /dev/null +++ b/ext/web/13_message_port.js @@ -0,0 +1,286 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
+ +// @ts-check +/// +/// +/// +/// + +"use strict"; + +((window) => { + const core = window.Deno.core; + const webidl = window.__bootstrap.webidl; + const { setEventTargetData } = window.__bootstrap.eventTarget; + const { defineEventHandler } = window.__bootstrap.event; + const { DOMException } = window.__bootstrap.domException; + const { + ObjectSetPrototypeOf, + Symbol, + SymbolFor, + SymbolToStringTag, + TypeError, + } = window.__bootstrap.primordials; + + class MessageChannel { + /** @type {MessagePort} */ + #port1; + /** @type {MessagePort} */ + #port2; + + constructor() { + this[webidl.brand] = webidl.brand; + const [port1Id, port2Id] = opCreateEntangledMessagePort(); + const port1 = createMessagePort(port1Id); + const port2 = createMessagePort(port2Id); + this.#port1 = port1; + this.#port2 = port2; + } + + get port1() { + webidl.assertBranded(this, MessageChannel); + return this.#port1; + } + + get port2() { + webidl.assertBranded(this, MessageChannel); + return this.#port2; + } + + [SymbolFor("Deno.inspect")](inspect) { + return `MessageChannel ${ + inspect({ port1: this.port1, port2: this.port2 }) + }`; + } + + get [SymbolToStringTag]() { + return "MessageChannel"; + } + } + + webidl.configurePrototype(MessageChannel); + + const _id = Symbol("id"); + const _enabled = Symbol("enabled"); + + /** + * @param {number} id + * @returns {MessagePort} + */ + function createMessagePort(id) { + const port = core.createHostObject(); + ObjectSetPrototypeOf(port, MessagePort.prototype); + port[webidl.brand] = webidl.brand; + setEventTargetData(port); + port[_id] = id; + return port; + } + + class MessagePort extends EventTarget { + /** @type {number | null} */ + [_id] = null; + /** @type {boolean} */ + [_enabled] = false; + + constructor() { + super(); + webidl.illegalConstructor(); + } + + /** + * @param {any} message + * @param {object[] | StructuredSerializeOptions} transferOrOptions + */ + postMessage(message, transferOrOptions = {}) { + webidl.assertBranded(this, MessagePort); + const prefix = "Failed to execute 'postMessage' on 'MessagePort'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + message = webidl.converters.any(message); + let options; + if ( + webidl.type(transferOrOptions) === "Object" && + transferOrOptions !== undefined && + transferOrOptions[Symbol.iterator] !== undefined + ) { + const transfer = webidl.converters["sequence"]( + transferOrOptions, + { prefix, context: "Argument 2" }, + ); + options = { transfer }; + } else { + options = webidl.converters.StructuredSerializeOptions( + transferOrOptions, + { + prefix, + context: "Argument 2", + }, + ); + } + const { transfer } = options; + if (transfer.includes(this)) { + throw new DOMException("Can not tranfer self", "DataCloneError"); + } + const data = serializeJsMessageData(message, transfer); + if (this[_id] === null) return; + core.opSync("op_message_port_post_message", this[_id], data); + } + + start() { + webidl.assertBranded(this, MessagePort); + if (this[_enabled]) return; + (async () => { + this[_enabled] = true; + while (true) { + if (this[_id] === null) break; + const data = await core.opAsync( + "op_message_port_recv_message", + this[_id], + ); + if (data === null) break; + let message, transfer; + try { + const v = deserializeJsMessageData(data); + message = v[0]; + transfer = v[1]; + } catch (err) { + const event = new MessageEvent("messageerror", { data: err }); + this.dispatchEvent(event); + return; + } + const event = new MessageEvent("message", { + data: message, + ports: transfer, + }); + 
this.dispatchEvent(event); + } + this[_enabled] = false; + })(); + } + + close() { + webidl.assertBranded(this, MessagePort); + if (this[_id] !== null) { + core.close(this[_id]); + this[_id] = null; + } + } + + get [SymbolToStringTag]() { + return "MessagePort"; + } + } + + defineEventHandler(MessagePort.prototype, "message", function (self) { + self.start(); + }); + defineEventHandler(MessagePort.prototype, "messageerror"); + + webidl.configurePrototype(MessagePort); + + /** + * @returns {[number, number]} + */ + function opCreateEntangledMessagePort() { + return core.opSync("op_message_port_create_entangled"); + } + + /** + * @param {globalThis.__bootstrap.messagePort.MessageData} messageData + * @returns {[any, object[]]} + */ + function deserializeJsMessageData(messageData) { + /** @type {object[]} */ + const transferables = []; + + for (const transferable of messageData.transferables) { + switch (transferable.kind) { + case "messagePort": { + const port = createMessagePort(transferable.data); + transferables.push(port); + break; + } + default: + throw new TypeError("Unreachable"); + } + } + + const data = core.deserialize(messageData.data, { + hostObjects: transferables, + }); + + return [data, transferables]; + } + + /** + * @param {any} data + * @param {object[]} tranferables + * @returns {globalThis.__bootstrap.messagePort.MessageData} + */ + function serializeJsMessageData(data, tranferables) { + let serializedData; + try { + serializedData = core.serialize(data, { hostObjects: tranferables }); + } catch (err) { + throw new DOMException(err.message, "DataCloneError"); + } + + /** @type {globalThis.__bootstrap.messagePort.Transferable[]} */ + const serializedTransferables = []; + + for (const transferable of tranferables) { + if (transferable instanceof MessagePort) { + webidl.assertBranded(transferable, MessagePort); + const id = transferable[_id]; + if (id === null) { + throw new DOMException( + "Can not transfer disentangled message port", + "DataCloneError", + ); + } + transferable[_id] = null; + serializedTransferables.push({ kind: "messagePort", data: id }); + } else { + throw new DOMException("Value not transferable", "DataCloneError"); + } + } + + return { + data: serializedData, + transferables: serializedTransferables, + }; + } + + webidl.converters.StructuredSerializeOptions = webidl + .createDictionaryConverter( + "StructuredSerializeOptions", + [ + { + key: "transfer", + converter: webidl.converters["sequence"], + get defaultValue() { + return []; + }, + }, + ], + ); + + function structuredClone(value, options) { + const prefix = "Failed to execute 'structuredClone'"; + webidl.requiredArguments(arguments.length, 1, { prefix }); + options = webidl.converters.StructuredSerializeOptions(options, { + prefix, + context: "Argument 2", + }); + const messageData = serializeJsMessageData(value, options.transfer); + const [data] = deserializeJsMessageData(messageData); + return data; + } + + window.__bootstrap.messagePort = { + MessageChannel, + MessagePort, + deserializeJsMessageData, + serializeJsMessageData, + structuredClone, + }; +})(globalThis); diff --git a/ext/web/Cargo.toml b/ext/web/Cargo.toml new file mode 100644 index 000000000..6160ef89f --- /dev/null +++ b/ext/web/Cargo.toml @@ -0,0 +1,26 @@ +# Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
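A short usage sketch for the message-port module above (illustrative only; it assumes the `op_message_port_*` ops are registered and uses the `__bootstrap.messagePort` exports directly rather than any runtime globals):

```js
const { MessageChannel, structuredClone } = globalThis.__bootstrap.messagePort;

// The two ports are entangled: a message posted on one side is delivered on
// the other. Assigning `onmessage` implicitly calls start() on the port.
const { port1, port2 } = new MessageChannel();
port2.onmessage = (event) => {
  console.log("received:", event.data);
  port2.close();
};
port1.postMessage({ hello: "world" });

// structuredClone round-trips a value through serialize/deserialize, so the
// result is a deep copy with no shared object identity.
const copy = structuredClone({ nested: [1, 2, 3] });
console.log(copy);
```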
+
+[package]
+name = "deno_web"
+version = "0.45.0"
+authors = ["the Deno authors"]
+edition = "2018"
+license = "MIT"
+readme = "README.md"
+repository = "https://github.com/denoland/deno"
+description = "Collection of Web APIs"
+
+[lib]
+path = "lib.rs"
+
+[dependencies]
+async-trait = "0.1.50"
+base64 = "0.13.0"
+deno_core = { version = "0.96.0", path = "../../core" }
+encoding_rs = "0.8.28"
+serde = "1.0"
+tokio = { version = "1.8.1", features = ["full"] }
+uuid = { version = "0.8.2", features = ["v4", "serde"] }
+
+[dev-dependencies]
+futures = "0.3.15"
diff --git a/ext/web/README.md b/ext/web/README.md
new file mode 100644
index 000000000..d847ae52e
--- /dev/null
+++ b/ext/web/README.md
@@ -0,0 +1,6 @@
+# deno web
+
+Op crate that implements Event, TextEncoder, TextDecoder and File API
+(https://w3c.github.io/FileAPI).
+
+Testing for text encoding is done via WPT in cli/.
diff --git a/ext/web/blob.rs b/ext/web/blob.rs
new file mode 100644
index 000000000..0f27553c7
--- /dev/null
+++ b/ext/web/blob.rs
@@ -0,0 +1,265 @@
+use async_trait::async_trait;
+use deno_core::error::type_error;
+use deno_core::parking_lot::Mutex;
+use deno_core::url::Url;
+use deno_core::ZeroCopyBuf;
+use serde::Deserialize;
+use std::cell::RefCell;
+use std::collections::HashMap;
+use std::fmt::Debug;
+use std::rc::Rc;
+use std::sync::Arc;
+
+use deno_core::error::AnyError;
+use uuid::Uuid;
+
+use crate::Location;
+
+pub type PartMap = HashMap<Uuid, Arc<Box<dyn BlobPart + Send + Sync>>>;
+
+#[derive(Clone, Default, Debug)]
+pub struct BlobStore {
+  parts: Arc<Mutex<PartMap>>,
+  object_urls: Arc<Mutex<HashMap<Url, Arc<Blob>>>>,
+}
+
+impl BlobStore {
+  pub fn insert_part(&self, part: Box<dyn BlobPart + Send + Sync>) -> Uuid {
+    let id = Uuid::new_v4();
+    let mut parts = self.parts.lock();
+    parts.insert(id, Arc::new(part));
+    id
+  }
+
+  pub fn get_part(
+    &self,
+    id: &Uuid,
+  ) -> Option<Arc<Box<dyn BlobPart + Send + Sync>>> {
+    let parts = self.parts.lock();
+    let part = parts.get(id);
+    part.cloned()
+  }
+
+  pub fn remove_part(
+    &self,
+    id: &Uuid,
+  ) -> Option<Arc<Box<dyn BlobPart + Send + Sync>>> {
+    let mut parts = self.parts.lock();
+    parts.remove(id)
+  }
+
+  pub fn get_object_url(
+    &self,
+    mut url: Url,
+  ) -> Result<Option<Arc<Blob>>, AnyError> {
+    let blob_store = self.object_urls.lock();
+    url.set_fragment(None);
+    Ok(blob_store.get(&url).cloned())
+  }
+
+  pub fn insert_object_url(
+    &self,
+    blob: Blob,
+    maybe_location: Option<Url>,
+  ) -> Url {
+    let origin = if let Some(location) = maybe_location {
+      location.origin().ascii_serialization()
+    } else {
+      "null".to_string()
+    };
+    let id = Uuid::new_v4();
+    let url = Url::parse(&format!("blob:{}/{}", origin, id)).unwrap();
+
+    let mut blob_store = self.object_urls.lock();
+    blob_store.insert(url.clone(), Arc::new(blob));
+
+    url
+  }
+
+  pub fn remove_object_url(&self, url: &Url) {
+    let mut blob_store = self.object_urls.lock();
+    blob_store.remove(url);
+  }
+}
+
+#[derive(Debug)]
+pub struct Blob {
+  pub media_type: String,
+
+  pub parts: Vec<Arc<Box<dyn BlobPart + Send + Sync>>>,
+}
+
+impl Blob {
+  // TODO(lucacsonato): this should be a stream!
+  pub async fn read_all(&self) -> Result<Vec<u8>, AnyError> {
+    let size = self.size();
+    let mut bytes = Vec::with_capacity(size);
+
+    for part in &self.parts {
+      let chunk = part.read().await?;
+      bytes.extend_from_slice(chunk);
+    }
+
+    assert_eq!(bytes.len(), size);
+
+    Ok(bytes)
+  }
+
+  fn size(&self) -> usize {
+    let mut total = 0;
+    for part in &self.parts {
+      total += part.size()
+    }
+    total
+  }
+}
+
+#[async_trait]
+pub trait BlobPart: Debug {
+  // TODO(lucacsonato): this should be a stream!
+  async fn read(&self) -> Result<&[u8], AnyError>;
+  fn size(&self) -> usize;
+}
+
+#[derive(Debug)]
+pub struct InMemoryBlobPart(Vec<u8>);
+
+impl From<Vec<u8>> for InMemoryBlobPart {
+  fn from(vec: Vec<u8>) -> Self {
+    Self(vec)
+  }
+}
+
+#[async_trait]
+impl BlobPart for InMemoryBlobPart {
+  async fn read(&self) -> Result<&[u8], AnyError> {
+    Ok(&self.0)
+  }
+
+  fn size(&self) -> usize {
+    self.0.len()
+  }
+}
+
+#[derive(Debug)]
+pub struct SlicedBlobPart {
+  part: Arc<Box<dyn BlobPart + Send + Sync>>,
+  start: usize,
+  len: usize,
+}
+
+#[async_trait]
+impl BlobPart for SlicedBlobPart {
+  async fn read(&self) -> Result<&[u8], AnyError> {
+    let original = self.part.read().await?;
+    Ok(&original[self.start..self.start + self.len])
+  }
+
+  fn size(&self) -> usize {
+    self.len
+  }
+}
+
+pub fn op_blob_create_part(
+  state: &mut deno_core::OpState,
+  data: ZeroCopyBuf,
+  _: (),
+) -> Result<Uuid, AnyError> {
+  let blob_store = state.borrow::<BlobStore>();
+  let part = InMemoryBlobPart(data.to_vec());
+  let id = blob_store.insert_part(Box::new(part));
+  Ok(id)
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SliceOptions {
+  start: usize,
+  len: usize,
+}
+
+pub fn op_blob_slice_part(
+  state: &mut deno_core::OpState,
+  id: Uuid,
+  options: SliceOptions,
+) -> Result<Uuid, AnyError> {
+  let blob_store = state.borrow::<BlobStore>();
+  let part = blob_store
+    .get_part(&id)
+    .ok_or_else(|| type_error("Blob part not found"))?;
+
+  let SliceOptions { start, len } = options;
+
+  let size = part.size();
+  if start + len > size {
+    return Err(type_error(
+      "start + len can not be larger than blob part size",
+    ));
+  }
+
+  let sliced_part = SlicedBlobPart { part, start, len };
+  let id = blob_store.insert_part(Box::new(sliced_part));
+
+  Ok(id)
+}
+
+pub async fn op_blob_read_part(
+  state: Rc<RefCell<deno_core::OpState>>,
+  id: Uuid,
+  _: (),
+) -> Result<ZeroCopyBuf, AnyError> {
+  let part = {
+    let state = state.borrow();
+    let blob_store = state.borrow::<BlobStore>();
+    blob_store.get_part(&id)
+  }
+  .ok_or_else(|| type_error("Blob part not found"))?;
+  let buf = part.read().await?;
+  Ok(ZeroCopyBuf::from(buf.to_vec()))
+}
+
+pub fn op_blob_remove_part(
+  state: &mut deno_core::OpState,
+  id: Uuid,
+  _: (),
+) -> Result<(), AnyError> {
+  let blob_store = state.borrow::<BlobStore>();
+  blob_store.remove_part(&id);
+  Ok(())
+}
+
+pub fn op_blob_create_object_url(
+  state: &mut deno_core::OpState,
+  media_type: String,
+  part_ids: Vec<Uuid>,
+) -> Result<String, AnyError> {
+  let mut parts = Vec::with_capacity(part_ids.len());
+  let blob_store = state.borrow::<BlobStore>();
+  for part_id in part_ids {
+    let part = blob_store
+      .get_part(&part_id)
+      .ok_or_else(|| type_error("Blob part not found"))?;
+    parts.push(part);
+  }
+
+  let blob = Blob { media_type, parts };
+
+  let maybe_location = state.try_borrow::<Location>();
+  let blob_store = state.borrow::<BlobStore>();
+
+  let url = blob_store
+    .insert_object_url(blob, maybe_location.map(|location| location.0.clone()));
+
+  Ok(url.to_string())
+}
+
+pub fn op_blob_revoke_object_url(
+  state: &mut deno_core::OpState,
+  url: String,
+  _: (),
+) -> Result<(), AnyError> {
+  let url = Url::parse(&url)?;
+  let blob_store = state.borrow::<BlobStore>();
+  blob_store.remove_object_url(&url);
+  Ok(())
+}
diff --git a/ext/web/internal.d.ts b/ext/web/internal.d.ts
new file mode 100644
index 000000000..3a2a0c1be
--- /dev/null
+++ b/ext/web/internal.d.ts
@@ -0,0 +1,98 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
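For context, the blob ops above are the storage half of the crate's File API support: bytes live in the Rust-side `BlobStore`, keyed by UUID, while the JS `Blob`/`File` classes and the object-URL helpers elsewhere in this patch call into them. A rough sketch of the user-visible behavior they back (illustrative; the exact JS-to-op wiring is not reproduced here):

```js
const blob = new Blob(["hello world"], { type: "text/plain" });

// slice() maps to SlicedBlobPart: a view over the original part's bytes, so
// no copy is made until the data is actually read.
const hello = blob.slice(0, 5, "text/plain");
console.log(await hello.text()); // "hello"

// createObjectURL stores the blob in the BlobStore and mints a blob: URL whose
// origin comes from --location, or "null" when no location is set.
const url = URL.createObjectURL(blob);
URL.revokeObjectURL(url); // drops the BlobStore entry again
```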
+ +/// +/// + +declare namespace globalThis { + declare namespace __bootstrap { + declare var infra: { + collectSequenceOfCodepoints( + input: string, + position: number, + condition: (char: string) => boolean, + ): { + result: string; + position: number; + }; + ASCII_DIGIT: string[]; + ASCII_UPPER_ALPHA: string[]; + ASCII_LOWER_ALPHA: string[]; + ASCII_ALPHA: string[]; + ASCII_ALPHANUMERIC: string[]; + HTTP_TAB_OR_SPACE: string[]; + HTTP_WHITESPACE: string[]; + HTTP_TOKEN_CODE_POINT: string[]; + HTTP_TOKEN_CODE_POINT_RE: RegExp; + HTTP_QUOTED_STRING_TOKEN_POINT: string[]; + HTTP_QUOTED_STRING_TOKEN_POINT_RE: RegExp; + HTTP_TAB_OR_SPACE_PREFIX_RE: RegExp; + HTTP_TAB_OR_SPACE_SUFFIX_RE: RegExp; + HTTP_WHITESPACE_PREFIX_RE: RegExp; + HTTP_WHITESPACE_SUFFIX_RE: RegExp; + regexMatcher(chars: string[]): string; + byteUpperCase(s: string): string; + byteLowerCase(s: string): string; + collectHttpQuotedString( + input: string, + position: number, + extractValue: boolean, + ): { + result: string; + position: number; + }; + forgivingBase64Encode(data: Uint8Array): string; + forgivingBase64Decode(data: string): Uint8Array; + }; + + declare var domException: { + DOMException: typeof DOMException; + }; + + declare namespace mimesniff { + declare interface MimeType { + type: string; + subtype: string; + parameters: Map; + } + declare function parseMimeType(input: string): MimeType | null; + declare function essence(mimeType: MimeType): string; + declare function serializeMimeType(mimeType: MimeType): string; + } + + declare var eventTarget: { + EventTarget: typeof EventTarget; + }; + + declare var location: { + getLocationHref(): string | undefined; + }; + + declare var base64: { + atob(data: string): string; + btoa(data: string): string; + }; + + declare var file: { + getParts(blob: Blob): string[]; + Blob: typeof Blob; + File: typeof File; + }; + + declare var streams: { + ReadableStream: typeof ReadableStream; + isReadableStreamDisturbed(stream: ReadableStream): boolean; + createProxy(stream: ReadableStream): ReadableStream; + }; + + declare namespace messagePort { + declare type Transferable = { + kind: "messagePort"; + data: number; + }; + declare interface MessageData { + data: Uint8Array; + transferables: Transferable[]; + } + } + } +} diff --git a/ext/web/lib.deno_web.d.ts b/ext/web/lib.deno_web.d.ts new file mode 100644 index 000000000..3f110353f --- /dev/null +++ b/ext/web/lib.deno_web.d.ts @@ -0,0 +1,752 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +// deno-lint-ignore-file no-explicit-any + +/// +/// + +declare class DOMException extends Error { + constructor(message?: string, name?: string); + readonly name: string; + readonly message: string; + readonly code: number; +} + +interface EventInit { + bubbles?: boolean; + cancelable?: boolean; + composed?: boolean; +} + +/** An event which takes place in the DOM. */ +declare class Event { + constructor(type: string, eventInitDict?: EventInit); + /** Returns true or false depending on how event was initialized. True if + * event goes through its target's ancestors in reverse tree order, and + * false otherwise. */ + readonly bubbles: boolean; + cancelBubble: boolean; + /** Returns true or false depending on how event was initialized. Its return + * value does not always carry meaning, but true can indicate that part of the + * operation during which event was dispatched, can be canceled by invoking + * the preventDefault() method. 
*/ + readonly cancelable: boolean; + /** Returns true or false depending on how event was initialized. True if + * event invokes listeners past a ShadowRoot node that is the root of its + * target, and false otherwise. */ + readonly composed: boolean; + /** Returns the object whose event listener's callback is currently being + * invoked. */ + readonly currentTarget: EventTarget | null; + /** Returns true if preventDefault() was invoked successfully to indicate + * cancellation, and false otherwise. */ + readonly defaultPrevented: boolean; + /** Returns the event's phase, which is one of NONE, CAPTURING_PHASE, + * AT_TARGET, and BUBBLING_PHASE. */ + readonly eventPhase: number; + /** Returns true if event was dispatched by the user agent, and false + * otherwise. */ + readonly isTrusted: boolean; + /** Returns the object to which event is dispatched (its target). */ + readonly target: EventTarget | null; + /** Returns the event's timestamp as the number of milliseconds measured + * relative to the time origin. */ + readonly timeStamp: number; + /** Returns the type of event, e.g. "click", "hashchange", or "submit". */ + readonly type: string; + /** Returns the invocation target objects of event's path (objects on which + * listeners will be invoked), except for any nodes in shadow trees of which + * the shadow root's mode is "closed" that are not reachable from event's + * currentTarget. */ + composedPath(): EventTarget[]; + /** If invoked when the cancelable attribute value is true, and while + * executing a listener for the event with passive set to false, signals to + * the operation that caused event to be dispatched that it needs to be + * canceled. */ + preventDefault(): void; + /** Invoking this method prevents event from reaching any registered event + * listeners after the current one finishes running and, when dispatched in a + * tree, also prevents event from reaching any other objects. */ + stopImmediatePropagation(): void; + /** When dispatched in a tree, invoking this method prevents event from + * reaching any objects other than the current object. */ + stopPropagation(): void; + readonly AT_TARGET: number; + readonly BUBBLING_PHASE: number; + readonly CAPTURING_PHASE: number; + readonly NONE: number; + static readonly AT_TARGET: number; + static readonly BUBBLING_PHASE: number; + static readonly CAPTURING_PHASE: number; + static readonly NONE: number; +} + +/** + * EventTarget is a DOM interface implemented by objects that can receive events + * and may have listeners for them. + */ +declare class EventTarget { + /** Appends an event listener for events whose type attribute value is type. + * The callback argument sets the callback that will be invoked when the event + * is dispatched. + * + * The options argument sets listener-specific options. For compatibility this + * can be a boolean, in which case the method behaves exactly as if the value + * was specified as options's capture. + * + * When set to true, options's capture prevents callback from being invoked + * when the event's eventPhase attribute value is BUBBLING_PHASE. When false + * (or not present), callback will not be invoked when event's eventPhase + * attribute value is CAPTURING_PHASE. Either way, callback will be invoked if + * event's eventPhase attribute value is AT_TARGET. + * + * When set to true, options's passive indicates that the callback will not + * cancel the event by invoking preventDefault(). This is used to enable + * performance optimizations described in § 2.8 Observing event listeners. 
+ * + * When set to true, options's once indicates that the callback will only be + * invoked once after which the event listener will be removed. + * + * The event listener is appended to target's event listener list and is not + * appended if it has the same type, callback, and capture. */ + addEventListener( + type: string, + listener: EventListenerOrEventListenerObject | null, + options?: boolean | AddEventListenerOptions, + ): void; + /** Dispatches a synthetic event event to target and returns true if either + * event's cancelable attribute value is false or its preventDefault() method + * was not invoked, and false otherwise. */ + dispatchEvent(event: Event): boolean; + /** Removes the event listener in target's event listener list with the same + * type, callback, and options. */ + removeEventListener( + type: string, + callback: EventListenerOrEventListenerObject | null, + options?: EventListenerOptions | boolean, + ): void; +} + +interface EventListener { + (evt: Event): void | Promise; +} + +interface EventListenerObject { + handleEvent(evt: Event): void | Promise; +} + +declare type EventListenerOrEventListenerObject = + | EventListener + | EventListenerObject; + +interface AddEventListenerOptions extends EventListenerOptions { + once?: boolean; + passive?: boolean; +} + +interface EventListenerOptions { + capture?: boolean; +} + +interface ProgressEventInit extends EventInit { + lengthComputable?: boolean; + loaded?: number; + total?: number; +} + +/** Events measuring progress of an underlying process, like an HTTP request + * (for an XMLHttpRequest, or the loading of the underlying resource of an + * ,