summaryrefslogtreecommitdiff
path: root/cli/js/web
diff options
context:
space:
mode:
Diffstat (limited to 'cli/js/web')
-rw-r--r--cli/js/web/base64.ts150
-rw-r--r--cli/js/web/blob.ts185
-rw-r--r--cli/js/web/body.ts340
-rw-r--r--cli/js/web/custom_event.ts48
-rw-r--r--cli/js/web/decode_utf8.ts134
-rw-r--r--cli/js/web/dom_file.ts24
-rw-r--r--cli/js/web/dom_iterable.ts85
-rw-r--r--cli/js/web/dom_types.ts755
-rw-r--r--cli/js/web/dom_util.ts85
-rw-r--r--cli/js/web/encode_utf8.ts80
-rw-r--r--cli/js/web/event.ts348
-rw-r--r--cli/js/web/event_target.ts500
-rw-r--r--cli/js/web/fetch.ts584
-rw-r--r--cli/js/web/form_data.ts149
-rw-r--r--cli/js/web/headers.ts152
-rw-r--r--cli/js/web/location.ts51
-rw-r--r--cli/js/web/request.ts159
-rw-r--r--cli/js/web/streams/mod.ts20
-rw-r--r--cli/js/web/streams/pipe-to.ts237
-rw-r--r--cli/js/web/streams/queue-mixin.ts84
-rw-r--r--cli/js/web/streams/queue.ts65
-rw-r--r--cli/js/web/streams/readable-byte-stream-controller.ts214
-rw-r--r--cli/js/web/streams/readable-internals.ts1357
-rw-r--r--cli/js/web/streams/readable-stream-byob-reader.ts93
-rw-r--r--cli/js/web/streams/readable-stream-byob-request.ts60
-rw-r--r--cli/js/web/streams/readable-stream-default-controller.ts139
-rw-r--r--cli/js/web/streams/readable-stream-default-reader.ts75
-rw-r--r--cli/js/web/streams/readable-stream.ts391
-rw-r--r--cli/js/web/streams/shared-internals.ts306
-rw-r--r--cli/js/web/streams/strategies.ts39
-rw-r--r--cli/js/web/streams/transform-internals.ts371
-rw-r--r--cli/js/web/streams/transform-stream-default-controller.ts58
-rw-r--r--cli/js/web/streams/transform-stream.ts147
-rw-r--r--cli/js/web/streams/writable-internals.ts800
-rw-r--r--cli/js/web/streams/writable-stream-default-controller.ts101
-rw-r--r--cli/js/web/streams/writable-stream-default-writer.ts136
-rw-r--r--cli/js/web/streams/writable-stream.ts118
-rw-r--r--cli/js/web/text_encoding.ts461
-rw-r--r--cli/js/web/url.ts396
-rw-r--r--cli/js/web/url_search_params.ts311
40 files changed, 9808 insertions, 0 deletions
diff --git a/cli/js/web/base64.ts b/cli/js/web/base64.ts
new file mode 100644
index 000000000..4d30e00f1
--- /dev/null
+++ b/cli/js/web/base64.ts
@@ -0,0 +1,150 @@
+// Forked from https://github.com/beatgammit/base64-js
+// Copyright (c) 2014 Jameson Little. MIT License.
+
// Base64 alphabet tables: `lookup` maps a 6-bit value (0-63) to its base64
// character; `revLookup` maps a character code back to its 6-bit value.
const lookup: string[] = [];
const revLookup: number[] = [];

const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
for (let i = 0, len = code.length; i < len; ++i) {
  lookup[i] = code[i];
  revLookup[code.charCodeAt(i)] = i;
}

// Support decoding URL-safe base64 strings, as Node.js does.
// See: https://en.wikipedia.org/wiki/Base64#URL_applications
revLookup["-".charCodeAt(0)] = 62;
revLookup["_".charCodeAt(0)] = 63;
+
+function getLens(b64: string): [number, number] {
+ const len = b64.length;
+
+ if (len % 4 > 0) {
+ throw new Error("Invalid string. Length must be a multiple of 4");
+ }
+
+ // Trim off extra bytes after placeholder bytes are found
+ // See: https://github.com/beatgammit/base64-js/issues/42
+ let validLen = b64.indexOf("=");
+ if (validLen === -1) validLen = len;
+
+ const placeHoldersLen = validLen === len ? 0 : 4 - (validLen % 4);
+
+ return [validLen, placeHoldersLen];
+}
+
+// base64 is 4/3 + up to two characters of the original data
+export function byteLength(b64: string): number {
+ const lens = getLens(b64);
+ const validLen = lens[0];
+ const placeHoldersLen = lens[1];
+ return ((validLen + placeHoldersLen) * 3) / 4 - placeHoldersLen;
+}
+
+function _byteLength(
+ b64: string,
+ validLen: number,
+ placeHoldersLen: number
+): number {
+ return ((validLen + placeHoldersLen) * 3) / 4 - placeHoldersLen;
+}
+
/**
 * Decodes a base64 string (standard or URL-safe alphabet) into a Uint8Array.
 *
 * @throws Error (via getLens) when the length is not a multiple of 4.
 */
export function toByteArray(b64: string): Uint8Array {
  let tmp;
  const lens = getLens(b64);
  const validLen = lens[0];
  const placeHoldersLen = lens[1];

  const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));

  let curByte = 0;

  // if there are placeholders, only get up to the last complete 4 chars
  const len = placeHoldersLen > 0 ? validLen - 4 : validLen;

  let i;
  for (i = 0; i < len; i += 4) {
    // Pack four 6-bit values into one 24-bit group, then split into 3 bytes.
    tmp =
      (revLookup[b64.charCodeAt(i)] << 18) |
      (revLookup[b64.charCodeAt(i + 1)] << 12) |
      (revLookup[b64.charCodeAt(i + 2)] << 6) |
      revLookup[b64.charCodeAt(i + 3)];
    arr[curByte++] = (tmp >> 16) & 0xff;
    arr[curByte++] = (tmp >> 8) & 0xff;
    arr[curByte++] = tmp & 0xff;
  }

  // Two padding chars: the final quantum encodes exactly one byte.
  if (placeHoldersLen === 2) {
    tmp =
      (revLookup[b64.charCodeAt(i)] << 2) |
      (revLookup[b64.charCodeAt(i + 1)] >> 4);
    arr[curByte++] = tmp & 0xff;
  }

  // One padding char: the final quantum encodes exactly two bytes.
  if (placeHoldersLen === 1) {
    tmp =
      (revLookup[b64.charCodeAt(i)] << 10) |
      (revLookup[b64.charCodeAt(i + 1)] << 4) |
      (revLookup[b64.charCodeAt(i + 2)] >> 2);
    arr[curByte++] = (tmp >> 8) & 0xff;
    arr[curByte++] = tmp & 0xff;
  }

  return arr;
}
+
+function tripletToBase64(num: number): string {
+ return (
+ lookup[(num >> 18) & 0x3f] +
+ lookup[(num >> 12) & 0x3f] +
+ lookup[(num >> 6) & 0x3f] +
+ lookup[num & 0x3f]
+ );
+}
+
+function encodeChunk(uint8: Uint8Array, start: number, end: number): string {
+ let tmp;
+ const output = [];
+ for (let i = start; i < end; i += 3) {
+ tmp =
+ ((uint8[i] << 16) & 0xff0000) +
+ ((uint8[i + 1] << 8) & 0xff00) +
+ (uint8[i + 2] & 0xff);
+ output.push(tripletToBase64(tmp));
+ }
+ return output.join("");
+}
+
/**
 * Encodes a Uint8Array as a standard base64 string (with `=` padding).
 *
 * The input is processed in chunks of at most 16383 bytes to keep the
 * intermediate strings passed to join reasonably small.
 */
export function fromByteArray(uint8: Uint8Array): string {
  let tmp;
  const len = uint8.length;
  const extraBytes = len % 3; // if we have 1 byte left, pad 2 bytes
  const parts = [];
  const maxChunkLength = 16383; // must be multiple of 3

  // go through the array every three bytes, we'll deal with trailing stuff later
  for (let i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
    parts.push(
      encodeChunk(
        uint8,
        i,
        i + maxChunkLength > len2 ? len2 : i + maxChunkLength
      )
    );
  }

  // pad the end with zeros, but make sure to not forget the extra bytes
  if (extraBytes === 1) {
    // One trailing byte -> two base64 chars + "==".
    tmp = uint8[len - 1];
    parts.push(lookup[tmp >> 2] + lookup[(tmp << 4) & 0x3f] + "==");
  } else if (extraBytes === 2) {
    // Two trailing bytes -> three base64 chars + "=".
    tmp = (uint8[len - 2] << 8) + uint8[len - 1];
    parts.push(
      lookup[tmp >> 10] +
        lookup[(tmp >> 4) & 0x3f] +
        lookup[(tmp << 2) & 0x3f] +
        "="
    );
  }

  return parts.join("");
}
diff --git a/cli/js/web/blob.ts b/cli/js/web/blob.ts
new file mode 100644
index 000000000..896337b10
--- /dev/null
+++ b/cli/js/web/blob.ts
@@ -0,0 +1,185 @@
+// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
+import * as domTypes from "./dom_types.ts";
+import { hasOwnProperty } from "../util.ts";
+import { TextEncoder } from "./text_encoding.ts";
+import { build } from "../build.ts";
+
+export const bytesSymbol = Symbol("bytes");
+
+export function containsOnlyASCII(str: string): boolean {
+ if (typeof str !== "string") {
+ return false;
+ }
+ return /^[\x00-\x7F]*$/.test(str);
+}
+
/**
 * Replaces every line ending in `s` ("\r", "\n", or "\r\n") with the host
 * OS convention: "\r\n" on Windows, "\n" elsewhere. Used when a Blob is
 * constructed with `ending: "native"`.
 */
function convertLineEndingsToNative(s: string): string {
  const nativeLineEnd = build.os == "win" ? "\r\n" : "\n";

  let position = 0;

  let collectionResult = collectSequenceNotCRLF(s, position);

  let token = collectionResult.collected;
  position = collectionResult.newPosition;

  let result = token;

  while (position < s.length) {
    const c = s.charAt(position);
    if (c == "\r") {
      result += nativeLineEnd;
      position++;
      // A "\r\n" pair collapses into a single native line ending.
      if (position < s.length && s.charAt(position) == "\n") {
        position++;
      }
    } else if (c == "\n") {
      position++;
      result += nativeLineEnd;
    }

    // Copy the next run of non-CR/LF characters verbatim.
    collectionResult = collectSequenceNotCRLF(s, position);

    token = collectionResult.collected;
    position = collectionResult.newPosition;

    result += token;
  }

  return result;
}
+
+function collectSequenceNotCRLF(
+ s: string,
+ position: number
+): { collected: string; newPosition: number } {
+ const start = position;
+ for (
+ let c = s.charAt(position);
+ position < s.length && !(c == "\r" || c == "\n");
+ c = s.charAt(++position)
+ );
+ return { collected: s.slice(start, position), newPosition: position };
+}
+
+function toUint8Arrays(
+ blobParts: domTypes.BlobPart[],
+ doNormalizeLineEndingsToNative: boolean
+): Uint8Array[] {
+ const ret: Uint8Array[] = [];
+ const enc = new TextEncoder();
+ for (const element of blobParts) {
+ if (typeof element === "string") {
+ let str = element;
+ if (doNormalizeLineEndingsToNative) {
+ str = convertLineEndingsToNative(element);
+ }
+ ret.push(enc.encode(str));
+ // eslint-disable-next-line @typescript-eslint/no-use-before-define
+ } else if (element instanceof DenoBlob) {
+ ret.push(element[bytesSymbol]);
+ } else if (element instanceof Uint8Array) {
+ ret.push(element);
+ } else if (element instanceof Uint16Array) {
+ const uint8 = new Uint8Array(element.buffer);
+ ret.push(uint8);
+ } else if (element instanceof Uint32Array) {
+ const uint8 = new Uint8Array(element.buffer);
+ ret.push(uint8);
+ } else if (ArrayBuffer.isView(element)) {
+ // Convert view to Uint8Array.
+ const uint8 = new Uint8Array(element.buffer);
+ ret.push(uint8);
+ } else if (element instanceof ArrayBuffer) {
+ // Create a new Uint8Array view for the given ArrayBuffer.
+ const uint8 = new Uint8Array(element);
+ ret.push(uint8);
+ } else {
+ ret.push(enc.encode(String(element)));
+ }
+ }
+ return ret;
+}
+
+function processBlobParts(
+ blobParts: domTypes.BlobPart[],
+ options: domTypes.BlobPropertyBag
+): Uint8Array {
+ const normalizeLineEndingsToNative = options.ending === "native";
+ // ArrayBuffer.transfer is not yet implemented in V8, so we just have to
+ // pre compute size of the array buffer and do some sort of static allocation
+ // instead of dynamic allocation.
+ const uint8Arrays = toUint8Arrays(blobParts, normalizeLineEndingsToNative);
+ const byteLength = uint8Arrays
+ .map((u8): number => u8.byteLength)
+ .reduce((a, b): number => a + b, 0);
+ const ab = new ArrayBuffer(byteLength);
+ const bytes = new Uint8Array(ab);
+
+ let courser = 0;
+ for (const u8 of uint8Arrays) {
+ bytes.set(u8, courser);
+ courser += u8.byteLength;
+ }
+
+ return bytes;
+}
+
// A WeakMap holding blob to byte array mapping, so internal code can reach
// a blob's bytes without going through the symbol-keyed slot.
// Being weak, it does not prevent blobs from being garbage collected.
export const blobBytesWeakMap = new WeakMap<domTypes.Blob, Uint8Array>();
+
/** Deno's implementation of the Blob web API, backed by an immutable
 * in-memory Uint8Array stored under the private `bytesSymbol` slot. */
export class DenoBlob implements domTypes.Blob {
  private readonly [bytesSymbol]: Uint8Array;
  readonly size: number = 0;
  readonly type: string = "";

  /** A blob object represents a file-like object of immutable, raw data. */
  constructor(
    blobParts?: domTypes.BlobPart[],
    options?: domTypes.BlobPropertyBag
  ) {
    // Zero-argument form: an empty blob with size 0 and type "".
    if (arguments.length === 0) {
      this[bytesSymbol] = new Uint8Array();
      return;
    }

    options = options || {};
    // Set ending property's default value to "transparent".
    if (!hasOwnProperty(options, "ending")) {
      options.ending = "transparent";
    }

    if (options.type && !containsOnlyASCII(options.type)) {
      const errMsg = "The 'type' property must consist of ASCII characters.";
      throw new SyntaxError(errMsg);
    }

    const bytes = processBlobParts(blobParts!, options);
    // Normalize options.type: a type containing any character outside
    // U+0020..U+007E becomes the empty string; otherwise it is lowercased.
    let type = options.type ? options.type : "";
    if (type.length) {
      for (let i = 0; i < type.length; ++i) {
        const char = type[i];
        if (char < "\u0020" || char > "\u007E") {
          type = "";
          break;
        }
      }
      type = type.toLowerCase();
    }
    // Set Blob object's properties.
    this[bytesSymbol] = bytes;
    this.size = bytes.byteLength;
    this.type = type;

    // Register bytes for internal private use.
    blobBytesWeakMap.set(this, bytes);
  }

  /** Returns a new blob over a copy of [start, end) of this blob's bytes
   * (Uint8Array#slice semantics); contentType defaults to this blob's
   * type when omitted or empty. */
  slice(start?: number, end?: number, contentType?: string): DenoBlob {
    return new DenoBlob([this[bytesSymbol].slice(start, end)], {
      type: contentType || this.type
    });
  }
}
diff --git a/cli/js/web/body.ts b/cli/js/web/body.ts
new file mode 100644
index 000000000..ed21fa0ec
--- /dev/null
+++ b/cli/js/web/body.ts
@@ -0,0 +1,340 @@
+import * as formData from "./form_data.ts";
+import * as blob from "./blob.ts";
+import * as encoding from "./text_encoding.ts";
+import * as headers from "./headers.ts";
+import * as domTypes from "./dom_types.ts";
+import { ReadableStream } from "./streams/mod.ts";
+
const { Headers } = headers;

// only namespace imports work for now, plucking out what we need
const { FormData } = formData;
const { TextEncoder, TextDecoder } = encoding;
// Blob and DenoBlob are the same class; both names are kept so call sites
// can use either the web API name or the internal one.
const Blob = blob.DenoBlob;
const DenoBlob = blob.DenoBlob;

type ReadableStreamReader = domTypes.ReadableStreamReader;
+
// Minimal controller surface needed when constructing a ReadableStream
// for a string body in the `body` getter below.
interface ReadableStreamController {
  enqueue(chunk: string | ArrayBuffer): void;
  close(): void;
}

// Every value accepted as the body of a Request/Response.
export type BodySource =
  | domTypes.Blob
  | domTypes.BufferSource
  | domTypes.FormData
  | domTypes.URLSearchParams
  | domTypes.ReadableStream
  | string;
+
+function validateBodyType(owner: Body, bodySource: BodySource): boolean {
+ if (
+ bodySource instanceof Int8Array ||
+ bodySource instanceof Int16Array ||
+ bodySource instanceof Int32Array ||
+ bodySource instanceof Uint8Array ||
+ bodySource instanceof Uint16Array ||
+ bodySource instanceof Uint32Array ||
+ bodySource instanceof Uint8ClampedArray ||
+ bodySource instanceof Float32Array ||
+ bodySource instanceof Float64Array
+ ) {
+ return true;
+ } else if (bodySource instanceof ArrayBuffer) {
+ return true;
+ } else if (typeof bodySource === "string") {
+ return true;
+ } else if (bodySource instanceof ReadableStream) {
+ return true;
+ } else if (bodySource instanceof FormData) {
+ return true;
+ } else if (!bodySource) {
+ return true; // null body is fine
+ }
+ throw new Error(
+ `Bad ${owner.constructor.name} body type: ${bodySource.constructor.name}`
+ );
+}
+
+function concatenate(...arrays: Uint8Array[]): ArrayBuffer {
+ let totalLength = 0;
+ for (const arr of arrays) {
+ totalLength += arr.length;
+ }
+ const result = new Uint8Array(totalLength);
+ let offset = 0;
+ for (const arr of arrays) {
+ result.set(arr, offset);
+ offset += arr.length;
+ }
+ return result.buffer as ArrayBuffer;
+}
+
/**
 * Drains a ReadableStreamReader to completion and concatenates everything
 * it produced into one ArrayBuffer. String chunks are UTF-8 encoded,
 * ArrayBuffer chunks are wrapped, nullish chunks are ignored. Rejects with
 * a string message for any other chunk type, or with the stream's own
 * error if a read fails.
 */
function bufferFromStream(stream: ReadableStreamReader): Promise<ArrayBuffer> {
  return new Promise((resolve, reject): void => {
    const parts: Uint8Array[] = [];
    const encoder = new TextEncoder();
    // recurse
    (function pump(): void {
      stream
        .read()
        .then(({ done, value }): void => {
          if (done) {
            return resolve(concatenate(...parts));
          }

          if (typeof value === "string") {
            parts.push(encoder.encode(value));
          } else if (value instanceof ArrayBuffer) {
            parts.push(new Uint8Array(value));
          } else if (!value) {
            // noop for undefined
          } else {
            reject("unhandled type on stream read");
          }

          // Keep reading until the stream reports done.
          return pump();
        })
        .catch((err): void => {
          reject(err);
        });
    })();
  });
}
+
+function getHeaderValueParams(value: string): Map<string, string> {
+ const params = new Map();
+ // Forced to do so for some Map constructor param mismatch
+ value
+ .split(";")
+ .slice(1)
+ .map((s): string[] => s.trim().split("="))
+ .filter((arr): boolean => arr.length > 1)
+ .map(([k, v]): [string, string] => [k, v.replace(/^"([^"]*)"$/, "$1")])
+ .forEach(([k, v]): Map<string, string> => params.set(k, v));
+ return params;
+}
+
+function hasHeaderValueOf(s: string, value: string): boolean {
+ return new RegExp(`^${value}[\t\s]*;?`).test(s);
+}
+
// Message thrown when a body is consumed (or cloned) a second time.
export const BodyUsedError =
  "Failed to execute 'clone' on 'Body': body is already used";
+
/**
 * Implementation of the Fetch spec's "body mixin", shared by Request and
 * Response: wraps a BodySource and exposes the one-shot consumption methods
 * (blob/formData/text/json/arrayBuffer) plus the `body` stream view.
 *
 * ref: https://fetch.spec.whatwg.org/#body-mixin
 */
export class Body implements domTypes.Body {
  // Lazily-created stream view of the body; see the `body` getter.
  protected _stream: domTypes.ReadableStream | null;

  constructor(protected _bodySource: BodySource, readonly contentType: string) {
    validateBodyType(this, _bodySource);
    this._bodySource = _bodySource;
    this.contentType = contentType;
    this._stream = null;
  }

  /** A ReadableStream over the body, or null when there is no body.
   * Stream sources are returned as-is; string sources get a single-chunk
   * stream built on first access and cached in `_stream`. Other source
   * types (typed arrays, FormData, ...) currently yield null here. */
  get body(): domTypes.ReadableStream | null {
    if (this._stream) {
      return this._stream;
    }

    if (this._bodySource instanceof ReadableStream) {
      // @ts-ignore
      this._stream = this._bodySource;
    }
    if (typeof this._bodySource === "string") {
      const bodySource = this._bodySource;
      this._stream = new ReadableStream({
        start(controller: ReadableStreamController): void {
          controller.enqueue(bodySource);
          controller.close();
        }
      });
    }
    return this._stream;
  }

  /** True once the body stream has been locked (i.e. a reader consumed it).
   * Note this reads the `body` getter, which may create the stream. */
  get bodyUsed(): boolean {
    if (this.body && this.body.locked) {
      return true;
    }
    return false;
  }

  /** Consumes the body and returns it wrapped in a Blob (no explicit type). */
  public async blob(): Promise<domTypes.Blob> {
    return new Blob([await this.arrayBuffer()]);
  }

  // ref: https://fetch.spec.whatwg.org/#body-mixin
  /** Consumes the body and parses it as FormData. Supports
   * multipart/form-data (requires a boundary parameter) and
   * application/x-www-form-urlencoded; any other content type throws. */
  public async formData(): Promise<domTypes.FormData> {
    const formData = new FormData();
    const enc = new TextEncoder();
    if (hasHeaderValueOf(this.contentType, "multipart/form-data")) {
      const params = getHeaderValueParams(this.contentType);
      if (!params.has("boundary")) {
        // TypeError is required by spec
        throw new TypeError("multipart/form-data must provide a boundary");
      }
      // ref: https://tools.ietf.org/html/rfc2046#section-5.1
      const boundary = params.get("boundary")!;
      const dashBoundary = `--${boundary}`;
      const delimiter = `\r\n${dashBoundary}`;
      const closeDelimiter = `${delimiter}--`;

      const body = await this.text();
      let bodyParts: string[];
      const bodyEpilogueSplit = body.split(closeDelimiter);
      if (bodyEpilogueSplit.length < 2) {
        // No close delimiter found: treat the body as having no parts.
        bodyParts = [];
      } else {
        // discard epilogue
        const bodyEpilogueTrimmed = bodyEpilogueSplit[0];
        // first boundary treated special due to optional prefixed \r\n
        const firstBoundaryIndex = bodyEpilogueTrimmed.indexOf(dashBoundary);
        if (firstBoundaryIndex < 0) {
          throw new TypeError("Invalid boundary");
        }
        const bodyPreambleTrimmed = bodyEpilogueTrimmed
          .slice(firstBoundaryIndex + dashBoundary.length)
          .replace(/^[\s\r\n\t]+/, ""); // remove transport-padding CRLF
        // trimStart might not be available
        // Be careful! body-part allows trailing \r\n!
        // (as long as it is not part of `delimiter`)
        bodyParts = bodyPreambleTrimmed
          .split(delimiter)
          .map((s): string => s.replace(/^[\s\r\n\t]+/, ""));
        // TODO: LWSP definition is actually trickier,
        // but should be fine in our case since without headers
        // we should just discard the part
      }
      for (const bodyPart of bodyParts) {
        const headers = new Headers();
        // Headers and content are separated by a blank line (CRLF CRLF).
        const headerOctetSeperatorIndex = bodyPart.indexOf("\r\n\r\n");
        if (headerOctetSeperatorIndex < 0) {
          continue; // Skip unknown part
        }
        const headerText = bodyPart.slice(0, headerOctetSeperatorIndex);
        const octets = bodyPart.slice(headerOctetSeperatorIndex + 4);

        // TODO: use textproto.readMIMEHeader from deno_std
        const rawHeaders = headerText.split("\r\n");
        for (const rawHeader of rawHeaders) {
          const sepIndex = rawHeader.indexOf(":");
          if (sepIndex < 0) {
            continue; // Skip this header
          }
          const key = rawHeader.slice(0, sepIndex);
          // NOTE(review): the value keeps any leading space after ":" —
          // whether Headers.set trims it depends on headers.ts; confirm.
          const value = rawHeader.slice(sepIndex + 1);
          headers.set(key, value);
        }
        if (!headers.has("content-disposition")) {
          continue; // Skip unknown part
        }
        // Content-Transfer-Encoding Deprecated
        const contentDisposition = headers.get("content-disposition")!;
        const partContentType = headers.get("content-type") || "text/plain";
        // TODO: custom charset encoding (needs TextEncoder support)
        // const contentTypeCharset =
        //   getHeaderValueParams(partContentType).get("charset") || "";
        if (!hasHeaderValueOf(contentDisposition, "form-data")) {
          continue; // Skip, might not be form-data
        }
        const dispositionParams = getHeaderValueParams(contentDisposition);
        if (!dispositionParams.has("name")) {
          continue; // Skip, unknown name
        }
        const dispositionName = dispositionParams.get("name")!;
        if (dispositionParams.has("filename")) {
          // File entry: wrap the content in a blob carrying the part's type.
          const filename = dispositionParams.get("filename")!;
          const blob = new DenoBlob([enc.encode(octets)], {
            type: partContentType
          });
          // TODO: based on spec
          // https://xhr.spec.whatwg.org/#dom-formdata-append
          // https://xhr.spec.whatwg.org/#create-an-entry
          // Currently it does not mention how I could pass content-type
          // to the internally created file object...
          formData.append(dispositionName, blob, filename);
        } else {
          formData.append(dispositionName, octets);
        }
      }
      return formData;
    } else if (
      hasHeaderValueOf(this.contentType, "application/x-www-form-urlencoded")
    ) {
      // From https://github.com/github/fetch/blob/master/fetch.js
      // Copyright (c) 2014-2016 GitHub, Inc. MIT License
      const body = await this.text();
      try {
        body
          .trim()
          .split("&")
          .forEach((bytes): void => {
            if (bytes) {
              const split = bytes.split("=");
              const name = split.shift()!.replace(/\+/g, " ");
              const value = split.join("=").replace(/\+/g, " ");
              formData.append(
                decodeURIComponent(name),
                decodeURIComponent(value)
              );
            }
          });
      } catch (e) {
        throw new TypeError("Invalid form urlencoded format");
      }
      return formData;
    } else {
      throw new TypeError("Invalid form data");
    }
  }

  /** Consumes the body and returns it decoded as a UTF-8 string.
   * String sources are returned directly without touching the stream. */
  public async text(): Promise<string> {
    if (typeof this._bodySource === "string") {
      return this._bodySource;
    }

    const ab = await this.arrayBuffer();
    const decoder = new TextDecoder("utf-8");
    return decoder.decode(ab);
  }

  /** Consumes the body as text and parses it as JSON. */
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  public async json(): Promise<any> {
    const raw = await this.text();
    return JSON.parse(raw);
  }

  /** Consumes the body and returns its bytes as an ArrayBuffer. Typed-array
   * sources return their underlying buffer (no copy); strings are UTF-8
   * encoded; streams are drained; FormData is stringified; a nullish body
   * yields an empty buffer. */
  public async arrayBuffer(): Promise<ArrayBuffer> {
    if (
      this._bodySource instanceof Int8Array ||
      this._bodySource instanceof Int16Array ||
      this._bodySource instanceof Int32Array ||
      this._bodySource instanceof Uint8Array ||
      this._bodySource instanceof Uint16Array ||
      this._bodySource instanceof Uint32Array ||
      this._bodySource instanceof Uint8ClampedArray ||
      this._bodySource instanceof Float32Array ||
      this._bodySource instanceof Float64Array
    ) {
      return this._bodySource.buffer as ArrayBuffer;
    } else if (this._bodySource instanceof ArrayBuffer) {
      return this._bodySource;
    } else if (typeof this._bodySource === "string") {
      const enc = new TextEncoder();
      return enc.encode(this._bodySource).buffer as ArrayBuffer;
    } else if (this._bodySource instanceof ReadableStream) {
      // @ts-ignore
      return bufferFromStream(this._bodySource.getReader());
    } else if (this._bodySource instanceof FormData) {
      const enc = new TextEncoder();
      return enc.encode(this._bodySource.toString()).buffer as ArrayBuffer;
    } else if (!this._bodySource) {
      return new ArrayBuffer(0);
    }
    throw new Error(
      `Body type not yet implemented: ${this._bodySource.constructor.name}`
    );
  }
}
diff --git a/cli/js/web/custom_event.ts b/cli/js/web/custom_event.ts
new file mode 100644
index 000000000..6c8a3c19b
--- /dev/null
+++ b/cli/js/web/custom_event.ts
@@ -0,0 +1,48 @@
+// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
+import * as domTypes from "./dom_types.ts";
+import * as event from "./event.ts";
+import { getPrivateValue, requiredArguments } from "../util.ts";
+
// WeakMaps are recommended for private attributes (see MDN link below)
// https://developer.mozilla.org/en-US/docs/Archive/Add-ons/Add-on_SDK/Guides/Contributor_s_Guide/Private_Properties#Using_WeakMaps
// Maps each CustomEvent instance to its private `{ detail }` record.
export const customEventAttributes = new WeakMap();
+
/** Implementation of the CustomEvent web API: an Event carrying an
 * arbitrary `detail` payload, stored privately via a WeakMap. */
export class CustomEvent extends event.Event implements domTypes.CustomEvent {
  constructor(
    type: string,
    customEventInitDict: domTypes.CustomEventInit = {}
  ) {
    // At least the `type` argument is required, per the DOM spec.
    requiredArguments("CustomEvent", arguments.length, 1);
    super(type, customEventInitDict);
    const { detail = null } = customEventInitDict;
    customEventAttributes.set(this, { detail });
  }

  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  get detail(): any {
    return getPrivateValue(this, customEventAttributes, "detail");
  }

  /** Legacy initializer. Here it only replaces `detail`; the `type`,
   * `bubbles` and `cancelable` arguments are accepted but ignored, and the
   * call is a no-op once the event has been dispatched. */
  initCustomEvent(
    type: string,
    bubbles?: boolean,
    cancelable?: boolean,
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    detail?: any
  ): void {
    if (this.dispatched) {
      return;
    }

    customEventAttributes.set(this, { detail });
  }

  get [Symbol.toStringTag](): string {
    return "CustomEvent";
  }
}
+
/** Built-in objects providing `get` methods for our
 * interceptable JavaScript operations.
 */
// Re-declare `detail` on the prototype as enumerable so the accessor is
// visible during property enumeration, like a plain data property.
Reflect.defineProperty(CustomEvent.prototype, "detail", { enumerable: true });
diff --git a/cli/js/web/decode_utf8.ts b/cli/js/web/decode_utf8.ts
new file mode 100644
index 000000000..32d67b0e4
--- /dev/null
+++ b/cli/js/web/decode_utf8.ts
@@ -0,0 +1,134 @@
+// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
+// The following code is based off:
+// https://github.com/inexorabletash/text-encoding
+//
+// Copyright (c) 2008-2009 Bjoern Hoehrmann <bjoern@hoehrmann.de>
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+// SOFTWARE.
+
// `.apply` can actually take a typed array, though the type system doesn't
// really support it, so we have to "hack" it a bit to get past some of the
// strict type checks.
// This overload lets String.fromCharCode.apply(null, uint16Array) below
// type-check without casts.
declare global {
  interface CallableFunction extends Function {
    apply<T, R>(
      this: (this: T, ...args: number[]) => R,
      thisArg: T,
      args: Uint16Array
    ): R;
  }
}
+
/**
 * Decodes UTF-8 bytes into a string using the table-driven DFA technique
 * from Bjoern Hoehrmann (see the license header above).
 *
 * @param input raw UTF-8 bytes.
 * @param fatal when true, malformed input throws a TypeError instead of
 *        emitting U+FFFD replacement characters.
 * @param ignoreBOM when true, a leading EF BB BF byte order mark is
 *        skipped. NOTE(review): this is the inverse of the WHATWG
 *        TextDecoder `ignoreBOM` option (where true means the BOM is NOT
 *        stripped) — confirm against the caller in text_encoding.ts.
 */
export function decodeUtf8(
  input: Uint8Array,
  fatal: boolean,
  ignoreBOM: boolean
): string {
  let outString = "";

  // Prepare a buffer so that we don't have to do a lot of string concats, which
  // are very slow.
  const outBufferLength: number = Math.min(1024, input.length);
  const outBuffer = new Uint16Array(outBufferLength);
  let outIndex = 0;

  let state = 0;
  let codepoint = 0;
  let type: number;

  let i =
    ignoreBOM && input[0] === 0xef && input[1] === 0xbb && input[2] === 0xbf
      ? 3
      : 0;

  for (; i < input.length; ++i) {
    // Encoding error handling: state 12 is the DFA's reject state, and a
    // continuation byte (0b10xxxxxx) is required whenever state !== 0.
    if (state === 12 || (state !== 0 && (input[i] & 0xc0) !== 0x80)) {
      if (fatal)
        throw new TypeError(
          `Decoder error. Invalid byte in sequence at position ${i} in data.`
        );
      outBuffer[outIndex++] = 0xfffd; // Replacement character
      if (outIndex === outBufferLength) {
        outString += String.fromCharCode.apply(null, outBuffer);
        outIndex = 0;
      }
      state = 0;
    }

    // Character-class table: maps each byte to one of 12 classes.
    // prettier-ignore
    type = [
      0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
      0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
      0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
      0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
      1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,
      7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
      8,8,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
      10,3,3,3,3,3,3,3,3,3,3,3,3,4,3,3, 11,6,6,6,5,8,8,8,8,8,8,8,8,8,8,8
    ][input[i]];
    // Accumulate payload bits: a lead byte contributes its low (0xff >> type)
    // bits; each continuation byte contributes 6 more.
    codepoint =
      state !== 0
        ? (input[i] & 0x3f) | (codepoint << 6)
        : (0xff >> type) & input[i];
    // DFA transition table indexed by current state + byte class.
    // prettier-ignore
    state = [
      0,12,24,36,60,96,84,12,12,12,48,72, 12,12,12,12,12,12,12,12,12,12,12,12,
      12, 0,12,12,12,12,12, 0,12, 0,12,12, 12,24,12,12,12,12,12,24,12,24,12,12,
      12,12,12,12,12,12,12,24,12,12,12,12, 12,24,12,12,12,12,12,12,12,24,12,12,
      12,12,12,12,12,12,12,36,12,36,12,12, 12,36,12,12,12,12,12,36,12,36,12,12,
      12,36,12,12,12,12,12,12,12,12,12,12
    ][state + type];

    if (state !== 0) continue;

    // Add codepoint to buffer (as charcodes for utf-16), and flush buffer to
    // string if needed.
    if (codepoint > 0xffff) {
      // Astral codepoint: emit a surrogate pair.
      outBuffer[outIndex++] = 0xd7c0 + (codepoint >> 10);
      if (outIndex === outBufferLength) {
        outString += String.fromCharCode.apply(null, outBuffer);
        outIndex = 0;
      }
      outBuffer[outIndex++] = 0xdc00 | (codepoint & 0x3ff);
      if (outIndex === outBufferLength) {
        outString += String.fromCharCode.apply(null, outBuffer);
        outIndex = 0;
      }
    } else {
      outBuffer[outIndex++] = codepoint;
      if (outIndex === outBufferLength) {
        outString += String.fromCharCode.apply(null, outBuffer);
        outIndex = 0;
      }
    }
  }

  // Add a replacement character if we ended in the middle of a sequence or
  // encountered an invalid code at the end.
  if (state !== 0) {
    if (fatal) throw new TypeError(`Decoder error. Unexpected end of data.`);
    outBuffer[outIndex++] = 0xfffd; // Replacement character
  }

  // Final flush of buffer
  outString += String.fromCharCode.apply(null, outBuffer.subarray(0, outIndex));

  return outString;
}
diff --git a/cli/js/web/dom_file.ts b/cli/js/web/dom_file.ts
new file mode 100644
index 000000000..2b9dbff24
--- /dev/null
+++ b/cli/js/web/dom_file.ts
@@ -0,0 +1,24 @@
+// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
+import * as domTypes from "./dom_types.ts";
+import * as blob from "./blob.ts";
+
+export class DomFileImpl extends blob.DenoBlob implements domTypes.DomFile {
+ lastModified: number;
+ name: string;
+
+ constructor(
+ fileBits: domTypes.BlobPart[],
+ fileName: string,
+ options?: domTypes.FilePropertyBag
+ ) {
+ options = options || {};
+ super(fileBits, options);
+
+ // 4.1.2.1 Replace any "/" character (U+002F SOLIDUS)
+ // with a ":" (U + 003A COLON)
+ this.name = String(fileName).replace(/\u002F/g, "\u003A");
+ // 4.1.3.3 If lastModified is not provided, set lastModified to the current
+ // date and time represented in number of milliseconds since the Unix Epoch.
+ this.lastModified = options.lastModified || Date.now();
+ }
+}
diff --git a/cli/js/web/dom_iterable.ts b/cli/js/web/dom_iterable.ts
new file mode 100644
index 000000000..bd8e7d8cd
--- /dev/null
+++ b/cli/js/web/dom_iterable.ts
@@ -0,0 +1,85 @@
+// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
+// eslint-disable-next-line @typescript-eslint/no-unused-vars
+import { DomIterable } from "./dom_types.ts";
+import { requiredArguments } from "../util.ts";
+import { exposeForTest } from "../internals.ts";
+
// Generic class-constructor type used as the mixin's base-class constraint.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
type Constructor<T = {}> = new (...args: any[]) => T;
+
/** Mixes in a DOM iterable methods into a base class, assumes that there is
 * a private data iterable that is part of the base class, located at
 * `[dataSymbol]`.
 *
 * Adds entries()/keys()/values()/forEach()/[Symbol.iterator] that all read
 * the internal iterable of [K, V] pairs stored under `dataSymbol`.
 */
export function DomIterableMixin<K, V, TBase extends Constructor>(
  Base: TBase,
  dataSymbol: symbol
): TBase & Constructor<DomIterable<K, V>> {
  // we have to cast `this` as `any` because there is no way to describe the
  // Base class in a way where the Symbol `dataSymbol` is defined. So the
  // runtime code works, but we do lose a little bit of type safety.

  // Additionally, we have to not use .keys() nor .values() since the internal
  // slot differs in type - some have a Map, which yields [K, V] in
  // Symbol.iterator, and some have an Array, which yields V, in this case
  // [K, V] too as they are arrays of tuples.

  const DomIterable = class extends Base {
    *entries(): IterableIterator<[K, V]> {
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      for (const entry of (this as any)[dataSymbol]) {
        yield entry;
      }
    }

    *keys(): IterableIterator<K> {
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      for (const [key] of (this as any)[dataSymbol]) {
        yield key;
      }
    }

    *values(): IterableIterator<V> {
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      for (const [, value] of (this as any)[dataSymbol]) {
        yield value;
      }
    }

    forEach(
      callbackfn: (value: V, key: K, parent: this) => void,
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      thisArg?: any
    ): void {
      requiredArguments(
        `${this.constructor.name}.forEach`,
        arguments.length,
        1
      );
      // Bind the callback: a nullish thisArg falls back to globalThis,
      // anything else is boxed, matching Array#forEach semantics.
      callbackfn = callbackfn.bind(
        thisArg == null ? globalThis : Object(thisArg)
      );
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      for (const [key, value] of (this as any)[dataSymbol]) {
        callbackfn(value, key, this);
      }
    }

    *[Symbol.iterator](): IterableIterator<[K, V]> {
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      for (const entry of (this as any)[dataSymbol]) {
        yield entry;
      }
    }
  };

  // we want the Base class name to be the name of the class.
  Object.defineProperty(DomIterable, "name", {
    value: Base.name,
    configurable: true
  });

  return DomIterable;
}
+
+exposeForTest("DomIterableMixin", DomIterableMixin);
diff --git a/cli/js/web/dom_types.ts b/cli/js/web/dom_types.ts
new file mode 100644
index 000000000..cdd681615
--- /dev/null
+++ b/cli/js/web/dom_types.ts
@@ -0,0 +1,755 @@
+// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
+
+/*! ****************************************************************************
+Copyright (c) Microsoft Corporation. All rights reserved.
+Licensed under the Apache License, Version 2.0 (the "License"); you may not use
+this file except in compliance with the License. You may obtain a copy of the
+License at http://www.apache.org/licenses/LICENSE-2.0
+
+THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
+WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
+MERCHANTABLITY OR NON-INFRINGEMENT.
+
+See the Apache Version 2.0 License for specific language governing permissions
+and limitations under the License.
+*******************************************************************************/
+
+/* eslint-disable @typescript-eslint/no-explicit-any */
+
+export type BufferSource = ArrayBufferView | ArrayBuffer;
+
+export type HeadersInit =
+ | Headers
+ | Array<[string, string]>
+ | Record<string, string>;
+export type URLSearchParamsInit = string | string[][] | Record<string, string>;
+type BodyInit =
+ | Blob
+ | BufferSource
+ | FormData
+ | URLSearchParams
+ | ReadableStream
+ | string;
+export type RequestInfo = Request | string;
+type ReferrerPolicy =
+ | ""
+ | "no-referrer"
+ | "no-referrer-when-downgrade"
+ | "origin-only"
+ | "origin-when-cross-origin"
+ | "unsafe-url";
+export type BlobPart = BufferSource | Blob | string;
+export type FormDataEntryValue = DomFile | string;
+
+export interface DomIterable<K, V> {
+ keys(): IterableIterator<K>;
+ values(): IterableIterator<V>;
+ entries(): IterableIterator<[K, V]>;
+ [Symbol.iterator](): IterableIterator<[K, V]>;
+ forEach(
+ callback: (value: V, key: K, parent: this) => void,
+ thisArg?: any
+ ): void;
+}
+
+type EndingType = "transparent" | "native";
+
+export interface BlobPropertyBag {
+ type?: string;
+ ending?: EndingType;
+}
+
+interface AbortSignalEventMap {
+ abort: ProgressEvent;
+}
+
+// https://dom.spec.whatwg.org/#node
+export enum NodeType {
+ ELEMENT_NODE = 1,
+ TEXT_NODE = 3,
+ DOCUMENT_FRAGMENT_NODE = 11
+}
+
+export const eventTargetHost: unique symbol = Symbol();
+export const eventTargetListeners: unique symbol = Symbol();
+export const eventTargetMode: unique symbol = Symbol();
+export const eventTargetNodeType: unique symbol = Symbol();
+
+export interface EventListener {
+ // Different from lib.dom.d.ts. Added Promise<void>
+ (evt: Event): void | Promise<void>;
+}
+
+export interface EventListenerObject {
+ // Different from lib.dom.d.ts. Added Promise<void>
+ handleEvent(evt: Event): void | Promise<void>;
+}
+
+export type EventListenerOrEventListenerObject =
+ | EventListener
+ | EventListenerObject;
+
+// This is actually not part of actual DOM types,
+// but an implementation specific thing on our custom EventTarget
+// (due to the presence of our custom symbols)
+export interface EventTargetListener {
+ callback: EventListenerOrEventListenerObject;
+ options: AddEventListenerOptions;
+}
+
+export interface EventTarget {
+ // TODO: below 4 symbol props should not present on EventTarget WebIDL.
+ // They should be implementation specific details.
+ [eventTargetHost]: EventTarget | null;
+ [eventTargetListeners]: { [type in string]: EventTargetListener[] };
+ [eventTargetMode]: string;
+ [eventTargetNodeType]: NodeType;
+ addEventListener(
+ type: string,
+ listener: EventListenerOrEventListenerObject | null,
+ options?: boolean | AddEventListenerOptions
+ ): void;
+ dispatchEvent(event: Event): boolean;
+ removeEventListener(
+ type: string,
+ listener: EventListenerOrEventListenerObject | null,
+ options?: EventListenerOptions | boolean
+ ): void;
+}
+
+export interface ProgressEventInit extends EventInit {
+ lengthComputable?: boolean;
+ loaded?: number;
+ total?: number;
+}
+
+export interface URLSearchParams extends DomIterable<string, string> {
+ /**
+ * Appends a specified key/value pair as a new search parameter.
+ */
+ append(name: string, value: string): void;
+ /**
+ * Deletes the given search parameter, and its associated value,
+ * from the list of all search parameters.
+ */
+ delete(name: string): void;
+ /**
+   * Returns the first value associated with the given search parameter.
+ */
+ get(name: string): string | null;
+ /**
+   * Returns all the values associated with a given search parameter.
+ */
+ getAll(name: string): string[];
+ /**
+ * Returns a Boolean indicating if such a search parameter exists.
+ */
+ has(name: string): boolean;
+ /**
+   * Sets the value associated with a given search parameter to the given value.
+ * If there were several values, delete the others.
+ */
+ set(name: string, value: string): void;
+ /**
+ * Sort all key/value pairs contained in this object in place
+ * and return undefined. The sort order is according to Unicode
+ * code points of the keys.
+ */
+ sort(): void;
+ /**
+ * Returns a query string suitable for use in a URL.
+ */
+ toString(): string;
+ /**
+ * Iterates over each name-value pair in the query
+ * and invokes the given function.
+ */
+ forEach(
+ callbackfn: (value: string, key: string, parent: this) => void,
+ thisArg?: any
+ ): void;
+}
+
+export interface EventInit {
+ bubbles?: boolean;
+ cancelable?: boolean;
+ composed?: boolean;
+}
+
+export interface CustomEventInit extends EventInit {
+ detail?: any;
+}
+
+export enum EventPhase {
+ NONE = 0,
+ CAPTURING_PHASE = 1,
+ AT_TARGET = 2,
+ BUBBLING_PHASE = 3
+}
+
+export interface EventPath {
+ item: EventTarget;
+ itemInShadowTree: boolean;
+ relatedTarget: EventTarget | null;
+ rootOfClosedTree: boolean;
+ slotInClosedTree: boolean;
+ target: EventTarget | null;
+ touchTargetList: EventTarget[];
+}
+
+export interface Event {
+ readonly type: string;
+ target: EventTarget | null;
+ currentTarget: EventTarget | null;
+ composedPath(): EventPath[];
+
+ eventPhase: number;
+
+ stopPropagation(): void;
+ stopImmediatePropagation(): void;
+
+ readonly bubbles: boolean;
+ readonly cancelable: boolean;
+ preventDefault(): void;
+ readonly defaultPrevented: boolean;
+ readonly composed: boolean;
+
+ isTrusted: boolean;
+ readonly timeStamp: Date;
+
+ dispatched: boolean;
+ readonly initialized: boolean;
+ inPassiveListener: boolean;
+ cancelBubble: boolean;
+ cancelBubbleImmediately: boolean;
+ path: EventPath[];
+ relatedTarget: EventTarget | null;
+}
+
+export interface CustomEvent extends Event {
+ readonly detail: any;
+ initCustomEvent(
+ type: string,
+ bubbles?: boolean,
+ cancelable?: boolean,
+ detail?: any | null
+ ): void;
+}
+
+export interface DomFile extends Blob {
+ readonly lastModified: number;
+ readonly name: string;
+}
+
+export interface DomFileConstructor {
+ new (bits: BlobPart[], filename: string, options?: FilePropertyBag): DomFile;
+ prototype: DomFile;
+}
+
+export interface FilePropertyBag extends BlobPropertyBag {
+ lastModified?: number;
+}
+
+interface ProgressEvent extends Event {
+ readonly lengthComputable: boolean;
+ readonly loaded: number;
+ readonly total: number;
+}
+
+export interface EventListenerOptions {
+ capture: boolean;
+}
+
+export interface AddEventListenerOptions extends EventListenerOptions {
+ once: boolean;
+ passive: boolean;
+}
+
+export interface AbortSignal extends EventTarget {
+ readonly aborted: boolean;
+ onabort: ((this: AbortSignal, ev: ProgressEvent) => any) | null;
+ addEventListener<K extends keyof AbortSignalEventMap>(
+ type: K,
+ listener: (this: AbortSignal, ev: AbortSignalEventMap[K]) => any,
+ options?: boolean | AddEventListenerOptions
+ ): void;
+ addEventListener(
+ type: string,
+ listener: EventListener,
+ options?: boolean | AddEventListenerOptions
+ ): void;
+ removeEventListener<K extends keyof AbortSignalEventMap>(
+ type: K,
+ listener: (this: AbortSignal, ev: AbortSignalEventMap[K]) => any,
+ options?: boolean | EventListenerOptions
+ ): void;
+ removeEventListener(
+ type: string,
+ listener: EventListener,
+ options?: boolean | EventListenerOptions
+ ): void;
+}
+
+export interface FormData extends DomIterable<string, FormDataEntryValue> {
+ append(name: string, value: string | Blob, fileName?: string): void;
+ delete(name: string): void;
+ get(name: string): FormDataEntryValue | null;
+ getAll(name: string): FormDataEntryValue[];
+ has(name: string): boolean;
+ set(name: string, value: string | Blob, fileName?: string): void;
+}
+
+export interface FormDataConstructor {
+ new (): FormData;
+ prototype: FormData;
+}
+
+/** A blob object represents a file-like object of immutable, raw data. */
+export interface Blob {
+ /** The size, in bytes, of the data contained in the `Blob` object. */
+ readonly size: number;
+ /** A string indicating the media type of the data contained in the `Blob`.
+ * If the type is unknown, this string is empty.
+ */
+ readonly type: string;
+ /** Returns a new `Blob` object containing the data in the specified range of
+ * bytes of the source `Blob`.
+ */
+ slice(start?: number, end?: number, contentType?: string): Blob;
+}
+
+export interface Body {
+ /** A simple getter used to expose a `ReadableStream` of the body contents. */
+ readonly body: ReadableStream | null;
+ /** Stores a `Boolean` that declares whether the body has been used in a
+ * response yet.
+ */
+ readonly bodyUsed: boolean;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with an `ArrayBuffer`.
+ */
+ arrayBuffer(): Promise<ArrayBuffer>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with a `Blob`.
+ */
+ blob(): Promise<Blob>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with a `FormData` object.
+ */
+ formData(): Promise<FormData>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with the result of parsing the body text as JSON.
+ */
+ json(): Promise<any>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with a `USVString` (text).
+ */
+ text(): Promise<string>;
+}
+
+export interface ReadableStream {
+ readonly locked: boolean;
+ cancel(reason?: any): Promise<void>;
+ getReader(): ReadableStreamReader;
+ tee(): ReadableStream[];
+}
+
+export interface UnderlyingSource<R = any> {
+ cancel?: ReadableStreamErrorCallback;
+ pull?: ReadableStreamDefaultControllerCallback<R>;
+ start?: ReadableStreamDefaultControllerCallback<R>;
+ type?: undefined;
+}
+
+export interface UnderlyingByteSource {
+ autoAllocateChunkSize?: number;
+ cancel?: ReadableStreamErrorCallback;
+ pull?: ReadableByteStreamControllerCallback;
+ start?: ReadableByteStreamControllerCallback;
+ type: "bytes";
+}
+
+export interface ReadableStreamReader {
+ cancel(reason?: any): Promise<void>;
+ read(): Promise<any>;
+ releaseLock(): void;
+}
+
+export interface ReadableStreamErrorCallback {
+ (reason: any): void | PromiseLike<void>;
+}
+
+export interface ReadableByteStreamControllerCallback {
+ (controller: ReadableByteStreamController): void | PromiseLike<void>;
+}
+
+export interface ReadableStreamDefaultControllerCallback<R> {
+ (controller: ReadableStreamDefaultController<R>): void | PromiseLike<void>;
+}
+
+export interface ReadableStreamDefaultController<R = any> {
+ readonly desiredSize: number | null;
+ close(): void;
+ enqueue(chunk: R): void;
+ error(error?: any): void;
+}
+
+export interface ReadableByteStreamController {
+ readonly byobRequest: ReadableStreamBYOBRequest | undefined;
+ readonly desiredSize: number | null;
+ close(): void;
+ enqueue(chunk: ArrayBufferView): void;
+ error(error?: any): void;
+}
+
+export interface ReadableStreamBYOBRequest {
+ readonly view: ArrayBufferView;
+ respond(bytesWritten: number): void;
+ respondWithNewView(view: ArrayBufferView): void;
+}
+/* TODO reenable these interfaces. These are needed to enable WritableStreams in js/streams/
+export interface WritableStream<W = any> {
+ readonly locked: boolean;
+ abort(reason?: any): Promise<void>;
+ getWriter(): WritableStreamDefaultWriter<W>;
+}
+
+TODO reenable these interfaces. These are needed to enable WritableStreams in js/streams/
+export interface UnderlyingSink<W = any> {
+ abort?: WritableStreamErrorCallback;
+ close?: WritableStreamDefaultControllerCloseCallback;
+ start?: WritableStreamDefaultControllerStartCallback;
+ type?: undefined;
+ write?: WritableStreamDefaultControllerWriteCallback<W>;
+}
+
+export interface PipeOptions {
+ preventAbort?: boolean;
+ preventCancel?: boolean;
+ preventClose?: boolean;
+ signal?: AbortSignal;
+}
+
+
+export interface WritableStreamDefaultWriter<W = any> {
+ readonly closed: Promise<void>;
+ readonly desiredSize: number | null;
+ readonly ready: Promise<void>;
+ abort(reason?: any): Promise<void>;
+ close(): Promise<void>;
+ releaseLock(): void;
+ write(chunk: W): Promise<void>;
+}
+
+export interface WritableStreamErrorCallback {
+ (reason: any): void | PromiseLike<void>;
+}
+
+export interface WritableStreamDefaultControllerCloseCallback {
+ (): void | PromiseLike<void>;
+}
+
+export interface WritableStreamDefaultControllerStartCallback {
+ (controller: WritableStreamDefaultController): void | PromiseLike<void>;
+}
+
+export interface WritableStreamDefaultControllerWriteCallback<W> {
+ (chunk: W, controller: WritableStreamDefaultController): void | PromiseLike<
+ void
+ >;
+}
+
+export interface WritableStreamDefaultController {
+ error(error?: any): void;
+}
+*/
+export interface QueuingStrategy<T = any> {
+ highWaterMark?: number;
+ size?: QueuingStrategySizeCallback<T>;
+}
+
+export interface QueuingStrategySizeCallback<T = any> {
+ (chunk: T): number;
+}
+
+export interface Headers extends DomIterable<string, string> {
+ /** Appends a new value onto an existing header inside a `Headers` object, or
+ * adds the header if it does not already exist.
+ */
+ append(name: string, value: string): void;
+ /** Deletes a header from a `Headers` object. */
+ delete(name: string): void;
+ /** Returns an iterator allowing to go through all key/value pairs
+   * contained in this Headers object. Both the key and value of each pair
+   * are ByteString objects.
+ */
+ entries(): IterableIterator<[string, string]>;
+ /** Returns a `ByteString` sequence of all the values of a header within a
+ * `Headers` object with a given name.
+ */
+ get(name: string): string | null;
+ /** Returns a boolean stating whether a `Headers` object contains a certain
+ * header.
+ */
+ has(name: string): boolean;
+ /** Returns an iterator allowing to go through all keys contained in
+ * this Headers object. The keys are ByteString objects.
+ */
+ keys(): IterableIterator<string>;
+ /** Sets a new value for an existing header inside a Headers object, or adds
+ * the header if it does not already exist.
+ */
+ set(name: string, value: string): void;
+ /** Returns an iterator allowing to go through all values contained in
+ * this Headers object. The values are ByteString objects.
+ */
+ values(): IterableIterator<string>;
+ forEach(
+ callbackfn: (value: string, key: string, parent: this) => void,
+ thisArg?: any
+ ): void;
+ /** The Symbol.iterator well-known symbol specifies the default
+ * iterator for this Headers object
+ */
+ [Symbol.iterator](): IterableIterator<[string, string]>;
+}
+
+export interface HeadersConstructor {
+ new (init?: HeadersInit): Headers;
+ prototype: Headers;
+}
+
+type RequestCache =
+ | "default"
+ | "no-store"
+ | "reload"
+ | "no-cache"
+ | "force-cache"
+ | "only-if-cached";
+type RequestCredentials = "omit" | "same-origin" | "include";
+type RequestDestination =
+ | ""
+ | "audio"
+ | "audioworklet"
+ | "document"
+ | "embed"
+ | "font"
+ | "image"
+ | "manifest"
+ | "object"
+ | "paintworklet"
+ | "report"
+ | "script"
+ | "sharedworker"
+ | "style"
+ | "track"
+ | "video"
+ | "worker"
+ | "xslt";
+type RequestMode = "navigate" | "same-origin" | "no-cors" | "cors";
+type RequestRedirect = "follow" | "nofollow" | "error" | "manual";
+export type ResponseType =
+ | "basic"
+ | "cors"
+ | "default"
+ | "error"
+ | "opaque"
+ | "opaqueredirect";
+
+export interface RequestInit {
+ body?: BodyInit | null;
+ cache?: RequestCache;
+ credentials?: RequestCredentials;
+ headers?: HeadersInit;
+ integrity?: string;
+ keepalive?: boolean;
+ method?: string;
+ mode?: RequestMode;
+ redirect?: RequestRedirect;
+ referrer?: string;
+ referrerPolicy?: ReferrerPolicy;
+ signal?: AbortSignal | null;
+ window?: any;
+}
+
+export interface ResponseInit {
+ headers?: HeadersInit;
+ status?: number;
+ statusText?: string;
+}
+
+export interface RequestConstructor {
+ new (input: RequestInfo, init?: RequestInit): Request;
+ prototype: Request;
+}
+
+export interface Request extends Body {
+ /** Returns the cache mode associated with request, which is a string
+   * indicating how the request will interact with the browser's cache when
+ * fetching.
+ */
+ readonly cache?: RequestCache;
+ /** Returns the credentials mode associated with request, which is a string
+ * indicating whether credentials will be sent with the request always, never,
+ * or only when sent to a same-origin URL.
+ */
+ readonly credentials?: RequestCredentials;
+ /** Returns the kind of resource requested by request, (e.g., `document` or
+ * `script`).
+ */
+ readonly destination?: RequestDestination;
+ /** Returns a Headers object consisting of the headers associated with
+ * request.
+ *
+ * Note that headers added in the network layer by the user agent
+ * will not be accounted for in this object, (e.g., the `Host` header).
+ */
+ readonly headers: Headers;
+ /** Returns request's subresource integrity metadata, which is a cryptographic
+ * hash of the resource being fetched. Its value consists of multiple hashes
+ * separated by whitespace. [SRI]
+ */
+ readonly integrity?: string;
+ /** Returns a boolean indicating whether or not request is for a history
+ * navigation (a.k.a. back-forward navigation).
+ */
+ readonly isHistoryNavigation?: boolean;
+ /** Returns a boolean indicating whether or not request is for a reload
+ * navigation.
+ */
+ readonly isReloadNavigation?: boolean;
+ /** Returns a boolean indicating whether or not request can outlive the global
+ * in which it was created.
+ */
+ readonly keepalive?: boolean;
+ /** Returns request's HTTP method, which is `GET` by default. */
+ readonly method: string;
+ /** Returns the mode associated with request, which is a string indicating
+ * whether the request will use CORS, or will be restricted to same-origin
+ * URLs.
+ */
+ readonly mode?: RequestMode;
+ /** Returns the redirect mode associated with request, which is a string
+ * indicating how redirects for the request will be handled during fetching.
+ *
+ * A request will follow redirects by default.
+ */
+ readonly redirect?: RequestRedirect;
+ /** Returns the referrer of request. Its value can be a same-origin URL if
+ * explicitly set in init, the empty string to indicate no referrer, and
+ * `about:client` when defaulting to the global's default.
+ *
+ * This is used during fetching to determine the value of the `Referer`
+ * header of the request being made.
+ */
+ readonly referrer?: string;
+ /** Returns the referrer policy associated with request. This is used during
+ * fetching to compute the value of the request's referrer.
+ */
+ readonly referrerPolicy?: ReferrerPolicy;
+ /** Returns the signal associated with request, which is an AbortSignal object
+ * indicating whether or not request has been aborted, and its abort event
+ * handler.
+ */
+ readonly signal?: AbortSignal;
+ /** Returns the URL of request as a string. */
+ readonly url: string;
+ clone(): Request;
+}
+
+export interface Response extends Body {
+ /** Contains the `Headers` object associated with the response. */
+ readonly headers: Headers;
+ /** Contains a boolean stating whether the response was successful (status in
+ * the range 200-299) or not.
+ */
+ readonly ok: boolean;
+ /** Indicates whether or not the response is the result of a redirect; that
+ * is, its URL list has more than one entry.
+ */
+ readonly redirected: boolean;
+ /** Contains the status code of the response (e.g., `200` for a success). */
+ readonly status: number;
+ /** Contains the status message corresponding to the status code (e.g., `OK`
+ * for `200`).
+ */
+ readonly statusText: string;
+ readonly trailer: Promise<Headers>;
+ /** Contains the type of the response (e.g., `basic`, `cors`). */
+ readonly type: ResponseType;
+ /** Contains the URL of the response. */
+ readonly url: string;
+ /** Creates a clone of a `Response` object. */
+ clone(): Response;
+}
+
+export interface Location {
+ /**
+ * Returns a DOMStringList object listing the origins of the ancestor browsing
+ * contexts, from the parent browsing context to the top-level browsing
+ * context.
+ */
+ readonly ancestorOrigins: string[];
+ /**
+ * Returns the Location object's URL's fragment (includes leading "#" if
+ * non-empty).
+ * Can be set, to navigate to the same URL with a changed fragment (ignores
+ * leading "#").
+ */
+ hash: string;
+ /**
+ * Returns the Location object's URL's host and port (if different from the
+ * default port for the scheme). Can be set, to navigate to the same URL with
+ * a changed host and port.
+ */
+ host: string;
+ /**
+ * Returns the Location object's URL's host. Can be set, to navigate to the
+ * same URL with a changed host.
+ */
+ hostname: string;
+ /**
+ * Returns the Location object's URL. Can be set, to navigate to the given
+ * URL.
+ */
+ href: string;
+ /** Returns the Location object's URL's origin. */
+ readonly origin: string;
+ /**
+ * Returns the Location object's URL's path.
+ * Can be set, to navigate to the same URL with a changed path.
+ */
+ pathname: string;
+ /**
+ * Returns the Location object's URL's port.
+ * Can be set, to navigate to the same URL with a changed port.
+ */
+ port: string;
+ /**
+ * Returns the Location object's URL's scheme.
+ * Can be set, to navigate to the same URL with a changed scheme.
+ */
+ protocol: string;
+ /**
+ * Returns the Location object's URL's query (includes leading "?" if
+ * non-empty). Can be set, to navigate to the same URL with a changed query
+ * (ignores leading "?").
+ */
+ search: string;
+ /**
+ * Navigates to the given URL.
+ */
+ assign(url: string): void;
+ /**
+ * Reloads the current page.
+ */
+ reload(): void;
+ /** @deprecated */
+ reload(forcedReload: boolean): void;
+ /**
+ * Removes the current page from the session history and navigates to the
+ * given URL.
+ */
+ replace(url: string): void;
+}
diff --git a/cli/js/web/dom_util.ts b/cli/js/web/dom_util.ts
new file mode 100644
index 000000000..5780d9c52
--- /dev/null
+++ b/cli/js/web/dom_util.ts
@@ -0,0 +1,85 @@
+// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
+// Utility functions for DOM nodes
+import * as domTypes from "./dom_types.ts";
+
+export function isNode(nodeImpl: domTypes.EventTarget | null): boolean {
+ return Boolean(nodeImpl && "nodeType" in nodeImpl);
+}
+
+export function isShadowRoot(nodeImpl: domTypes.EventTarget | null): boolean {
+ return Boolean(
+ nodeImpl &&
+ nodeImpl[domTypes.eventTargetNodeType] ===
+ domTypes.NodeType.DOCUMENT_FRAGMENT_NODE &&
+ nodeImpl[domTypes.eventTargetHost] != null
+ );
+}
+
+export function isSlotable(nodeImpl: domTypes.EventTarget | null): boolean {
+ return Boolean(
+ nodeImpl &&
+ (nodeImpl[domTypes.eventTargetNodeType] ===
+ domTypes.NodeType.ELEMENT_NODE ||
+ nodeImpl[domTypes.eventTargetNodeType] === domTypes.NodeType.TEXT_NODE)
+ );
+}
+
+// https://dom.spec.whatwg.org/#node-trees
+// const domSymbolTree = Symbol("DOM Symbol Tree");
+
+// https://dom.spec.whatwg.org/#concept-shadow-including-inclusive-ancestor
+export function isShadowInclusiveAncestor(
+ ancestor: domTypes.EventTarget | null,
+ node: domTypes.EventTarget | null
+): boolean {
+ while (isNode(node)) {
+ if (node === ancestor) {
+ return true;
+ }
+
+ if (isShadowRoot(node)) {
+ node = node && node[domTypes.eventTargetHost];
+ } else {
+ node = null; // domSymbolTree.parent(node);
+ }
+ }
+
+ return false;
+}
+
+export function getRoot(
+ node: domTypes.EventTarget | null
+): domTypes.EventTarget | null {
+ const root = node;
+
+ // for (const ancestor of domSymbolTree.ancestorsIterator(node)) {
+ // root = ancestor;
+ // }
+
+ return root;
+}
+
+// https://dom.spec.whatwg.org/#retarget
+export function retarget(
+ a: domTypes.EventTarget | null,
+ b: domTypes.EventTarget
+): domTypes.EventTarget | null {
+ while (true) {
+ if (!isNode(a)) {
+ return a;
+ }
+
+ const aRoot = getRoot(a);
+
+ if (aRoot) {
+ if (
+ !isShadowRoot(aRoot) ||
+ (isNode(b) && isShadowInclusiveAncestor(aRoot, b))
+ ) {
+ return a;
+ }
+
+ a = aRoot[domTypes.eventTargetHost];
+ }
+ }
+}
diff --git a/cli/js/web/encode_utf8.ts b/cli/js/web/encode_utf8.ts
new file mode 100644
index 000000000..04e2560b7
--- /dev/null
+++ b/cli/js/web/encode_utf8.ts
@@ -0,0 +1,80 @@
+// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
+// The following code is based off:
+// https://github.com/samthor/fast-text-encoding
+//
+// Copyright 2017 Sam Thorogood. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+//
+
+export function encodeUtf8(input: string): Uint8Array {
+ let pos = 0;
+ const len = input.length;
+
+ let at = 0; // output position
+ let tlen = Math.max(32, len + (len >> 1) + 7); // 1.5x size
+ let target = new Uint8Array((tlen >> 3) << 3); // ... but at 8 byte offset
+
+ while (pos < len) {
+ let value = input.charCodeAt(pos++);
+ if (value >= 0xd800 && value <= 0xdbff) {
+ // high surrogate
+ if (pos < len) {
+ const extra = input.charCodeAt(pos);
+ if ((extra & 0xfc00) === 0xdc00) {
+ ++pos;
+ value = ((value & 0x3ff) << 10) + (extra & 0x3ff) + 0x10000;
+ }
+ }
+ if (value >= 0xd800 && value <= 0xdbff) {
+ continue; // drop lone surrogate
+ }
+ }
+
+ // expand the buffer if we couldn't write 4 bytes
+ if (at + 4 > target.length) {
+ tlen += 8; // minimum extra
+ tlen *= 1.0 + (pos / input.length) * 2; // take 2x the remaining
+ tlen = (tlen >> 3) << 3; // 8 byte offset
+
+ const update = new Uint8Array(tlen);
+ update.set(target);
+ target = update;
+ }
+
+ if ((value & 0xffffff80) === 0) {
+ // 1-byte
+ target[at++] = value; // ASCII
+ continue;
+ } else if ((value & 0xfffff800) === 0) {
+ // 2-byte
+ target[at++] = ((value >> 6) & 0x1f) | 0xc0;
+ } else if ((value & 0xffff0000) === 0) {
+ // 3-byte
+ target[at++] = ((value >> 12) & 0x0f) | 0xe0;
+ target[at++] = ((value >> 6) & 0x3f) | 0x80;
+ } else if ((value & 0xffe00000) === 0) {
+ // 4-byte
+ target[at++] = ((value >> 18) & 0x07) | 0xf0;
+ target[at++] = ((value >> 12) & 0x3f) | 0x80;
+ target[at++] = ((value >> 6) & 0x3f) | 0x80;
+ } else {
+ // FIXME: do we care
+ continue;
+ }
+
+ target[at++] = (value & 0x3f) | 0x80;
+ }
+
+ return target.slice(0, at);
+}
diff --git a/cli/js/web/event.ts b/cli/js/web/event.ts
new file mode 100644
index 000000000..e365fb6b2
--- /dev/null
+++ b/cli/js/web/event.ts
@@ -0,0 +1,348 @@
+// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
+import * as domTypes from "./dom_types.ts";
+import { getPrivateValue, requiredArguments } from "../util.ts";
+
// WeakMaps are recommended for private attributes (see MDN link below)
// https://developer.mozilla.org/en-US/docs/Archive/Add-ons/Add-on_SDK/Guides/Contributor_s_Guide/Private_Properties#Using_WeakMaps
// Maps each Event instance to its private attribute record (type, bubbles,
// cancelable, composed, currentTarget, eventPhase, isTrusted, relatedTarget,
// target, timeStamp). Populated in the Event constructor below.
export const eventAttributes = new WeakMap();

// Getter installed per-instance for `Event.isTrusted` (see the constructor);
// reads the flag from the private attribute record.
function isTrusted(this: Event): boolean {
  return getPrivateValue(this, eventAttributes, "isTrusted");
}
+
+export class Event implements domTypes.Event {
+ // The default value is `false`.
+ // Use `defineProperty` to define on each instance, NOT on the prototype.
+ isTrusted!: boolean;
+ // Each event has the following associated flags
+ private _canceledFlag = false;
+ private _dispatchedFlag = false;
+ private _initializedFlag = false;
+ private _inPassiveListenerFlag = false;
+ private _stopImmediatePropagationFlag = false;
+ private _stopPropagationFlag = false;
+
+ // Property for objects on which listeners will be invoked
+ private _path: domTypes.EventPath[] = [];
+
+ constructor(type: string, eventInitDict: domTypes.EventInit = {}) {
+ requiredArguments("Event", arguments.length, 1);
+ type = String(type);
+ this._initializedFlag = true;
+ eventAttributes.set(this, {
+ type,
+ bubbles: eventInitDict.bubbles || false,
+ cancelable: eventInitDict.cancelable || false,
+ composed: eventInitDict.composed || false,
+ currentTarget: null,
+ eventPhase: domTypes.EventPhase.NONE,
+ isTrusted: false,
+ relatedTarget: null,
+ target: null,
+ timeStamp: Date.now()
+ });
+ Reflect.defineProperty(this, "isTrusted", {
+ enumerable: true,
+ get: isTrusted
+ });
+ }
+
+ get bubbles(): boolean {
+ return getPrivateValue(this, eventAttributes, "bubbles");
+ }
+
+ get cancelBubble(): boolean {
+ return this._stopPropagationFlag;
+ }
+
+ set cancelBubble(value: boolean) {
+ this._stopPropagationFlag = value;
+ }
+
+ get cancelBubbleImmediately(): boolean {
+ return this._stopImmediatePropagationFlag;
+ }
+
+ set cancelBubbleImmediately(value: boolean) {
+ this._stopImmediatePropagationFlag = value;
+ }
+
+ get cancelable(): boolean {
+ return getPrivateValue(this, eventAttributes, "cancelable");
+ }
+
+ get composed(): boolean {
+ return getPrivateValue(this, eventAttributes, "composed");
+ }
+
+ get currentTarget(): domTypes.EventTarget {
+ return getPrivateValue(this, eventAttributes, "currentTarget");
+ }
+
+ set currentTarget(value: domTypes.EventTarget) {
+ eventAttributes.set(this, {
+ type: this.type,
+ bubbles: this.bubbles,
+ cancelable: this.cancelable,
+ composed: this.composed,
+ currentTarget: value,
+ eventPhase: this.eventPhase,
+ isTrusted: this.isTrusted,
+ relatedTarget: this.relatedTarget,
+ target: this.target,
+ timeStamp: this.timeStamp
+ });
+ }
+
+ get defaultPrevented(): boolean {
+ return this._canceledFlag;
+ }
+
+ get dispatched(): boolean {
+ return this._dispatchedFlag;
+ }
+
+ set dispatched(value: boolean) {
+ this._dispatchedFlag = value;
+ }
+
+ get eventPhase(): number {
+ return getPrivateValue(this, eventAttributes, "eventPhase");
+ }
+
+ set eventPhase(value: number) {
+ eventAttributes.set(this, {
+ type: this.type,
+ bubbles: this.bubbles,
+ cancelable: this.cancelable,
+ composed: this.composed,
+ currentTarget: this.currentTarget,
+ eventPhase: value,
+ isTrusted: this.isTrusted,
+ relatedTarget: this.relatedTarget,
+ target: this.target,
+ timeStamp: this.timeStamp
+ });
+ }
+
+ get initialized(): boolean {
+ return this._initializedFlag;
+ }
+
+ set inPassiveListener(value: boolean) {
+ this._inPassiveListenerFlag = value;
+ }
+
+ get path(): domTypes.EventPath[] {
+ return this._path;
+ }
+
+ set path(value: domTypes.EventPath[]) {
+ this._path = value;
+ }
+
+ get relatedTarget(): domTypes.EventTarget {
+ return getPrivateValue(this, eventAttributes, "relatedTarget");
+ }
+
+ set relatedTarget(value: domTypes.EventTarget) {
+ eventAttributes.set(this, {
+ type: this.type,
+ bubbles: this.bubbles,
+ cancelable: this.cancelable,
+ composed: this.composed,
+ currentTarget: this.currentTarget,
+ eventPhase: this.eventPhase,
+ isTrusted: this.isTrusted,
+ relatedTarget: value,
+ target: this.target,
+ timeStamp: this.timeStamp
+ });
+ }
+
+ get target(): domTypes.EventTarget {
+ return getPrivateValue(this, eventAttributes, "target");
+ }
+
+ set target(value: domTypes.EventTarget) {
+ eventAttributes.set(this, {
+ type: this.type,
+ bubbles: this.bubbles,
+ cancelable: this.cancelable,
+ composed: this.composed,
+ currentTarget: this.currentTarget,
+ eventPhase: this.eventPhase,
+ isTrusted: this.isTrusted,
+ relatedTarget: this.relatedTarget,
+ target: value,
+ timeStamp: this.timeStamp
+ });
+ }
+
+ get timeStamp(): Date {
+ return getPrivateValue(this, eventAttributes, "timeStamp");
+ }
+
+ get type(): string {
+ return getPrivateValue(this, eventAttributes, "type");
+ }
+
+ /** Returns the event’s path (objects on which listeners will be
+ * invoked). This does not include nodes in shadow trees if the
+ * shadow root was created with its ShadowRoot.mode closed.
+ *
+ * event.composedPath();
+ */
+ composedPath(): domTypes.EventPath[] {
+ if (this._path.length === 0) {
+ return [];
+ }
+
+ const composedPath: domTypes.EventPath[] = [
+ {
+ item: this.currentTarget,
+ itemInShadowTree: false,
+ relatedTarget: null,
+ rootOfClosedTree: false,
+ slotInClosedTree: false,
+ target: null,
+ touchTargetList: []
+ }
+ ];
+
+ let currentTargetIndex = 0;
+ let currentTargetHiddenSubtreeLevel = 0;
+
+ for (let index = this._path.length - 1; index >= 0; index--) {
+ const { item, rootOfClosedTree, slotInClosedTree } = this._path[index];
+
+ if (rootOfClosedTree) {
+ currentTargetHiddenSubtreeLevel++;
+ }
+
+ if (item === this.currentTarget) {
+ currentTargetIndex = index;
+ break;
+ }
+
+ if (slotInClosedTree) {
+ currentTargetHiddenSubtreeLevel--;
+ }
+ }
+
+ let currentHiddenLevel = currentTargetHiddenSubtreeLevel;
+ let maxHiddenLevel = currentTargetHiddenSubtreeLevel;
+
+ for (let i = currentTargetIndex - 1; i >= 0; i--) {
+ const { item, rootOfClosedTree, slotInClosedTree } = this._path[i];
+
+ if (rootOfClosedTree) {
+ currentHiddenLevel++;
+ }
+
+ if (currentHiddenLevel <= maxHiddenLevel) {
+ composedPath.unshift({
+ item,
+ itemInShadowTree: false,
+ relatedTarget: null,
+ rootOfClosedTree: false,
+ slotInClosedTree: false,
+ target: null,
+ touchTargetList: []
+ });
+ }
+
+ if (slotInClosedTree) {
+ currentHiddenLevel--;
+
+ if (currentHiddenLevel < maxHiddenLevel) {
+ maxHiddenLevel = currentHiddenLevel;
+ }
+ }
+ }
+
+ currentHiddenLevel = currentTargetHiddenSubtreeLevel;
+ maxHiddenLevel = currentTargetHiddenSubtreeLevel;
+
+ for (
+ let index = currentTargetIndex + 1;
+ index < this._path.length;
+ index++
+ ) {
+ const { item, rootOfClosedTree, slotInClosedTree } = this._path[index];
+
+ if (slotInClosedTree) {
+ currentHiddenLevel++;
+ }
+
+ if (currentHiddenLevel <= maxHiddenLevel) {
+ composedPath.push({
+ item,
+ itemInShadowTree: false,
+ relatedTarget: null,
+ rootOfClosedTree: false,
+ slotInClosedTree: false,
+ target: null,
+ touchTargetList: []
+ });
+ }
+
+ if (rootOfClosedTree) {
+ currentHiddenLevel--;
+
+ if (currentHiddenLevel < maxHiddenLevel) {
+ maxHiddenLevel = currentHiddenLevel;
+ }
+ }
+ }
+
+ return composedPath;
+ }
+
+ /** Cancels the event (if it is cancelable).
+ * See https://dom.spec.whatwg.org/#set-the-canceled-flag
+ *
+ * event.preventDefault();
+ */
+ preventDefault(): void {
+ if (this.cancelable && !this._inPassiveListenerFlag) {
+ this._canceledFlag = true;
+ }
+ }
+
+ /** Stops the propagation of events further along in the DOM.
+ *
+ * event.stopPropagation();
+ */
+ stopPropagation(): void {
+ this._stopPropagationFlag = true;
+ }
+
+ /** For this particular event, no other listener will be called.
+ * Neither those attached on the same element, nor those attached
+ * on elements which will be traversed later (in capture phase,
+ * for instance).
+ *
+ * event.stopImmediatePropagation();
+ */
+ stopImmediatePropagation(): void {
+ this._stopPropagationFlag = true;
+ this._stopImmediatePropagationFlag = true;
+ }
+}
+
+/** Built-in objects providing `get` methods for our
+ * interceptable JavaScript operations.
+ */
+Reflect.defineProperty(Event.prototype, "bubbles", { enumerable: true });
+Reflect.defineProperty(Event.prototype, "cancelable", { enumerable: true });
+Reflect.defineProperty(Event.prototype, "composed", { enumerable: true });
+Reflect.defineProperty(Event.prototype, "currentTarget", { enumerable: true });
+Reflect.defineProperty(Event.prototype, "defaultPrevented", {
+ enumerable: true
+});
+Reflect.defineProperty(Event.prototype, "dispatched", { enumerable: true });
+Reflect.defineProperty(Event.prototype, "eventPhase", { enumerable: true });
+Reflect.defineProperty(Event.prototype, "target", { enumerable: true });
+Reflect.defineProperty(Event.prototype, "timeStamp", { enumerable: true });
+Reflect.defineProperty(Event.prototype, "type", { enumerable: true });
diff --git a/cli/js/web/event_target.ts b/cli/js/web/event_target.ts
new file mode 100644
index 000000000..09e80a731
--- /dev/null
+++ b/cli/js/web/event_target.ts
@@ -0,0 +1,500 @@
+// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
+import * as domTypes from "./dom_types.ts";
+import { hasOwnProperty, requiredArguments } from "../util.ts";
+import {
+ getRoot,
+ isNode,
+ isShadowRoot,
+ isShadowInclusiveAncestor,
+ isSlotable,
+ retarget
+} from "./dom_util.ts";
+
+// https://dom.spec.whatwg.org/#get-the-parent
+// Note: Nodes, shadow roots, and documents override this algorithm so we set it to null.
+function getEventTargetParent(
+ _eventTarget: domTypes.EventTarget,
+ _event: domTypes.Event
+): null {
+ return null;
+}
+
// Per-instance flags consulted by the dispatch algorithm below; symbols keep
// them off the public EventTarget property surface.
export const eventTargetAssignedSlot: unique symbol = Symbol();
export const eventTargetHasActivationBehavior: unique symbol = Symbol();
+
+export class EventTarget implements domTypes.EventTarget {
+ public [domTypes.eventTargetHost]: domTypes.EventTarget | null = null;
+ public [domTypes.eventTargetListeners]: {
+ [type in string]: domTypes.EventTargetListener[];
+ } = {};
+ public [domTypes.eventTargetMode] = "";
+ public [domTypes.eventTargetNodeType]: domTypes.NodeType =
+ domTypes.NodeType.DOCUMENT_FRAGMENT_NODE;
+ private [eventTargetAssignedSlot] = false;
+ private [eventTargetHasActivationBehavior] = false;
+
+ public addEventListener(
+ type: string,
+ callback: domTypes.EventListenerOrEventListenerObject | null,
+ options?: domTypes.AddEventListenerOptions | boolean
+ ): void {
+ const this_ = this || globalThis;
+
+ requiredArguments("EventTarget.addEventListener", arguments.length, 2);
+ const normalizedOptions: domTypes.AddEventListenerOptions = eventTargetHelpers.normalizeAddEventHandlerOptions(
+ options
+ );
+
+ if (callback === null) {
+ return;
+ }
+
+ const listeners = this_[domTypes.eventTargetListeners];
+
+ if (!hasOwnProperty(listeners, type)) {
+ listeners[type] = [];
+ }
+
+ for (let i = 0; i < listeners[type].length; ++i) {
+ const listener = listeners[type][i];
+ if (
+ ((typeof listener.options === "boolean" &&
+ listener.options === normalizedOptions.capture) ||
+ (typeof listener.options === "object" &&
+ listener.options.capture === normalizedOptions.capture)) &&
+ listener.callback === callback
+ ) {
+ return;
+ }
+ }
+
+ listeners[type].push({
+ callback,
+ options: normalizedOptions
+ });
+ }
+
+ public removeEventListener(
+ type: string,
+ callback: domTypes.EventListenerOrEventListenerObject | null,
+ options?: domTypes.EventListenerOptions | boolean
+ ): void {
+ const this_ = this || globalThis;
+
+ requiredArguments("EventTarget.removeEventListener", arguments.length, 2);
+ const listeners = this_[domTypes.eventTargetListeners];
+ if (hasOwnProperty(listeners, type) && callback !== null) {
+ listeners[type] = listeners[type].filter(
+ (listener): boolean => listener.callback !== callback
+ );
+ }
+
+ const normalizedOptions: domTypes.EventListenerOptions = eventTargetHelpers.normalizeEventHandlerOptions(
+ options
+ );
+
+ if (callback === null) {
+ // Optimization, not in the spec.
+ return;
+ }
+
+ if (!listeners[type]) {
+ return;
+ }
+
+ for (let i = 0; i < listeners[type].length; ++i) {
+ const listener = listeners[type][i];
+
+ if (
+ ((typeof listener.options === "boolean" &&
+ listener.options === normalizedOptions.capture) ||
+ (typeof listener.options === "object" &&
+ listener.options.capture === normalizedOptions.capture)) &&
+ listener.callback === callback
+ ) {
+ listeners[type].splice(i, 1);
+ break;
+ }
+ }
+ }
+
+ public dispatchEvent(event: domTypes.Event): boolean {
+ const this_ = this || globalThis;
+
+ requiredArguments("EventTarget.dispatchEvent", arguments.length, 1);
+ const listeners = this_[domTypes.eventTargetListeners];
+ if (!hasOwnProperty(listeners, event.type)) {
+ return true;
+ }
+
+ if (event.dispatched || !event.initialized) {
+ // TODO(bartlomieju): very likely that different error
+ // should be thrown here (DOMException?)
+ throw new TypeError("Tried to dispatch an uninitialized event");
+ }
+
+ if (event.eventPhase !== domTypes.EventPhase.NONE) {
+ // TODO(bartlomieju): very likely that different error
+ // should be thrown here (DOMException?)
+ throw new TypeError("Tried to dispatch a dispatching event");
+ }
+
+ return eventTargetHelpers.dispatch(this_, event);
+ }
+
+ get [Symbol.toStringTag](): string {
+ return "EventTarget";
+ }
+}
+
// Internal helpers implementing the DOM event dispatch machinery.
// Kept as a plain object so EventTarget methods and subclasses can share them
// without exposing anything on the public EventTarget surface.
const eventTargetHelpers = {
  // https://dom.spec.whatwg.org/#concept-event-dispatch
  // Builds the propagation path for eventImpl rooted at targetImpl, runs the
  // capture phase, then the target/bubble phases, and finally resets the
  // event's dispatch state. Returns false iff the event was canceled.
  dispatch(
    targetImpl: EventTarget,
    eventImpl: domTypes.Event,
    targetOverride?: domTypes.EventTarget
  ): boolean {
    let clearTargets = false;
    let activationTarget = null;

    eventImpl.dispatched = true;

    targetOverride = targetOverride || targetImpl;
    let relatedTarget = retarget(eventImpl.relatedTarget, targetImpl);

    if (
      targetImpl !== relatedTarget ||
      targetImpl === eventImpl.relatedTarget
    ) {
      const touchTargets: domTypes.EventTarget[] = [];

      eventTargetHelpers.appendToEventPath(
        eventImpl,
        targetImpl,
        targetOverride,
        relatedTarget,
        touchTargets,
        false
      );

      const isActivationEvent = eventImpl.type === "click";

      if (isActivationEvent && targetImpl[eventTargetHasActivationBehavior]) {
        activationTarget = targetImpl;
      }

      let slotInClosedTree = false;
      let slotable =
        isSlotable(targetImpl) && targetImpl[eventTargetAssignedSlot]
          ? targetImpl
          : null;
      let parent = getEventTargetParent(targetImpl, eventImpl);

      // Populate event path
      // https://dom.spec.whatwg.org/#event-path
      // NOTE(review): for plain EventTargets getEventTargetParent always
      // returns null, so this loop only runs for node-like subclasses.
      while (parent !== null) {
        if (slotable !== null) {
          slotable = null;

          const parentRoot = getRoot(parent);
          if (
            isShadowRoot(parentRoot) &&
            parentRoot &&
            parentRoot[domTypes.eventTargetMode] === "closed"
          ) {
            slotInClosedTree = true;
          }
        }

        relatedTarget = retarget(eventImpl.relatedTarget, parent);

        if (
          isNode(parent) &&
          isShadowInclusiveAncestor(getRoot(targetImpl), parent)
        ) {
          eventTargetHelpers.appendToEventPath(
            eventImpl,
            parent,
            null,
            relatedTarget,
            touchTargets,
            slotInClosedTree
          );
        } else if (parent === relatedTarget) {
          parent = null;
        } else {
          targetImpl = parent;

          if (
            isActivationEvent &&
            activationTarget === null &&
            targetImpl[eventTargetHasActivationBehavior]
          ) {
            activationTarget = targetImpl;
          }

          eventTargetHelpers.appendToEventPath(
            eventImpl,
            parent,
            targetImpl,
            relatedTarget,
            touchTargets,
            slotInClosedTree
          );
        }

        if (parent !== null) {
          parent = getEventTargetParent(parent, eventImpl);
        }

        slotInClosedTree = false;
      }

      // Find the last tuple carrying a (non-null) target; it decides whether
      // target/relatedTarget must be cleared after dispatch.
      let clearTargetsTupleIndex = -1;
      for (
        let i = eventImpl.path.length - 1;
        i >= 0 && clearTargetsTupleIndex === -1;
        i--
      ) {
        if (eventImpl.path[i].target !== null) {
          clearTargetsTupleIndex = i;
        }
      }
      // NOTE(review): assumes at least one tuple has a non-null target
      // (the first appendToEventPath above passes targetOverride); if none
      // did, path[-1] would be undefined here — confirm.
      const clearTargetsTuple = eventImpl.path[clearTargetsTupleIndex];

      clearTargets =
        (isNode(clearTargetsTuple.target) &&
          isShadowRoot(getRoot(clearTargetsTuple.target))) ||
        (isNode(clearTargetsTuple.relatedTarget) &&
          isShadowRoot(getRoot(clearTargetsTuple.relatedTarget)));

      eventImpl.eventPhase = domTypes.EventPhase.CAPTURING_PHASE;

      // Capture phase: walk the path outermost-to-innermost, invoking only
      // tuples without a target (ancestors).
      for (let i = eventImpl.path.length - 1; i >= 0; --i) {
        const tuple = eventImpl.path[i];

        if (tuple.target === null) {
          eventTargetHelpers.invokeEventListeners(targetImpl, tuple, eventImpl);
        }
      }

      // Target and bubble phases: walk innermost-to-outermost.
      for (let i = 0; i < eventImpl.path.length; i++) {
        const tuple = eventImpl.path[i];

        if (tuple.target !== null) {
          eventImpl.eventPhase = domTypes.EventPhase.AT_TARGET;
        } else {
          eventImpl.eventPhase = domTypes.EventPhase.BUBBLING_PHASE;
        }

        if (
          (eventImpl.eventPhase === domTypes.EventPhase.BUBBLING_PHASE &&
            eventImpl.bubbles) ||
          eventImpl.eventPhase === domTypes.EventPhase.AT_TARGET
        ) {
          eventTargetHelpers.invokeEventListeners(targetImpl, tuple, eventImpl);
        }
      }
    }

    // Reset dispatch state so the event object can be dispatched again.
    eventImpl.eventPhase = domTypes.EventPhase.NONE;

    eventImpl.currentTarget = null;
    eventImpl.path = [];
    eventImpl.dispatched = false;
    eventImpl.cancelBubble = false;
    eventImpl.cancelBubbleImmediately = false;

    if (clearTargets) {
      eventImpl.target = null;
      eventImpl.relatedTarget = null;
    }

    // TODO: invoke activation targets if HTML nodes will be implemented
    // if (activationTarget !== null) {
    //   if (!eventImpl.defaultPrevented) {
    //     activationTarget._activationBehavior();
    //   }
    // }

    return !eventImpl.defaultPrevented;
  },

  // https://dom.spec.whatwg.org/#concept-event-listener-invoke
  // Sets event.target/relatedTarget/currentTarget for this tuple, then runs
  // the listeners registered on the tuple's item.
  invokeEventListeners(
    targetImpl: EventTarget,
    tuple: domTypes.EventPath,
    eventImpl: domTypes.Event
  ): void {
    // event.target is the closest tuple (at or below this one) carrying a
    // non-null target.
    const tupleIndex = eventImpl.path.indexOf(tuple);
    for (let i = tupleIndex; i >= 0; i--) {
      const t = eventImpl.path[i];
      if (t.target) {
        eventImpl.target = t.target;
        break;
      }
    }

    eventImpl.relatedTarget = tuple.relatedTarget;

    if (eventImpl.cancelBubble) {
      return;
    }

    eventImpl.currentTarget = tuple.item;

    eventTargetHelpers.innerInvokeEventListeners(
      targetImpl,
      eventImpl,
      tuple.item[domTypes.eventTargetListeners]
    );
  },

  // https://dom.spec.whatwg.org/#concept-event-listener-inner-invoke
  // Runs the listeners of eventImpl.type from targetListeners, honoring
  // capture/once/passive options and stopImmediatePropagation. Returns
  // whether any listener was found.
  innerInvokeEventListeners(
    targetImpl: EventTarget,
    eventImpl: domTypes.Event,
    targetListeners: { [type in string]: domTypes.EventTargetListener[] }
  ): boolean {
    let found = false;

    const { type } = eventImpl;

    if (!targetListeners || !targetListeners[type]) {
      return found;
    }

    // Copy event listeners before iterating since the list can be modified during the iteration.
    const handlers = targetListeners[type].slice();

    for (let i = 0; i < handlers.length; i++) {
      const listener = handlers[i];

      let capture, once, passive;
      if (typeof listener.options === "boolean") {
        capture = listener.options;
        once = false;
        passive = false;
      } else {
        capture = listener.options.capture;
        once = listener.options.once;
        passive = listener.options.passive;
      }

      // Check if the event listener has been removed since the listeners has been cloned.
      if (!targetListeners[type].includes(listener)) {
        continue;
      }

      found = true;

      // Capture-only listeners do not run during bubbling, and vice versa.
      if (
        (eventImpl.eventPhase === domTypes.EventPhase.CAPTURING_PHASE &&
          !capture) ||
        (eventImpl.eventPhase === domTypes.EventPhase.BUBBLING_PHASE && capture)
      ) {
        continue;
      }

      // `once` listeners are removed before invocation so a re-entrant
      // dispatch cannot run them twice.
      if (once) {
        targetListeners[type].splice(
          targetListeners[type].indexOf(listener),
          1
        );
      }

      // While the flag is set, Event.preventDefault() is a no-op.
      if (passive) {
        eventImpl.inPassiveListener = true;
      }

      try {
        if (typeof listener.callback === "object") {
          if (typeof listener.callback.handleEvent === "function") {
            listener.callback.handleEvent(eventImpl);
          }
        } else {
          listener.callback.call(eventImpl.currentTarget, eventImpl);
        }
      } catch (error) {
        // TODO(bartlomieju): very likely that different error
        // should be thrown here (DOMException?)
        throw new Error(error.message);
      }

      eventImpl.inPassiveListener = false;

      if (eventImpl.cancelBubbleImmediately) {
        return found;
      }
    }

    return found;
  },

  // Converts the boolean/undefined forms of addEventListener's third
  // argument into a full AddEventListenerOptions record.
  normalizeAddEventHandlerOptions(
    options: boolean | domTypes.AddEventListenerOptions | undefined
  ): domTypes.AddEventListenerOptions {
    if (typeof options === "boolean" || typeof options === "undefined") {
      const returnValue: domTypes.AddEventListenerOptions = {
        capture: Boolean(options),
        once: false,
        passive: false
      };

      return returnValue;
    } else {
      return options;
    }
  },

  // Converts the boolean/undefined forms of removeEventListener's third
  // argument into an EventListenerOptions record.
  normalizeEventHandlerOptions(
    options: boolean | domTypes.EventListenerOptions | undefined
  ): domTypes.EventListenerOptions {
    if (typeof options === "boolean" || typeof options === "undefined") {
      const returnValue: domTypes.EventListenerOptions = {
        capture: Boolean(options)
      };

      return returnValue;
    } else {
      return options;
    }
  },

  // https://dom.spec.whatwg.org/#concept-event-path-append
  // Appends one tuple describing `target` to the event's propagation path.
  appendToEventPath(
    eventImpl: domTypes.Event,
    target: domTypes.EventTarget,
    targetOverride: domTypes.EventTarget | null,
    relatedTarget: domTypes.EventTarget | null,
    touchTargets: domTypes.EventTarget[],
    slotInClosedTree: boolean
  ): void {
    const itemInShadowTree = isNode(target) && isShadowRoot(getRoot(target));
    const rootOfClosedTree =
      isShadowRoot(target) && target[domTypes.eventTargetMode] === "closed";

    eventImpl.path.push({
      item: target,
      itemInShadowTree,
      target: targetOverride,
      relatedTarget,
      touchTargetList: touchTargets,
      rootOfClosedTree,
      slotInClosedTree
    });
  }
};
+
+/** Built-in objects providing `get` methods for our
+ * interceptable JavaScript operations.
+ */
+Reflect.defineProperty(EventTarget.prototype, "addEventListener", {
+ enumerable: true
+});
+Reflect.defineProperty(EventTarget.prototype, "removeEventListener", {
+ enumerable: true
+});
+Reflect.defineProperty(EventTarget.prototype, "dispatchEvent", {
+ enumerable: true
+});
diff --git a/cli/js/web/fetch.ts b/cli/js/web/fetch.ts
new file mode 100644
index 000000000..17cd43129
--- /dev/null
+++ b/cli/js/web/fetch.ts
@@ -0,0 +1,584 @@
+// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
+import {
+ assert,
+ createResolvable,
+ notImplemented,
+ isTypedArray
+} from "../util.ts";
+import * as domTypes from "./dom_types.ts";
+import { TextDecoder, TextEncoder } from "./text_encoding.ts";
+import { DenoBlob, bytesSymbol as blobBytesSymbol } from "./blob.ts";
+import { Headers } from "./headers.ts";
+import * as io from "../io.ts";
+import { read, close } from "../files.ts";
+import { Buffer } from "../buffer.ts";
+import { FormData } from "./form_data.ts";
+import { URL } from "./url.ts";
+import { URLSearchParams } from "./url_search_params.ts";
+import { sendAsync } from "../dispatch_json.ts";
+
+function getHeaderValueParams(value: string): Map<string, string> {
+ const params = new Map();
+ // Forced to do so for some Map constructor param mismatch
+ value
+ .split(";")
+ .slice(1)
+ .map((s): string[] => s.trim().split("="))
+ .filter((arr): boolean => arr.length > 1)
+ .map(([k, v]): [string, string] => [k, v.replace(/^"([^"]*)"$/, "$1")])
+ .forEach(([k, v]): Map<string, string> => params.set(k, v));
+ return params;
+}
+
+function hasHeaderValueOf(s: string, value: string): boolean {
+ return new RegExp(`^${value}[\t\s]*;?`).test(s);
+}
+
/** Response body backed by a runtime resource id (`rid`). Bytes are read
 * from the resource and buffered once on first consumption; all body-mixin
 * accessors (arrayBuffer/blob/formData/json/text) then serve from the cache.
 * Also implements just enough of ReadableStream/ReadCloser to be consumed
 * by `Buffer.readFrom` and async iteration. */
class Body implements domTypes.Body, domTypes.ReadableStream, io.ReadCloser {
  // Set once read() has been called; backs the `bodyUsed` getter.
  private _bodyUsed = false;
  // Single in-flight buffering operation, shared by concurrent callers.
  private _bodyPromise: null | Promise<ArrayBuffer> = null;
  // Fully-buffered body bytes once _bodyBuffer() has completed.
  private _data: ArrayBuffer | null = null;
  readonly locked: boolean = false; // TODO
  readonly body: null | Body = this;

  constructor(private rid: number, readonly contentType: string) {}

  // Reads the whole body from the resource into _data, closing the
  // resource afterwards even if reading fails.
  private async _bodyBuffer(): Promise<ArrayBuffer> {
    assert(this._bodyPromise == null);
    const buf = new Buffer();
    try {
      const nread = await buf.readFrom(this);
      const ui8 = buf.bytes();
      assert(ui8.byteLength === nread);
      // Copy out exactly the bytes read; the Buffer may own a larger
      // backing ArrayBuffer.
      this._data = ui8.buffer.slice(
        ui8.byteOffset,
        ui8.byteOffset + nread
      ) as ArrayBuffer;
      assert(this._data.byteLength === nread);
    } finally {
      this.close();
    }

    return this._data;
  }

  async arrayBuffer(): Promise<ArrayBuffer> {
    // If we've already bufferred the response, just return it.
    if (this._data != null) {
      return this._data;
    }

    // If there is no _bodyPromise yet, start it.
    if (this._bodyPromise == null) {
      this._bodyPromise = this._bodyBuffer();
    }

    return this._bodyPromise;
  }

  async blob(): Promise<domTypes.Blob> {
    const arrayBuffer = await this.arrayBuffer();
    return new DenoBlob([arrayBuffer], {
      type: this.contentType
    });
  }

  // ref: https://fetch.spec.whatwg.org/#body-mixin
  // Parses the body as multipart/form-data (hand-rolled RFC 2046 parser) or
  // application/x-www-form-urlencoded; any other content type is a TypeError.
  async formData(): Promise<domTypes.FormData> {
    const formData = new FormData();
    const enc = new TextEncoder();
    if (hasHeaderValueOf(this.contentType, "multipart/form-data")) {
      const params = getHeaderValueParams(this.contentType);
      if (!params.has("boundary")) {
        // TypeError is required by spec
        throw new TypeError("multipart/form-data must provide a boundary");
      }
      // ref: https://tools.ietf.org/html/rfc2046#section-5.1
      const boundary = params.get("boundary")!;
      const dashBoundary = `--${boundary}`;
      const delimiter = `\r\n${dashBoundary}`;
      const closeDelimiter = `${delimiter}--`;

      const body = await this.text();
      let bodyParts: string[];
      const bodyEpilogueSplit = body.split(closeDelimiter);
      if (bodyEpilogueSplit.length < 2) {
        // No closing delimiter found: treat the body as having no parts.
        bodyParts = [];
      } else {
        // discard epilogue
        const bodyEpilogueTrimmed = bodyEpilogueSplit[0];
        // first boundary treated special due to optional prefixed \r\n
        const firstBoundaryIndex = bodyEpilogueTrimmed.indexOf(dashBoundary);
        if (firstBoundaryIndex < 0) {
          throw new TypeError("Invalid boundary");
        }
        const bodyPreambleTrimmed = bodyEpilogueTrimmed
          .slice(firstBoundaryIndex + dashBoundary.length)
          .replace(/^[\s\r\n\t]+/, ""); // remove transport-padding CRLF
        // trimStart might not be available
        // Be careful! body-part allows trailing \r\n!
        // (as long as it is not part of `delimiter`)
        bodyParts = bodyPreambleTrimmed
          .split(delimiter)
          .map((s): string => s.replace(/^[\s\r\n\t]+/, ""));
        // TODO: LWSP definition is actually trickier,
        // but should be fine in our case since without headers
        // we should just discard the part
      }
      for (const bodyPart of bodyParts) {
        const headers = new Headers();
        // Part headers are separated from the part content by a blank line.
        const headerOctetSeperatorIndex = bodyPart.indexOf("\r\n\r\n");
        if (headerOctetSeperatorIndex < 0) {
          continue; // Skip unknown part
        }
        const headerText = bodyPart.slice(0, headerOctetSeperatorIndex);
        const octets = bodyPart.slice(headerOctetSeperatorIndex + 4);

        // TODO: use textproto.readMIMEHeader from deno_std
        const rawHeaders = headerText.split("\r\n");
        for (const rawHeader of rawHeaders) {
          const sepIndex = rawHeader.indexOf(":");
          if (sepIndex < 0) {
            continue; // Skip this header
          }
          const key = rawHeader.slice(0, sepIndex);
          const value = rawHeader.slice(sepIndex + 1);
          headers.set(key, value);
        }
        if (!headers.has("content-disposition")) {
          continue; // Skip unknown part
        }
        // Content-Transfer-Encoding Deprecated
        const contentDisposition = headers.get("content-disposition")!;
        const partContentType = headers.get("content-type") || "text/plain";
        // TODO: custom charset encoding (needs TextEncoder support)
        // const contentTypeCharset =
        //   getHeaderValueParams(partContentType).get("charset") || "";
        if (!hasHeaderValueOf(contentDisposition, "form-data")) {
          continue; // Skip, might not be form-data
        }
        const dispositionParams = getHeaderValueParams(contentDisposition);
        if (!dispositionParams.has("name")) {
          continue; // Skip, unknown name
        }
        const dispositionName = dispositionParams.get("name")!;
        if (dispositionParams.has("filename")) {
          // File entry: wrap the octets in a Blob carrying the part's type.
          const filename = dispositionParams.get("filename")!;
          const blob = new DenoBlob([enc.encode(octets)], {
            type: partContentType
          });
          // TODO: based on spec
          // https://xhr.spec.whatwg.org/#dom-formdata-append
          // https://xhr.spec.whatwg.org/#create-an-entry
          // Currently it does not mention how I could pass content-type
          // to the internally created file object...
          formData.append(dispositionName, blob, filename);
        } else {
          formData.append(dispositionName, octets);
        }
      }
      return formData;
    } else if (
      hasHeaderValueOf(this.contentType, "application/x-www-form-urlencoded")
    ) {
      // From https://github.com/github/fetch/blob/master/fetch.js
      // Copyright (c) 2014-2016 GitHub, Inc. MIT License
      const body = await this.text();
      try {
        body
          .trim()
          .split("&")
          .forEach((bytes): void => {
            if (bytes) {
              const split = bytes.split("=");
              // "+" encodes a space in urlencoded form data.
              const name = split.shift()!.replace(/\+/g, " ");
              const value = split.join("=").replace(/\+/g, " ");
              formData.append(
                decodeURIComponent(name),
                decodeURIComponent(value)
              );
            }
          });
      } catch (e) {
        throw new TypeError("Invalid form urlencoded format");
      }
      return formData;
    } else {
      throw new TypeError("Invalid form data");
    }
  }

  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  async json(): Promise<any> {
    const text = await this.text();
    return JSON.parse(text);
  }

  async text(): Promise<string> {
    const ab = await this.arrayBuffer();
    const decoder = new TextDecoder("utf-8");
    return decoder.decode(ab);
  }

  // io.Reader: reads raw bytes from the underlying resource and marks the
  // body as used.
  read(p: Uint8Array): Promise<number | io.EOF> {
    this._bodyUsed = true;
    return read(this.rid, p);
  }

  close(): void {
    close(this.rid);
  }

  async cancel(): Promise<void> {
    return notImplemented();
  }

  getReader(): domTypes.ReadableStreamReader {
    return notImplemented();
  }

  tee(): [domTypes.ReadableStream, domTypes.ReadableStream] {
    return notImplemented();
  }

  [Symbol.asyncIterator](): AsyncIterableIterator<Uint8Array> {
    return io.toAsyncIterator(this);
  }

  get bodyUsed(): boolean {
    return this._bodyUsed;
  }
}
+
+export class Response implements domTypes.Response {
+ readonly type: domTypes.ResponseType;
+ readonly redirected: boolean;
+ headers: domTypes.Headers;
+ readonly trailer: Promise<domTypes.Headers>;
+ readonly body: null | Body;
+
+ // Builds a Response, applying the Fetch spec's "filtered response"
+ // rules for the given type_ (error/basic/cors/opaque/opaqueredirect).
+ // Note the readonly parameter properties (url/status/statusText) are
+ // deliberately reassigned below for filtered types.
+ constructor(
+ readonly url: string,
+ readonly status: number,
+ readonly statusText: string,
+ headersList: Array<[string, string]>,
+ rid: number,
+ redirected_: boolean,
+ readonly type_: null | domTypes.ResponseType = "default",
+ body_: null | Body = null
+ ) {
+ // NOTE(review): trailer is created but never resolved in this file —
+ // presumably resolved elsewhere once trailers arrive; confirm.
+ this.trailer = createResolvable();
+ this.headers = new Headers(headersList);
+ const contentType = this.headers.get("content-type") || "";
+
+ // Prefer a caller-supplied body (e.g. from clone()); otherwise wrap
+ // the op resource id so the body can be read lazily.
+ if (body_ == null) {
+ this.body = new Body(rid, contentType);
+ } else {
+ this.body = body_;
+ }
+
+ if (type_ == null) {
+ this.type = "default";
+ } else {
+ this.type = type_;
+ if (type_ == "error") {
+ // spec: https://fetch.spec.whatwg.org/#concept-network-error
+ this.status = 0;
+ this.statusText = "";
+ this.headers = new Headers();
+ this.body = null;
+ /* spec for other Response types:
+ https://fetch.spec.whatwg.org/#concept-filtered-response-basic
+ Please note that type "basic" is not the same thing as "default".*/
+ } else if (type_ == "basic") {
+ for (const h of this.headers) {
+ /* Forbidden Response-Header Names:
+ https://fetch.spec.whatwg.org/#forbidden-response-header-name */
+ if (["set-cookie", "set-cookie2"].includes(h[0].toLowerCase())) {
+ this.headers.delete(h[0]);
+ }
+ }
+ } else if (type_ == "cors") {
+ /* CORS-safelisted Response-Header Names:
+ https://fetch.spec.whatwg.org/#cors-safelisted-response-header-name */
+ const allowedHeaders = [
+ "Cache-Control",
+ "Content-Language",
+ "Content-Length",
+ "Content-Type",
+ "Expires",
+ "Last-Modified",
+ "Pragma"
+ ].map((c: string) => c.toLowerCase());
+ for (const h of this.headers) {
+ /* Technically this is still not standards compliant because we are
+ supposed to allow headers allowed in the
+ 'Access-Control-Expose-Headers' header in the 'internal response'
+ However, this implementation of response doesn't seem to have an
+ easy way to access the internal response, so we ignore that
+ header.
+ TODO(serverhiccups): change how internal responses are handled
+ so we can do this properly. */
+ if (!allowedHeaders.includes(h[0].toLowerCase())) {
+ this.headers.delete(h[0]);
+ }
+ }
+ /* TODO(serverhiccups): Once I fix the 'internal response' thing,
+ these actually need to treat the internal response differently */
+ } else if (type_ == "opaque" || type_ == "opaqueredirect") {
+ this.url = "";
+ this.status = 0;
+ this.statusText = "";
+ this.headers = new Headers();
+ this.body = null;
+ }
+ }
+
+ this.redirected = redirected_;
+ }
+
+ // Despite its name this returns true when the body must NOT be exposed:
+ // filtered error/opaque/opaqueredirect responses, or a missing body.
+ // All body readers below reject when this is true.
+ private bodyViewable(): boolean {
+ if (
+ this.type == "error" ||
+ this.type == "opaque" ||
+ this.type == "opaqueredirect" ||
+ this.body == undefined
+ )
+ return true;
+ return false;
+ }
+
+ // Reads the full body as an ArrayBuffer; rejects for filtered/null bodies.
+ async arrayBuffer(): Promise<ArrayBuffer> {
+ /* You have to do the null check here and not in the function because
+ * otherwise TS complains about this.body potentially being null */
+ if (this.bodyViewable() || this.body == null) {
+ return Promise.reject(new Error("Response body is null"));
+ }
+ return this.body.arrayBuffer();
+ }
+
+ // Reads the full body as a Blob; rejects for filtered/null bodies.
+ async blob(): Promise<domTypes.Blob> {
+ if (this.bodyViewable() || this.body == null) {
+ return Promise.reject(new Error("Response body is null"));
+ }
+ return this.body.blob();
+ }
+
+ // Parses the body as FormData; rejects for filtered/null bodies.
+ async formData(): Promise<domTypes.FormData> {
+ if (this.bodyViewable() || this.body == null) {
+ return Promise.reject(new Error("Response body is null"));
+ }
+ return this.body.formData();
+ }
+
+ // Parses the body as JSON; rejects for filtered/null bodies.
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ async json(): Promise<any> {
+ if (this.bodyViewable() || this.body == null) {
+ return Promise.reject(new Error("Response body is null"));
+ }
+ return this.body.json();
+ }
+
+ // Reads the body as text; rejects for filtered/null bodies.
+ async text(): Promise<string> {
+ if (this.bodyViewable() || this.body == null) {
+ return Promise.reject(new Error("Response body is null"));
+ }
+ return this.body.text();
+ }
+
+ // True for 2xx statuses, per https://fetch.spec.whatwg.org/#dom-response-ok
+ get ok(): boolean {
+ return 200 <= this.status && this.status < 300;
+ }
+
+ // Delegates to the body's consumed flag; a null body is never "used".
+ get bodyUsed(): boolean {
+ if (this.body === null) return false;
+ return this.body.bodyUsed;
+ }
+
+ // Returns a copy of this response. Throws if the body was already
+ // consumed. NOTE(review): the body object itself is shared with the
+ // clone (rid -1 disables the resource path) rather than tee'd —
+ // consuming one side appears to consume both; confirm intended.
+ clone(): domTypes.Response {
+ if (this.bodyUsed) {
+ throw new TypeError(
+ "Failed to execute 'clone' on 'Response': Response body is already used"
+ );
+ }
+
+ // Snapshot headers into a plain list so the clone gets its own Headers.
+ const iterators = this.headers.entries();
+ const headersList: Array<[string, string]> = [];
+ for (const header of iterators) {
+ headersList.push(header);
+ }
+
+ return new Response(
+ this.url,
+ this.status,
+ this.statusText,
+ headersList,
+ -1,
+ this.redirected,
+ this.type,
+ this.body
+ );
+ }
+
+ // Creates a redirect response with a Location header.
+ // NOTE(review): per the Fetch spec Response.redirect() is a *static*
+ // method; here it is an instance method — confirm callers before
+ // changing, as that would alter the public interface.
+ redirect(url: URL | string, status: number): domTypes.Response {
+ if (![301, 302, 303, 307, 308].includes(status)) {
+ throw new RangeError(
+ "The redirection status must be one of 301, 302, 303, 307 and 308."
+ );
+ }
+ return new Response(
+ "",
+ status,
+ "",
+ [["Location", typeof url === "string" ? url : url.toString()]],
+ -1,
+ false,
+ "default",
+ null
+ );
+ }
+}
+
+// Shape of the JSON payload returned by the "op_fetch" op: the resource id
+// for streaming the body plus the status line and raw header pairs.
+interface FetchResponse {
+ bodyRid: number;
+ status: number;
+ statusText: string;
+ headers: Array<[string, string]>;
+}
+
+// Dispatches a single HTTP request through the "op_fetch" op.
+// Headers are flattened to [name, value] pairs; the body (if any) is
+// passed as a zero-copy Uint8Array view over the caller's buffer.
+async function sendFetchReq(
+ url: string,
+ method: string | null,
+ headers: domTypes.Headers | null,
+ body: ArrayBufferView | undefined
+): Promise<FetchResponse> {
+ let headerArray: Array<[string, string]> = [];
+ if (headers) {
+ headerArray = Array.from(headers.entries());
+ }
+
+ // View, not copy: respects the caller's byteOffset/byteLength window.
+ let zeroCopy = undefined;
+ if (body) {
+ zeroCopy = new Uint8Array(body.buffer, body.byteOffset, body.byteLength);
+ }
+
+ const args = {
+ method,
+ url,
+ headers: headerArray
+ };
+
+ return (await sendAsync("op_fetch", args, zeroCopy)) as FetchResponse;
+}
+
+/** Fetch a resource from the network. */
+export async function fetch(
+ input: domTypes.Request | URL | string,
+ init?: domTypes.RequestInit
+): Promise<Response> {
+ let url: string;
+ let method: string | null = null;
+ let headers: domTypes.Headers | null = null;
+ let body: ArrayBufferView | undefined;
+ let redirected = false;
+ let remRedirectCount = 20; // TODO: use a better way to handle
+
+ if (typeof input === "string" || input instanceof URL) {
+ url = typeof input === "string" ? (input as string) : (input as URL).href;
+ if (init != null) {
+ method = init.method || null;
+ if (init.headers) {
+ headers =
+ init.headers instanceof Headers
+ ? init.headers
+ : new Headers(init.headers);
+ } else {
+ headers = null;
+ }
+
+ // ref: https://fetch.spec.whatwg.org/#body-mixin
+ // Body should have been a mixin
+ // but we are treating it as a separate class
+ if (init.body) {
+ if (!headers) {
+ headers = new Headers();
+ }
+ let contentType = "";
+ if (typeof init.body === "string") {
+ body = new TextEncoder().encode(init.body);
+ contentType = "text/plain;charset=UTF-8";
+ } else if (isTypedArray(init.body)) {
+ body = init.body;
+ } else if (init.body instanceof URLSearchParams) {
+ body = new TextEncoder().encode(init.body.toString());
+ contentType = "application/x-www-form-urlencoded;charset=UTF-8";
+ } else if (init.body instanceof DenoBlob) {
+ body = init.body[blobBytesSymbol];
+ contentType = init.body.type;
+ } else {
+ // TODO: FormData, ReadableStream
+ notImplemented();
+ }
+ if (contentType && !headers.has("content-type")) {
+ headers.set("content-type", contentType);
+ }
+ }
+ }
+ } else {
+ url = input.url;
+ method = input.method;
+ headers = input.headers;
+
+ //@ts-ignore
+ if (input._bodySource) {
+ body = new DataView(await input.arrayBuffer());
+ }
+ }
+
+ while (remRedirectCount) {
+ const fetchResponse = await sendFetchReq(url, method, headers, body);
+
+ const response = new Response(
+ url,
+ fetchResponse.status,
+ fetchResponse.statusText,
+ fetchResponse.headers,
+ fetchResponse.bodyRid,
+ redirected
+ );
+ if ([301, 302, 303, 307, 308].includes(response.status)) {
+ // We're in a redirect status
+ switch ((init && init.redirect) || "follow") {
+ case "error":
+ /* I suspect that deno will probably crash if you try to use that
+ rid, which suggests to me that Response needs to be refactored */
+ return new Response("", 0, "", [], -1, false, "error", null);
+ case "manual":
+ return new Response("", 0, "", [], -1, false, "opaqueredirect", null);
+ case "follow":
+ default:
+ let redirectUrl = response.headers.get("Location");
+ if (redirectUrl == null) {
+ return response; // Unspecified
+ }
+ if (
+ !redirectUrl.startsWith("http://") &&
+ !redirectUrl.startsWith("https://")
+ ) {
+ redirectUrl =
+ url.split("//")[0] +
+ "//" +
+ url.split("//")[1].split("/")[0] +
+ redirectUrl; // TODO: handle relative redirection more gracefully
+ }
+ url = redirectUrl;
+ redirected = true;
+ remRedirectCount--;
+ }
+ } else {
+ return response;
+ }
+ }
+ // Return a network error due to too many redirections
+ throw notImplemented();
+}
diff --git a/cli/js/web/form_data.ts b/cli/js/web/form_data.ts
new file mode 100644
index 000000000..9c0590c32
--- /dev/null
+++ b/cli/js/web/form_data.ts
@@ -0,0 +1,149 @@
+// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
+import * as domTypes from "./dom_types.ts";
+import * as blob from "./blob.ts";
+import * as domFile from "./dom_file.ts";
+import { DomIterableMixin } from "./dom_iterable.ts";
+import { requiredArguments } from "../util.ts";
+
+const dataSymbol = Symbol("data");
+
+// Backing implementation for FormData (ref:
+// https://xhr.spec.whatwg.org/#interface-formdata). Entries are kept as an
+// ordered [name, value] list; iteration is added by DomIterableMixin below,
+// keyed on dataSymbol.
+class FormDataBase {
+ private [dataSymbol]: Array<[string, domTypes.FormDataEntryValue]> = [];
+
+ /** Appends a new value onto an existing key inside a `FormData`
+ * object, or adds the key if it does not already exist.
+ *
+ * formData.append('name', 'first');
+ * formData.append('name', 'second');
+ */
+ append(name: string, value: string): void;
+ append(name: string, value: blob.DenoBlob, filename?: string): void;
+ append(name: string, value: string | blob.DenoBlob, filename?: string): void {
+ requiredArguments("FormData.append", arguments.length, 2);
+ name = String(name);
+ // Blobs are wrapped in a File; the filename falls back to the field name.
+ if (value instanceof blob.DenoBlob) {
+ const dfile = new domFile.DomFileImpl([value], filename || name);
+ this[dataSymbol].push([name, dfile]);
+ } else {
+ this[dataSymbol].push([name, String(value)]);
+ }
+ }
+
+ /** Deletes a key/value pair from a `FormData` object.
+ *
+ * formData.delete('name');
+ */
+ delete(name: string): void {
+ requiredArguments("FormData.delete", arguments.length, 1);
+ name = String(name);
+ // In-place removal of ALL matching entries; i only advances on keep.
+ let i = 0;
+ while (i < this[dataSymbol].length) {
+ if (this[dataSymbol][i][0] === name) {
+ this[dataSymbol].splice(i, 1);
+ } else {
+ i++;
+ }
+ }
+ }
+
+ /** Returns an array of all the values associated with a given key
+ * from within a `FormData`.
+ *
+ * formData.getAll('name');
+ */
+ getAll(name: string): domTypes.FormDataEntryValue[] {
+ requiredArguments("FormData.getAll", arguments.length, 1);
+ name = String(name);
+ const values = [];
+ for (const entry of this[dataSymbol]) {
+ if (entry[0] === name) {
+ values.push(entry[1]);
+ }
+ }
+
+ return values;
+ }
+
+ /** Returns the first value associated with a given key from within a
+ * `FormData` object.
+ *
+ * formData.get('name');
+ */
+ get(name: string): domTypes.FormDataEntryValue | null {
+ requiredArguments("FormData.get", arguments.length, 1);
+ name = String(name);
+ for (const entry of this[dataSymbol]) {
+ if (entry[0] === name) {
+ return entry[1];
+ }
+ }
+
+ return null;
+ }
+
+ /** Returns a boolean stating whether a `FormData` object contains a
+ * certain key/value pair.
+ *
+ * formData.has('name');
+ */
+ has(name: string): boolean {
+ requiredArguments("FormData.has", arguments.length, 1);
+ name = String(name);
+ return this[dataSymbol].some((entry): boolean => entry[0] === name);
+ }
+
+ /** Sets a new value for an existing key inside a `FormData` object, or
+ * adds the key/value if it does not already exist.
+ * ref: https://xhr.spec.whatwg.org/#dom-formdata-set
+ *
+ * formData.set('name', 'value');
+ */
+ set(name: string, value: string): void;
+ set(name: string, value: blob.DenoBlob, filename?: string): void;
+ set(name: string, value: string | blob.DenoBlob, filename?: string): void {
+ requiredArguments("FormData.set", arguments.length, 2);
+ name = String(name);
+
+ // If there are any entries in the context object’s entry list whose name
+ // is name, replace the first such entry with entry and remove the others
+ let found = false;
+ let i = 0;
+ while (i < this[dataSymbol].length) {
+ if (this[dataSymbol][i][0] === name) {
+ if (!found) {
+ if (value instanceof blob.DenoBlob) {
+ const dfile = new domFile.DomFileImpl([value], filename || name);
+ this[dataSymbol][i][1] = dfile;
+ } else {
+ this[dataSymbol][i][1] = String(value);
+ }
+ found = true;
+ } else {
+ // Duplicate of an already-replaced name: drop it and re-test
+ // the entry that slid into slot i (hence the continue).
+ this[dataSymbol].splice(i, 1);
+ continue;
+ }
+ }
+ i++;
+ }
+
+ // Otherwise, append entry to the context object’s entry list.
+ if (!found) {
+ if (value instanceof blob.DenoBlob) {
+ const dfile = new domFile.DomFileImpl([value], filename || name);
+ this[dataSymbol].push([name, dfile]);
+ } else {
+ this[dataSymbol].push([name, String(value)]);
+ }
+ }
+ }
+
+ get [Symbol.toStringTag](): string {
+ return "FormData";
+ }
+}
+
+// Public FormData: FormDataBase plus entries()/keys()/values()/forEach and
+// [Symbol.iterator], generated by DomIterableMixin over dataSymbol.
+export class FormData extends DomIterableMixin<
+ string,
+ domTypes.FormDataEntryValue,
+ typeof FormDataBase
+>(FormDataBase, dataSymbol) {}
diff --git a/cli/js/web/headers.ts b/cli/js/web/headers.ts
new file mode 100644
index 000000000..65d52cacd
--- /dev/null
+++ b/cli/js/web/headers.ts
@@ -0,0 +1,152 @@
+// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
+import * as domTypes from "./dom_types.ts";
+import { DomIterableMixin } from "./dom_iterable.ts";
+import { requiredArguments } from "../util.ts";
+import { customInspect } from "../console.ts";
+
+// From node-fetch
+// Copyright (c) 2016 David Frank. MIT License.
+const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
+const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/;
+
+// Type guard: true when value is this module's Headers implementation.
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function isHeaders(value: any): value is domTypes.Headers {
+ // eslint-disable-next-line @typescript-eslint/no-use-before-define
+ return value instanceof Headers;
+}
+
+const headerMap = Symbol("header map");
+
+// ref: https://fetch.spec.whatwg.org/#dom-headers
+class HeadersBase {
+ private [headerMap]: Map<string, string>;
+ // TODO: headerGuard? Investigate if it is needed
+ // node-fetch did not implement this but it is in the spec
+
+ private _normalizeParams(name: string, value?: string): string[] {
+ name = String(name).toLowerCase();
+ value = String(value).trim();
+ return [name, value];
+ }
+
+ // The following name/value validations are copied from
+ // https://github.com/bitinn/node-fetch/blob/master/src/headers.js
+ // Copyright (c) 2016 David Frank. MIT License.
+ private _validateName(name: string): void {
+ if (invalidTokenRegex.test(name) || name === "") {
+ throw new TypeError(`${name} is not a legal HTTP header name`);
+ }
+ }
+
+ private _validateValue(value: string): void {
+ if (invalidHeaderCharRegex.test(value)) {
+ throw new TypeError(`${value} is not a legal HTTP header value`);
+ }
+ }
+
+ constructor(init?: domTypes.HeadersInit) {
+ if (init === null) {
+ throw new TypeError(
+ "Failed to construct 'Headers'; The provided value was not valid"
+ );
+ } else if (isHeaders(init)) {
+ this[headerMap] = new Map(init);
+ } else {
+ this[headerMap] = new Map();
+ if (Array.isArray(init)) {
+ for (const tuple of init) {
+ // If header does not contain exactly two items,
+ // then throw a TypeError.
+ // ref: https://fetch.spec.whatwg.org/#concept-headers-fill
+ requiredArguments(
+ "Headers.constructor tuple array argument",
+ tuple.length,
+ 2
+ );
+
+ const [name, value] = this._normalizeParams(tuple[0], tuple[1]);
+ this._validateName(name);
+ this._validateValue(value);
+ const existingValue = this[headerMap].get(name);
+ this[headerMap].set(
+ name,
+ existingValue ? `${existingValue}, ${value}` : value
+ );
+ }
+ } else if (init) {
+ const names = Object.keys(init);
+ for (const rawName of names) {
+ const rawValue = init[rawName];
+ const [name, value] = this._normalizeParams(rawName, rawValue);
+ this._validateName(name);
+ this._validateValue(value);
+ this[headerMap].set(name, value);
+ }
+ }
+ }
+ }
+
+ [customInspect](): string {
+ let headerSize = this[headerMap].size;
+ let output = "";
+ this[headerMap].forEach((value, key) => {
+ const prefix = headerSize === this[headerMap].size ? " " : "";
+ const postfix = headerSize === 1 ? " " : ", ";
+ output = output + `${prefix}${key}: ${value}${postfix}`;
+ headerSize--;
+ });
+ return `Headers {${output}}`;
+ }
+
+ // ref: https://fetch.spec.whatwg.org/#concept-headers-append
+ append(name: string, value: string): void {
+ requiredArguments("Headers.append", arguments.length, 2);
+ const [newname, newvalue] = this._normalizeParams(name, value);
+ this._validateName(newname);
+ this._validateValue(newvalue);
+ const v = this[headerMap].get(newname);
+ const str = v ? `${v}, ${newvalue}` : newvalue;
+ this[headerMap].set(newname, str);
+ }
+
+ delete(name: string): void {
+ requiredArguments("Headers.delete", arguments.length, 1);
+ const [newname] = this._normalizeParams(name);
+ this._validateName(newname);
+ this[headerMap].delete(newname);
+ }
+
+ get(name: string): string | null {
+ requiredArguments("Headers.get", arguments.length, 1);
+ const [newname] = this._normalizeParams(name);
+ this._validateName(newname);
+ const value = this[headerMap].get(newname);
+ return value || null;
+ }
+
+ has(name: string): boolean {
+ requiredArguments("Headers.has", arguments.length, 1);
+ const [newname] = this._normalizeParams(name);
+ this._validateName(newname);
+ return this[headerMap].has(newname);
+ }
+
+ set(name: string, value: string): void {
+ requiredArguments("Headers.set", arguments.length, 2);
+ const [newname, newvalue] = this._normalizeParams(name, value);
+ this._validateName(newname);
+ this._validateValue(newvalue);
+ this[headerMap].set(newname, newvalue);
+ }
+
+ get [Symbol.toStringTag](): string {
+ return "Headers";
+ }
+}
+
+// @internal
+// Public Headers: HeadersBase plus the iterable protocol generated by
+// DomIterableMixin over headerMap.
+export class Headers extends DomIterableMixin<
+ string,
+ string,
+ typeof HeadersBase
+>(HeadersBase, headerMap) {}
diff --git a/cli/js/web/location.ts b/cli/js/web/location.ts
new file mode 100644
index 000000000..d48cce3c7
--- /dev/null
+++ b/cli/js/web/location.ts
@@ -0,0 +1,51 @@
+// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
+import { URL } from "./url.ts";
+import { notImplemented } from "../util.ts";
+import { Location } from "./dom_types.ts";
+
+// Read-only window.location backed by a parsed URL. Fields are copied
+// from the URL at construction time (snapshots, not live views); the
+// navigation methods are unsupported in Deno and always throw.
+export class LocationImpl implements Location {
+ constructor(url: string) {
+ const u = new URL(url);
+ this.url = u;
+ this.hash = u.hash;
+ this.host = u.host;
+ this.href = u.href;
+ this.hostname = u.hostname;
+ this.origin = u.protocol + "//" + u.host;
+ this.pathname = u.pathname;
+ this.protocol = u.protocol;
+ this.port = u.port;
+ this.search = u.search;
+ }
+
+ private url: URL;
+
+ toString(): string {
+ return this.url.toString();
+ }
+
+ readonly ancestorOrigins: string[] = [];
+ hash: string;
+ host: string;
+ hostname: string;
+ href: string;
+ readonly origin: string;
+ pathname: string;
+ port: string;
+ protocol: string;
+ search: string;
+ assign(_url: string): void {
+ throw notImplemented();
+ }
+ reload(): void {
+ throw notImplemented();
+ }
+ replace(_url: string): void {
+ throw notImplemented();
+ }
+}
+
+// Installs a frozen Location on globalThis; freezing makes the binding's
+// properties immutable afterwards.
+export function setLocation(url: string): void {
+ globalThis.location = new LocationImpl(url);
+ Object.freeze(globalThis.location);
+}
diff --git a/cli/js/web/request.ts b/cli/js/web/request.ts
new file mode 100644
index 000000000..1416a95d6
--- /dev/null
+++ b/cli/js/web/request.ts
@@ -0,0 +1,159 @@
+// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
+import * as headers from "./headers.ts";
+import * as body from "./body.ts";
+import * as domTypes from "./dom_types.ts";
+import * as streams from "./streams/mod.ts";
+
+const { Headers } = headers;
+const { ReadableStream } = streams;
+
+// Uppercases only ASCII a-z, leaving all other code points untouched
+// (a byte-wise uppercase, unlike locale-sensitive toUpperCase on the
+// whole string).
+function byteUpperCase(s: string): string {
+ return String(s).replace(/[a-z]/g, function byteUpperCaseReplace(c): string {
+ return c.toUpperCase();
+ });
+}
+
+// Normalizes an HTTP method name: the known methods are returned
+// uppercased; anything else is returned exactly as given
+// (ref: https://fetch.spec.whatwg.org/#concept-method-normalize).
+function normalizeMethod(m: string): string {
+ const u = byteUpperCase(m);
+ if (
+ u === "DELETE" ||
+ u === "GET" ||
+ u === "HEAD" ||
+ u === "OPTIONS" ||
+ u === "POST" ||
+ u === "PUT"
+ ) {
+ return u;
+ }
+ return m;
+}
+
+/**
+ * An HTTP request
+ * @param {Blob|String} [body]
+ * @param {Object} [init]
+ */
+/**
+ * An HTTP request
+ * @param {Blob|String} [body]
+ * @param {Object} [init]
+ */
+export class Request extends body.Body implements domTypes.Request {
+ public method: string;
+ public url: string;
+ public credentials?: "omit" | "same-origin" | "include";
+ public headers: domTypes.Headers;
+
+ // input may be a URL string or an existing Request; init values take
+ // precedence over values copied from an input Request.
+ constructor(input: domTypes.RequestInfo, init?: domTypes.RequestInit) {
+ if (arguments.length < 1) {
+ throw TypeError("Not enough arguments");
+ }
+
+ if (!init) {
+ init = {};
+ }
+
+ let b: body.BodySource;
+
+ // prefer body from init
+ if (init.body) {
+ b = init.body;
+ } else if (input instanceof Request && input._bodySource) {
+ if (input.bodyUsed) {
+ throw TypeError(body.BodyUsedError);
+ }
+ b = input._bodySource;
+ } else if (typeof input === "object" && "body" in input && input.body) {
+ if (input.bodyUsed) {
+ throw TypeError(body.BodyUsedError);
+ }
+ b = input.body;
+ } else {
+ b = "";
+ }
+
+ let headers: domTypes.Headers;
+
+ // prefer headers from init
+ if (init.headers) {
+ headers = new Headers(init.headers);
+ } else if (input instanceof Request) {
+ headers = input.headers;
+ } else {
+ headers = new Headers();
+ }
+
+ // super() (Body) needs the content type, so resolve it before the call.
+ const contentType = headers.get("content-type") || "";
+ super(b, contentType);
+ this.headers = headers;
+
+ // readonly attribute ByteString method;
+ /**
+ * The HTTP request method
+ * @readonly
+ * @default GET
+ * @type {string}
+ */
+ this.method = "GET";
+
+ // readonly attribute USVString url;
+ /**
+ * The request URL
+ * @readonly
+ * @type {string}
+ */
+ this.url = "";
+
+ // readonly attribute RequestCredentials credentials;
+ this.credentials = "omit";
+
+ // Copying from a Request re-wraps its headers (fresh Headers object)
+ // and shares its underlying stream.
+ if (input instanceof Request) {
+ if (input.bodyUsed) {
+ throw TypeError(body.BodyUsedError);
+ }
+ this.method = input.method;
+ this.url = input.url;
+ this.headers = new Headers(input.headers);
+ this.credentials = input.credentials;
+ this._stream = input._stream;
+ } else if (typeof input === "string") {
+ this.url = input;
+ }
+
+ if (init && "method" in init) {
+ this.method = normalizeMethod(init.method as string);
+ }
+
+ if (
+ init &&
+ "credentials" in init &&
+ init.credentials &&
+ ["omit", "same-origin", "include"].indexOf(init.credentials) !== -1
+ ) {
+ this.credentials = init.credentials;
+ }
+ }
+
+ // Returns a copy of this request. If the body is a ReadableStream it is
+ // tee'd so both the original and the clone remain readable.
+ public clone(): domTypes.Request {
+ if (this.bodyUsed) {
+ throw TypeError(body.BodyUsedError);
+ }
+
+ const iterators = this.headers.entries();
+ const headersList: Array<[string, string]> = [];
+ for (const header of iterators) {
+ headersList.push(header);
+ }
+
+ let body2 = this._bodySource;
+
+ if (this._bodySource instanceof ReadableStream) {
+ const tees = (this._bodySource as domTypes.ReadableStream).tee();
+ this._stream = this._bodySource = tees[0];
+ body2 = tees[1];
+ }
+
+ const cloned = new Request(this.url, {
+ body: body2,
+ method: this.method,
+ headers: new Headers(headersList),
+ credentials: this.credentials
+ });
+ return cloned;
+ }
+}
diff --git a/cli/js/web/streams/mod.ts b/cli/js/web/streams/mod.ts
new file mode 100644
index 000000000..5389aaf6d
--- /dev/null
+++ b/cli/js/web/streams/mod.ts
@@ -0,0 +1,20 @@
+// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+
+/**
+ * @stardazed/streams - implementation of the web streams standard
+ * Part of Stardazed
+ * (c) 2018-Present by Arthur Langereis - @zenmumbler
+ * https://github.com/stardazed/sd-streams
+ */
+
+export { SDReadableStream as ReadableStream } from "./readable-stream.ts";
+/* TODO The following are currently unused so not exported for clarity.
+export { WritableStream } from "./writable-stream.ts";
+
+export { TransformStream } from "./transform-stream.ts";
+export {
+ ByteLengthQueuingStrategy,
+ CountQueuingStrategy
+} from "./strategies.ts";
+*/
diff --git a/cli/js/web/streams/pipe-to.ts b/cli/js/web/streams/pipe-to.ts
new file mode 100644
index 000000000..1d5579217
--- /dev/null
+++ b/cli/js/web/streams/pipe-to.ts
@@ -0,0 +1,237 @@
+// TODO reenable this code when we enable writableStreams and transport types
+// // Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// // Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+
+// /**
+// * streams/pipe-to - pipeTo algorithm implementation
+// * Part of Stardazed
+// * (c) 2018-Present by Arthur Langereis - @zenmumbler
+// * https://github.com/stardazed/sd-streams
+// */
+
+// /* eslint-disable @typescript-eslint/no-explicit-any */
+// // TODO reenable this lint here
+
+// import * as rs from "./readable-internals.ts";
+// import * as ws from "./writable-internals.ts";
+// import * as shared from "./shared-internals.ts";
+
+// import { ReadableStreamDefaultReader } from "./readable-stream-default-reader.ts";
+// import { WritableStreamDefaultWriter } from "./writable-stream-default-writer.ts";
+// import { PipeOptions } from "../dom_types.ts";
+// import { Err } from "../errors.ts";
+
+// // add a wrapper to handle falsy rejections
+// interface ErrorWrapper {
+// actualError: shared.ErrorResult;
+// }
+
+// export function pipeTo<ChunkType>(
+// source: rs.SDReadableStream<ChunkType>,
+// dest: ws.WritableStream<ChunkType>,
+// options: PipeOptions
+// ): Promise<void> {
+// const preventClose = !!options.preventClose;
+// const preventAbort = !!options.preventAbort;
+// const preventCancel = !!options.preventCancel;
+// const signal = options.signal;
+
+// let shuttingDown = false;
+// let latestWrite = Promise.resolve();
+// const promise = shared.createControlledPromise<void>();
+
+// // If IsReadableByteStreamController(this.[[readableStreamController]]) is true, let reader be either ! AcquireReadableStreamBYOBReader(this) or ! AcquireReadableStreamDefaultReader(this), at the user agent’s discretion.
+// // Otherwise, let reader be ! AcquireReadableStreamDefaultReader(this).
+// const reader = new ReadableStreamDefaultReader(source);
+// const writer = new WritableStreamDefaultWriter(dest);
+
+// let abortAlgorithm: () => any;
+// if (signal !== undefined) {
+// abortAlgorithm = (): void => {
+// // TODO this should be a DOMException,
+// // https://github.com/stardazed/sd-streams/blob/master/packages/streams/src/pipe-to.ts#L38
+// const error = new errors.Aborted("Aborted");
+// const actions: Array<() => Promise<void>> = [];
+// if (preventAbort === false) {
+// actions.push(() => {
+// if (dest[shared.state_] === "writable") {
+// return ws.writableStreamAbort(dest, error);
+// }
+// return Promise.resolve();
+// });
+// }
+// if (preventCancel === false) {
+// actions.push(() => {
+// if (source[shared.state_] === "readable") {
+// return rs.readableStreamCancel(source, error);
+// }
+// return Promise.resolve();
+// });
+// }
+// shutDown(
+// () => {
+// return Promise.all(actions.map(a => a())).then(_ => undefined);
+// },
+// { actualError: error }
+// );
+// };
+
+// if (signal.aborted === true) {
+// abortAlgorithm();
+// } else {
+// signal.addEventListener("abort", abortAlgorithm);
+// }
+// }
+
+// function onStreamErrored(
+// stream: rs.SDReadableStream<ChunkType> | ws.WritableStream<ChunkType>,
+// promise: Promise<void>,
+// action: (error: shared.ErrorResult) => void
+// ): void {
+// if (stream[shared.state_] === "errored") {
+// action(stream[shared.storedError_]);
+// } else {
+// promise.catch(action);
+// }
+// }
+
+// function onStreamClosed(
+// stream: rs.SDReadableStream<ChunkType> | ws.WritableStream<ChunkType>,
+// promise: Promise<void>,
+// action: () => void
+// ): void {
+// if (stream[shared.state_] === "closed") {
+// action();
+// } else {
+// promise.then(action);
+// }
+// }
+
+// onStreamErrored(source, reader[rs.closedPromise_].promise, error => {
+// if (!preventAbort) {
+// shutDown(() => ws.writableStreamAbort(dest, error), {
+// actualError: error
+// });
+// } else {
+// shutDown(undefined, { actualError: error });
+// }
+// });
+
+// onStreamErrored(dest, writer[ws.closedPromise_].promise, error => {
+// if (!preventCancel) {
+// shutDown(() => rs.readableStreamCancel(source, error), {
+// actualError: error
+// });
+// } else {
+// shutDown(undefined, { actualError: error });
+// }
+// });
+
+// onStreamClosed(source, reader[rs.closedPromise_].promise, () => {
+// if (!preventClose) {
+// shutDown(() =>
+// ws.writableStreamDefaultWriterCloseWithErrorPropagation(writer)
+// );
+// } else {
+// shutDown();
+// }
+// });
+
+// if (
+// ws.writableStreamCloseQueuedOrInFlight(dest) ||
+// dest[shared.state_] === "closed"
+// ) {
+// // Assert: no chunks have been read or written.
+// const destClosed = new TypeError();
+// if (!preventCancel) {
+// shutDown(() => rs.readableStreamCancel(source, destClosed), {
+// actualError: destClosed
+// });
+// } else {
+// shutDown(undefined, { actualError: destClosed });
+// }
+// }
+
+// function awaitLatestWrite(): Promise<void> {
+// const curLatestWrite = latestWrite;
+// return latestWrite.then(() =>
+// curLatestWrite === latestWrite ? undefined : awaitLatestWrite()
+// );
+// }
+
+// function flushRemainder(): Promise<void> | undefined {
+// if (
+// dest[shared.state_] === "writable" &&
+// !ws.writableStreamCloseQueuedOrInFlight(dest)
+// ) {
+// return awaitLatestWrite();
+// } else {
+// return undefined;
+// }
+// }
+
+// function shutDown(action?: () => Promise<void>, error?: ErrorWrapper): void {
+// if (shuttingDown) {
+// return;
+// }
+// shuttingDown = true;
+
+// if (action === undefined) {
+// action = (): Promise<void> => Promise.resolve();
+// }
+
+// function finishShutDown(): void {
+// action!().then(
+// _ => finalize(error),
+// newError => finalize({ actualError: newError })
+// );
+// }
+
+// const flushWait = flushRemainder();
+// if (flushWait) {
+// flushWait.then(finishShutDown);
+// } else {
+// finishShutDown();
+// }
+// }
+
+// function finalize(error?: ErrorWrapper): void {
+// ws.writableStreamDefaultWriterRelease(writer);
+// rs.readableStreamReaderGenericRelease(reader);
+// if (signal && abortAlgorithm) {
+// signal.removeEventListener("abort", abortAlgorithm);
+// }
+// if (error) {
+// promise.reject(error.actualError);
+// } else {
+// promise.resolve(undefined);
+// }
+// }
+
+// function next(): Promise<void> | undefined {
+// if (shuttingDown) {
+// return;
+// }
+
+// writer[ws.readyPromise_].promise.then(() => {
+// rs.readableStreamDefaultReaderRead(reader).then(
+// ({ value, done }) => {
+// if (done) {
+// return;
+// }
+// latestWrite = ws
+// .writableStreamDefaultWriterWrite(writer, value!)
+// .catch(() => {});
+// next();
+// },
+// _error => {
+// latestWrite = Promise.resolve();
+// }
+// );
+// });
+// }
+
+// next();
+
+// return promise.promise;
+// }
diff --git a/cli/js/web/streams/queue-mixin.ts b/cli/js/web/streams/queue-mixin.ts
new file mode 100644
index 000000000..23c57d75f
--- /dev/null
+++ b/cli/js/web/streams/queue-mixin.ts
@@ -0,0 +1,84 @@
+// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+
+/**
+ * streams/queue-mixin - internal queue operations for stream controllers
+ * Part of Stardazed
+ * (c) 2018-Present by Arthur Langereis - @zenmumbler
+ * https://github.com/stardazed/sd-streams
+ */
+
+/* eslint-disable @typescript-eslint/no-explicit-any */
+// TODO reenable this lint here
+
+import { Queue, QueueImpl } from "./queue.ts";
+import { isFiniteNonNegativeNumber } from "./shared-internals.ts";
+
+export const queue_ = Symbol("queue_");
+export const queueTotalSize_ = Symbol("queueTotalSize_");
+
+export interface QueueElement<V> {
+ value: V;
+ size: number;
+}
+
+export interface QueueContainer<V> {
+ [queue_]: Queue<QueueElement<V>>;
+ [queueTotalSize_]: number;
+}
+
+export interface ByteQueueContainer {
+ [queue_]: Queue<{
+ buffer: ArrayBufferLike;
+ byteOffset: number;
+ byteLength: number;
+ }>;
+ [queueTotalSize_]: number;
+}
+
+// Spec op DequeueValue: removes and returns the front value, reducing the
+// container's running total size by the element's recorded size.
+export function dequeueValue<V>(container: QueueContainer<V>): V {
+ // Assert: container has[[queue]] and[[queueTotalSize]] internal slots.
+ // Assert: container.[[queue]] is not empty.
+ const pair = container[queue_].shift()!;
+ const newTotalSize = container[queueTotalSize_] - pair.size;
+ container[queueTotalSize_] = Math.max(0, newTotalSize); // < 0 can occur due to rounding errors.
+ return pair.value;
+}
+
+// Spec op EnqueueValueWithSize: appends value with its size and adds the
+// size to the running total; rejects NaN/negative/infinite sizes.
+export function enqueueValueWithSize<V>(
+ container: QueueContainer<V>,
+ value: V,
+ size: number
+): void {
+ // Assert: container has[[queue]] and[[queueTotalSize]] internal slots.
+ if (!isFiniteNonNegativeNumber(size)) {
+ throw new RangeError("Chunk size must be a non-negative, finite numbers");
+ }
+ container[queue_].push({ value, size });
+ container[queueTotalSize_] += size;
+}
+
+// Spec op PeekQueueValue: returns the front value without removing it.
+export function peekQueueValue<V>(container: QueueContainer<V>): V {
+ // Assert: container has[[queue]] and[[queueTotalSize]] internal slots.
+ // Assert: container.[[queue]] is not empty.
+ return container[queue_].front()!.value;
+}
+
+// Spec op ResetQueue: replaces the queue with a fresh (chunked) instance
+// and zeroes the running total size.
+export function resetQueue<V>(
+ container: ByteQueueContainer | QueueContainer<V>
+): void {
+ // Chrome (as of v67) has a steep performance cliff with large arrays
+ // and shift(), around about 50k elements. While this is an unusual case
+ // we use a simple wrapper around shift and push that is chunked to
+ // avoid this pitfall.
+ // @see: https://github.com/stardazed/sd-streams/issues/1
+ container[queue_] = new QueueImpl<any>();
+
+ // The code below can be used as a plain array implementation of the
+ // Queue interface.
+ // const q = [] as any;
+ // q.front = function() { return this[0]; };
+ // container[queue_] = q;
+
+ container[queueTotalSize_] = 0;
+}
diff --git a/cli/js/web/streams/queue.ts b/cli/js/web/streams/queue.ts
new file mode 100644
index 000000000..264851baf
--- /dev/null
+++ b/cli/js/web/streams/queue.ts
@@ -0,0 +1,65 @@
+// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+
+/**
+ * streams/queue - simple queue type with chunked array backing
+ * Part of Stardazed
+ * (c) 2018-Present by Arthur Langereis - @zenmumbler
+ * https://github.com/stardazed/sd-streams
+ */
+
+// Max elements per backing chunk in QueueImpl (see class below).
+const CHUNK_SIZE = 16384;
+
+// Minimal FIFO contract: push to the back, shift from the front, peek at
+// the front, and report the total element count.
+export interface Queue<T> {
+  push(t: T): void;
+  shift(): T | undefined;
+  front(): T | undefined;
+  readonly length: number;
+}
+
+// FIFO backed by a list of fixed-size array chunks. Capping each backing
+// array at CHUNK_SIZE elements sidesteps the large-array shift() performance
+// cliff noted in queue-mixin.ts (sd-streams issue #1).
+export class QueueImpl<T> implements Queue<T> {
+  private readonly chunks_: T[][]; // ordered backing chunks, front first
+  private readChunk_: T[]; // chunk shift()/front() operate on
+  private writeChunk_: T[]; // chunk push() appends to
+  private length_: number; // total element count across all chunks
+
+  constructor() {
+    // Start with a single chunk that is both read and write target.
+    this.chunks_ = [[]];
+    this.readChunk_ = this.writeChunk_ = this.chunks_[0];
+    this.length_ = 0;
+  }
+
+  push(t: T): void {
+    this.writeChunk_.push(t);
+    this.length_ += 1;
+    // Once the current write chunk is full, open a fresh one.
+    if (this.writeChunk_.length === CHUNK_SIZE) {
+      this.writeChunk_ = [];
+      this.chunks_.push(this.writeChunk_);
+    }
+  }
+
+  front(): T | undefined {
+    if (this.length_ === 0) {
+      return undefined;
+    }
+    return this.readChunk_[0];
+  }
+
+  shift(): T | undefined {
+    if (this.length_ === 0) {
+      return undefined;
+    }
+    const t = this.readChunk_.shift();
+
+    this.length_ -= 1;
+    // Drop an exhausted read chunk — unless it is also the write chunk,
+    // i.e. the only chunk left.
+    if (this.readChunk_.length === 0 && this.readChunk_ !== this.writeChunk_) {
+      this.chunks_.shift();
+      this.readChunk_ = this.chunks_[0];
+    }
+    return t;
+  }
+
+  get length(): number {
+    return this.length_;
+  }
+}
diff --git a/cli/js/web/streams/readable-byte-stream-controller.ts b/cli/js/web/streams/readable-byte-stream-controller.ts
new file mode 100644
index 000000000..86efd416c
--- /dev/null
+++ b/cli/js/web/streams/readable-byte-stream-controller.ts
@@ -0,0 +1,214 @@
+// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+
+/**
+ * streams/readable-byte-stream-controller - ReadableByteStreamController class implementation
+ * Part of Stardazed
+ * (c) 2018-Present by Arthur Langereis - @zenmumbler
+ * https://github.com/stardazed/sd-streams
+ */
+
+/* eslint-disable @typescript-eslint/no-explicit-any */
+// TODO reenable this lint here
+
+import * as rs from "./readable-internals.ts";
+import * as q from "./queue-mixin.ts";
+import * as shared from "./shared-internals.ts";
+import { ReadableStreamBYOBRequest } from "./readable-stream-byob-request.ts";
+import { Queue } from "./queue.ts";
+import { UnderlyingByteSource } from "../dom_types.ts";
+
+// Controller for readable byte streams. All spec state lives in symbol-keyed
+// internal slots (declared in readable-internals.ts / queue-mixin.ts); the
+// heavy lifting is delegated to the rs.readableByteStreamController* and
+// q.* abstract-operation helpers.
+export class ReadableByteStreamController
+  implements rs.SDReadableByteStreamController {
+  [rs.autoAllocateChunkSize_]: number | undefined;
+  [rs.byobRequest_]: rs.SDReadableStreamBYOBRequest | undefined;
+  [rs.cancelAlgorithm_]: rs.CancelAlgorithm;
+  [rs.closeRequested_]: boolean;
+  [rs.controlledReadableByteStream_]: rs.SDReadableStream<ArrayBufferView>;
+  [rs.pullAgain_]: boolean;
+  [rs.pullAlgorithm_]: rs.PullAlgorithm<ArrayBufferView>;
+  [rs.pulling_]: boolean;
+  [rs.pendingPullIntos_]: rs.PullIntoDescriptor[];
+  [rs.started_]: boolean;
+  [rs.strategyHWM_]: number;
+
+  [q.queue_]: Queue<{
+    buffer: ArrayBufferLike;
+    byteOffset: number;
+    byteLength: number;
+  }>;
+  [q.queueTotalSize_]: number;
+
+  constructor() {
+    // Not constructible from user code; real instances are created via
+    // Object.create() in setUpReadableByteStreamControllerFromUnderlyingSource.
+    throw new TypeError();
+  }
+
+  get byobRequest(): rs.SDReadableStreamBYOBRequest | undefined {
+    if (!rs.isReadableByteStreamController(this)) {
+      throw new TypeError();
+    }
+    // Lazily materialize a BYOBRequest exposing the still-unfilled region of
+    // the first pending pull-into descriptor; cached until invalidated.
+    if (
+      this[rs.byobRequest_] === undefined &&
+      this[rs.pendingPullIntos_].length > 0
+    ) {
+      const firstDescriptor = this[rs.pendingPullIntos_][0];
+      const view = new Uint8Array(
+        firstDescriptor.buffer,
+        firstDescriptor.byteOffset + firstDescriptor.bytesFilled,
+        firstDescriptor.byteLength - firstDescriptor.bytesFilled
+      );
+      const byobRequest = Object.create(
+        ReadableStreamBYOBRequest.prototype
+      ) as ReadableStreamBYOBRequest;
+      rs.setUpReadableStreamBYOBRequest(byobRequest, this, view);
+      this[rs.byobRequest_] = byobRequest;
+    }
+    return this[rs.byobRequest_];
+  }
+
+  get desiredSize(): number | null {
+    if (!rs.isReadableByteStreamController(this)) {
+      throw new TypeError();
+    }
+    return rs.readableByteStreamControllerGetDesiredSize(this);
+  }
+
+  close(): void {
+    if (!rs.isReadableByteStreamController(this)) {
+      throw new TypeError();
+    }
+    if (this[rs.closeRequested_]) {
+      throw new TypeError("Stream is already closing");
+    }
+    if (this[rs.controlledReadableByteStream_][shared.state_] !== "readable") {
+      throw new TypeError("Stream is closed or errored");
+    }
+    rs.readableByteStreamControllerClose(this);
+  }
+
+  enqueue(chunk: ArrayBufferView): void {
+    if (!rs.isReadableByteStreamController(this)) {
+      throw new TypeError();
+    }
+    if (this[rs.closeRequested_]) {
+      throw new TypeError("Stream is already closing");
+    }
+    if (this[rs.controlledReadableByteStream_][shared.state_] !== "readable") {
+      throw new TypeError("Stream is closed or errored");
+    }
+    if (!ArrayBuffer.isView(chunk)) {
+      throw new TypeError("chunk must be a valid ArrayBufferView");
+    }
+    // If ! IsDetachedBuffer(chunk.[[ViewedArrayBuffer]]) is true, throw a TypeError exception.
+    return rs.readableByteStreamControllerEnqueue(this, chunk);
+  }
+
+  error(error?: shared.ErrorResult): void {
+    if (!rs.isReadableByteStreamController(this)) {
+      throw new TypeError();
+    }
+    rs.readableByteStreamControllerError(this, error);
+  }
+
+  // [[CancelSteps]]: called by readableStreamCancel; discards pending BYOB
+  // fill progress and queued chunks, then runs the source's cancel algorithm.
+  [rs.cancelSteps_](reason: shared.ErrorResult): Promise<void> {
+    if (this[rs.pendingPullIntos_].length > 0) {
+      const firstDescriptor = this[rs.pendingPullIntos_][0];
+      firstDescriptor.bytesFilled = 0;
+    }
+    q.resetQueue(this);
+    const result = this[rs.cancelAlgorithm_](reason);
+    rs.readableByteStreamControllerClearAlgorithms(this);
+    return result;
+  }
+
+  // [[PullSteps]]: serve a default-reader read either from the internal queue
+  // or, when empty, by registering a read request (optionally auto-allocating
+  // a buffer for the source to fill) and prodding the source to pull.
+  [rs.pullSteps_](
+    forAuthorCode: boolean
+  ): Promise<IteratorResult<ArrayBufferView, any>> {
+    const stream = this[rs.controlledReadableByteStream_];
+    // Assert: ! ReadableStreamHasDefaultReader(stream) is true.
+    if (this[q.queueTotalSize_] > 0) {
+      // Assert: ! ReadableStreamGetNumReadRequests(stream) is 0.
+      const entry = this[q.queue_].shift()!;
+      this[q.queueTotalSize_] -= entry.byteLength;
+      rs.readableByteStreamControllerHandleQueueDrain(this);
+      const view = new Uint8Array(
+        entry.buffer,
+        entry.byteOffset,
+        entry.byteLength
+      );
+      return Promise.resolve(
+        rs.readableStreamCreateReadResult(view, false, forAuthorCode)
+      );
+    }
+    const autoAllocateChunkSize = this[rs.autoAllocateChunkSize_];
+    if (autoAllocateChunkSize !== undefined) {
+      let buffer: ArrayBuffer;
+      try {
+        buffer = new ArrayBuffer(autoAllocateChunkSize);
+      } catch (error) {
+        // Allocation failure surfaces as a rejected read, not a throw.
+        return Promise.reject(error);
+      }
+      const pullIntoDescriptor: rs.PullIntoDescriptor = {
+        buffer,
+        byteOffset: 0,
+        byteLength: autoAllocateChunkSize,
+        bytesFilled: 0,
+        elementSize: 1,
+        ctor: Uint8Array,
+        readerType: "default"
+      };
+      this[rs.pendingPullIntos_].push(pullIntoDescriptor);
+    }
+
+    const promise = rs.readableStreamAddReadRequest(stream, forAuthorCode);
+    rs.readableByteStreamControllerCallPullIfNeeded(this);
+    return promise;
+  }
+}
+
+// Build pull/cancel/start algorithms from an author-supplied underlying byte
+// source, validate autoAllocateChunkSize, and attach a freshly created
+// (Object.create'd, since the constructor throws) controller to the stream.
+export function setUpReadableByteStreamControllerFromUnderlyingSource(
+  stream: rs.SDReadableStream<ArrayBufferView>,
+  underlyingByteSource: UnderlyingByteSource,
+  highWaterMark: number
+): void {
+  // Assert: underlyingByteSource is not undefined.
+  const controller = Object.create(
+    ReadableByteStreamController.prototype
+  ) as ReadableByteStreamController;
+
+  const startAlgorithm = (): any => {
+    return shared.invokeOrNoop(underlyingByteSource, "start", [controller]);
+  };
+  const pullAlgorithm = shared.createAlgorithmFromUnderlyingMethod(
+    underlyingByteSource,
+    "pull",
+    [controller]
+  );
+  const cancelAlgorithm = shared.createAlgorithmFromUnderlyingMethod(
+    underlyingByteSource,
+    "cancel",
+    []
+  );
+
+  // autoAllocateChunkSize is optional; when present it must coerce to a
+  // positive integer (Number(NaN)/0/negatives all rejected below).
+  let autoAllocateChunkSize = underlyingByteSource.autoAllocateChunkSize;
+  if (autoAllocateChunkSize !== undefined) {
+    autoAllocateChunkSize = Number(autoAllocateChunkSize);
+    if (
+      !shared.isInteger(autoAllocateChunkSize) ||
+      autoAllocateChunkSize <= 0
+    ) {
+      throw new RangeError(
+        "autoAllocateChunkSize must be a positive, finite integer"
+      );
+    }
+  }
+  rs.setUpReadableByteStreamController(
+    stream,
+    controller,
+    startAlgorithm,
+    pullAlgorithm,
+    cancelAlgorithm,
+    highWaterMark,
+    autoAllocateChunkSize
+  );
+}
diff --git a/cli/js/web/streams/readable-internals.ts b/cli/js/web/streams/readable-internals.ts
new file mode 100644
index 000000000..67f5a69b1
--- /dev/null
+++ b/cli/js/web/streams/readable-internals.ts
@@ -0,0 +1,1357 @@
+// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+
+/**
+ * streams/readable-internals - internal types and functions for readable streams
+ * Part of Stardazed
+ * (c) 2018-Present by Arthur Langereis - @zenmumbler
+ * https://github.com/stardazed/sd-streams
+ */
+
+/* eslint-disable @typescript-eslint/no-explicit-any */
+// TODO reenable this lint here
+
+import * as shared from "./shared-internals.ts";
+import * as q from "./queue-mixin.ts";
+import {
+ QueuingStrategy,
+ QueuingStrategySizeCallback,
+ UnderlyingSource,
+ UnderlyingByteSource
+} from "../dom_types.ts";
+
+// Symbol keys for the spec's [[internal slots]]. They are module-level
+// exports so every streams module indexes the same hidden state, while the
+// slots stay invisible to ordinary property access from user code.
+
+// ReadableStreamDefaultController
+export const controlledReadableStream_ = Symbol("controlledReadableStream_");
+export const pullAlgorithm_ = Symbol("pullAlgorithm_");
+export const cancelAlgorithm_ = Symbol("cancelAlgorithm_");
+export const strategySizeAlgorithm_ = Symbol("strategySizeAlgorithm_");
+export const strategyHWM_ = Symbol("strategyHWM_");
+export const started_ = Symbol("started_");
+export const closeRequested_ = Symbol("closeRequested_");
+export const pullAgain_ = Symbol("pullAgain_");
+export const pulling_ = Symbol("pulling_");
+export const cancelSteps_ = Symbol("cancelSteps_");
+export const pullSteps_ = Symbol("pullSteps_");
+
+// ReadableByteStreamController
+export const autoAllocateChunkSize_ = Symbol("autoAllocateChunkSize_");
+export const byobRequest_ = Symbol("byobRequest_");
+export const controlledReadableByteStream_ = Symbol(
+  "controlledReadableByteStream_"
+);
+export const pendingPullIntos_ = Symbol("pendingPullIntos_");
+
+// ReadableStreamDefaultReader
+export const closedPromise_ = Symbol("closedPromise_");
+export const ownerReadableStream_ = Symbol("ownerReadableStream_");
+export const readRequests_ = Symbol("readRequests_");
+export const readIntoRequests_ = Symbol("readIntoRequests_");
+
+// ReadableStreamBYOBRequest
+export const associatedReadableByteStreamController_ = Symbol(
+  "associatedReadableByteStreamController_"
+);
+export const view_ = Symbol("view_");
+
+// ReadableStreamBYOBReader
+
+// ReadableStream
+export const reader_ = Symbol("reader_");
+export const readableStreamController_ = Symbol("readableStreamController_");
+
+// Signatures of the author-supplied / derived algorithms the controllers run.
+export type StartFunction<OutputType> = (
+  controller: SDReadableStreamControllerBase<OutputType>
+) => void | PromiseLike<void>;
+export type StartAlgorithm = () => Promise<void> | void;
+export type PullFunction<OutputType> = (
+  controller: SDReadableStreamControllerBase<OutputType>
+) => void | PromiseLike<void>;
+export type PullAlgorithm<OutputType> = (
+  controller: SDReadableStreamControllerBase<OutputType>
+) => PromiseLike<void>;
+export type CancelAlgorithm = (reason?: shared.ErrorResult) => Promise<void>;
+
+// ----
+
+// Surface shared by the default and byte controllers, including the internal
+// [[CancelSteps]]/[[PullSteps]] methods the stream machinery invokes.
+export interface SDReadableStreamControllerBase<OutputType> {
+  readonly desiredSize: number | null;
+  close(): void;
+  error(e?: shared.ErrorResult): void;
+
+  [cancelSteps_](reason: shared.ErrorResult): Promise<void>;
+  [pullSteps_](forAuthorCode: boolean): Promise<IteratorResult<OutputType>>;
+}
+
+// BYOB request handed to a byte source so it can fill a reader-supplied view.
+export interface SDReadableStreamBYOBRequest {
+  readonly view: ArrayBufferView;
+  respond(bytesWritten: number): void;
+  respondWithNewView(view: ArrayBufferView): void;
+
+  [associatedReadableByteStreamController_]:
+    | SDReadableByteStreamController
+    | undefined;
+  [view_]: ArrayBufferView | undefined;
+}
+
+// Constructor shape common to the typed-array/DataView classes, used via
+// PullIntoDescriptor.ctor to rebuild a view of the requested type.
+interface ArrayBufferViewCtor {
+  new (
+    buffer: ArrayBufferLike,
+    byteOffset?: number,
+    byteLength?: number
+  ): ArrayBufferView;
+}
+
+// Bookkeeping for one pending BYOB read: the target buffer region and how
+// many bytes of it have been filled so far.
+export interface PullIntoDescriptor {
+  readerType: "default" | "byob";
+  ctor: ArrayBufferViewCtor;
+  buffer: ArrayBufferLike;
+  byteOffset: number;
+  byteLength: number;
+  bytesFilled: number;
+  elementSize: number;
+}
+
+// Slot layout of a byte-stream controller (queue of raw buffer regions).
+export interface SDReadableByteStreamController
+  extends SDReadableStreamControllerBase<ArrayBufferView>,
+    q.ByteQueueContainer {
+  readonly byobRequest: SDReadableStreamBYOBRequest | undefined;
+  enqueue(chunk: ArrayBufferView): void;
+
+  [autoAllocateChunkSize_]: number | undefined; // A positive integer, when the automatic buffer allocation feature is enabled. In that case, this value specifies the size of buffer to allocate. It is undefined otherwise.
+  [byobRequest_]: SDReadableStreamBYOBRequest | undefined; // A ReadableStreamBYOBRequest instance representing the current BYOB pull request
+  [cancelAlgorithm_]: CancelAlgorithm; // A promise-returning algorithm, taking one argument (the cancel reason), which communicates a requested cancelation to the underlying source
+  [closeRequested_]: boolean; // A boolean flag indicating whether the stream has been closed by its underlying byte source, but still has chunks in its internal queue that have not yet been read
+  [controlledReadableByteStream_]: SDReadableStream<ArrayBufferView>; // The ReadableStream instance controlled
+  [pullAgain_]: boolean; // A boolean flag set to true if the stream’s mechanisms requested a call to the underlying byte source’s pull() method to pull more data, but the pull could not yet be done since a previous call is still executing
+  [pullAlgorithm_]: PullAlgorithm<ArrayBufferView>; // A promise-returning algorithm that pulls data from the underlying source
+  [pulling_]: boolean; // A boolean flag set to true while the underlying byte source’s pull() method is executing and has not yet fulfilled, used to prevent reentrant calls
+  [pendingPullIntos_]: PullIntoDescriptor[]; // A List of descriptors representing pending BYOB pull requests
+  [started_]: boolean; // A boolean flag indicating whether the underlying source has finished starting
+  [strategyHWM_]: number; // A number supplied to the constructor as part of the stream’s queuing strategy, indicating the point at which the stream will apply backpressure to its underlying byte source
+}
+
+// Slot layout of a default controller (value queue with a size strategy).
+export interface SDReadableStreamDefaultController<OutputType>
+  extends SDReadableStreamControllerBase<OutputType>,
+    q.QueueContainer<OutputType> {
+  enqueue(chunk?: OutputType): void;
+
+  [controlledReadableStream_]: SDReadableStream<OutputType>;
+  [pullAlgorithm_]: PullAlgorithm<OutputType>;
+  [cancelAlgorithm_]: CancelAlgorithm;
+  [strategySizeAlgorithm_]: QueuingStrategySizeCallback<OutputType>;
+  [strategyHWM_]: number;
+
+  [started_]: boolean;
+  [closeRequested_]: boolean;
+  [pullAgain_]: boolean;
+  [pulling_]: boolean;
+}
+
+// ----
+
+// State common to both reader flavours: the owning stream (undefined once
+// released) and the controlled "closed" promise.
+export interface SDReadableStreamReader<OutputType> {
+  readonly closed: Promise<void>;
+  cancel(reason: shared.ErrorResult): Promise<void>;
+  releaseLock(): void;
+
+  [ownerReadableStream_]: SDReadableStream<OutputType> | undefined;
+  [closedPromise_]: shared.ControlledPromise<void>;
+}
+
+// A pending read() promise plus whether it was issued by author code
+// (affects the prototype of the produced read result — see
+// readableStreamCreateReadResult).
+export interface ReadRequest<V> extends shared.ControlledPromise<V> {
+  forAuthorCode: boolean;
+}
+
+// Type-only declarations: the concrete classes are implemented in their own
+// modules; declaring them here gives the internals typed slot access.
+export declare class SDReadableStreamDefaultReader<OutputType>
+  implements SDReadableStreamReader<OutputType> {
+  constructor(stream: SDReadableStream<OutputType>);
+
+  readonly closed: Promise<void>;
+  cancel(reason: shared.ErrorResult): Promise<void>;
+  releaseLock(): void;
+  read(): Promise<IteratorResult<OutputType | undefined>>;
+
+  [ownerReadableStream_]: SDReadableStream<OutputType> | undefined;
+  [closedPromise_]: shared.ControlledPromise<void>;
+  [readRequests_]: Array<ReadRequest<IteratorResult<OutputType>>>;
+}
+
+export declare class SDReadableStreamBYOBReader
+  implements SDReadableStreamReader<ArrayBufferView> {
+  constructor(stream: SDReadableStream<ArrayBufferView>);
+
+  readonly closed: Promise<void>;
+  cancel(reason: shared.ErrorResult): Promise<void>;
+  releaseLock(): void;
+  read(view: ArrayBufferView): Promise<IteratorResult<ArrayBufferView>>;
+
+  [ownerReadableStream_]: SDReadableStream<ArrayBufferView> | undefined;
+  [closedPromise_]: shared.ControlledPromise<void>;
+  [readIntoRequests_]: Array<ReadRequest<IteratorResult<ArrayBufferView>>>;
+}
+
+/* TODO reenable this when we add WritableStreams and Transforms
+export interface GenericTransformStream<InputType, OutputType> {
+ readable: SDReadableStream<OutputType>;
+ writable: ws.WritableStream<InputType>;
+}
+*/
+
+// The three spec states a readable stream moves through.
+export type ReadableStreamState = "readable" | "closed" | "errored";
+
+// Type-only declaration of the ReadableStream class (implemented in
+// readable-stream.ts); exposes its slot layout to the internals here.
+export declare class SDReadableStream<OutputType> {
+  constructor(
+    underlyingSource: UnderlyingByteSource,
+    strategy?: { highWaterMark?: number; size?: undefined }
+  );
+  constructor(
+    underlyingSource?: UnderlyingSource<OutputType>,
+    strategy?: QueuingStrategy<OutputType>
+  );
+
+  readonly locked: boolean;
+  cancel(reason?: shared.ErrorResult): Promise<void>;
+  getReader(): SDReadableStreamReader<OutputType>;
+  getReader(options: { mode: "byob" }): SDReadableStreamBYOBReader;
+  tee(): Array<SDReadableStream<OutputType>>;
+
+  /* TODO reenable these methods when we bring in writableStreams and transport types
+  pipeThrough<ResultType>(
+    transform: GenericTransformStream<OutputType, ResultType>,
+    options?: PipeOptions
+  ): SDReadableStream<ResultType>;
+  pipeTo(
+    dest: ws.WritableStream<OutputType>,
+    options?: PipeOptions
+  ): Promise<void>;
+  */
+  [shared.state_]: ReadableStreamState;
+  [shared.storedError_]: shared.ErrorResult;
+  [reader_]: SDReadableStreamReader<OutputType> | undefined;
+  [readableStreamController_]: SDReadableStreamControllerBase<OutputType>;
+}
+
+// ---- Stream
+
+// InitializeReadableStream: set the starting slot values on a new stream.
+export function initializeReadableStream<OutputType>(
+  stream: SDReadableStream<OutputType>
+): void {
+  stream[shared.state_] = "readable";
+  stream[reader_] = undefined;
+  stream[shared.storedError_] = undefined;
+  stream[readableStreamController_] = undefined!; // mark slot as used for brand check
+}
+
+// Brand check: an object is a ReadableStream iff it carries the controller slot.
+export function isReadableStream(
+  value: unknown
+): value is SDReadableStream<any> {
+  if (typeof value !== "object" || value === null) {
+    return false;
+  }
+  return readableStreamController_ in value;
+}
+
+// A stream is locked while a reader holds it.
+export function isReadableStreamLocked<OutputType>(
+  stream: SDReadableStream<OutputType>
+): boolean {
+  return stream[reader_] !== undefined;
+}
+
+// Count of outstanding BYOB read(view) requests (0 when no reader attached).
+export function readableStreamGetNumReadIntoRequests<OutputType>(
+  stream: SDReadableStream<OutputType>
+): number {
+  // TODO remove the "as unknown" cast
+  // This is in to workaround a compiler error
+  // error TS2352: Conversion of type 'SDReadableStreamReader<OutputType>' to type 'SDReadableStreamBYOBReader' may be a mistake because neither type sufficiently overlaps with the other. If this was intentional, convert the expression to 'unknown' first.
+  // Type 'SDReadableStreamReader<OutputType>' is missing the following properties from type 'SDReadableStreamBYOBReader': read, [readIntoRequests_]
+  const reader = (stream[reader_] as unknown) as SDReadableStreamBYOBReader;
+  if (reader === undefined) {
+    return 0;
+  }
+  return reader[readIntoRequests_].length;
+}
+
+// Count of outstanding default read() requests (0 when no reader attached).
+export function readableStreamGetNumReadRequests<OutputType>(
+  stream: SDReadableStream<OutputType>
+): number {
+  const reader = stream[reader_] as SDReadableStreamDefaultReader<OutputType>;
+  if (reader === undefined) {
+    return 0;
+  }
+  return reader[readRequests_].length;
+}
+
+// Build a {value, done} read result. Author-code results get Object.prototype;
+// internal consumers get a null-prototype object.
+export function readableStreamCreateReadResult<T>(
+  value: T,
+  done: boolean,
+  forAuthorCode: boolean
+): IteratorResult<T> {
+  const prototype = forAuthorCode ? Object.prototype : null;
+  const result = Object.create(prototype);
+  result.value = value;
+  result.done = done;
+  return result;
+}
+
+// Register a pending BYOB read: queue a controlled promise on the BYOB
+// reader's readIntoRequests list and hand its promise back to the caller.
+export function readableStreamAddReadIntoRequest(
+  stream: SDReadableStream<ArrayBufferView>,
+  forAuthorCode: boolean
+): Promise<IteratorResult<ArrayBufferView, any>> {
+  // Assert: ! IsReadableStreamBYOBReader(stream.[[reader]]) is true.
+  // Assert: stream.[[state]] is "readable" or "closed".
+  const reader = stream[reader_] as SDReadableStreamBYOBReader;
+  const conProm = shared.createControlledPromise<
+    IteratorResult<ArrayBufferView>
+  >() as ReadRequest<IteratorResult<ArrayBufferView>>;
+  conProm.forAuthorCode = forAuthorCode;
+  reader[readIntoRequests_].push(conProm);
+  return conProm.promise;
+}
+
+// Register a pending default read: queue a controlled promise on the default
+// reader's readRequests list and hand its promise back to the caller.
+export function readableStreamAddReadRequest<OutputType>(
+  stream: SDReadableStream<OutputType>,
+  forAuthorCode: boolean
+): Promise<IteratorResult<OutputType, any>> {
+  // Assert: ! IsReadableStreamDefaultReader(stream.[[reader]]) is true.
+  // Assert: stream.[[state]] is "readable".
+  const reader = stream[reader_] as SDReadableStreamDefaultReader<OutputType>;
+  const conProm = shared.createControlledPromise<
+    IteratorResult<OutputType>
+  >() as ReadRequest<IteratorResult<OutputType>>;
+  conProm.forAuthorCode = forAuthorCode;
+  reader[readRequests_].push(conProm);
+  return conProm.promise;
+}
+
+// True when the stream's current reader is a BYOB reader.
+export function readableStreamHasBYOBReader<OutputType>(
+  stream: SDReadableStream<OutputType>
+): boolean {
+  const reader = stream[reader_];
+  return isReadableStreamBYOBReader(reader);
+}
+
+// True when the stream's current reader is a default reader.
+export function readableStreamHasDefaultReader<OutputType>(
+  stream: SDReadableStream<OutputType>
+): boolean {
+  const reader = stream[reader_];
+  return isReadableStreamDefaultReader(reader);
+}
+
+// ReadableStreamCancel: no-op for closed streams, rejection for errored ones;
+// otherwise close the stream and run the controller's [[CancelSteps]].
+export function readableStreamCancel<OutputType>(
+  stream: SDReadableStream<OutputType>,
+  reason: shared.ErrorResult
+): Promise<undefined> {
+  if (stream[shared.state_] === "closed") {
+    return Promise.resolve(undefined);
+  }
+  if (stream[shared.state_] === "errored") {
+    return Promise.reject(stream[shared.storedError_]);
+  }
+  readableStreamClose(stream);
+
+  const sourceCancelPromise = stream[readableStreamController_][cancelSteps_](
+    reason
+  );
+  return sourceCancelPromise.then(_ => undefined);
+}
+
+// ReadableStreamClose: flip the state to "closed", resolve every pending
+// default read with a done result, and settle the reader's closed promise.
+export function readableStreamClose<OutputType>(
+  stream: SDReadableStream<OutputType>
+): void {
+  // Assert: stream.[[state]] is "readable".
+  stream[shared.state_] = "closed";
+  const reader = stream[reader_];
+  if (reader === undefined) {
+    return;
+  }
+
+  if (isReadableStreamDefaultReader(reader)) {
+    for (const readRequest of reader[readRequests_]) {
+      readRequest.resolve(
+        readableStreamCreateReadResult(
+          undefined,
+          true,
+          readRequest.forAuthorCode
+        )
+      );
+    }
+    reader[readRequests_] = [];
+  }
+  reader[closedPromise_].resolve();
+  // Guard against an unhandled-rejection warning if nobody awaits closed.
+  reader[closedPromise_].promise.catch(() => {});
+}
+
+// ReadableStreamError: store the error, reject every pending read (default or
+// BYOB) and the reader's closed promise.
+export function readableStreamError<OutputType>(
+  stream: SDReadableStream<OutputType>,
+  error: shared.ErrorResult
+): void {
+  if (stream[shared.state_] !== "readable") {
+    throw new RangeError("Stream is in an invalid state");
+  }
+  stream[shared.state_] = "errored";
+  stream[shared.storedError_] = error;
+
+  const reader = stream[reader_];
+  if (reader === undefined) {
+    return;
+  }
+  if (isReadableStreamDefaultReader(reader)) {
+    for (const readRequest of reader[readRequests_]) {
+      readRequest.reject(error);
+    }
+    reader[readRequests_] = [];
+  } else {
+    // Assert: IsReadableStreamBYOBReader(reader).
+    // TODO remove the "as unknown" cast
+    const readIntoRequests = ((reader as unknown) as SDReadableStreamBYOBReader)[
+      readIntoRequests_
+    ];
+    for (const readIntoRequest of readIntoRequests) {
+      readIntoRequest.reject(error);
+    }
+    // TODO remove the "as unknown" cast
+    ((reader as unknown) as SDReadableStreamBYOBReader)[readIntoRequests_] = [];
+  }
+
+  reader[closedPromise_].reject(error);
+}
+
+// ---- Readers
+
+// Brand check: default readers are identified by the readRequests_ slot.
+export function isReadableStreamDefaultReader(
+  reader: unknown
+): reader is SDReadableStreamDefaultReader<any> {
+  if (typeof reader !== "object" || reader === null) {
+    return false;
+  }
+  return readRequests_ in reader;
+}
+
+// Brand check: BYOB readers are identified by the readIntoRequests_ slot.
+export function isReadableStreamBYOBReader(
+  reader: unknown
+): reader is SDReadableStreamBYOBReader {
+  if (typeof reader !== "object" || reader === null) {
+    return false;
+  }
+  return readIntoRequests_ in reader;
+}
+
+// ReadableStreamReaderGenericInitialize: tie reader and stream together and
+// settle the reader's closed promise to match the stream's current state.
+export function readableStreamReaderGenericInitialize<OutputType>(
+  reader: SDReadableStreamReader<OutputType>,
+  stream: SDReadableStream<OutputType>
+): void {
+  reader[ownerReadableStream_] = stream;
+  stream[reader_] = reader;
+  const streamState = stream[shared.state_];
+
+  reader[closedPromise_] = shared.createControlledPromise<void>();
+  if (streamState === "readable") {
+    // leave as is
+  } else if (streamState === "closed") {
+    reader[closedPromise_].resolve(undefined);
+  } else {
+    reader[closedPromise_].reject(stream[shared.storedError_]);
+    reader[closedPromise_].promise.catch(() => {});
+  }
+}
+
+// ReadableStreamReaderGenericRelease: detach reader from stream; the closed
+// promise is (re)created if already settled, then rejected with a TypeError.
+export function readableStreamReaderGenericRelease<OutputType>(
+  reader: SDReadableStreamReader<OutputType>
+): void {
+  // Assert: reader.[[ownerReadableStream]] is not undefined.
+  // Assert: reader.[[ownerReadableStream]].[[reader]] is reader.
+  const stream = reader[ownerReadableStream_];
+  if (stream === undefined) {
+    throw new TypeError("Reader is in an inconsistent state");
+  }
+
+  if (stream[shared.state_] === "readable") {
+    // code moved out
+  } else {
+    reader[closedPromise_] = shared.createControlledPromise<void>();
+  }
+  reader[closedPromise_].reject(new TypeError());
+  reader[closedPromise_].promise.catch(() => {});
+
+  stream[reader_] = undefined;
+  reader[ownerReadableStream_] = undefined;
+}
+
+// ReadableStreamBYOBReaderRead: reject immediately on an errored stream,
+// otherwise delegate the fill into `view` to the byte controller.
+export function readableStreamBYOBReaderRead(
+  reader: SDReadableStreamBYOBReader,
+  view: ArrayBufferView,
+  forAuthorCode = false
+): Promise<IteratorResult<ArrayBufferView, any>> {
+  const stream = reader[ownerReadableStream_]!;
+  // Assert: stream is not undefined.
+
+  if (stream[shared.state_] === "errored") {
+    return Promise.reject(stream[shared.storedError_]);
+  }
+  return readableByteStreamControllerPullInto(
+    stream[readableStreamController_] as SDReadableByteStreamController,
+    view,
+    forAuthorCode
+  );
+}
+
+// ReadableStreamDefaultReaderRead: short-circuit closed/errored streams,
+// otherwise run the controller's [[PullSteps]].
+export function readableStreamDefaultReaderRead<OutputType>(
+  reader: SDReadableStreamDefaultReader<OutputType>,
+  forAuthorCode = false
+): Promise<IteratorResult<OutputType | undefined>> {
+  const stream = reader[ownerReadableStream_]!;
+  // Assert: stream is not undefined.
+
+  if (stream[shared.state_] === "closed") {
+    return Promise.resolve(
+      readableStreamCreateReadResult(undefined, true, forAuthorCode)
+    );
+  }
+  if (stream[shared.state_] === "errored") {
+    return Promise.reject(stream[shared.storedError_]);
+  }
+  // Assert: stream.[[state]] is "readable".
+  return stream[readableStreamController_][pullSteps_](forAuthorCode);
+}
+
+// Resolve the oldest pending BYOB read with the given chunk.
+export function readableStreamFulfillReadIntoRequest<OutputType>(
+  stream: SDReadableStream<OutputType>,
+  chunk: ArrayBufferView,
+  done: boolean
+): void {
+  // TODO remove the "as unknown" cast
+  const reader = (stream[reader_] as unknown) as SDReadableStreamBYOBReader;
+  const readIntoRequest = reader[readIntoRequests_].shift()!; // <-- length check done in caller
+  readIntoRequest.resolve(
+    readableStreamCreateReadResult(chunk, done, readIntoRequest.forAuthorCode)
+  );
+}
+
+// Resolve the oldest pending default read with the given chunk.
+export function readableStreamFulfillReadRequest<OutputType>(
+  stream: SDReadableStream<OutputType>,
+  chunk: OutputType,
+  done: boolean
+): void {
+  const reader = stream[reader_] as SDReadableStreamDefaultReader<OutputType>;
+  const readRequest = reader[readRequests_].shift()!; // <-- length check done in caller
+  readRequest.resolve(
+    readableStreamCreateReadResult(chunk, done, readRequest.forAuthorCode)
+  );
+}
+
+// ---- DefaultController
+
+// SetUpReadableStreamDefaultController: initialize every controller slot,
+// attach it to the stream, then run the start algorithm; the first pull is
+// scheduled once start settles, and a start failure errors the controller.
+export function setUpReadableStreamDefaultController<OutputType>(
+  stream: SDReadableStream<OutputType>,
+  controller: SDReadableStreamDefaultController<OutputType>,
+  startAlgorithm: StartAlgorithm,
+  pullAlgorithm: PullAlgorithm<OutputType>,
+  cancelAlgorithm: CancelAlgorithm,
+  highWaterMark: number,
+  sizeAlgorithm: QueuingStrategySizeCallback<OutputType>
+): void {
+  // Assert: stream.[[readableStreamController]] is undefined.
+  controller[controlledReadableStream_] = stream;
+  q.resetQueue(controller);
+  controller[started_] = false;
+  controller[closeRequested_] = false;
+  controller[pullAgain_] = false;
+  controller[pulling_] = false;
+  controller[strategySizeAlgorithm_] = sizeAlgorithm;
+  controller[strategyHWM_] = highWaterMark;
+  controller[pullAlgorithm_] = pullAlgorithm;
+  controller[cancelAlgorithm_] = cancelAlgorithm;
+  stream[readableStreamController_] = controller;
+
+  const startResult = startAlgorithm();
+  Promise.resolve(startResult).then(
+    _ => {
+      controller[started_] = true;
+      // Assert: controller.[[pulling]] is false.
+      // Assert: controller.[[pullAgain]] is false.
+      readableStreamDefaultControllerCallPullIfNeeded(controller);
+    },
+    error => {
+      readableStreamDefaultControllerError(controller, error);
+    }
+  );
+}
+
+// Brand check: default controllers carry the controlledReadableStream_ slot.
+export function isReadableStreamDefaultController(
+  value: unknown
+): value is SDReadableStreamDefaultController<any> {
+  if (typeof value !== "object" || value === null) {
+    return false;
+  }
+  return controlledReadableStream_ in value;
+}
+
+// Backpressure applies exactly when the controller would not pull more data.
+export function readableStreamDefaultControllerHasBackpressure<OutputType>(
+  controller: SDReadableStreamDefaultController<OutputType>
+): boolean {
+  return !readableStreamDefaultControllerShouldCallPull(controller);
+}
+
+// close()/enqueue() are only legal while readable and before close is requested.
+export function readableStreamDefaultControllerCanCloseOrEnqueue<OutputType>(
+  controller: SDReadableStreamDefaultController<OutputType>
+): boolean {
+  const state = controller[controlledReadableStream_][shared.state_];
+  return controller[closeRequested_] === false && state === "readable";
+}
+
+// desiredSize = HWM - queued size; null when errored, 0 when closed.
+export function readableStreamDefaultControllerGetDesiredSize<OutputType>(
+  controller: SDReadableStreamDefaultController<OutputType>
+): number | null {
+  const state = controller[controlledReadableStream_][shared.state_];
+  if (state === "errored") {
+    return null;
+  }
+  if (state === "closed") {
+    return 0;
+  }
+  return controller[strategyHWM_] - controller[q.queueTotalSize_];
+}
+
+// Request close; the stream closes immediately only if the queue is already
+// drained, otherwise it closes once remaining chunks are read.
+export function readableStreamDefaultControllerClose<OutputType>(
+  controller: SDReadableStreamDefaultController<OutputType>
+): void {
+  // Assert: !ReadableStreamDefaultControllerCanCloseOrEnqueue(controller) is true.
+  controller[closeRequested_] = true;
+  const stream = controller[controlledReadableStream_];
+  if (controller[q.queue_].length === 0) {
+    readableStreamDefaultControllerClearAlgorithms(controller);
+    readableStreamClose(stream);
+  }
+}
+
+export function readableStreamDefaultControllerEnqueue<OutputType>(
+ controller: SDReadableStreamDefaultController<OutputType>,
+ chunk: OutputType
+): void {
+ const stream = controller[controlledReadableStream_];
+ // Assert: !ReadableStreamDefaultControllerCanCloseOrEnqueue(controller) is true.
+ if (
+ isReadableStreamLocked(stream) &&
+ readableStreamGetNumReadRequests(stream) > 0
+ ) {
+ readableStreamFulfillReadRequest(stream, chunk, false);
+ } else {
+ // Let result be the result of performing controller.[[strategySizeAlgorithm]], passing in chunk,
+ // and interpreting the result as an ECMAScript completion value.
+ // impl note: assuming that in JS land this just means try/catch with rethrow
+ let chunkSize: number;
+ try {
+ chunkSize = controller[strategySizeAlgorithm_](chunk);
+ } catch (error) {
+ readableStreamDefaultControllerError(controller, error);
+ throw error;
+ }
+ try {
+ q.enqueueValueWithSize(controller, chunk, chunkSize);
+ } catch (error) {
+ readableStreamDefaultControllerError(controller, error);
+ throw error;
+ }
+ }
+ readableStreamDefaultControllerCallPullIfNeeded(controller);
+}
+
// https://streams.spec.whatwg.org/#readable-stream-default-controller-error
// Moves the stream to "errored", discarding queued chunks. No-op if the
// stream has already left the "readable" state.
export function readableStreamDefaultControllerError<OutputType>(
  controller: SDReadableStreamDefaultController<OutputType>,
  error: shared.ErrorResult
): void {
  const stream = controller[controlledReadableStream_];
  if (stream[shared.state_] !== "readable") {
    return;
  }
  // Drop queued chunks and stored algorithms before propagating the error.
  q.resetQueue(controller);
  readableStreamDefaultControllerClearAlgorithms(controller);
  readableStreamError(stream, error);
}
+
// https://streams.spec.whatwg.org/#readable-stream-default-controller-call-pull-if-needed
// Invokes the underlying source's pull algorithm when demand exists,
// serializing overlapping requests via the [[pulling]]/[[pullAgain]] flags.
export function readableStreamDefaultControllerCallPullIfNeeded<OutputType>(
  controller: SDReadableStreamDefaultController<OutputType>
): void {
  if (!readableStreamDefaultControllerShouldCallPull(controller)) {
    return;
  }
  if (controller[pulling_]) {
    // A pull is already in flight; remember to pull again once it settles.
    controller[pullAgain_] = true;
    return;
  }
  if (controller[pullAgain_]) {
    // The spec asserts [[pullAgain]] is false here; surface violations.
    throw new RangeError("Stream controller is in an invalid state.");
  }

  controller[pulling_] = true;
  controller[pullAlgorithm_](controller).then(
    _ => {
      controller[pulling_] = false;
      if (controller[pullAgain_]) {
        // Demand arrived while the pull was pending: issue the deferred pull.
        controller[pullAgain_] = false;
        readableStreamDefaultControllerCallPullIfNeeded(controller);
      }
    },
    error => {
      // A rejected pull errors the whole stream.
      readableStreamDefaultControllerError(controller, error);
    }
  );
}
+
// https://streams.spec.whatwg.org/#readable-stream-default-controller-should-call-pull
// True when the underlying source should be asked for more data: the stream
// must be open, started, and either have a waiting reader or queue headroom.
export function readableStreamDefaultControllerShouldCallPull<OutputType>(
  controller: SDReadableStreamDefaultController<OutputType>
): boolean {
  const stream = controller[controlledReadableStream_];
  if (!readableStreamDefaultControllerCanCloseOrEnqueue(controller)) {
    return false;
  }
  if (controller[started_] === false) {
    // The start algorithm has not resolved yet; never pull before start.
    return false;
  }
  if (
    isReadableStreamLocked(stream) &&
    readableStreamGetNumReadRequests(stream) > 0
  ) {
    // A reader is blocked on read(): always pull regardless of queue size.
    return true;
  }
  const desiredSize = readableStreamDefaultControllerGetDesiredSize(controller);
  if (desiredSize === null) {
    // Only possible if the stream errored, which the guards above exclude.
    throw new RangeError("Stream is in an invalid state.");
  }
  return desiredSize > 0;
}
+
+export function readableStreamDefaultControllerClearAlgorithms<OutputType>(
+ controller: SDReadableStreamDefaultController<OutputType>
+): void {
+ controller[pullAlgorithm_] = undefined!;
+ controller[cancelAlgorithm_] = undefined!;
+ controller[strategySizeAlgorithm_] = undefined!;
+}
+
+// ---- BYOBController
+
// https://streams.spec.whatwg.org/#set-up-readable-byte-stream-controller
// Wires a byte-stream controller to its stream, validates options, and runs
// the underlying source's start algorithm before allowing pulls.
export function setUpReadableByteStreamController(
  stream: SDReadableStream<ArrayBufferView>,
  controller: SDReadableByteStreamController,
  startAlgorithm: StartAlgorithm,
  pullAlgorithm: PullAlgorithm<ArrayBufferView>,
  cancelAlgorithm: CancelAlgorithm,
  highWaterMark: number,
  autoAllocateChunkSize: number | undefined
): void {
  // Assert: stream.[[readableStreamController]] is undefined.
  if (stream[readableStreamController_] !== undefined) {
    throw new TypeError("Cannot reuse streams");
  }
  if (autoAllocateChunkSize !== undefined) {
    if (
      !shared.isInteger(autoAllocateChunkSize) ||
      autoAllocateChunkSize <= 0
    ) {
      throw new RangeError(
        "autoAllocateChunkSize must be a positive, finite integer"
      );
    }
  }
  // Set controller.[[controlledReadableByteStream]] to stream.
  controller[controlledReadableByteStream_] = stream;
  // Set controller.[[pullAgain]] and controller.[[pulling]] to false.
  controller[pullAgain_] = false;
  controller[pulling_] = false;
  readableByteStreamControllerClearPendingPullIntos(controller);
  q.resetQueue(controller);
  controller[closeRequested_] = false;
  controller[started_] = false;
  // Throws RangeError on NaN/negative high-water marks.
  controller[strategyHWM_] = shared.validateAndNormalizeHighWaterMark(
    highWaterMark
  );
  controller[pullAlgorithm_] = pullAlgorithm;
  controller[cancelAlgorithm_] = cancelAlgorithm;
  controller[autoAllocateChunkSize_] = autoAllocateChunkSize;
  controller[pendingPullIntos_] = [];
  stream[readableStreamController_] = controller;

  // Let startResult be the result of performing startAlgorithm.
  // start may return a promise; only after it resolves is pulling enabled.
  const startResult = startAlgorithm();
  Promise.resolve(startResult).then(
    _ => {
      controller[started_] = true;
      // Assert: controller.[[pulling]] is false.
      // Assert: controller.[[pullAgain]] is false.
      readableByteStreamControllerCallPullIfNeeded(controller);
    },
    error => {
      // A failed start errors the stream before any data flows.
      readableByteStreamControllerError(controller, error);
    }
  );
}
+
+export function isReadableStreamBYOBRequest(
+ value: unknown
+): value is SDReadableStreamBYOBRequest {
+ if (typeof value !== "object" || value === null) {
+ return false;
+ }
+ return associatedReadableByteStreamController_ in value;
+}
+
+export function isReadableByteStreamController(
+ value: unknown
+): value is SDReadableByteStreamController {
+ if (typeof value !== "object" || value === null) {
+ return false;
+ }
+ return controlledReadableByteStream_ in value;
+}
+
// https://streams.spec.whatwg.org/#readable-byte-stream-controller-call-pull-if-needed
// Byte-stream analogue of the default controller's pull driver: serializes
// pulls through the [[pulling]]/[[pullAgain]] flags.
export function readableByteStreamControllerCallPullIfNeeded(
  controller: SDReadableByteStreamController
): void {
  if (!readableByteStreamControllerShouldCallPull(controller)) {
    return;
  }
  if (controller[pulling_]) {
    // A pull is in flight; schedule a follow-up pull instead of overlapping.
    controller[pullAgain_] = true;
    return;
  }
  // Assert: controller.[[pullAgain]] is false.
  controller[pulling_] = true;
  controller[pullAlgorithm_](controller).then(
    _ => {
      controller[pulling_] = false;
      if (controller[pullAgain_]) {
        // Demand arrived during the pull; issue the deferred follow-up.
        controller[pullAgain_] = false;
        readableByteStreamControllerCallPullIfNeeded(controller);
      }
    },
    error => {
      // A rejected pull errors the whole stream.
      readableByteStreamControllerError(controller, error);
    }
  );
}
+
+export function readableByteStreamControllerClearAlgorithms(
+ controller: SDReadableByteStreamController
+): void {
+ controller[pullAlgorithm_] = undefined!;
+ controller[cancelAlgorithm_] = undefined!;
+}
+
+export function readableByteStreamControllerClearPendingPullIntos(
+ controller: SDReadableByteStreamController
+): void {
+ readableByteStreamControllerInvalidateBYOBRequest(controller);
+ controller[pendingPullIntos_] = [];
+}
+
// https://streams.spec.whatwg.org/#readable-byte-stream-controller-close
// Closes the stream, deferring until queued bytes drain; a partially filled
// BYOB descriptor at close time is a caller error.
export function readableByteStreamControllerClose(
  controller: SDReadableByteStreamController
): void {
  const stream = controller[controlledReadableByteStream_];
  // Assert: controller.[[closeRequested]] is false.
  // Assert: stream.[[state]] is "readable".
  if (controller[q.queueTotalSize_] > 0) {
    // Bytes are still queued: defer the close until the queue drains
    // (completed by readableByteStreamControllerHandleQueueDrain).
    controller[closeRequested_] = true;
    return;
  }
  if (controller[pendingPullIntos_].length > 0) {
    const firstPendingPullInto = controller[pendingPullIntos_][0];
    if (firstPendingPullInto.bytesFilled > 0) {
      // Closing mid-fill would hand the BYOB reader a torn chunk: error the
      // stream and rethrow to the closer, per spec.
      const error = new TypeError();
      readableByteStreamControllerError(controller, error);
      throw error;
    }
  }
  readableByteStreamControllerClearAlgorithms(controller);
  readableStreamClose(stream);
}
+
+export function readableByteStreamControllerCommitPullIntoDescriptor(
+ stream: SDReadableStream<ArrayBufferView>,
+ pullIntoDescriptor: PullIntoDescriptor
+): void {
+ // Assert: stream.[[state]] is not "errored".
+ let done = false;
+ if (stream[shared.state_] === "closed") {
+ // Assert: pullIntoDescriptor.[[bytesFilled]] is 0.
+ done = true;
+ }
+ const filledView = readableByteStreamControllerConvertPullIntoDescriptor(
+ pullIntoDescriptor
+ );
+ if (pullIntoDescriptor.readerType === "default") {
+ readableStreamFulfillReadRequest(stream, filledView, done);
+ } else {
+ // Assert: pullIntoDescriptor.[[readerType]] is "byob".
+ readableStreamFulfillReadIntoRequest(stream, filledView, done);
+ }
+}
+
+export function readableByteStreamControllerConvertPullIntoDescriptor(
+ pullIntoDescriptor: PullIntoDescriptor
+): ArrayBufferView {
+ const { bytesFilled, elementSize } = pullIntoDescriptor;
+ // Assert: bytesFilled <= pullIntoDescriptor.byteLength
+ // Assert: bytesFilled mod elementSize is 0
+ return new pullIntoDescriptor.ctor(
+ pullIntoDescriptor.buffer,
+ pullIntoDescriptor.byteOffset,
+ bytesFilled / elementSize
+ );
+}
+
// https://streams.spec.whatwg.org/#readable-byte-stream-controller-enqueue
// Enqueues a chunk's bytes, transferring ownership of the underlying buffer.
// Dispatches on the kind of reader currently attached to the stream.
export function readableByteStreamControllerEnqueue(
  controller: SDReadableByteStreamController,
  chunk: ArrayBufferView
): void {
  const stream = controller[controlledReadableByteStream_];
  // Assert: controller.[[closeRequested]] is false.
  // Assert: stream.[[state]] is "readable".
  const { buffer, byteOffset, byteLength } = chunk;

  // Detach the caller's buffer so it cannot mutate bytes after enqueue.
  const transferredBuffer = shared.transferArrayBuffer(buffer);

  if (readableStreamHasDefaultReader(stream)) {
    if (readableStreamGetNumReadRequests(stream) === 0) {
      // No read() waiting: stash the raw bytes in the queue.
      readableByteStreamControllerEnqueueChunkToQueue(
        controller,
        transferredBuffer,
        byteOffset,
        byteLength
      );
    } else {
      // Assert: controller.[[queue]] is empty.
      // Fulfill the oldest pending read() directly with a Uint8Array view.
      const transferredView = new Uint8Array(
        transferredBuffer,
        byteOffset,
        byteLength
      );
      readableStreamFulfillReadRequest(stream, transferredView, false);
    }
  } else if (readableStreamHasBYOBReader(stream)) {
    // Queue the bytes, then copy as much as possible into any outstanding
    // BYOB pull-into descriptors.
    readableByteStreamControllerEnqueueChunkToQueue(
      controller,
      transferredBuffer,
      byteOffset,
      byteLength
    );
    readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(
      controller
    );
  } else {
    // Assert: !IsReadableStreamLocked(stream) is false.
    // No reader attached: just queue for a future reader.
    readableByteStreamControllerEnqueueChunkToQueue(
      controller,
      transferredBuffer,
      byteOffset,
      byteLength
    );
  }
  readableByteStreamControllerCallPullIfNeeded(controller);
}
+
+export function readableByteStreamControllerEnqueueChunkToQueue(
+ controller: SDReadableByteStreamController,
+ buffer: ArrayBufferLike,
+ byteOffset: number,
+ byteLength: number
+): void {
+ controller[q.queue_].push({ buffer, byteOffset, byteLength });
+ controller[q.queueTotalSize_] += byteLength;
+}
+
// https://streams.spec.whatwg.org/#readable-byte-stream-controller-error
// Errors the stream, discarding pending pull-intos and queued bytes first.
// No-op once the stream has left the "readable" state.
export function readableByteStreamControllerError(
  controller: SDReadableByteStreamController,
  error: shared.ErrorResult
): void {
  const stream = controller[controlledReadableByteStream_];
  if (stream[shared.state_] !== "readable") {
    return;
  }
  // Teardown order follows the spec: pull-intos, queue, algorithms, stream.
  readableByteStreamControllerClearPendingPullIntos(controller);
  q.resetQueue(controller);
  readableByteStreamControllerClearAlgorithms(controller);
  readableStreamError(stream, error);
}
+
+export function readableByteStreamControllerFillHeadPullIntoDescriptor(
+ controller: SDReadableByteStreamController,
+ size: number,
+ pullIntoDescriptor: PullIntoDescriptor
+): void {
+ // Assert: either controller.[[pendingPullIntos]] is empty, or the first element of controller.[[pendingPullIntos]] is pullIntoDescriptor.
+ readableByteStreamControllerInvalidateBYOBRequest(controller);
+ pullIntoDescriptor.bytesFilled += size;
+}
+
// https://streams.spec.whatwg.org/#readable-byte-stream-controller-fill-pull-into-descriptor-from-queue
// Copies queued bytes into a BYOB pull-into descriptor, only counting whole
// elements as progress. Returns true when the descriptor has at least one
// complete element and can be committed to the reader.
export function readableByteStreamControllerFillPullIntoDescriptorFromQueue(
  controller: SDReadableByteStreamController,
  pullIntoDescriptor: PullIntoDescriptor
): boolean {
  const elementSize = pullIntoDescriptor.elementSize;
  // Bytes already filled, rounded down to a whole number of elements.
  const currentAlignedBytes =
    pullIntoDescriptor.bytesFilled -
    (pullIntoDescriptor.bytesFilled % elementSize);
  // We can copy at most what is queued, capped by the descriptor's capacity.
  const maxBytesToCopy = Math.min(
    controller[q.queueTotalSize_],
    pullIntoDescriptor.byteLength - pullIntoDescriptor.bytesFilled
  );
  const maxBytesFilled = pullIntoDescriptor.bytesFilled + maxBytesToCopy;
  const maxAlignedBytes = maxBytesFilled - (maxBytesFilled % elementSize);
  let totalBytesToCopyRemaining = maxBytesToCopy;
  let ready = false;

  if (maxAlignedBytes > currentAlignedBytes) {
    // At least one more whole element can be completed: copy only up to the
    // aligned boundary so no partially-written element is left behind.
    totalBytesToCopyRemaining =
      maxAlignedBytes - pullIntoDescriptor.bytesFilled;
    ready = true;
  }
  const queue = controller[q.queue_];

  // Drain queued chunks front-to-back into the descriptor's buffer.
  while (totalBytesToCopyRemaining > 0) {
    const headOfQueue = queue.front()!;
    const bytesToCopy = Math.min(
      totalBytesToCopyRemaining,
      headOfQueue.byteLength
    );
    const destStart =
      pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled;
    shared.copyDataBlockBytes(
      pullIntoDescriptor.buffer,
      destStart,
      headOfQueue.buffer,
      headOfQueue.byteOffset,
      bytesToCopy
    );
    if (headOfQueue.byteLength === bytesToCopy) {
      // Chunk fully consumed.
      queue.shift();
    } else {
      // Chunk partially consumed: advance its window in place.
      headOfQueue.byteOffset += bytesToCopy;
      headOfQueue.byteLength -= bytesToCopy;
    }
    controller[q.queueTotalSize_] -= bytesToCopy;
    readableByteStreamControllerFillHeadPullIntoDescriptor(
      controller,
      bytesToCopy,
      pullIntoDescriptor
    );
    totalBytesToCopyRemaining -= bytesToCopy;
  }
  if (!ready) {
    // Assert: controller[queueTotalSize_] === 0
    // Assert: pullIntoDescriptor.bytesFilled > 0
    // Assert: pullIntoDescriptor.bytesFilled < pullIntoDescriptor.elementSize
  }
  return ready;
}
+
+export function readableByteStreamControllerGetDesiredSize(
+ controller: SDReadableByteStreamController
+): number | null {
+ const stream = controller[controlledReadableByteStream_];
+ const state = stream[shared.state_];
+ if (state === "errored") {
+ return null;
+ }
+ if (state === "closed") {
+ return 0;
+ }
+ return controller[strategyHWM_] - controller[q.queueTotalSize_];
+}
+
+export function readableByteStreamControllerHandleQueueDrain(
+ controller: SDReadableByteStreamController
+): void {
+ // Assert: controller.[[controlledReadableByteStream]].[[state]] is "readable".
+ if (controller[q.queueTotalSize_] === 0 && controller[closeRequested_]) {
+ readableByteStreamControllerClearAlgorithms(controller);
+ readableStreamClose(controller[controlledReadableByteStream_]);
+ } else {
+ readableByteStreamControllerCallPullIfNeeded(controller);
+ }
+}
+
+export function readableByteStreamControllerInvalidateBYOBRequest(
+ controller: SDReadableByteStreamController
+): void {
+ const byobRequest = controller[byobRequest_];
+ if (byobRequest === undefined) {
+ return;
+ }
+ byobRequest[associatedReadableByteStreamController_] = undefined;
+ byobRequest[view_] = undefined;
+ controller[byobRequest_] = undefined;
+}
+
// https://streams.spec.whatwg.org/#readable-byte-stream-controller-process-pull-into-descriptors-using-queue
// Drains queued bytes into pending BYOB descriptors in FIFO order, committing
// each descriptor to its reader as soon as it holds a whole element.
export function readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(
  controller: SDReadableByteStreamController
): void {
  // Assert: controller.[[closeRequested]] is false.
  const pendingPullIntos = controller[pendingPullIntos_];
  while (pendingPullIntos.length > 0) {
    if (controller[q.queueTotalSize_] === 0) {
      // No bytes left to distribute.
      return;
    }
    const pullIntoDescriptor = pendingPullIntos[0];
    if (
      readableByteStreamControllerFillPullIntoDescriptorFromQueue(
        controller,
        pullIntoDescriptor
      )
    ) {
      // Descriptor is ready: remove it (shift mutates pendingPullIntos, so
      // the loop advances) and deliver the filled view to the reader.
      readableByteStreamControllerShiftPendingPullInto(controller);
      readableByteStreamControllerCommitPullIntoDescriptor(
        controller[controlledReadableByteStream_],
        pullIntoDescriptor
      );
    }
  }
}
+
// https://streams.spec.whatwg.org/#readable-byte-stream-controller-pull-into
// Implements BYOB reader.read(view): tries to satisfy the read from queued
// bytes immediately, otherwise registers a pull-into descriptor and waits.
export function readableByteStreamControllerPullInto(
  controller: SDReadableByteStreamController,
  view: ArrayBufferView,
  forAuthorCode: boolean
): Promise<IteratorResult<ArrayBufferView, any>> {
  const stream = controller[controlledReadableByteStream_];

  const elementSize = (view as Uint8Array).BYTES_PER_ELEMENT || 1; // DataView exposes this in Webkit as 1, is not present in FF or Blink
  const ctor = view.constructor as Uint8ArrayConstructor; // the typecast here is just for TS typing, it does not influence buffer creation

  const byteOffset = view.byteOffset;
  const byteLength = view.byteLength;
  // Detach the caller's buffer: the stream owns it until the read settles.
  const buffer = shared.transferArrayBuffer(view.buffer);
  const pullIntoDescriptor: PullIntoDescriptor = {
    buffer,
    byteOffset,
    byteLength,
    bytesFilled: 0,
    elementSize,
    ctor,
    readerType: "byob"
  };

  if (controller[pendingPullIntos_].length > 0) {
    // Earlier pull-intos must complete first; just queue behind them.
    controller[pendingPullIntos_].push(pullIntoDescriptor);
    return readableStreamAddReadIntoRequest(stream, forAuthorCode);
  }
  if (stream[shared.state_] === "closed") {
    // Closed stream: resolve immediately with a zero-length view of the
    // caller's element type.
    const emptyView = new ctor(
      pullIntoDescriptor.buffer,
      pullIntoDescriptor.byteOffset,
      0
    );
    return Promise.resolve(
      readableStreamCreateReadResult(emptyView, true, forAuthorCode)
    );
  }

  if (controller[q.queueTotalSize_] > 0) {
    if (
      readableByteStreamControllerFillPullIntoDescriptorFromQueue(
        controller,
        pullIntoDescriptor
      )
    ) {
      // Queue held at least one whole element: resolve synchronously.
      const filledView = readableByteStreamControllerConvertPullIntoDescriptor(
        pullIntoDescriptor
      );
      readableByteStreamControllerHandleQueueDrain(controller);
      return Promise.resolve(
        readableStreamCreateReadResult(filledView, false, forAuthorCode)
      );
    }
    if (controller[closeRequested_]) {
      // Close was requested but leftover bytes cannot fill one element:
      // that data can never be delivered, so error the stream.
      const error = new TypeError();
      readableByteStreamControllerError(controller, error);
      return Promise.reject(error);
    }
  }

  // Not satisfiable now: register the descriptor and ask the source to pull.
  controller[pendingPullIntos_].push(pullIntoDescriptor);
  const promise = readableStreamAddReadIntoRequest(stream, forAuthorCode);
  readableByteStreamControllerCallPullIfNeeded(controller);
  return promise;
}
+
+export function readableByteStreamControllerRespond(
+ controller: SDReadableByteStreamController,
+ bytesWritten: number
+): void {
+ bytesWritten = Number(bytesWritten);
+ if (!shared.isFiniteNonNegativeNumber(bytesWritten)) {
+ throw new RangeError("bytesWritten must be a finite, non-negative number");
+ }
+ // Assert: controller.[[pendingPullIntos]] is not empty.
+ readableByteStreamControllerRespondInternal(controller, bytesWritten);
+}
+
// https://streams.spec.whatwg.org/#readable-byte-stream-controller-respond-in-closed-state
// After the stream has closed, a respond(0) flushes every pending BYOB read
// with its (empty) view so waiting readers observe done === true.
export function readableByteStreamControllerRespondInClosedState(
  controller: SDReadableByteStreamController,
  firstDescriptor: PullIntoDescriptor
): void {
  // Detach the descriptor's buffer so the responder cannot mutate it later.
  firstDescriptor.buffer = shared.transferArrayBuffer(firstDescriptor.buffer);
  // Assert: firstDescriptor.[[bytesFilled]] is 0.
  const stream = controller[controlledReadableByteStream_];
  if (readableStreamHasBYOBReader(stream)) {
    // Commit (empty) descriptors to every outstanding read-into request.
    while (readableStreamGetNumReadIntoRequests(stream) > 0) {
      const pullIntoDescriptor = readableByteStreamControllerShiftPendingPullInto(
        controller
      )!;
      readableByteStreamControllerCommitPullIntoDescriptor(
        stream,
        pullIntoDescriptor
      );
    }
  }
}
+
// https://streams.spec.whatwg.org/#readable-byte-stream-controller-respond-in-readable-state
// Accounts for bytes the responder wrote into the head descriptor; once at
// least one whole element is present, commits it and requeues any trailing
// partial-element bytes for the next read.
export function readableByteStreamControllerRespondInReadableState(
  controller: SDReadableByteStreamController,
  bytesWritten: number,
  pullIntoDescriptor: PullIntoDescriptor
): void {
  if (
    pullIntoDescriptor.bytesFilled + bytesWritten >
    pullIntoDescriptor.byteLength
  ) {
    // Responder claims to have written past the descriptor's capacity.
    throw new RangeError();
  }
  readableByteStreamControllerFillHeadPullIntoDescriptor(
    controller,
    bytesWritten,
    pullIntoDescriptor
  );
  if (pullIntoDescriptor.bytesFilled < pullIntoDescriptor.elementSize) {
    // Not yet a whole element: keep accumulating, nothing to deliver.
    return;
  }
  readableByteStreamControllerShiftPendingPullInto(controller);
  // Bytes beyond the last whole-element boundary cannot be delivered in this
  // view; copy them out and requeue them for the next read.
  const remainderSize =
    pullIntoDescriptor.bytesFilled % pullIntoDescriptor.elementSize;
  if (remainderSize > 0) {
    const end = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled;
    const remainder = shared.cloneArrayBuffer(
      pullIntoDescriptor.buffer,
      end - remainderSize,
      remainderSize,
      ArrayBuffer
    );
    readableByteStreamControllerEnqueueChunkToQueue(
      controller,
      remainder,
      0,
      remainder.byteLength
    );
  }
  // Detach the buffer before handing the view to the reader.
  pullIntoDescriptor.buffer = shared.transferArrayBuffer(
    pullIntoDescriptor.buffer
  );
  pullIntoDescriptor.bytesFilled =
    pullIntoDescriptor.bytesFilled - remainderSize;
  readableByteStreamControllerCommitPullIntoDescriptor(
    controller[controlledReadableByteStream_],
    pullIntoDescriptor
  );
  // The requeued remainder may satisfy further pending pull-intos.
  readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller);
}
+
// https://streams.spec.whatwg.org/#readable-byte-stream-controller-respond-internal
// Routes a respond() to the closed-state or readable-state handler based on
// the stream's current state, then re-evaluates pull demand.
export function readableByteStreamControllerRespondInternal(
  controller: SDReadableByteStreamController,
  bytesWritten: number
): void {
  const firstDescriptor = controller[pendingPullIntos_][0];
  const stream = controller[controlledReadableByteStream_];
  if (stream[shared.state_] === "closed") {
    // Once closed, only respond(0) is legal.
    if (bytesWritten !== 0) {
      throw new TypeError();
    }
    readableByteStreamControllerRespondInClosedState(
      controller,
      firstDescriptor
    );
  } else {
    // Assert: stream.[[state]] is "readable".
    readableByteStreamControllerRespondInReadableState(
      controller,
      bytesWritten,
      firstDescriptor
    );
  }
  readableByteStreamControllerCallPullIfNeeded(controller);
}
+
// https://streams.spec.whatwg.org/#readable-byte-stream-controller-respond-with-new-view
// byobRequest.respondWithNewView(): the new view must exactly continue the
// head descriptor (same length, offset at the current fill position).
export function readableByteStreamControllerRespondWithNewView(
  controller: SDReadableByteStreamController,
  view: ArrayBufferView
): void {
  // Assert: controller.[[pendingPullIntos]] is not empty.
  const firstDescriptor = controller[pendingPullIntos_][0];
  if (
    firstDescriptor.byteOffset + firstDescriptor.bytesFilled !==
    view.byteOffset
  ) {
    throw new RangeError();
  }
  if (firstDescriptor.byteLength !== view.byteLength) {
    throw new RangeError();
  }
  // NOTE(review): the spec transfers the new view's buffer here
  // (TransferArrayBuffer(view.[[ViewedArrayBuffer]])); this fork assigns it
  // directly, matching upstream sd-streams — confirm intentional.
  firstDescriptor.buffer = view.buffer;
  // The whole view counts as written.
  readableByteStreamControllerRespondInternal(controller, view.byteLength);
}
+
+export function readableByteStreamControllerShiftPendingPullInto(
+ controller: SDReadableByteStreamController
+): PullIntoDescriptor | undefined {
+ const descriptor = controller[pendingPullIntos_].shift();
+ readableByteStreamControllerInvalidateBYOBRequest(controller);
+ return descriptor;
+}
+
// https://streams.spec.whatwg.org/#readable-byte-stream-controller-should-call-pull
// True when the underlying byte source should be asked for more data: the
// stream must be open, started, not closing, and either have a waiting
// reader (default or BYOB) or headroom below the high-water mark.
export function readableByteStreamControllerShouldCallPull(
  controller: SDReadableByteStreamController
): boolean {
  // Let stream be controller.[[controlledReadableByteStream]].
  const stream = controller[controlledReadableByteStream_];
  if (stream[shared.state_] !== "readable") {
    return false;
  }
  if (controller[closeRequested_]) {
    // Close pending: no new data will ever be wanted.
    return false;
  }
  if (!controller[started_]) {
    // Never pull before the start algorithm resolves.
    return false;
  }
  if (
    readableStreamHasDefaultReader(stream) &&
    readableStreamGetNumReadRequests(stream) > 0
  ) {
    return true;
  }
  if (
    readableStreamHasBYOBReader(stream) &&
    readableStreamGetNumReadIntoRequests(stream) > 0
  ) {
    return true;
  }
  const desiredSize = readableByteStreamControllerGetDesiredSize(controller);
  // Assert: desiredSize is not null. (null only when errored; excluded above.)
  return desiredSize! > 0;
}
+
// https://streams.spec.whatwg.org/#set-up-readable-stream-byob-request
// Associates a (prototype-created) BYOB request object with its controller
// and the view the responder is expected to write into.
export function setUpReadableStreamBYOBRequest(
  request: SDReadableStreamBYOBRequest,
  controller: SDReadableByteStreamController,
  view: ArrayBufferView
): void {
  if (!isReadableByteStreamController(controller)) {
    throw new TypeError();
  }
  if (!ArrayBuffer.isView(view)) {
    throw new TypeError();
  }
  // Assert: !IsDetachedBuffer(view.[[ViewedArrayBuffer]]) is false.

  request[associatedReadableByteStreamController_] = controller;
  request[view_] = view;
}
diff --git a/cli/js/web/streams/readable-stream-byob-reader.ts b/cli/js/web/streams/readable-stream-byob-reader.ts
new file mode 100644
index 000000000..0f9bfb037
--- /dev/null
+++ b/cli/js/web/streams/readable-stream-byob-reader.ts
@@ -0,0 +1,93 @@
+// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+
+/**
+ * streams/readable-stream-byob-reader - ReadableStreamBYOBReader class implementation
+ * Part of Stardazed
+ * (c) 2018-Present by Arthur Langereis - @zenmumbler
+ * https://github.com/stardazed/sd-streams
+ */
+
+import * as rs from "./readable-internals.ts";
+import * as shared from "./shared-internals.ts";
+
/**
 * BYOB ("bring your own buffer") reader: lets callers read byte-stream data
 * directly into caller-supplied ArrayBufferViews.
 * https://streams.spec.whatwg.org/#byob-reader-class
 */
export class SDReadableStreamBYOBReader
  implements rs.SDReadableStreamBYOBReader {
  // Settles when the stream closes/errors or the reader's lock is released.
  [rs.closedPromise_]: shared.ControlledPromise<void>;
  // The locked stream; undefined after releaseLock().
  [rs.ownerReadableStream_]: rs.SDReadableStream<ArrayBufferView> | undefined;
  // FIFO of read(view) requests not yet fulfilled by the controller.
  [rs.readIntoRequests_]: Array<
    rs.ReadRequest<IteratorResult<ArrayBufferView>>
  >;

  constructor(stream: rs.SDReadableStream<ArrayBufferView>) {
    if (!rs.isReadableStream(stream)) {
      throw new TypeError();
    }
    // BYOB readers only work on byte streams (byte-stream controller).
    if (
      !rs.isReadableByteStreamController(stream[rs.readableStreamController_])
    ) {
      throw new TypeError();
    }
    if (rs.isReadableStreamLocked(stream)) {
      throw new TypeError("The stream is locked.");
    }
    // Acquires the stream's lock and sets up [rs.closedPromise_].
    rs.readableStreamReaderGenericInitialize(this, stream);
    this[rs.readIntoRequests_] = [];
  }

  /** Promise that settles when the stream closes, errors, or is released. */
  get closed(): Promise<void> {
    if (!rs.isReadableStreamBYOBReader(this)) {
      return Promise.reject(new TypeError());
    }
    return this[rs.closedPromise_].promise;
  }

  /** Cancels the underlying stream with the given reason. */
  cancel(reason: shared.ErrorResult): Promise<void> {
    if (!rs.isReadableStreamBYOBReader(this)) {
      return Promise.reject(new TypeError());
    }
    const stream = this[rs.ownerReadableStream_];
    if (stream === undefined) {
      return Promise.reject(
        new TypeError("Reader is not associated with a stream")
      );
    }
    return rs.readableStreamCancel(stream, reason);
  }

  /**
   * Reads bytes into the supplied view; resolves with a filled view of the
   * same element type. Rejects for detached readers or invalid/empty views.
   */
  read(view: ArrayBufferView): Promise<IteratorResult<ArrayBufferView>> {
    if (!rs.isReadableStreamBYOBReader(this)) {
      return Promise.reject(new TypeError());
    }
    if (this[rs.ownerReadableStream_] === undefined) {
      return Promise.reject(
        new TypeError("Reader is not associated with a stream")
      );
    }
    if (!ArrayBuffer.isView(view)) {
      return Promise.reject(
        new TypeError("view argument must be a valid ArrayBufferView")
      );
    }
    // If ! IsDetachedBuffer(view.[[ViewedArrayBuffer]]) is true, return a promise rejected with a TypeError exception.
    if (view.byteLength === 0) {
      // A zero-length view could never make progress.
      return Promise.reject(
        new TypeError("supplied buffer view must be > 0 bytes")
      );
    }
    return rs.readableStreamBYOBReaderRead(this, view, true);
  }

  /** Releases the stream's lock; throws if reads are still outstanding. */
  releaseLock(): void {
    if (!rs.isReadableStreamBYOBReader(this)) {
      throw new TypeError();
    }
    if (this[rs.ownerReadableStream_] === undefined) {
      throw new TypeError("Reader is not associated with a stream");
    }
    if (this[rs.readIntoRequests_].length > 0) {
      // Pending read(view) calls would be orphaned by releasing now.
      throw new TypeError();
    }
    rs.readableStreamReaderGenericRelease(this);
  }
}
diff --git a/cli/js/web/streams/readable-stream-byob-request.ts b/cli/js/web/streams/readable-stream-byob-request.ts
new file mode 100644
index 000000000..25b937f10
--- /dev/null
+++ b/cli/js/web/streams/readable-stream-byob-request.ts
@@ -0,0 +1,60 @@
+// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+
+/**
+ * streams/readable-stream-byob-request - ReadableStreamBYOBRequest class implementation
+ * Part of Stardazed
+ * (c) 2018-Present by Arthur Langereis - @zenmumbler
+ * https://github.com/stardazed/sd-streams
+ */
+
+import * as rs from "./readable-internals.ts";
+
/**
 * Handle an underlying byte source uses to write directly into a BYOB
 * reader's buffer and signal completion via respond()/respondWithNewView().
 * Instances are created internally via Object.create + setUp; the
 * constructor always throws.
 * https://streams.spec.whatwg.org/#rs-byob-request-class
 */
export class ReadableStreamBYOBRequest {
  // Back-reference to the owning controller; undefined once invalidated.
  [rs.associatedReadableByteStreamController_]:
    | rs.SDReadableByteStreamController
    | undefined;
  // The destination view the source should write into.
  [rs.view_]: ArrayBufferView | undefined;

  constructor() {
    // Not constructible from user code.
    throw new TypeError();
  }

  /** The view the underlying source should fill. */
  get view(): ArrayBufferView {
    if (!rs.isReadableStreamBYOBRequest(this)) {
      throw new TypeError();
    }
    return this[rs.view_]!;
  }

  /** Signals that bytesWritten bytes were written into the view. */
  respond(bytesWritten: number): void {
    if (!rs.isReadableStreamBYOBRequest(this)) {
      throw new TypeError();
    }
    if (this[rs.associatedReadableByteStreamController_] === undefined) {
      // Request was invalidated (descriptor shifted or stream torn down).
      throw new TypeError();
    }
    // If ! IsDetachedBuffer(this.[[view]].[[ViewedArrayBuffer]]) is true, throw a TypeError exception.
    return rs.readableByteStreamControllerRespond(
      this[rs.associatedReadableByteStreamController_]!,
      bytesWritten
    );
  }

  /** Supplies a replacement view (same range) that was fully written. */
  respondWithNewView(view: ArrayBufferView): void {
    if (!rs.isReadableStreamBYOBRequest(this)) {
      throw new TypeError();
    }
    if (this[rs.associatedReadableByteStreamController_] === undefined) {
      throw new TypeError();
    }
    if (!ArrayBuffer.isView(view)) {
      throw new TypeError("view parameter must be a TypedArray");
    }
    // If ! IsDetachedBuffer(view.[[ViewedArrayBuffer]]) is true, throw a TypeError exception.
    return rs.readableByteStreamControllerRespondWithNewView(
      this[rs.associatedReadableByteStreamController_]!,
      view
    );
  }
}
diff --git a/cli/js/web/streams/readable-stream-default-controller.ts b/cli/js/web/streams/readable-stream-default-controller.ts
new file mode 100644
index 000000000..e9ddce1bc
--- /dev/null
+++ b/cli/js/web/streams/readable-stream-default-controller.ts
@@ -0,0 +1,139 @@
+// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+
+/**
+ * streams/readable-stream-default-controller - ReadableStreamDefaultController class implementation
+ * Part of Stardazed
+ * (c) 2018-Present by Arthur Langereis - @zenmumbler
+ * https://github.com/stardazed/sd-streams
+ */
+
+/* eslint-disable @typescript-eslint/no-explicit-any */
+// TODO reenable this lint here
+
+import * as rs from "./readable-internals.ts";
+import * as shared from "./shared-internals.ts";
+import * as q from "./queue-mixin.ts";
+import { Queue } from "./queue.ts";
+import { QueuingStrategySizeCallback, UnderlyingSource } from "../dom_types.ts";
+
/**
 * Controller handed to a default (non-byte) readable stream's underlying
 * source. Instances are created internally via Object.create; the
 * constructor always throws.
 * https://streams.spec.whatwg.org/#rs-default-controller-class
 */
export class ReadableStreamDefaultController<OutputType>
  implements rs.SDReadableStreamDefaultController<OutputType> {
  [rs.cancelAlgorithm_]: rs.CancelAlgorithm;
  [rs.closeRequested_]: boolean;
  [rs.controlledReadableStream_]: rs.SDReadableStream<OutputType>;
  [rs.pullAgain_]: boolean;
  [rs.pullAlgorithm_]: rs.PullAlgorithm<OutputType>;
  [rs.pulling_]: boolean;
  [rs.strategyHWM_]: number;
  [rs.strategySizeAlgorithm_]: QueuingStrategySizeCallback<OutputType>;
  [rs.started_]: boolean;

  // Queue-mixin slots: queued chunks with their strategy-computed sizes.
  [q.queue_]: Queue<q.QueueElement<OutputType>>;
  [q.queueTotalSize_]: number;

  constructor() {
    // Not constructible from user code.
    throw new TypeError();
  }

  /** Remaining queue headroom (HWM - queued size); null once errored. */
  get desiredSize(): number | null {
    return rs.readableStreamDefaultControllerGetDesiredSize(this);
  }

  /** Requests the stream be closed once its queue drains. */
  close(): void {
    if (!rs.isReadableStreamDefaultController(this)) {
      throw new TypeError();
    }
    if (!rs.readableStreamDefaultControllerCanCloseOrEnqueue(this)) {
      throw new TypeError(
        "Cannot close, the stream is already closing or not readable"
      );
    }
    rs.readableStreamDefaultControllerClose(this);
  }

  /** Enqueues a chunk (or delivers it to a waiting reader directly). */
  enqueue(chunk?: OutputType): void {
    if (!rs.isReadableStreamDefaultController(this)) {
      throw new TypeError();
    }
    if (!rs.readableStreamDefaultControllerCanCloseOrEnqueue(this)) {
      throw new TypeError(
        "Cannot enqueue, the stream is closing or not readable"
      );
    }
    rs.readableStreamDefaultControllerEnqueue(this, chunk!);
  }

  /** Errors the stream with the given reason. */
  error(e?: shared.ErrorResult): void {
    if (!rs.isReadableStreamDefaultController(this)) {
      throw new TypeError();
    }
    rs.readableStreamDefaultControllerError(this, e);
  }

  /** [[CancelSteps]]: discard the queue, run the source's cancel algorithm. */
  [rs.cancelSteps_](reason: shared.ErrorResult): Promise<void> {
    q.resetQueue(this);
    const result = this[rs.cancelAlgorithm_](reason);
    rs.readableStreamDefaultControllerClearAlgorithms(this);
    return result;
  }

  /** [[PullSteps]]: satisfy a read() from the queue or register a request. */
  [rs.pullSteps_](
    forAuthorCode: boolean
  ): Promise<IteratorResult<OutputType, any>> {
    const stream = this[rs.controlledReadableStream_];
    if (this[q.queue_].length > 0) {
      const chunk = q.dequeueValue(this);
      if (this[rs.closeRequested_] && this[q.queue_].length === 0) {
        // The last chunk just left a close-requested stream: finish the close.
        rs.readableStreamDefaultControllerClearAlgorithms(this);
        rs.readableStreamClose(stream);
      } else {
        rs.readableStreamDefaultControllerCallPullIfNeeded(this);
      }
      return Promise.resolve(
        rs.readableStreamCreateReadResult(chunk, false, forAuthorCode)
      );
    }

    // Queue empty: park the read request and ask the source for data.
    const pendingPromise = rs.readableStreamAddReadRequest(
      stream,
      forAuthorCode
    );
    rs.readableStreamDefaultControllerCallPullIfNeeded(this);
    return pendingPromise;
  }
}
+
+export function setUpReadableStreamDefaultControllerFromUnderlyingSource<
+ OutputType
+>(
+ stream: rs.SDReadableStream<OutputType>,
+ underlyingSource: UnderlyingSource<OutputType>,
+ highWaterMark: number,
+ sizeAlgorithm: QueuingStrategySizeCallback<OutputType>
+): void {
+ // Assert: underlyingSource is not undefined.
+ const controller = Object.create(ReadableStreamDefaultController.prototype);
+ const startAlgorithm = (): any => {
+ return shared.invokeOrNoop(underlyingSource, "start", [controller]);
+ };
+ const pullAlgorithm = shared.createAlgorithmFromUnderlyingMethod(
+ underlyingSource,
+ "pull",
+ [controller]
+ );
+ const cancelAlgorithm = shared.createAlgorithmFromUnderlyingMethod(
+ underlyingSource,
+ "cancel",
+ []
+ );
+ rs.setUpReadableStreamDefaultController(
+ stream,
+ controller,
+ startAlgorithm,
+ pullAlgorithm,
+ cancelAlgorithm,
+ highWaterMark,
+ sizeAlgorithm
+ );
+}
diff --git a/cli/js/web/streams/readable-stream-default-reader.ts b/cli/js/web/streams/readable-stream-default-reader.ts
new file mode 100644
index 000000000..eb1910a9d
--- /dev/null
+++ b/cli/js/web/streams/readable-stream-default-reader.ts
@@ -0,0 +1,75 @@
+// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+
+/**
+ * streams/readable-stream-default-reader - ReadableStreamDefaultReader class implementation
+ * Part of Stardazed
+ * (c) 2018-Present by Arthur Langereis - @zenmumbler
+ * https://github.com/stardazed/sd-streams
+ */
+
+import * as rs from "./readable-internals.ts";
+import * as shared from "./shared-internals.ts";
+
+export class ReadableStreamDefaultReader<OutputType>
+ implements rs.SDReadableStreamReader<OutputType> {
+ [rs.closedPromise_]: shared.ControlledPromise<void>;
+ [rs.ownerReadableStream_]: rs.SDReadableStream<OutputType> | undefined;
+ [rs.readRequests_]: Array<rs.ReadRequest<IteratorResult<OutputType>>>;
+
+ constructor(stream: rs.SDReadableStream<OutputType>) {
+ if (!rs.isReadableStream(stream)) {
+ throw new TypeError();
+ }
+ if (rs.isReadableStreamLocked(stream)) {
+ throw new TypeError("The stream is locked.");
+ }
+ rs.readableStreamReaderGenericInitialize(this, stream);
+ this[rs.readRequests_] = [];
+ }
+
+ get closed(): Promise<void> {
+ if (!rs.isReadableStreamDefaultReader(this)) {
+ return Promise.reject(new TypeError());
+ }
+ return this[rs.closedPromise_].promise;
+ }
+
+ cancel(reason: shared.ErrorResult): Promise<void> {
+ if (!rs.isReadableStreamDefaultReader(this)) {
+ return Promise.reject(new TypeError());
+ }
+ const stream = this[rs.ownerReadableStream_];
+ if (stream === undefined) {
+ return Promise.reject(
+ new TypeError("Reader is not associated with a stream")
+ );
+ }
+ return rs.readableStreamCancel(stream, reason);
+ }
+
+ read(): Promise<IteratorResult<OutputType | undefined>> {
+ if (!rs.isReadableStreamDefaultReader(this)) {
+ return Promise.reject(new TypeError());
+ }
+ if (this[rs.ownerReadableStream_] === undefined) {
+ return Promise.reject(
+ new TypeError("Reader is not associated with a stream")
+ );
+ }
+ return rs.readableStreamDefaultReaderRead(this, true);
+ }
+
+ releaseLock(): void {
+ if (!rs.isReadableStreamDefaultReader(this)) {
+ throw new TypeError();
+ }
+ if (this[rs.ownerReadableStream_] === undefined) {
+ return;
+ }
+ if (this[rs.readRequests_].length !== 0) {
+ throw new TypeError("Cannot release a stream with pending read requests");
+ }
+ rs.readableStreamReaderGenericRelease(this);
+ }
+}
diff --git a/cli/js/web/streams/readable-stream.ts b/cli/js/web/streams/readable-stream.ts
new file mode 100644
index 000000000..4d9d85889
--- /dev/null
+++ b/cli/js/web/streams/readable-stream.ts
@@ -0,0 +1,391 @@
+// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+
+/**
+ * streams/readable-stream - ReadableStream class implementation
+ * Part of Stardazed
+ * (c) 2018-Present by Arthur Langereis - @zenmumbler
+ * https://github.com/stardazed/sd-streams
+ */
+
+/* eslint prefer-const: "off" */
+// TODO remove this, surpressed because of
+// 284:7 error 'branch1' is never reassigned. Use 'const' instead prefer-const
+
+import * as rs from "./readable-internals.ts";
+import * as shared from "./shared-internals.ts";
+import {
+ QueuingStrategy,
+ QueuingStrategySizeCallback,
+ UnderlyingSource,
+ UnderlyingByteSource
+} from "../dom_types.ts";
+
+import {
+ ReadableStreamDefaultController,
+ setUpReadableStreamDefaultControllerFromUnderlyingSource
+} from "./readable-stream-default-controller.ts";
+import { ReadableStreamDefaultReader } from "./readable-stream-default-reader.ts";
+
+import {
+ ReadableByteStreamController,
+ setUpReadableByteStreamControllerFromUnderlyingSource
+} from "./readable-byte-stream-controller.ts";
+import { SDReadableStreamBYOBReader } from "./readable-stream-byob-reader.ts";
+
+export class SDReadableStream<OutputType>
+ implements rs.SDReadableStream<OutputType> {
+ [shared.state_]: rs.ReadableStreamState;
+ [shared.storedError_]: shared.ErrorResult;
+ [rs.reader_]: rs.SDReadableStreamReader<OutputType> | undefined;
+ [rs.readableStreamController_]: rs.SDReadableStreamControllerBase<OutputType>;
+
+ constructor(
+ underlyingSource: UnderlyingByteSource,
+ strategy?: { highWaterMark?: number; size?: undefined }
+ );
+ constructor(
+ underlyingSource?: UnderlyingSource<OutputType>,
+ strategy?: QueuingStrategy<OutputType>
+ );
+ constructor(
+ underlyingSource: UnderlyingSource<OutputType> | UnderlyingByteSource = {},
+ strategy:
+ | QueuingStrategy<OutputType>
+ | { highWaterMark?: number; size?: undefined } = {}
+ ) {
+ rs.initializeReadableStream(this);
+
+ const sizeFunc = strategy.size;
+ const stratHWM = strategy.highWaterMark;
+ const sourceType = underlyingSource.type;
+
+ if (sourceType === undefined) {
+ const sizeAlgorithm = shared.makeSizeAlgorithmFromSizeFunction(sizeFunc);
+ const highWaterMark = shared.validateAndNormalizeHighWaterMark(
+ stratHWM === undefined ? 1 : stratHWM
+ );
+ setUpReadableStreamDefaultControllerFromUnderlyingSource(
+ this,
+ underlyingSource as UnderlyingSource<OutputType>,
+ highWaterMark,
+ sizeAlgorithm
+ );
+ } else if (String(sourceType) === "bytes") {
+ if (sizeFunc !== undefined) {
+ throw new RangeError(
+ "bytes streams cannot have a strategy with a `size` field"
+ );
+ }
+ const highWaterMark = shared.validateAndNormalizeHighWaterMark(
+ stratHWM === undefined ? 0 : stratHWM
+ );
+ setUpReadableByteStreamControllerFromUnderlyingSource(
+ (this as unknown) as rs.SDReadableStream<ArrayBufferView>,
+ underlyingSource as UnderlyingByteSource,
+ highWaterMark
+ );
+ } else {
+ throw new RangeError(
+ "The underlying source's `type` field must be undefined or 'bytes'"
+ );
+ }
+ }
+
+ get locked(): boolean {
+ return rs.isReadableStreamLocked(this);
+ }
+
+ getReader(): rs.SDReadableStreamDefaultReader<OutputType>;
+ getReader(options: { mode?: "byob" }): rs.SDReadableStreamBYOBReader;
+ getReader(options?: {
+ mode?: "byob";
+ }):
+ | rs.SDReadableStreamDefaultReader<OutputType>
+ | rs.SDReadableStreamBYOBReader {
+ if (!rs.isReadableStream(this)) {
+ throw new TypeError();
+ }
+ if (options === undefined) {
+ options = {};
+ }
+ const { mode } = options;
+ if (mode === undefined) {
+ return new ReadableStreamDefaultReader(this);
+ } else if (String(mode) === "byob") {
+ return new SDReadableStreamBYOBReader(
+ (this as unknown) as rs.SDReadableStream<ArrayBufferView>
+ );
+ }
+ throw RangeError("mode option must be undefined or `byob`");
+ }
+
+ cancel(reason: shared.ErrorResult): Promise<void> {
+ if (!rs.isReadableStream(this)) {
+ return Promise.reject(new TypeError());
+ }
+ if (rs.isReadableStreamLocked(this)) {
+ return Promise.reject(new TypeError("Cannot cancel a locked stream"));
+ }
+ return rs.readableStreamCancel(this, reason);
+ }
+
+ tee(): Array<SDReadableStream<OutputType>> {
+ return readableStreamTee(this, false);
+ }
+
+ /* TODO reenable these methods when we bring in writableStreams and transport types
+ pipeThrough<ResultType>(
+ transform: rs.GenericTransformStream<OutputType, ResultType>,
+ options: PipeOptions = {}
+ ): rs.SDReadableStream<ResultType> {
+ const { readable, writable } = transform;
+ if (!rs.isReadableStream(this)) {
+ throw new TypeError();
+ }
+ if (!ws.isWritableStream(writable)) {
+ throw new TypeError("writable must be a WritableStream");
+ }
+ if (!rs.isReadableStream(readable)) {
+ throw new TypeError("readable must be a ReadableStream");
+ }
+ if (options.signal !== undefined && !shared.isAbortSignal(options.signal)) {
+ throw new TypeError("options.signal must be an AbortSignal instance");
+ }
+ if (rs.isReadableStreamLocked(this)) {
+ throw new TypeError("Cannot pipeThrough on a locked stream");
+ }
+ if (ws.isWritableStreamLocked(writable)) {
+ throw new TypeError("Cannot pipeThrough to a locked stream");
+ }
+
+ const pipeResult = pipeTo(this, writable, options);
+ pipeResult.catch(() => {});
+
+ return readable;
+ }
+
+ pipeTo(
+ dest: ws.WritableStream<OutputType>,
+ options: PipeOptions = {}
+ ): Promise<void> {
+ if (!rs.isReadableStream(this)) {
+ return Promise.reject(new TypeError());
+ }
+ if (!ws.isWritableStream(dest)) {
+ return Promise.reject(
+ new TypeError("destination must be a WritableStream")
+ );
+ }
+ if (options.signal !== undefined && !shared.isAbortSignal(options.signal)) {
+ return Promise.reject(
+ new TypeError("options.signal must be an AbortSignal instance")
+ );
+ }
+ if (rs.isReadableStreamLocked(this)) {
+ return Promise.reject(new TypeError("Cannot pipe from a locked stream"));
+ }
+ if (ws.isWritableStreamLocked(dest)) {
+ return Promise.reject(new TypeError("Cannot pipe to a locked stream"));
+ }
+
+ return pipeTo(this, dest, options);
+ }
+ */
+}
+
+export function createReadableStream<OutputType>(
+ startAlgorithm: rs.StartAlgorithm,
+ pullAlgorithm: rs.PullAlgorithm<OutputType>,
+ cancelAlgorithm: rs.CancelAlgorithm,
+ highWaterMark?: number,
+ sizeAlgorithm?: QueuingStrategySizeCallback<OutputType>
+): SDReadableStream<OutputType> {
+ if (highWaterMark === undefined) {
+ highWaterMark = 1;
+ }
+ if (sizeAlgorithm === undefined) {
+ sizeAlgorithm = (): number => 1;
+ }
+ // Assert: ! IsNonNegativeNumber(highWaterMark) is true.
+
+ const stream = Object.create(SDReadableStream.prototype) as SDReadableStream<
+ OutputType
+ >;
+ rs.initializeReadableStream(stream);
+ const controller = Object.create(
+ ReadableStreamDefaultController.prototype
+ ) as ReadableStreamDefaultController<OutputType>;
+ rs.setUpReadableStreamDefaultController(
+ stream,
+ controller,
+ startAlgorithm,
+ pullAlgorithm,
+ cancelAlgorithm,
+ highWaterMark,
+ sizeAlgorithm
+ );
+ return stream;
+}
+
+export function createReadableByteStream<OutputType>(
+ startAlgorithm: rs.StartAlgorithm,
+ pullAlgorithm: rs.PullAlgorithm<OutputType>,
+ cancelAlgorithm: rs.CancelAlgorithm,
+ highWaterMark?: number,
+ autoAllocateChunkSize?: number
+): SDReadableStream<OutputType> {
+ if (highWaterMark === undefined) {
+ highWaterMark = 0;
+ }
+ // Assert: ! IsNonNegativeNumber(highWaterMark) is true.
+ if (autoAllocateChunkSize !== undefined) {
+ if (
+ !shared.isInteger(autoAllocateChunkSize) ||
+ autoAllocateChunkSize <= 0
+ ) {
+ throw new RangeError(
+ "autoAllocateChunkSize must be a positive, finite integer"
+ );
+ }
+ }
+
+ const stream = Object.create(SDReadableStream.prototype) as SDReadableStream<
+ OutputType
+ >;
+ rs.initializeReadableStream(stream);
+ const controller = Object.create(
+ ReadableByteStreamController.prototype
+ ) as ReadableByteStreamController;
+ rs.setUpReadableByteStreamController(
+ (stream as unknown) as SDReadableStream<ArrayBufferView>,
+ controller,
+ startAlgorithm,
+ (pullAlgorithm as unknown) as rs.PullAlgorithm<ArrayBufferView>,
+ cancelAlgorithm,
+ highWaterMark,
+ autoAllocateChunkSize
+ );
+ return stream;
+}
+
/**
 * Tee a readable stream into two branches that each receive every chunk
 * (spec: ReadableStreamTee). A single internal reader drains `stream` and
 * forwards each chunk to both branch controllers; when `cloneForBranch2` is
 * true the second branch gets a structured-clone copy of each chunk.
 * The source stream is only cancelled once BOTH branches have cancelled.
 */
export function readableStreamTee<OutputType>(
  stream: SDReadableStream<OutputType>,
  cloneForBranch2: boolean
): [SDReadableStream<OutputType>, SDReadableStream<OutputType>] {
  if (!rs.isReadableStream(stream)) {
    throw new TypeError();
  }

  const reader = new ReadableStreamDefaultReader(stream);
  // Shared closure state observed by both branches' algorithms.
  let closedOrErrored = false;
  let canceled1 = false;
  let canceled2 = false;
  let reason1: shared.ErrorResult;
  let reason2: shared.ErrorResult;
  let branch1: SDReadableStream<OutputType>;
  let branch2: SDReadableStream<OutputType>;

  // Resolved (with the source's cancel result) only after both branches
  // have cancelled; each branch's cancel() returns this same promise.
  let cancelResolve: (reason: shared.ErrorResult) => void;
  const cancelPromise = new Promise<void>(resolve => (cancelResolve = resolve));

  // Both branches share one pull algorithm: a single read from the source
  // fans out to whichever branches are still live.
  const pullAlgorithm = (): Promise<void> => {
    return rs
      .readableStreamDefaultReaderRead(reader)
      .then(({ value, done }) => {
        if (done && !closedOrErrored) {
          // Source is exhausted: close any branch that wasn't cancelled.
          if (!canceled1) {
            rs.readableStreamDefaultControllerClose(
              branch1![
                rs.readableStreamController_
              ] as ReadableStreamDefaultController<OutputType>
            );
          }
          if (!canceled2) {
            rs.readableStreamDefaultControllerClose(
              branch2![
                rs.readableStreamController_
              ] as ReadableStreamDefaultController<OutputType>
            );
          }
          closedOrErrored = true;
        }
        if (closedOrErrored) {
          return;
        }
        const value1 = value;
        let value2 = value;
        if (!canceled1) {
          rs.readableStreamDefaultControllerEnqueue(
            branch1![
              rs.readableStreamController_
            ] as ReadableStreamDefaultController<OutputType>,
            value1!
          );
        }
        if (!canceled2) {
          if (cloneForBranch2) {
            // Branch 2 gets its own copy so the branches can't observe
            // each other's mutations.
            value2 = shared.cloneValue(value2);
          }
          rs.readableStreamDefaultControllerEnqueue(
            branch2![
              rs.readableStreamController_
            ] as ReadableStreamDefaultController<OutputType>,
            value2!
          );
        }
      });
  };

  const cancel1Algorithm = (reason: shared.ErrorResult): Promise<void> => {
    canceled1 = true;
    reason1 = reason;
    if (canceled2) {
      // Both branches gone: cancel the source with the pair of reasons.
      const cancelResult = rs.readableStreamCancel(stream, [reason1, reason2]);
      cancelResolve(cancelResult);
    }
    return cancelPromise;
  };

  const cancel2Algorithm = (reason: shared.ErrorResult): Promise<void> => {
    canceled2 = true;
    reason2 = reason;
    if (canceled1) {
      const cancelResult = rs.readableStreamCancel(stream, [reason1, reason2]);
      cancelResolve(cancelResult);
    }
    return cancelPromise;
  };

  const startAlgorithm = (): undefined => undefined;
  branch1 = createReadableStream(
    startAlgorithm,
    pullAlgorithm,
    cancel1Algorithm
  );
  branch2 = createReadableStream(
    startAlgorithm,
    pullAlgorithm,
    cancel2Algorithm
  );

  // If the source errors, propagate that error to both branches once.
  reader[rs.closedPromise_].promise.catch(error => {
    if (!closedOrErrored) {
      rs.readableStreamDefaultControllerError(
        branch1![
          rs.readableStreamController_
        ] as ReadableStreamDefaultController<OutputType>,
        error
      );
      rs.readableStreamDefaultControllerError(
        branch2![
          rs.readableStreamController_
        ] as ReadableStreamDefaultController<OutputType>,
        error
      );
      closedOrErrored = true;
    }
  });

  return [branch1, branch2];
}
diff --git a/cli/js/web/streams/shared-internals.ts b/cli/js/web/streams/shared-internals.ts
new file mode 100644
index 000000000..93155fecc
--- /dev/null
+++ b/cli/js/web/streams/shared-internals.ts
@@ -0,0 +1,306 @@
+// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+
+/**
+ * streams/shared-internals - common types and methods for streams
+ * Part of Stardazed
+ * (c) 2018-Present by Arthur Langereis - @zenmumbler
+ * https://github.com/stardazed/sd-streams
+ */
+
+/* eslint-disable @typescript-eslint/no-explicit-any */
+// TODO don't disable this warning
+
+import { AbortSignal, QueuingStrategySizeCallback } from "../dom_types.ts";
+
+// common stream fields
+
+export const state_ = Symbol("state_");
+export const storedError_ = Symbol("storedError_");
+
+// ---------
+
+/** An error reason / result can be anything */
+export type ErrorResult = any;
+
+// ---------
+
+export function isInteger(value: number): boolean {
+ if (!isFinite(value)) {
+ // covers NaN, +Infinity and -Infinity
+ return false;
+ }
+ const absValue = Math.abs(value);
+ return Math.floor(absValue) === absValue;
+}
+
+export function isFiniteNonNegativeNumber(value: unknown): boolean {
+ if (!(typeof value === "number" && isFinite(value))) {
+ // covers NaN, +Infinity and -Infinity
+ return false;
+ }
+ return value >= 0;
+}
+
+export function isAbortSignal(signal: any): signal is AbortSignal {
+ if (typeof signal !== "object" || signal === null) {
+ return false;
+ }
+ try {
+ // TODO
+ // calling signal.aborted() probably isn't the right way to perform this test
+ // https://github.com/stardazed/sd-streams/blob/master/packages/streams/src/shared-internals.ts#L41
+ signal.aborted();
+ return true;
+ } catch (err) {
+ return false;
+ }
+}
+
+export function invokeOrNoop<O extends object, P extends keyof O>(
+ o: O,
+ p: P,
+ args: any[]
+): any {
+ // Assert: O is not undefined.
+ // Assert: IsPropertyKey(P) is true.
+ // Assert: args is a List.
+ const method: Function | undefined = (o as any)[p]; // tslint:disable-line:ban-types
+ if (method === undefined) {
+ return undefined;
+ }
+ return Function.prototype.apply.call(method, o, args);
+}
+
+export function cloneArrayBuffer(
+ srcBuffer: ArrayBufferLike,
+ srcByteOffset: number,
+ srcLength: number,
+ cloneConstructor: ArrayBufferConstructor | SharedArrayBufferConstructor
+): InstanceType<typeof cloneConstructor> {
+ // this function fudges the return type but SharedArrayBuffer is disabled for a while anyway
+ return srcBuffer.slice(
+ srcByteOffset,
+ srcByteOffset + srcLength
+ ) as InstanceType<typeof cloneConstructor>;
+}
+
+export function transferArrayBuffer(buffer: ArrayBufferLike): ArrayBuffer {
+ // This would in a JS engine context detach the buffer's backing store and return
+ // a new ArrayBuffer with the same backing store, invalidating `buffer`,
+ // i.e. a move operation in C++ parlance.
+ // Sadly ArrayBuffer.transfer is yet to be implemented by a single browser vendor.
+ return buffer.slice(0); // copies instead of moves
+}
+
+export function copyDataBlockBytes(
+ toBlock: ArrayBufferLike,
+ toIndex: number,
+ fromBlock: ArrayBufferLike,
+ fromIndex: number,
+ count: number
+): void {
+ new Uint8Array(toBlock, toIndex, count).set(
+ new Uint8Array(fromBlock, fromIndex, count)
+ );
+}
+
+// helper memoisation map for object values
+// weak so it doesn't keep memoized versions of old objects indefinitely.
+const objectCloneMemo = new WeakMap<object, object>();
+
+let sharedArrayBufferSupported_: boolean | undefined;
+function supportsSharedArrayBuffer(): boolean {
+ if (sharedArrayBufferSupported_ === undefined) {
+ try {
+ new SharedArrayBuffer(16);
+ sharedArrayBufferSupported_ = true;
+ } catch (e) {
+ sharedArrayBufferSupported_ = false;
+ }
+ }
+ return sharedArrayBufferSupported_;
+}
+
+/**
+ * Implement a method of value cloning that is reasonably close to performing `StructuredSerialize(StructuredDeserialize(value))`
+ * from the HTML standard. Used by the internal `readableStreamTee` method to clone values for connected implementations.
+ * @see https://html.spec.whatwg.org/multipage/structured-data.html#structuredserializeinternal
+ */
+export function cloneValue(value: any): any {
+ const valueType = typeof value;
+ switch (valueType) {
+ case "number":
+ case "string":
+ case "boolean":
+ case "undefined":
+ // @ts-ignore
+ case "bigint":
+ return value;
+ case "object": {
+ if (objectCloneMemo.has(value)) {
+ return objectCloneMemo.get(value);
+ }
+ if (value === null) {
+ return value;
+ }
+ if (value instanceof Date) {
+ return new Date(value.valueOf());
+ }
+ if (value instanceof RegExp) {
+ return new RegExp(value);
+ }
+ if (supportsSharedArrayBuffer() && value instanceof SharedArrayBuffer) {
+ return value;
+ }
+ if (value instanceof ArrayBuffer) {
+ const cloned = cloneArrayBuffer(
+ value,
+ 0,
+ value.byteLength,
+ ArrayBuffer
+ );
+ objectCloneMemo.set(value, cloned);
+ return cloned;
+ }
+ if (ArrayBuffer.isView(value)) {
+ const clonedBuffer = cloneValue(value.buffer) as ArrayBufferLike;
+ // Use DataViewConstructor type purely for type-checking, can be a DataView or TypedArray.
+ // They use the same constructor signature, only DataView has a length in bytes and TypedArrays
+ // use a length in terms of elements, so we adjust for that.
+ let length: number;
+ if (value instanceof DataView) {
+ length = value.byteLength;
+ } else {
+ length = (value as Uint8Array).length;
+ }
+ return new (value.constructor as DataViewConstructor)(
+ clonedBuffer,
+ value.byteOffset,
+ length
+ );
+ }
+ if (value instanceof Map) {
+ const clonedMap = new Map();
+ objectCloneMemo.set(value, clonedMap);
+ value.forEach((v, k) => clonedMap.set(k, cloneValue(v)));
+ return clonedMap;
+ }
+ if (value instanceof Set) {
+ const clonedSet = new Map();
+ objectCloneMemo.set(value, clonedSet);
+ value.forEach((v, k) => clonedSet.set(k, cloneValue(v)));
+ return clonedSet;
+ }
+
+ // generic object
+ const clonedObj = {} as any;
+ objectCloneMemo.set(value, clonedObj);
+ const sourceKeys = Object.getOwnPropertyNames(value);
+ for (const key of sourceKeys) {
+ clonedObj[key] = cloneValue(value[key]);
+ }
+ return clonedObj;
+ }
+ case "symbol":
+ case "function":
+ default:
+ // TODO this should be a DOMException,
+ // https://github.com/stardazed/sd-streams/blob/master/packages/streams/src/shared-internals.ts#L171
+ throw new Error("Uncloneable value in stream");
+ }
+}
+
+export function promiseCall<F extends Function>(
+ f: F,
+ v: object | undefined,
+ args: any[]
+): Promise<any> {
+ // tslint:disable-line:ban-types
+ try {
+ const result = Function.prototype.apply.call(f, v, args);
+ return Promise.resolve(result);
+ } catch (err) {
+ return Promise.reject(err);
+ }
+}
+
+export function createAlgorithmFromUnderlyingMethod<
+ O extends object,
+ K extends keyof O
+>(obj: O, methodName: K, extraArgs: any[]): any {
+ const method = obj[methodName];
+ if (method === undefined) {
+ return (): any => Promise.resolve(undefined);
+ }
+ if (typeof method !== "function") {
+ throw new TypeError(`Field "${methodName}" is not a function.`);
+ }
+ return function(...fnArgs: any[]): any {
+ return promiseCall(method, obj, fnArgs.concat(extraArgs));
+ };
+}
+
+/*
+Deprecated for now, all usages replaced by readableStreamCreateReadResult
+
+function createIterResultObject<T>(value: T, done: boolean): IteratorResult<T> {
+ return { value, done };
+}
+*/
+
+export function validateAndNormalizeHighWaterMark(hwm: unknown): number {
+ const highWaterMark = Number(hwm);
+ if (isNaN(highWaterMark) || highWaterMark < 0) {
+ throw new RangeError(
+ "highWaterMark must be a valid, non-negative integer."
+ );
+ }
+ return highWaterMark;
+}
+
+export function makeSizeAlgorithmFromSizeFunction<T>(
+ sizeFn: undefined | ((chunk: T) => number)
+): QueuingStrategySizeCallback<T> {
+ if (typeof sizeFn !== "function" && typeof sizeFn !== "undefined") {
+ throw new TypeError("size function must be undefined or a function");
+ }
+ return function(chunk: T): number {
+ if (typeof sizeFn === "function") {
+ return sizeFn(chunk);
+ }
+ return 1;
+ };
+}
+
+// ----
+
/** Lifecycle states of a ControlledPromise. */
// NOTE(review): `const enum` members are inlined at compile time and are
// incompatible with isolatedModules-style single-file transpilation —
// confirm the build pipeline supports this before reusing elsewhere.
export const enum ControlledPromiseState {
  Pending,
  Resolved,
  Rejected
}

/**
 * A promise bundled with its own resolve/reject functions and a
 * synchronously observable state, for code that must settle a promise
 * from outside its executor.
 */
export interface ControlledPromise<V> {
  resolve(value?: V): void;
  reject(error: ErrorResult): void;
  promise: Promise<V>;
  state: ControlledPromiseState;
}
+
+export function createControlledPromise<V>(): ControlledPromise<V> {
+ const conProm = {
+ state: ControlledPromiseState.Pending
+ } as ControlledPromise<V>;
+ conProm.promise = new Promise<V>(function(resolve, reject) {
+ conProm.resolve = function(v?: V): void {
+ conProm.state = ControlledPromiseState.Resolved;
+ resolve(v);
+ };
+ conProm.reject = function(e?: ErrorResult): void {
+ conProm.state = ControlledPromiseState.Rejected;
+ reject(e);
+ };
+ });
+ return conProm;
+}
diff --git a/cli/js/web/streams/strategies.ts b/cli/js/web/streams/strategies.ts
new file mode 100644
index 000000000..5f7ffc632
--- /dev/null
+++ b/cli/js/web/streams/strategies.ts
@@ -0,0 +1,39 @@
+// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+
+/**
+ * streams/strategies - implementation of the built-in stream strategies
+ * Part of Stardazed
+ * (c) 2018-Present by Arthur Langereis - @zenmumbler
+ * https://github.com/stardazed/sd-streams
+ */
+
+/* eslint-disable @typescript-eslint/no-explicit-any */
+// TODO reenable this lint here
+
+import { QueuingStrategy } from "../dom_types.ts";
+
+export class ByteLengthQueuingStrategy
+ implements QueuingStrategy<ArrayBufferView> {
+ highWaterMark: number;
+
+ constructor(options: { highWaterMark: number }) {
+ this.highWaterMark = options.highWaterMark;
+ }
+
+ size(chunk: ArrayBufferView): number {
+ return chunk.byteLength;
+ }
+}
+
+export class CountQueuingStrategy implements QueuingStrategy<any> {
+ highWaterMark: number;
+
+ constructor(options: { highWaterMark: number }) {
+ this.highWaterMark = options.highWaterMark;
+ }
+
+ size(): number {
+ return 1;
+ }
+}
diff --git a/cli/js/web/streams/transform-internals.ts b/cli/js/web/streams/transform-internals.ts
new file mode 100644
index 000000000..4c5e3657d
--- /dev/null
+++ b/cli/js/web/streams/transform-internals.ts
@@ -0,0 +1,371 @@
+// TODO reenable this code when we enable writableStreams and transport types
+// // Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// // Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+
+// /**
+// * streams/transform-internals - internal types and functions for transform streams
+// * Part of Stardazed
+// * (c) 2018-Present by Arthur Langereis - @zenmumbler
+// * https://github.com/stardazed/sd-streams
+// */
+
+// /* eslint-disable @typescript-eslint/no-explicit-any */
+// // TODO reenable this lint here
+
+// import * as rs from "./readable-internals.ts";
+// import * as ws from "./writable-internals.ts";
+// import * as shared from "./shared-internals.ts";
+
+// import { createReadableStream } from "./readable-stream.ts";
+// import { createWritableStream } from "./writable-stream.ts";
+
+// import { QueuingStrategy, QueuingStrategySizeCallback } from "../dom_types.ts";
+
+// export const state_ = Symbol("transformState_");
+// export const backpressure_ = Symbol("backpressure_");
+// export const backpressureChangePromise_ = Symbol("backpressureChangePromise_");
+// export const readable_ = Symbol("readable_");
+// export const transformStreamController_ = Symbol("transformStreamController_");
+// export const writable_ = Symbol("writable_");
+
+// export const controlledTransformStream_ = Symbol("controlledTransformStream_");
+// export const flushAlgorithm_ = Symbol("flushAlgorithm_");
+// export const transformAlgorithm_ = Symbol("transformAlgorithm_");
+
+// // ----
+
+// export type TransformFunction<InputType, OutputType> = (
+// chunk: InputType,
+// controller: TransformStreamDefaultController<InputType, OutputType>
+// ) => void | PromiseLike<void>;
+// export type TransformAlgorithm<InputType> = (chunk: InputType) => Promise<void>;
+// export type FlushFunction<InputType, OutputType> = (
+// controller: TransformStreamDefaultController<InputType, OutputType>
+// ) => void | PromiseLike<void>;
+// export type FlushAlgorithm = () => Promise<void>;
+
+// // ----
+
+// export interface TransformStreamDefaultController<InputType, OutputType> {
+// readonly desiredSize: number | null;
+// enqueue(chunk: OutputType): void;
+// error(reason: shared.ErrorResult): void;
+// terminate(): void;
+
+// [controlledTransformStream_]: TransformStream<InputType, OutputType>; // The TransformStream instance controlled; also used for the IsTransformStreamDefaultController brand check
+// [flushAlgorithm_]: FlushAlgorithm; // A promise - returning algorithm which communicates a requested close to the transformer
+// [transformAlgorithm_]: TransformAlgorithm<InputType>; // A promise - returning algorithm, taking one argument(the chunk to transform), which requests the transformer perform its transformation
+// }
+
+// export interface Transformer<InputType, OutputType> {
+// start?(
+// controller: TransformStreamDefaultController<InputType, OutputType>
+// ): void | PromiseLike<void>;
+// transform?: TransformFunction<InputType, OutputType>;
+// flush?: FlushFunction<InputType, OutputType>;
+
+// readableType?: undefined; // for future spec changes
+// writableType?: undefined; // for future spec changes
+// }
+
+// export declare class TransformStream<InputType, OutputType> {
+// constructor(
+// transformer: Transformer<InputType, OutputType>,
+// writableStrategy: QueuingStrategy<InputType>,
+// readableStrategy: QueuingStrategy<OutputType>
+// );
+
+// readonly readable: rs.SDReadableStream<OutputType>;
+// readonly writable: ws.WritableStream<InputType>;
+
+// [backpressure_]: boolean | undefined; // Whether there was backpressure on [[readable]] the last time it was observed
+// [backpressureChangePromise_]: shared.ControlledPromise<void> | undefined; // A promise which is fulfilled and replaced every time the value of[[backpressure]] changes
+// [readable_]: rs.SDReadableStream<OutputType>; // The ReadableStream instance controlled by this object
+// [transformStreamController_]: TransformStreamDefaultController<
+// InputType,
+// OutputType
+// >; // A TransformStreamDefaultController created with the ability to control[[readable]] and[[writable]]; also used for the IsTransformStream brand check
+// [writable_]: ws.WritableStream<InputType>; // The WritableStream instance controlled by this object
+// }
+
+// // ---- TransformStream
+
+// export function isTransformStream(
+// value: unknown
+// ): value is TransformStream<any, any> {
+// if (typeof value !== "object" || value === null) {
+// return false;
+// }
+// return transformStreamController_ in value;
+// }
+
+// export function initializeTransformStream<InputType, OutputType>(
+// stream: TransformStream<InputType, OutputType>,
+// startPromise: Promise<void>,
+// writableHighWaterMark: number,
+// writableSizeAlgorithm: QueuingStrategySizeCallback<InputType>,
+// readableHighWaterMark: number,
+// readableSizeAlgorithm: QueuingStrategySizeCallback<OutputType>
+// ): void {
+// const startAlgorithm = function(): Promise<void> {
+// return startPromise;
+// };
+// const writeAlgorithm = function(chunk: InputType): Promise<void> {
+// return transformStreamDefaultSinkWriteAlgorithm(stream, chunk);
+// };
+// const abortAlgorithm = function(reason: shared.ErrorResult): Promise<void> {
+// return transformStreamDefaultSinkAbortAlgorithm(stream, reason);
+// };
+// const closeAlgorithm = function(): Promise<void> {
+// return transformStreamDefaultSinkCloseAlgorithm(stream);
+// };
+// stream[writable_] = createWritableStream<InputType>(
+// startAlgorithm,
+// writeAlgorithm,
+// closeAlgorithm,
+// abortAlgorithm,
+// writableHighWaterMark,
+// writableSizeAlgorithm
+// );
+
+// const pullAlgorithm = function(): Promise<void> {
+// return transformStreamDefaultSourcePullAlgorithm(stream);
+// };
+// const cancelAlgorithm = function(
+// reason: shared.ErrorResult
+// ): Promise<undefined> {
+// transformStreamErrorWritableAndUnblockWrite(stream, reason);
+// return Promise.resolve(undefined);
+// };
+// stream[readable_] = createReadableStream(
+// startAlgorithm,
+// pullAlgorithm,
+// cancelAlgorithm,
+// readableHighWaterMark,
+// readableSizeAlgorithm
+// );
+
+// stream[backpressure_] = undefined;
+// stream[backpressureChangePromise_] = undefined;
+// transformStreamSetBackpressure(stream, true);
+// stream[transformStreamController_] = undefined!; // initialize slot for brand-check
+// }
+
+// export function transformStreamError<InputType, OutputType>(
+// stream: TransformStream<InputType, OutputType>,
+// error: shared.ErrorResult
+// ): void {
+// rs.readableStreamDefaultControllerError(
+// stream[readable_][
+// rs.readableStreamController_
+// ] as rs.SDReadableStreamDefaultController<OutputType>,
+// error
+// );
+// transformStreamErrorWritableAndUnblockWrite(stream, error);
+// }
+
+// export function transformStreamErrorWritableAndUnblockWrite<
+// InputType,
+// OutputType
+// >(
+// stream: TransformStream<InputType, OutputType>,
+// error: shared.ErrorResult
+// ): void {
+// transformStreamDefaultControllerClearAlgorithms(
+// stream[transformStreamController_]
+// );
+// ws.writableStreamDefaultControllerErrorIfNeeded(
+// stream[writable_][ws.writableStreamController_]!,
+// error
+// );
+// if (stream[backpressure_]) {
+// transformStreamSetBackpressure(stream, false);
+// }
+// }
+
+// export function transformStreamSetBackpressure<InputType, OutputType>(
+// stream: TransformStream<InputType, OutputType>,
+// backpressure: boolean
+// ): void {
+// // Assert: stream.[[backpressure]] is not backpressure.
+// if (stream[backpressure_] !== undefined) {
+// stream[backpressureChangePromise_]!.resolve(undefined);
+// }
+// stream[backpressureChangePromise_] = shared.createControlledPromise<void>();
+// stream[backpressure_] = backpressure;
+// }
+
+// // ---- TransformStreamDefaultController
+
+// export function isTransformStreamDefaultController(
+// value: unknown
+// ): value is TransformStreamDefaultController<any, any> {
+// if (typeof value !== "object" || value === null) {
+// return false;
+// }
+// return controlledTransformStream_ in value;
+// }
+
+// export function setUpTransformStreamDefaultController<InputType, OutputType>(
+// stream: TransformStream<InputType, OutputType>,
+// controller: TransformStreamDefaultController<InputType, OutputType>,
+// transformAlgorithm: TransformAlgorithm<InputType>,
+// flushAlgorithm: FlushAlgorithm
+// ): void {
+// // Assert: ! IsTransformStream(stream) is true.
+// // Assert: stream.[[transformStreamController]] is undefined.
+// controller[controlledTransformStream_] = stream;
+// stream[transformStreamController_] = controller;
+// controller[transformAlgorithm_] = transformAlgorithm;
+// controller[flushAlgorithm_] = flushAlgorithm;
+// }
+
+// export function transformStreamDefaultControllerClearAlgorithms<
+// InputType,
+// OutputType
+// >(controller: TransformStreamDefaultController<InputType, OutputType>): void {
+// // Use ! assertions to override type check here, this way we don't
+// // have to perform type checks/assertions everywhere else.
+// controller[transformAlgorithm_] = undefined!;
+// controller[flushAlgorithm_] = undefined!;
+// }
+
+// export function transformStreamDefaultControllerEnqueue<InputType, OutputType>(
+// controller: TransformStreamDefaultController<InputType, OutputType>,
+// chunk: OutputType
+// ): void {
+// const stream = controller[controlledTransformStream_];
+// const readableController = stream[readable_][
+// rs.readableStreamController_
+// ] as rs.SDReadableStreamDefaultController<OutputType>;
+// if (
+// !rs.readableStreamDefaultControllerCanCloseOrEnqueue(readableController)
+// ) {
+// throw new TypeError();
+// }
+// try {
+// rs.readableStreamDefaultControllerEnqueue(readableController, chunk);
+// } catch (error) {
+// transformStreamErrorWritableAndUnblockWrite(stream, error);
+// throw stream[readable_][shared.storedError_];
+// }
+// const backpressure = rs.readableStreamDefaultControllerHasBackpressure(
+// readableController
+// );
+// if (backpressure !== stream[backpressure_]) {
+// // Assert: backpressure is true.
+// transformStreamSetBackpressure(stream, true);
+// }
+// }
+
+// export function transformStreamDefaultControllerError<InputType, OutputType>(
+// controller: TransformStreamDefaultController<InputType, OutputType>,
+// error: shared.ErrorResult
+// ): void {
+// transformStreamError(controller[controlledTransformStream_], error);
+// }
+
+// export function transformStreamDefaultControllerPerformTransform<
+// InputType,
+// OutputType
+// >(
+// controller: TransformStreamDefaultController<InputType, OutputType>,
+// chunk: InputType
+// ): Promise<void> {
+// const transformPromise = controller[transformAlgorithm_](chunk);
+// return transformPromise.catch(error => {
+// transformStreamError(controller[controlledTransformStream_], error);
+// throw error;
+// });
+// }
+
+// export function transformStreamDefaultControllerTerminate<
+// InputType,
+// OutputType
+// >(controller: TransformStreamDefaultController<InputType, OutputType>): void {
+// const stream = controller[controlledTransformStream_];
+// const readableController = stream[readable_][
+// rs.readableStreamController_
+// ] as rs.SDReadableStreamDefaultController<OutputType>;
+// if (rs.readableStreamDefaultControllerCanCloseOrEnqueue(readableController)) {
+// rs.readableStreamDefaultControllerClose(readableController);
+// }
+// const error = new TypeError("The transform stream has been terminated");
+// transformStreamErrorWritableAndUnblockWrite(stream, error);
+// }
+
+// // ---- Transform Sinks
+
+// export function transformStreamDefaultSinkWriteAlgorithm<InputType, OutputType>(
+// stream: TransformStream<InputType, OutputType>,
+// chunk: InputType
+// ): Promise<void> {
+// // Assert: stream.[[writable]].[[state]] is "writable".
+// const controller = stream[transformStreamController_];
+// if (stream[backpressure_]) {
+// const backpressureChangePromise = stream[backpressureChangePromise_]!;
+// // Assert: backpressureChangePromise is not undefined.
+// return backpressureChangePromise.promise.then(_ => {
+// const writable = stream[writable_];
+// const state = writable[shared.state_];
+// if (state === "erroring") {
+// throw writable[shared.storedError_];
+// }
+// // Assert: state is "writable".
+// return transformStreamDefaultControllerPerformTransform(
+// controller,
+// chunk
+// );
+// });
+// }
+// return transformStreamDefaultControllerPerformTransform(controller, chunk);
+// }
+
+// export function transformStreamDefaultSinkAbortAlgorithm<InputType, OutputType>(
+// stream: TransformStream<InputType, OutputType>,
+// reason: shared.ErrorResult
+// ): Promise<void> {
+// transformStreamError(stream, reason);
+// return Promise.resolve(undefined);
+// }
+
+// export function transformStreamDefaultSinkCloseAlgorithm<InputType, OutputType>(
+// stream: TransformStream<InputType, OutputType>
+// ): Promise<void> {
+// const readable = stream[readable_];
+// const controller = stream[transformStreamController_];
+// const flushPromise = controller[flushAlgorithm_]();
+// transformStreamDefaultControllerClearAlgorithms(controller);
+
+// return flushPromise.then(
+// _ => {
+// if (readable[shared.state_] === "errored") {
+// throw readable[shared.storedError_];
+// }
+// const readableController = readable[
+// rs.readableStreamController_
+// ] as rs.SDReadableStreamDefaultController<OutputType>;
+// if (
+// rs.readableStreamDefaultControllerCanCloseOrEnqueue(readableController)
+// ) {
+// rs.readableStreamDefaultControllerClose(readableController);
+// }
+// },
+// error => {
+// transformStreamError(stream, error);
+// throw readable[shared.storedError_];
+// }
+// );
+// }
+
+// // ---- Transform Sources
+
+// export function transformStreamDefaultSourcePullAlgorithm<
+// InputType,
+// OutputType
+// >(stream: TransformStream<InputType, OutputType>): Promise<void> {
+// // Assert: stream.[[backpressure]] is true.
+// // Assert: stream.[[backpressureChangePromise]] is not undefined.
+// transformStreamSetBackpressure(stream, false);
+// return stream[backpressureChangePromise_]!.promise;
+// }
diff --git a/cli/js/web/streams/transform-stream-default-controller.ts b/cli/js/web/streams/transform-stream-default-controller.ts
new file mode 100644
index 000000000..24a8d08fd
--- /dev/null
+++ b/cli/js/web/streams/transform-stream-default-controller.ts
@@ -0,0 +1,58 @@
+// TODO reenable this code when we enable writableStreams and transport types
+// // Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// // Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+
+// /**
+// * streams/transform-stream-default-controller - TransformStreamDefaultController class implementation
+// * Part of Stardazed
+// * (c) 2018-Present by Arthur Langereis - @zenmumbler
+// * https://github.com/stardazed/sd-streams
+// */
+
+// import * as rs from "./readable-internals.ts";
+// import * as ts from "./transform-internals.ts";
+// import { ErrorResult } from "./shared-internals.ts";
+
+// export class TransformStreamDefaultController<InputType, OutputType>
+// implements ts.TransformStreamDefaultController<InputType, OutputType> {
+// [ts.controlledTransformStream_]: ts.TransformStream<InputType, OutputType>;
+// [ts.flushAlgorithm_]: ts.FlushAlgorithm;
+// [ts.transformAlgorithm_]: ts.TransformAlgorithm<InputType>;
+
+// constructor() {
+// throw new TypeError();
+// }
+
+// get desiredSize(): number | null {
+// if (!ts.isTransformStreamDefaultController(this)) {
+// throw new TypeError();
+// }
+// const readableController = this[ts.controlledTransformStream_][
+// ts.readable_
+// ][rs.readableStreamController_] as rs.SDReadableStreamDefaultController<
+// OutputType
+// >;
+// return rs.readableStreamDefaultControllerGetDesiredSize(readableController);
+// }
+
+// enqueue(chunk: OutputType): void {
+// if (!ts.isTransformStreamDefaultController(this)) {
+// throw new TypeError();
+// }
+// ts.transformStreamDefaultControllerEnqueue(this, chunk);
+// }
+
+// error(reason: ErrorResult): void {
+// if (!ts.isTransformStreamDefaultController(this)) {
+// throw new TypeError();
+// }
+// ts.transformStreamDefaultControllerError(this, reason);
+// }
+
+// terminate(): void {
+// if (!ts.isTransformStreamDefaultController(this)) {
+// throw new TypeError();
+// }
+// ts.transformStreamDefaultControllerTerminate(this);
+// }
+// }
diff --git a/cli/js/web/streams/transform-stream.ts b/cli/js/web/streams/transform-stream.ts
new file mode 100644
index 000000000..090f78135
--- /dev/null
+++ b/cli/js/web/streams/transform-stream.ts
@@ -0,0 +1,147 @@
+// TODO reenable this code when we enable writableStreams and transport types
+// // Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// // Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+
+// /**
+// * streams/transform-stream - TransformStream class implementation
+// * Part of Stardazed
+// * (c) 2018-Present by Arthur Langereis - @zenmumbler
+// * https://github.com/stardazed/sd-streams
+// */
+
+// /* eslint-disable @typescript-eslint/no-explicit-any */
+// // TODO reenable this lint here
+
+// import * as rs from "./readable-internals.ts";
+// import * as ws from "./writable-internals.ts";
+// import * as ts from "./transform-internals.ts";
+// import * as shared from "./shared-internals.ts";
+// import { TransformStreamDefaultController } from "./transform-stream-default-controller.ts";
+// import { QueuingStrategy } from "../dom_types.ts";
+
+// export class TransformStream<InputType, OutputType> {
+// [ts.backpressure_]: boolean | undefined; // Whether there was backpressure on [[readable]] the last time it was observed
+// [ts.backpressureChangePromise_]: shared.ControlledPromise<void>; // A promise which is fulfilled and replaced every time the value of[[backpressure]] changes
+// [ts.readable_]: rs.SDReadableStream<OutputType>; // The ReadableStream instance controlled by this object
+// [ts.transformStreamController_]: TransformStreamDefaultController<
+// InputType,
+// OutputType
+// >; // A TransformStreamDefaultController created with the ability to control[[readable]] and[[writable]]; also used for the IsTransformStream brand check
+// [ts.writable_]: ws.WritableStream<InputType>; // The WritableStream instance controlled by this object
+
+// constructor(
+// transformer: ts.Transformer<InputType, OutputType> = {},
+// writableStrategy: QueuingStrategy<InputType> = {},
+// readableStrategy: QueuingStrategy<OutputType> = {}
+// ) {
+// const writableSizeFunction = writableStrategy.size;
+// const writableHighWaterMark = writableStrategy.highWaterMark;
+// const readableSizeFunction = readableStrategy.size;
+// const readableHighWaterMark = readableStrategy.highWaterMark;
+
+// const writableType = transformer.writableType;
+// if (writableType !== undefined) {
+// throw new RangeError(
+// "The transformer's `writableType` field must be undefined"
+// );
+// }
+// const writableSizeAlgorithm = shared.makeSizeAlgorithmFromSizeFunction(
+// writableSizeFunction
+// );
+// const writableHWM = shared.validateAndNormalizeHighWaterMark(
+// writableHighWaterMark === undefined ? 1 : writableHighWaterMark
+// );
+
+// const readableType = transformer.readableType;
+// if (readableType !== undefined) {
+// throw new RangeError(
+// "The transformer's `readableType` field must be undefined"
+// );
+// }
+// const readableSizeAlgorithm = shared.makeSizeAlgorithmFromSizeFunction(
+// readableSizeFunction
+// );
+// const readableHWM = shared.validateAndNormalizeHighWaterMark(
+// readableHighWaterMark === undefined ? 0 : readableHighWaterMark
+// );
+
+// const startPromise = shared.createControlledPromise<void>();
+// ts.initializeTransformStream(
+// this,
+// startPromise.promise,
+// writableHWM,
+// writableSizeAlgorithm,
+// readableHWM,
+// readableSizeAlgorithm
+// );
+// setUpTransformStreamDefaultControllerFromTransformer(this, transformer);
+
+// const startResult = shared.invokeOrNoop(transformer, "start", [
+// this[ts.transformStreamController_]
+// ]);
+// startPromise.resolve(startResult);
+// }
+
+// get readable(): rs.SDReadableStream<OutputType> {
+// if (!ts.isTransformStream(this)) {
+// throw new TypeError();
+// }
+// return this[ts.readable_];
+// }
+
+// get writable(): ws.WritableStream<InputType> {
+// if (!ts.isTransformStream(this)) {
+// throw new TypeError();
+// }
+// return this[ts.writable_];
+// }
+// }
+
+// function setUpTransformStreamDefaultControllerFromTransformer<
+// InputType,
+// OutputType
+// >(
+// stream: TransformStream<InputType, OutputType>,
+// transformer: ts.Transformer<InputType, OutputType>
+// ): void {
+// const controller = Object.create(
+// TransformStreamDefaultController.prototype
+// ) as TransformStreamDefaultController<InputType, OutputType>;
+// let transformAlgorithm: ts.TransformAlgorithm<InputType>;
+
+// const transformMethod = transformer.transform;
+// if (transformMethod !== undefined) {
+// if (typeof transformMethod !== "function") {
+// throw new TypeError(
+// "`transform` field of the transformer must be a function"
+// );
+// }
+// transformAlgorithm = (chunk: InputType): Promise<any> =>
+// shared.promiseCall(transformMethod, transformer, [chunk, controller]);
+// } else {
+// // use identity transform
+// transformAlgorithm = function(chunk: InputType): Promise<void> {
+// try {
+// // OutputType and InputType are the same here
+// ts.transformStreamDefaultControllerEnqueue(
+// controller,
+// (chunk as unknown) as OutputType
+// );
+// } catch (error) {
+// return Promise.reject(error);
+// }
+// return Promise.resolve(undefined);
+// };
+// }
+// const flushAlgorithm = shared.createAlgorithmFromUnderlyingMethod(
+// transformer,
+// "flush",
+// [controller]
+// );
+// ts.setUpTransformStreamDefaultController(
+// stream,
+// controller,
+// transformAlgorithm,
+// flushAlgorithm
+// );
+// }
diff --git a/cli/js/web/streams/writable-internals.ts b/cli/js/web/streams/writable-internals.ts
new file mode 100644
index 000000000..78bb19a28
--- /dev/null
+++ b/cli/js/web/streams/writable-internals.ts
@@ -0,0 +1,800 @@
+// TODO reenable this code when we enable writableStreams and transport types
+// // Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// // Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+
+// /**
+// * streams/writable-internals - internal types and functions for writable streams
+// * Part of Stardazed
+// * (c) 2018-Present by Arthur Langereis - @zenmumbler
+// * https://github.com/stardazed/sd-streams
+// */
+
+// /* eslint-disable @typescript-eslint/no-explicit-any */
+// // TODO reenable this lint here
+
+// import * as shared from "./shared-internals.ts";
+// import * as q from "./queue-mixin.ts";
+
+// import { QueuingStrategy, QueuingStrategySizeCallback } from "../dom_types.ts";
+
+// export const backpressure_ = Symbol("backpressure_");
+// export const closeRequest_ = Symbol("closeRequest_");
+// export const inFlightWriteRequest_ = Symbol("inFlightWriteRequest_");
+// export const inFlightCloseRequest_ = Symbol("inFlightCloseRequest_");
+// export const pendingAbortRequest_ = Symbol("pendingAbortRequest_");
+// export const writableStreamController_ = Symbol("writableStreamController_");
+// export const writer_ = Symbol("writer_");
+// export const writeRequests_ = Symbol("writeRequests_");
+
+// export const abortAlgorithm_ = Symbol("abortAlgorithm_");
+// export const closeAlgorithm_ = Symbol("closeAlgorithm_");
+// export const controlledWritableStream_ = Symbol("controlledWritableStream_");
+// export const started_ = Symbol("started_");
+// export const strategyHWM_ = Symbol("strategyHWM_");
+// export const strategySizeAlgorithm_ = Symbol("strategySizeAlgorithm_");
+// export const writeAlgorithm_ = Symbol("writeAlgorithm_");
+
+// export const ownerWritableStream_ = Symbol("ownerWritableStream_");
+// export const closedPromise_ = Symbol("closedPromise_");
+// export const readyPromise_ = Symbol("readyPromise_");
+
+// export const errorSteps_ = Symbol("errorSteps_");
+// export const abortSteps_ = Symbol("abortSteps_");
+
+// export type StartFunction = (
+// controller: WritableStreamController
+// ) => void | PromiseLike<void>;
+// export type StartAlgorithm = () => Promise<void> | void;
+// export type WriteFunction<InputType> = (
+// chunk: InputType,
+// controller: WritableStreamController
+// ) => void | PromiseLike<void>;
+// export type WriteAlgorithm<InputType> = (chunk: InputType) => Promise<void>;
+// export type CloseAlgorithm = () => Promise<void>;
+// export type AbortAlgorithm = (reason?: shared.ErrorResult) => Promise<void>;
+
+// // ----
+
+// export interface WritableStreamController {
+// error(e?: shared.ErrorResult): void;
+
+// [errorSteps_](): void;
+// [abortSteps_](reason: shared.ErrorResult): Promise<void>;
+// }
+
+// export interface WriteRecord<InputType> {
+// chunk: InputType;
+// }
+
+// export interface WritableStreamDefaultController<InputType>
+// extends WritableStreamController,
+// q.QueueContainer<WriteRecord<InputType> | "close"> {
+// [abortAlgorithm_]: AbortAlgorithm; // A promise - returning algorithm, taking one argument(the abort reason), which communicates a requested abort to the underlying sink
+// [closeAlgorithm_]: CloseAlgorithm; // A promise - returning algorithm which communicates a requested close to the underlying sink
+// [controlledWritableStream_]: WritableStream<InputType>; // The WritableStream instance controlled
+// [started_]: boolean; // A boolean flag indicating whether the underlying sink has finished starting
+// [strategyHWM_]: number; // A number supplied by the creator of the stream as part of the stream’s queuing strategy, indicating the point at which the stream will apply backpressure to its underlying sink
+// [strategySizeAlgorithm_]: QueuingStrategySizeCallback<InputType>; // An algorithm to calculate the size of enqueued chunks, as part of the stream’s queuing strategy
+// [writeAlgorithm_]: WriteAlgorithm<InputType>; // A promise-returning algorithm, taking one argument (the chunk to write), which writes data to the underlying sink
+// }
+
+// // ----
+
+// export interface WritableStreamWriter<InputType> {
+// readonly closed: Promise<void>;
+// readonly desiredSize: number | null;
+// readonly ready: Promise<void>;
+
+// abort(reason: shared.ErrorResult): Promise<void>;
+// close(): Promise<void>;
+// releaseLock(): void;
+// write(chunk: InputType): Promise<void>;
+// }
+
+// export interface WritableStreamDefaultWriter<InputType>
+// extends WritableStreamWriter<InputType> {
+// [ownerWritableStream_]: WritableStream<InputType> | undefined;
+// [closedPromise_]: shared.ControlledPromise<void>;
+// [readyPromise_]: shared.ControlledPromise<void>;
+// }
+
+// // ----
+
+// export type WritableStreamState =
+// | "writable"
+// | "closed"
+// | "erroring"
+// | "errored";
+
+// export interface WritableStreamSink<InputType> {
+// start?: StartFunction;
+// write?: WriteFunction<InputType>;
+// close?(): void | PromiseLike<void>;
+// abort?(reason?: shared.ErrorResult): void;
+
+// type?: undefined; // unused, for future revisions
+// }
+
+// export interface AbortRequest {
+// reason: shared.ErrorResult;
+// wasAlreadyErroring: boolean;
+// promise: Promise<void>;
+// resolve(): void;
+// reject(error: shared.ErrorResult): void;
+// }
+
+// export declare class WritableStream<InputType> {
+// constructor(
+// underlyingSink?: WritableStreamSink<InputType>,
+// strategy?: QueuingStrategy<InputType>
+// );
+
+// readonly locked: boolean;
+// abort(reason?: shared.ErrorResult): Promise<void>;
+// getWriter(): WritableStreamWriter<InputType>;
+
+// [shared.state_]: WritableStreamState;
+// [backpressure_]: boolean;
+// [closeRequest_]: shared.ControlledPromise<void> | undefined;
+// [inFlightWriteRequest_]: shared.ControlledPromise<void> | undefined;
+// [inFlightCloseRequest_]: shared.ControlledPromise<void> | undefined;
+// [pendingAbortRequest_]: AbortRequest | undefined;
+// [shared.storedError_]: shared.ErrorResult;
+// [writableStreamController_]:
+// | WritableStreamDefaultController<InputType>
+// | undefined;
+// [writer_]: WritableStreamDefaultWriter<InputType> | undefined;
+// [writeRequests_]: Array<shared.ControlledPromise<void>>;
+// }
+
+// // ---- Stream
+
+// export function initializeWritableStream<InputType>(
+// stream: WritableStream<InputType>
+// ): void {
+// stream[shared.state_] = "writable";
+// stream[shared.storedError_] = undefined;
+// stream[writer_] = undefined;
+// stream[writableStreamController_] = undefined;
+// stream[inFlightWriteRequest_] = undefined;
+// stream[closeRequest_] = undefined;
+// stream[inFlightCloseRequest_] = undefined;
+// stream[pendingAbortRequest_] = undefined;
+// stream[writeRequests_] = [];
+// stream[backpressure_] = false;
+// }
+
+// export function isWritableStream(value: unknown): value is WritableStream<any> {
+// if (typeof value !== "object" || value === null) {
+// return false;
+// }
+// return writableStreamController_ in value;
+// }
+
+// export function isWritableStreamLocked<InputType>(
+// stream: WritableStream<InputType>
+// ): boolean {
+// return stream[writer_] !== undefined;
+// }
+
+// export function writableStreamAbort<InputType>(
+// stream: WritableStream<InputType>,
+// reason: shared.ErrorResult
+// ): Promise<void> {
+// const state = stream[shared.state_];
+// if (state === "closed" || state === "errored") {
+// return Promise.resolve(undefined);
+// }
+// let pending = stream[pendingAbortRequest_];
+// if (pending !== undefined) {
+// return pending.promise;
+// }
+// // Assert: state is "writable" or "erroring".
+// let wasAlreadyErroring = false;
+// if (state === "erroring") {
+// wasAlreadyErroring = true;
+// reason = undefined;
+// }
+
+// pending = {
+// reason,
+// wasAlreadyErroring
+// } as AbortRequest;
+// const promise = new Promise<void>((resolve, reject) => {
+// pending!.resolve = resolve;
+// pending!.reject = reject;
+// });
+// pending.promise = promise;
+// stream[pendingAbortRequest_] = pending;
+// if (!wasAlreadyErroring) {
+// writableStreamStartErroring(stream, reason);
+// }
+// return promise;
+// }
+
+// export function writableStreamAddWriteRequest<InputType>(
+// stream: WritableStream<InputType>
+// ): Promise<void> {
+// // Assert: !IsWritableStreamLocked(stream) is true.
+// // Assert: stream.[[state]] is "writable".
+// const writePromise = shared.createControlledPromise<void>();
+// stream[writeRequests_].push(writePromise);
+// return writePromise.promise;
+// }
+
+// export function writableStreamDealWithRejection<InputType>(
+// stream: WritableStream<InputType>,
+// error: shared.ErrorResult
+// ): void {
+// const state = stream[shared.state_];
+// if (state === "writable") {
+// writableStreamStartErroring(stream, error);
+// return;
+// }
+// // Assert: state is "erroring"
+// writableStreamFinishErroring(stream);
+// }
+
+// export function writableStreamStartErroring<InputType>(
+// stream: WritableStream<InputType>,
+// reason: shared.ErrorResult
+// ): void {
+// // Assert: stream.[[storedError]] is undefined.
+// // Assert: stream.[[state]] is "writable".
+// const controller = stream[writableStreamController_]!;
+// // Assert: controller is not undefined.
+// stream[shared.state_] = "erroring";
+// stream[shared.storedError_] = reason;
+// const writer = stream[writer_];
+// if (writer !== undefined) {
+// writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, reason);
+// }
+// if (
+// !writableStreamHasOperationMarkedInFlight(stream) &&
+// controller[started_]
+// ) {
+// writableStreamFinishErroring(stream);
+// }
+// }
+
+// export function writableStreamFinishErroring<InputType>(
+// stream: WritableStream<InputType>
+// ): void {
+// // Assert: stream.[[state]] is "erroring".
+// // Assert: writableStreamHasOperationMarkedInFlight(stream) is false.
+// stream[shared.state_] = "errored";
+// const controller = stream[writableStreamController_]!;
+// controller[errorSteps_]();
+// const storedError = stream[shared.storedError_];
+// for (const writeRequest of stream[writeRequests_]) {
+// writeRequest.reject(storedError);
+// }
+// stream[writeRequests_] = [];
+
+// const abortRequest = stream[pendingAbortRequest_];
+// if (abortRequest === undefined) {
+// writableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
+// return;
+// }
+// stream[pendingAbortRequest_] = undefined;
+// if (abortRequest.wasAlreadyErroring) {
+// abortRequest.reject(storedError);
+// writableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
+// return;
+// }
+// const promise = controller[abortSteps_](abortRequest.reason);
+// promise.then(
+// _ => {
+// abortRequest.resolve();
+// writableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
+// },
+// error => {
+// abortRequest.reject(error);
+// writableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
+// }
+// );
+// }
+
+// export function writableStreamFinishInFlightWrite<InputType>(
+// stream: WritableStream<InputType>
+// ): void {
+// // Assert: stream.[[inFlightWriteRequest]] is not undefined.
+// stream[inFlightWriteRequest_]!.resolve(undefined);
+// stream[inFlightWriteRequest_] = undefined;
+// }
+
+// export function writableStreamFinishInFlightWriteWithError<InputType>(
+// stream: WritableStream<InputType>,
+// error: shared.ErrorResult
+// ): void {
+// // Assert: stream.[[inFlightWriteRequest]] is not undefined.
+// stream[inFlightWriteRequest_]!.reject(error);
+// stream[inFlightWriteRequest_] = undefined;
+// // Assert: stream.[[state]] is "writable" or "erroring".
+// writableStreamDealWithRejection(stream, error);
+// }
+
+// export function writableStreamFinishInFlightClose<InputType>(
+// stream: WritableStream<InputType>
+// ): void {
+// // Assert: stream.[[inFlightCloseRequest]] is not undefined.
+// stream[inFlightCloseRequest_]!.resolve(undefined);
+// stream[inFlightCloseRequest_] = undefined;
+// const state = stream[shared.state_];
+// // Assert: stream.[[state]] is "writable" or "erroring".
+// if (state === "erroring") {
+// stream[shared.storedError_] = undefined;
+// if (stream[pendingAbortRequest_] !== undefined) {
+// stream[pendingAbortRequest_]!.resolve();
+// stream[pendingAbortRequest_] = undefined;
+// }
+// }
+// stream[shared.state_] = "closed";
+// const writer = stream[writer_];
+// if (writer !== undefined) {
+// writer[closedPromise_].resolve(undefined);
+// }
+// // Assert: stream.[[pendingAbortRequest]] is undefined.
+// // Assert: stream.[[storedError]] is undefined.
+// }
+
+// export function writableStreamFinishInFlightCloseWithError<InputType>(
+// stream: WritableStream<InputType>,
+// error: shared.ErrorResult
+// ): void {
+// // Assert: stream.[[inFlightCloseRequest]] is not undefined.
+// stream[inFlightCloseRequest_]!.reject(error);
+// stream[inFlightCloseRequest_] = undefined;
+// // Assert: stream.[[state]] is "writable" or "erroring".
+// if (stream[pendingAbortRequest_] !== undefined) {
+// stream[pendingAbortRequest_]!.reject(error);
+// stream[pendingAbortRequest_] = undefined;
+// }
+// writableStreamDealWithRejection(stream, error);
+// }
+
+// export function writableStreamCloseQueuedOrInFlight<InputType>(
+// stream: WritableStream<InputType>
+// ): boolean {
+// return (
+// stream[closeRequest_] !== undefined ||
+// stream[inFlightCloseRequest_] !== undefined
+// );
+// }
+
+// export function writableStreamHasOperationMarkedInFlight<InputType>(
+// stream: WritableStream<InputType>
+// ): boolean {
+// return (
+// stream[inFlightWriteRequest_] !== undefined ||
+// stream[inFlightCloseRequest_] !== undefined
+// );
+// }
+
+// export function writableStreamMarkCloseRequestInFlight<InputType>(
+// stream: WritableStream<InputType>
+// ): void {
+// // Assert: stream.[[inFlightCloseRequest]] is undefined.
+// // Assert: stream.[[closeRequest]] is not undefined.
+// stream[inFlightCloseRequest_] = stream[closeRequest_];
+// stream[closeRequest_] = undefined;
+// }
+
+// export function writableStreamMarkFirstWriteRequestInFlight<InputType>(
+// stream: WritableStream<InputType>
+// ): void {
+// // Assert: stream.[[inFlightWriteRequest]] is undefined.
+// // Assert: stream.[[writeRequests]] is not empty.
+// const writeRequest = stream[writeRequests_].shift()!;
+// stream[inFlightWriteRequest_] = writeRequest;
+// }
+
+// export function writableStreamRejectCloseAndClosedPromiseIfNeeded<InputType>(
+// stream: WritableStream<InputType>
+// ): void {
+// // Assert: stream.[[state]] is "errored".
+// const closeRequest = stream[closeRequest_];
+// if (closeRequest !== undefined) {
+// // Assert: stream.[[inFlightCloseRequest]] is undefined.
+// closeRequest.reject(stream[shared.storedError_]);
+// stream[closeRequest_] = undefined;
+// }
+// const writer = stream[writer_];
+// if (writer !== undefined) {
+// writer[closedPromise_].reject(stream[shared.storedError_]);
+// writer[closedPromise_].promise.catch(() => {});
+// }
+// }
+
+// export function writableStreamUpdateBackpressure<InputType>(
+// stream: WritableStream<InputType>,
+// backpressure: boolean
+// ): void {
+// // Assert: stream.[[state]] is "writable".
+// // Assert: !WritableStreamCloseQueuedOrInFlight(stream) is false.
+// const writer = stream[writer_];
+// if (writer !== undefined && backpressure !== stream[backpressure_]) {
+// if (backpressure) {
+// writer[readyPromise_] = shared.createControlledPromise<void>();
+// } else {
+// writer[readyPromise_].resolve(undefined);
+// }
+// }
+// stream[backpressure_] = backpressure;
+// }
+
+// // ---- Writers
+
+// export function isWritableStreamDefaultWriter(
+// value: unknown
+// ): value is WritableStreamDefaultWriter<any> {
+// if (typeof value !== "object" || value === null) {
+// return false;
+// }
+// return ownerWritableStream_ in value;
+// }
+
+// export function writableStreamDefaultWriterAbort<InputType>(
+// writer: WritableStreamDefaultWriter<InputType>,
+// reason: shared.ErrorResult
+// ): Promise<void> {
+// const stream = writer[ownerWritableStream_]!;
+// // Assert: stream is not undefined.
+// return writableStreamAbort(stream, reason);
+// }
+
+// export function writableStreamDefaultWriterClose<InputType>(
+// writer: WritableStreamDefaultWriter<InputType>
+// ): Promise<void> {
+// const stream = writer[ownerWritableStream_]!;
+// // Assert: stream is not undefined.
+// const state = stream[shared.state_];
+// if (state === "closed" || state === "errored") {
+// return Promise.reject(
+// new TypeError("Writer stream is already closed or errored")
+// );
+// }
+// // Assert: state is "writable" or "erroring".
+// // Assert: writableStreamCloseQueuedOrInFlight(stream) is false.
+// const closePromise = shared.createControlledPromise<void>();
+// stream[closeRequest_] = closePromise;
+// if (stream[backpressure_] && state === "writable") {
+// writer[readyPromise_].resolve(undefined);
+// }
+// writableStreamDefaultControllerClose(stream[writableStreamController_]!);
+// return closePromise.promise;
+// }
+
+// export function writableStreamDefaultWriterCloseWithErrorPropagation<InputType>(
+// writer: WritableStreamDefaultWriter<InputType>
+// ): Promise<void> {
+// const stream = writer[ownerWritableStream_]!;
+// // Assert: stream is not undefined.
+// const state = stream[shared.state_];
+// if (writableStreamCloseQueuedOrInFlight(stream) || state === "closed") {
+// return Promise.resolve(undefined);
+// }
+// if (state === "errored") {
+// return Promise.reject(stream[shared.storedError_]);
+// }
+// // Assert: state is "writable" or "erroring".
+// return writableStreamDefaultWriterClose(writer);
+// }
+
+// export function writableStreamDefaultWriterEnsureClosedPromiseRejected<
+// InputType
+// >(
+// writer: WritableStreamDefaultWriter<InputType>,
+// error: shared.ErrorResult
+// ): void {
+// const closedPromise = writer[closedPromise_];
+// if (closedPromise.state === shared.ControlledPromiseState.Pending) {
+// closedPromise.reject(error);
+// } else {
+// writer[closedPromise_] = shared.createControlledPromise<void>();
+// writer[closedPromise_].reject(error);
+// }
+// writer[closedPromise_].promise.catch(() => {});
+// }
+
+// export function writableStreamDefaultWriterEnsureReadyPromiseRejected<
+// InputType
+// >(
+// writer: WritableStreamDefaultWriter<InputType>,
+// error: shared.ErrorResult
+// ): void {
+// const readyPromise = writer[readyPromise_];
+// if (readyPromise.state === shared.ControlledPromiseState.Pending) {
+// readyPromise.reject(error);
+// } else {
+// writer[readyPromise_] = shared.createControlledPromise<void>();
+// writer[readyPromise_].reject(error);
+// }
+// writer[readyPromise_].promise.catch(() => {});
+// }
+
+// export function writableStreamDefaultWriterGetDesiredSize<InputType>(
+// writer: WritableStreamDefaultWriter<InputType>
+// ): number | null {
+// const stream = writer[ownerWritableStream_]!;
+// const state = stream[shared.state_];
+// if (state === "errored" || state === "erroring") {
+// return null;
+// }
+// if (state === "closed") {
+// return 0;
+// }
+// return writableStreamDefaultControllerGetDesiredSize(
+// stream[writableStreamController_]!
+// );
+// }
+
+// export function writableStreamDefaultWriterRelease<InputType>(
+// writer: WritableStreamDefaultWriter<InputType>
+// ): void {
+// const stream = writer[ownerWritableStream_]!;
+// // Assert: stream is not undefined.
+// // Assert: stream.[[writer]] is writer.
+// const releasedError = new TypeError();
+// writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, releasedError);
+// writableStreamDefaultWriterEnsureClosedPromiseRejected(writer, releasedError);
+// stream[writer_] = undefined;
+// writer[ownerWritableStream_] = undefined;
+// }
+
+// export function writableStreamDefaultWriterWrite<InputType>(
+// writer: WritableStreamDefaultWriter<InputType>,
+// chunk: InputType
+// ): Promise<void> {
+// const stream = writer[ownerWritableStream_]!;
+// // Assert: stream is not undefined.
+// const controller = stream[writableStreamController_]!;
+// const chunkSize = writableStreamDefaultControllerGetChunkSize(
+// controller,
+// chunk
+// );
+// if (writer[ownerWritableStream_] !== stream) {
+// return Promise.reject(new TypeError());
+// }
+// const state = stream[shared.state_];
+// if (state === "errored") {
+// return Promise.reject(stream[shared.storedError_]);
+// }
+// if (writableStreamCloseQueuedOrInFlight(stream) || state === "closed") {
+// return Promise.reject(
+// new TypeError("Cannot write to a closing or closed stream")
+// );
+// }
+// if (state === "erroring") {
+// return Promise.reject(stream[shared.storedError_]);
+// }
+// // Assert: state is "writable".
+// const promise = writableStreamAddWriteRequest(stream);
+// writableStreamDefaultControllerWrite(controller, chunk, chunkSize);
+// return promise;
+// }
+
+// // ---- Controller
+
+// export function setUpWritableStreamDefaultController<InputType>(
+// stream: WritableStream<InputType>,
+// controller: WritableStreamDefaultController<InputType>,
+// startAlgorithm: StartAlgorithm,
+// writeAlgorithm: WriteAlgorithm<InputType>,
+// closeAlgorithm: CloseAlgorithm,
+// abortAlgorithm: AbortAlgorithm,
+// highWaterMark: number,
+// sizeAlgorithm: QueuingStrategySizeCallback<InputType>
+// ): void {
+// if (!isWritableStream(stream)) {
+// throw new TypeError();
+// }
+// if (stream[writableStreamController_] !== undefined) {
+// throw new TypeError();
+// }
+
+// controller[controlledWritableStream_] = stream;
+// stream[writableStreamController_] = controller;
+// q.resetQueue(controller);
+// controller[started_] = false;
+// controller[strategySizeAlgorithm_] = sizeAlgorithm;
+// controller[strategyHWM_] = highWaterMark;
+// controller[writeAlgorithm_] = writeAlgorithm;
+// controller[closeAlgorithm_] = closeAlgorithm;
+// controller[abortAlgorithm_] = abortAlgorithm;
+// const backpressure = writableStreamDefaultControllerGetBackpressure(
+// controller
+// );
+// writableStreamUpdateBackpressure(stream, backpressure);
+
+// const startResult = startAlgorithm();
+// Promise.resolve(startResult).then(
+// _ => {
+// // Assert: stream.[[state]] is "writable" or "erroring".
+// controller[started_] = true;
+// writableStreamDefaultControllerAdvanceQueueIfNeeded(controller);
+// },
+// error => {
+// // Assert: stream.[[state]] is "writable" or "erroring".
+// controller[started_] = true;
+// writableStreamDealWithRejection(stream, error);
+// }
+// );
+// }
+
+// export function isWritableStreamDefaultController(
+// value: unknown
+// ): value is WritableStreamDefaultController<any> {
+// if (typeof value !== "object" || value === null) {
+// return false;
+// }
+// return controlledWritableStream_ in value;
+// }
+
+// export function writableStreamDefaultControllerClearAlgorithms<InputType>(
+// controller: WritableStreamDefaultController<InputType>
+// ): void {
+// // Use ! assertions to override type check here, this way we don't
+// // have to perform type checks/assertions everywhere else.
+// controller[writeAlgorithm_] = undefined!;
+// controller[closeAlgorithm_] = undefined!;
+// controller[abortAlgorithm_] = undefined!;
+// controller[strategySizeAlgorithm_] = undefined!;
+// }
+
+// export function writableStreamDefaultControllerClose<InputType>(
+// controller: WritableStreamDefaultController<InputType>
+// ): void {
+// q.enqueueValueWithSize(controller, "close", 0);
+// writableStreamDefaultControllerAdvanceQueueIfNeeded(controller);
+// }
+
+// export function writableStreamDefaultControllerGetChunkSize<InputType>(
+// controller: WritableStreamDefaultController<InputType>,
+// chunk: InputType
+// ): number {
+// let chunkSize: number;
+// try {
+// chunkSize = controller[strategySizeAlgorithm_](chunk);
+// } catch (error) {
+// writableStreamDefaultControllerErrorIfNeeded(controller, error);
+// chunkSize = 1;
+// }
+// return chunkSize;
+// }
+
+// export function writableStreamDefaultControllerGetDesiredSize<InputType>(
+// controller: WritableStreamDefaultController<InputType>
+// ): number {
+// return controller[strategyHWM_] - controller[q.queueTotalSize_];
+// }
+
+// export function writableStreamDefaultControllerWrite<InputType>(
+// controller: WritableStreamDefaultController<InputType>,
+// chunk: InputType,
+// chunkSize: number
+// ): void {
+// try {
+// q.enqueueValueWithSize(controller, { chunk }, chunkSize);
+// } catch (error) {
+// writableStreamDefaultControllerErrorIfNeeded(controller, error);
+// return;
+// }
+// const stream = controller[controlledWritableStream_];
+// if (
+// !writableStreamCloseQueuedOrInFlight(stream) &&
+// stream[shared.state_] === "writable"
+// ) {
+// const backpressure = writableStreamDefaultControllerGetBackpressure(
+// controller
+// );
+// writableStreamUpdateBackpressure(stream, backpressure);
+// }
+// writableStreamDefaultControllerAdvanceQueueIfNeeded(controller);
+// }
+
+// export function writableStreamDefaultControllerAdvanceQueueIfNeeded<InputType>(
+// controller: WritableStreamDefaultController<InputType>
+// ): void {
+// if (!controller[started_]) {
+// return;
+// }
+// const stream = controller[controlledWritableStream_];
+// if (stream[inFlightWriteRequest_] !== undefined) {
+// return;
+// }
+// const state = stream[shared.state_];
+// if (state === "closed" || state === "errored") {
+// return;
+// }
+// if (state === "erroring") {
+// writableStreamFinishErroring(stream);
+// return;
+// }
+// if (controller[q.queue_].length === 0) {
+// return;
+// }
+// const writeRecord = q.peekQueueValue(controller);
+// if (writeRecord === "close") {
+// writableStreamDefaultControllerProcessClose(controller);
+// } else {
+// writableStreamDefaultControllerProcessWrite(controller, writeRecord.chunk);
+// }
+// }
+
+// export function writableStreamDefaultControllerErrorIfNeeded<InputType>(
+// controller: WritableStreamDefaultController<InputType>,
+// error: shared.ErrorResult
+// ): void {
+// if (controller[controlledWritableStream_][shared.state_] === "writable") {
+// writableStreamDefaultControllerError(controller, error);
+// }
+// }
+
+// export function writableStreamDefaultControllerProcessClose<InputType>(
+// controller: WritableStreamDefaultController<InputType>
+// ): void {
+// const stream = controller[controlledWritableStream_];
+// writableStreamMarkCloseRequestInFlight(stream);
+// q.dequeueValue(controller);
+// // Assert: controller.[[queue]] is empty.
+// const sinkClosePromise = controller[closeAlgorithm_]();
+// writableStreamDefaultControllerClearAlgorithms(controller);
+// sinkClosePromise.then(
+// _ => {
+// writableStreamFinishInFlightClose(stream);
+// },
+// error => {
+// writableStreamFinishInFlightCloseWithError(stream, error);
+// }
+// );
+// }
+
+// export function writableStreamDefaultControllerProcessWrite<InputType>(
+// controller: WritableStreamDefaultController<InputType>,
+// chunk: InputType
+// ): void {
+// const stream = controller[controlledWritableStream_];
+// writableStreamMarkFirstWriteRequestInFlight(stream);
+// controller[writeAlgorithm_](chunk).then(
+// _ => {
+// writableStreamFinishInFlightWrite(stream);
+// const state = stream[shared.state_];
+// // Assert: state is "writable" or "erroring".
+// q.dequeueValue(controller);
+// if (
+// !writableStreamCloseQueuedOrInFlight(stream) &&
+// state === "writable"
+// ) {
+// const backpressure = writableStreamDefaultControllerGetBackpressure(
+// controller
+// );
+// writableStreamUpdateBackpressure(stream, backpressure);
+// }
+// writableStreamDefaultControllerAdvanceQueueIfNeeded(controller);
+// },
+// error => {
+// if (stream[shared.state_] === "writable") {
+// writableStreamDefaultControllerClearAlgorithms(controller);
+// }
+// writableStreamFinishInFlightWriteWithError(stream, error);
+// }
+// );
+// }
+
+// export function writableStreamDefaultControllerGetBackpressure<InputType>(
+// controller: WritableStreamDefaultController<InputType>
+// ): boolean {
+// const desiredSize = writableStreamDefaultControllerGetDesiredSize(controller);
+// return desiredSize <= 0;
+// }
+
+// export function writableStreamDefaultControllerError<InputType>(
+// controller: WritableStreamDefaultController<InputType>,
+// error: shared.ErrorResult
+// ): void {
+// const stream = controller[controlledWritableStream_];
+// // Assert: stream.[[state]] is "writable".
+// writableStreamDefaultControllerClearAlgorithms(controller);
+// writableStreamStartErroring(stream, error);
+// }
diff --git a/cli/js/web/streams/writable-stream-default-controller.ts b/cli/js/web/streams/writable-stream-default-controller.ts
new file mode 100644
index 000000000..57ffe08fd
--- /dev/null
+++ b/cli/js/web/streams/writable-stream-default-controller.ts
@@ -0,0 +1,101 @@
+// TODO reenable this code when we enable writableStreams and transport types
+// // Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// // Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+
+// /**
+// * streams/writable-stream-default-controller - WritableStreamDefaultController class implementation
+// * Part of Stardazed
+// * (c) 2018-Present by Arthur Langereis - @zenmumbler
+// * https://github.com/stardazed/sd-streams
+// */
+
+// /* eslint-disable @typescript-eslint/no-explicit-any */
+// // TODO reenable this lint here
+
+// import * as ws from "./writable-internals.ts";
+// import * as shared from "./shared-internals.ts";
+// import * as q from "./queue-mixin.ts";
+// import { Queue } from "./queue.ts";
+// import { QueuingStrategySizeCallback } from "../dom_types.ts";
+
+// export class WritableStreamDefaultController<InputType>
+// implements ws.WritableStreamDefaultController<InputType> {
+// [ws.abortAlgorithm_]: ws.AbortAlgorithm;
+// [ws.closeAlgorithm_]: ws.CloseAlgorithm;
+// [ws.controlledWritableStream_]: ws.WritableStream<InputType>;
+// [ws.started_]: boolean;
+// [ws.strategyHWM_]: number;
+// [ws.strategySizeAlgorithm_]: QueuingStrategySizeCallback<InputType>;
+// [ws.writeAlgorithm_]: ws.WriteAlgorithm<InputType>;
+
+// [q.queue_]: Queue<q.QueueElement<ws.WriteRecord<InputType> | "close">>;
+// [q.queueTotalSize_]: number;
+
+// constructor() {
+// throw new TypeError();
+// }
+
+// error(e?: shared.ErrorResult): void {
+// if (!ws.isWritableStreamDefaultController(this)) {
+// throw new TypeError();
+// }
+// const state = this[ws.controlledWritableStream_][shared.state_];
+// if (state !== "writable") {
+// return;
+// }
+// ws.writableStreamDefaultControllerError(this, e);
+// }
+
+// [ws.abortSteps_](reason: shared.ErrorResult): Promise<void> {
+// const result = this[ws.abortAlgorithm_](reason);
+// ws.writableStreamDefaultControllerClearAlgorithms(this);
+// return result;
+// }
+
+// [ws.errorSteps_](): void {
+// q.resetQueue(this);
+// }
+// }
+
+// export function setUpWritableStreamDefaultControllerFromUnderlyingSink<
+// InputType
+// >(
+// stream: ws.WritableStream<InputType>,
+// underlyingSink: ws.WritableStreamSink<InputType>,
+// highWaterMark: number,
+// sizeAlgorithm: QueuingStrategySizeCallback<InputType>
+// ): void {
+// // Assert: underlyingSink is not undefined.
+// const controller = Object.create(
+// WritableStreamDefaultController.prototype
+// ) as WritableStreamDefaultController<InputType>;
+
+// const startAlgorithm = function(): any {
+// return shared.invokeOrNoop(underlyingSink, "start", [controller]);
+// };
+// const writeAlgorithm = shared.createAlgorithmFromUnderlyingMethod(
+// underlyingSink,
+// "write",
+// [controller]
+// );
+// const closeAlgorithm = shared.createAlgorithmFromUnderlyingMethod(
+// underlyingSink,
+// "close",
+// []
+// );
+// const abortAlgorithm = shared.createAlgorithmFromUnderlyingMethod(
+// underlyingSink,
+// "abort",
+// []
+// );
+// ws.setUpWritableStreamDefaultController(
+// stream,
+// controller,
+// startAlgorithm,
+// writeAlgorithm,
+// closeAlgorithm,
+// abortAlgorithm,
+// highWaterMark,
+// sizeAlgorithm
+// );
+// }
diff --git a/cli/js/web/streams/writable-stream-default-writer.ts b/cli/js/web/streams/writable-stream-default-writer.ts
new file mode 100644
index 000000000..f38aa26bb
--- /dev/null
+++ b/cli/js/web/streams/writable-stream-default-writer.ts
@@ -0,0 +1,136 @@
+// TODO reenable this code when we enable writableStreams and transport types
+// // Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// // Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+
+// /**
+// * streams/writable-stream-default-writer - WritableStreamDefaultWriter class implementation
+// * Part of Stardazed
+// * (c) 2018-Present by Arthur Langereis - @zenmumbler
+// * https://github.com/stardazed/sd-streams
+// */
+
+// import * as ws from "./writable-internals.ts";
+// import * as shared from "./shared-internals.ts";
+
+// export class WritableStreamDefaultWriter<InputType>
+// implements ws.WritableStreamDefaultWriter<InputType> {
+// [ws.ownerWritableStream_]: ws.WritableStream<InputType> | undefined;
+// [ws.readyPromise_]: shared.ControlledPromise<void>;
+// [ws.closedPromise_]: shared.ControlledPromise<void>;
+
+// constructor(stream: ws.WritableStream<InputType>) {
+// if (!ws.isWritableStream(stream)) {
+// throw new TypeError();
+// }
+// if (ws.isWritableStreamLocked(stream)) {
+// throw new TypeError("Stream is already locked");
+// }
+// this[ws.ownerWritableStream_] = stream;
+// stream[ws.writer_] = this;
+
+// const readyPromise = shared.createControlledPromise<void>();
+// const closedPromise = shared.createControlledPromise<void>();
+// this[ws.readyPromise_] = readyPromise;
+// this[ws.closedPromise_] = closedPromise;
+
+// const state = stream[shared.state_];
+// if (state === "writable") {
+// if (
+// !ws.writableStreamCloseQueuedOrInFlight(stream) &&
+// stream[ws.backpressure_]
+// ) {
+// // OK Set this.[[readyPromise]] to a new promise.
+// } else {
+// readyPromise.resolve(undefined);
+// }
+// // OK Set this.[[closedPromise]] to a new promise.
+// } else if (state === "erroring") {
+// readyPromise.reject(stream[shared.storedError_]);
+// readyPromise.promise.catch(() => {});
+// // OK Set this.[[closedPromise]] to a new promise.
+// } else if (state === "closed") {
+// readyPromise.resolve(undefined);
+// closedPromise.resolve(undefined);
+// } else {
+// // Assert: state is "errored".
+// const storedError = stream[shared.storedError_];
+// readyPromise.reject(storedError);
+// readyPromise.promise.catch(() => {});
+// closedPromise.reject(storedError);
+// closedPromise.promise.catch(() => {});
+// }
+// }
+
+// abort(reason: shared.ErrorResult): Promise<void> {
+// if (!ws.isWritableStreamDefaultWriter(this)) {
+// return Promise.reject(new TypeError());
+// }
+// if (this[ws.ownerWritableStream_] === undefined) {
+// return Promise.reject(
+// new TypeError("Writer is not connected to a stream")
+// );
+// }
+// return ws.writableStreamDefaultWriterAbort(this, reason);
+// }
+
+// close(): Promise<void> {
+// if (!ws.isWritableStreamDefaultWriter(this)) {
+// return Promise.reject(new TypeError());
+// }
+// const stream = this[ws.ownerWritableStream_];
+// if (stream === undefined) {
+// return Promise.reject(
+// new TypeError("Writer is not connected to a stream")
+// );
+// }
+// if (ws.writableStreamCloseQueuedOrInFlight(stream)) {
+// return Promise.reject(new TypeError());
+// }
+// return ws.writableStreamDefaultWriterClose(this);
+// }
+
+// releaseLock(): void {
+// const stream = this[ws.ownerWritableStream_];
+// if (stream === undefined) {
+// return;
+// }
+// // Assert: stream.[[writer]] is not undefined.
+// ws.writableStreamDefaultWriterRelease(this);
+// }
+
+// write(chunk: InputType): Promise<void> {
+// if (!ws.isWritableStreamDefaultWriter(this)) {
+// return Promise.reject(new TypeError());
+// }
+// if (this[ws.ownerWritableStream_] === undefined) {
+// return Promise.reject(
+// new TypeError("Writer is not connected to a stream")
+// );
+// }
+// return ws.writableStreamDefaultWriterWrite(this, chunk);
+// }
+
+// get closed(): Promise<void> {
+// if (!ws.isWritableStreamDefaultWriter(this)) {
+// return Promise.reject(new TypeError());
+// }
+// return this[ws.closedPromise_].promise;
+// }
+
+// get desiredSize(): number | null {
+// if (!ws.isWritableStreamDefaultWriter(this)) {
+// throw new TypeError();
+// }
+// if (this[ws.ownerWritableStream_] === undefined) {
+// throw new TypeError("Writer is not connected to stream");
+// }
+// return ws.writableStreamDefaultWriterGetDesiredSize(this);
+// }
+
+// get ready(): Promise<void> {
+// if (!ws.isWritableStreamDefaultWriter(this)) {
+// return Promise.reject(new TypeError());
+// }
+// return this[ws.readyPromise_].promise;
+// }
+// }
diff --git a/cli/js/web/streams/writable-stream.ts b/cli/js/web/streams/writable-stream.ts
new file mode 100644
index 000000000..a6131c5d0
--- /dev/null
+++ b/cli/js/web/streams/writable-stream.ts
@@ -0,0 +1,118 @@
+// TODO reenable this code when we enable writableStreams and transport types
+// // Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// // Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+
+// /**
+// * streams/writable-stream - WritableStream class implementation
+// * Part of Stardazed
+// * (c) 2018-Present by Arthur Langereis - @zenmumbler
+// * https://github.com/stardazed/sd-streams
+// */
+
+// import * as ws from "./writable-internals.ts";
+// import * as shared from "./shared-internals.ts";
+// import {
+// WritableStreamDefaultController,
+// setUpWritableStreamDefaultControllerFromUnderlyingSink
+// } from "./writable-stream-default-controller.ts";
+// import { WritableStreamDefaultWriter } from "./writable-stream-default-writer.ts";
+// import { QueuingStrategy, QueuingStrategySizeCallback } from "../dom_types.ts";
+
+// export class WritableStream<InputType> {
+// [shared.state_]: ws.WritableStreamState;
+// [shared.storedError_]: shared.ErrorResult;
+// [ws.backpressure_]: boolean;
+// [ws.closeRequest_]: shared.ControlledPromise<void> | undefined;
+// [ws.inFlightWriteRequest_]: shared.ControlledPromise<void> | undefined;
+// [ws.inFlightCloseRequest_]: shared.ControlledPromise<void> | undefined;
+// [ws.pendingAbortRequest_]: ws.AbortRequest | undefined;
+// [ws.writableStreamController_]:
+// | ws.WritableStreamDefaultController<InputType>
+// | undefined;
+// [ws.writer_]: ws.WritableStreamDefaultWriter<InputType> | undefined;
+// [ws.writeRequests_]: Array<shared.ControlledPromise<void>>;
+
+// constructor(
+// sink: ws.WritableStreamSink<InputType> = {},
+// strategy: QueuingStrategy<InputType> = {}
+// ) {
+// ws.initializeWritableStream(this);
+// const sizeFunc = strategy.size;
+// const stratHWM = strategy.highWaterMark;
+// if (sink.type !== undefined) {
+// throw new RangeError("The type of an underlying sink must be undefined");
+// }
+
+// const sizeAlgorithm = shared.makeSizeAlgorithmFromSizeFunction(sizeFunc);
+// const highWaterMark = shared.validateAndNormalizeHighWaterMark(
+// stratHWM === undefined ? 1 : stratHWM
+// );
+
+// setUpWritableStreamDefaultControllerFromUnderlyingSink(
+// this,
+// sink,
+// highWaterMark,
+// sizeAlgorithm
+// );
+// }
+
+// get locked(): boolean {
+// if (!ws.isWritableStream(this)) {
+// throw new TypeError();
+// }
+// return ws.isWritableStreamLocked(this);
+// }
+
+// abort(reason?: shared.ErrorResult): Promise<void> {
+// if (!ws.isWritableStream(this)) {
+// return Promise.reject(new TypeError());
+// }
+// if (ws.isWritableStreamLocked(this)) {
+// return Promise.reject(new TypeError("Cannot abort a locked stream"));
+// }
+// return ws.writableStreamAbort(this, reason);
+// }
+
+// getWriter(): ws.WritableStreamWriter<InputType> {
+// if (!ws.isWritableStream(this)) {
+// throw new TypeError();
+// }
+// return new WritableStreamDefaultWriter(this);
+// }
+// }
+
+// export function createWritableStream<InputType>(
+// startAlgorithm: ws.StartAlgorithm,
+// writeAlgorithm: ws.WriteAlgorithm<InputType>,
+// closeAlgorithm: ws.CloseAlgorithm,
+// abortAlgorithm: ws.AbortAlgorithm,
+// highWaterMark?: number,
+// sizeAlgorithm?: QueuingStrategySizeCallback<InputType>
+// ): WritableStream<InputType> {
+// if (highWaterMark === undefined) {
+// highWaterMark = 1;
+// }
+// if (sizeAlgorithm === undefined) {
+// sizeAlgorithm = (): number => 1;
+// }
+// // Assert: ! IsNonNegativeNumber(highWaterMark) is true.
+
+// const stream = Object.create(WritableStream.prototype) as WritableStream<
+// InputType
+// >;
+// ws.initializeWritableStream(stream);
+// const controller = Object.create(
+// WritableStreamDefaultController.prototype
+// ) as WritableStreamDefaultController<InputType>;
+// ws.setUpWritableStreamDefaultController(
+// stream,
+// controller,
+// startAlgorithm,
+// writeAlgorithm,
+// closeAlgorithm,
+// abortAlgorithm,
+// highWaterMark,
+// sizeAlgorithm
+// );
+// return stream;
+// }
diff --git a/cli/js/web/text_encoding.ts b/cli/js/web/text_encoding.ts
new file mode 100644
index 000000000..0709e7123
--- /dev/null
+++ b/cli/js/web/text_encoding.ts
@@ -0,0 +1,461 @@
+// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
+// The following code is based off of text-encoding at:
+// https://github.com/inexorabletash/text-encoding
+//
+// Anyone is free to copy, modify, publish, use, compile, sell, or
+// distribute this software, either in source code form or as a compiled
+// binary, for any purpose, commercial or non-commercial, and by any
+// means.
+//
+// In jurisdictions that recognize copyright laws, the author or authors
+// of this software dedicate any and all copyright interest in the
+// software to the public domain. We make this dedication for the benefit
+// of the public at large and to the detriment of our heirs and
+// successors. We intend this dedication to be an overt act of
+// relinquishment in perpetuity of all present and future rights to this
+// software under copyright law.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+// IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+// OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+// OTHER DEALINGS IN THE SOFTWARE.
+
+import * as base64 from "./base64.ts";
+import { decodeUtf8 } from "./decode_utf8.ts";
+import * as domTypes from "./dom_types.ts";
+import { encodeUtf8 } from "./encode_utf8.ts";
+
// Sentinel returned by a decoder handler when more input bytes are needed
// before a code point can be produced.
const CONTINUE = null;
// Sentinel token produced by Stream.read() once all input has been consumed.
const END_OF_STREAM = -1;
// Sentinel returned by encoder/decoder handlers to signal end of stream.
const FINISHED = -1;
+
+function decoderError(fatal: boolean): number | never {
+ if (fatal) {
+ throw new TypeError("Decoder error.");
+ }
+ return 0xfffd; // default code point
+}
+
+function inRange(a: number, min: number, max: number): boolean {
+ return min <= a && a <= max;
+}
+
+function isASCIIByte(a: number): boolean {
+ return inRange(a, 0x00, 0x7f);
+}
+
+function stringToCodePoints(input: string): number[] {
+ const u: number[] = [];
+ for (const c of input) {
+ u.push(c.codePointAt(0)!);
+ }
+ return u;
+}
+
+class UTF8Encoder implements Encoder {
+ handler(codePoint: number): number | number[] {
+ if (codePoint === END_OF_STREAM) {
+ return FINISHED;
+ }
+
+ if (inRange(codePoint, 0x00, 0x7f)) {
+ return codePoint;
+ }
+
+ let count: number;
+ let offset: number;
+ if (inRange(codePoint, 0x0080, 0x07ff)) {
+ count = 1;
+ offset = 0xc0;
+ } else if (inRange(codePoint, 0x0800, 0xffff)) {
+ count = 2;
+ offset = 0xe0;
+ } else if (inRange(codePoint, 0x10000, 0x10ffff)) {
+ count = 3;
+ offset = 0xf0;
+ } else {
+ throw TypeError(`Code point out of range: \\x${codePoint.toString(16)}`);
+ }
+
+ const bytes = [(codePoint >> (6 * count)) + offset];
+
+ while (count > 0) {
+ const temp = codePoint >> (6 * (count - 1));
+ bytes.push(0x80 | (temp & 0x3f));
+ count--;
+ }
+
+ return bytes;
+ }
+}
+
+/** Decodes a string of data which has been encoded using base-64. */
+export function atob(s: string): string {
+ s = String(s);
+ s = s.replace(/[\t\n\f\r ]/g, "");
+
+ if (s.length % 4 === 0) {
+ s = s.replace(/==?$/, "");
+ }
+
+ const rem = s.length % 4;
+ if (rem === 1 || /[^+/0-9A-Za-z]/.test(s)) {
+ // TODO: throw `DOMException`
+ throw new TypeError("The string to be decoded is not correctly encoded");
+ }
+
+ // base64-js requires length exactly times of 4
+ if (rem > 0) {
+ s = s.padEnd(s.length + (4 - rem), "=");
+ }
+
+ const byteArray: Uint8Array = base64.toByteArray(s);
+ let result = "";
+ for (let i = 0; i < byteArray.length; i++) {
+ result += String.fromCharCode(byteArray[i]);
+ }
+ return result;
+}
+
+/** Creates a base-64 ASCII string from the input string. */
+export function btoa(s: string): string {
+ const byteArray = [];
+ for (let i = 0; i < s.length; i++) {
+ const charCode = s[i].charCodeAt(0);
+ if (charCode > 0xff) {
+ throw new TypeError(
+ "The string to be encoded contains characters " +
+ "outside of the Latin1 range."
+ );
+ }
+ byteArray.push(charCode);
+ }
+ const result = base64.fromByteArray(Uint8Array.from(byteArray));
+ return result;
+}
+
interface DecoderOptions {
  fatal?: boolean; // throw on malformed input instead of substituting U+FFFD
  ignoreBOM?: boolean; // only honored by the utf-8 path (see SingleByteDecoder)
}

interface Decoder {
  // Consume one byte (or END_OF_STREAM) from `stream`; returns a code point,
  // CONTINUE (null) when more bytes are needed, or FINISHED at end of stream.
  handler(stream: Stream, byte: number): number | null;
}

interface Encoder {
  // Convert one code point into its encoded byte(s); FINISHED at end of stream.
  handler(codePoint: number): number | number[];
}
+
+class SingleByteDecoder implements Decoder {
+ private _index: number[];
+ private _fatal: boolean;
+
+ constructor(index: number[], options: DecoderOptions) {
+ if (options.ignoreBOM) {
+ throw new TypeError("Ignoring the BOM is available only with utf-8.");
+ }
+ this._fatal = options.fatal || false;
+ this._index = index;
+ }
+ handler(stream: Stream, byte: number): number {
+ if (byte === END_OF_STREAM) {
+ return FINISHED;
+ }
+ if (isASCIIByte(byte)) {
+ return byte;
+ }
+ const codePoint = this._index[byte - 0x80];
+
+ if (codePoint == null) {
+ return decoderError(this._fatal);
+ }
+
+ return codePoint;
+ }
+}
+
// The encodingMap is a hash of labels that are indexed by the canonical
// encoding.
+const encodingMap: { [key: string]: string[] } = {
+ "windows-1252": [
+ "ansi_x3.4-1968",
+ "ascii",
+ "cp1252",
+ "cp819",
+ "csisolatin1",
+ "ibm819",
+ "iso-8859-1",
+ "iso-ir-100",
+ "iso8859-1",
+ "iso88591",
+ "iso_8859-1",
+ "iso_8859-1:1987",
+ "l1",
+ "latin1",
+ "us-ascii",
+ "windows-1252",
+ "x-cp1252"
+ ],
+ "utf-8": ["unicode-1-1-utf-8", "utf-8", "utf8"]
+};
+// We convert these into a Map where every label resolves to its canonical
+// encoding type.
+const encodings = new Map<string, string>();
+for (const key of Object.keys(encodingMap)) {
+ const labels = encodingMap[key];
+ for (const label of labels) {
+ encodings.set(label, key);
+ }
+}
+
// A map of functions that return new instances of a decoder indexed by the
// encoding type.
const decoders = new Map<string, (options: DecoderOptions) => Decoder>();

// Single byte decoders are an array of code point lookups
// (one entry per byte value 0x80–0xFF; bytes below 0x80 are ASCII).
const encodingIndexes = new Map<string, number[]>();
// prettier-ignore
encodingIndexes.set("windows-1252", [
  8364,129,8218,402,8222,8230,8224,8225,710,8240,352,8249,338,141,381,143,144,
  8216,8217,8220,8221,8226,8211,8212,732,8482,353,8250,339,157,382,376,160,161,
  162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,
  181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,
  200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,
  219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,
  238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255
]);
// Register a SingleByteDecoder factory for every lookup-table encoding.
// Note: utf-8 is intentionally absent; TextDecoder.decode() fast-paths it.
for (const [key, index] of encodingIndexes) {
  decoders.set(
    key,
    (options: DecoderOptions): SingleByteDecoder => {
      return new SingleByteDecoder(index, options);
    }
  );
}
+
+function codePointsToString(codePoints: number[]): string {
+ let s = "";
+ for (const cp of codePoints) {
+ s += String.fromCodePoint(cp);
+ }
+ return s;
+}
+
+class Stream {
+ private _tokens: number[];
+ constructor(tokens: number[] | Uint8Array) {
+ this._tokens = [].slice.call(tokens);
+ this._tokens.reverse();
+ }
+
+ endOfStream(): boolean {
+ return !this._tokens.length;
+ }
+
+ read(): number {
+ return !this._tokens.length ? END_OF_STREAM : this._tokens.pop()!;
+ }
+
+ prepend(token: number | number[]): void {
+ if (Array.isArray(token)) {
+ while (token.length) {
+ this._tokens.push(token.pop()!);
+ }
+ } else {
+ this._tokens.push(token);
+ }
+ }
+
+ push(token: number | number[]): void {
+ if (Array.isArray(token)) {
+ while (token.length) {
+ this._tokens.unshift(token.shift()!);
+ }
+ } else {
+ this._tokens.unshift(token);
+ }
+ }
+}
+
export interface TextDecodeOptions {
  // Streaming decode is not supported; decode() throws if this is truthy.
  stream?: false;
}

export interface TextDecoderOptions {
  fatal?: boolean; // throw on malformed input instead of substituting U+FFFD
  ignoreBOM?: boolean; // forwarded to the utf-8 decoder; presumably keeps a
  // leading BOM in the output — confirm against decode_utf8.ts
}
+
+type EitherArrayBuffer = SharedArrayBuffer | ArrayBuffer;
+
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function isEitherArrayBuffer(x: any): x is EitherArrayBuffer {
+ return x instanceof SharedArrayBuffer || x instanceof ArrayBuffer;
+}
+
+export class TextDecoder {
+ private _encoding: string;
+
+ /** Returns encoding's name, lowercased. */
+ get encoding(): string {
+ return this._encoding;
+ }
+ /** Returns `true` if error mode is "fatal", and `false` otherwise. */
+ readonly fatal: boolean = false;
+ /** Returns `true` if ignore BOM flag is set, and `false` otherwise. */
+ readonly ignoreBOM: boolean = false;
+
+ constructor(label = "utf-8", options: TextDecoderOptions = { fatal: false }) {
+ if (options.ignoreBOM) {
+ this.ignoreBOM = true;
+ }
+ if (options.fatal) {
+ this.fatal = true;
+ }
+ label = String(label)
+ .trim()
+ .toLowerCase();
+ const encoding = encodings.get(label);
+ if (!encoding) {
+ throw new RangeError(
+ `The encoding label provided ('${label}') is invalid.`
+ );
+ }
+ if (!decoders.has(encoding) && encoding !== "utf-8") {
+ throw new TypeError(`Internal decoder ('${encoding}') not found.`);
+ }
+ this._encoding = encoding;
+ }
+
+ /** Returns the result of running encoding's decoder. */
+ decode(
+ input?: domTypes.BufferSource,
+ options: TextDecodeOptions = { stream: false }
+ ): string {
+ if (options.stream) {
+ throw new TypeError("Stream not supported.");
+ }
+
+ let bytes: Uint8Array;
+ if (input instanceof Uint8Array) {
+ bytes = input;
+ } else if (isEitherArrayBuffer(input)) {
+ bytes = new Uint8Array(input);
+ } else if (
+ typeof input === "object" &&
+ "buffer" in input &&
+ isEitherArrayBuffer(input.buffer)
+ ) {
+ bytes = new Uint8Array(input.buffer, input.byteOffset, input.byteLength);
+ } else {
+ bytes = new Uint8Array(0);
+ }
+
+ // For performance reasons we utilise a highly optimised decoder instead of
+ // the general decoder.
+ if (this._encoding === "utf-8") {
+ return decodeUtf8(bytes, this.fatal, this.ignoreBOM);
+ }
+
+ const decoder = decoders.get(this._encoding)!({
+ fatal: this.fatal,
+ ignoreBOM: this.ignoreBOM
+ });
+ const inputStream = new Stream(bytes);
+ const output: number[] = [];
+
+ while (true) {
+ const result = decoder.handler(inputStream, inputStream.read());
+ if (result === FINISHED) {
+ break;
+ }
+
+ if (result !== CONTINUE) {
+ output.push(result);
+ }
+ }
+
+ if (output.length > 0 && output[0] === 0xfeff) {
+ output.shift();
+ }
+
+ return codePointsToString(output);
+ }
+
+ get [Symbol.toStringTag](): string {
+ return "TextDecoder";
+ }
+}
+
+interface TextEncoderEncodeIntoResult {
+ read: number;
+ written: number;
+}
+
+export class TextEncoder {
+ /** Returns "utf-8". */
+ readonly encoding = "utf-8";
+ /** Returns the result of running UTF-8's encoder. */
+ encode(input = ""): Uint8Array {
+ // For performance reasons we utilise a highly optimised decoder instead of
+ // the general decoder.
+ if (this.encoding === "utf-8") {
+ return encodeUtf8(input);
+ }
+
+ const encoder = new UTF8Encoder();
+ const inputStream = new Stream(stringToCodePoints(input));
+ const output: number[] = [];
+
+ while (true) {
+ const result = encoder.handler(inputStream.read());
+ if (result === FINISHED) {
+ break;
+ }
+ if (Array.isArray(result)) {
+ output.push(...result);
+ } else {
+ output.push(result);
+ }
+ }
+
+ return new Uint8Array(output);
+ }
+ encodeInto(input: string, dest: Uint8Array): TextEncoderEncodeIntoResult {
+ const encoder = new UTF8Encoder();
+ const inputStream = new Stream(stringToCodePoints(input));
+
+ let written = 0;
+ let read = 0;
+ while (true) {
+ const result = encoder.handler(inputStream.read());
+ if (result === FINISHED) {
+ break;
+ }
+ read++;
+ if (Array.isArray(result)) {
+ dest.set(result, written);
+ written += result.length;
+ if (result.length > 3) {
+ // increment read a second time if greater than U+FFFF
+ read++;
+ }
+ } else {
+ dest[written] = result;
+ written++;
+ }
+ }
+
+ return {
+ read,
+ written
+ };
+ }
+ get [Symbol.toStringTag](): string {
+ return "TextEncoder";
+ }
+}
diff --git a/cli/js/web/url.ts b/cli/js/web/url.ts
new file mode 100644
index 000000000..4cf9ae257
--- /dev/null
+++ b/cli/js/web/url.ts
@@ -0,0 +1,396 @@
+// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
+import * as urlSearchParams from "./url_search_params.ts";
+import * as domTypes from "./dom_types.ts";
+import { getRandomValues } from "../get_random_values.ts";
+import { customInspect } from "../console.ts";
+
/** Decomposed components of a URL as produced by `parse` and stored on
 * `URL._parts`. Absent components are "" — except `query`, where `null`
 * (written by the `search` setter) represents "no query string at all". */
interface URLParts {
  protocol: string;
  username: string;
  password: string;
  hostname: string;
  port: string;
  path: string;
  query: string | null;
  hash: string;
}
+
// Regex fragments for individual URL components; composed below into
// `urlRegExp` (whole URL) and `authorityRegExp` (user:pass@host:port).
const patterns = {
  protocol: "(?:([a-z]+):)",
  authority: "(?://([^/?#]*))",
  path: "([^?#]*)",
  query: "(\\?[^#]*)",
  hash: "(#.*)",

  // Sub-patterns matched against the authority section only.
  authentication: "(?:([^:]*)(?::([^@]*))?@)",
  hostname: "([^:]+)",
  port: "(?::(\\d+))"
};

// Capture groups: 1 = protocol, 2 = authority, 3 = path, 4 = query, 5 = hash.
const urlRegExp = new RegExp(
  `^${patterns.protocol}?${patterns.authority}?${patterns.path}${patterns.query}?${patterns.hash}?`
);

// Capture groups: 1 = username, 2 = password, 3 = hostname, 4 = port.
const authorityRegExp = new RegExp(
  `^${patterns.authentication}?${patterns.hostname}${patterns.port}?$`
);

// Mutating URLSearchParams methods that URL._updateSearchParams wraps so
// mutations are reflected back into the owning URL's search string.
const searchParamsMethods: Array<keyof urlSearchParams.URLSearchParams> = [
  "append",
  "delete",
  "set"
];
+
+function parse(url: string): URLParts | undefined {
+ const urlMatch = urlRegExp.exec(url);
+ if (urlMatch) {
+ const [, , authority] = urlMatch;
+ const authorityMatch = authority
+ ? authorityRegExp.exec(authority)
+ : [null, null, null, null, null];
+ if (authorityMatch) {
+ return {
+ protocol: urlMatch[1] || "",
+ username: authorityMatch[1] || "",
+ password: authorityMatch[2] || "",
+ hostname: authorityMatch[3] || "",
+ port: authorityMatch[4] || "",
+ path: urlMatch[3] || "",
+ query: urlMatch[4] || "",
+ hash: urlMatch[5] || ""
+ };
+ }
+ }
+ return undefined;
+}
+
+// Based on https://github.com/kelektiv/node-uuid
+// TODO(kevinkassimo): Use deno_std version once possible.
+function generateUUID(): string {
+ return "00000000-0000-4000-8000-000000000000".replace(/[0]/g, (): string =>
+ // random integer from 0 to 15 as a hex digit.
+ (getRandomValues(new Uint8Array(1))[0] % 16).toString(16)
+ );
+}
+
// Registry backing URL.createObjectURL / URL.revokeObjectURL.
// Keep it outside of URL to avoid any attempts of access.
export const blobURLMap = new Map<string, domTypes.Blob>();
+
+function isAbsolutePath(path: string): boolean {
+ return path.startsWith("/");
+}
+
+// Resolves `.`s and `..`s where possible.
+// Preserves repeating and trailing `/`s by design.
+function normalizePath(path: string): string {
+ const isAbsolute = isAbsolutePath(path);
+ path = path.replace(/^\//, "");
+ const pathSegments = path.split("/");
+
+ const newPathSegments: string[] = [];
+ for (let i = 0; i < pathSegments.length; i++) {
+ const previous = newPathSegments[newPathSegments.length - 1];
+ if (
+ pathSegments[i] == ".." &&
+ previous != ".." &&
+ (previous != undefined || isAbsolute)
+ ) {
+ newPathSegments.pop();
+ } else if (pathSegments[i] != ".") {
+ newPathSegments.push(pathSegments[i]);
+ }
+ }
+
+ let newPath = newPathSegments.join("/");
+ if (!isAbsolute) {
+ if (newPathSegments.length == 0) {
+ newPath = ".";
+ }
+ } else {
+ newPath = `/${newPath}`;
+ }
+ return newPath;
+}
+
// Standard URL basing logic, applied to paths: resolves `path` against
// `basePath`. Absolute paths are returned as-is (normalized); relative
// paths are joined onto the base's directory part.
// Throws TypeError when `basePath` is not absolute.
function resolvePathFromBase(path: string, basePath: string): string {
  const normalizedPath = normalizePath(path);
  if (isAbsolutePath(normalizedPath)) {
    return normalizedPath;
  }
  const normalizedBasePath = normalizePath(basePath);
  if (!isAbsolutePath(normalizedBasePath)) {
    throw new TypeError("Base path must be absolute.");
  }

  // Special case: an empty relative path resolves to the base itself.
  if (path == "") {
    return normalizedBasePath;
  }

  // Remove everything after the last `/` in `normalizedBasePath`.
  const prefix = normalizedBasePath.replace(/[^\/]*$/, "");
  // If `normalizedPath` ends with a whole `.` or `..` segment, append a
  // trailing `/` so the final normalizePath treats it as a directory
  // (the lookbehind leaves names like "a.b" untouched).
  const suffix = normalizedPath.replace(/(?<=(^|\/)(\.|\.\.))$/, "/");

  return normalizePath(prefix + suffix);
}
+
/** A partial, regex-based implementation of the WHATWG `URL` class.
 * Parsed components live in `_parts`; `href` is serialized from them on
 * demand. NOTE(review): not fully spec-compliant (e.g. no host punycoding,
 * out-of-range ports wrap) — covers the common cases only. */
export class URL {
  // The parsed components every getter/setter operates on.
  private _parts: URLParts;
  // Assigned via _updateSearchParams() in the constructor, hence the
  // definite-assignment `!`.
  private _searchParams!: urlSearchParams.URLSearchParams;

  // Custom formatting hook for Deno's console inspection.
  [customInspect](): string {
    const keys = [
      "href",
      "origin",
      "protocol",
      "username",
      "password",
      "host",
      "hostname",
      "port",
      "pathname",
      "hash",
      "search"
    ];
    const objectString = keys
      .map((key: string) => `${key}: "${this[key as keyof this] || ""}"`)
      .join(", ");
    return `URL { ${objectString} }`;
  }

  // Rebuilds _searchParams from the current search string and wraps its
  // mutating methods (append/delete/set) so every mutation is written back
  // into this URL's `search` property.
  private _updateSearchParams(): void {
    const searchParams = new urlSearchParams.URLSearchParams(this.search);

    for (const methodName of searchParamsMethods) {
      /* eslint-disable @typescript-eslint/no-explicit-any */
      const method: (...args: any[]) => any = searchParams[methodName];
      searchParams[methodName] = (...args: unknown[]): any => {
        method.apply(searchParams, args);
        this.search = searchParams.toString();
      };
      /* eslint-enable */
    }
    this._searchParams = searchParams;

    // Give URLSearchParams a back-reference for its updateSteps();
    // convert to `any` that has avoided the private limit
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    (this._searchParams as any).url = this;
  }

  get hash(): string {
    return this._parts.hash;
  }

  set hash(value: string) {
    value = unescape(String(value));
    if (!value) {
      this._parts.hash = "";
    } else {
      if (value.charAt(0) !== "#") {
        value = `#${value}`;
      }
      // hashes can contain % and # unescaped, so undo their escaping
      this._parts.hash = escape(value)
        .replace(/%25/g, "%")
        .replace(/%23/g, "#");
    }
  }

  get host(): string {
    return `${this.hostname}${this.port ? `:${this.port}` : ""}`;
  }

  set host(value: string) {
    value = String(value);
    // Reuse the full parser by parsing the value as a dummy http URL.
    const url = new URL(`http://${value}`);
    this._parts.hostname = url.hostname;
    this._parts.port = url.port;
  }

  get hostname(): string {
    return this._parts.hostname;
  }

  set hostname(value: string) {
    value = String(value);
    this._parts.hostname = encodeURIComponent(value);
  }

  // Serializes the full URL from its parts.
  get href(): string {
    const authentication =
      this.username || this.password
        ? `${this.username}${this.password ? ":" + this.password : ""}@`
        : "";
    // `file:` URLs get the `//` even without a host.
    let slash = "";
    if (this.host || this.protocol === "file:") {
      slash = "//";
    }
    return `${this.protocol}${slash}${authentication}${this.host}${this.pathname}${this.search}${this.hash}`;
  }

  set href(value: string) {
    value = String(value);
    if (value !== this.href) {
      const url = new URL(value);
      this._parts = { ...url._parts };
      this._updateSearchParams();
    }
  }

  get origin(): string {
    if (this.host) {
      return `${this.protocol}//${this.host}`;
    }
    // Host-less URLs have an opaque origin.
    return "null";
  }

  get password(): string {
    return this._parts.password;
  }

  set password(value: string) {
    value = String(value);
    this._parts.password = encodeURIComponent(value);
  }

  get pathname(): string {
    return this._parts.path ? this._parts.path : "/";
  }

  set pathname(value: string) {
    value = unescape(String(value));
    if (!value || value.charAt(0) !== "/") {
      value = `/${value}`;
    }
    // paths can contain % unescaped
    this._parts.path = escape(value).replace(/%25/g, "%");
  }

  get port(): string {
    return this._parts.port;
  }

  set port(value: string) {
    const port = parseInt(String(value), 10);
    // Non-numeric input clears the port; numeric input wraps into the
    // 16-bit range. NOTE(review): the URL spec ignores out-of-range ports
    // instead of wrapping — confirm before relying on this.
    this._parts.port = isNaN(port)
      ? ""
      : Math.max(0, port % 2 ** 16).toString();
  }

  get protocol(): string {
    return `${this._parts.protocol}:`;
  }

  set protocol(value: string) {
    value = String(value);
    if (value) {
      // Accept both "http" and "http:" by stripping one trailing colon.
      if (value.charAt(value.length - 1) === ":") {
        value = value.slice(0, -1);
      }
      this._parts.protocol = encodeURIComponent(value);
    }
  }

  get search(): string {
    // Both `null` (no query) and "" serialize to the empty string.
    if (this._parts.query === null || this._parts.query === "") {
      return "";
    }

    return this._parts.query;
  }

  set search(value: string) {
    value = String(value);
    let query: string | null;

    // `null` marks "no query at all"; otherwise ensure a leading "?".
    if (value === "") {
      query = null;
    } else if (value.charAt(0) !== "?") {
      query = `?${value}`;
    } else {
      query = value;
    }

    this._parts.query = query;
    this._updateSearchParams();
  }

  get username(): string {
    return this._parts.username;
  }

  set username(value: string) {
    value = String(value);
    this._parts.username = encodeURIComponent(value);
  }

  get searchParams(): urlSearchParams.URLSearchParams {
    return this._searchParams;
  }

  /** Builds a URL from `url`, optionally resolved against `base`.
   * Throws TypeError when either string fails to parse, when `base` has no
   * protocol, or when a relative `url` is given without a base. */
  constructor(url: string, base?: string | URL) {
    let baseParts: URLParts | undefined;
    if (base) {
      baseParts = typeof base === "string" ? parse(base) : base._parts;
      if (!baseParts || baseParts.protocol == "") {
        throw new TypeError("Invalid base URL.");
      }
    }

    const urlParts = parse(url);
    if (!urlParts) {
      throw new TypeError("Invalid URL.");
    }

    if (urlParts.protocol) {
      // Absolute URL: any base is ignored.
      this._parts = urlParts;
    } else if (baseParts) {
      // Relative URL: inherit everything but path/query/hash from the base.
      this._parts = {
        protocol: baseParts.protocol,
        username: baseParts.username,
        password: baseParts.password,
        hostname: baseParts.hostname,
        port: baseParts.port,
        path: resolvePathFromBase(urlParts.path, baseParts.path || "/"),
        query: urlParts.query,
        hash: urlParts.hash
      };
    } else {
      throw new TypeError("URL requires a base URL.");
    }
    this._updateSearchParams();
  }

  toString(): string {
    return this.href;
  }

  toJSON(): string {
    return this.href;
  }

  // Registers `b` in blobURLMap under a fresh `blob:` URL and returns it.
  // TODO(kevinkassimo): implement MediaSource version in the future.
  static createObjectURL(b: domTypes.Blob): string {
    const origin = globalThis.location.origin || "http://deno-opaque-origin";
    const key = `blob:${origin}/${generateUUID()}`;
    blobURLMap.set(key, b);
    return key;
  }

  // Removes a previously registered `blob:` URL; non-blob URLs are ignored.
  static revokeObjectURL(url: string): void {
    let urlObject;
    try {
      urlObject = new URL(url);
    } catch {
      throw new TypeError("Provided URL string is not valid");
    }
    if (urlObject.protocol !== "blob:") {
      return;
    }
    // Origin match check seems irrelevant for now, unless we implement
    // persistent storage per globalThis.location.origin at some point.
    blobURLMap.delete(url);
  }
}
diff --git a/cli/js/web/url_search_params.ts b/cli/js/web/url_search_params.ts
new file mode 100644
index 000000000..2248a5388
--- /dev/null
+++ b/cli/js/web/url_search_params.ts
@@ -0,0 +1,311 @@
+// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
+import { URL } from "./url.ts";
+import { requiredArguments } from "../util.ts";
+
+// Returns whether o is iterable.
+// @internal
+export function isIterable<T, P extends keyof T, K extends T[P]>(
+ o: T
+): o is T & Iterable<[P, K]> {
+ // checks for null and undefined
+ if (o == null) {
+ return false;
+ }
+ return (
+ typeof ((o as unknown) as Iterable<[P, K]>)[Symbol.iterator] === "function"
+ );
+}
+
+export class URLSearchParams {
+ private params: Array<[string, string]> = [];
+ private url: URL | null = null;
+
+ constructor(init: string | string[][] | Record<string, string> = "") {
+ if (typeof init === "string") {
+ this._handleStringInitialization(init);
+ return;
+ }
+
+ if (Array.isArray(init) || isIterable(init)) {
+ this._handleArrayInitialization(init);
+ return;
+ }
+
+ if (Object(init) !== init) {
+ return;
+ }
+
+ if (init instanceof URLSearchParams) {
+ this.params = init.params;
+ return;
+ }
+
+ // Overload: record<USVString, USVString>
+ for (const key of Object.keys(init)) {
+ this.append(key, init[key]);
+ }
+ }
+
+ private updateSteps(): void {
+ if (this.url === null) {
+ return;
+ }
+
+ let query: string | null = this.toString();
+ if (query === "") {
+ query = null;
+ }
+
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ (this.url as any)._parts.query = query;
+ }
+
+ /** Appends a specified key/value pair as a new search parameter.
+ *
+ * searchParams.append('name', 'first');
+ * searchParams.append('name', 'second');
+ */
+ append(name: string, value: string): void {
+ requiredArguments("URLSearchParams.append", arguments.length, 2);
+ this.params.push([String(name), String(value)]);
+ this.updateSteps();
+ }
+
+ /** Deletes the given search parameter and its associated value,
+ * from the list of all search parameters.
+ *
+ * searchParams.delete('name');
+ */
+ delete(name: string): void {
+ requiredArguments("URLSearchParams.delete", arguments.length, 1);
+ name = String(name);
+ let i = 0;
+ while (i < this.params.length) {
+ if (this.params[i][0] === name) {
+ this.params.splice(i, 1);
+ } else {
+ i++;
+ }
+ }
+ this.updateSteps();
+ }
+
+ /** Returns all the values associated with a given search parameter
+ * as an array.
+ *
+ * searchParams.getAll('name');
+ */
+ getAll(name: string): string[] {
+ requiredArguments("URLSearchParams.getAll", arguments.length, 1);
+ name = String(name);
+ const values = [];
+ for (const entry of this.params) {
+ if (entry[0] === name) {
+ values.push(entry[1]);
+ }
+ }
+
+ return values;
+ }
+
+ /** Returns the first value associated to the given search parameter.
+ *
+ * searchParams.get('name');
+ */
+ get(name: string): string | null {
+ requiredArguments("URLSearchParams.get", arguments.length, 1);
+ name = String(name);
+ for (const entry of this.params) {
+ if (entry[0] === name) {
+ return entry[1];
+ }
+ }
+
+ return null;
+ }
+
+ /** Returns a Boolean that indicates whether a parameter with the
+ * specified name exists.
+ *
+ * searchParams.has('name');
+ */
+ has(name: string): boolean {
+ requiredArguments("URLSearchParams.has", arguments.length, 1);
+ name = String(name);
+ return this.params.some((entry): boolean => entry[0] === name);
+ }
+
+ /** Sets the value associated with a given search parameter to the
+ * given value. If there were several matching values, this method
+ * deletes the others. If the search parameter doesn't exist, this
+ * method creates it.
+ *
+ * searchParams.set('name', 'value');
+ */
+ set(name: string, value: string): void {
+ requiredArguments("URLSearchParams.set", arguments.length, 2);
+
+ // If there are any name-value pairs whose name is name, in list,
+ // set the value of the first such name-value pair to value
+ // and remove the others.
+ name = String(name);
+ value = String(value);
+ let found = false;
+ let i = 0;
+ while (i < this.params.length) {
+ if (this.params[i][0] === name) {
+ if (!found) {
+ this.params[i][1] = value;
+ found = true;
+ i++;
+ } else {
+ this.params.splice(i, 1);
+ }
+ } else {
+ i++;
+ }
+ }
+
+ // Otherwise, append a new name-value pair whose name is name
+ // and value is value, to list.
+ if (!found) {
+ this.append(name, value);
+ }
+
+ this.updateSteps();
+ }
+
+ /** Sort all key/value pairs contained in this object in place and
+ * return undefined. The sort order is according to Unicode code
+ * points of the keys.
+ *
+ * searchParams.sort();
+ */
+ sort(): void {
+ this.params = this.params.sort((a, b): number =>
+ a[0] === b[0] ? 0 : a[0] > b[0] ? 1 : -1
+ );
+ this.updateSteps();
+ }
+
+ /** Calls a function for each element contained in this object in
+ * place and return undefined. Optionally accepts an object to use
+ * as this when executing callback as second argument.
+ *
+ * searchParams.forEach((value, key, parent) => {
+ * console.log(value, key, parent);
+ * });
+ *
+ */
+ forEach(
+ callbackfn: (value: string, key: string, parent: this) => void,
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ thisArg?: any
+ ): void {
+ requiredArguments("URLSearchParams.forEach", arguments.length, 1);
+
+ if (typeof thisArg !== "undefined") {
+ callbackfn = callbackfn.bind(thisArg);
+ }
+
+ for (const [key, value] of this.entries()) {
+ callbackfn(value, key, this);
+ }
+ }
+
+ /** Returns an iterator allowing to go through all keys contained
+ * in this object.
+ *
+ * for (const key of searchParams.keys()) {
+ * console.log(key);
+ * }
+ */
+ *keys(): IterableIterator<string> {
+ for (const entry of this.params) {
+ yield entry[0];
+ }
+ }
+
+ /** Returns an iterator allowing to go through all values contained
+ * in this object.
+ *
+ * for (const value of searchParams.values()) {
+ * console.log(value);
+ * }
+ */
+ *values(): IterableIterator<string> {
+ for (const entry of this.params) {
+ yield entry[1];
+ }
+ }
+
+ /** Returns an iterator allowing to go through all key/value
+ * pairs contained in this object.
+ *
+ * for (const [key, value] of searchParams.entries()) {
+ * console.log(key, value);
+ * }
+ */
+ *entries(): IterableIterator<[string, string]> {
+ yield* this.params;
+ }
+
+ /** Returns an iterator allowing to go through all key/value
+ * pairs contained in this object.
+ *
+ * for (const [key, value] of searchParams[Symbol.iterator]()) {
+ * console.log(key, value);
+ * }
+ */
+ *[Symbol.iterator](): IterableIterator<[string, string]> {
+ yield* this.params;
+ }
+
+ /** Returns a query string suitable for use in a URL.
+ *
+ * searchParams.toString();
+ */
+ toString(): string {
+ return this.params
+ .map(
+ (tuple): string =>
+ `${encodeURIComponent(tuple[0])}=${encodeURIComponent(tuple[1])}`
+ )
+ .join("&");
+ }
+
+ private _handleStringInitialization(init: string): void {
+ // Overload: USVString
+ // If init is a string and starts with U+003F (?),
+ // remove the first code point from init.
+ if (init.charCodeAt(0) === 0x003f) {
+ init = init.slice(1);
+ }
+
+ for (const pair of init.split("&")) {
+ // Empty params are ignored
+ if (pair.length === 0) {
+ continue;
+ }
+ const position = pair.indexOf("=");
+ const name = pair.slice(0, position === -1 ? pair.length : position);
+ const value = pair.slice(name.length + 1);
+ this.append(decodeURIComponent(name), decodeURIComponent(value));
+ }
+ }
+
+ private _handleArrayInitialization(
+ init: string[][] | Iterable<[string, string]>
+ ): void {
+ // Overload: sequence<sequence<USVString>>
+ for (const tuple of init) {
+ // If pair does not contain exactly two items, then throw a TypeError.
+ if (tuple.length !== 2) {
+ throw new TypeError(
+ "URLSearchParams.constructor tuple array argument must only contain pair elements"
+ );
+ }
+ this.append(tuple[0], tuple[1]);
+ }
+ }
+}