Diffstat (limited to 'extensions/fetch')
-rw-r--r--  extensions/fetch/01_fetch_util.js         21
-rw-r--r--  extensions/fetch/11_streams.js          3885
-rw-r--r--  extensions/fetch/11_streams_types.d.ts    49
-rw-r--r--  extensions/fetch/20_headers.js           449
-rw-r--r--  extensions/fetch/21_formdata.js          552
-rw-r--r--  extensions/fetch/22_body.js              338
-rw-r--r--  extensions/fetch/22_http_client.js        41
-rw-r--r--  extensions/fetch/23_request.js           524
-rw-r--r--  extensions/fetch/23_response.js          415
-rw-r--r--  extensions/fetch/26_fetch.js             307
-rw-r--r--  extensions/fetch/Cargo.toml               26
-rw-r--r--  extensions/fetch/README.md                 5
-rw-r--r--  extensions/fetch/internal.d.ts           113
-rw-r--r--  extensions/fetch/lib.deno_fetch.d.ts     708
-rw-r--r--  extensions/fetch/lib.rs                  497
15 files changed, 7930 insertions, 0 deletions
diff --git a/extensions/fetch/01_fetch_util.js b/extensions/fetch/01_fetch_util.js
new file mode 100644
index 000000000..ff76421a1
--- /dev/null
+++ b/extensions/fetch/01_fetch_util.js
@@ -0,0 +1,21 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+"use strict";
+
+((window) => {
+ function requiredArguments(
+ name,
+ length,
+ required,
+ ) {
+ if (length < required) {
+ const errMsg = `${name} requires at least ${required} argument${
+ required === 1 ? "" : "s"
+ }, but only ${length} present`;
+ throw new TypeError(errMsg);
+ }
+ }
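+
+ // Illustrative usage (this call site is an assumption, not part of this
+ // diff): callers such as the Headers implementation guard minimum arity with
+ //   requiredArguments("Headers.append", arguments.length, 2);
+ // which, given a single argument, throws:
+ //   TypeError: Headers.append requires at least 2 arguments, but only 1 present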
+
+ window.__bootstrap.fetchUtil = {
+ requiredArguments,
+ };
+})(this);
diff --git a/extensions/fetch/11_streams.js b/extensions/fetch/11_streams.js
new file mode 100644
index 000000000..8bbf34898
--- /dev/null
+++ b/extensions/fetch/11_streams.js
@@ -0,0 +1,3885 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+// @ts-check
+/// <reference path="./11_streams_types.d.ts" />
+/// <reference path="./lib.deno_fetch.d.ts" />
+/// <reference lib="esnext" />
+"use strict";
+
+((window) => {
+ class AssertionError extends Error {
+ constructor(msg) {
+ super(msg);
+ this.name = "AssertionError";
+ }
+ }
+
+ /**
+ * @param {unknown} cond
+ * @param {string=} msg
+ * @returns {asserts cond}
+ */
+ function assert(cond, msg = "Assertion failed.") {
+ if (!cond) {
+ throw new AssertionError(msg);
+ }
+ }
+
+ /** @template T */
+ class Deferred {
+ /** @type {Promise<T>} */
+ #promise;
+ /** @type {(reject?: any) => void} */
+ #reject;
+ /** @type {(value: T | PromiseLike<T>) => void} */
+ #resolve;
+ /** @type {"pending" | "fulfilled"} */
+ #state = "pending";
+
+ constructor() {
+ this.#promise = new Promise((resolve, reject) => {
+ this.#resolve = resolve;
+ this.#reject = reject;
+ });
+ }
+
+ /** @returns {Promise<T>} */
+ get promise() {
+ return this.#promise;
+ }
+
+ /** @returns {"pending" | "fulfilled"} */
+ get state() {
+ return this.#state;
+ }
+
+ /** @param {any=} reason */
+ reject(reason) {
+ // already settled promises are a no-op
+ if (this.#state !== "pending") {
+ return;
+ }
+ this.#state = "fulfilled";
+ this.#reject(reason);
+ }
+
+ /** @param {T | PromiseLike<T>} value */
+ resolve(value) {
+ // already settled promises are a no-op
+ if (this.#state !== "pending") {
+ return;
+ }
+ this.#state = "fulfilled";
+ this.#resolve(value);
+ }
+ }
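+
+ // Minimal sketch of how Deferred is used throughout this file: the promise is
+ // handed out early, while resolve()/reject() are called later by the stream
+ // machinery, and settling an already settled Deferred is a no-op.
+ //   const closed = new Deferred();
+ //   closed.promise.then(() => console.log("stream closed"));
+ //   // ... later, once the stream actually closes:
+ //   closed.resolve(undefined);
+ //   closed.resolve(undefined); // ignored, already fulfilled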
+
+ /**
+ * @param {(...args: any[]) => any} fn
+ * @param {boolean} enforcePromise
+ * @returns {(...args: any[]) => any}
+ */
+ function reflectApply(fn, enforcePromise) {
+ if (typeof fn !== "function") {
+ throw new TypeError("The property must be a function.");
+ }
+ return function (...args) {
+ if (enforcePromise) {
+ try {
+ return resolvePromiseWith(Reflect.apply(fn, this, args));
+ } catch (err) {
+ return Promise.reject(err);
+ }
+ }
+ return Reflect.apply(fn, this, args);
+ };
+ }
+
+ /**
+ * @template I
+ * @template O
+ * @param {Transformer<I, O>} transformer
+ * @returns {Transformer<I, O>}
+ */
+ function convertTransformer(transformer) {
+ const transformerDict = Object.create(null);
+ if (transformer === null) {
+ return transformerDict;
+ }
+ if ("flush" in transformer) {
+ transformerDict.flush = reflectApply(transformer.flush, true);
+ }
+ if ("readableType" in transformer) {
+ transformerDict.readableType = transformer.readableType;
+ }
+ if ("start" in transformer) {
+ transformerDict.start = reflectApply(transformer.start, false);
+ }
+ if ("transform" in transformer) {
+ transformerDict.transform = reflectApply(transformer.transform, true);
+ }
+ if ("writableType" in transformer) {
+ transformerDict.writableType = transformer.writableType;
+ }
+ return transformerDict;
+ }
+
+ /**
+ * @template W
+ * @param {UnderlyingSink<W>} underlyingSink
+ * @returns {UnderlyingSink<W>}
+ */
+ function convertUnderlyingSink(underlyingSink) {
+ const underlyingSinkDict = Object.create(null);
+ if (underlyingSink === null) {
+ return underlyingSinkDict;
+ }
+ if ("abort" in underlyingSink) {
+ underlyingSinkDict.abort = reflectApply(underlyingSink.abort, true);
+ }
+ if ("close" in underlyingSink) {
+ underlyingSinkDict.close = reflectApply(underlyingSink.close, true);
+ }
+ if ("start" in underlyingSink) {
+ underlyingSinkDict.start = reflectApply(underlyingSink.start, false);
+ }
+ if (underlyingSink.type) {
+ underlyingSinkDict.type = underlyingSink.type;
+ }
+ if ("write" in underlyingSink) {
+ underlyingSinkDict.write = reflectApply(underlyingSink.write, true);
+ }
+ return underlyingSinkDict;
+ }
+
+ /**
+ * @template R
+ * @param {UnderlyingSource<R>} underlyingSource
+ * @returns {UnderlyingSource<R>}
+ */
+ function convertUnderlyingSource(underlyingSource) {
+ const underlyingSourceDict = Object.create(null);
+ if (underlyingSource === null) {
+ throw new TypeError("Underlying source cannot be null");
+ }
+ if (underlyingSource === undefined) {
+ return underlyingSourceDict;
+ }
+ if ("cancel" in underlyingSource) {
+ underlyingSourceDict.cancel = reflectApply(underlyingSource.cancel, true);
+ }
+ if ("pull" in underlyingSource) {
+ underlyingSourceDict.pull = reflectApply(underlyingSource.pull, true);
+ }
+ if ("start" in underlyingSource) {
+ underlyingSourceDict.start = reflectApply(underlyingSource.start, false);
+ }
+ if (underlyingSource.type !== undefined) {
+ if (underlyingSource.type === null) {
+ throw new TypeError("type cannot be null");
+ }
+ const type = String(underlyingSource.type);
+ if (type !== "bytes") {
+ throw new TypeError("invalid underlying source type");
+ }
+ underlyingSourceDict.type = type;
+ }
+ return underlyingSourceDict;
+ }
+
+ const originalPromise = Promise;
+ const originalPromiseThen = Promise.prototype.then;
+
+ /**
+ * @template T
+ * @template TResult1
+ * @template TResult2
+ * @param {Promise<T>} promise
+ * @param {(value: T) => TResult1 | PromiseLike<TResult1>} onFulfilled
+ * @param {(reason: any) => TResult2 | PromiseLike<TResult2>=} onRejected
+ * @returns {Promise<TResult1 | TResult2>}
+ */
+ function performPromiseThen(promise, onFulfilled, onRejected) {
+ return originalPromiseThen.call(promise, onFulfilled, onRejected);
+ }
+
+ /**
+ * @template T
+ * @param {T | PromiseLike<T>} value
+ * @returns {Promise<T>}
+ */
+ function resolvePromiseWith(value) {
+ return new originalPromise((resolve) => resolve(value));
+ }
+
+ /** @param {any} e */
+ function rethrowAssertionErrorRejection(e) {
+ if (e && e instanceof AssertionError) {
+ queueMicrotask(() => {
+ console.error(`Internal Error: ${e.stack}`);
+ });
+ }
+ }
+
+ /** @param {Promise<any>} promise */
+ function setPromiseIsHandledToTrue(promise) {
+ performPromiseThen(promise, undefined, rethrowAssertionErrorRejection);
+ }
+
+ /**
+ * @template T
+ * @template TResult1
+ * @template TResult2
+ * @param {Promise<T>} promise
+ * @param {(value: T) => TResult1 | PromiseLike<TResult1>} fulfillmentHandler
+ * @param {(reason: any) => TResult2 | PromiseLike<TResult2>=} rejectionHandler
+ * @returns {Promise<TResult1 | TResult2>}
+ */
+ function transformPromiseWith(promise, fulfillmentHandler, rejectionHandler) {
+ return performPromiseThen(promise, fulfillmentHandler, rejectionHandler);
+ }
+
+ /**
+ * @template T
+ * @template TResult
+ * @param {Promise<T>} promise
+ * @param {(value: T) => TResult | PromiseLike<TResult>} onFulfilled
+ * @returns {void}
+ */
+ function uponFulfillment(promise, onFulfilled) {
+ uponPromise(promise, onFulfilled);
+ }
+
+ /**
+ * @template T
+ * @template TResult
+ * @param {Promise<T>} promise
+ * @param {(value: T) => TResult | PromiseLike<TResult>} onRejected
+ * @returns {void}
+ */
+ function uponRejection(promise, onRejected) {
+ uponPromise(promise, undefined, onRejected);
+ }
+
+ /**
+ * @template T
+ * @template TResult1
+ * @template TResult2
+ * @param {Promise<T>} promise
+ * @param {(value: T) => TResult1 | PromiseLike<TResult1>} onFulfilled
+ * @param {(reason: any) => TResult2 | PromiseLike<TResult2>=} onRejected
+ * @returns {void}
+ */
+ function uponPromise(promise, onFulfilled, onRejected) {
+ performPromiseThen(
+ performPromiseThen(promise, onFulfilled, onRejected),
+ undefined,
+ rethrowAssertionErrorRejection,
+ );
+ }
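+
+ // These helpers route every internal reaction through the captured
+ // originalPromiseThen rather than promise.then, so user code that patches
+ // Promise.prototype.then cannot observe or break stream internals.
+ // Illustration (assuming a hostile embedder):
+ //   Promise.prototype.then = () => { throw new Error("patched"); };
+ //   uponFulfillment(resolvePromiseWith(1), (v) => v); // still runs fine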
+
+ const isFakeDetached = Symbol("<<detached>>");
+
+ /**
+ * @param {ArrayBufferLike} O
+ * @returns {boolean}
+ */
+ function isDetachedBuffer(O) {
+ return isFakeDetached in O;
+ }
+
+ /**
+ * @param {ArrayBufferLike} O
+ * @returns {ArrayBufferLike}
+ */
+ function transferArrayBuffer(O) {
+ assert(!isDetachedBuffer(O));
+ const transferredIshVersion = O.slice(0);
+ Object.defineProperty(O, "byteLength", {
+ get() {
+ return 0;
+ },
+ });
+ O[isFakeDetached] = true;
+ return transferredIshVersion;
+ }
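+
+ // transferArrayBuffer only *simulates* detaching, since no real transfer
+ // primitive is exposed here: it copies the buffer, marks the original with
+ // isFakeDetached, and shadows byteLength with a getter returning 0.
+ // Observable effect (sketch):
+ //   const buf = new ArrayBuffer(8);
+ //   const copy = transferArrayBuffer(buf);
+ //   copy.byteLength;        // 8
+ //   buf.byteLength;         // 0
+ //   isDetachedBuffer(buf);  // true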
+
+ const _abortAlgorithm = Symbol("[[abortAlgorithm]]");
+ const _abortSteps = Symbol("[[AbortSteps]]");
+ const _autoAllocateChunkSize = Symbol("[[autoAllocateChunkSize]]");
+ const _backpressure = Symbol("[[backpressure]]");
+ const _backpressureChangePromise = Symbol("[[backpressureChangePromise]]");
+ const _byobRequest = Symbol("[[byobRequest]]");
+ const _cancelAlgorithm = Symbol("[[cancelAlgorithm]]");
+ const _cancelSteps = Symbol("[[CancelSteps]]");
+ const _close = Symbol("close sentinel");
+ const _closeAlgorithm = Symbol("[[closeAlgorithm]]");
+ const _closedPromise = Symbol("[[closedPromise]]");
+ const _closeRequest = Symbol("[[closeRequest]]");
+ const _closeRequested = Symbol("[[closeRequested]]");
+ const _controller = Symbol("[[controller]]");
+ const _detached = Symbol("[[Detached]]");
+ const _disturbed = Symbol("[[disturbed]]");
+ const _errorSteps = Symbol("[[ErrorSteps]]");
+ const _flushAlgorithm = Symbol("[[flushAlgorithm]]");
+ const _globalObject = Symbol("[[globalObject]]");
+ const _inFlightCloseRequest = Symbol("[[inFlightCloseRequest]]");
+ const _inFlightWriteRequest = Symbol("[[inFlightWriteRequest]]");
+ const _pendingAbortRequest = Symbol("[[pendingAbortRequest]]");
+ const _preventCancel = Symbol("[[preventCancel]]");
+ const _pullAgain = Symbol("[[pullAgain]]");
+ const _pullAlgorithm = Symbol("[[pullAlgorithm]]");
+ const _pulling = Symbol("[[pulling]]");
+ const _pullSteps = Symbol("[[PullSteps]]");
+ const _queue = Symbol("[[queue]]");
+ const _queueTotalSize = Symbol("[[queueTotalSize]]");
+ const _readable = Symbol("[[readable]]");
+ const _reader = Symbol("[[reader]]");
+ const _readRequests = Symbol("[[readRequests]]");
+ const _readyPromise = Symbol("[[readyPromise]]");
+ const _started = Symbol("[[started]]");
+ const _state = Symbol("[[state]]");
+ const _storedError = Symbol("[[storedError]]");
+ const _strategyHWM = Symbol("[[strategyHWM]]");
+ const _strategySizeAlgorithm = Symbol("[[strategySizeAlgorithm]]");
+ const _stream = Symbol("[[stream]]");
+ const _transformAlgorithm = Symbol("[[transformAlgorithm]]");
+ const _writable = Symbol("[[writable]]");
+ const _writeAlgorithm = Symbol("[[writeAlgorithm]]");
+ const _writer = Symbol("[[writer]]");
+ const _writeRequests = Symbol("[[writeRequests]]");
+
+ /**
+ * @template R
+ * @param {ReadableStream<R>} stream
+ * @returns {ReadableStreamDefaultReader<R>}
+ */
+ function acquireReadableStreamDefaultReader(stream) {
+ return new ReadableStreamDefaultReader(stream);
+ }
+
+ /**
+ * @template W
+ * @param {WritableStream<W>} stream
+ * @returns {WritableStreamDefaultWriter<W>}
+ */
+ function acquireWritableStreamDefaultWriter(stream) {
+ return new WritableStreamDefaultWriter(stream);
+ }
+
+ /**
+ * @template R
+ * @param {() => void} startAlgorithm
+ * @param {() => Promise<void>} pullAlgorithm
+ * @param {(reason: any) => Promise<void>} cancelAlgorithm
+ * @param {number=} highWaterMark
+ * @param {((chunk: R) => number)=} sizeAlgorithm
+ * @returns {ReadableStream<R>}
+ */
+ function createReadableStream(
+ startAlgorithm,
+ pullAlgorithm,
+ cancelAlgorithm,
+ highWaterMark = 1,
+ sizeAlgorithm = () => 1,
+ ) {
+ assert(isNonNegativeNumber(highWaterMark));
+ /** @type {ReadableStream} */
+ const stream = Object.create(ReadableStream.prototype);
+ initializeReadableStream(stream);
+ const controller = Object.create(ReadableStreamDefaultController.prototype);
+ setUpReadableStreamDefaultController(
+ stream,
+ controller,
+ startAlgorithm,
+ pullAlgorithm,
+ cancelAlgorithm,
+ highWaterMark,
+ sizeAlgorithm,
+ );
+ return stream;
+ }
+
+ /**
+ * @template W
+ * @param {(controller: WritableStreamDefaultController<W>) => Promise<void>} startAlgorithm
+ * @param {(chunk: W) => Promise<void>} writeAlgorithm
+ * @param {() => Promise<void>} closeAlgorithm
+ * @param {(reason: any) => Promise<void>} abortAlgorithm
+ * @param {number} highWaterMark
+ * @param {(chunk: W) => number} sizeAlgorithm
+ * @returns {WritableStream<W>}
+ */
+ function createWritableStream(
+ startAlgorithm,
+ writeAlgorithm,
+ closeAlgorithm,
+ abortAlgorithm,
+ highWaterMark,
+ sizeAlgorithm,
+ ) {
+ assert(isNonNegativeNumber(highWaterMark));
+ const stream = Object.create(WritableStream.prototype);
+ initializeWritableStream(stream);
+ const controller = Object.create(WritableStreamDefaultController.prototype);
+ setUpWritableStreamDefaultController(
+ stream,
+ controller,
+ startAlgorithm,
+ writeAlgorithm,
+ closeAlgorithm,
+ abortAlgorithm,
+ highWaterMark,
+ sizeAlgorithm,
+ );
+ return stream;
+ }
+
+ /**
+ * @template T
+ * @param {{ [_queue]: Array<ValueWithSize<T>>, [_queueTotalSize]: number }} container
+ * @returns {T}
+ */
+ function dequeueValue(container) {
+ assert(_queue in container && _queueTotalSize in container);
+ assert(container[_queue].length);
+ const valueWithSize = container[_queue].shift();
+ container[_queueTotalSize] -= valueWithSize.size;
+ if (container[_queueTotalSize] < 0) {
+ container[_queueTotalSize] = 0;
+ }
+ return valueWithSize.value;
+ }
+
+ /**
+ * @template T
+ * @param {{ [_queue]: Array<ValueWithSize<T | _close>>, [_queueTotalSize]: number }} container
+ * @param {T} value
+ * @param {number} size
+ * @returns {void}
+ */
+ function enqueueValueWithSize(container, value, size) {
+ assert(_queue in container && _queueTotalSize in container);
+ if (isNonNegativeNumber(size) === false) {
+ throw RangeError("chunk size isn't a positive number");
+ }
+ if (size === Infinity) {
+ throw RangeError("chunk size is invalid");
+ }
+ container[_queue].push({ value, size });
+ container[_queueTotalSize] += size;
+ }
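+
+ // The "queue-with-sizes" containers used above are plain lists of
+ // { value, size } records plus a running total. Illustrative behaviour,
+ // assuming a container whose [[queue]]/[[queueTotalSize]] slots were set up
+ // via resetQueue():
+ //   enqueueValueWithSize(container, "a", 2);
+ //   enqueueValueWithSize(container, "b", 3);
+ //   container[_queueTotalSize];  // 5
+ //   dequeueValue(container);     // "a"; total drops back to 3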
+
+ /**
+ * @param {QueuingStrategy} strategy
+ * @param {number} defaultHWM
+ */
+ function extractHighWaterMark(strategy, defaultHWM) {
+ if (!("highWaterMark" in strategy)) {
+ return defaultHWM;
+ }
+ const highWaterMark = Number(strategy.highWaterMark);
+ if (Number.isNaN(highWaterMark) || highWaterMark < 0) {
+ throw RangeError(
+ `Expected highWaterMark to be a positive number or Infinity, got "${highWaterMark}".`,
+ );
+ }
+ return highWaterMark;
+ }
+
+ /**
+ * @template T
+ * @param {QueuingStrategy<T>} strategy
+ * @return {(chunk: T) => number}
+ */
+ function extractSizeAlgorithm(strategy) {
+ const { size } = strategy;
+
+ if (!size) {
+ return () => 1;
+ }
+ return (chunk) => size(chunk);
+ }
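+
+ // Together these mirror the spec's ExtractHighWaterMark / ExtractSizeAlgorithm
+ // abstract operations: a missing highWaterMark falls back to the caller's
+ // default, and a missing size() means every chunk counts as 1, so (for
+ // example) `{ highWaterMark: 4 }` with no size behaves like a count queuing
+ // strategy with a high water mark of 4.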
+
+ /**
+ * @param {ReadableStream} stream
+ * @returns {void}
+ */
+ function initializeReadableStream(stream) {
+ stream[_state] = "readable";
+ stream[_reader] = stream[_storedError] = undefined;
+ stream[_disturbed] = false;
+ }
+
+ /**
+ * @template I
+ * @template O
+ * @param {TransformStream<I, O>} stream
+ * @param {Deferred<void>} startPromise
+ * @param {number} writableHighWaterMark
+ * @param {(chunk: I) => number} writableSizeAlgorithm
+ * @param {number} readableHighWaterMark
+ * @param {(chunk: O) => number} readableSizeAlgorithm
+ */
+ function initializeTransformStream(
+ stream,
+ startPromise,
+ writableHighWaterMark,
+ writableSizeAlgorithm,
+ readableHighWaterMark,
+ readableSizeAlgorithm,
+ ) {
+ function startAlgorithm() {
+ return startPromise.promise;
+ }
+
+ function writeAlgorithm(chunk) {
+ return transformStreamDefaultSinkWriteAlgorithm(stream, chunk);
+ }
+
+ function abortAlgorithm(reason) {
+ return transformStreamDefaultSinkAbortAlgorithm(stream, reason);
+ }
+
+ function closeAlgorithm() {
+ return transformStreamDefaultSinkCloseAlgorithm(stream);
+ }
+
+ stream[_writable] = createWritableStream(
+ startAlgorithm,
+ writeAlgorithm,
+ closeAlgorithm,
+ abortAlgorithm,
+ writableHighWaterMark,
+ writableSizeAlgorithm,
+ );
+
+ function pullAlgorithm() {
+ return transformStreamDefaultSourcePullAlgorithm(stream);
+ }
+
+ function cancelAlgorithm(reason) {
+ transformStreamErrorWritableAndUnblockWrite(stream, reason);
+ return resolvePromiseWith(undefined);
+ }
+
+ stream[_readable] = createReadableStream(
+ startAlgorithm,
+ pullAlgorithm,
+ cancelAlgorithm,
+ readableHighWaterMark,
+ readableSizeAlgorithm,
+ );
+
+ stream[_backpressure] = stream[_backpressureChangePromise] = undefined;
+ transformStreamSetBackpressure(stream, true);
+ stream[_controller] = undefined;
+ }
+
+ /** @param {WritableStream} stream */
+ function initializeWritableStream(stream) {
+ stream[_state] = "writable";
+ stream[_storedError] = stream[_writer] = stream[_controller] =
+ stream[_inFlightWriteRequest] = stream[_closeRequest] =
+ stream[_inFlightCloseRequest] = stream[_pendingAbortRequest] =
+ undefined;
+ stream[_writeRequests] = [];
+ stream[_backpressure] = false;
+ }
+
+ /**
+ * @param {unknown} v
+ * @returns {v is number}
+ */
+ function isNonNegativeNumber(v) {
+ if (typeof v !== "number") {
+ return false;
+ }
+ if (Number.isNaN(v)) {
+ return false;
+ }
+ if (v < 0) {
+ return false;
+ }
+ return true;
+ }
+
+ /**
+ * @param {unknown} value
+ * @returns {value is ReadableStream}
+ */
+ function isReadableStream(value) {
+ return !(typeof value !== "object" || value === null ||
+ !(_controller in value));
+ }
+
+ /**
+ * @param {ReadableStream} stream
+ * @returns {boolean}
+ */
+ function isReadableStreamLocked(stream) {
+ if (stream[_reader] === undefined) {
+ return false;
+ }
+ return true;
+ }
+
+ /**
+ * @param {unknown} value
+ * @returns {value is ReadableStreamDefaultReader}
+ */
+ function isReadableStreamDefaultReader(value) {
+ return !(typeof value !== "object" || value === null ||
+ !(_readRequests in value));
+ }
+
+ /**
+ * @param {ReadableStream} stream
+ * @returns {boolean}
+ */
+ function isReadableStreamDisturbed(stream) {
+ assert(isReadableStream(stream));
+ return stream[_disturbed];
+ }
+
+ /**
+ * @param {unknown} value
+ * @returns {value is WritableStream}
+ */
+ function isWritableStream(value) {
+ return !(typeof value !== "object" || value === null ||
+ !(_controller in value));
+ }
+
+ /**
+ * @param {WritableStream} stream
+ * @returns {boolean}
+ */
+ function isWritableStreamLocked(stream) {
+ if (stream[_writer] === undefined) {
+ return false;
+ }
+ return true;
+ }
+
+ /**
+ * @template T
+ * @param {{ [_queue]: Array<ValueWithSize<T | _close>>, [_queueTotalSize]: number }} container
+ * @returns {T | _close}
+ */
+ function peekQueueValue(container) {
+ assert(_queue in container && _queueTotalSize in container);
+ assert(container[_queue].length);
+ const valueWithSize = container[_queue][0];
+ return valueWithSize.value;
+ }
+
+ /**
+ * @param {ReadableByteStreamController} controller
+ * @returns {void}
+ */
+ function readableByteStreamControllerCallPullIfNeeded(controller) {
+ const shouldPull = readableByteStreamControllerShouldCallPull(controller);
+ if (!shouldPull) {
+ return;
+ }
+ if (controller[_pulling]) {
+ controller[_pullAgain] = true;
+ return;
+ }
+ assert(controller[_pullAgain] === false);
+ controller[_pulling] = true;
+ /** @type {Promise<void>} */
+ const pullPromise = controller[_pullAlgorithm](controller);
+ setPromiseIsHandledToTrue(
+ pullPromise.then(
+ () => {
+ controller[_pulling] = false;
+ if (controller[_pullAgain]) {
+ controller[_pullAgain] = false;
+ readableByteStreamControllerCallPullIfNeeded(controller);
+ }
+ },
+ (e) => {
+ readableByteStreamControllerError(controller, e);
+ },
+ ),
+ );
+ }
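+
+ // The [[pulling]] / [[pullAgain]] pair acts as a re-entrancy latch: while a
+ // pull is in flight, further requests are coalesced into a single follow-up
+ // pull once the current promise settles, so the underlying source's pull()
+ // never runs concurrently with itself. The default controller's
+ // readableStreamDefaultControllerCallPullIfNeeded below uses the same pattern.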
+
+ /**
+ * @param {ReadableByteStreamController} controller
+ * @returns {void}
+ */
+ function readableByteStreamControllerClearAlgorithms(controller) {
+ controller[_pullAlgorithm] = undefined;
+ controller[_cancelAlgorithm] = undefined;
+ }
+
+ /**
+ * @param {ReadableByteStreamController} controller
+ * @param {any} e
+ */
+ function readableByteStreamControllerError(controller, e) {
+ /** @type {ReadableStream<ArrayBuffer>} */
+ const stream = controller[_stream];
+ if (stream[_state] !== "readable") {
+ return;
+ }
+ // 3. Perform ! ReadableByteStreamControllerClearPendingPullIntos(controller).
+ resetQueue(controller);
+ readableByteStreamControllerClearAlgorithms(controller);
+ readableStreamError(stream, e);
+ }
+
+ /**
+ * @param {ReadableByteStreamController} controller
+ * @returns {void}
+ */
+ function readableByteStreamControllerClose(controller) {
+ /** @type {ReadableStream<ArrayBuffer>} */
+ const stream = controller[_stream];
+ if (controller[_closeRequested] || stream[_state] !== "readable") {
+ return;
+ }
+ if (controller[_queueTotalSize] > 0) {
+ controller[_closeRequested] = true;
+ return;
+ }
+ // 3.13.6.4 If controller.[[pendingPullIntos]] is not empty, (BYOB Support)
+ readableByteStreamControllerClearAlgorithms(controller);
+ readableStreamClose(stream);
+ }
+
+ /**
+ * @param {ReadableByteStreamController} controller
+ * @param {ArrayBufferView} chunk
+ */
+ function readableByteStreamControllerEnqueue(controller, chunk) {
+ /** @type {ReadableStream<ArrayBuffer>} */
+ const stream = controller[_stream];
+ if (
+ controller[_closeRequested] ||
+ controller[_stream][_state] !== "readable"
+ ) {
+ return;
+ }
+
+ const { buffer, byteOffset, byteLength } = chunk;
+ const transferredBuffer = transferArrayBuffer(buffer);
+ if (readableStreamHasDefaultReader(stream)) {
+ if (readableStreamGetNumReadRequests(stream) === 0) {
+ readableByteStreamControllerEnqueueChunkToQueue(
+ controller,
+ transferredBuffer,
+ byteOffset,
+ byteLength,
+ );
+ } else {
+ assert(controller[_queue].length === 0);
+ const transferredView = new Uint8Array(
+ transferredBuffer,
+ byteOffset,
+ byteLength,
+ );
+ readableStreamFulfillReadRequest(stream, transferredView, false);
+ }
+ // 8 Otherwise, if ! ReadableStreamHasBYOBReader(stream) is true,
+ } else {
+ assert(isReadableStreamLocked(stream) === false);
+ readableByteStreamControllerEnqueueChunkToQueue(
+ controller,
+ transferredBuffer,
+ byteOffset,
+ byteLength,
+ );
+ }
+ readableByteStreamControllerCallPullIfNeeded(controller);
+ }
+
+ /**
+ * @param {ReadableByteStreamController} controller
+ * @param {ArrayBufferLike} buffer
+ * @param {number} byteOffset
+ * @param {number} byteLength
+ * @returns {void}
+ */
+ function readableByteStreamControllerEnqueueChunkToQueue(
+ controller,
+ buffer,
+ byteOffset,
+ byteLength,
+ ) {
+ controller[_queue].push({ buffer, byteOffset, byteLength });
+ controller[_queueTotalSize] += byteLength;
+ }
+
+ /**
+ * @param {ReadableByteStreamController} controller
+ * @returns {number | null}
+ */
+ function readableByteStreamControllerGetDesiredSize(controller) {
+ const state = controller[_stream][_state];
+ if (state === "errored") {
+ return null;
+ }
+ if (state === "closed") {
+ return 0;
+ }
+ return controller[_strategyHWM] - controller[_queueTotalSize];
+ }
+
+ /**
+ * @param {{ [_queue]: any[], [_queueTotalSize]: number }} container
+ * @returns {void}
+ */
+ function resetQueue(container) {
+ container[_queue] = [];
+ container[_queueTotalSize] = 0;
+ }
+
+ /**
+ * @param {ReadableByteStreamController} controller
+ * @returns {void}
+ */
+ function readableByteStreamControllerHandleQueueDrain(controller) {
+ assert(controller[_stream][_state] === "readable");
+ if (
+ controller[_queueTotalSize] === 0 && controller[_closeRequested]
+ ) {
+ readableByteStreamControllerClearAlgorithms(controller);
+ readableStreamClose(controller[_stream]);
+ } else {
+ readableByteStreamControllerCallPullIfNeeded(controller);
+ }
+ }
+
+ /**
+ * @param {ReadableByteStreamController} controller
+ * @returns {boolean}
+ */
+ function readableByteStreamControllerShouldCallPull(controller) {
+ /** @type {ReadableStream<ArrayBuffer>} */
+ const stream = controller[_stream];
+ if (
+ stream[_state] !== "readable" ||
+ controller[_closeRequested] ||
+ !controller[_started]
+ ) {
+ return false;
+ }
+ if (
+ readableStreamHasDefaultReader(stream) &&
+ readableStreamGetNumReadRequests(stream) > 0
+ ) {
+ return true;
+ }
+ // 3.13.25.6 If ! ReadableStreamHasBYOBReader(stream) is true and !
+ // ReadableStreamGetNumReadIntoRequests(stream) > 0, return true.
+ const desiredSize = readableByteStreamControllerGetDesiredSize(controller);
+ assert(desiredSize !== null);
+ return desiredSize > 0;
+ }
+
+ /**
+ * @template R
+ * @param {ReadableStream<R>} stream
+ * @param {ReadRequest<R>} readRequest
+ * @returns {void}
+ */
+ function readableStreamAddReadRequest(stream, readRequest) {
+ assert(isReadableStreamDefaultReader(stream[_reader]));
+ assert(stream[_state] === "readable");
+ stream[_reader][_readRequests].push(readRequest);
+ }
+
+ /**
+ * @template R
+ * @param {ReadableStream<R>} stream
+ * @param {any=} reason
+ * @returns {Promise<void>}
+ */
+ function readableStreamCancel(stream, reason) {
+ stream[_disturbed] = true;
+ if (stream[_state] === "closed") {
+ return resolvePromiseWith(undefined);
+ }
+ if (stream[_state] === "errored") {
+ return Promise.reject(stream[_storedError]);
+ }
+ readableStreamClose(stream);
+ /** @type {Promise<void>} */
+ const sourceCancelPromise = stream[_controller][_cancelSteps](reason);
+ return sourceCancelPromise.then(() => undefined);
+ }
+
+ /**
+ * @template R
+ * @param {ReadableStream<R>} stream
+ * @returns {void}
+ */
+ function readableStreamClose(stream) {
+ assert(stream[_state] === "readable");
+ stream[_state] = "closed";
+ /** @type {ReadableStreamDefaultReader<R> | undefined} */
+ const reader = stream[_reader];
+ if (!reader) {
+ return;
+ }
+ if (isReadableStreamDefaultReader(reader)) {
+ /** @type {Array<ReadRequest<R>>} */
+ const readRequests = reader[_readRequests];
+ for (const readRequest of readRequests) {
+ readRequest.closeSteps();
+ }
+ reader[_readRequests] = [];
+ }
+ // This promise can be double resolved.
+ // See: https://github.com/whatwg/streams/issues/1100
+ reader[_closedPromise].resolve(undefined);
+ }
+
+ /** @param {ReadableStreamDefaultController<any>} controller */
+ function readableStreamDefaultControllerCallPullIfNeeded(controller) {
+ const shouldPull = readableStreamDefaultcontrollerShouldCallPull(
+ controller,
+ );
+ if (shouldPull === false) {
+ return;
+ }
+ if (controller[_pulling] === true) {
+ controller[_pullAgain] = true;
+ return;
+ }
+ assert(controller[_pullAgain] === false);
+ controller[_pulling] = true;
+ const pullPromise = controller[_pullAlgorithm](controller);
+ uponFulfillment(pullPromise, () => {
+ controller[_pulling] = false;
+ if (controller[_pullAgain] === true) {
+ controller[_pullAgain] = false;
+ readableStreamDefaultControllerCallPullIfNeeded(controller);
+ }
+ });
+ uponRejection(pullPromise, (e) => {
+ readableStreamDefaultControllerError(controller, e);
+ });
+ }
+
+ /**
+ * @param {ReadableStreamDefaultController<any>} controller
+ * @returns {boolean}
+ */
+ function readableStreamDefaultControllerCanCloseOrEnqueue(controller) {
+ const state = controller[_stream][_state];
+ if (controller[_closeRequested] === false && state === "readable") {
+ return true;
+ } else {
+ return false;
+ }
+ }
+
+ /** @param {ReadableStreamDefaultController<any>} controller */
+ function readableStreamDefaultControllerClearAlgorithms(controller) {
+ controller[_pullAlgorithm] = undefined;
+ controller[_cancelAlgorithm] = undefined;
+ controller[_strategySizeAlgorithm] = undefined;
+ }
+
+ /** @param {ReadableStreamDefaultController<any>} controller */
+ function readableStreamDefaultControllerClose(controller) {
+ if (
+ readableStreamDefaultControllerCanCloseOrEnqueue(controller) === false
+ ) {
+ return;
+ }
+ const stream = controller[_stream];
+ controller[_closeRequested] = true;
+ if (controller[_queue].length === 0) {
+ readableStreamDefaultControllerClearAlgorithms(controller);
+ readableStreamClose(stream);
+ }
+ }
+
+ /**
+ * @template R
+ * @param {ReadableStreamDefaultController<R>} controller
+ * @param {R} chunk
+ * @returns {void}
+ */
+ function readableStreamDefaultControllerEnqueue(controller, chunk) {
+ if (
+ readableStreamDefaultControllerCanCloseOrEnqueue(controller) === false
+ ) {
+ return;
+ }
+ const stream = controller[_stream];
+ if (
+ isReadableStreamLocked(stream) === true &&
+ readableStreamGetNumReadRequests(stream) > 0
+ ) {
+ readableStreamFulfillReadRequest(stream, chunk, false);
+ } else {
+ let chunkSize;
+ try {
+ chunkSize = controller[_strategySizeAlgorithm](chunk);
+ } catch (e) {
+ readableStreamDefaultControllerError(controller, e);
+ throw e;
+ }
+
+ try {
+ enqueueValueWithSize(controller, chunk, chunkSize);
+ } catch (e) {
+ readableStreamDefaultControllerError(controller, e);
+ throw e;
+ }
+ }
+ readableStreamDefaultControllerCallPullIfNeeded(controller);
+ }
+
+ /**
+ * @param {ReadableStreamDefaultController<any>} controller
+ * @param {any} e
+ */
+ function readableStreamDefaultControllerError(controller, e) {
+ const stream = controller[_stream];
+ if (stream[_state] !== "readable") {
+ return;
+ }
+ resetQueue(controller);
+ readableStreamDefaultControllerClearAlgorithms(controller);
+ readableStreamError(stream, e);
+ }
+
+ /**
+ * @param {ReadableStreamDefaultController<any>} controller
+ * @returns {number | null}
+ */
+ function readableStreamDefaultControllerGetDesiredSize(controller) {
+ const state = controller[_stream][_state];
+ if (state === "errored") {
+ return null;
+ }
+ if (state === "closed") {
+ return 0;
+ }
+ return controller[_strategyHWM] - controller[_queueTotalSize];
+ }
+
+ /** @param {ReadableStreamDefaultController} controller */
+ function readableStreamDefaultcontrollerHasBackpressure(controller) {
+ if (readableStreamDefaultcontrollerShouldCallPull(controller) === true) {
+ return false;
+ } else {
+ return true;
+ }
+ }
+
+ /**
+ * @param {ReadableStreamDefaultController<any>} controller
+ * @returns {boolean}
+ */
+ function readableStreamDefaultcontrollerShouldCallPull(controller) {
+ const stream = controller[_stream];
+ if (
+ readableStreamDefaultControllerCanCloseOrEnqueue(controller) === false
+ ) {
+ return false;
+ }
+ if (controller[_started] === false) {
+ return false;
+ }
+ if (
+ isReadableStreamLocked(stream) &&
+ readableStreamGetNumReadRequests(stream) > 0
+ ) {
+ return true;
+ }
+ const desiredSize = readableStreamDefaultControllerGetDesiredSize(
+ controller,
+ );
+ assert(desiredSize !== null);
+ if (desiredSize > 0) {
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * @template R
+ * @param {ReadableStreamDefaultReader<R>} reader
+ * @param {ReadRequest<R>} readRequest
+ * @returns {void}
+ */
+ function readableStreamDefaultReaderRead(reader, readRequest) {
+ const stream = reader[_stream];
+ assert(stream);
+ stream[_disturbed] = true;
+ if (stream[_state] === "closed") {
+ readRequest.closeSteps();
+ } else if (stream[_state] === "errored") {
+ readRequest.errorSteps(stream[_storedError]);
+ } else {
+ assert(stream[_state] === "readable");
+ stream[_controller][_pullSteps](readRequest);
+ }
+ }
+
+ /**
+ * @template R
+ * @param {ReadableStream<R>} stream
+ * @param {any} e
+ */
+ function readableStreamError(stream, e) {
+ assert(stream[_state] === "readable");
+ stream[_state] = "errored";
+ stream[_storedError] = e;
+ /** @type {ReadableStreamDefaultReader<R> | undefined} */
+ const reader = stream[_reader];
+ if (reader === undefined) {
+ return;
+ }
+ if (isReadableStreamDefaultReader(reader)) {
+ /** @type {Array<ReadRequest<R>>} */
+ const readRequests = reader[_readRequests];
+ for (const readRequest of readRequests) {
+ readRequest.errorSteps(e);
+ }
+ reader[_readRequests] = [];
+ }
+ // 3.5.6.8 Otherwise, support BYOB Reader
+ /** @type {Deferred<void>} */
+ const closedPromise = reader[_closedPromise];
+ closedPromise.reject(e);
+ setPromiseIsHandledToTrue(closedPromise.promise);
+ }
+
+ /**
+ * @template R
+ * @param {ReadableStream<R>} stream
+ * @param {R} chunk
+ * @param {boolean} done
+ */
+ function readableStreamFulfillReadRequest(stream, chunk, done) {
+ assert(readableStreamHasDefaultReader(stream) === true);
+ /** @type {ReadableStreamDefaultReader<R>} */
+ const reader = stream[_reader];
+ assert(reader[_readRequests].length);
+ /** @type {ReadRequest<R>} */
+ const readRequest = reader[_readRequests].shift();
+ if (done) {
+ readRequest.closeSteps();
+ } else {
+ readRequest.chunkSteps(chunk);
+ }
+ }
+
+ /**
+ * @param {ReadableStream} stream
+ * @return {number}
+ */
+ function readableStreamGetNumReadRequests(stream) {
+ assert(readableStreamHasDefaultReader(stream) === true);
+ return stream[_reader][_readRequests].length;
+ }
+
+ /**
+ * @param {ReadableStream} stream
+ * @returns {boolean}
+ */
+ function readableStreamHasDefaultReader(stream) {
+ const reader = stream[_reader];
+ if (reader === undefined) {
+ return false;
+ }
+ if (isReadableStreamDefaultReader(reader)) {
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * @template T
+ * @param {ReadableStream<T>} source
+ * @param {WritableStream<T>} dest
+ * @param {boolean} preventClose
+ * @param {boolean} preventAbort
+ * @param {boolean} preventCancel
+ * @param {AbortSignal=} signal
+ * @returns {Promise<void>}
+ */
+ function readableStreamPipeTo(
+ source,
+ dest,
+ preventClose,
+ preventAbort,
+ preventCancel,
+ signal,
+ ) {
+ assert(isReadableStream(source));
+ assert(isWritableStream(dest));
+ assert(
+ typeof preventClose === "boolean" && typeof preventAbort === "boolean" &&
+ typeof preventCancel === "boolean",
+ );
+ assert(signal === undefined || signal instanceof AbortSignal);
+ assert(!isReadableStreamLocked(source));
+ assert(!isWritableStreamLocked(dest));
+ const reader = acquireReadableStreamDefaultReader(source);
+ const writer = acquireWritableStreamDefaultWriter(dest);
+ source[_disturbed] = true;
+ let shuttingDown = false;
+ let currentWrite = resolvePromiseWith(undefined);
+ /** @type {Deferred<void>} */
+ const promise = new Deferred();
+ /** @type {() => void} */
+ let abortAlgorithm;
+ if (signal) {
+ abortAlgorithm = () => {
+ const error = new DOMException("Aborted", "AbortError");
+ /** @type {Array<() => Promise<void>>} */
+ const actions = [];
+ if (preventAbort === false) {
+ actions.push(() => {
+ if (dest[_state] === "writable") {
+ return writableStreamAbort(dest, error);
+ } else {
+ return resolvePromiseWith(undefined);
+ }
+ });
+ }
+ if (preventCancel === false) {
+ actions.push(() => {
+ if (source[_state] === "readable") {
+ return readableStreamCancel(source, error);
+ } else {
+ return resolvePromiseWith(undefined);
+ }
+ });
+ }
+ shutdownWithAction(
+ () => Promise.all(actions.map((action) => action())),
+ true,
+ error,
+ );
+ };
+
+ if (signal.aborted) {
+ abortAlgorithm();
+ return promise.promise;
+ }
+ signal.addEventListener("abort", abortAlgorithm);
+ }
+
+ function pipeLoop() {
+ return new Promise((resolveLoop, rejectLoop) => {
+ /** @param {boolean} done */
+ function next(done) {
+ if (done) {
+ resolveLoop();
+ } else {
+ uponPromise(pipeStep(), next, rejectLoop);
+ }
+ }
+ next(false);
+ });
+ }
+
+ /** @returns {Promise<boolean>} */
+ function pipeStep() {
+ if (shuttingDown === true) {
+ return resolvePromiseWith(true);
+ }
+
+ return transformPromiseWith(writer[_readyPromise].promise, () => {
+ return new Promise((resolveRead, rejectRead) => {
+ readableStreamDefaultReaderRead(
+ reader,
+ {
+ chunkSteps(chunk) {
+ currentWrite = transformPromiseWith(
+ writableStreamDefaultWriterWrite(writer, chunk),
+ undefined,
+ () => {},
+ );
+ resolveRead(false);
+ },
+ closeSteps() {
+ resolveRead(true);
+ },
+ errorSteps: rejectRead,
+ },
+ );
+ });
+ });
+ }
+
+ isOrBecomesErrored(
+ source,
+ reader[_closedPromise].promise,
+ (storedError) => {
+ if (preventAbort === false) {
+ shutdownWithAction(
+ () => writableStreamAbort(dest, storedError),
+ true,
+ storedError,
+ );
+ } else {
+ shutdown(true, storedError);
+ }
+ },
+ );
+
+ isOrBecomesErrored(dest, writer[_closedPromise].promise, (storedError) => {
+ if (preventCancel === false) {
+ shutdownWithAction(
+ () => readableStreamCancel(source, storedError),
+ true,
+ storedError,
+ );
+ } else {
+ shutdown(true, storedError);
+ }
+ });
+
+ isOrBecomesClosed(source, reader[_closedPromise].promise, () => {
+ if (preventClose === false) {
+ shutdownWithAction(() =>
+ writableStreamDefaultWriterCloseWithErrorPropagation(writer)
+ );
+ } else {
+ shutdown();
+ }
+ });
+
+ if (
+ writableStreamCloseQueuedOrInFlight(dest) === true ||
+ dest[_state] === "closed"
+ ) {
+ const destClosed = new TypeError(
+ "The destination writable stream closed before all the data could be piped to it.",
+ );
+ if (preventCancel === false) {
+ shutdownWithAction(
+ () => readableStreamCancel(source, destClosed),
+ true,
+ destClosed,
+ );
+ } else {
+ shutdown(true, destClosed);
+ }
+ }
+
+ setPromiseIsHandledToTrue(pipeLoop());
+
+ return promise.promise;
+
+ /** @returns {Promise<void>} */
+ function waitForWritesToFinish() {
+ const oldCurrentWrite = currentWrite;
+ return transformPromiseWith(
+ currentWrite,
+ () =>
+ oldCurrentWrite !== currentWrite
+ ? waitForWritesToFinish()
+ : undefined,
+ );
+ }
+
+ /**
+ * @param {ReadableStream | WritableStream} stream
+ * @param {Promise<any>} promise
+ * @param {(e: any) => void} action
+ */
+ function isOrBecomesErrored(stream, promise, action) {
+ if (stream[_state] === "errored") {
+ action(stream[_storedError]);
+ } else {
+ uponRejection(promise, action);
+ }
+ }
+
+ /**
+ * @param {ReadableStream} stream
+ * @param {Promise<any>} promise
+ * @param {() => void} action
+ */
+ function isOrBecomesClosed(stream, promise, action) {
+ if (stream[_state] === "closed") {
+ action();
+ } else {
+ uponFulfillment(promise, action);
+ }
+ }
+
+ /**
+ * @param {() => Promise<void[] | void>} action
+ * @param {boolean=} originalIsError
+ * @param {any=} originalError
+ */
+ function shutdownWithAction(action, originalIsError, originalError) {
+ function doTheRest() {
+ uponPromise(
+ action(),
+ () => finalize(originalIsError, originalError),
+ (newError) => finalize(true, newError),
+ );
+ }
+
+ if (shuttingDown === true) {
+ return;
+ }
+ shuttingDown = true;
+
+ if (
+ dest[_state] === "writable" &&
+ writableStreamCloseQueuedOrInFlight(dest) === false
+ ) {
+ uponFulfillment(waitForWritesToFinish(), doTheRest);
+ } else {
+ doTheRest();
+ }
+ }
+
+ /**
+ * @param {boolean=} isError
+ * @param {any=} error
+ */
+ function shutdown(isError, error) {
+ if (shuttingDown) {
+ return;
+ }
+ shuttingDown = true;
+ if (
+ dest[_state] === "writable" &&
+ writableStreamCloseQueuedOrInFlight(dest) === false
+ ) {
+ uponFulfillment(
+ waitForWritesToFinish(),
+ () => finalize(isError, error),
+ );
+ } else {
+ finalize(isError, error);
+ }
+ }
+
+ /**
+ * @param {boolean=} isError
+ * @param {any=} error
+ */
+ function finalize(isError, error) {
+ writableStreamDefaultWriterRelease(writer);
+ readableStreamReaderGenericRelease(reader);
+
+ if (signal !== undefined) {
+ signal.removeEventListener("abort", abortAlgorithm);
+ }
+ if (isError) {
+ promise.reject(error);
+ } else {
+ promise.resolve(undefined);
+ }
+ }
+ }
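+
+ // Sketch of how this machinery surfaces to users (assuming the public
+ // pipeTo() wrapper defined later in this file forwards its options here):
+ // an aborted `signal` rejects the pipe with a DOMException named
+ // "AbortError", cancelling the source and aborting the destination unless
+ // preventCancel / preventAbort were requested.
+ //   const ac = new AbortController();
+ //   readable.pipeTo(writable, { signal: ac.signal })
+ //     .catch((e) => console.log(e.name)); // "AbortError"
+ //   ac.abort();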
+
+ /**
+ * @param {ReadableStreamGenericReader<any>} reader
+ * @param {any} reason
+ * @returns {Promise<void>}
+ */
+ function readableStreamReaderGenericCancel(reader, reason) {
+ const stream = reader[_stream];
+ assert(stream !== undefined);
+ return readableStreamCancel(stream, reason);
+ }
+
+ /**
+ * @template R
+ * @param {ReadableStreamDefaultReader<R>} reader
+ * @param {ReadableStream<R>} stream
+ */
+ function readableStreamReaderGenericInitialize(reader, stream) {
+ reader[_stream] = stream;
+ stream[_reader] = reader;
+ if (stream[_state] === "readable") {
+ reader[_closedPromise] = new Deferred();
+ } else if (stream[_state] === "closed") {
+ reader[_closedPromise] = new Deferred();
+ reader[_closedPromise].resolve(undefined);
+ } else {
+ assert(stream[_state] === "errored");
+ reader[_closedPromise] = new Deferred();
+ reader[_closedPromise].reject(stream[_storedError]);
+ setPromiseIsHandledToTrue(reader[_closedPromise].promise);
+ }
+ }
+
+ /**
+ * @template R
+ * @param {ReadableStreamGenericReader<R>} reader
+ */
+ function readableStreamReaderGenericRelease(reader) {
+ assert(reader[_stream] !== undefined);
+ assert(reader[_stream][_reader] === reader);
+ if (reader[_stream][_state] === "readable") {
+ reader[_closedPromise].reject(
+ new TypeError(
+ "Reader was released and can no longer be used to monitor the stream's closedness.",
+ ),
+ );
+ } else {
+ reader[_closedPromise] = new Deferred();
+ reader[_closedPromise].reject(
+ new TypeError(
+ "Reader was released and can no longer be used to monitor the stream's closedness.",
+ ),
+ );
+ }
+ setPromiseIsHandledToTrue(reader[_closedPromise].promise);
+ reader[_stream][_reader] = undefined;
+ reader[_stream] = undefined;
+ }
+
+ /**
+ * @template R
+ * @param {ReadableStream<R>} stream
+ * @param {boolean} cloneForBranch2
+ * @returns {[ReadableStream<R>, ReadableStream<R>]}
+ */
+ function readableStreamTee(stream, cloneForBranch2) {
+ assert(isReadableStream(stream));
+ assert(typeof cloneForBranch2 === "boolean");
+ const reader = acquireReadableStreamDefaultReader(stream);
+ let reading = false;
+ let canceled1 = false;
+ let canceled2 = false;
+ /** @type {any} */
+ let reason1;
+ /** @type {any} */
+ let reason2;
+ /** @type {ReadableStream<R>} */
+ // deno-lint-ignore prefer-const
+ let branch1;
+ /** @type {ReadableStream<R>} */
+ // deno-lint-ignore prefer-const
+ let branch2;
+
+ /** @type {Deferred<void>} */
+ const cancelPromise = new Deferred();
+
+ function pullAlgorithm() {
+ if (reading === true) {
+ return resolvePromiseWith(undefined);
+ }
+ reading = true;
+ /** @type {ReadRequest<R>} */
+ const readRequest = {
+ chunkSteps(value) {
+ queueMicrotask(() => {
+ reading = false;
+ const value1 = value;
+ const value2 = value;
+
+ if (canceled1 === false) {
+ readableStreamDefaultControllerEnqueue(
+ /** @type {ReadableStreamDefaultController<any>} */ (branch1[
+ _controller
+ ]),
+ value1,
+ );
+ }
+ if (canceled2 === false) {
+ readableStreamDefaultControllerEnqueue(
+ /** @type {ReadableStreamDefaultController<any>} */ (branch2[
+ _controller
+ ]),
+ value2,
+ );
+ }
+ });
+ },
+ closeSteps() {
+ reading = false;
+ if (canceled1 === false) {
+ readableStreamDefaultControllerClose(
+ /** @type {ReadableStreamDefaultController<any>} */ (branch1[
+ _controller
+ ]),
+ );
+ }
+ if (canceled2 === false) {
+ readableStreamDefaultControllerClose(
+ /** @type {ReadableStreamDefaultController<any>} */ (branch2[
+ _controller
+ ]),
+ );
+ }
+ cancelPromise.resolve(undefined);
+ },
+ errorSteps() {
+ reading = false;
+ },
+ };
+ readableStreamDefaultReaderRead(reader, readRequest);
+ return resolvePromiseWith(undefined);
+ }
+
+ /**
+ * @param {any} reason
+ * @returns {Promise<void>}
+ */
+ function cancel1Algorithm(reason) {
+ canceled1 = true;
+ reason1 = reason;
+ if (canceled2 === true) {
+ const compositeReason = [reason1, reason2];
+ const cancelResult = readableStreamCancel(stream, compositeReason);
+ cancelPromise.resolve(cancelResult);
+ }
+ return cancelPromise.promise;
+ }
+
+ /**
+ * @param {any} reason
+ * @returns {Promise<void>}
+ */
+ function cancel2Algorithm(reason) {
+ canceled2 = true;
+ reason2 = reason;
+ if (canceled1 === true) {
+ const compositeReason = [reason1, reason2];
+ const cancelResult = readableStreamCancel(stream, compositeReason);
+ cancelPromise.resolve(cancelResult);
+ }
+ return cancelPromise.promise;
+ }
+
+ function startAlgorithm() {}
+
+ branch1 = createReadableStream(
+ startAlgorithm,
+ pullAlgorithm,
+ cancel1Algorithm,
+ );
+ branch2 = createReadableStream(
+ startAlgorithm,
+ pullAlgorithm,
+ cancel2Algorithm,
+ );
+
+ uponRejection(reader[_closedPromise].promise, (r) => {
+ readableStreamDefaultControllerError(
+ /** @type {ReadableStreamDefaultController<any>} */ (branch1[
+ _controller
+ ]),
+ r,
+ );
+ readableStreamDefaultControllerError(
+ /** @type {ReadableStreamDefaultController<any>} */ (branch2[
+ _controller
+ ]),
+ r,
+ );
+ cancelPromise.resolve(undefined);
+ });
+
+ return [branch1, branch2];
+ }
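+
+ // Illustrative effect of readableStreamTee (exposed to users via
+ // ReadableStream.prototype.tee() further down in this file): both branches
+ // receive every chunk, and the original stream is only cancelled once *both*
+ // branches cancel, with a composite [reason1, reason2] reason.
+ //   const [a, b] = readable.tee();
+ //   a.cancel("left");   // original not cancelled yet
+ //   b.cancel("right");  // now cancelled with ["left", "right"]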
+
+ /**
+ * @param {ReadableStream<ArrayBuffer>} stream
+ * @param {ReadableByteStreamController} controller
+ * @param {() => void} startAlgorithm
+ * @param {() => Promise<void>} pullAlgorithm
+ * @param {(reason: any) => Promise<void>} cancelAlgorithm
+ * @param {number} highWaterMark
+ * @param {number | undefined} autoAllocateChunkSize
+ */
+ function setUpReadableByteStreamController(
+ stream,
+ controller,
+ startAlgorithm,
+ pullAlgorithm,
+ cancelAlgorithm,
+ highWaterMark,
+ autoAllocateChunkSize,
+ ) {
+ assert(stream[_controller] === undefined);
+ if (autoAllocateChunkSize !== undefined) {
+ assert(Number.isInteger(autoAllocateChunkSize));
+ assert(autoAllocateChunkSize >= 0);
+ }
+ controller[_stream] = stream;
+ controller[_pullAgain] = controller[_pulling] = false;
+ controller[_byobRequest] = undefined;
+ resetQueue(controller);
+ controller[_closeRequested] = controller[_started] = false;
+ controller[_strategyHWM] = highWaterMark;
+ controller[_pullAlgorithm] = pullAlgorithm;
+ controller[_cancelAlgorithm] = cancelAlgorithm;
+ controller[_autoAllocateChunkSize] = autoAllocateChunkSize;
+ // 12. Set controller.[[pendingPullIntos]] to a new empty list.
+ stream[_controller] = controller;
+ const startResult = startAlgorithm();
+ const startPromise = resolvePromiseWith(startResult);
+ setPromiseIsHandledToTrue(
+ startPromise.then(
+ () => {
+ controller[_started] = true;
+ assert(controller[_pulling] === false);
+ assert(controller[_pullAgain] === false);
+ readableByteStreamControllerCallPullIfNeeded(controller);
+ },
+ (r) => {
+ readableByteStreamControllerError(controller, r);
+ },
+ ),
+ );
+ }
+
+ /**
+ * @param {ReadableStream<ArrayBuffer>} stream
+ * @param {UnderlyingSource<ArrayBuffer>} underlyingSource
+ * @param {UnderlyingSource<ArrayBuffer>} underlyingSourceDict
+ * @param {number} highWaterMark
+ */
+ function setUpReadableByteStreamControllerFromUnderlyingSource(
+ stream,
+ underlyingSource,
+ underlyingSourceDict,
+ highWaterMark,
+ ) {
+ const controller = new ReadableByteStreamController();
+ /** @type {() => void} */
+ let startAlgorithm = () => undefined;
+ /** @type {() => Promise<void>} */
+ let pullAlgorithm = () => resolvePromiseWith(undefined);
+ /** @type {(reason: any) => Promise<void>} */
+ let cancelAlgorithm = (_reason) => resolvePromiseWith(undefined);
+ if ("start" in underlyingSourceDict) {
+ startAlgorithm = () =>
+ underlyingSourceDict.start.call(underlyingSource, controller);
+ }
+ if ("pull" in underlyingSourceDict) {
+ pullAlgorithm = () =>
+ underlyingSourceDict.pull.call(underlyingSource, controller);
+ }
+ if ("cancel" in underlyingSourceDict) {
+ cancelAlgorithm = (reason) =>
+ underlyingSourceDict.cancel.call(underlyingSource, reason);
+ }
+ // 3.13.27.6 Let autoAllocateChunkSize be ? GetV(underlyingByteSource, "autoAllocateChunkSize").
+ /** @type {undefined} */
+ const autoAllocateChunkSize = undefined;
+ setUpReadableByteStreamController(
+ stream,
+ controller,
+ startAlgorithm,
+ pullAlgorithm,
+ cancelAlgorithm,
+ highWaterMark,
+ autoAllocateChunkSize,
+ );
+ }
+
+ /**
+ * @template R
+ * @param {ReadableStream<R>} stream
+ * @param {ReadableStreamDefaultController<R>} controller
+ * @param {(controller: ReadableStreamDefaultController<R>) => void | Promise<void>} startAlgorithm
+ * @param {(controller: ReadableStreamDefaultController<R>) => Promise<void>} pullAlgorithm
+ * @param {(reason: any) => Promise<void>} cancelAlgorithm
+ * @param {number} highWaterMark
+ * @param {(chunk: R) => number} sizeAlgorithm
+ */
+ function setUpReadableStreamDefaultController(
+ stream,
+ controller,
+ startAlgorithm,
+ pullAlgorithm,
+ cancelAlgorithm,
+ highWaterMark,
+ sizeAlgorithm,
+ ) {
+ assert(stream[_controller] === undefined);
+ controller[_stream] = stream;
+ resetQueue(controller);
+ controller[_started] = controller[_closeRequested] =
+ controller[_pullAgain] = controller[_pulling] = false;
+ controller[_strategySizeAlgorithm] = sizeAlgorithm;
+ controller[_strategyHWM] = highWaterMark;
+ controller[_pullAlgorithm] = pullAlgorithm;
+ controller[_cancelAlgorithm] = cancelAlgorithm;
+ stream[_controller] = controller;
+ const startResult = startAlgorithm(controller);
+ const startPromise = resolvePromiseWith(startResult);
+ uponPromise(startPromise, () => {
+ controller[_started] = true;
+ assert(controller[_pulling] === false);
+ assert(controller[_pullAgain] === false);
+ readableStreamDefaultControllerCallPullIfNeeded(controller);
+ }, (r) => {
+ readableStreamDefaultControllerError(controller, r);
+ });
+ }
+
+ /**
+ * @template R
+ * @param {ReadableStream<R>} stream
+ * @param {UnderlyingSource<R>} underlyingSource
+ * @param {UnderlyingSource<R>} underlyingSourceDict
+ * @param {number} highWaterMark
+ * @param {(chunk: R) => number} sizeAlgorithm
+ */
+ function setUpReadableStreamDefaultControllerFromUnderlyingSource(
+ stream,
+ underlyingSource,
+ underlyingSourceDict,
+ highWaterMark,
+ sizeAlgorithm,
+ ) {
+ const controller = new ReadableStreamDefaultController();
+ /** @type {(controller: ReadableStreamDefaultController<R>) => Promise<void>} */
+ let startAlgorithm = () => undefined;
+ /** @type {(controller: ReadableStreamDefaultController<R>) => Promise<void>} */
+ let pullAlgorithm = () => resolvePromiseWith(undefined);
+ /** @type {(reason?: any) => Promise<void>} */
+ let cancelAlgorithm = () => resolvePromiseWith(undefined);
+ if ("start" in underlyingSourceDict) {
+ startAlgorithm = () =>
+ underlyingSourceDict.start.call(underlyingSource, controller);
+ }
+ if ("pull" in underlyingSourceDict) {
+ pullAlgorithm = () =>
+ underlyingSourceDict.pull.call(underlyingSource, controller);
+ }
+ if ("cancel" in underlyingSourceDict) {
+ cancelAlgorithm = (reason) =>
+ underlyingSourceDict.cancel.call(underlyingSource, reason);
+ }
+ setUpReadableStreamDefaultController(
+ stream,
+ controller,
+ startAlgorithm,
+ pullAlgorithm,
+ cancelAlgorithm,
+ highWaterMark,
+ sizeAlgorithm,
+ );
+ }
+
+ /**
+ * @template R
+ * @param {ReadableStreamDefaultReader<R>} reader
+ * @param {ReadableStream<R>} stream
+ */
+ function setUpReadableStreamDefaultReader(reader, stream) {
+ if (isReadableStreamLocked(stream)) {
+ throw new TypeError("ReadableStream is locked.");
+ }
+ readableStreamReaderGenericInitialize(reader, stream);
+ reader[_readRequests] = [];
+ }
+
+ /**
+ * @template O
+ * @param {TransformStream<any, O>} stream
+ * @param {TransformStreamDefaultController<O>} controller
+ * @param {(chunk: O, controller: TransformStreamDefaultController<O>) => Promise<void>} transformAlgorithm
+ * @param {(controller: TransformStreamDefaultController<O>) => Promise<void>} flushAlgorithm
+ */
+ function setUpTransformStreamDefaultController(
+ stream,
+ controller,
+ transformAlgorithm,
+ flushAlgorithm,
+ ) {
+ assert(stream instanceof TransformStream);
+ assert(stream[_controller] === undefined);
+ controller[_stream] = stream;
+ stream[_controller] = controller;
+ controller[_transformAlgorithm] = transformAlgorithm;
+ controller[_flushAlgorithm] = flushAlgorithm;
+ }
+
+ /**
+ * @template I
+ * @template O
+ * @param {TransformStream<I, O>} stream
+ * @param {Transformer<I, O>} transformer
+ * @param {Transformer<I, O>} transformerDict
+ */
+ function setUpTransformStreamDefaultControllerFromTransformer(
+ stream,
+ transformer,
+ transformerDict,
+ ) {
+ /** @type {TransformStreamDefaultController<O>} */
+ const controller = new TransformStreamDefaultController();
+ /** @type {(chunk: O, controller: TransformStreamDefaultController<O>) => Promise<void>} */
+ let transformAlgorithm = (chunk) => {
+ try {
+ transformStreamDefaultControllerEnqueue(controller, chunk);
+ } catch (e) {
+ return Promise.reject(e);
+ }
+ return resolvePromiseWith(undefined);
+ };
+ /** @type {(controller: TransformStreamDefaultController<O>) => Promise<void>} */
+ let flushAlgorithm = () => resolvePromiseWith(undefined);
+ if ("transform" in transformerDict) {
+ transformAlgorithm = (chunk, controller) =>
+ transformerDict.transform.call(transformer, chunk, controller);
+ }
+ if ("flush" in transformerDict) {
+ flushAlgorithm = (controller) =>
+ transformerDict.flush.call(transformer, controller);
+ }
+ setUpTransformStreamDefaultController(
+ stream,
+ controller,
+ transformAlgorithm,
+ flushAlgorithm,
+ );
+ }
+
+ /**
+ * @template W
+ * @param {WritableStream<W>} stream
+ * @param {WritableStreamDefaultController<W>} controller
+ * @param {(controller: WritableStreamDefaultController<W>) => Promise<void>} startAlgorithm
+ * @param {(chunk: W, controller: WritableStreamDefaultController<W>) => Promise<void>} writeAlgorithm
+ * @param {() => Promise<void>} closeAlgorithm
+ * @param {(reason?: any) => Promise<void>} abortAlgorithm
+ * @param {number} highWaterMark
+ * @param {(chunk: W) => number} sizeAlgorithm
+ */
+ function setUpWritableStreamDefaultController(
+ stream,
+ controller,
+ startAlgorithm,
+ writeAlgorithm,
+ closeAlgorithm,
+ abortAlgorithm,
+ highWaterMark,
+ sizeAlgorithm,
+ ) {
+ assert(isWritableStream(stream));
+ assert(stream[_controller] === undefined);
+ controller[_stream] = stream;
+ stream[_controller] = controller;
+ resetQueue(controller);
+ controller[_started] = false;
+ controller[_strategySizeAlgorithm] = sizeAlgorithm;
+ controller[_strategyHWM] = highWaterMark;
+ controller[_writeAlgorithm] = writeAlgorithm;
+ controller[_closeAlgorithm] = closeAlgorithm;
+ controller[_abortAlgorithm] = abortAlgorithm;
+ const backpressure = writableStreamDefaultControllerGetBackpressure(
+ controller,
+ );
+ writableStreamUpdateBackpressure(stream, backpressure);
+ const startResult = startAlgorithm(controller);
+ const startPromise = resolvePromiseWith(startResult);
+ uponPromise(startPromise, () => {
+ assert(stream[_state] === "writable" || stream[_state] === "erroring");
+ controller[_started] = true;
+ writableStreamDefaultControllerAdvanceQueueIfNeeded(controller);
+ }, (r) => {
+ assert(stream[_state] === "writable" || stream[_state] === "erroring");
+ controller[_started] = true;
+ writableStreamDealWithRejection(stream, r);
+ });
+ }
+
+ /**
+ * @template W
+ * @param {WritableStream<W>} stream
+ * @param {UnderlyingSink<W>} underlyingSink
+ * @param {UnderlyingSink<W>} underlyingSinkDict
+ * @param {number} highWaterMark
+ * @param {(chunk: W) => number} sizeAlgorithm
+ */
+ function setUpWritableStreamDefaultControllerFromUnderlyingSink(
+ stream,
+ underlyingSink,
+ underlyingSinkDict,
+ highWaterMark,
+ sizeAlgorithm,
+ ) {
+ const controller = new WritableStreamDefaultController();
+ let startAlgorithm = () => undefined;
+ /** @type {(chunk: W) => Promise<void>} */
+ let writeAlgorithm = () => resolvePromiseWith(undefined);
+ let closeAlgorithm = () => resolvePromiseWith(undefined);
+ /** @type {(reason?: any) => Promise<void>} */
+ let abortAlgorithm = () => resolvePromiseWith(undefined);
+ if ("start" in underlyingSinkDict) {
+ startAlgorithm = () =>
+ underlyingSinkDict.start.call(underlyingSink, controller);
+ }
+ if ("write" in underlyingSinkDict) {
+ writeAlgorithm = (chunk) =>
+ underlyingSinkDict.write.call(underlyingSink, chunk, controller);
+ }
+ if ("close" in underlyingSinkDict) {
+ closeAlgorithm = () => underlyingSinkDict.close.call(underlyingSink);
+ }
+ if ("abort" in underlyingSinkDict) {
+ abortAlgorithm = (reason) =>
+ underlyingSinkDict.abort.call(underlyingSink, reason);
+ }
+ setUpWritableStreamDefaultController(
+ stream,
+ controller,
+ startAlgorithm,
+ writeAlgorithm,
+ closeAlgorithm,
+ abortAlgorithm,
+ highWaterMark,
+ sizeAlgorithm,
+ );
+ }
+
+ /**
+ * @template W
+ * @param {WritableStreamDefaultWriter<W>} writer
+ * @param {WritableStream<W>} stream
+ */
+ function setUpWritableStreamDefaultWriter(writer, stream) {
+ if (isWritableStreamLocked(stream) === true) {
+ throw new TypeError("The stream is already locked.");
+ }
+ writer[_stream] = stream;
+ stream[_writer] = writer;
+ const state = stream[_state];
+ if (state === "writable") {
+ if (
+ writableStreamCloseQueuedOrInFlight(stream) === false &&
+ stream[_backpressure] === true
+ ) {
+ writer[_readyPromise] = new Deferred();
+ } else {
+ writer[_readyPromise] = new Deferred();
+ writer[_readyPromise].resolve(undefined);
+ }
+ writer[_closedPromise] = new Deferred();
+ } else if (state === "erroring") {
+ writer[_readyPromise] = new Deferred();
+ writer[_readyPromise].reject(stream[_storedError]);
+ setPromiseIsHandledToTrue(writer[_readyPromise].promise);
+ writer[_closedPromise] = new Deferred();
+ } else if (state === "closed") {
+ writer[_readyPromise] = new Deferred();
+ writer[_readyPromise].resolve(undefined);
+ writer[_closedPromise] = new Deferred();
+ writer[_closedPromise].resolve(undefined);
+ } else {
+ assert(state === "errored");
+ const storedError = stream[_storedError];
+ writer[_readyPromise] = new Deferred();
+ writer[_readyPromise].reject(storedError);
+ setPromiseIsHandledToTrue(writer[_readyPromise].promise);
+ writer[_closedPromise] = new Deferred();
+ writer[_closedPromise].reject(storedError);
+ setPromiseIsHandledToTrue(writer[_closedPromise].promise);
+ }
+ }
+
+ /** @param {TransformStreamDefaultController} controller */
+ function transformStreamDefaultControllerClearAlgorithms(controller) {
+ controller[_transformAlgorithm] = undefined;
+ controller[_flushAlgorithm] = undefined;
+ }
+
+ /**
+ * @template O
+ * @param {TransformStreamDefaultController<O>} controller
+ * @param {O} chunk
+ */
+ function transformStreamDefaultControllerEnqueue(controller, chunk) {
+ const stream = controller[_stream];
+ const readableController = stream[_readable][_controller];
+ if (
+ readableStreamDefaultControllerCanCloseOrEnqueue(
+ /** @type {ReadableStreamDefaultController<O>} */ (readableController),
+ ) === false
+ ) {
+ throw new TypeError("Readable stream is unavailable.");
+ }
+ try {
+ readableStreamDefaultControllerEnqueue(
+ /** @type {ReadableStreamDefaultController<O>} */ (readableController),
+ chunk,
+ );
+ } catch (e) {
+ transformStreamErrorWritableAndUnblockWrite(stream, e);
+ throw stream[_readable][_storedError];
+ }
+ const backpressure = readableStreamDefaultcontrollerHasBackpressure(
+ /** @type {ReadableStreamDefaultController<O>} */ (readableController),
+ );
+ if (backpressure !== stream[_backpressure]) {
+ assert(backpressure === true);
+ transformStreamSetBackpressure(stream, true);
+ }
+ }
+
+ /**
+ * @param {TransformStreamDefaultController} controller
+ * @param {any=} e
+ */
+ function transformStreamDefaultControllerError(controller, e) {
+ transformStreamError(controller[_stream], e);
+ }
+
+ /**
+ * @template O
+ * @param {TransformStreamDefaultController<O>} controller
+ * @param {any} chunk
+ * @returns {Promise<void>}
+ */
+ function transformStreamDefaultControllerPerformTransform(controller, chunk) {
+ const transformPromise = controller[_transformAlgorithm](chunk, controller);
+ return transformPromiseWith(transformPromise, undefined, (r) => {
+ transformStreamError(controller[_stream], r);
+ throw r;
+ });
+ }
+
+ /** @param {TransformStreamDefaultController} controller */
+ function transformStreamDefaultControllerTerminate(controller) {
+ const stream = controller[_stream];
+ const readableController = stream[_readable][_controller];
+ readableStreamDefaultControllerClose(
+ /** @type {ReadableStreamDefaultController} */ (readableController),
+ );
+ const error = new TypeError("The stream has been terminated.");
+ transformStreamErrorWritableAndUnblockWrite(stream, error);
+ }
+
+ /**
+ * @param {TransformStream} stream
+ * @param {any=} reason
+ * @returns {Promise<void>}
+ */
+ function transformStreamDefaultSinkAbortAlgorithm(stream, reason) {
+ transformStreamError(stream, reason);
+ return resolvePromiseWith(undefined);
+ }
+
+ /**
+ * @template I
+ * @template O
+ * @param {TransformStream<I, O>} stream
+ * @returns {Promise<void>}
+ */
+ function transformStreamDefaultSinkCloseAlgorithm(stream) {
+ const readable = stream[_readable];
+ const controller = stream[_controller];
+ const flushPromise = controller[_flushAlgorithm](controller);
+ transformStreamDefaultControllerClearAlgorithms(controller);
+ return transformPromiseWith(flushPromise, () => {
+ if (readable[_state] === "errored") {
+ throw readable[_storedError];
+ }
+ readableStreamDefaultControllerClose(
+ /** @type {ReadableStreamDefaultController} */ (readable[_controller]),
+ );
+ }, (r) => {
+ transformStreamError(stream, r);
+ throw readable[_storedError];
+ });
+ }
+
+ /**
+ * @template I
+ * @template O
+ * @param {TransformStream<I, O>} stream
+ * @param {I} chunk
+ * @returns {Promise<void>}
+ */
+ function transformStreamDefaultSinkWriteAlgorithm(stream, chunk) {
+ assert(stream[_writable][_state] === "writable");
+ const controller = stream[_controller];
+ if (stream[_backpressure] === true) {
+ const backpressureChangePromise = stream[_backpressureChangePromise];
+ assert(backpressureChangePromise !== undefined);
+ return transformPromiseWith(backpressureChangePromise.promise, () => {
+ const writable = stream[_writable];
+ const state = writable[_state];
+ if (state === "erroring") {
+ throw writable[_storedError];
+ }
+ assert(state === "writable");
+ return transformStreamDefaultControllerPerformTransform(
+ controller,
+ chunk,
+ );
+ });
+ }
+ return transformStreamDefaultControllerPerformTransform(controller, chunk);
+ }
+
+ /**
+ * @param {TransformStream} stream
+ * @returns {Promise<void>}
+ */
+ function transformStreamDefaultSourcePullAlgorithm(stream) {
+ assert(stream[_backpressure] === true);
+ assert(stream[_backpressureChangePromise] !== undefined);
+ transformStreamSetBackpressure(stream, false);
+ return stream[_backpressureChangePromise].promise;
+ }
+
+ /**
+ * @param {TransformStream} stream
+ * @param {any=} e
+ */
+ function transformStreamError(stream, e) {
+ readableStreamDefaultControllerError(
+ /** @type {ReadableStreamDefaultController} */ (stream[_readable][
+ _controller
+ ]),
+ e,
+ );
+ transformStreamErrorWritableAndUnblockWrite(stream, e);
+ }
+
+ /**
+ * @param {TransformStream} stream
+ * @param {any=} e
+ */
+ function transformStreamErrorWritableAndUnblockWrite(stream, e) {
+ transformStreamDefaultControllerClearAlgorithms(stream[_controller]);
+ writableStreamDefaultControllerErrorIfNeeded(
+ stream[_writable][_controller],
+ e,
+ );
+ if (stream[_backpressure] === true) {
+ transformStreamSetBackpressure(stream, false);
+ }
+ }
+
+ /**
+ * @param {TransformStream} stream
+ * @param {boolean} backpressure
+ */
+ function transformStreamSetBackpressure(stream, backpressure) {
+ assert(stream[_backpressure] !== backpressure);
+ if (stream[_backpressureChangePromise] !== undefined) {
+ stream[_backpressureChangePromise].resolve(undefined);
+ }
+ stream[_backpressureChangePromise] = new Deferred();
+ stream[_backpressure] = backpressure;
+ }
+
+ /**
+ * @param {WritableStream} stream
+ * @param {any=} reason
+ * @returns {Promise<void>}
+ */
+ function writableStreamAbort(stream, reason) {
+ const state = stream[_state];
+ if (state === "closed" || state === "errored") {
+ return resolvePromiseWith(undefined);
+ }
+ if (stream[_pendingAbortRequest] !== undefined) {
+ return stream[_pendingAbortRequest].deferred.promise;
+ }
+ assert(state === "writable" || state === "erroring");
+ let wasAlreadyErroring = false;
+ if (state === "erroring") {
+ wasAlreadyErroring = true;
+ reason = undefined;
+ }
+ /** @type {Deferred<void>} */
+ const deferred = new Deferred();
+ stream[_pendingAbortRequest] = {
+ deferred,
+ reason,
+ wasAlreadyErroring,
+ };
+ if (wasAlreadyErroring === false) {
+ writableStreamStartErroring(stream, reason);
+ }
+ return deferred.promise;
+ }
+
+ /**
+ * @param {WritableStream} stream
+ * @returns {Promise<void>}
+ */
+ function writableStreamAddWriteRequest(stream) {
+ assert(isWritableStreamLocked(stream) === true);
+ assert(stream[_state] === "writable");
+ /** @type {Deferred<void>} */
+ const deferred = new Deferred();
+ stream[_writeRequests].push(deferred);
+ return deferred.promise;
+ }
+
+ /**
+ * @param {WritableStream} stream
+ * @returns {Promise<void>}
+ */
+ function writableStreamClose(stream) {
+ const state = stream[_state];
+ if (state === "closed" || state === "errored") {
+ return Promise.reject(
+ new TypeError("Writable stream is closed or errored."),
+ );
+ }
+ assert(state === "writable" || state === "erroring");
+ assert(writableStreamCloseQueuedOrInFlight(stream) === false);
+ /** @type {Deferred<void>} */
+ const deferred = new Deferred();
+ stream[_closeRequest] = deferred;
+ const writer = stream[_writer];
+ if (
+ writer !== undefined && stream[_backpressure] === true &&
+ state === "writable"
+ ) {
+ writer[_readyPromise].resolve(undefined);
+ }
+ writableStreamDefaultControllerClose(stream[_controller]);
+ return deferred.promise;
+ }
+
+ /**
+ * @param {WritableStream} stream
+ * @returns {boolean}
+ */
+ function writableStreamCloseQueuedOrInFlight(stream) {
+ if (
+ stream[_closeRequest] === undefined &&
+ stream[_inFlightCloseRequest] === undefined
+ ) {
+ return false;
+ }
+ return true;
+ }
+
+ /**
+ * @param {WritableStream} stream
+ * @param {any=} error
+ */
+ function writableStreamDealWithRejection(stream, error) {
+ const state = stream[_state];
+ if (state === "writable") {
+ writableStreamStartErroring(stream, error);
+ return;
+ }
+ assert(state === "erroring");
+ writableStreamFinishErroring(stream);
+ }
+
+ /**
+ * @template W
+ * @param {WritableStreamDefaultController<W>} controller
+ */
+ function writableStreamDefaultControllerAdvanceQueueIfNeeded(controller) {
+ const stream = controller[_stream];
+ if (controller[_started] === false) {
+ return;
+ }
+ if (stream[_inFlightWriteRequest] !== undefined) {
+ return;
+ }
+ const state = stream[_state];
+ assert(state !== "closed" && state !== "errored");
+ if (state === "erroring") {
+ writableStreamFinishErroring(stream);
+ return;
+ }
+ if (controller[_queue].length === 0) {
+ return;
+ }
+ const value = peekQueueValue(controller);
+ if (value === _close) {
+ writableStreamDefaultControllerProcessClose(controller);
+ } else {
+ writableStreamDefaultControllerProcessWrite(controller, value);
+ }
+ }
+
+ function writableStreamDefaultControllerClearAlgorithms(controller) {
+ controller[_writeAlgorithm] = undefined;
+ controller[_closeAlgorithm] = undefined;
+ controller[_abortAlgorithm] = undefined;
+ controller[_strategySizeAlgorithm] = undefined;
+ }
+
+ /** @param {WritableStreamDefaultController} controller */
+ function writableStreamDefaultControllerClose(controller) {
+ enqueueValueWithSize(controller, _close, 0);
+ writableStreamDefaultControllerAdvanceQueueIfNeeded(controller);
+ }
+
+ /**
+ * @param {WritableStreamDefaultController} controller
+ * @param {any} error
+ */
+ function writableStreamDefaultControllerError(controller, error) {
+ const stream = controller[_stream];
+ assert(stream[_state] === "writable");
+ writableStreamDefaultControllerClearAlgorithms(controller);
+ writableStreamStartErroring(stream, error);
+ }
+
+ /**
+ * @param {WritableStreamDefaultController} controller
+ * @param {any} error
+ */
+ function writableStreamDefaultControllerErrorIfNeeded(controller, error) {
+ if (controller[_stream][_state] === "writable") {
+ writableStreamDefaultControllerError(controller, error);
+ }
+ }
+
+ /**
+ * @param {WritableStreamDefaultController} controller
+ * @returns {boolean}
+ */
+ function writableStreamDefaultControllerGetBackpressure(controller) {
+ const desiredSize = writableStreamDefaultControllerGetDesiredSize(
+ controller,
+ );
+ return desiredSize <= 0;
+ }
+
+ /**
+ * @template W
+ * @param {WritableStreamDefaultController<W>} controller
+ * @param {W} chunk
+ * @returns {number}
+ */
+ function writableStreamDefaultControllerGetChunkSize(controller, chunk) {
+ let value;
+ try {
+ value = controller[_strategySizeAlgorithm](chunk);
+ } catch (e) {
+ writableStreamDefaultControllerErrorIfNeeded(controller, e);
+ return 1;
+ }
+ return value;
+ }
+
+ /**
+ * @param {WritableStreamDefaultController} controller
+ * @returns {number}
+ */
+ function writableStreamDefaultControllerGetDesiredSize(controller) {
+ return controller[_strategyHWM] - controller[_queueTotalSize];
+ }
+
+ /** @param {WritableStreamDefaultController} controller */
+ function writableStreamDefaultControllerProcessClose(controller) {
+ const stream = controller[_stream];
+ writableStreamMarkCloseRequestInFlight(stream);
+ dequeueValue(controller);
+ assert(controller[_queue].length === 0);
+ const sinkClosePromise = controller[_closeAlgorithm]();
+ writableStreamDefaultControllerClearAlgorithms(controller);
+ uponPromise(sinkClosePromise, () => {
+ writableStreamFinishInFlightClose(stream);
+ }, (reason) => {
+ writableStreamFinishInFlightCloseWithError(stream, reason);
+ });
+ }
+
+ /**
+ * @template W
+ * @param {WritableStreamDefaultController<W>} controller
+ * @param {W} chunk
+ */
+ function writableStreamDefaultControllerProcessWrite(controller, chunk) {
+ const stream = controller[_stream];
+ writableStreamMarkFirstWriteRequestInFlight(stream);
+ const sinkWritePromise = controller[_writeAlgorithm](chunk, controller);
+ uponPromise(sinkWritePromise, () => {
+ writableStreamFinishInFlightWrite(stream);
+ const state = stream[_state];
+ assert(state === "writable" || state === "erroring");
+ dequeueValue(controller);
+ if (
+ writableStreamCloseQueuedOrInFlight(stream) === false &&
+ state === "writable"
+ ) {
+ const backpressure = writableStreamDefaultControllerGetBackpressure(
+ controller,
+ );
+ writableStreamUpdateBackpressure(stream, backpressure);
+ }
+ writableStreamDefaultControllerAdvanceQueueIfNeeded(controller);
+ }, (reason) => {
+ if (stream[_state] === "writable") {
+ writableStreamDefaultControllerClearAlgorithms(controller);
+ }
+ writableStreamFinishInFlightWriteWithError(stream, reason);
+ });
+ }
+
+ /**
+ * @template W
+ * @param {WritableStreamDefaultController<W>} controller
+ * @param {W} chunk
+ * @param {number} chunkSize
+ */
+ function writableStreamDefaultControllerWrite(controller, chunk, chunkSize) {
+ try {
+ enqueueValueWithSize(controller, chunk, chunkSize);
+ } catch (e) {
+ writableStreamDefaultControllerErrorIfNeeded(controller, e);
+ return;
+ }
+ const stream = controller[_stream];
+ if (
+ writableStreamCloseQueuedOrInFlight(stream) === false &&
+ stream[_state] === "writable"
+ ) {
+ const backpressure = writableStreamDefaultControllerGetBackpressure(
+ controller,
+ );
+ writableStreamUpdateBackpressure(stream, backpressure);
+ }
+ writableStreamDefaultControllerAdvanceQueueIfNeeded(controller);
+ }
+
+ /**
+ * @param {WritableStreamDefaultWriter} writer
+ * @param {any=} reason
+ * @returns {Promise<void>}
+ */
+ function writableStreamDefaultWriterAbort(writer, reason) {
+ const stream = writer[_stream];
+ assert(stream !== undefined);
+ return writableStreamAbort(stream, reason);
+ }
+
+ /**
+ * @param {WritableStreamDefaultWriter} writer
+ * @returns {Promise<void>}
+ */
+ function writableStreamDefaultWriterClose(writer) {
+ const stream = writer[_stream];
+ assert(stream !== undefined);
+ return writableStreamClose(stream);
+ }
+
+ /**
+ * @param {WritableStreamDefaultWriter} writer
+ * @returns {Promise<void>}
+ */
+ function writableStreamDefaultWriterCloseWithErrorPropagation(writer) {
+ const stream = writer[_stream];
+ assert(stream !== undefined);
+ const state = stream[_state];
+ if (
+ writableStreamCloseQueuedOrInFlight(stream) === true || state === "closed"
+ ) {
+ return resolvePromiseWith(undefined);
+ }
+ if (state === "errored") {
+ return Promise.reject(stream[_storedError]);
+ }
+ assert(state === "writable" || state === "erroring");
+ return writableStreamDefaultWriterClose(writer);
+ }
+
+ /**
+ * @param {WritableStreamDefaultWriter} writer
+ * @param {any=} error
+ */
+ function writableStreamDefaultWriterEnsureClosedPromiseRejected(
+ writer,
+ error,
+ ) {
+ if (writer[_closedPromise].state === "pending") {
+ writer[_closedPromise].reject(error);
+ } else {
+ writer[_closedPromise] = new Deferred();
+ writer[_closedPromise].reject(error);
+ }
+ setPromiseIsHandledToTrue(writer[_closedPromise].promise);
+ }
+
+ /**
+ * @param {WritableStreamDefaultWriter} writer
+ * @param {any=} error
+ */
+ function writableStreamDefaultWriterEnsureReadyPromiseRejected(
+ writer,
+ error,
+ ) {
+ if (writer[_readyPromise].state === "pending") {
+ writer[_readyPromise].reject(error);
+ } else {
+ writer[_readyPromise] = new Deferred();
+ writer[_readyPromise].reject(error);
+ }
+ setPromiseIsHandledToTrue(writer[_readyPromise].promise);
+ }
+
+ /**
+ * @param {WritableStreamDefaultWriter} writer
+ * @returns {number | null}
+ */
+ function writableStreamDefaultWriterGetDesiredSize(writer) {
+ const stream = writer[_stream];
+ const state = stream[_state];
+ if (state === "errored" || state === "erroring") {
+ return null;
+ }
+ if (state === "closed") {
+ return 0;
+ }
+ return writableStreamDefaultControllerGetDesiredSize(stream[_controller]);
+ }
+
+ /** @param {WritableStreamDefaultWriter} writer */
+ function writableStreamDefaultWriterRelease(writer) {
+ const stream = writer[_stream];
+ assert(stream !== undefined);
+ assert(stream[_writer] === writer);
+ const releasedError = new TypeError(
+ "The writer has already been released.",
+ );
+ writableStreamDefaultWriterEnsureReadyPromiseRejected(
+ writer,
+ releasedError,
+ );
+ writableStreamDefaultWriterEnsureClosedPromiseRejected(
+ writer,
+ releasedError,
+ );
+ stream[_writer] = undefined;
+ writer[_stream] = undefined;
+ }
+
+ /**
+ * @template W
+ * @param {WritableStreamDefaultWriter<W>} writer
+ * @param {W} chunk
+ * @returns {Promise<void>}
+ */
+ function writableStreamDefaultWriterWrite(writer, chunk) {
+ const stream = writer[_stream];
+ assert(stream !== undefined);
+ const controller = stream[_controller];
+ const chunkSize = writableStreamDefaultControllerGetChunkSize(
+ controller,
+ chunk,
+ );
+ if (stream !== writer[_stream]) {
+ return Promise.reject(
+ new TypeError("The writer is no longer attached to its original stream."),
+ );
+ }
+ const state = stream[_state];
+ if (state === "errored") {
+ return Promise.reject(stream[_storedError]);
+ }
+ if (
+ writableStreamCloseQueuedOrInFlight(stream) === true || state === "closed"
+ ) {
+ return Promise.reject(
+ new TypeError("The stream is closing or is closed."),
+ );
+ }
+ if (state === "erroring") {
+ return Promise.reject(stream[_storedError]);
+ }
+ assert(state === "writable");
+ const promise = writableStreamAddWriteRequest(stream);
+ writableStreamDefaultControllerWrite(controller, chunk, chunkSize);
+ return promise;
+ }
+
+ /** @param {WritableStream} stream */
+ function writableStreamFinishErroring(stream) {
+ assert(stream[_state] === "erroring");
+ assert(writableStreamHasOperationMarkedInFlight(stream) === false);
+ stream[_state] = "errored";
+ stream[_controller][_errorSteps]();
+ const storedError = stream[_storedError];
+ for (const writeRequest of stream[_writeRequests]) {
+ writeRequest.reject(storedError);
+ }
+ stream[_writeRequests] = [];
+ if (stream[_pendingAbortRequest] === undefined) {
+ writableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
+ return;
+ }
+ const abortRequest = stream[_pendingAbortRequest];
+ stream[_pendingAbortRequest] = undefined;
+ if (abortRequest.wasAlreadyErroring === true) {
+ abortRequest.deferred.reject(storedError);
+ writableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
+ return;
+ }
+ const promise = stream[_controller][_abortSteps](abortRequest.reason);
+ uponPromise(promise, () => {
+ abortRequest.deferred.resolve(undefined);
+ writableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
+ }, (reason) => {
+ abortRequest.deferred.reject(reason);
+ writableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
+ });
+ }
+
+ /** @param {WritableStream} stream */
+ function writableStreamFinishInFlightClose(stream) {
+ assert(stream[_inFlightCloseRequest] !== undefined);
+ stream[_inFlightCloseRequest].resolve(undefined);
+ stream[_inFlightCloseRequest] = undefined;
+ const state = stream[_state];
+ assert(state === "writable" || state === "erroring");
+ if (state === "erroring") {
+ stream[_storedError] = undefined;
+ if (stream[_pendingAbortRequest] !== undefined) {
+ stream[_pendingAbortRequest].deferred.resolve(undefined);
+ stream[_pendingAbortRequest] = undefined;
+ }
+ }
+ stream[_state] = "closed";
+ const writer = stream[_writer];
+ if (writer !== undefined) {
+ writer[_closedPromise].resolve(undefined);
+ }
+ assert(stream[_pendingAbortRequest] === undefined);
+ assert(stream[_storedError] === undefined);
+ }
+
+ /**
+ * @param {WritableStream} stream
+ * @param {any=} error
+ */
+ function writableStreamFinishInFlightCloseWithError(stream, error) {
+ assert(stream[_inFlightCloseRequest] !== undefined);
+ stream[_inFlightCloseRequest].reject(error);
+ stream[_inFlightCloseRequest] = undefined;
+ assert(stream[_state] === "writable" || stream[_state] === "erroring");
+ if (stream[_pendingAbortRequest] !== undefined) {
+ stream[_pendingAbortRequest].deferred.reject(error);
+ stream[_pendingAbortRequest] = undefined;
+ }
+ writableStreamDealWithRejection(stream, error);
+ }
+
+ /** @param {WritableStream} stream */
+ function writableStreamFinishInFlightWrite(stream) {
+ assert(stream[_inFlightWriteRequest] !== undefined);
+ stream[_inFlightWriteRequest].resolve(undefined);
+ stream[_inFlightWriteRequest] = undefined;
+ }
+
+ /**
+ * @param {WritableStream} stream
+ * @param {any=} error
+ */
+ function writableStreamFinishInFlightWriteWithError(stream, error) {
+ assert(stream[_inFlightWriteRequest] !== undefined);
+ stream[_inFlightWriteRequest].reject(error);
+ stream[_inFlightWriteRequest] = undefined;
+ assert(stream[_state] === "writable" || stream[_state] === "erroring");
+ writableStreamDealWithRejection(stream, error);
+ }
+
+ /**
+ * @param {WritableStream} stream
+ * @returns {boolean}
+ */
+ function writableStreamHasOperationMarkedInFlight(stream) {
+ if (
+ stream[_inFlightWriteRequest] === undefined &&
+ stream[_controller][_inFlightCloseRequest] === undefined
+ ) {
+ return false;
+ }
+ return true;
+ }
+
+ /** @param {WritableStream} stream */
+ function writableStreamMarkCloseRequestInFlight(stream) {
+ assert(stream[_inFlightCloseRequest] === undefined);
+ assert(stream[_closeRequest] !== undefined);
+ stream[_inFlightCloseRequest] = stream[_closeRequest];
+ stream[_closeRequest] = undefined;
+ }
+
+ /**
+ * @template W
+ * @param {WritableStream<W>} stream
+ */
+ function writableStreamMarkFirstWriteRequestInFlight(stream) {
+ assert(stream[_inFlightWriteRequest] === undefined);
+ assert(stream[_writeRequests].length);
+ const writeRequest = stream[_writeRequests].shift();
+ stream[_inFlightWriteRequest] = writeRequest;
+ }
+
+ /** @param {WritableStream} stream */
+ function writableStreamRejectCloseAndClosedPromiseIfNeeded(stream) {
+ assert(stream[_state] === "errored");
+ if (stream[_closeRequest] !== undefined) {
+ assert(stream[_inFlightCloseRequest] === undefined);
+ stream[_closeRequest].reject(stream[_storedError]);
+ stream[_closeRequest] = undefined;
+ }
+ const writer = stream[_writer];
+ if (writer !== undefined) {
+ writer[_closedPromise].reject(stream[_storedError]);
+ setPromiseIsHandledToTrue(writer[_closedPromise].promise);
+ }
+ }
+
+ /**
+ * @param {WritableStream} stream
+ * @param {any=} reason
+ */
+ function writableStreamStartErroring(stream, reason) {
+ assert(stream[_storedError] === undefined);
+ assert(stream[_state] === "writable");
+ const controller = stream[_controller];
+ assert(controller);
+ stream[_state] = "erroring";
+ stream[_storedError] = reason;
+ const writer = stream[_writer];
+ if (writer) {
+ writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, reason);
+ }
+ if (
+ writableStreamHasOperationMarkedInFlight(stream) === false &&
+ controller[_started] === true
+ ) {
+ writableStreamFinishErroring(stream);
+ }
+ }
+
+ /**
+ * @param {WritableStream} stream
+ * @param {boolean} backpressure
+ */
+ function writableStreamUpdateBackpressure(stream, backpressure) {
+ assert(stream[_state] === "writable");
+ assert(writableStreamCloseQueuedOrInFlight(stream) === false);
+ const writer = stream[_writer];
+ if (writer !== undefined && backpressure !== stream[_backpressure]) {
+ if (backpressure === true) {
+ writer[_readyPromise] = new Deferred();
+ } else {
+ assert(backpressure === false);
+ writer[_readyPromise].resolve(undefined);
+ }
+ }
+ stream[_backpressure] = backpressure;
+ }
+
+ /**
+ * @template T
+ * @param {T} value
+ * @param {boolean} done
+ * @returns {IteratorResult<T>}
+ */
+ function createIteratorResult(value, done) {
+ const result = Object.create(null);
+ Object.defineProperties(result, {
+ value: { value, writable: true, enumerable: true, configurable: true },
+ done: {
+ value: done,
+ writable: true,
+ enumerable: true,
+ configurable: true,
+ },
+ });
+ return result;
+ }
+
+ /** @type {AsyncIterator<unknown, unknown>} */
+ const asyncIteratorPrototype = Object.getPrototypeOf(
+ Object.getPrototypeOf(async function* () {}).prototype,
+ );
+
+ /** @type {AsyncIterator<unknown>} */
+ const readableStreamAsyncIteratorPrototype = Object.setPrototypeOf({
+ /** @returns {Promise<IteratorResult<unknown>>} */
+ next() {
+ /** @type {ReadableStreamDefaultReader} */
+ const reader = this[_reader];
+ if (reader[_stream] === undefined) {
+ return Promise.reject(
+ new TypeError(
+ "Cannot get the next iteration result once the reader has been released.",
+ ),
+ );
+ }
+ /** @type {Deferred<IteratorResult<any>>} */
+ const promise = new Deferred();
+ /** @type {ReadRequest} */
+ const readRequest = {
+ chunkSteps(chunk) {
+ promise.resolve(createIteratorResult(chunk, false));
+ },
+ closeSteps() {
+ readableStreamReaderGenericRelease(reader);
+ promise.resolve(createIteratorResult(undefined, true));
+ },
+ errorSteps(e) {
+ readableStreamReaderGenericRelease(reader);
+ promise.reject(e);
+ },
+ };
+ readableStreamDefaultReaderRead(reader, readRequest);
+ return promise.promise;
+ },
+ /**
+ * @param {unknown} arg
+ * @returns {Promise<IteratorResult<unknown>>}
+ */
+ async return(arg) {
+ /** @type {ReadableStreamDefaultReader} */
+ const reader = this[_reader];
+ if (reader[_stream] === undefined) {
+ return createIteratorResult(undefined, true);
+ }
+ assert(reader[_readRequests].length === 0);
+ if (this[_preventCancel] === false) {
+ const result = readableStreamReaderGenericCancel(reader, arg);
+ readableStreamReaderGenericRelease(reader);
+ await result;
+ return createIteratorResult(arg, true);
+ }
+ readableStreamReaderGenericRelease(reader);
+ return createIteratorResult(undefined, true);
+ },
+ }, asyncIteratorPrototype);
+
+ class ByteLengthQueuingStrategy {
+ /** @type {number} */
+ highWaterMark;
+
+ /** @param {{ highWaterMark: number }} init */
+ constructor(init) {
+ if (
+ typeof init !== "object" || init === null || !("highWaterMark" in init)
+ ) {
+ throw new TypeError(
+ "init must be an object that contains a property named highWaterMark",
+ );
+ }
+ const { highWaterMark } = init;
+ this[_globalObject] = window;
+ this.highWaterMark = Number(highWaterMark);
+ }
+
+ /** @returns {(chunk: ArrayBufferView) => number} */
+ get size() {
+ initializeByteLengthSizeFunction(this[_globalObject]);
+ return byteSizeFunctionWeakMap.get(this[_globalObject]);
+ }
+ }
+
+ /** @type {WeakMap<typeof globalThis, (chunk: ArrayBufferView) => number>} */
+ const byteSizeFunctionWeakMap = new WeakMap();
+
+ function initializeByteLengthSizeFunction(globalObject) {
+ if (byteSizeFunctionWeakMap.has(globalObject)) {
+ return;
+ }
+ byteSizeFunctionWeakMap.set(globalObject, function size(chunk) {
+ return chunk.byteLength;
+ });
+ }
+
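+ // A minimal usage sketch of the strategy above (the chunk value is
+ // illustrative): size() simply reports a chunk's byteLength, so the
+ // highWaterMark is measured in bytes.
+ //
+ //   const strategy = new ByteLengthQueuingStrategy({ highWaterMark: 16 });
+ //   strategy.size(new Uint8Array(4)); // => 4
+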
+ class CountQueuingStrategy {
+ /** @type {number} */
+ highWaterMark;
+
+ /** @param {{ highWaterMark: number }} init */
+ constructor(init) {
+ if (
+ typeof init !== "object" || init === null || !("highWaterMark" in init)
+ ) {
+ throw new TypeError(
+ "init must be an object that contains a property named highWaterMark",
+ );
+ }
+ const { highWaterMark } = init;
+ this[_globalObject] = window;
+ this.highWaterMark = Number(highWaterMark);
+ }
+
+ /** @returns {(chunk: any) => 1} */
+ get size() {
+ initializeCountSizeFunction(this[_globalObject]);
+ return countSizeFunctionWeakMap.get(this[_globalObject]);
+ }
+ }
+
+ /** @type {WeakMap<typeof globalThis, () => 1>} */
+ const countSizeFunctionWeakMap = new WeakMap();
+
+ /** @param {typeof globalThis} globalObject */
+ function initializeCountSizeFunction(globalObject) {
+ if (countSizeFunctionWeakMap.has(globalObject)) {
+ return;
+ }
+ countSizeFunctionWeakMap.set(globalObject, function size() {
+ return 1;
+ });
+ }
+
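+ // A minimal usage sketch of the strategy above (the chunk is illustrative):
+ // size() always reports 1, so the highWaterMark counts chunks rather than
+ // bytes.
+ //
+ //   const strategy = new CountQueuingStrategy({ highWaterMark: 2 });
+ //   strategy.size({ any: "chunk" }); // => 1
+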
+ /** @template R */
+ class ReadableStream {
+ /** @type {ReadableStreamDefaultController | ReadableByteStreamController} */
+ [_controller];
+ /** @type {boolean} */
+ [_detached];
+ /** @type {boolean} */
+ [_disturbed];
+ /** @type {ReadableStreamDefaultReader | undefined} */
+ [_reader];
+ /** @type {"readable" | "closed" | "errored"} */
+ [_state];
+ /** @type {any} */
+ [_storedError];
+
+ /**
+ * @param {UnderlyingSource<R>=} underlyingSource
+ * @param {QueuingStrategy<R>=} strategy
+ */
+ constructor(underlyingSource, strategy = {}) {
+ const underlyingSourceDict = convertUnderlyingSource(underlyingSource);
+ initializeReadableStream(this);
+ if (underlyingSourceDict.type === "bytes") {
+ if (strategy.size !== undefined) {
+ throw new RangeError(
+ `When underlying source is "bytes", strategy.size must be undefined.`,
+ );
+ }
+ const highWaterMark = extractHighWaterMark(strategy, 0);
+ setUpReadableByteStreamControllerFromUnderlyingSource(
+ // @ts-ignore cannot easily assert this is ReadableStream<ArrayBuffer>
+ this,
+ underlyingSource,
+ underlyingSourceDict,
+ highWaterMark,
+ );
+ } else {
+ assert(!("type" in underlyingSourceDict));
+ const sizeAlgorithm = extractSizeAlgorithm(strategy);
+ const highWaterMark = extractHighWaterMark(strategy, 1);
+ setUpReadableStreamDefaultControllerFromUnderlyingSource(
+ this,
+ underlyingSource,
+ underlyingSourceDict,
+ highWaterMark,
+ sizeAlgorithm,
+ );
+ }
+ }
+
+ /** @returns {boolean} */
+ get locked() {
+ return isReadableStreamLocked(this);
+ }
+
+ /**
+ * @param {any=} reason
+ * @returns {Promise<void>}
+ */
+ cancel(reason) {
+ if (isReadableStreamLocked(this)) {
+ return Promise.reject(
+ new TypeError("Cannot cancel a locked ReadableStream."),
+ );
+ }
+ return readableStreamCancel(this, reason);
+ }
+
+ /**
+ * @deprecated TODO(@kitsonk): Remove in Deno 1.8
+ * @param {ReadableStreamIteratorOptions=} options
+ * @returns {AsyncIterableIterator<R>}
+ */
+ getIterator(options = {}) {
+ return this[Symbol.asyncIterator](options);
+ }
+
+ /**
+ * @param {ReadableStreamGetReaderOptions=} options
+ * @returns {ReadableStreamDefaultReader<R>}
+ */
+ getReader(options = {}) {
+ if (typeof options !== "object") {
+ throw new TypeError("options must be an object");
+ }
+ if (options === null) {
+ options = {};
+ }
+ /** @type {any} */
+ let { mode } = options;
+ if (mode === undefined) {
+ return acquireReadableStreamDefaultReader(this);
+ }
+ mode = String(mode);
+ if (mode !== "byob") {
+ throw new TypeError("Invalid mode.");
+ }
+ // 3. Return ? AcquireReadableStreamBYOBReader(this).
+ throw new RangeError(`Unsupported mode "${String(mode)}"`);
+ }
+
+ /**
+ * @template T
+ * @param {{ readable: ReadableStream<T>, writable: WritableStream<R> }} transform
+ * @param {PipeOptions=} options
+ * @returns {ReadableStream<T>}
+ */
+ pipeThrough(
+ transform,
+ { preventClose, preventAbort, preventCancel, signal } = {},
+ ) {
+ if (!isReadableStream(this)) {
+ throw new TypeError("this must be a ReadableStream");
+ }
+ const { readable } = transform;
+ if (!isReadableStream(readable)) {
+ throw new TypeError("readable must be a ReadableStream");
+ }
+ const { writable } = transform;
+ if (!isWritableStream(writable)) {
+ throw new TypeError("writable must be a WritableStream");
+ }
+ if (isReadableStreamLocked(this)) {
+ throw new TypeError("ReadableStream is already locked.");
+ }
+ if (signal !== undefined && !(signal instanceof AbortSignal)) {
+ throw new TypeError("signal must be an AbortSignal");
+ }
+ if (isWritableStreamLocked(writable)) {
+ throw new TypeError("Target WritableStream is already locked.");
+ }
+ const promise = readableStreamPipeTo(
+ this,
+ writable,
+ Boolean(preventClose),
+ Boolean(preventAbort),
+ Boolean(preventCancel),
+ signal,
+ );
+ setPromiseIsHandledToTrue(promise);
+ return readable;
+ }
+
+ /**
+ * @param {WritableStream<R>} destination
+ * @param {PipeOptions=} options
+ * @returns {Promise<void>}
+ */
+ pipeTo(
+ destination,
+ {
+ preventClose = false,
+ preventAbort = false,
+ preventCancel = false,
+ signal,
+ } = {},
+ ) {
+ if (isReadableStreamLocked(this)) {
+ return Promise.reject(
+ new TypeError("ReadableStream is already locked."),
+ );
+ }
+ if (isWritableStreamLocked(destination)) {
+ return Promise.reject(
+ new TypeError("destination WritableStream is already locked."),
+ );
+ }
+ return readableStreamPipeTo(
+ this,
+ destination,
+ preventClose,
+ preventAbort,
+ preventCancel,
+ signal,
+ );
+ }
+
+ /** @returns {[ReadableStream<R>, ReadableStream<R>]} */
+ tee() {
+ return readableStreamTee(this, false);
+ }
+
+ /**
+ * @param {ReadableStreamIteratorOptions=} options
+ * @returns {AsyncIterableIterator<R>}
+ */
+ [Symbol.asyncIterator]({ preventCancel = false } = {}) {
+ /** @type {AsyncIterableIterator<R>} */
+ const iterator = Object.create(readableStreamAsyncIteratorPrototype);
+ const reader = acquireReadableStreamDefaultReader(this);
+ iterator[_reader] = reader;
+ iterator[_preventCancel] = preventCancel;
+ return iterator;
+ }
+
+ [Symbol.for("Deno.customInspect")](inspect) {
+ return `${this.constructor.name} ${inspect({ locked: this.locked })}`;
+ }
+ }
+
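+ // A minimal usage sketch of the class above (the underlying source and chunk
+ // values are illustrative): a source that enqueues two chunks at start and
+ // closes, consumed through the async iterator defined earlier in this file.
+ //
+ //   const stream = new ReadableStream({
+ //     start(controller) {
+ //       controller.enqueue("hello");
+ //       controller.enqueue("world");
+ //       controller.close();
+ //     },
+ //   });
+ //   for await (const chunk of stream) {
+ //     console.log(chunk); // "hello", then "world"
+ //   }
+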
+ /** @template R */
+ class ReadableStreamGenericReader {
+ /** @type {Deferred<void>} */
+ [_closedPromise];
+ /** @type {ReadableStream<R> | undefined} */
+ [_stream];
+
+ get closed() {
+ return this[_closedPromise].promise;
+ }
+
+ /**
+ * @param {any} reason
+ * @returns {Promise<void>}
+ */
+ cancel(reason) {
+ if (this[_stream] === undefined) {
+ return Promise.reject(
+ new TypeError("Reader has no associated stream."),
+ );
+ }
+ return readableStreamReaderGenericCancel(this, reason);
+ }
+ }
+
+ /** @template R */
+ class ReadableStreamDefaultReader extends ReadableStreamGenericReader {
+ /** @type {ReadRequest[]} */
+ [_readRequests];
+
+ /** @param {ReadableStream<R>} stream */
+ constructor(stream) {
+ if (!(stream instanceof ReadableStream)) {
+ throw new TypeError("stream is not a ReadableStream");
+ }
+ super();
+ setUpReadableStreamDefaultReader(this, stream);
+ }
+
+ /** @returns {Promise<ReadableStreamReadResult<R>>} */
+ read() {
+ if (this[_stream] === undefined) {
+ return Promise.reject(
+ new TypeError("Reader has no associated stream."),
+ );
+ }
+ /** @type {Deferred<ReadableStreamReadResult<R>>} */
+ const promise = new Deferred();
+ /** @type {ReadRequest<R>} */
+ const readRequest = {
+ chunkSteps(chunk) {
+ promise.resolve({ value: chunk, done: false });
+ },
+ closeSteps() {
+ promise.resolve({ value: undefined, done: true });
+ },
+ errorSteps(e) {
+ promise.reject(e);
+ },
+ };
+ readableStreamDefaultReaderRead(this, readRequest);
+ return promise.promise;
+ }
+
+ /** @returns {void} */
+ releaseLock() {
+ if (this[_stream] === undefined) {
+ return;
+ }
+ if (this[_readRequests].length) {
+ throw new TypeError(
+ "There are pending read requests, so the reader cannot be released.",
+ );
+ }
+ readableStreamReaderGenericRelease(this);
+ }
+
+ [Symbol.for("Deno.customInspect")](inspect) {
+ return `${this.constructor.name} ${inspect({ closed: this.closed })}`;
+ }
+ }
+
+ class ReadableByteStreamController {
+ /** @type {number | undefined} */
+ [_autoAllocateChunkSize];
+ /** @type {null} */
+ [_byobRequest];
+ /** @type {(reason: any) => Promise<void>} */
+ [_cancelAlgorithm];
+ /** @type {boolean} */
+ [_closeRequested];
+ /** @type {boolean} */
+ [_pullAgain];
+ /** @type {(controller: this) => Promise<void>} */
+ [_pullAlgorithm];
+ /** @type {boolean} */
+ [_pulling];
+ /** @type {ReadableByteStreamQueueEntry[]} */
+ [_queue];
+ /** @type {number} */
+ [_queueTotalSize];
+ /** @type {boolean} */
+ [_started];
+ /** @type {number} */
+ [_strategyHWM];
+ /** @type {ReadableStream<ArrayBuffer>} */
+ [_stream];
+
+ get byobRequest() {
+ return undefined;
+ }
+
+ /** @returns {number | null} */
+ get desiredSize() {
+ return readableByteStreamControllerGetDesiredSize(this);
+ }
+
+ /** @returns {void} */
+ close() {
+ if (this[_closeRequested] === true) {
+ throw new TypeError("Close already requested.");
+ }
+ if (this[_stream][_state] !== "readable") {
+ throw new TypeError(
+ "ReadableByteStreamController's stream is not in a readable state.",
+ );
+ }
+ readableByteStreamControllerClose(this);
+ }
+
+ /**
+ * @param {ArrayBufferView} chunk
+ * @returns {void}
+ */
+ enqueue(chunk) {
+ if (chunk.byteLength === 0) {
+ throw new TypeError("chunk must have a non-zero byteLength.");
+ }
+ if (chunk.buffer.byteLength === 0) {
+ throw new TypeError("chunk's buffer must have a non-zero byteLength.");
+ }
+ if (this[_closeRequested] === true) {
+ throw new TypeError(
+ "Cannot enqueue chunk after a close has been requested.",
+ );
+ }
+ if (this[_stream][_state] !== "readable") {
+ throw new TypeError(
+ "Cannot enqueue chunk when underlying stream is not readable.",
+ );
+ }
+ return readableByteStreamControllerEnqueue(this, chunk);
+ }
+
+ /**
+ * @param {any=} e
+ * @returns {void}
+ */
+ error(e) {
+ readableByteStreamControllerError(this, e);
+ }
+
+ /**
+ * @param {any} reason
+ * @returns {Promise<void>}
+ */
+ [_cancelSteps](reason) {
+ // 4.7.4. CancelStep 1. If this.[[pendingPullIntos]] is not empty,
+ resetQueue(this);
+ const result = this[_cancelAlgorithm](reason);
+ readableByteStreamControllerClearAlgorithms(this);
+ return result;
+ }
+
+ /**
+ * @param {ReadRequest<ArrayBuffer>} readRequest
+ * @returns {void}
+ */
+ [_pullSteps](readRequest) {
+ /** @type {ReadableStream<ArrayBuffer>} */
+ const stream = this[_stream];
+ assert(readableStreamHasDefaultReader(stream));
+ if (this[_queueTotalSize] > 0) {
+ assert(readableStreamGetNumReadRequests(stream) === 0);
+ const entry = this[_queue].shift();
+ this[_queueTotalSize] -= entry.byteLength;
+ readableByteStreamControllerHandleQueueDrain(this);
+ const view = new Uint8Array(
+ entry.buffer,
+ entry.byteOffset,
+ entry.byteLength,
+ );
+ readRequest.chunkSteps(view);
+ return;
+ }
+ // 4. Let autoAllocateChunkSize be this.[[autoAllocateChunkSize]].
+ // 5. If autoAllocateChunkSize is not undefined,
+ readableStreamAddReadRequest(stream, readRequest);
+ readableByteStreamControllerCallPullIfNeeded(this);
+ }
+ }
+
+ /** @template R */
+ class ReadableStreamDefaultController {
+ /** @type {(reason: any) => Promise<void>} */
+ [_cancelAlgorithm];
+ /** @type {boolean} */
+ [_closeRequested];
+ /** @type {boolean} */
+ [_pullAgain];
+ /** @type {(controller: this) => Promise<void>} */
+ [_pullAlgorithm];
+ /** @type {boolean} */
+ [_pulling];
+ /** @type {Array<ValueWithSize<R>>} */
+ [_queue];
+ /** @type {number} */
+ [_queueTotalSize];
+ /** @type {boolean} */
+ [_started];
+ /** @type {number} */
+ [_strategyHWM];
+ /** @type {(chunk: R) => number} */
+ [_strategySizeAlgorithm];
+ /** @type {ReadableStream<R>} */
+ [_stream];
+
+ /** @returns {number | null} */
+ get desiredSize() {
+ return readableStreamDefaultControllerGetDesiredSize(this);
+ }
+
+ /** @returns {void} */
+ close() {
+ if (readableStreamDefaultControllerCanCloseOrEnqueue(this) === false) {
+ throw new TypeError("The stream controller cannot close or enqueue.");
+ }
+ readableStreamDefaultControllerClose(this);
+ }
+
+ /**
+ * @param {R} chunk
+ * @returns {void}
+ */
+ enqueue(chunk) {
+ if (readableStreamDefaultControllerCanCloseOrEnqueue(this) === false) {
+ throw new TypeError("The stream controller cannot close or enqueue.");
+ }
+ readableStreamDefaultControllerEnqueue(this, chunk);
+ }
+
+ /**
+ * @param {any=} e
+ * @returns {void}
+ */
+ error(e) {
+ readableStreamDefaultControllerError(this, e);
+ }
+
+ /**
+ * @param {any} reason
+ * @returns {Promise<void>}
+ */
+ [_cancelSteps](reason) {
+ resetQueue(this);
+ const result = this[_cancelAlgorithm](reason);
+ readableStreamDefaultControllerClearAlgorithms(this);
+ return result;
+ }
+
+ /**
+ * @param {ReadRequest<R>} readRequest
+ * @returns {void}
+ */
+ [_pullSteps](readRequest) {
+ const stream = this[_stream];
+ if (this[_queue].length) {
+ const chunk = dequeueValue(this);
+ if (this[_closeRequested] && this[_queue].length === 0) {
+ readableStreamDefaultControllerClearAlgorithms(this);
+ readableStreamClose(stream);
+ } else {
+ readableStreamDefaultControllerCallPullIfNeeded(this);
+ }
+ readRequest.chunkSteps(chunk);
+ } else {
+ readableStreamAddReadRequest(stream, readRequest);
+ readableStreamDefaultControllerCallPullIfNeeded(this);
+ }
+ }
+ }
+
+ /**
+ * @template I
+ * @template O
+ */
+ class TransformStream {
+ /** @type {boolean} */
+ [_backpressure];
+ /** @type {Deferred<void>} */
+ [_backpressureChangePromise];
+ /** @type {TransformStreamDefaultController<O>} */
+ [_controller];
+ /** @type {boolean} */
+ [_detached];
+ /** @type {ReadableStream<O>} */
+ [_readable];
+ /** @type {WritableStream<I>} */
+ [_writable];
+
+ /**
+ *
+ * @param {Transformer<I, O>} transformer
+ * @param {QueuingStrategy<I>} writableStrategy
+ * @param {QueuingStrategy<O>} readableStrategy
+ */
+ constructor(
+ transformer = null,
+ writableStrategy = {},
+ readableStrategy = {},
+ ) {
+ const transformerDict = convertTransformer(transformer);
+ if (transformerDict.readableType) {
+ throw new RangeError("readableType transformers not supported.");
+ }
+ if (transformerDict.writableType) {
+ throw new RangeError("writableType transformers not supported.");
+ }
+ const readableHighWaterMark = extractHighWaterMark(readableStrategy, 0);
+ const readableSizeAlgorithm = extractSizeAlgorithm(readableStrategy);
+ const writableHighWaterMark = extractHighWaterMark(writableStrategy, 1);
+ const writableSizeAlgorithm = extractSizeAlgorithm(writableStrategy);
+ /** @type {Deferred<void>} */
+ const startPromise = new Deferred();
+ initializeTransformStream(
+ this,
+ startPromise,
+ writableHighWaterMark,
+ writableSizeAlgorithm,
+ readableHighWaterMark,
+ readableSizeAlgorithm,
+ );
+ setUpTransformStreamDefaultControllerFromTransformer(
+ this,
+ transformer,
+ transformerDict,
+ );
+ if ("start" in transformerDict) {
+ startPromise.resolve(
+ transformerDict.start.call(transformer, this[_controller]),
+ );
+ } else {
+ startPromise.resolve(undefined);
+ }
+ }
+
+ /** @returns {ReadableStream<O>} */
+ get readable() {
+ return this[_readable];
+ }
+
+ /** @returns {WritableStream<I>} */
+ get writable() {
+ return this[_writable];
+ }
+
+ [Symbol.for("Deno.customInspect")](inspect) {
+ return `${this.constructor.name} ${
+ inspect({ readable: this.readable, writable: this.writable })
+ }`;
+ }
+ }
+
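+ // A minimal usage sketch of the class above (the transformer and chunks are
+ // illustrative): an uppercasing transform whose readable side yields the
+ // transformed chunks written to its writable side.
+ //
+ //   const upper = new TransformStream({
+ //     transform(chunk, controller) {
+ //       controller.enqueue(String(chunk).toUpperCase());
+ //     },
+ //   });
+ //   const writer = upper.writable.getWriter();
+ //   writer.write("deno"); // the readable side will yield "DENO"
+ //   writer.close();
+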
+ /** @template O */
+ class TransformStreamDefaultController {
+ /** @type {(controller: this) => Promise<void>} */
+ [_flushAlgorithm];
+ /** @type {TransformStream<O>} */
+ [_stream];
+ /** @type {(chunk: O, controller: this) => Promise<void>} */
+ [_transformAlgorithm];
+
+ /** @returns {number | null} */
+ get desiredSize() {
+ const readableController = this[_stream][_readable][_controller];
+ return readableStreamDefaultControllerGetDesiredSize(
+ /** @type {ReadableStreamDefaultController<O>} */ (readableController),
+ );
+ }
+
+ /**
+ * @param {O} chunk
+ * @returns {void}
+ */
+ enqueue(chunk) {
+ transformStreamDefaultControllerEnqueue(this, chunk);
+ }
+
+ /**
+ * @param {any=} reason
+ * @returns {void}
+ */
+ error(reason) {
+ transformStreamDefaultControllerError(this, reason);
+ }
+
+ /** @returns {void} */
+ terminate() {
+ transformStreamDefaultControllerTerminate(this);
+ }
+ }
+
+ /** @template W */
+ class WritableStream {
+ /** @type {boolean} */
+ [_backpressure];
+ /** @type {Deferred<void> | undefined} */
+ [_closeRequest];
+ /** @type {WritableStreamDefaultController<W>} */
+ [_controller];
+ /** @type {boolean} */
+ [_detached];
+ /** @type {Deferred<void> | undefined} */
+ [_inFlightWriteRequest];
+ /** @type {Deferred<void> | undefined} */
+ [_inFlightCloseRequest];
+ /** @type {PendingAbortRequest | undefined} */
+ [_pendingAbortRequest];
+ /** @type {"writable" | "closed" | "erroring" | "errored"} */
+ [_state];
+ /** @type {any} */
+ [_storedError];
+ /** @type {WritableStreamDefaultWriter<W>} */
+ [_writer];
+ /** @type {Deferred<void>[]} */
+ [_writeRequests];
+
+ /**
+ * @param {UnderlyingSink<W>=} underlyingSink
+ * @param {QueuingStrategy<W>=} strategy
+ */
+ constructor(underlyingSink = null, strategy = {}) {
+ const underlyingSinkDict = convertUnderlyingSink(underlyingSink);
+ if (underlyingSinkDict.type != null) {
+ throw new RangeError(
+ 'WritableStream does not support "type" in the underlying sink.',
+ );
+ }
+ initializeWritableStream(this);
+ const sizeAlgorithm = extractSizeAlgorithm(strategy);
+ const highWaterMark = extractHighWaterMark(strategy, 1);
+ setUpWritableStreamDefaultControllerFromUnderlyingSink(
+ this,
+ underlyingSink,
+ underlyingSinkDict,
+ highWaterMark,
+ sizeAlgorithm,
+ );
+ }
+
+ /** @returns {boolean} */
+ get locked() {
+ return isWritableStreamLocked(this);
+ }
+
+ /**
+ * @param {any=} reason
+ * @returns {Promise<void>}
+ */
+ abort(reason) {
+ if (isWritableStreamLocked(this)) {
+ return Promise.reject(
+ new TypeError(
+ "The writable stream is locked, therefore cannot be aborted.",
+ ),
+ );
+ }
+ return writableStreamAbort(this, reason);
+ }
+
+ /** @returns {Promise<void>} */
+ close() {
+ if (isWritableStreamLocked(this)) {
+ return Promise.reject(
+ new TypeError(
+ "The writable stream is locked, therefore cannot be closed.",
+ ),
+ );
+ }
+ if (writableStreamCloseQueuedOrInFlight(this) === true) {
+ return Promise.reject(
+ new TypeError("The writable stream is already closing."),
+ );
+ }
+ return writableStreamClose(this);
+ }
+
+ /** @returns {WritableStreamDefaultWriter<W>} */
+ getWriter() {
+ return acquireWritableStreamDefaultWriter(this);
+ }
+
+ [Symbol.for("Deno.customInspect")](inspect) {
+ return `${this.constructor.name} ${inspect({ locked: this.locked })}`;
+ }
+ }
+
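+ // A minimal usage sketch of the class above (the sink is illustrative): a
+ // sink that collects written chunks into an array; write() is called once
+ // per chunk and close() once all queued writes have completed.
+ //
+ //   const written = [];
+ //   const target = new WritableStream({
+ //     write(chunk) {
+ //       written.push(chunk);
+ //     },
+ //     close() {
+ //       console.log("done", written);
+ //     },
+ //   });
+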
+ /** @template W */
+ class WritableStreamDefaultWriter {
+ /** @type {Deferred<void>} */
+ [_closedPromise];
+
+ /** @type {Deferred<void>} */
+ [_readyPromise];
+
+ /** @type {WritableStream<W>} */
+ [_stream];
+
+ constructor(stream) {
+ setUpWritableStreamDefaultWriter(this, stream);
+ }
+
+ /** @returns {Promise<void>} */
+ get closed() {
+ return this[_closedPromise].promise;
+ }
+
+ /** @returns {number | null} */
+ get desiredSize() {
+ if (this[_stream] === undefined) {
+ throw new TypeError(
+ "A writable stream is not associated with the writer.",
+ );
+ }
+ return writableStreamDefaultWriterGetDesiredSize(this);
+ }
+
+ /** @returns {Promise<void>} */
+ get ready() {
+ return this[_readyPromise].promise;
+ }
+
+ /**
+ * @param {any} reason
+ * @returns {Promise<void>}
+ */
+ abort(reason) {
+ if (this[_stream] === undefined) {
+ return Promise.reject(
+ new TypeError("A writable stream is not associated with the writer."),
+ );
+ }
+ return writableStreamDefaultWriterAbort(this, reason);
+ }
+
+ /** @returns {Promise<void>} */
+ close() {
+ const stream = this[_stream];
+ if (stream === undefined) {
+ return Promise.reject(
+ new TypeError("A writable stream is not associated with the writer."),
+ );
+ }
+ if (writableStreamCloseQueuedOrInFlight(stream) === true) {
+ return Promise.reject(
+ new TypeError("The associated stream is already closing."),
+ );
+ }
+ return writableStreamDefaultWriterClose(this);
+ }
+
+ /** @returns {void} */
+ releaseLock() {
+ const stream = this[_stream];
+ if (stream === undefined) {
+ return;
+ }
+ assert(stream[_writer] !== undefined);
+ writableStreamDefaultWriterRelease(this);
+ }
+
+ /**
+ * @param {W} chunk
+ * @returns {Promise<void>}
+ */
+ write(chunk) {
+ if (this[_stream] === undefined) {
+ return Promise.reject(
+ new TypeError("A writable stream is not associated with the writer."),
+ );
+ }
+ return writableStreamDefaultWriterWrite(this, chunk);
+ }
+ }
+
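+ // A minimal usage sketch of the writer above (the stream and chunk are
+ // hypothetical): acquire a writer via getWriter(), await `ready` to respect
+ // backpressure, write, and release the lock when finished.
+ //
+ //   const writer = someWritableStream.getWriter();
+ //   await writer.ready;
+ //   await writer.write("chunk");
+ //   writer.releaseLock();
+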
+ /** @template W */
+ class WritableStreamDefaultController {
+ /** @type {(reason?: any) => Promise<void>} */
+ [_abortAlgorithm];
+ /** @type {() => Promise<void>} */
+ [_closeAlgorithm];
+ /** @type {ValueWithSize<W | _close>[]} */
+ [_queue];
+ /** @type {number} */
+ [_queueTotalSize];
+ /** @type {boolean} */
+ [_started];
+ /** @type {number} */
+ [_strategyHWM];
+ /** @type {(chunk: W) => number} */
+ [_strategySizeAlgorithm];
+ /** @type {WritableStream<W>} */
+ [_stream];
+ /** @type {(chunk: W, controller: this) => Promise<void>} */
+ [_writeAlgorithm];
+
+ /**
+ * @param {any=} e
+ * @returns {void}
+ */
+ error(e) {
+ const state = this[_stream][_state];
+ if (state !== "writable") {
+ return;
+ }
+ writableStreamDefaultControllerError(this, e);
+ }
+
+ /**
+ * @param {any=} reason
+ * @returns {Promise<void>}
+ */
+ [_abortSteps](reason) {
+ const result = this[_abortAlgorithm](reason);
+ writableStreamDefaultControllerClearAlgorithms(this);
+ return result;
+ }
+
+ [_errorSteps]() {
+ resetQueue(this);
+ }
+ }
+
+ window.__bootstrap.streams = {
+ // Non-Public
+ isReadableStreamDisturbed,
+ // Exposed in global runtime scope
+ ByteLengthQueuingStrategy,
+ CountQueuingStrategy,
+ ReadableStream,
+ ReadableStreamDefaultReader,
+ TransformStream,
+ WritableStream,
+ WritableStreamDefaultWriter,
+ };
+})(this);
diff --git a/extensions/fetch/11_streams_types.d.ts b/extensions/fetch/11_streams_types.d.ts
new file mode 100644
index 000000000..a4c54363f
--- /dev/null
+++ b/extensions/fetch/11_streams_types.d.ts
@@ -0,0 +1,49 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+// ** Internal Interfaces **
+
+interface PendingAbortRequest {
+ deferred: Deferred<void>;
+ // deno-lint-ignore no-explicit-any
+ reason: any;
+ wasAlreadyErroring: boolean;
+}
+
+// deno-lint-ignore no-explicit-any
+interface ReadRequest<R = any> {
+ chunkSteps: (chunk: R) => void;
+ closeSteps: () => void;
+ // deno-lint-ignore no-explicit-any
+ errorSteps: (error: any) => void;
+}
+
+interface ReadableByteStreamQueueEntry {
+ buffer: ArrayBufferLike;
+ byteOffset: number;
+ byteLength: number;
+}
+
+interface ReadableStreamGetReaderOptions {
+ mode?: "byob";
+}
+
+interface ReadableStreamIteratorOptions {
+ preventCancel?: boolean;
+}
+
+interface ValueWithSize<T> {
+ value: T;
+ size: number;
+}
+
+interface VoidFunction {
+ (): void;
+}
+
+// ** Ambient Definitions and Interfaces not provided by fetch **
+
+declare function queueMicrotask(callback: VoidFunction): void;
+
+declare namespace Deno {
+ function inspect(value: unknown, options?: Record<string, unknown>): string;
+}
diff --git a/extensions/fetch/20_headers.js b/extensions/fetch/20_headers.js
new file mode 100644
index 000000000..94e1c4076
--- /dev/null
+++ b/extensions/fetch/20_headers.js
@@ -0,0 +1,449 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+// @ts-check
+/// <reference path="../webidl/internal.d.ts" />
+/// <reference path="../web/internal.d.ts" />
+/// <reference path="../file/internal.d.ts" />
+/// <reference path="../file/lib.deno_file.d.ts" />
+/// <reference path="./internal.d.ts" />
+/// <reference path="./11_streams_types.d.ts" />
+/// <reference path="./lib.deno_fetch.d.ts" />
+/// <reference lib="esnext" />
+"use strict";
+
+((window) => {
+ const webidl = window.__bootstrap.webidl;
+ const {
+ HTTP_TAB_OR_SPACE_PREFIX_RE,
+ HTTP_TAB_OR_SPACE_SUFFIX_RE,
+ HTTP_WHITESPACE_PREFIX_RE,
+ HTTP_WHITESPACE_SUFFIX_RE,
+ HTTP_TOKEN_CODE_POINT_RE,
+ byteLowerCase,
+ collectSequenceOfCodepoints,
+ collectHttpQuotedString,
+ } = window.__bootstrap.infra;
+
+ const _headerList = Symbol("header list");
+ const _iterableHeaders = Symbol("iterable headers");
+ const _guard = Symbol("guard");
+
+ /**
+ * @typedef Header
+ * @type {[string, string]}
+ */
+
+ /**
+ * @typedef HeaderList
+ * @type {Header[]}
+ */
+
+ /**
+ * @param {string} potentialValue
+ * @returns {string}
+ */
+ function normalizeHeaderValue(potentialValue) {
+ potentialValue = potentialValue.replaceAll(HTTP_WHITESPACE_PREFIX_RE, "");
+ potentialValue = potentialValue.replaceAll(HTTP_WHITESPACE_SUFFIX_RE, "");
+ return potentialValue;
+ }
+
+ /**
+ * @param {Headers} headers
+ * @param {HeadersInit} object
+ */
+ function fillHeaders(headers, object) {
+ if (Array.isArray(object)) {
+ for (const header of object) {
+ if (header.length !== 2) {
+ throw new TypeError(
+ `Invalid header. Length must be 2, but is ${header.length}`,
+ );
+ }
+ appendHeader(headers, header[0], header[1]);
+ }
+ } else {
+ for (const key of Object.keys(object)) {
+ appendHeader(headers, key, object[key]);
+ }
+ }
+ }
+
+ /**
+ * https://fetch.spec.whatwg.org/#concept-headers-append
+ * @param {Headers} headers
+ * @param {string} name
+ * @param {string} value
+ */
+ function appendHeader(headers, name, value) {
+ // 1.
+ value = normalizeHeaderValue(value);
+
+ // 2.
+ if (!HTTP_TOKEN_CODE_POINT_RE.test(name)) {
+ throw new TypeError("Header name is not valid.");
+ }
+ if (
+ value.includes("\x00") || value.includes("\x0A") || value.includes("\x0D")
+ ) {
+ throw new TypeError("Header value is not valid.");
+ }
+
+ // 3.
+ if (headers[_guard] == "immutable") {
+ throw new TypeError("Headers are immutable.");
+ }
+
+ // 7.
+ const list = headers[_headerList];
+ const lowercaseName = byteLowerCase(name);
+ for (let i = 0; i < list.length; i++) {
+ if (byteLowerCase(list[i][0]) === lowercaseName) {
+ name = list[i][0];
+ break;
+ }
+ }
+ list.push([name, value]);
+ }
+
+ /**
+ * https://fetch.spec.whatwg.org/#concept-header-list-get
+ * @param {HeaderList} list
+ * @param {string} name
+ */
+ function getHeader(list, name) {
+ const lowercaseName = byteLowerCase(name);
+ const entries = list.filter((entry) =>
+ byteLowerCase(entry[0]) === lowercaseName
+ ).map((entry) => entry[1]);
+ if (entries.length === 0) {
+ return null;
+ } else {
+ return entries.join("\x2C\x20");
+ }
+ }
+
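+ // A minimal sketch of the combining behavior above (the header list is
+ // illustrative): duplicate names are matched case-insensitively and their
+ // values are joined with 0x2C 0x20 (", ").
+ //
+ //   const list = [["Accept", "text/html"], ["accept", "application/json"]];
+ //   getHeader(list, "ACCEPT"); // => "text/html, application/json"
+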
+ /**
+ * https://fetch.spec.whatwg.org/#concept-header-list-get-decode-split
+ * @param {HeaderList} list
+ * @param {string} name
+ * @returns {string[] | null}
+ */
+ function getDecodeSplitHeader(list, name) {
+ const initialValue = getHeader(list, name);
+ if (initialValue === null) return null;
+ const input = initialValue;
+ let position = 0;
+ const values = [];
+ let value = "";
+ while (position < initialValue.length) {
+ // 7.1. collect up to " or ,
+ const res = collectSequenceOfCodepoints(
+ initialValue,
+ position,
+ (c) => c !== "\u0022" && c !== "\u002C",
+ );
+ value += res.result;
+ position = res.position;
+
+ if (position < initialValue.length) {
+ if (input[position] === "\u0022") {
+ const res = collectHttpQuotedString(input, position, false);
+ value += res.result;
+ position = res.position;
+ if (position < initialValue.length) {
+ continue;
+ }
+ } else {
+ if (input[position] !== "\u002C") throw new TypeError("Unreachable");
+ position += 1;
+ }
+ }
+
+ value = value.replaceAll(HTTP_TAB_OR_SPACE_PREFIX_RE, "");
+ value = value.replaceAll(HTTP_TAB_OR_SPACE_SUFFIX_RE, "");
+
+ values.push(value);
+ value = "";
+ }
+ return values;
+ }
+
+ class Headers {
+ /** @type {HeaderList} */
+ [_headerList] = [];
+ /** @type {"immutable" | "request" | "request-no-cors" | "response" | "none"} */
+ [_guard];
+
+ get [_iterableHeaders]() {
+ const list = this[_headerList];
+
+ const headers = [];
+ const headerNamesSet = new Set();
+ for (const entry of list) {
+ headerNamesSet.add(byteLowerCase(entry[0]));
+ }
+ const names = [...headerNamesSet].sort();
+ for (const name of names) {
+ // The following if statement, and the block it guards, are not spec
+ // compliant. `set-cookie` is the only header that cannot be concatenated,
+ // so it must be given to the user as multiple headers.
+ // The else block of the if statement is spec compliant again.
+ if (name == "set-cookie") {
+ const setCookie = list.filter((entry) =>
+ byteLowerCase(entry[0]) === "set-cookie"
+ );
+ if (setCookie.length === 0) throw new TypeError("Unreachable");
+ for (const entry of setCookie) {
+ headers.push([name, entry[1]]);
+ }
+ } else {
+ const value = getHeader(list, name);
+ if (value === null) throw new TypeError("Unreachable");
+ headers.push([name, value]);
+ }
+ }
+ return headers;
+ }
+
+ /** @param {HeadersInit} [init] */
+ constructor(init = undefined) {
+ const prefix = "Failed to construct 'Event'";
+ if (init !== undefined) {
+ init = webidl.converters["HeadersInit"](init, {
+ prefix,
+ context: "Argument 1",
+ });
+ }
+
+ this[webidl.brand] = webidl.brand;
+ this[_guard] = "none";
+ if (init !== undefined) {
+ fillHeaders(this, init);
+ }
+ }
+
+ /**
+ * @param {string} name
+ * @param {string} value
+ */
+ append(name, value) {
+ webidl.assertBranded(this, Headers);
+ const prefix = "Failed to execute 'append' on 'Headers'";
+ webidl.requiredArguments(arguments.length, 2, { prefix });
+ name = webidl.converters["ByteString"](name, {
+ prefix,
+ context: "Argument 1",
+ });
+ value = webidl.converters["ByteString"](value, {
+ prefix,
+ context: "Argument 2",
+ });
+ appendHeader(this, name, value);
+ }
+
+ /**
+ * @param {string} name
+ */
+ delete(name) {
+ const prefix = "Failed to execute 'delete' on 'Headers'";
+ webidl.requiredArguments(arguments.length, 1, { prefix });
+ name = webidl.converters["ByteString"](name, {
+ prefix,
+ context: "Argument 1",
+ });
+
+ if (!HTTP_TOKEN_CODE_POINT_RE.test(name)) {
+ throw new TypeError("Header name is not valid.");
+ }
+ if (this[_guard] == "immutable") {
+ throw new TypeError("Headers are immutable.");
+ }
+
+ const list = this[_headerList];
+ const lowercaseName = byteLowerCase(name);
+ for (let i = 0; i < list.length; i++) {
+ if (byteLowerCase(list[i][0]) === lowercaseName) {
+ list.splice(i, 1);
+ i--;
+ }
+ }
+ }
+
+ /**
+ * @param {string} name
+ */
+ get(name) {
+ const prefix = "Failed to execute 'get' on 'Headers'";
+ webidl.requiredArguments(arguments.length, 1, { prefix });
+ name = webidl.converters["ByteString"](name, {
+ prefix,
+ context: "Argument 1",
+ });
+
+ if (!HTTP_TOKEN_CODE_POINT_RE.test(name)) {
+ throw new TypeError("Header name is not valid.");
+ }
+
+ const list = this[_headerList];
+ return getHeader(list, name);
+ }
+
+ /**
+ * @param {string} name
+ */
+ has(name) {
+ const prefix = "Failed to execute 'has' on 'Headers'";
+ webidl.requiredArguments(arguments.length, 1, { prefix });
+ name = webidl.converters["ByteString"](name, {
+ prefix,
+ context: "Argument 1",
+ });
+
+ if (!HTTP_TOKEN_CODE_POINT_RE.test(name)) {
+ throw new TypeError("Header name is not valid.");
+ }
+
+ const list = this[_headerList];
+ const lowercaseName = byteLowerCase(name);
+ for (let i = 0; i < list.length; i++) {
+ if (byteLowerCase(list[i][0]) === lowercaseName) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * @param {string} name
+ * @param {string} value
+ */
+ set(name, value) {
+ webidl.assertBranded(this, Headers);
+ const prefix = "Failed to execute 'set' on 'Headers'";
+ webidl.requiredArguments(arguments.length, 2, { prefix });
+ name = webidl.converters["ByteString"](name, {
+ prefix,
+ context: "Argument 1",
+ });
+ value = webidl.converters["ByteString"](value, {
+ prefix,
+ context: "Argument 2",
+ });
+
+ value = normalizeHeaderValue(value);
+
+ // 2.
+ if (!HTTP_TOKEN_CODE_POINT_RE.test(name)) {
+ throw new TypeError("Header name is not valid.");
+ }
+ if (
+ value.includes("\x00") || value.includes("\x0A") ||
+ value.includes("\x0D")
+ ) {
+ throw new TypeError("Header value is not valid.");
+ }
+
+ if (this[_guard] == "immutable") {
+ throw new TypeError("Headers are immutable.");
+ }
+
+ const list = this[_headerList];
+ const lowercaseName = byteLowerCase(name);
+ let added = false;
+ for (let i = 0; i < list.length; i++) {
+ if (byteLowerCase(list[i][0]) === lowercaseName) {
+ if (!added) {
+ list[i][1] = value;
+ added = true;
+ } else {
+ list.splice(i, 1);
+ i--;
+ }
+ }
+ }
+ if (!added) {
+ list.push([name, value]);
+ }
+ }
+
+ [Symbol.for("Deno.customInspect")](inspect) {
+ const headers = {};
+ for (const header of this) {
+ headers[header[0]] = header[1];
+ }
+ return `Headers ${inspect(headers)}`;
+ }
+
+ get [Symbol.toStringTag]() {
+ return "Headers";
+ }
+ }
+
+ webidl.mixinPairIterable("Headers", Headers, _iterableHeaders, 0, 1);
+
+ webidl.converters["sequence<ByteString>"] = webidl
+ .createSequenceConverter(webidl.converters["ByteString"]);
+ webidl.converters["sequence<sequence<ByteString>>"] = webidl
+ .createSequenceConverter(webidl.converters["sequence<ByteString>"]);
+ webidl.converters["record<ByteString, ByteString>"] = webidl
+ .createRecordConverter(
+ webidl.converters["ByteString"],
+ webidl.converters["ByteString"],
+ );
+ webidl.converters["HeadersInit"] = (V, opts) => {
+ // Union for (sequence<sequence<ByteString>> or record<ByteString, ByteString>)
+ if (typeof V === "object" && V !== null) {
+ if (V[Symbol.iterator] !== undefined) {
+ return webidl.converters["sequence<sequence<ByteString>>"](V, opts);
+ }
+ return webidl.converters["record<ByteString, ByteString>"](V, opts);
+ }
+ throw webidl.makeException(
+ TypeError,
+ "The provided value is not of type '(sequence<sequence<ByteString>> or record<ByteString, ByteString>)'",
+ opts,
+ );
+ };
+ webidl.converters["Headers"] = webidl.createInterfaceConverter(
+ "Headers",
+ Headers,
+ );
+
+ /**
+ * @param {HeaderList} list
+ * @param {"immutable" | "request" | "request-no-cors" | "response" | "none"} guard
+ * @returns {Headers}
+ */
+ function headersFromHeaderList(list, guard) {
+ const headers = webidl.createBranded(Headers);
+ headers[_headerList] = list;
+ headers[_guard] = guard;
+ return headers;
+ }
+
+ /**
+   * @param {Headers} headers
+ * @returns {HeaderList}
+ */
+ function headerListFromHeaders(headers) {
+ return headers[_headerList];
+ }
+
+ /**
+   * @param {Headers} headers
+ * @returns {"immutable" | "request" | "request-no-cors" | "response" | "none"}
+ */
+ function guardFromHeaders(headers) {
+ return headers[_guard];
+ }
+
+ window.__bootstrap.headers = {
+ Headers,
+ headersFromHeaderList,
+ headerListFromHeaders,
+ fillHeaders,
+ getDecodeSplitHeader,
+ guardFromHeaders,
+ };
+})(this);
diff --git a/extensions/fetch/21_formdata.js b/extensions/fetch/21_formdata.js
new file mode 100644
index 000000000..c50cf4cf7
--- /dev/null
+++ b/extensions/fetch/21_formdata.js
@@ -0,0 +1,552 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+// @ts-check
+/// <reference path="../webidl/internal.d.ts" />
+/// <reference path="../web/internal.d.ts" />
+/// <reference path="../file/internal.d.ts" />
+/// <reference path="../file/lib.deno_file.d.ts" />
+/// <reference path="./internal.d.ts" />
+/// <reference path="./11_streams_types.d.ts" />
+/// <reference path="./lib.deno_fetch.d.ts" />
+/// <reference lib="esnext" />
+"use strict";
+
+((window) => {
+ const webidl = globalThis.__bootstrap.webidl;
+ const { Blob, File, _byteSequence } = globalThis.__bootstrap.file;
+
+ const entryList = Symbol("entry list");
+
+ /**
+ * @param {string} name
+ * @param {string | Blob} value
+ * @param {string | undefined} filename
+ * @returns {FormDataEntry}
+ */
+ function createEntry(name, value, filename) {
+ if (value instanceof Blob && !(value instanceof File)) {
+ value = new File([value[_byteSequence]], "blob", { type: value.type });
+ }
+ if (value instanceof File && filename !== undefined) {
+ value = new File([value[_byteSequence]], filename, {
+ type: value.type,
+ lastModified: value.lastModified,
+ });
+ }
+ return {
+ name,
+ // @ts-expect-error because TS is not smart enough
+ value,
+ };
+ }
+
+ /**
+ * @typedef FormDataEntry
+ * @property {string} name
+ * @property {FormDataEntryValue} value
+ */
+
+ class FormData {
+ get [Symbol.toStringTag]() {
+ return "FormData";
+ }
+
+ /** @type {FormDataEntry[]} */
+ [entryList] = [];
+
+ /** @param {void} form */
+ constructor(form) {
+ if (form !== undefined) {
+ webidl.illegalConstructor();
+ }
+ this[webidl.brand] = webidl.brand;
+ }
+
+ /**
+ * @param {string} name
+ * @param {string | Blob} valueOrBlobValue
+ * @param {string} [filename]
+ * @returns {void}
+ */
+ append(name, valueOrBlobValue, filename) {
+ webidl.assertBranded(this, FormData);
+ const prefix = "Failed to execute 'append' on 'FormData'";
+ webidl.requiredArguments(arguments.length, 2, { prefix });
+
+ name = webidl.converters["USVString"](name, {
+ prefix,
+ context: "Argument 1",
+ });
+ if (valueOrBlobValue instanceof Blob) {
+ valueOrBlobValue = webidl.converters["Blob"](valueOrBlobValue, {
+ prefix,
+ context: "Argument 2",
+ });
+ if (filename !== undefined) {
+ filename = webidl.converters["USVString"](filename, {
+ prefix,
+ context: "Argument 3",
+ });
+ }
+ } else {
+ valueOrBlobValue = webidl.converters["USVString"](valueOrBlobValue, {
+ prefix,
+ context: "Argument 2",
+ });
+ }
+
+ const entry = createEntry(name, valueOrBlobValue, filename);
+
+ this[entryList].push(entry);
+ }
+
+ /**
+ * @param {string} name
+ * @returns {void}
+ */
+ delete(name) {
+ webidl.assertBranded(this, FormData);
+ const prefix = "Failed to execute 'name' on 'FormData'";
+ webidl.requiredArguments(arguments.length, 1, { prefix });
+
+ name = webidl.converters["USVString"](name, {
+ prefix,
+ context: "Argument 1",
+ });
+
+ const list = this[entryList];
+ for (let i = 0; i < list.length; i++) {
+ if (list[i].name === name) {
+ list.splice(i, 1);
+ i--;
+ }
+ }
+ }
+
+ /**
+ * @param {string} name
+ * @returns {FormDataEntryValue | null}
+ */
+ get(name) {
+ webidl.assertBranded(this, FormData);
+ const prefix = "Failed to execute 'get' on 'FormData'";
+ webidl.requiredArguments(arguments.length, 1, { prefix });
+
+ name = webidl.converters["USVString"](name, {
+ prefix,
+ context: "Argument 1",
+ });
+
+ for (const entry of this[entryList]) {
+ if (entry.name === name) return entry.value;
+ }
+ return null;
+ }
+
+ /**
+ * @param {string} name
+ * @returns {FormDataEntryValue[]}
+ */
+ getAll(name) {
+ webidl.assertBranded(this, FormData);
+ const prefix = "Failed to execute 'getAll' on 'FormData'";
+ webidl.requiredArguments(arguments.length, 1, { prefix });
+
+ name = webidl.converters["USVString"](name, {
+ prefix,
+ context: "Argument 1",
+ });
+
+ const returnList = [];
+ for (const entry of this[entryList]) {
+ if (entry.name === name) returnList.push(entry.value);
+ }
+ return returnList;
+ }
+
+ /**
+ * @param {string} name
+ * @returns {boolean}
+ */
+ has(name) {
+ webidl.assertBranded(this, FormData);
+ const prefix = "Failed to execute 'has' on 'FormData'";
+ webidl.requiredArguments(arguments.length, 1, { prefix });
+
+ name = webidl.converters["USVString"](name, {
+ prefix,
+ context: "Argument 1",
+ });
+
+ for (const entry of this[entryList]) {
+ if (entry.name === name) return true;
+ }
+ return false;
+ }
+
+ /**
+ * @param {string} name
+ * @param {string | Blob} valueOrBlobValue
+ * @param {string} [filename]
+ * @returns {void}
+ */
+ set(name, valueOrBlobValue, filename) {
+ webidl.assertBranded(this, FormData);
+ const prefix = "Failed to execute 'set' on 'FormData'";
+ webidl.requiredArguments(arguments.length, 2, { prefix });
+
+ name = webidl.converters["USVString"](name, {
+ prefix,
+ context: "Argument 1",
+ });
+ if (valueOrBlobValue instanceof Blob) {
+ valueOrBlobValue = webidl.converters["Blob"](valueOrBlobValue, {
+ prefix,
+ context: "Argument 2",
+ });
+ if (filename !== undefined) {
+ filename = webidl.converters["USVString"](filename, {
+ prefix,
+ context: "Argument 3",
+ });
+ }
+ } else {
+ valueOrBlobValue = webidl.converters["USVString"](valueOrBlobValue, {
+ prefix,
+ context: "Argument 2",
+ });
+ }
+
+ const entry = createEntry(name, valueOrBlobValue, filename);
+
+ const list = this[entryList];
+ let added = false;
+ for (let i = 0; i < list.length; i++) {
+ if (list[i].name === name) {
+ if (!added) {
+ list[i] = entry;
+ added = true;
+ } else {
+ list.splice(i, 1);
+ i--;
+ }
+ }
+ }
+ if (!added) {
+ list.push(entry);
+ }
+ }
+ }
+
+ webidl.mixinPairIterable("FormData", FormData, entryList, "name", "value");
+
+ const encoder = new TextEncoder();
+
+ class MultipartBuilder {
+ /**
+ * @param {FormData} formData
+ */
+ constructor(formData) {
+ this.entryList = formData[entryList];
+ this.boundary = this.#createBoundary();
+ /** @type {Uint8Array[]} */
+ this.chunks = [];
+ }
+
+ /**
+ * @returns {string}
+ */
+ getContentType() {
+ return `multipart/form-data; boundary=${this.boundary}`;
+ }
+
+ /**
+ * @returns {Uint8Array}
+ */
+ getBody() {
+ for (const { name, value } of this.entryList) {
+ if (value instanceof File) {
+ this.#writeFile(name, value);
+ } else this.#writeField(name, value);
+ }
+
+ this.chunks.push(encoder.encode(`\r\n--${this.boundary}--`));
+
+ let totalLength = 0;
+ for (const chunk of this.chunks) {
+ totalLength += chunk.byteLength;
+ }
+
+ const finalBuffer = new Uint8Array(totalLength);
+ let i = 0;
+ for (const chunk of this.chunks) {
+ finalBuffer.set(chunk, i);
+ i += chunk.byteLength;
+ }
+
+ return finalBuffer;
+ }
+
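+    // Produces a boundary of "----------" followed by 32 random base-36
+    // characters.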
+ #createBoundary = () => {
+ return (
+ "----------" +
+ Array.from(Array(32))
+ .map(() => Math.random().toString(36)[2] || 0)
+ .join("")
+ );
+ };
+
+ /**
+ * @param {[string, string][]} headers
+ * @returns {void}
+ */
+ #writeHeaders = (headers) => {
+ let buf = (this.chunks.length === 0) ? "" : "\r\n";
+
+ buf += `--${this.boundary}\r\n`;
+ for (const [key, value] of headers) {
+ buf += `${key}: ${value}\r\n`;
+ }
+ buf += `\r\n`;
+
+ this.chunks.push(encoder.encode(buf));
+ };
+
+ /**
+ * @param {string} field
+ * @param {string} filename
+ * @param {string} [type]
+ * @returns {void}
+ */
+ #writeFileHeaders = (
+ field,
+ filename,
+ type,
+ ) => {
+ /** @type {[string, string][]} */
+ const headers = [
+ [
+ "Content-Disposition",
+ `form-data; name="${field}"; filename="${filename}"`,
+ ],
+ ["Content-Type", type || "application/octet-stream"],
+ ];
+ return this.#writeHeaders(headers);
+ };
+
+ /**
+ * @param {string} field
+ * @returns {void}
+ */
+ #writeFieldHeaders = (field) => {
+ /** @type {[string, string][]} */
+ const headers = [["Content-Disposition", `form-data; name="${field}"`]];
+ return this.#writeHeaders(headers);
+ };
+
+ /**
+ * @param {string} field
+ * @param {string} value
+ * @returns {void}
+ */
+ #writeField = (field, value) => {
+ this.#writeFieldHeaders(field);
+ this.chunks.push(encoder.encode(value));
+ };
+
+ /**
+ * @param {string} field
+ * @param {File} value
+ * @returns {void}
+ */
+ #writeFile = (field, value) => {
+ this.#writeFileHeaders(field, value.name, value.type);
+ this.chunks.push(value[_byteSequence]);
+ };
+ }
+
+ /**
+ * @param {FormData} formdata
+ * @returns {{body: Uint8Array, contentType: string}}
+ */
+ function encodeFormData(formdata) {
+ const builder = new MultipartBuilder(formdata);
+ return {
+ body: builder.getBody(),
+ contentType: builder.getContentType(),
+ };
+ }
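+
+  // Illustrative round trip (for exposition only):
+  //   const fd = new FormData();
+  //   fd.append("field", "value");
+  //   const { body, contentType } = encodeFormData(fd);
+  //   const boundary = contentType.split("boundary=")[1];
+  //   parseFormData(body, boundary).get("field"); // "value"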
+
+ /**
+ * @param {string} value
+ * @returns {Map<string, string>}
+ */
+ function parseContentDisposition(value) {
+ /** @type {Map<string, string>} */
+ const params = new Map();
+    // Populated via forEach rather than the Map constructor, because the
+    // mapped entries do not type-check as [key, value] tuples.
+ value
+ .split(";")
+ .slice(1)
+ .map((s) => s.trim().split("="))
+ .filter((arr) => arr.length > 1)
+ .map(([k, v]) => [k, v.replace(/^"([^"]*)"$/, "$1")])
+ .forEach(([k, v]) => params.set(k, v));
+ return params;
+ }
+
+ const LF = "\n".codePointAt(0);
+ const CR = "\r".codePointAt(0);
+ const decoder = new TextDecoder("utf-8");
+
+ class MultipartParser {
+ /**
+ * @param {Uint8Array} body
+ * @param {string | undefined} boundary
+ */
+ constructor(body, boundary) {
+ if (!boundary) {
+ throw new TypeError("multipart/form-data must provide a boundary");
+ }
+
+ this.boundary = `--${boundary}`;
+ this.body = body;
+ this.boundaryChars = encoder.encode(this.boundary);
+ }
+
+ /**
+ * @param {string} headersText
+ * @returns {{ headers: Headers, disposition: Map<string, string> }}
+ */
+ #parseHeaders = (headersText) => {
+ const headers = new Headers();
+ const rawHeaders = headersText.split("\r\n");
+ for (const rawHeader of rawHeaders) {
+ const sepIndex = rawHeader.indexOf(":");
+ if (sepIndex < 0) {
+ continue; // Skip this header
+ }
+ const key = rawHeader.slice(0, sepIndex);
+ const value = rawHeader.slice(sepIndex + 1);
+ headers.set(key, value);
+ }
+
+ const disposition = parseContentDisposition(
+ headers.get("Content-Disposition") ?? "",
+ );
+
+ return { headers, disposition };
+ };
+
+ /**
+ * @returns {FormData}
+ */
+ parse() {
+ // Body must be at least 2 boundaries + \r\n + -- on the last boundary.
+ if (this.body.length < (this.boundary.length * 2) + 4) {
+ throw new TypeError("Form data too short to be valid.");
+ }
+
+ const formData = new FormData();
+ let headerText = "";
+ let boundaryIndex = 0;
+ let state = 0;
+ let fileStart = 0;
+
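+      // Byte-at-a-time state machine:
+      //   0: consuming the opening boundary line
+      //   1-3: collecting the current part's header bytes into `headerText`
+      //   4: collecting content (starting at `fileStart`) while matching the
+      //      trailing boundary byte-by-byte
+      //   5: a part has been emitted; wait for CRLF before the next headers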
+ for (let i = 0; i < this.body.length; i++) {
+ const byte = this.body[i];
+ const prevByte = this.body[i - 1];
+ const isNewLine = byte === LF && prevByte === CR;
+
+        if (state === 1 || state === 2 || state === 3) {
+ headerText += String.fromCharCode(byte);
+ }
+ if (state === 0 && isNewLine) {
+ state = 1;
+ } else if (state === 1 && isNewLine) {
+ state = 2;
+ const headersDone = this.body[i + 1] === CR &&
+ this.body[i + 2] === LF;
+
+ if (headersDone) {
+ state = 3;
+ }
+ } else if (state === 2 && isNewLine) {
+ state = 3;
+ } else if (state === 3 && isNewLine) {
+ state = 4;
+ fileStart = i + 1;
+ } else if (state === 4) {
+ if (this.boundaryChars[boundaryIndex] !== byte) {
+ boundaryIndex = 0;
+ } else {
+ boundaryIndex++;
+ }
+
+ if (boundaryIndex >= this.boundary.length) {
+ const { headers, disposition } = this.#parseHeaders(headerText);
+ const content = this.body.subarray(
+ fileStart,
+ i - boundaryIndex - 1,
+ );
+ // https://fetch.spec.whatwg.org/#ref-for-dom-body-formdata
+ const filename = disposition.get("filename");
+ const name = disposition.get("name");
+
+ state = 5;
+ // Reset
+ boundaryIndex = 0;
+ headerText = "";
+
+ if (!name) {
+ continue; // Skip, unknown name
+ }
+
+ if (filename) {
+ const blob = new Blob([content], {
+ type: headers.get("Content-Type") || "application/octet-stream",
+ });
+ formData.append(name, blob, filename);
+ } else {
+ formData.append(name, decoder.decode(content));
+ }
+ }
+ } else if (state === 5 && isNewLine) {
+ state = 1;
+ }
+ }
+
+ return formData;
+ }
+ }
+
+ /**
+ * @param {Uint8Array} body
+ * @param {string | undefined} boundary
+ * @returns {FormData}
+ */
+ function parseFormData(body, boundary) {
+ const parser = new MultipartParser(body, boundary);
+ return parser.parse();
+ }
+
+ /**
+ * @param {FormDataEntry[]} entries
+ * @returns {FormData}
+ */
+ function formDataFromEntries(entries) {
+ const fd = new FormData();
+ fd[entryList] = entries;
+ return fd;
+ }
+
+ webidl.converters["FormData"] = webidl
+ .createInterfaceConverter("FormData", FormData);
+
+ globalThis.__bootstrap.formData = {
+ FormData,
+ encodeFormData,
+ parseFormData,
+ formDataFromEntries,
+ };
+})(globalThis);
diff --git a/extensions/fetch/22_body.js b/extensions/fetch/22_body.js
new file mode 100644
index 000000000..938e3023e
--- /dev/null
+++ b/extensions/fetch/22_body.js
@@ -0,0 +1,338 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+// @ts-check
+/// <reference path="../webidl/internal.d.ts" />
+/// <reference path="../url/internal.d.ts" />
+/// <reference path="../url/lib.deno_url.d.ts" />
+/// <reference path="../web/internal.d.ts" />
+/// <reference path="../file/internal.d.ts" />
+/// <reference path="../file/lib.deno_file.d.ts" />
+/// <reference path="./internal.d.ts" />
+/// <reference path="./11_streams_types.d.ts" />
+/// <reference path="./lib.deno_fetch.d.ts" />
+/// <reference lib="esnext" />
+"use strict";
+
+((window) => {
+ const core = window.Deno.core;
+ const webidl = globalThis.__bootstrap.webidl;
+ const { parseUrlEncoded } = globalThis.__bootstrap.url;
+ const { parseFormData, formDataFromEntries, encodeFormData } =
+ globalThis.__bootstrap.formData;
+ const mimesniff = globalThis.__bootstrap.mimesniff;
+ const { isReadableStreamDisturbed } = globalThis.__bootstrap.streams;
+
+ class InnerBody {
+ /** @type {ReadableStream<Uint8Array> | { body: Uint8Array, consumed: boolean }} */
+ streamOrStatic;
+ /** @type {null | Uint8Array | Blob | FormData} */
+ source = null;
+ /** @type {null | number} */
+ length = null;
+
+ /**
+ * @param {ReadableStream<Uint8Array> | { body: Uint8Array, consumed: boolean }} stream
+ */
+ constructor(stream) {
+ this.streamOrStatic = stream ??
+ { body: new Uint8Array(), consumed: false };
+ }
+
+ get stream() {
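+      // Lazily wrap a static body in a single-chunk ReadableStream the first
+      // time the stream is requested.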
+ if (!(this.streamOrStatic instanceof ReadableStream)) {
+ const { body, consumed } = this.streamOrStatic;
+ this.streamOrStatic = new ReadableStream({
+ start(controller) {
+ controller.enqueue(body);
+ controller.close();
+ },
+ });
+ if (consumed) {
+ this.streamOrStatic.cancel();
+ }
+ }
+ return this.streamOrStatic;
+ }
+
+ /**
+ * https://fetch.spec.whatwg.org/#body-unusable
+ * @returns {boolean}
+ */
+ unusable() {
+ if (this.streamOrStatic instanceof ReadableStream) {
+ return this.streamOrStatic.locked ||
+ isReadableStreamDisturbed(this.streamOrStatic);
+ }
+ return this.streamOrStatic.consumed;
+ }
+
+ /**
+ * @returns {boolean}
+ */
+ consumed() {
+ if (this.streamOrStatic instanceof ReadableStream) {
+ return isReadableStreamDisturbed(this.streamOrStatic);
+ }
+ return this.streamOrStatic.consumed;
+ }
+
+ /**
+ * https://fetch.spec.whatwg.org/#concept-body-consume-body
+ * @returns {Promise<Uint8Array>}
+ */
+ async consume() {
+ if (this.unusable()) throw new TypeError("Body already consumed.");
+ if (this.streamOrStatic instanceof ReadableStream) {
+ const reader = this.stream.getReader();
+ /** @type {Uint8Array[]} */
+ const chunks = [];
+ let totalLength = 0;
+ while (true) {
+ const { value: chunk, done } = await reader.read();
+ if (done) break;
+ chunks.push(chunk);
+ totalLength += chunk.byteLength;
+ }
+ const finalBuffer = new Uint8Array(totalLength);
+ let i = 0;
+ for (const chunk of chunks) {
+ finalBuffer.set(chunk, i);
+ i += chunk.byteLength;
+ }
+ return finalBuffer;
+ } else {
+ this.streamOrStatic.consumed = true;
+ return this.streamOrStatic.body;
+ }
+ }
+
+ /**
+ * @returns {InnerBody}
+ */
+ clone() {
+ const [out1, out2] = this.stream.tee();
+ this.streamOrStatic = out1;
+ const second = new InnerBody(out2);
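+      // Deep-copy the source via V8 (de)serialization so the clone does not
+      // share mutable state with the original body.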
+ second.source = core.deserialize(core.serialize(this.source));
+ second.length = this.length;
+ return second;
+ }
+ }
+
+ /**
+ * @param {any} prototype
+ * @param {symbol} bodySymbol
+ * @param {symbol} mimeTypeSymbol
+ * @returns {void}
+ */
+ function mixinBody(prototype, bodySymbol, mimeTypeSymbol) {
+ function consumeBody(object) {
+ if (object[bodySymbol] !== null) {
+ return object[bodySymbol].consume();
+ }
+ return Promise.resolve(new Uint8Array());
+ }
+
+ /** @type {PropertyDescriptorMap} */
+ const mixin = {
+ body: {
+ /**
+ * @returns {ReadableStream<Uint8Array> | null}
+ */
+ get() {
+ webidl.assertBranded(this, prototype);
+ if (this[bodySymbol] === null) {
+ return null;
+ } else {
+ return this[bodySymbol].stream;
+ }
+ },
+ },
+ bodyUsed: {
+ /**
+ * @returns {boolean}
+ */
+ get() {
+ webidl.assertBranded(this, prototype);
+ if (this[bodySymbol] !== null) {
+ return this[bodySymbol].consumed();
+ }
+ return false;
+ },
+ },
+ arrayBuffer: {
+ /** @returns {Promise<ArrayBuffer>} */
+ value: async function arrayBuffer() {
+ webidl.assertBranded(this, prototype);
+ const body = await consumeBody(this);
+ return packageData(body, "ArrayBuffer");
+ },
+ },
+ blob: {
+ /** @returns {Promise<Blob>} */
+ value: async function blob() {
+ webidl.assertBranded(this, prototype);
+ const body = await consumeBody(this);
+ return packageData(body, "Blob", this[mimeTypeSymbol]);
+ },
+ },
+ formData: {
+ /** @returns {Promise<FormData>} */
+ value: async function formData() {
+ webidl.assertBranded(this, prototype);
+ const body = await consumeBody(this);
+ return packageData(body, "FormData", this[mimeTypeSymbol]);
+ },
+ },
+ json: {
+ /** @returns {Promise<any>} */
+ value: async function json() {
+ webidl.assertBranded(this, prototype);
+ const body = await consumeBody(this);
+ return packageData(body, "JSON");
+ },
+ },
+ text: {
+ /** @returns {Promise<string>} */
+ value: async function text() {
+ webidl.assertBranded(this, prototype);
+ const body = await consumeBody(this);
+ return packageData(body, "text");
+ },
+ },
+ };
+ return Object.defineProperties(prototype.prototype, mixin);
+ }
+
+ const decoder = new TextDecoder();
+
+ /**
+ * https://fetch.spec.whatwg.org/#concept-body-package-data
+ * @param {Uint8Array} bytes
+ * @param {"ArrayBuffer" | "Blob" | "FormData" | "JSON" | "text"} type
+ * @param {MimeType | null} [mimeType]
+ */
+ function packageData(bytes, type, mimeType) {
+ switch (type) {
+ case "ArrayBuffer":
+ return bytes.buffer;
+ case "Blob":
+ return new Blob([bytes], {
+ type: mimeType !== null ? mimesniff.serializeMimeType(mimeType) : "",
+ });
+ case "FormData": {
+        if (mimeType !== null) {
+          const essence = mimesniff.essence(mimeType);
+          if (essence === "multipart/form-data") {
+            const boundary = mimeType.parameters.get("boundary");
+            if (boundary == null) {
+              throw new TypeError(
+                "Missing boundary parameter in mime type of multipart formdata.",
+              );
+            }
+            return parseFormData(bytes, boundary);
+          } else if (essence === "application/x-www-form-urlencoded") {
+            const entries = parseUrlEncoded(bytes);
+            return formDataFromEntries(
+              entries.map((x) => ({ name: x[0], value: x[1] })),
+            );
+          }
+          throw new TypeError("Invalid form data");
+        }
+ throw new TypeError("Missing content type");
+ }
+ case "JSON":
+ return JSON.parse(decoder.decode(bytes));
+ case "text":
+ return decoder.decode(bytes);
+ }
+ }
+
+ const encoder = new TextEncoder();
+
+ /**
+ * @param {BodyInit} object
+ * @returns {{body: InnerBody, contentType: string | null}}
+ */
+ function extractBody(object) {
+ /** @type {ReadableStream<Uint8Array> | { body: Uint8Array, consumed: boolean }} */
+ let stream;
+ let source = null;
+ let length = null;
+ let contentType = null;
+ if (object instanceof Blob) {
+ stream = object.stream();
+ source = object;
+ length = object.size;
+ if (object.type.length !== 0) {
+ contentType = object.type;
+ }
+ } else if (ArrayBuffer.isView(object) || object instanceof ArrayBuffer) {
+ const u8 = ArrayBuffer.isView(object)
+ ? new Uint8Array(
+ object.buffer,
+ object.byteOffset,
+ object.byteLength,
+ )
+ : new Uint8Array(object);
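+      // Copy the bytes so later mutation of the caller's buffer cannot change
+      // the extracted body.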
+ const copy = u8.slice(0, u8.byteLength);
+ source = copy;
+ } else if (object instanceof FormData) {
+ const res = encodeFormData(object);
+ stream = { body: res.body, consumed: false };
+ source = object;
+ length = res.body.byteLength;
+ contentType = res.contentType;
+ } else if (object instanceof URLSearchParams) {
+ source = encoder.encode(object.toString());
+ contentType = "application/x-www-form-urlencoded;charset=UTF-8";
+ } else if (typeof object === "string") {
+ source = encoder.encode(object);
+ contentType = "text/plain;charset=UTF-8";
+ } else if (object instanceof ReadableStream) {
+ stream = object;
+ if (object.locked || isReadableStreamDisturbed(object)) {
+ throw new TypeError("ReadableStream is locked or disturbed");
+ }
+ }
+ if (source instanceof Uint8Array) {
+ stream = { body: source, consumed: false };
+ length = source.byteLength;
+ }
+ const body = new InnerBody(stream);
+ body.source = source;
+ body.length = length;
+ return { body, contentType };
+ }
+
+ webidl.converters["BodyInit"] = (V, opts) => {
+ // Union for (ReadableStream or Blob or ArrayBufferView or ArrayBuffer or FormData or URLSearchParams or USVString)
+ if (V instanceof ReadableStream) {
+ // TODO(lucacasonato): ReadableStream is not branded
+ return V;
+ } else if (V instanceof Blob) {
+ return webidl.converters["Blob"](V, opts);
+ } else if (V instanceof FormData) {
+ return webidl.converters["FormData"](V, opts);
+ } else if (V instanceof URLSearchParams) {
+ // TODO(lucacasonato): URLSearchParams is not branded
+ return V;
+ }
+ if (typeof V === "object") {
+ if (V instanceof ArrayBuffer || V instanceof SharedArrayBuffer) {
+ return webidl.converters["ArrayBuffer"](V, opts);
+ }
+ if (ArrayBuffer.isView(V)) {
+ return webidl.converters["ArrayBufferView"](V, opts);
+ }
+ }
+ return webidl.converters["USVString"](V, opts);
+ };
+ webidl.converters["BodyInit?"] = webidl.createNullableConverter(
+ webidl.converters["BodyInit"],
+ );
+
+ window.__bootstrap.fetchBody = { mixinBody, InnerBody, extractBody };
+})(globalThis);
diff --git a/extensions/fetch/22_http_client.js b/extensions/fetch/22_http_client.js
new file mode 100644
index 000000000..770080cc9
--- /dev/null
+++ b/extensions/fetch/22_http_client.js
@@ -0,0 +1,41 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+// @ts-check
+/// <reference path="../webidl/internal.d.ts" />
+/// <reference path="../web/internal.d.ts" />
+/// <reference path="../url/internal.d.ts" />
+/// <reference path="../file/internal.d.ts" />
+/// <reference path="../file/lib.deno_file.d.ts" />
+/// <reference path="./internal.d.ts" />
+/// <reference path="./11_streams_types.d.ts" />
+/// <reference path="./lib.deno_fetch.d.ts" />
+/// <reference lib="esnext" />
+"use strict";
+
+((window) => {
+ const core = window.Deno.core;
+
+ /**
+ * @param {Deno.CreateHttpClientOptions} options
+ * @returns {HttpClient}
+ */
+ function createHttpClient(options) {
+ return new HttpClient(core.opSync("op_create_http_client", options));
+ }
+
+ class HttpClient {
+ /**
+ * @param {number} rid
+ */
+ constructor(rid) {
+ this.rid = rid;
+ }
+ close() {
+ core.close(this.rid);
+ }
+ }
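+
+  // Illustrative usage (for exposition only; the options object is passed
+  // through unchanged to "op_create_http_client"):
+  //   const client = createHttpClient({ /* proxy / TLS options */ });
+  //   const res = await fetch(url, { client });
+  //   client.close();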
+
+ window.__bootstrap.fetch ??= {};
+ window.__bootstrap.fetch.createHttpClient = createHttpClient;
+ window.__bootstrap.fetch.HttpClient = HttpClient;
+})(globalThis);
diff --git a/extensions/fetch/23_request.js b/extensions/fetch/23_request.js
new file mode 100644
index 000000000..603a37a5f
--- /dev/null
+++ b/extensions/fetch/23_request.js
@@ -0,0 +1,524 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+// @ts-check
+/// <reference path="../webidl/internal.d.ts" />
+/// <reference path="../web/internal.d.ts" />
+/// <reference path="../file/internal.d.ts" />
+/// <reference path="../file/lib.deno_file.d.ts" />
+/// <reference path="./internal.d.ts" />
+/// <reference path="./11_streams_types.d.ts" />
+/// <reference path="./lib.deno_fetch.d.ts" />
+/// <reference lib="esnext" />
+"use strict";
+
+((window) => {
+ const webidl = window.__bootstrap.webidl;
+ const { HTTP_TOKEN_CODE_POINT_RE, byteUpperCase } = window.__bootstrap.infra;
+ const { URL } = window.__bootstrap.url;
+ const { guardFromHeaders } = window.__bootstrap.headers;
+ const { InnerBody, mixinBody, extractBody } = window.__bootstrap.fetchBody;
+ const { getLocationHref } = window.__bootstrap.location;
+ const mimesniff = window.__bootstrap.mimesniff;
+ const {
+ headersFromHeaderList,
+ headerListFromHeaders,
+ fillHeaders,
+ getDecodeSplitHeader,
+ } = window.__bootstrap.headers;
+ const { HttpClient } = window.__bootstrap.fetch;
+
+ const _request = Symbol("request");
+ const _headers = Symbol("headers");
+ const _mimeType = Symbol("mime type");
+ const _body = Symbol("body");
+
+ /**
+ * @typedef InnerRequest
+ * @property {string} method
+ * @property {() => string} url
+ * @property {() => string} currentUrl
+ * @property {[string, string][]} headerList
+ * @property {null | InnerBody} body
+ * @property {"follow" | "error" | "manual"} redirectMode
+ * @property {number} redirectCount
+ * @property {string[]} urlList
+ * @property {number | null} clientRid NOTE: non standard extension for `Deno.HttpClient`.
+ */
+
+ const defaultInnerRequest = {
+ url() {
+ return this.urlList[0];
+ },
+ currentUrl() {
+ return this.urlList[this.urlList.length - 1];
+ },
+ redirectMode: "follow",
+ redirectCount: 0,
+ clientRid: null,
+ };
+
+ /**
+ * @param {string} method
+ * @param {string} url
+ * @param {[string, string][]} headerList
+ * @param {InnerBody} body
+   * @returns {InnerRequest}
+ */
+ function newInnerRequest(method, url, headerList = [], body = null) {
+ return {
+ method: method,
+ headerList,
+ body,
+ urlList: [url],
+ ...defaultInnerRequest,
+ };
+ }
+
+ /**
+ * https://fetch.spec.whatwg.org/#concept-request-clone
+ * @param {InnerRequest} request
+ * @returns {InnerRequest}
+ */
+ function cloneInnerRequest(request) {
+ const headerList = [...request.headerList.map((x) => [x[0], x[1]])];
+ let body = null;
+ if (request.body !== null) {
+ body = request.body.clone();
+ }
+
+ return {
+ method: request.method,
+ url() {
+ return this.urlList[0];
+ },
+ currentUrl() {
+ return this.urlList[this.urlList.length - 1];
+ },
+ headerList,
+ body,
+ redirectMode: request.redirectMode,
+ redirectCount: request.redirectCount,
+ urlList: request.urlList,
+ clientRid: request.clientRid,
+ };
+ }
+
+ /**
+ * @param {string} m
+ * @returns {boolean}
+ */
+ function isKnownMethod(m) {
+ return (
+ m === "DELETE" ||
+ m === "GET" ||
+ m === "HEAD" ||
+ m === "OPTIONS" ||
+ m === "POST" ||
+ m === "PUT"
+ );
+ }
+ /**
+ * @param {string} m
+ * @returns {string}
+ */
+ function validateAndNormalizeMethod(m) {
+ // Fast path for well-known methods
+ if (isKnownMethod(m)) {
+ return m;
+ }
+
+ // Regular path
+ if (!HTTP_TOKEN_CODE_POINT_RE.test(m)) {
+ throw new TypeError("Method is not valid.");
+ }
+ const upperCase = byteUpperCase(m);
+ if (
+ upperCase === "CONNECT" || upperCase === "TRACE" || upperCase === "TRACK"
+ ) {
+ throw new TypeError("Method is forbidden.");
+ }
+ return upperCase;
+ }
+
+ class Request {
+ /** @type {InnerRequest} */
+ [_request];
+ /** @type {Headers} */
+ [_headers];
+ get [_mimeType]() {
+ let charset = null;
+ let essence = null;
+ let mimeType = null;
+ const values = getDecodeSplitHeader(
+ headerListFromHeaders(this[_headers]),
+ "Content-Type",
+ );
+ if (values === null) return null;
+ for (const value of values) {
+ const temporaryMimeType = mimesniff.parseMimeType(value);
+ if (
+ temporaryMimeType === null ||
+ mimesniff.essence(temporaryMimeType) == "*/*"
+ ) {
+ continue;
+ }
+ mimeType = temporaryMimeType;
+ if (mimesniff.essence(mimeType) !== essence) {
+ charset = null;
+ const newCharset = mimeType.parameters.get("charset");
+ if (newCharset !== undefined) {
+ charset = newCharset;
+ }
+ essence = mimesniff.essence(mimeType);
+ } else {
+          if (!mimeType.parameters.has("charset") && charset !== null) {
+ mimeType.parameters.set("charset", charset);
+ }
+ }
+ }
+ if (mimeType === null) return null;
+ return mimeType;
+ }
+ get [_body]() {
+ return this[_request].body;
+ }
+
+ /**
+ * https://fetch.spec.whatwg.org/#dom-request
+ * @param {RequestInfo} input
+ * @param {RequestInit} init
+ */
+ constructor(input, init = {}) {
+ const prefix = "Failed to construct 'Request'";
+ webidl.requiredArguments(arguments.length, 1, { prefix });
+ input = webidl.converters["RequestInfo"](input, {
+ prefix,
+ context: "Argument 1",
+ });
+ init = webidl.converters["RequestInit"](init, {
+ prefix,
+ context: "Argument 2",
+ });
+
+ this[webidl.brand] = webidl.brand;
+
+ /** @type {InnerRequest} */
+ let request;
+ const baseURL = getLocationHref();
+
+ // 5.
+ if (typeof input === "string") {
+ const parsedURL = new URL(input, baseURL);
+ request = newInnerRequest("GET", parsedURL.href, [], null);
+ } else { // 6.
+ if (!(input instanceof Request)) throw new TypeError("Unreachable");
+ request = input[_request];
+ }
+
+ // 22.
+ if (init.redirect !== undefined) {
+ request.redirectMode = init.redirect;
+ }
+
+ // 25.
+ if (init.method !== undefined) {
+ let method = init.method;
+ method = validateAndNormalizeMethod(method);
+ request.method = method;
+ }
+
+ // NOTE: non standard extension. This handles Deno.HttpClient parameter
+ if (init.client !== undefined) {
+ if (init.client !== null && !(init.client instanceof HttpClient)) {
+ throw webidl.makeException(
+ TypeError,
+ "`client` must be a Deno.HttpClient",
+ { prefix, context: "Argument 2" },
+ );
+ }
+ request.clientRid = init.client?.rid ?? null;
+ }
+
+ // 27.
+ this[_request] = request;
+
+ // 29.
+ this[_headers] = headersFromHeaderList(request.headerList, "request");
+
+ // 31.
+ if (Object.keys(init).length > 0) {
+ let headers = headerListFromHeaders(this[_headers]).slice(
+ 0,
+ headerListFromHeaders(this[_headers]).length,
+ );
+ if (init.headers !== undefined) {
+ headers = init.headers;
+ }
+ headerListFromHeaders(this[_headers]).splice(
+ 0,
+ headerListFromHeaders(this[_headers]).length,
+ );
+ fillHeaders(this[_headers], headers);
+ }
+
+ // 32.
+ let inputBody = null;
+ if (input instanceof Request) {
+ inputBody = input[_body];
+ }
+
+ // 33.
+ if (
+ (request.method === "GET" || request.method === "HEAD") &&
+ ((init.body !== undefined && init.body !== null) ||
+ inputBody !== null)
+ ) {
+ throw new TypeError("HEAD and GET requests may not have a body.");
+ }
+
+ // 34.
+ let initBody = null;
+
+ // 35.
+ if (init.body !== undefined && init.body !== null) {
+ const res = extractBody(init.body);
+ initBody = res.body;
+ if (res.contentType !== null && !this[_headers].has("content-type")) {
+ this[_headers].append("Content-Type", res.contentType);
+ }
+ }
+
+ // 36.
+ const inputOrInitBody = initBody ?? inputBody;
+
+ // 38.
+ const finalBody = inputOrInitBody;
+
+ // 39.
+ // TODO(lucacasonato): implement this step. Is it needed?
+
+ // 40.
+ request.body = finalBody;
+ }
+
+ get method() {
+ webidl.assertBranded(this, Request);
+ return this[_request].method;
+ }
+
+ get url() {
+ webidl.assertBranded(this, Request);
+ return this[_request].url();
+ }
+
+ get headers() {
+ webidl.assertBranded(this, Request);
+ return this[_headers];
+ }
+
+ get destination() {
+ webidl.assertBranded(this, Request);
+ throw new TypeError("This property is not implemented.");
+ }
+
+ get referrer() {
+ webidl.assertBranded(this, Request);
+ throw new TypeError("This property is not implemented.");
+ }
+
+ get referrerPolicy() {
+ webidl.assertBranded(this, Request);
+ throw new TypeError("This property is not implemented.");
+ }
+
+ get mode() {
+ webidl.assertBranded(this, Request);
+ throw new TypeError("This property is not implemented.");
+ }
+
+ get credentials() {
+ webidl.assertBranded(this, Request);
+ throw new TypeError("This property is not implemented.");
+ }
+
+ get cache() {
+ webidl.assertBranded(this, Request);
+ throw new TypeError("This property is not implemented.");
+ }
+
+ get redirect() {
+ webidl.assertBranded(this, Request);
+ return this[_request].redirectMode;
+ }
+
+ get integrity() {
+ webidl.assertBranded(this, Request);
+ throw new TypeError("This property is not implemented.");
+ }
+
+ get keepalive() {
+ webidl.assertBranded(this, Request);
+ throw new TypeError("This property is not implemented.");
+ }
+
+ get isReloadNavigation() {
+ webidl.assertBranded(this, Request);
+ throw new TypeError("This property is not implemented.");
+ }
+
+ get isHistoryNavigation() {
+ webidl.assertBranded(this, Request);
+ throw new TypeError("This property is not implemented.");
+ }
+
+ get signal() {
+ webidl.assertBranded(this, Request);
+ throw new TypeError("This property is not implemented.");
+ }
+
+ clone() {
+ webidl.assertBranded(this, Request);
+ if (this[_body] && this[_body].unusable()) {
+ throw new TypeError("Body is unusable.");
+ }
+ const newReq = cloneInnerRequest(this[_request]);
+ return fromInnerRequest(newReq, guardFromHeaders(this[_headers]));
+ }
+
+ get [Symbol.toStringTag]() {
+ return "Request";
+ }
+
+ [Symbol.for("Deno.customInspect")](inspect) {
+ const inner = {
+ bodyUsed: this.bodyUsed,
+ headers: this.headers,
+ method: this.method,
+ redirect: this.redirect,
+ url: this.url,
+ };
+ return `Request ${inspect(inner)}`;
+ }
+ }
+
+ mixinBody(Request, _body, _mimeType);
+
+ webidl.converters["Request"] = webidl.createInterfaceConverter(
+ "Request",
+ Request,
+ );
+ webidl.converters["RequestInfo"] = (V, opts) => {
+ // Union for (Request or USVString)
+ if (typeof V == "object") {
+ if (V instanceof Request) {
+ return webidl.converters["Request"](V, opts);
+ }
+ }
+ return webidl.converters["USVString"](V, opts);
+ };
+
+ webidl.converters["ReferrerPolicy"] = webidl.createEnumConverter(
+ "ReferrerPolicy",
+ [
+ "",
+ "no-referrer",
+ "no-referrer-when-downgrade",
+ "same-origin",
+ "origin",
+ "strict-origin",
+ "origin-when-cross-origin",
+ "strict-origin-when-cross-origin",
+ "unsafe-url",
+ ],
+ );
+ webidl.converters["RequestMode"] = webidl.createEnumConverter("RequestMode", [
+ "navigate",
+ "same-origin",
+ "no-cors",
+ "cors",
+ ]);
+ webidl.converters["RequestCredentials"] = webidl.createEnumConverter(
+ "RequestCredentials",
+ [
+ "omit",
+ "same-origin",
+ "include",
+ ],
+ );
+ webidl.converters["RequestCache"] = webidl.createEnumConverter(
+ "RequestCache",
+ [
+ "default",
+ "no-store",
+ "reload",
+ "no-cache",
+ "force-cache",
+ "only-if-cached",
+ ],
+ );
+ webidl.converters["RequestRedirect"] = webidl.createEnumConverter(
+ "RequestRedirect",
+ [
+ "follow",
+ "error",
+ "manual",
+ ],
+ );
+ webidl.converters["RequestInit"] = webidl.createDictionaryConverter(
+ "RequestInit",
+ [
+ { key: "method", converter: webidl.converters["ByteString"] },
+ { key: "headers", converter: webidl.converters["HeadersInit"] },
+ {
+ key: "body",
+ converter: webidl.createNullableConverter(
+ webidl.converters["BodyInit"],
+ ),
+ },
+ { key: "referrer", converter: webidl.converters["USVString"] },
+ { key: "referrerPolicy", converter: webidl.converters["ReferrerPolicy"] },
+ { key: "mode", converter: webidl.converters["RequestMode"] },
+ {
+ key: "credentials",
+ converter: webidl.converters["RequestCredentials"],
+ },
+ { key: "cache", converter: webidl.converters["RequestCache"] },
+ { key: "redirect", converter: webidl.converters["RequestRedirect"] },
+ { key: "integrity", converter: webidl.converters["DOMString"] },
+ { key: "keepalive", converter: webidl.converters["boolean"] },
+ {
+ key: "signal",
+ converter: webidl.createNullableConverter(
+ webidl.converters["AbortSignal"],
+ ),
+ },
+ { key: "client", converter: webidl.converters.any },
+ ],
+ );
+
+ /**
+ * @param {Request} request
+ * @returns {InnerRequest}
+ */
+ function toInnerRequest(request) {
+ return request[_request];
+ }
+
+ /**
+ * @param {InnerRequest} inner
+ * @param {"request" | "immutable" | "request-no-cors" | "response" | "none"} guard
+ * @returns {Request}
+ */
+ function fromInnerRequest(inner, guard) {
+ const request = webidl.createBranded(Request);
+ request[_request] = inner;
+ request[_headers] = headersFromHeaderList(inner.headerList, guard);
+ return request;
+ }
+
+ window.__bootstrap.fetch ??= {};
+ window.__bootstrap.fetch.Request = Request;
+ window.__bootstrap.fetch.toInnerRequest = toInnerRequest;
+ window.__bootstrap.fetch.fromInnerRequest = fromInnerRequest;
+ window.__bootstrap.fetch.newInnerRequest = newInnerRequest;
+})(globalThis);
diff --git a/extensions/fetch/23_response.js b/extensions/fetch/23_response.js
new file mode 100644
index 000000000..6bd7a6487
--- /dev/null
+++ b/extensions/fetch/23_response.js
@@ -0,0 +1,415 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+// @ts-check
+/// <reference path="../webidl/internal.d.ts" />
+/// <reference path="../web/internal.d.ts" />
+/// <reference path="../url/internal.d.ts" />
+/// <reference path="../file/internal.d.ts" />
+/// <reference path="../file/lib.deno_file.d.ts" />
+/// <reference path="./internal.d.ts" />
+/// <reference path="./11_streams_types.d.ts" />
+/// <reference path="./lib.deno_fetch.d.ts" />
+/// <reference lib="esnext" />
+"use strict";
+
+((window) => {
+ const webidl = window.__bootstrap.webidl;
+ const { HTTP_TAB_OR_SPACE, regexMatcher } = window.__bootstrap.infra;
+ const { InnerBody, extractBody, mixinBody } = window.__bootstrap.fetchBody;
+ const { getLocationHref } = window.__bootstrap.location;
+ const mimesniff = window.__bootstrap.mimesniff;
+ const { URL } = window.__bootstrap.url;
+ const {
+ getDecodeSplitHeader,
+ headerListFromHeaders,
+ headersFromHeaderList,
+ guardFromHeaders,
+ fillHeaders,
+ } = window.__bootstrap.headers;
+
+ const VCHAR = ["\x21-\x7E"];
+ const OBS_TEXT = ["\x80-\xFF"];
+
+ const REASON_PHRASE = [...HTTP_TAB_OR_SPACE, ...VCHAR, ...OBS_TEXT];
+ const REASON_PHRASE_MATCHER = regexMatcher(REASON_PHRASE);
+ const REASON_PHRASE_RE = new RegExp(`^[${REASON_PHRASE_MATCHER}]*$`);
+
+ const _response = Symbol("response");
+ const _headers = Symbol("headers");
+ const _mimeType = Symbol("mime type");
+ const _body = Symbol("body");
+
+ /**
+ * @typedef InnerResponse
+ * @property {"basic" | "cors" | "default" | "error" | "opaque" | "opaqueredirect"} type
+ * @property {() => string | null} url
+ * @property {string[]} urlList
+ * @property {number} status
+ * @property {string} statusMessage
+ * @property {[string, string][]} headerList
+ * @property {null | InnerBody} body
+ * @property {string} [error]
+ */
+
+ /**
+ * @param {number} status
+ * @returns {boolean}
+ */
+ function nullBodyStatus(status) {
+ return status === 101 || status === 204 || status === 205 || status === 304;
+ }
+
+ /**
+ * @param {number} status
+ * @returns {boolean}
+ */
+ function redirectStatus(status) {
+ return status === 301 || status === 302 || status === 303 ||
+ status === 307 || status === 308;
+ }
+
+ /**
+ * https://fetch.spec.whatwg.org/#concept-response-clone
+ * @param {InnerResponse} response
+ * @returns {InnerResponse}
+ */
+ function cloneInnerResponse(response) {
+ const urlList = [...response.urlList];
+ const headerList = [...response.headerList.map((x) => [x[0], x[1]])];
+ let body = null;
+ if (response.body !== null) {
+ body = response.body.clone();
+ }
+
+ return {
+ type: response.type,
+ body,
+ headerList,
+ url() {
+ if (this.urlList.length == 0) return null;
+ return this.urlList[this.urlList.length - 1];
+ },
+ urlList,
+ status: response.status,
+ statusMessage: response.statusMessage,
+ };
+ }
+
+ const defaultInnerResponse = {
+ type: "default",
+ body: null,
+ url() {
+ if (this.urlList.length == 0) return null;
+ return this.urlList[this.urlList.length - 1];
+ },
+ };
+
+ /**
+ * @returns {InnerResponse}
+ */
+ function newInnerResponse(status = 200, statusMessage = "") {
+ return {
+ headerList: [],
+ urlList: [],
+ status,
+ statusMessage,
+ ...defaultInnerResponse,
+ };
+ }
+
+ /**
+ * @param {string} error
+ * @returns {InnerResponse}
+ */
+ function networkError(error) {
+ const resp = newInnerResponse(0);
+ resp.type = "error";
+ resp.error = error;
+ return resp;
+ }
+
+ class Response {
+ /** @type {InnerResponse} */
+ [_response];
+ /** @type {Headers} */
+ [_headers];
+ get [_mimeType]() {
+ let charset = null;
+ let essence = null;
+ let mimeType = null;
+ const values = getDecodeSplitHeader(
+ headerListFromHeaders(this[_headers]),
+ "Content-Type",
+ );
+ if (values === null) return null;
+ for (const value of values) {
+ const temporaryMimeType = mimesniff.parseMimeType(value);
+ if (
+ temporaryMimeType === null ||
+ mimesniff.essence(temporaryMimeType) == "*/*"
+ ) {
+ continue;
+ }
+ mimeType = temporaryMimeType;
+ if (mimesniff.essence(mimeType) !== essence) {
+ charset = null;
+ const newCharset = mimeType.parameters.get("charset");
+ if (newCharset !== undefined) {
+ charset = newCharset;
+ }
+ essence = mimesniff.essence(mimeType);
+ } else {
+          if (!mimeType.parameters.has("charset") && charset !== null) {
+ mimeType.parameters.set("charset", charset);
+ }
+ }
+ }
+ if (mimeType === null) return null;
+ return mimeType;
+ }
+ get [_body]() {
+ return this[_response].body;
+ }
+
+ /**
+ * @returns {Response}
+ */
+ static error() {
+ const inner = newInnerResponse(0);
+ inner.type = "error";
+ const response = webidl.createBranded(Response);
+ response[_response] = inner;
+ response[_headers] = headersFromHeaderList(
+ response[_response].headerList,
+ "immutable",
+ );
+ return response;
+ }
+
+ /**
+ * @param {string} url
+ * @param {number} status
+ * @returns {Response}
+ */
+ static redirect(url, status = 302) {
+ const prefix = "Failed to call 'Response.redirect'";
+ url = webidl.converters["USVString"](url, {
+ prefix,
+ context: "Argument 1",
+ });
+ status = webidl.converters["unsigned short"](status, {
+ prefix,
+ context: "Argument 2",
+ });
+
+ const baseURL = getLocationHref();
+ const parsedURL = new URL(url, baseURL);
+ if (!redirectStatus(status)) {
+ throw new RangeError("Invalid redirect status code.");
+ }
+ const inner = newInnerResponse(status);
+ inner.type = "default";
+ inner.headerList.push(["Location", parsedURL.href]);
+ const response = webidl.createBranded(Response);
+ response[_response] = inner;
+ response[_headers] = headersFromHeaderList(
+ response[_response].headerList,
+ "immutable",
+ );
+ return response;
+ }
+
+ /**
+ * @param {BodyInit | null} body
+ * @param {ResponseInit} init
+ */
+ constructor(body = null, init = {}) {
+ const prefix = "Failed to construct 'Response'";
+ body = webidl.converters["BodyInit?"](body, {
+ prefix,
+ context: "Argument 1",
+ });
+ init = webidl.converters["ResponseInit"](init, {
+ prefix,
+ context: "Argument 2",
+ });
+
+ if (init.status < 200 || init.status > 599) {
+ throw new RangeError(
+ `The status provided (${init.status}) is outside the range [200, 599].`,
+ );
+ }
+
+ if (!REASON_PHRASE_RE.test(init.statusText)) {
+ throw new TypeError("Status text is not valid.");
+ }
+
+ this[webidl.brand] = webidl.brand;
+ const response = newInnerResponse(init.status, init.statusText);
+ this[_response] = response;
+ this[_headers] = headersFromHeaderList(response.headerList, "response");
+ if (init.headers !== undefined) {
+ fillHeaders(this[_headers], init.headers);
+ }
+ if (body !== null) {
+ if (nullBodyStatus(response.status)) {
+ throw new TypeError(
+ "Response with null body status cannot have body",
+ );
+ }
+ const res = extractBody(body);
+ response.body = res.body;
+ if (res.contentType !== null && !this[_headers].has("content-type")) {
+ this[_headers].append("Content-Type", res.contentType);
+ }
+ }
+ }
+
+ /**
+ * @returns {"basic" | "cors" | "default" | "error" | "opaque" | "opaqueredirect"}
+ */
+ get type() {
+ webidl.assertBranded(this, Response);
+ return this[_response].type;
+ }
+
+ /**
+ * @returns {string}
+ */
+ get url() {
+ webidl.assertBranded(this, Response);
+ const url = this[_response].url();
+ if (url === null) return "";
+ const newUrl = new URL(url);
+ newUrl.hash = "";
+ return newUrl.href;
+ }
+
+ /**
+ * @returns {boolean}
+ */
+ get redirected() {
+ webidl.assertBranded(this, Response);
+ return this[_response].urlList.length > 1;
+ }
+
+ /**
+ * @returns {number}
+ */
+ get status() {
+ webidl.assertBranded(this, Response);
+ return this[_response].status;
+ }
+
+ /**
+ * @returns {boolean}
+ */
+ get ok() {
+ webidl.assertBranded(this, Response);
+ const status = this[_response].status;
+ return status >= 200 && status <= 299;
+ }
+
+ /**
+ * @returns {string}
+ */
+ get statusText() {
+ webidl.assertBranded(this, Response);
+ return this[_response].statusMessage;
+ }
+
+ /**
+ * @returns {Headers}
+ */
+ get headers() {
+ webidl.assertBranded(this, Response);
+ return this[_headers];
+ }
+
+ /**
+ * @returns {Response}
+ */
+ clone() {
+ webidl.assertBranded(this, Response);
+ if (this[_body] && this[_body].unusable()) {
+ throw new TypeError("Body is unusable.");
+ }
+ const second = webidl.createBranded(Response);
+ const newRes = cloneInnerResponse(this[_response]);
+ second[_response] = newRes;
+ second[_headers] = headersFromHeaderList(
+ newRes.headerList,
+ guardFromHeaders(this[_headers]),
+ );
+ return second;
+ }
+
+ get [Symbol.toStringTag]() {
+ return "Response";
+ }
+
+ [Symbol.for("Deno.customInspect")](inspect) {
+ const inner = {
+ body: this.body,
+ bodyUsed: this.bodyUsed,
+ headers: this.headers,
+ ok: this.ok,
+ redirected: this.redirected,
+ status: this.status,
+ statusText: this.statusText,
+ url: this.url,
+ };
+ return `Response ${inspect(inner)}`;
+ }
+ }
+
+ mixinBody(Response, _body, _mimeType);
+
+ webidl.converters["Response"] = webidl.createInterfaceConverter(
+ "Response",
+ Response,
+ );
+ webidl.converters["ResponseInit"] = webidl.createDictionaryConverter(
+ "ResponseInit",
+ [{
+ key: "status",
+ defaultValue: 200,
+ converter: webidl.converters["unsigned short"],
+ }, {
+ key: "statusText",
+ defaultValue: "",
+ converter: webidl.converters["ByteString"],
+ }, {
+ key: "headers",
+ converter: webidl.converters["HeadersInit"],
+ }],
+ );
+
+ /**
+ * @param {Response} response
+ * @returns {InnerResponse}
+ */
+ function toInnerResponse(response) {
+ return response[_response];
+ }
+
+ /**
+ * @param {InnerResponse} inner
+ * @param {"request" | "immutable" | "request-no-cors" | "response" | "none"} guard
+ * @returns {Response}
+ */
+ function fromInnerResponse(inner, guard) {
+ const response = webidl.createBranded(Response);
+ response[_response] = inner;
+ response[_headers] = headersFromHeaderList(inner.headerList, guard);
+ return response;
+ }
+
+ window.__bootstrap.fetch ??= {};
+ window.__bootstrap.fetch.Response = Response;
+ window.__bootstrap.fetch.toInnerResponse = toInnerResponse;
+ window.__bootstrap.fetch.fromInnerResponse = fromInnerResponse;
+ window.__bootstrap.fetch.redirectStatus = redirectStatus;
+ window.__bootstrap.fetch.nullBodyStatus = nullBodyStatus;
+ window.__bootstrap.fetch.networkError = networkError;
+})(globalThis);
diff --git a/extensions/fetch/26_fetch.js b/extensions/fetch/26_fetch.js
new file mode 100644
index 000000000..cd86d6023
--- /dev/null
+++ b/extensions/fetch/26_fetch.js
@@ -0,0 +1,307 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+// @ts-check
+/// <reference path="../../core/lib.deno_core.d.ts" />
+/// <reference path="../web/internal.d.ts" />
+/// <reference path="../url/internal.d.ts" />
+/// <reference path="../web/lib.deno_web.d.ts" />
+/// <reference path="./11_streams_types.d.ts" />
+/// <reference path="./internal.d.ts" />
+/// <reference path="./lib.deno_fetch.d.ts" />
+/// <reference lib="esnext" />
+"use strict";
+
+((window) => {
+ const core = window.Deno.core;
+ const webidl = window.__bootstrap.webidl;
+ const { byteLowerCase } = window.__bootstrap.infra;
+ const { InnerBody, extractBody } = window.__bootstrap.fetchBody;
+ const {
+ toInnerRequest,
+ fromInnerResponse,
+ redirectStatus,
+ nullBodyStatus,
+ networkError,
+ } = window.__bootstrap.fetch;
+
+ const REQUEST_BODY_HEADER_NAMES = [
+ "content-encoding",
+ "content-language",
+ "content-location",
+ "content-type",
+ ];
+
+ /**
+ * @param {{ method: string, url: string, headers: [string, string][], clientRid: number | null, hasBody: boolean }} args
+ * @param {Uint8Array | null} body
+ * @returns {{ requestRid: number, requestBodyRid: number | null }}
+ */
+ function opFetch(args, body) {
+ return core.opSync("op_fetch", args, body);
+ }
+
+ /**
+ * @param {number} rid
+ * @returns {Promise<{ status: number, statusText: string, headers: [string, string][], url: string, responseRid: number }>}
+ */
+ function opFetchSend(rid) {
+ return core.opAsync("op_fetch_send", rid);
+ }
+
+ /**
+ * @param {number} rid
+ * @param {Uint8Array} body
+ * @returns {Promise<void>}
+ */
+ function opFetchRequestWrite(rid, body) {
+ return core.opAsync("op_fetch_request_write", rid, body);
+ }
+
+ /**
+ * @param {number} rid
+ * @param {Uint8Array} body
+ * @returns {Promise<number>}
+ */
+ function opFetchResponseRead(rid, body) {
+ return core.opAsync("op_fetch_response_read", rid, body);
+ }
+
+ /**
+ * @param {number} responseBodyRid
+ * @returns {ReadableStream<Uint8Array>}
+ */
+ function createResponseBodyStream(responseBodyRid) {
+ return new ReadableStream({
+ type: "bytes",
+ async pull(controller) {
+ try {
+ // This is the largest possible size for a single packet on a TLS
+ // stream.
+ const chunk = new Uint8Array(16 * 1024 + 256);
+ const read = await opFetchResponseRead(
+ responseBodyRid,
+ chunk,
+ );
+ if (read > 0) {
+ // We read some data. Enqueue it onto the stream.
+ controller.enqueue(chunk.subarray(0, read));
+ } else {
+ // We have reached the end of the body, so we close the stream.
+ controller.close();
+ core.close(responseBodyRid);
+ }
+ } catch (err) {
+          // There was an error while reading a chunk of the body, so we
+          // error the stream and release the response body resource.
+          controller.error(err);
+ core.close(responseBodyRid);
+ }
+ },
+ cancel() {
+ core.close(responseBodyRid);
+ },
+ });
+ }
+
+ /**
+ * @param {InnerRequest} req
+ * @param {boolean} recursive
+ * @returns {Promise<InnerResponse>}
+ */
+ async function mainFetch(req, recursive) {
+ /** @type {ReadableStream<Uint8Array> | Uint8Array | null} */
+ let reqBody = null;
+ if (req.body !== null) {
+ if (req.body.streamOrStatic instanceof ReadableStream) {
+ if (req.body.length === null) {
+ reqBody = req.body.stream;
+ } else {
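+ // A body with a known length is expected to arrive as a single
+ // chunk, so read it out eagerly and send it as a static byte body.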
+ const reader = req.body.stream.getReader();
+ const r1 = await reader.read();
+ if (r1.done) throw new TypeError("Unreachable");
+ reqBody = r1.value;
+ const r2 = await reader.read();
+ if (!r2.done) throw new TypeError("Unreachable");
+ }
+ } else {
+ req.body.streamOrStatic.consumed = true;
+ reqBody = req.body.streamOrStatic.body;
+ }
+ }
+
+ const { requestRid, requestBodyRid } = opFetch({
+ method: req.method,
+ url: req.currentUrl(),
+ headers: req.headerList,
+ clientRid: req.clientRid,
+ hasBody: reqBody !== null,
+ }, reqBody instanceof Uint8Array ? reqBody : null);
+
+ if (requestBodyRid !== null) {
+ if (reqBody === null || !(reqBody instanceof ReadableStream)) {
+ throw new TypeError("Unreachable");
+ }
+ const reader = reqBody.getReader();
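+ // Pump the request body stream into the op resource in the background;
+ // a write error cancels the reader, and the resource is closed once the
+ // stream is exhausted.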
+ (async () => {
+ while (true) {
+ const { value, done } = await reader.read();
+ if (done) break;
+ if (!(value instanceof Uint8Array)) {
+ await reader.cancel("value not a Uint8Array");
+ break;
+ }
+ try {
+ await opFetchRequestWrite(requestBodyRid, value);
+ } catch (err) {
+ await reader.cancel(err);
+ break;
+ }
+ }
+ core.close(requestBodyRid);
+ })();
+ }
+
+ const resp = await opFetchSend(requestRid);
+ /** @type {InnerResponse} */
+ const response = {
+ headerList: resp.headers,
+ status: resp.status,
+ body: null,
+ statusMessage: resp.statusText,
+ type: "basic",
+ url() {
+ if (this.urlList.length === 0) return null;
+ return this.urlList[this.urlList.length - 1];
+ },
+ urlList: req.urlList,
+ };
+ if (redirectStatus(resp.status)) {
+ switch (req.redirectMode) {
+ case "error":
+ core.close(resp.responseRid);
+ return networkError(
+ "Encountered redirect while redirect mode is set to 'error'",
+ );
+ case "follow":
+ core.close(resp.responseRid);
+ return httpRedirectFetch(req, response);
+ case "manual":
+ break;
+ }
+ }
+
+ if (nullBodyStatus(response.status)) {
+ core.close(resp.responseRid);
+ } else {
+ response.body = new InnerBody(createResponseBodyStream(resp.responseRid));
+ }
+
+ if (recursive) return response;
+
+ if (response.urlList.length === 0) {
+ response.urlList = [...req.urlList];
+ }
+
+ return response;
+ }
+
+ /**
+ * @param {InnerRequest} request
+ * @param {InnerResponse} response
+ * @returns {Promise<InnerResponse>}
+ */
+ function httpRedirectFetch(request, response) {
+ const locationHeaders = response.headerList.filter((entry) =>
+ byteLowerCase(entry[0]) === "location"
+ );
+ if (locationHeaders.length === 0) {
+ return response;
+ }
+ const locationURL = new URL(
+ locationHeaders[0][1],
+ response.url() ?? undefined,
+ );
+ if (locationURL.hash === "") {
+ locationURL.hash = new URL(request.currentUrl()).hash;
+ }
+ if (locationURL.protocol !== "https:" && locationURL.protocol !== "http:") {
+ return networkError("Can not redirect to a non HTTP(s) url");
+ }
+ if (request.redirectCount === 20) {
+ return networkError("Maximum number of redirects (20) reached");
+ }
+ request.redirectCount++;
+ if (
+ response.status !== 303 && request.body !== null &&
+ request.body.source === null
+ ) {
+ return networkError(
+ "Can not redeliver a streaming request body after a redirect",
+ );
+ }
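+ // Per the HTTP-redirect fetch algorithm, a 301/302 response to a POST,
+ // or a 303 response to anything other than GET/HEAD, turns the request
+ // into a body-less GET and drops body-related headers.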
+ if (
+ ((response.status === 301 || response.status === 302) &&
+ request.method === "POST") ||
+ (response.status === 303 &&
+ (request.method !== "GET" && request.method !== "HEAD"))
+ ) {
+ request.method = "GET";
+ request.body = null;
+ for (let i = 0; i < request.headerList.length; i++) {
+ if (
+ REQUEST_BODY_HEADER_NAMES.includes(
+ byteLowerCase(request.headerList[i][0]),
+ )
+ ) {
+ request.headerList.splice(i, 1);
+ i--;
+ }
+ }
+ }
+ if (request.body !== null) {
+ const res = extractBody(request.body.source);
+ request.body = res.body;
+ }
+ request.urlList.push(locationURL.href);
+ return mainFetch(request, true);
+ }
+
+ /**
+ * @param {RequestInfo} input
+ * @param {RequestInit} init
+ */
+ async function fetch(input, init = {}) {
+ const prefix = "Failed to call 'fetch'";
+ input = webidl.converters["RequestInfo"](input, {
+ prefix,
+ context: "Argument 1",
+ });
+ init = webidl.converters["RequestInit"](init, {
+ prefix,
+ context: "Argument 2",
+ });
+
+ // 1.
+ const requestObject = new Request(input, init);
+ // 2.
+ const request = toInnerRequest(requestObject);
+ // 10.
+ if (!requestObject.headers.has("Accept")) {
+ request.headerList.push(["Accept", "*/*"]);
+ }
+
+ // 12.
+ const response = await mainFetch(request, false);
+ if (response.type === "error") {
+ throw new TypeError(
+ "Fetch failed: " + (response.error ?? "unknown error"),
+ );
+ }
+
+ return fromInnerResponse(response, "immutable");
+ }
+
+ window.__bootstrap.fetch ??= {};
+ window.__bootstrap.fetch.fetch = fetch;
+})(this);
diff --git a/extensions/fetch/Cargo.toml b/extensions/fetch/Cargo.toml
new file mode 100644
index 000000000..d2ed10714
--- /dev/null
+++ b/extensions/fetch/Cargo.toml
@@ -0,0 +1,26 @@
+# Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+[package]
+name = "deno_fetch"
+version = "0.27.0"
+edition = "2018"
+description = "Fetch API implementation for Deno"
+authors = ["the Deno authors"]
+license = "MIT"
+readme = "README.md"
+repository = "https://github.com/denoland/deno"
+
+[lib]
+path = "lib.rs"
+
+[dependencies]
+bytes = "1.0.1"
+data-url = "0.1.0"
+deno_core = { version = "0.86.0", path = "../../core" }
+deno_file = { version = "0.4.0", path = "../file" }
+http = "0.2.3"
+reqwest = { version = "0.11.2", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli"] }
+serde = { version = "1.0.125", features = ["derive"] }
+tokio = { version = "1.4.0", features = ["full"] }
+tokio-stream = "0.1.5"
+tokio-util = "0.6.5"
diff --git a/extensions/fetch/README.md b/extensions/fetch/README.md
new file mode 100644
index 000000000..2c946197e
--- /dev/null
+++ b/extensions/fetch/README.md
@@ -0,0 +1,5 @@
+# deno_fetch
+
+This crate implements the Fetch API.
+
+Spec: https://fetch.spec.whatwg.org/
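+
+Embedders register the extension by calling `deno_fetch::init` and passing the
+resulting `Extension` to their `deno_core` runtime. A minimal sketch (the
+embedder name, user agent string, and use of `NoFetchPermissions` are
+illustrative assumptions; a real embedder must also place a `FetchPermissions`
+value into the op state before fetch is used):
+
+```rust
+fn main() {
+  // Build the fetch extension; an embedder would hand this to its JsRuntime.
+  let _ext: deno_core::Extension =
+    deno_fetch::init::<deno_fetch::NoFetchPermissions>(
+      "my-embedder/0.1".to_string(), // default User-Agent header (hypothetical)
+      None,                          // ca_data: no extra root certificate
+    );
+}
+```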
diff --git a/extensions/fetch/internal.d.ts b/extensions/fetch/internal.d.ts
new file mode 100644
index 000000000..86de52761
--- /dev/null
+++ b/extensions/fetch/internal.d.ts
@@ -0,0 +1,113 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+// deno-lint-ignore-file no-explicit-any
+
+/// <reference no-default-lib="true" />
+/// <reference lib="esnext" />
+
+declare namespace globalThis {
+ declare namespace __bootstrap {
+ declare var fetchUtil: {
+ requiredArguments(name: string, length: number, required: number): void;
+ };
+
+ declare var domIterable: {
+ DomIterableMixin(base: any, dataSymbol: symbol): any;
+ };
+
+ declare namespace headers {
+ class Headers {
+ }
+ type HeaderList = [string, string][];
+ function headersFromHeaderList(
+ list: HeaderList,
+ guard:
+ | "immutable"
+ | "request"
+ | "request-no-cors"
+ | "response"
+ | "none",
+ ): Headers;
+ function headerListFromHeaders(headers: Headers): HeaderList;
+ function fillHeaders(headers: Headers, object: HeadersInit): void;
+ function getDecodeSplitHeader(
+ list: HeaderList,
+ name: string,
+ ): string[] | null;
+ function guardFromHeaders(
+ headers: Headers,
+ ): "immutable" | "request" | "request-no-cors" | "response" | "none";
+ }
+
+ declare namespace formData {
+ declare type FormData = typeof FormData;
+ declare function encodeFormData(formdata: FormData): {
+ body: Uint8Array;
+ contentType: string;
+ };
+ declare function parseFormData(
+ body: Uint8Array,
+ boundary: string | undefined,
+ ): FormData;
+ declare function formDataFromEntries(entries: FormDataEntry[]): FormData;
+ }
+
+ declare var streams: {
+ ReadableStream: typeof ReadableStream;
+ isReadableStreamDisturbed(stream: ReadableStream): boolean;
+ };
+
+ declare namespace fetchBody {
+ function mixinBody(
+ prototype: any,
+ bodySymbol: symbol,
+ mimeTypeSymbol: symbol,
+ ): void;
+ class InnerBody {
+ constructor(stream?: ReadableStream<Uint8Array>);
+ stream: ReadableStream<Uint8Array>;
+ source: null | Uint8Array | Blob | FormData;
+ length: null | number;
+ unusable(): boolean;
+ consume(): Promise<Uint8Array>;
+ clone(): InnerBody;
+ }
+ function extractBody(object: BodyInit): {
+ body: InnerBody;
+ contentType: string | null;
+ };
+ }
+
+ declare namespace fetch {
+ function toInnerRequest(request: Request): InnerRequest;
+ function fromInnerRequest(
+ inner: InnerRequest,
+ guard:
+ | "request"
+ | "immutable"
+ | "request-no-cors"
+ | "response"
+ | "none",
+ ): Request;
+ function redirectStatus(status: number): boolean;
+ function nullBodyStatus(status: number): boolean;
+ function newInnerRequest(
+ method: string,
+ url: any,
+ headerList?: [string, string][],
+ body?: globalThis.__bootstrap.fetchBody.InnerBody,
+ ): InnerRequest;
+ function toInnerResponse(response: Response): InnerResponse;
+ function fromInnerResponse(
+ inner: InnerResponse,
+ guard:
+ | "request"
+ | "immutable"
+ | "request-no-cors"
+ | "response"
+ | "none",
+ ): Response;
+ function networkError(error: string): InnerResponse;
+ }
+ }
+}
diff --git a/extensions/fetch/lib.deno_fetch.d.ts b/extensions/fetch/lib.deno_fetch.d.ts
new file mode 100644
index 000000000..af21d8c44
--- /dev/null
+++ b/extensions/fetch/lib.deno_fetch.d.ts
@@ -0,0 +1,708 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+// deno-lint-ignore-file no-explicit-any
+
+/// <reference no-default-lib="true" />
+/// <reference lib="esnext" />
+
+interface DomIterable<K, V> {
+ keys(): IterableIterator<K>;
+ values(): IterableIterator<V>;
+ entries(): IterableIterator<[K, V]>;
+ [Symbol.iterator](): IterableIterator<[K, V]>;
+ forEach(
+ callback: (value: V, key: K, parent: this) => void,
+ thisArg?: any,
+ ): void;
+}
+
+interface ReadableStreamReadDoneResult<T> {
+ done: true;
+ value?: T;
+}
+
+interface ReadableStreamReadValueResult<T> {
+ done: false;
+ value: T;
+}
+
+type ReadableStreamReadResult<T> =
+ | ReadableStreamReadValueResult<T>
+ | ReadableStreamReadDoneResult<T>;
+
+interface ReadableStreamDefaultReader<R = any> {
+ readonly closed: Promise<void>;
+ cancel(reason?: any): Promise<void>;
+ read(): Promise<ReadableStreamReadResult<R>>;
+ releaseLock(): void;
+}
+
+declare var ReadableStreamDefaultReader: {
+ prototype: ReadableStreamDefaultReader;
+ new <R>(stream: ReadableStream<R>): ReadableStreamDefaultReader<R>;
+};
+
+interface ReadableStreamReader<R = any> {
+ cancel(): Promise<void>;
+ read(): Promise<ReadableStreamReadResult<R>>;
+ releaseLock(): void;
+}
+
+declare var ReadableStreamReader: {
+ prototype: ReadableStreamReader;
+ new (): ReadableStreamReader;
+};
+
+interface ReadableByteStreamControllerCallback {
+ (controller: ReadableByteStreamController): void | PromiseLike<void>;
+}
+
+interface UnderlyingByteSource {
+ autoAllocateChunkSize?: number;
+ cancel?: ReadableStreamErrorCallback;
+ pull?: ReadableByteStreamControllerCallback;
+ start?: ReadableByteStreamControllerCallback;
+ type: "bytes";
+}
+
+interface UnderlyingSink<W = any> {
+ abort?: WritableStreamErrorCallback;
+ close?: WritableStreamDefaultControllerCloseCallback;
+ start?: WritableStreamDefaultControllerStartCallback;
+ type?: undefined;
+ write?: WritableStreamDefaultControllerWriteCallback<W>;
+}
+
+interface UnderlyingSource<R = any> {
+ cancel?: ReadableStreamErrorCallback;
+ pull?: ReadableStreamDefaultControllerCallback<R>;
+ start?: ReadableStreamDefaultControllerCallback<R>;
+ type?: undefined;
+}
+
+interface ReadableStreamErrorCallback {
+ (reason: any): void | PromiseLike<void>;
+}
+
+interface ReadableStreamDefaultControllerCallback<R> {
+ (controller: ReadableStreamDefaultController<R>): void | PromiseLike<void>;
+}
+
+interface ReadableStreamDefaultController<R = any> {
+ readonly desiredSize: number | null;
+ close(): void;
+ enqueue(chunk: R): void;
+ error(error?: any): void;
+}
+
+declare var ReadableStreamDefaultController: {
+ prototype: ReadableStreamDefaultController;
+ new (): ReadableStreamDefaultController;
+};
+
+interface ReadableByteStreamController {
+ readonly byobRequest: undefined;
+ readonly desiredSize: number | null;
+ close(): void;
+ enqueue(chunk: ArrayBufferView): void;
+ error(error?: any): void;
+}
+
+declare var ReadableByteStreamController: {
+ prototype: ReadableByteStreamController;
+ new (): ReadableByteStreamController;
+};
+
+interface PipeOptions {
+ preventAbort?: boolean;
+ preventCancel?: boolean;
+ preventClose?: boolean;
+ signal?: AbortSignal;
+}
+
+interface QueuingStrategySizeCallback<T = any> {
+ (chunk: T): number;
+}
+
+interface QueuingStrategy<T = any> {
+ highWaterMark?: number;
+ size?: QueuingStrategySizeCallback<T>;
+}
+
+/** This Streams API interface provides a built-in chunk counting queuing
+ * strategy that can be used when constructing streams. */
+declare class CountQueuingStrategy implements QueuingStrategy {
+ constructor(options: { highWaterMark: number });
+ highWaterMark: number;
+ size(chunk: any): 1;
+}
+
+declare class ByteLengthQueuingStrategy
+ implements QueuingStrategy<ArrayBufferView> {
+ constructor(options: { highWaterMark: number });
+ highWaterMark: number;
+ size(chunk: ArrayBufferView): number;
+}
+
+/** This Streams API interface represents a readable stream of byte data. The
+ * Fetch API offers a concrete instance of a ReadableStream through the body
+ * property of a Response object. */
+interface ReadableStream<R = any> {
+ readonly locked: boolean;
+ cancel(reason?: any): Promise<void>;
+ /**
+ * @deprecated This is no longer part of the Streams standard and the async
+ * iterable should be obtained by just using the stream as an
+ * async iterator.
+ */
+ getIterator(options?: { preventCancel?: boolean }): AsyncIterableIterator<R>;
+ getReader(): ReadableStreamDefaultReader<R>;
+ pipeThrough<T>(
+ { writable, readable }: {
+ writable: WritableStream<R>;
+ readable: ReadableStream<T>;
+ },
+ options?: PipeOptions,
+ ): ReadableStream<T>;
+ pipeTo(dest: WritableStream<R>, options?: PipeOptions): Promise<void>;
+ tee(): [ReadableStream<R>, ReadableStream<R>];
+ [Symbol.asyncIterator](options?: {
+ preventCancel?: boolean;
+ }): AsyncIterableIterator<R>;
+}
+
+declare var ReadableStream: {
+ prototype: ReadableStream;
+ new (
+ underlyingSource: UnderlyingByteSource,
+ strategy?: { highWaterMark?: number; size?: undefined },
+ ): ReadableStream<Uint8Array>;
+ new <R = any>(
+ underlyingSource?: UnderlyingSource<R>,
+ strategy?: QueuingStrategy<R>,
+ ): ReadableStream<R>;
+};
+
+interface WritableStreamDefaultControllerCloseCallback {
+ (): void | PromiseLike<void>;
+}
+
+interface WritableStreamDefaultControllerStartCallback {
+ (controller: WritableStreamDefaultController): void | PromiseLike<void>;
+}
+
+interface WritableStreamDefaultControllerWriteCallback<W> {
+ (chunk: W, controller: WritableStreamDefaultController):
+ | void
+ | PromiseLike<
+ void
+ >;
+}
+
+interface WritableStreamErrorCallback {
+ (reason: any): void | PromiseLike<void>;
+}
+
+/** This Streams API interface provides a standard abstraction for writing
+ * streaming data to a destination, known as a sink. This object comes with
+ * built-in backpressure and queuing. */
+interface WritableStream<W = any> {
+ readonly locked: boolean;
+ abort(reason?: any): Promise<void>;
+ getWriter(): WritableStreamDefaultWriter<W>;
+}
+
+declare var WritableStream: {
+ prototype: WritableStream;
+ new <W = any>(
+ underlyingSink?: UnderlyingSink<W>,
+ strategy?: QueuingStrategy<W>,
+ ): WritableStream<W>;
+};
+
+/** This Streams API interface represents a controller allowing control of a
+ * WritableStream's state. When constructing a WritableStream, the underlying
+ * sink is given a corresponding WritableStreamDefaultController instance to
+ * manipulate. */
+interface WritableStreamDefaultController {
+ error(error?: any): void;
+}
+
+/** This Streams API interface is the object returned by
+ * WritableStream.getWriter() and once created locks the writer to the
+ * WritableStream, ensuring that no other streams can write to the underlying
+ * sink. */
+interface WritableStreamDefaultWriter<W = any> {
+ readonly closed: Promise<void>;
+ readonly desiredSize: number | null;
+ readonly ready: Promise<void>;
+ abort(reason?: any): Promise<void>;
+ close(): Promise<void>;
+ releaseLock(): void;
+ write(chunk: W): Promise<void>;
+}
+
+declare var WritableStreamDefaultWriter: {
+ prototype: WritableStreamDefaultWriter;
+ new (): WritableStreamDefaultWriter;
+};
+
+interface TransformStream<I = any, O = any> {
+ readonly readable: ReadableStream<O>;
+ readonly writable: WritableStream<I>;
+}
+
+declare var TransformStream: {
+ prototype: TransformStream;
+ new <I = any, O = any>(
+ transformer?: Transformer<I, O>,
+ writableStrategy?: QueuingStrategy<I>,
+ readableStrategy?: QueuingStrategy<O>,
+ ): TransformStream<I, O>;
+};
+
+interface TransformStreamDefaultController<O = any> {
+ readonly desiredSize: number | null;
+ enqueue(chunk: O): void;
+ error(reason?: any): void;
+ terminate(): void;
+}
+
+interface Transformer<I = any, O = any> {
+ flush?: TransformStreamDefaultControllerCallback<O>;
+ readableType?: undefined;
+ start?: TransformStreamDefaultControllerCallback<O>;
+ transform?: TransformStreamDefaultControllerTransformCallback<I, O>;
+ writableType?: undefined;
+}
+
+interface TransformStreamDefaultControllerCallback<O> {
+ (controller: TransformStreamDefaultController<O>): void | PromiseLike<void>;
+}
+
+interface TransformStreamDefaultControllerTransformCallback<I, O> {
+ (
+ chunk: I,
+ controller: TransformStreamDefaultController<O>,
+ ): void | PromiseLike<void>;
+}
+
+type FormDataEntryValue = File | string;
+
+/** Provides a way to easily construct a set of key/value pairs representing
+ * form fields and their values, which can then be easily sent using the
+ * XMLHttpRequest.send() method. It uses the same format a form would use if the
+ * encoding type were set to "multipart/form-data". */
+declare class FormData implements DomIterable<string, FormDataEntryValue> {
+ // TODO(ry) FormData constructor is non-standard.
+ // new(form?: HTMLFormElement): FormData;
+ constructor();
+
+ append(name: string, value: string | Blob, fileName?: string): void;
+ delete(name: string): void;
+ get(name: string): FormDataEntryValue | null;
+ getAll(name: string): FormDataEntryValue[];
+ has(name: string): boolean;
+ set(name: string, value: string | Blob, fileName?: string): void;
+ keys(): IterableIterator<string>;
+ values(): IterableIterator<string>;
+ entries(): IterableIterator<[string, FormDataEntryValue]>;
+ [Symbol.iterator](): IterableIterator<[string, FormDataEntryValue]>;
+ forEach(
+ callback: (value: FormDataEntryValue, key: string, parent: this) => void,
+ thisArg?: any,
+ ): void;
+}
+
+interface Body {
+ /** A simple getter used to expose a `ReadableStream` of the body contents. */
+ readonly body: ReadableStream<Uint8Array> | null;
+ /** Stores a `Boolean` that declares whether the body has been used in a
+ * response yet.
+ */
+ readonly bodyUsed: boolean;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with an `ArrayBuffer`.
+ */
+ arrayBuffer(): Promise<ArrayBuffer>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with a `Blob`.
+ */
+ blob(): Promise<Blob>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with a `FormData` object.
+ */
+ formData(): Promise<FormData>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with the result of parsing the body text as JSON.
+ */
+ json(): Promise<any>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with a `USVString` (text).
+ */
+ text(): Promise<string>;
+}
+
+type HeadersInit = Headers | string[][] | Record<string, string>;
+
+/** This Fetch API interface allows you to perform various actions on HTTP
+ * request and response headers. These actions include retrieving, setting,
+ * adding to, and removing. A Headers object has an associated header list,
+ * which is initially empty and consists of zero or more name and value pairs.
+ * You can add to this using methods like append() (see Examples). In all
+ * methods of this interface, header names are matched by case-insensitive byte
+ * sequence. */
+interface Headers {
+ append(name: string, value: string): void;
+ delete(name: string): void;
+ get(name: string): string | null;
+ has(name: string): boolean;
+ set(name: string, value: string): void;
+ forEach(
+ callbackfn: (value: string, key: string, parent: Headers) => void,
+ thisArg?: any,
+ ): void;
+}
+
+declare class Headers implements DomIterable<string, string> {
+ constructor(init?: HeadersInit);
+
+ /** Appends a new value onto an existing header inside a `Headers` object, or
+ * adds the header if it does not already exist.
+ */
+ append(name: string, value: string): void;
+ /** Deletes a header from a `Headers` object. */
+ delete(name: string): void;
+ /** Returns an iterator allowing iteration through all key/value pairs
+ * contained in this Headers object. Both the key and the value of each pair
+ * are ByteString objects.
+ */
+ entries(): IterableIterator<[string, string]>;
+ /** Returns a `ByteString` sequence of all the values of a header within a
+ * `Headers` object with a given name.
+ */
+ get(name: string): string | null;
+ /** Returns a boolean stating whether a `Headers` object contains a certain
+ * header.
+ */
+ has(name: string): boolean;
+ /** Returns an iterator allowing iteration through all keys contained in
+ * this Headers object. The keys are ByteString objects.
+ */
+ keys(): IterableIterator<string>;
+ /** Sets a new value for an existing header inside a Headers object, or adds
+ * the header if it does not already exist.
+ */
+ set(name: string, value: string): void;
+ /** Returns an iterator allowing iteration through all values contained in
+ * this Headers object. The values are ByteString objects.
+ */
+ values(): IterableIterator<string>;
+ forEach(
+ callbackfn: (value: string, key: string, parent: this) => void,
+ thisArg?: any,
+ ): void;
+ /** The Symbol.iterator well-known symbol specifies the default
+ * iterator for this Headers object
+ */
+ [Symbol.iterator](): IterableIterator<[string, string]>;
+}
+
+type RequestInfo = Request | string;
+type RequestCache =
+ | "default"
+ | "force-cache"
+ | "no-cache"
+ | "no-store"
+ | "only-if-cached"
+ | "reload";
+type RequestCredentials = "include" | "omit" | "same-origin";
+type RequestMode = "cors" | "navigate" | "no-cors" | "same-origin";
+type RequestRedirect = "error" | "follow" | "manual";
+type ReferrerPolicy =
+ | ""
+ | "no-referrer"
+ | "no-referrer-when-downgrade"
+ | "origin"
+ | "origin-when-cross-origin"
+ | "same-origin"
+ | "strict-origin"
+ | "strict-origin-when-cross-origin"
+ | "unsafe-url";
+type BodyInit =
+ | Blob
+ | BufferSource
+ | FormData
+ | URLSearchParams
+ | ReadableStream<Uint8Array>
+ | string;
+type RequestDestination =
+ | ""
+ | "audio"
+ | "audioworklet"
+ | "document"
+ | "embed"
+ | "font"
+ | "image"
+ | "manifest"
+ | "object"
+ | "paintworklet"
+ | "report"
+ | "script"
+ | "sharedworker"
+ | "style"
+ | "track"
+ | "video"
+ | "worker"
+ | "xslt";
+
+interface RequestInit {
+ /**
+ * A BodyInit object or null to set request's body.
+ */
+ body?: BodyInit | null;
+ /**
+ * A string indicating how the request will interact with the browser's cache
+ * to set request's cache.
+ */
+ cache?: RequestCache;
+ /**
+ * A string indicating whether credentials will be sent with the request
+ * always, never, or only when sent to a same-origin URL. Sets request's
+ * credentials.
+ */
+ credentials?: RequestCredentials;
+ /**
+ * A Headers object, an object literal, or an array of two-item arrays to set
+ * request's headers.
+ */
+ headers?: HeadersInit;
+ /**
+ * A cryptographic hash of the resource to be fetched by request. Sets
+ * request's integrity.
+ */
+ integrity?: string;
+ /**
+ * A boolean to set request's keepalive.
+ */
+ keepalive?: boolean;
+ /**
+ * A string to set request's method.
+ */
+ method?: string;
+ /**
+ * A string to indicate whether the request will use CORS, or will be
+ * restricted to same-origin URLs. Sets request's mode.
+ */
+ mode?: RequestMode;
+ /**
+ * A string indicating whether request follows redirects, results in an error
+ * upon encountering a redirect, or returns the redirect (in an opaque
+ * fashion). Sets request's redirect.
+ */
+ redirect?: RequestRedirect;
+ /**
+ * A string whose value is a same-origin URL, "about:client", or the empty
+ * string, to set request's referrer.
+ */
+ referrer?: string;
+ /**
+ * A referrer policy to set request's referrerPolicy.
+ */
+ referrerPolicy?: ReferrerPolicy;
+ /**
+ * An AbortSignal to set request's signal.
+ */
+ signal?: AbortSignal | null;
+ /**
+ * Can only be null. Used to disassociate request from any Window.
+ */
+ window?: any;
+}
+
+/** This Fetch API interface represents a resource request. */
+declare class Request implements Body {
+ constructor(input: RequestInfo, init?: RequestInit);
+
+ /**
+ * Returns the cache mode associated with request, which is a string
+ * indicating how the request will interact with the browser's cache when
+ * fetching.
+ */
+ readonly cache: RequestCache;
+ /**
+ * Returns the credentials mode associated with request, which is a string
+ * indicating whether credentials will be sent with the request always, never,
+ * or only when sent to a same-origin URL.
+ */
+ readonly credentials: RequestCredentials;
+ /**
+ * Returns the kind of resource requested by request, e.g., "document" or "script".
+ */
+ readonly destination: RequestDestination;
+ /**
+ * Returns a Headers object consisting of the headers associated with request.
+ * Note that headers added in the network layer by the user agent will not be
+ * accounted for in this object, e.g., the "Host" header.
+ */
+ readonly headers: Headers;
+ /**
+ * Returns request's subresource integrity metadata, which is a cryptographic
+ * hash of the resource being fetched. Its value consists of multiple hashes
+ * separated by whitespace. [SRI]
+ */
+ readonly integrity: string;
+ /**
+ * Returns a boolean indicating whether or not request is for a history
+ * navigation (a.k.a. back-forward navigation).
+ */
+ readonly isHistoryNavigation: boolean;
+ /**
+ * Returns a boolean indicating whether or not request is for a reload
+ * navigation.
+ */
+ readonly isReloadNavigation: boolean;
+ /**
+ * Returns a boolean indicating whether or not request can outlive the global
+ * in which it was created.
+ */
+ readonly keepalive: boolean;
+ /**
+ * Returns request's HTTP method, which is "GET" by default.
+ */
+ readonly method: string;
+ /**
+ * Returns the mode associated with request, which is a string indicating
+ * whether the request will use CORS, or will be restricted to same-origin
+ * URLs.
+ */
+ readonly mode: RequestMode;
+ /**
+ * Returns the redirect mode associated with request, which is a string
+ * indicating how redirects for the request will be handled during fetching. A
+ * request will follow redirects by default.
+ */
+ readonly redirect: RequestRedirect;
+ /**
+ * Returns the referrer of request. Its value can be a same-origin URL if
+ * explicitly set in init, the empty string to indicate no referrer, and
+ * "about:client" when defaulting to the global's default. This is used during
+ * fetching to determine the value of the `Referer` header of the request
+ * being made.
+ */
+ readonly referrer: string;
+ /**
+ * Returns the referrer policy associated with request. This is used during
+ * fetching to compute the value of the request's referrer.
+ */
+ readonly referrerPolicy: ReferrerPolicy;
+ /**
+ * Returns the signal associated with request, which is an AbortSignal object
+ * indicating whether or not request has been aborted, and its abort event
+ * handler.
+ */
+ readonly signal: AbortSignal;
+ /**
+ * Returns the URL of request as a string.
+ */
+ readonly url: string;
+ clone(): Request;
+
+ /** A simple getter used to expose a `ReadableStream` of the body contents. */
+ readonly body: ReadableStream<Uint8Array> | null;
+ /** Stores a `Boolean` that declares whether the body has been used in a
+ * response yet.
+ */
+ readonly bodyUsed: boolean;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with an `ArrayBuffer`.
+ */
+ arrayBuffer(): Promise<ArrayBuffer>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with a `Blob`.
+ */
+ blob(): Promise<Blob>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with a `FormData` object.
+ */
+ formData(): Promise<FormData>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with the result of parsing the body text as JSON.
+ */
+ json(): Promise<any>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with a `USVString` (text).
+ */
+ text(): Promise<string>;
+}
+
+interface ResponseInit {
+ headers?: HeadersInit;
+ status?: number;
+ statusText?: string;
+}
+
+type ResponseType =
+ | "basic"
+ | "cors"
+ | "default"
+ | "error"
+ | "opaque"
+ | "opaqueredirect";
+
+/** This Fetch API interface represents the response to a request. */
+declare class Response implements Body {
+ constructor(body?: BodyInit | null, init?: ResponseInit);
+ static error(): Response;
+ static redirect(url: string, status?: number): Response;
+
+ readonly headers: Headers;
+ readonly ok: boolean;
+ readonly redirected: boolean;
+ readonly status: number;
+ readonly statusText: string;
+ readonly trailer: Promise<Headers>;
+ readonly type: ResponseType;
+ readonly url: string;
+ clone(): Response;
+
+ /** A simple getter used to expose a `ReadableStream` of the body contents. */
+ readonly body: ReadableStream<Uint8Array> | null;
+ /** Stores a `Boolean` that declares whether the body has been used in a
+ * response yet.
+ */
+ readonly bodyUsed: boolean;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with an `ArrayBuffer`.
+ */
+ arrayBuffer(): Promise<ArrayBuffer>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with a `Blob`.
+ */
+ blob(): Promise<Blob>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with a `FormData` object.
+ */
+ formData(): Promise<FormData>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with the result of parsing the body text as JSON.
+ */
+ json(): Promise<any>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with a `USVString` (text).
+ */
+ text(): Promise<string>;
+}
+
+/** Fetch a resource from the network. It returns a Promise that resolves to the
+ * Response to that request, whether it is successful or not.
+ *
+ * const response = await fetch("http://my.json.host/data.json");
+ * console.log(response.status); // e.g. 200
+ * console.log(response.statusText); // e.g. "OK"
+ * const jsonData = await response.json();
+ */
+declare function fetch(
+ input: Request | URL | string,
+ init?: RequestInit,
+): Promise<Response>;
diff --git a/extensions/fetch/lib.rs b/extensions/fetch/lib.rs
new file mode 100644
index 000000000..36a5b5afd
--- /dev/null
+++ b/extensions/fetch/lib.rs
@@ -0,0 +1,497 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+use deno_core::error::bad_resource_id;
+use deno_core::error::generic_error;
+use deno_core::error::null_opbuf;
+use deno_core::error::type_error;
+use deno_core::error::AnyError;
+use deno_core::futures::Future;
+use deno_core::futures::Stream;
+use deno_core::futures::StreamExt;
+use deno_core::include_js_files;
+use deno_core::op_async;
+use deno_core::op_sync;
+use deno_core::url::Url;
+use deno_core::AsyncRefCell;
+use deno_core::CancelFuture;
+use deno_core::CancelHandle;
+use deno_core::CancelTryFuture;
+use deno_core::Extension;
+use deno_core::OpState;
+use deno_core::RcRef;
+use deno_core::Resource;
+use deno_core::ResourceId;
+use deno_core::ZeroCopyBuf;
+
+use data_url::DataUrl;
+use deno_file::BlobUrlStore;
+use reqwest::header::HeaderMap;
+use reqwest::header::HeaderName;
+use reqwest::header::HeaderValue;
+use reqwest::header::USER_AGENT;
+use reqwest::redirect::Policy;
+use reqwest::Body;
+use reqwest::Client;
+use reqwest::Method;
+use reqwest::Response;
+use serde::Deserialize;
+use serde::Serialize;
+use std::borrow::Cow;
+use std::cell::RefCell;
+use std::convert::From;
+use std::fs::File;
+use std::io::Read;
+use std::path::Path;
+use std::path::PathBuf;
+use std::pin::Pin;
+use std::rc::Rc;
+use tokio::io::AsyncReadExt;
+use tokio::sync::mpsc;
+use tokio_stream::wrappers::ReceiverStream;
+use tokio_util::io::StreamReader;
+
+pub use reqwest; // Re-export reqwest
+
+pub fn init<P: FetchPermissions + 'static>(
+ user_agent: String,
+ ca_data: Option<Vec<u8>>,
+) -> Extension {
+ Extension::builder()
+ .js(include_js_files!(
+ prefix "deno:extensions/fetch",
+ "01_fetch_util.js",
+ "11_streams.js",
+ "20_headers.js",
+ "21_formdata.js",
+ "22_body.js",
+ "22_http_client.js",
+ "23_request.js",
+ "23_response.js",
+ "26_fetch.js",
+ ))
+ .ops(vec![
+ ("op_fetch", op_sync(op_fetch::<P>)),
+ ("op_fetch_send", op_async(op_fetch_send)),
+ ("op_fetch_request_write", op_async(op_fetch_request_write)),
+ ("op_fetch_response_read", op_async(op_fetch_response_read)),
+ ("op_create_http_client", op_sync(op_create_http_client::<P>)),
+ ])
+ .state(move |state| {
+ state.put::<reqwest::Client>({
+ create_http_client(user_agent.clone(), ca_data.clone()).unwrap()
+ });
+ state.put::<HttpClientDefaults>(HttpClientDefaults {
+ ca_data: ca_data.clone(),
+ user_agent: user_agent.clone(),
+ });
+ Ok(())
+ })
+ .build()
+}
+
+pub struct HttpClientDefaults {
+ pub user_agent: String,
+ pub ca_data: Option<Vec<u8>>,
+}
+
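+/// Permission checks consulted by `op_fetch` and `op_create_http_client`.
+/// Embedders provide an implementation through the op state.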
+pub trait FetchPermissions {
+ fn check_net_url(&mut self, _url: &Url) -> Result<(), AnyError>;
+ fn check_read(&mut self, _p: &Path) -> Result<(), AnyError>;
+}
+
+/// For use with `op_fetch` when the embedder does not want to enforce
+/// permission checks.
+pub struct NoFetchPermissions;
+
+impl FetchPermissions for NoFetchPermissions {
+ fn check_net_url(&mut self, _url: &Url) -> Result<(), AnyError> {
+ Ok(())
+ }
+
+ fn check_read(&mut self, _p: &Path) -> Result<(), AnyError> {
+ Ok(())
+ }
+}
+
+pub fn get_declaration() -> PathBuf {
+ PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("lib.deno_fetch.d.ts")
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct FetchArgs {
+ method: String,
+ url: String,
+ headers: Vec<(String, String)>,
+ client_rid: Option<u32>,
+ has_body: bool,
+}
+
+#[derive(Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct FetchReturn {
+ request_rid: ResourceId,
+ request_body_rid: Option<ResourceId>,
+}
+
+pub fn op_fetch<FP>(
+ state: &mut OpState,
+ args: FetchArgs,
+ data: Option<ZeroCopyBuf>,
+) -> Result<FetchReturn, AnyError>
+where
+ FP: FetchPermissions + 'static,
+{
+ let client = if let Some(rid) = args.client_rid {
+ let r = state
+ .resource_table
+ .get::<HttpClientResource>(rid)
+ .ok_or_else(bad_resource_id)?;
+ r.client.clone()
+ } else {
+ let client = state.borrow::<reqwest::Client>();
+ client.clone()
+ };
+
+ let method = Method::from_bytes(args.method.as_bytes())?;
+ let url = Url::parse(&args.url)?;
+
+ // Check scheme before asking for net permission
+ let scheme = url.scheme();
+ let (request_rid, request_body_rid) = match scheme {
+ "http" | "https" => {
+ let permissions = state.borrow_mut::<FP>();
+ permissions.check_net_url(&url)?;
+
+ let mut request = client.request(method, url);
+
+ let request_body_rid = if args.has_body {
+ match data {
+ None => {
+ // If no body is passed, we return a writer for streaming the body.
+ let (tx, rx) = mpsc::channel::<std::io::Result<Vec<u8>>>(1);
+ request = request.body(Body::wrap_stream(ReceiverStream::new(rx)));
+
+ let request_body_rid =
+ state.resource_table.add(FetchRequestBodyResource {
+ body: AsyncRefCell::new(tx),
+ cancel: CancelHandle::default(),
+ });
+
+ Some(request_body_rid)
+ }
+ Some(data) => {
+ // If a body is passed, we use it, and don't return a body for streaming.
+ request = request.body(Vec::from(&*data));
+ None
+ }
+ }
+ } else {
+ None
+ };
+
+ for (key, value) in args.headers {
+ let name = HeaderName::from_bytes(key.as_bytes()).unwrap();
+ let v = HeaderValue::from_str(&value).unwrap();
+ request = request.header(name, v);
+ }
+
+ let fut = request.send();
+
+ let request_rid = state
+ .resource_table
+ .add(FetchRequestResource(Box::pin(fut)));
+
+ (request_rid, request_body_rid)
+ }
+ "data" => {
+ let data_url = DataUrl::process(url.as_str())
+ .map_err(|e| type_error(format!("{:?}", e)))?;
+
+ let (body, _) = data_url
+ .decode_to_vec()
+ .map_err(|e| type_error(format!("{:?}", e)))?;
+
+ let response = http::Response::builder()
+ .status(http::StatusCode::OK)
+ .header(http::header::CONTENT_TYPE, data_url.mime_type().to_string())
+ .body(reqwest::Body::from(body))?;
+
+ let fut = async move { Ok(Response::from(response)) };
+
+ let request_rid = state
+ .resource_table
+ .add(FetchRequestResource(Box::pin(fut)));
+
+ (request_rid, None)
+ }
+ "blob" => {
+ let blob_url_storage =
+ state.try_borrow::<BlobUrlStore>().ok_or_else(|| {
+ type_error("Blob URLs are not supported in this context.")
+ })?;
+
+ let blob = blob_url_storage
+ .get(url)?
+ .ok_or_else(|| type_error("Blob for the given URL not found."))?;
+
+ if method != "GET" {
+ return Err(type_error("Blob URL fetch only supports GET method."));
+ }
+
+ let response = http::Response::builder()
+ .status(http::StatusCode::OK)
+ .header(http::header::CONTENT_LENGTH, blob.data.len())
+ .header(http::header::CONTENT_TYPE, blob.media_type)
+ .body(reqwest::Body::from(blob.data))?;
+
+ let fut = async move { Ok(Response::from(response)) };
+
+ let request_rid = state
+ .resource_table
+ .add(FetchRequestResource(Box::pin(fut)));
+
+ (request_rid, None)
+ }
+ _ => return Err(type_error(format!("scheme '{}' not supported", scheme))),
+ };
+
+ Ok(FetchReturn {
+ request_rid,
+ request_body_rid,
+ })
+}
+
+#[derive(Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct FetchResponse {
+ status: u16,
+ status_text: String,
+ headers: Vec<(String, String)>,
+ url: String,
+ response_rid: ResourceId,
+}
+
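+/// Consumes the request resource created by `op_fetch`, awaits the response,
+/// and registers a `FetchResponseBodyResource` for streaming the body.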
+pub async fn op_fetch_send(
+ state: Rc<RefCell<OpState>>,
+ rid: ResourceId,
+ _data: Option<ZeroCopyBuf>,
+) -> Result<FetchResponse, AnyError> {
+ let request = state
+ .borrow_mut()
+ .resource_table
+ .take::<FetchRequestResource>(rid)
+ .ok_or_else(bad_resource_id)?;
+
+ let request = Rc::try_unwrap(request)
+ .ok()
+ .expect("multiple op_fetch_send ongoing");
+
+ let res = match request.0.await {
+ Ok(res) => res,
+ Err(e) => return Err(type_error(e.to_string())),
+ };
+
+ let status = res.status();
+ let url = res.url().to_string();
+ let mut res_headers = Vec::new();
+ for (key, val) in res.headers().iter() {
+ let key_string = key.to_string();
+
+ if val.as_bytes().is_ascii() {
+ res_headers.push((key_string, val.to_str().unwrap().to_owned()))
+ } else {
+ res_headers.push((
+ key_string,
+ val
+ .as_bytes()
+ .iter()
+ .map(|&c| c as char)
+ .collect::<String>(),
+ ));
+ }
+ }
+
+ let stream: BytesStream = Box::pin(res.bytes_stream().map(|r| {
+ r.map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))
+ }));
+ let stream_reader = StreamReader::new(stream);
+ let rid = state
+ .borrow_mut()
+ .resource_table
+ .add(FetchResponseBodyResource {
+ reader: AsyncRefCell::new(stream_reader),
+ cancel: CancelHandle::default(),
+ });
+
+ Ok(FetchResponse {
+ status: status.as_u16(),
+ status_text: status.canonical_reason().unwrap_or("").to_string(),
+ headers: res_headers,
+ url,
+ response_rid: rid,
+ })
+}
+
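+/// Writes one chunk of a streaming request body to the in-flight request.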
+pub async fn op_fetch_request_write(
+ state: Rc<RefCell<OpState>>,
+ rid: ResourceId,
+ data: Option<ZeroCopyBuf>,
+) -> Result<(), AnyError> {
+ let data = data.ok_or_else(null_opbuf)?;
+ let buf = Vec::from(&*data);
+
+ let resource = state
+ .borrow()
+ .resource_table
+ .get::<FetchRequestBodyResource>(rid)
+ .ok_or_else(bad_resource_id)?;
+ let body = RcRef::map(&resource, |r| &r.body).borrow_mut().await;
+ let cancel = RcRef::map(resource, |r| &r.cancel);
+ body.send(Ok(buf)).or_cancel(cancel).await??;
+
+ Ok(())
+}
+
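+/// Reads the next chunk of the response body into the provided buffer and
+/// returns the number of bytes read; `0` signals end of body.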
+pub async fn op_fetch_response_read(
+ state: Rc<RefCell<OpState>>,
+ rid: ResourceId,
+ data: Option<ZeroCopyBuf>,
+) -> Result<usize, AnyError> {
+ let data = data.ok_or_else(null_opbuf)?;
+
+ let resource = state
+ .borrow()
+ .resource_table
+ .get::<FetchResponseBodyResource>(rid)
+ .ok_or_else(bad_resource_id)?;
+ let mut reader = RcRef::map(&resource, |r| &r.reader).borrow_mut().await;
+ let cancel = RcRef::map(resource, |r| &r.cancel);
+ let mut buf = data.clone();
+ let read = reader.read(&mut buf).try_or_cancel(cancel).await?;
+ Ok(read)
+}
+
+struct FetchRequestResource(
+ Pin<Box<dyn Future<Output = Result<Response, reqwest::Error>>>>,
+);
+
+impl Resource for FetchRequestResource {
+ fn name(&self) -> Cow<str> {
+ "fetchRequest".into()
+ }
+}
+
+struct FetchRequestBodyResource {
+ body: AsyncRefCell<mpsc::Sender<std::io::Result<Vec<u8>>>>,
+ cancel: CancelHandle,
+}
+
+impl Resource for FetchRequestBodyResource {
+ fn name(&self) -> Cow<str> {
+ "fetchRequestBody".into()
+ }
+}
+
+type BytesStream =
+ Pin<Box<dyn Stream<Item = Result<bytes::Bytes, std::io::Error>> + Unpin>>;
+
+struct FetchResponseBodyResource {
+ reader: AsyncRefCell<StreamReader<BytesStream, bytes::Bytes>>,
+ cancel: CancelHandle,
+}
+
+impl Resource for FetchResponseBodyResource {
+ fn name(&self) -> Cow<str> {
+ "fetchResponseBody".into()
+ }
+}
+
+struct HttpClientResource {
+ client: Client,
+}
+
+impl Resource for HttpClientResource {
+ fn name(&self) -> Cow<str> {
+ "httpClient".into()
+ }
+}
+
+impl HttpClientResource {
+ fn new(client: Client) -> Self {
+ Self { client }
+ }
+}
+
+#[derive(Deserialize, Default, Debug)]
+#[serde(rename_all = "camelCase")]
+#[serde(default)]
+pub struct CreateHttpClientOptions {
+ ca_file: Option<String>,
+ ca_data: Option<String>,
+}
+
+pub fn op_create_http_client<FP>(
+ state: &mut OpState,
+ args: CreateHttpClientOptions,
+ _zero_copy: Option<ZeroCopyBuf>,
+) -> Result<ResourceId, AnyError>
+where
+ FP: FetchPermissions + 'static,
+{
+ if let Some(ca_file) = args.ca_file.clone() {
+ let permissions = state.borrow_mut::<FP>();
+ permissions.check_read(&PathBuf::from(ca_file))?;
+ }
+
+ let defaults = state.borrow::<HttpClientDefaults>();
+
+ let cert_data =
+ get_cert_data(args.ca_file.as_deref(), args.ca_data.as_deref())?;
+ let client = create_http_client(
+ defaults.user_agent.clone(),
+ cert_data.or_else(|| defaults.ca_data.clone()),
+ )
+ .unwrap();
+
+ let rid = state.resource_table.add(HttpClientResource::new(client));
+ Ok(rid)
+}
+
+fn get_cert_data(
+ ca_file: Option<&str>,
+ ca_data: Option<&str>,
+) -> Result<Option<Vec<u8>>, AnyError> {
+ if let Some(ca_data) = ca_data {
+ Ok(Some(ca_data.as_bytes().to_vec()))
+ } else if let Some(ca_file) = ca_file {
+ let mut buf = Vec::new();
+ File::open(ca_file)?.read_to_end(&mut buf)?;
+ Ok(Some(buf))
+ } else {
+ Ok(None)
+ }
+}
+
+/// Creates a new instance of an async `reqwest::Client`. This client supports
+/// proxies and does not follow redirects.
+pub fn create_http_client(
+ user_agent: String,
+ ca_data: Option<Vec<u8>>,
+) -> Result<Client, AnyError> {
+ let mut headers = HeaderMap::new();
+ headers.insert(USER_AGENT, user_agent.parse().unwrap());
+ let mut builder = Client::builder()
+ .redirect(Policy::none())
+ .default_headers(headers)
+ .use_rustls_tls();
+
+ if let Some(ca_data) = ca_data {
+ let cert = reqwest::Certificate::from_pem(&ca_data)?;
+ builder = builder.add_root_certificate(cert);
+ }
+
+ builder
+ .build()
+ .map_err(|e| generic_error(format!("Unable to build http client: {}", e)))
+}