Diffstat (limited to 'op_crates/fetch')
-rw-r--r--  op_crates/fetch/01_fetch_util.js      |   20
-rw-r--r--  op_crates/fetch/03_dom_iterable.js    |   77
-rw-r--r--  op_crates/fetch/11_streams.js         | 3418
-rw-r--r--  op_crates/fetch/20_headers.js         |  256
-rw-r--r--  op_crates/fetch/26_fetch.js           | 1390
-rw-r--r--  op_crates/fetch/Cargo.toml            |   19
-rw-r--r--  op_crates/fetch/lib.deno_fetch.d.ts   |  636
-rw-r--r--  op_crates/fetch/lib.rs                |  266
8 files changed, 6082 insertions, 0 deletions
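The new scripts all follow the same bootstrap pattern: each file is an IIFE that receives the global object and publishes its exports on a window.__bootstrap namespace (fetchUtil, domIterable, and so on), which later scripts in the crate read from. As an illustration only (not part of this commit), the sketch below composes DomIterableMixin from 03_dom_iterable.js with a made-up backing class; the RequestTracker name and its data symbol are invented, and the snippet assumes it runs in the same global scope after the crate's scripts have been evaluated.

// Illustrative sketch only -- RequestTracker and dataSymbol are hypothetical,
// not part of this diff. Assumes 01_fetch_util.js and 03_dom_iterable.js have
// already run and populated window.__bootstrap.
const { DomIterableMixin } = window.__bootstrap.domIterable;

const dataSymbol = Symbol("requestTrackerData");

class RequestTrackerBase {
  constructor(pairs = []) {
    // The mixin expects this[dataSymbol] to yield [key, value] tuples when
    // iterated, e.g. an array of pairs or a Map.
    this[dataSymbol] = [...pairs];
  }
}

// Adds entries(), keys(), values(), forEach() and Symbol.iterator,
// all backed by this[dataSymbol].
const RequestTracker = DomIterableMixin(RequestTrackerBase, dataSymbol);

const tracker = new RequestTracker([["GET", "/index.html"], ["POST", "/api"]]);
for (const [method, path] of tracker) {
  console.log(method, path);
}
// forEach callbacks receive (value, key, target), mirroring Map.prototype.forEach.
tracker.forEach((path, method) => console.log(`${method} -> ${path}`));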
diff --git a/op_crates/fetch/01_fetch_util.js b/op_crates/fetch/01_fetch_util.js new file mode 100644 index 000000000..07f45d821 --- /dev/null +++ b/op_crates/fetch/01_fetch_util.js @@ -0,0 +1,20 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +((window) => { + function requiredArguments( + name, + length, + required, + ) { + if (length < required) { + const errMsg = `${name} requires at least ${required} argument${ + required === 1 ? "" : "s" + }, but only ${length} present`; + throw new TypeError(errMsg); + } + } + + window.__bootstrap.fetchUtil = { + requiredArguments, + }; +})(this); diff --git a/op_crates/fetch/03_dom_iterable.js b/op_crates/fetch/03_dom_iterable.js new file mode 100644 index 000000000..bea60b61f --- /dev/null +++ b/op_crates/fetch/03_dom_iterable.js @@ -0,0 +1,77 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +((window) => { + const { requiredArguments } = window.__bootstrap.fetchUtil; + // const { exposeForTest } = window.__bootstrap.internals; + + function DomIterableMixin( + Base, + dataSymbol, + ) { + // we have to cast `this` as `any` because there is no way to describe the + // Base class in a way where the Symbol `dataSymbol` is defined. So the + // runtime code works, but we do lose a little bit of type safety. + + // Additionally, we have to not use .keys() nor .values() since the internal + // slot differs in type - some have a Map, which yields [K, V] in + // Symbol.iterator, and some have an Array, which yields V, in this case + // [K, V] too as they are arrays of tuples. + + const DomIterable = class extends Base { + *entries() { + for (const entry of this[dataSymbol]) { + yield entry; + } + } + + *keys() { + for (const [key] of this[dataSymbol]) { + yield key; + } + } + + *values() { + for (const [, value] of this[dataSymbol]) { + yield value; + } + } + + forEach( + callbackfn, + thisArg, + ) { + requiredArguments( + `${this.constructor.name}.forEach`, + arguments.length, + 1, + ); + callbackfn = callbackfn.bind( + thisArg == null ? globalThis : Object(thisArg), + ); + for (const [key, value] of this[dataSymbol]) { + callbackfn(value, key, this); + } + } + + *[Symbol.iterator]() { + for (const entry of this[dataSymbol]) { + yield entry; + } + } + }; + + // we want the Base class name to be the name of the class. + Object.defineProperty(DomIterable, "name", { + value: Base.name, + configurable: true, + }); + + return DomIterable; + } + + // exposeForTest("DomIterableMixin", DomIterableMixin); + + window.__bootstrap.domIterable = { + DomIterableMixin, + }; +})(this); diff --git a/op_crates/fetch/11_streams.js b/op_crates/fetch/11_streams.js new file mode 100644 index 000000000..b182a96ed --- /dev/null +++ b/op_crates/fetch/11_streams.js @@ -0,0 +1,3418 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +// This code closely follows the WHATWG Stream Specification +// See: https://streams.spec.whatwg.org/ +// +// There are some parts that are not fully implemented, and there are some +// comments which point to steps of the specification that are not implemented. + +((window) => { + /* eslint-disable @typescript-eslint/no-explicit-any,require-await */ + + const customInspect = Symbol.for("Deno.customInspect"); + + /** Clone a value in a similar way to structured cloning. It is similar to a + * StructureDeserialize(StructuredSerialize(...)). 
*/ + function cloneValue(value) { + switch (typeof value) { + case "number": + case "string": + case "boolean": + case "undefined": + case "bigint": + return value; + case "object": { + if (objectCloneMemo.has(value)) { + return objectCloneMemo.get(value); + } + if (value === null) { + return value; + } + if (value instanceof Date) { + return new Date(value.valueOf()); + } + if (value instanceof RegExp) { + return new RegExp(value); + } + if (value instanceof SharedArrayBuffer) { + return value; + } + if (value instanceof ArrayBuffer) { + const cloned = cloneArrayBuffer( + value, + 0, + value.byteLength, + ArrayBuffer, + ); + objectCloneMemo.set(value, cloned); + return cloned; + } + if (ArrayBuffer.isView(value)) { + const clonedBuffer = cloneValue(value.buffer); + // Use DataViewConstructor type purely for type-checking, can be a + // DataView or TypedArray. They use the same constructor signature, + // only DataView has a length in bytes and TypedArrays use a length in + // terms of elements, so we adjust for that. + let length; + if (value instanceof DataView) { + length = value.byteLength; + } else { + length = value.length; + } + return new (value.constructor)( + clonedBuffer, + value.byteOffset, + length, + ); + } + if (value instanceof Map) { + const clonedMap = new Map(); + objectCloneMemo.set(value, clonedMap); + value.forEach((v, k) => clonedMap.set(k, cloneValue(v))); + return clonedMap; + } + if (value instanceof Set) { + const clonedSet = new Map(); + objectCloneMemo.set(value, clonedSet); + value.forEach((v, k) => clonedSet.set(k, cloneValue(v))); + return clonedSet; + } + + const clonedObj = {}; + objectCloneMemo.set(value, clonedObj); + const sourceKeys = Object.getOwnPropertyNames(value); + for (const key of sourceKeys) { + clonedObj[key] = cloneValue(value[key]); + } + return clonedObj; + } + case "symbol": + case "function": + default: + throw new DOMException("Uncloneable value in stream", "DataCloneError"); + } + } + + function setFunctionName(fn, value) { + Object.defineProperty(fn, "name", { value, configurable: true }); + } + + class AssertionError extends Error { + constructor(msg) { + super(msg); + this.name = "AssertionError"; + } + } + + function assert(cond, msg = "Assertion failed.") { + if (!cond) { + throw new AssertionError(msg); + } + } + + const sym = { + abortAlgorithm: Symbol("abortAlgorithm"), + abortSteps: Symbol("abortSteps"), + asyncIteratorReader: Symbol("asyncIteratorReader"), + autoAllocateChunkSize: Symbol("autoAllocateChunkSize"), + backpressure: Symbol("backpressure"), + backpressureChangePromise: Symbol("backpressureChangePromise"), + byobRequest: Symbol("byobRequest"), + cancelAlgorithm: Symbol("cancelAlgorithm"), + cancelSteps: Symbol("cancelSteps"), + closeAlgorithm: Symbol("closeAlgorithm"), + closedPromise: Symbol("closedPromise"), + closeRequest: Symbol("closeRequest"), + closeRequested: Symbol("closeRequested"), + controlledReadableByteStream: Symbol( + "controlledReadableByteStream", + ), + controlledReadableStream: Symbol("controlledReadableStream"), + controlledTransformStream: Symbol("controlledTransformStream"), + controlledWritableStream: Symbol("controlledWritableStream"), + disturbed: Symbol("disturbed"), + errorSteps: Symbol("errorSteps"), + flushAlgorithm: Symbol("flushAlgorithm"), + forAuthorCode: Symbol("forAuthorCode"), + inFlightWriteRequest: Symbol("inFlightWriteRequest"), + inFlightCloseRequest: Symbol("inFlightCloseRequest"), + isFakeDetached: Symbol("isFakeDetached"), + ownerReadableStream: 
Symbol("ownerReadableStream"), + ownerWritableStream: Symbol("ownerWritableStream"), + pendingAbortRequest: Symbol("pendingAbortRequest"), + preventCancel: Symbol("preventCancel"), + pullAgain: Symbol("pullAgain"), + pullAlgorithm: Symbol("pullAlgorithm"), + pulling: Symbol("pulling"), + pullSteps: Symbol("pullSteps"), + queue: Symbol("queue"), + queueTotalSize: Symbol("queueTotalSize"), + readable: Symbol("readable"), + readableStreamController: Symbol("readableStreamController"), + reader: Symbol("reader"), + readRequests: Symbol("readRequests"), + readyPromise: Symbol("readyPromise"), + started: Symbol("started"), + state: Symbol("state"), + storedError: Symbol("storedError"), + strategyHWM: Symbol("strategyHWM"), + strategySizeAlgorithm: Symbol("strategySizeAlgorithm"), + transformAlgorithm: Symbol("transformAlgorithm"), + transformStreamController: Symbol("transformStreamController"), + writableStreamController: Symbol("writableStreamController"), + writeAlgorithm: Symbol("writeAlgorithm"), + writable: Symbol("writable"), + writer: Symbol("writer"), + writeRequests: Symbol("writeRequests"), + }; + class ReadableByteStreamController { + constructor() { + throw new TypeError( + "ReadableByteStreamController's constructor cannot be called.", + ); + } + + get byobRequest() { + return undefined; + } + + get desiredSize() { + if (!isReadableByteStreamController(this)) { + throw new TypeError("Invalid ReadableByteStreamController."); + } + return readableByteStreamControllerGetDesiredSize(this); + } + + close() { + if (!isReadableByteStreamController(this)) { + throw new TypeError("Invalid ReadableByteStreamController."); + } + if (this[sym.closeRequested]) { + throw new TypeError("Closed already requested."); + } + if (this[sym.controlledReadableByteStream][sym.state] !== "readable") { + throw new TypeError( + "ReadableByteStreamController's stream is not in a readable state.", + ); + } + readableByteStreamControllerClose(this); + } + + enqueue(chunk) { + if (!isReadableByteStreamController(this)) { + throw new TypeError("Invalid ReadableByteStreamController."); + } + if (this[sym.closeRequested]) { + throw new TypeError("Closed already requested."); + } + if (this[sym.controlledReadableByteStream][sym.state] !== "readable") { + throw new TypeError( + "ReadableByteStreamController's stream is not in a readable state.", + ); + } + if (!ArrayBuffer.isView(chunk)) { + throw new TypeError( + "You can only enqueue array buffer views when using a ReadableByteStreamController", + ); + } + if (isDetachedBuffer(chunk.buffer)) { + throw new TypeError( + "Cannot enqueue a view onto a detached ArrayBuffer", + ); + } + readableByteStreamControllerEnqueue(this, chunk); + } + + error(error) { + if (!isReadableByteStreamController(this)) { + throw new TypeError("Invalid ReadableByteStreamController."); + } + readableByteStreamControllerError(this, error); + } + + [sym.cancelSteps](reason) { + // 3.11.5.1.1 If this.[[pendingPullIntos]] is not empty, + resetQueue(this); + const result = this[sym.cancelAlgorithm](reason); + readableByteStreamControllerClearAlgorithms(this); + return result; + } + + [sym.pullSteps]() { + const stream = this[sym.controlledReadableByteStream]; + assert(readableStreamHasDefaultReader(stream)); + if (this[sym.queueTotalSize] > 0) { + assert(readableStreamGetNumReadRequests(stream) === 0); + const entry = this[sym.queue].shift(); + assert(entry); + this[sym.queueTotalSize] -= entry.size; + readableByteStreamControllerHandleQueueDrain(this); + const view = new 
Uint8Array(entry.value, entry.offset, entry.size); + return Promise.resolve( + readableStreamCreateReadResult( + view, + false, + stream[sym.reader][sym.forAuthorCode], + ), + ); + } + // 3.11.5.2.5 If autoAllocateChunkSize is not undefined, + const promise = readableStreamAddReadRequest(stream); + readableByteStreamControllerCallPullIfNeeded(this); + return promise; + } + + [customInspect]() { + return `${this.constructor.name} { byobRequest: ${ + String(this.byobRequest) + }, desiredSize: ${String(this.desiredSize)} }`; + } + } + + class ReadableStreamDefaultController { + constructor() { + throw new TypeError( + "ReadableStreamDefaultController's constructor cannot be called.", + ); + } + + get desiredSize() { + if (!isReadableStreamDefaultController(this)) { + throw new TypeError("Invalid ReadableStreamDefaultController."); + } + return readableStreamDefaultControllerGetDesiredSize(this); + } + + close() { + if (!isReadableStreamDefaultController(this)) { + throw new TypeError("Invalid ReadableStreamDefaultController."); + } + if (!readableStreamDefaultControllerCanCloseOrEnqueue(this)) { + throw new TypeError( + "ReadableStreamDefaultController cannot close or enqueue.", + ); + } + readableStreamDefaultControllerClose(this); + } + + enqueue(chunk) { + if (!isReadableStreamDefaultController(this)) { + throw new TypeError("Invalid ReadableStreamDefaultController."); + } + if (!readableStreamDefaultControllerCanCloseOrEnqueue(this)) { + throw new TypeError("ReadableSteamController cannot enqueue."); + } + return readableStreamDefaultControllerEnqueue(this, chunk); + } + + error(error) { + if (!isReadableStreamDefaultController(this)) { + throw new TypeError("Invalid ReadableStreamDefaultController."); + } + readableStreamDefaultControllerError(this, error); + } + + [sym.cancelSteps](reason) { + resetQueue(this); + const result = this[sym.cancelAlgorithm](reason); + readableStreamDefaultControllerClearAlgorithms(this); + return result; + } + + [sym.pullSteps]() { + const stream = this[sym.controlledReadableStream]; + if (this[sym.queue].length) { + const chunk = dequeueValue(this); + if (this[sym.closeRequested] && this[sym.queue].length === 0) { + readableStreamDefaultControllerClearAlgorithms(this); + readableStreamClose(stream); + } else { + readableStreamDefaultControllerCallPullIfNeeded(this); + } + return Promise.resolve( + readableStreamCreateReadResult( + chunk, + false, + stream[sym.reader][sym.forAuthorCode], + ), + ); + } + const pendingPromise = readableStreamAddReadRequest(stream); + readableStreamDefaultControllerCallPullIfNeeded(this); + return pendingPromise; + } + + [customInspect]() { + return `${this.constructor.name} { desiredSize: ${ + String(this.desiredSize) + } }`; + } + } + + class ReadableStreamDefaultReader { + constructor(stream) { + if (!isReadableStream(stream)) { + throw new TypeError("stream is not a ReadableStream."); + } + if (isReadableStreamLocked(stream)) { + throw new TypeError("stream is locked."); + } + readableStreamReaderGenericInitialize(this, stream); + this[sym.readRequests] = []; + } + + get closed() { + if (!isReadableStreamDefaultReader(this)) { + return Promise.reject( + new TypeError("Invalid ReadableStreamDefaultReader."), + ); + } + return ( + this[sym.closedPromise].promise ?? 
+ Promise.reject(new TypeError("Invalid reader.")) + ); + } + + cancel(reason) { + if (!isReadableStreamDefaultReader(this)) { + return Promise.reject( + new TypeError("Invalid ReadableStreamDefaultReader."), + ); + } + if (!this[sym.ownerReadableStream]) { + return Promise.reject(new TypeError("Invalid reader.")); + } + return readableStreamReaderGenericCancel(this, reason); + } + + read() { + if (!isReadableStreamDefaultReader(this)) { + return Promise.reject( + new TypeError("Invalid ReadableStreamDefaultReader."), + ); + } + if (!this[sym.ownerReadableStream]) { + return Promise.reject(new TypeError("Invalid reader.")); + } + return readableStreamDefaultReaderRead(this); + } + + releaseLock() { + if (!isReadableStreamDefaultReader(this)) { + throw new TypeError("Invalid ReadableStreamDefaultReader."); + } + if (this[sym.ownerReadableStream] === undefined) { + return; + } + if (this[sym.readRequests].length) { + throw new TypeError("Cannot release lock with pending read requests."); + } + readableStreamReaderGenericRelease(this); + } + + [customInspect]() { + return `${this.constructor.name} { closed: Promise }`; + } + } + + const AsyncIteratorPrototype = Object + .getPrototypeOf(Object.getPrototypeOf(async function* () {}).prototype); + + const ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf({ + next() { + if (!isReadableStreamAsyncIterator(this)) { + return Promise.reject( + new TypeError("invalid ReadableStreamAsyncIterator."), + ); + } + const reader = this[sym.asyncIteratorReader]; + if (!reader[sym.ownerReadableStream]) { + return Promise.reject( + new TypeError("reader owner ReadableStream is undefined."), + ); + } + return readableStreamDefaultReaderRead(reader).then((result) => { + assert(typeof result === "object"); + const { done } = result; + assert(typeof done === "boolean"); + if (done) { + readableStreamReaderGenericRelease(reader); + } + const { value } = result; + return readableStreamCreateReadResult(value, done, true); + }); + }, + return( + value, + ) { + if (!isReadableStreamAsyncIterator(this)) { + return Promise.reject( + new TypeError("invalid ReadableStreamAsyncIterator."), + ); + } + const reader = this[sym.asyncIteratorReader]; + if (!reader[sym.ownerReadableStream]) { + return Promise.reject( + new TypeError("reader owner ReadableStream is undefined."), + ); + } + if (reader[sym.readRequests].length) { + return Promise.reject( + new TypeError("reader has outstanding read requests."), + ); + } + if (!this[sym.preventCancel]) { + const result = readableStreamReaderGenericCancel(reader, value); + readableStreamReaderGenericRelease(reader); + return result.then(() => + readableStreamCreateReadResult(value, true, true) + ); + } + readableStreamReaderGenericRelease(reader); + return Promise.resolve( + readableStreamCreateReadResult(value, true, true), + ); + }, + }, AsyncIteratorPrototype); + + class ReadableStream { + constructor( + underlyingSource = {}, + strategy = {}, + ) { + initializeReadableStream(this); + const { size } = strategy; + let { highWaterMark } = strategy; + const { type } = underlyingSource; + + if (underlyingSource.type == "bytes") { + if (size !== undefined) { + throw new RangeError( + `When underlying source is "bytes", strategy.size must be undefined.`, + ); + } + highWaterMark = validateAndNormalizeHighWaterMark(highWaterMark ?? 
0); + setUpReadableByteStreamControllerFromUnderlyingSource( + this, + underlyingSource, + highWaterMark, + ); + } else if (type === undefined) { + const sizeAlgorithm = makeSizeAlgorithmFromSizeFunction(size); + highWaterMark = validateAndNormalizeHighWaterMark(highWaterMark ?? 1); + setUpReadableStreamDefaultControllerFromUnderlyingSource( + this, + underlyingSource, + highWaterMark, + sizeAlgorithm, + ); + } else { + throw new RangeError( + `Valid values for underlyingSource are "bytes" or undefined. Received: "${type}".`, + ); + } + } + + get locked() { + if (!isReadableStream(this)) { + throw new TypeError("Invalid ReadableStream."); + } + return isReadableStreamLocked(this); + } + + cancel(reason) { + if (!isReadableStream(this)) { + return Promise.reject(new TypeError("Invalid ReadableStream.")); + } + if (isReadableStreamLocked(this)) { + return Promise.reject( + new TypeError("Cannot cancel a locked ReadableStream."), + ); + } + return readableStreamCancel(this, reason); + } + + getIterator({ + preventCancel, + } = {}) { + if (!isReadableStream(this)) { + throw new TypeError("Invalid ReadableStream."); + } + const reader = acquireReadableStreamDefaultReader(this); + const iterator = Object.create(ReadableStreamAsyncIteratorPrototype); + iterator[sym.asyncIteratorReader] = reader; + iterator[sym.preventCancel] = Boolean(preventCancel); + return iterator; + } + + getReader({ mode } = {}) { + if (!isReadableStream(this)) { + throw new TypeError("Invalid ReadableStream."); + } + if (mode === undefined) { + return acquireReadableStreamDefaultReader(this, true); + } + mode = String(mode); + // 3.2.5.4.4 If mode is "byob", return ? AcquireReadableStreamBYOBReader(this, true). + throw new RangeError(`Unsupported mode "${mode}"`); + } + + pipeThrough( + { + writable, + readable, + }, + { preventClose, preventAbort, preventCancel, signal } = {}, + ) { + if (!isReadableStream(this)) { + throw new TypeError("Invalid ReadableStream."); + } + if (!isWritableStream(writable)) { + throw new TypeError("writable is not a valid WritableStream."); + } + if (!isReadableStream(readable)) { + throw new TypeError("readable is not a valid ReadableStream."); + } + preventClose = Boolean(preventClose); + preventAbort = Boolean(preventAbort); + preventCancel = Boolean(preventCancel); + if (signal && !(signal instanceof AbortSignal)) { + throw new TypeError("Invalid signal."); + } + if (isReadableStreamLocked(this)) { + throw new TypeError("ReadableStream is locked."); + } + if (isWritableStreamLocked(writable)) { + throw new TypeError("writable is locked."); + } + const promise = readableStreamPipeTo( + this, + writable, + preventClose, + preventAbort, + preventCancel, + signal, + ); + setPromiseIsHandledToTrue(promise); + return readable; + } + + pipeTo( + dest, + { preventClose, preventAbort, preventCancel, signal } = {}, + ) { + if (!isReadableStream(this)) { + return Promise.reject(new TypeError("Invalid ReadableStream.")); + } + if (!isWritableStream(dest)) { + return Promise.reject( + new TypeError("dest is not a valid WritableStream."), + ); + } + preventClose = Boolean(preventClose); + preventAbort = Boolean(preventAbort); + preventCancel = Boolean(preventCancel); + if (signal && !(signal instanceof AbortSignal)) { + return Promise.reject(new TypeError("Invalid signal.")); + } + if (isReadableStreamLocked(this)) { + return Promise.reject(new TypeError("ReadableStream is locked.")); + } + if (isWritableStreamLocked(dest)) { + return Promise.reject(new TypeError("dest is locked.")); + } + return 
readableStreamPipeTo( + this, + dest, + preventClose, + preventAbort, + preventCancel, + signal, + ); + } + + tee() { + if (!isReadableStream(this)) { + throw new TypeError("Invalid ReadableStream."); + } + return readableStreamTee(this, false); + } + + [customInspect]() { + return `${this.constructor.name} { locked: ${String(this.locked)} }`; + } + + [Symbol.asyncIterator]( + options = {}, + ) { + return this.getIterator(options); + } + } + + class TransformStream { + constructor( + transformer = {}, + writableStrategy = {}, + readableStrategy = {}, + ) { + const writableSizeFunction = writableStrategy.size; + let writableHighWaterMark = writableStrategy.highWaterMark; + const readableSizeFunction = readableStrategy.size; + let readableHighWaterMark = readableStrategy.highWaterMark; + const writableType = transformer.writableType; + if (writableType !== undefined) { + throw new RangeError( + `Expected transformer writableType to be undefined, received "${ + String(writableType) + }"`, + ); + } + const writableSizeAlgorithm = makeSizeAlgorithmFromSizeFunction( + writableSizeFunction, + ); + if (writableHighWaterMark === undefined) { + writableHighWaterMark = 1; + } + writableHighWaterMark = validateAndNormalizeHighWaterMark( + writableHighWaterMark, + ); + const readableType = transformer.readableType; + if (readableType !== undefined) { + throw new RangeError( + `Expected transformer readableType to be undefined, received "${ + String(readableType) + }"`, + ); + } + const readableSizeAlgorithm = makeSizeAlgorithmFromSizeFunction( + readableSizeFunction, + ); + if (readableHighWaterMark === undefined) { + readableHighWaterMark = 1; + } + readableHighWaterMark = validateAndNormalizeHighWaterMark( + readableHighWaterMark, + ); + const startPromise = getDeferred(); + initializeTransformStream( + this, + startPromise.promise, + writableHighWaterMark, + writableSizeAlgorithm, + readableHighWaterMark, + readableSizeAlgorithm, + ); + // the brand check expects this, and the brand check occurs in the following + // but the property hasn't been defined. 
+ Object.defineProperty(this, sym.transformStreamController, { + value: undefined, + writable: true, + configurable: true, + }); + setUpTransformStreamDefaultControllerFromTransformer(this, transformer); + const startResult = invokeOrNoop( + transformer, + "start", + this[sym.transformStreamController], + ); + startPromise.resolve(startResult); + } + + get readable() { + if (!isTransformStream(this)) { + throw new TypeError("Invalid TransformStream."); + } + return this[sym.readable]; + } + + get writable() { + if (!isTransformStream(this)) { + throw new TypeError("Invalid TransformStream."); + } + return this[sym.writable]; + } + + [customInspect]() { + return this.constructor.name; + } + } + + class TransformStreamDefaultController { + constructor() { + throw new TypeError( + "TransformStreamDefaultController's constructor cannot be called.", + ); + } + + get desiredSize() { + if (!isTransformStreamDefaultController(this)) { + throw new TypeError("Invalid TransformStreamDefaultController."); + } + const readableController = this[sym.controlledTransformStream][ + sym.readable + ][sym.readableStreamController]; + return readableStreamDefaultControllerGetDesiredSize( + readableController, + ); + } + + enqueue(chunk) { + if (!isTransformStreamDefaultController(this)) { + throw new TypeError("Invalid TransformStreamDefaultController."); + } + transformStreamDefaultControllerEnqueue(this, chunk); + } + + error(reason) { + if (!isTransformStreamDefaultController(this)) { + throw new TypeError("Invalid TransformStreamDefaultController."); + } + transformStreamDefaultControllerError(this, reason); + } + + terminate() { + if (!isTransformStreamDefaultController(this)) { + throw new TypeError("Invalid TransformStreamDefaultController."); + } + transformStreamDefaultControllerTerminate(this); + } + + [customInspect]() { + return `${this.constructor.name} { desiredSize: ${ + String(this.desiredSize) + } }`; + } + } + + class WritableStreamDefaultController { + constructor() { + throw new TypeError( + "WritableStreamDefaultController's constructor cannot be called.", + ); + } + + error(e) { + if (!isWritableStreamDefaultController(this)) { + throw new TypeError("Invalid WritableStreamDefaultController."); + } + const state = this[sym.controlledWritableStream][sym.state]; + if (state !== "writable") { + return; + } + writableStreamDefaultControllerError(this, e); + } + + [sym.abortSteps](reason) { + const result = this[sym.abortAlgorithm](reason); + writableStreamDefaultControllerClearAlgorithms(this); + return result; + } + + [sym.errorSteps]() { + resetQueue(this); + } + + [customInspect]() { + return `${this.constructor.name} { }`; + } + } + + class WritableStreamDefaultWriter { + constructor(stream) { + if (!isWritableStream(stream)) { + throw new TypeError("Invalid stream."); + } + if (isWritableStreamLocked(stream)) { + throw new TypeError("Cannot create a writer for a locked stream."); + } + this[sym.ownerWritableStream] = stream; + stream[sym.writer] = this; + const state = stream[sym.state]; + if (state === "writable") { + if ( + !writableStreamCloseQueuedOrInFlight(stream) && + stream[sym.backpressure] + ) { + this[sym.readyPromise] = getDeferred(); + } else { + this[sym.readyPromise] = { promise: Promise.resolve() }; + } + this[sym.closedPromise] = getDeferred(); + } else if (state === "erroring") { + this[sym.readyPromise] = { + promise: Promise.reject(stream[sym.storedError]), + }; + setPromiseIsHandledToTrue(this[sym.readyPromise].promise); + this[sym.closedPromise] = getDeferred(); + } 
else if (state === "closed") { + this[sym.readyPromise] = { promise: Promise.resolve() }; + this[sym.closedPromise] = { promise: Promise.resolve() }; + } else { + assert(state === "errored"); + const storedError = stream[sym.storedError]; + this[sym.readyPromise] = { promise: Promise.reject(storedError) }; + setPromiseIsHandledToTrue(this[sym.readyPromise].promise); + this[sym.closedPromise] = { promise: Promise.reject(storedError) }; + setPromiseIsHandledToTrue(this[sym.closedPromise].promise); + } + } + + get closed() { + if (!isWritableStreamDefaultWriter(this)) { + return Promise.reject( + new TypeError("Invalid WritableStreamDefaultWriter."), + ); + } + return this[sym.closedPromise].promise; + } + + get desiredSize() { + if (!isWritableStreamDefaultWriter(this)) { + throw new TypeError("Invalid WritableStreamDefaultWriter."); + } + if (!this[sym.ownerWritableStream]) { + throw new TypeError("WritableStreamDefaultWriter has no owner."); + } + return writableStreamDefaultWriterGetDesiredSize(this); + } + + get ready() { + if (!isWritableStreamDefaultWriter(this)) { + return Promise.reject( + new TypeError("Invalid WritableStreamDefaultWriter."), + ); + } + return this[sym.readyPromise].promise; + } + + abort(reason) { + if (!isWritableStreamDefaultWriter(this)) { + return Promise.reject( + new TypeError("Invalid WritableStreamDefaultWriter."), + ); + } + if (!this[sym.ownerWritableStream]) { + Promise.reject( + new TypeError("WritableStreamDefaultWriter has no owner."), + ); + } + return writableStreamDefaultWriterAbort(this, reason); + } + + close() { + if (!isWritableStreamDefaultWriter(this)) { + return Promise.reject( + new TypeError("Invalid WritableStreamDefaultWriter."), + ); + } + const stream = this[sym.ownerWritableStream]; + if (!stream) { + Promise.reject( + new TypeError("WritableStreamDefaultWriter has no owner."), + ); + } + if (writableStreamCloseQueuedOrInFlight(stream)) { + Promise.reject( + new TypeError("Stream is in an invalid state to be closed."), + ); + } + return writableStreamDefaultWriterClose(this); + } + + releaseLock() { + if (!isWritableStreamDefaultWriter(this)) { + throw new TypeError("Invalid WritableStreamDefaultWriter."); + } + const stream = this[sym.ownerWritableStream]; + if (!stream) { + return; + } + assert(stream[sym.writer]); + writableStreamDefaultWriterRelease(this); + } + + write(chunk) { + if (!isWritableStreamDefaultWriter(this)) { + return Promise.reject( + new TypeError("Invalid WritableStreamDefaultWriter."), + ); + } + if (!this[sym.ownerWritableStream]) { + Promise.reject( + new TypeError("WritableStreamDefaultWriter has no owner."), + ); + } + return writableStreamDefaultWriterWrite(this, chunk); + } + + [customInspect]() { + return `${this.constructor.name} { closed: Promise, desiredSize: ${ + String(this.desiredSize) + }, ready: Promise }`; + } + } + + class WritableStream { + constructor( + underlyingSink = {}, + strategy = {}, + ) { + initializeWritableStream(this); + const size = strategy.size; + let highWaterMark = strategy.highWaterMark ?? 
1; + const { type } = underlyingSink; + if (type !== undefined) { + throw new RangeError(`Sink type of "${String(type)}" not supported.`); + } + const sizeAlgorithm = makeSizeAlgorithmFromSizeFunction(size); + highWaterMark = validateAndNormalizeHighWaterMark(highWaterMark); + setUpWritableStreamDefaultControllerFromUnderlyingSink( + this, + underlyingSink, + highWaterMark, + sizeAlgorithm, + ); + } + + get locked() { + if (!isWritableStream(this)) { + throw new TypeError("Invalid WritableStream."); + } + return isWritableStreamLocked(this); + } + + abort(reason) { + if (!isWritableStream(this)) { + return Promise.reject(new TypeError("Invalid WritableStream.")); + } + if (isWritableStreamLocked(this)) { + return Promise.reject( + new TypeError("Cannot abort a locked WritableStream."), + ); + } + return writableStreamAbort(this, reason); + } + + close() { + if (!isWritableStream(this)) { + return Promise.reject(new TypeError("Invalid WritableStream.")); + } + if (isWritableStreamLocked(this)) { + return Promise.reject( + new TypeError("Cannot abort a locked WritableStream."), + ); + } + if (writableStreamCloseQueuedOrInFlight(this)) { + return Promise.reject( + new TypeError("Cannot close an already closing WritableStream."), + ); + } + return writableStreamClose(this); + } + + getWriter() { + if (!isWritableStream(this)) { + throw new TypeError("Invalid WritableStream."); + } + return acquireWritableStreamDefaultWriter(this); + } + + [customInspect]() { + return `${this.constructor.name} { locked: ${String(this.locked)} }`; + } + } + + function acquireReadableStreamDefaultReader( + stream, + forAuthorCode = false, + ) { + const reader = new ReadableStreamDefaultReader(stream); + reader[sym.forAuthorCode] = forAuthorCode; + return reader; + } + + function acquireWritableStreamDefaultWriter( + stream, + ) { + return new WritableStreamDefaultWriter(stream); + } + + function call( + fn, + v, + args, + ) { + return Function.prototype.apply.call(fn, v, args); + } + + function createAlgorithmFromUnderlyingMethod( + underlyingObject, + methodName, + algoArgCount, + ...extraArgs + ) { + const method = underlyingObject[methodName]; + if (method) { + if (!isCallable(method)) { + throw new TypeError("method is not callable"); + } + if (algoArgCount === 0) { + return async () => call(method, underlyingObject, extraArgs); + } else { + return async (arg) => { + const fullArgs = [arg, ...extraArgs]; + return call(method, underlyingObject, fullArgs); + }; + } + } + return async () => undefined; + } + + function createReadableStream( + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark = 1, + sizeAlgorithm = () => 1, + ) { + highWaterMark = validateAndNormalizeHighWaterMark(highWaterMark); + const stream = Object.create( + ReadableStream.prototype, + ); + initializeReadableStream(stream); + const controller = Object.create( + ReadableStreamDefaultController.prototype, + ); + setUpReadableStreamDefaultController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + sizeAlgorithm, + ); + return stream; + } + + function createWritableStream( + startAlgorithm, + writeAlgorithm, + closeAlgorithm, + abortAlgorithm, + highWaterMark = 1, + sizeAlgorithm = () => 1, + ) { + highWaterMark = validateAndNormalizeHighWaterMark(highWaterMark); + const stream = Object.create(WritableStream.prototype); + initializeWritableStream(stream); + const controller = Object.create( + WritableStreamDefaultController.prototype, + ); + setUpWritableStreamDefaultController( + 
stream, + controller, + startAlgorithm, + writeAlgorithm, + closeAlgorithm, + abortAlgorithm, + highWaterMark, + sizeAlgorithm, + ); + return stream; + } + + function dequeueValue(container) { + assert(sym.queue in container && sym.queueTotalSize in container); + assert(container[sym.queue].length); + const pair = container[sym.queue].shift(); + container[sym.queueTotalSize] -= pair.size; + if (container[sym.queueTotalSize] <= 0) { + container[sym.queueTotalSize] = 0; + } + return pair.value; + } + + function enqueueValueWithSize( + container, + value, + size, + ) { + assert(sym.queue in container && sym.queueTotalSize in container); + size = Number(size); + if (!isFiniteNonNegativeNumber(size)) { + throw new RangeError("size must be a finite non-negative number."); + } + container[sym.queue].push({ value, size }); + container[sym.queueTotalSize] += size; + } + + /** Non-spec mechanism to "unwrap" a promise and store it to be resolved + * later. */ + function getDeferred() { + let resolve; + let reject; + const promise = new Promise((res, rej) => { + resolve = res; + reject = rej; + }); + return { promise, resolve: resolve, reject: reject }; + } + + function initializeReadableStream( + stream, + ) { + stream[sym.state] = "readable"; + stream[sym.reader] = stream[sym.storedError] = undefined; + stream[sym.disturbed] = false; + } + + function initializeTransformStream( + stream, + startPromise, + writableHighWaterMark, + writableSizeAlgorithm, + readableHighWaterMark, + readableSizeAlgorithm, + ) { + const startAlgorithm = () => startPromise; + const writeAlgorithm = (chunk) => + transformStreamDefaultSinkWriteAlgorithm(stream, chunk); + const abortAlgorithm = (reason) => + transformStreamDefaultSinkAbortAlgorithm(stream, reason); + const closeAlgorithm = () => + transformStreamDefaultSinkCloseAlgorithm(stream); + stream[sym.writable] = createWritableStream( + startAlgorithm, + writeAlgorithm, + closeAlgorithm, + abortAlgorithm, + writableHighWaterMark, + writableSizeAlgorithm, + ); + const pullAlgorithm = () => + transformStreamDefaultSourcePullAlgorithm(stream); + const cancelAlgorithm = (reason) => { + transformStreamErrorWritableAndUnblockWrite(stream, reason); + return Promise.resolve(undefined); + }; + stream[sym.readable] = createReadableStream( + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + readableHighWaterMark, + readableSizeAlgorithm, + ); + stream[sym.backpressure] = stream[sym.backpressureChangePromise] = + undefined; + transformStreamSetBackpressure(stream, true); + Object.defineProperty(stream, sym.transformStreamController, { + value: undefined, + configurable: true, + }); + } + + function initializeWritableStream( + stream, + ) { + stream[sym.state] = "writable"; + stream[sym.storedError] = stream[sym.writer] = stream[ + sym.writableStreamController + ] = stream[sym.inFlightWriteRequest] = stream[sym.closeRequest] = stream[ + sym.inFlightCloseRequest + ] = stream[sym.pendingAbortRequest] = undefined; + stream[sym.writeRequests] = []; + stream[sym.backpressure] = false; + } + + function invokeOrNoop( + o, + p, + ...args + ) { + assert(o); + const method = o[p]; + if (!method) { + return undefined; + } + return call(method, o, args); + } + + function isCallable(value) { + return typeof value === "function"; + } + + function isDetachedBuffer(value) { + return sym.isFakeDetached in value; + } + + function isFiniteNonNegativeNumber(v) { + return Number.isFinite(v) && (v) >= 0; + } + + function isReadableByteStreamController( + x, + ) { + return !( + typeof x !== "object" 
|| + x === null || + !(sym.controlledReadableByteStream in x) + ); + } + + function isReadableStream(x) { + return !( + typeof x !== "object" || + x === null || + !(sym.readableStreamController in x) + ); + } + + function isReadableStreamAsyncIterator( + x, + ) { + if (typeof x !== "object" || x === null) { + return false; + } + return sym.asyncIteratorReader in x; + } + + function isReadableStreamDefaultController( + x, + ) { + return !( + typeof x !== "object" || + x === null || + !(sym.controlledReadableStream in x) + ); + } + + function isReadableStreamDefaultReader( + x, + ) { + return !(typeof x !== "object" || x === null || !(sym.readRequests in x)); + } + + function isReadableStreamLocked(stream) { + assert(isReadableStream(stream)); + return !!stream[sym.reader]; + } + + function isReadableStreamDisturbed(stream) { + assert(isReadableStream(stream)); + return !!stream[sym.disturbed]; + } + + function isTransformStream(x) { + return !( + typeof x !== "object" || + x === null || + !(sym.transformStreamController in x) + ); + } + + function isTransformStreamDefaultController( + x, + ) { + return !( + typeof x !== "object" || + x === null || + !(sym.controlledTransformStream in x) + ); + } + + function isWritableStream(x) { + return !( + typeof x !== "object" || + x === null || + !(sym.writableStreamController in x) + ); + } + + function isWritableStreamDefaultController( + x, + ) { + return !( + typeof x !== "object" || + x === null || + !(sym.controlledWritableStream in x) + ); + } + + function isWritableStreamDefaultWriter( + x, + ) { + return !( + typeof x !== "object" || + x === null || + !(sym.ownerWritableStream in x) + ); + } + + function isWritableStreamLocked(stream) { + assert(isWritableStream(stream)); + return stream[sym.writer] !== undefined; + } + + function makeSizeAlgorithmFromSizeFunction( + size, + ) { + if (size === undefined) { + return () => 1; + } + if (typeof size !== "function") { + throw new TypeError("size must be callable."); + } + return (chunk) => { + return size.call(undefined, chunk); + }; + } + + function peekQueueValue(container) { + assert(sym.queue in container && sym.queueTotalSize in container); + assert(container[sym.queue].length); + const [pair] = container[sym.queue]; + return pair.value; + } + + function readableByteStreamControllerShouldCallPull( + controller, + ) { + const stream = controller[sym.controlledReadableByteStream]; + if ( + stream[sym.state] !== "readable" || + controller[sym.closeRequested] || + !controller[sym.started] + ) { + return false; + } + if ( + readableStreamHasDefaultReader(stream) && + readableStreamGetNumReadRequests(stream) > 0 + ) { + return true; + } + // 3.13.25.6 If ! ReadableStreamHasBYOBReader(stream) is true and ! + // ReadableStreamGetNumReadIntoRequests(stream) > 0, return true. 
+ const desiredSize = readableByteStreamControllerGetDesiredSize(controller); + assert(desiredSize !== null); + return desiredSize > 0; + } + + function readableByteStreamControllerCallPullIfNeeded( + controller, + ) { + const shouldPull = readableByteStreamControllerShouldCallPull(controller); + if (!shouldPull) { + return; + } + if (controller[sym.pulling]) { + controller[sym.pullAgain] = true; + return; + } + assert(controller[sym.pullAgain] === false); + controller[sym.pulling] = true; + const pullPromise = controller[sym.pullAlgorithm](); + setPromiseIsHandledToTrue( + pullPromise.then( + () => { + controller[sym.pulling] = false; + if (controller[sym.pullAgain]) { + controller[sym.pullAgain] = false; + readableByteStreamControllerCallPullIfNeeded(controller); + } + }, + (e) => { + readableByteStreamControllerError(controller, e); + }, + ), + ); + } + + function readableByteStreamControllerClearAlgorithms( + controller, + ) { + controller[sym.pullAlgorithm] = undefined; + controller[sym.cancelAlgorithm] = undefined; + } + + function readableByteStreamControllerClose( + controller, + ) { + const stream = controller[sym.controlledReadableByteStream]; + if (controller[sym.closeRequested] || stream[sym.state] !== "readable") { + return; + } + if (controller[sym.queueTotalSize] > 0) { + controller[sym.closeRequested] = true; + return; + } + // 3.13.6.4 If controller.[[pendingPullIntos]] is not empty, (BYOB Support) + readableByteStreamControllerClearAlgorithms(controller); + readableStreamClose(stream); + } + + function readableByteStreamControllerEnqueue( + controller, + chunk, + ) { + const stream = controller[sym.controlledReadableByteStream]; + if (controller[sym.closeRequested] || stream[sym.state] !== "readable") { + return; + } + const { buffer, byteOffset, byteLength } = chunk; + const transferredBuffer = transferArrayBuffer(buffer); + if (readableStreamHasDefaultReader(stream)) { + if (readableStreamGetNumReadRequests(stream) === 0) { + readableByteStreamControllerEnqueueChunkToQueue( + controller, + transferredBuffer, + byteOffset, + byteLength, + ); + } else { + assert(controller[sym.queue].length === 0); + const transferredView = new Uint8Array( + transferredBuffer, + byteOffset, + byteLength, + ); + readableStreamFulfillReadRequest(stream, transferredView, false); + } + // 3.13.9.8 Otherwise, if ! ReadableStreamHasBYOBReader(stream) is true + } else { + assert(!isReadableStreamLocked(stream)); + readableByteStreamControllerEnqueueChunkToQueue( + controller, + transferredBuffer, + byteOffset, + byteLength, + ); + } + readableByteStreamControllerCallPullIfNeeded(controller); + } + + function readableByteStreamControllerEnqueueChunkToQueue( + controller, + buffer, + byteOffset, + byteLength, + ) { + controller[sym.queue].push({ + value: buffer, + offset: byteOffset, + size: byteLength, + }); + controller[sym.queueTotalSize] += byteLength; + } + + function readableByteStreamControllerError( + controller, + e, + ) { + const stream = controller[sym.controlledReadableByteStream]; + if (stream[sym.state] !== "readable") { + return; + } + // 3.13.11.3 Perform ! ReadableByteStreamControllerClearPendingPullIntos(controller). 
+ resetQueue(controller); + readableByteStreamControllerClearAlgorithms(controller); + readableStreamError(stream, e); + } + + function readableByteStreamControllerGetDesiredSize( + controller, + ) { + const stream = controller[sym.controlledReadableByteStream]; + const state = stream[sym.state]; + if (state === "errored") { + return null; + } + if (state === "closed") { + return 0; + } + return controller[sym.strategyHWM] - controller[sym.queueTotalSize]; + } + + function readableByteStreamControllerHandleQueueDrain( + controller, + ) { + assert( + controller[sym.controlledReadableByteStream][sym.state] === "readable", + ); + if ( + controller[sym.queueTotalSize] === 0 && controller[sym.closeRequested] + ) { + readableByteStreamControllerClearAlgorithms(controller); + readableStreamClose(controller[sym.controlledReadableByteStream]); + } else { + readableByteStreamControllerCallPullIfNeeded(controller); + } + } + + function readableStreamAddReadRequest( + stream, + ) { + assert(isReadableStreamDefaultReader(stream[sym.reader])); + assert(stream[sym.state] === "readable"); + const promise = getDeferred(); + stream[sym.reader][sym.readRequests].push(promise); + return promise.promise; + } + + function readableStreamCancel( + stream, + reason, + ) { + stream[sym.disturbed] = true; + if (stream[sym.state] === "closed") { + return Promise.resolve(); + } + if (stream[sym.state] === "errored") { + return Promise.reject(stream[sym.storedError]); + } + readableStreamClose(stream); + return stream[sym.readableStreamController][sym.cancelSteps](reason).then( + () => undefined, + ); + } + + function readableStreamClose(stream) { + assert(stream[sym.state] === "readable"); + stream[sym.state] = "closed"; + const reader = stream[sym.reader]; + if (!reader) { + return; + } + if (isReadableStreamDefaultReader(reader)) { + for (const readRequest of reader[sym.readRequests]) { + assert(readRequest.resolve); + readRequest.resolve( + readableStreamCreateReadResult( + undefined, + true, + reader[sym.forAuthorCode], + ), + ); + } + reader[sym.readRequests] = []; + } + const resolve = reader[sym.closedPromise].resolve; + assert(resolve); + resolve(); + } + + function readableStreamCreateReadResult( + value, + done, + forAuthorCode, + ) { + const prototype = forAuthorCode ? 
Object.prototype : null; + assert(typeof done === "boolean"); + const obj = Object.create(prototype); + Object.defineProperties(obj, { + value: { value, writable: true, enumerable: true, configurable: true }, + done: { + value: done, + writable: true, + enumerable: true, + configurable: true, + }, + }); + return obj; + } + + function readableStreamDefaultControllerCallPullIfNeeded( + controller, + ) { + const shouldPull = readableStreamDefaultControllerShouldCallPull( + controller, + ); + if (!shouldPull) { + return; + } + if (controller[sym.pulling]) { + controller[sym.pullAgain] = true; + return; + } + assert(controller[sym.pullAgain] === false); + controller[sym.pulling] = true; + const pullPromise = controller[sym.pullAlgorithm](); + pullPromise.then( + () => { + controller[sym.pulling] = false; + if (controller[sym.pullAgain]) { + controller[sym.pullAgain] = false; + readableStreamDefaultControllerCallPullIfNeeded(controller); + } + }, + (e) => { + readableStreamDefaultControllerError(controller, e); + }, + ); + } + + function readableStreamDefaultControllerCanCloseOrEnqueue( + controller, + ) { + const state = controller[sym.controlledReadableStream][sym.state]; + return !controller[sym.closeRequested] && state === "readable"; + } + + function readableStreamDefaultControllerClearAlgorithms( + controller, + ) { + controller[sym.pullAlgorithm] = undefined; + controller[sym.cancelAlgorithm] = undefined; + controller[sym.strategySizeAlgorithm] = undefined; + } + + function readableStreamDefaultControllerClose( + controller, + ) { + if (!readableStreamDefaultControllerCanCloseOrEnqueue(controller)) { + return; + } + const stream = controller[sym.controlledReadableStream]; + controller[sym.closeRequested] = true; + if (controller[sym.queue].length === 0) { + readableStreamDefaultControllerClearAlgorithms(controller); + readableStreamClose(stream); + } + } + + function readableStreamDefaultControllerEnqueue( + controller, + chunk, + ) { + if (!readableStreamDefaultControllerCanCloseOrEnqueue(controller)) { + return; + } + const stream = controller[sym.controlledReadableStream]; + if ( + isReadableStreamLocked(stream) && + readableStreamGetNumReadRequests(stream) > 0 + ) { + readableStreamFulfillReadRequest(stream, chunk, false); + } else { + try { + const chunkSize = controller[sym.strategySizeAlgorithm](chunk); + enqueueValueWithSize(controller, chunk, chunkSize); + } catch (err) { + readableStreamDefaultControllerError(controller, err); + throw err; + } + } + readableStreamDefaultControllerCallPullIfNeeded(controller); + } + + function readableStreamDefaultControllerGetDesiredSize( + controller, + ) { + const stream = controller[sym.controlledReadableStream]; + const state = stream[sym.state]; + if (state === "errored") { + return null; + } + if (state === "closed") { + return 0; + } + return controller[sym.strategyHWM] - controller[sym.queueTotalSize]; + } + + function readableStreamDefaultControllerError( + controller, + e, + ) { + const stream = controller[sym.controlledReadableStream]; + if (stream[sym.state] !== "readable") { + return; + } + resetQueue(controller); + readableStreamDefaultControllerClearAlgorithms(controller); + readableStreamError(stream, e); + } + + function readableStreamDefaultControllerHasBackpressure( + controller, + ) { + return readableStreamDefaultControllerShouldCallPull(controller); + } + + function readableStreamDefaultControllerShouldCallPull( + controller, + ) { + const stream = controller[sym.controlledReadableStream]; + if ( + 
!readableStreamDefaultControllerCanCloseOrEnqueue(controller) || + controller[sym.started] === false + ) { + return false; + } + if ( + isReadableStreamLocked(stream) && + readableStreamGetNumReadRequests(stream) > 0 + ) { + return true; + } + const desiredSize = readableStreamDefaultControllerGetDesiredSize( + controller, + ); + assert(desiredSize !== null); + return desiredSize > 0; + } + + function readableStreamDefaultReaderRead( + reader, + ) { + const stream = reader[sym.ownerReadableStream]; + assert(stream); + stream[sym.disturbed] = true; + if (stream[sym.state] === "closed") { + return Promise.resolve( + readableStreamCreateReadResult( + undefined, + true, + reader[sym.forAuthorCode], + ), + ); + } + if (stream[sym.state] === "errored") { + return Promise.reject(stream[sym.storedError]); + } + assert(stream[sym.state] === "readable"); + return (stream[ + sym.readableStreamController + ])[sym.pullSteps](); + } + + function readableStreamError(stream, e) { + assert(isReadableStream(stream)); + assert(stream[sym.state] === "readable"); + stream[sym.state] = "errored"; + stream[sym.storedError] = e; + const reader = stream[sym.reader]; + if (reader === undefined) { + return; + } + if (isReadableStreamDefaultReader(reader)) { + for (const readRequest of reader[sym.readRequests]) { + assert(readRequest.reject); + readRequest.reject(e); + readRequest.reject = undefined; + readRequest.resolve = undefined; + } + reader[sym.readRequests] = []; + } + // 3.5.6.8 Otherwise, support BYOB Reader + reader[sym.closedPromise].reject(e); + reader[sym.closedPromise].reject = undefined; + reader[sym.closedPromise].resolve = undefined; + setPromiseIsHandledToTrue(reader[sym.closedPromise].promise); + } + + function readableStreamFulfillReadRequest( + stream, + chunk, + done, + ) { + const reader = stream[sym.reader]; + const readRequest = reader[sym.readRequests].shift(); + assert(readRequest.resolve); + readRequest.resolve( + readableStreamCreateReadResult(chunk, done, reader[sym.forAuthorCode]), + ); + } + + function readableStreamGetNumReadRequests( + stream, + ) { + return stream[sym.reader]?.[sym.readRequests].length ?? 
0; + } + + function readableStreamHasDefaultReader( + stream, + ) { + const reader = stream[sym.reader]; + return !(reader === undefined || !isReadableStreamDefaultReader(reader)); + } + + function readableStreamPipeTo( + source, + dest, + preventClose, + preventAbort, + preventCancel, + signal, + ) { + assert(isReadableStream(source)); + assert(isWritableStream(dest)); + assert( + typeof preventClose === "boolean" && + typeof preventAbort === "boolean" && + typeof preventCancel === "boolean", + ); + assert(signal === undefined || signal instanceof AbortSignal); + assert(!isReadableStreamLocked(source)); + assert(!isWritableStreamLocked(dest)); + const reader = acquireReadableStreamDefaultReader(source); + const writer = acquireWritableStreamDefaultWriter(dest); + source[sym.disturbed] = true; + let shuttingDown = false; + const promise = getDeferred(); + let abortAlgorithm; + if (signal) { + abortAlgorithm = () => { + const error = new DOMException("Abort signal received.", "AbortSignal"); + const actions = []; + if (!preventAbort) { + actions.push(() => { + if (dest[sym.state] === "writable") { + return writableStreamAbort(dest, error); + } else { + return Promise.resolve(undefined); + } + }); + } + if (!preventCancel) { + actions.push(() => { + if (source[sym.state] === "readable") { + return readableStreamCancel(source, error); + } else { + return Promise.resolve(undefined); + } + }); + } + shutdownWithAction( + () => Promise.all(actions.map((action) => action())), + true, + error, + ); + }; + if (signal.aborted) { + abortAlgorithm(); + return promise.promise; + } + signal.addEventListener("abort", abortAlgorithm); + } + + let currentWrite = Promise.resolve(); + + // At this point, the spec becomes non-specific and vague. Most of the rest + // of this code is based on the reference implementation that is part of the + // specification. This is why the functions are only scoped to this function + // to ensure they don't leak into the spec compliant parts. + + function isOrBecomesClosed( + stream, + promise, + action, + ) { + if (stream[sym.state] === "closed") { + action(); + } else { + setPromiseIsHandledToTrue(promise.then(action)); + } + } + + function isOrBecomesErrored( + stream, + promise, + action, + ) { + if (stream[sym.state] === "errored") { + action(stream[sym.storedError]); + } else { + setPromiseIsHandledToTrue(promise.catch((error) => action(error))); + } + } + + function finalize(isError, error) { + writableStreamDefaultWriterRelease(writer); + readableStreamReaderGenericRelease(reader); + + if (signal) { + signal.removeEventListener("abort", abortAlgorithm); + } + if (isError) { + promise.reject(error); + } else { + promise.resolve(); + } + } + + function waitForWritesToFinish() { + const oldCurrentWrite = currentWrite; + return currentWrite.then(() => + oldCurrentWrite !== currentWrite ? 
waitForWritesToFinish() : undefined + ); + } + + function shutdownWithAction( + action, + originalIsError, + originalError, + ) { + function doTheRest() { + setPromiseIsHandledToTrue( + action().then( + () => finalize(originalIsError, originalError), + (newError) => finalize(true, newError), + ), + ); + } + + if (shuttingDown) { + return; + } + shuttingDown = true; + + if ( + dest[sym.state] === "writable" && + writableStreamCloseQueuedOrInFlight(dest) === false + ) { + setPromiseIsHandledToTrue(waitForWritesToFinish().then(doTheRest)); + } else { + doTheRest(); + } + } + + function shutdown(isError, error) { + if (shuttingDown) { + return; + } + shuttingDown = true; + + if ( + dest[sym.state] === "writable" && + !writableStreamCloseQueuedOrInFlight(dest) + ) { + setPromiseIsHandledToTrue( + waitForWritesToFinish().then(() => finalize(isError, error)), + ); + } + finalize(isError, error); + } + + function pipeStep() { + if (shuttingDown) { + return Promise.resolve(true); + } + return writer[sym.readyPromise].promise.then(() => { + return readableStreamDefaultReaderRead(reader).then( + ({ value, done }) => { + if (done === true) { + return true; + } + currentWrite = writableStreamDefaultWriterWrite( + writer, + value, + ).then(undefined, () => {}); + return false; + }, + ); + }); + } + + function pipeLoop() { + return new Promise((resolveLoop, rejectLoop) => { + function next(done) { + if (done) { + resolveLoop(undefined); + } else { + setPromiseIsHandledToTrue(pipeStep().then(next, rejectLoop)); + } + } + next(false); + }); + } + + isOrBecomesErrored( + source, + reader[sym.closedPromise].promise, + (storedError) => { + if (!preventAbort) { + shutdownWithAction( + () => writableStreamAbort(dest, storedError), + true, + storedError, + ); + } else { + shutdown(true, storedError); + } + }, + ); + + isOrBecomesErrored( + dest, + writer[sym.closedPromise].promise, + (storedError) => { + if (!preventCancel) { + shutdownWithAction( + () => readableStreamCancel(source, storedError), + true, + storedError, + ); + } else { + shutdown(true, storedError); + } + }, + ); + + isOrBecomesClosed(source, reader[sym.closedPromise].promise, () => { + if (!preventClose) { + shutdownWithAction(() => + writableStreamDefaultWriterCloseWithErrorPropagation(writer) + ); + } + }); + + if ( + writableStreamCloseQueuedOrInFlight(dest) || + dest[sym.state] === "closed" + ) { + const destClosed = new TypeError( + "The destination writable stream closed before all data could be piped to it.", + ); + if (!preventCancel) { + shutdownWithAction( + () => readableStreamCancel(source, destClosed), + true, + destClosed, + ); + } else { + shutdown(true, destClosed); + } + } + + setPromiseIsHandledToTrue(pipeLoop()); + return promise.promise; + } + + function readableStreamReaderGenericCancel( + reader, + reason, + ) { + const stream = reader[sym.ownerReadableStream]; + assert(stream); + return readableStreamCancel(stream, reason); + } + + function readableStreamReaderGenericInitialize( + reader, + stream, + ) { + reader[sym.forAuthorCode] = true; + reader[sym.ownerReadableStream] = stream; + stream[sym.reader] = reader; + if (stream[sym.state] === "readable") { + reader[sym.closedPromise] = getDeferred(); + } else if (stream[sym.state] === "closed") { + reader[sym.closedPromise] = { promise: Promise.resolve() }; + } else { + assert(stream[sym.state] === "errored"); + reader[sym.closedPromise] = { + promise: Promise.reject(stream[sym.storedError]), + }; + setPromiseIsHandledToTrue(reader[sym.closedPromise].promise); + } + } + + 
function readableStreamReaderGenericRelease( + reader, + ) { + assert(reader[sym.ownerReadableStream]); + assert(reader[sym.ownerReadableStream][sym.reader] === reader); + const closedPromise = reader[sym.closedPromise]; + if (reader[sym.ownerReadableStream][sym.state] === "readable") { + assert(closedPromise.reject); + closedPromise.reject(new TypeError("ReadableStream state is readable.")); + } else { + closedPromise.promise = Promise.reject( + new TypeError("Reading is closed."), + ); + delete closedPromise.reject; + delete closedPromise.resolve; + } + setPromiseIsHandledToTrue(closedPromise.promise); + reader[sym.ownerReadableStream][sym.reader] = undefined; + reader[sym.ownerReadableStream] = undefined; + } + + function readableStreamTee( + stream, + cloneForBranch2, + ) { + assert(isReadableStream(stream)); + assert(typeof cloneForBranch2 === "boolean"); + const reader = acquireReadableStreamDefaultReader(stream); + let reading = false; + let canceled1 = false; + let canceled2 = false; + let reason1 = undefined; + let reason2 = undefined; + /* eslint-disable prefer-const */ + let branch1; + let branch2; + /* eslint-enable prefer-const */ + const cancelPromise = getDeferred(); + const pullAlgorithm = () => { + if (reading) { + return Promise.resolve(); + } + reading = true; + const readPromise = readableStreamDefaultReaderRead(reader).then( + (result) => { + reading = false; + assert(typeof result === "object"); + const { done } = result; + assert(typeof done === "boolean"); + if (done) { + if (!canceled1) { + readableStreamDefaultControllerClose( + branch1[ + sym.readableStreamController + ], + ); + } + if (!canceled2) { + readableStreamDefaultControllerClose( + branch2[ + sym.readableStreamController + ], + ); + } + return; + } + const { value } = result; + const value1 = value; + let value2 = value; + if (!canceled2 && cloneForBranch2) { + value2 = cloneValue(value2); + } + if (!canceled1) { + readableStreamDefaultControllerEnqueue( + branch1[ + sym.readableStreamController + ], + value1, + ); + } + if (!canceled2) { + readableStreamDefaultControllerEnqueue( + branch2[ + sym.readableStreamController + ], + value2, + ); + } + }, + ); + setPromiseIsHandledToTrue(readPromise); + return Promise.resolve(); + }; + const cancel1Algorithm = (reason) => { + canceled1 = true; + reason1 = reason; + if (canceled2) { + const compositeReason = [reason1, reason2]; + const cancelResult = readableStreamCancel(stream, compositeReason); + cancelPromise.resolve(cancelResult); + } + return cancelPromise.promise; + }; + const cancel2Algorithm = (reason) => { + canceled2 = true; + reason2 = reason; + if (canceled1) { + const compositeReason = [reason1, reason2]; + const cancelResult = readableStreamCancel(stream, compositeReason); + cancelPromise.resolve(cancelResult); + } + return cancelPromise.promise; + }; + const startAlgorithm = () => undefined; + branch1 = createReadableStream( + startAlgorithm, + pullAlgorithm, + cancel1Algorithm, + ); + branch2 = createReadableStream( + startAlgorithm, + pullAlgorithm, + cancel2Algorithm, + ); + setPromiseIsHandledToTrue( + reader[sym.closedPromise].promise.catch((r) => { + readableStreamDefaultControllerError( + branch1[ + sym.readableStreamController + ], + r, + ); + readableStreamDefaultControllerError( + branch2[ + sym.readableStreamController + ], + r, + ); + }), + ); + return [branch1, branch2]; + } + + function resetQueue(container) { + assert(sym.queue in container && sym.queueTotalSize in container); + container[sym.queue] = []; + 
container[sym.queueTotalSize] = 0; + } + + /** An internal function which mimics the behavior of setting the promise to + * handled in JavaScript. In this situation, an assertion failure, which + * shouldn't happen will get thrown, instead of swallowed. */ + function setPromiseIsHandledToTrue(promise) { + promise.then(undefined, (e) => { + if (e && e instanceof AssertionError) { + queueMicrotask(() => { + throw e; + }); + } + }); + } + + function setUpReadableByteStreamController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + autoAllocateChunkSize, + ) { + assert(stream[sym.readableStreamController] === undefined); + if (autoAllocateChunkSize !== undefined) { + assert(Number.isInteger(autoAllocateChunkSize)); + assert(autoAllocateChunkSize >= 0); + } + controller[sym.controlledReadableByteStream] = stream; + controller[sym.pulling] = controller[sym.pullAgain] = false; + controller[sym.byobRequest] = undefined; + controller[sym.queue] = []; + controller[sym.queueTotalSize] = 0; + controller[sym.closeRequested] = controller[sym.started] = false; + controller[sym.strategyHWM] = validateAndNormalizeHighWaterMark( + highWaterMark, + ); + controller[sym.pullAlgorithm] = pullAlgorithm; + controller[sym.cancelAlgorithm] = cancelAlgorithm; + controller[sym.autoAllocateChunkSize] = autoAllocateChunkSize; + // 3.13.26.12 Set controller.[[pendingPullIntos]] to a new empty List. + stream[sym.readableStreamController] = controller; + const startResult = startAlgorithm(); + const startPromise = Promise.resolve(startResult); + setPromiseIsHandledToTrue( + startPromise.then( + () => { + controller[sym.started] = true; + assert(!controller[sym.pulling]); + assert(!controller[sym.pullAgain]); + readableByteStreamControllerCallPullIfNeeded(controller); + }, + (r) => { + readableByteStreamControllerError(controller, r); + }, + ), + ); + } + + function setUpReadableByteStreamControllerFromUnderlyingSource( + stream, + underlyingByteSource, + highWaterMark, + ) { + assert(underlyingByteSource); + const controller = Object.create( + ReadableByteStreamController.prototype, + ); + const startAlgorithm = () => { + return invokeOrNoop(underlyingByteSource, "start", controller); + }; + const pullAlgorithm = createAlgorithmFromUnderlyingMethod( + underlyingByteSource, + "pull", + 0, + controller, + ); + setFunctionName(pullAlgorithm, "[[pullAlgorithm]]"); + const cancelAlgorithm = createAlgorithmFromUnderlyingMethod( + underlyingByteSource, + "cancel", + 1, + ); + setFunctionName(cancelAlgorithm, "[[cancelAlgorithm]]"); + // 3.13.27.6 Let autoAllocateChunkSize be ? GetV(underlyingByteSource, "autoAllocateChunkSize"). 
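+    // (Not implemented here: the value is never read from underlyingByteSource,
+    // so the controller never auto-allocates read buffers and the constant
+    // below stays undefined.)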
+ const autoAllocateChunkSize = undefined; + setUpReadableByteStreamController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + autoAllocateChunkSize, + ); + } + + function setUpReadableStreamDefaultController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + sizeAlgorithm, + ) { + assert(stream[sym.readableStreamController] === undefined); + controller[sym.controlledReadableStream] = stream; + controller[sym.queue] = []; + controller[sym.queueTotalSize] = 0; + controller[sym.started] = controller[sym.closeRequested] = controller[ + sym.pullAgain + ] = controller[sym.pulling] = false; + controller[sym.strategySizeAlgorithm] = sizeAlgorithm; + controller[sym.strategyHWM] = highWaterMark; + controller[sym.pullAlgorithm] = pullAlgorithm; + controller[sym.cancelAlgorithm] = cancelAlgorithm; + stream[sym.readableStreamController] = controller; + const startResult = startAlgorithm(); + const startPromise = Promise.resolve(startResult); + setPromiseIsHandledToTrue( + startPromise.then( + () => { + controller[sym.started] = true; + assert(controller[sym.pulling] === false); + assert(controller[sym.pullAgain] === false); + readableStreamDefaultControllerCallPullIfNeeded(controller); + }, + (r) => { + readableStreamDefaultControllerError(controller, r); + }, + ), + ); + } + + function setUpReadableStreamDefaultControllerFromUnderlyingSource( + stream, + underlyingSource, + highWaterMark, + sizeAlgorithm, + ) { + assert(underlyingSource); + const controller = Object.create( + ReadableStreamDefaultController.prototype, + ); + const startAlgorithm = () => + invokeOrNoop(underlyingSource, "start", controller); + const pullAlgorithm = createAlgorithmFromUnderlyingMethod( + underlyingSource, + "pull", + 0, + controller, + ); + setFunctionName(pullAlgorithm, "[[pullAlgorithm]]"); + const cancelAlgorithm = createAlgorithmFromUnderlyingMethod( + underlyingSource, + "cancel", + 1, + ); + setFunctionName(cancelAlgorithm, "[[cancelAlgorithm]]"); + setUpReadableStreamDefaultController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + sizeAlgorithm, + ); + } + + function setUpTransformStreamDefaultController( + stream, + controller, + transformAlgorithm, + flushAlgorithm, + ) { + assert(isTransformStream(stream)); + assert(stream[sym.transformStreamController] === undefined); + controller[sym.controlledTransformStream] = stream; + stream[sym.transformStreamController] = controller; + controller[sym.transformAlgorithm] = transformAlgorithm; + controller[sym.flushAlgorithm] = flushAlgorithm; + } + + function setUpTransformStreamDefaultControllerFromTransformer( + stream, + transformer, + ) { + assert(transformer); + const controller = Object.create( + TransformStreamDefaultController.prototype, + ); + let transformAlgorithm = (chunk) => { + try { + transformStreamDefaultControllerEnqueue( + controller, + // it defaults to no tranformation, so I is assumed to be O + chunk, + ); + } catch (e) { + return Promise.reject(e); + } + return Promise.resolve(); + }; + const transformMethod = transformer.transform; + if (transformMethod) { + if (typeof transformMethod !== "function") { + throw new TypeError("tranformer.transform must be callable."); + } + transformAlgorithm = async (chunk) => + call(transformMethod, transformer, [chunk, controller]); + } + const flushAlgorithm = createAlgorithmFromUnderlyingMethod( + transformer, + "flush", + 0, + controller, + ); + 
setUpTransformStreamDefaultController( + stream, + controller, + transformAlgorithm, + flushAlgorithm, + ); + } + + function setUpWritableStreamDefaultController( + stream, + controller, + startAlgorithm, + writeAlgorithm, + closeAlgorithm, + abortAlgorithm, + highWaterMark, + sizeAlgorithm, + ) { + assert(isWritableStream(stream)); + assert(stream[sym.writableStreamController] === undefined); + controller[sym.controlledWritableStream] = stream; + stream[sym.writableStreamController] = controller; + controller[sym.queue] = []; + controller[sym.queueTotalSize] = 0; + controller[sym.started] = false; + controller[sym.strategySizeAlgorithm] = sizeAlgorithm; + controller[sym.strategyHWM] = highWaterMark; + controller[sym.writeAlgorithm] = writeAlgorithm; + controller[sym.closeAlgorithm] = closeAlgorithm; + controller[sym.abortAlgorithm] = abortAlgorithm; + const backpressure = writableStreamDefaultControllerGetBackpressure( + controller, + ); + writableStreamUpdateBackpressure(stream, backpressure); + const startResult = startAlgorithm(); + const startPromise = Promise.resolve(startResult); + setPromiseIsHandledToTrue( + startPromise.then( + () => { + assert( + stream[sym.state] === "writable" || + stream[sym.state] === "erroring", + ); + controller[sym.started] = true; + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); + }, + (r) => { + assert( + stream[sym.state] === "writable" || + stream[sym.state] === "erroring", + ); + controller[sym.started] = true; + writableStreamDealWithRejection(stream, r); + }, + ), + ); + } + + function setUpWritableStreamDefaultControllerFromUnderlyingSink( + stream, + underlyingSink, + highWaterMark, + sizeAlgorithm, + ) { + assert(underlyingSink); + const controller = Object.create( + WritableStreamDefaultController.prototype, + ); + const startAlgorithm = () => { + return invokeOrNoop(underlyingSink, "start", controller); + }; + const writeAlgorithm = createAlgorithmFromUnderlyingMethod( + underlyingSink, + "write", + 1, + controller, + ); + setFunctionName(writeAlgorithm, "[[writeAlgorithm]]"); + const closeAlgorithm = createAlgorithmFromUnderlyingMethod( + underlyingSink, + "close", + 0, + ); + setFunctionName(closeAlgorithm, "[[closeAlgorithm]]"); + const abortAlgorithm = createAlgorithmFromUnderlyingMethod( + underlyingSink, + "abort", + 1, + ); + setFunctionName(abortAlgorithm, "[[abortAlgorithm]]"); + setUpWritableStreamDefaultController( + stream, + controller, + startAlgorithm, + writeAlgorithm, + closeAlgorithm, + abortAlgorithm, + highWaterMark, + sizeAlgorithm, + ); + } + + function transformStreamDefaultControllerClearAlgorithms( + controller, + ) { + controller[sym.transformAlgorithm] = undefined; + controller[sym.flushAlgorithm] = undefined; + } + + function transformStreamDefaultControllerEnqueue( + controller, + chunk, + ) { + const stream = controller[sym.controlledTransformStream]; + const readableController = stream[sym.readable][ + sym.readableStreamController + ]; + if (!readableStreamDefaultControllerCanCloseOrEnqueue(readableController)) { + throw new TypeError( + "TransformStream's readable controller cannot be closed or enqueued.", + ); + } + try { + readableStreamDefaultControllerEnqueue(readableController, chunk); + } catch (e) { + transformStreamErrorWritableAndUnblockWrite(stream, e); + throw stream[sym.readable][sym.storedError]; + } + const backpressure = readableStreamDefaultControllerHasBackpressure( + readableController, + ); + if (backpressure) { + transformStreamSetBackpressure(stream, true); + } + } + + 
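+  // Usage sketch (illustrative only): the transformer shape consumed by
+  // setUpTransformStreamDefaultControllerFromTransformer() above. transform()
+  // and flush() are the spec-defined hooks; the upper-casing logic and the
+  // "[done]" marker are hypothetical examples.
+  //
+  //   const upperCaser = new TransformStream({
+  //     transform(chunk, controller) {
+  //       controller.enqueue(String(chunk).toUpperCase());
+  //     },
+  //     flush(controller) {
+  //       controller.enqueue("[done]");
+  //     },
+  //   });
+  //   // Composed the standard way: source.pipeThrough(upperCaser).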
function transformStreamDefaultControllerError( + controller, + e, + ) { + transformStreamError(controller[sym.controlledTransformStream], e); + } + + function transformStreamDefaultControllerPerformTransform( + controller, + chunk, + ) { + const transformPromise = controller[sym.transformAlgorithm](chunk); + return transformPromise.then(undefined, (r) => { + transformStreamError(controller[sym.controlledTransformStream], r); + throw r; + }); + } + + function transformStreamDefaultSinkAbortAlgorithm( + stream, + reason, + ) { + transformStreamError(stream, reason); + return Promise.resolve(undefined); + } + + function transformStreamDefaultSinkCloseAlgorithm( + stream, + ) { + const readable = stream[sym.readable]; + const controller = stream[sym.transformStreamController]; + const flushPromise = controller[sym.flushAlgorithm](); + transformStreamDefaultControllerClearAlgorithms(controller); + return flushPromise.then( + () => { + if (readable[sym.state] === "errored") { + throw readable[sym.storedError]; + } + const readableController = readable[ + sym.readableStreamController + ]; + if ( + readableStreamDefaultControllerCanCloseOrEnqueue(readableController) + ) { + readableStreamDefaultControllerClose(readableController); + } + }, + (r) => { + transformStreamError(stream, r); + throw readable[sym.storedError]; + }, + ); + } + + function transformStreamDefaultSinkWriteAlgorithm( + stream, + chunk, + ) { + assert(stream[sym.writable][sym.state] === "writable"); + const controller = stream[sym.transformStreamController]; + if (stream[sym.backpressure]) { + const backpressureChangePromise = stream[sym.backpressureChangePromise]; + assert(backpressureChangePromise); + return backpressureChangePromise.promise.then(() => { + const writable = stream[sym.writable]; + const state = writable[sym.state]; + if (state === "erroring") { + throw writable[sym.storedError]; + } + assert(state === "writable"); + return transformStreamDefaultControllerPerformTransform( + controller, + chunk, + ); + }); + } + return transformStreamDefaultControllerPerformTransform(controller, chunk); + } + + function transformStreamDefaultSourcePullAlgorithm( + stream, + ) { + assert(stream[sym.backpressure] === true); + assert(stream[sym.backpressureChangePromise] !== undefined); + transformStreamSetBackpressure(stream, false); + return stream[sym.backpressureChangePromise].promise; + } + + function transformStreamError( + stream, + e, + ) { + readableStreamDefaultControllerError( + stream[sym.readable][ + sym.readableStreamController + ], + e, + ); + transformStreamErrorWritableAndUnblockWrite(stream, e); + } + + function transformStreamDefaultControllerTerminate( + controller, + ) { + const stream = controller[sym.controlledTransformStream]; + const readableController = stream[sym.readable][ + sym.readableStreamController + ]; + readableStreamDefaultControllerClose(readableController); + const error = new TypeError("TransformStream is closed."); + transformStreamErrorWritableAndUnblockWrite(stream, error); + } + + function transformStreamErrorWritableAndUnblockWrite( + stream, + e, + ) { + transformStreamDefaultControllerClearAlgorithms( + stream[sym.transformStreamController], + ); + writableStreamDefaultControllerErrorIfNeeded( + stream[sym.writable][sym.writableStreamController], + e, + ); + if (stream[sym.backpressure]) { + transformStreamSetBackpressure(stream, false); + } + } + + function transformStreamSetBackpressure( + stream, + backpressure, + ) { + assert(stream[sym.backpressure] !== backpressure); + if 
(stream[sym.backpressureChangePromise] !== undefined) { + stream[sym.backpressureChangePromise].resolve(undefined); + } + stream[sym.backpressureChangePromise] = getDeferred(); + stream[sym.backpressure] = backpressure; + } + + function transferArrayBuffer(buffer) { + assert(!isDetachedBuffer(buffer)); + const transferredIshVersion = buffer.slice(0); + + Object.defineProperty(buffer, "byteLength", { + get() { + return 0; + }, + }); + buffer[sym.isFakeDetached] = true; + + return transferredIshVersion; + } + + function validateAndNormalizeHighWaterMark( + highWaterMark, + ) { + highWaterMark = Number(highWaterMark); + if (Number.isNaN(highWaterMark) || highWaterMark < 0) { + throw new RangeError( + `highWaterMark must be a positive number or Infinity. Received: ${highWaterMark}.`, + ); + } + return highWaterMark; + } + + function writableStreamAbort( + stream, + reason, + ) { + const state = stream[sym.state]; + if (state === "closed" || state === "errored") { + return Promise.resolve(undefined); + } + if (stream[sym.pendingAbortRequest]) { + return stream[sym.pendingAbortRequest].promise.promise; + } + assert(state === "writable" || state === "erroring"); + let wasAlreadyErroring = false; + if (state === "erroring") { + wasAlreadyErroring = true; + reason = undefined; + } + const promise = getDeferred(); + stream[sym.pendingAbortRequest] = { promise, reason, wasAlreadyErroring }; + + if (wasAlreadyErroring === false) { + writableStreamStartErroring(stream, reason); + } + return promise.promise; + } + + function writableStreamAddWriteRequest( + stream, + ) { + assert(isWritableStream(stream)); + assert(stream[sym.state] === "writable"); + const promise = getDeferred(); + stream[sym.writeRequests].push(promise); + return promise.promise; + } + + function writableStreamClose( + stream, + ) { + const state = stream[sym.state]; + if (state === "closed" || state === "errored") { + return Promise.reject( + new TypeError( + "Cannot close an already closed or errored WritableStream.", + ), + ); + } + assert(!writableStreamCloseQueuedOrInFlight(stream)); + const promise = getDeferred(); + stream[sym.closeRequest] = promise; + const writer = stream[sym.writer]; + if (writer && stream[sym.backpressure] && state === "writable") { + writer[sym.readyPromise].resolve(); + writer[sym.readyPromise].resolve = undefined; + writer[sym.readyPromise].reject = undefined; + } + writableStreamDefaultControllerClose(stream[sym.writableStreamController]); + return promise.promise; + } + + function writableStreamCloseQueuedOrInFlight( + stream, + ) { + return !( + stream[sym.closeRequest] === undefined && + stream[sym.inFlightCloseRequest] === undefined + ); + } + + function writableStreamDealWithRejection( + stream, + error, + ) { + const state = stream[sym.state]; + if (state === "writable") { + writableStreamStartErroring(stream, error); + return; + } + assert(state === "erroring"); + writableStreamFinishErroring(stream); + } + + function writableStreamDefaultControllerAdvanceQueueIfNeeded( + controller, + ) { + const stream = controller[sym.controlledWritableStream]; + if (!controller[sym.started]) { + return; + } + if (stream[sym.inFlightWriteRequest]) { + return; + } + const state = stream[sym.state]; + assert(state !== "closed" && state !== "errored"); + if (state === "erroring") { + writableStreamFinishErroring(stream); + return; + } + if (!controller[sym.queue].length) { + return; + } + const writeRecord = peekQueueValue(controller); + if (writeRecord === "close") { + 
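+      // "close" is a sentinel record enqueued (with size 0) by
+      // writableStreamDefaultControllerClose(); it runs the sink's close
+      // algorithm instead of a chunk write.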
writableStreamDefaultControllerProcessClose(controller); + } else { + writableStreamDefaultControllerProcessWrite( + controller, + writeRecord.chunk, + ); + } + } + + function writableStreamDefaultControllerClearAlgorithms( + controller, + ) { + controller[sym.writeAlgorithm] = undefined; + controller[sym.closeAlgorithm] = undefined; + controller[sym.abortAlgorithm] = undefined; + controller[sym.strategySizeAlgorithm] = undefined; + } + + function writableStreamDefaultControllerClose( + controller, + ) { + enqueueValueWithSize(controller, "close", 0); + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); + } + + function writableStreamDefaultControllerError( + controller, + error, + ) { + const stream = controller[sym.controlledWritableStream]; + assert(stream[sym.state] === "writable"); + writableStreamDefaultControllerClearAlgorithms(controller); + writableStreamStartErroring(stream, error); + } + + function writableStreamDefaultControllerErrorIfNeeded( + controller, + error, + ) { + if (controller[sym.controlledWritableStream][sym.state] === "writable") { + writableStreamDefaultControllerError(controller, error); + } + } + + function writableStreamDefaultControllerGetBackpressure( + controller, + ) { + const desiredSize = writableStreamDefaultControllerGetDesiredSize( + controller, + ); + return desiredSize <= 0; + } + + function writableStreamDefaultControllerGetChunkSize( + controller, + chunk, + ) { + let returnValue; + try { + returnValue = controller[sym.strategySizeAlgorithm](chunk); + } catch (e) { + writableStreamDefaultControllerErrorIfNeeded(controller, e); + return 1; + } + return returnValue; + } + + function writableStreamDefaultControllerGetDesiredSize( + controller, + ) { + return controller[sym.strategyHWM] - controller[sym.queueTotalSize]; + } + + function writableStreamDefaultControllerProcessClose( + controller, + ) { + const stream = controller[sym.controlledWritableStream]; + writableStreamMarkCloseRequestInFlight(stream); + dequeueValue(controller); + assert(controller[sym.queue].length === 0); + const sinkClosePromise = controller[sym.closeAlgorithm](); + writableStreamDefaultControllerClearAlgorithms(controller); + setPromiseIsHandledToTrue( + sinkClosePromise.then( + () => { + writableStreamFinishInFlightClose(stream); + }, + (reason) => { + writableStreamFinishInFlightCloseWithError(stream, reason); + }, + ), + ); + } + + function writableStreamDefaultControllerProcessWrite( + controller, + chunk, + ) { + const stream = controller[sym.controlledWritableStream]; + writableStreamMarkFirstWriteRequestInFlight(stream); + const sinkWritePromise = controller[sym.writeAlgorithm](chunk); + setPromiseIsHandledToTrue( + sinkWritePromise.then( + () => { + writableStreamFinishInFlightWrite(stream); + const state = stream[sym.state]; + assert(state === "writable" || state === "erroring"); + dequeueValue(controller); + if ( + !writableStreamCloseQueuedOrInFlight(stream) && + state === "writable" + ) { + const backpressure = writableStreamDefaultControllerGetBackpressure( + controller, + ); + writableStreamUpdateBackpressure(stream, backpressure); + } + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); + }, + (reason) => { + if (stream[sym.state] === "writable") { + writableStreamDefaultControllerClearAlgorithms(controller); + } + writableStreamFinishInFlightWriteWithError(stream, reason); + }, + ), + ); + } + + function writableStreamDefaultControllerWrite( + controller, + chunk, + chunkSize, + ) { + const writeRecord = { chunk }; + try { + 
enqueueValueWithSize(controller, writeRecord, chunkSize); + } catch (e) { + writableStreamDefaultControllerErrorIfNeeded(controller, e); + return; + } + const stream = controller[sym.controlledWritableStream]; + if ( + !writableStreamCloseQueuedOrInFlight(stream) && + stream[sym.state] === "writable" + ) { + const backpressure = writableStreamDefaultControllerGetBackpressure( + controller, + ); + writableStreamUpdateBackpressure(stream, backpressure); + } + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); + } + + function writableStreamDefaultWriterAbort( + writer, + reason, + ) { + const stream = writer[sym.ownerWritableStream]; + assert(stream); + return writableStreamAbort(stream, reason); + } + + function writableStreamDefaultWriterClose( + writer, + ) { + const stream = writer[sym.ownerWritableStream]; + assert(stream); + return writableStreamClose(stream); + } + + function writableStreamDefaultWriterCloseWithErrorPropagation( + writer, + ) { + const stream = writer[sym.ownerWritableStream]; + assert(stream); + const state = stream[sym.state]; + if (writableStreamCloseQueuedOrInFlight(stream) || state === "closed") { + return Promise.resolve(); + } + if (state === "errored") { + return Promise.reject(stream[sym.storedError]); + } + assert(state === "writable" || state === "erroring"); + return writableStreamDefaultWriterClose(writer); + } + + function writableStreamDefaultWriterEnsureClosePromiseRejected( + writer, + error, + ) { + if (writer[sym.closedPromise].reject) { + writer[sym.closedPromise].reject(error); + } else { + writer[sym.closedPromise] = { + promise: Promise.reject(error), + }; + } + setPromiseIsHandledToTrue(writer[sym.closedPromise].promise); + } + + function writableStreamDefaultWriterEnsureReadyPromiseRejected( + writer, + error, + ) { + if (writer[sym.readyPromise].reject) { + writer[sym.readyPromise].reject(error); + writer[sym.readyPromise].reject = undefined; + writer[sym.readyPromise].resolve = undefined; + } else { + writer[sym.readyPromise] = { + promise: Promise.reject(error), + }; + } + setPromiseIsHandledToTrue(writer[sym.readyPromise].promise); + } + + function writableStreamDefaultWriterWrite( + writer, + chunk, + ) { + const stream = writer[sym.ownerWritableStream]; + assert(stream); + const controller = stream[sym.writableStreamController]; + assert(controller); + const chunkSize = writableStreamDefaultControllerGetChunkSize( + controller, + chunk, + ); + if (stream !== writer[sym.ownerWritableStream]) { + return Promise.reject("Writer has incorrect WritableStream."); + } + const state = stream[sym.state]; + if (state === "errored") { + return Promise.reject(stream[sym.storedError]); + } + if (writableStreamCloseQueuedOrInFlight(stream) || state === "closed") { + return Promise.reject(new TypeError("The stream is closed or closing.")); + } + if (state === "erroring") { + return Promise.reject(stream[sym.storedError]); + } + assert(state === "writable"); + const promise = writableStreamAddWriteRequest(stream); + writableStreamDefaultControllerWrite(controller, chunk, chunkSize); + return promise; + } + + function writableStreamDefaultWriterGetDesiredSize( + writer, + ) { + const stream = writer[sym.ownerWritableStream]; + const state = stream[sym.state]; + if (state === "errored" || state === "erroring") { + return null; + } + if (state === "closed") { + return 0; + } + return writableStreamDefaultControllerGetDesiredSize( + stream[sym.writableStreamController], + ); + } + + function writableStreamDefaultWriterRelease( + writer, + ) { + 
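+    // Detaches the writer from its stream: the ready and closed promises are
+    // rejected with a "released" TypeError so pending consumers are notified,
+    // then the writer/stream back-references are cleared.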
const stream = writer[sym.ownerWritableStream]; + assert(stream); + assert(stream[sym.writer] === writer); + const releasedError = new TypeError( + "Writer was released and can no longer be used to monitor the stream's closedness.", + ); + writableStreamDefaultWriterEnsureReadyPromiseRejected( + writer, + releasedError, + ); + writableStreamDefaultWriterEnsureClosePromiseRejected( + writer, + releasedError, + ); + stream[sym.writer] = undefined; + writer[sym.ownerWritableStream] = undefined; + } + + function writableStreamFinishErroring(stream) { + assert(stream[sym.state] === "erroring"); + assert(!writableStreamHasOperationMarkedInFlight(stream)); + stream[sym.state] = "errored"; + stream[sym.writableStreamController][sym.errorSteps](); + const storedError = stream[sym.storedError]; + for (const writeRequest of stream[sym.writeRequests]) { + assert(writeRequest.reject); + writeRequest.reject(storedError); + } + stream[sym.writeRequests] = []; + if (!stream[sym.pendingAbortRequest]) { + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + return; + } + const abortRequest = stream[sym.pendingAbortRequest]; + assert(abortRequest); + stream[sym.pendingAbortRequest] = undefined; + if (abortRequest.wasAlreadyErroring) { + assert(abortRequest.promise.reject); + abortRequest.promise.reject(storedError); + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + return; + } + const promise = stream[sym.writableStreamController][sym.abortSteps]( + abortRequest.reason, + ); + setPromiseIsHandledToTrue( + promise.then( + () => { + assert(abortRequest.promise.resolve); + abortRequest.promise.resolve(); + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + }, + (reason) => { + assert(abortRequest.promise.reject); + abortRequest.promise.reject(reason); + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + }, + ), + ); + } + + function writableStreamFinishInFlightClose( + stream, + ) { + assert(stream[sym.inFlightCloseRequest]); + stream[sym.inFlightCloseRequest]?.resolve(); + stream[sym.inFlightCloseRequest] = undefined; + const state = stream[sym.state]; + assert(state === "writable" || state === "erroring"); + if (state === "erroring") { + stream[sym.storedError] = undefined; + if (stream[sym.pendingAbortRequest]) { + stream[sym.pendingAbortRequest].promise.resolve(); + stream[sym.pendingAbortRequest] = undefined; + } + } + stream[sym.state] = "closed"; + const writer = stream[sym.writer]; + if (writer) { + writer[sym.closedPromise].resolve(); + } + assert(stream[sym.pendingAbortRequest] === undefined); + assert(stream[sym.storedError] === undefined); + } + + function writableStreamFinishInFlightCloseWithError( + stream, + error, + ) { + assert(stream[sym.inFlightCloseRequest]); + stream[sym.inFlightCloseRequest]?.reject(error); + stream[sym.inFlightCloseRequest] = undefined; + assert( + stream[sym.state] === "writable" || stream[sym.state] === "erroring", + ); + if (stream[sym.pendingAbortRequest]) { + stream[sym.pendingAbortRequest]?.promise.reject(error); + stream[sym.pendingAbortRequest] = undefined; + } + writableStreamDealWithRejection(stream, error); + } + + function writableStreamFinishInFlightWrite( + stream, + ) { + assert(stream[sym.inFlightWriteRequest]); + stream[sym.inFlightWriteRequest].resolve(); + stream[sym.inFlightWriteRequest] = undefined; + } + + function writableStreamFinishInFlightWriteWithError( + stream, + error, + ) { + assert(stream[sym.inFlightWriteRequest]); + stream[sym.inFlightWriteRequest].reject(error); + stream[sym.inFlightWriteRequest] 
= undefined; + assert( + stream[sym.state] === "writable" || stream[sym.state] === "erroring", + ); + writableStreamDealWithRejection(stream, error); + } + + function writableStreamHasOperationMarkedInFlight( + stream, + ) { + return !( + stream[sym.inFlightWriteRequest] === undefined && + stream[sym.inFlightCloseRequest] === undefined + ); + } + + function writableStreamMarkCloseRequestInFlight( + stream, + ) { + assert(stream[sym.inFlightCloseRequest] === undefined); + assert(stream[sym.closeRequest] !== undefined); + stream[sym.inFlightCloseRequest] = stream[sym.closeRequest]; + stream[sym.closeRequest] = undefined; + } + + function writableStreamMarkFirstWriteRequestInFlight( + stream, + ) { + assert(stream[sym.inFlightWriteRequest] === undefined); + assert(stream[sym.writeRequests].length); + const writeRequest = stream[sym.writeRequests].shift(); + stream[sym.inFlightWriteRequest] = writeRequest; + } + + function writableStreamRejectCloseAndClosedPromiseIfNeeded( + stream, + ) { + assert(stream[sym.state] === "errored"); + if (stream[sym.closeRequest]) { + assert(stream[sym.inFlightCloseRequest] === undefined); + stream[sym.closeRequest].reject(stream[sym.storedError]); + stream[sym.closeRequest] = undefined; + } + const writer = stream[sym.writer]; + if (writer) { + writer[sym.closedPromise].reject(stream[sym.storedError]); + setPromiseIsHandledToTrue(writer[sym.closedPromise].promise); + } + } + + function writableStreamStartErroring( + stream, + reason, + ) { + assert(stream[sym.storedError] === undefined); + assert(stream[sym.state] === "writable"); + const controller = stream[sym.writableStreamController]; + assert(controller); + stream[sym.state] = "erroring"; + stream[sym.storedError] = reason; + const writer = stream[sym.writer]; + if (writer) { + writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, reason); + } + if ( + !writableStreamHasOperationMarkedInFlight(stream) && + controller[sym.started] + ) { + writableStreamFinishErroring(stream); + } + } + + function writableStreamUpdateBackpressure( + stream, + backpressure, + ) { + assert(stream[sym.state] === "writable"); + assert(!writableStreamCloseQueuedOrInFlight(stream)); + const writer = stream[sym.writer]; + if (writer && backpressure !== stream[sym.backpressure]) { + if (backpressure) { + writer[sym.readyPromise] = getDeferred(); + } else { + assert(backpressure === false); + writer[sym.readyPromise].resolve(); + writer[sym.readyPromise].resolve = undefined; + writer[sym.readyPromise].reject = undefined; + } + } + stream[sym.backpressure] = backpressure; + } + /* eslint-enable */ + + class CountQueuingStrategy { + constructor({ highWaterMark }) { + this.highWaterMark = highWaterMark; + } + + size() { + return 1; + } + + [customInspect]() { + return `${this.constructor.name} { highWaterMark: ${ + String(this.highWaterMark) + }, size: f }`; + } + } + + Object.defineProperty(CountQueuingStrategy.prototype, "size", { + enumerable: true, + }); + + class ByteLengthQueuingStrategy { + constructor({ highWaterMark }) { + this.highWaterMark = highWaterMark; + } + + size(chunk) { + return chunk.byteLength; + } + + [customInspect]() { + return `${this.constructor.name} { highWaterMark: ${ + String(this.highWaterMark) + }, size: f }`; + } + } + + Object.defineProperty(ByteLengthQueuingStrategy.prototype, "size", { + enumerable: true, + }); + + window.__bootstrap.streams = { + ReadableStream, + TransformStream, + WritableStream, + isReadableStreamDisturbed, + CountQueuingStrategy, + ByteLengthQueuingStrategy, + }; 
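+  // Usage sketch (illustrative only): how the queuing strategies exported
+  // above are typically passed to a stream constructor. `writeChunk` is a
+  // hypothetical sink function.
+  //
+  //   const writable = new WritableStream(
+  //     { write(chunk) { return writeChunk(chunk); } },
+  //     new ByteLengthQueuingStrategy({ highWaterMark: 16 * 1024 }),
+  //   );
+  //   // CountQueuingStrategy counts every chunk as size 1 instead:
+  //   const counted = new CountQueuingStrategy({ highWaterMark: 8 });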
+})(this); diff --git a/op_crates/fetch/20_headers.js b/op_crates/fetch/20_headers.js new file mode 100644 index 000000000..c2ae72864 --- /dev/null +++ b/op_crates/fetch/20_headers.js @@ -0,0 +1,256 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +((window) => { + const { DomIterableMixin } = window.__bootstrap.domIterable; + const { requiredArguments } = window.__bootstrap.fetchUtil; + + // From node-fetch + // Copyright (c) 2016 David Frank. MIT License. + const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/; + const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/; + + function isHeaders(value) { + // eslint-disable-next-line @typescript-eslint/no-use-before-define + return value instanceof Headers; + } + + const headersData = Symbol("headers data"); + + // TODO: headerGuard? Investigate if it is needed + // node-fetch did not implement this but it is in the spec + function normalizeParams(name, value) { + name = String(name).toLowerCase(); + value = String(value).trim(); + return [name, value]; + } + + // The following name/value validations are copied from + // https://github.com/bitinn/node-fetch/blob/master/src/headers.js + // Copyright (c) 2016 David Frank. MIT License. + function validateName(name) { + if (invalidTokenRegex.test(name) || name === "") { + throw new TypeError(`${name} is not a legal HTTP header name`); + } + } + + function validateValue(value) { + if (invalidHeaderCharRegex.test(value)) { + throw new TypeError(`${value} is not a legal HTTP header value`); + } + } + + /** Appends a key and value to the header list. + * + * The spec indicates that when a key already exists, the append adds the new + * value onto the end of the existing value. The behaviour of this though + * varies when the key is `set-cookie`. In this case, if the key of the cookie + * already exists, the value is replaced, but if the key of the cookie does not + * exist, and additional `set-cookie` header is added. + * + * The browser specification of `Headers` is written for clients, and not + * servers, and Deno is a server, meaning that it needs to follow the patterns + * expected for servers, of which a `set-cookie` header is expected for each + * unique cookie key, but duplicate cookie keys should not exist. */ + function dataAppend( + data, + key, + value, + ) { + for (let i = 0; i < data.length; i++) { + const [dataKey] = data[i]; + if (key === "set-cookie" && dataKey === "set-cookie") { + const [, dataValue] = data[i]; + const [dataCookieKey] = dataValue.split("="); + const [cookieKey] = value.split("="); + if (dataCookieKey === cookieKey) { + data[i][1] = value; + return; + } + } else { + if (dataKey === key) { + data[i][1] += `, ${value}`; + return; + } + } + } + data.push([key, value]); + } + + /** Gets a value of a key in the headers list. + * + * This varies slightly from spec behaviour in that when the key is `set-cookie` + * the value returned will look like a concatenated value, when in fact, if the + * headers were iterated over, each individual `set-cookie` value is a unique + * entry in the headers list. */ + function dataGet( + data, + key, + ) { + const setCookieValues = []; + for (const [dataKey, value] of data) { + if (dataKey === key) { + if (key === "set-cookie") { + setCookieValues.push(value); + } else { + return value; + } + } + } + if (setCookieValues.length) { + return setCookieValues.join(", "); + } + return undefined; + } + + /** Sets a value of a key in the headers list. 
+ * + * The spec indicates that the value should be replaced if the key already + * exists. The behaviour here varies, where if the key is `set-cookie` the key + * of the cookie is inspected, and if the key of the cookie already exists, + * then the value is replaced. If the key of the cookie is not found, then + * the value of the `set-cookie` is added to the list of headers. + * + * The browser specification of `Headers` is written for clients, and not + * servers, and Deno is a server, meaning that it needs to follow the patterns + * expected for servers, of which a `set-cookie` header is expected for each + * unique cookie key, but duplicate cookie keys should not exist. */ + function dataSet( + data, + key, + value, + ) { + for (let i = 0; i < data.length; i++) { + const [dataKey] = data[i]; + if (dataKey === key) { + // there could be multiple set-cookie headers, but all others are unique + if (key === "set-cookie") { + const [, dataValue] = data[i]; + const [dataCookieKey] = dataValue.split("="); + const [cookieKey] = value.split("="); + if (cookieKey === dataCookieKey) { + data[i][1] = value; + return; + } + } else { + data[i][1] = value; + return; + } + } + } + data.push([key, value]); + } + + function dataDelete(data, key) { + let i = 0; + while (i < data.length) { + const [dataKey] = data[i]; + if (dataKey === key) { + data.splice(i, 1); + } else { + i++; + } + } + } + + function dataHas(data, key) { + for (const [dataKey] of data) { + if (dataKey === key) { + return true; + } + } + return false; + } + + // ref: https://fetch.spec.whatwg.org/#dom-headers + class HeadersBase { + constructor(init) { + if (init === null) { + throw new TypeError( + "Failed to construct 'Headers'; The provided value was not valid", + ); + } else if (isHeaders(init)) { + this[headersData] = [...init]; + } else { + this[headersData] = []; + if (Array.isArray(init)) { + for (const tuple of init) { + // If header does not contain exactly two items, + // then throw a TypeError. + // ref: https://fetch.spec.whatwg.org/#concept-headers-fill + requiredArguments( + "Headers.constructor tuple array argument", + tuple.length, + 2, + ); + + this.append(tuple[0], tuple[1]); + } + } else if (init) { + for (const [rawName, rawValue] of Object.entries(init)) { + this.append(rawName, rawValue); + } + } + } + } + + [Symbol.for("Deno.customInspect")]() { + let length = this[headersData].length; + let output = ""; + for (const [key, value] of this[headersData]) { + const prefix = length === this[headersData].length ? " " : ""; + const postfix = length === 1 ? " " : ", "; + output = output + `${prefix}${key}: ${value}${postfix}`; + length--; + } + return `Headers {${output}}`; + } + + // ref: https://fetch.spec.whatwg.org/#concept-headers-append + append(name, value) { + requiredArguments("Headers.append", arguments.length, 2); + const [newname, newvalue] = normalizeParams(name, value); + validateName(newname); + validateValue(newvalue); + dataAppend(this[headersData], newname, newvalue); + } + + delete(name) { + requiredArguments("Headers.delete", arguments.length, 1); + const [newname] = normalizeParams(name); + validateName(newname); + dataDelete(this[headersData], newname); + } + + get(name) { + requiredArguments("Headers.get", arguments.length, 1); + const [newname] = normalizeParams(name); + validateName(newname); + return dataGet(this[headersData], newname) ?? 
null; + } + + has(name) { + requiredArguments("Headers.has", arguments.length, 1); + const [newname] = normalizeParams(name); + validateName(newname); + return dataHas(this[headersData], newname); + } + + set(name, value) { + requiredArguments("Headers.set", arguments.length, 2); + const [newname, newvalue] = normalizeParams(name, value); + validateName(newname); + validateValue(newvalue); + dataSet(this[headersData], newname, newvalue); + } + + get [Symbol.toStringTag]() { + return "Headers"; + } + } + + class Headers extends DomIterableMixin(HeadersBase, headersData) {} + + window.__bootstrap.headers = { + Headers, + }; +})(this); diff --git a/op_crates/fetch/26_fetch.js b/op_crates/fetch/26_fetch.js new file mode 100644 index 000000000..4b31110d6 --- /dev/null +++ b/op_crates/fetch/26_fetch.js @@ -0,0 +1,1390 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +((window) => { + const core = window.Deno.core; + + // provided by "deno_web" + const { URLSearchParams } = window.__bootstrap.url; + + const { requiredArguments } = window.__bootstrap.fetchUtil; + const { ReadableStream, isReadableStreamDisturbed } = + window.__bootstrap.streams; + const { DomIterableMixin } = window.__bootstrap.domIterable; + const { Headers } = window.__bootstrap.headers; + + // FIXME(bartlomieju): stubbed out, needed in blob + const build = { + os: "", + }; + + const MAX_SIZE = 2 ** 32 - 2; + + // `off` is the offset into `dst` where it will at which to begin writing values + // from `src`. + // Returns the number of bytes copied. + function copyBytes(src, dst, off = 0) { + const r = dst.byteLength - off; + if (src.byteLength > r) { + src = src.subarray(0, r); + } + dst.set(src, off); + return src.byteLength; + } + + class Buffer { + #buf = null; // contents are the bytes buf[off : len(buf)] + #off = 0; // read at buf[off], write at buf[buf.byteLength] + + constructor(ab) { + if (ab == null) { + this.#buf = new Uint8Array(0); + return; + } + + this.#buf = new Uint8Array(ab); + } + + bytes(options = { copy: true }) { + if (options.copy === false) return this.#buf.subarray(this.#off); + return this.#buf.slice(this.#off); + } + + empty() { + return this.#buf.byteLength <= this.#off; + } + + get length() { + return this.#buf.byteLength - this.#off; + } + + get capacity() { + return this.#buf.buffer.byteLength; + } + + reset() { + this.#reslice(0); + this.#off = 0; + } + + #tryGrowByReslice = (n) => { + const l = this.#buf.byteLength; + if (n <= this.capacity - l) { + this.#reslice(l + n); + return l; + } + return -1; + }; + + #reslice = (len) => { + if (!(len <= this.#buf.buffer.byteLength)) { + throw new Error("assert"); + } + this.#buf = new Uint8Array(this.#buf.buffer, 0, len); + }; + + writeSync(p) { + const m = this.#grow(p.byteLength); + return copyBytes(p, this.#buf, m); + } + + write(p) { + const n = this.writeSync(p); + return Promise.resolve(n); + } + + #grow = (n) => { + const m = this.length; + // If buffer is empty, reset to recover space. + if (m === 0 && this.#off !== 0) { + this.reset(); + } + // Fast: Try to grow by means of a reslice. + const i = this.#tryGrowByReslice(n); + if (i >= 0) { + return i; + } + const c = this.capacity; + if (n <= Math.floor(c / 2) - m) { + // We can slide things down instead of allocating a new + // ArrayBuffer. We only need m+n <= c to slide, but + // we instead let capacity get twice as large so we + // don't spend all our time copying. 
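+      // Worked example: with capacity c = 1024 and m = 100 unread bytes, a
+      // write of n = 300 satisfies 300 <= 512 - 100, so the unread bytes are
+      // slid to offset 0 and the existing buffer is reused; n = 500 does not,
+      // so a new buffer of min(2 * c + n, MAX_SIZE) bytes is allocated below.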
+ copyBytes(this.#buf.subarray(this.#off), this.#buf); + } else if (c + n > MAX_SIZE) { + throw new Error("The buffer cannot be grown beyond the maximum size."); + } else { + // Not enough space anywhere, we need to allocate. + const buf = new Uint8Array(Math.min(2 * c + n, MAX_SIZE)); + copyBytes(this.#buf.subarray(this.#off), buf); + this.#buf = buf; + } + // Restore this.#off and len(this.#buf). + this.#off = 0; + this.#reslice(Math.min(m + n, MAX_SIZE)); + return m; + }; + + grow(n) { + if (n < 0) { + throw Error("Buffer.grow: negative count"); + } + const m = this.#grow(n); + this.#reslice(m); + } + } + + function isTypedArray(x) { + return ArrayBuffer.isView(x) && !(x instanceof DataView); + } + + function hasHeaderValueOf(s, value) { + return new RegExp(`^${value}[\t\s]*;?`).test(s); + } + + function getHeaderValueParams(value) { + const params = new Map(); + // Forced to do so for some Map constructor param mismatch + value + .split(";") + .slice(1) + .map((s) => s.trim().split("=")) + .filter((arr) => arr.length > 1) + .map(([k, v]) => [k, v.replace(/^"([^"]*)"$/, "$1")]) + .forEach(([k, v]) => params.set(k, v)); + return params; + } + + const decoder = new TextDecoder(); + const encoder = new TextEncoder(); + const CR = "\r".charCodeAt(0); + const LF = "\n".charCodeAt(0); + + const dataSymbol = Symbol("data"); + const bytesSymbol = Symbol("bytes"); + + function containsOnlyASCII(str) { + if (typeof str !== "string") { + return false; + } + return /^[\x00-\x7F]*$/.test(str); + } + + function convertLineEndingsToNative(s) { + const nativeLineEnd = build.os == "windows" ? "\r\n" : "\n"; + + let position = 0; + + let collectionResult = collectSequenceNotCRLF(s, position); + + let token = collectionResult.collected; + position = collectionResult.newPosition; + + let result = token; + + while (position < s.length) { + const c = s.charAt(position); + if (c == "\r") { + result += nativeLineEnd; + position++; + if (position < s.length && s.charAt(position) == "\n") { + position++; + } + } else if (c == "\n") { + position++; + result += nativeLineEnd; + } + + collectionResult = collectSequenceNotCRLF(s, position); + + token = collectionResult.collected; + position = collectionResult.newPosition; + + result += token; + } + + return result; + } + + function collectSequenceNotCRLF( + s, + position, + ) { + const start = position; + for ( + let c = s.charAt(position); + position < s.length && !(c == "\r" || c == "\n"); + c = s.charAt(++position) + ); + return { collected: s.slice(start, position), newPosition: position }; + } + + function toUint8Arrays( + blobParts, + doNormalizeLineEndingsToNative, + ) { + const ret = []; + const enc = new TextEncoder(); + for (const element of blobParts) { + if (typeof element === "string") { + let str = element; + if (doNormalizeLineEndingsToNative) { + str = convertLineEndingsToNative(element); + } + ret.push(enc.encode(str)); + // eslint-disable-next-line @typescript-eslint/no-use-before-define + } else if (element instanceof Blob) { + ret.push(element[bytesSymbol]); + } else if (element instanceof Uint8Array) { + ret.push(element); + } else if (element instanceof Uint16Array) { + const uint8 = new Uint8Array(element.buffer); + ret.push(uint8); + } else if (element instanceof Uint32Array) { + const uint8 = new Uint8Array(element.buffer); + ret.push(uint8); + } else if (ArrayBuffer.isView(element)) { + // Convert view to Uint8Array. 
+ const uint8 = new Uint8Array(element.buffer); + ret.push(uint8); + } else if (element instanceof ArrayBuffer) { + // Create a new Uint8Array view for the given ArrayBuffer. + const uint8 = new Uint8Array(element); + ret.push(uint8); + } else { + ret.push(enc.encode(String(element))); + } + } + return ret; + } + + function processBlobParts( + blobParts, + options, + ) { + const normalizeLineEndingsToNative = options.ending === "native"; + // ArrayBuffer.transfer is not yet implemented in V8, so we just have to + // pre compute size of the array buffer and do some sort of static allocation + // instead of dynamic allocation. + const uint8Arrays = toUint8Arrays(blobParts, normalizeLineEndingsToNative); + const byteLength = uint8Arrays + .map((u8) => u8.byteLength) + .reduce((a, b) => a + b, 0); + const ab = new ArrayBuffer(byteLength); + const bytes = new Uint8Array(ab); + let courser = 0; + for (const u8 of uint8Arrays) { + bytes.set(u8, courser); + courser += u8.byteLength; + } + + return bytes; + } + + function getStream(blobBytes) { + // TODO: Align to spec https://fetch.spec.whatwg.org/#concept-construct-readablestream + return new ReadableStream({ + type: "bytes", + start: (controller) => { + controller.enqueue(blobBytes); + controller.close(); + }, + }); + } + + async function readBytes( + reader, + ) { + const chunks = []; + while (true) { + const { done, value } = await reader.read(); + if (!done && value instanceof Uint8Array) { + chunks.push(value); + } else if (done) { + const size = chunks.reduce((p, i) => p + i.byteLength, 0); + const bytes = new Uint8Array(size); + let offs = 0; + for (const chunk of chunks) { + bytes.set(chunk, offs); + offs += chunk.byteLength; + } + return bytes.buffer; + } else { + throw new TypeError("Invalid reader result."); + } + } + } + + // A WeakMap holding blob to byte array mapping. + // Ensures it does not impact garbage collection. + // const blobBytesWeakMap = new WeakMap(); + + class Blob { + constructor(blobParts, options) { + if (arguments.length === 0) { + this[bytesSymbol] = new Uint8Array(); + return; + } + + const { ending = "transparent", type = "" } = options ?? {}; + // Normalize options.type. + let normalizedType = type; + if (!containsOnlyASCII(type)) { + normalizedType = ""; + } else { + if (type.length) { + for (let i = 0; i < type.length; ++i) { + const char = type[i]; + if (char < "\u0020" || char > "\u007E") { + normalizedType = ""; + break; + } + } + normalizedType = type.toLowerCase(); + } + } + const bytes = processBlobParts(blobParts, { ending, type }); + // Set Blob object's properties. + this[bytesSymbol] = bytes; + this.size = bytes.byteLength; + this.type = normalizedType; + } + + slice(start, end, contentType) { + return new Blob([this[bytesSymbol].slice(start, end)], { + type: contentType || this.type, + }); + } + + stream() { + return getStream(this[bytesSymbol]); + } + + async text() { + const reader = getStream(this[bytesSymbol]).getReader(); + const decoder = new TextDecoder(); + return decoder.decode(await readBytes(reader)); + } + + arrayBuffer() { + return readBytes(getStream(this[bytesSymbol]).getReader()); + } + } + + class DomFile extends Blob { + constructor( + fileBits, + fileName, + options, + ) { + const { lastModified = Date.now(), ...blobPropertyBag } = options ?? 
{}; + super(fileBits, blobPropertyBag); + + // 4.1.2.1 Replace any "/" character (U+002F SOLIDUS) + // with a ":" (U + 003A COLON) + this.name = String(fileName).replace(/\u002F/g, "\u003A"); + // 4.1.3.3 If lastModified is not provided, set lastModified to the current + // date and time represented in number of milliseconds since the Unix Epoch. + this.lastModified = lastModified; + } + } + + function parseFormDataValue(value, filename) { + if (value instanceof DomFile) { + return new DomFile([value], filename || value.name, { + type: value.type, + lastModified: value.lastModified, + }); + } else if (value instanceof Blob) { + return new DomFile([value], filename || "blob", { + type: value.type, + }); + } else { + return String(value); + } + } + + class FormDataBase { + [dataSymbol] = []; + + append(name, value, filename) { + requiredArguments("FormData.append", arguments.length, 2); + name = String(name); + this[dataSymbol].push([name, parseFormDataValue(value, filename)]); + } + + delete(name) { + requiredArguments("FormData.delete", arguments.length, 1); + name = String(name); + let i = 0; + while (i < this[dataSymbol].length) { + if (this[dataSymbol][i][0] === name) { + this[dataSymbol].splice(i, 1); + } else { + i++; + } + } + } + + getAll(name) { + requiredArguments("FormData.getAll", arguments.length, 1); + name = String(name); + const values = []; + for (const entry of this[dataSymbol]) { + if (entry[0] === name) { + values.push(entry[1]); + } + } + + return values; + } + + get(name) { + requiredArguments("FormData.get", arguments.length, 1); + name = String(name); + for (const entry of this[dataSymbol]) { + if (entry[0] === name) { + return entry[1]; + } + } + + return null; + } + + has(name) { + requiredArguments("FormData.has", arguments.length, 1); + name = String(name); + return this[dataSymbol].some((entry) => entry[0] === name); + } + + set(name, value, filename) { + requiredArguments("FormData.set", arguments.length, 2); + name = String(name); + + // If there are any entries in the context object’s entry list whose name + // is name, replace the first such entry with entry and remove the others + let found = false; + let i = 0; + while (i < this[dataSymbol].length) { + if (this[dataSymbol][i][0] === name) { + if (!found) { + this[dataSymbol][i][1] = parseFormDataValue(value, filename); + found = true; + } else { + this[dataSymbol].splice(i, 1); + continue; + } + } + i++; + } + + // Otherwise, append entry to the context object’s entry list. + if (!found) { + this[dataSymbol].push([name, parseFormDataValue(value, filename)]); + } + } + + get [Symbol.toStringTag]() { + return "FormData"; + } + } + + class FormData extends DomIterableMixin(FormDataBase, dataSymbol) {} + + class MultipartBuilder { + constructor(formData, boundary) { + this.formData = formData; + this.boundary = boundary ?? this.#createBoundary(); + this.writer = new Buffer(); + } + + getContentType() { + return `multipart/form-data; boundary=${this.boundary}`; + } + + getBody() { + for (const [fieldName, fieldValue] of this.formData.entries()) { + if (fieldValue instanceof DomFile) { + this.#writeFile(fieldName, fieldValue); + } else this.#writeField(fieldName, fieldValue); + } + + this.writer.writeSync(encoder.encode(`\r\n--${this.boundary}--`)); + + return this.writer.bytes(); + } + + #createBoundary = () => { + return ( + "----------" + + Array.from(Array(32)) + .map(() => Math.random().toString(36)[2] || 0) + .join("") + ); + }; + + #writeHeaders = (headers) => { + let buf = this.writer.empty() ? 
"" : "\r\n"; + + buf += `--${this.boundary}\r\n`; + for (const [key, value] of headers) { + buf += `${key}: ${value}\r\n`; + } + buf += `\r\n`; + + // FIXME(Bartlomieju): this should use `writeSync()` + this.writer.write(encoder.encode(buf)); + }; + + #writeFileHeaders = ( + field, + filename, + type, + ) => { + const headers = [ + [ + "Content-Disposition", + `form-data; name="${field}"; filename="${filename}"`, + ], + ["Content-Type", type || "application/octet-stream"], + ]; + return this.#writeHeaders(headers); + }; + + #writeFieldHeaders = (field) => { + const headers = [["Content-Disposition", `form-data; name="${field}"`]]; + return this.#writeHeaders(headers); + }; + + #writeField = (field, value) => { + this.#writeFieldHeaders(field); + this.writer.writeSync(encoder.encode(value)); + }; + + #writeFile = (field, value) => { + this.#writeFileHeaders(field, value.name, value.type); + this.writer.writeSync(value[bytesSymbol]); + }; + } + + class MultipartParser { + constructor(body, boundary) { + if (!boundary) { + throw new TypeError("multipart/form-data must provide a boundary"); + } + + this.boundary = `--${boundary}`; + this.body = body; + this.boundaryChars = encoder.encode(this.boundary); + } + + #parseHeaders = (headersText) => { + const headers = new Headers(); + const rawHeaders = headersText.split("\r\n"); + for (const rawHeader of rawHeaders) { + const sepIndex = rawHeader.indexOf(":"); + if (sepIndex < 0) { + continue; // Skip this header + } + const key = rawHeader.slice(0, sepIndex); + const value = rawHeader.slice(sepIndex + 1); + headers.set(key, value); + } + + return { + headers, + disposition: getHeaderValueParams( + headers.get("Content-Disposition") ?? "", + ), + }; + }; + + parse() { + const formData = new FormData(); + let headerText = ""; + let boundaryIndex = 0; + let state = 0; + let fileStart = 0; + + for (let i = 0; i < this.body.length; i++) { + const byte = this.body[i]; + const prevByte = this.body[i - 1]; + const isNewLine = byte === LF && prevByte === CR; + + if (state === 1 || state === 2 || state == 3) { + headerText += String.fromCharCode(byte); + } + if (state === 0 && isNewLine) { + state = 1; + } else if (state === 1 && isNewLine) { + state = 2; + const headersDone = this.body[i + 1] === CR && + this.body[i + 2] === LF; + + if (headersDone) { + state = 3; + } + } else if (state === 2 && isNewLine) { + state = 3; + } else if (state === 3 && isNewLine) { + state = 4; + fileStart = i + 1; + } else if (state === 4) { + if (this.boundaryChars[boundaryIndex] !== byte) { + boundaryIndex = 0; + } else { + boundaryIndex++; + } + + if (boundaryIndex >= this.boundary.length) { + const { headers, disposition } = this.#parseHeaders(headerText); + const content = this.body.subarray( + fileStart, + i - boundaryIndex - 1, + ); + // https://fetch.spec.whatwg.org/#ref-for-dom-body-formdata + const filename = disposition.get("filename"); + const name = disposition.get("name"); + + state = 5; + // Reset + boundaryIndex = 0; + headerText = ""; + + if (!name) { + continue; // Skip, unknown name + } + + if (filename) { + const blob = new Blob([content], { + type: headers.get("Content-Type") || "application/octet-stream", + }); + formData.append(name, blob, filename); + } else { + formData.append(name, decoder.decode(content)); + } + } + } else if (state === 5 && isNewLine) { + state = 1; + } + } + + return formData; + } + } + + function validateBodyType(owner, bodySource) { + if (isTypedArray(bodySource)) { + return true; + } else if (bodySource instanceof ArrayBuffer) 
{ + return true; + } else if (typeof bodySource === "string") { + return true; + } else if (bodySource instanceof ReadableStream) { + return true; + } else if (bodySource instanceof FormData) { + return true; + } else if (bodySource instanceof URLSearchParams) { + return true; + } else if (!bodySource) { + return true; // null body is fine + } + throw new Error( + `Bad ${owner.constructor.name} body type: ${bodySource.constructor.name}`, + ); + } + + async function bufferFromStream( + stream, + size, + ) { + const encoder = new TextEncoder(); + const buffer = new Buffer(); + + if (size) { + // grow to avoid unnecessary allocations & copies + buffer.grow(size); + } + + while (true) { + const { done, value } = await stream.read(); + + if (done) break; + + if (typeof value === "string") { + buffer.writeSync(encoder.encode(value)); + } else if (value instanceof ArrayBuffer) { + buffer.writeSync(new Uint8Array(value)); + } else if (value instanceof Uint8Array) { + buffer.writeSync(value); + } else if (!value) { + // noop for undefined + } else { + throw new Error("unhandled type on stream read"); + } + } + + return buffer.bytes().buffer; + } + + function bodyToArrayBuffer(bodySource) { + if (isTypedArray(bodySource)) { + return bodySource.buffer; + } else if (bodySource instanceof ArrayBuffer) { + return bodySource; + } else if (typeof bodySource === "string") { + const enc = new TextEncoder(); + return enc.encode(bodySource).buffer; + } else if (bodySource instanceof ReadableStream) { + throw new Error( + `Can't convert stream to ArrayBuffer (try bufferFromStream)`, + ); + } else if ( + bodySource instanceof FormData || + bodySource instanceof URLSearchParams + ) { + const enc = new TextEncoder(); + return enc.encode(bodySource.toString()).buffer; + } else if (!bodySource) { + return new ArrayBuffer(0); + } + throw new Error( + `Body type not implemented: ${bodySource.constructor.name}`, + ); + } + + const BodyUsedError = + "Failed to execute 'clone' on 'Body': body is already used"; + + class Body { + #contentType = ""; + #size = undefined; + + constructor(_bodySource, meta) { + validateBodyType(this, _bodySource); + this._bodySource = _bodySource; + this.#contentType = meta.contentType; + this.#size = meta.size; + this._stream = null; + } + + get body() { + if (this._stream) { + return this._stream; + } + + if (!this._bodySource) { + return null; + } else if (this._bodySource instanceof ReadableStream) { + this._stream = this._bodySource; + } else { + const buf = bodyToArrayBuffer(this._bodySource); + if (!(buf instanceof ArrayBuffer)) { + throw new Error( + `Expected ArrayBuffer from body`, + ); + } + + this._stream = new ReadableStream({ + start(controller) { + controller.enqueue(buf); + controller.close(); + }, + }); + } + + return this._stream; + } + + get bodyUsed() { + if (this.body && isReadableStreamDisturbed(this.body)) { + return true; + } + return false; + } + + async blob() { + return new Blob([await this.arrayBuffer()], { + type: this.#contentType, + }); + } + + // ref: https://fetch.spec.whatwg.org/#body-mixin + async formData() { + const formData = new FormData(); + if (hasHeaderValueOf(this.#contentType, "multipart/form-data")) { + const params = getHeaderValueParams(this.#contentType); + + // ref: https://tools.ietf.org/html/rfc2046#section-5.1 + const boundary = params.get("boundary"); + const body = new Uint8Array(await this.arrayBuffer()); + const multipartParser = new MultipartParser(body, boundary); + + return multipartParser.parse(); + } else if ( + 
hasHeaderValueOf(this.#contentType, "application/x-www-form-urlencoded") + ) { + // From https://github.com/github/fetch/blob/master/fetch.js + // Copyright (c) 2014-2016 GitHub, Inc. MIT License + const body = await this.text(); + try { + body + .trim() + .split("&") + .forEach((bytes) => { + if (bytes) { + const split = bytes.split("="); + const name = split.shift().replace(/\+/g, " "); + const value = split.join("=").replace(/\+/g, " "); + formData.append( + decodeURIComponent(name), + decodeURIComponent(value), + ); + } + }); + } catch (e) { + throw new TypeError("Invalid form urlencoded format"); + } + return formData; + } else { + throw new TypeError("Invalid form data"); + } + } + + async text() { + if (typeof this._bodySource === "string") { + return this._bodySource; + } + + const ab = await this.arrayBuffer(); + const decoder = new TextDecoder("utf-8"); + return decoder.decode(ab); + } + + async json() { + const raw = await this.text(); + return JSON.parse(raw); + } + + arrayBuffer() { + if (this._bodySource instanceof ReadableStream) { + return bufferFromStream(this._bodySource.getReader(), this.#size); + } + return bodyToArrayBuffer(this._bodySource); + } + } + + function createHttpClient(options) { + return new HttpClient(opCreateHttpClient(options)); + } + + function opCreateHttpClient(args) { + return core.jsonOpSync("op_create_http_client", args); + } + + class HttpClient { + constructor(rid) { + this.rid = rid; + } + close() { + core.close(this.rid); + } + } + + function opFetch(args, body) { + let zeroCopy; + if (body != null) { + zeroCopy = new Uint8Array(body.buffer, body.byteOffset, body.byteLength); + } + + return core.jsonOpAsync("op_fetch", args, ...(zeroCopy ? [zeroCopy] : [])); + } + + const NULL_BODY_STATUS = [101, 204, 205, 304]; + const REDIRECT_STATUS = [301, 302, 303, 307, 308]; + + function byteUpperCase(s) { + return String(s).replace(/[a-z]/g, function byteUpperCaseReplace(c) { + return c.toUpperCase(); + }); + } + + function normalizeMethod(m) { + const u = byteUpperCase(m); + if ( + u === "DELETE" || + u === "GET" || + u === "HEAD" || + u === "OPTIONS" || + u === "POST" || + u === "PUT" + ) { + return u; + } + return m; + } + + class Request extends Body { + constructor(input, init) { + if (arguments.length < 1) { + throw TypeError("Not enough arguments"); + } + + if (!init) { + init = {}; + } + + let b; + + // prefer body from init + if (init.body) { + b = init.body; + } else if (input instanceof Request && input._bodySource) { + if (input.bodyUsed) { + throw TypeError(BodyUsedError); + } + b = input._bodySource; + } else if (typeof input === "object" && "body" in input && input.body) { + if (input.bodyUsed) { + throw TypeError(BodyUsedError); + } + b = input.body; + } else { + b = ""; + } + + let headers; + + // prefer headers from init + if (init.headers) { + headers = new Headers(init.headers); + } else if (input instanceof Request) { + headers = input.headers; + } else { + headers = new Headers(); + } + + const contentType = headers.get("content-type") || ""; + super(b, { contentType }); + this.headers = headers; + + // readonly attribute ByteString method; + this.method = "GET"; + + // readonly attribute USVString url; + this.url = ""; + + // readonly attribute RequestCredentials credentials; + this.credentials = "omit"; + + if (input instanceof Request) { + if (input.bodyUsed) { + throw TypeError(BodyUsedError); + } + this.method = input.method; + this.url = input.url; + this.headers = new Headers(input.headers); + this.credentials = 
input.credentials; + this._stream = input._stream; + } else if (typeof input === "string") { + this.url = input; + } + + if (init && "method" in init) { + this.method = normalizeMethod(init.method); + } + + if ( + init && + "credentials" in init && + init.credentials && + ["omit", "same-origin", "include"].indexOf(init.credentials) !== -1 + ) { + this.credentials = init.credentials; + } + } + + clone() { + if (this.bodyUsed) { + throw TypeError(BodyUsedError); + } + + const iterators = this.headers.entries(); + const headersList = []; + for (const header of iterators) { + headersList.push(header); + } + + let body2 = this._bodySource; + + if (this._bodySource instanceof ReadableStream) { + const tees = this._bodySource.tee(); + this._stream = this._bodySource = tees[0]; + body2 = tees[1]; + } + + return new Request(this.url, { + body: body2, + method: this.method, + headers: new Headers(headersList), + credentials: this.credentials, + }); + } + } + + const responseData = new WeakMap(); + class Response extends Body { + constructor(body = null, init) { + init = init ?? {}; + + if (typeof init !== "object") { + throw new TypeError(`'init' is not an object`); + } + + const extraInit = responseData.get(init) || {}; + let { type = "default", url = "" } = extraInit; + + let status = init.status === undefined ? 200 : Number(init.status || 0); + let statusText = init.statusText ?? ""; + let headers = init.headers instanceof Headers + ? init.headers + : new Headers(init.headers); + + if (init.status !== undefined && (status < 200 || status > 599)) { + throw new RangeError( + `The status provided (${init.status}) is outside the range [200, 599]`, + ); + } + + // null body status + if (body && NULL_BODY_STATUS.includes(status)) { + throw new TypeError("Response with null body status cannot have body"); + } + + if (!type) { + type = "default"; + } else { + if (type == "error") { + // spec: https://fetch.spec.whatwg.org/#concept-network-error + status = 0; + statusText = ""; + headers = new Headers(); + body = null; + /* spec for other Response types: + https://fetch.spec.whatwg.org/#concept-filtered-response-basic + Please note that type "basic" is not the same thing as "default".*/ + } else if (type == "basic") { + for (const h of headers) { + /* Forbidden Response-Header Names: + https://fetch.spec.whatwg.org/#forbidden-response-header-name */ + if (["set-cookie", "set-cookie2"].includes(h[0].toLowerCase())) { + headers.delete(h[0]); + } + } + } else if (type == "cors") { + /* CORS-safelisted Response-Header Names: + https://fetch.spec.whatwg.org/#cors-safelisted-response-header-name */ + const allowedHeaders = [ + "Cache-Control", + "Content-Language", + "Content-Length", + "Content-Type", + "Expires", + "Last-Modified", + "Pragma", + ].map((c) => c.toLowerCase()); + for (const h of headers) { + /* Technically this is still not standards compliant because we are + supposed to allow headers allowed in the + 'Access-Control-Expose-Headers' header in the 'internal response' + However, this implementation of response doesn't seem to have an + easy way to access the internal response, so we ignore that + header. + TODO(serverhiccups): change how internal responses are handled + so we can do this properly. 
*/ + if (!allowedHeaders.includes(h[0].toLowerCase())) { + headers.delete(h[0]); + } + } + /* TODO(serverhiccups): Once I fix the 'internal response' thing, + these actually need to treat the internal response differently */ + } else if (type == "opaque" || type == "opaqueredirect") { + url = ""; + status = 0; + statusText = ""; + headers = new Headers(); + body = null; + } + } + + const contentType = headers.get("content-type") || ""; + const size = Number(headers.get("content-length")) || undefined; + + super(body, { contentType, size }); + + this.url = url; + this.statusText = statusText; + this.status = extraInit.status || status; + this.headers = headers; + this.redirected = extraInit.redirected || false; + this.type = type; + } + + get ok() { + return 200 <= this.status && this.status < 300; + } + + clone() { + if (this.bodyUsed) { + throw TypeError(BodyUsedError); + } + + const iterators = this.headers.entries(); + const headersList = []; + for (const header of iterators) { + headersList.push(header); + } + + let resBody = this._bodySource; + + if (this._bodySource instanceof ReadableStream) { + const tees = this._bodySource.tee(); + this._stream = this._bodySource = tees[0]; + resBody = tees[1]; + } + + return new Response(resBody, { + status: this.status, + statusText: this.statusText, + headers: new Headers(headersList), + }); + } + + static redirect(url, status) { + if (![301, 302, 303, 307, 308].includes(status)) { + throw new RangeError( + "The redirection status must be one of 301, 302, 303, 307 and 308.", + ); + } + return new Response(null, { + status, + statusText: "", + headers: [["Location", typeof url === "string" ? url : url.toString()]], + }); + } + } + + function sendFetchReq(url, method, headers, body, clientRid) { + let headerArray = []; + if (headers) { + headerArray = Array.from(headers.entries()); + } + + const args = { + method, + url, + headers: headerArray, + clientRid, + }; + + return opFetch(args, body); + } + + async function fetch(input, init) { + let url; + let method = null; + let headers = null; + let body; + let clientRid = null; + let redirected = false; + let remRedirectCount = 20; // TODO: use a better way to handle + + if (typeof input === "string" || input instanceof URL) { + url = typeof input === "string" ? input : input.href; + if (init != null) { + method = init.method || null; + if (init.headers) { + headers = init.headers instanceof Headers + ? 
init.headers + : new Headers(init.headers); + } else { + headers = null; + } + + // ref: https://fetch.spec.whatwg.org/#body-mixin + // Body should have been a mixin + // but we are treating it as a separate class + if (init.body) { + if (!headers) { + headers = new Headers(); + } + let contentType = ""; + if (typeof init.body === "string") { + body = new TextEncoder().encode(init.body); + contentType = "text/plain;charset=UTF-8"; + } else if (isTypedArray(init.body)) { + body = init.body; + } else if (init.body instanceof ArrayBuffer) { + body = new Uint8Array(init.body); + } else if (init.body instanceof URLSearchParams) { + body = new TextEncoder().encode(init.body.toString()); + contentType = "application/x-www-form-urlencoded;charset=UTF-8"; + } else if (init.body instanceof Blob) { + body = init.body[bytesSymbol]; + contentType = init.body.type; + } else if (init.body instanceof FormData) { + let boundary; + if (headers.has("content-type")) { + const params = getHeaderValueParams("content-type"); + boundary = params.get("boundary"); + } + const multipartBuilder = new MultipartBuilder( + init.body, + boundary, + ); + body = multipartBuilder.getBody(); + contentType = multipartBuilder.getContentType(); + } else { + // TODO: ReadableStream + throw new Error("Not implemented"); + } + if (contentType && !headers.has("content-type")) { + headers.set("content-type", contentType); + } + } + + if (init.client instanceof HttpClient) { + clientRid = init.client.rid; + } + } + } else { + url = input.url; + method = input.method; + headers = input.headers; + + if (input._bodySource) { + body = new DataView(await input.arrayBuffer()); + } + } + + let responseBody; + let responseInit = {}; + while (remRedirectCount) { + const fetchResponse = await sendFetchReq( + url, + method, + headers, + body, + clientRid, + ); + const rid = fetchResponse.bodyRid; + + if ( + NULL_BODY_STATUS.includes(fetchResponse.status) || + REDIRECT_STATUS.includes(fetchResponse.status) + ) { + // We won't use body of received response, so close it now + // otherwise it will be kept in resource table. + core.close(fetchResponse.bodyRid); + responseBody = null; + } else { + responseBody = new ReadableStream({ + type: "bytes", + async pull(controller) { + try { + const result = await core.jsonOpAsync("op_fetch_read", { rid }); + if (!result || !result.chunk) { + controller.close(); + core.close(rid); + } else { + // TODO(ry) This is terribly inefficient. Make this zero-copy. 
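+                // The chunk comes back from op_fetch_read JSON-serialized as a
+                // plain array of byte values, so it has to be copied into a
+                // fresh Uint8Array before it can be enqueued (hence the TODO above).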
+ const chunk = new Uint8Array(result.chunk); + controller.enqueue(chunk); + } + } catch (e) { + controller.error(e); + controller.close(); + core.close(rid); + } + }, + cancel() { + // When reader.cancel() is called + core.close(rid); + }, + }); + } + + responseInit = { + status: 200, + statusText: fetchResponse.statusText, + headers: fetchResponse.headers, + }; + + responseData.set(responseInit, { + redirected, + rid: fetchResponse.bodyRid, + status: fetchResponse.status, + url, + }); + + const response = new Response(responseBody, responseInit); + + if (REDIRECT_STATUS.includes(fetchResponse.status)) { + // We're in a redirect status + switch ((init && init.redirect) || "follow") { + case "error": + responseInit = {}; + responseData.set(responseInit, { + type: "error", + redirected: false, + url: "", + }); + return new Response(null, responseInit); + case "manual": + responseInit = {}; + responseData.set(responseInit, { + type: "opaqueredirect", + redirected: false, + url: "", + }); + return new Response(null, responseInit); + case "follow": + default: + let redirectUrl = response.headers.get("Location"); + if (redirectUrl == null) { + return response; // Unspecified + } + if ( + !redirectUrl.startsWith("http://") && + !redirectUrl.startsWith("https://") + ) { + redirectUrl = new URL(redirectUrl, url).href; + } + url = redirectUrl; + redirected = true; + remRedirectCount--; + } + } else { + return response; + } + } + + responseData.set(responseInit, { + type: "error", + redirected: false, + url: "", + }); + + return new Response(null, responseInit); + } + + window.__bootstrap.fetch = { + Blob, + DomFile, + FormData, + fetch, + Request, + Response, + HttpClient, + createHttpClient, + }; +})(this); diff --git a/op_crates/fetch/Cargo.toml b/op_crates/fetch/Cargo.toml new file mode 100644 index 000000000..66c03ee37 --- /dev/null +++ b/op_crates/fetch/Cargo.toml @@ -0,0 +1,19 @@ +# Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +[package] +name = "deno_fetch" +version = "0.1.0" +edition = "2018" +description = "fetch Web API" +authors = ["the Deno authors"] +license = "MIT" +readme = "README.md" +repository = "https://github.com/denoland/deno" + +[lib] +path = "lib.rs" + +[dependencies] +deno_core = { version = "0.57.0", path = "../../core" } +reqwest = { version = "0.10.8", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli"] } +serde = { version = "1.0.116", features = ["derive"] }
\ No newline at end of file diff --git a/op_crates/fetch/lib.deno_fetch.d.ts b/op_crates/fetch/lib.deno_fetch.d.ts new file mode 100644 index 000000000..fcc2fc919 --- /dev/null +++ b/op_crates/fetch/lib.deno_fetch.d.ts @@ -0,0 +1,636 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +/* eslint-disable @typescript-eslint/no-unused-vars, @typescript-eslint/no-explicit-any, no-var */ + +/// <reference no-default-lib="true" /> +/// <reference lib="esnext" /> + +interface DomIterable<K, V> { + keys(): IterableIterator<K>; + values(): IterableIterator<V>; + entries(): IterableIterator<[K, V]>; + [Symbol.iterator](): IterableIterator<[K, V]>; + forEach( + callback: (value: V, key: K, parent: this) => void, + thisArg?: any, + ): void; +} + +interface ReadableStreamReadDoneResult<T> { + done: true; + value?: T; +} + +interface ReadableStreamReadValueResult<T> { + done: false; + value: T; +} + +type ReadableStreamReadResult<T> = + | ReadableStreamReadValueResult<T> + | ReadableStreamReadDoneResult<T>; + +interface ReadableStreamDefaultReader<R = any> { + readonly closed: Promise<void>; + cancel(reason?: any): Promise<void>; + read(): Promise<ReadableStreamReadResult<R>>; + releaseLock(): void; +} + +interface ReadableStreamReader<R = any> { + cancel(): Promise<void>; + read(): Promise<ReadableStreamReadResult<R>>; + releaseLock(): void; +} + +interface ReadableByteStreamControllerCallback { + (controller: ReadableByteStreamController): void | PromiseLike<void>; +} + +interface UnderlyingByteSource { + autoAllocateChunkSize?: number; + cancel?: ReadableStreamErrorCallback; + pull?: ReadableByteStreamControllerCallback; + start?: ReadableByteStreamControllerCallback; + type: "bytes"; +} + +interface UnderlyingSource<R = any> { + cancel?: ReadableStreamErrorCallback; + pull?: ReadableStreamDefaultControllerCallback<R>; + start?: ReadableStreamDefaultControllerCallback<R>; + type?: undefined; +} + +interface ReadableStreamErrorCallback { + (reason: any): void | PromiseLike<void>; +} + +interface ReadableStreamDefaultControllerCallback<R> { + (controller: ReadableStreamDefaultController<R>): void | PromiseLike<void>; +} + +interface ReadableStreamDefaultController<R = any> { + readonly desiredSize: number | null; + close(): void; + enqueue(chunk: R): void; + error(error?: any): void; +} + +interface ReadableByteStreamController { + readonly byobRequest: undefined; + readonly desiredSize: number | null; + close(): void; + enqueue(chunk: ArrayBufferView): void; + error(error?: any): void; +} + +interface PipeOptions { + preventAbort?: boolean; + preventCancel?: boolean; + preventClose?: boolean; + signal?: AbortSignal; +} + +interface QueuingStrategySizeCallback<T = any> { + (chunk: T): number; +} + +interface QueuingStrategy<T = any> { + highWaterMark?: number; + size?: QueuingStrategySizeCallback<T>; +} + +/** This Streams API interface provides a built-in byte length queuing strategy + * that can be used when constructing streams. */ +declare class CountQueuingStrategy implements QueuingStrategy { + constructor(options: { highWaterMark: number }); + highWaterMark: number; + size(chunk: any): 1; +} + +declare class ByteLengthQueuingStrategy + implements QueuingStrategy<ArrayBufferView> { + constructor(options: { highWaterMark: number }); + highWaterMark: number; + size(chunk: ArrayBufferView): number; +} + +/** This Streams API interface represents a readable stream of byte data. 
The + * Fetch API offers a concrete instance of a ReadableStream through the body + * property of a Response object. */ +interface ReadableStream<R = any> { + readonly locked: boolean; + cancel(reason?: any): Promise<void>; + getIterator(options?: { preventCancel?: boolean }): AsyncIterableIterator<R>; + // getReader(options: { mode: "byob" }): ReadableStreamBYOBReader; + getReader(): ReadableStreamDefaultReader<R>; + pipeThrough<T>( + { + writable, + readable, + }: { + writable: WritableStream<R>; + readable: ReadableStream<T>; + }, + options?: PipeOptions, + ): ReadableStream<T>; + pipeTo(dest: WritableStream<R>, options?: PipeOptions): Promise<void>; + tee(): [ReadableStream<R>, ReadableStream<R>]; + [Symbol.asyncIterator](options?: { + preventCancel?: boolean; + }): AsyncIterableIterator<R>; +} + +declare var ReadableStream: { + prototype: ReadableStream; + new ( + underlyingSource: UnderlyingByteSource, + strategy?: { highWaterMark?: number; size?: undefined }, + ): ReadableStream<Uint8Array>; + new <R = any>( + underlyingSource?: UnderlyingSource<R>, + strategy?: QueuingStrategy<R>, + ): ReadableStream<R>; +}; + +interface WritableStreamDefaultControllerCloseCallback { + (): void | PromiseLike<void>; +} + +interface WritableStreamDefaultControllerStartCallback { + (controller: WritableStreamDefaultController): void | PromiseLike<void>; +} + +interface WritableStreamDefaultControllerWriteCallback<W> { + (chunk: W, controller: WritableStreamDefaultController): + | void + | PromiseLike< + void + >; +} + +interface WritableStreamErrorCallback { + (reason: any): void | PromiseLike<void>; +} + +interface UnderlyingSink<W = any> { + abort?: WritableStreamErrorCallback; + close?: WritableStreamDefaultControllerCloseCallback; + start?: WritableStreamDefaultControllerStartCallback; + type?: undefined; + write?: WritableStreamDefaultControllerWriteCallback<W>; +} + +/** This Streams API interface provides a standard abstraction for writing + * streaming data to a destination, known as a sink. This object comes with + * built-in backpressure and queuing. */ +declare class WritableStream<W = any> { + constructor( + underlyingSink?: UnderlyingSink<W>, + strategy?: QueuingStrategy<W>, + ); + readonly locked: boolean; + abort(reason?: any): Promise<void>; + close(): Promise<void>; + getWriter(): WritableStreamDefaultWriter<W>; +} + +/** This Streams API interface represents a controller allowing control of a + * WritableStream's state. When constructing a WritableStream, the underlying + * sink is given a corresponding WritableStreamDefaultController instance to + * manipulate. */ +interface WritableStreamDefaultController { + error(error?: any): void; +} + +/** This Streams API interface is the object returned by + * WritableStream.getWriter() and once created locks the < writer to the + * WritableStream ensuring that no other streams can write to the underlying + * sink. 
*/ +interface WritableStreamDefaultWriter<W = any> { + readonly closed: Promise<void>; + readonly desiredSize: number | null; + readonly ready: Promise<void>; + abort(reason?: any): Promise<void>; + close(): Promise<void>; + releaseLock(): void; + write(chunk: W): Promise<void>; +} + +declare class TransformStream<I = any, O = any> { + constructor( + transformer?: Transformer<I, O>, + writableStrategy?: QueuingStrategy<I>, + readableStrategy?: QueuingStrategy<O>, + ); + readonly readable: ReadableStream<O>; + readonly writable: WritableStream<I>; +} + +interface TransformStreamDefaultController<O = any> { + readonly desiredSize: number | null; + enqueue(chunk: O): void; + error(reason?: any): void; + terminate(): void; +} + +interface Transformer<I = any, O = any> { + flush?: TransformStreamDefaultControllerCallback<O>; + readableType?: undefined; + start?: TransformStreamDefaultControllerCallback<O>; + transform?: TransformStreamDefaultControllerTransformCallback<I, O>; + writableType?: undefined; +} + +interface TransformStreamDefaultControllerCallback<O> { + (controller: TransformStreamDefaultController<O>): void | PromiseLike<void>; +} + +interface TransformStreamDefaultControllerTransformCallback<I, O> { + ( + chunk: I, + controller: TransformStreamDefaultController<O>, + ): void | PromiseLike<void>; +} + +type BlobPart = BufferSource | Blob | string; + +interface BlobPropertyBag { + type?: string; + ending?: "transparent" | "native"; +} + +/** A file-like object of immutable, raw data. Blobs represent data that isn't necessarily in a JavaScript-native format. The File interface is based on Blob, inheriting blob functionality and expanding it to support files on the user's system. */ +interface Blob { + readonly size: number; + readonly type: string; + arrayBuffer(): Promise<ArrayBuffer>; + slice(start?: number, end?: number, contentType?: string): Blob; + stream(): ReadableStream; + text(): Promise<string>; +} + +declare const Blob: { + prototype: Blob; + new (blobParts?: BlobPart[], options?: BlobPropertyBag): Blob; +}; + +interface FilePropertyBag extends BlobPropertyBag { + lastModified?: number; +} + +/** Provides information about files and allows JavaScript in a web page to + * access their content. */ +interface File extends Blob { + readonly lastModified: number; + readonly name: string; +} + +declare const File: { + prototype: File; + new (fileBits: BlobPart[], fileName: string, options?: FilePropertyBag): File; +}; + +type FormDataEntryValue = File | string; + +/** Provides a way to easily construct a set of key/value pairs representing + * form fields and their values, which can then be easily sent using the + * XMLHttpRequest.send() method. It uses the same format a form would use if the + * encoding type were set to "multipart/form-data". */ +interface FormData extends DomIterable<string, FormDataEntryValue> { + append(name: string, value: string | Blob, fileName?: string): void; + delete(name: string): void; + get(name: string): FormDataEntryValue | null; + getAll(name: string): FormDataEntryValue[]; + has(name: string): boolean; + set(name: string, value: string | Blob, fileName?: string): void; +} + +declare const FormData: { + prototype: FormData; + // TODO(ry) FormData constructor is non-standard. + // new(form?: HTMLFormElement): FormData; + new (): FormData; +}; + +interface Body { + /** A simple getter used to expose a `ReadableStream` of the body contents. 
*/ + readonly body: ReadableStream<Uint8Array> | null; + /** Stores a `Boolean` that declares whether the body has been used in a + * response yet. + */ + readonly bodyUsed: boolean; + /** Takes a `Response` stream and reads it to completion. It returns a promise + * that resolves with an `ArrayBuffer`. + */ + arrayBuffer(): Promise<ArrayBuffer>; + /** Takes a `Response` stream and reads it to completion. It returns a promise + * that resolves with a `Blob`. + */ + blob(): Promise<Blob>; + /** Takes a `Response` stream and reads it to completion. It returns a promise + * that resolves with a `FormData` object. + */ + formData(): Promise<FormData>; + /** Takes a `Response` stream and reads it to completion. It returns a promise + * that resolves with the result of parsing the body text as JSON. + */ + json(): Promise<any>; + /** Takes a `Response` stream and reads it to completion. It returns a promise + * that resolves with a `USVString` (text). + */ + text(): Promise<string>; +} + +type HeadersInit = Headers | string[][] | Record<string, string>; + +/** This Fetch API interface allows you to perform various actions on HTTP + * request and response headers. These actions include retrieving, setting, + * adding to, and removing. A Headers object has an associated header list, + * which is initially empty and consists of zero or more name and value pairs. + * You can add to this using methods like append() (see Examples.) In all + * methods of this interface, header names are matched by case-insensitive byte + * sequence. */ +interface Headers { + append(name: string, value: string): void; + delete(name: string): void; + get(name: string): string | null; + has(name: string): boolean; + set(name: string, value: string): void; + forEach( + callbackfn: (value: string, key: string, parent: Headers) => void, + thisArg?: any, + ): void; +} + +interface Headers extends DomIterable<string, string> { + /** Appends a new value onto an existing header inside a `Headers` object, or + * adds the header if it does not already exist. + */ + append(name: string, value: string): void; + /** Deletes a header from a `Headers` object. */ + delete(name: string): void; + /** Returns an iterator allowing to go through all key/value pairs + * contained in this Headers object. The both the key and value of each pairs + * are ByteString objects. + */ + entries(): IterableIterator<[string, string]>; + /** Returns a `ByteString` sequence of all the values of a header within a + * `Headers` object with a given name. + */ + get(name: string): string | null; + /** Returns a boolean stating whether a `Headers` object contains a certain + * header. + */ + has(name: string): boolean; + /** Returns an iterator allowing to go through all keys contained in + * this Headers object. The keys are ByteString objects. + */ + keys(): IterableIterator<string>; + /** Sets a new value for an existing header inside a Headers object, or adds + * the header if it does not already exist. + */ + set(name: string, value: string): void; + /** Returns an iterator allowing to go through all values contained in + * this Headers object. The values are ByteString objects. 
+ */ + values(): IterableIterator<string>; + forEach( + callbackfn: (value: string, key: string, parent: this) => void, + thisArg?: any, + ): void; + /** The Symbol.iterator well-known symbol specifies the default + * iterator for this Headers object + */ + [Symbol.iterator](): IterableIterator<[string, string]>; +} + +declare const Headers: { + prototype: Headers; + new (init?: HeadersInit): Headers; +}; + +type RequestInfo = Request | string; +type RequestCache = + | "default" + | "force-cache" + | "no-cache" + | "no-store" + | "only-if-cached" + | "reload"; +type RequestCredentials = "include" | "omit" | "same-origin"; +type RequestMode = "cors" | "navigate" | "no-cors" | "same-origin"; +type RequestRedirect = "error" | "follow" | "manual"; +type ReferrerPolicy = + | "" + | "no-referrer" + | "no-referrer-when-downgrade" + | "origin" + | "origin-when-cross-origin" + | "same-origin" + | "strict-origin" + | "strict-origin-when-cross-origin" + | "unsafe-url"; +type BodyInit = + | Blob + | BufferSource + | FormData + | URLSearchParams + | ReadableStream<Uint8Array> + | string; +type RequestDestination = + | "" + | "audio" + | "audioworklet" + | "document" + | "embed" + | "font" + | "image" + | "manifest" + | "object" + | "paintworklet" + | "report" + | "script" + | "sharedworker" + | "style" + | "track" + | "video" + | "worker" + | "xslt"; + +interface RequestInit { + /** + * A BodyInit object or null to set request's body. + */ + body?: BodyInit | null; + /** + * A string indicating how the request will interact with the browser's cache + * to set request's cache. + */ + cache?: RequestCache; + /** + * A string indicating whether credentials will be sent with the request + * always, never, or only when sent to a same-origin URL. Sets request's + * credentials. + */ + credentials?: RequestCredentials; + /** + * A Headers object, an object literal, or an array of two-item arrays to set + * request's headers. + */ + headers?: HeadersInit; + /** + * A cryptographic hash of the resource to be fetched by request. Sets + * request's integrity. + */ + integrity?: string; + /** + * A boolean to set request's keepalive. + */ + keepalive?: boolean; + /** + * A string to set request's method. + */ + method?: string; + /** + * A string to indicate whether the request will use CORS, or will be + * restricted to same-origin URLs. Sets request's mode. + */ + mode?: RequestMode; + /** + * A string indicating whether request follows redirects, results in an error + * upon encountering a redirect, or returns the redirect (in an opaque + * fashion). Sets request's redirect. + */ + redirect?: RequestRedirect; + /** + * A string whose value is a same-origin URL, "about:client", or the empty + * string, to set request's referrer. + */ + referrer?: string; + /** + * A referrer policy to set request's referrerPolicy. + */ + referrerPolicy?: ReferrerPolicy; + /** + * An AbortSignal to set request's signal. + */ + signal?: AbortSignal | null; + /** + * Can only be null. Used to disassociate request from any Window. + */ + window?: any; +} + +/** This Fetch API interface represents a resource request. */ +interface Request extends Body { + /** + * Returns the cache mode associated with request, which is a string + * indicating how the request will interact with the browser's cache when + * fetching. 
+ */ + readonly cache: RequestCache; + /** + * Returns the credentials mode associated with request, which is a string + * indicating whether credentials will be sent with the request always, never, + * or only when sent to a same-origin URL. + */ + readonly credentials: RequestCredentials; + /** + * Returns the kind of resource requested by request, e.g., "document" or "script". + */ + readonly destination: RequestDestination; + /** + * Returns a Headers object consisting of the headers associated with request. + * Note that headers added in the network layer by the user agent will not be + * accounted for in this object, e.g., the "Host" header. + */ + readonly headers: Headers; + /** + * Returns request's subresource integrity metadata, which is a cryptographic + * hash of the resource being fetched. Its value consists of multiple hashes + * separated by whitespace. [SRI] + */ + readonly integrity: string; + /** + * Returns a boolean indicating whether or not request is for a history + * navigation (a.k.a. back-forward navigation). + */ + readonly isHistoryNavigation: boolean; + /** + * Returns a boolean indicating whether or not request is for a reload + * navigation. + */ + readonly isReloadNavigation: boolean; + /** + * Returns a boolean indicating whether or not request can outlive the global + * in which it was created. + */ + readonly keepalive: boolean; + /** + * Returns request's HTTP method, which is "GET" by default. + */ + readonly method: string; + /** + * Returns the mode associated with request, which is a string indicating + * whether the request will use CORS, or will be restricted to same-origin + * URLs. + */ + readonly mode: RequestMode; + /** + * Returns the redirect mode associated with request, which is a string + * indicating how redirects for the request will be handled during fetching. A + * request will follow redirects by default. + */ + readonly redirect: RequestRedirect; + /** + * Returns the referrer of request. Its value can be a same-origin URL if + * explicitly set in init, the empty string to indicate no referrer, and + * "about:client" when defaulting to the global's default. This is used during + * fetching to determine the value of the `Referer` header of the request + * being made. + */ + readonly referrer: string; + /** + * Returns the referrer policy associated with request. This is used during + * fetching to compute the value of the request's referrer. + */ + readonly referrerPolicy: ReferrerPolicy; + /** + * Returns the signal associated with request, which is an AbortSignal object + * indicating whether or not request has been aborted, and its abort event + * handler. + */ + readonly signal: AbortSignal; + /** + * Returns the URL of request as a string. + */ + readonly url: string; + clone(): Request; +} + +declare const Request: { + prototype: Request; + new (input: RequestInfo, init?: RequestInit): Request; +}; + +declare const Response: { + prototype: Response; + new (body?: BodyInit | null, init?: ResponseInit): Response; + error(): Response; + redirect(url: string, status?: number): Response; +}; + +/** Fetch a resource from the network. It returns a Promise that resolves to the + * Response to that request, whether it is successful or not. + * + * const response = await fetch("http://my.json.host/data.json"); + * console.log(response.status); // e.g. 200 + * console.log(response.statusText); // e.g. 
"OK" + * const jsonData = await response.json(); + */ +declare function fetch( + input: Request | URL | string, + init?: RequestInit, +): Promise<Response>; diff --git a/op_crates/fetch/lib.rs b/op_crates/fetch/lib.rs new file mode 100644 index 000000000..e386431b5 --- /dev/null +++ b/op_crates/fetch/lib.rs @@ -0,0 +1,266 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +use deno_core::error::bad_resource_id; +use deno_core::error::type_error; +use deno_core::error::AnyError; +use deno_core::futures; +use deno_core::js_check; +use deno_core::serde_json; +use deno_core::serde_json::json; +use deno_core::serde_json::Value; +use deno_core::url; +use deno_core::url::Url; +use deno_core::BufVec; +use deno_core::JsRuntime; +use deno_core::OpState; +use deno_core::ZeroCopyBuf; +use reqwest::header::HeaderMap; +use reqwest::header::HeaderName; +use reqwest::header::HeaderValue; +use reqwest::header::USER_AGENT; +use reqwest::redirect::Policy; +use reqwest::Client; +use reqwest::Method; +use reqwest::Response; +use serde::Deserialize; +use std::cell::RefCell; +use std::convert::From; +use std::fs::File; +use std::io::Read; +use std::path::Path; +use std::path::PathBuf; +use std::rc::Rc; + +pub fn init(isolate: &mut JsRuntime) { + let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR")); + let files = vec![ + manifest_dir.join("01_fetch_util.js"), + manifest_dir.join("03_dom_iterable.js"), + manifest_dir.join("11_streams.js"), + manifest_dir.join("20_headers.js"), + manifest_dir.join("26_fetch.js"), + ]; + // TODO(nayeemrmn): https://github.com/rust-lang/cargo/issues/3946 to get the + // workspace root. + let display_root = manifest_dir.parent().unwrap().parent().unwrap(); + for file in files { + println!("cargo:rerun-if-changed={}", file.display()); + let display_path = file.strip_prefix(display_root).unwrap(); + let display_path_str = display_path.display().to_string(); + js_check(isolate.execute( + &("deno:".to_string() + &display_path_str.replace('\\', "/")), + &std::fs::read_to_string(&file).unwrap(), + )); + } +} + +pub trait FetchPermissions { + fn check_net_url(&self, url: &Url) -> Result<(), AnyError>; + fn check_read(&self, p: &PathBuf) -> Result<(), AnyError>; +} + +pub fn get_declaration() -> PathBuf { + PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("lib.deno_fetch.d.ts") +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct FetchArgs { + method: Option<String>, + url: String, + headers: Vec<(String, String)>, + client_rid: Option<u32>, +} + +pub async fn op_fetch<FP>( + state: Rc<RefCell<OpState>>, + args: Value, + data: BufVec, +) -> Result<Value, AnyError> +where + FP: FetchPermissions + 'static, +{ + let args: FetchArgs = serde_json::from_value(args)?; + let url = args.url; + + let client = if let Some(rid) = args.client_rid { + let state_ = state.borrow(); + let r = state_ + .resource_table + .get::<HttpClientResource>(rid) + .ok_or_else(bad_resource_id)?; + r.client.clone() + } else { + let state_ = state.borrow(); + let client = state_.borrow::<reqwest::Client>(); + client.clone() + }; + + let method = match args.method { + Some(method_str) => Method::from_bytes(method_str.as_bytes())?, + None => Method::GET, + }; + + let url_ = url::Url::parse(&url)?; + + // Check scheme before asking for net permission + let scheme = url_.scheme(); + if scheme != "http" && scheme != "https" { + return Err(type_error(format!("scheme '{}' not supported", scheme))); + } + + { + let state_ = state.borrow(); + // TODO(ry) The Rc below is a hack 
because we store Rc<CliState> in OpState. + // Ideally it could be removed. + let permissions = state_.borrow::<Rc<FP>>(); + permissions.check_net_url(&url_)?; + } + + let mut request = client.request(method, url_); + + match data.len() { + 0 => {} + 1 => request = request.body(Vec::from(&*data[0])), + _ => panic!("Invalid number of arguments"), + } + + for (key, value) in args.headers { + let name = HeaderName::from_bytes(key.as_bytes()).unwrap(); + let v = HeaderValue::from_str(&value).unwrap(); + request = request.header(name, v); + } + //debug!("Before fetch {}", url); + + let res = request.send().await?; + + //debug!("Fetch response {}", url); + let status = res.status(); + let mut res_headers = Vec::new(); + for (key, val) in res.headers().iter() { + res_headers.push((key.to_string(), val.to_str().unwrap().to_owned())); + } + + let rid = state + .borrow_mut() + .resource_table + .add("httpBody", Box::new(res)); + + Ok(json!({ + "bodyRid": rid, + "status": status.as_u16(), + "statusText": status.canonical_reason().unwrap_or(""), + "headers": res_headers + })) +} + +pub async fn op_fetch_read( + state: Rc<RefCell<OpState>>, + args: Value, + _data: BufVec, +) -> Result<Value, AnyError> { + #[derive(Deserialize)] + #[serde(rename_all = "camelCase")] + struct Args { + rid: u32, + } + + let args: Args = serde_json::from_value(args)?; + let rid = args.rid; + + use futures::future::poll_fn; + use futures::ready; + use futures::FutureExt; + let f = poll_fn(move |cx| { + let mut state = state.borrow_mut(); + let response = state + .resource_table + .get_mut::<Response>(rid as u32) + .ok_or_else(bad_resource_id)?; + + let mut chunk_fut = response.chunk().boxed_local(); + let r = ready!(chunk_fut.poll_unpin(cx))?; + if let Some(chunk) = r { + Ok(json!({ "chunk": &*chunk })).into() + } else { + Ok(json!({ "chunk": null })).into() + } + }); + f.await + /* + // I'm programming this as I want it to be programmed, even though it might be + // incorrect, normally we would use poll_fn here. We need to make this await pattern work. + let chunk = response.chunk().await?; + if let Some(chunk) = chunk { + // TODO(ry) This is terribly inefficient. Make this zero-copy. + Ok(json!({ "chunk": &*chunk })) + } else { + Ok(json!({ "chunk": null })) + } + */ +} + +struct HttpClientResource { + client: Client, +} + +impl HttpClientResource { + fn new(client: Client) -> Self { + Self { client } + } +} + +pub fn op_create_http_client<FP>( + state: &mut OpState, + args: Value, + _zero_copy: &mut [ZeroCopyBuf], +) -> Result<Value, AnyError> +where + FP: FetchPermissions + 'static, +{ + #[derive(Deserialize, Default, Debug)] + #[serde(rename_all = "camelCase")] + #[serde(default)] + struct CreateHttpClientOptions { + ca_file: Option<String>, + } + + let args: CreateHttpClientOptions = serde_json::from_value(args)?; + + if let Some(ca_file) = args.ca_file.clone() { + // TODO(ry) The Rc below is a hack because we store Rc<CliState> in OpState. + // Ideally it could be removed. + let permissions = state.borrow::<Rc<FP>>(); + permissions.check_read(&PathBuf::from(ca_file))?; + } + + let client = create_http_client(args.ca_file.as_deref()).unwrap(); + + let rid = state + .resource_table + .add("httpClient", Box::new(HttpClientResource::new(client))); + Ok(json!(rid)) +} + +/// Create new instance of async reqwest::Client. This client supports +/// proxies and doesn't follow redirects. 
+fn create_http_client(ca_file: Option<&str>) -> Result<Client, AnyError> {
+  let mut headers = HeaderMap::new();
+  // TODO(ry) set the version correctly.
+  headers.insert(USER_AGENT, format!("Deno/{}", "x.x.x").parse().unwrap());
+  let mut builder = Client::builder()
+    .redirect(Policy::none())
+    .default_headers(headers)
+    .use_rustls_tls();
+
+  if let Some(ca_file) = ca_file {
+    let mut buf = Vec::new();
+    File::open(ca_file)?.read_to_end(&mut buf)?;
+    let cert = reqwest::Certificate::from_pem(&buf)?;
+    builder = builder.add_root_certificate(cert);
+  }
+
+  builder
+    .build()
+    .map_err(|_| deno_core::error::generic_error("Unable to build http client"))
+}
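Taken together, the JavaScript and Rust halves above are consumed roughly as in the sketch below. This is illustrative only and not part of the diff: it assumes the embedding runtime re-exports __bootstrap.fetch.fetch as the global fetch and createHttpClient as Deno.createHttpClient (Deno's unstable surface), and the CA path and URLs are placeholders.

// Build a client backed by op_create_http_client; `caFile` matches the
// camelCase form of CreateHttpClientOptions::ca_file in lib.rs.
const client = Deno.createHttpClient({ caFile: "./my_ca.pem" });

// A FormData body goes through MultipartBuilder, which also supplies the
// multipart/form-data content-type and boundary.
const form = new FormData();
form.append("field", "value");

const res = await fetch("https://example.com/upload", {
  method: "POST",
  body: form,
  client, // routes the request through the reqwest::Client created above
});

console.log(res.status, res.headers.get("content-type"));
console.log(await res.text()); // drains the ReadableStream fed by op_fetch_read

client.close(); // releases the "httpClient" entry in the resource table

Passing client is what sets clientRid in fetch(), so op_fetch looks up the stored HttpClientResource by resource id instead of borrowing the default reqwest::Client from OpState.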