summaryrefslogtreecommitdiff
path: root/extensions/web
diff options
context:
space:
mode:
Diffstat (limited to 'extensions/web')
-rw-r--r--extensions/web/09_file.js414
-rw-r--r--extensions/web/10_filereader.js415
-rw-r--r--extensions/web/11_blob_url.js62
-rw-r--r--extensions/web/Cargo.toml1
-rw-r--r--extensions/web/README.md3
-rw-r--r--extensions/web/internal.d.ts10
-rw-r--r--extensions/web/lib.deno_web.d.ts36
-rw-r--r--extensions/web/lib.rs101
8 files changed, 1039 insertions, 3 deletions
diff --git a/extensions/web/09_file.js b/extensions/web/09_file.js
new file mode 100644
index 000000000..403bbee35
--- /dev/null
+++ b/extensions/web/09_file.js
@@ -0,0 +1,414 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+// @ts-check
+/// <reference no-default-lib="true" />
+/// <reference path="../../core/lib.deno_core.d.ts" />
+/// <reference path="../webidl/internal.d.ts" />
+/// <reference path="../web/internal.d.ts" />
+/// <reference path="../web/lib.deno_web.d.ts" />
+/// <reference path="./internal.d.ts" />
+/// <reference lib="esnext" />
+"use strict";
+
+((window) => {
+ const core = window.Deno.core;
+ const webidl = window.__bootstrap.webidl;
+
+ // TODO(lucacasonato): this needs to not be hardcoded and instead depend on
+ // host os.
+ const isWindows = false;
+
+ /**
+ * @param {string} input
+ * @param {number} position
+ * @returns {{result: string, position: number}}
+ */
+ function collectCodepointsNotCRLF(input, position) {
+ // See https://w3c.github.io/FileAPI/#convert-line-endings-to-native and
+ // https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points
+ const start = position;
+ for (
+ let c = input.charAt(position);
+ position < input.length && !(c === "\r" || c === "\n");
+ c = input.charAt(++position)
+ );
+ return { result: input.slice(start, position), position };
+ }
+
+ /**
+ * @param {string} s
+ * @returns {string}
+ */
+ function convertLineEndingsToNative(s) {
+ const nativeLineEnding = isWindows ? "\r\n" : "\n";
+
+ let { result, position } = collectCodepointsNotCRLF(s, 0);
+
+ while (position < s.length) {
+ const codePoint = s.charAt(position);
+ if (codePoint === "\r") {
+ result += nativeLineEnding;
+ position++;
+ if (position < s.length && s.charAt(position) === "\n") {
+ position++;
+ }
+ } else if (codePoint === "\n") {
+ position++;
+ result += nativeLineEnding;
+ }
+ const { result: token, position: newPosition } = collectCodepointsNotCRLF(
+ s,
+ position,
+ );
+ position = newPosition;
+ result += token;
+ }
+
+ return result;
+ }
+
+ /**
+ * @param {...Uint8Array} bytesArrays
+ * @returns {Uint8Array}
+ */
+ function concatUint8Arrays(...bytesArrays) {
+ let byteLength = 0;
+ for (const bytes of bytesArrays) {
+ byteLength += bytes.byteLength;
+ }
+ const finalBytes = new Uint8Array(byteLength);
+ let current = 0;
+ for (const bytes of bytesArrays) {
+ finalBytes.set(bytes, current);
+ current += bytes.byteLength;
+ }
+ return finalBytes;
+ }
+
+ /** @typedef {BufferSource | Blob | string} BlobPart */
+
+ /**
+ * @param {BlobPart[]} parts
+ * @param {string} endings
+ * @returns {Uint8Array}
+ */
+ function processBlobParts(parts, endings) {
+ /** @type {Uint8Array[]} */
+ const bytesArrays = [];
+ for (const element of parts) {
+ if (element instanceof ArrayBuffer) {
+ bytesArrays.push(new Uint8Array(element.slice(0)));
+ } else if (ArrayBuffer.isView(element)) {
+ const buffer = element.buffer.slice(
+ element.byteOffset,
+ element.byteOffset + element.byteLength,
+ );
+ bytesArrays.push(new Uint8Array(buffer));
+ } else if (element instanceof Blob) {
+ bytesArrays.push(
+ new Uint8Array(element[_byteSequence].buffer.slice(0)),
+ );
+ } else if (typeof element === "string") {
+ let s = element;
+ if (endings == "native") {
+ s = convertLineEndingsToNative(s);
+ }
+ bytesArrays.push(core.encode(s));
+ } else {
+ throw new TypeError("Unreachable code (invalid element type)");
+ }
+ }
+ return concatUint8Arrays(...bytesArrays);
+ }
+
+ /**
+ * @param {string} str
+ * @returns {string}
+ */
+ function normalizeType(str) {
+ let normalizedType = str;
+ if (!/^[\x20-\x7E]*$/.test(str)) {
+ normalizedType = "";
+ }
+ return normalizedType.toLowerCase();
+ }
+
+ const _byteSequence = Symbol("[[ByteSequence]]");
+
+ class Blob {
+ get [Symbol.toStringTag]() {
+ return "Blob";
+ }
+
+ /** @type {string} */
+ #type;
+
+ /** @type {Uint8Array} */
+ [_byteSequence];
+
+ /**
+ * @param {BlobPart[]} blobParts
+ * @param {BlobPropertyBag} options
+ */
+ constructor(blobParts = [], options = {}) {
+ const prefix = "Failed to construct 'Blob'";
+ blobParts = webidl.converters["sequence<BlobPart>"](blobParts, {
+ context: "Argument 1",
+ prefix,
+ });
+ options = webidl.converters["BlobPropertyBag"](options, {
+ context: "Argument 2",
+ prefix,
+ });
+
+ this[webidl.brand] = webidl.brand;
+
+ /** @type {Uint8Array} */
+ this[_byteSequence] = processBlobParts(
+ blobParts,
+ options.endings,
+ );
+ this.#type = normalizeType(options.type);
+ }
+
+ /** @returns {number} */
+ get size() {
+ webidl.assertBranded(this, Blob);
+ return this[_byteSequence].byteLength;
+ }
+
+ /** @returns {string} */
+ get type() {
+ webidl.assertBranded(this, Blob);
+ return this.#type;
+ }
+
+ /**
+ * @param {number} [start]
+ * @param {number} [end]
+ * @param {string} [contentType]
+ * @returns {Blob}
+ */
+ slice(start, end, contentType) {
+ webidl.assertBranded(this, Blob);
+ const prefix = "Failed to execute 'slice' on 'Blob'";
+ if (start !== undefined) {
+ start = webidl.converters["long long"](start, {
+ clamp: true,
+ context: "Argument 1",
+ prefix,
+ });
+ }
+ if (end !== undefined) {
+ end = webidl.converters["long long"](end, {
+ clamp: true,
+ context: "Argument 2",
+ prefix,
+ });
+ }
+ if (contentType !== undefined) {
+ contentType = webidl.converters["DOMString"](contentType, {
+ context: "Argument 3",
+ prefix,
+ });
+ }
+
+ // deno-lint-ignore no-this-alias
+ const O = this;
+ /** @type {number} */
+ let relativeStart;
+ if (start === undefined) {
+ relativeStart = 0;
+ } else {
+ if (start < 0) {
+ relativeStart = Math.max(O.size + start, 0);
+ } else {
+ relativeStart = Math.min(start, O.size);
+ }
+ }
+ /** @type {number} */
+ let relativeEnd;
+ if (end === undefined) {
+ relativeEnd = O.size;
+ } else {
+ if (end < 0) {
+ relativeEnd = Math.max(O.size + end, 0);
+ } else {
+ relativeEnd = Math.min(end, O.size);
+ }
+ }
+ /** @type {string} */
+ let relativeContentType;
+ if (contentType === undefined) {
+ relativeContentType = "";
+ } else {
+ relativeContentType = normalizeType(contentType);
+ }
+ return new Blob([
+ O[_byteSequence].buffer.slice(relativeStart, relativeEnd),
+ ], { type: relativeContentType });
+ }
+
+ /**
+ * @returns {ReadableStream<Uint8Array>}
+ */
+ stream() {
+ webidl.assertBranded(this, Blob);
+ const bytes = this[_byteSequence];
+ const stream = new ReadableStream({
+ type: "bytes",
+ /** @param {ReadableByteStreamController} controller */
+ start(controller) {
+ const chunk = new Uint8Array(bytes.buffer.slice(0));
+ if (chunk.byteLength > 0) controller.enqueue(chunk);
+ controller.close();
+ },
+ });
+ return stream;
+ }
+
+ /**
+ * @returns {Promise<string>}
+ */
+ async text() {
+ webidl.assertBranded(this, Blob);
+ const buffer = await this.arrayBuffer();
+ return core.decode(new Uint8Array(buffer));
+ }
+
+ /**
+ * @returns {Promise<ArrayBuffer>}
+ */
+ async arrayBuffer() {
+ webidl.assertBranded(this, Blob);
+ const stream = this.stream();
+ let bytes = new Uint8Array();
+ for await (const chunk of stream) {
+ bytes = concatUint8Arrays(bytes, chunk);
+ }
+ return bytes.buffer;
+ }
+ }
+
+ webidl.configurePrototype(Blob);
+
+ webidl.converters["Blob"] = webidl.createInterfaceConverter("Blob", Blob);
+ webidl.converters["BlobPart"] = (V, opts) => {
+ // Union for ((ArrayBuffer or ArrayBufferView) or Blob or USVString)
+ if (typeof V == "object") {
+ if (V instanceof Blob) {
+ return webidl.converters["Blob"](V, opts);
+ }
+ if (V instanceof ArrayBuffer || V instanceof SharedArrayBuffer) {
+ return webidl.converters["ArrayBuffer"](V, opts);
+ }
+ if (ArrayBuffer.isView(V)) {
+ return webidl.converters["ArrayBufferView"](V, opts);
+ }
+ }
+ return webidl.converters["USVString"](V, opts);
+ };
+ webidl.converters["sequence<BlobPart>"] = webidl.createSequenceConverter(
+ webidl.converters["BlobPart"],
+ );
+ webidl.converters["EndingType"] = webidl.createEnumConverter("EndingType", [
+ "transparent",
+ "native",
+ ]);
+ const blobPropertyBagDictionary = [
+ {
+ key: "type",
+ converter: webidl.converters["DOMString"],
+ defaultValue: "",
+ },
+ {
+ key: "endings",
+ converter: webidl.converters["EndingType"],
+ defaultValue: "transparent",
+ },
+ ];
+ webidl.converters["BlobPropertyBag"] = webidl.createDictionaryConverter(
+ "BlobPropertyBag",
+ blobPropertyBagDictionary,
+ );
+
+ const _Name = Symbol("[[Name]]");
+ const _LastModfied = Symbol("[[LastModified]]");
+
+ class File extends Blob {
+ get [Symbol.toStringTag]() {
+ return "File";
+ }
+
+ /** @type {string} */
+ [_Name];
+ /** @type {number} */
+ [_LastModfied];
+
+ /**
+ * @param {BlobPart[]} fileBits
+ * @param {string} fileName
+ * @param {FilePropertyBag} options
+ */
+ constructor(fileBits, fileName, options = {}) {
+ const prefix = "Failed to construct 'File'";
+ webidl.requiredArguments(arguments.length, 2, { prefix });
+
+ fileBits = webidl.converters["sequence<BlobPart>"](fileBits, {
+ context: "Argument 1",
+ prefix,
+ });
+ fileName = webidl.converters["USVString"](fileName, {
+ context: "Argument 2",
+ prefix,
+ });
+ options = webidl.converters["FilePropertyBag"](options, {
+ context: "Argument 3",
+ prefix,
+ });
+
+ super(fileBits, options);
+
+ /** @type {string} */
+ this[_Name] = fileName;
+ if (options.lastModified === undefined) {
+ /** @type {number} */
+ this[_LastModfied] = new Date().getTime();
+ } else {
+ /** @type {number} */
+ this[_LastModfied] = options.lastModified;
+ }
+ }
+
+ /** @returns {string} */
+ get name() {
+ webidl.assertBranded(this, File);
+ return this[_Name];
+ }
+
+ /** @returns {number} */
+ get lastModified() {
+ webidl.assertBranded(this, File);
+ return this[_LastModfied];
+ }
+ }
+
+ webidl.configurePrototype(File);
+
+ webidl.converters["FilePropertyBag"] = webidl.createDictionaryConverter(
+ "FilePropertyBag",
+ blobPropertyBagDictionary,
+ [
+ {
+ key: "lastModified",
+ converter: webidl.converters["long long"],
+ },
+ ],
+ );
+
+ window.__bootstrap.file = {
+ Blob,
+ _byteSequence,
+ File,
+ };
+})(this);
diff --git a/extensions/web/10_filereader.js b/extensions/web/10_filereader.js
new file mode 100644
index 000000000..b8bb6172a
--- /dev/null
+++ b/extensions/web/10_filereader.js
@@ -0,0 +1,415 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+// @ts-check
+/// <reference no-default-lib="true" />
+/// <reference path="../../core/lib.deno_core.d.ts" />
+/// <reference path="../webidl/internal.d.ts" />
+/// <reference path="../web/internal.d.ts" />
+/// <reference path="../web/lib.deno_web.d.ts" />
+/// <reference path="./internal.d.ts" />
+/// <reference lib="esnext" />
+
+"use strict";
+
+((window) => {
+ const webidl = window.__bootstrap.webidl;
+ const { forgivingBase64Encode } = window.__bootstrap.infra;
+ const { decode, TextDecoder } = window.__bootstrap.encoding;
+ const { parseMimeType } = window.__bootstrap.mimesniff;
+
+ const state = Symbol("[[state]]");
+ const result = Symbol("[[result]]");
+ const error = Symbol("[[error]]");
+ const aborted = Symbol("[[aborted]]");
+
+ class FileReader extends EventTarget {
+ get [Symbol.toStringTag]() {
+ return "FileReader";
+ }
+
+ /** @type {"empty" | "loading" | "done"} */
+ [state] = "empty";
+ /** @type {null | string | ArrayBuffer} */
+ [result] = null;
+ /** @type {null | DOMException} */
+ [error] = null;
+
+ [aborted] = false;
+
+ /**
+ * @param {Blob} blob
+ * @param {{kind: "ArrayBuffer" | "Text" | "DataUrl" | "BinaryString", encoding?: string}} readtype
+ */
+ #readOperation(blob, readtype) {
+ // 1. If fr’s state is "loading", throw an InvalidStateError DOMException.
+ if (this[state] === "loading") {
+ throw new DOMException(
+ "Invalid FileReader state.",
+ "InvalidStateError",
+ );
+ }
+ // 2. Set fr’s state to "loading".
+ this[state] = "loading";
+ // 3. Set fr’s result to null.
+ this[result] = null;
+ // 4. Set fr’s error to null.
+ this[error] = null;
+
+ // 5. Let stream be the result of calling get stream on blob.
+ const stream /*: ReadableStream<ArrayBufferView>*/ = blob.stream();
+
+ // 6. Let reader be the result of getting a reader from stream.
+ const reader = stream.getReader();
+
+ // 7. Let bytes be an empty byte sequence.
+ /** @type {Uint8Array[]} */
+ const chunks = [];
+
+ // 8. Let chunkPromise be the result of reading a chunk from stream with reader.
+ let chunkPromise = reader.read();
+
+ // 9. Let isFirstChunk be true.
+ let isFirstChunk = true;
+
+ // 10 in parallel while true
+ (async () => {
+ while (!this[aborted]) {
+ // 1. Wait for chunkPromise to be fulfilled or rejected.
+ try {
+ const chunk = await chunkPromise;
+ if (this[aborted]) return;
+
+ // 2. If chunkPromise is fulfilled, and isFirstChunk is true, queue a task to fire a progress event called loadstart at fr.
+ if (isFirstChunk) {
+ // TODO(lucacasonato): this is wrong, should be HTML "queue a task"
+ queueMicrotask(() => {
+ if (this[aborted]) return;
+ // fire a progress event for loadstart
+ const ev = new ProgressEvent("loadstart", {});
+ this.dispatchEvent(ev);
+ });
+ }
+ // 3. Set isFirstChunk to false.
+ isFirstChunk = false;
+
+ // 4. If chunkPromise is fulfilled with an object whose done property is false
+ // and whose value property is a Uint8Array object, run these steps:
+ if (!chunk.done && chunk.value instanceof Uint8Array) {
+ chunks.push(chunk.value);
+
+ // TODO(bartlomieju): (only) If roughly 50ms have passed since last progress
+ {
+ const size = chunks.reduce((p, i) => p + i.byteLength, 0);
+ const ev = new ProgressEvent("progress", {
+ loaded: size,
+ });
+ // TODO(lucacasonato): this is wrong, should be HTML "queue a task"
+ queueMicrotask(() => {
+ if (this[aborted]) return;
+ this.dispatchEvent(ev);
+ });
+ }
+
+ chunkPromise = reader.read();
+ } // 5 Otherwise, if chunkPromise is fulfilled with an object whose done property is true, queue a task to run the following steps and abort this algorithm:
+ else if (chunk.done === true) {
+ // TODO(lucacasonato): this is wrong, should be HTML "queue a task"
+ queueMicrotask(() => {
+ if (this[aborted]) return;
+ // 1. Set fr’s state to "done".
+ this[state] = "done";
+ // 2. Let result be the result of package data given bytes, type, blob’s type, and encodingName.
+ const size = chunks.reduce((p, i) => p + i.byteLength, 0);
+ const bytes = new Uint8Array(size);
+ let offs = 0;
+ for (const chunk of chunks) {
+ bytes.set(chunk, offs);
+ offs += chunk.byteLength;
+ }
+ switch (readtype.kind) {
+ case "ArrayBuffer": {
+ this[result] = bytes.buffer;
+ break;
+ }
+ case "BinaryString":
+ this[result] = [...new Uint8Array(bytes.buffer)].map((v) =>
+ String.fromCodePoint(v)
+ ).join("");
+ break;
+ case "Text": {
+ let decoder = undefined;
+ if (readtype.encoding) {
+ try {
+ decoder = new TextDecoder(readtype.encoding);
+ } catch {
+ // don't care about the error
+ }
+ }
+ if (decoder === undefined) {
+ const mimeType = parseMimeType(blob.type);
+ if (mimeType) {
+ const charset = mimeType.parameters.get("charset");
+ if (charset) {
+ try {
+ decoder = new TextDecoder(charset);
+ } catch {
+ // don't care about the error
+ }
+ }
+ }
+ }
+ if (decoder === undefined) {
+ decoder = new TextDecoder();
+ }
+ this[result] = decode(bytes, decoder.encoding);
+ break;
+ }
+ case "DataUrl": {
+ const mediaType = blob.type || "application/octet-stream";
+ this[result] = `data:${mediaType};base64,${
+ forgivingBase64Encode(bytes)
+ }`;
+ break;
+ }
+ }
+ // 4.2 Fire a progress event called load at the fr.
+ {
+ const ev = new ProgressEvent("load", {
+ lengthComputable: true,
+ loaded: size,
+ total: size,
+ });
+ this.dispatchEvent(ev);
+ }
+
+ // 5. If fr’s state is not "loading", fire a progress event called loadend at the fr.
+ //Note: Event handler for the load or error events could have started another load, if that happens the loadend event for this load is not fired.
+ if (this[state] !== "loading") {
+ const ev = new ProgressEvent("loadend", {
+ lengthComputable: true,
+ loaded: size,
+ total: size,
+ });
+ this.dispatchEvent(ev);
+ }
+ });
+ break;
+ }
+ } catch (err) {
+ // TODO(lucacasonato): this is wrong, should be HTML "queue a task"
+ queueMicrotask(() => {
+ if (this[aborted]) return;
+
+ // chunkPromise rejected
+ this[state] = "done";
+ this[error] = err;
+
+ {
+ const ev = new ProgressEvent("error", {});
+ this.dispatchEvent(ev);
+ }
+
+ //If fr’s state is not "loading", fire a progress event called loadend at fr.
+ //Note: Event handler for the error event could have started another load, if that happens the loadend event for this load is not fired.
+ if (this[state] !== "loading") {
+ const ev = new ProgressEvent("loadend", {});
+ this.dispatchEvent(ev);
+ }
+ });
+ break;
+ }
+ }
+ })();
+ }
+
+ constructor() {
+ super();
+ this[webidl.brand] = webidl.brand;
+ }
+
+ /** @returns {number} */
+ get readyState() {
+ webidl.assertBranded(this, FileReader);
+ switch (this[state]) {
+ case "empty":
+ return FileReader.EMPTY;
+ case "loading":
+ return FileReader.LOADING;
+ case "done":
+ return FileReader.DONE;
+ default:
+ throw new TypeError("Invalid state");
+ }
+ }
+
+ get result() {
+ webidl.assertBranded(this, FileReader);
+ return this[result];
+ }
+
+ get error() {
+ webidl.assertBranded(this, FileReader);
+ return this[error];
+ }
+
+ abort() {
+ webidl.assertBranded(this, FileReader);
+ // If context object's state is "empty" or if context object's state is "done" set context object's result to null and terminate this algorithm.
+ if (
+ this[state] === "empty" ||
+ this[state] === "done"
+ ) {
+ this[result] = null;
+ return;
+ }
+ // If context object's state is "loading" set context object's state to "done" and set context object's result to null.
+ if (this[state] === "loading") {
+ this[state] = "done";
+ this[result] = null;
+ }
+ // If there are any tasks from the context object on the file reading task source in an affiliated task queue, then remove those tasks from that task queue.
+ // Terminate the algorithm for the read method being processed.
+ this[aborted] = true;
+
+ // Fire a progress event called abort at the context object.
+ const ev = new ProgressEvent("abort", {});
+ this.dispatchEvent(ev);
+
+ // If context object's state is not "loading", fire a progress event called loadend at the context object.
+ if (this[state] !== "loading") {
+ const ev = new ProgressEvent("loadend", {});
+ this.dispatchEvent(ev);
+ }
+ }
+
+ /** @param {Blob} blob */
+ readAsArrayBuffer(blob) {
+ webidl.assertBranded(this, FileReader);
+ const prefix = "Failed to execute 'readAsArrayBuffer' on 'FileReader'";
+ webidl.requiredArguments(arguments.length, 1, { prefix });
+ this.#readOperation(blob, { kind: "ArrayBuffer" });
+ }
+
+ /** @param {Blob} blob */
+ readAsBinaryString(blob) {
+ webidl.assertBranded(this, FileReader);
+ const prefix = "Failed to execute 'readAsBinaryString' on 'FileReader'";
+ webidl.requiredArguments(arguments.length, 1, { prefix });
+ // shared read operation, packaging the result as a binary string
+ this.#readOperation(blob, { kind: "BinaryString" });
+ }
+
+ /** @param {Blob} blob */
+ readAsDataURL(blob) {
+ webidl.assertBranded(this, FileReader);
+ const prefix = "Failed to execute 'readAsDataURL' on 'FileReader'";
+ webidl.requiredArguments(arguments.length, 1, { prefix });
+ // shared read operation, packaging the result as a data URL
+ this.#readOperation(blob, { kind: "DataUrl" });
+ }
+
+ /**
+ * @param {Blob} blob
+ * @param {string} [encoding]
+ */
+ readAsText(blob, encoding) {
+ webidl.assertBranded(this, FileReader);
+ const prefix = "Failed to execute 'readAsText' on 'FileReader'";
+ webidl.requiredArguments(arguments.length, 1, { prefix });
+ if (encoding !== undefined) {
+ encoding = webidl.converters["DOMString"](encoding, {
+ prefix,
+ context: "Argument 2",
+ });
+ }
+ // shared read operation, packaging the result as text
+ this.#readOperation(blob, { kind: "Text", encoding });
+ }
+ }
+
+ webidl.configurePrototype(FileReader);
+
+ Object.defineProperty(FileReader, "EMPTY", {
+ writable: false,
+ enumerable: true,
+ configurable: false,
+ value: 0,
+ });
+ Object.defineProperty(FileReader, "LOADING", {
+ writable: false,
+ enumerable: true,
+ configurable: false,
+ value: 1,
+ });
+ Object.defineProperty(FileReader, "DONE", {
+ writable: false,
+ enumerable: true,
+ configurable: false,
+ value: 2,
+ });
+ Object.defineProperty(FileReader.prototype, "EMPTY", {
+ writable: false,
+ enumerable: true,
+ configurable: false,
+ value: 0,
+ });
+ Object.defineProperty(FileReader.prototype, "LOADING", {
+ writable: false,
+ enumerable: true,
+ configurable: false,
+ value: 1,
+ });
+ Object.defineProperty(FileReader.prototype, "DONE", {
+ writable: false,
+ enumerable: true,
+ configurable: false,
+ value: 2,
+ });
+
+ const handlerSymbol = Symbol("eventHandlers");
+
+ function makeWrappedHandler(handler) {
+ function wrappedHandler(...args) {
+ if (typeof wrappedHandler.handler !== "function") {
+ return;
+ }
+ return wrappedHandler.handler.call(this, ...args);
+ }
+ wrappedHandler.handler = handler;
+ return wrappedHandler;
+ }
+ // TODO(benjamingr) reuse when we can reuse code between web crates
+ function defineEventHandler(emitter, name) {
+ // HTML specification section 8.1.5.1
+ Object.defineProperty(emitter, `on${name}`, {
+ get() {
+ return this[handlerSymbol]?.get(name)?.handler ?? null;
+ },
+ set(value) {
+ if (!this[handlerSymbol]) {
+ this[handlerSymbol] = new Map();
+ }
+ let handlerWrapper = this[handlerSymbol]?.get(name);
+ if (handlerWrapper) {
+ handlerWrapper.handler = value;
+ } else {
+ handlerWrapper = makeWrappedHandler(value);
+ this.addEventListener(name, handlerWrapper);
+ }
+ this[handlerSymbol].set(name, handlerWrapper);
+ },
+ configurable: true,
+ enumerable: true,
+ });
+ }
+ defineEventHandler(FileReader.prototype, "error");
+ defineEventHandler(FileReader.prototype, "loadstart");
+ defineEventHandler(FileReader.prototype, "load");
+ defineEventHandler(FileReader.prototype, "loadend");
+ defineEventHandler(FileReader.prototype, "progress");
+ defineEventHandler(FileReader.prototype, "abort");
+
+ window.__bootstrap.fileReader = {
+ FileReader,
+ };
+})(this);
diff --git a/extensions/web/11_blob_url.js b/extensions/web/11_blob_url.js
new file mode 100644
index 000000000..d030d79bd
--- /dev/null
+++ b/extensions/web/11_blob_url.js
@@ -0,0 +1,62 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+// @ts-check
+/// <reference no-default-lib="true" />
+/// <reference path="../../core/lib.deno_core.d.ts" />
+/// <reference path="../webidl/internal.d.ts" />
+/// <reference path="../web/internal.d.ts" />
+/// <reference path="../web/lib.deno_web.d.ts" />
+/// <reference path="../url/internal.d.ts" />
+/// <reference path="../url/lib.deno_url.d.ts" />
+/// <reference path="./internal.d.ts" />
+/// <reference lib="esnext" />
+"use strict";
+
+((window) => {
+ const core = Deno.core;
+ const webidl = window.__bootstrap.webidl;
+ const { _byteSequence } = window.__bootstrap.file;
+ const { URL } = window.__bootstrap.url;
+
+ /**
+ * @param {Blob} blob
+ * @returns {string}
+ */
+ function createObjectURL(blob) {
+ const prefix = "Failed to execute 'createObjectURL' on 'URL'";
+ webidl.requiredArguments(arguments.length, 1, { prefix });
+ blob = webidl.converters["Blob"](blob, {
+ context: "Argument 1",
+ prefix,
+ });
+
+ const url = core.opSync(
+ "op_file_create_object_url",
+ blob.type,
+ blob[_byteSequence],
+ );
+
+ return url;
+ }
+
+ /**
+ * @param {string} url
+ * @returns {void}
+ */
+ function revokeObjectURL(url) {
+ const prefix = "Failed to execute 'revokeObjectURL' on 'URL'";
+ webidl.requiredArguments(arguments.length, 1, { prefix });
+ url = webidl.converters["DOMString"](url, {
+ context: "Argument 1",
+ prefix,
+ });
+
+ core.opSync(
+ "op_file_revoke_object_url",
+ url,
+ );
+ }
+
+ URL.createObjectURL = createObjectURL;
+ URL.revokeObjectURL = revokeObjectURL;
+})(globalThis);
diff --git a/extensions/web/Cargo.toml b/extensions/web/Cargo.toml
index a914d1b48..05a0f17f2 100644
--- a/extensions/web/Cargo.toml
+++ b/extensions/web/Cargo.toml
@@ -18,6 +18,7 @@ base64 = "0.13.0"
deno_core = { version = "0.89.0", path = "../../core" }
encoding_rs = "0.8.28"
serde = "1.0"
+uuid = { version = "0.8.2", features = ["v4"] }
[dev-dependencies]
futures = "0.3.15"
diff --git a/extensions/web/README.md b/extensions/web/README.md
index 03c1b7f89..d847ae52e 100644
--- a/extensions/web/README.md
+++ b/extensions/web/README.md
@@ -1,5 +1,6 @@
# deno web
-Op crate that implements Event, TextEncoder, TextDecoder.
+Op crate that implements Event, TextEncoder, TextDecoder and File API
+(https://w3c.github.io/FileAPI).
Testing for text encoding is done via WPT in cli/.
diff --git a/extensions/web/internal.d.ts b/extensions/web/internal.d.ts
index ebfa0d39e..4492e6554 100644
--- a/extensions/web/internal.d.ts
+++ b/extensions/web/internal.d.ts
@@ -70,5 +70,15 @@ declare namespace globalThis {
atob(data: string): string;
btoa(data: string): string;
};
+
+ declare var file: {
+ Blob: typeof Blob & {
+ [globalThis.__bootstrap.file._byteSequence]: Uint8Array;
+ };
+ readonly _byteSequence: unique symbol;
+ File: typeof File & {
+ [globalThis.__bootstrap.file._byteSequence]: Uint8Array;
+ };
+ };
}
}
diff --git a/extensions/web/lib.deno_web.d.ts b/extensions/web/lib.deno_web.d.ts
index 0c3673351..a1b6a0595 100644
--- a/extensions/web/lib.deno_web.d.ts
+++ b/extensions/web/lib.deno_web.d.ts
@@ -341,3 +341,39 @@ declare var FileReader: {
readonly EMPTY: number;
readonly LOADING: number;
};
+
+type BlobPart = BufferSource | Blob | string;
+
+interface BlobPropertyBag {
+ type?: string;
+ endings?: "transparent" | "native";
+}
+
+/** A file-like object of immutable, raw data. Blobs represent data that isn't necessarily in a JavaScript-native format. The File interface is based on Blob, inheriting blob functionality and expanding it to support files on the user's system. */
+declare class Blob {
+ constructor(blobParts?: BlobPart[], options?: BlobPropertyBag);
+
+ readonly size: number;
+ readonly type: string;
+ arrayBuffer(): Promise<ArrayBuffer>;
+ slice(start?: number, end?: number, contentType?: string): Blob;
+ stream(): ReadableStream<Uint8Array>;
+ text(): Promise<string>;
+}
+
+interface FilePropertyBag extends BlobPropertyBag {
+ lastModified?: number;
+}
+
+/** Provides information about files and allows JavaScript in a web page to
+ * access their content. */
+declare class File extends Blob {
+ constructor(
+ fileBits: BlobPart[],
+ fileName: string,
+ options?: FilePropertyBag,
+ );
+
+ readonly lastModified: number;
+ readonly name: string;
+}
diff --git a/extensions/web/lib.rs b/extensions/web/lib.rs
index 95adb822a..b2906acaf 100644
--- a/extensions/web/lib.rs
+++ b/extensions/web/lib.rs
@@ -1,12 +1,15 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
use deno_core::error::bad_resource_id;
+use deno_core::error::null_opbuf;
use deno_core::error::range_error;
use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::include_js_files;
use deno_core::op_sync;
+use deno_core::url::Url;
use deno_core::Extension;
+use deno_core::ModuleSpecifier;
use deno_core::OpState;
use deno_core::Resource;
use deno_core::ResourceId;
@@ -17,15 +20,21 @@ use encoding_rs::DecoderResult;
use encoding_rs::Encoding;
use serde::Deserialize;
use serde::Serialize;
-
use std::borrow::Cow;
use std::cell::RefCell;
+use std::collections::HashMap;
use std::fmt;
use std::path::PathBuf;
+use std::sync::Arc;
+use std::sync::Mutex;
use std::usize;
+use uuid::Uuid;
/// Load and execute the javascript code.
-pub fn init() -> Extension {
+pub fn init(
+ blob_url_store: BlobUrlStore,
+ maybe_location: Option<Url>,
+) -> Extension {
Extension::builder()
.js(include_js_files!(
prefix "deno:extensions/web",
@@ -38,6 +47,9 @@ pub fn init() -> Extension {
"04_global_interfaces.js",
"05_base64.js",
"08_text_encoding.js",
+ "09_file.js",
+ "10_filereader.js",
+ "11_blob_url.js",
"12_location.js",
))
.ops(vec![
@@ -50,7 +62,22 @@ pub fn init() -> Extension {
("op_encoding_new_decoder", op_sync(op_encoding_new_decoder)),
("op_encoding_decode", op_sync(op_encoding_decode)),
("op_encoding_encode_into", op_sync(op_encoding_encode_into)),
+ (
+ "op_file_create_object_url",
+ op_sync(op_file_create_object_url),
+ ),
+ (
+ "op_file_revoke_object_url",
+ op_sync(op_file_revoke_object_url),
+ ),
])
+ .state(move |state| {
+ state.put(blob_url_store.clone());
+ if let Some(location) = maybe_location.clone() {
+ state.put(Location(location));
+ }
+ Ok(())
+ })
.build()
}
@@ -318,3 +345,73 @@ pub fn get_error_class_name(e: &AnyError) -> Option<&'static str> {
.map(|_| "DOMExceptionInvalidCharacterError")
})
}
+
+#[derive(Debug, Clone)]
+pub struct Blob {
+ pub data: Vec<u8>,
+ pub media_type: String,
+}
+
+pub struct Location(pub Url);
+
+#[derive(Debug, Default, Clone)]
+pub struct BlobUrlStore(Arc<Mutex<HashMap<Url, Blob>>>);
+
+impl BlobUrlStore {
+ pub fn get(&self, mut url: Url) -> Result<Option<Blob>, AnyError> {
+ let blob_store = self.0.lock().unwrap();
+ url.set_fragment(None);
+ Ok(blob_store.get(&url).cloned())
+ }
+
+ pub fn insert(&self, blob: Blob, maybe_location: Option<Url>) -> Url {
+ let origin = if let Some(location) = maybe_location {
+ location.origin().ascii_serialization()
+ } else {
+ "null".to_string()
+ };
+ let id = Uuid::new_v4();
+ let url = Url::parse(&format!("blob:{}/{}", origin, id)).unwrap();
+
+ let mut blob_store = self.0.lock().unwrap();
+ blob_store.insert(url.clone(), blob);
+
+ url
+ }
+
+ pub fn remove(&self, url: &ModuleSpecifier) {
+ let mut blob_store = self.0.lock().unwrap();
+ blob_store.remove(url);
+ }
+}
+
+pub fn op_file_create_object_url(
+ state: &mut deno_core::OpState,
+ media_type: String,
+ zero_copy: Option<ZeroCopyBuf>,
+) -> Result<String, AnyError> {
+ let data = zero_copy.ok_or_else(null_opbuf)?;
+ let blob = Blob {
+ data: data.to_vec(),
+ media_type,
+ };
+
+ let maybe_location = state.try_borrow::<Location>();
+ let blob_store = state.borrow::<BlobUrlStore>();
+
+ let url =
+ blob_store.insert(blob, maybe_location.map(|location| location.0.clone()));
+
+ Ok(url.to_string())
+}
+
+pub fn op_file_revoke_object_url(
+ state: &mut deno_core::OpState,
+ url: String,
+ _: (),
+) -> Result<(), AnyError> {
+ let url = Url::parse(&url)?;
+ let blob_store = state.borrow::<BlobUrlStore>();
+ blob_store.remove(&url);
+ Ok(())
+}