summaryrefslogtreecommitdiff
path: root/extensions/file
diff options
context:
space:
mode:
Diffstat (limited to 'extensions/file')
-rw-r--r--extensions/file/01_file.js412
-rw-r--r--extensions/file/02_filereader.js414
-rw-r--r--extensions/file/03_blob_url.js63
-rw-r--r--extensions/file/Cargo.toml18
-rw-r--r--extensions/file/README.md5
-rw-r--r--extensions/file/internal.d.ts18
-rw-r--r--extensions/file/lib.deno_file.d.ts40
-rw-r--r--extensions/file/lib.rs120
8 files changed, 1090 insertions, 0 deletions
diff --git a/extensions/file/01_file.js b/extensions/file/01_file.js
new file mode 100644
index 000000000..05c410572
--- /dev/null
+++ b/extensions/file/01_file.js
@@ -0,0 +1,412 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+// @ts-check
+/// <reference no-default-lib="true" />
+/// <reference path="../../core/lib.deno_core.d.ts" />
+/// <reference path="../webidl/internal.d.ts" />
+/// <reference path="../web/internal.d.ts" />
+/// <reference path="../web/lib.deno_web.d.ts" />
+/// <reference path="./internal.d.ts" />
+/// <reference path="./lib.deno_file.d.ts" />
+/// <reference lib="esnext" />
+"use strict";
+
+((window) => {
+ const webidl = window.__bootstrap.webidl;
+
+ // TODO(lucacasonato): this needs to not be hardcoded and instead depend on
+ // host os.
+ const isWindows = false;
+
+  /**
+   * Collects the run of code points in `input` starting at `position` that
+   * are neither CR nor LF, returning the collected run and the index of the
+   * first CR/LF (or the end of the string).
+   *
+   * @param {string} input
+   * @param {number} position
+   * @returns {{result: string, position: number}}
+   */
+  function collectCodepointsNotCRLF(input, position) {
+    // See https://w3c.github.io/FileAPI/#convert-line-endings-to-native and
+    // https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points
+    const start = position;
+    // The loop body is intentionally empty: all the work (advancing until a
+    // CR/LF or end of input) happens in the for-header itself.
+    for (
+      let c = input.charAt(position);
+      position < input.length && !(c === "\r" || c === "\n");
+      c = input.charAt(++position)
+    );
+    return { result: input.slice(start, position), position };
+  }
+
+  /**
+   * Replaces every line ending in `s` ("\r", "\n", or "\r\n") with the
+   * native line ending ("\r\n" on Windows, "\n" elsewhere), per
+   * https://w3c.github.io/FileAPI/#convert-line-endings-to-native.
+   *
+   * @param {string} s
+   * @returns {string}
+   */
+  function convertLineEndingsToNative(s) {
+    const nativeLineEnding = isWindows ? "\r\n" : "\n";
+
+    let { result, position } = collectCodepointsNotCRLF(s, 0);
+
+    while (position < s.length) {
+      const codePoint = s.charAt(position);
+      if (codePoint === "\r") {
+        result += nativeLineEnding;
+        position++;
+        // A CRLF pair counts as a single line ending.
+        if (position < s.length && s.charAt(position) === "\n") {
+          position++;
+        }
+      } else if (codePoint === "\n") {
+        position++;
+        result += nativeLineEnding;
+      }
+      // Copy the next run of non-CR/LF code points through verbatim.
+      const { result: token, position: newPosition } = collectCodepointsNotCRLF(
+        s,
+        position,
+      );
+      position = newPosition;
+      result += token;
+    }
+
+    return result;
+  }
+
+  /**
+   * Concatenates any number of byte arrays into one newly allocated
+   * Uint8Array.
+   *
+   * @param {...Uint8Array} bytesArrays
+   * @returns {Uint8Array}
+   */
+  function concatUint8Arrays(...bytesArrays) {
+    // First pass: total length, so the result is allocated exactly once.
+    let byteLength = 0;
+    for (const bytes of bytesArrays) {
+      byteLength += bytes.byteLength;
+    }
+    const finalBytes = new Uint8Array(byteLength);
+    // Second pass: copy each input into its slot.
+    let current = 0;
+    for (const bytes of bytesArrays) {
+      finalBytes.set(bytes, current);
+      current += bytes.byteLength;
+    }
+    return finalBytes;
+  }
+
+ const utf8Encoder = new TextEncoder();
+ const utf8Decoder = new TextDecoder();
+
+ /** @typedef {BufferSource | Blob | string} BlobPart */
+
+ /**
+ * @param {BlobPart[]} parts
+ * @param {string} endings
+ * @returns {Uint8Array}
+ */
+ function processBlobParts(parts, endings) {
+ /** @type {Uint8Array[]} */
+ const bytesArrays = [];
+ for (const element of parts) {
+ if (element instanceof ArrayBuffer) {
+ bytesArrays.push(new Uint8Array(element.slice(0)));
+ } else if (ArrayBuffer.isView(element)) {
+ const buffer = element.buffer.slice(
+ element.byteOffset,
+ element.byteOffset + element.byteLength,
+ );
+ bytesArrays.push(new Uint8Array(buffer));
+ } else if (element instanceof Blob) {
+ bytesArrays.push(
+ new Uint8Array(element[_byteSequence].buffer.slice(0)),
+ );
+ } else if (typeof element === "string") {
+ let s = element;
+ if (endings == "native") {
+ s = convertLineEndingsToNative(s);
+ }
+ bytesArrays.push(utf8Encoder.encode(s));
+ } else {
+ throw new TypeError("Unreachable code (invalild element type)");
+ }
+ }
+ return concatUint8Arrays(...bytesArrays);
+ }
+
+  /**
+   * Normalizes a Blob/File `type` option per the spec: a string containing
+   * any character outside printable ASCII (0x20-0x7E) becomes "", otherwise
+   * the string is lower-cased.
+   *
+   * @param {string} str
+   * @returns {string}
+   */
+  function normalizeType(str) {
+    let normalizedType = str;
+    if (!/^[\x20-\x7E]*$/.test(str)) {
+      normalizedType = "";
+    }
+    return normalizedType.toLowerCase();
+  }
+
+  // Symbol keying a Blob's internal, immutable byte sequence. Exported via
+  // window.__bootstrap.file so other bootstrap code (e.g. blob URLs) can
+  // reach the raw bytes.
+  const _byteSequence = Symbol("[[ByteSequence]]");
+
+  /**
+   * Implementation of the FileAPI `Blob` interface
+   * (https://w3c.github.io/FileAPI/#blob-section).
+   */
+  class Blob {
+    get [Symbol.toStringTag]() {
+      return "Blob";
+    }
+
+    /** @type {string} */
+    #type;
+
+    /** @type {Uint8Array} */
+    [_byteSequence];
+
+    /**
+     * @param {BlobPart[]} blobParts
+     * @param {BlobPropertyBag} options
+     */
+    constructor(blobParts = [], options = {}) {
+      const prefix = "Failed to construct 'Blob'";
+      blobParts = webidl.converters["sequence<BlobPart>"](blobParts, {
+        context: "Argument 1",
+        prefix,
+      });
+      options = webidl.converters["BlobPropertyBag"](options, {
+        context: "Argument 2",
+        prefix,
+      });
+
+      // Brand the instance so webidl.assertBranded accepts it later.
+      this[webidl.brand] = webidl.brand;
+
+      /** @type {Uint8Array} */
+      this[_byteSequence] = processBlobParts(
+        blobParts,
+        options.endings,
+      );
+      this.#type = normalizeType(options.type);
+    }
+
+    /** @returns {number} */
+    get size() {
+      webidl.assertBranded(this, Blob);
+      return this[_byteSequence].byteLength;
+    }
+
+    /** @returns {string} */
+    get type() {
+      webidl.assertBranded(this, Blob);
+      return this.#type;
+    }
+
+    /**
+     * Returns a new Blob over the byte range [start, end), with negative
+     * indices counting back from the end, per
+     * https://w3c.github.io/FileAPI/#slice-method-algo.
+     *
+     * @param {number} [start]
+     * @param {number} [end]
+     * @param {string} [contentType]
+     * @returns {Blob}
+     */
+    slice(start, end, contentType) {
+      webidl.assertBranded(this, Blob);
+      const prefix = "Failed to execute 'slice' on 'Blob'";
+      if (start !== undefined) {
+        start = webidl.converters["long long"](start, {
+          clamp: true,
+          context: "Argument 1",
+          prefix,
+        });
+      }
+      if (end !== undefined) {
+        end = webidl.converters["long long"](end, {
+          clamp: true,
+          context: "Argument 2",
+          prefix,
+        });
+      }
+      if (contentType !== undefined) {
+        contentType = webidl.converters["DOMString"](contentType, {
+          context: "Argument 3",
+          prefix,
+        });
+      }
+
+      const O = this;
+      /** @type {number} */
+      let relativeStart;
+      if (start === undefined) {
+        relativeStart = 0;
+      } else {
+        // Negative offsets count back from the end; clamp to [0, size].
+        if (start < 0) {
+          relativeStart = Math.max(O.size + start, 0);
+        } else {
+          relativeStart = Math.min(start, O.size);
+        }
+      }
+      /** @type {number} */
+      let relativeEnd;
+      if (end === undefined) {
+        relativeEnd = O.size;
+      } else {
+        if (end < 0) {
+          relativeEnd = Math.max(O.size + end, 0);
+        } else {
+          relativeEnd = Math.min(end, O.size);
+        }
+      }
+      /** @type {string} */
+      let relativeContentType;
+      if (contentType === undefined) {
+        relativeContentType = "";
+      } else {
+        relativeContentType = normalizeType(contentType);
+      }
+      // NOTE(review): this slices the backing ArrayBuffer directly; that is
+      // only correct because the byte sequence always spans its entire
+      // buffer (it is freshly built in the constructor) — confirm if the
+      // byte-sequence construction ever changes.
+      return new Blob([
+        O[_byteSequence].buffer.slice(relativeStart, relativeEnd),
+      ], { type: relativeContentType });
+    }
+
+    /**
+     * Returns a byte stream over a copy of the blob's contents.
+     * @returns {ReadableStream<Uint8Array>}
+     */
+    stream() {
+      webidl.assertBranded(this, Blob);
+      const bytes = this[_byteSequence];
+      const stream = new ReadableStream({
+        type: "bytes",
+        /** @param {ReadableByteStreamController} controller */
+        start(controller) {
+          // Enqueue a single chunk holding a copy of all the bytes; a
+          // zero-length enqueue is skipped because byte streams reject it.
+          const chunk = new Uint8Array(bytes.buffer.slice(0));
+          if (chunk.byteLength > 0) controller.enqueue(chunk);
+          controller.close();
+        },
+      });
+      return stream;
+    }
+
+    /**
+     * Resolves with the blob's contents decoded as UTF-8.
+     * @returns {Promise<string>}
+     */
+    async text() {
+      webidl.assertBranded(this, Blob);
+      const buffer = await this.arrayBuffer();
+      return utf8Decoder.decode(buffer);
+    }
+
+    /**
+     * Resolves with a copy of the blob's contents as an ArrayBuffer,
+     * collected by draining the stream() above.
+     * @returns {Promise<ArrayBuffer>}
+     */
+    async arrayBuffer() {
+      webidl.assertBranded(this, Blob);
+      const stream = this.stream();
+      let bytes = new Uint8Array();
+      for await (const chunk of stream) {
+        bytes = concatUint8Arrays(bytes, chunk);
+      }
+      return bytes.buffer;
+    }
+  }
+
+  webidl.converters["Blob"] = webidl.createInterfaceConverter("Blob", Blob);
+  // Hand-rolled converter for the BlobPart union type.
+  webidl.converters["BlobPart"] = (V, opts) => {
+    // Union for ((ArrayBuffer or ArrayBufferView) or Blob or USVString)
+    if (typeof V == "object") {
+      if (V instanceof Blob) {
+        return webidl.converters["Blob"](V, opts);
+      }
+      if (V instanceof ArrayBuffer || V instanceof SharedArrayBuffer) {
+        return webidl.converters["ArrayBuffer"](V, opts);
+      }
+      if (ArrayBuffer.isView(V)) {
+        return webidl.converters["ArrayBufferView"](V, opts);
+      }
+    }
+    // Anything else falls through to string conversion.
+    return webidl.converters["USVString"](V, opts);
+  };
+  webidl.converters["sequence<BlobPart>"] = webidl.createSequenceConverter(
+    webidl.converters["BlobPart"],
+  );
+  webidl.converters["EndingType"] = webidl.createEnumConverter("EndingType", [
+    "transparent",
+    "native",
+  ]);
+  // Shared between BlobPropertyBag and FilePropertyBag (which extends it).
+  const blobPropertyBagDictionary = [
+    {
+      key: "type",
+      converter: webidl.converters["DOMString"],
+      defaultValue: "",
+    },
+    {
+      key: "endings",
+      converter: webidl.converters["EndingType"],
+      defaultValue: "transparent",
+    },
+  ];
+  webidl.converters["BlobPropertyBag"] = webidl.createDictionaryConverter(
+    "BlobPropertyBag",
+    blobPropertyBagDictionary,
+  );
+
+ const _Name = Symbol("[[Name]]");
+ const _LastModfied = Symbol("[[LastModified]]");
+
+ class File extends Blob {
+ get [Symbol.toStringTag]() {
+ return "File";
+ }
+
+ /** @type {string} */
+ [_Name];
+ /** @type {number} */
+ [_LastModfied];
+
+ /**
+ * @param {BlobPart[]} fileBits
+ * @param {string} fileName
+ * @param {FilePropertyBag} options
+ */
+ constructor(fileBits, fileName, options = {}) {
+ const prefix = "Failed to construct 'File'";
+ webidl.requiredArguments(arguments.length, 2, { prefix });
+
+ fileBits = webidl.converters["sequence<BlobPart>"](fileBits, {
+ context: "Argument 1",
+ prefix,
+ });
+ fileName = webidl.converters["USVString"](fileName, {
+ context: "Argument 2",
+ prefix,
+ });
+ options = webidl.converters["FilePropertyBag"](options, {
+ context: "Argument 3",
+ prefix,
+ });
+
+ super(fileBits, options);
+
+ /** @type {string} */
+ this[_Name] = fileName.replaceAll("/", ":");
+ if (options.lastModified === undefined) {
+ /** @type {number} */
+ this[_LastModfied] = new Date().getTime();
+ } else {
+ /** @type {number} */
+ this[_LastModfied] = options.lastModified;
+ }
+ }
+
+ /** @returns {string} */
+ get name() {
+ webidl.assertBranded(this, File);
+ return this[_Name];
+ }
+
+ /** @returns {number} */
+ get lastModified() {
+ webidl.assertBranded(this, File);
+ return this[_LastModfied];
+ }
+ }
+
+  // FilePropertyBag = BlobPropertyBag + an optional lastModified member
+  // (no default here; the File constructor falls back to the current time).
+  webidl.converters["FilePropertyBag"] = webidl.createDictionaryConverter(
+    "FilePropertyBag",
+    blobPropertyBagDictionary,
+    [
+      {
+        key: "lastModified",
+        converter: webidl.converters["long long"],
+      },
+    ],
+  );
+
+  // Expose the classes and the byte-sequence symbol to other bootstrap code.
+  window.__bootstrap.file = {
+    Blob,
+    _byteSequence,
+    File,
+  };
+})(this);
diff --git a/extensions/file/02_filereader.js b/extensions/file/02_filereader.js
new file mode 100644
index 000000000..9575ab85a
--- /dev/null
+++ b/extensions/file/02_filereader.js
@@ -0,0 +1,414 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+// @ts-check
+/// <reference no-default-lib="true" />
+/// <reference path="../../core/lib.deno_core.d.ts" />
+/// <reference path="../webidl/internal.d.ts" />
+/// <reference path="../web/internal.d.ts" />
+/// <reference path="../web/lib.deno_web.d.ts" />
+/// <reference path="./internal.d.ts" />
+/// <reference path="./lib.deno_file.d.ts" />
+/// <reference lib="esnext" />
+
+"use strict";
+
+((window) => {
+ const webidl = window.__bootstrap.webidl;
+ const { decode } = window.__bootstrap.encoding;
+ const { parseMimeType } = window.__bootstrap.mimesniff;
+ const base64 = window.__bootstrap.base64;
+
+ const state = Symbol("[[state]]");
+ const result = Symbol("[[result]]");
+ const error = Symbol("[[error]]");
+ const aborted = Symbol("[[aborted]]");
+
+ class FileReader extends EventTarget {
+ get [Symbol.toStringTag]() {
+ return "FileReader";
+ }
+
+ /** @type {"empty" | "loading" | "done"} */
+ [state] = "empty";
+ /** @type {null | string | ArrayBuffer} */
+ [result] = null;
+ /** @type {null | DOMException} */
+ [error] = null;
+
+ [aborted] = false;
+
+    /**
+     * Shared implementation of the FileAPI "read operation"
+     * (https://w3c.github.io/FileAPI/#readOperation): drains blob.stream()
+     * chunk by chunk, firing loadstart/progress events along the way, and on
+     * completion packages the collected bytes according to `readtype.kind`
+     * before firing load/loadend (or error/loadend on failure).
+     *
+     * @param {Blob} blob
+     * @param {{kind: "ArrayBuffer" | "Text" | "DataUrl" | "BinaryString", encoding?: string}} readtype
+     */
+    #readOperation = (blob, readtype) => {
+      // 1. If fr’s state is "loading", throw an InvalidStateError DOMException.
+      if (this[state] === "loading") {
+        throw new DOMException(
+          "Invalid FileReader state.",
+          "InvalidStateError",
+        );
+      }
+      // 2. Set fr’s state to "loading".
+      this[state] = "loading";
+      // 3. Set fr’s result to null.
+      this[result] = null;
+      // 4. Set fr’s error to null.
+      this[error] = null;
+
+      // 5. Let stream be the result of calling get stream on blob.
+      const stream /*: ReadableStream<ArrayBufferView>*/ = blob.stream();
+
+      // 6. Let reader be the result of getting a reader from stream.
+      const reader = stream.getReader();
+
+      // 7. Let bytes be an empty byte sequence.
+      /** @type {Uint8Array[]} */
+      const chunks = [];
+
+      // 8. Let chunkPromise be the result of reading a chunk from stream with reader.
+      let chunkPromise = reader.read();
+
+      // 9. Let isFirstChunk be true.
+      let isFirstChunk = true;
+
+      // 10 in parallel while true
+      // The [aborted] flag is re-checked before every state change so that
+      // abort() can cancel this in-flight read at the next checkpoint.
+      (async () => {
+        while (!this[aborted]) {
+          // 1. Wait for chunkPromise to be fulfilled or rejected.
+          try {
+            const chunk = await chunkPromise;
+            if (this[aborted]) return;
+
+            // 2. If chunkPromise is fulfilled, and isFirstChunk is true, queue a task to fire a progress event called loadstart at fr.
+            if (isFirstChunk) {
+              // TODO(lucacasonato): this is wrong, should be HTML "queue a task"
+              queueMicrotask(() => {
+                if (this[aborted]) return;
+                // fire a progress event for loadstart
+                const ev = new ProgressEvent("loadstart", {});
+                this.dispatchEvent(ev);
+              });
+            }
+            // 3. Set isFirstChunk to false.
+            isFirstChunk = false;
+
+            // 4. If chunkPromise is fulfilled with an object whose done property is false
+            // and whose value property is a Uint8Array object, run these steps:
+            if (!chunk.done && chunk.value instanceof Uint8Array) {
+              chunks.push(chunk.value);
+
+              // TODO(bartlomieju): (only) If roughly 50ms have passed since last progress
+              {
+                const size = chunks.reduce((p, i) => p + i.byteLength, 0);
+                const ev = new ProgressEvent("progress", {
+                  loaded: size,
+                });
+                // TODO(lucacasonato): this is wrong, should be HTML "queue a task"
+                queueMicrotask(() => {
+                  if (this[aborted]) return;
+                  this.dispatchEvent(ev);
+                });
+              }
+
+              chunkPromise = reader.read();
+            } // 5 Otherwise, if chunkPromise is fulfilled with an object whose done property is true, queue a task to run the following steps and abort this algorithm:
+            else if (chunk.done === true) {
+              // TODO(lucacasonato): this is wrong, should be HTML "queue a task"
+              queueMicrotask(() => {
+                if (this[aborted]) return;
+                // 1. Set fr’s state to "done".
+                this[state] = "done";
+                // 2. Let result be the result of package data given bytes, type, blob’s type, and encodingName.
+                // Flatten the collected chunks into one contiguous buffer.
+                const size = chunks.reduce((p, i) => p + i.byteLength, 0);
+                const bytes = new Uint8Array(size);
+                let offs = 0;
+                for (const chunk of chunks) {
+                  bytes.set(chunk, offs);
+                  offs += chunk.byteLength;
+                }
+                switch (readtype.kind) {
+                  case "ArrayBuffer": {
+                    this[result] = bytes.buffer;
+                    break;
+                  }
+                  case "BinaryString":
+                    // One string character per byte (latin1-style).
+                    this[result] = [...new Uint8Array(bytes.buffer)].map((v) =>
+                      String.fromCodePoint(v)
+                    ).join("");
+                    break;
+                  case "Text": {
+                    // Decoder preference: caller-supplied encoding, then the
+                    // blob's charset parameter, then UTF-8.
+                    let decoder = undefined;
+                    if (readtype.encoding) {
+                      try {
+                        decoder = new TextDecoder(readtype.encoding);
+                      } catch {
+                        // don't care about the error
+                      }
+                    }
+                    if (decoder === undefined) {
+                      const mimeType = parseMimeType(blob.type);
+                      if (mimeType) {
+                        const charset = mimeType.parameters.get("charset");
+                        if (charset) {
+                          try {
+                            decoder = new TextDecoder(charset);
+                          } catch {
+                            // don't care about the error
+                          }
+                        }
+                      }
+                    }
+                    if (decoder === undefined) {
+                      decoder = new TextDecoder();
+                    }
+                    this[result] = decode(bytes, decoder.encoding);
+                    break;
+                  }
+                  case "DataUrl": {
+                    const mediaType = blob.type || "application/octet-stream";
+                    this[result] = `data:${mediaType};base64,${
+                      base64.fromByteArray(bytes)
+                    }`;
+                    break;
+                  }
+                }
+                // 4.2 Fire a progress event called load at the fr.
+                {
+                  const ev = new ProgressEvent("load", {
+                    lengthComputable: true,
+                    loaded: size,
+                    total: size,
+                  });
+                  this.dispatchEvent(ev);
+                }
+
+                // 5. If fr’s state is not "loading", fire a progress event called loadend at the fr.
+                //Note: Event handler for the load or error events could have started another load, if that happens the loadend event for this load is not fired.
+                if (this[state] !== "loading") {
+                  const ev = new ProgressEvent("loadend", {
+                    lengthComputable: true,
+                    loaded: size,
+                    total: size,
+                  });
+                  this.dispatchEvent(ev);
+                }
+              });
+              break;
+            }
+          } catch (err) {
+            // TODO(lucacasonato): this is wrong, should be HTML "queue a task"
+            queueMicrotask(() => {
+              if (this[aborted]) return;
+
+              // chunkPromise rejected
+              this[state] = "done";
+              this[error] = err;
+
+              {
+                const ev = new ProgressEvent("error", {});
+                this.dispatchEvent(ev);
+              }
+
+              //If fr’s state is not "loading", fire a progress event called loadend at fr.
+              //Note: Event handler for the error event could have started another load, if that happens the loadend event for this load is not fired.
+              if (this[state] !== "loading") {
+                const ev = new ProgressEvent("loadend", {});
+                this.dispatchEvent(ev);
+              }
+            });
+            break;
+          }
+        }
+      })();
+    };
+
+    constructor() {
+      super();
+      // Brand the instance so webidl.assertBranded accepts it.
+      this[webidl.brand] = webidl.brand;
+    }
+
+    /** @returns {number} */
+    get readyState() {
+      webidl.assertBranded(this, FileReader);
+      // Map the internal string state onto the numeric EMPTY/LOADING/DONE
+      // constants defined below.
+      switch (this[state]) {
+        case "empty":
+          return FileReader.EMPTY;
+        case "loading":
+          return FileReader.LOADING;
+        case "done":
+          return FileReader.DONE;
+        default:
+          throw new TypeError("Invalid state");
+      }
+    }
+
+    /** @returns {null | string | ArrayBuffer} */
+    get result() {
+      webidl.assertBranded(this, FileReader);
+      return this[result];
+    }
+
+    /** @returns {null | DOMException} */
+    get error() {
+      webidl.assertBranded(this, FileReader);
+      return this[error];
+    }
+
+    /**
+     * Cancels an in-flight read (https://w3c.github.io/FileAPI/#dfn-abort)
+     * and fires the abort (and possibly loadend) events.
+     */
+    abort() {
+      webidl.assertBranded(this, FileReader);
+      // If context object's state is "empty" or if context object's state is "done" set context object's result to null and terminate this algorithm.
+      if (
+        this[state] === "empty" ||
+        this[state] === "done"
+      ) {
+        this[result] = null;
+        return;
+      }
+      // If context object's state is "loading" set context object's state to "done" and set context object's result to null.
+      if (this[state] === "loading") {
+        this[state] = "done";
+        this[result] = null;
+      }
+      // If there are any tasks from the context object on the file reading task source in an affiliated task queue, then remove those tasks from that task queue.
+      // Terminate the algorithm for the read method being processed.
+      // Setting this flag makes #readOperation bail out at its next check.
+      this[aborted] = true;
+
+      // Fire a progress event called abort at the context object.
+      const ev = new ProgressEvent("abort", {});
+      this.dispatchEvent(ev);
+
+      // If context object's state is not "loading", fire a progress event called loadend at the context object.
+      if (this[state] !== "loading") {
+        const ev = new ProgressEvent("loadend", {});
+        this.dispatchEvent(ev);
+      }
+    }
+
+    /** @param {Blob} blob */
+    readAsArrayBuffer(blob) {
+      webidl.assertBranded(this, FileReader);
+      const prefix = "Failed to execute 'readAsArrayBuffer' on 'FileReader'";
+      webidl.requiredArguments(arguments.length, 1, { prefix });
+      // Package the collected bytes as an ArrayBuffer.
+      this.#readOperation(blob, { kind: "ArrayBuffer" });
+    }
+
+    /** @param {Blob} blob */
+    readAsBinaryString(blob) {
+      webidl.assertBranded(this, FileReader);
+      const prefix = "Failed to execute 'readAsBinaryString' on 'FileReader'";
+      webidl.requiredArguments(arguments.length, 1, { prefix });
+      // Package the collected bytes as a binary string (one character per
+      // byte). (Previous comment wrongly called this an alias for
+      // readAsArrayBuffer.)
+      this.#readOperation(blob, { kind: "BinaryString" });
+    }
+
+ /** @param {Blob} blob */
+ readAsDataURL(blob) {
+ webidl.assertBranded(this, FileReader);
+ const prefix = "Failed to execute 'readAsBinaryString' on 'FileReader'";
+ webidl.requiredArguments(arguments.length, 1, { prefix });
+ // alias for readAsArrayBuffer
+ this.#readOperation(blob, { kind: "DataUrl" });
+ }
+
+ /**
+ * @param {Blob} blob
+ * @param {string} [encoding]
+ */
+ readAsText(blob, encoding) {
+ webidl.assertBranded(this, FileReader);
+ const prefix = "Failed to execute 'readAsBinaryString' on 'FileReader'";
+ webidl.requiredArguments(arguments.length, 1, { prefix });
+ if (encoding !== undefined) {
+ encoding = webidl.converters["DOMString"](encoding, {
+ prefix,
+ context: "Argument 2",
+ });
+ }
+ // alias for readAsArrayBuffer
+ this.#readOperation(blob, { kind: "Text", encoding });
+ }
+ }
+
+  // readyState constants; per WebIDL, interface constants are exposed on
+  // both the constructor and the prototype, as non-writable properties.
+  Object.defineProperty(FileReader, "EMPTY", {
+    writable: false,
+    enumerable: true,
+    configurable: false,
+    value: 0,
+  });
+  Object.defineProperty(FileReader, "LOADING", {
+    writable: false,
+    enumerable: true,
+    configurable: false,
+    value: 1,
+  });
+  Object.defineProperty(FileReader, "DONE", {
+    writable: false,
+    enumerable: true,
+    configurable: false,
+    value: 2,
+  });
+  Object.defineProperty(FileReader.prototype, "EMPTY", {
+    writable: false,
+    enumerable: true,
+    configurable: false,
+    value: 0,
+  });
+  Object.defineProperty(FileReader.prototype, "LOADING", {
+    writable: false,
+    enumerable: true,
+    configurable: false,
+    value: 1,
+  });
+  Object.defineProperty(FileReader.prototype, "DONE", {
+    writable: false,
+    enumerable: true,
+    configurable: false,
+    value: 2,
+  });
+
+  const handlerSymbol = Symbol("eventHandlers");
+
+  // Wraps an event-handler value so the listener registered with
+  // addEventListener stays stable while the underlying handler can be
+  // swapped later via the on* setter.
+  function makeWrappedHandler(handler) {
+    function wrappedHandler(...args) {
+      // Non-function handler values are ignored when the event fires.
+      if (typeof wrappedHandler.handler !== "function") {
+        return;
+      }
+      return wrappedHandler.handler.call(this, ...args);
+    }
+    wrappedHandler.handler = handler;
+    return wrappedHandler;
+  }
+  // TODO(benjamingr) reuse when we can reuse code between web crates
+  // Defines an `on<name>` event-handler IDL attribute on `emitter`.
+  function defineEventHandler(emitter, name) {
+    // HTML specification section 8.1.5.1
+    Object.defineProperty(emitter, `on${name}`, {
+      get() {
+        return this[handlerSymbol]?.get(name)?.handler ?? null;
+      },
+      set(value) {
+        // Lazily create the per-instance name -> wrapper map.
+        if (!this[handlerSymbol]) {
+          this[handlerSymbol] = new Map();
+        }
+        let handlerWrapper = this[handlerSymbol]?.get(name);
+        if (handlerWrapper) {
+          // Reuse the already-registered listener; just swap its target.
+          handlerWrapper.handler = value;
+        } else {
+          handlerWrapper = makeWrappedHandler(value);
+          this.addEventListener(name, handlerWrapper);
+        }
+        this[handlerSymbol].set(name, handlerWrapper);
+      },
+      configurable: true,
+      enumerable: true,
+    });
+  }
+  // Standard FileReader event-handler attributes.
+  defineEventHandler(FileReader.prototype, "error");
+  defineEventHandler(FileReader.prototype, "loadstart");
+  defineEventHandler(FileReader.prototype, "load");
+  defineEventHandler(FileReader.prototype, "loadend");
+  defineEventHandler(FileReader.prototype, "progress");
+  defineEventHandler(FileReader.prototype, "abort");
+
+  window.__bootstrap.fileReader = {
+    FileReader,
+  };
+})(this);
diff --git a/extensions/file/03_blob_url.js b/extensions/file/03_blob_url.js
new file mode 100644
index 000000000..2b5e16b55
--- /dev/null
+++ b/extensions/file/03_blob_url.js
@@ -0,0 +1,63 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+// @ts-check
+/// <reference no-default-lib="true" />
+/// <reference path="../../core/lib.deno_core.d.ts" />
+/// <reference path="../webidl/internal.d.ts" />
+/// <reference path="../web/internal.d.ts" />
+/// <reference path="../web/lib.deno_web.d.ts" />
+/// <reference path="../url/internal.d.ts" />
+/// <reference path="../url/lib.deno_url.d.ts" />
+/// <reference path="./internal.d.ts" />
+/// <reference path="./lib.deno_file.d.ts" />
+/// <reference lib="esnext" />
+"use strict";
+
+((window) => {
+ const core = Deno.core;
+ const webidl = window.__bootstrap.webidl;
+ const { _byteSequence } = window.__bootstrap.file;
+ const { URL } = window.__bootstrap.url;
+
+  /**
+   * Implementation of URL.createObjectURL: registers the blob's bytes and
+   * media type with the Rust-side blob URL store and returns the minted
+   * `blob:` URL.
+   *
+   * @param {Blob} blob
+   * @returns {string}
+   */
+  function createObjectURL(blob) {
+    const prefix = "Failed to execute 'createObjectURL' on 'URL'";
+    webidl.requiredArguments(arguments.length, 1, { prefix });
+    blob = webidl.converters["Blob"](blob, {
+      context: "Argument 1",
+      prefix,
+    });
+
+    // The op receives the media type and the raw byte sequence.
+    const url = core.opSync(
+      "op_file_create_object_url",
+      blob.type,
+      blob[_byteSequence],
+    );
+
+    return url;
+  }
+
+  /**
+   * Implementation of URL.revokeObjectURL: removes a previously created
+   * `blob:` URL from the Rust-side store.
+   *
+   * @param {string} url
+   * @returns {void}
+   */
+  function revokeObjectURL(url) {
+    const prefix = "Failed to execute 'revokeObjectURL' on 'URL'";
+    webidl.requiredArguments(arguments.length, 1, { prefix });
+    url = webidl.converters["DOMString"](url, {
+      context: "Argument 1",
+      prefix,
+    });
+
+    core.opSync(
+      "op_file_revoke_object_url",
+      url,
+    );
+  }
+
+  // Expose the object-URL API as static methods on URL.
+  URL.createObjectURL = createObjectURL;
+  URL.revokeObjectURL = revokeObjectURL;
+})(globalThis);
diff --git a/extensions/file/Cargo.toml b/extensions/file/Cargo.toml
new file mode 100644
index 000000000..2338fab7c
--- /dev/null
+++ b/extensions/file/Cargo.toml
@@ -0,0 +1,18 @@
+# Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+[package]
+name = "deno_file"
+version = "0.4.0"
+edition = "2018"
+description = "File API implementation for Deno"
+authors = ["the Deno authors"]
+license = "MIT"
+readme = "README.md"
+repository = "https://github.com/denoland/deno"
+
+[lib]
+path = "lib.rs"
+
+[dependencies]
+deno_core = { version = "0.86.0", path = "../../core" }
+uuid = { version = "0.8.2", features = ["v4"] }
diff --git a/extensions/file/README.md b/extensions/file/README.md
new file mode 100644
index 000000000..c421bf004
--- /dev/null
+++ b/extensions/file/README.md
@@ -0,0 +1,5 @@
+# deno_file
+
+This crate implements the File API.
+
+Spec: https://w3c.github.io/FileAPI
diff --git a/extensions/file/internal.d.ts b/extensions/file/internal.d.ts
new file mode 100644
index 000000000..dd892e736
--- /dev/null
+++ b/extensions/file/internal.d.ts
@@ -0,0 +1,18 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+/// <reference no-default-lib="true" />
+/// <reference lib="esnext" />
+
+declare namespace globalThis {
+ declare namespace __bootstrap {
+ declare var file: {
+ Blob: typeof Blob & {
+ [globalThis.__bootstrap.file._byteSequence]: Uint8Array;
+ };
+ readonly _byteSequence: unique symbol;
+ File: typeof File & {
+ [globalThis.__bootstrap.file._byteSequence]: Uint8Array;
+ };
+ };
+ }
+}
diff --git a/extensions/file/lib.deno_file.d.ts b/extensions/file/lib.deno_file.d.ts
new file mode 100644
index 000000000..a907c3f50
--- /dev/null
+++ b/extensions/file/lib.deno_file.d.ts
@@ -0,0 +1,40 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+/// <reference no-default-lib="true" />
+/// <reference lib="esnext" />
+
+type BlobPart = BufferSource | Blob | string;
+
+interface BlobPropertyBag {
+ type?: string;
+ endings?: "transparent" | "native";
+}
+
+/** A file-like object of immutable, raw data. Blobs represent data that isn't necessarily in a JavaScript-native format. The File interface is based on Blob, inheriting blob functionality and expanding it to support files on the user's system. */
+declare class Blob {
+ constructor(blobParts?: BlobPart[], options?: BlobPropertyBag);
+
+ readonly size: number;
+ readonly type: string;
+ arrayBuffer(): Promise<ArrayBuffer>;
+ slice(start?: number, end?: number, contentType?: string): Blob;
+ stream(): ReadableStream<Uint8Array>;
+ text(): Promise<string>;
+}
+
+interface FilePropertyBag extends BlobPropertyBag {
+ lastModified?: number;
+}
+
+/** Provides information about files and allows JavaScript in a web page to
+ * access their content. */
+declare class File extends Blob {
+ constructor(
+ fileBits: BlobPart[],
+ fileName: string,
+ options?: FilePropertyBag,
+ );
+
+ readonly lastModified: number;
+ readonly name: string;
+}
diff --git a/extensions/file/lib.rs b/extensions/file/lib.rs
new file mode 100644
index 000000000..78e3db814
--- /dev/null
+++ b/extensions/file/lib.rs
@@ -0,0 +1,120 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+
+use deno_core::error::null_opbuf;
+use deno_core::error::AnyError;
+use deno_core::include_js_files;
+use deno_core::op_sync;
+use deno_core::url::Url;
+use deno_core::Extension;
+use deno_core::ModuleSpecifier;
+use deno_core::ZeroCopyBuf;
+use std::collections::HashMap;
+use std::path::PathBuf;
+use std::sync::Arc;
+use std::sync::Mutex;
+use uuid::Uuid;
+
+/// An in-memory blob: raw bytes plus their MIME media type.
+#[derive(Debug, Clone)]
+pub struct Blob {
+  pub data: Vec<u8>,
+  pub media_type: String,
+}
+
+/// Newtype wrapper for the program "location" URL, used as the origin
+/// component of generated `blob:` URLs.
+pub struct Location(pub Url);
+
+/// Shared, thread-safe map from `blob:` URL to blob contents; cloning this
+/// handle shares the same underlying map.
+#[derive(Debug, Default, Clone)]
+pub struct BlobUrlStore(Arc<Mutex<HashMap<Url, Blob>>>);
+
+impl BlobUrlStore {
+  /// Looks up the blob registered for `url`. The fragment is stripped
+  /// first, since blobs are keyed by fragment-less URL.
+  pub fn get(&self, mut url: Url) -> Result<Option<Blob>, AnyError> {
+    let blob_store = self.0.lock().unwrap();
+    url.set_fragment(None);
+    Ok(blob_store.get(&url).cloned())
+  }
+
+  /// Stores `blob` under a freshly minted `blob:{origin}/{uuid}` URL and
+  /// returns that URL; the origin is "null" when no location is known.
+  pub fn insert(&self, blob: Blob, maybe_location: Option<Url>) -> Url {
+    let origin = if let Some(location) = maybe_location {
+      location.origin().ascii_serialization()
+    } else {
+      "null".to_string()
+    };
+    let id = Uuid::new_v4();
+    let url = Url::parse(&format!("blob:{}/{}", origin, id)).unwrap();
+
+    let mut blob_store = self.0.lock().unwrap();
+    blob_store.insert(url.clone(), blob);
+
+    url
+  }
+
+  /// Removes the blob registered under `url`, if any.
+  pub fn remove(&self, url: &ModuleSpecifier) {
+    let mut blob_store = self.0.lock().unwrap();
+    // Fix: pass `url` directly. `&url` here is `&&Url`, which does not
+    // satisfy the `Url: Borrow<Q>` bound of `HashMap::remove`.
+    blob_store.remove(url);
+  }
+}
+
+/// Op backing `URL.createObjectURL`: stores the provided bytes and media
+/// type in the [`BlobUrlStore`] and returns the newly minted `blob:` URL as
+/// a string. Errors if no buffer was provided.
+pub fn op_file_create_object_url(
+  state: &mut deno_core::OpState,
+  media_type: String,
+  zero_copy: Option<ZeroCopyBuf>,
+) -> Result<String, AnyError> {
+  let data = zero_copy.ok_or_else(null_opbuf)?;
+  let blob = Blob {
+    data: data.to_vec(),
+    media_type,
+  };
+
+  // The location, when present, supplies the origin part of the blob URL.
+  let maybe_location = state.try_borrow::<Location>();
+  let blob_store = state.borrow::<BlobUrlStore>();
+
+  let url =
+    blob_store.insert(blob, maybe_location.map(|location| location.0.clone()));
+
+  Ok(url.to_string())
+}
+
+/// Op backing `URL.revokeObjectURL`: parses `url` and removes it from the
+/// [`BlobUrlStore`]. Removing an unknown URL is a silent no-op; only an
+/// unparsable URL produces an error.
+pub fn op_file_revoke_object_url(
+  state: &mut deno_core::OpState,
+  url: String,
+  _zero_copy: Option<ZeroCopyBuf>,
+) -> Result<(), AnyError> {
+  let url = Url::parse(&url)?;
+  let blob_store = state.borrow::<BlobUrlStore>();
+  blob_store.remove(&url);
+  Ok(())
+}
+
+/// Builds the `deno_file` extension: registers the three JS implementation
+/// files and the two object-URL ops, and seeds op state with the shared
+/// [`BlobUrlStore`] plus, when known, the program [`Location`].
+pub fn init(
+  blob_url_store: BlobUrlStore,
+  maybe_location: Option<Url>,
+) -> Extension {
+  Extension::builder()
+    .js(include_js_files!(
+      prefix "deno:extensions/file",
+      "01_file.js",
+      "02_filereader.js",
+      "03_blob_url.js",
+    ))
+    .ops(vec![
+      (
+        "op_file_create_object_url",
+        op_sync(op_file_create_object_url),
+      ),
+      (
+        "op_file_revoke_object_url",
+        op_sync(op_file_revoke_object_url),
+      ),
+    ])
+    .state(move |state| {
+      // Cloning the store shares the same Arc-backed map across states.
+      state.put(blob_url_store.clone());
+      if let Some(location) = maybe_location.clone() {
+        state.put(Location(location));
+      }
+      Ok(())
+    })
+    .build()
+}
+
+/// Returns the path to the `lib.deno_file.d.ts` declaration file shipped
+/// alongside this crate's sources.
+pub fn get_declaration() -> PathBuf {
+  PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("lib.deno_file.d.ts")
+}