author    Luca Casonato <lucacasonato@yahoo.com>  2021-06-05 23:10:07 +0200
committer GitHub <noreply@github.com>  2021-06-05 23:10:07 +0200
commit    c73ef5fa143b473677d4cab069241ff018e0c971 (patch)
tree      f13f3ddb1741a81138240c36846e2a23fd562a02 /extensions/fetch
parent    bb0c90cadbb99784681a2acac1fd65ac7f802297 (diff)
refactor(web): use encoding_rs for text encoding (#10844)
This commit removes all JS-based text encoding and text decoding. Instead, encoding now happens in Rust via encoding_rs (already in tree). This implementation retains stream support and adds the last missing encodings. We are incredibly close to 100% WPT on text encoding now. This should reduce our baseline heap by quite a bit.
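For context, below is a minimal standalone sketch of the encoding_rs API named above, showing one-shot and streaming decoding of bytes into a Rust String. It is only an illustration of the crate, not the actual Deno op implementation; the input bytes, label, and variable names are made up for the example.

// Minimal illustration of encoding_rs (not Deno's op code): one-shot and
// streaming decoding. Requires the encoding_rs crate as a dependency.
use encoding_rs::{CoderResult, Encoding, UTF_8};

fn main() {
    // One-shot decode: returns (Cow<str>, encoding actually used, had_errors).
    let bytes = b"caf\xc3\xa9";
    let (text, _encoding_used, had_errors) = UTF_8.decode(bytes);
    assert!(!had_errors);
    assert_eq!(text, "café");

    // Encodings can be looked up by WHATWG label, as a TextDecoder
    // constructor argument would be.
    let encoding = Encoding::for_label(b"utf-8").expect("unknown label");

    // Streaming decode: feed chunks through a Decoder, marking the last one.
    // The multi-byte sequence for 'é' is deliberately split across chunks.
    let chunks: [&[u8]; 2] = [b"caf\xc3", b"\xa9"];
    let total: usize = chunks.iter().map(|c| c.len()).sum();
    let mut decoder = encoding.new_decoder();
    // Reserve worst-case output space up front so every chunk fits.
    let mut out = String::with_capacity(
        decoder.max_utf8_buffer_length(total).expect("overflow"),
    );
    for (i, chunk) in chunks.iter().enumerate() {
        let last = i == chunks.len() - 1;
        let (result, _bytes_read, _had_errors) =
            decoder.decode_to_string(chunk, &mut out, last);
        assert!(matches!(result, CoderResult::InputEmpty));
    }
    assert_eq!(out, "café");
}

Doing the conversion on the Rust side means the bootstrap JS no longer has to keep TextEncoder / TextDecoder instances alive in every context, which is presumably where the baseline heap savings mentioned above come from; the JS side simply calls core.encode / core.decode, as the diff below shows.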
Diffstat (limited to 'extensions/fetch')
-rw-r--r--  extensions/fetch/21_formdata.js | 16
-rw-r--r--  extensions/fetch/22_body.js     | 12
2 files changed, 11 insertions(+), 17 deletions(-)
diff --git a/extensions/fetch/21_formdata.js b/extensions/fetch/21_formdata.js
index 7b519ddc2..a178025b7 100644
--- a/extensions/fetch/21_formdata.js
+++ b/extensions/fetch/21_formdata.js
@@ -11,7 +11,8 @@
/// <reference lib="esnext" />
"use strict";
-((_window) => {
+((window) => {
+ const core = window.Deno.core;
const webidl = globalThis.__bootstrap.webidl;
const { Blob, File, _byteSequence } = globalThis.__bootstrap.file;
@@ -240,8 +241,6 @@
webidl.mixinPairIterable("FormData", FormData, entryList, "name", "value");
- const encoder = new TextEncoder();
-
class MultipartBuilder {
/**
* @param {FormData} formData
@@ -270,7 +269,7 @@
} else this.#writeField(name, value);
}
- this.chunks.push(encoder.encode(`\r\n--${this.boundary}--`));
+ this.chunks.push(core.encode(`\r\n--${this.boundary}--`));
let totalLength = 0;
for (const chunk of this.chunks) {
@@ -309,7 +308,7 @@
}
buf += `\r\n`;
- this.chunks.push(encoder.encode(buf));
+ this.chunks.push(core.encode(buf));
}
/**
@@ -356,7 +355,7 @@
*/
#writeField(field, value) {
this.#writeFieldHeaders(field);
- this.chunks.push(encoder.encode(this.#normalizeNewlines(value)));
+ this.chunks.push(core.encode(this.#normalizeNewlines(value)));
}
/**
@@ -428,7 +427,6 @@
const LF = "\n".codePointAt(0);
const CR = "\r".codePointAt(0);
- const decoder = new TextDecoder("utf-8");
class MultipartParser {
/**
@@ -442,7 +440,7 @@
this.boundary = `--${boundary}`;
this.body = body;
- this.boundaryChars = encoder.encode(this.boundary);
+ this.boundaryChars = core.encode(this.boundary);
}
/**
@@ -539,7 +537,7 @@
});
formData.append(name, blob, filename);
} else {
- formData.append(name, decoder.decode(content));
+ formData.append(name, core.decode(content));
}
}
} else if (state === 5 && isNewLine) {
diff --git a/extensions/fetch/22_body.js b/extensions/fetch/22_body.js
index 8c93e0fcf..f0c7ac8bd 100644
--- a/extensions/fetch/22_body.js
+++ b/extensions/fetch/22_body.js
@@ -223,8 +223,6 @@
return Object.defineProperties(prototype.prototype, mixin);
}
- const decoder = new TextDecoder();
-
/**
* https://fetch.spec.whatwg.org/#concept-body-package-data
* @param {Uint8Array} bytes
@@ -263,14 +261,12 @@
throw new TypeError("Missing content type");
}
case "JSON":
- return JSON.parse(decoder.decode(bytes));
+ return JSON.parse(core.decode(bytes));
case "text":
- return decoder.decode(bytes);
+ return core.decode(bytes);
}
}
- const encoder = new TextEncoder();
-
/**
* @param {BodyInit} object
* @returns {{body: InnerBody, contentType: string | null}}
@@ -305,10 +301,10 @@
length = res.body.byteLength;
contentType = res.contentType;
} else if (object instanceof URLSearchParams) {
- source = encoder.encode(object.toString());
+ source = core.encode(object.toString());
contentType = "application/x-www-form-urlencoded;charset=UTF-8";
} else if (typeof object === "string") {
- source = encoder.encode(object);
+ source = core.encode(object);
contentType = "text/plain;charset=UTF-8";
} else if (object instanceof ReadableStream) {
stream = object;