Diffstat (limited to 'ext/node/polyfills/internal/buffer.mjs')
-rw-r--r--  ext/node/polyfills/internal/buffer.mjs  102
1 file changed, 81 insertions(+), 21 deletions(-)
diff --git a/ext/node/polyfills/internal/buffer.mjs b/ext/node/polyfills/internal/buffer.mjs
index c32494555..7c3f3ade2 100644
--- a/ext/node/polyfills/internal/buffer.mjs
+++ b/ext/node/polyfills/internal/buffer.mjs
@@ -19,6 +19,7 @@ import {
asciiToBytes,
base64ToBytes,
base64UrlToBytes,
+ base64Write,
bytesToAscii,
bytesToUtf16le,
hexToBytes,
@@ -42,6 +43,15 @@ import { Blob } from "ext:deno_web/09_file.js";
export { atob, Blob, btoa };
+class FastBuffer extends Uint8Array {
+ // Using an explicit constructor here is necessary to avoid relying on
+ // `Array.prototype[Symbol.iterator]`, which can be mutated by users.
+ // eslint-disable-next-line no-useless-constructor
+ constructor(bufferOrLength, byteOffset, length) {
+ super(bufferOrLength, byteOffset, length);
+ }
+}
+
const utf8Encoder = new TextEncoder();
// Temporary buffers to convert numbers.
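FastBuffer is the load-bearing addition in this patch: the allocation paths below can construct instances that are already on Buffer.prototype instead of calling Object.setPrototypeOf on a plain Uint8Array each time. A standalone sketch of the pattern (the class body mirrors the hunk above; the demo lines are illustrative only):

// Pass-through Uint8Array subclass. The explicit fixed-arity constructor
// avoids the implicit derived constructor, which would spread its arguments
// and therefore depend on the user-mutable Array.prototype[Symbol.iterator].
class FastBuffer extends Uint8Array {
  constructor(bufferOrLength, byteOffset, length) {
    super(bufferOrLength, byteOffset, length);
  }
}

// The usual Uint8Array constructor overloads still apply:
const a = new FastBuffer(8);                         // zero-filled, length 8
const b = new FastBuffer(new ArrayBuffer(16), 8, 4); // 4-byte view at offset 8
console.log(a.length, b.byteOffset, b.length);       // 8 8 4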
@@ -72,6 +82,9 @@ export const constants = {
MAX_STRING_LENGTH: kStringMaxLength,
};
+FastBuffer.prototype.constructor = Buffer;
+Buffer.prototype = FastBuffer.prototype;
+
Object.defineProperty(Buffer.prototype, "parent", {
enumerable: true,
get: function () {
@@ -98,9 +111,7 @@ function createBuffer(length) {
'The value "' + length + '" is invalid for option "size"',
);
}
- const buf = new Uint8Array(length);
- Object.setPrototypeOf(buf, Buffer.prototype);
- return buf;
+ return new FastBuffer(length);
}
export function Buffer(arg, encodingOrOffset, length) {
@@ -117,7 +128,32 @@ export function Buffer(arg, encodingOrOffset, length) {
return _from(arg, encodingOrOffset, length);
}
-Buffer.poolSize = 8192;
+Object.defineProperty(Buffer, Symbol.species, {
+ __proto__: null,
+ enumerable: false,
+ configurable: true,
+ get() {
+ return FastBuffer;
+ },
+});
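Overriding Symbol.species matters because %TypedArray% methods such as subarray and slice use it to pick the constructor for their results. Routing it to FastBuffer keeps those results on Buffer.prototype without re-entering the user-facing Buffer(...) factory, which does not expect (ArrayBuffer, byteOffset, length) arguments. A reduced sketch of the wiring (mirrors the patch in miniature; not the module itself):

class FastBuffer extends Uint8Array {}

function Buffer(size) {
  // user-facing factory; the real one dispatches on the argument type
  return new FastBuffer(size);
}
Buffer.prototype = FastBuffer.prototype;
FastBuffer.prototype.constructor = Buffer;
Object.defineProperty(Buffer, Symbol.species, {
  configurable: true,
  get() {
    return FastBuffer;
  },
});

// subarray() consults buf.constructor[Symbol.species], so the view below is
// built as a FastBuffer rather than by calling the Buffer factory.
const buf = Buffer(4);
const view = buf.subarray(1, 3);
console.log(view instanceof Buffer, view.length); // true 2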
+
+Buffer.poolSize = 8 * 1024;
+let poolSize, poolOffset, allocPool;
+
+function createPool() {
+ poolSize = Buffer.poolSize;
+ allocPool = new Uint8Array(poolSize).buffer;
+ poolOffset = 0;
+}
+createPool();
+
+function alignPool() {
+ // Ensure aligned slices
+ if (poolOffset & 0x7) {
+ poolOffset |= 0x7;
+ poolOffset++;
+ }
+}
function _from(value, encodingOrOffset, length) {
if (typeof value === "string") {
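The pool is one shared Buffer.poolSize (8 KiB) ArrayBuffer that small allocations carve views out of, and alignPool rounds poolOffset up to the next multiple of 8 so every carved slice starts 8-byte aligned. The bit trick: a non-zero (offset & 0x7) means the offset is not a multiple of 8, and OR-ing with 0x7 then incrementing rounds it up. A small sketch of the same arithmetic (function name is illustrative):

function alignUp8(offset) {
  if (offset & 0x7) {
    offset |= 0x7;
    offset++;
  }
  return offset;
}

console.log(alignUp8(0));  // 0
console.log(alignUp8(5));  // 8
console.log(alignUp8(8));  // 8
console.log(alignUp8(13)); // 16

// Carving an aligned view out of a shared pool, as fromStringFast does below:
const pool = new ArrayBuffer(8 * 1024);
const poolOffset = alignUp8(3);                    // -> 8
const slice = new Uint8Array(pool, poolOffset, 5); // 5-byte view into the pool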
@@ -204,6 +240,28 @@ Buffer.allocUnsafeSlow = function allocUnsafeSlow(size) {
return _allocUnsafe(size);
};
+function fromStringFast(string, ops) {
+ const length = ops.byteLength(string);
+ if (length >= (Buffer.poolSize >>> 1)) {
+ const data = ops.create(string);
+ Object.setPrototypeOf(data, Buffer.prototype);
+ return data;
+ }
+
+ if (length > (poolSize - poolOffset)) {
+ createPool();
+ }
+ let b = new FastBuffer(allocPool, poolOffset, length);
+ const actual = ops.write(b, string, 0, length);
+ if (actual != length) {
+ // byteLength() may overestimate. That's a rare case, though.
+ b = new FastBuffer(allocPool, poolOffset, actual);
+ }
+ poolOffset += actual;
+ alignPool();
+ return b;
+}
+
function fromString(string, encoding) {
if (typeof encoding !== "string" || encoding === "") {
encoding = "utf8";
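fromStringFast makes the size cutoff explicit: a string whose encoded length is at least half of Buffer.poolSize gets its own standalone buffer via ops.create, while smaller strings are encoded into the shared pool and returned as a FastBuffer view over it, re-sliced if the encoder wrote fewer bytes than ops.byteLength estimated. A reduced, self-contained sketch of that flow, using a hypothetical utf8-only ops object and plain Uint8Array views in place of FastBuffer:

const encoder = new TextEncoder();

// Hypothetical single-encoding ops object in the shape fromStringFast expects.
const utf8Ops = {
  byteLength: (s) => encoder.encode(s).length, // real code may overestimate
  create: (s) => encoder.encode(s),            // standalone allocation
  write: (buf, s, offset, len) =>
    encoder.encodeInto(s, buf.subarray(offset, offset + len)).written,
};

const POOL_SIZE = 8 * 1024;
let pool = new ArrayBuffer(POOL_SIZE);
let poolOffset = 0;

function fromStringSketch(string, ops) {
  const length = ops.byteLength(string);
  if (length >= POOL_SIZE >>> 1) {
    return ops.create(string);               // large: own allocation
  }
  if (length > POOL_SIZE - poolOffset) {     // pool exhausted: start a fresh one
    pool = new ArrayBuffer(POOL_SIZE);
    poolOffset = 0;
  }
  let b = new Uint8Array(pool, poolOffset, length);
  const actual = ops.write(b, string, 0, length);
  if (actual !== length) {
    b = new Uint8Array(pool, poolOffset, actual); // byteLength overestimated
  }
  poolOffset += actual;
  poolOffset = (poolOffset + 7) & ~7;        // same rounding as alignPool
  return b;
}

console.log(fromStringSketch("hi", utf8Ops).byteOffset);         // 0, view into the pool
console.log(fromStringSketch("hi again", utf8Ops).byteOffset);   // 8, next aligned slot
console.log(fromStringSketch("x".repeat(5000), utf8Ops).length); // 5000, standalone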
@@ -211,19 +269,15 @@ function fromString(string, encoding) {
if (!Buffer.isEncoding(encoding)) {
throw new codes.ERR_UNKNOWN_ENCODING(encoding);
}
- const length = byteLength(string, encoding) | 0;
- let buf = createBuffer(length);
- const actual = buf.write(string, encoding);
- if (actual !== length) {
- buf = buf.slice(0, actual);
+ const ops = getEncodingOps(encoding);
+ if (ops === undefined) {
+ throw new codes.ERR_UNKNOWN_ENCODING(encoding);
}
- return buf;
+ return fromStringFast(string, ops);
}
function fromArrayLike(obj) {
- const buf = new Uint8Array(obj);
- Object.setPrototypeOf(buf, Buffer.prototype);
- return buf;
+ return new FastBuffer(obj);
}
function fromObject(obj) {
@@ -260,7 +314,7 @@ Object.setPrototypeOf(SlowBuffer.prototype, Uint8Array.prototype);
Object.setPrototypeOf(SlowBuffer, Uint8Array);
Buffer.isBuffer = function isBuffer(b) {
- return b != null && b._isBuffer === true && b !== Buffer.prototype;
+ return b instanceof Buffer;
};
Buffer.compare = function compare(a, b) {
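Because Buffer.prototype is now the FastBuffer prototype, isBuffer reduces to an instanceof check instead of sniffing the legacy _isBuffer marker, and every allocation path in this patch (pool slices, species-created views, fromArrayBuffer views) produces objects that pass it. Usage, for illustration:

import { Buffer } from "node:buffer";

Buffer.isBuffer(Buffer.from("abc"));          // true
Buffer.isBuffer(Buffer.alloc(8).subarray(2)); // true: species keeps Buffer.prototype
Buffer.isBuffer(new Uint8Array(8));           // false: plain Uint8Array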
@@ -664,12 +718,12 @@ Buffer.prototype.base64Slice = function base64Slice(
}
};
-Buffer.prototype.base64Write = function base64Write(
+Buffer.prototype.base64Write = function base64Write_(
string,
offset,
length,
) {
- return blitBuffer(base64ToBytes(string), this, offset, length);
+ return base64Write(string, this, offset, length);
};
Buffer.prototype.base64urlSlice = function base64urlSlice(
@@ -737,8 +791,8 @@ Buffer.prototype.ucs2Write = function ucs2Write(string, offset, length) {
);
};
-Buffer.prototype.utf8Slice = function utf8Slice(string, offset, length) {
- return _utf8Slice(this, string, offset, length);
+Buffer.prototype.utf8Slice = function utf8Slice(offset, length) {
+ return _utf8Slice(this, offset, length);
};
Buffer.prototype.utf8Write = function utf8Write(string, offset, length) {
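utf8Slice decodes a byte range of the buffer, so it takes only a start and end offset; the old signature's stray string parameter shifted the arguments passed on to _utf8Slice by one position. Usage of the corrected two-argument form (utf8Slice is an undocumented prototype method, shown only to illustrate the fixed arity):

import { Buffer } from "node:buffer";

const buf = Buffer.from("héllo", "utf8");   // 6 bytes: "é" takes 2
console.log(buf.utf8Slice(0, buf.length));  // "héllo"
console.log(buf.utf8Slice(3, 6));           // "llo"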
@@ -831,9 +885,7 @@ function fromArrayBuffer(obj, byteOffset, length) {
}
}
- const buffer = new Uint8Array(obj, byteOffset, length);
- Object.setPrototypeOf(buffer, Buffer.prototype);
- return buffer;
+ return new FastBuffer(obj, byteOffset, length);
}
function _base64Slice(buf, start, end) {
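fromArrayBuffer now hands the (buffer, byteOffset, length) triple straight to the FastBuffer constructor, so Buffer.from(arrayBuffer, byteOffset, length) remains a zero-copy view over the caller's memory rather than a copy. Usage, for illustration:

import { Buffer } from "node:buffer";

const ab = new ArrayBuffer(8);
const buf = Buffer.from(ab, 2, 4);  // view over bytes 2..5, no copy
buf[0] = 0xff;
console.log(new Uint8Array(ab)[2]); // 255: same backing memory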
@@ -2105,6 +2157,7 @@ export const encodingOps = {
dir,
),
slice: (buf, start, end) => buf.asciiSlice(start, end),
+ create: (string) => asciiToBytes(string),
write: (buf, string, offset, len) => buf.asciiWrite(string, offset, len),
},
base64: {
@@ -2119,6 +2172,7 @@ export const encodingOps = {
encodingsMap.base64,
dir,
),
+ create: (string) => base64ToBytes(string),
slice: (buf, start, end) => buf.base64Slice(start, end),
write: (buf, string, offset, len) => buf.base64Write(string, offset, len),
},
@@ -2134,6 +2188,7 @@ export const encodingOps = {
encodingsMap.base64url,
dir,
),
+ create: (string) => base64UrlToBytes(string),
slice: (buf, start, end) => buf.base64urlSlice(start, end),
write: (buf, string, offset, len) =>
buf.base64urlWrite(string, offset, len),
@@ -2150,6 +2205,7 @@ export const encodingOps = {
encodingsMap.hex,
dir,
),
+ create: (string) => hexToBytes(string),
slice: (buf, start, end) => buf.hexSlice(start, end),
write: (buf, string, offset, len) => buf.hexWrite(string, offset, len),
},
@@ -2165,6 +2221,7 @@ export const encodingOps = {
encodingsMap.latin1,
dir,
),
+ create: (string) => asciiToBytes(string),
slice: (buf, start, end) => buf.latin1Slice(start, end),
write: (buf, string, offset, len) => buf.latin1Write(string, offset, len),
},
@@ -2180,6 +2237,7 @@ export const encodingOps = {
encodingsMap.utf16le,
dir,
),
+ create: (string) => utf16leToBytes(string),
slice: (buf, start, end) => buf.ucs2Slice(start, end),
write: (buf, string, offset, len) => buf.ucs2Write(string, offset, len),
},
@@ -2195,6 +2253,7 @@ export const encodingOps = {
encodingsMap.utf8,
dir,
),
+ create: (string) => utf8Encoder.encode(string),
slice: (buf, start, end) => buf.utf8Slice(start, end),
write: (buf, string, offset, len) => buf.utf8Write(string, offset, len),
},
@@ -2210,6 +2269,7 @@ export const encodingOps = {
encodingsMap.utf16le,
dir,
),
+ create: (string) => utf16leToBytes(string),
slice: (buf, start, end) => buf.ucs2Slice(start, end),
write: (buf, string, offset, len) => buf.ucs2Write(string, offset, len),
},