summaryrefslogtreecommitdiff
path: root/cli/tests/node_compat/test/parallel/test-stream-readable-unshift.js
diff options
context:
space:
mode:
author: Yoshiya Hinosawa <stibium121@gmail.com> 2023-02-21 00:35:04 +0900
committer: GitHub <noreply@github.com> 2023-02-20 16:35:04 +0100
commit6915a9b7a701dde0e1078867961c9a91811c1850 (patch)
treee6822f2b8400c7c7941d3cb9ace59842389b5bc9 /cli/tests/node_compat/test/parallel/test-stream-readable-unshift.js
parenta1cd2a5915c13f6a9b8eafa3807e143a02616bc1 (diff)
test(ext/node): more node compat tests (#17827)
This PR adds the remaining ~650 Node.js compat test cases from std/node. Among these 650 cases, about 130 cases are now failing. These failing cases are prefixed with `TODO:` in `tests/node_compat/config.json`. These will be addressed in later PRs.
Diffstat (limited to 'cli/tests/node_compat/test/parallel/test-stream-readable-unshift.js')
-rw-r--r--cli/tests/node_compat/test/parallel/test-stream-readable-unshift.js177
1 files changed, 177 insertions, 0 deletions
diff --git a/cli/tests/node_compat/test/parallel/test-stream-readable-unshift.js b/cli/tests/node_compat/test/parallel/test-stream-readable-unshift.js
new file mode 100644
index 000000000..114fd7a1c
--- /dev/null
+++ b/cli/tests/node_compat/test/parallel/test-stream-readable-unshift.js
@@ -0,0 +1,177 @@
+// deno-fmt-ignore-file
+// deno-lint-ignore-file
+
+// Copyright Joyent and Node contributors. All rights reserved. MIT license.
+// Taken from Node 18.12.1
+// This file is automatically generated by "node/_tools/setup.ts". Do not modify this file manually
+
+'use strict';
+
+const common = require('../common');
+const assert = require('assert');
+const { Readable } = require('stream');
+
{
  // A plain string handed to unshift() must be stored internally as a Buffer
  // when no encoding is configured on the stream.
  const readable = new Readable({ read() {} });

  const input = 'abc';

  // Exactly one 'data' event is expected for the single unshifted chunk.
  const onData = common.mustCall((chunk) => {
    assert(Buffer.isBuffer(chunk));
    assert.strictEqual(chunk.toString('utf8'), input);
  }, 1);
  readable.on('data', onData);

  readable.unshift(input);

}
+
{
  // unshift() must place its chunk ahead of data that was already push()ed,
  // so the unshifted chunk is emitted first.
  const readable = new Readable({ read() {} });
  const frontChunk = 'front';
  const backChunk = 'back';

  // Expected emission order: unshifted chunk, then pushed chunk.
  const expectedOrder = [frontChunk, backChunk];
  readable.on('data', common.mustCall((chunk) => {
    assert.strictEqual(chunk.toString('utf8'), expectedOrder.shift());
  }, 2));


  readable.push(backChunk);
  readable.unshift(frontChunk);
}
+
{
  // A string chunk must be decoded using the encoding passed to unshift(),
  // so round-tripping through toString() with that encoding is lossless.
  const readable = new Readable({ read() {} });

  const enc = 'base64';
  const encoded = Buffer.from('abc').toString(enc);

  readable.on('data', common.mustCall((chunk) => {
    assert.strictEqual(chunk.toString(enc), encoded);
  }, 1));

  readable.unshift(encoded, enc);

}
+
{

  // When the stream itself has an encoding set, a chunk unshifted with a
  // different per-chunk encoding must be transcoded into the stream encoding.
  const streamEncoding = 'base64';

  function checkEncoding(readable) {

    // Per-chunk encodings to exercise against the stream encoding.
    const chunkEncodings = ['utf8', 'binary', 'hex', 'base64'];
    const pending = [];

    readable.on('data', common.mustCall((chunk) => {
      // Each unshift() puts its chunk at the front, so chunks arrive in
      // reverse insertion order — consume expectations from the tail.
      const { encoding, string } = pending.pop();
      assert.strictEqual(chunk.toString(encoding), string);
    }, chunkEncodings.length));

    for (let i = 0; i < chunkEncodings.length; i++) {
      const encoding = chunkEncodings[i];
      const raw = 'abc';

      // A chunk whose encoding already matches the stream encoding is kept
      // verbatim; any other encoding gets re-encoded into streamEncoding.
      let want;
      if (encoding === streamEncoding) {
        want = raw;
      } else {
        want = Buffer.from(raw, encoding).toString(streamEncoding);
      }

      pending.push({ encoding, string: want });

      readable.unshift(raw, encoding);
    }
  }

  // The stream encoding may be set after construction...
  const r1 = new Readable({ read() {} });
  r1.setEncoding(streamEncoding);
  checkEncoding(r1);

  // ...or passed as a constructor option; both must behave the same.
  const r2 = new Readable({ read() {}, encoding: streamEncoding });
  checkEncoding(r2);

}
+
{
  // push() and unshift() must behave identically here: with a stream
  // encoding configured, every chunk is emitted as a string in that encoding.
  const encoding = 'base64';

  function checkEncoding(readable) {
    const raw = 'abc';
    // Two chunks total: one pushed, one unshifted.
    readable.on('data', common.mustCall((chunk) => {
      assert.strictEqual(chunk, Buffer.from(raw).toString(encoding));
    }, 2));

    readable.push(raw);
    readable.unshift(raw);
  }

  // Encoding set via setEncoding()...
  const r1 = new Readable({ read() {} });
  r1.setEncoding(encoding);
  checkEncoding(r1);

  // ...and via constructor option must give the same result.
  const r2 = new Readable({ read() {}, encoding });
  checkEncoding(r2);

}
+
{
  // In object mode, unshift() must pass chunks through untouched (no Buffer
  // conversion) and preserve their identity.
  const readable = new Readable({ objectMode: true, read() {} });

  const chunks = ['a', 1, {}, []];

  readable.on('data', common.mustCall((chunk) => {
    // Every unshift() goes to the front, so emission order is the reverse
    // of insertion order — compare against the array's tail.
    assert.strictEqual(chunk, chunks.pop());
  }, chunks.length));

  chunks.forEach((chunk) => readable.unshift(chunk));
}
+
{

  // Regression test: unshifting from inside a 'readable' handler while the
  // internal buffer is near the highWaterMark must not throw.
  // Should not throw: https://github.com/nodejs/node/issues/27192
  const highWaterMark = 50;
  class ArrayReader extends Readable {
    constructor(opt) {
      // NOTE(review): `opt` is intentionally unused (copied from the
      // upstream Node.js test); only the fixed highWaterMark is applied.
      super({ highWaterMark });
      // The error happened only when pushing above hwm
      this.buffer = new Array(highWaterMark * 2).fill(0).map(String);
    }
    _read(size) {
      // Drain the backing array; stop early when push() signals backpressure.
      while (this.buffer.length) {
        const chunk = this.buffer.shift();
        if (!this.buffer.length) {
          // Last chunk: push it and end the stream. The `true` return value
          // is ignored by the stream machinery (kept as in upstream).
          this.push(chunk);
          this.push(null);
          return true;
        }
        if (!this.push(chunk))
          return;
      }
    }
  }

  function onRead() {
    // Consume everything currently readable, then unshift a chunk back —
    // this is the exact sequence that used to throw (see issue above).
    while (null !== (stream.read())) {
      // Remove the 'readable' listener before unshifting
      stream.removeListener('readable', onRead);
      stream.unshift('a');
      stream.on('data', (chunk) => {
        console.log(chunk.length);
      });
      break;
    }
  }

  const stream = new ArrayReader();
  // Both the first 'readable' and the final 'end' must fire exactly once.
  stream.once('readable', common.mustCall(onRead));
  stream.on('end', common.mustCall(() => {}));

}