summaryrefslogtreecommitdiff
path: root/net/http_test.ts
diff options
context:
space:
mode:
Diffstat (limited to 'net/http_test.ts')
-rw-r--r--net/http_test.ts172
1 files changed, 171 insertions, 1 deletions
diff --git a/net/http_test.ts b/net/http_test.ts
index cdb7f8303..97d07a5b4 100644
--- a/net/http_test.ts
+++ b/net/http_test.ts
@@ -18,13 +18,16 @@ import {
Response
} from "./http";
import { Buffer } from "deno";
-import { BufWriter } from "./bufio";
+import { BufWriter, BufReader } from "./bufio";
interface ResponseTest {
response: Response;
raw: string;
}
+const enc = new TextEncoder();
+const dec = new TextDecoder();
+
const responseTests: ResponseTest[] = [
// Default response
{
@@ -56,3 +59,170 @@ test(async function responseWrite() {
assertEqual(buf.toString(), testCase.raw);
}
});
+
+// Verifies ServerRequest.body() reads exactly the number of bytes declared
+// in the Content-Length header from the request's BufReader.
+// NOTE(review): ServerRequest is not visible in the import hunk above —
+// presumably imported from "./http"; confirm against the full file.
+test(async function requestBodyWithContentLength() {
+  // Short body: fits well inside the reader's internal buffer.
+  {
+    const req = new ServerRequest();
+    req.headers = new Headers();
+    req.headers.set("content-length", "5");
+    const buf = new Buffer(enc.encode("Hello"));
+    req.r = new BufReader(buf);
+    const body = dec.decode(await req.body());
+    assertEqual(body, "Hello");
+  }
+
+  // Larger than internal buf
+  {
+    // 5000-byte body; also exercises case-insensitive header lookup
+    // ("Content-Length" here vs "content-length" above).
+    const longText = "1234\n".repeat(1000);
+    const req = new ServerRequest();
+    req.headers = new Headers();
+    req.headers.set("Content-Length", "5000");
+    const buf = new Buffer(enc.encode(longText));
+    req.r = new BufReader(buf);
+    const body = dec.decode(await req.body());
+    assertEqual(body, longText);
+  }
+});
+
+// Verifies ServerRequest.body() decodes a chunked (Transfer-Encoding)
+// request body: each chunk is encoded as "<size-hex>\r\n<data>\r\n" and the
+// stream is terminated by the zero-length chunk "0\r\n\r\n".
+test(async function requestBodyWithTransferEncoding() {
+  // Short body: a single chunk (5 bytes < maxChunkSize).
+  {
+    const shortText = "Hello";
+    const req = new ServerRequest();
+    req.headers = new Headers();
+    req.headers.set("transfer-encoding", "chunked");
+    let chunksData = "";
+    let chunkOffset = 0;
+    const maxChunkSize = 70;
+    // Encode the text as successive chunks of at most maxChunkSize bytes.
+    while (chunkOffset < shortText.length) {
+      const chunkSize = Math.min(maxChunkSize, shortText.length - chunkOffset);
+      chunksData += `${chunkSize.toString(16)}\r\n${shortText.substr(
+        chunkOffset,
+        chunkSize
+      )}\r\n`;
+      chunkOffset += chunkSize;
+    }
+    // Final zero-length chunk marks end of the chunked body.
+    chunksData += "0\r\n\r\n";
+    const buf = new Buffer(enc.encode(chunksData));
+    req.r = new BufReader(buf);
+    const body = dec.decode(await req.body());
+    assertEqual(body, shortText);
+  }
+
+  // Larger than internal buf
+  {
+    // 5000-byte body split into many 70-byte chunks, so decoding must
+    // span multiple reads of the BufReader's internal buffer.
+    const longText = "1234\n".repeat(1000);
+    const req = new ServerRequest();
+    req.headers = new Headers();
+    req.headers.set("transfer-encoding", "chunked");
+    let chunksData = "";
+    let chunkOffset = 0;
+    const maxChunkSize = 70;
+    while (chunkOffset < longText.length) {
+      const chunkSize = Math.min(maxChunkSize, longText.length - chunkOffset);
+      chunksData += `${chunkSize.toString(16)}\r\n${longText.substr(
+        chunkOffset,
+        chunkSize
+      )}\r\n`;
+      chunkOffset += chunkSize;
+    }
+    chunksData += "0\r\n\r\n";
+    const buf = new Buffer(enc.encode(chunksData));
+    req.r = new BufReader(buf);
+    const body = dec.decode(await req.body());
+    assertEqual(body, longText);
+  }
+});
+
+// Verifies ServerRequest.bodyStream() yields the Content-Length body as an
+// async-iterable sequence of byte chunks; the test reassembles the chunks
+// by offset and compares each against the corresponding source substring.
+// NOTE(review): dec.decode() is applied per chunk, which assumes chunk
+// boundaries never split a multi-byte UTF-8 sequence — safe here because
+// the test bodies are pure ASCII.
+test(async function requestBodyStreamWithContentLength() {
+  // Short body: may arrive as a single chunk.
+  {
+    const shortText = "Hello";
+    const req = new ServerRequest();
+    req.headers = new Headers();
+    req.headers.set("content-length", "" + shortText.length);
+    const buf = new Buffer(enc.encode(shortText));
+    req.r = new BufReader(buf);
+    const it = await req.bodyStream();
+    let offset = 0;
+    for await (const chunk of it) {
+      const s = dec.decode(chunk);
+      assertEqual(shortText.substr(offset, s.length), s);
+      offset += s.length;
+    }
+  }
+
+  // Larger than internal buf
+  {
+    // 5000-byte body forces the stream to yield multiple chunks.
+    const longText = "1234\n".repeat(1000);
+    const req = new ServerRequest();
+    req.headers = new Headers();
+    req.headers.set("Content-Length", "5000");
+    const buf = new Buffer(enc.encode(longText));
+    req.r = new BufReader(buf);
+    const it = await req.bodyStream();
+    let offset = 0;
+    for await (const chunk of it) {
+      const s = dec.decode(chunk);
+      assertEqual(longText.substr(offset, s.length), s);
+      offset += s.length;
+    }
+  }
+});
+
+// Verifies ServerRequest.bodyStream() decodes a chunked (Transfer-Encoding)
+// body and yields the payload incrementally; chunks are validated against
+// the source text by running offset.
+// NOTE(review): the streamed chunks need not match the wire chunks 1:1 —
+// this test only checks the concatenation is correct, not chunk sizes.
+test(async function requestBodyStreamWithTransferEncoding() {
+  // Short body: single wire chunk (5 bytes < maxChunkSize).
+  {
+    const shortText = "Hello";
+    const req = new ServerRequest();
+    req.headers = new Headers();
+    req.headers.set("transfer-encoding", "chunked");
+    let chunksData = "";
+    let chunkOffset = 0;
+    const maxChunkSize = 70;
+    // Encode as "<size-hex>\r\n<data>\r\n" chunks of at most 70 bytes.
+    while (chunkOffset < shortText.length) {
+      const chunkSize = Math.min(maxChunkSize, shortText.length - chunkOffset);
+      chunksData += `${chunkSize.toString(16)}\r\n${shortText.substr(
+        chunkOffset,
+        chunkSize
+      )}\r\n`;
+      chunkOffset += chunkSize;
+    }
+    // Zero-length terminator chunk.
+    chunksData += "0\r\n\r\n";
+    const buf = new Buffer(enc.encode(chunksData));
+    req.r = new BufReader(buf);
+    const it = await req.bodyStream();
+    let offset = 0;
+    for await (const chunk of it) {
+      const s = dec.decode(chunk);
+      assertEqual(shortText.substr(offset, s.length), s);
+      offset += s.length;
+    }
+  }
+
+  // Larger than internal buf
+  {
+    // 5000-byte body split into many 70-byte wire chunks, so streaming
+    // must cross the reader's internal buffer boundary repeatedly.
+    const longText = "1234\n".repeat(1000);
+    const req = new ServerRequest();
+    req.headers = new Headers();
+    req.headers.set("transfer-encoding", "chunked");
+    let chunksData = "";
+    let chunkOffset = 0;
+    const maxChunkSize = 70;
+    while (chunkOffset < longText.length) {
+      const chunkSize = Math.min(maxChunkSize, longText.length - chunkOffset);
+      chunksData += `${chunkSize.toString(16)}\r\n${longText.substr(
+        chunkOffset,
+        chunkSize
+      )}\r\n`;
+      chunkOffset += chunkSize;
+    }
+    chunksData += "0\r\n\r\n";
+    const buf = new Buffer(enc.encode(chunksData));
+    req.r = new BufReader(buf);
+    const it = await req.bodyStream();
+    let offset = 0;
+    for await (const chunk of it) {
+      const s = dec.decode(chunk);
+      assertEqual(longText.substr(offset, s.length), s);
+      offset += s.length;
+    }
+  }
+});