summaryrefslogtreecommitdiff
path: root/std/http/server.ts
diff options
context:
space:
mode:
Diffstat (limited to 'std/http/server.ts')
-rw-r--r--  std/http/server.ts  200
1 files changed, 39 insertions, 161 deletions
diff --git a/std/http/server.ts b/std/http/server.ts
index 9e9cde016..e7d6bd598 100644
--- a/std/http/server.ts
+++ b/std/http/server.ts
@@ -1,5 +1,5 @@
// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
-const { listen, listenTLS, copy, toAsyncIterator } = Deno;
+const { listen, listenTLS, copy } = Deno;
type Listener = Deno.Listener;
type Conn = Deno.Conn;
type Reader = Deno.Reader;
@@ -9,6 +9,13 @@ import { TextProtoReader } from "../textproto/mod.ts";
import { STATUS_TEXT } from "./http_status.ts";
import { assert } from "../testing/asserts.ts";
import { deferred, Deferred, MuxAsyncIterator } from "../util/async.ts";
+import {
+ bodyReader,
+ chunkedBodyReader,
+ writeChunkedBody,
+ writeTrailers,
+ emptyReader
+} from "./io.ts";
const encoder = new TextEncoder();
@@ -30,64 +37,6 @@ export function setContentLength(r: Response): void {
}
}
-async function writeChunkedBody(w: Writer, r: Reader): Promise<void> {
- const writer = BufWriter.create(w);
-
- for await (const chunk of toAsyncIterator(r)) {
- if (chunk.byteLength <= 0) continue;
- const start = encoder.encode(`${chunk.byteLength.toString(16)}\r\n`);
- const end = encoder.encode("\r\n");
- await writer.write(start);
- await writer.write(chunk);
- await writer.write(end);
- }
-
- const endChunk = encoder.encode("0\r\n\r\n");
- await writer.write(endChunk);
-}
-const kProhibitedTrailerHeaders = [
- "transfer-encoding",
- "content-length",
- "trailer"
-];
-
-/** write trailer headers to writer. it mostly should be called after writeResponse */
-export async function writeTrailers(
- w: Writer,
- headers: Headers,
- trailers: Headers
-): Promise<void> {
- const trailer = headers.get("trailer");
- if (trailer === null) {
- throw new Error('response headers must have "trailer" header field');
- }
- const transferEncoding = headers.get("transfer-encoding");
- if (transferEncoding === null || !transferEncoding.match(/^chunked/)) {
- throw new Error(
- `trailer headers is only allowed for "transfer-encoding: chunked": got "${transferEncoding}"`
- );
- }
- const writer = BufWriter.create(w);
- const trailerHeaderFields = trailer
- .split(",")
- .map(s => s.trim().toLowerCase());
- for (const f of trailerHeaderFields) {
- assert(
- !kProhibitedTrailerHeaders.includes(f),
- `"${f}" is prohibited for trailer header`
- );
- }
- for (const [key, value] of trailers) {
- assert(
- trailerHeaderFields.includes(key),
- `Not trailer header field: ${key}`
- );
- await writer.write(encoder.encode(`${key}: ${value}\r\n`));
- }
- await writer.write(encoder.encode("\r\n"));
- await writer.flush();
-}
-
export async function writeResponse(w: Writer, r: Response): Promise<void> {
const protoMajor = 1;
const protoMinor = 1;
@@ -138,17 +87,6 @@ export async function writeResponse(w: Writer, r: Response): Promise<void> {
await writer.flush();
}
-export class ServerRequestBody implements Reader {
- constructor(private it: AsyncIterator<number, undefined, Uint8Array>) {}
- async read(p: Uint8Array): Promise<number | Deno.EOF> {
- const res = await this.it.next(p);
- if (res.done) {
- return Deno.EOF;
- }
- return res.value;
- }
-}
-
export class ServerRequest {
url!: string;
method!: string;
@@ -184,7 +122,7 @@ export class ServerRequest {
return this._contentLength;
}
- private _body: ServerRequestBody | null = null;
+ private _body: Deno.Reader | null = null;
/**
* Body of the request.
@@ -200,100 +138,28 @@ export class ServerRequest {
* bufSlice = bufSlice.subarray(nread);
* }
*/
- get body(): ServerRequestBody {
+ get body(): Deno.Reader {
if (!this._body) {
- const stream = this._bodyStream();
- stream.next(); // drop dummy such that first read is not empty.
- this._body = new ServerRequestBody(stream);
- }
- return this._body;
- }
-
- /**
- * Internal: actually reading body. Each step, buf to use is passed
- * in through yield result.
- * Returns on no more data to read or error.
- */
- private async *_bodyStream(): AsyncIterator<number, undefined, Uint8Array> {
- let buf = yield 0; // dummy yield to retrieve user provided buf.
- if (this.headers.has("content-length")) {
- const len = this.contentLength;
- if (len === null) {
- return;
- }
- let rr = await this.r.read(buf);
- let nread = rr === Deno.EOF ? 0 : rr;
- let nreadTotal = nread;
- while (rr !== Deno.EOF && nreadTotal < len) {
- buf = yield nread;
- rr = await this.r.read(buf);
- nread = rr === Deno.EOF ? 0 : rr;
- nreadTotal += nread;
- }
- yield nread;
- } else {
- const transferEncoding = this.headers.get("transfer-encoding");
- if (transferEncoding) {
- const parts = transferEncoding
- .split(",")
- .map((e): string => e.trim().toLowerCase());
- if (parts.includes("chunked")) {
- // Based on https://tools.ietf.org/html/rfc2616#section-19.4.6
- const tp = new TextProtoReader(this.r);
- let line = await tp.readLine();
- if (line === Deno.EOF) throw new UnexpectedEOFError();
- // TODO: handle chunk extension
- const [chunkSizeString] = line.split(";");
- let chunkSize = parseInt(chunkSizeString, 16);
- if (Number.isNaN(chunkSize) || chunkSize < 0) {
- throw new Error("Invalid chunk size");
- }
- while (chunkSize > 0) {
- let currChunkOffset = 0;
- // Since given readBuffer might be smaller, loop.
- while (currChunkOffset < chunkSize) {
- // Try to be as large as chunkSize. Might be smaller though.
- const bufferToFill = buf.subarray(0, chunkSize);
- if ((await this.r.readFull(bufferToFill)) === Deno.EOF) {
- throw new UnexpectedEOFError();
- }
- currChunkOffset += bufferToFill.length;
- buf = yield bufferToFill.length;
- }
- await this.r.readLine(); // Consume \r\n
- line = await tp.readLine();
- if (line === Deno.EOF) throw new UnexpectedEOFError();
- chunkSize = parseInt(line, 16);
- }
- const entityHeaders = await tp.readMIMEHeader();
- if (entityHeaders !== Deno.EOF) {
- for (const [k, v] of entityHeaders) {
- this.headers.set(k, v);
- }
- }
- /* Pseudo code from https://tools.ietf.org/html/rfc2616#section-19.4.6
- length := 0
- read chunk-size, chunk-extension (if any) and CRLF
- while (chunk-size > 0) {
- read chunk-data and CRLF
- append chunk-data to entity-body
- length := length + chunk-size
- read chunk-size and CRLF
- }
- read entity-header
- while (entity-header not empty) {
- append entity-header to existing header fields
- read entity-header
- }
- Content-Length := length
- Remove "chunked" from Transfer-Encoding
- */
- return; // Must return here to avoid fall through
+ if (this.contentLength != null) {
+ this._body = bodyReader(this.contentLength, this.r);
+ } else {
+ const transferEncoding = this.headers.get("transfer-encoding");
+ if (transferEncoding != null) {
+ const parts = transferEncoding
+ .split(",")
+ .map((e): string => e.trim().toLowerCase());
+ assert(
+ parts.includes("chunked"),
+ 'transfer-encoding must include "chunked" if content-length is not set'
+ );
+ this._body = chunkedBodyReader(this.headers, this.r);
+ } else {
+ // Neither content-length nor transfer-encoding: chunked
+ this._body = emptyReader();
}
- // TODO: handle other transfer-encoding types
}
- // Otherwise... Do nothing
}
+ return this._body;
}
async respond(r: Response): Promise<void> {
@@ -316,6 +182,16 @@ export class ServerRequest {
throw err;
}
}
+
+ private finalized = false;
+ async finalize(): Promise<void> {
+ if (this.finalized) return;
+ // Consume unread body
+ const body = this.body;
+ const buf = new Uint8Array(1024);
+ while ((await body.read(buf)) !== Deno.EOF) {}
+ this.finalized = true;
+ }
}
function fixLength(req: ServerRequest): void {
@@ -462,6 +338,8 @@ export class Server implements AsyncIterable<ServerRequest> {
// req.done implies this connection already closed, so we can just return.
return;
}
+ // Consume unread body and trailers if receiver didn't consume those data
+ await req.finalize();
}
if (req === Deno.EOF) {