summaryrefslogtreecommitdiff
path: root/ext/http/00_serve.js
diff options
context:
space:
mode:
author: Matt Mastracci <matthew@mastracci.com> 2023-05-30 18:02:52 -0600
committer: GitHub <noreply@github.com> 2023-05-30 18:02:52 -0600
commit 489d2e81c3e53ed689f51dc9d76c007d487aa101 (patch)
tree 529a0221076772b74bac4acb54918b1b79bceb75 /ext/http/00_serve.js
parent 047edf6a75133decf069e6118634caa76a455d7c (diff)
perf(ext/http): Add a sync phase to http serving (#19321)
Under heavy load, we often have requests queued up that don't need an async call to retrieve. We can use a fast-path sync op to drain this set of ready requests, and then fall back to the async op once we run out of work. This is a 0.5–1% bump in req/s on an M2 Mac. About 90% of the handlers go through this sync phase (based on a simple instrumentation that is not included in this PR) and skip the async machinery entirely.
Diffstat (limited to 'ext/http/00_serve.js')
-rw-r--r-- ext/http/00_serve.js | 16
1 file changed, 15 insertions(+), 1 deletion(-)
diff --git a/ext/http/00_serve.js b/ext/http/00_serve.js
index 2fb36d044..e3926280b 100644
--- a/ext/http/00_serve.js
+++ b/ext/http/00_serve.js
@@ -64,6 +64,7 @@ const {
op_http_set_response_trailers,
op_http_upgrade_raw,
op_http_upgrade_websocket_next,
+ op_http_try_wait,
op_http_wait,
} = core.generateAsyncOpHandler(
"op_http_get_request_headers",
@@ -80,6 +81,7 @@ const {
"op_http_set_response_trailers",
"op_http_upgrade_raw",
"op_http_upgrade_websocket_next",
+ "op_http_try_wait",
"op_http_wait",
);
const _upgraded = Symbol("_upgraded");
@@ -558,7 +560,7 @@ function mapToCallback(responseBodies, context, signal, callback, onError) {
}
}
- // Attempt to response quickly to this request, otherwise extract the stream
+ // Attempt to respond quickly to this request, otherwise extract the stream
const stream = fastSyncResponseOrStream(req, inner.body);
if (stream !== null) {
// Handle the stream asynchronously
@@ -671,6 +673,18 @@ function serve(arg1, arg2) {
const rid = context.serverRid;
let req;
try {
+ // Attempt to pull as many requests out of the queue as possible before awaiting. This API is
+ // a synchronous, non-blocking API that returns u32::MAX if anything goes wrong.
+ while ((req = op_http_try_wait(rid)) !== 0xffffffff) {
+ PromisePrototypeCatch(callback(req), (error) => {
+ // Abnormal exit
+ console.error(
+ "Terminating Deno.serve loop due to unexpected error",
+ error,
+ );
+ context.close();
+ });
+ }
currentPromise = op_http_wait(rid);
if (!ref) {
core.unrefOp(currentPromise[promiseIdSymbol]);