-rw-r--r--  Cargo.lock                               |  1
-rw-r--r--  cli/bench/async_ops.js                   |  4
-rw-r--r--  core/01_core.js                          | 16
-rw-r--r--  core/runtime.rs                          | 38
-rw-r--r--  ops/Cargo.toml                           |  1
-rw-r--r--  ops/lib.rs                               |  7
-rw-r--r--  ops/optimizer.rs                         |  1
-rw-r--r--  ops/optimizer_tests/async_nop.out        |  5
-rw-r--r--  ops/optimizer_tests/async_result.out     |  5
-rw-r--r--  ops/optimizer_tests/issue16934.out       |  5
-rw-r--r--  ops/optimizer_tests/issue16934_fast.out  |  5
11 files changed, 52 insertions, 36 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 76b7c4388..f11ed6cd3 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1206,6 +1206,7 @@ dependencies = [
  "lazy-regex",
  "once_cell",
  "pmutil",
+ "pretty_assertions",
  "prettyplease",
  "proc-macro-crate",
  "proc-macro2 1.0.56",
diff --git a/cli/bench/async_ops.js b/cli/bench/async_ops.js
index fe041efe8..fc04942be 100644
--- a/cli/bench/async_ops.js
+++ b/cli/bench/async_ops.js
@@ -16,5 +16,5 @@ async function bench(fun) {
   if (--total) queueMicrotask(() => bench(fun));
 }
 
-const { ops } = Deno[Deno.internal].core;
-bench(() => ops.op_void_async());
+const core = Deno[Deno.internal].core;
+bench(() => core.opAsync("op_void_async"));
diff --git a/core/01_core.js b/core/01_core.js
index ab54316e5..4cefb52e9 100644
--- a/core/01_core.js
+++ b/core/01_core.js
@@ -194,8 +194,9 @@
   function opAsync2(name, arg0, arg1) {
     const id = nextPromiseId++;
     let promise = PromisePrototypeThen(setPromise(id), unwrapOpResult);
+    let maybeResult;
     try {
-      ops[name](id, arg0, arg1);
+      maybeResult = ops[name](id, arg0, arg1);
     } catch (err) {
       // Cleanup the just-created promise
       getPromise(id);
@@ -204,14 +205,20 @@
     }
     promise = handleOpCallTracing(name, id, promise);
     promise[promiseIdSymbol] = id;
+    if (typeof maybeResult !== "undefined") {
+      const promise = getPromise(id);
+      promise.resolve(maybeResult);
+    }
+
     return promise;
   }
 
   function opAsync(name, ...args) {
     const id = nextPromiseId++;
     let promise = PromisePrototypeThen(setPromise(id), unwrapOpResult);
+    let maybeResult;
     try {
-      ops[name](id, ...new SafeArrayIterator(args));
+      maybeResult = ops[name](id, ...new SafeArrayIterator(args));
     } catch (err) {
       // Cleanup the just-created promise
       getPromise(id);
@@ -220,6 +227,11 @@
     }
     promise = handleOpCallTracing(name, id, promise);
     promise[promiseIdSymbol] = id;
+    if (typeof maybeResult !== "undefined") {
+      const promise = getPromise(id);
+      promise.resolve(maybeResult);
+    }
+
     return promise;
   }
 
diff --git a/core/runtime.rs b/core/runtime.rs
index ef65d2192..b03f3f7d0 100644
--- a/core/runtime.rs
+++ b/core/runtime.rs
@@ -2437,12 +2437,12 @@ pub fn queue_fast_async_op(
 }
 
 #[inline]
-pub fn queue_async_op(
+pub fn queue_async_op<'s>(
   ctx: &OpCtx,
-  scope: &mut v8::HandleScope,
+  scope: &'s mut v8::HandleScope,
   deferred: bool,
   op: impl Future<Output = (RealmIdx, PromiseId, OpId, OpResult)> + 'static,
-) {
+) -> Option<v8::Local<'s, v8::Value>> {
   let runtime_state = match ctx.runtime_state.upgrade() {
     Some(rc_state) => rc_state,
     // atleast 1 Rc is held by the JsRuntime.
@@ -2459,31 +2459,13 @@ pub fn queue_async_op(
   );
 
   match OpCall::eager(op) {
-    // This calls promise.resolve() before the control goes back to userland JS. It works something
-    // along the lines of:
-    //
-    // function opresolve(promiseId, ...) {
-    //   getPromise(promiseId).resolve(...);
-    // }
-    // const p = setPromise();
-    // op.op_async(promiseId, ...); // Calls `opresolve`
-    // return p;
-    EagerPollResult::Ready((_, promise_id, op_id, mut resp)) if !deferred => {
-      let context_state_rc = JsRealm::state_from_scope(scope);
-      let context_state = context_state_rc.borrow();
-
-      let args = &[
-        v8::Integer::new(scope, promise_id).into(),
-        resp.to_v8(scope).unwrap(),
-      ];
-
+    // If the result is ready we'll just return it straight to the caller, so
+    // we don't have to invoke a JS callback to respond. // This works under the
+    // assumption that `()` return value is serialized as `null`.
+    EagerPollResult::Ready((_, _, op_id, mut resp)) if !deferred => {
+      let resp = resp.to_v8(scope).unwrap();
       ctx.state.borrow_mut().tracker.track_async_completed(op_id);
-
-      let tc_scope = &mut v8::TryCatch::new(scope);
-      let js_recv_cb =
-        context_state.js_recv_cb.as_ref().unwrap().open(tc_scope);
-      let this = v8::undefined(tc_scope).into();
-      js_recv_cb.call(tc_scope, this, args);
+      return Some(resp);
     }
     EagerPollResult::Ready(op) => {
       let ready = OpCall::ready(op);
@@ -2497,6 +2479,8 @@ pub fn queue_async_op(
       state.have_unpolled_ops = true;
     }
   }
+
+  None
 }
 
 #[cfg(test)]
diff --git a/ops/Cargo.toml b/ops/Cargo.toml
index b8bbfd07d..42ce3b485 100644
--- a/ops/Cargo.toml
+++ b/ops/Cargo.toml
@@ -25,6 +25,7 @@ regex.workspace = true
 syn.workspace = true
 
 [dev-dependencies]
+pretty_assertions.workspace = true
 prettyplease = "0.1.21"
 testing_macros = "0.2.7"
 trybuild = "1.0.71"
diff --git a/ops/lib.rs b/ops/lib.rs
index 41f69d9fc..7bf962091 100644
--- a/ops/lib.rs
+++ b/ops/lib.rs
@@ -319,11 +319,15 @@ fn codegen_v8_async(
     };
 
     #pre_result
-    #core::_ops::queue_async_op(ctx, scope, #deferred, async move {
+    let maybe_response = #core::_ops::queue_async_op(ctx, scope, #deferred, async move {
      let result = #result_fut
      #result_wrapper
      (realm_idx, promise_id, op_id, #core::_ops::to_op_result(get_class, result))
    });
+
+    if let Some(response) = maybe_response {
+      rv.set(response);
+    }
   }
 }
 
@@ -901,6 +905,7 @@ fn exclude_lifetime_params(
 mod tests {
   use crate::Attributes;
   use crate::Op;
+  use pretty_assertions::assert_eq;
   use std::path::PathBuf;
 
   #[testing_macros::fixture("optimizer_tests/**/*.rs")]
diff --git a/ops/optimizer.rs b/ops/optimizer.rs
index cc266c716..09d3d5be6 100644
--- a/ops/optimizer.rs
+++ b/ops/optimizer.rs
@@ -938,6 +938,7 @@ mod tests {
   use super::*;
   use crate::Attributes;
   use crate::Op;
+  use pretty_assertions::assert_eq;
   use std::path::PathBuf;
   use syn::parse_quote;
 
diff --git a/ops/optimizer_tests/async_nop.out b/ops/optimizer_tests/async_nop.out
index 5d73f2343..7782b5970 100644
--- a/ops/optimizer_tests/async_nop.out
+++ b/ops/optimizer_tests/async_nop.out
@@ -79,7 +79,7 @@ impl op_void_async {
             state.tracker.track_async(op_id);
             state.get_error_class_fn
         };
-        deno_core::_ops::queue_async_op(
+        let maybe_response = deno_core::_ops::queue_async_op(
             ctx,
             scope,
             false,
@@ -94,6 +94,9 @@ impl op_void_async {
                 )
             },
         );
+        if let Some(response) = maybe_response {
+            rv.set(response);
+        }
     }
 }
 #[allow(clippy::too_many_arguments)]
diff --git a/ops/optimizer_tests/async_result.out b/ops/optimizer_tests/async_result.out
index f820687cd..c3bb433f1 100644
--- a/ops/optimizer_tests/async_result.out
+++ b/ops/optimizer_tests/async_result.out
@@ -90,7 +90,7 @@ impl op_async_result {
             state.tracker.track_async(op_id);
             state.get_error_class_fn
         };
-        deno_core::_ops::queue_async_op(
+        let maybe_response = deno_core::_ops::queue_async_op(
             ctx,
             scope,
             false,
@@ -104,6 +104,9 @@ impl op_async_result {
                 )
             },
         );
+        if let Some(response) = maybe_response {
+            rv.set(response);
+        }
     }
 }
 #[allow(clippy::too_many_arguments)]
diff --git a/ops/optimizer_tests/issue16934.out b/ops/optimizer_tests/issue16934.out
index f8acf5712..68f59ef43 100644
--- a/ops/optimizer_tests/issue16934.out
+++ b/ops/optimizer_tests/issue16934.out
@@ -84,7 +84,7 @@ impl send_stdin {
             state.tracker.track_async(op_id);
             state.get_error_class_fn
         };
-        deno_core::_ops::queue_async_op(
+        let maybe_response = deno_core::_ops::queue_async_op(
             ctx,
             scope,
             false,
@@ -102,5 +102,8 @@ impl send_stdin {
                 )
             },
         );
+        if let Some(response) = maybe_response {
+            rv.set(response);
+        }
     }
 }
diff --git a/ops/optimizer_tests/issue16934_fast.out b/ops/optimizer_tests/issue16934_fast.out
index 0cdc3eb25..7a4a39f34 100644
--- a/ops/optimizer_tests/issue16934_fast.out
+++ b/ops/optimizer_tests/issue16934_fast.out
@@ -82,7 +82,7 @@ impl send_stdin {
             state.tracker.track_async(op_id);
             state.get_error_class_fn
         };
-        deno_core::_ops::queue_async_op(
+        let maybe_response = deno_core::_ops::queue_async_op(
             ctx,
             scope,
             false,
@@ -100,5 +100,8 @@ impl send_stdin {
                 )
             },
        );
+        if let Some(response) = maybe_response {
+            rv.set(response);
+        }
     }
 }
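Note: the sketch below is a self-contained model of the control flow this diff introduces, using hypothetical names and types rather than the real deno_core API. The new `queue_async_op` reports an eagerly-completed op by returning the serialized value directly; the generated op glue sets it as the synchronous return value (`rv.set(response)`), and the JS `opAsync`/`opAsync2` wrappers then resolve the promise from that value instead of waiting for the `opresolve` callback.

use std::collections::VecDeque;

enum EagerPoll<T> {
  // The op completed during the synchronous call.
  Ready(T),
  // The op is still pending and must be driven by the event loop.
  Pending(Box<dyn FnOnce() -> T>),
}

struct OpQueue<T> {
  pending: VecDeque<Box<dyn FnOnce() -> T>>,
}

impl<T> OpQueue<T> {
  fn new() -> Self {
    Self { pending: VecDeque::new() }
  }

  // Mirrors the spirit of the new `queue_async_op` signature: `Some(value)`
  // means "already done, hand it back synchronously"; `None` means "queued".
  fn queue_async_op(&mut self, poll: EagerPoll<T>) -> Option<T> {
    match poll {
      EagerPoll::Ready(value) => Some(value),
      EagerPoll::Pending(op) => {
        self.pending.push_back(op);
        None
      }
    }
  }
}

fn main() {
  let mut queue = OpQueue::new();

  // Eagerly ready: the generated glue would do `rv.set(response)` here, and
  // the JS wrapper resolves the promise with the returned value.
  if let Some(response) = queue.queue_async_op(EagerPoll::Ready(42)) {
    println!("eager result: {response}");
  }

  // Not ready: nothing to return now; the result arrives later via the event loop.
  let pending_op: Box<dyn FnOnce() -> i32> = Box::new(|| 7);
  assert!(queue.queue_async_op(EagerPoll::Pending(pending_op)).is_none());
  assert_eq!(queue.pending.len(), 1);
}

This also appears to be why cli/bench/async_ops.js switches to `core.opAsync("op_void_async")`: the promise bookkeeping, including the new eager resolution, lives in the `opAsync` wrapper rather than in a JS callback invoked from Rust.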