summaryrefslogtreecommitdiff
path: root/cli/tools/repl.rs
diff options
context:
space:
mode:
authorBartek IwaƄczuk <biwanczuk@gmail.com>2021-05-26 17:47:33 +0200
committerGitHub <noreply@github.com>2021-05-26 17:47:33 +0200
commite9edd7e14d9d78f03c5f2e67fcc44e4dbaab8f2c (patch)
treeea6b4d3eb12e36c74d906b7bc683a18932f6916f /cli/tools/repl.rs
parent02a4e7dc7cce4df8f2e8e69d3aa4e2eed22e627d (diff)
refactor: Rewrite Inspector implementation (#10725)
This commit refactors implementation of inspector. The intention is to be able to move inspector implementation to "deno_core". Following things were done to make that possible: * "runtime/inspector.rs" was split into "runtime/inspector/mod.rs" and "runtime/inspector/server.rs", separating inspector implementation from Websocket server implementation. * "DenoInspector" was renamed to "JsRuntimeInspector" and reference to "server" was removed from the structure, making it independent of Websocket server used to connect to Chrome Devtools. * "WebsocketSession" was renamed to "InspectorSession" and rewritten in such a way that it's not tied to Websockets anymore; instead it accepts a pair of "proxy" channel ends that allow to integrate the session with different "transports". * "InspectorSession" was renamed to "LocalInspectorSession" to better indicate that it's an "in-memory" session and doesn't require Websocket server. It was also rewritten in such a way that it uses "InspectorSession" from previous point instead of reimplementing "v8::inspector::ChannelImpl" trait; this is done by using the "proxy" channels to communicate with the V8 session. Consequently "LocalInspectorSession" is now a frontend to "InspectorSession". This introduces a small inconvenience that awaiting responses for "LocalInspectorSession" requires to concurrently poll worker's event loop. This arises from the fact that "InspectorSession" is now owned by "JsRuntimeInspector", which in turn is owned by "Worker" or "WebWorker". To ease this situation "Worker::with_event_loop" helper method was added, that takes a future and concurrently polls it along with the event loop (using "tokio::select!" macro inside a loop).
Diffstat (limited to 'cli/tools/repl.rs')
-rw-r--r--cli/tools/repl.rs218
1 file changed, 102 insertions, 116 deletions
diff --git a/cli/tools/repl.rs b/cli/tools/repl.rs
index ce0879a85..ba69c9234 100644
--- a/cli/tools/repl.rs
+++ b/cli/tools/repl.rs
@@ -6,9 +6,10 @@ use crate::colors;
use crate::media_type::MediaType;
use crate::program_state::ProgramState;
use deno_core::error::AnyError;
+use deno_core::futures::FutureExt;
use deno_core::serde_json::json;
use deno_core::serde_json::Value;
-use deno_runtime::inspector::InspectorSession;
+use deno_runtime::inspector::LocalInspectorSession;
use deno_runtime::worker::MainWorker;
use rustyline::completion::Completer;
use rustyline::error::ReadlineError;
@@ -255,34 +256,9 @@ impl Highlighter for Helper {
}
}
-async fn post_message_and_poll(
- worker: &mut MainWorker,
- session: &mut InspectorSession,
- method: &str,
- params: Option<Value>,
-) -> Result<Value, AnyError> {
- let response = session.post_message(method, params);
- tokio::pin!(response);
-
- loop {
- tokio::select! {
- result = &mut response => {
- return result
- }
-
- _ = worker.run_event_loop() => {
- // A zero delay is long enough to yield the thread in order to prevent the loop from
- // running hot for messages that are taking longer to resolve like for example an
- // evaluation of top level await.
- tokio::time::sleep(tokio::time::Duration::from_millis(0)).await;
- }
- }
- }
-}
-
async fn read_line_and_poll(
worker: &mut MainWorker,
- session: &mut InspectorSession,
+ session: &mut LocalInspectorSession,
message_rx: &Receiver<(String, Option<Value>)>,
response_tx: &Sender<Result<Value, AnyError>>,
editor: Arc<Mutex<Editor<Helper>>>,
@@ -294,9 +270,10 @@ async fn read_line_and_poll(
loop {
for (method, params) in message_rx.try_iter() {
- response_tx
- .send(session.post_message(&method, params).await)
- .unwrap();
+ let result = worker
+ .with_event_loop(session.post_message(&method, params).boxed_local())
+ .await;
+ response_tx.send(result).unwrap();
}
// Because an inspector websocket client may choose to connect at anytime when we have an
@@ -353,44 +330,50 @@ Object.defineProperty(globalThis, "_error", {
async fn inject_prelude(
worker: &mut MainWorker,
- session: &mut InspectorSession,
+ session: &mut LocalInspectorSession,
context_id: u64,
) -> Result<(), AnyError> {
- post_message_and_poll(
- worker,
- session,
- "Runtime.evaluate",
- Some(json!({
- "expression": PRELUDE,
- "contextId": context_id,
- })),
- )
- .await?;
+ worker
+ .with_event_loop(
+ session
+ .post_message(
+ "Runtime.evaluate",
+ Some(json!({
+ "expression": PRELUDE,
+ "contextId": context_id,
+ })),
+ )
+ .boxed_local(),
+ )
+ .await?;
Ok(())
}
pub async fn is_closing(
worker: &mut MainWorker,
- session: &mut InspectorSession,
+ session: &mut LocalInspectorSession,
context_id: u64,
) -> Result<bool, AnyError> {
- let closed = post_message_and_poll(
- worker,
- session,
- "Runtime.evaluate",
- Some(json!({
- "expression": "(globalThis.closed)",
- "contextId": context_id,
- })),
- )
- .await?
- .get("result")
- .unwrap()
- .get("value")
- .unwrap()
- .as_bool()
- .unwrap();
+ let closed = worker
+ .with_event_loop(
+ session
+ .post_message(
+ "Runtime.evaluate",
+ Some(json!({
+ "expression": "(globalThis.closed)",
+ "contextId": context_id,
+ })),
+ )
+ .boxed_local(),
+ )
+ .await?
+ .get("result")
+ .unwrap()
+ .get("value")
+ .unwrap()
+ .as_bool()
+ .unwrap();
Ok(closed)
}
@@ -399,13 +382,13 @@ pub async fn run(
program_state: &ProgramState,
mut worker: MainWorker,
) -> Result<(), AnyError> {
- let mut session = worker.create_inspector_session();
+ let mut session = worker.create_inspector_session().await;
let history_file = program_state.dir.root.join("deno_history.txt");
- post_message_and_poll(&mut worker, &mut session, "Runtime.enable", None)
+ worker
+ .with_event_loop(session.post_message("Runtime.enable", None).boxed_local())
.await?;
-
// Enabling the runtime domain will always send trigger one executionContextCreated for each
// context the inspector knows about so we grab the execution context from that since
// our inspector does not support a default context (0 is an invalid context id).
@@ -471,15 +454,15 @@ pub async fn run(
line.clone()
};
- let evaluate_response = post_message_and_poll(
- &mut worker,
- &mut session,
- "Runtime.evaluate",
- Some(json!({
- "expression": format!("'use strict'; void 0;\n{}", &wrapped_line),
- "contextId": context_id,
- "replMode": true,
- })),
+ let evaluate_response = worker.with_event_loop(
+ session.post_message(
+ "Runtime.evaluate",
+ Some(json!({
+ "expression": format!("'use strict'; void 0;\n{}", &wrapped_line),
+ "contextId": context_id,
+ "replMode": true,
+ })),
+ ).boxed_local()
)
.await?;
@@ -489,17 +472,20 @@ pub async fn run(
if evaluate_response.get("exceptionDetails").is_some()
&& wrapped_line != line
{
- post_message_and_poll(
- &mut worker,
- &mut session,
- "Runtime.evaluate",
- Some(json!({
- "expression": format!("'use strict'; void 0;\n{}", &line),
- "contextId": context_id,
- "replMode": true,
- })),
- )
- .await?
+ worker
+ .with_event_loop(
+ session
+ .post_message(
+ "Runtime.evaluate",
+ Some(json!({
+ "expression": format!("'use strict'; void 0;\n{}", &line),
+ "contextId": context_id,
+ "replMode": true,
+ })),
+ )
+ .boxed_local(),
+ )
+ .await?
} else {
evaluate_response
};
@@ -515,48 +501,48 @@ pub async fn run(
evaluate_response.get("exceptionDetails");
if evaluate_exception_details.is_some() {
- post_message_and_poll(
- &mut worker,
- &mut session,
- "Runtime.callFunctionOn",
- Some(json!({
- "executionContextId": context_id,
- "functionDeclaration": "function (object) { Deno[Deno.internal].lastThrownError = object; }",
- "arguments": [
- evaluate_result,
- ],
- })),
- ).await?;
+ worker.with_event_loop(
+ session.post_message(
+ "Runtime.callFunctionOn",
+ Some(json!({
+ "executionContextId": context_id,
+ "functionDeclaration": "function (object) { Deno[Deno.internal].lastThrownError = object; }",
+ "arguments": [
+ evaluate_result,
+ ],
+ })),
+ ).boxed_local()
+ ).await?;
} else {
- post_message_and_poll(
- &mut worker,
- &mut session,
- "Runtime.callFunctionOn",
- Some(json!({
- "executionContextId": context_id,
- "functionDeclaration": "function (object) { Deno[Deno.internal].lastEvalResult = object; }",
- "arguments": [
- evaluate_result,
- ],
- })),
- ).await?;
+ worker.with_event_loop(
+ session.post_message(
+ "Runtime.callFunctionOn",
+ Some(json!({
+ "executionContextId": context_id,
+ "functionDeclaration": "function (object) { Deno[Deno.internal].lastEvalResult = object; }",
+ "arguments": [
+ evaluate_result,
+ ],
+ })),
+ ).boxed_local()
+ ).await?;
}
// TODO(caspervonb) we should investigate using previews here but to keep things
// consistent with the previous implementation we just get the preview result from
// Deno.inspectArgs.
let inspect_response =
- post_message_and_poll(
- &mut worker,
- &mut session,
- "Runtime.callFunctionOn",
- Some(json!({
- "executionContextId": context_id,
- "functionDeclaration": "function (object) { return Deno[Deno.internal].inspectArgs(['%o', object], { colors: !Deno.noColor }); }",
- "arguments": [
- evaluate_result,
- ],
- })),
+ worker.with_event_loop(
+ session.post_message(
+ "Runtime.callFunctionOn",
+ Some(json!({
+ "executionContextId": context_id,
+ "functionDeclaration": "function (object) { return Deno[Deno.internal].inspectArgs(['%o', object], { colors: !Deno.noColor }); }",
+ "arguments": [
+ evaluate_result,
+ ],
+ })),
+ ).boxed_local()
).await?;
let inspect_result = inspect_response.get("result").unwrap();