diff options
author | Nathan Whitaker <17734409+nathanwhit@users.noreply.github.com> | 2024-05-14 18:51:48 -0700 |
---|---|---|
committer | GitHub <noreply@github.com> | 2024-05-15 01:51:48 +0000 |
commit | 36d877be4a220cb30ddf69d43c386ae8d15f4b32 (patch) | |
tree | 08bdf14935f68928452a5b9665f3c65e6e9670b7 /tests/util | |
parent | 1a788b58a0e80c4504a0fdf5d47db41c46dc8d37 (diff) |
perf(lsp): Cache semantic tokens for open documents (#23799)
VScode will typically send a `textDocument/semanticTokens/full` request
followed by `textDocument/semanticTokens/range`, and occasionally
request semantic tokens even when we know nothing has changed. Semantic
tokens also get refreshed on each change. Computing semantic tokens is
relatively heavy in TSC, so we should avoid it as much as possible.
Caches the semantic tokens for open documents, to avoid making TSC do
unnecessary work. Results in a noticeable improvement in local
benchmarking.
before:
```
Starting Deno benchmark
-> Start benchmarking lsp
- Simple Startup/Shutdown
(10 runs, mean: 383ms)
- Big Document/Several Edits
(5 runs, mean: 1079ms)
- Find/Replace
(10 runs, mean: 59ms)
- Code Lens
(10 runs, mean: 440ms)
- deco-cx/apps Multiple Edits + Navigation
(5 runs, mean: 9921ms)
<- End benchmarking lsp
```
after:
```
Starting Deno benchmark
-> Start benchmarking lsp
- Simple Startup/Shutdown
(10 runs, mean: 395ms)
- Big Document/Several Edits
(5 runs, mean: 1024ms)
- Find/Replace
(10 runs, mean: 56ms)
- Code Lens
(10 runs, mean: 438ms)
- deco-cx/apps Multiple Edits + Navigation
(5 runs, mean: 8927ms)
<- End benchmarking lsp
```
Diffstat (limited to 'tests/util')
-rw-r--r-- | tests/util/server/src/lsp.rs | 114 |
1 files changed, 111 insertions, 3 deletions
diff --git a/tests/util/server/src/lsp.rs b/tests/util/server/src/lsp.rs index 68cdc3cd1..ed9dd302f 100644 --- a/tests/util/server/src/lsp.rs +++ b/tests/util/server/src/lsp.rs @@ -470,6 +470,7 @@ pub struct LspClientBuilder { use_diagnostic_sync: bool, deno_dir: TempDir, envs: HashMap<OsString, OsString>, + collect_perf: bool, } impl LspClientBuilder { @@ -488,6 +489,7 @@ impl LspClientBuilder { use_diagnostic_sync: true, deno_dir, envs: Default::default(), + collect_perf: false, } } @@ -514,6 +516,15 @@ impl LspClientBuilder { self } + /// Whether to collect performance records (marks / measures, as emitted + /// by the lsp in the `performance` module). + /// Implies `capture_stderr`. + pub fn collect_perf(mut self) -> Self { + self.capture_stderr = true; + self.collect_perf = true; + self + } + /// Whether to use the synchronization messages to better sync diagnostics /// between the test client and server. pub fn use_diagnostic_sync(mut self, value: bool) -> Self { @@ -577,10 +588,12 @@ impl LspClientBuilder { let stdin = child.stdin.take().unwrap(); let writer = io::BufWriter::new(stdin); - let stderr_lines_rx = if self.capture_stderr { + let (stderr_lines_rx, perf_rx) = if self.capture_stderr { let stderr = child.stderr.take().unwrap(); let print_stderr = self.print_stderr; let (tx, rx) = mpsc::channel::<String>(); + let (perf_tx, perf_rx) = + self.collect_perf.then(mpsc::channel::<PerfRecord>).unzip(); std::thread::spawn(move || { let stderr = BufReader::new(stderr); for line in stderr.lines() { @@ -589,6 +602,22 @@ impl LspClientBuilder { if print_stderr { eprintln!("{}", line); } + if let Some(tx) = perf_tx.as_ref() { + // look for perf records + if line.starts_with('{') && line.ends_with("},") { + match serde_json::from_str::<PerfRecord>( + line.trim_end_matches(','), + ) { + Ok(record) => { + tx.send(record).unwrap(); + continue; + } + Err(err) => { + eprintln!("failed to parse perf record: {:#}", err); + } + } + } + } tx.send(line).unwrap(); } Err(err) 
=> { @@ -597,9 +626,9 @@ impl LspClientBuilder { } } }); - Some(rx) + (Some(rx), perf_rx) } else { - None + (None, None) }; Ok(LspClient { @@ -613,10 +642,76 @@ impl LspClientBuilder { stderr_lines_rx, config: json!("{}"), supports_workspace_configuration: false, + perf: perf_rx.map(Perf::new), }) } } +#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)] +#[serde(rename_all = "camelCase", tag = "type")] +/// A performance record, emitted by the `lsp::performance` +/// module. +pub enum PerfRecord { + Mark(PerfMark), + Measure(PerfMeasure), +} + +#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct PerfMeasure { + name: String, + count: u32, + duration: f64, +} + +#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct PerfMark { + name: String, + #[serde(default)] + count: Option<u32>, + #[serde(default)] + args: Option<Value>, +} + +#[derive(Debug)] +pub struct Perf { + records: Vec<PerfRecord>, + measures_counts: HashMap<String, u32>, + rx: mpsc::Receiver<PerfRecord>, +} + +impl Perf { + fn new(rx: mpsc::Receiver<PerfRecord>) -> Self { + Self { + records: Default::default(), + measures_counts: Default::default(), + rx, + } + } + fn drain(&mut self) { + while let Ok(record) = self.rx.try_recv() { + if let PerfRecord::Measure(measure) = &record { + *self + .measures_counts + .entry(measure.name.clone()) + .or_default() += 1; + } + self.records.push(record); + } + } + pub fn measures(&self) -> impl IntoIterator<Item = &PerfMeasure> { + self.records.iter().filter_map(|record| match record { + PerfRecord::Measure(measure) => Some(measure), + _ => None, + }) + } + + pub fn measure_count(&self, name: &str) -> u32 { + self.measures_counts.get(name).copied().unwrap_or_default() + } +} + pub struct LspClient { child: Child, reader: LspStdoutReader, @@ -628,6 +723,7 @@ pub struct LspClient { stderr_lines_rx: Option<mpsc::Receiver<String>>, config: 
serde_json::Value, supports_workspace_configuration: bool, + perf: Option<Perf>, } impl Drop for LspClient { @@ -661,6 +757,15 @@ impl LspClient { self.reader.pending_len() } + pub fn perf(&mut self) -> &Perf { + let perf = self + .perf + .as_mut() + .expect("must setup with client_builder.collect_perf()"); + perf.drain(); + perf + } + #[track_caller] pub fn wait_until_stderr_line( &self, @@ -733,6 +838,9 @@ impl LspClient { "tlsCertificate": null, "unsafelyIgnoreCertificateErrors": null, "unstable": false, + // setting this causes performance records to be logged + // to stderr + "internalDebug": self.perf.is_some(), } }), ) } |