From 3cea44abde3db770d3d4397297c3f4e949db2186 Mon Sep 17 00:00:00 2001
From: Nathan Whitaker <17734409+nathanwhit@users.noreply.github.com>
Date: Wed, 15 May 2024 11:38:45 -0700
Subject: chore: Fix flaky semantic tokens caching test (#23831)

The stderr stream from the LSP is consumed by a separate thread, so it
may not have processed the part we care about yet. Instead, wait until
we see the measure for the request we care about.
---
 tests/integration/lsp_tests.rs | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

(limited to 'tests/integration')

diff --git a/tests/integration/lsp_tests.rs b/tests/integration/lsp_tests.rs
index 47fefeafe..2d5bb2e24 100644
--- a/tests/integration/lsp_tests.rs
+++ b/tests/integration/lsp_tests.rs
@@ -12740,7 +12740,7 @@ fn lsp_semantic_token_caching() {
 
   assert_eq!(
     client
-      .perf()
+      .perf_wait_for_measure("lsp.semantic_tokens_range")
       .measure_count("tsc.request.getEncodedSemanticClassifications"),
     1,
   );
@@ -12755,7 +12755,7 @@ fn lsp_semantic_token_caching() {
 
   assert_eq!(
     client
-      .perf()
+      .perf_wait_for_measure("lsp.semantic_tokens_full")
       .measure_count("tsc.request.getEncodedSemanticClassifications"),
     2,
   );
@@ -12775,7 +12775,7 @@ fn lsp_semantic_token_caching() {
   // make sure we actually used the cache
   assert_eq!(
     client
-      .perf()
+      .perf_wait_for_measure("lsp.semantic_tokens_range")
       .measure_count("tsc.request.getEncodedSemanticClassifications"),
     2,
   );
-- 
cgit v1.2.3
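
The diff only shows the call sites, so below is a minimal sketch of how a helper
like perf_wait_for_measure could work, assuming the stderr-reader thread forwards
parsed measure names over a channel. The names PerfSnapshot, TestClient, and
measures_rx are hypothetical illustrations, not the actual Deno test-util
implementation; the point is that the helper blocks until the named measure has
actually been observed, instead of snapshotting whatever the stderr thread
happened to have processed so far.

use std::collections::HashMap;
use std::sync::mpsc::Receiver;
use std::time::Duration;

#[derive(Default, Clone)]
struct PerfSnapshot {
  // measure name -> number of times it has been recorded so far
  counts: HashMap<String, usize>,
}

impl PerfSnapshot {
  fn measure_count(&self, name: &str) -> usize {
    self.counts.get(name).copied().unwrap_or(0)
  }
}

struct TestClient {
  // running tally of every measure seen so far (hypothetical field)
  perf: PerfSnapshot,
  // measure names forwarded by the thread consuming the LSP's stderr
  measures_rx: Receiver<String>,
}

impl TestClient {
  // Block until a measure named `name` arrives, folding everything received
  // along the way into the snapshot, then return that snapshot. This removes
  // the race against the stderr-consumer thread that made `.perf()` flaky.
  fn perf_wait_for_measure(&mut self, name: &str) -> PerfSnapshot {
    loop {
      let measure = self
        .measures_rx
        .recv_timeout(Duration::from_secs(10))
        .unwrap_or_else(|_| panic!("timed out waiting for measure {name:?}"));
      *self.perf.counts.entry(measure.clone()).or_insert(0) += 1;
      if measure == name {
        return self.perf.clone();
      }
    }
  }
}

Because the assertion now runs only after the measure for the triggering request
has been seen, the count for tsc.request.getEncodedSemanticClassifications is
stable at the moment it is checked, which is what removes the flakiness.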