Diffstat (limited to 'cli')
86 files changed, 315 insertions, 392 deletions
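Nearly every hunk in this change applies the same mechanical refactor: positional arguments to format!, write!, println!, and panic! are replaced with Rust's inline format-args capture (stable since Rust 1.58), where a local identifier is named directly inside the braces. A minimal sketch of the before/after pattern; the variable names are illustrative, borrowed from hunks below:

    fn main() {
        let specifier = "npm:chalk";
        // Before: the value is passed as a separate positional argument.
        let old = format!("Could not resolve '{}'.", specifier);
        // After: the identifier is captured directly in the format string.
        let new = format!("Could not resolve '{specifier}'.");
        assert_eq!(old, new);

        // Format specs still follow the captured name, e.g. zero-padding:
        let document_version: u64 = 7;
        println!("m{document_version:05}"); // prints "m00007"
    }

The behavior of the formatted output is unchanged; only the call sites become shorter, which is why the diff is almost entirely one-line substitutions plus the collapsing of multi-line format! calls onto a single line.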
diff --git a/cli/args/config_file.rs b/cli/args/config_file.rs index 82ae7e5d7..ad204f449 100644 --- a/cli/args/config_file.rs +++ b/cli/args/config_file.rs @@ -163,7 +163,7 @@ pub const IGNORED_COMPILER_OPTIONS: &[&str] = &[ /// A function that works like JavaScript's `Object.assign()`. pub fn json_merge(a: &mut Value, b: &Value) { match (a, b) { - (&mut Value::Object(ref mut a), &Value::Object(ref b)) => { + (&mut Value::Object(ref mut a), Value::Object(b)) => { for (k, v) in b { json_merge(a.entry(k.clone()).or_insert(Value::Null), v); } diff --git a/cli/args/flags.rs b/cli/args/flags.rs index 00df45274..257a99eb5 100644 --- a/cli/args/flags.rs +++ b/cli/args/flags.rs @@ -1904,7 +1904,7 @@ fn permission_args(app: Command) -> Command { .validator(|keys| { for key in keys.split(',') { if key.is_empty() || key.contains(&['=', '\0'] as &[char]) { - return Err(format!("invalid key \"{}\"", key)); + return Err(format!("invalid key \"{key}\"")); } } Ok(()) @@ -3164,7 +3164,7 @@ fn seed_arg_parse(flags: &mut Flags, matches: &ArgMatches) { let seed = seed_string.parse::<u64>().unwrap(); flags.seed = Some(seed); - flags.v8_flags.push(format!("--random-seed={}", seed)); + flags.v8_flags.push(format!("--random-seed={seed}")); } } @@ -3293,7 +3293,7 @@ pub fn resolve_urls(urls: Vec<String>) -> Vec<String> { } out.push(full_url); } else { - panic!("Bad Url: {}", urlstr); + panic!("Bad Url: {urlstr}"); } } out diff --git a/cli/args/flags_allow_net.rs b/cli/args/flags_allow_net.rs index bf189132a..88d9d3c02 100644 --- a/cli/args/flags_allow_net.rs +++ b/cli/args/flags_allow_net.rs @@ -27,13 +27,13 @@ impl FromStr for BarePort { } pub fn validator(host_and_port: &str) -> Result<(), String> { - if Url::parse(&format!("deno://{}", host_and_port)).is_ok() + if Url::parse(&format!("deno://{host_and_port}")).is_ok() || host_and_port.parse::<IpAddr>().is_ok() || host_and_port.parse::<BarePort>().is_ok() { Ok(()) } else { - Err(format!("Bad host:port pair: {}", host_and_port)) + Err(format!("Bad host:port pair: {host_and_port}")) } } @@ -43,7 +43,7 @@ pub fn validator(host_and_port: &str) -> Result<(), String> { pub fn parse(paths: Vec<String>) -> clap::Result<Vec<String>> { let mut out: Vec<String> = vec![]; for host_and_port in paths.iter() { - if Url::parse(&format!("deno://{}", host_and_port)).is_ok() + if Url::parse(&format!("deno://{host_and_port}")).is_ok() || host_and_port.parse::<IpAddr>().is_ok() { out.push(host_and_port.to_owned()) @@ -55,7 +55,7 @@ pub fn parse(paths: Vec<String>) -> clap::Result<Vec<String>> { } else { return Err(clap::Error::raw( clap::ErrorKind::InvalidValue, - format!("Bad host:port pair: {}", host_and_port), + format!("Bad host:port pair: {host_and_port}"), )); } } diff --git a/cli/args/import_map.rs b/cli/args/import_map.rs index bac31c080..9d1b2bbda 100644 --- a/cli/args/import_map.rs +++ b/cli/args/import_map.rs @@ -56,7 +56,7 @@ fn print_import_map_diagnostics(diagnostics: &[ImportMapDiagnostic]) { "Import map diagnostics:\n{}", diagnostics .iter() - .map(|d| format!(" - {}", d)) + .map(|d| format!(" - {d}")) .collect::<Vec<_>>() .join("\n") ); diff --git a/cli/args/mod.rs b/cli/args/mod.rs index 5cb29cab2..d75f25d52 100644 --- a/cli/args/mod.rs +++ b/cli/args/mod.rs @@ -90,7 +90,7 @@ impl CacheSetting { if list.iter().any(|i| i == "npm:") { return false; } - let specifier = format!("npm:{}", package_name); + let specifier = format!("npm:{package_name}"); if list.contains(&specifier) { return false; } @@ -491,7 +491,7 @@ impl CliOptions { format!("for: {}", 
insecure_allowlist.join(", ")) }; let msg = - format!("DANGER: TLS certificate validation is disabled {}", domains); + format!("DANGER: TLS certificate validation is disabled {domains}"); // use eprintln instead of log::warn so this always gets shown eprintln!("{}", colors::yellow(msg)); } @@ -579,8 +579,7 @@ impl CliOptions { ) .await .context(format!( - "Unable to load '{}' import map", - import_map_specifier + "Unable to load '{import_map_specifier}' import map" )) .map(Some) } @@ -929,7 +928,7 @@ fn resolve_import_map_specifier( } } let specifier = deno_core::resolve_url_or_path(import_map_path) - .context(format!("Bad URL (\"{}\") for import map.", import_map_path))?; + .context(format!("Bad URL (\"{import_map_path}\") for import map."))?; return Ok(Some(specifier)); } else if let Some(config_file) = &maybe_config_file { // if the config file is an import map we prefer to use it, over `importMap` @@ -970,8 +969,7 @@ fn resolve_import_map_specifier( } else { deno_core::resolve_import(&import_map_path, config_file.specifier.as_str()) .context(format!( - "Bad URL (\"{}\") for import map.", - import_map_path + "Bad URL (\"{import_map_path}\") for import map." ))? }; return Ok(Some(specifier)); diff --git a/cli/auth_tokens.rs b/cli/auth_tokens.rs index 360a7e6c7..6c3ed3846 100644 --- a/cli/auth_tokens.rs +++ b/cli/auth_tokens.rs @@ -20,9 +20,9 @@ pub struct AuthToken { impl fmt::Display for AuthToken { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match &self.token { - AuthTokenData::Bearer(token) => write!(f, "Bearer {}", token), + AuthTokenData::Bearer(token) => write!(f, "Bearer {token}"), AuthTokenData::Basic { username, password } => { - let credentials = format!("{}:{}", username, password); + let credentials = format!("{username}:{password}"); write!(f, "Basic {}", base64::encode(credentials)) } } diff --git a/cli/bench/http.rs b/cli/bench/http.rs index 585574e2d..06a177386 100644 --- a/cli/bench/http.rs +++ b/cli/bench/http.rs @@ -44,7 +44,7 @@ pub fn benchmark( let name = entry.file_name().into_string().unwrap(); let file_stem = pathbuf.file_stem().unwrap().to_str().unwrap(); - let lua_script = http_dir.join(format!("{}.lua", file_stem)); + let lua_script = http_dir.join(format!("{file_stem}.lua")); let mut maybe_lua = None; if lua_script.exists() { maybe_lua = Some(lua_script.to_str().unwrap()); @@ -158,7 +158,7 @@ fn run( let wrk = test_util::prebuilt_tool_path("wrk"); assert!(wrk.is_file()); - let addr = format!("http://127.0.0.1:{}/", port); + let addr = format!("http://127.0.0.1:{port}/"); let mut wrk_cmd = vec![wrk.to_str().unwrap(), "-d", DURATION, "--latency", &addr]; @@ -172,7 +172,7 @@ fn run( std::thread::sleep(Duration::from_secs(1)); // wait to capture failure. TODO racy. 
- println!("{}", output); + println!("{output}"); assert!( server.try_wait()?.map_or(true, |s| s.success()), "server ended with error" @@ -194,7 +194,7 @@ fn get_port() -> u16 { } fn server_addr(port: u16) -> String { - format!("0.0.0.0:{}", port) + format!("0.0.0.0:{port}") } fn core_http_json_ops(exe: &str) -> Result<HttpBenchmarkResult> { diff --git a/cli/bench/lsp.rs b/cli/bench/lsp.rs index fe6fd2703..722a87b69 100644 --- a/cli/bench/lsp.rs +++ b/cli/bench/lsp.rs @@ -202,7 +202,7 @@ fn bench_find_replace(deno_exe: &Path) -> Result<Duration, AnyError> { "textDocument/didOpen", json!({ "textDocument": { - "uri": format!("file:///a/file_{}.ts", i), + "uri": format!("file:///a/file_{i}.ts"), "languageId": "typescript", "version": 1, "text": "console.log(\"000\");\n" @@ -223,7 +223,7 @@ fn bench_find_replace(deno_exe: &Path) -> Result<Duration, AnyError> { } for i in 0..10 { - let file_name = format!("file:///a/file_{}.ts", i); + let file_name = format!("file:///a/file_{i}.ts"); client.write_notification( "textDocument/didChange", lsp::DidChangeTextDocumentParams { @@ -250,7 +250,7 @@ fn bench_find_replace(deno_exe: &Path) -> Result<Duration, AnyError> { } for i in 0..10 { - let file_name = format!("file:///a/file_{}.ts", i); + let file_name = format!("file:///a/file_{i}.ts"); let (maybe_res, maybe_err) = client.write_request::<_, _, Value>( "textDocument/formatting", lsp::DocumentFormattingParams { diff --git a/cli/bench/lsp_bench_standalone.rs b/cli/bench/lsp_bench_standalone.rs index 080f25e73..e8dc29073 100644 --- a/cli/bench/lsp_bench_standalone.rs +++ b/cli/bench/lsp_bench_standalone.rs @@ -55,7 +55,7 @@ fn incremental_change_wait(bench: &mut Bencher) { let mut document_version: u64 = 0; bench.iter(|| { - let text = format!("m{:05}", document_version); + let text = format!("m{document_version:05}"); client .write_notification( "textDocument/didChange", diff --git a/cli/bench/main.rs b/cli/bench/main.rs index e6b9895f2..747407945 100644 --- a/cli/bench/main.rs +++ b/cli/bench/main.rs @@ -189,7 +189,7 @@ fn run_exec_time( let ret_code_test = if let Some(code) = return_code { // Bash test which asserts the return code value of the previous command // $? contains the return code of the previous command - format!("; test $? -eq {}", code) + format!("; test $? 
-eq {code}") } else { "".to_string() }; @@ -244,11 +244,11 @@ fn rlib_size(target_dir: &std::path::Path, prefix: &str) -> i64 { if name.starts_with(prefix) && name.ends_with(".rlib") { let start = name.split('-').next().unwrap().to_string(); if seen.contains(&start) { - println!("skip {}", name); + println!("skip {name}"); } else { seen.insert(start); size += entry.metadata().unwrap().len(); - println!("check size {} {}", name, size); + println!("check size {name} {size}"); } } } @@ -269,11 +269,11 @@ fn get_binary_sizes(target_dir: &Path) -> Result<HashMap<String, i64>> { // add up size for everything in target/release/deps/libswc* let swc_size = rlib_size(target_dir, "libswc"); - println!("swc {} bytes", swc_size); + println!("swc {swc_size} bytes"); sizes.insert("swc_rlib".to_string(), swc_size); let v8_size = rlib_size(target_dir, "libv8"); - println!("v8 {} bytes", v8_size); + println!("v8 {v8_size} bytes"); sizes.insert("rusty_v8_rlib".to_string(), v8_size); // Because cargo's OUT_DIR is not predictable, search the build tree for @@ -314,7 +314,7 @@ fn bundle_benchmark(deno_exe: &Path) -> Result<HashMap<String, i64>> { let mut sizes = HashMap::<String, i64>::new(); for (name, url) in BUNDLES { - let path = format!("{}.bundle.js", name); + let path = format!("{name}.bundle.js"); test_util::run( &[ deno_exe.to_str().unwrap(), @@ -374,7 +374,7 @@ fn cargo_deps() -> usize { count += 1 } } - println!("cargo_deps {}", count); + println!("cargo_deps {count}"); assert!(count > 10); // Sanity check. count } diff --git a/cli/build.rs b/cli/build.rs index 131d404a7..bb048d0d4 100644 --- a/cli/build.rs +++ b/cli/build.rs @@ -130,7 +130,7 @@ mod ts { for name in libs.iter() { println!( "cargo:rerun-if-changed={}", - path_dts.join(format!("lib.{}.d.ts", name)).display() + path_dts.join(format!("lib.{name}.d.ts")).display() ); } println!( @@ -229,7 +229,7 @@ mod ts { PathBuf::from(op_crate_lib).canonicalize()? 
// otherwise we are will generate the path ourself } else { - path_dts.join(format!("lib.{}.d.ts", lib)) + path_dts.join(format!("lib.{lib}.d.ts")) }; let data = std::fs::read_to_string(path)?; Ok(json!({ @@ -431,7 +431,7 @@ fn main() { // op_fetch_asset::trace_serializer(); if let Ok(c) = env::var("DENO_CANARY") { - println!("cargo:rustc-env=DENO_CANARY={}", c); + println!("cargo:rustc-env=DENO_CANARY={c}"); } println!("cargo:rerun-if-env-changed=DENO_CANARY"); diff --git a/cli/cache/check.rs b/cli/cache/check.rs index 86fb07577..c991c14b1 100644 --- a/cli/cache/check.rs +++ b/cli/cache/check.rs @@ -71,7 +71,7 @@ impl TypeCheckCache { Ok(val) => val, Err(err) => { if cfg!(debug_assertions) { - panic!("Error retrieving hash: {}", err); + panic!("Error retrieving hash: {err}"); } else { log::debug!("Error retrieving hash: {}", err); // fail silently when not debugging @@ -94,7 +94,7 @@ impl TypeCheckCache { pub fn add_check_hash(&self, check_hash: u64) { if let Err(err) = self.add_check_hash_result(check_hash) { if cfg!(debug_assertions) { - panic!("Error saving check hash: {}", err); + panic!("Error saving check hash: {err}"); } else { log::debug!("Error saving check hash: {}", err); } @@ -134,7 +134,7 @@ impl TypeCheckCache { if let Err(err) = self.set_tsbuildinfo_result(specifier, text) { // should never error here, but if it ever does don't fail if cfg!(debug_assertions) { - panic!("Error saving tsbuildinfo: {}", err); + panic!("Error saving tsbuildinfo: {err}"); } else { log::debug!("Error saving tsbuildinfo: {}", err); } diff --git a/cli/cache/disk_cache.rs b/cli/cache/disk_cache.rs index b97850204..799610d5f 100644 --- a/cli/cache/disk_cache.rs +++ b/cli/cache/disk_cache.rs @@ -43,8 +43,7 @@ impl DiskCache { } fs::create_dir_all(path).map_err(|e| { io::Error::new(e.kind(), format!( - "Could not create TypeScript compiler cache location: {:?}\nCheck the permission of the directory.", - path + "Could not create TypeScript compiler cache location: {path:?}\nCheck the permission of the directory." )) }) } @@ -61,7 +60,7 @@ impl DiskCache { let host_port = match url.port() { // Windows doesn't support ":" in filenames, so we represent port using a // special string. 
- Some(port) => format!("{}_PORT{}", host, port), + Some(port) => format!("{host}_PORT{port}"), None => host.to_string(), }; out.push(host_port); @@ -128,7 +127,7 @@ impl DiskCache { None => Some(base.with_extension(extension)), Some(ext) => { let original_extension = OsStr::to_str(ext).unwrap(); - let final_extension = format!("{}.{}", original_extension, extension); + let final_extension = format!("{original_extension}.{extension}"); Some(base.with_extension(final_extension)) } } diff --git a/cli/cache/emit.rs b/cli/cache/emit.rs index 21f382c55..89ff496fd 100644 --- a/cli/cache/emit.rs +++ b/cli/cache/emit.rs @@ -90,7 +90,7 @@ impl EmitCache { if let Err(err) = self.set_emit_code_result(specifier, source_hash, code) { // should never error here, but if it ever does don't fail if cfg!(debug_assertions) { - panic!("Error saving emit data ({}): {}", specifier, err); + panic!("Error saving emit data ({specifier}): {err}"); } else { log::debug!("Error saving emit data({}): {}", specifier, err); } diff --git a/cli/cache/http_cache.rs b/cli/cache/http_cache.rs index 4be3166c9..2e784765e 100644 --- a/cli/cache/http_cache.rs +++ b/cli/cache/http_cache.rs @@ -35,7 +35,7 @@ fn base_url_to_filename(url: &Url) -> Option<PathBuf> { "http" | "https" => { let host = url.host_str().unwrap(); let host_port = match url.port() { - Some(port) => format!("{}_PORT{}", host, port), + Some(port) => format!("{host}_PORT{port}"), None => host.to_string(), }; out.push(host_port); @@ -128,8 +128,7 @@ impl HttpCache { io::Error::new( e.kind(), format!( - "Could not create remote modules cache location: {:?}\nCheck the permission of the directory.", - path + "Could not create remote modules cache location: {path:?}\nCheck the permission of the directory." ), ) }) @@ -231,7 +230,7 @@ mod tests { headers.insert("etag".to_string(), "as5625rqdsfb".to_string()); let content = b"Hello world"; let r = cache.set(&url, headers, content); - eprintln!("result {:?}", r); + eprintln!("result {r:?}"); assert!(r.is_ok()); let r = cache.get(&url); assert!(r.is_ok()); diff --git a/cli/cache/incremental.rs b/cli/cache/incremental.rs index 652965ad7..985181c59 100644 --- a/cli/cache/incremental.rs +++ b/cli/cache/incremental.rs @@ -185,7 +185,7 @@ impl SqlIncrementalCache { Ok(option) => option, Err(err) => { if cfg!(debug_assertions) { - panic!("Error retrieving hash: {}", err); + panic!("Error retrieving hash: {err}"); } else { // fail silently when not debugging None diff --git a/cli/cache/node.rs b/cli/cache/node.rs index ec0121c61..b19772229 100644 --- a/cli/cache/node.rs +++ b/cli/cache/node.rs @@ -108,7 +108,7 @@ impl NodeAnalysisCache { Err(err) => { // should never error here, but if it ever does don't fail if cfg!(debug_assertions) { - panic!("Error creating node analysis cache: {:#}", err); + panic!("Error creating node analysis cache: {err:#}"); } else { log::debug!("Error creating node analysis cache: {:#}", err); None @@ -124,7 +124,7 @@ impl NodeAnalysisCache { Err(err) => { // should never error here, but if it ever does don't fail if cfg!(debug_assertions) { - panic!("Error using esm analysis: {:#}", err); + panic!("Error using esm analysis: {err:#}"); } else { log::debug!("Error using esm analysis: {:#}", err); } diff --git a/cli/errors.rs b/cli/errors.rs index 5259b90fb..823d32da5 100644 --- a/cli/errors.rs +++ b/cli/errors.rs @@ -65,9 +65,7 @@ pub fn get_error_class_name(e: &AnyError) -> &'static str { eprintln!( "Error '{}' contains boxed error of unknown type:{}", e, - e.chain() - .map(|e| format!("\n {:?}", e)) - 
.collect::<String>() + e.chain().map(|e| format!("\n {e:?}")).collect::<String>() ); "Error" }) diff --git a/cli/file_fetcher.rs b/cli/file_fetcher.rs index 4e417778e..f28720964 100644 --- a/cli/file_fetcher.rs +++ b/cli/file_fetcher.rs @@ -88,7 +88,7 @@ impl FileCache { /// Fetch a source file from the local file system. fn fetch_local(specifier: &ModuleSpecifier) -> Result<File, AnyError> { let local = specifier.to_file_path().map_err(|_| { - uri_error(format!("Invalid file path.\n Specifier: {}", specifier)) + uri_error(format!("Invalid file path.\n Specifier: {specifier}")) })?; let bytes = fs::read(&local)?; let charset = text_encoding::detect_charset(&bytes).to_string(); @@ -111,13 +111,13 @@ pub fn get_source_from_data_url( specifier: &ModuleSpecifier, ) -> Result<(String, String), AnyError> { let data_url = DataUrl::process(specifier.as_str()) - .map_err(|e| uri_error(format!("{:?}", e)))?; + .map_err(|e| uri_error(format!("{e:?}")))?; let mime = data_url.mime_type(); let charset = mime.get_parameter("charset").map(|v| v.to_string()); let (bytes, _) = data_url .decode_to_vec() - .map_err(|e| uri_error(format!("{:?}", e)))?; - Ok((get_source_from_bytes(bytes, charset)?, format!("{}", mime))) + .map_err(|e| uri_error(format!("{e:?}")))?; + Ok((get_source_from_bytes(bytes, charset)?, format!("{mime}"))) } /// Given a vector of bytes and optionally a charset, decode the bytes to a @@ -142,8 +142,7 @@ fn get_validated_scheme( let scheme = specifier.scheme(); if !SUPPORTED_SCHEMES.contains(&scheme) { Err(generic_error(format!( - "Unsupported scheme \"{}\" for module \"{}\". Supported schemes: {:#?}", - scheme, specifier, SUPPORTED_SCHEMES + "Unsupported scheme \"{scheme}\" for module \"{specifier}\". Supported schemes: {SUPPORTED_SCHEMES:#?}" ))) } else { Ok(scheme.to_string()) @@ -301,8 +300,7 @@ impl FileFetcher { return Err(custom_error( "NotCached", format!( - "Specifier not found in cache: \"{}\", --cached-only is specified.", - specifier + "Specifier not found in cache: \"{specifier}\", --cached-only is specified." ), )); } @@ -349,8 +347,7 @@ impl FileFetcher { return Err(custom_error( "NotCached", format!( - "Specifier not found in cache: \"{}\", --cached-only is specified.", - specifier + "Specifier not found in cache: \"{specifier}\", --cached-only is specified." ), )); } @@ -362,7 +359,7 @@ impl FileFetcher { .ok_or_else(|| { custom_error( "NotFound", - format!("Blob URL not found: \"{}\".", specifier), + format!("Blob URL not found: \"{specifier}\"."), ) })? }; @@ -435,8 +432,7 @@ impl FileFetcher { return futures::future::err(custom_error( "NotCached", format!( - "Specifier not found in cache: \"{}\", --cached-only is specified.", - specifier + "Specifier not found in cache: \"{specifier}\", --cached-only is specified." 
), )) .boxed(); @@ -580,7 +576,7 @@ impl FileFetcher { } else if !self.allow_remote { Err(custom_error( "NoRemote", - format!("A remote specifier was requested: \"{}\", but --no-remote is specified.", specifier), + format!("A remote specifier was requested: \"{specifier}\", but --no-remote is specified."), )) } else { let result = self @@ -818,19 +814,19 @@ mod tests { charset: &str, expected: &str, ) { - let url_str = format!("http://127.0.0.1:4545/encoding/{}", fixture); + let url_str = format!("http://127.0.0.1:4545/encoding/{fixture}"); let specifier = resolve_url(&url_str).unwrap(); let (file, headers) = test_fetch_remote(&specifier).await; assert_eq!(&*file.source, expected); assert_eq!(file.media_type, MediaType::TypeScript); assert_eq!( headers.get("content-type").unwrap(), - &format!("application/typescript;charset={}", charset) + &format!("application/typescript;charset={charset}") ); } async fn test_fetch_local_encoded(charset: &str, expected: String) { - let p = test_util::testdata_path().join(format!("encoding/{}.ts", charset)); + let p = test_util::testdata_path().join(format!("encoding/{charset}.ts")); let specifier = resolve_url_or_path(p.to_str().unwrap()).unwrap(); let (file, _) = test_fetch(&specifier).await; assert_eq!(&*file.source, expected); @@ -2016,7 +2012,7 @@ mod tests { ) .await; - println!("{:?}", result); + println!("{result:?}"); if let Ok(FetchOnceResult::Code(body, _headers)) = result { assert!(!body.is_empty()); } else { diff --git a/cli/graph_util.rs b/cli/graph_util.rs index 3cf3b0d0b..c216ba503 100644 --- a/cli/graph_util.rs +++ b/cli/graph_util.rs @@ -630,12 +630,12 @@ fn handle_check_error( let mut message = if let Some(err) = error.downcast_ref::<ResolutionError>() { enhanced_resolution_error_message(err) } else { - format!("{}", error) + format!("{error}") }; if let Some(range) = maybe_range { if !range.specifier.as_str().contains("$deno") { - message.push_str(&format!("\n at {}", range)); + message.push_str(&format!("\n at {range}")); } } @@ -644,7 +644,7 @@ fn handle_check_error( /// Adds more explanatory information to a resolution error. pub fn enhanced_resolution_error_message(error: &ResolutionError) -> String { - let mut message = format!("{}", error); + let mut message = format!("{error}"); if let ResolutionError::InvalidSpecifier { error: SpecifierError::ImportPrefixMissing(specifier, _), @@ -653,8 +653,7 @@ pub fn enhanced_resolution_error_message(error: &ResolutionError) -> String { { if crate::node::resolve_builtin_node_module(specifier).is_ok() { message.push_str(&format!( - "\nIf you want to use a built-in Node module, add a \"node:\" prefix (ex. \"node:{}\").", - specifier + "\nIf you want to use a built-in Node module, add a \"node:\" prefix (ex. \"node:{specifier}\")." 
)); } } diff --git a/cli/http_util.rs b/cli/http_util.rs index 52d0cb664..225c49996 100644 --- a/cli/http_util.rs +++ b/cli/http_util.rs @@ -59,8 +59,7 @@ pub fn resolve_redirect_from_response( Ok(new_url) } else { Err(generic_error(format!( - "Redirection from '{}' did not provide location header", - request_url + "Redirection from '{request_url}' did not provide location header" ))) } } @@ -290,7 +289,7 @@ impl HttpClient { "Bad response: {:?}{}", status, match maybe_response_text { - Some(text) => format!("\n\n{}", text), + Some(text) => format!("\n\n{text}"), None => String::new(), } ); diff --git a/cli/lsp/analysis.rs b/cli/lsp/analysis.rs index c7f5ba8aa..182ad940e 100644 --- a/cli/lsp/analysis.rs +++ b/cli/lsp/analysis.rs @@ -158,7 +158,7 @@ fn check_specifier( documents: &Documents, ) -> Option<String> { for ext in SUPPORTED_EXTENSIONS { - let specifier_with_ext = format!("{}{}", specifier, ext); + let specifier_with_ext = format!("{specifier}{ext}"); if documents.contains_import(&specifier_with_ext, referrer) { return Some(specifier_with_ext); } @@ -398,7 +398,7 @@ impl CodeActionCollection { specifier.clone(), vec![lsp::TextEdit { new_text: prepend_whitespace( - format!("// deno-lint-ignore {}\n", code), + format!("// deno-lint-ignore {code}\n"), line_content, ), range: lsp::Range { @@ -414,7 +414,7 @@ impl CodeActionCollection { }], ); let ignore_error_action = lsp::CodeAction { - title: format!("Disable {} for this line", code), + title: format!("Disable {code} for this line"), kind: Some(lsp::CodeActionKind::QUICKFIX), diagnostics: Some(vec![diagnostic.clone()]), command: None, @@ -447,7 +447,7 @@ impl CodeActionCollection { }) }); - let mut new_text = format!("// deno-lint-ignore-file {}\n", code); + let mut new_text = format!("// deno-lint-ignore-file {code}\n"); let mut range = lsp::Range { start: lsp::Position { line: 0, @@ -461,7 +461,7 @@ impl CodeActionCollection { // If ignore file comment already exists, append the lint code // to the existing comment. if let Some(ignore_comment) = maybe_ignore_comment { - new_text = format!(" {}", code); + new_text = format!(" {code}"); // Get the end position of the comment. 
let line = maybe_parsed_source .unwrap() @@ -479,7 +479,7 @@ impl CodeActionCollection { let mut changes = HashMap::new(); changes.insert(specifier.clone(), vec![lsp::TextEdit { new_text, range }]); let ignore_file_action = lsp::CodeAction { - title: format!("Disable {} for the entire file", code), + title: format!("Disable {code} for the entire file"), kind: Some(lsp::CodeActionKind::QUICKFIX), diagnostics: Some(vec![diagnostic.clone()]), command: None, diff --git a/cli/lsp/client.rs b/cli/lsp/client.rs index b39678667..cdef1cfbf 100644 --- a/cli/lsp/client.rs +++ b/cli/lsp/client.rs @@ -107,7 +107,7 @@ impl Client { ) { self .0 - .show_message(message_type, format!("{}", message)) + .show_message(message_type, format!("{message}")) .await } diff --git a/cli/lsp/completions.rs b/cli/lsp/completions.rs index a8e7896c5..b89aec6c9 100644 --- a/cli/lsp/completions.rs +++ b/cli/lsp/completions.rs @@ -394,7 +394,7 @@ fn get_local_completions( let filter_text = if full_text.starts_with(current) { Some(full_text) } else { - Some(format!("{}{}", current, label)) + Some(format!("{current}{label}")) }; match de.file_type() { Ok(file_type) if file_type.is_dir() => Some(lsp::CompletionItem { diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs index 9f3c409cc..b6dff8682 100644 --- a/cli/lsp/diagnostics.rs +++ b/cli/lsp/diagnostics.rs @@ -670,17 +670,14 @@ impl DenoDiagnostic { let DiagnosticDataImportMapRemap { from, to } = serde_json::from_value(data)?; lsp::CodeAction { - title: format!( - "Update \"{}\" to \"{}\" to use import map.", - from, to - ), + title: format!("Update \"{from}\" to \"{to}\" to use import map."), kind: Some(lsp::CodeActionKind::QUICKFIX), diagnostics: Some(vec![diagnostic.clone()]), edit: Some(lsp::WorkspaceEdit { changes: Some(HashMap::from([( specifier.clone(), vec![lsp::TextEdit { - new_text: format!("\"{}\"", to), + new_text: format!("\"{to}\""), range: diagnostic.range, }], )])), @@ -821,15 +818,15 @@ impl DenoDiagnostic { pub fn to_lsp_diagnostic(&self, range: &lsp::Range) -> lsp::Diagnostic { let (severity, message, data) = match self { Self::DenoWarn(message) => (lsp::DiagnosticSeverity::WARNING, message.to_string(), None), - Self::ImportMapRemap { from, to } => (lsp::DiagnosticSeverity::HINT, format!("The import specifier can be remapped to \"{}\" which will resolve it via the active import map.", to), Some(json!({ "from": from, "to": to }))), - Self::InvalidAssertType(assert_type) => (lsp::DiagnosticSeverity::ERROR, format!("The module is a JSON module and expected an assertion type of \"json\". Instead got \"{}\".", assert_type), None), + Self::ImportMapRemap { from, to } => (lsp::DiagnosticSeverity::HINT, format!("The import specifier can be remapped to \"{to}\" which will resolve it via the active import map."), Some(json!({ "from": from, "to": to }))), + Self::InvalidAssertType(assert_type) => (lsp::DiagnosticSeverity::ERROR, format!("The module is a JSON module and expected an assertion type of \"json\". Instead got \"{assert_type}\"."), None), Self::NoAssertType => (lsp::DiagnosticSeverity::ERROR, "The module is a JSON module and not being imported with an import assertion. 
Consider adding `assert { type: \"json\" }` to the import statement.".to_string(), None), - Self::NoCache(specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing remote URL: \"{}\".", specifier), Some(json!({ "specifier": specifier }))), + Self::NoCache(specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing remote URL: \"{specifier}\"."), Some(json!({ "specifier": specifier }))), Self::NoCacheBlob => (lsp::DiagnosticSeverity::ERROR, "Uncached blob URL.".to_string(), None), Self::NoCacheData(specifier) => (lsp::DiagnosticSeverity::ERROR, "Uncached data URL.".to_string(), Some(json!({ "specifier": specifier }))), Self::NoCacheNpm(pkg_ref, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing npm package: \"{}\".", pkg_ref.req), Some(json!({ "specifier": specifier }))), - Self::NoLocal(specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Unable to load a local module: \"{}\".\n Please check the file path.", specifier), None), - Self::Redirect { from, to} => (lsp::DiagnosticSeverity::INFORMATION, format!("The import of \"{}\" was redirected to \"{}\".", from, to), Some(json!({ "specifier": from, "redirect": to }))), + Self::NoLocal(specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Unable to load a local module: \"{specifier}\".\n Please check the file path."), None), + Self::Redirect { from, to} => (lsp::DiagnosticSeverity::INFORMATION, format!("The import of \"{from}\" was redirected to \"{to}\"."), Some(json!({ "specifier": from, "redirect": to }))), Self::ResolutionError(err) => ( lsp::DiagnosticSeverity::ERROR, enhanced_resolution_error_message(err), diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs index 03210ebaa..07003a168 100644 --- a/cli/lsp/documents.rs +++ b/cli/lsp/documents.rs @@ -838,7 +838,7 @@ impl Documents { || { Err(custom_error( "NotFound", - format!("The specifier \"{}\" was not found.", specifier), + format!("The specifier \"{specifier}\" was not found."), )) }, Ok, @@ -862,7 +862,7 @@ impl Documents { } else { return Err(custom_error( "NotFound", - format!("The specifier \"{}\" was not found.", specifier), + format!("The specifier \"{specifier}\" was not found."), )); } } @@ -1100,7 +1100,7 @@ impl Documents { } else { return Err(custom_error( "NotFound", - format!("Specifier not found {}", specifier), + format!("Specifier not found {specifier}"), )); } } diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index 0b02115af..58116d49e 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -375,8 +375,7 @@ impl Inner { self.get_maybe_asset_or_document(specifier).map_or_else( || { Err(LspError::invalid_params(format!( - "Unable to find asset or document for: {}", - specifier + "Unable to find asset or document for: {specifier}" ))) }, Ok, @@ -1296,7 +1295,7 @@ impl Inner { Ok(Some(text_edits)) } } else { - self.client.show_message(MessageType::WARNING, format!("Unable to format \"{}\". Likely due to unrecoverable syntax errors in the file.", specifier)).await; + self.client.show_message(MessageType::WARNING, format!("Unable to format \"{specifier}\". 
Likely due to unrecoverable syntax errors in the file.")).await; Ok(None) } } @@ -1354,7 +1353,7 @@ impl Inner { }; let value = if let Some(docs) = self.module_registries.get_hover(&dep).await { - format!("{}\n\n---\n\n{}", value, docs) + format!("{value}\n\n---\n\n{docs}") } else { value }; diff --git a/cli/lsp/path_to_regex.rs b/cli/lsp/path_to_regex.rs index a9b4bcdf3..1d766e024 100644 --- a/cli/lsp/path_to_regex.rs +++ b/cli/lsp/path_to_regex.rs @@ -220,8 +220,8 @@ pub enum StringOrNumber { impl fmt::Display for StringOrNumber { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match &self { - Self::Number(n) => write!(f, "{}", n), - Self::String(s) => write!(f, "{}", s), + Self::Number(n) => write!(f, "{n}"), + Self::String(s) => write!(f, "{s}"), } } } @@ -269,9 +269,9 @@ impl StringOrVec { let mut s = String::new(); for (i, segment) in v.iter().enumerate() { if omit_initial_prefix && i == 0 { - write!(s, "{}{}", segment, suffix).unwrap(); + write!(s, "{segment}{suffix}").unwrap(); } else { - write!(s, "{}{}{}", prefix, segment, suffix).unwrap(); + write!(s, "{prefix}{segment}{suffix}").unwrap(); } } s @@ -610,7 +610,7 @@ pub fn tokens_to_regex( } } else { let modifier = key.modifier.clone().unwrap_or_default(); - format!(r"(?:{}{}){}", prefix, suffix, modifier) + format!(r"(?:{prefix}{suffix}){modifier}") } } }; @@ -619,10 +619,10 @@ pub fn tokens_to_regex( if end { if !strict { - write!(route, r"{}?", delimiter).unwrap(); + write!(route, r"{delimiter}?").unwrap(); } if has_ends_with { - write!(route, r"(?={})", ends_with).unwrap(); + write!(route, r"(?={ends_with})").unwrap(); } else { route.push('$'); } @@ -640,16 +640,16 @@ pub fn tokens_to_regex( }; if !strict { - write!(route, r"(?:{}(?={}))?", delimiter, ends_with).unwrap(); + write!(route, r"(?:{delimiter}(?={ends_with}))?").unwrap(); } if !is_end_deliminated { - write!(route, r"(?={}|{})", delimiter, ends_with).unwrap(); + write!(route, r"(?={delimiter}|{ends_with})").unwrap(); } } let flags = if sensitive { "" } else { "(?i)" }; - let re = FancyRegex::new(&format!("{}{}", flags, route))?; + let re = FancyRegex::new(&format!("{flags}{route}"))?; let maybe_keys = if keys.is_empty() { None } else { Some(keys) }; Ok((re, maybe_keys)) @@ -754,7 +754,7 @@ impl Compiler { } } } - write!(path, "{}{}{}", prefix, segment, suffix).unwrap(); + write!(path, "{prefix}{segment}{suffix}").unwrap(); } } } @@ -773,7 +773,7 @@ impl Compiler { } let prefix = k.prefix.clone().unwrap_or_default(); let suffix = k.suffix.clone().unwrap_or_default(); - write!(path, "{}{}{}", prefix, s, suffix).unwrap(); + write!(path, "{prefix}{s}{suffix}").unwrap(); } None => { if !optional { @@ -874,25 +874,23 @@ mod tests { fixtures: &[Fixture], ) { let result = string_to_regex(path, maybe_options); - assert!(result.is_ok(), "Could not parse path: \"{}\"", path); + assert!(result.is_ok(), "Could not parse path: \"{path}\""); let (re, _) = result.unwrap(); for (fixture, expected) in fixtures { let result = re.find(fixture); assert!( result.is_ok(), - "Find failure for path \"{}\" and fixture \"{}\"", - path, - fixture + "Find failure for path \"{path}\" and fixture \"{fixture}\"" ); let actual = result.unwrap(); if let Some((text, start, end)) = *expected { - assert!(actual.is_some(), "Match failure for path \"{}\" and fixture \"{}\". Expected Some got None", path, fixture); + assert!(actual.is_some(), "Match failure for path \"{path}\" and fixture \"{fixture}\". 
Expected Some got None"); let actual = actual.unwrap(); assert_eq!(actual.as_str(), text, "Match failure for path \"{}\" and fixture \"{}\". Expected \"{}\" got \"{}\".", path, fixture, text, actual.as_str()); assert_eq!(actual.start(), start); assert_eq!(actual.end(), end); } else { - assert!(actual.is_none(), "Match failure for path \"{}\" and fixture \"{}\". Expected None got {:?}", path, fixture, actual); + assert!(actual.is_none(), "Match failure for path \"{path}\" and fixture \"{fixture}\". Expected None got {actual:?}"); } } } diff --git a/cli/lsp/registries.rs b/cli/lsp/registries.rs index ca7b6368e..d67068ec7 100644 --- a/cli/lsp/registries.rs +++ b/cli/lsp/registries.rs @@ -217,10 +217,10 @@ fn get_endpoint_with_match( Token::Key(k) if k.name == *key => Some(k), _ => None, }); - url = url - .replace(&format!("${{{}}}", name), &value.to_string(maybe_key, true)); + url = + url.replace(&format!("${{{name}}}"), &value.to_string(maybe_key, true)); url = url.replace( - &format!("${{{{{}}}}}", name), + &format!("${{{{{name}}}}}"), &percent_encoding::percent_encode( value.to_string(maybe_key, true).as_bytes(), COMPONENT, @@ -278,8 +278,8 @@ fn replace_variable( let value = maybe_value.unwrap_or(""); if let StringOrNumber::String(name) = &variable.name { url_str - .replace(&format!("${{{}}}", name), value) - .replace(&format! {"${{{{{}}}}}", name}, value) + .replace(&format!("${{{name}}}"), value) + .replace(&format! {"${{{{{name}}}}}"}, value) } else { url_str } @@ -723,7 +723,7 @@ impl ModuleRegistry { } for (idx, item) in items.into_iter().enumerate() { let mut label = if let Some(p) = &prefix { - format!("{}{}", p, item) + format!("{p}{item}") } else { item.clone() }; @@ -880,7 +880,7 @@ impl ModuleRegistry { is_incomplete = true; } for (idx, item) in items.into_iter().enumerate() { - let path = format!("{}{}", prefix, item); + let path = format!("{prefix}{item}"); let kind = Some(lsp::CompletionItemKind::FOLDER); let item_specifier = base.join(&path).ok()?; let full_text = item_specifier.as_str(); diff --git a/cli/lsp/repl.rs b/cli/lsp/repl.rs index 81e621c13..41a3f993a 100644 --- a/cli/lsp/repl.rs +++ b/cli/lsp/repl.rs @@ -188,7 +188,7 @@ impl ReplLanguageServer { let new_text = if new_text.ends_with('\n') { new_text.to_string() } else { - format!("{}\n", new_text) + format!("{new_text}\n") }; self.document_version += 1; let current_line_count = diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index 3619f529c..2f66e2d2d 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -211,7 +211,7 @@ fn new_assets_map() -> Arc<Mutex<AssetsMap>> { let assets = tsc::LAZILY_LOADED_STATIC_ASSETS .iter() .map(|(k, v)| { - let url_str = format!("asset:///{}", k); + let url_str = format!("asset:///{k}"); let specifier = resolve_url(&url_str).unwrap(); let asset = AssetDocument::new(specifier.clone(), v); (specifier, asset) @@ -384,9 +384,9 @@ fn get_tag_documentation( let maybe_text = get_tag_body_text(tag, language_server); if let Some(text) = maybe_text { if text.contains('\n') { - format!("{} \n{}", label, text) + format!("{label} \n{text}") } else { - format!("{} - {}", label, text) + format!("{label} - {text}") } } else { label @@ -397,7 +397,7 @@ fn make_codeblock(text: &str) -> String { if CODEBLOCK_RE.is_match(text) { text.to_string() } else { - format!("```\n{}\n```", text) + format!("```\n{text}\n```") } } @@ -700,9 +700,9 @@ fn display_parts_to_string( .unwrap_or_else(|| "".to_string()) }); let link_str = if link.linkcode { - format!("[`{}`]({})", link_text, specifier) + 
format!("[`{link_text}`]({specifier})") } else { - format!("[{}]({})", link_text, specifier) + format!("[{link_text}]({specifier})") }; out.push(link_str); } @@ -785,8 +785,7 @@ impl QuickInfo { .join(" \n\n"); if !tags_preview.is_empty() { parts.push(lsp::MarkedString::from_markdown(format!( - "\n\n{}", - tags_preview + "\n\n{tags_preview}" ))); } } @@ -1984,7 +1983,7 @@ impl CompletionEntryDetails { .map(|tag_info| get_tag_documentation(tag_info, language_server)) .collect::<Vec<String>>() .join(""); - value = format!("{}\n\n{}", value, tag_documentation); + value = format!("{value}\n\n{tag_documentation}"); } Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { kind: lsp::MarkupKind::Markdown, @@ -2486,7 +2485,7 @@ impl SignatureHelpItem { let documentation = display_parts_to_string(&self.documentation, language_server); lsp::SignatureInformation { - label: format!("{}{}{}", prefix_text, params_text, suffix_text), + label: format!("{prefix_text}{params_text}{suffix_text}"), documentation: Some(lsp::Documentation::MarkupContent( lsp::MarkupContent { kind: lsp::MarkupKind::Markdown, @@ -2844,7 +2843,7 @@ fn start( .clone() .unwrap_or_else(|| Url::parse("cache:///").unwrap()); let init_config = json!({ "debug": debug, "rootUri": root_uri }); - let init_src = format!("globalThis.serverInit({});", init_config); + let init_src = format!("globalThis.serverInit({init_config});"); runtime.execute_script(&located_script_name!(), &init_src)?; Ok(()) @@ -3433,7 +3432,7 @@ pub fn request( (state.performance.clone(), method.to_value(state, id)) }; let mark = performance.mark("request", Some(request_params.clone())); - let request_src = format!("globalThis.serverRequest({});", request_params); + let request_src = format!("globalThis.serverRequest({request_params});"); runtime.execute_script(&located_script_name!(), &request_src)?; let op_state = runtime.op_state(); diff --git a/cli/lsp/urls.rs b/cli/lsp/urls.rs index abe83d3cb..4fba0c9ff 100644 --- a/cli/lsp/urls.rs +++ b/cli/lsp/urls.rs @@ -104,10 +104,10 @@ impl LspUrlMap { format!("deno:/asset{}", specifier.path()) } else if specifier.scheme() == "data" { let data_url = DataUrl::process(specifier.as_str()) - .map_err(|e| uri_error(format!("{:?}", e)))?; + .map_err(|e| uri_error(format!("{e:?}")))?; let mime = data_url.mime_type(); let (media_type, _) = - map_content_type(specifier, Some(&format!("{}", mime))); + map_content_type(specifier, Some(&format!("{mime}"))); let extension = if media_type == MediaType::Unknown { "" } else { @@ -128,7 +128,7 @@ impl LspUrlMap { }) .collect(); path.push_str(&parts.join("/")); - format!("deno:/{}", path) + format!("deno:/{path}") }; let url = Url::parse(&specifier_str)?; inner.put(specifier.clone(), url.clone()); diff --git a/cli/main.rs b/cli/main.rs index 8c0dd0354..7504bf941 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -139,7 +139,7 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> { DenoSubcommand::Test(test_flags) => { if let Some(ref coverage_dir) = flags.coverage_dir { std::fs::create_dir_all(coverage_dir) - .with_context(|| format!("Failed creating: {}", coverage_dir))?; + .with_context(|| format!("Failed creating: {coverage_dir}"))?; // this is set in order to ensure spawned processes use the same // coverage directory env::set_var( @@ -206,7 +206,7 @@ fn unwrap_or_exit<T>(result: Result<T, AnyError>) -> T { match result { Ok(value) => value, Err(error) => { - let mut error_string = format!("{:?}", error); + let mut error_string = format!("{error:?}"); let mut error_code = 1; if 
let Some(e) = error.downcast_ref::<JsError>() { diff --git a/cli/module_loader.rs b/cli/module_loader.rs index c0f9e43d0..c7872988b 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -122,7 +122,7 @@ impl CliModuleLoader { )? } MediaType::TsBuildInfo | MediaType::Wasm | MediaType::SourceMap => { - panic!("Unexpected media type {} for {}", media_type, found_url) + panic!("Unexpected media type {media_type} for {found_url}") } }; @@ -136,7 +136,7 @@ impl CliModuleLoader { }) } _ => { - let mut msg = format!("Loading unprepared module: {}", specifier); + let mut msg = format!("Loading unprepared module: {specifier}"); if let Some(referrer) = maybe_referrer { msg = format!("{}, imported from: {}", msg, referrer.as_str()); } diff --git a/cli/napi/async.rs b/cli/napi/async.rs index 8fdb4f9d2..8cbdb2220 100644 --- a/cli/napi/async.rs +++ b/cli/napi/async.rs @@ -39,7 +39,7 @@ fn napi_cancel_async_work( /// Frees a previously allocated work object. #[napi_sym::napi_sym] fn napi_delete_async_work(_env: &mut Env, work: napi_async_work) -> Result { - let work = Box::from_raw(work); + let work = Box::from_raw(work as *mut AsyncWork); drop(work); Ok(()) diff --git a/cli/napi/env.rs b/cli/napi/env.rs index 922c64140..bdd5221e8 100644 --- a/cli/napi/env.rs +++ b/cli/napi/env.rs @@ -34,8 +34,7 @@ pub unsafe extern "C" fn napi_fatal_error( std::str::from_utf8(slice).unwrap() }; panic!( - "Fatal exception triggered by napi_fatal_error!\nLocation: {:?}\n{}", - location, message + "Fatal exception triggered by napi_fatal_error!\nLocation: {location:?}\n{message}" ); } @@ -46,10 +45,7 @@ fn napi_fatal_exception(env: *mut Env, value: napi_value) -> Result { let env: &mut Env = env.as_mut().ok_or(Error::InvalidArg)?; let value = transmute::<napi_value, v8::Local<v8::Value>>(value); let error = value.to_rust_string_lossy(&mut env.scope()); - panic!( - "Fatal exception triggered by napi_fatal_exception!\n{}", - error - ); + panic!("Fatal exception triggered by napi_fatal_exception!\n{error}"); } #[napi_sym::napi_sym] diff --git a/cli/node/analyze.rs b/cli/node/analyze.rs index fd131c551..d1046e275 100644 --- a/cli/node/analyze.rs +++ b/cli/node/analyze.rs @@ -76,11 +76,11 @@ pub fn esm_code_with_node_globals( let global_this_expr = if has_global_this { global_this_expr } else { - write!(result, "var globalThis = {};", global_this_expr).unwrap(); + write!(result, "var globalThis = {global_this_expr};").unwrap(); "globalThis" }; for global in globals { - write!(result, "var {0} = {1}.{0};", global, global_this_expr).unwrap(); + write!(result, "var {global} = {global_this_expr}.{global};").unwrap(); } let file_text = text_info.text_str(); diff --git a/cli/node/mod.rs b/cli/node/mod.rs index 2125f670e..8d599923b 100644 --- a/cli/node/mod.rs +++ b/cli/node/mod.rs @@ -65,7 +65,7 @@ impl NodeResolution { if specifier.starts_with("node:") { ModuleSpecifier::parse(&specifier).unwrap() } else { - ModuleSpecifier::parse(&format!("node:{}", specifier)).unwrap() + ModuleSpecifier::parse(&format!("node:{specifier}")).unwrap() } } } @@ -146,8 +146,7 @@ pub fn resolve_builtin_node_module(specifier: &str) -> Result<Url, AnyError> { } Err(generic_error(format!( - "Unknown built-in \"node:\" module: {}", - specifier + "Unknown built-in \"node:\" module: {specifier}" ))) } @@ -235,8 +234,7 @@ pub async fn initialize_binary_command( Object.defineProperty(process.argv, "0", {{ get: () => binaryName, }}); - }})('{}');"#, - binary_name, + }})('{binary_name}');"#, ); let value = @@ -333,7 +331,7 @@ pub fn 
node_resolve_npm_reference( &reference .sub_path .as_ref() - .map(|s| format!("./{}", s)) + .map(|s| format!("./{s}")) .unwrap_or_else(|| ".".to_string()), &package_folder, node_module_kind, @@ -343,7 +341,7 @@ pub fn node_resolve_npm_reference( permissions, ) .with_context(|| { - format!("Error resolving package config for '{}'", reference) + format!("Error resolving package config for '{reference}'") })?; let resolved_path = match maybe_resolved_path { Some(resolved_path) => resolved_path, @@ -425,7 +423,7 @@ fn resolve_bin_entry_value<'a>( .map(|o| { o.keys() .into_iter() - .map(|k| format!(" * npm:{}/{}", pkg_req, k)) + .map(|k| format!(" * npm:{pkg_req}/{k}")) .collect::<Vec<_>>() }) .unwrap_or_default(); @@ -546,8 +544,7 @@ pub fn url_to_node_resolution( Ok(NodeResolution::Esm(url)) } else if url_str.ends_with(".ts") { Err(generic_error(format!( - "TypeScript files are not supported in npm packages: {}", - url + "TypeScript files are not supported in npm packages: {url}" ))) } else { Ok(NodeResolution::CommonJs(url)) @@ -681,15 +678,13 @@ fn add_export( // so assign it to a temporary variable that won't have a conflict, then re-export // it as a string source.push(format!( - "const __deno_export_{}__ = {};", - temp_var_count, initializer + "const __deno_export_{temp_var_count}__ = {initializer};" )); source.push(format!( - "export {{ __deno_export_{}__ as \"{}\" }};", - temp_var_count, name + "export {{ __deno_export_{temp_var_count}__ as \"{name}\" }};" )); } else { - source.push(format!("export const {} = {};", name, initializer)); + source.push(format!("export const {name} = {initializer};")); } } @@ -838,7 +833,7 @@ pub fn translate_cjs_to_esm( add_export( &mut source, export, - &format!("mod[\"{}\"]", export), + &format!("mod[\"{export}\"]"), &mut temp_var_count, ); } @@ -975,7 +970,7 @@ fn parse_specifier(specifier: &str) -> Option<(String, String)> { fn to_file_path(url: &ModuleSpecifier) -> PathBuf { url .to_file_path() - .unwrap_or_else(|_| panic!("Provided URL was not file:// URL: {}", url)) + .unwrap_or_else(|_| panic!("Provided URL was not file:// URL: {url}")) } fn to_file_path_string(url: &ModuleSpecifier) -> String { diff --git a/cli/npm/cache.rs b/cli/npm/cache.rs index 6a6c6156c..0d07d27b2 100644 --- a/cli/npm/cache.rs +++ b/cli/npm/cache.rs @@ -216,7 +216,7 @@ impl ReadonlyNpmCache { let encoded_name = mixed_case_package_name_encode(name); // Using the encoded directory may have a collision with an actual package name // so prefix it with an underscore since npm packages can't start with that - dir.join(format!("_{}", encoded_name)) + dir.join(format!("_{encoded_name}")) } else { // ensure backslashes are used on windows for part in name.split('/') { diff --git a/cli/npm/registry.rs b/cli/npm/registry.rs index 97397350d..9598feba1 100644 --- a/cli/npm/registry.rs +++ b/cli/npm/registry.rs @@ -131,8 +131,7 @@ impl NpmPackageVersionInfo { let version_req = NpmVersionReq::parse(&version_req).with_context(|| { format!( - "error parsing version requirement for dependency: {}@{}", - bare_specifier, version_req + "error parsing version requirement for dependency: {bare_specifier}@{version_req}" ) })?; Ok(NpmDependencyEntry { @@ -369,10 +368,7 @@ impl RealNpmRegistryApiInner { Ok(value) => value, Err(err) => { if cfg!(debug_assertions) { - panic!( - "error loading cached npm package info for {}: {:#}", - name, err - ); + panic!("error loading cached npm package info for {name}: {err:#}"); } else { None } @@ -415,10 +411,7 @@ impl RealNpmRegistryApiInner { 
self.save_package_info_to_file_cache_result(name, package_info) { if cfg!(debug_assertions) { - panic!( - "error saving cached npm package info for {}: {:#}", - name, err - ); + panic!("error saving cached npm package info for {name}: {err:#}"); } } } @@ -443,8 +436,7 @@ impl RealNpmRegistryApiInner { return Err(custom_error( "NotCached", format!( - "An npm specifier not found in cache: \"{}\", --cached-only is specified.", - name + "An npm specifier not found in cache: \"{name}\", --cached-only is specified." ) )); } diff --git a/cli/npm/resolution/mod.rs b/cli/npm/resolution/mod.rs index ed194bbac..407651ccb 100644 --- a/cli/npm/resolution/mod.rs +++ b/cli/npm/resolution/mod.rs @@ -112,7 +112,7 @@ impl NpmPackageId { let (input, version) = parse_version(input)?; match NpmVersion::parse(version) { Ok(version) => Ok((input, (name.to_string(), version))), - Err(err) => ParseError::fail(at_version_input, format!("{:#}", err)), + Err(err) => ParseError::fail(at_version_input, format!("{err:#}")), } } @@ -173,7 +173,7 @@ impl NpmPackageId { } with_failure_handling(parse_id_at_level(0))(id) - .with_context(|| format!("Invalid npm package id '{}'.", id)) + .with_context(|| format!("Invalid npm package id '{id}'.")) } pub fn display(&self) -> String { diff --git a/cli/npm/resolution/snapshot.rs b/cli/npm/resolution/snapshot.rs index ad6aee6d9..be64ea611 100644 --- a/cli/npm/resolution/snapshot.rs +++ b/cli/npm/resolution/snapshot.rs @@ -247,7 +247,7 @@ impl NpmResolutionSnapshot { // collect the specifiers to version mappings for (key, value) in &lockfile.content.npm.specifiers { let package_req = NpmPackageReq::from_str(key) - .with_context(|| format!("Unable to parse npm specifier: {}", key))?; + .with_context(|| format!("Unable to parse npm specifier: {key}"))?; let package_id = NpmPackageId::from_serialized(value)?; package_reqs.insert(package_req, package_id.clone()); verify_ids.insert(package_id.clone()); diff --git a/cli/npm/resolution/specifier.rs b/cli/npm/resolution/specifier.rs index 6667c60dd..0aa693472 100644 --- a/cli/npm/resolution/specifier.rs +++ b/cli/npm/resolution/specifier.rs @@ -47,7 +47,7 @@ impl NpmPackageReference { let parts = specifier.split('/').collect::<Vec<_>>(); let name_part_len = if specifier.starts_with('@') { 2 } else { 1 }; if parts.len() < name_part_len { - return Err(generic_error(format!("Not a valid package: {}", specifier))); + return Err(generic_error(format!("Not a valid package: {specifier}"))); } let name_parts = &parts[0..name_part_len]; let last_name_part = &name_parts[name_part_len - 1]; @@ -81,8 +81,7 @@ impl NpmPackageReference { if let Some(at_index) = sub_path.rfind('@') { let (new_sub_path, version) = sub_path.split_at(at_index); let msg = format!( - "Invalid package specifier 'npm:{}/{}'. Did you mean to write 'npm:{}{}/{}'?", - name, sub_path, name, version, new_sub_path + "Invalid package specifier 'npm:{name}/{sub_path}'. Did you mean to write 'npm:{name}{version}/{new_sub_path}'?" ); return Err(generic_error(msg)); } @@ -90,8 +89,7 @@ impl NpmPackageReference { if name.is_empty() { let msg = format!( - "Invalid npm specifier '{}'. Did not contain a package name.", - original_text + "Invalid npm specifier '{original_text}'. Did not contain a package name." 
); return Err(generic_error(msg)); } @@ -133,7 +131,7 @@ impl std::fmt::Display for NpmPackageReq { impl NpmPackageReq { pub fn from_str(text: &str) -> Result<Self, AnyError> { // probably should do something more targeted in the future - let reference = NpmPackageReference::from_str(&format!("npm:{}", text))?; + let reference = NpmPackageReference::from_str(&format!("npm:{text}"))?; Ok(reference.req) } } @@ -163,7 +161,7 @@ impl NpmVersionMatcher for NpmPackageReq { self .version_req .as_ref() - .map(|v| format!("{}", v)) + .map(|v| format!("{v}")) .unwrap_or_else(|| "non-prerelease".to_string()) } } diff --git a/cli/npm/resolvers/mod.rs b/cli/npm/resolvers/mod.rs index 4307f2b2e..9ea14061e 100644 --- a/cli/npm/resolvers/mod.rs +++ b/cli/npm/resolvers/mod.rs @@ -256,14 +256,13 @@ impl NpmPackageResolver { .iter() .collect::<HashSet<_>>() // prevent duplicates .iter() - .map(|p| format!("\"{}\"", p)) + .map(|p| format!("\"{p}\"")) .collect::<Vec<_>>() .join(", "); return Err(custom_error( "NoNpm", format!( - "Following npm specifiers were requested: {}; but --no-npm is specified.", - fmt_reqs + "Following npm specifiers were requested: {fmt_reqs}; but --no-npm is specified." ), )); } diff --git a/cli/npm/semver/mod.rs b/cli/npm/semver/mod.rs index a87585809..b532835e6 100644 --- a/cli/npm/semver/mod.rs +++ b/cli/npm/semver/mod.rs @@ -53,7 +53,7 @@ impl fmt::Display for NpmVersion { if i > 0 { write!(f, ".")?; } - write!(f, "{}", part)?; + write!(f, "{part}")?; } } if !self.build.is_empty() { @@ -62,7 +62,7 @@ impl fmt::Display for NpmVersion { if i > 0 { write!(f, ".")?; } - write!(f, "{}", part)?; + write!(f, "{part}")?; } } Ok(()) @@ -143,7 +143,7 @@ impl NpmVersion { pub fn parse(text: &str) -> Result<Self, AnyError> { let text = text.trim(); with_failure_handling(parse_npm_version)(text) - .with_context(|| format!("Invalid npm version '{}'.", text)) + .with_context(|| format!("Invalid npm version '{text}'.")) } } @@ -218,7 +218,7 @@ impl NpmVersionReq { pub fn parse(text: &str) -> Result<Self, AnyError> { let text = text.trim(); with_failure_handling(parse_npm_version_req)(text) - .with_context(|| format!("Invalid npm version requirement '{}'.", text)) + .with_context(|| format!("Invalid npm version requirement '{text}'.")) } } @@ -523,7 +523,7 @@ fn nr(input: &str) -> ParseResult<u64> { Err(err) => { return ParseError::fail( input, - format!("Error parsing '{}' to u64.\n\n{:#}", result, err), + format!("Error parsing '{result}' to u64.\n\n{err:#}"), ) } }; @@ -984,9 +984,7 @@ mod tests { let version = NpmVersion::parse(version_text).unwrap(); assert!( req.matches(&version), - "Checking {} satisfies {}", - req_text, - version_text + "Checking {req_text} satisfies {version_text}" ); } } @@ -1083,9 +1081,7 @@ mod tests { let version = NpmVersion::parse(version_text).unwrap(); assert!( !req.matches(&version), - "Checking {} not satisfies {}", - req_text, - version_text + "Checking {req_text} not satisfies {version_text}" ); } } diff --git a/cli/npm/semver/specifier.rs b/cli/npm/semver/specifier.rs index 3fdeab16d..b12a5c308 100644 --- a/cli/npm/semver/specifier.rs +++ b/cli/npm/semver/specifier.rs @@ -33,7 +33,7 @@ impl std::fmt::Display for SpecifierVersionReq { impl SpecifierVersionReq { pub fn parse(text: &str) -> Result<Self, AnyError> { with_failure_handling(parse_npm_specifier)(text).with_context(|| { - format!("Invalid npm specifier version requirement '{}'.", text) + format!("Invalid npm specifier version requirement '{text}'.") }) } @@ -143,7 +143,7 @@ fn nr(input: &str) -> 
ParseResult<u64> { Err(err) => { return ParseError::fail( input, - format!("Error parsing '{}' to u64.\n\n{:#}", result, err), + format!("Error parsing '{result}' to u64.\n\n{err:#}"), ) } }; diff --git a/cli/npm/tarball.rs b/cli/npm/tarball.rs index 7fce69cda..758ac3ded 100644 --- a/cli/npm/tarball.rs +++ b/cli/npm/tarball.rs @@ -154,12 +154,12 @@ mod test { verify_tarball_integrity(package, &Vec::new(), "sha512-test") .unwrap_err() .to_string(), - format!("Tarball checksum did not match what was provided by npm registry for package@1.0.0.\n\nExpected: test\nActual: {}", actual_checksum), + format!("Tarball checksum did not match what was provided by npm registry for package@1.0.0.\n\nExpected: test\nActual: {actual_checksum}"), ); assert!(verify_tarball_integrity( package, &Vec::new(), - &format!("sha512-{}", actual_checksum) + &format!("sha512-{actual_checksum}") ) .is_ok()); } diff --git a/cli/proc_state.rs b/cli/proc_state.rs index eca4579c8..2612af75b 100644 --- a/cli/proc_state.rs +++ b/cli/proc_state.rs @@ -559,7 +559,7 @@ impl ProcState { permissions, )) .with_context(|| { - format!("Could not resolve '{}' from '{}'.", specifier, referrer) + format!("Could not resolve '{specifier}' from '{referrer}'.") }); } @@ -581,7 +581,7 @@ impl ProcState { { return Err(custom_error( "NotSupported", - format!("importing npm specifiers in remote modules requires the --unstable flag (referrer: {})", found_referrer), + format!("importing npm specifiers in remote modules requires the --unstable flag (referrer: {found_referrer})"), )); } @@ -592,7 +592,7 @@ impl ProcState { &self.npm_resolver, permissions, )) - .with_context(|| format!("Could not resolve '{}'.", reference)); + .with_context(|| format!("Could not resolve '{reference}'.")); } else { return Ok(specifier.clone()); } @@ -639,7 +639,7 @@ impl ProcState { &self.npm_resolver, permissions, )) - .with_context(|| format!("Could not resolve '{}'.", reference)); + .with_context(|| format!("Could not resolve '{reference}'.")); } } } diff --git a/cli/standalone.rs b/cli/standalone.rs index 063ad7d33..e36584d72 100644 --- a/cli/standalone.rs +++ b/cli/standalone.rs @@ -320,9 +320,7 @@ fn get_error_class_name(e: &AnyError) -> &'static str { panic!( "Error '{}' contains boxed error of unsupported type:{}", e, - e.chain() - .map(|e| format!("\n {:?}", e)) - .collect::<String>() + e.chain().map(|e| format!("\n {e:?}")).collect::<String>() ); }) } diff --git a/cli/tests/integration/coverage_tests.rs b/cli/tests/integration/coverage_tests.rs index 5f82971c6..87ed655b8 100644 --- a/cli/tests/integration/coverage_tests.rs +++ b/cli/tests/integration/coverage_tests.rs @@ -97,7 +97,7 @@ fn run_coverage_text(test_name: &str, extension: &str) { .arg("--quiet") .arg("--unstable") .arg(format!("--coverage={}", tempdir.to_str().unwrap())) - .arg(format!("coverage/{}_test.{}", test_name, extension)) + .arg(format!("coverage/{test_name}_test.{extension}")) .stdout(std::process::Stdio::piped()) .stderr(std::process::Stdio::inherit()) .status() @@ -123,13 +123,13 @@ fn run_coverage_text(test_name: &str, extension: &str) { .to_string(); let expected = fs::read_to_string( - util::testdata_path().join(format!("coverage/{}_expected.out", test_name)), + util::testdata_path().join(format!("coverage/{test_name}_expected.out")), ) .unwrap(); if !util::wildcard_match(&expected, &actual) { - println!("OUTPUT\n{}\nOUTPUT", actual); - println!("EXPECTED\n{}\nEXPECTED", expected); + println!("OUTPUT\n{actual}\nOUTPUT"); + println!("EXPECTED\n{expected}\nEXPECTED"); 
panic!("pattern match failed"); } @@ -152,13 +152,13 @@ fn run_coverage_text(test_name: &str, extension: &str) { .to_string(); let expected = fs::read_to_string( - util::testdata_path().join(format!("coverage/{}_expected.lcov", test_name)), + util::testdata_path().join(format!("coverage/{test_name}_expected.lcov")), ) .unwrap(); if !util::wildcard_match(&expected, &actual) { - println!("OUTPUT\n{}\nOUTPUT", actual); - println!("EXPECTED\n{}\nEXPECTED", expected); + println!("OUTPUT\n{actual}\nOUTPUT"); + println!("EXPECTED\n{expected}\nEXPECTED"); panic!("pattern match failed"); } @@ -208,8 +208,8 @@ fn multifile_coverage() { .unwrap(); if !util::wildcard_match(&expected, &actual) { - println!("OUTPUT\n{}\nOUTPUT", actual); - println!("EXPECTED\n{}\nEXPECTED", expected); + println!("OUTPUT\n{actual}\nOUTPUT"); + println!("EXPECTED\n{expected}\nEXPECTED"); panic!("pattern match failed"); } @@ -237,8 +237,8 @@ fn multifile_coverage() { .unwrap(); if !util::wildcard_match(&expected, &actual) { - println!("OUTPUT\n{}\nOUTPUT", actual); - println!("EXPECTED\n{}\nEXPECTED", expected); + println!("OUTPUT\n{actual}\nOUTPUT"); + println!("EXPECTED\n{expected}\nEXPECTED"); panic!("pattern match failed"); } @@ -258,8 +258,7 @@ fn no_snaps_included(test_name: &str, extension: &str) { .arg("--allow-read") .arg(format!("--coverage={}", tempdir.to_str().unwrap())) .arg(format!( - "coverage/no_snaps_included/{}_test.{}", - test_name, extension + "coverage/no_snaps_included/{test_name}_test.{extension}" )) .stdout(std::process::Stdio::piped()) .stderr(std::process::Stdio::piped()) @@ -292,8 +291,8 @@ fn no_snaps_included(test_name: &str, extension: &str) { .unwrap(); if !util::wildcard_match(&expected, &actual) { - println!("OUTPUT\n{}\nOUTPUT", actual); - println!("EXPECTED\n{}\nEXPECTED", expected); + println!("OUTPUT\n{actual}\nOUTPUT"); + println!("EXPECTED\n{expected}\nEXPECTED"); panic!("pattern match failed"); } @@ -339,8 +338,8 @@ fn no_transpiled_lines() { .unwrap(); if !util::wildcard_match(&expected, &actual) { - println!("OUTPUT\n{}\nOUTPUT", actual); - println!("EXPECTED\n{}\nEXPECTED", expected); + println!("OUTPUT\n{actual}\nOUTPUT"); + println!("EXPECTED\n{expected}\nEXPECTED"); panic!("pattern match failed"); } @@ -367,8 +366,8 @@ fn no_transpiled_lines() { .unwrap(); if !util::wildcard_match(&expected, &actual) { - println!("OUTPUT\n{}\nOUTPUT", actual); - println!("EXPECTED\n{}\nEXPECTED", expected); + println!("OUTPUT\n{actual}\nOUTPUT"); + println!("EXPECTED\n{expected}\nEXPECTED"); panic!("pattern match failed"); } diff --git a/cli/tests/integration/fmt_tests.rs b/cli/tests/integration/fmt_tests.rs index d230f96c0..52aae2bd3 100644 --- a/cli/tests/integration/fmt_tests.rs +++ b/cli/tests/integration/fmt_tests.rs @@ -31,8 +31,7 @@ fn fmt_test() { .current_dir(&testdata_fmt_dir) .arg("fmt") .arg(format!( - "--ignore={},{},{}", - badly_formatted_js_str, badly_formatted_md_str, badly_formatted_json_str + "--ignore={badly_formatted_js_str},{badly_formatted_md_str},{badly_formatted_json_str}" )) .arg("--check") .arg(badly_formatted_js_str) diff --git a/cli/tests/integration/inspector_tests.rs b/cli/tests/integration/inspector_tests.rs index 5b01522d5..bfc3a63e0 100644 --- a/cli/tests/integration/inspector_tests.rs +++ b/cli/tests/integration/inspector_tests.rs @@ -103,8 +103,7 @@ impl InspectorTester { self.child.kill().unwrap(); panic!( - "Inspector test failed with error: {:?}.\nstdout:\n{}\nstderr:\n{}", - err, stdout, stderr + "Inspector test failed with error: 
{err:?}.\nstdout:\n{stdout}\nstderr:\n{stderr}" ); } } @@ -215,7 +214,7 @@ fn inspect_flag_with_unique_port(flag_prefix: &str) -> String { use std::sync::atomic::Ordering; static PORT: AtomicU16 = AtomicU16::new(9229); let port = PORT.fetch_add(1, Ordering::Relaxed); - format!("{}=127.0.0.1:{}", flag_prefix, port) + format!("{flag_prefix}=127.0.0.1:{port}") } fn extract_ws_url_from_stderr( @@ -508,7 +507,7 @@ async fn inspector_does_not_hang() { .await; tester .assert_received_messages( - &[&format!(r#"{{"id":{},"result":{{}}}}"#, request_id)], + &[&format!(r#"{{"id":{request_id},"result":{{}}}}"#)], &[r#"{"method":"Debugger.resumed","params":{}}"#], ) .await; diff --git a/cli/tests/integration/lsp_tests.rs b/cli/tests/integration/lsp_tests.rs index be280bfa7..1fd619a40 100644 --- a/cli/tests/integration/lsp_tests.rs +++ b/cli/tests/integration/lsp_tests.rs @@ -103,7 +103,7 @@ pub fn ensure_directory_specifier( ) -> ModuleSpecifier { let path = specifier.path(); if !path.ends_with('/') { - let new_path = format!("{}/", path); + let new_path = format!("{path}/"); specifier.set_path(&new_path); } specifier diff --git a/cli/tests/integration/repl_tests.rs b/cli/tests/integration/repl_tests.rs index af9fce187..30d91bc1c 100644 --- a/cli/tests/integration/repl_tests.rs +++ b/cli/tests/integration/repl_tests.rs @@ -673,7 +673,7 @@ fn assign_underscore_error() { Some(vec![("NO_COLOR".to_owned(), "1".to_owned())]), false, ); - println!("{}", out); + println!("{out}"); assert_ends_with!( out, "Last thrown error is no longer saved to _error.\n1\nUncaught 2\n1\n" diff --git a/cli/tests/integration/run_tests.rs b/cli/tests/integration/run_tests.rs index fd6644326..923232f9d 100644 --- a/cli/tests/integration/run_tests.rs +++ b/cli/tests/integration/run_tests.rs @@ -1942,9 +1942,9 @@ mod permissions { .current_dir(&util::testdata_path()) .arg("run") .arg("--unstable") - .arg(format!("--allow-{0}", permission)) + .arg(format!("--allow-{permission}")) .arg("run/permission_test.ts") - .arg(format!("{0}Required", permission)) + .arg(format!("{permission}Required")) .spawn() .unwrap() .wait() @@ -1959,10 +1959,7 @@ mod permissions { for permission in &util::PERMISSION_VARIANTS { let (_, err) = util::run_and_collect_output( false, - &format!( - "run --unstable run/permission_test.ts {0}Required", - permission - ), + &format!("run --unstable run/permission_test.ts {permission}Required"), None, None, false, @@ -2100,7 +2097,7 @@ mod permissions { let status = util::deno_cmd() .current_dir(&util::testdata_path()) .arg("run") - .arg(format!("--allow-{0}={1},{2}", permission, test_dir, js_dir)) + .arg(format!("--allow-{permission}={test_dir},{js_dir}")) .arg("run/complex_permissions_test.ts") .arg(permission) .arg("run/complex_permissions_test.ts") @@ -2119,7 +2116,7 @@ mod permissions { let status = util::deno_cmd() .current_dir(&util::testdata_path()) .arg("run") - .arg(format!("--allow-{0}=.", permission)) + .arg(format!("--allow-{permission}=.")) .arg("run/complex_permissions_test.ts") .arg(permission) .arg("run/complex_permissions_test.ts") @@ -2138,7 +2135,7 @@ mod permissions { let status = util::deno_cmd() .current_dir(&util::testdata_path()) .arg("run") - .arg(format!("--allow-{0}=tls/../", permission)) + .arg(format!("--allow-{permission}=tls/../")) .arg("run/complex_permissions_test.ts") .arg(permission) .arg("run/complex_permissions_test.ts") @@ -3251,7 +3248,7 @@ fn basic_auth_tokens() { assert!(stdout_str.is_empty()); let stderr_str = std::str::from_utf8(&output.stderr).unwrap().trim(); - 
eprintln!("{}", stderr_str); + eprintln!("{stderr_str}"); assert!(stderr_str .contains("Module not found \"http://127.0.0.1:4554/run/001_hello.js\".")); @@ -3269,7 +3266,7 @@ fn basic_auth_tokens() { .unwrap(); let stderr_str = std::str::from_utf8(&output.stderr).unwrap().trim(); - eprintln!("{}", stderr_str); + eprintln!("{stderr_str}"); assert!(output.status.success()); @@ -3354,7 +3351,7 @@ async fn test_resolve_dns() { .unwrap(); let err = String::from_utf8_lossy(&output.stderr); let out = String::from_utf8_lossy(&output.stdout); - println!("{}", err); + println!("{err}"); assert!(output.status.success()); assert!(err.starts_with("Check file")); diff --git a/cli/tests/integration/vendor_tests.rs b/cli/tests/integration/vendor_tests.rs index 11fee5686..cd2f7f12e 100644 --- a/cli/tests/integration/vendor_tests.rs +++ b/cli/tests/integration/vendor_tests.rs @@ -530,7 +530,7 @@ fn update_existing_config_test() { } fn success_text(module_count: &str, dir: &str, has_import_map: bool) -> String { - let mut text = format!("Vendored {} into {} directory.", module_count, dir); + let mut text = format!("Vendored {module_count} into {dir} directory."); if has_import_map { let f = format!( concat!( @@ -544,7 +544,7 @@ fn success_text(module_count: &str, dir: &str, has_import_map: bool) -> String { dir.to_string() } ); - write!(text, "{}", f).unwrap(); + write!(text, "{f}").unwrap(); } text } diff --git a/cli/tests/integration/watcher_tests.rs b/cli/tests/integration/watcher_tests.rs index 375bf6804..3f586d812 100644 --- a/cli/tests/integration/watcher_tests.rs +++ b/cli/tests/integration/watcher_tests.rs @@ -74,14 +74,14 @@ fn child_lines( .lines() .map(|r| { let line = r.unwrap(); - eprintln!("STDOUT: {}", line); + eprintln!("STDOUT: {line}"); line }); let stderr_lines = std::io::BufReader::new(child.stderr.take().unwrap()) .lines() .map(|r| { let line = r.unwrap(); - eprintln!("STDERR: {}", line); + eprintln!("STDERR: {line}"); line }); (stdout_lines, stderr_lines) diff --git a/cli/tools/coverage/mod.rs b/cli/tools/coverage/mod.rs index 056ed4ab4..4b2bcd49d 100644 --- a/cli/tools/coverage/mod.rs +++ b/cli/tools/coverage/mod.rs @@ -413,7 +413,7 @@ impl CoverageReporter for LcovCoverageReporter { .ok() .and_then(|p| p.to_str().map(|p| p.to_string())) .unwrap_or_else(|| coverage_report.url.to_string()); - writeln!(out_writer, "SF:{}", file_path)?; + writeln!(out_writer, "SF:{file_path}")?; for function in &coverage_report.named_functions { writeln!( @@ -433,13 +433,13 @@ impl CoverageReporter for LcovCoverageReporter { } let functions_found = coverage_report.named_functions.len(); - writeln!(out_writer, "FNF:{}", functions_found)?; + writeln!(out_writer, "FNF:{functions_found}")?; let functions_hit = coverage_report .named_functions .iter() .filter(|f| f.execution_count > 0) .count(); - writeln!(out_writer, "FNH:{}", functions_hit)?; + writeln!(out_writer, "FNH:{functions_hit}")?; for branch in &coverage_report.branches { let taken = if let Some(taken) = &branch.taken { @@ -459,10 +459,10 @@ impl CoverageReporter for LcovCoverageReporter { } let branches_found = coverage_report.branches.len(); - writeln!(out_writer, "BRF:{}", branches_found)?; + writeln!(out_writer, "BRF:{branches_found}")?; let branches_hit = coverage_report.branches.iter().filter(|b| b.is_hit).count(); - writeln!(out_writer, "BRH:{}", branches_hit)?; + writeln!(out_writer, "BRH:{branches_hit}")?; for (index, count) in &coverage_report.found_lines { writeln!(out_writer, "DA:{},{}", index + 1, count)?; } @@ -472,10 +472,10 @@ impl 
CoverageReporter for LcovCoverageReporter { .iter() .filter(|(_, count)| *count != 0) .count(); - writeln!(out_writer, "LH:{}", lines_hit)?; + writeln!(out_writer, "LH:{lines_hit}")?; let lines_found = coverage_report.found_lines.len(); - writeln!(out_writer, "LF:{}", lines_found)?; + writeln!(out_writer, "LF:{lines_found}")?; writeln!(out_writer, "end_of_record")?; Ok(()) @@ -664,7 +664,7 @@ pub async fn cover_files( ps.file_fetcher .fetch_cached(&module_specifier, 10) .with_context(|| { - format!("Failed to fetch \"{}\" from cache.", module_specifier) + format!("Failed to fetch \"{module_specifier}\" from cache.") })? }; let file = maybe_file.ok_or_else(|| { diff --git a/cli/tools/doc.rs b/cli/tools/doc.rs index 163d3ffcd..e27e49570 100644 --- a/cli/tools/doc.rs +++ b/cli/tools/doc.rs @@ -69,7 +69,7 @@ pub async fn print_docs( local: PathBuf::from("./$deno$doc.ts"), maybe_types: None, media_type: MediaType::TypeScript, - source: format!("export * from \"{}\";", module_specifier).into(), + source: format!("export * from \"{module_specifier}\";").into(), specifier: root_specifier.clone(), maybe_headers: None, }; diff --git a/cli/tools/fmt.rs b/cli/tools/fmt.rs index 441d4fe08..a2d9a3027 100644 --- a/cli/tools/fmt.rs +++ b/cli/tools/fmt.rs @@ -183,7 +183,7 @@ fn format_markdown( dprint_plugin_json::format_text(text, &json_config) } else { let fake_filename = - PathBuf::from(format!("deno_fmt_stdin.{}", extension)); + PathBuf::from(format!("deno_fmt_stdin.{extension}")); let mut codeblock_config = get_resolved_typescript_config(fmt_options); codeblock_config.line_width = line_width; @@ -287,13 +287,13 @@ async fn check_source_files( warn!("Error checking: {}", file_path.to_string_lossy()); warn!( "{}", - format!("{}", e) + format!("{e}") .split('\n') .map(|l| { if l.trim().is_empty() { String::new() } else { - format!(" {}", l) + format!(" {l}") } }) .collect::<Vec<_>>() @@ -317,8 +317,7 @@ async fn check_source_files( } else { let not_formatted_files_str = files_str(not_formatted_files_count); Err(generic_error(format!( - "Found {} not formatted {} in {}", - not_formatted_files_count, not_formatted_files_str, checked_files_str, + "Found {not_formatted_files_count} not formatted {not_formatted_files_str} in {checked_files_str}", ))) } } @@ -369,7 +368,7 @@ async fn format_source_files( Err(e) => { let _g = output_lock.lock(); eprintln!("Error formatting: {}", file_path.to_string_lossy()); - eprintln!(" {}", e); + eprintln!(" {e}"); } } Ok(()) @@ -719,7 +718,7 @@ mod test { &PathBuf::from("mod.ts"), "1", &Default::default(), - |_, file_text, _| Ok(Some(format!("1{}", file_text))), + |_, file_text, _| Ok(Some(format!("1{file_text}"))), ) .unwrap(); } diff --git a/cli/tools/info.rs b/cli/tools/info.rs index 21b3d03f7..f3de922c6 100644 --- a/cli/tools/info.rs +++ b/cli/tools/info.rs @@ -266,10 +266,7 @@ fn print_tree_node<TWrite: Write>( writeln!( writer, "{} {}", - colors::gray(format!( - "{}{}─{}", - prefix, sibling_connector, child_connector - )), + colors::gray(format!("{prefix}{sibling_connector}─{child_connector}")), child.text )?; let child_prefix = format!( diff --git a/cli/tools/init/mod.rs b/cli/tools/init/mod.rs index 12d5872f1..c12b7a286 100644 --- a/cli/tools/init/mod.rs +++ b/cli/tools/init/mod.rs @@ -18,7 +18,7 @@ fn create_file( .write(true) .create_new(true) .open(dir.join(filename)) - .with_context(|| format!("Failed to create {} file", filename))?; + .with_context(|| format!("Failed to create {filename} file"))?; file.write_all(content.as_bytes())?; Ok(()) } diff --git 
a/cli/tools/installer.rs b/cli/tools/installer.rs index 2d2584f54..cc9b5c696 100644 --- a/cli/tools/installer.rs +++ b/cli/tools/installer.rs @@ -41,8 +41,7 @@ fn validate_name(exec_name: &str) -> Result<(), AnyError> { Ok(()) } else { Err(generic_error(format!( - "Invalid executable name: {}", - exec_name + "Invalid executable name: {exec_name}" ))) } } @@ -53,11 +52,8 @@ fn validate_name(exec_name: &str) -> Result<(), AnyError> { /// A second compatible with git bash / MINGW64 /// Generate batch script to satisfy that. fn generate_executable_file(shim_data: &ShimData) -> Result<(), AnyError> { - let args: Vec<String> = shim_data - .args - .iter() - .map(|c| format!("\"{}\"", c)) - .collect(); + let args: Vec<String> = + shim_data.args.iter().map(|c| format!("\"{c}\"")).collect(); let template = format!( "% generated by deno install %\n@deno {} %*\n", args @@ -122,7 +118,7 @@ fn get_installer_root() -> Result<PathBuf, io::Error> { .ok_or_else(|| { io::Error::new( io::ErrorKind::NotFound, - format!("${} is not defined", home_env_var), + format!("${home_env_var} is not defined"), ) })?; home_path.push(".deno"); @@ -201,7 +197,7 @@ pub fn uninstall(name: String, root: Option<PathBuf>) -> Result<(), AnyError> { } if !removed { - return Err(generic_error(format!("No installation found for {}", name))); + return Err(generic_error(format!("No installation found for {name}"))); } // There might be some extra files to delete @@ -339,7 +335,7 @@ fn resolve_shim_data( Level::Debug => "debug", Level::Info => "info", _ => { - return Err(generic_error(format!("invalid log level {}", log_level))) + return Err(generic_error(format!("invalid log level {log_level}"))) } }; executable_args.push(log_level.to_string()); @@ -388,11 +384,11 @@ fn resolve_shim_data( } if let Some(inspect) = flags.inspect { - executable_args.push(format!("--inspect={}", inspect)); + executable_args.push(format!("--inspect={inspect}")); } if let Some(inspect_brk) = flags.inspect_brk { - executable_args.push(format!("--inspect-brk={}", inspect_brk)); + executable_args.push(format!("--inspect-brk={inspect_brk}")); } if let Some(import_map_path) = &flags.import_map_path { @@ -408,7 +404,7 @@ fn resolve_shim_data( extra_files.push(( copy_path, fs::read_to_string(config_path) - .with_context(|| format!("error reading {}", config_path))?, + .with_context(|| format!("error reading {config_path}"))?, )); } else { executable_args.push("--no-config".to_string()); @@ -1082,13 +1078,11 @@ mod tests { assert!(file_path.exists()); let mut expected_string = format!( - "--import-map '{}' --no-config 'http://localhost:4545/cat.ts'", - import_map_url + "--import-map '{import_map_url}' --no-config 'http://localhost:4545/cat.ts'" ); if cfg!(windows) { expected_string = format!( - "\"--import-map\" \"{}\" \"--no-config\" \"http://localhost:4545/cat.ts\"", - import_map_url + "\"--import-map\" \"{import_map_url}\" \"--no-config\" \"http://localhost:4545/cat.ts\"" ); } diff --git a/cli/tools/lint.rs b/cli/tools/lint.rs index ba30c546a..0f81ec89d 100644 --- a/cli/tools/lint.rs +++ b/cli/tools/lint.rs @@ -219,7 +219,7 @@ pub fn print_rules_list(json: bool) { }) .collect(); let json_str = serde_json::to_string_pretty(&json_rules).unwrap(); - println!("{}", json_str); + println!("{json_str}"); } else { // The rules should still be printed even if `--quiet` option is enabled, // so use `println!` here instead of `info!`. 
@@ -345,12 +345,12 @@ impl LintReporter for PrettyLintReporter { )), ); - eprintln!("{}\n", message); + eprintln!("{message}\n"); } fn visit_error(&mut self, file_path: &str, err: &AnyError) { - eprintln!("Error linting: {}", file_path); - eprintln!(" {}", err); + eprintln!("Error linting: {file_path}"); + eprintln!(" {err}"); } fn close(&mut self, check_count: usize) { @@ -393,8 +393,8 @@ impl LintReporter for CompactLintReporter { } fn visit_error(&mut self, file_path: &str, err: &AnyError) { - eprintln!("Error linting: {}", file_path); - eprintln!(" {}", err); + eprintln!("Error linting: {file_path}"); + eprintln!(" {err}"); } fn close(&mut self, check_count: usize) { diff --git a/cli/tools/repl/editor.rs b/cli/tools/repl/editor.rs index f8302367a..c9f019305 100644 --- a/cli/tools/repl/editor.rs +++ b/cli/tools/repl/editor.rs @@ -277,8 +277,7 @@ fn validate(input: &str) -> ValidationResult { | (Some(Token::DollarLBrace), Token::RBrace) => {} (Some(left), _) => { return ValidationResult::Invalid(Some(format!( - "Mismatched pairs: {:?} is not properly closed", - left + "Mismatched pairs: {left:?} is not properly closed" ))) } (None, _) => { @@ -460,7 +459,7 @@ impl ReplEditor { } self.errored_on_history_save.store(true, Relaxed); - eprintln!("Unable to save history file: {}", e); + eprintln!("Unable to save history file: {e}"); } } diff --git a/cli/tools/repl/mod.rs b/cli/tools/repl/mod.rs index 780f16a6a..a9cb0132b 100644 --- a/cli/tools/repl/mod.rs +++ b/cli/tools/repl/mod.rs @@ -112,14 +112,11 @@ pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result<i32, AnyError> { .await; // only output errors if let EvaluationOutput::Error(error_text) = output { - println!( - "Error in --eval-file file \"{}\": {}", - eval_file, error_text - ); + println!("Error in --eval-file file \"{eval_file}\": {error_text}"); } } Err(e) => { - println!("Error in --eval-file file \"{}\": {}", eval_file, e); + println!("Error in --eval-file file \"{eval_file}\": {e}"); } } } @@ -129,7 +126,7 @@ pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result<i32, AnyError> { let output = repl_session.evaluate_line_and_get_output(&eval).await; // only output errors if let EvaluationOutput::Error(error_text) = output { - println!("Error in --eval flag: {}", error_text); + println!("Error in --eval flag: {error_text}"); } } @@ -166,7 +163,7 @@ pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result<i32, AnyError> { break; } - println!("{}", output); + println!("{output}"); } Err(ReadlineError::Interrupted) => { if editor.should_exit_on_interrupt() { @@ -180,7 +177,7 @@ pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result<i32, AnyError> { break; } Err(err) => { - println!("Error: {:?}", err); + println!("Error: {err:?}"); break; } } diff --git a/cli/tools/repl/session.rs b/cli/tools/repl/session.rs index cb7d18c46..e288ab0e6 100644 --- a/cli/tools/repl/session.rs +++ b/cli/tools/repl/session.rs @@ -419,10 +419,7 @@ impl ReplSession { .text; let value = self - .evaluate_expression(&format!( - "'use strict'; void 0;\n{}", - transpiled_src - )) + .evaluate_expression(&format!("'use strict'; void 0;\n{transpiled_src}")) .await?; Ok(TsEvaluateResponse { diff --git a/cli/tools/standalone.rs b/cli/tools/standalone.rs index a0fa29e81..72f30aae9 100644 --- a/cli/tools/standalone.rs +++ b/cli/tools/standalone.rs @@ -94,7 +94,7 @@ async fn get_base_binary( } let target = target.unwrap_or_else(|| env!("TARGET").to_string()); - let binary_name = format!("deno-{}.zip", target); + let binary_name = 
format!("deno-{target}.zip"); let binary_path_suffix = if crate::version::is_canary() { format!("canary/{}/{}", crate::version::GIT_COMMIT_HASH, binary_name) @@ -127,7 +127,7 @@ async fn download_base_binary( output_directory: &Path, binary_path_suffix: &str, ) -> Result<(), AnyError> { - let download_url = format!("https://dl.deno.land/{}", binary_path_suffix); + let download_url = format!("https://dl.deno.land/{binary_path_suffix}"); let maybe_bytes = { let progress_bars = ProgressBar::new(ProgressBarStyle::DownloadBars); let progress = progress_bars.update(&download_url); @@ -164,7 +164,7 @@ async fn create_standalone_binary( let ca_data = match ps.options.ca_data() { Some(CaData::File(ca_file)) => { - Some(fs::read(ca_file).with_context(|| format!("Reading: {}", ca_file))?) + Some(fs::read(ca_file).with_context(|| format!("Reading: {ca_file}"))?) } Some(CaData::Bytes(bytes)) => Some(bytes.clone()), None => None, diff --git a/cli/tools/task.rs b/cli/tools/task.rs index 51fd377e4..0b611b9d3 100644 --- a/cli/tools/task.rs +++ b/cli/tools/task.rs @@ -56,7 +56,7 @@ pub async fn execute_script( .map(|a| format!("\"{}\"", a.replace('"', "\\\"").replace('$', "\\$"))) .collect::<Vec<_>>() .join(" "); - let script = format!("{} {}", script, additional_args); + let script = format!("{script} {additional_args}"); let script = script.trim(); log::info!( "{} {} {}", @@ -65,7 +65,7 @@ pub async fn execute_script( script, ); let seq_list = deno_task_shell::parser::parse(script) - .with_context(|| format!("Error parsing script '{}'.", task_name))?; + .with_context(|| format!("Error parsing script '{task_name}'."))?; // get the starting env vars (the PWD env var will be set by deno_task_shell) let mut env_vars = std::env::vars().collect::<HashMap<String, String>>(); @@ -81,7 +81,7 @@ pub async fn execute_script( let exit_code = deno_task_shell::execute(seq_list, env_vars, &cwd).await; Ok(exit_code) } else { - eprintln!("Task not found: {}", task_name); + eprintln!("Task not found: {task_name}"); print_available_tasks(tasks_config); Ok(1) } diff --git a/cli/tools/test.rs b/cli/tools/test.rs index c4c39ede3..e680d5718 100644 --- a/cli/tools/test.rs +++ b/cli/tools/test.rs @@ -323,7 +323,7 @@ impl PrettyTestReporter { if url.scheme() == "file" { if let Some(mut r) = self.cwd.make_relative(&url) { if !r.starts_with("../") { - r = format!("./{}", r); + r = format!("./{r}"); } return r; } @@ -513,7 +513,7 @@ impl PrettyTestReporter { ); print!(" {} ...", root.name); for name in ancestor_names { - print!(" {} ...", name); + print!(" {name} ..."); } print!(" {} ...", description.name); self.in_new_line = false; @@ -584,7 +584,7 @@ impl PrettyTestReporter { } println!("{}\n", colors::white_bold_on_red(" FAILURES ")); for failure_title in failure_titles { - println!("{}", failure_title); + println!("{failure_title}"); } } @@ -600,7 +600,7 @@ impl PrettyTestReporter { } else if count == 1 { " (1 step)".to_string() } else { - format!(" ({} steps)", count) + format!(" ({count} steps)") } }; diff --git a/cli/tools/upgrade.rs b/cli/tools/upgrade.rs index f34a30744..2caaa0e02 100644 --- a/cli/tools/upgrade.rs +++ b/cli/tools/upgrade.rs @@ -373,7 +373,7 @@ pub async fn upgrade( let archive_data = download_package(client, &download_url) .await - .with_context(|| format!("Failed downloading {}", download_url))?; + .with_context(|| format!("Failed downloading {download_url}"))?; log::info!("Deno is upgrading to version {}", &install_version); @@ -531,7 +531,7 @@ pub fn unpack_into_dir( })? .wait()? 
} - ext => panic!("Unsupported archive type: '{}'", ext), + ext => panic!("Unsupported archive type: '{ext}'"), }; assert!(unpack_status.success()); assert!(exe_path.exists()); diff --git a/cli/tools/vendor/build.rs b/cli/tools/vendor/build.rs index 830cb39f7..f418670b3 100644 --- a/cli/tools/vendor/build.rs +++ b/cli/tools/vendor/build.rs @@ -204,19 +204,15 @@ fn build_proxy_module_source( // for simplicity, always include the `export *` statement as it won't error // even when the module does not contain a named export - writeln!(text, "export * from \"{}\";", relative_specifier).unwrap(); + writeln!(text, "export * from \"{relative_specifier}\";").unwrap(); // add a default export if one exists in the module if let Some(parsed_source) = parsed_source_cache.get_parsed_source_from_module(module)? { if has_default_export(&parsed_source) { - writeln!( - text, - "export {{ default }} from \"{}\";", - relative_specifier - ) - .unwrap(); + writeln!(text, "export {{ default }} from \"{relative_specifier}\";") + .unwrap(); } } diff --git a/cli/tools/vendor/import_map.rs b/cli/tools/vendor/import_map.rs index 411a2e059..0897cbcf6 100644 --- a/cli/tools/vendor/import_map.rs +++ b/cli/tools/vendor/import_map.rs @@ -322,7 +322,7 @@ fn handle_remote_dep_specifier( if is_remote_specifier_text(text) { let base_specifier = mappings.base_specifier(specifier); if !text.starts_with(base_specifier.as_str()) { - panic!("Expected {} to start with {}", text, base_specifier); + panic!("Expected {text} to start with {base_specifier}"); } let sub_path = &text[base_specifier.as_str().len()..]; diff --git a/cli/tools/vendor/mappings.rs b/cli/tools/vendor/mappings.rs index 14705e51e..8cf6388d2 100644 --- a/cli/tools/vendor/mappings.rs +++ b/cli/tools/vendor/mappings.rs @@ -133,9 +133,7 @@ impl Mappings { self .mappings .get(specifier) - .unwrap_or_else(|| { - panic!("Could not find local path for {}", specifier) - }) + .unwrap_or_else(|| panic!("Could not find local path for {specifier}")) .to_path_buf() } } @@ -163,7 +161,7 @@ impl Mappings { .iter() .find(|s| child_specifier.as_str().starts_with(s.as_str())) .unwrap_or_else(|| { - panic!("Could not find base specifier for {}", child_specifier) + panic!("Could not find base specifier for {child_specifier}") }) } diff --git a/cli/tools/vendor/specifiers.rs b/cli/tools/vendor/specifiers.rs index d4f413c31..7418bcb8b 100644 --- a/cli/tools/vendor/specifiers.rs +++ b/cli/tools/vendor/specifiers.rs @@ -45,7 +45,7 @@ pub fn get_unique_path( let mut count = 2; // case insensitive comparison so the output works on case insensitive file systems while !unique_set.insert(path.to_string_lossy().to_lowercase()) { - path = path_with_stem_suffix(&original_path, &format!("_{}", count)); + path = path_with_stem_suffix(&original_path, &format!("_{count}")); count += 1; } path diff --git a/cli/tsc/diagnostics.rs b/cli/tsc/diagnostics.rs index ffb4a946d..461cda775 100644 --- a/cli/tsc/diagnostics.rs +++ b/cli/tsc/diagnostics.rs @@ -143,7 +143,7 @@ impl From<i64> for DiagnosticCategory { 1 => DiagnosticCategory::Error, 2 => DiagnosticCategory::Suggestion, 3 => DiagnosticCategory::Message, - _ => panic!("Unknown value: {}", value), + _ => panic!("Unknown value: {value}"), } } } @@ -212,7 +212,7 @@ impl Diagnostic { }; if !category.is_empty() { - write!(f, "{}[{}]: ", code, category) + write!(f, "{code}[{category}]: ") } else { Ok(()) } @@ -375,12 +375,12 @@ impl fmt::Display for Diagnostics { if i > 0 { write!(f, "\n\n")?; } - write!(f, "{}", item)?; + write!(f, "{item}")?; i += 1; } if 
i > 1 { - write!(f, "\n\nFound {} errors.", i)?; + write!(f, "\n\nFound {i} errors.")?; } Ok(()) diff --git a/cli/tsc/mod.rs b/cli/tsc/mod.rs index 439a0de20..3e7fbfb15 100644 --- a/cli/tsc/mod.rs +++ b/cli/tsc/mod.rs @@ -98,7 +98,7 @@ pub fn get_types_declaration_file_text(unstable: bool) -> String { lib_names .into_iter() .map(|name| { - let asset_url = format!("asset:///lib.{}.d.ts", name); + let asset_url = format!("asset:///lib.{name}.d.ts"); assets.remove(&asset_url).unwrap() }) .collect::<Vec<_>>() @@ -204,7 +204,7 @@ impl fmt::Display for Stats { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { writeln!(f, "Compilation statistics:")?; for (key, value) in self.0.clone() { - writeln!(f, " {}: {}", key, value)?; + writeln!(f, " {key}: {value}")?; } Ok(()) @@ -838,7 +838,7 @@ pub fn exec(request: Request) -> Result<Response, AnyError> { "rootNames": root_names, }); let request_str = request_value.to_string(); - let exec_source = format!("globalThis.exec({})", request_str); + let exec_source = format!("globalThis.exec({request_str})"); runtime .execute_script(&located_script_name!(), startup_source) diff --git a/cli/util/checksum.rs b/cli/util/checksum.rs index 73eec19d2..38a372590 100644 --- a/cli/util/checksum.rs +++ b/cli/util/checksum.rs @@ -12,7 +12,7 @@ pub fn gen(v: &[impl AsRef<[u8]>]) -> String { let out: Vec<String> = digest .as_ref() .iter() - .map(|byte| format!("{:02x}", byte)) + .map(|byte| format!("{byte:02x}")) .collect(); out.join("") } diff --git a/cli/util/display.rs b/cli/util/display.rs index ee3d2f2d6..96b6cf84e 100644 --- a/cli/util/display.rs +++ b/cli/util/display.rs @@ -23,7 +23,7 @@ pub fn human_size(size: f64) -> String { .unwrap() * 1_f64; let unit = units[exponent as usize]; - format!("{}{}{}", negative, pretty_bytes, unit) + format!("{negative}{pretty_bytes}{unit}") } const BYTES_TO_KIB: u64 = 2u64.pow(10); @@ -41,7 +41,7 @@ pub fn human_download_size(byte_count: u64, total_bytes: u64) -> String { fn get_in_format(byte_count: u64, conversion: u64, suffix: &str) -> String { let converted_value = byte_count / conversion; let decimal = (byte_count % conversion) * 100 / conversion; - format!("{}.{:0>2}{}", converted_value, decimal, suffix) + format!("{converted_value}.{decimal:0>2}{suffix}") } } @@ -49,7 +49,7 @@ pub fn human_download_size(byte_count: u64, total_bytes: u64) -> String { /// represents a human readable version of that time. 
pub fn human_elapsed(elapsed: u128) -> String { if elapsed < 1_000 { - return format!("{}ms", elapsed); + return format!("{elapsed}ms"); } if elapsed < 1_000 * 60 { return format!("{}s", elapsed / 1000); @@ -58,7 +58,7 @@ pub fn human_elapsed(elapsed: u128) -> String { let seconds = elapsed / 1_000; let minutes = seconds / 60; let seconds_remainder = seconds % 60; - format!("{}m{}s", minutes, seconds_remainder) + format!("{minutes}m{seconds_remainder}s") } pub fn write_to_stdout_ignore_sigpipe( diff --git a/cli/util/file_watcher.rs b/cli/util/file_watcher.rs index 29213f0c9..05415f2a6 100644 --- a/cli/util/file_watcher.rs +++ b/cli/util/file_watcher.rs @@ -74,7 +74,7 @@ where if let Err(err) = result { let error_string = match err.downcast_ref::<JsError>() { Some(e) => format_js_error(e), - None => format!("{:?}", err), + None => format!("{err:?}"), }; eprintln!( "{}: {}", @@ -130,7 +130,7 @@ pub struct PrintConfig { fn create_print_after_restart_fn(clear_screen: bool) -> impl Fn() { move || { if clear_screen && atty::is(atty::Stream::Stderr) { - eprint!("{}", CLEAR_SCREEN); + eprint!("{CLEAR_SCREEN}"); } info!( "{} File change detected! Restarting!", diff --git a/cli/util/fs.rs b/cli/util/fs.rs index cb9232f39..777b22c5f 100644 --- a/cli/util/fs.rs +++ b/cli/util/fs.rs @@ -29,7 +29,7 @@ pub fn atomic_write_file<T: AsRef<[u8]>>( let rand: String = (0..4) .map(|_| format!("{:02x}", rand::random::<u8>())) .collect(); - let extension = format!("{}.tmp", rand); + let extension = format!("{rand}.tmp"); let tmp_file = filename.with_extension(extension); write_file(&tmp_file, data, mode)?; std::fs::rename(tmp_file, filename)?; @@ -710,13 +710,13 @@ mod tests { .to_string(); let expected: Vec<ModuleSpecifier> = [ "http://localhost:8080", - &format!("{}/a.ts", root_dir_url), - &format!("{}/b.js", root_dir_url), - &format!("{}/c.tsx", root_dir_url), - &format!("{}/child/README.md", root_dir_url), - &format!("{}/child/e.mjs", root_dir_url), - &format!("{}/child/f.mjsx", root_dir_url), - &format!("{}/d.jsx", root_dir_url), + &format!("{root_dir_url}/a.ts"), + &format!("{root_dir_url}/b.js"), + &format!("{root_dir_url}/c.tsx"), + &format!("{root_dir_url}/child/README.md"), + &format!("{root_dir_url}/child/e.mjs"), + &format!("{root_dir_url}/child/f.mjsx"), + &format!("{root_dir_url}/d.jsx"), "https://localhost:8080", ] .iter() @@ -748,9 +748,9 @@ mod tests { .unwrap(); let expected: Vec<ModuleSpecifier> = [ - &format!("{}/child/README.md", root_dir_url), - &format!("{}/child/e.mjs", root_dir_url), - &format!("{}/child/f.mjsx", root_dir_url), + &format!("{root_dir_url}/child/README.md"), + &format!("{root_dir_url}/child/e.mjs"), + &format!("{root_dir_url}/child/f.mjsx"), ] .iter() .map(|f| ModuleSpecifier::parse(f).unwrap()) diff --git a/cli/util/path.rs b/cli/util/path.rs index 76e2a1b6f..d073d80bd 100644 --- a/cli/util/path.rs +++ b/cli/util/path.rs @@ -64,8 +64,7 @@ pub fn specifier_to_file_path( match result { Ok(path) => Ok(path), Err(()) => Err(uri_error(format!( - "Invalid file path.\n Specifier: {}", - specifier + "Invalid file path.\n Specifier: {specifier}" ))), } } @@ -76,7 +75,7 @@ pub fn ensure_directory_specifier( ) -> ModuleSpecifier { let path = specifier.path(); if !path.ends_with('/') { - let new_path = format!("{}/", path); + let new_path = format!("{path}/"); specifier.set_path(&new_path); } specifier @@ -135,7 +134,7 @@ pub fn relative_specifier( Some(if text.starts_with("../") || text.starts_with("./") { text } else { - format!("./{}", text) + format!("./{text}") }) } @@ -170,12 
+169,12 @@ pub fn path_with_stem_suffix(path: &Path, suffix: &str) -> PathBuf { ext )) } else { - path.with_file_name(format!("{}{}.{}", file_stem, suffix, ext)) + path.with_file_name(format!("{file_stem}{suffix}.{ext}")) }; } } - path.with_file_name(format!("{}{}", file_name, suffix)) + path.with_file_name(format!("{file_name}{suffix}")) } else { path.with_file_name(suffix) } @@ -380,9 +379,7 @@ mod test { assert_eq!( actual.as_deref(), expected, - "from: \"{}\" to: \"{}\"", - from_str, - to_str + "from: \"{from_str}\" to: \"{to_str}\"" ); } } diff --git a/cli/util/progress_bar/renderer.rs b/cli/util/progress_bar/renderer.rs index 41a27f3aa..0ea275e77 100644 --- a/cli/util/progress_bar/renderer.rs +++ b/cli/util/progress_bar/renderer.rs @@ -154,7 +154,7 @@ fn get_elapsed_text(elapsed: Duration) -> String { let elapsed_secs = elapsed.as_secs(); let seconds = elapsed_secs % 60; let minutes = elapsed_secs / 60; - format!("[{:0>2}:{:0>2}]", minutes, seconds) + format!("[{minutes:0>2}:{seconds:0>2}]") } #[cfg(test)] diff --git a/cli/util/text_encoding.rs b/cli/util/text_encoding.rs index bb7d442e9..87067e909 100644 --- a/cli/util/text_encoding.rs +++ b/cli/util/text_encoding.rs @@ -39,7 +39,7 @@ pub fn convert_to_utf8<'a>( .ok_or_else(|| ErrorKind::InvalidData.into()), None => Err(Error::new( ErrorKind::InvalidInput, - format!("Unsupported charset: {}", charset), + format!("Unsupported charset: {charset}"), )), } } diff --git a/cli/util/v8.rs b/cli/util/v8.rs index b6d6aa44e..6afaf285e 100644 --- a/cli/util/v8.rs +++ b/cli/util/v8.rs @@ -36,7 +36,7 @@ pub fn init_v8_flags(v8_flags: &[String], env_v8_flags: Vec<String>) { .collect::<Vec<_>>(); if !unrecognized_v8_flags.is_empty() { for f in unrecognized_v8_flags { - eprintln!("error: V8 did not recognize flag '{}'", f); + eprintln!("error: V8 did not recognize flag '{f}'"); } eprintln!("\nFor a list of V8 flags, use '--v8-flags=--help'"); std::process::exit(1); diff --git a/cli/worker.rs b/cli/worker.rs index 591889ccd..60663ebc0 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -791,10 +791,10 @@ mod tests { let mut worker = create_test_worker(); let result = worker.execute_main_module(&module_specifier).await; if let Err(err) = result { - eprintln!("execute_mod err {:?}", err); + eprintln!("execute_mod err {err:?}"); } if let Err(e) = worker.run_event_loop(false).await { - panic!("Future got unexpected error: {:?}", e); + panic!("Future got unexpected error: {e:?}"); } } @@ -808,10 +808,10 @@ mod tests { let mut worker = create_test_worker(); let result = worker.execute_main_module(&module_specifier).await; if let Err(err) = result { - eprintln!("execute_mod err {:?}", err); + eprintln!("execute_mod err {err:?}"); } if let Err(e) = worker.run_event_loop(false).await { - panic!("Future got unexpected error: {:?}", e); + panic!("Future got unexpected error: {e:?}"); } } |
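Every hunk above applies the same mechanical change, so a short standalone sketch may help while reviewing: Rust 1.58 stabilized captured identifiers in format strings, which lets positional arguments such as format!("{}", x) be written as format!("{x}"). The snippet below is illustrative only and is not part of the diff; the identifier names in it are made up for the example.

// Minimal standalone sketch (assumed names, not taken from this diff)
// showing the pattern applied throughout the change: positional format
// arguments become captured identifiers inside the format string.
fn main() {
    let package_name = "chalk";
    let version = "5.2.0";

    // Before: positional arguments.
    let old_style = format!("npm:{}@{}", package_name, version);

    // After: the identifiers are captured directly in the format string.
    let new_style = format!("npm:{package_name}@{version}");

    assert_eq!(old_style, new_style);

    // Debug and padding/radix specifiers combine with capture the same way.
    let err = std::io::Error::from(std::io::ErrorKind::NotFound);
    let byte = 0x2au8;
    println!("error: {err:?}"); // was: println!("error: {:?}", err);
    println!("{byte:02x}");     // was: println!("{:02x}", byte);
}

Only bare identifiers can be captured this way, which is why expressions such as tempdir.to_str().unwrap() remain ordinary positional arguments in the hunks above.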