From 2d4c46c975eb916dc622cc729a1a8d397582a76f Mon Sep 17 00:00:00 2001 From: David Sherret Date: Mon, 28 Nov 2022 17:28:54 -0500 Subject: refactor: create util folder, move nap_sym to napi/sym, move http_cache to cache folder (#16857) --- cli/args/config_file.rs | 9 +- cli/args/lockfile.rs | 7 +- cli/args/mod.rs | 126 +++- cli/cache/disk_cache.rs | 7 +- cli/cache/http_cache.rs | 285 +++++++++ cli/cache/mod.rs | 6 + cli/checksum.rs | 32 - cli/diff.rs | 227 ------- cli/display.rs | 97 --- cli/file_fetcher.rs | 219 ++----- cli/file_watcher.rs | 374 ------------ cli/fs_util.rs | 1209 -------------------------------------- cli/http_cache.rs | 284 --------- cli/logger.rs | 79 --- cli/lsp/cache.rs | 12 +- cli/lsp/completions.rs | 8 +- cli/lsp/config.rs | 7 +- cli/lsp/documents.rs | 13 +- cli/lsp/language_server.rs | 41 +- cli/lsp/registries.rs | 4 +- cli/lsp/testing/definitions.rs | 2 +- cli/lsp/testing/execution.rs | 2 +- cli/lsp/tsc.rs | 6 +- cli/lsp/urls.rs | 2 +- cli/main.rs | 33 +- cli/module_loader.rs | 4 +- cli/napi/sym/Cargo.toml | 22 + cli/napi/sym/README.md | 34 ++ cli/napi/sym/lib.rs | 46 ++ cli/napi/sym/symbol_exports.json | 148 +++++ cli/napi_sym/Cargo.toml | 22 - cli/napi_sym/README.md | 34 -- cli/napi_sym/lib.rs | 46 -- cli/napi_sym/symbol_exports.json | 148 ----- cli/npm/cache.rs | 21 +- cli/npm/registry.rs | 10 +- cli/npm/resolvers/global.rs | 3 +- cli/npm/resolvers/local.rs | 15 +- cli/npm/resolvers/mod.rs | 4 +- cli/proc_state.rs | 6 +- cli/progress_bar.rs | 143 ----- cli/text_encoding.rs | 162 ----- cli/tools/bench.rs | 22 +- cli/tools/coverage/mod.rs | 4 +- cli/tools/fmt.rs | 14 +- cli/tools/info.rs | 2 +- cli/tools/installer.rs | 13 +- cli/tools/lint.rs | 10 +- cli/tools/standalone.rs | 4 +- cli/tools/task.rs | 4 +- cli/tools/test.rs | 117 +++- cli/tools/vendor/mappings.rs | 4 +- cli/tools/vendor/mod.rs | 20 +- cli/tools/vendor/specifiers.rs | 15 +- cli/tsc/mod.rs | 7 +- cli/unix_util.rs | 45 -- cli/util/checksum.rs | 32 + cli/util/diff.rs | 227 +++++++ cli/util/display.rs | 97 +++ cli/util/file_watcher.rs | 374 ++++++++++++ cli/util/fs.rs | 661 +++++++++++++++++++++ cli/util/logger.rs | 79 +++ cli/util/mod.rs | 14 + cli/util/path.rs | 452 ++++++++++++++ cli/util/progress_bar.rs | 143 +++++ cli/util/text_encoding.rs | 162 +++++ cli/util/unix.rs | 45 ++ cli/util/windows.rs | 90 +++ cli/windows_util.rs | 90 --- cli/worker.rs | 2 +- 70 files changed, 3364 insertions(+), 3344 deletions(-) create mode 100644 cli/cache/http_cache.rs delete mode 100644 cli/checksum.rs delete mode 100644 cli/diff.rs delete mode 100644 cli/display.rs delete mode 100644 cli/file_watcher.rs delete mode 100644 cli/fs_util.rs delete mode 100644 cli/http_cache.rs delete mode 100644 cli/logger.rs create mode 100644 cli/napi/sym/Cargo.toml create mode 100644 cli/napi/sym/README.md create mode 100644 cli/napi/sym/lib.rs create mode 100644 cli/napi/sym/symbol_exports.json delete mode 100644 cli/napi_sym/Cargo.toml delete mode 100644 cli/napi_sym/README.md delete mode 100644 cli/napi_sym/lib.rs delete mode 100644 cli/napi_sym/symbol_exports.json delete mode 100644 cli/progress_bar.rs delete mode 100644 cli/text_encoding.rs delete mode 100644 cli/unix_util.rs create mode 100644 cli/util/checksum.rs create mode 100644 cli/util/diff.rs create mode 100644 cli/util/display.rs create mode 100644 cli/util/file_watcher.rs create mode 100644 cli/util/fs.rs create mode 100644 cli/util/logger.rs create mode 100644 cli/util/mod.rs create mode 100644 cli/util/path.rs create mode 100644 cli/util/progress_bar.rs create mode 
100644 cli/util/text_encoding.rs create mode 100644 cli/util/unix.rs create mode 100644 cli/util/windows.rs delete mode 100644 cli/windows_util.rs (limited to 'cli') diff --git a/cli/args/config_file.rs b/cli/args/config_file.rs index 435e0d715..76340aa8b 100644 --- a/cli/args/config_file.rs +++ b/cli/args/config_file.rs @@ -3,10 +3,9 @@ use crate::args::ConfigFlag; use crate::args::Flags; use crate::args::TaskFlags; -use crate::fs_util; -use crate::fs_util::canonicalize_path; -use crate::fs_util::specifier_parent; -use crate::fs_util::specifier_to_file_path; +use crate::util::fs::canonicalize_path; +use crate::util::path::specifier_parent; +use crate::util::path::specifier_to_file_path; use deno_core::anyhow::anyhow; use deno_core::anyhow::bail; @@ -467,7 +466,7 @@ impl ConfigFile { .. }) = &flags.subcommand { - let task_cwd = fs_util::canonicalize_path(&PathBuf::from(path))?; + let task_cwd = canonicalize_path(&PathBuf::from(path))?; if let Some(path) = Self::discover_from(&task_cwd, &mut checked)? { return Ok(Some(path)); } diff --git a/cli/args/lockfile.rs b/cli/args/lockfile.rs index f99d2f570..73a075f81 100644 --- a/cli/args/lockfile.rs +++ b/cli/args/lockfile.rs @@ -20,6 +20,7 @@ use crate::npm::NpmPackageId; use crate::npm::NpmPackageReq; use crate::npm::NpmResolutionPackage; use crate::tools::fmt::format_json; +use crate::util; use crate::Flags; #[derive(Debug)] @@ -260,7 +261,7 @@ impl Lockfile { /// is not included, insert it. fn check_or_insert(&mut self, specifier: &str, code: &str) -> bool { if let Some(lockfile_checksum) = self.content.remote.get(specifier) { - let compiled_checksum = crate::checksum::gen(&[code.as_bytes()]); + let compiled_checksum = util::checksum::gen(&[code.as_bytes()]); lockfile_checksum == &compiled_checksum } else { self.insert(specifier, code); @@ -269,7 +270,7 @@ impl Lockfile { } fn insert(&mut self, specifier: &str, code: &str) { - let checksum = crate::checksum::gen(&[code.as_bytes()]); + let checksum = util::checksum::gen(&[code.as_bytes()]); self.content.remote.insert(specifier.to_string(), checksum); self.has_content_changed = true; } @@ -359,7 +360,7 @@ impl deno_graph::source::Locker for Locker { } fn get_checksum(&self, content: &str) -> String { - crate::checksum::gen(&[content.as_bytes()]) + util::checksum::gen(&[content.as_bytes()]) } fn get_filename(&self) -> Option { diff --git a/cli/args/mod.rs b/cli/args/mod.rs index 50a407ee3..d1ff39f98 100644 --- a/cli/args/mod.rs +++ b/cli/args/mod.rs @@ -22,6 +22,10 @@ pub use config_file::TsConfig; pub use config_file::TsConfigForEmit; pub use config_file::TsConfigType; pub use config_file::TsTypeLib; +use deno_runtime::deno_tls::rustls; +use deno_runtime::deno_tls::rustls_native_certs::load_native_certs; +use deno_runtime::deno_tls::rustls_pemfile; +use deno_runtime::deno_tls::webpki_roots; pub use flags::*; pub use lockfile::Lockfile; pub use lockfile::LockfileError; @@ -40,16 +44,130 @@ use deno_runtime::inspector_server::InspectorServer; use deno_runtime::permissions::PermissionsOptions; use std::collections::BTreeMap; use std::env; +use std::io::BufReader; use std::net::SocketAddr; use std::path::PathBuf; use std::sync::Arc; use crate::cache::DenoDir; -use crate::file_fetcher::get_root_cert_store; -use crate::file_fetcher::CacheSetting; -use crate::fs_util; +use crate::util::fs::canonicalize_path_maybe_not_exists; use crate::version; +/// Indicates how cached source files should be handled. 
+#[derive(Debug, Clone, Eq, PartialEq)] +pub enum CacheSetting { + /// Only the cached files should be used. Any files not in the cache will + /// error. This is the equivalent of `--cached-only` in the CLI. + Only, + /// No cached source files should be used, and all files should be reloaded. + /// This is the equivalent of `--reload` in the CLI. + ReloadAll, + /// Only some cached resources should be used. This is the equivalent of + /// `--reload=https://deno.land/std` or + /// `--reload=https://deno.land/std,https://deno.land/x/example`. + ReloadSome(Vec), + /// The usability of a cached value is determined by analyzing the cached + /// headers and other metadata associated with a cached response, reloading + /// any cached "non-fresh" cached responses. + RespectHeaders, + /// The cached source files should be used for local modules. This is the + /// default behavior of the CLI. + Use, +} + +impl CacheSetting { + pub fn should_use_for_npm_package(&self, package_name: &str) -> bool { + match self { + CacheSetting::ReloadAll => false, + CacheSetting::ReloadSome(list) => { + if list.iter().any(|i| i == "npm:") { + return false; + } + let specifier = format!("npm:{}", package_name); + if list.contains(&specifier) { + return false; + } + true + } + _ => true, + } + } +} + +/// Create and populate a root cert store based on the passed options and +/// environment. +pub fn get_root_cert_store( + maybe_root_path: Option, + maybe_ca_stores: Option>, + maybe_ca_file: Option, +) -> Result { + let mut root_cert_store = RootCertStore::empty(); + let ca_stores: Vec = maybe_ca_stores + .or_else(|| { + let env_ca_store = env::var("DENO_TLS_CA_STORE").ok()?; + Some( + env_ca_store + .split(',') + .map(|s| s.trim().to_string()) + .filter(|s| !s.is_empty()) + .collect(), + ) + }) + .unwrap_or_else(|| vec!["mozilla".to_string()]); + + for store in ca_stores.iter() { + match store.as_str() { + "mozilla" => { + root_cert_store.add_server_trust_anchors( + webpki_roots::TLS_SERVER_ROOTS.0.iter().map(|ta| { + rustls::OwnedTrustAnchor::from_subject_spki_name_constraints( + ta.subject, + ta.spki, + ta.name_constraints, + ) + }), + ); + } + "system" => { + let roots = load_native_certs().expect("could not load platform certs"); + for root in roots { + root_cert_store + .add(&rustls::Certificate(root.0)) + .expect("Failed to add platform cert to root cert store"); + } + } + _ => { + return Err(anyhow!("Unknown certificate store \"{}\" specified (allowed: \"system,mozilla\")", store)); + } + } + } + + let ca_file = maybe_ca_file.or_else(|| env::var("DENO_CERT").ok()); + if let Some(ca_file) = ca_file { + let ca_file = if let Some(root) = &maybe_root_path { + root.join(&ca_file) + } else { + PathBuf::from(ca_file) + }; + let certfile = std::fs::File::open(&ca_file)?; + let mut reader = BufReader::new(certfile); + + match rustls_pemfile::certs(&mut reader) { + Ok(certs) => { + root_cert_store.add_parsable_certificates(&certs); + } + Err(e) => { + return Err(anyhow!( + "Unable to add pem file to certificate store: {}", + e + )); + } + } + } + + Ok(root_cert_store) +} + /// Overrides for the options below that when set will /// use these values over the values derived from the /// CLI flags or config file. 
@@ -176,7 +294,7 @@ impl CliOptions { } else { std::env::current_dir()?.join("node_modules") }; - Ok(Some(fs_util::canonicalize_path_maybe_not_exists(&path)?)) + Ok(Some(canonicalize_path_maybe_not_exists(&path)?)) } pub fn resolve_root_cert_store(&self) -> Result { diff --git a/cli/cache/disk_cache.rs b/cli/cache/disk_cache.rs index 81379ac94..60e353d85 100644 --- a/cli/cache/disk_cache.rs +++ b/cli/cache/disk_cache.rs @@ -1,7 +1,8 @@ // Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. -use crate::fs_util; -use crate::http_cache::url_to_filename; +use super::http_cache::url_to_filename; +use super::CACHE_PERM; +use crate::util::fs::atomic_write_file; use deno_core::url::Host; use deno_core::url::Url; @@ -144,7 +145,7 @@ impl DiskCache { Some(parent) => self.ensure_dir_exists(parent), None => Ok(()), }?; - fs_util::atomic_write_file(&path, data, crate::http_cache::CACHE_PERM) + atomic_write_file(&path, data, CACHE_PERM) .map_err(|e| with_io_context(&e, format!("{:#?}", &path))) } } diff --git a/cli/cache/http_cache.rs b/cli/cache/http_cache.rs new file mode 100644 index 000000000..f4cf3ef11 --- /dev/null +++ b/cli/cache/http_cache.rs @@ -0,0 +1,285 @@ +// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. +//! This module is meant to eventually implement HTTP cache +//! as defined in RFC 7234 (). +//! Currently it's a very simplified version to fulfill Deno needs +//! at hand. +use crate::http_util::HeadersMap; +use crate::util; +use deno_core::error::generic_error; +use deno_core::error::AnyError; +use deno_core::serde::Deserialize; +use deno_core::serde::Serialize; +use deno_core::serde_json; +use deno_core::url::Url; +use log::error; +use std::fs; +use std::fs::File; +use std::io; +use std::path::Path; +use std::path::PathBuf; +use std::time::SystemTime; + +use super::CACHE_PERM; + +/// Turn base of url (scheme, hostname, port) into a valid filename. +/// This method replaces port part with a special string token (because +/// ":" cannot be used in filename on some platforms). +/// Ex: $DENO_DIR/deps/https/deno.land/ +fn base_url_to_filename(url: &Url) -> Option { + let mut out = PathBuf::new(); + + let scheme = url.scheme(); + out.push(scheme); + + match scheme { + "http" | "https" => { + let host = url.host_str().unwrap(); + let host_port = match url.port() { + Some(port) => format!("{}_PORT{}", host, port), + None => host.to_string(), + }; + out.push(host_port); + } + "data" | "blob" => (), + scheme => { + error!("Don't know how to create cache name for scheme: {}", scheme); + return None; + } + }; + + Some(out) +} + +/// Turn provided `url` into a hashed filename. +/// URLs can contain a lot of characters that cannot be used +/// in filenames (like "?", "#", ":"), so in order to cache +/// them properly they are deterministically hashed into ASCII +/// strings. +/// +/// NOTE: this method is `pub` because it's used in integration_tests +pub fn url_to_filename(url: &Url) -> Option { + let mut cache_filename = base_url_to_filename(url)?; + + let mut rest_str = url.path().to_string(); + if let Some(query) = url.query() { + rest_str.push('?'); + rest_str.push_str(query); + } + // NOTE: fragment is omitted on purpose - it's not taken into + // account when caching - it denotes parts of webpage, which + // in case of static resources doesn't make much sense + let hashed_filename = util::checksum::gen(&[rest_str.as_bytes()]); + cache_filename.push(hashed_filename); + Some(cache_filename) +} + +/// Cached metadata about a url. 
+#[derive(Serialize, Deserialize)] +pub struct CachedUrlMetadata { + pub headers: HeadersMap, + pub url: String, + #[serde(default = "SystemTime::now")] + pub now: SystemTime, +} + +impl CachedUrlMetadata { + pub fn write(&self, cache_filename: &Path) -> Result<(), AnyError> { + let metadata_filename = Self::filename(cache_filename); + let json = serde_json::to_string_pretty(self)?; + util::fs::atomic_write_file(&metadata_filename, json, CACHE_PERM)?; + Ok(()) + } + + pub fn read(cache_filename: &Path) -> Result { + let metadata_filename = Self::filename(cache_filename); + let metadata = fs::read_to_string(metadata_filename)?; + let metadata: Self = serde_json::from_str(&metadata)?; + Ok(metadata) + } + + /// Ex: $DENO_DIR/deps/https/deno.land/c885b7dcf1d6936e33a9cc3a2d74ec79bab5d733d3701c85a029b7f7ec9fbed4.metadata.json + pub fn filename(cache_filename: &Path) -> PathBuf { + cache_filename.with_extension("metadata.json") + } +} + +#[derive(Debug, Clone, Default)] +pub struct HttpCache { + pub location: PathBuf, +} + +impl HttpCache { + /// Returns a new instance. + /// + /// `location` must be an absolute path. + pub fn new(location: &Path) -> Self { + assert!(location.is_absolute()); + Self { + location: location.to_owned(), + } + } + + /// Ensures the location of the cache. + fn ensure_dir_exists(&self, path: &Path) -> io::Result<()> { + if path.is_dir() { + return Ok(()); + } + fs::create_dir_all(path).map_err(|e| { + io::Error::new( + e.kind(), + format!( + "Could not create remote modules cache location: {:?}\nCheck the permission of the directory.", + path + ), + ) + }) + } + + pub fn get_cache_filename(&self, url: &Url) -> Option { + Some(self.location.join(url_to_filename(url)?)) + } + + // TODO(bartlomieju): this method should check headers file + // and validate against ETAG/Last-modified-as headers. + // ETAG check is currently done in `cli/file_fetcher.rs`. + pub fn get( + &self, + url: &Url, + ) -> Result<(File, HeadersMap, SystemTime), AnyError> { + let cache_filename = self.location.join( + url_to_filename(url) + .ok_or_else(|| generic_error("Can't convert url to filename."))?, + ); + let metadata_filename = CachedUrlMetadata::filename(&cache_filename); + let file = File::open(cache_filename)?; + let metadata = fs::read_to_string(metadata_filename)?; + let metadata: CachedUrlMetadata = serde_json::from_str(&metadata)?; + Ok((file, metadata.headers, metadata.now)) + } + + pub fn set( + &self, + url: &Url, + headers_map: HeadersMap, + content: &[u8], + ) -> Result<(), AnyError> { + let cache_filename = self.location.join( + url_to_filename(url) + .ok_or_else(|| generic_error("Can't convert url to filename."))?, + ); + // Create parent directory + let parent_filename = cache_filename + .parent() + .expect("Cache filename should have a parent dir"); + self.ensure_dir_exists(parent_filename)?; + // Cache content + util::fs::atomic_write_file(&cache_filename, content, CACHE_PERM)?; + + let metadata = CachedUrlMetadata { + now: SystemTime::now(), + url: url.to_string(), + headers: headers_map, + }; + metadata.write(&cache_filename) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::collections::HashMap; + use std::io::Read; + use test_util::TempDir; + + #[test] + fn test_create_cache() { + let dir = TempDir::new(); + let mut cache_path = dir.path().to_owned(); + cache_path.push("foobar"); + // HttpCache should be created lazily on first use: + // when zipping up a local project with no external dependencies + // "$DENO_DIR/deps" is empty. 
When unzipping such project + // "$DENO_DIR/deps" might not get restored and in situation + // when directory is owned by root we might not be able + // to create that directory. However if it's not needed it + // doesn't make sense to return error in such specific scenarios. + // For more details check issue: + // https://github.com/denoland/deno/issues/5688 + let cache = HttpCache::new(&cache_path); + assert!(!cache.location.exists()); + cache + .set( + &Url::parse("http://example.com/foo/bar.js").unwrap(), + HeadersMap::new(), + b"hello world", + ) + .expect("Failed to add to cache"); + assert!(cache.ensure_dir_exists(&cache.location).is_ok()); + assert!(cache_path.is_dir()); + } + + #[test] + fn test_get_set() { + let dir = TempDir::new(); + let cache = HttpCache::new(dir.path()); + let url = Url::parse("https://deno.land/x/welcome.ts").unwrap(); + let mut headers = HashMap::new(); + headers.insert( + "content-type".to_string(), + "application/javascript".to_string(), + ); + headers.insert("etag".to_string(), "as5625rqdsfb".to_string()); + let content = b"Hello world"; + let r = cache.set(&url, headers, content); + eprintln!("result {:?}", r); + assert!(r.is_ok()); + let r = cache.get(&url); + assert!(r.is_ok()); + let (mut file, headers, _) = r.unwrap(); + let mut content = String::new(); + file.read_to_string(&mut content).unwrap(); + assert_eq!(content, "Hello world"); + assert_eq!( + headers.get("content-type").unwrap(), + "application/javascript" + ); + assert_eq!(headers.get("etag").unwrap(), "as5625rqdsfb"); + assert_eq!(headers.get("foobar"), None); + } + + #[test] + fn test_url_to_filename() { + let test_cases = [ + ("https://deno.land/x/foo.ts", "https/deno.land/2c0a064891b9e3fbe386f5d4a833bce5076543f5404613656042107213a7bbc8"), + ( + "https://deno.land:8080/x/foo.ts", + "https/deno.land_PORT8080/2c0a064891b9e3fbe386f5d4a833bce5076543f5404613656042107213a7bbc8", + ), + ("https://deno.land/", "https/deno.land/8a5edab282632443219e051e4ade2d1d5bbc671c781051bf1437897cbdfea0f1"), + ( + "https://deno.land/?asdf=qwer", + "https/deno.land/e4edd1f433165141015db6a823094e6bd8f24dd16fe33f2abd99d34a0a21a3c0", + ), + // should be the same as case above, fragment (#qwer) is ignored + // when hashing + ( + "https://deno.land/?asdf=qwer#qwer", + "https/deno.land/e4edd1f433165141015db6a823094e6bd8f24dd16fe33f2abd99d34a0a21a3c0", + ), + ( + "data:application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=", + "data/c21c7fc382b2b0553dc0864aa81a3acacfb7b3d1285ab5ae76da6abec213fb37", + ), + ( + "data:text/plain,Hello%2C%20Deno!", + "data/967374e3561d6741234131e342bf5c6848b70b13758adfe23ee1a813a8131818", + ) + ]; + + for (url, expected) in test_cases.iter() { + let u = Url::parse(url).unwrap(); + let p = url_to_filename(&u).unwrap(); + assert_eq!(p, PathBuf::from(expected)); + } + } +} diff --git a/cli/cache/mod.rs b/cli/cache/mod.rs index cf9a4c441..b0d79400a 100644 --- a/cli/cache/mod.rs +++ b/cli/cache/mod.rs @@ -19,6 +19,7 @@ mod common; mod deno_dir; mod disk_cache; mod emit; +mod http_cache; mod incremental; mod node; mod parsed_source; @@ -28,10 +29,15 @@ pub use common::FastInsecureHasher; pub use deno_dir::DenoDir; pub use disk_cache::DiskCache; pub use emit::EmitCache; +pub use http_cache::CachedUrlMetadata; +pub use http_cache::HttpCache; pub use incremental::IncrementalCache; pub use node::NodeAnalysisCache; pub use parsed_source::ParsedSourceCache; +/// Permissions used to save a file in the disk caches. 
+pub const CACHE_PERM: u32 = 0o644; + /// A "wrapper" for the FileFetcher and DiskCache for the Deno CLI that provides /// a concise interface to the DENO_DIR when building module graphs. pub struct FetchCacher { diff --git a/cli/checksum.rs b/cli/checksum.rs deleted file mode 100644 index c0e41356d..000000000 --- a/cli/checksum.rs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. - -use ring::digest::Context; -use ring::digest::SHA256; - -pub fn gen(v: &[impl AsRef<[u8]>]) -> String { - let mut ctx = Context::new(&SHA256); - for src in v { - ctx.update(src.as_ref()); - } - let digest = ctx.finish(); - let out: Vec = digest - .as_ref() - .iter() - .map(|byte| format!("{:02x}", byte)) - .collect(); - out.join("") -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_gen() { - let actual = gen(&[b"hello world"]); - assert_eq!( - actual, - "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9" - ); - } -} diff --git a/cli/diff.rs b/cli/diff.rs deleted file mode 100644 index 048464162..000000000 --- a/cli/diff.rs +++ /dev/null @@ -1,227 +0,0 @@ -// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. - -use crate::colors; -use dissimilar::{diff as difference, Chunk}; -use std::fmt::Write as _; - -/// Print diff of the same file_path, before and after formatting. -/// -/// Diff format is loosely based on GitHub diff formatting. -pub fn diff(orig_text: &str, edit_text: &str) -> String { - if orig_text == edit_text { - return String::new(); - } - - // normalize newlines as it adds too much noise if they differ - let orig_text = orig_text.replace("\r\n", "\n"); - let edit_text = edit_text.replace("\r\n", "\n"); - - if orig_text == edit_text { - return " | Text differed by line endings.\n".to_string(); - } - - DiffBuilder::build(&orig_text, &edit_text) -} - -struct DiffBuilder { - output: String, - line_number_width: usize, - orig_line: usize, - edit_line: usize, - orig: String, - edit: String, - has_changes: bool, -} - -impl DiffBuilder { - pub fn build(orig_text: &str, edit_text: &str) -> String { - let mut diff_builder = DiffBuilder { - output: String::new(), - orig_line: 1, - edit_line: 1, - orig: String::new(), - edit: String::new(), - has_changes: false, - line_number_width: { - let line_count = std::cmp::max( - orig_text.split('\n').count(), - edit_text.split('\n').count(), - ); - line_count.to_string().chars().count() - }, - }; - - let chunks = difference(orig_text, edit_text); - diff_builder.handle_chunks(chunks); - diff_builder.output - } - - fn handle_chunks<'a>(&'a mut self, chunks: Vec>) { - for chunk in chunks { - match chunk { - Chunk::Delete(s) => { - let split = s.split('\n').enumerate(); - for (i, s) in split { - if i > 0 { - self.orig.push('\n'); - } - self.orig.push_str(&fmt_rem_text_highlight(s)); - } - self.has_changes = true - } - Chunk::Insert(s) => { - let split = s.split('\n').enumerate(); - for (i, s) in split { - if i > 0 { - self.edit.push('\n'); - } - self.edit.push_str(&fmt_add_text_highlight(s)); - } - self.has_changes = true - } - Chunk::Equal(s) => { - let split = s.split('\n').enumerate(); - for (i, s) in split { - if i > 0 { - self.flush_changes(); - } - self.orig.push_str(&fmt_rem_text(s)); - self.edit.push_str(&fmt_add_text(s)); - } - } - } - } - - self.flush_changes(); - } - - fn flush_changes(&mut self) { - if self.has_changes { - self.write_line_diff(); - - self.orig_line += self.orig.split('\n').count(); - self.edit_line += self.edit.split('\n').count(); 
- self.has_changes = false; - } else { - self.orig_line += 1; - self.edit_line += 1; - } - - self.orig.clear(); - self.edit.clear(); - } - - fn write_line_diff(&mut self) { - let split = self.orig.split('\n').enumerate(); - for (i, s) in split { - write!( - self.output, - "{:width$}{} ", - self.orig_line + i, - colors::gray(" |"), - width = self.line_number_width - ) - .unwrap(); - self.output.push_str(&fmt_rem()); - self.output.push_str(s); - self.output.push('\n'); - } - - let split = self.edit.split('\n').enumerate(); - for (i, s) in split { - write!( - self.output, - "{:width$}{} ", - self.edit_line + i, - colors::gray(" |"), - width = self.line_number_width - ) - .unwrap(); - self.output.push_str(&fmt_add()); - self.output.push_str(s); - self.output.push('\n'); - } - } -} - -fn fmt_add() -> String { - colors::green_bold("+").to_string() -} - -fn fmt_add_text(x: &str) -> String { - colors::green(x).to_string() -} - -fn fmt_add_text_highlight(x: &str) -> String { - colors::black_on_green(x).to_string() -} - -fn fmt_rem() -> String { - colors::red_bold("-").to_string() -} - -fn fmt_rem_text(x: &str) -> String { - colors::red(x).to_string() -} - -fn fmt_rem_text_highlight(x: &str) -> String { - colors::white_on_red(x).to_string() -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_diff() { - run_test( - "console.log('Hello World')", - "console.log(\"Hello World\");", - concat!( - "1 | -console.log('Hello World')\n", - "1 | +console.log(\"Hello World\");\n", - ), - ); - - run_test( - "\n\n\n\nconsole.log(\n'Hello World'\n)", - "console.log(\n\"Hello World\"\n);", - concat!( - "1 | -\n", - "2 | -\n", - "3 | -\n", - "4 | -\n", - "5 | -console.log(\n", - "1 | +console.log(\n", - "6 | -'Hello World'\n", - "2 | +\"Hello World\"\n", - "7 | -)\n3 | +);\n", - ), - ); - } - - #[test] - fn test_eof_newline_missing() { - run_test( - "test\nsome line text test", - "test\nsome line text test\n", - concat!( - "2 | -some line text test\n", - "2 | +some line text test\n", - "3 | +\n", - ), - ); - } - - #[test] - fn test_newlines_differing() { - run_test("test\n", "test\r\n", " | Text differed by line endings.\n"); - } - - fn run_test(diff_text1: &str, diff_text2: &str, expected_output: &str) { - assert_eq!( - test_util::strip_ansi_codes(&diff(diff_text1, diff_text2,)), - expected_output, - ); - } -} diff --git a/cli/display.rs b/cli/display.rs deleted file mode 100644 index f13965e28..000000000 --- a/cli/display.rs +++ /dev/null @@ -1,97 +0,0 @@ -// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. - -use deno_core::error::AnyError; -use deno_core::serde_json; -use std::io::Write; - -/// A function that converts a float to a string the represents a human -/// readable version of that number. -pub fn human_size(size: f64) -> String { - let negative = if size.is_sign_positive() { "" } else { "-" }; - let size = size.abs(); - let units = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]; - if size < 1_f64 { - return format!("{}{}{}", negative, size, "B"); - } - let delimiter = 1024_f64; - let exponent = std::cmp::min( - (size.ln() / delimiter.ln()).floor() as i32, - (units.len() - 1) as i32, - ); - let pretty_bytes = format!("{:.2}", size / delimiter.powi(exponent)) - .parse::() - .unwrap() - * 1_f64; - let unit = units[exponent as usize]; - format!("{}{}{}", negative, pretty_bytes, unit) -} - -/// A function that converts a milisecond elapsed time to a string that -/// represents a human readable version of that time. 
-pub fn human_elapsed(elapsed: u128) -> String { - if elapsed < 1_000 { - return format!("{}ms", elapsed); - } - if elapsed < 1_000 * 60 { - return format!("{}s", elapsed / 1000); - } - - let seconds = elapsed / 1_000; - let minutes = seconds / 60; - let seconds_remainder = seconds % 60; - format!("{}m{}s", minutes, seconds_remainder) -} - -pub fn write_to_stdout_ignore_sigpipe( - bytes: &[u8], -) -> Result<(), std::io::Error> { - use std::io::ErrorKind; - - match std::io::stdout().write_all(bytes) { - Ok(()) => Ok(()), - Err(e) => match e.kind() { - ErrorKind::BrokenPipe => Ok(()), - _ => Err(e), - }, - } -} - -pub fn write_json_to_stdout(value: &T) -> Result<(), AnyError> -where - T: ?Sized + serde::ser::Serialize, -{ - let mut writer = std::io::BufWriter::new(std::io::stdout()); - serde_json::to_writer_pretty(&mut writer, value)?; - writeln!(&mut writer)?; - Ok(()) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_human_size() { - assert_eq!(human_size(1_f64), "1B"); - assert_eq!(human_size((12 * 1024) as f64), "12KB"); - assert_eq!(human_size((24_i64 * 1024 * 1024) as f64), "24MB"); - assert_eq!(human_size((24_i64 * 1024 * 1024 * 1024) as f64), "24GB"); - assert_eq!( - human_size((24_i64 * 1024 * 1024 * 1024 * 1024) as f64), - "24TB" - ); - assert_eq!(human_size(0_f64), "0B"); - assert_eq!(human_size(-10_f64), "-10B"); - } - - #[test] - fn test_human_elapsed() { - assert_eq!(human_elapsed(1), "1ms"); - assert_eq!(human_elapsed(256), "256ms"); - assert_eq!(human_elapsed(1000), "1s"); - assert_eq!(human_elapsed(1001), "1s"); - assert_eq!(human_elapsed(1020), "1s"); - assert_eq!(human_elapsed(70 * 1000), "1m10s"); - assert_eq!(human_elapsed(86 * 1000 + 100), "1m26s"); - } -} diff --git a/cli/file_fetcher.rs b/cli/file_fetcher.rs index b9a91d41a..12f39c7e3 100644 --- a/cli/file_fetcher.rs +++ b/cli/file_fetcher.rs @@ -1,18 +1,18 @@ // Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. +use crate::args::CacheSetting; use crate::auth_tokens::AuthTokens; +use crate::cache::HttpCache; use crate::colors; -use crate::http_cache::HttpCache; use crate::http_util::CacheSemantics; use crate::http_util::FetchOnceArgs; use crate::http_util::FetchOnceResult; use crate::http_util::HttpClient; -use crate::progress_bar::ProgressBar; -use crate::text_encoding; +use crate::util::progress_bar::ProgressBar; +use crate::util::text_encoding; use data_url::DataUrl; use deno_ast::MediaType; -use deno_core::anyhow::anyhow; use deno_core::error::custom_error; use deno_core::error::generic_error; use deno_core::error::uri_error; @@ -21,11 +21,6 @@ use deno_core::futures; use deno_core::futures::future::FutureExt; use deno_core::parking_lot::Mutex; use deno_core::ModuleSpecifier; -use deno_runtime::deno_tls::rustls; -use deno_runtime::deno_tls::rustls::RootCertStore; -use deno_runtime::deno_tls::rustls_native_certs::load_native_certs; -use deno_runtime::deno_tls::rustls_pemfile; -use deno_runtime::deno_tls::webpki_roots; use deno_runtime::deno_web::BlobStore; use deno_runtime::permissions::Permissions; use log::debug; @@ -34,7 +29,6 @@ use std::collections::HashMap; use std::env; use std::fs; use std::future::Future; -use std::io::BufReader; use std::io::Read; use std::path::PathBuf; use std::pin::Pin; @@ -82,86 +76,6 @@ impl FileCache { } } -/// Indicates how cached source files should be handled. -#[derive(Debug, Clone, Eq, PartialEq)] -pub enum CacheSetting { - /// Only the cached files should be used. Any files not in the cache will - /// error. 
This is the equivalent of `--cached-only` in the CLI. - Only, - /// No cached source files should be used, and all files should be reloaded. - /// This is the equivalent of `--reload` in the CLI. - ReloadAll, - /// Only some cached resources should be used. This is the equivalent of - /// `--reload=https://deno.land/std` or - /// `--reload=https://deno.land/std,https://deno.land/x/example`. - ReloadSome(Vec), - /// The usability of a cached value is determined by analyzing the cached - /// headers and other metadata associated with a cached response, reloading - /// any cached "non-fresh" cached responses. - RespectHeaders, - /// The cached source files should be used for local modules. This is the - /// default behavior of the CLI. - Use, -} - -impl CacheSetting { - /// Returns if the cache should be used for a given specifier. - pub fn should_use( - &self, - specifier: &ModuleSpecifier, - http_cache: &HttpCache, - ) -> bool { - match self { - CacheSetting::ReloadAll => false, - CacheSetting::Use | CacheSetting::Only => true, - CacheSetting::RespectHeaders => { - if let Ok((_, headers, cache_time)) = http_cache.get(specifier) { - let cache_semantics = - CacheSemantics::new(headers, cache_time, SystemTime::now()); - cache_semantics.should_use() - } else { - false - } - } - CacheSetting::ReloadSome(list) => { - let mut url = specifier.clone(); - url.set_fragment(None); - if list.contains(&url.as_str().to_string()) { - return false; - } - url.set_query(None); - let mut path = PathBuf::from(url.as_str()); - loop { - if list.contains(&path.to_str().unwrap().to_string()) { - return false; - } - if !path.pop() { - break; - } - } - true - } - } - } - - pub fn should_use_for_npm_package(&self, package_name: &str) -> bool { - match self { - CacheSetting::ReloadAll => false, - CacheSetting::ReloadSome(list) => { - if list.contains(&"npm:".to_string()) { - return false; - } - let specifier = format!("npm:{}", package_name); - if list.contains(&specifier) { - return false; - } - true - } - _ => true, - } - } -} - /// Fetch a source file from the local file system. fn fetch_local(specifier: &ModuleSpecifier) -> Result { let local = specifier.to_file_path().map_err(|_| { @@ -182,80 +96,6 @@ fn fetch_local(specifier: &ModuleSpecifier) -> Result { }) } -/// Create and populate a root cert store based on the passed options and -/// environment. 
-pub fn get_root_cert_store( - maybe_root_path: Option, - maybe_ca_stores: Option>, - maybe_ca_file: Option, -) -> Result { - let mut root_cert_store = RootCertStore::empty(); - let ca_stores: Vec = maybe_ca_stores - .or_else(|| { - let env_ca_store = env::var("DENO_TLS_CA_STORE").ok()?; - Some( - env_ca_store - .split(',') - .map(|s| s.trim().to_string()) - .filter(|s| !s.is_empty()) - .collect(), - ) - }) - .unwrap_or_else(|| vec!["mozilla".to_string()]); - - for store in ca_stores.iter() { - match store.as_str() { - "mozilla" => { - root_cert_store.add_server_trust_anchors( - webpki_roots::TLS_SERVER_ROOTS.0.iter().map(|ta| { - rustls::OwnedTrustAnchor::from_subject_spki_name_constraints( - ta.subject, - ta.spki, - ta.name_constraints, - ) - }), - ); - } - "system" => { - let roots = load_native_certs().expect("could not load platform certs"); - for root in roots { - root_cert_store - .add(&rustls::Certificate(root.0)) - .expect("Failed to add platform cert to root cert store"); - } - } - _ => { - return Err(anyhow!("Unknown certificate store \"{}\" specified (allowed: \"system,mozilla\")", store)); - } - } - } - - let ca_file = maybe_ca_file.or_else(|| env::var("DENO_CERT").ok()); - if let Some(ca_file) = ca_file { - let ca_file = if let Some(root) = &maybe_root_path { - root.join(&ca_file) - } else { - PathBuf::from(ca_file) - }; - let certfile = fs::File::open(&ca_file)?; - let mut reader = BufReader::new(certfile); - - match rustls_pemfile::certs(&mut reader) { - Ok(certs) => { - root_cert_store.add_parsable_certificates(&certs); - } - Err(e) => { - return Err(anyhow!( - "Unable to add pem file to certificate store: {}", - e - )); - } - } - } - - Ok(root_cert_store) -} - /// Returns the decoded body and content-type of a provided /// data URL. pub fn get_source_from_data_url( @@ -571,7 +411,7 @@ impl FileFetcher { return futures::future::err(err).boxed(); } - if self.cache_setting.should_use(specifier, &self.http_cache) { + if self.should_use_cache(specifier) { match self.fetch_cached(specifier, redirect_limit) { Ok(Some(file)) => { return futures::future::ok(file).boxed(); @@ -654,6 +494,41 @@ impl FileFetcher { .boxed() } + /// Returns if the cache should be used for a given specifier. + fn should_use_cache(&self, specifier: &ModuleSpecifier) -> bool { + match &self.cache_setting { + CacheSetting::ReloadAll => false, + CacheSetting::Use | CacheSetting::Only => true, + CacheSetting::RespectHeaders => { + if let Ok((_, headers, cache_time)) = self.http_cache.get(specifier) { + let cache_semantics = + CacheSemantics::new(headers, cache_time, SystemTime::now()); + cache_semantics.should_use() + } else { + false + } + } + CacheSetting::ReloadSome(list) => { + let mut url = specifier.clone(); + url.set_fragment(None); + if list.contains(&url.as_str().to_string()) { + return false; + } + url.set_query(None); + let mut path = PathBuf::from(url.as_str()); + loop { + if list.contains(&path.to_str().unwrap().to_string()) { + return false; + } + if !path.pop() { + break; + } + } + true + } + } + } + /// Fetch a source file and asynchronously return it. 
pub async fn fetch( &self, @@ -754,6 +629,7 @@ impl FileFetcher { #[cfg(test)] mod tests { + use crate::cache::CachedUrlMetadata; use crate::http_util::HttpClient; use super::*; @@ -1175,8 +1051,7 @@ mod tests { .http_cache .get_cache_filename(&specifier) .unwrap(); - let mut metadata = - crate::http_cache::Metadata::read(&cache_filename).unwrap(); + let mut metadata = CachedUrlMetadata::read(&cache_filename).unwrap(); metadata.headers = HashMap::new(); metadata .headers @@ -1265,8 +1140,7 @@ mod tests { .await; assert!(result.is_ok()); - let metadata_filename = - crate::http_cache::Metadata::filename(&cache_filename); + let metadata_filename = CachedUrlMetadata::filename(&cache_filename); let metadata_file = fs::File::open(metadata_filename).unwrap(); let metadata_file_metadata = metadata_file.metadata().unwrap(); let metadata_file_modified_01 = metadata_file_metadata.modified().unwrap(); @@ -1285,8 +1159,7 @@ mod tests { .await; assert!(result.is_ok()); - let metadata_filename = - crate::http_cache::Metadata::filename(&cache_filename); + let metadata_filename = CachedUrlMetadata::filename(&cache_filename); let metadata_file = fs::File::open(metadata_filename).unwrap(); let metadata_file_metadata = metadata_file.metadata().unwrap(); let metadata_file_modified_02 = metadata_file_metadata.modified().unwrap(); @@ -1438,7 +1311,7 @@ mod tests { assert!(result.is_ok()); let metadata_filename = - crate::http_cache::Metadata::filename(&redirected_cache_filename); + CachedUrlMetadata::filename(&redirected_cache_filename); let metadata_file = fs::File::open(metadata_filename).unwrap(); let metadata_file_metadata = metadata_file.metadata().unwrap(); let metadata_file_modified_01 = metadata_file_metadata.modified().unwrap(); @@ -1458,7 +1331,7 @@ mod tests { assert!(result.is_ok()); let metadata_filename = - crate::http_cache::Metadata::filename(&redirected_cache_filename); + CachedUrlMetadata::filename(&redirected_cache_filename); let metadata_file = fs::File::open(metadata_filename).unwrap(); let metadata_file_metadata = metadata_file.metadata().unwrap(); let metadata_file_modified_02 = metadata_file_metadata.modified().unwrap(); diff --git a/cli/file_watcher.rs b/cli/file_watcher.rs deleted file mode 100644 index 78a5e7a82..000000000 --- a/cli/file_watcher.rs +++ /dev/null @@ -1,374 +0,0 @@ -// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. 
- -use crate::colors; -use crate::fs_util::canonicalize_path; - -use deno_core::error::AnyError; -use deno_core::error::JsError; -use deno_core::futures::Future; -use deno_runtime::fmt_errors::format_js_error; -use log::info; -use notify::event::Event as NotifyEvent; -use notify::event::EventKind; -use notify::Error as NotifyError; -use notify::RecommendedWatcher; -use notify::RecursiveMode; -use notify::Watcher; -use std::collections::HashSet; -use std::path::PathBuf; -use std::sync::Arc; -use std::time::Duration; -use tokio::select; -use tokio::sync::mpsc; -use tokio::sync::mpsc::UnboundedReceiver; -use tokio::time::sleep; - -const CLEAR_SCREEN: &str = "\x1B[2J\x1B[1;1H"; -const DEBOUNCE_INTERVAL: Duration = Duration::from_millis(200); - -struct DebouncedReceiver { - // The `recv()` call could be used in a tokio `select!` macro, - // and so we store this state on the struct to ensure we don't - // lose items if a `recv()` never completes - received_items: HashSet, - receiver: UnboundedReceiver>, -} - -impl DebouncedReceiver { - fn new_with_sender() -> (Arc>>, Self) { - let (sender, receiver) = mpsc::unbounded_channel(); - ( - Arc::new(sender), - Self { - receiver, - received_items: HashSet::new(), - }, - ) - } - - async fn recv(&mut self) -> Option> { - if self.received_items.is_empty() { - self - .received_items - .extend(self.receiver.recv().await?.into_iter()); - } - - loop { - select! { - items = self.receiver.recv() => { - self.received_items.extend(items?); - } - _ = sleep(DEBOUNCE_INTERVAL) => { - return Some(self.received_items.drain().collect()); - } - } - } - } -} - -async fn error_handler(watch_future: F) -where - F: Future>, -{ - let result = watch_future.await; - if let Err(err) = result { - let error_string = match err.downcast_ref::() { - Some(e) => format_js_error(e), - None => format!("{:?}", err), - }; - eprintln!( - "{}: {}", - colors::red_bold("error"), - error_string.trim_start_matches("error: ") - ); - } -} - -pub enum ResolutionResult { - Restart { - paths_to_watch: Vec, - result: Result, - }, - Ignore, -} - -async fn next_restart( - resolver: &mut R, - debounced_receiver: &mut DebouncedReceiver, -) -> (Vec, Result) -where - R: FnMut(Option>) -> F, - F: Future>, -{ - loop { - let changed = debounced_receiver.recv().await; - match resolver(changed).await { - ResolutionResult::Ignore => { - log::debug!("File change ignored") - } - ResolutionResult::Restart { - paths_to_watch, - result, - } => { - return (paths_to_watch, result); - } - } - } -} - -pub struct PrintConfig { - /// printing watcher status to terminal. - pub job_name: String, - /// determine whether to clear the terminal screen - pub clear_screen: bool, -} - -fn create_print_after_restart_fn(clear_screen: bool) -> impl Fn() { - move || { - if clear_screen { - eprint!("{}", CLEAR_SCREEN); - } - info!( - "{} File change detected! Restarting!", - colors::intense_blue("Watcher"), - ); - } -} - -/// Creates a file watcher, which will call `resolver` with every file change. -/// -/// - `resolver` is used for resolving file paths to be watched at every restarting -/// of the watcher, and can also return a value to be passed to `operation`. -/// It returns a [`ResolutionResult`], which can either instruct the watcher to restart or ignore the change. -/// This always contains paths to watch; -/// -/// - `operation` is the actual operation we want to run every time the watcher detects file -/// changes. 
For example, in the case where we would like to bundle, then `operation` would -/// have the logic for it like bundling the code. -pub async fn watch_func( - mut resolver: R, - mut operation: O, - print_config: PrintConfig, -) -> Result<(), AnyError> -where - R: FnMut(Option>) -> F1, - O: FnMut(T) -> F2, - F1: Future>, - F2: Future>, -{ - let (sender, mut receiver) = DebouncedReceiver::new_with_sender(); - - let PrintConfig { - job_name, - clear_screen, - } = print_config; - - // Store previous data. If module resolution fails at some point, the watcher will try to - // continue watching files using these data. - let mut paths_to_watch; - let mut resolution_result; - - let print_after_restart = create_print_after_restart_fn(clear_screen); - - match resolver(None).await { - ResolutionResult::Ignore => { - // The only situation where it makes sense to ignore the initial 'change' - // is if the command isn't supposed to do anything until something changes, - // e.g. a variant of `deno test` which doesn't run the entire test suite to start with, - // but instead does nothing until you make a change. - // - // In that case, this is probably the correct output. - info!( - "{} Waiting for file changes...", - colors::intense_blue("Watcher"), - ); - - let (paths, result) = next_restart(&mut resolver, &mut receiver).await; - paths_to_watch = paths; - resolution_result = result; - - print_after_restart(); - } - ResolutionResult::Restart { - paths_to_watch: paths, - result, - } => { - paths_to_watch = paths; - resolution_result = result; - } - }; - - info!("{} {} started.", colors::intense_blue("Watcher"), job_name,); - - loop { - let mut watcher = new_watcher(sender.clone())?; - add_paths_to_watcher(&mut watcher, &paths_to_watch); - - match resolution_result { - Ok(operation_arg) => { - let fut = error_handler(operation(operation_arg)); - select! { - (paths, result) = next_restart(&mut resolver, &mut receiver) => { - if result.is_ok() { - paths_to_watch = paths; - } - resolution_result = result; - - print_after_restart(); - continue; - }, - _ = fut => {}, - }; - - info!( - "{} {} finished. Restarting on file change...", - colors::intense_blue("Watcher"), - job_name, - ); - } - Err(error) => { - eprintln!("{}: {}", colors::red_bold("error"), error); - info!( - "{} {} failed. Restarting on file change...", - colors::intense_blue("Watcher"), - job_name, - ); - } - } - - let (paths, result) = next_restart(&mut resolver, &mut receiver).await; - if result.is_ok() { - paths_to_watch = paths; - } - resolution_result = result; - - print_after_restart(); - - drop(watcher); - } -} - -/// Creates a file watcher. -/// -/// - `operation` is the actual operation we want to run every time the watcher detects file -/// changes. For example, in the case where we would like to bundle, then `operation` would -/// have the logic for it like bundling the code. 
-pub async fn watch_func2( - mut paths_to_watch_receiver: UnboundedReceiver>, - mut operation: O, - operation_args: T, - print_config: PrintConfig, -) -> Result<(), AnyError> -where - O: FnMut(T) -> Result, - F: Future>, -{ - let (watcher_sender, mut watcher_receiver) = - DebouncedReceiver::new_with_sender(); - - let PrintConfig { - job_name, - clear_screen, - } = print_config; - - let print_after_restart = create_print_after_restart_fn(clear_screen); - - info!("{} {} started.", colors::intense_blue("Watcher"), job_name,); - - fn consume_paths_to_watch( - watcher: &mut RecommendedWatcher, - receiver: &mut UnboundedReceiver>, - ) { - loop { - match receiver.try_recv() { - Ok(paths) => { - add_paths_to_watcher(watcher, &paths); - } - Err(e) => match e { - mpsc::error::TryRecvError::Empty => { - break; - } - // there must be at least one receiver alive - _ => unreachable!(), - }, - } - } - } - - loop { - let mut watcher = new_watcher(watcher_sender.clone())?; - consume_paths_to_watch(&mut watcher, &mut paths_to_watch_receiver); - - let receiver_future = async { - loop { - let maybe_paths = paths_to_watch_receiver.recv().await; - add_paths_to_watcher(&mut watcher, &maybe_paths.unwrap()); - } - }; - let operation_future = error_handler(operation(operation_args.clone())?); - - select! { - _ = receiver_future => {}, - _ = watcher_receiver.recv() => { - print_after_restart(); - continue; - }, - _ = operation_future => { - // TODO(bartlomieju): print exit code here? - info!( - "{} {} finished. Restarting on file change...", - colors::intense_blue("Watcher"), - job_name, - ); - consume_paths_to_watch(&mut watcher, &mut paths_to_watch_receiver); - }, - }; - - let receiver_future = async { - loop { - let maybe_paths = paths_to_watch_receiver.recv().await; - add_paths_to_watcher(&mut watcher, &maybe_paths.unwrap()); - } - }; - select! { - _ = receiver_future => {}, - _ = watcher_receiver.recv() => { - print_after_restart(); - continue; - }, - }; - } -} - -fn new_watcher( - sender: Arc>>, -) -> Result { - let watcher = Watcher::new( - move |res: Result| { - if let Ok(event) = res { - if matches!( - event.kind, - EventKind::Create(_) | EventKind::Modify(_) | EventKind::Remove(_) - ) { - let paths = event - .paths - .iter() - .filter_map(|path| canonicalize_path(path).ok()) - .collect(); - sender.send(paths).unwrap(); - } - } - }, - Default::default(), - )?; - - Ok(watcher) -} - -fn add_paths_to_watcher(watcher: &mut RecommendedWatcher, paths: &[PathBuf]) { - // Ignore any error e.g. `PathNotFound` - for path in paths { - let _ = watcher.watch(path, RecursiveMode::Recursive); - } - log::debug!("Watching paths: {:?}", paths); -} diff --git a/cli/fs_util.rs b/cli/fs_util.rs deleted file mode 100644 index a27586da2..000000000 --- a/cli/fs_util.rs +++ /dev/null @@ -1,1209 +0,0 @@ -// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. 
- -use deno_core::anyhow::Context; -use deno_core::error::uri_error; -use deno_core::error::AnyError; -pub use deno_core::normalize_path; -use deno_core::ModuleSpecifier; -use deno_runtime::deno_crypto::rand; -use deno_runtime::deno_node::PathClean; -use std::borrow::Cow; -use std::env::current_dir; -use std::fs::OpenOptions; -use std::io::Error; -use std::io::ErrorKind; -use std::io::Write; -use std::path::Path; -use std::path::PathBuf; -use std::time::Duration; -use walkdir::WalkDir; - -pub fn atomic_write_file>( - filename: &Path, - data: T, - mode: u32, -) -> std::io::Result<()> { - let rand: String = (0..4) - .map(|_| format!("{:02x}", rand::random::())) - .collect(); - let extension = format!("{}.tmp", rand); - let tmp_file = filename.with_extension(extension); - write_file(&tmp_file, data, mode)?; - std::fs::rename(tmp_file, filename)?; - Ok(()) -} - -pub fn write_file>( - filename: &Path, - data: T, - mode: u32, -) -> std::io::Result<()> { - write_file_2(filename, data, true, mode, true, false) -} - -pub fn write_file_2>( - filename: &Path, - data: T, - update_mode: bool, - mode: u32, - is_create: bool, - is_append: bool, -) -> std::io::Result<()> { - let mut file = OpenOptions::new() - .read(false) - .write(true) - .append(is_append) - .truncate(!is_append) - .create(is_create) - .open(filename)?; - - if update_mode { - #[cfg(unix)] - { - use std::os::unix::fs::PermissionsExt; - let mode = mode & 0o777; - let permissions = PermissionsExt::from_mode(mode); - file.set_permissions(permissions)?; - } - #[cfg(not(unix))] - let _ = mode; - } - - file.write_all(data.as_ref()) -} - -/// Similar to `std::fs::canonicalize()` but strips UNC prefixes on Windows. -pub fn canonicalize_path(path: &Path) -> Result { - let path = path.canonicalize()?; - #[cfg(windows)] - return Ok(strip_unc_prefix(path)); - #[cfg(not(windows))] - return Ok(path); -} - -/// Canonicalizes a path which might be non-existent by going up the -/// ancestors until it finds a directory that exists, canonicalizes -/// that path, then adds back the remaining path components. -/// -/// Note: When using this, you should be aware that a symlink may -/// subsequently be created along this path by some other code. 
-pub fn canonicalize_path_maybe_not_exists( - path: &Path, -) -> Result { - let path = path.to_path_buf().clean(); - let mut path = path.as_path(); - let mut names_stack = Vec::new(); - loop { - match canonicalize_path(path) { - Ok(mut canonicalized_path) => { - for name in names_stack.into_iter().rev() { - canonicalized_path = canonicalized_path.join(name); - } - return Ok(canonicalized_path); - } - Err(err) if err.kind() == ErrorKind::NotFound => { - names_stack.push(path.file_name().unwrap()); - path = path.parent().unwrap(); - } - Err(err) => return Err(err), - } - } -} - -#[cfg(windows)] -fn strip_unc_prefix(path: PathBuf) -> PathBuf { - use std::path::Component; - use std::path::Prefix; - - let mut components = path.components(); - match components.next() { - Some(Component::Prefix(prefix)) => { - match prefix.kind() { - // \\?\device - Prefix::Verbatim(device) => { - let mut path = PathBuf::new(); - path.push(format!(r"\\{}\", device.to_string_lossy())); - path.extend(components.filter(|c| !matches!(c, Component::RootDir))); - path - } - // \\?\c:\path - Prefix::VerbatimDisk(_) => { - let mut path = PathBuf::new(); - path.push(prefix.as_os_str().to_string_lossy().replace(r"\\?\", "")); - path.extend(components); - path - } - // \\?\UNC\hostname\share_name\path - Prefix::VerbatimUNC(hostname, share_name) => { - let mut path = PathBuf::new(); - path.push(format!( - r"\\{}\{}\", - hostname.to_string_lossy(), - share_name.to_string_lossy() - )); - path.extend(components.filter(|c| !matches!(c, Component::RootDir))); - path - } - _ => path, - } - } - _ => path, - } -} - -pub fn resolve_from_cwd(path: &Path) -> Result { - let resolved_path = if path.is_absolute() { - path.to_owned() - } else { - let cwd = - current_dir().context("Failed to get current working directory")?; - cwd.join(path) - }; - - Ok(normalize_path(&resolved_path)) -} - -/// Checks if the path has extension Deno supports. -pub fn is_supported_ext(path: &Path) -> bool { - if let Some(ext) = get_extension(path) { - matches!( - ext.as_str(), - "ts" | "tsx" | "js" | "jsx" | "mjs" | "mts" | "cjs" | "cts" - ) - } else { - false - } -} - -/// Checks if the path has a basename and extension Deno supports for tests. -pub fn is_supported_test_path(path: &Path) -> bool { - if let Some(name) = path.file_stem() { - let basename = name.to_string_lossy(); - (basename.ends_with("_test") - || basename.ends_with(".test") - || basename == "test") - && is_supported_ext(path) - } else { - false - } -} - -/// Checks if the path has a basename and extension Deno supports for benches. -pub fn is_supported_bench_path(path: &Path) -> bool { - if let Some(name) = path.file_stem() { - let basename = name.to_string_lossy(); - (basename.ends_with("_bench") - || basename.ends_with(".bench") - || basename == "bench") - && is_supported_ext(path) - } else { - false - } -} - -/// Checks if the path has an extension Deno supports for tests. -pub fn is_supported_test_ext(path: &Path) -> bool { - if let Some(ext) = get_extension(path) { - matches!( - ext.as_str(), - "ts" - | "tsx" - | "js" - | "jsx" - | "mjs" - | "mts" - | "cjs" - | "cts" - | "md" - | "mkd" - | "mkdn" - | "mdwn" - | "mdown" - | "markdown" - ) - } else { - false - } -} - -/// Get the extension of a file in lowercase. -pub fn get_extension(file_path: &Path) -> Option { - return file_path - .extension() - .and_then(|e| e.to_str()) - .map(|e| e.to_lowercase()); -} - -/// Collects file paths that satisfy the given predicate, by recursively walking `files`. 
-/// If the walker visits a path that is listed in `ignore`, it skips descending into the directory. -pub fn collect_files<P>
( - files: &[PathBuf], - ignore: &[PathBuf], - predicate: P, -) -> Result<Vec<PathBuf>, AnyError> -where - P: Fn(&Path) -> bool, -{ - let mut target_files = Vec::new(); - - // retain only the paths which exist and ignore the rest - let canonicalized_ignore: Vec<PathBuf> = ignore - .iter() - .filter_map(|i| canonicalize_path(i).ok()) - .collect(); - - for file in files { - for entry in WalkDir::new(file) - .into_iter() - .filter_entry(|e| { - canonicalize_path(e.path()).map_or(false, |c| { - !canonicalized_ignore.iter().any(|i| c.starts_with(i)) - }) - }) - .filter_map(|e| match e { - Ok(e) if !e.file_type().is_dir() && predicate(e.path()) => Some(e), - _ => None, - }) - { - target_files.push(canonicalize_path(entry.path())?) - } - } - - Ok(target_files) -} - -/// Collects module specifiers that satisfy the given predicate as a file path, by recursively walking `include`. -/// Specifiers that start with http and https are left intact. -pub fn collect_specifiers<P>
( - include: Vec, - ignore: &[PathBuf], - predicate: P, -) -> Result, AnyError> -where - P: Fn(&Path) -> bool, -{ - let mut prepared = vec![]; - - let root_path = current_dir()?; - for path in include { - let lowercase_path = path.to_lowercase(); - if lowercase_path.starts_with("http://") - || lowercase_path.starts_with("https://") - { - let url = ModuleSpecifier::parse(&path)?; - prepared.push(url); - continue; - } - - let p = if lowercase_path.starts_with("file://") { - specifier_to_file_path(&ModuleSpecifier::parse(&path)?)? - } else { - root_path.join(path) - }; - let p = normalize_path(&p); - if p.is_dir() { - let test_files = collect_files(&[p], ignore, &predicate).unwrap(); - let mut test_files_as_urls = test_files - .iter() - .map(|f| ModuleSpecifier::from_file_path(f).unwrap()) - .collect::>(); - - test_files_as_urls.sort(); - prepared.extend(test_files_as_urls); - } else { - let url = ModuleSpecifier::from_file_path(p).unwrap(); - prepared.push(url); - } - } - - Ok(prepared) -} - -/// Asynchronously removes a directory and all its descendants, but does not error -/// when the directory does not exist. -pub async fn remove_dir_all_if_exists(path: &Path) -> std::io::Result<()> { - let result = tokio::fs::remove_dir_all(path).await; - match result { - Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(()), - _ => result, - } -} - -/// Copies a directory to another directory. -/// -/// Note: Does not handle symlinks. -pub fn copy_dir_recursive(from: &Path, to: &Path) -> Result<(), AnyError> { - std::fs::create_dir_all(to) - .with_context(|| format!("Creating {}", to.display()))?; - let read_dir = std::fs::read_dir(from) - .with_context(|| format!("Reading {}", from.display()))?; - - for entry in read_dir { - let entry = entry?; - let file_type = entry.file_type()?; - let new_from = from.join(entry.file_name()); - let new_to = to.join(entry.file_name()); - - if file_type.is_dir() { - copy_dir_recursive(&new_from, &new_to).with_context(|| { - format!("Dir {} to {}", new_from.display(), new_to.display()) - })?; - } else if file_type.is_file() { - std::fs::copy(&new_from, &new_to).with_context(|| { - format!("Copying {} to {}", new_from.display(), new_to.display()) - })?; - } - } - - Ok(()) -} - -/// Hardlinks the files in one directory to another directory. -/// -/// Note: Does not handle symlinks. -pub fn hard_link_dir_recursive(from: &Path, to: &Path) -> Result<(), AnyError> { - std::fs::create_dir_all(to) - .with_context(|| format!("Creating {}", to.display()))?; - let read_dir = std::fs::read_dir(from) - .with_context(|| format!("Reading {}", from.display()))?; - - for entry in read_dir { - let entry = entry?; - let file_type = entry.file_type()?; - let new_from = from.join(entry.file_name()); - let new_to = to.join(entry.file_name()); - - if file_type.is_dir() { - hard_link_dir_recursive(&new_from, &new_to).with_context(|| { - format!("Dir {} to {}", new_from.display(), new_to.display()) - })?; - } else if file_type.is_file() { - // note: chance for race conditions here between attempting to create, - // then removing, then attempting to create. There doesn't seem to be - // a way to hard link with overwriting in Rust, but maybe there is some - // way with platform specific code. The workaround here is to handle - // scenarios where something else might create or remove files. 
- if let Err(err) = std::fs::hard_link(&new_from, &new_to) { - if err.kind() == ErrorKind::AlreadyExists { - if let Err(err) = std::fs::remove_file(&new_to) { - if err.kind() == ErrorKind::NotFound { - // Assume another process/thread created this hard link to the file we are wanting - // to remove then sleep a little bit to let the other process/thread move ahead - // faster to reduce contention. - std::thread::sleep(Duration::from_millis(10)); - } else { - return Err(err).with_context(|| { - format!( - "Removing file to hard link {} to {}", - new_from.display(), - new_to.display() - ) - }); - } - } - - // Always attempt to recreate the hardlink. In contention scenarios, the other process - // might have been killed or exited after removing the file, but before creating the hardlink - if let Err(err) = std::fs::hard_link(&new_from, &new_to) { - // Assume another process/thread created this hard link to the file we are wanting - // to now create then sleep a little bit to let the other process/thread move ahead - // faster to reduce contention. - if err.kind() == ErrorKind::AlreadyExists { - std::thread::sleep(Duration::from_millis(10)); - } else { - return Err(err).with_context(|| { - format!( - "Hard linking {} to {}", - new_from.display(), - new_to.display() - ) - }); - } - } - } else { - return Err(err).with_context(|| { - format!( - "Hard linking {} to {}", - new_from.display(), - new_to.display() - ) - }); - } - } - } - } - - Ok(()) -} - -pub fn symlink_dir(oldpath: &Path, newpath: &Path) -> Result<(), AnyError> { - let err_mapper = |err: Error| { - Error::new( - err.kind(), - format!( - "{}, symlink '{}' -> '{}'", - err, - oldpath.display(), - newpath.display() - ), - ) - }; - #[cfg(unix)] - { - use std::os::unix::fs::symlink; - symlink(oldpath, newpath).map_err(err_mapper)?; - } - #[cfg(not(unix))] - { - use std::os::windows::fs::symlink_dir; - symlink_dir(oldpath, newpath).map_err(err_mapper)?; - } - Ok(()) -} - -/// Attempts to convert a specifier to a file path. By default, uses the Url -/// crate's `to_file_path()` method, but falls back to try and resolve unix-style -/// paths on Windows. -pub fn specifier_to_file_path( - specifier: &ModuleSpecifier, -) -> Result { - let result = if cfg!(windows) { - match specifier.to_file_path() { - Ok(path) => Ok(path), - Err(()) => { - // This might be a unix-style path which is used in the tests even on Windows. - // Attempt to see if we can convert it to a `PathBuf`. This code should be removed - // once/if https://github.com/servo/rust-url/issues/730 is implemented. - if specifier.scheme() == "file" - && specifier.host().is_none() - && specifier.port().is_none() - && specifier.path_segments().is_some() - { - let path_str = specifier.path(); - match String::from_utf8( - percent_encoding::percent_decode(path_str.as_bytes()).collect(), - ) { - Ok(path_str) => Ok(PathBuf::from(path_str)), - Err(_) => Err(()), - } - } else { - Err(()) - } - } - } - } else { - specifier.to_file_path() - }; - match result { - Ok(path) => Ok(path), - Err(()) => Err(uri_error(format!( - "Invalid file path.\n Specifier: {}", - specifier - ))), - } -} - -/// Ensures a specifier that will definitely be a directory has a trailing slash. -pub fn ensure_directory_specifier( - mut specifier: ModuleSpecifier, -) -> ModuleSpecifier { - let path = specifier.path(); - if !path.ends_with('/') { - let new_path = format!("{}/", path); - specifier.set_path(&new_path); - } - specifier -} - -/// Gets the parent of this module specifier. 
-pub fn specifier_parent(specifier: &ModuleSpecifier) -> ModuleSpecifier { - let mut specifier = specifier.clone(); - // don't use specifier.segments() because it will strip the leading slash - let mut segments = specifier.path().split('/').collect::>(); - if segments.iter().all(|s| s.is_empty()) { - return specifier; - } - if let Some(last) = segments.last() { - if last.is_empty() { - segments.pop(); - } - segments.pop(); - let new_path = format!("{}/", segments.join("/")); - specifier.set_path(&new_path); - } - specifier -} - -/// `from.make_relative(to)` but with fixes. -pub fn relative_specifier( - from: &ModuleSpecifier, - to: &ModuleSpecifier, -) -> Option { - let is_dir = to.path().ends_with('/'); - - if is_dir && from == to { - return Some("./".to_string()); - } - - // workaround using parent directory until https://github.com/servo/rust-url/pull/754 is merged - let from = if !from.path().ends_with('/') { - if let Some(end_slash) = from.path().rfind('/') { - let mut new_from = from.clone(); - new_from.set_path(&from.path()[..end_slash + 1]); - Cow::Owned(new_from) - } else { - Cow::Borrowed(from) - } - } else { - Cow::Borrowed(from) - }; - - // workaround for url crate not adding a trailing slash for a directory - // it seems to be fixed once a version greater than 2.2.2 is released - let mut text = from.make_relative(to)?; - if is_dir && !text.ends_with('/') && to.query().is_none() { - text.push('/'); - } - - Some(if text.starts_with("../") || text.starts_with("./") { - text - } else { - format!("./{}", text) - }) -} - -/// This function checks if input path has trailing slash or not. If input path -/// has trailing slash it will return true else it will return false. -pub fn path_has_trailing_slash(path: &Path) -> bool { - if let Some(path_str) = path.to_str() { - if cfg!(windows) { - path_str.ends_with('\\') - } else { - path_str.ends_with('/') - } - } else { - false - } -} - -/// Gets a path with the specified file stem suffix. -/// -/// Ex. `file.ts` with suffix `_2` returns `file_2.ts` -pub fn path_with_stem_suffix(path: &Path, suffix: &str) -> PathBuf { - if let Some(file_name) = path.file_name().map(|f| f.to_string_lossy()) { - if let Some(file_stem) = path.file_stem().map(|f| f.to_string_lossy()) { - if let Some(ext) = path.extension().map(|f| f.to_string_lossy()) { - return if file_stem.to_lowercase().ends_with(".d") { - path.with_file_name(format!( - "{}{}.{}.{}", - &file_stem[..file_stem.len() - ".d".len()], - suffix, - // maintain casing - &file_stem[file_stem.len() - "d".len()..], - ext - )) - } else { - path.with_file_name(format!("{}{}.{}", file_stem, suffix, ext)) - }; - } - } - - path.with_file_name(format!("{}{}", file_name, suffix)) - } else { - path.with_file_name(suffix) - } -} - -/// Gets if the provided character is not supported on all -/// kinds of file systems. -pub fn is_banned_path_char(c: char) -> bool { - matches!(c, '<' | '>' | ':' | '"' | '|' | '?' | '*') -} - -/// Gets a safe local directory name for the provided url. 
-/// -/// For example: -/// https://deno.land:8080/path -> deno.land_8080/path -pub fn root_url_to_safe_local_dirname(root: &ModuleSpecifier) -> PathBuf { - fn sanitize_segment(text: &str) -> String { - text - .chars() - .map(|c| if is_banned_segment_char(c) { '_' } else { c }) - .collect() - } - - fn is_banned_segment_char(c: char) -> bool { - matches!(c, '/' | '\\') || is_banned_path_char(c) - } - - let mut result = String::new(); - if let Some(domain) = root.domain() { - result.push_str(&sanitize_segment(domain)); - } - if let Some(port) = root.port() { - if !result.is_empty() { - result.push('_'); - } - result.push_str(&port.to_string()); - } - let mut result = PathBuf::from(result); - if let Some(segments) = root.path_segments() { - for segment in segments.filter(|s| !s.is_empty()) { - result = result.join(sanitize_segment(segment)); - } - } - - result -} - -/// Gets the total size (in bytes) of a directory. -pub fn dir_size(path: &Path) -> std::io::Result { - let entries = std::fs::read_dir(path)?; - let mut total = 0; - for entry in entries { - let entry = entry?; - total += match entry.metadata()? { - data if data.is_dir() => dir_size(&entry.path())?, - data => data.len(), - }; - } - Ok(total) -} - -#[cfg(test)] -mod tests { - use super::*; - use test_util::TempDir; - - #[test] - fn resolve_from_cwd_child() { - let cwd = current_dir().unwrap(); - assert_eq!(resolve_from_cwd(Path::new("a")).unwrap(), cwd.join("a")); - } - - #[test] - fn resolve_from_cwd_dot() { - let cwd = current_dir().unwrap(); - assert_eq!(resolve_from_cwd(Path::new(".")).unwrap(), cwd); - } - - #[test] - fn resolve_from_cwd_parent() { - let cwd = current_dir().unwrap(); - assert_eq!(resolve_from_cwd(Path::new("a/..")).unwrap(), cwd); - } - - #[test] - fn test_normalize_path() { - assert_eq!(normalize_path(Path::new("a/../b")), PathBuf::from("b")); - assert_eq!(normalize_path(Path::new("a/./b/")), PathBuf::from("a/b/")); - assert_eq!( - normalize_path(Path::new("a/./b/../c")), - PathBuf::from("a/c") - ); - - if cfg!(windows) { - assert_eq!( - normalize_path(Path::new("C:\\a\\.\\b\\..\\c")), - PathBuf::from("C:\\a\\c") - ); - } - } - - // TODO: Get a good expected value here for Windows. 
- #[cfg(not(windows))] - #[test] - fn resolve_from_cwd_absolute() { - let expected = Path::new("/a"); - assert_eq!(resolve_from_cwd(expected).unwrap(), expected); - } - - #[test] - fn test_is_supported_ext() { - assert!(!is_supported_ext(Path::new("tests/subdir/redirects"))); - assert!(!is_supported_ext(Path::new("README.md"))); - assert!(is_supported_ext(Path::new("lib/typescript.d.ts"))); - assert!(is_supported_ext(Path::new("testdata/run/001_hello.js"))); - assert!(is_supported_ext(Path::new("testdata/run/002_hello.ts"))); - assert!(is_supported_ext(Path::new("foo.jsx"))); - assert!(is_supported_ext(Path::new("foo.tsx"))); - assert!(is_supported_ext(Path::new("foo.TS"))); - assert!(is_supported_ext(Path::new("foo.TSX"))); - assert!(is_supported_ext(Path::new("foo.JS"))); - assert!(is_supported_ext(Path::new("foo.JSX"))); - assert!(is_supported_ext(Path::new("foo.mjs"))); - assert!(is_supported_ext(Path::new("foo.mts"))); - assert!(is_supported_ext(Path::new("foo.cjs"))); - assert!(is_supported_ext(Path::new("foo.cts"))); - assert!(!is_supported_ext(Path::new("foo.mjsx"))); - } - - #[test] - fn test_is_supported_test_ext() { - assert!(!is_supported_test_ext(Path::new("tests/subdir/redirects"))); - assert!(is_supported_test_ext(Path::new("README.md"))); - assert!(is_supported_test_ext(Path::new("readme.MD"))); - assert!(is_supported_test_ext(Path::new("lib/typescript.d.ts"))); - assert!(is_supported_test_ext(Path::new( - "testdata/run/001_hello.js" - ))); - assert!(is_supported_test_ext(Path::new( - "testdata/run/002_hello.ts" - ))); - assert!(is_supported_test_ext(Path::new("foo.jsx"))); - assert!(is_supported_test_ext(Path::new("foo.tsx"))); - assert!(is_supported_test_ext(Path::new("foo.TS"))); - assert!(is_supported_test_ext(Path::new("foo.TSX"))); - assert!(is_supported_test_ext(Path::new("foo.JS"))); - assert!(is_supported_test_ext(Path::new("foo.JSX"))); - assert!(is_supported_test_ext(Path::new("foo.mjs"))); - assert!(is_supported_test_ext(Path::new("foo.mts"))); - assert!(is_supported_test_ext(Path::new("foo.cjs"))); - assert!(is_supported_test_ext(Path::new("foo.cts"))); - assert!(!is_supported_test_ext(Path::new("foo.mjsx"))); - assert!(!is_supported_test_ext(Path::new("foo.jsonc"))); - assert!(!is_supported_test_ext(Path::new("foo.JSONC"))); - assert!(!is_supported_test_ext(Path::new("foo.json"))); - assert!(!is_supported_test_ext(Path::new("foo.JsON"))); - } - - #[test] - fn test_is_supported_test_path() { - assert!(is_supported_test_path(Path::new( - "tests/subdir/foo_test.ts" - ))); - assert!(is_supported_test_path(Path::new( - "tests/subdir/foo_test.tsx" - ))); - assert!(is_supported_test_path(Path::new( - "tests/subdir/foo_test.js" - ))); - assert!(is_supported_test_path(Path::new( - "tests/subdir/foo_test.jsx" - ))); - assert!(is_supported_test_path(Path::new("bar/foo.test.ts"))); - assert!(is_supported_test_path(Path::new("bar/foo.test.tsx"))); - assert!(is_supported_test_path(Path::new("bar/foo.test.js"))); - assert!(is_supported_test_path(Path::new("bar/foo.test.jsx"))); - assert!(is_supported_test_path(Path::new("foo/bar/test.js"))); - assert!(is_supported_test_path(Path::new("foo/bar/test.jsx"))); - assert!(is_supported_test_path(Path::new("foo/bar/test.ts"))); - assert!(is_supported_test_path(Path::new("foo/bar/test.tsx"))); - assert!(!is_supported_test_path(Path::new("README.md"))); - assert!(!is_supported_test_path(Path::new("lib/typescript.d.ts"))); - assert!(!is_supported_test_path(Path::new("notatest.js"))); - 
assert!(!is_supported_test_path(Path::new("NotAtest.ts"))); - } - - #[test] - fn test_collect_files() { - fn create_files(dir_path: &Path, files: &[&str]) { - std::fs::create_dir(dir_path).expect("Failed to create directory"); - for f in files { - let path = dir_path.join(f); - std::fs::write(path, "").expect("Failed to create file"); - } - } - - // dir.ts - // ├── a.ts - // ├── b.js - // ├── child - // │ ├── e.mjs - // │ ├── f.mjsx - // │ ├── .foo.TS - // │ └── README.md - // ├── c.tsx - // ├── d.jsx - // └── ignore - // ├── g.d.ts - // └── .gitignore - - let t = TempDir::new(); - - let root_dir_path = t.path().join("dir.ts"); - let root_dir_files = ["a.ts", "b.js", "c.tsx", "d.jsx"]; - create_files(&root_dir_path, &root_dir_files); - - let child_dir_path = root_dir_path.join("child"); - let child_dir_files = ["e.mjs", "f.mjsx", ".foo.TS", "README.md"]; - create_files(&child_dir_path, &child_dir_files); - - let ignore_dir_path = root_dir_path.join("ignore"); - let ignore_dir_files = ["g.d.ts", ".gitignore"]; - create_files(&ignore_dir_path, &ignore_dir_files); - - let result = collect_files(&[root_dir_path], &[ignore_dir_path], |path| { - // exclude dotfiles - path - .file_name() - .and_then(|f| f.to_str()) - .map_or(false, |f| !f.starts_with('.')) - }) - .unwrap(); - let expected = [ - "a.ts", - "b.js", - "e.mjs", - "f.mjsx", - "README.md", - "c.tsx", - "d.jsx", - ]; - for e in expected.iter() { - assert!(result.iter().any(|r| r.ends_with(e))); - } - assert_eq!(result.len(), expected.len()); - } - - #[test] - fn test_collect_specifiers() { - fn create_files(dir_path: &Path, files: &[&str]) { - std::fs::create_dir(dir_path).expect("Failed to create directory"); - for f in files { - let path = dir_path.join(f); - std::fs::write(path, "").expect("Failed to create file"); - } - } - - // dir.ts - // ├── a.ts - // ├── b.js - // ├── child - // │ ├── e.mjs - // │ ├── f.mjsx - // │ ├── .foo.TS - // │ └── README.md - // ├── c.tsx - // ├── d.jsx - // └── ignore - // ├── g.d.ts - // └── .gitignore - - let t = TempDir::new(); - - let root_dir_path = t.path().join("dir.ts"); - let root_dir_files = ["a.ts", "b.js", "c.tsx", "d.jsx"]; - create_files(&root_dir_path, &root_dir_files); - - let child_dir_path = root_dir_path.join("child"); - let child_dir_files = ["e.mjs", "f.mjsx", ".foo.TS", "README.md"]; - create_files(&child_dir_path, &child_dir_files); - - let ignore_dir_path = root_dir_path.join("ignore"); - let ignore_dir_files = ["g.d.ts", ".gitignore"]; - create_files(&ignore_dir_path, &ignore_dir_files); - - let predicate = |path: &Path| { - // exclude dotfiles - path - .file_name() - .and_then(|f| f.to_str()) - .map_or(false, |f| !f.starts_with('.')) - }; - - let result = collect_specifiers( - vec![ - "http://localhost:8080".to_string(), - root_dir_path.to_str().unwrap().to_string(), - "https://localhost:8080".to_string(), - ], - &[ignore_dir_path], - predicate, - ) - .unwrap(); - - let root_dir_url = ModuleSpecifier::from_file_path( - canonicalize_path(&root_dir_path).unwrap(), - ) - .unwrap() - .to_string(); - let expected: Vec = [ - "http://localhost:8080", - &format!("{}/a.ts", root_dir_url), - &format!("{}/b.js", root_dir_url), - &format!("{}/c.tsx", root_dir_url), - &format!("{}/child/README.md", root_dir_url), - &format!("{}/child/e.mjs", root_dir_url), - &format!("{}/child/f.mjsx", root_dir_url), - &format!("{}/d.jsx", root_dir_url), - "https://localhost:8080", - ] - .iter() - .map(|f| ModuleSpecifier::parse(f).unwrap()) - .collect::>(); - - assert_eq!(result, expected); - - let scheme = 
if cfg!(target_os = "windows") { - "file:///" - } else { - "file://" - }; - let result = collect_specifiers( - vec![format!( - "{}{}", - scheme, - root_dir_path - .join("child") - .to_str() - .unwrap() - .replace('\\', "/") - )], - &[], - predicate, - ) - .unwrap(); - - let expected: Vec = [ - &format!("{}/child/README.md", root_dir_url), - &format!("{}/child/e.mjs", root_dir_url), - &format!("{}/child/f.mjsx", root_dir_url), - ] - .iter() - .map(|f| ModuleSpecifier::parse(f).unwrap()) - .collect::>(); - - assert_eq!(result, expected); - } - - #[cfg(windows)] - #[test] - fn test_strip_unc_prefix() { - run_test(r"C:\", r"C:\"); - run_test(r"C:\test\file.txt", r"C:\test\file.txt"); - - run_test(r"\\?\C:\", r"C:\"); - run_test(r"\\?\C:\test\file.txt", r"C:\test\file.txt"); - - run_test(r"\\.\C:\", r"\\.\C:\"); - run_test(r"\\.\C:\Test\file.txt", r"\\.\C:\Test\file.txt"); - - run_test(r"\\?\UNC\localhost\", r"\\localhost"); - run_test(r"\\?\UNC\localhost\c$\", r"\\localhost\c$"); - run_test( - r"\\?\UNC\localhost\c$\Windows\file.txt", - r"\\localhost\c$\Windows\file.txt", - ); - run_test(r"\\?\UNC\wsl$\deno.json", r"\\wsl$\deno.json"); - - run_test(r"\\?\server1", r"\\server1"); - run_test(r"\\?\server1\e$\", r"\\server1\e$\"); - run_test( - r"\\?\server1\e$\test\file.txt", - r"\\server1\e$\test\file.txt", - ); - - fn run_test(input: &str, expected: &str) { - assert_eq!( - strip_unc_prefix(PathBuf::from(input)), - PathBuf::from(expected) - ); - } - } - - #[test] - fn test_specifier_to_file_path() { - run_success_test("file:///", "/"); - run_success_test("file:///test", "/test"); - run_success_test("file:///dir/test/test.txt", "/dir/test/test.txt"); - run_success_test( - "file:///dir/test%20test/test.txt", - "/dir/test test/test.txt", - ); - - fn run_success_test(specifier: &str, expected_path: &str) { - let result = - specifier_to_file_path(&ModuleSpecifier::parse(specifier).unwrap()) - .unwrap(); - assert_eq!(result, PathBuf::from(expected_path)); - } - } - - #[test] - fn test_ensure_directory_specifier() { - run_test("file:///", "file:///"); - run_test("file:///test", "file:///test/"); - run_test("file:///test/", "file:///test/"); - run_test("file:///test/other", "file:///test/other/"); - run_test("file:///test/other/", "file:///test/other/"); - - fn run_test(specifier: &str, expected: &str) { - let result = - ensure_directory_specifier(ModuleSpecifier::parse(specifier).unwrap()); - assert_eq!(result.to_string(), expected); - } - } - - #[test] - fn test_specifier_parent() { - run_test("file:///", "file:///"); - run_test("file:///test", "file:///"); - run_test("file:///test/", "file:///"); - run_test("file:///test/other", "file:///test/"); - run_test("file:///test/other.txt", "file:///test/"); - run_test("file:///test/other/", "file:///test/"); - - fn run_test(specifier: &str, expected: &str) { - let result = - specifier_parent(&ModuleSpecifier::parse(specifier).unwrap()); - assert_eq!(result.to_string(), expected); - } - } - - #[test] - fn test_relative_specifier() { - let fixtures: Vec<(&str, &str, Option<&str>)> = vec![ - ("file:///from", "file:///to", Some("./to")), - ("file:///from", "file:///from/other", Some("./from/other")), - ("file:///from", "file:///from/other/", Some("./from/other/")), - ("file:///from", "file:///other/from", Some("./other/from")), - ("file:///from/", "file:///other/from", Some("../other/from")), - ("file:///from", "file:///other/from/", Some("./other/from/")), - ( - "file:///from", - "file:///to/other.txt", - Some("./to/other.txt"), - ), - ( - 
"file:///from/test", - "file:///to/other.txt", - Some("../to/other.txt"), - ), - ( - "file:///from/other.txt", - "file:///to/other.txt", - Some("../to/other.txt"), - ), - ( - "https://deno.land/x/a/b/d.ts", - "https://deno.land/x/a/b/c.ts", - Some("./c.ts"), - ), - ( - "https://deno.land/x/a/b/d.ts", - "https://deno.land/x/a/c.ts", - Some("../c.ts"), - ), - ( - "https://deno.land/x/a/b/d.ts", - "https://deno.land/x/a/b/c/d.ts", - Some("./c/d.ts"), - ), - ( - "https://deno.land/x/a/b/c/", - "https://deno.land/x/a/b/c/d.ts", - Some("./d.ts"), - ), - ( - "https://deno.land/x/a/b/c/", - "https://deno.land/x/a/b/c/d/e.ts", - Some("./d/e.ts"), - ), - ( - "https://deno.land/x/a/b/c/f.ts", - "https://deno.land/x/a/b/c/d/e.ts", - Some("./d/e.ts"), - ), - ( - "https://deno.land/x/a/b/d.ts", - "https://deno.land/x/a/c.ts?foo=bar", - Some("../c.ts?foo=bar"), - ), - ( - "https://deno.land/x/a/b/d.ts?foo=bar", - "https://deno.land/x/a/b/c.ts", - Some("./c.ts"), - ), - ("file:///a/b/d.ts", "file:///a/b/c.ts", Some("./c.ts")), - ("https://deno.land/x/a/b/c.ts", "file:///a/b/c.ts", None), - ( - "https://deno.land/", - "https://deno.land/x/a/b/c.ts", - Some("./x/a/b/c.ts"), - ), - ( - "https://deno.land/x/d/e/f.ts", - "https://deno.land/x/a/b/c.ts", - Some("../../a/b/c.ts"), - ), - ]; - for (from_str, to_str, expected) in fixtures { - let from = ModuleSpecifier::parse(from_str).unwrap(); - let to = ModuleSpecifier::parse(to_str).unwrap(); - let actual = relative_specifier(&from, &to); - assert_eq!( - actual.as_deref(), - expected, - "from: \"{}\" to: \"{}\"", - from_str, - to_str - ); - } - } - - #[test] - fn test_path_has_trailing_slash() { - #[cfg(not(windows))] - { - run_test("/Users/johndoe/Desktop/deno-project/target/", true); - run_test(r"/Users/johndoe/deno-project/target//", true); - run_test("/Users/johndoe/Desktop/deno-project", false); - run_test(r"/Users/johndoe/deno-project\", false); - } - - #[cfg(windows)] - { - run_test(r"C:\test\deno-project\", true); - run_test(r"C:\test\deno-project\\", true); - run_test(r"C:\test\file.txt", false); - run_test(r"C:\test\file.txt/", false); - } - - fn run_test(path_str: &str, expected: bool) { - let path = Path::new(path_str); - let result = path_has_trailing_slash(path); - assert_eq!(result, expected); - } - } - - #[test] - fn test_path_with_stem_suffix() { - assert_eq!( - path_with_stem_suffix(&PathBuf::from("/"), "_2"), - PathBuf::from("/_2") - ); - assert_eq!( - path_with_stem_suffix(&PathBuf::from("/test"), "_2"), - PathBuf::from("/test_2") - ); - assert_eq!( - path_with_stem_suffix(&PathBuf::from("/test.txt"), "_2"), - PathBuf::from("/test_2.txt") - ); - assert_eq!( - path_with_stem_suffix(&PathBuf::from("/test/subdir"), "_2"), - PathBuf::from("/test/subdir_2") - ); - assert_eq!( - path_with_stem_suffix(&PathBuf::from("/test/subdir.other.txt"), "_2"), - PathBuf::from("/test/subdir.other_2.txt") - ); - assert_eq!( - path_with_stem_suffix(&PathBuf::from("/test.d.ts"), "_2"), - PathBuf::from("/test_2.d.ts") - ); - assert_eq!( - path_with_stem_suffix(&PathBuf::from("/test.D.TS"), "_2"), - PathBuf::from("/test_2.D.TS") - ); - assert_eq!( - path_with_stem_suffix(&PathBuf::from("/test.d.mts"), "_2"), - PathBuf::from("/test_2.d.mts") - ); - assert_eq!( - path_with_stem_suffix(&PathBuf::from("/test.d.cts"), "_2"), - PathBuf::from("/test_2.d.cts") - ); - } -} diff --git a/cli/http_cache.rs b/cli/http_cache.rs deleted file mode 100644 index 6cda9f279..000000000 --- a/cli/http_cache.rs +++ /dev/null @@ -1,284 +0,0 @@ -// Copyright 2018-2022 the Deno authors. 
All rights reserved. MIT license. -//! This module is meant to eventually implement HTTP cache -//! as defined in RFC 7234 (). -//! Currently it's a very simplified version to fulfill Deno needs -//! at hand. -use crate::fs_util; -use crate::http_util::HeadersMap; -use deno_core::error::generic_error; -use deno_core::error::AnyError; -use deno_core::serde::Deserialize; -use deno_core::serde::Serialize; -use deno_core::serde_json; -use deno_core::url::Url; -use log::error; -use std::fs; -use std::fs::File; -use std::io; -use std::path::Path; -use std::path::PathBuf; -use std::time::SystemTime; - -pub const CACHE_PERM: u32 = 0o644; - -/// Turn base of url (scheme, hostname, port) into a valid filename. -/// This method replaces port part with a special string token (because -/// ":" cannot be used in filename on some platforms). -/// Ex: $DENO_DIR/deps/https/deno.land/ -fn base_url_to_filename(url: &Url) -> Option { - let mut out = PathBuf::new(); - - let scheme = url.scheme(); - out.push(scheme); - - match scheme { - "http" | "https" => { - let host = url.host_str().unwrap(); - let host_port = match url.port() { - Some(port) => format!("{}_PORT{}", host, port), - None => host.to_string(), - }; - out.push(host_port); - } - "data" | "blob" => (), - scheme => { - error!("Don't know how to create cache name for scheme: {}", scheme); - return None; - } - }; - - Some(out) -} - -/// Turn provided `url` into a hashed filename. -/// URLs can contain a lot of characters that cannot be used -/// in filenames (like "?", "#", ":"), so in order to cache -/// them properly they are deterministically hashed into ASCII -/// strings. -/// -/// NOTE: this method is `pub` because it's used in integration_tests -pub fn url_to_filename(url: &Url) -> Option { - let mut cache_filename = base_url_to_filename(url)?; - - let mut rest_str = url.path().to_string(); - if let Some(query) = url.query() { - rest_str.push('?'); - rest_str.push_str(query); - } - // NOTE: fragment is omitted on purpose - it's not taken into - // account when caching - it denotes parts of webpage, which - // in case of static resources doesn't make much sense - let hashed_filename = crate::checksum::gen(&[rest_str.as_bytes()]); - cache_filename.push(hashed_filename); - Some(cache_filename) -} - -#[derive(Serialize, Deserialize)] -pub struct Metadata { - pub headers: HeadersMap, - pub url: String, - #[serde(default = "SystemTime::now")] - pub now: SystemTime, -} - -impl Metadata { - pub fn write(&self, cache_filename: &Path) -> Result<(), AnyError> { - let metadata_filename = Self::filename(cache_filename); - let json = serde_json::to_string_pretty(self)?; - fs_util::atomic_write_file(&metadata_filename, json, CACHE_PERM)?; - Ok(()) - } - - pub fn read(cache_filename: &Path) -> Result { - let metadata_filename = Metadata::filename(cache_filename); - let metadata = fs::read_to_string(metadata_filename)?; - let metadata: Metadata = serde_json::from_str(&metadata)?; - Ok(metadata) - } - - /// Ex: $DENO_DIR/deps/https/deno.land/c885b7dcf1d6936e33a9cc3a2d74ec79bab5d733d3701c85a029b7f7ec9fbed4.metadata.json - pub fn filename(cache_filename: &Path) -> PathBuf { - cache_filename.with_extension("metadata.json") - } -} - -#[derive(Debug, Clone, Default)] -pub struct HttpCache { - pub location: PathBuf, -} - -impl HttpCache { - /// Returns a new instance. - /// - /// `location` must be an absolute path. 
- pub fn new(location: &Path) -> Self { - assert!(location.is_absolute()); - Self { - location: location.to_owned(), - } - } - - /// Ensures the location of the cache. - fn ensure_dir_exists(&self, path: &Path) -> io::Result<()> { - if path.is_dir() { - return Ok(()); - } - fs::create_dir_all(path).map_err(|e| { - io::Error::new( - e.kind(), - format!( - "Could not create remote modules cache location: {:?}\nCheck the permission of the directory.", - path - ), - ) - }) - } - - pub fn get_cache_filename(&self, url: &Url) -> Option { - Some(self.location.join(url_to_filename(url)?)) - } - - // TODO(bartlomieju): this method should check headers file - // and validate against ETAG/Last-modified-as headers. - // ETAG check is currently done in `cli/file_fetcher.rs`. - pub fn get( - &self, - url: &Url, - ) -> Result<(File, HeadersMap, SystemTime), AnyError> { - let cache_filename = self.location.join( - url_to_filename(url) - .ok_or_else(|| generic_error("Can't convert url to filename."))?, - ); - let metadata_filename = Metadata::filename(&cache_filename); - let file = File::open(cache_filename)?; - let metadata = fs::read_to_string(metadata_filename)?; - let metadata: Metadata = serde_json::from_str(&metadata)?; - Ok((file, metadata.headers, metadata.now)) - } - - pub fn set( - &self, - url: &Url, - headers_map: HeadersMap, - content: &[u8], - ) -> Result<(), AnyError> { - let cache_filename = self.location.join( - url_to_filename(url) - .ok_or_else(|| generic_error("Can't convert url to filename."))?, - ); - // Create parent directory - let parent_filename = cache_filename - .parent() - .expect("Cache filename should have a parent dir"); - self.ensure_dir_exists(parent_filename)?; - // Cache content - fs_util::atomic_write_file(&cache_filename, content, CACHE_PERM)?; - - let metadata = Metadata { - now: SystemTime::now(), - url: url.to_string(), - headers: headers_map, - }; - metadata.write(&cache_filename) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use std::collections::HashMap; - use std::io::Read; - use test_util::TempDir; - - #[test] - fn test_create_cache() { - let dir = TempDir::new(); - let mut cache_path = dir.path().to_owned(); - cache_path.push("foobar"); - // HttpCache should be created lazily on first use: - // when zipping up a local project with no external dependencies - // "$DENO_DIR/deps" is empty. When unzipping such project - // "$DENO_DIR/deps" might not get restored and in situation - // when directory is owned by root we might not be able - // to create that directory. However if it's not needed it - // doesn't make sense to return error in such specific scenarios. 
- // For more details check issue: - // https://github.com/denoland/deno/issues/5688 - let cache = HttpCache::new(&cache_path); - assert!(!cache.location.exists()); - cache - .set( - &Url::parse("http://example.com/foo/bar.js").unwrap(), - HeadersMap::new(), - b"hello world", - ) - .expect("Failed to add to cache"); - assert!(cache.ensure_dir_exists(&cache.location).is_ok()); - assert!(cache_path.is_dir()); - } - - #[test] - fn test_get_set() { - let dir = TempDir::new(); - let cache = HttpCache::new(dir.path()); - let url = Url::parse("https://deno.land/x/welcome.ts").unwrap(); - let mut headers = HashMap::new(); - headers.insert( - "content-type".to_string(), - "application/javascript".to_string(), - ); - headers.insert("etag".to_string(), "as5625rqdsfb".to_string()); - let content = b"Hello world"; - let r = cache.set(&url, headers, content); - eprintln!("result {:?}", r); - assert!(r.is_ok()); - let r = cache.get(&url); - assert!(r.is_ok()); - let (mut file, headers, _) = r.unwrap(); - let mut content = String::new(); - file.read_to_string(&mut content).unwrap(); - assert_eq!(content, "Hello world"); - assert_eq!( - headers.get("content-type").unwrap(), - "application/javascript" - ); - assert_eq!(headers.get("etag").unwrap(), "as5625rqdsfb"); - assert_eq!(headers.get("foobar"), None); - } - - #[test] - fn test_url_to_filename() { - let test_cases = [ - ("https://deno.land/x/foo.ts", "https/deno.land/2c0a064891b9e3fbe386f5d4a833bce5076543f5404613656042107213a7bbc8"), - ( - "https://deno.land:8080/x/foo.ts", - "https/deno.land_PORT8080/2c0a064891b9e3fbe386f5d4a833bce5076543f5404613656042107213a7bbc8", - ), - ("https://deno.land/", "https/deno.land/8a5edab282632443219e051e4ade2d1d5bbc671c781051bf1437897cbdfea0f1"), - ( - "https://deno.land/?asdf=qwer", - "https/deno.land/e4edd1f433165141015db6a823094e6bd8f24dd16fe33f2abd99d34a0a21a3c0", - ), - // should be the same as case above, fragment (#qwer) is ignored - // when hashing - ( - "https://deno.land/?asdf=qwer#qwer", - "https/deno.land/e4edd1f433165141015db6a823094e6bd8f24dd16fe33f2abd99d34a0a21a3c0", - ), - ( - "data:application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=", - "data/c21c7fc382b2b0553dc0864aa81a3acacfb7b3d1285ab5ae76da6abec213fb37", - ), - ( - "data:text/plain,Hello%2C%20Deno!", - "data/967374e3561d6741234131e342bf5c6848b70b13758adfe23ee1a813a8131818", - ) - ]; - - for (url, expected) in test_cases.iter() { - let u = Url::parse(url).unwrap(); - let p = url_to_filename(&u).unwrap(); - assert_eq!(p, PathBuf::from(expected)); - } - } -} diff --git a/cli/logger.rs b/cli/logger.rs deleted file mode 100644 index caa027c04..000000000 --- a/cli/logger.rs +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. 
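For reference, a hedged sketch of the on-disk layout produced by the `HttpCache` removed above (the same logic moves to `cli/cache/http_cache.rs`); the URL is taken from the test above, and the assertions only illustrate the shape of the paths, not exact hash values:

```rust
use deno_core::url::Url;

// Assumes url_to_filename and Metadata from the cache module above are in scope.
fn cache_layout_sketch() {
  let url = Url::parse("https://deno.land:8080/x/foo.ts").unwrap();

  // Scheme and host become directories; the port is folded into the host
  // segment and the path + query are hashed into a single hex file name.
  let relative = url_to_filename(&url).unwrap();
  assert!(relative.starts_with("https/deno.land_PORT8080"));

  // The response body is written to `<cache root>/<relative>` (mode 0o644 via
  // atomic_write_file) and the headers to a sibling `*.metadata.json` file.
  let metadata_path = Metadata::filename(&relative);
  assert!(metadata_path.to_string_lossy().ends_with(".metadata.json"));
}
```

In the new location the `Metadata` struct is exposed as `CachedUrlMetadata`, which is what the LSP hunks further down switch to.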
- -use std::io::Write; - -struct CliLogger(env_logger::Logger); - -impl CliLogger { - pub fn new(logger: env_logger::Logger) -> Self { - Self(logger) - } - - pub fn filter(&self) -> log::LevelFilter { - self.0.filter() - } -} - -impl log::Log for CliLogger { - fn enabled(&self, metadata: &log::Metadata) -> bool { - self.0.enabled(metadata) - } - - fn log(&self, record: &log::Record) { - if self.enabled(record.metadata()) { - self.0.log(record); - } - } - - fn flush(&self) { - self.0.flush(); - } -} - -pub fn init(maybe_level: Option) { - let log_level = maybe_level.unwrap_or(log::Level::Info); - let logger = env_logger::Builder::from_env( - env_logger::Env::default() - .default_filter_or(log_level.to_level_filter().to_string()), - ) - // https://github.com/denoland/deno/issues/6641 - .filter_module("rustyline", log::LevelFilter::Off) - // wgpu crates (gfx_backend), have a lot of useless INFO and WARN logs - .filter_module("wgpu", log::LevelFilter::Error) - .filter_module("gfx", log::LevelFilter::Error) - // used to make available the lsp_debug which is then filtered out at runtime - // in the cli logger - .filter_module("deno::lsp::performance", log::LevelFilter::Debug) - .format(|buf, record| { - let mut target = record.target().to_string(); - if let Some(line_no) = record.line() { - target.push(':'); - target.push_str(&line_no.to_string()); - } - if record.level() <= log::Level::Info - || (record.target() == "deno::lsp::performance" - && record.level() == log::Level::Debug) - { - // Print ERROR, WARN, INFO and lsp_debug logs as they are - writeln!(buf, "{}", record.args()) - } else { - // Add prefix to DEBUG or TRACE logs - writeln!( - buf, - "{} RS - {} - {}", - record.level(), - target, - record.args() - ) - } - }) - .build(); - - let cli_logger = CliLogger::new(logger); - let max_level = cli_logger.filter(); - let r = log::set_boxed_logger(Box::new(cli_logger)); - if r.is_ok() { - log::set_max_level(max_level); - } - r.expect("Could not install logger."); -} diff --git a/cli/lsp/cache.rs b/cli/lsp/cache.rs index c4512a803..7f7f69871 100644 --- a/cli/lsp/cache.rs +++ b/cli/lsp/cache.rs @@ -1,6 +1,7 @@ // Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. 
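A brief usage note on the logger moved above into `cli/util/logger.rs`: it is initialized once from `main()`, and only DEBUG/TRACE records get the extra `LEVEL RS - target:line` prefix. A hedged sketch follows; the module path, line number, and message are invented for illustration:

```rust
// In main(), the level comes from the CLI flags and defaults to Info:
util::logger::init(flags.log_level);

// Under the format closure above, a record such as
//   log::debug!("preparing module graph");
// prints roughly as (illustrative target/line):
//   DEBUG RS - deno::proc_state:123 - preparing module graph
// while INFO/WARN/ERROR and `deno::lsp::performance` debug lines print bare.
```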
-use crate::http_cache; +use crate::cache::CachedUrlMetadata; +use crate::cache::HttpCache; use deno_core::parking_lot::Mutex; use deno_core::ModuleSpecifier; @@ -49,14 +50,14 @@ struct Metadata { #[derive(Debug, Default, Clone)] pub struct CacheMetadata { - cache: http_cache::HttpCache, + cache: HttpCache, metadata: Arc>>, } impl CacheMetadata { pub fn new(location: &Path) -> Self { Self { - cache: http_cache::HttpCache::new(location), + cache: HttpCache::new(location), metadata: Default::default(), } } @@ -87,8 +88,7 @@ impl CacheMetadata { return None; } let cache_filename = self.cache.get_cache_filename(specifier)?; - let specifier_metadata = - http_cache::Metadata::read(&cache_filename).ok()?; + let specifier_metadata = CachedUrlMetadata::read(&cache_filename).ok()?; let values = Arc::new(parse_metadata(&specifier_metadata.headers)); let version = calculate_fs_version(&cache_filename); let mut metadata_map = self.metadata.lock(); @@ -98,7 +98,7 @@ impl CacheMetadata { } pub fn set_location(&mut self, location: &Path) { - self.cache = http_cache::HttpCache::new(location); + self.cache = HttpCache::new(location); self.metadata.lock().clear(); } } diff --git a/cli/lsp/completions.rs b/cli/lsp/completions.rs index 67978fbc9..5e0fad0f4 100644 --- a/cli/lsp/completions.rs +++ b/cli/lsp/completions.rs @@ -7,9 +7,9 @@ use super::lsp_custom; use super::registries::ModuleRegistry; use super::tsc; -use crate::fs_util::is_supported_ext; -use crate::fs_util::relative_specifier; -use crate::fs_util::specifier_to_file_path; +use crate::util::path::is_supported_ext; +use crate::util::path::relative_specifier; +use crate::util::path::specifier_to_file_path; use deno_ast::LineAndColumnIndex; use deno_ast::SourceTextInfo; @@ -505,7 +505,7 @@ fn get_workspace_completions( #[cfg(test)] mod tests { use super::*; - use crate::http_cache::HttpCache; + use crate::cache::HttpCache; use crate::lsp::documents::Documents; use crate::lsp::documents::LanguageId; use deno_core::resolve_url; diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs index 04a48435f..8d5da46b7 100644 --- a/cli/lsp/config.rs +++ b/cli/lsp/config.rs @@ -2,7 +2,8 @@ use super::client::Client; use super::logging::lsp_log; -use crate::fs_util; +use crate::util::path::ensure_directory_specifier; +use crate::util::path::specifier_to_file_path; use deno_core::error::AnyError; use deno_core::serde::Deserialize; use deno_core::serde::Serialize; @@ -549,11 +550,11 @@ impl Config { workspace: &ModuleSpecifier, enabled_paths: Vec, ) -> bool { - let workspace = fs_util::ensure_directory_specifier(workspace.clone()); + let workspace = ensure_directory_specifier(workspace.clone()); let key = workspace.to_string(); let mut touched = false; if !enabled_paths.is_empty() { - if let Ok(workspace_path) = fs_util::specifier_to_file_path(&workspace) { + if let Ok(workspace_path) = specifier_to_file_path(&workspace) { let mut paths = Vec::new(); for path in &enabled_paths { let fs_path = workspace_path.join(path); diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs index 5c1e60e73..ce8fead0d 100644 --- a/cli/lsp/documents.rs +++ b/cli/lsp/documents.rs @@ -6,12 +6,11 @@ use super::tsc; use super::tsc::AssetDocument; use crate::args::ConfigFile; +use crate::cache::CachedUrlMetadata; +use crate::cache::HttpCache; use crate::file_fetcher::get_source_from_bytes; use crate::file_fetcher::map_content_type; use crate::file_fetcher::SUPPORTED_SCHEMES; -use crate::fs_util::specifier_to_file_path; -use crate::http_cache; -use crate::http_cache::HttpCache; use crate::node; 
use crate::node::node_resolve_npm_reference; use crate::node::NodeResolution; @@ -20,7 +19,8 @@ use crate::npm::NpmPackageReference; use crate::npm::NpmPackageReq; use crate::npm::NpmPackageResolver; use crate::resolver::CliResolver; -use crate::text_encoding; +use crate::util::path::specifier_to_file_path; +use crate::util::text_encoding; use deno_ast::MediaType; use deno_ast::ParsedSource; @@ -610,7 +610,7 @@ impl SpecifierResolver { ) -> Option { let cache_filename = self.cache.get_cache_filename(specifier)?; if redirect_limit > 0 && cache_filename.is_file() { - let headers = http_cache::Metadata::read(&cache_filename) + let headers = CachedUrlMetadata::read(&cache_filename) .ok() .map(|m| m.headers)?; if let Some(location) = headers.get("location") { @@ -657,8 +657,7 @@ impl FileSystemDocuments { ) } else { let cache_filename = cache.get_cache_filename(specifier)?; - let specifier_metadata = - http_cache::Metadata::read(&cache_filename).ok()?; + let specifier_metadata = CachedUrlMetadata::read(&cache_filename).ok()?; let maybe_content_type = specifier_metadata.headers.get("content-type").cloned(); let maybe_headers = Some(&specifier_metadata.headers); diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index 081bbf429..11897af9d 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -57,6 +57,8 @@ use super::tsc::Assets; use super::tsc::AssetsSnapshot; use super::tsc::TsServer; use super::urls; +use crate::args::get_root_cert_store; +use crate::args::CacheSetting; use crate::args::CliOptions; use crate::args::ConfigFile; use crate::args::Flags; @@ -64,10 +66,7 @@ use crate::args::FmtConfig; use crate::args::LintConfig; use crate::args::TsConfig; use crate::cache::DenoDir; -use crate::file_fetcher::get_root_cert_store; use crate::file_fetcher::get_source_from_data_url; -use crate::file_fetcher::CacheSetting; -use crate::fs_util; use crate::graph_util::graph_valid; use crate::http_util::HttpClient; use crate::npm::NpmCache; @@ -75,9 +74,12 @@ use crate::npm::NpmPackageResolver; use crate::npm::RealNpmRegistryApi; use crate::proc_state::import_map_from_text; use crate::proc_state::ProcState; -use crate::progress_bar::ProgressBar; use crate::tools::fmt::format_file; use crate::tools::fmt::format_parsed_source; +use crate::util::fs::remove_dir_all_if_exists; +use crate::util::path::ensure_directory_specifier; +use crate::util::path::specifier_to_file_path; +use crate::util::progress_bar::ProgressBar; #[derive(Debug, Clone)] pub struct LanguageServer(Arc>); @@ -407,7 +409,7 @@ impl Inner { // file open and not a workspace. 
In those situations we can't // automatically discover the configuration if let Some(root_uri) = &self.config.root_uri { - let root_path = fs_util::specifier_to_file_path(root_uri)?; + let root_path = specifier_to_file_path(root_uri)?; let mut checked = std::collections::HashSet::new(); let maybe_config = ConfigFile::discover_from(&root_path, &mut checked)?; Ok(maybe_config.map(|c| { @@ -481,7 +483,7 @@ impl Inner { let cache_url = if let Ok(url) = Url::from_file_path(cache_str) { Ok(url) } else if let Some(root_uri) = &self.config.root_uri { - let root_path = fs_util::specifier_to_file_path(root_uri)?; + let root_path = specifier_to_file_path(root_uri)?; let cache_path = root_path.join(cache_str); Url::from_file_path(cache_path).map_err(|_| { anyhow!("Bad file path for import path: {:?}", cache_str) @@ -492,7 +494,7 @@ impl Inner { cache_str )) }?; - let cache_path = fs_util::specifier_to_file_path(&cache_url)?; + let cache_path = specifier_to_file_path(&cache_url)?; lsp_log!( " Resolved cache path: \"{}\"", cache_path.to_string_lossy() @@ -521,7 +523,7 @@ impl Inner { .config .root_uri .as_ref() - .and_then(|uri| fs_util::specifier_to_file_path(uri).ok()); + .and_then(|uri| specifier_to_file_path(uri).ok()); let root_cert_store = Some(get_root_cert_store( maybe_root_path, workspace_settings.certificate_stores.clone(), @@ -569,7 +571,7 @@ impl Inner { anyhow!("Bad data url for import map: {}", import_map_str) })?) } else if let Some(root_uri) = &self.config.root_uri { - let root_path = fs_util::specifier_to_file_path(root_uri)?; + let root_path = specifier_to_file_path(root_uri)?; let import_map_path = root_path.join(&import_map_str); Some(Url::from_file_path(import_map_path).map_err(|_| { anyhow!("Bad file path for import map: {}", import_map_str) @@ -612,7 +614,7 @@ impl Inner { let import_map_json = if import_map_url.scheme() == "data" { get_source_from_data_url(&import_map_url)?.0 } else { - let import_map_path = fs_util::specifier_to_file_path(&import_map_url)?; + let import_map_path = specifier_to_file_path(&import_map_url)?; lsp_log!( " Resolved import map: \"{}\"", import_map_path.to_string_lossy() @@ -768,7 +770,7 @@ impl Inner { self.config.root_uri = params .root_uri .map(|s| self.url_map.normalize_url(&s)) - .map(fs_util::ensure_directory_specifier); + .map(ensure_directory_specifier); if let Some(value) = params.initialization_options { self.config.set_workspace_settings(value).map_err(|err| { @@ -1137,11 +1139,10 @@ impl Inner { _ => return Ok(None), }; let mark = self.performance.mark("formatting", Some(¶ms)); - let file_path = - fs_util::specifier_to_file_path(&specifier).map_err(|err| { - error!("{}", err); - LspError::invalid_request() - })?; + let file_path = specifier_to_file_path(&specifier).map_err(|err| { + error!("{}", err); + LspError::invalid_request() + })?; let fmt_options = if let Some(fmt_config) = self.maybe_fmt_config.as_ref() { // skip formatting any files ignored by the config file @@ -2063,7 +2064,7 @@ impl Inner { .config .root_uri .as_ref() - .and_then(|uri| fs_util::specifier_to_file_path(uri).ok()); + .and_then(|uri| specifier_to_file_path(uri).ok()); let mut resolved_items = Vec::::new(); for item in incoming_calls.iter() { if let Some(resolved) = item.try_resolve_call_hierarchy_incoming_call( @@ -2109,7 +2110,7 @@ impl Inner { .config .root_uri .as_ref() - .and_then(|uri| fs_util::specifier_to_file_path(uri).ok()); + .and_then(|uri| specifier_to_file_path(uri).ok()); let mut resolved_items = Vec::::new(); for item in outgoing_calls.iter() { if let 
Some(resolved) = item.try_resolve_call_hierarchy_outgoing_call( @@ -2162,7 +2163,7 @@ impl Inner { .config .root_uri .as_ref() - .and_then(|uri| fs_util::specifier_to_file_path(uri).ok()); + .and_then(|uri| specifier_to_file_path(uri).ok()); let mut resolved_items = Vec::::new(); match one_or_many { tsc::OneOrMany::One(item) => { @@ -3010,7 +3011,7 @@ impl Inner { } async fn reload_import_registries(&mut self) -> LspResult> { - fs_util::remove_dir_all_if_exists(&self.module_registries_location) + remove_dir_all_if_exists(&self.module_registries_location) .await .map_err(|err| { error!("Unable to remove registries cache: {}", err); diff --git a/cli/lsp/registries.rs b/cli/lsp/registries.rs index 43500e697..1488077dd 100644 --- a/cli/lsp/registries.rs +++ b/cli/lsp/registries.rs @@ -12,10 +12,10 @@ use super::path_to_regex::StringOrNumber; use super::path_to_regex::StringOrVec; use super::path_to_regex::Token; +use crate::args::CacheSetting; use crate::cache::DenoDir; -use crate::file_fetcher::CacheSetting; +use crate::cache::HttpCache; use crate::file_fetcher::FileFetcher; -use crate::http_cache::HttpCache; use crate::http_util::HttpClient; use deno_core::anyhow::anyhow; diff --git a/cli/lsp/testing/definitions.rs b/cli/lsp/testing/definitions.rs index 14ac165fd..a2cd78012 100644 --- a/cli/lsp/testing/definitions.rs +++ b/cli/lsp/testing/definitions.rs @@ -2,9 +2,9 @@ use super::lsp_custom; -use crate::checksum; use crate::lsp::analysis::source_range_to_lsp_range; use crate::lsp::client::TestingNotification; +use crate::util::checksum; use deno_ast::SourceRange; use deno_ast::SourceTextInfo; diff --git a/cli/lsp/testing/execution.rs b/cli/lsp/testing/execution.rs index d839cda56..bd4f07621 100644 --- a/cli/lsp/testing/execution.rs +++ b/cli/lsp/testing/execution.rs @@ -6,7 +6,6 @@ use super::lsp_custom; use crate::args::flags_from_vec; use crate::args::DenoSubcommand; -use crate::checksum; use crate::lsp::client::Client; use crate::lsp::client::TestingNotification; use crate::lsp::config; @@ -15,6 +14,7 @@ use crate::ops; use crate::proc_state; use crate::tools::test; use crate::tools::test::TestEventSender; +use crate::util::checksum; use crate::worker::create_main_worker_for_test_or_bench; use deno_core::anyhow::anyhow; diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index 4bb5ae5f9..88de60131 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -18,10 +18,10 @@ use super::urls::LspUrlMap; use super::urls::INVALID_SPECIFIER; use crate::args::TsConfig; -use crate::fs_util::relative_specifier; -use crate::fs_util::specifier_to_file_path; use crate::tsc; use crate::tsc::ResolveArgs; +use crate::util::path::relative_specifier; +use crate::util::path::specifier_to_file_path; use deno_core::anyhow::anyhow; use deno_core::error::custom_error; @@ -3445,7 +3445,7 @@ pub fn request( #[cfg(test)] mod tests { use super::*; - use crate::http_cache::HttpCache; + use crate::cache::HttpCache; use crate::http_util::HeadersMap; use crate::lsp::config::WorkspaceSettings; use crate::lsp::documents::Documents; diff --git a/cli/lsp/urls.rs b/cli/lsp/urls.rs index 9b14098ae..5aed54ad5 100644 --- a/cli/lsp/urls.rs +++ b/cli/lsp/urls.rs @@ -56,7 +56,7 @@ fn hash_data_specifier(specifier: &ModuleSpecifier) -> String { file_name_str.push('?'); file_name_str.push_str(query); } - crate::checksum::gen(&[file_name_str.as_bytes()]) + crate::util::checksum::gen(&[file_name_str.as_bytes()]) } #[derive(Debug, Default)] diff --git a/cli/main.rs b/cli/main.rs index 4eaaeb755..ad585c415 100644 --- a/cli/main.rs +++ b/cli/main.rs 
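The LSP and module-loader hunks above all apply the same mechanical renames; condensed into one sketch, the new `use` paths (with their pre-refactor counterparts noted in comments) are:

```rust
use crate::args::get_root_cert_store; // was crate::file_fetcher::get_root_cert_store
use crate::args::CacheSetting; // was crate::file_fetcher::CacheSetting
use crate::cache::CachedUrlMetadata; // was crate::http_cache::Metadata
use crate::cache::HttpCache; // was crate::http_cache::HttpCache
use crate::util::checksum; // was crate::checksum
use crate::util::fs::remove_dir_all_if_exists; // was crate::fs_util::remove_dir_all_if_exists
use crate::util::path::ensure_directory_specifier; // was crate::fs_util::ensure_directory_specifier
use crate::util::path::relative_specifier; // was crate::fs_util::relative_specifier
use crate::util::path::specifier_to_file_path; // was crate::fs_util::specifier_to_file_path
use crate::util::progress_bar::ProgressBar; // was crate::progress_bar::ProgressBar
use crate::util::text_encoding; // was crate::text_encoding
```

Nothing is re-implemented in these hunks; only the module paths change.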
@@ -3,20 +3,13 @@ mod args; mod auth_tokens; mod cache; -mod checksum; mod deno_std; -mod diff; -mod display; mod emit; mod errors; mod file_fetcher; -mod file_watcher; -mod fs_util; mod graph_util; -mod http_cache; mod http_util; mod js; -mod logger; mod lsp; mod module_loader; mod napi; @@ -24,15 +17,12 @@ mod node; mod npm; mod ops; mod proc_state; -mod progress_bar; mod resolver; mod standalone; -mod text_encoding; mod tools; mod tsc; -mod unix_util; +mod util; mod version; -mod windows_util; mod worker; use crate::args::flags_from_vec; @@ -63,11 +53,12 @@ use crate::args::UpgradeFlags; use crate::args::VendorFlags; use crate::cache::TypeCheckCache; use crate::file_fetcher::File; -use crate::file_watcher::ResolutionResult; use crate::graph_util::graph_lock_or_exit; use crate::proc_state::ProcState; use crate::resolver::CliResolver; use crate::tools::check; +use crate::util::display; +use crate::util::file_watcher::ResolutionResult; use args::CliOptions; use args::Lockfile; @@ -482,7 +473,7 @@ async fn bundle_command( if let Some(out_file) = out_file.as_ref() { let output_bytes = bundle_output.code.as_bytes(); let output_len = output_bytes.len(); - fs_util::write_file(out_file, output_bytes, 0o644)?; + util::fs::write_file(out_file, output_bytes, 0o644)?; info!( "{} {:?} ({})", colors::green("Emit"), @@ -498,7 +489,7 @@ async fn bundle_command( "map".to_string() }; let map_out_file = out_file.with_extension(ext); - fs_util::write_file(&map_out_file, map_bytes, 0o644)?; + util::fs::write_file(&map_out_file, map_bytes, 0o644)?; info!( "{} {:?} ({})", colors::green("Emit"), @@ -515,10 +506,10 @@ async fn bundle_command( }; if cli_options.watch_paths().is_some() { - file_watcher::watch_func( + util::file_watcher::watch_func( resolver, operation, - file_watcher::PrintConfig { + util::file_watcher::PrintConfig { job_name: "Bundle".to_string(), clear_screen: !cli_options.no_clear_screen(), }, @@ -660,11 +651,11 @@ async fn run_with_watch(flags: Flags, script: String) -> Result { }) }; - file_watcher::watch_func2( + util::file_watcher::watch_func2( receiver, operation, (sender, main_module), - file_watcher::PrintConfig { + util::file_watcher::PrintConfig { job_name: "Process".to_string(), clear_screen: !flags.no_clear_screen, }, @@ -952,8 +943,8 @@ fn unwrap_or_exit(result: Result) -> T { pub fn main() { setup_panic_hook(); - unix_util::raise_fd_limit(); - windows_util::ensure_stdio_open(); + util::unix::raise_fd_limit(); + util::windows::ensure_stdio_open(); #[cfg(windows)] colors::enable_ansi(); // For Windows 10 @@ -984,7 +975,7 @@ pub fn main() { init_v8_flags(&flags.v8_flags); } - logger::init(flags.log_level); + util::logger::init(flags.log_level); get_subcommand(flags).await }; diff --git a/cli/module_loader.rs b/cli/module_loader.rs index 41ec7e28c..6adfc9f49 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -5,8 +5,8 @@ use crate::emit::emit_parsed_source; use crate::graph_util::ModuleEntry; use crate::node; use crate::proc_state::ProcState; -use crate::text_encoding::code_without_source_map; -use crate::text_encoding::source_map_from_code; +use crate::util::text_encoding::code_without_source_map; +use crate::util::text_encoding::source_map_from_code; use deno_ast::MediaType; use deno_core::anyhow::anyhow; diff --git a/cli/napi/sym/Cargo.toml b/cli/napi/sym/Cargo.toml new file mode 100644 index 000000000..49a11fff9 --- /dev/null +++ b/cli/napi/sym/Cargo.toml @@ -0,0 +1,22 @@ +# Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. 
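The new `cli/util/mod.rs` (a 14-line file per the diffstat) is not included in this excerpt; judging from the `util::fs`, `util::logger`, `util::unix`, `util::windows`, `util::display`, and `util::file_watcher` call sites in the `cli/main.rs` hunk above, it presumably just declares the moved submodules, roughly:

```rust
// Hypothetical reconstruction of cli/util/mod.rs (not shown in this patch excerpt).
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.

pub mod checksum;
pub mod diff;
pub mod display;
pub mod file_watcher;
pub mod fs;
pub mod logger;
pub mod path;
pub mod progress_bar;
pub mod text_encoding;
pub mod unix;
pub mod windows;
```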
+ +[package] +name = "napi_sym" +version = "0.9.0" +authors.workspace = true +edition.workspace = true +license.workspace = true +readme = "README.md" +repository.workspace = true +description = "proc macro for writing N-API symbols" + +[lib] +path = "./lib.rs" +proc-macro = true + +[dependencies] +proc-macro2.workspace = true +quote.workspace = true +serde.workspace = true +serde_json.workspace = true +syn.workspace = true diff --git a/cli/napi/sym/README.md b/cli/napi/sym/README.md new file mode 100644 index 000000000..b3e2ab43b --- /dev/null +++ b/cli/napi/sym/README.md @@ -0,0 +1,34 @@ +# napi_sym + +A proc_macro for Deno's Node-API implementation. It does the following things: + +- Marks the symbol as `#[no_mangle]` and rewrites it as `pub extern "C" $name`. +- Asserts that the function symbol is present in + [`symbol_exports.json`](./symbol_exports.json). +- Maps `deno_napi::Result` to raw `napi_result`. + +```rust +use deno_napi::{napi_value, Env, Error, Result}; + +#[napi_sym::napi_sym] +fn napi_get_boolean( + env: *mut Env, + value: bool, + result: *mut napi_value, +) -> Result { + let _env: &mut Env = env.as_mut().ok_or(Error::InvalidArg)?; + // *result = ... + Ok(()) +} +``` + +### `symbol_exports.json` + +A file containing the symbols that need to be put into the executable's dynamic +symbol table at link-time. + +This is done using `/DEF:` on Windows, `-exported_symbol,_` on macOS and +`--export-dynamic-symbol=` on Linux. See [`cli/build.rs`](../build.rs). + +On Windows, you need to generate the `.def` file by running +[`tools/napi/generate_symbols_lists.js`](../../tools/napi/generate_symbols_lists.js). diff --git a/cli/napi/sym/lib.rs b/cli/napi/sym/lib.rs new file mode 100644 index 000000000..984d7f4bc --- /dev/null +++ b/cli/napi/sym/lib.rs @@ -0,0 +1,46 @@ +// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. + +use proc_macro::TokenStream; +use quote::quote; +use serde::Deserialize; + +static NAPI_EXPORTS: &str = include_str!("./symbol_exports.json"); + +#[derive(Deserialize)] +struct SymbolExports { + pub symbols: Vec, +} + +#[proc_macro_attribute] +pub fn napi_sym(_attr: TokenStream, item: TokenStream) -> TokenStream { + let func = syn::parse::(item).expect("expected a function"); + + let exports: SymbolExports = + serde_json::from_str(NAPI_EXPORTS).expect("failed to parse exports"); + let name = &func.sig.ident; + assert!( + exports.symbols.contains(&name.to_string()), + "tools/napi/sym/symbol_exports.json is out of sync!" + ); + + let block = &func.block; + let inputs = &func.sig.inputs; + let output = &func.sig.output; + let generics = &func.sig.generics; + let ret_ty = match output { + syn::ReturnType::Default => panic!("expected a return type"), + syn::ReturnType::Type(_, ty) => quote! { #ty }, + }; + TokenStream::from(quote! { + // SAFETY: it's an NAPI function. 
+ #[no_mangle] + pub unsafe extern "C" fn #name #generics (#inputs) -> napi_status { + let mut inner = || -> #ret_ty { + #block + }; + inner() + .map(|_| napi_ok) + .unwrap_or_else(|e| e.into()) + } + }) +} diff --git a/cli/napi/sym/symbol_exports.json b/cli/napi/sym/symbol_exports.json new file mode 100644 index 000000000..ba1bba67a --- /dev/null +++ b/cli/napi/sym/symbol_exports.json @@ -0,0 +1,148 @@ +{ + "symbols": [ + "node_api_create_syntax_error", + "napi_make_callback", + "napi_has_named_property", + "napi_async_destroy", + "napi_coerce_to_object", + "napi_get_arraybuffer_info", + "napi_detach_arraybuffer", + "napi_get_undefined", + "napi_reference_unref", + "napi_fatal_error", + "napi_open_callback_scope", + "napi_close_callback_scope", + "napi_get_value_uint32", + "napi_create_function", + "napi_create_arraybuffer", + "napi_get_value_int64", + "napi_get_all_property_names", + "napi_resolve_deferred", + "napi_is_detached_arraybuffer", + "napi_create_string_utf8", + "napi_create_threadsafe_function", + "node_api_throw_syntax_error", + "napi_create_bigint_int64", + "napi_wrap", + "napi_set_property", + "napi_get_value_bigint_int64", + "napi_open_handle_scope", + "napi_create_error", + "napi_create_buffer", + "napi_cancel_async_work", + "napi_is_exception_pending", + "napi_acquire_threadsafe_function", + "napi_create_external", + "napi_get_threadsafe_function_context", + "napi_get_null", + "napi_create_string_utf16", + "napi_get_value_bigint_uint64", + "napi_module_register", + "napi_is_typedarray", + "napi_create_external_buffer", + "napi_get_new_target", + "napi_get_instance_data", + "napi_close_handle_scope", + "napi_get_value_string_utf16", + "napi_get_property_names", + "napi_is_arraybuffer", + "napi_get_cb_info", + "napi_define_properties", + "napi_add_env_cleanup_hook", + "node_api_get_module_file_name", + "napi_get_node_version", + "napi_create_int64", + "napi_create_double", + "napi_get_and_clear_last_exception", + "napi_create_reference", + "napi_get_typedarray_info", + "napi_call_threadsafe_function", + "napi_get_last_error_info", + "napi_create_array_with_length", + "napi_coerce_to_number", + "napi_get_global", + "napi_is_error", + "napi_set_instance_data", + "napi_create_typedarray", + "napi_throw_type_error", + "napi_has_property", + "napi_get_value_external", + "napi_create_range_error", + "napi_typeof", + "napi_ref_threadsafe_function", + "napi_create_bigint_uint64", + "napi_get_prototype", + "napi_adjust_external_memory", + "napi_release_threadsafe_function", + "napi_delete_async_work", + "napi_create_string_latin1", + "napi_is_array", + "napi_unref_threadsafe_function", + "napi_throw_error", + "napi_has_own_property", + "napi_get_reference_value", + "napi_remove_env_cleanup_hook", + "napi_get_value_string_utf8", + "napi_is_promise", + "napi_get_boolean", + "napi_run_script", + "napi_get_element", + "napi_get_named_property", + "napi_get_buffer_info", + "napi_get_value_bool", + "napi_reference_ref", + "napi_create_object", + "napi_create_promise", + "napi_create_int32", + "napi_escape_handle", + "napi_open_escapable_handle_scope", + "napi_throw", + "napi_get_value_double", + "napi_set_named_property", + "napi_call_function", + "napi_create_date", + "napi_object_freeze", + "napi_get_uv_event_loop", + "napi_get_value_string_latin1", + "napi_reject_deferred", + "napi_add_finalizer", + "napi_create_array", + "napi_delete_reference", + "napi_get_date_value", + "napi_create_dataview", + "napi_get_version", + "napi_define_class", + "napi_is_date", + "napi_remove_wrap", + 
"napi_delete_property", + "napi_instanceof", + "napi_create_buffer_copy", + "napi_delete_element", + "napi_object_seal", + "napi_queue_async_work", + "napi_get_value_bigint_words", + "napi_is_buffer", + "napi_get_array_length", + "napi_get_property", + "napi_new_instance", + "napi_set_element", + "napi_create_bigint_words", + "napi_strict_equals", + "napi_is_dataview", + "napi_close_escapable_handle_scope", + "napi_get_dataview_info", + "napi_get_value_int32", + "napi_unwrap", + "napi_throw_range_error", + "napi_coerce_to_bool", + "napi_create_uint32", + "napi_has_element", + "napi_create_external_arraybuffer", + "napi_create_symbol", + "napi_coerce_to_string", + "napi_create_type_error", + "napi_fatal_exception", + "napi_create_async_work", + "napi_async_init" + ] +} diff --git a/cli/napi_sym/Cargo.toml b/cli/napi_sym/Cargo.toml deleted file mode 100644 index 49a11fff9..000000000 --- a/cli/napi_sym/Cargo.toml +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. - -[package] -name = "napi_sym" -version = "0.9.0" -authors.workspace = true -edition.workspace = true -license.workspace = true -readme = "README.md" -repository.workspace = true -description = "proc macro for writing N-API symbols" - -[lib] -path = "./lib.rs" -proc-macro = true - -[dependencies] -proc-macro2.workspace = true -quote.workspace = true -serde.workspace = true -serde_json.workspace = true -syn.workspace = true diff --git a/cli/napi_sym/README.md b/cli/napi_sym/README.md deleted file mode 100644 index b3e2ab43b..000000000 --- a/cli/napi_sym/README.md +++ /dev/null @@ -1,34 +0,0 @@ -# napi_sym - -A proc_macro for Deno's Node-API implementation. It does the following things: - -- Marks the symbol as `#[no_mangle]` and rewrites it as `pub extern "C" $name`. -- Asserts that the function symbol is present in - [`symbol_exports.json`](./symbol_exports.json). -- Maps `deno_napi::Result` to raw `napi_result`. - -```rust -use deno_napi::{napi_value, Env, Error, Result}; - -#[napi_sym::napi_sym] -fn napi_get_boolean( - env: *mut Env, - value: bool, - result: *mut napi_value, -) -> Result { - let _env: &mut Env = env.as_mut().ok_or(Error::InvalidArg)?; - // *result = ... - Ok(()) -} -``` - -### `symbol_exports.json` - -A file containing the symbols that need to be put into the executable's dynamic -symbol table at link-time. - -This is done using `/DEF:` on Windows, `-exported_symbol,_` on macOS and -`--export-dynamic-symbol=` on Linux. See [`cli/build.rs`](../build.rs). - -On Windows, you need to generate the `.def` file by running -[`tools/napi/generate_symbols_lists.js`](../../tools/napi/generate_symbols_lists.js). diff --git a/cli/napi_sym/lib.rs b/cli/napi_sym/lib.rs deleted file mode 100644 index 2805c9957..000000000 --- a/cli/napi_sym/lib.rs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. - -use proc_macro::TokenStream; -use quote::quote; -use serde::Deserialize; - -static NAPI_EXPORTS: &str = include_str!("./symbol_exports.json"); - -#[derive(Deserialize)] -struct SymbolExports { - pub symbols: Vec, -} - -#[proc_macro_attribute] -pub fn napi_sym(_attr: TokenStream, item: TokenStream) -> TokenStream { - let func = syn::parse::(item).expect("expected a function"); - - let exports: SymbolExports = - serde_json::from_str(NAPI_EXPORTS).expect("failed to parse exports"); - let name = &func.sig.ident; - assert!( - exports.symbols.contains(&name.to_string()), - "tools/napi/symbol_exports.json is out of sync!" 
- ); - - let block = &func.block; - let inputs = &func.sig.inputs; - let output = &func.sig.output; - let generics = &func.sig.generics; - let ret_ty = match output { - syn::ReturnType::Default => panic!("expected a return type"), - syn::ReturnType::Type(_, ty) => quote! { #ty }, - }; - TokenStream::from(quote! { - // SAFETY: it's an NAPI function. - #[no_mangle] - pub unsafe extern "C" fn #name #generics (#inputs) -> napi_status { - let mut inner = || -> #ret_ty { - #block - }; - inner() - .map(|_| napi_ok) - .unwrap_or_else(|e| e.into()) - } - }) -} diff --git a/cli/napi_sym/symbol_exports.json b/cli/napi_sym/symbol_exports.json deleted file mode 100644 index ba1bba67a..000000000 --- a/cli/napi_sym/symbol_exports.json +++ /dev/null @@ -1,148 +0,0 @@ -{ - "symbols": [ - "node_api_create_syntax_error", - "napi_make_callback", - "napi_has_named_property", - "napi_async_destroy", - "napi_coerce_to_object", - "napi_get_arraybuffer_info", - "napi_detach_arraybuffer", - "napi_get_undefined", - "napi_reference_unref", - "napi_fatal_error", - "napi_open_callback_scope", - "napi_close_callback_scope", - "napi_get_value_uint32", - "napi_create_function", - "napi_create_arraybuffer", - "napi_get_value_int64", - "napi_get_all_property_names", - "napi_resolve_deferred", - "napi_is_detached_arraybuffer", - "napi_create_string_utf8", - "napi_create_threadsafe_function", - "node_api_throw_syntax_error", - "napi_create_bigint_int64", - "napi_wrap", - "napi_set_property", - "napi_get_value_bigint_int64", - "napi_open_handle_scope", - "napi_create_error", - "napi_create_buffer", - "napi_cancel_async_work", - "napi_is_exception_pending", - "napi_acquire_threadsafe_function", - "napi_create_external", - "napi_get_threadsafe_function_context", - "napi_get_null", - "napi_create_string_utf16", - "napi_get_value_bigint_uint64", - "napi_module_register", - "napi_is_typedarray", - "napi_create_external_buffer", - "napi_get_new_target", - "napi_get_instance_data", - "napi_close_handle_scope", - "napi_get_value_string_utf16", - "napi_get_property_names", - "napi_is_arraybuffer", - "napi_get_cb_info", - "napi_define_properties", - "napi_add_env_cleanup_hook", - "node_api_get_module_file_name", - "napi_get_node_version", - "napi_create_int64", - "napi_create_double", - "napi_get_and_clear_last_exception", - "napi_create_reference", - "napi_get_typedarray_info", - "napi_call_threadsafe_function", - "napi_get_last_error_info", - "napi_create_array_with_length", - "napi_coerce_to_number", - "napi_get_global", - "napi_is_error", - "napi_set_instance_data", - "napi_create_typedarray", - "napi_throw_type_error", - "napi_has_property", - "napi_get_value_external", - "napi_create_range_error", - "napi_typeof", - "napi_ref_threadsafe_function", - "napi_create_bigint_uint64", - "napi_get_prototype", - "napi_adjust_external_memory", - "napi_release_threadsafe_function", - "napi_delete_async_work", - "napi_create_string_latin1", - "napi_is_array", - "napi_unref_threadsafe_function", - "napi_throw_error", - "napi_has_own_property", - "napi_get_reference_value", - "napi_remove_env_cleanup_hook", - "napi_get_value_string_utf8", - "napi_is_promise", - "napi_get_boolean", - "napi_run_script", - "napi_get_element", - "napi_get_named_property", - "napi_get_buffer_info", - "napi_get_value_bool", - "napi_reference_ref", - "napi_create_object", - "napi_create_promise", - "napi_create_int32", - "napi_escape_handle", - "napi_open_escapable_handle_scope", - "napi_throw", - "napi_get_value_double", - "napi_set_named_property", - 
"napi_call_function", - "napi_create_date", - "napi_object_freeze", - "napi_get_uv_event_loop", - "napi_get_value_string_latin1", - "napi_reject_deferred", - "napi_add_finalizer", - "napi_create_array", - "napi_delete_reference", - "napi_get_date_value", - "napi_create_dataview", - "napi_get_version", - "napi_define_class", - "napi_is_date", - "napi_remove_wrap", - "napi_delete_property", - "napi_instanceof", - "napi_create_buffer_copy", - "napi_delete_element", - "napi_object_seal", - "napi_queue_async_work", - "napi_get_value_bigint_words", - "napi_is_buffer", - "napi_get_array_length", - "napi_get_property", - "napi_new_instance", - "napi_set_element", - "napi_create_bigint_words", - "napi_strict_equals", - "napi_is_dataview", - "napi_close_escapable_handle_scope", - "napi_get_dataview_info", - "napi_get_value_int32", - "napi_unwrap", - "napi_throw_range_error", - "napi_coerce_to_bool", - "napi_create_uint32", - "napi_has_element", - "napi_create_external_arraybuffer", - "napi_create_symbol", - "napi_coerce_to_string", - "napi_create_type_error", - "napi_fatal_exception", - "napi_create_async_work", - "napi_async_init" - ] -} diff --git a/cli/npm/cache.rs b/cli/npm/cache.rs index 5e2f06ef7..ad6ab9db2 100644 --- a/cli/npm/cache.rs +++ b/cli/npm/cache.rs @@ -14,11 +14,13 @@ use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; use deno_core::url::Url; +use crate::args::CacheSetting; use crate::cache::DenoDir; -use crate::file_fetcher::CacheSetting; -use crate::fs_util; use crate::http_util::HttpClient; -use crate::progress_bar::ProgressBar; +use crate::util::fs::canonicalize_path; +use crate::util::fs::hard_link_dir_recursive; +use crate::util::path::root_url_to_safe_local_dirname; +use crate::util::progress_bar::ProgressBar; use super::registry::NpmPackageVersionDistInfo; use super::semver::NpmVersion; @@ -162,7 +164,7 @@ impl ReadonlyNpmCache { std::fs::create_dir_all(root_dir) .with_context(|| format!("Error creating {}", root_dir.display()))?; } - Ok(crate::fs_util::canonicalize_path(root_dir)?) + Ok(canonicalize_path(root_dir)?) 
} // this may fail on readonly file systems, so just ignore if so @@ -227,7 +229,7 @@ impl ReadonlyNpmCache { pub fn registry_folder(&self, registry_url: &Url) -> PathBuf { self .root_dir - .join(fs_util::root_url_to_safe_local_dirname(registry_url)) + .join(root_url_to_safe_local_dirname(registry_url)) } pub fn resolve_package_folder_id_from_specifier( @@ -252,7 +254,7 @@ impl ReadonlyNpmCache { .root_dir_url .join(&format!( "{}/", - fs_util::root_url_to_safe_local_dirname(registry_url) + root_url_to_safe_local_dirname(registry_url) .to_string_lossy() .replace('\\', "/") )) @@ -457,12 +459,7 @@ impl NpmCache { with_folder_sync_lock( (id.name.as_str(), &id.version), &package_folder, - || { - fs_util::hard_link_dir_recursive( - &original_package_folder, - &package_folder, - ) - }, + || hard_link_dir_recursive(&original_package_folder, &package_folder), )?; Ok(()) } diff --git a/cli/npm/registry.rs b/cli/npm/registry.rs index c62e6e1e7..9ba565366 100644 --- a/cli/npm/registry.rs +++ b/cli/npm/registry.rs @@ -21,11 +21,11 @@ use deno_core::url::Url; use deno_runtime::colors; use serde::Serialize; -use crate::file_fetcher::CacheSetting; -use crate::fs_util; -use crate::http_cache::CACHE_PERM; +use crate::args::CacheSetting; +use crate::cache::CACHE_PERM; use crate::http_util::HttpClient; -use crate::progress_bar::ProgressBar; +use crate::util::fs::atomic_write_file; +use crate::util::progress_bar::ProgressBar; use super::cache::NpmCache; use super::resolution::NpmVersionMatcher; @@ -405,7 +405,7 @@ impl RealNpmRegistryApiInner { let file_cache_path = self.get_package_file_cache_path(name); let file_text = serde_json::to_string(&package_info)?; std::fs::create_dir_all(file_cache_path.parent().unwrap())?; - fs_util::atomic_write_file(&file_cache_path, file_text, CACHE_PERM)?; + atomic_write_file(&file_cache_path, file_text, CACHE_PERM)?; Ok(()) } diff --git a/cli/npm/resolvers/global.rs b/cli/npm/resolvers/global.rs index 044c889d8..46cfec48f 100644 --- a/cli/npm/resolvers/global.rs +++ b/cli/npm/resolvers/global.rs @@ -16,7 +16,6 @@ use deno_runtime::deno_node::PackageJson; use deno_runtime::deno_node::TYPES_CONDITIONS; use crate::args::Lockfile; -use crate::fs_util; use crate::npm::resolution::NpmResolution; use crate::npm::resolution::NpmResolutionSnapshot; use crate::npm::resolvers::common::cache_packages; @@ -125,7 +124,7 @@ impl InnerNpmPackageResolver for GlobalNpmPackageResolver { fn package_size(&self, package_id: &NpmPackageId) -> Result { let package_folder = self.package_folder(package_id); - Ok(fs_util::dir_size(&package_folder)?) + Ok(crate::util::fs::dir_size(&package_folder)?) 
} fn has_packages(&self) -> bool { diff --git a/cli/npm/resolvers/local.rs b/cli/npm/resolvers/local.rs index ff699f26f..3a9e97433 100644 --- a/cli/npm/resolvers/local.rs +++ b/cli/npm/resolvers/local.rs @@ -10,6 +10,7 @@ use std::path::Path; use std::path::PathBuf; use std::sync::Arc; +use crate::util::fs::symlink_dir; use deno_ast::ModuleSpecifier; use deno_core::anyhow::bail; use deno_core::anyhow::Context; @@ -23,7 +24,6 @@ use deno_runtime::deno_node::TYPES_CONDITIONS; use tokio::task::JoinHandle; use crate::args::Lockfile; -use crate::fs_util; use crate::npm::cache::mixed_case_package_name_encode; use crate::npm::cache::should_sync_download; use crate::npm::cache::NpmPackageCacheFolderId; @@ -34,6 +34,8 @@ use crate::npm::NpmPackageId; use crate::npm::NpmPackageReq; use crate::npm::NpmResolutionPackage; use crate::npm::RealNpmRegistryApi; +use crate::util::fs::copy_dir_recursive; +use crate::util::fs::hard_link_dir_recursive; use super::common::ensure_registry_read_permission; use super::common::types_package_name; @@ -203,7 +205,7 @@ impl InnerNpmPackageResolver for LocalNpmPackageResolver { fn package_size(&self, package_id: &NpmPackageId) -> Result { let package_folder_path = self.get_package_id_folder(package_id)?; - Ok(fs_util::dir_size(&package_folder_path)?) + Ok(crate::util::fs::dir_size(&package_folder_path)?) } fn has_packages(&self) -> bool { @@ -318,7 +320,7 @@ async fn sync_resolution_with_fs( ®istry_url, ); // for now copy, but in the future consider hard linking - fs_util::copy_dir_recursive(&cache_folder, &package_path)?; + copy_dir_recursive(&cache_folder, &package_path)?; // write out a file that indicates this folder has been initialized fs::write(initialized_file, "")?; Ok(()) @@ -356,7 +358,7 @@ async fn sync_resolution_with_fs( .join("node_modules"), &package.id.name, ); - fs_util::hard_link_dir_recursive(&source_path, &package_path)?; + hard_link_dir_recursive(&source_path, &package_path)?; // write out a file that indicates this folder has been initialized fs::write(initialized_file, "")?; } @@ -467,7 +469,7 @@ fn symlink_package_dir( #[cfg(windows)] return junction_or_symlink_dir(old_path, new_path); #[cfg(not(windows))] - fs_util::symlink_dir(old_path, new_path) + symlink_dir(old_path, new_path) } #[cfg(windows)] @@ -477,6 +479,7 @@ fn junction_or_symlink_dir( ) -> Result<(), AnyError> { // Use junctions because they're supported on ntfs file systems without // needing to elevate privileges on Windows + match junction::create(old_path, new_path) { Ok(()) => Ok(()), Err(junction_err) => { @@ -486,7 +489,7 @@ fn junction_or_symlink_dir( log::warn!("Error creating junction. 
{:#}", junction_err); } - match fs_util::symlink_dir(old_path, new_path) { + match symlink_dir(old_path, new_path) { Ok(()) => Ok(()), Err(symlink_err) => bail!( concat!( diff --git a/cli/npm/resolvers/mod.rs b/cli/npm/resolvers/mod.rs index 869874c8b..3cc695523 100644 --- a/cli/npm/resolvers/mod.rs +++ b/cli/npm/resolvers/mod.rs @@ -23,7 +23,7 @@ use std::path::PathBuf; use std::sync::Arc; use crate::args::Lockfile; -use crate::fs_util; +use crate::util::fs::canonicalize_path_maybe_not_exists; use self::common::InnerNpmPackageResolver; use self::local::LocalNpmPackageResolver; @@ -187,7 +187,7 @@ impl NpmPackageResolver { let path = self .inner .resolve_package_folder_from_deno_module(pkg_req)?; - let path = fs_util::canonicalize_path_maybe_not_exists(&path)?; + let path = canonicalize_path_maybe_not_exists(&path)?; log::debug!("Resolved {} to {}", pkg_req, path.display()); Ok(path) } diff --git a/cli/proc_state.rs b/cli/proc_state.rs index 3b7a97573..9fcac2fe0 100644 --- a/cli/proc_state.rs +++ b/cli/proc_state.rs @@ -11,6 +11,7 @@ use crate::cache; use crate::cache::DenoDir; use crate::cache::EmitCache; use crate::cache::FastInsecureHasher; +use crate::cache::HttpCache; use crate::cache::NodeAnalysisCache; use crate::cache::ParsedSourceCache; use crate::cache::TypeCheckCache; @@ -19,7 +20,6 @@ use crate::file_fetcher::FileFetcher; use crate::graph_util::graph_lock_or_exit; use crate::graph_util::GraphData; use crate::graph_util::ModuleEntry; -use crate::http_cache; use crate::http_util::HttpClient; use crate::node; use crate::node::NodeResolution; @@ -28,9 +28,9 @@ use crate::npm::NpmCache; use crate::npm::NpmPackageReference; use crate::npm::NpmPackageResolver; use crate::npm::RealNpmRegistryApi; -use crate::progress_bar::ProgressBar; use crate::resolver::CliResolver; use crate::tools::check; +use crate::util::progress_bar::ProgressBar; use deno_ast::MediaType; use deno_core::anyhow::anyhow; @@ -153,7 +153,7 @@ impl ProcState { let compiled_wasm_module_store = CompiledWasmModuleStore::default(); let dir = cli_options.resolve_deno_dir()?; let deps_cache_location = dir.deps_folder_path(); - let http_cache = http_cache::HttpCache::new(&deps_cache_location); + let http_cache = HttpCache::new(&deps_cache_location); let root_cert_store = cli_options.resolve_root_cert_store()?; let cache_usage = cli_options.cache_setting(); let progress_bar = ProgressBar::default(); diff --git a/cli/progress_bar.rs b/cli/progress_bar.rs deleted file mode 100644 index 5b49fb279..000000000 --- a/cli/progress_bar.rs +++ /dev/null @@ -1,143 +0,0 @@ -// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. 
- -use crate::colors; -use deno_core::parking_lot::Mutex; -use indexmap::IndexSet; -use std::sync::Arc; -use std::time::Duration; - -#[derive(Clone, Debug, Default)] -pub struct ProgressBar(Arc>); - -#[derive(Debug)] -struct ProgressBarInner { - pb: Option, - is_tty: bool, - in_flight: IndexSet, -} - -impl Default for ProgressBarInner { - fn default() -> Self { - Self { - pb: None, - is_tty: colors::is_tty(), - in_flight: IndexSet::default(), - } - } -} - -impl ProgressBarInner { - fn get_or_create_pb(&mut self) -> indicatif::ProgressBar { - if let Some(pb) = self.pb.as_ref() { - return pb.clone(); - } - - let pb = indicatif::ProgressBar::new_spinner(); - pb.enable_steady_tick(Duration::from_millis(120)); - pb.set_prefix("Download"); - pb.set_style( - indicatif::ProgressStyle::with_template( - "{prefix:.green} {spinner:.green} {msg}", - ) - .unwrap() - .tick_strings(&["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"]), - ); - self.pb = Some(pb); - self.pb.as_ref().unwrap().clone() - } - - fn add_in_flight(&mut self, msg: &str) { - if self.in_flight.contains(msg) { - return; - } - - self.in_flight.insert(msg.to_string()); - } - - /// Returns if removed "in-flight" was last entry and progress - /// bar needs to be updated. - fn remove_in_flight(&mut self, msg: &str) -> bool { - if !self.in_flight.contains(msg) { - return false; - } - - let mut is_last = false; - if let Some(last) = self.in_flight.last() { - is_last = last == msg; - } - self.in_flight.remove(msg); - is_last - } - - fn update_progress_bar(&mut self) { - let pb = self.get_or_create_pb(); - if let Some(msg) = self.in_flight.last() { - pb.set_message(msg.clone()); - } - } -} - -pub struct UpdateGuard { - pb: ProgressBar, - msg: String, - noop: bool, -} - -impl Drop for UpdateGuard { - fn drop(&mut self) { - if self.noop { - return; - } - - let mut inner = self.pb.0.lock(); - if inner.remove_in_flight(&self.msg) { - inner.update_progress_bar(); - } - } -} - -impl ProgressBar { - pub fn update(&self, msg: &str) -> UpdateGuard { - let mut guard = UpdateGuard { - pb: self.clone(), - msg: msg.to_string(), - noop: false, - }; - let mut inner = self.0.lock(); - - // If we're not running in TTY we're just gonna fallback - // to using logger crate. - if !inner.is_tty { - log::log!(log::Level::Info, "{} {}", colors::green("Download"), msg); - guard.noop = true; - return guard; - } - - inner.add_in_flight(msg); - inner.update_progress_bar(); - guard - } - - pub fn clear(&self) { - let mut inner = self.0.lock(); - - if let Some(pb) = inner.pb.as_ref() { - pb.finish_and_clear(); - inner.pb = None; - } - } - - pub fn clear_guard(&self) -> ClearGuard { - ClearGuard { pb: self.clone() } - } -} - -pub struct ClearGuard { - pb: ProgressBar, -} - -impl Drop for ClearGuard { - fn drop(&mut self) { - self.pb.clear(); - } -} diff --git a/cli/text_encoding.rs b/cli/text_encoding.rs deleted file mode 100644 index c16a1289d..000000000 --- a/cli/text_encoding.rs +++ /dev/null @@ -1,162 +0,0 @@ -// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. - -use encoding_rs::*; -use std::borrow::Cow; -use std::io::Error; -use std::io::ErrorKind; - -pub const BOM_CHAR: char = '\u{FEFF}'; - -/// Attempts to detect the character encoding of the provided bytes. -/// -/// Supports UTF-8, UTF-16 Little Endian and UTF-16 Big Endian. 
-pub fn detect_charset(bytes: &'_ [u8]) -> &'static str { - const UTF16_LE_BOM: &[u8] = b"\xFF\xFE"; - const UTF16_BE_BOM: &[u8] = b"\xFE\xFF"; - - if bytes.starts_with(UTF16_LE_BOM) { - "utf-16le" - } else if bytes.starts_with(UTF16_BE_BOM) { - "utf-16be" - } else { - // Assume everything else is utf-8 - "utf-8" - } -} - -/// Attempts to convert the provided bytes to a UTF-8 string. -/// -/// Supports all encodings supported by the encoding_rs crate, which includes -/// all encodings specified in the WHATWG Encoding Standard, and only those -/// encodings (see: ). -pub fn convert_to_utf8<'a>( - bytes: &'a [u8], - charset: &'_ str, -) -> Result, Error> { - match Encoding::for_label(charset.as_bytes()) { - Some(encoding) => encoding - .decode_without_bom_handling_and_without_replacement(bytes) - .ok_or_else(|| ErrorKind::InvalidData.into()), - None => Err(Error::new( - ErrorKind::InvalidInput, - format!("Unsupported charset: {}", charset), - )), - } -} - -/// Strips the byte order mark from the provided text if it exists. -pub fn strip_bom(text: &str) -> &str { - if text.starts_with(BOM_CHAR) { - &text[BOM_CHAR.len_utf8()..] - } else { - text - } -} - -static SOURCE_MAP_PREFIX: &str = - "//# sourceMappingURL=data:application/json;base64,"; - -pub fn source_map_from_code(code: &str) -> Option> { - let last_line = code.rsplit(|u| u == '\n').next()?; - if last_line.starts_with(SOURCE_MAP_PREFIX) { - let input = last_line.split_at(SOURCE_MAP_PREFIX.len()).1; - let decoded_map = base64::decode(input) - .expect("Unable to decode source map from emitted file."); - Some(decoded_map) - } else { - None - } -} - -pub fn code_without_source_map(mut code: String) -> String { - if let Some(last_line_index) = code.rfind('\n') { - if code[last_line_index + 1..].starts_with(SOURCE_MAP_PREFIX) { - code.truncate(last_line_index + 1); - code - } else { - code - } - } else { - code - } -} - -#[cfg(test)] -mod tests { - use super::*; - - fn test_detection(test_data: &[u8], expected_charset: &str) { - let detected_charset = detect_charset(test_data); - assert_eq!( - expected_charset.to_lowercase(), - detected_charset.to_lowercase() - ); - } - - #[test] - fn test_detection_utf8_no_bom() { - let test_data = "Hello UTF-8 it is \u{23F0} for Deno!" 
- .to_owned() - .into_bytes(); - test_detection(&test_data, "utf-8"); - } - - #[test] - fn test_detection_utf16_little_endian() { - let test_data = b"\xFF\xFEHello UTF-16LE".to_owned().to_vec(); - test_detection(&test_data, "utf-16le"); - } - - #[test] - fn test_detection_utf16_big_endian() { - let test_data = b"\xFE\xFFHello UTF-16BE".to_owned().to_vec(); - test_detection(&test_data, "utf-16be"); - } - - #[test] - fn test_decoding_unsupported_charset() { - let test_data = Vec::new(); - let result = convert_to_utf8(&test_data, "utf-32le"); - assert!(result.is_err()); - let err = result.expect_err("Err expected"); - assert!(err.kind() == ErrorKind::InvalidInput); - } - - #[test] - fn test_decoding_invalid_utf8() { - let test_data = b"\xFE\xFE\xFF\xFF".to_vec(); - let result = convert_to_utf8(&test_data, "utf-8"); - assert!(result.is_err()); - let err = result.expect_err("Err expected"); - assert!(err.kind() == ErrorKind::InvalidData); - } - - #[test] - fn test_source_without_source_map() { - run_test("", ""); - run_test("\n", "\n"); - run_test("\r\n", "\r\n"); - run_test("a", "a"); - run_test("a\n", "a\n"); - run_test("a\r\n", "a\r\n"); - run_test("a\r\nb", "a\r\nb"); - run_test("a\nb\n", "a\nb\n"); - run_test("a\r\nb\r\n", "a\r\nb\r\n"); - run_test( - "test\n//# sourceMappingURL=data:application/json;base64,test", - "test\n", - ); - run_test( - "test\r\n//# sourceMappingURL=data:application/json;base64,test", - "test\r\n", - ); - run_test( - "\n//# sourceMappingURL=data:application/json;base64,test", - "\n", - ); - - fn run_test(input: &str, output: &str) { - assert_eq!(code_without_source_map(input.to_string()), output); - } - } -} diff --git a/cli/tools/bench.rs b/cli/tools/bench.rs index a81c0a406..eeda3ea1a 100644 --- a/cli/tools/bench.rs +++ b/cli/tools/bench.rs @@ -4,16 +4,16 @@ use crate::args::BenchFlags; use crate::args::Flags; use crate::args::TypeCheckMode; use crate::colors; -use crate::file_watcher; -use crate::file_watcher::ResolutionResult; -use crate::fs_util::collect_specifiers; -use crate::fs_util::is_supported_bench_path; use crate::graph_util::contains_specifier; use crate::graph_util::graph_valid; use crate::ops; use crate::proc_state::ProcState; use crate::tools::test::format_test_error; use crate::tools::test::TestFilter; +use crate::util::file_watcher; +use crate::util::file_watcher::ResolutionResult; +use crate::util::fs::collect_specifiers; +use crate::util::path::is_supported_ext; use crate::worker::create_main_worker_for_test_or_bench; use deno_core::error::generic_error; @@ -32,6 +32,7 @@ use log::Level; use serde::Deserialize; use serde::Serialize; use std::collections::HashSet; +use std::path::Path; use std::path::PathBuf; use tokio::sync::mpsc::unbounded_channel; use tokio::sync::mpsc::UnboundedSender; @@ -469,6 +470,19 @@ async fn bench_specifiers( Ok(()) } +/// Checks if the path has a basename and extension Deno supports for benches. 
+fn is_supported_bench_path(path: &Path) -> bool { + if let Some(name) = path.file_stem() { + let basename = name.to_string_lossy(); + (basename.ends_with("_bench") + || basename.ends_with(".bench") + || basename == "bench") + && is_supported_ext(path) + } else { + false + } +} + pub async fn run_benchmarks( flags: Flags, bench_flags: BenchFlags, diff --git a/cli/tools/coverage/mod.rs b/cli/tools/coverage/mod.rs index b76792b1e..b9bbe14f2 100644 --- a/cli/tools/coverage/mod.rs +++ b/cli/tools/coverage/mod.rs @@ -3,10 +3,10 @@ use crate::args::CoverageFlags; use crate::args::Flags; use crate::colors; -use crate::fs_util::collect_files; use crate::proc_state::ProcState; -use crate::text_encoding::source_map_from_code; use crate::tools::fmt::format_json; +use crate::util::fs::collect_files; +use crate::util::text_encoding::source_map_from_code; use deno_ast::MediaType; use deno_ast::ModuleSpecifier; diff --git a/cli/tools/fmt.rs b/cli/tools/fmt.rs index 9474411b7..721937b8d 100644 --- a/cli/tools/fmt.rs +++ b/cli/tools/fmt.rs @@ -12,13 +12,13 @@ use crate::args::FmtFlags; use crate::args::FmtOptionsConfig; use crate::args::ProseWrap; use crate::colors; -use crate::diff::diff; -use crate::file_watcher; -use crate::file_watcher::ResolutionResult; -use crate::fs_util::collect_files; -use crate::fs_util::get_extension; -use crate::fs_util::specifier_to_file_path; -use crate::text_encoding; +use crate::util::diff::diff; +use crate::util::file_watcher; +use crate::util::file_watcher::ResolutionResult; +use crate::util::fs::collect_files; +use crate::util::path::get_extension; +use crate::util::path::specifier_to_file_path; +use crate::util::text_encoding; use deno_ast::ParsedSource; use deno_core::anyhow::bail; use deno_core::anyhow::Context; diff --git a/cli/tools/info.rs b/cli/tools/info.rs index 38aa40a5e..a81dcb55c 100644 --- a/cli/tools/info.rs +++ b/cli/tools/info.rs @@ -20,7 +20,6 @@ use deno_runtime::colors; use crate::args::Flags; use crate::args::InfoFlags; -use crate::checksum; use crate::display; use crate::npm::NpmPackageId; use crate::npm::NpmPackageReference; @@ -29,6 +28,7 @@ use crate::npm::NpmPackageResolver; use crate::npm::NpmResolutionPackage; use crate::npm::NpmResolutionSnapshot; use crate::proc_state::ProcState; +use crate::util::checksum; pub async fn info(flags: Flags, info_flags: InfoFlags) -> Result<(), AnyError> { let ps = ProcState::build(flags).await?; diff --git a/cli/tools/installer.rs b/cli/tools/installer.rs index b964619d9..6914c9919 100644 --- a/cli/tools/installer.rs +++ b/cli/tools/installer.rs @@ -4,8 +4,8 @@ use crate::args::ConfigFlag; use crate::args::Flags; use crate::args::InstallFlags; use crate::args::TypeCheckMode; -use crate::fs_util; use crate::npm::NpmPackageReference; +use crate::util::fs::canonicalize_path_maybe_not_exists; use deno_core::anyhow::Context; use deno_core::error::generic_error; use deno_core::error::AnyError; @@ -107,9 +107,7 @@ exec deno {} "$@" fn get_installer_root() -> Result { if let Ok(env_dir) = env::var("DENO_INSTALL_ROOT") { if !env_dir.is_empty() { - return fs_util::canonicalize_path_maybe_not_exists(&PathBuf::from( - env_dir, - )); + return canonicalize_path_maybe_not_exists(&PathBuf::from(env_dir)); } } // Note: on Windows, the $HOME environment variable may be set by users or by @@ -167,7 +165,7 @@ pub fn infer_name_from_url(url: &Url) -> Option { pub fn uninstall(name: String, root: Option) -> Result<(), AnyError> { let root = if let Some(root) = root { - fs_util::canonicalize_path_maybe_not_exists(&root)? 
+ canonicalize_path_maybe_not_exists(&root)? } else { get_installer_root()? }; @@ -275,7 +273,7 @@ fn resolve_shim_data( install_flags: &InstallFlags, ) -> Result { let root = if let Some(root) = &install_flags.root { - fs_util::canonicalize_path_maybe_not_exists(root)? + canonicalize_path_maybe_not_exists(root)? } else { get_installer_root()? }; @@ -450,6 +448,7 @@ mod tests { use super::*; use crate::args::ConfigFlag; + use crate::util::fs::canonicalize_path; use std::process::Command; use test_util::testdata_path; use test_util::TempDir; @@ -746,7 +745,7 @@ mod tests { #[test] fn install_npm_lockfile_default() { - let temp_dir = fs_util::canonicalize_path(&env::temp_dir()).unwrap(); + let temp_dir = canonicalize_path(&env::temp_dir()).unwrap(); let shim_data = resolve_shim_data( &Flags { allow_all: true, diff --git a/cli/tools/lint.rs b/cli/tools/lint.rs index bfbff8005..2f7cd5111 100644 --- a/cli/tools/lint.rs +++ b/cli/tools/lint.rs @@ -10,13 +10,13 @@ use crate::args::Flags; use crate::args::LintConfig; use crate::args::LintFlags; use crate::colors; -use crate::file_watcher; -use crate::file_watcher::ResolutionResult; -use crate::fs_util::collect_files; -use crate::fs_util::is_supported_ext; -use crate::fs_util::specifier_to_file_path; use crate::proc_state::ProcState; use crate::tools::fmt::run_parallelized; +use crate::util::file_watcher; +use crate::util::file_watcher::ResolutionResult; +use crate::util::fs::collect_files; +use crate::util::path::is_supported_ext; +use crate::util::path::specifier_to_file_path; use deno_ast::MediaType; use deno_core::anyhow::anyhow; use deno_core::error::generic_error; diff --git a/cli/tools/standalone.rs b/cli/tools/standalone.rs index f173103cb..4436aaa28 100644 --- a/cli/tools/standalone.rs +++ b/cli/tools/standalone.rs @@ -6,9 +6,9 @@ use crate::args::Flags; use crate::args::RunFlags; use crate::args::TypeCheckMode; use crate::cache::DenoDir; -use crate::fs_util; use crate::standalone::Metadata; use crate::standalone::MAGIC_TRAILER; +use crate::util::path::path_has_trailing_slash; use crate::ProcState; use deno_core::anyhow::bail; use deno_core::anyhow::Context; @@ -299,7 +299,7 @@ pub fn resolve_compile_executable_output_path( ) -> Result { let module_specifier = resolve_url_or_path(&compile_flags.source_file)?; compile_flags.output.as_ref().and_then(|output| { - if fs_util::path_has_trailing_slash(output) { + if path_has_trailing_slash(output) { let infer_file_name = infer_name_from_url(&module_specifier).map(PathBuf::from)?; Some(output.join(infer_file_name)) } else { diff --git a/cli/tools/task.rs b/cli/tools/task.rs index 4cbba8661..39d493492 100644 --- a/cli/tools/task.rs +++ b/cli/tools/task.rs @@ -3,8 +3,8 @@ use crate::args::Flags; use crate::args::TaskFlags; use crate::colors; -use crate::fs_util; use crate::proc_state::ProcState; +use crate::util::fs::canonicalize_path; use deno_core::anyhow::bail; use deno_core::anyhow::Context; use deno_core::error::AnyError; @@ -40,7 +40,7 @@ pub async fn execute_script( } let cwd = match task_flags.cwd { - Some(path) => fs_util::canonicalize_path(&PathBuf::from(path))?, + Some(path) => canonicalize_path(&PathBuf::from(path))?, None => config_file_path.parent().unwrap().to_owned(), }; let task_name = task_flags.task; diff --git a/cli/tools/test.rs b/cli/tools/test.rs index 1bb891a1e..07d3f250d 100644 --- a/cli/tools/test.rs +++ b/cli/tools/test.rs @@ -3,20 +3,20 @@ use crate::args::Flags; use crate::args::TestFlags; use crate::args::TypeCheckMode; -use crate::checksum; use crate::colors; use 
crate::display; use crate::file_fetcher::File; -use crate::file_watcher; -use crate::file_watcher::ResolutionResult; -use crate::fs_util::collect_specifiers; -use crate::fs_util::is_supported_test_ext; -use crate::fs_util::is_supported_test_path; -use crate::fs_util::specifier_to_file_path; use crate::graph_util::contains_specifier; use crate::graph_util::graph_valid; use crate::ops; use crate::proc_state::ProcState; +use crate::util::checksum; +use crate::util::file_watcher; +use crate::util::file_watcher::ResolutionResult; +use crate::util::fs::collect_specifiers; +use crate::util::path::get_extension; +use crate::util::path::is_supported_ext; +use crate::util::path::specifier_to_file_path; use crate::worker::create_main_worker_for_test_or_bench; use deno_ast::swc::common::comments::CommentKind; @@ -51,6 +51,7 @@ use std::fmt::Write as _; use std::io::Read; use std::io::Write; use std::num::NonZeroUsize; +use std::path::Path; use std::path::PathBuf; use std::sync::Arc; use std::time::Duration; @@ -1184,6 +1185,44 @@ async fn test_specifiers( Ok(()) } +/// Checks if the path has a basename and extension Deno supports for tests. +fn is_supported_test_path(path: &Path) -> bool { + if let Some(name) = path.file_stem() { + let basename = name.to_string_lossy(); + (basename.ends_with("_test") + || basename.ends_with(".test") + || basename == "test") + && is_supported_ext(path) + } else { + false + } +} + +/// Checks if the path has an extension Deno supports for tests. +fn is_supported_test_ext(path: &Path) -> bool { + if let Some(ext) = get_extension(path) { + matches!( + ext.as_str(), + "ts" + | "tsx" + | "js" + | "jsx" + | "mjs" + | "mts" + | "cjs" + | "cts" + | "md" + | "mkd" + | "mkdn" + | "mdwn" + | "mdown" + | "markdown" + ) + } else { + false + } +} + /// Collects specifiers marking them with the appropriate test mode while maintaining the natural /// input order. 
/// @@ -1667,3 +1706,67 @@ fn start_output_redirect_thread( } }); } + +#[cfg(test)] +mod inner_test { + use std::path::Path; + + use super::*; + + #[test] + fn test_is_supported_test_ext() { + assert!(!is_supported_test_ext(Path::new("tests/subdir/redirects"))); + assert!(is_supported_test_ext(Path::new("README.md"))); + assert!(is_supported_test_ext(Path::new("readme.MD"))); + assert!(is_supported_test_ext(Path::new("lib/typescript.d.ts"))); + assert!(is_supported_test_ext(Path::new( + "testdata/run/001_hello.js" + ))); + assert!(is_supported_test_ext(Path::new( + "testdata/run/002_hello.ts" + ))); + assert!(is_supported_test_ext(Path::new("foo.jsx"))); + assert!(is_supported_test_ext(Path::new("foo.tsx"))); + assert!(is_supported_test_ext(Path::new("foo.TS"))); + assert!(is_supported_test_ext(Path::new("foo.TSX"))); + assert!(is_supported_test_ext(Path::new("foo.JS"))); + assert!(is_supported_test_ext(Path::new("foo.JSX"))); + assert!(is_supported_test_ext(Path::new("foo.mjs"))); + assert!(is_supported_test_ext(Path::new("foo.mts"))); + assert!(is_supported_test_ext(Path::new("foo.cjs"))); + assert!(is_supported_test_ext(Path::new("foo.cts"))); + assert!(!is_supported_test_ext(Path::new("foo.mjsx"))); + assert!(!is_supported_test_ext(Path::new("foo.jsonc"))); + assert!(!is_supported_test_ext(Path::new("foo.JSONC"))); + assert!(!is_supported_test_ext(Path::new("foo.json"))); + assert!(!is_supported_test_ext(Path::new("foo.JsON"))); + } + + #[test] + fn test_is_supported_test_path() { + assert!(is_supported_test_path(Path::new( + "tests/subdir/foo_test.ts" + ))); + assert!(is_supported_test_path(Path::new( + "tests/subdir/foo_test.tsx" + ))); + assert!(is_supported_test_path(Path::new( + "tests/subdir/foo_test.js" + ))); + assert!(is_supported_test_path(Path::new( + "tests/subdir/foo_test.jsx" + ))); + assert!(is_supported_test_path(Path::new("bar/foo.test.ts"))); + assert!(is_supported_test_path(Path::new("bar/foo.test.tsx"))); + assert!(is_supported_test_path(Path::new("bar/foo.test.js"))); + assert!(is_supported_test_path(Path::new("bar/foo.test.jsx"))); + assert!(is_supported_test_path(Path::new("foo/bar/test.js"))); + assert!(is_supported_test_path(Path::new("foo/bar/test.jsx"))); + assert!(is_supported_test_path(Path::new("foo/bar/test.ts"))); + assert!(is_supported_test_path(Path::new("foo/bar/test.tsx"))); + assert!(!is_supported_test_path(Path::new("README.md"))); + assert!(!is_supported_test_path(Path::new("lib/typescript.d.ts"))); + assert!(!is_supported_test_path(Path::new("notatest.js"))); + assert!(!is_supported_test_path(Path::new("NotAtest.ts"))); + } +} diff --git a/cli/tools/vendor/mappings.rs b/cli/tools/vendor/mappings.rs index 543536128..d1152b12b 100644 --- a/cli/tools/vendor/mappings.rs +++ b/cli/tools/vendor/mappings.rs @@ -13,8 +13,8 @@ use deno_graph::ModuleGraph; use deno_graph::Position; use deno_graph::Resolved; -use crate::fs_util::path_with_stem_suffix; -use crate::fs_util::relative_specifier; +use crate::util::path::path_with_stem_suffix; +use crate::util::path::relative_specifier; use super::specifiers::dir_name_for_root; use super::specifiers::get_unique_path; diff --git a/cli/tools/vendor/mod.rs b/cli/tools/vendor/mod.rs index 3fd381b21..a1057d838 100644 --- a/cli/tools/vendor/mod.rs +++ b/cli/tools/vendor/mod.rs @@ -15,11 +15,12 @@ use crate::args::CliOptions; use crate::args::Flags; use crate::args::FmtOptionsConfig; use crate::args::VendorFlags; -use crate::fs_util; -use crate::fs_util::relative_specifier; -use crate::fs_util::specifier_to_file_path; 
use crate::proc_state::ProcState; use crate::tools::fmt::format_json; +use crate::util::fs::canonicalize_path; +use crate::util::fs::resolve_from_cwd; +use crate::util::path::relative_specifier; +use crate::util::path::specifier_to_file_path; mod analyze; mod build; @@ -38,7 +39,7 @@ pub async fn vendor( Some(output_path) => output_path.to_owned(), None => PathBuf::from("vendor/"), }; - let output_dir = fs_util::resolve_from_cwd(&raw_output_dir)?; + let output_dir = resolve_from_cwd(&raw_output_dir)?; validate_output_dir(&output_dir, &vendor_flags)?; validate_options(&mut cli_options, &output_dir)?; let ps = ProcState::from_options(Arc::new(cli_options)).await?; @@ -110,18 +111,17 @@ fn validate_options( if let Some(import_map_path) = options .resolve_import_map_specifier()? .and_then(|p| specifier_to_file_path(&p).ok()) - .and_then(|p| fs_util::canonicalize_path(&p).ok()) + .and_then(|p| canonicalize_path(&p).ok()) { // make the output directory in order to canonicalize it for the check below std::fs::create_dir_all(output_dir)?; - let output_dir = - fs_util::canonicalize_path(output_dir).with_context(|| { - format!("Failed to canonicalize: {}", output_dir.display()) - })?; + let output_dir = canonicalize_path(output_dir).with_context(|| { + format!("Failed to canonicalize: {}", output_dir.display()) + })?; if import_map_path.starts_with(&output_dir) { // canonicalize to make the test for this pass on the CI - let cwd = fs_util::canonicalize_path(&std::env::current_dir()?)?; + let cwd = canonicalize_path(&std::env::current_dir()?)?; // We don't allow using the output directory to help generate the // new state because this may lead to cryptic error messages. log::warn!( diff --git a/cli/tools/vendor/specifiers.rs b/cli/tools/vendor/specifiers.rs index 5d4f98278..9d513cc92 100644 --- a/cli/tools/vendor/specifiers.rs +++ b/cli/tools/vendor/specifiers.rs @@ -8,8 +8,9 @@ use deno_ast::ModuleSpecifier; use deno_core::anyhow::anyhow; use deno_core::error::AnyError; -use crate::fs_util; -use crate::fs_util::path_with_stem_suffix; +use crate::util::path::is_banned_path_char; +use crate::util::path::path_with_stem_suffix; +use crate::util::path::root_url_to_safe_local_dirname; /// Partitions the provided specifiers by the non-path and non-query parts of a specifier. pub fn partition_by_root_specifiers<'a>( @@ -30,7 +31,7 @@ pub fn partition_by_root_specifiers<'a>( /// Gets the directory name to use for the provided root. 
pub fn dir_name_for_root(root: &ModuleSpecifier) -> PathBuf { - fs_util::root_url_to_safe_local_dirname(root) + root_url_to_safe_local_dirname(root) } /// Gets a unique file path given the provided file path @@ -74,13 +75,7 @@ pub fn is_remote_specifier_text(text: &str) -> bool { pub fn sanitize_filepath(text: &str) -> String { text .chars() - .map(|c| { - if fs_util::is_banned_path_char(c) { - '_' - } else { - c - } - }) + .map(|c| if is_banned_path_char(c) { '_' } else { c }) .collect() } diff --git a/cli/tsc/mod.rs b/cli/tsc/mod.rs index a8cb7bcab..38ec2d263 100644 --- a/cli/tsc/mod.rs +++ b/cli/tsc/mod.rs @@ -9,6 +9,7 @@ use crate::node::NodeResolution; use crate::node::NodeResolutionMode; use crate::npm::NpmPackageReference; use crate::npm::NpmPackageResolver; +use crate::util::checksum; use deno_ast::MediaType; use deno_core::anyhow::anyhow; @@ -178,7 +179,7 @@ fn get_maybe_hash( if let Some(source) = maybe_source { let mut data = vec![source.as_bytes().to_owned()]; data.extend_from_slice(hash_data); - Some(crate::checksum::gen(&data)) + Some(checksum::gen(&data)) } else { None } @@ -186,7 +187,7 @@ fn get_maybe_hash( /// Hash the URL so it can be sent to `tsc` in a supportable way fn hash_url(specifier: &ModuleSpecifier, media_type: MediaType) -> String { - let hash = crate::checksum::gen(&[specifier.path().as_bytes()]); + let hash = checksum::gen(&[specifier.path().as_bytes()]); format!( "{}:///{}{}", specifier.scheme(), @@ -365,7 +366,7 @@ fn op_create_hash(s: &mut OpState, args: Value) -> Result { .context("Invalid request from JavaScript for \"op_create_hash\".")?; let mut data = vec![v.data.as_bytes().to_owned()]; data.extend_from_slice(&state.hash_data); - let hash = crate::checksum::gen(&data); + let hash = checksum::gen(&data); Ok(json!({ "hash": hash })) } diff --git a/cli/unix_util.rs b/cli/unix_util.rs deleted file mode 100644 index f282f6cfe..000000000 --- a/cli/unix_util.rs +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. - -/// Raise soft file descriptor limit to hard file descriptor limit. -/// This is the difference between `ulimit -n` and `ulimit -n -H`. -pub fn raise_fd_limit() { - #[cfg(unix)] - // TODO(bartlomieju): - #[allow(clippy::undocumented_unsafe_blocks)] - unsafe { - let mut limits = libc::rlimit { - rlim_cur: 0, - rlim_max: 0, - }; - - if 0 != libc::getrlimit(libc::RLIMIT_NOFILE, &mut limits) { - return; - } - - if limits.rlim_cur == libc::RLIM_INFINITY { - return; - } - - // No hard limit? Do a binary search for the effective soft limit. - if limits.rlim_max == libc::RLIM_INFINITY { - let mut min = limits.rlim_cur; - let mut max = 1 << 20; - - while min + 1 < max { - limits.rlim_cur = min + (max - min) / 2; - match libc::setrlimit(libc::RLIMIT_NOFILE, &limits) { - 0 => min = limits.rlim_cur, - _ => max = limits.rlim_cur, - } - } - - return; - } - - // Raise the soft limit to the hard limit. - if limits.rlim_cur < limits.rlim_max { - limits.rlim_cur = limits.rlim_max; - libc::setrlimit(libc::RLIMIT_NOFILE, &limits); - } - } -} diff --git a/cli/util/checksum.rs b/cli/util/checksum.rs new file mode 100644 index 000000000..c0e41356d --- /dev/null +++ b/cli/util/checksum.rs @@ -0,0 +1,32 @@ +// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. 
+ +use ring::digest::Context; +use ring::digest::SHA256; + +pub fn gen(v: &[impl AsRef<[u8]>]) -> String { + let mut ctx = Context::new(&SHA256); + for src in v { + ctx.update(src.as_ref()); + } + let digest = ctx.finish(); + let out: Vec = digest + .as_ref() + .iter() + .map(|byte| format!("{:02x}", byte)) + .collect(); + out.join("") +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_gen() { + let actual = gen(&[b"hello world"]); + assert_eq!( + actual, + "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9" + ); + } +} diff --git a/cli/util/diff.rs b/cli/util/diff.rs new file mode 100644 index 000000000..048464162 --- /dev/null +++ b/cli/util/diff.rs @@ -0,0 +1,227 @@ +// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. + +use crate::colors; +use dissimilar::{diff as difference, Chunk}; +use std::fmt::Write as _; + +/// Print diff of the same file_path, before and after formatting. +/// +/// Diff format is loosely based on GitHub diff formatting. +pub fn diff(orig_text: &str, edit_text: &str) -> String { + if orig_text == edit_text { + return String::new(); + } + + // normalize newlines as it adds too much noise if they differ + let orig_text = orig_text.replace("\r\n", "\n"); + let edit_text = edit_text.replace("\r\n", "\n"); + + if orig_text == edit_text { + return " | Text differed by line endings.\n".to_string(); + } + + DiffBuilder::build(&orig_text, &edit_text) +} + +struct DiffBuilder { + output: String, + line_number_width: usize, + orig_line: usize, + edit_line: usize, + orig: String, + edit: String, + has_changes: bool, +} + +impl DiffBuilder { + pub fn build(orig_text: &str, edit_text: &str) -> String { + let mut diff_builder = DiffBuilder { + output: String::new(), + orig_line: 1, + edit_line: 1, + orig: String::new(), + edit: String::new(), + has_changes: false, + line_number_width: { + let line_count = std::cmp::max( + orig_text.split('\n').count(), + edit_text.split('\n').count(), + ); + line_count.to_string().chars().count() + }, + }; + + let chunks = difference(orig_text, edit_text); + diff_builder.handle_chunks(chunks); + diff_builder.output + } + + fn handle_chunks<'a>(&'a mut self, chunks: Vec>) { + for chunk in chunks { + match chunk { + Chunk::Delete(s) => { + let split = s.split('\n').enumerate(); + for (i, s) in split { + if i > 0 { + self.orig.push('\n'); + } + self.orig.push_str(&fmt_rem_text_highlight(s)); + } + self.has_changes = true + } + Chunk::Insert(s) => { + let split = s.split('\n').enumerate(); + for (i, s) in split { + if i > 0 { + self.edit.push('\n'); + } + self.edit.push_str(&fmt_add_text_highlight(s)); + } + self.has_changes = true + } + Chunk::Equal(s) => { + let split = s.split('\n').enumerate(); + for (i, s) in split { + if i > 0 { + self.flush_changes(); + } + self.orig.push_str(&fmt_rem_text(s)); + self.edit.push_str(&fmt_add_text(s)); + } + } + } + } + + self.flush_changes(); + } + + fn flush_changes(&mut self) { + if self.has_changes { + self.write_line_diff(); + + self.orig_line += self.orig.split('\n').count(); + self.edit_line += self.edit.split('\n').count(); + self.has_changes = false; + } else { + self.orig_line += 1; + self.edit_line += 1; + } + + self.orig.clear(); + self.edit.clear(); + } + + fn write_line_diff(&mut self) { + let split = self.orig.split('\n').enumerate(); + for (i, s) in split { + write!( + self.output, + "{:width$}{} ", + self.orig_line + i, + colors::gray(" |"), + width = self.line_number_width + ) + .unwrap(); + self.output.push_str(&fmt_rem()); + 
self.output.push_str(s); + self.output.push('\n'); + } + + let split = self.edit.split('\n').enumerate(); + for (i, s) in split { + write!( + self.output, + "{:width$}{} ", + self.edit_line + i, + colors::gray(" |"), + width = self.line_number_width + ) + .unwrap(); + self.output.push_str(&fmt_add()); + self.output.push_str(s); + self.output.push('\n'); + } + } +} + +fn fmt_add() -> String { + colors::green_bold("+").to_string() +} + +fn fmt_add_text(x: &str) -> String { + colors::green(x).to_string() +} + +fn fmt_add_text_highlight(x: &str) -> String { + colors::black_on_green(x).to_string() +} + +fn fmt_rem() -> String { + colors::red_bold("-").to_string() +} + +fn fmt_rem_text(x: &str) -> String { + colors::red(x).to_string() +} + +fn fmt_rem_text_highlight(x: &str) -> String { + colors::white_on_red(x).to_string() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_diff() { + run_test( + "console.log('Hello World')", + "console.log(\"Hello World\");", + concat!( + "1 | -console.log('Hello World')\n", + "1 | +console.log(\"Hello World\");\n", + ), + ); + + run_test( + "\n\n\n\nconsole.log(\n'Hello World'\n)", + "console.log(\n\"Hello World\"\n);", + concat!( + "1 | -\n", + "2 | -\n", + "3 | -\n", + "4 | -\n", + "5 | -console.log(\n", + "1 | +console.log(\n", + "6 | -'Hello World'\n", + "2 | +\"Hello World\"\n", + "7 | -)\n3 | +);\n", + ), + ); + } + + #[test] + fn test_eof_newline_missing() { + run_test( + "test\nsome line text test", + "test\nsome line text test\n", + concat!( + "2 | -some line text test\n", + "2 | +some line text test\n", + "3 | +\n", + ), + ); + } + + #[test] + fn test_newlines_differing() { + run_test("test\n", "test\r\n", " | Text differed by line endings.\n"); + } + + fn run_test(diff_text1: &str, diff_text2: &str, expected_output: &str) { + assert_eq!( + test_util::strip_ansi_codes(&diff(diff_text1, diff_text2,)), + expected_output, + ); + } +} diff --git a/cli/util/display.rs b/cli/util/display.rs new file mode 100644 index 000000000..f13965e28 --- /dev/null +++ b/cli/util/display.rs @@ -0,0 +1,97 @@ +// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. + +use deno_core::error::AnyError; +use deno_core::serde_json; +use std::io::Write; + +/// A function that converts a float to a string the represents a human +/// readable version of that number. +pub fn human_size(size: f64) -> String { + let negative = if size.is_sign_positive() { "" } else { "-" }; + let size = size.abs(); + let units = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]; + if size < 1_f64 { + return format!("{}{}{}", negative, size, "B"); + } + let delimiter = 1024_f64; + let exponent = std::cmp::min( + (size.ln() / delimiter.ln()).floor() as i32, + (units.len() - 1) as i32, + ); + let pretty_bytes = format!("{:.2}", size / delimiter.powi(exponent)) + .parse::() + .unwrap() + * 1_f64; + let unit = units[exponent as usize]; + format!("{}{}{}", negative, pretty_bytes, unit) +} + +/// A function that converts a milisecond elapsed time to a string that +/// represents a human readable version of that time. 
+pub fn human_elapsed(elapsed: u128) -> String { + if elapsed < 1_000 { + return format!("{}ms", elapsed); + } + if elapsed < 1_000 * 60 { + return format!("{}s", elapsed / 1000); + } + + let seconds = elapsed / 1_000; + let minutes = seconds / 60; + let seconds_remainder = seconds % 60; + format!("{}m{}s", minutes, seconds_remainder) +} + +pub fn write_to_stdout_ignore_sigpipe( + bytes: &[u8], +) -> Result<(), std::io::Error> { + use std::io::ErrorKind; + + match std::io::stdout().write_all(bytes) { + Ok(()) => Ok(()), + Err(e) => match e.kind() { + ErrorKind::BrokenPipe => Ok(()), + _ => Err(e), + }, + } +} + +pub fn write_json_to_stdout(value: &T) -> Result<(), AnyError> +where + T: ?Sized + serde::ser::Serialize, +{ + let mut writer = std::io::BufWriter::new(std::io::stdout()); + serde_json::to_writer_pretty(&mut writer, value)?; + writeln!(&mut writer)?; + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_human_size() { + assert_eq!(human_size(1_f64), "1B"); + assert_eq!(human_size((12 * 1024) as f64), "12KB"); + assert_eq!(human_size((24_i64 * 1024 * 1024) as f64), "24MB"); + assert_eq!(human_size((24_i64 * 1024 * 1024 * 1024) as f64), "24GB"); + assert_eq!( + human_size((24_i64 * 1024 * 1024 * 1024 * 1024) as f64), + "24TB" + ); + assert_eq!(human_size(0_f64), "0B"); + assert_eq!(human_size(-10_f64), "-10B"); + } + + #[test] + fn test_human_elapsed() { + assert_eq!(human_elapsed(1), "1ms"); + assert_eq!(human_elapsed(256), "256ms"); + assert_eq!(human_elapsed(1000), "1s"); + assert_eq!(human_elapsed(1001), "1s"); + assert_eq!(human_elapsed(1020), "1s"); + assert_eq!(human_elapsed(70 * 1000), "1m10s"); + assert_eq!(human_elapsed(86 * 1000 + 100), "1m26s"); + } +} diff --git a/cli/util/file_watcher.rs b/cli/util/file_watcher.rs new file mode 100644 index 000000000..5158437a0 --- /dev/null +++ b/cli/util/file_watcher.rs @@ -0,0 +1,374 @@ +// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. + +use crate::colors; +use crate::util::fs::canonicalize_path; + +use deno_core::error::AnyError; +use deno_core::error::JsError; +use deno_core::futures::Future; +use deno_runtime::fmt_errors::format_js_error; +use log::info; +use notify::event::Event as NotifyEvent; +use notify::event::EventKind; +use notify::Error as NotifyError; +use notify::RecommendedWatcher; +use notify::RecursiveMode; +use notify::Watcher; +use std::collections::HashSet; +use std::path::PathBuf; +use std::sync::Arc; +use std::time::Duration; +use tokio::select; +use tokio::sync::mpsc; +use tokio::sync::mpsc::UnboundedReceiver; +use tokio::time::sleep; + +const CLEAR_SCREEN: &str = "\x1B[2J\x1B[1;1H"; +const DEBOUNCE_INTERVAL: Duration = Duration::from_millis(200); + +struct DebouncedReceiver { + // The `recv()` call could be used in a tokio `select!` macro, + // and so we store this state on the struct to ensure we don't + // lose items if a `recv()` never completes + received_items: HashSet, + receiver: UnboundedReceiver>, +} + +impl DebouncedReceiver { + fn new_with_sender() -> (Arc>>, Self) { + let (sender, receiver) = mpsc::unbounded_channel(); + ( + Arc::new(sender), + Self { + receiver, + received_items: HashSet::new(), + }, + ) + } + + async fn recv(&mut self) -> Option> { + if self.received_items.is_empty() { + self + .received_items + .extend(self.receiver.recv().await?.into_iter()); + } + + loop { + select! 
{ + items = self.receiver.recv() => { + self.received_items.extend(items?); + } + _ = sleep(DEBOUNCE_INTERVAL) => { + return Some(self.received_items.drain().collect()); + } + } + } + } +} + +async fn error_handler(watch_future: F) +where + F: Future>, +{ + let result = watch_future.await; + if let Err(err) = result { + let error_string = match err.downcast_ref::() { + Some(e) => format_js_error(e), + None => format!("{:?}", err), + }; + eprintln!( + "{}: {}", + colors::red_bold("error"), + error_string.trim_start_matches("error: ") + ); + } +} + +pub enum ResolutionResult { + Restart { + paths_to_watch: Vec, + result: Result, + }, + Ignore, +} + +async fn next_restart( + resolver: &mut R, + debounced_receiver: &mut DebouncedReceiver, +) -> (Vec, Result) +where + R: FnMut(Option>) -> F, + F: Future>, +{ + loop { + let changed = debounced_receiver.recv().await; + match resolver(changed).await { + ResolutionResult::Ignore => { + log::debug!("File change ignored") + } + ResolutionResult::Restart { + paths_to_watch, + result, + } => { + return (paths_to_watch, result); + } + } + } +} + +pub struct PrintConfig { + /// printing watcher status to terminal. + pub job_name: String, + /// determine whether to clear the terminal screen + pub clear_screen: bool, +} + +fn create_print_after_restart_fn(clear_screen: bool) -> impl Fn() { + move || { + if clear_screen { + eprint!("{}", CLEAR_SCREEN); + } + info!( + "{} File change detected! Restarting!", + colors::intense_blue("Watcher"), + ); + } +} + +/// Creates a file watcher, which will call `resolver` with every file change. +/// +/// - `resolver` is used for resolving file paths to be watched at every restarting +/// of the watcher, and can also return a value to be passed to `operation`. +/// It returns a [`ResolutionResult`], which can either instruct the watcher to restart or ignore the change. +/// This always contains paths to watch; +/// +/// - `operation` is the actual operation we want to run every time the watcher detects file +/// changes. For example, in the case where we would like to bundle, then `operation` would +/// have the logic for it like bundling the code. +pub async fn watch_func( + mut resolver: R, + mut operation: O, + print_config: PrintConfig, +) -> Result<(), AnyError> +where + R: FnMut(Option>) -> F1, + O: FnMut(T) -> F2, + F1: Future>, + F2: Future>, +{ + let (sender, mut receiver) = DebouncedReceiver::new_with_sender(); + + let PrintConfig { + job_name, + clear_screen, + } = print_config; + + // Store previous data. If module resolution fails at some point, the watcher will try to + // continue watching files using these data. + let mut paths_to_watch; + let mut resolution_result; + + let print_after_restart = create_print_after_restart_fn(clear_screen); + + match resolver(None).await { + ResolutionResult::Ignore => { + // The only situation where it makes sense to ignore the initial 'change' + // is if the command isn't supposed to do anything until something changes, + // e.g. a variant of `deno test` which doesn't run the entire test suite to start with, + // but instead does nothing until you make a change. + // + // In that case, this is probably the correct output. 
+ info!( + "{} Waiting for file changes...", + colors::intense_blue("Watcher"), + ); + + let (paths, result) = next_restart(&mut resolver, &mut receiver).await; + paths_to_watch = paths; + resolution_result = result; + + print_after_restart(); + } + ResolutionResult::Restart { + paths_to_watch: paths, + result, + } => { + paths_to_watch = paths; + resolution_result = result; + } + }; + + info!("{} {} started.", colors::intense_blue("Watcher"), job_name,); + + loop { + let mut watcher = new_watcher(sender.clone())?; + add_paths_to_watcher(&mut watcher, &paths_to_watch); + + match resolution_result { + Ok(operation_arg) => { + let fut = error_handler(operation(operation_arg)); + select! { + (paths, result) = next_restart(&mut resolver, &mut receiver) => { + if result.is_ok() { + paths_to_watch = paths; + } + resolution_result = result; + + print_after_restart(); + continue; + }, + _ = fut => {}, + }; + + info!( + "{} {} finished. Restarting on file change...", + colors::intense_blue("Watcher"), + job_name, + ); + } + Err(error) => { + eprintln!("{}: {}", colors::red_bold("error"), error); + info!( + "{} {} failed. Restarting on file change...", + colors::intense_blue("Watcher"), + job_name, + ); + } + } + + let (paths, result) = next_restart(&mut resolver, &mut receiver).await; + if result.is_ok() { + paths_to_watch = paths; + } + resolution_result = result; + + print_after_restart(); + + drop(watcher); + } +} + +/// Creates a file watcher. +/// +/// - `operation` is the actual operation we want to run every time the watcher detects file +/// changes. For example, in the case where we would like to bundle, then `operation` would +/// have the logic for it like bundling the code. +pub async fn watch_func2( + mut paths_to_watch_receiver: UnboundedReceiver>, + mut operation: O, + operation_args: T, + print_config: PrintConfig, +) -> Result<(), AnyError> +where + O: FnMut(T) -> Result, + F: Future>, +{ + let (watcher_sender, mut watcher_receiver) = + DebouncedReceiver::new_with_sender(); + + let PrintConfig { + job_name, + clear_screen, + } = print_config; + + let print_after_restart = create_print_after_restart_fn(clear_screen); + + info!("{} {} started.", colors::intense_blue("Watcher"), job_name,); + + fn consume_paths_to_watch( + watcher: &mut RecommendedWatcher, + receiver: &mut UnboundedReceiver>, + ) { + loop { + match receiver.try_recv() { + Ok(paths) => { + add_paths_to_watcher(watcher, &paths); + } + Err(e) => match e { + mpsc::error::TryRecvError::Empty => { + break; + } + // there must be at least one receiver alive + _ => unreachable!(), + }, + } + } + } + + loop { + let mut watcher = new_watcher(watcher_sender.clone())?; + consume_paths_to_watch(&mut watcher, &mut paths_to_watch_receiver); + + let receiver_future = async { + loop { + let maybe_paths = paths_to_watch_receiver.recv().await; + add_paths_to_watcher(&mut watcher, &maybe_paths.unwrap()); + } + }; + let operation_future = error_handler(operation(operation_args.clone())?); + + select! { + _ = receiver_future => {}, + _ = watcher_receiver.recv() => { + print_after_restart(); + continue; + }, + _ = operation_future => { + // TODO(bartlomieju): print exit code here? + info!( + "{} {} finished. 
Restarting on file change...", + colors::intense_blue("Watcher"), + job_name, + ); + consume_paths_to_watch(&mut watcher, &mut paths_to_watch_receiver); + }, + }; + + let receiver_future = async { + loop { + let maybe_paths = paths_to_watch_receiver.recv().await; + add_paths_to_watcher(&mut watcher, &maybe_paths.unwrap()); + } + }; + select! { + _ = receiver_future => {}, + _ = watcher_receiver.recv() => { + print_after_restart(); + continue; + }, + }; + } +} + +fn new_watcher( + sender: Arc>>, +) -> Result { + let watcher = Watcher::new( + move |res: Result| { + if let Ok(event) = res { + if matches!( + event.kind, + EventKind::Create(_) | EventKind::Modify(_) | EventKind::Remove(_) + ) { + let paths = event + .paths + .iter() + .filter_map(|path| canonicalize_path(path).ok()) + .collect(); + sender.send(paths).unwrap(); + } + } + }, + Default::default(), + )?; + + Ok(watcher) +} + +fn add_paths_to_watcher(watcher: &mut RecommendedWatcher, paths: &[PathBuf]) { + // Ignore any error e.g. `PathNotFound` + for path in paths { + let _ = watcher.watch(path, RecursiveMode::Recursive); + } + log::debug!("Watching paths: {:?}", paths); +} diff --git a/cli/util/fs.rs b/cli/util/fs.rs new file mode 100644 index 000000000..35cdae4fa --- /dev/null +++ b/cli/util/fs.rs @@ -0,0 +1,661 @@ +// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. + +use deno_core::anyhow::Context; +use deno_core::error::AnyError; +pub use deno_core::normalize_path; +use deno_core::ModuleSpecifier; +use deno_runtime::deno_crypto::rand; +use deno_runtime::deno_node::PathClean; +use std::env::current_dir; +use std::fs::OpenOptions; +use std::io::Error; +use std::io::ErrorKind; +use std::io::Write; +use std::path::Path; +use std::path::PathBuf; +use std::time::Duration; +use walkdir::WalkDir; + +use super::path::specifier_to_file_path; + +pub fn atomic_write_file>( + filename: &Path, + data: T, + mode: u32, +) -> std::io::Result<()> { + let rand: String = (0..4) + .map(|_| format!("{:02x}", rand::random::())) + .collect(); + let extension = format!("{}.tmp", rand); + let tmp_file = filename.with_extension(extension); + write_file(&tmp_file, data, mode)?; + std::fs::rename(tmp_file, filename)?; + Ok(()) +} + +pub fn write_file>( + filename: &Path, + data: T, + mode: u32, +) -> std::io::Result<()> { + write_file_2(filename, data, true, mode, true, false) +} + +pub fn write_file_2>( + filename: &Path, + data: T, + update_mode: bool, + mode: u32, + is_create: bool, + is_append: bool, +) -> std::io::Result<()> { + let mut file = OpenOptions::new() + .read(false) + .write(true) + .append(is_append) + .truncate(!is_append) + .create(is_create) + .open(filename)?; + + if update_mode { + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + let mode = mode & 0o777; + let permissions = PermissionsExt::from_mode(mode); + file.set_permissions(permissions)?; + } + #[cfg(not(unix))] + let _ = mode; + } + + file.write_all(data.as_ref()) +} + +/// Similar to `std::fs::canonicalize()` but strips UNC prefixes on Windows. +pub fn canonicalize_path(path: &Path) -> Result { + let path = path.canonicalize()?; + #[cfg(windows)] + return Ok(strip_unc_prefix(path)); + #[cfg(not(windows))] + return Ok(path); +} + +/// Canonicalizes a path which might be non-existent by going up the +/// ancestors until it finds a directory that exists, canonicalizes +/// that path, then adds back the remaining path components. 
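+///
+/// For example, if only `/exists` is on disk, passing `/exists/not_yet/file.txt`
+/// canonicalizes `/exists` and then joins `not_yet/file.txt` back onto the result.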
+/// +/// Note: When using this, you should be aware that a symlink may +/// subsequently be created along this path by some other code. +pub fn canonicalize_path_maybe_not_exists( + path: &Path, +) -> Result { + let path = path.to_path_buf().clean(); + let mut path = path.as_path(); + let mut names_stack = Vec::new(); + loop { + match canonicalize_path(path) { + Ok(mut canonicalized_path) => { + for name in names_stack.into_iter().rev() { + canonicalized_path = canonicalized_path.join(name); + } + return Ok(canonicalized_path); + } + Err(err) if err.kind() == ErrorKind::NotFound => { + names_stack.push(path.file_name().unwrap()); + path = path.parent().unwrap(); + } + Err(err) => return Err(err), + } + } +} + +#[cfg(windows)] +fn strip_unc_prefix(path: PathBuf) -> PathBuf { + use std::path::Component; + use std::path::Prefix; + + let mut components = path.components(); + match components.next() { + Some(Component::Prefix(prefix)) => { + match prefix.kind() { + // \\?\device + Prefix::Verbatim(device) => { + let mut path = PathBuf::new(); + path.push(format!(r"\\{}\", device.to_string_lossy())); + path.extend(components.filter(|c| !matches!(c, Component::RootDir))); + path + } + // \\?\c:\path + Prefix::VerbatimDisk(_) => { + let mut path = PathBuf::new(); + path.push(prefix.as_os_str().to_string_lossy().replace(r"\\?\", "")); + path.extend(components); + path + } + // \\?\UNC\hostname\share_name\path + Prefix::VerbatimUNC(hostname, share_name) => { + let mut path = PathBuf::new(); + path.push(format!( + r"\\{}\{}\", + hostname.to_string_lossy(), + share_name.to_string_lossy() + )); + path.extend(components.filter(|c| !matches!(c, Component::RootDir))); + path + } + _ => path, + } + } + _ => path, + } +} + +pub fn resolve_from_cwd(path: &Path) -> Result { + let resolved_path = if path.is_absolute() { + path.to_owned() + } else { + let cwd = + current_dir().context("Failed to get current working directory")?; + cwd.join(path) + }; + + Ok(normalize_path(&resolved_path)) +} + +/// Collects file paths that satisfy the given predicate, by recursively walking `files`. +/// If the walker visits a path that is listed in `ignore`, it skips descending into the directory. +pub fn collect_files
<P>
( + files: &[PathBuf], + ignore: &[PathBuf], + predicate: P, +) -> Result, AnyError> +where + P: Fn(&Path) -> bool, +{ + let mut target_files = Vec::new(); + + // retain only the paths which exist and ignore the rest + let canonicalized_ignore: Vec = ignore + .iter() + .filter_map(|i| canonicalize_path(i).ok()) + .collect(); + + for file in files { + for entry in WalkDir::new(file) + .into_iter() + .filter_entry(|e| { + canonicalize_path(e.path()).map_or(false, |c| { + !canonicalized_ignore.iter().any(|i| c.starts_with(i)) + }) + }) + .filter_map(|e| match e { + Ok(e) if !e.file_type().is_dir() && predicate(e.path()) => Some(e), + _ => None, + }) + { + target_files.push(canonicalize_path(entry.path())?) + } + } + + Ok(target_files) +} + +/// Collects module specifiers that satisfy the given predicate as a file path, by recursively walking `include`. +/// Specifiers that start with http and https are left intact. +pub fn collect_specifiers
<P>
( + include: Vec, + ignore: &[PathBuf], + predicate: P, +) -> Result, AnyError> +where + P: Fn(&Path) -> bool, +{ + let mut prepared = vec![]; + + let root_path = current_dir()?; + for path in include { + let lowercase_path = path.to_lowercase(); + if lowercase_path.starts_with("http://") + || lowercase_path.starts_with("https://") + { + let url = ModuleSpecifier::parse(&path)?; + prepared.push(url); + continue; + } + + let p = if lowercase_path.starts_with("file://") { + specifier_to_file_path(&ModuleSpecifier::parse(&path)?)? + } else { + root_path.join(path) + }; + let p = normalize_path(&p); + if p.is_dir() { + let test_files = collect_files(&[p], ignore, &predicate).unwrap(); + let mut test_files_as_urls = test_files + .iter() + .map(|f| ModuleSpecifier::from_file_path(f).unwrap()) + .collect::>(); + + test_files_as_urls.sort(); + prepared.extend(test_files_as_urls); + } else { + let url = ModuleSpecifier::from_file_path(p).unwrap(); + prepared.push(url); + } + } + + Ok(prepared) +} + +/// Asynchronously removes a directory and all its descendants, but does not error +/// when the directory does not exist. +pub async fn remove_dir_all_if_exists(path: &Path) -> std::io::Result<()> { + let result = tokio::fs::remove_dir_all(path).await; + match result { + Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(()), + _ => result, + } +} + +/// Copies a directory to another directory. +/// +/// Note: Does not handle symlinks. +pub fn copy_dir_recursive(from: &Path, to: &Path) -> Result<(), AnyError> { + std::fs::create_dir_all(to) + .with_context(|| format!("Creating {}", to.display()))?; + let read_dir = std::fs::read_dir(from) + .with_context(|| format!("Reading {}", from.display()))?; + + for entry in read_dir { + let entry = entry?; + let file_type = entry.file_type()?; + let new_from = from.join(entry.file_name()); + let new_to = to.join(entry.file_name()); + + if file_type.is_dir() { + copy_dir_recursive(&new_from, &new_to).with_context(|| { + format!("Dir {} to {}", new_from.display(), new_to.display()) + })?; + } else if file_type.is_file() { + std::fs::copy(&new_from, &new_to).with_context(|| { + format!("Copying {} to {}", new_from.display(), new_to.display()) + })?; + } + } + + Ok(()) +} + +/// Hardlinks the files in one directory to another directory. +/// +/// Note: Does not handle symlinks. +pub fn hard_link_dir_recursive(from: &Path, to: &Path) -> Result<(), AnyError> { + std::fs::create_dir_all(to) + .with_context(|| format!("Creating {}", to.display()))?; + let read_dir = std::fs::read_dir(from) + .with_context(|| format!("Reading {}", from.display()))?; + + for entry in read_dir { + let entry = entry?; + let file_type = entry.file_type()?; + let new_from = from.join(entry.file_name()); + let new_to = to.join(entry.file_name()); + + if file_type.is_dir() { + hard_link_dir_recursive(&new_from, &new_to).with_context(|| { + format!("Dir {} to {}", new_from.display(), new_to.display()) + })?; + } else if file_type.is_file() { + // note: chance for race conditions here between attempting to create, + // then removing, then attempting to create. There doesn't seem to be + // a way to hard link with overwriting in Rust, but maybe there is some + // way with platform specific code. The workaround here is to handle + // scenarios where something else might create or remove files. 
+ if let Err(err) = std::fs::hard_link(&new_from, &new_to) { + if err.kind() == ErrorKind::AlreadyExists { + if let Err(err) = std::fs::remove_file(&new_to) { + if err.kind() == ErrorKind::NotFound { + // Assume another process/thread created this hard link to the file we are wanting + // to remove then sleep a little bit to let the other process/thread move ahead + // faster to reduce contention. + std::thread::sleep(Duration::from_millis(10)); + } else { + return Err(err).with_context(|| { + format!( + "Removing file to hard link {} to {}", + new_from.display(), + new_to.display() + ) + }); + } + } + + // Always attempt to recreate the hardlink. In contention scenarios, the other process + // might have been killed or exited after removing the file, but before creating the hardlink + if let Err(err) = std::fs::hard_link(&new_from, &new_to) { + // Assume another process/thread created this hard link to the file we are wanting + // to now create then sleep a little bit to let the other process/thread move ahead + // faster to reduce contention. + if err.kind() == ErrorKind::AlreadyExists { + std::thread::sleep(Duration::from_millis(10)); + } else { + return Err(err).with_context(|| { + format!( + "Hard linking {} to {}", + new_from.display(), + new_to.display() + ) + }); + } + } + } else { + return Err(err).with_context(|| { + format!( + "Hard linking {} to {}", + new_from.display(), + new_to.display() + ) + }); + } + } + } + } + + Ok(()) +} + +pub fn symlink_dir(oldpath: &Path, newpath: &Path) -> Result<(), AnyError> { + let err_mapper = |err: Error| { + Error::new( + err.kind(), + format!( + "{}, symlink '{}' -> '{}'", + err, + oldpath.display(), + newpath.display() + ), + ) + }; + #[cfg(unix)] + { + use std::os::unix::fs::symlink; + symlink(oldpath, newpath).map_err(err_mapper)?; + } + #[cfg(not(unix))] + { + use std::os::windows::fs::symlink_dir; + symlink_dir(oldpath, newpath).map_err(err_mapper)?; + } + Ok(()) +} + +/// Gets the total size (in bytes) of a directory. +pub fn dir_size(path: &Path) -> std::io::Result { + let entries = std::fs::read_dir(path)?; + let mut total = 0; + for entry in entries { + let entry = entry?; + total += match entry.metadata()? { + data if data.is_dir() => dir_size(&entry.path())?, + data => data.len(), + }; + } + Ok(total) +} + +#[cfg(test)] +mod tests { + use super::*; + use test_util::TempDir; + + #[test] + fn resolve_from_cwd_child() { + let cwd = current_dir().unwrap(); + assert_eq!(resolve_from_cwd(Path::new("a")).unwrap(), cwd.join("a")); + } + + #[test] + fn resolve_from_cwd_dot() { + let cwd = current_dir().unwrap(); + assert_eq!(resolve_from_cwd(Path::new(".")).unwrap(), cwd); + } + + #[test] + fn resolve_from_cwd_parent() { + let cwd = current_dir().unwrap(); + assert_eq!(resolve_from_cwd(Path::new("a/..")).unwrap(), cwd); + } + + #[test] + fn test_normalize_path() { + assert_eq!(normalize_path(Path::new("a/../b")), PathBuf::from("b")); + assert_eq!(normalize_path(Path::new("a/./b/")), PathBuf::from("a/b/")); + assert_eq!( + normalize_path(Path::new("a/./b/../c")), + PathBuf::from("a/c") + ); + + if cfg!(windows) { + assert_eq!( + normalize_path(Path::new("C:\\a\\.\\b\\..\\c")), + PathBuf::from("C:\\a\\c") + ); + } + } + + // TODO: Get a good expected value here for Windows. 
+ #[cfg(not(windows))] + #[test] + fn resolve_from_cwd_absolute() { + let expected = Path::new("/a"); + assert_eq!(resolve_from_cwd(expected).unwrap(), expected); + } + + #[test] + fn test_collect_files() { + fn create_files(dir_path: &Path, files: &[&str]) { + std::fs::create_dir(dir_path).expect("Failed to create directory"); + for f in files { + let path = dir_path.join(f); + std::fs::write(path, "").expect("Failed to create file"); + } + } + + // dir.ts + // ├── a.ts + // ├── b.js + // ├── child + // │ ├── e.mjs + // │ ├── f.mjsx + // │ ├── .foo.TS + // │ └── README.md + // ├── c.tsx + // ├── d.jsx + // └── ignore + // ├── g.d.ts + // └── .gitignore + + let t = TempDir::new(); + + let root_dir_path = t.path().join("dir.ts"); + let root_dir_files = ["a.ts", "b.js", "c.tsx", "d.jsx"]; + create_files(&root_dir_path, &root_dir_files); + + let child_dir_path = root_dir_path.join("child"); + let child_dir_files = ["e.mjs", "f.mjsx", ".foo.TS", "README.md"]; + create_files(&child_dir_path, &child_dir_files); + + let ignore_dir_path = root_dir_path.join("ignore"); + let ignore_dir_files = ["g.d.ts", ".gitignore"]; + create_files(&ignore_dir_path, &ignore_dir_files); + + let result = collect_files(&[root_dir_path], &[ignore_dir_path], |path| { + // exclude dotfiles + path + .file_name() + .and_then(|f| f.to_str()) + .map_or(false, |f| !f.starts_with('.')) + }) + .unwrap(); + let expected = [ + "a.ts", + "b.js", + "e.mjs", + "f.mjsx", + "README.md", + "c.tsx", + "d.jsx", + ]; + for e in expected.iter() { + assert!(result.iter().any(|r| r.ends_with(e))); + } + assert_eq!(result.len(), expected.len()); + } + + #[test] + fn test_collect_specifiers() { + fn create_files(dir_path: &Path, files: &[&str]) { + std::fs::create_dir(dir_path).expect("Failed to create directory"); + for f in files { + let path = dir_path.join(f); + std::fs::write(path, "").expect("Failed to create file"); + } + } + + // dir.ts + // ├── a.ts + // ├── b.js + // ├── child + // │ ├── e.mjs + // │ ├── f.mjsx + // │ ├── .foo.TS + // │ └── README.md + // ├── c.tsx + // ├── d.jsx + // └── ignore + // ├── g.d.ts + // └── .gitignore + + let t = TempDir::new(); + + let root_dir_path = t.path().join("dir.ts"); + let root_dir_files = ["a.ts", "b.js", "c.tsx", "d.jsx"]; + create_files(&root_dir_path, &root_dir_files); + + let child_dir_path = root_dir_path.join("child"); + let child_dir_files = ["e.mjs", "f.mjsx", ".foo.TS", "README.md"]; + create_files(&child_dir_path, &child_dir_files); + + let ignore_dir_path = root_dir_path.join("ignore"); + let ignore_dir_files = ["g.d.ts", ".gitignore"]; + create_files(&ignore_dir_path, &ignore_dir_files); + + let predicate = |path: &Path| { + // exclude dotfiles + path + .file_name() + .and_then(|f| f.to_str()) + .map_or(false, |f| !f.starts_with('.')) + }; + + let result = collect_specifiers( + vec![ + "http://localhost:8080".to_string(), + root_dir_path.to_str().unwrap().to_string(), + "https://localhost:8080".to_string(), + ], + &[ignore_dir_path], + predicate, + ) + .unwrap(); + + let root_dir_url = ModuleSpecifier::from_file_path( + canonicalize_path(&root_dir_path).unwrap(), + ) + .unwrap() + .to_string(); + let expected: Vec = [ + "http://localhost:8080", + &format!("{}/a.ts", root_dir_url), + &format!("{}/b.js", root_dir_url), + &format!("{}/c.tsx", root_dir_url), + &format!("{}/child/README.md", root_dir_url), + &format!("{}/child/e.mjs", root_dir_url), + &format!("{}/child/f.mjsx", root_dir_url), + &format!("{}/d.jsx", root_dir_url), + "https://localhost:8080", + ] + .iter() + .map(|f| 
ModuleSpecifier::parse(f).unwrap()) + .collect::>(); + + assert_eq!(result, expected); + + let scheme = if cfg!(target_os = "windows") { + "file:///" + } else { + "file://" + }; + let result = collect_specifiers( + vec![format!( + "{}{}", + scheme, + root_dir_path + .join("child") + .to_str() + .unwrap() + .replace('\\', "/") + )], + &[], + predicate, + ) + .unwrap(); + + let expected: Vec = [ + &format!("{}/child/README.md", root_dir_url), + &format!("{}/child/e.mjs", root_dir_url), + &format!("{}/child/f.mjsx", root_dir_url), + ] + .iter() + .map(|f| ModuleSpecifier::parse(f).unwrap()) + .collect::>(); + + assert_eq!(result, expected); + } + + #[cfg(windows)] + #[test] + fn test_strip_unc_prefix() { + run_test(r"C:\", r"C:\"); + run_test(r"C:\test\file.txt", r"C:\test\file.txt"); + + run_test(r"\\?\C:\", r"C:\"); + run_test(r"\\?\C:\test\file.txt", r"C:\test\file.txt"); + + run_test(r"\\.\C:\", r"\\.\C:\"); + run_test(r"\\.\C:\Test\file.txt", r"\\.\C:\Test\file.txt"); + + run_test(r"\\?\UNC\localhost\", r"\\localhost"); + run_test(r"\\?\UNC\localhost\c$\", r"\\localhost\c$"); + run_test( + r"\\?\UNC\localhost\c$\Windows\file.txt", + r"\\localhost\c$\Windows\file.txt", + ); + run_test(r"\\?\UNC\wsl$\deno.json", r"\\wsl$\deno.json"); + + run_test(r"\\?\server1", r"\\server1"); + run_test(r"\\?\server1\e$\", r"\\server1\e$\"); + run_test( + r"\\?\server1\e$\test\file.txt", + r"\\server1\e$\test\file.txt", + ); + + fn run_test(input: &str, expected: &str) { + assert_eq!( + strip_unc_prefix(PathBuf::from(input)), + PathBuf::from(expected) + ); + } + } +} diff --git a/cli/util/logger.rs b/cli/util/logger.rs new file mode 100644 index 000000000..caa027c04 --- /dev/null +++ b/cli/util/logger.rs @@ -0,0 +1,79 @@ +// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. 
+ +use std::io::Write; + +struct CliLogger(env_logger::Logger); + +impl CliLogger { + pub fn new(logger: env_logger::Logger) -> Self { + Self(logger) + } + + pub fn filter(&self) -> log::LevelFilter { + self.0.filter() + } +} + +impl log::Log for CliLogger { + fn enabled(&self, metadata: &log::Metadata) -> bool { + self.0.enabled(metadata) + } + + fn log(&self, record: &log::Record) { + if self.enabled(record.metadata()) { + self.0.log(record); + } + } + + fn flush(&self) { + self.0.flush(); + } +} + +pub fn init(maybe_level: Option) { + let log_level = maybe_level.unwrap_or(log::Level::Info); + let logger = env_logger::Builder::from_env( + env_logger::Env::default() + .default_filter_or(log_level.to_level_filter().to_string()), + ) + // https://github.com/denoland/deno/issues/6641 + .filter_module("rustyline", log::LevelFilter::Off) + // wgpu crates (gfx_backend), have a lot of useless INFO and WARN logs + .filter_module("wgpu", log::LevelFilter::Error) + .filter_module("gfx", log::LevelFilter::Error) + // used to make available the lsp_debug which is then filtered out at runtime + // in the cli logger + .filter_module("deno::lsp::performance", log::LevelFilter::Debug) + .format(|buf, record| { + let mut target = record.target().to_string(); + if let Some(line_no) = record.line() { + target.push(':'); + target.push_str(&line_no.to_string()); + } + if record.level() <= log::Level::Info + || (record.target() == "deno::lsp::performance" + && record.level() == log::Level::Debug) + { + // Print ERROR, WARN, INFO and lsp_debug logs as they are + writeln!(buf, "{}", record.args()) + } else { + // Add prefix to DEBUG or TRACE logs + writeln!( + buf, + "{} RS - {} - {}", + record.level(), + target, + record.args() + ) + } + }) + .build(); + + let cli_logger = CliLogger::new(logger); + let max_level = cli_logger.filter(); + let r = log::set_boxed_logger(Box::new(cli_logger)); + if r.is_ok() { + log::set_max_level(max_level); + } + r.expect("Could not install logger."); +} diff --git a/cli/util/mod.rs b/cli/util/mod.rs new file mode 100644 index 000000000..176991d32 --- /dev/null +++ b/cli/util/mod.rs @@ -0,0 +1,14 @@ +// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. + +// Note: Only add code in this folder that has no application specific logic +pub mod checksum; +pub mod diff; +pub mod display; +pub mod file_watcher; +pub mod fs; +pub mod logger; +pub mod path; +pub mod progress_bar; +pub mod text_encoding; +pub mod unix; +pub mod windows; diff --git a/cli/util/path.rs b/cli/util/path.rs new file mode 100644 index 000000000..6df982f4e --- /dev/null +++ b/cli/util/path.rs @@ -0,0 +1,452 @@ +use std::borrow::Cow; +use std::path::Path; +use std::path::PathBuf; + +use deno_ast::ModuleSpecifier; +use deno_core::error::uri_error; +use deno_core::error::AnyError; + +/// Checks if the path has extension Deno supports. +pub fn is_supported_ext(path: &Path) -> bool { + if let Some(ext) = get_extension(path) { + matches!( + ext.as_str(), + "ts" | "tsx" | "js" | "jsx" | "mjs" | "mts" | "cjs" | "cts" + ) + } else { + false + } +} + +/// Get the extension of a file in lowercase. +pub fn get_extension(file_path: &Path) -> Option { + return file_path + .extension() + .and_then(|e| e.to_str()) + .map(|e| e.to_lowercase()); +} + +/// Attempts to convert a specifier to a file path. By default, uses the Url +/// crate's `to_file_path()` method, but falls back to try and resolve unix-style +/// paths on Windows. 
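+///
+/// For example, `file:///dir/test%20test/test.txt` maps to
+/// `/dir/test test/test.txt` (see the tests below).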
+pub fn specifier_to_file_path( + specifier: &ModuleSpecifier, +) -> Result { + let result = if cfg!(windows) { + match specifier.to_file_path() { + Ok(path) => Ok(path), + Err(()) => { + // This might be a unix-style path which is used in the tests even on Windows. + // Attempt to see if we can convert it to a `PathBuf`. This code should be removed + // once/if https://github.com/servo/rust-url/issues/730 is implemented. + if specifier.scheme() == "file" + && specifier.host().is_none() + && specifier.port().is_none() + && specifier.path_segments().is_some() + { + let path_str = specifier.path(); + match String::from_utf8( + percent_encoding::percent_decode(path_str.as_bytes()).collect(), + ) { + Ok(path_str) => Ok(PathBuf::from(path_str)), + Err(_) => Err(()), + } + } else { + Err(()) + } + } + } + } else { + specifier.to_file_path() + }; + match result { + Ok(path) => Ok(path), + Err(()) => Err(uri_error(format!( + "Invalid file path.\n Specifier: {}", + specifier + ))), + } +} + +/// Ensures a specifier that will definitely be a directory has a trailing slash. +pub fn ensure_directory_specifier( + mut specifier: ModuleSpecifier, +) -> ModuleSpecifier { + let path = specifier.path(); + if !path.ends_with('/') { + let new_path = format!("{}/", path); + specifier.set_path(&new_path); + } + specifier +} + +/// Gets the parent of this module specifier. +pub fn specifier_parent(specifier: &ModuleSpecifier) -> ModuleSpecifier { + let mut specifier = specifier.clone(); + // don't use specifier.segments() because it will strip the leading slash + let mut segments = specifier.path().split('/').collect::>(); + if segments.iter().all(|s| s.is_empty()) { + return specifier; + } + if let Some(last) = segments.last() { + if last.is_empty() { + segments.pop(); + } + segments.pop(); + let new_path = format!("{}/", segments.join("/")); + specifier.set_path(&new_path); + } + specifier +} + +/// `from.make_relative(to)` but with fixes. +pub fn relative_specifier( + from: &ModuleSpecifier, + to: &ModuleSpecifier, +) -> Option { + let is_dir = to.path().ends_with('/'); + + if is_dir && from == to { + return Some("./".to_string()); + } + + // workaround using parent directory until https://github.com/servo/rust-url/pull/754 is merged + let from = if !from.path().ends_with('/') { + if let Some(end_slash) = from.path().rfind('/') { + let mut new_from = from.clone(); + new_from.set_path(&from.path()[..end_slash + 1]); + Cow::Owned(new_from) + } else { + Cow::Borrowed(from) + } + } else { + Cow::Borrowed(from) + }; + + // workaround for url crate not adding a trailing slash for a directory + // it seems to be fixed once a version greater than 2.2.2 is released + let mut text = from.make_relative(to)?; + if is_dir && !text.ends_with('/') && to.query().is_none() { + text.push('/'); + } + + Some(if text.starts_with("../") || text.starts_with("./") { + text + } else { + format!("./{}", text) + }) +} + +/// This function checks if input path has trailing slash or not. If input path +/// has trailing slash it will return true else it will return false. +pub fn path_has_trailing_slash(path: &Path) -> bool { + if let Some(path_str) = path.to_str() { + if cfg!(windows) { + path_str.ends_with('\\') + } else { + path_str.ends_with('/') + } + } else { + false + } +} + +/// Gets a path with the specified file stem suffix. +/// +/// Ex. 
`file.ts` with suffix `_2` returns `file_2.ts` +pub fn path_with_stem_suffix(path: &Path, suffix: &str) -> PathBuf { + if let Some(file_name) = path.file_name().map(|f| f.to_string_lossy()) { + if let Some(file_stem) = path.file_stem().map(|f| f.to_string_lossy()) { + if let Some(ext) = path.extension().map(|f| f.to_string_lossy()) { + return if file_stem.to_lowercase().ends_with(".d") { + path.with_file_name(format!( + "{}{}.{}.{}", + &file_stem[..file_stem.len() - ".d".len()], + suffix, + // maintain casing + &file_stem[file_stem.len() - "d".len()..], + ext + )) + } else { + path.with_file_name(format!("{}{}.{}", file_stem, suffix, ext)) + }; + } + } + + path.with_file_name(format!("{}{}", file_name, suffix)) + } else { + path.with_file_name(suffix) + } +} + +/// Gets if the provided character is not supported on all +/// kinds of file systems. +pub fn is_banned_path_char(c: char) -> bool { + matches!(c, '<' | '>' | ':' | '"' | '|' | '?' | '*') +} + +/// Gets a safe local directory name for the provided url. +/// +/// For example: +/// https://deno.land:8080/path -> deno.land_8080/path +pub fn root_url_to_safe_local_dirname(root: &ModuleSpecifier) -> PathBuf { + fn sanitize_segment(text: &str) -> String { + text + .chars() + .map(|c| if is_banned_segment_char(c) { '_' } else { c }) + .collect() + } + + fn is_banned_segment_char(c: char) -> bool { + matches!(c, '/' | '\\') || is_banned_path_char(c) + } + + let mut result = String::new(); + if let Some(domain) = root.domain() { + result.push_str(&sanitize_segment(domain)); + } + if let Some(port) = root.port() { + if !result.is_empty() { + result.push('_'); + } + result.push_str(&port.to_string()); + } + let mut result = PathBuf::from(result); + if let Some(segments) = root.path_segments() { + for segment in segments.filter(|s| !s.is_empty()) { + result = result.join(sanitize_segment(segment)); + } + } + + result +} + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn test_is_supported_ext() { + assert!(!is_supported_ext(Path::new("tests/subdir/redirects"))); + assert!(!is_supported_ext(Path::new("README.md"))); + assert!(is_supported_ext(Path::new("lib/typescript.d.ts"))); + assert!(is_supported_ext(Path::new("testdata/run/001_hello.js"))); + assert!(is_supported_ext(Path::new("testdata/run/002_hello.ts"))); + assert!(is_supported_ext(Path::new("foo.jsx"))); + assert!(is_supported_ext(Path::new("foo.tsx"))); + assert!(is_supported_ext(Path::new("foo.TS"))); + assert!(is_supported_ext(Path::new("foo.TSX"))); + assert!(is_supported_ext(Path::new("foo.JS"))); + assert!(is_supported_ext(Path::new("foo.JSX"))); + assert!(is_supported_ext(Path::new("foo.mjs"))); + assert!(is_supported_ext(Path::new("foo.mts"))); + assert!(is_supported_ext(Path::new("foo.cjs"))); + assert!(is_supported_ext(Path::new("foo.cts"))); + assert!(!is_supported_ext(Path::new("foo.mjsx"))); + } + + #[test] + fn test_specifier_to_file_path() { + run_success_test("file:///", "/"); + run_success_test("file:///test", "/test"); + run_success_test("file:///dir/test/test.txt", "/dir/test/test.txt"); + run_success_test( + "file:///dir/test%20test/test.txt", + "/dir/test test/test.txt", + ); + + fn run_success_test(specifier: &str, expected_path: &str) { + let result = + specifier_to_file_path(&ModuleSpecifier::parse(specifier).unwrap()) + .unwrap(); + assert_eq!(result, PathBuf::from(expected_path)); + } + } + + #[test] + fn test_ensure_directory_specifier() { + run_test("file:///", "file:///"); + run_test("file:///test", "file:///test/"); + run_test("file:///test/", 
"file:///test/"); + run_test("file:///test/other", "file:///test/other/"); + run_test("file:///test/other/", "file:///test/other/"); + + fn run_test(specifier: &str, expected: &str) { + let result = + ensure_directory_specifier(ModuleSpecifier::parse(specifier).unwrap()); + assert_eq!(result.to_string(), expected); + } + } + + #[test] + fn test_specifier_parent() { + run_test("file:///", "file:///"); + run_test("file:///test", "file:///"); + run_test("file:///test/", "file:///"); + run_test("file:///test/other", "file:///test/"); + run_test("file:///test/other.txt", "file:///test/"); + run_test("file:///test/other/", "file:///test/"); + + fn run_test(specifier: &str, expected: &str) { + let result = + specifier_parent(&ModuleSpecifier::parse(specifier).unwrap()); + assert_eq!(result.to_string(), expected); + } + } + + #[test] + fn test_relative_specifier() { + let fixtures: Vec<(&str, &str, Option<&str>)> = vec![ + ("file:///from", "file:///to", Some("./to")), + ("file:///from", "file:///from/other", Some("./from/other")), + ("file:///from", "file:///from/other/", Some("./from/other/")), + ("file:///from", "file:///other/from", Some("./other/from")), + ("file:///from/", "file:///other/from", Some("../other/from")), + ("file:///from", "file:///other/from/", Some("./other/from/")), + ( + "file:///from", + "file:///to/other.txt", + Some("./to/other.txt"), + ), + ( + "file:///from/test", + "file:///to/other.txt", + Some("../to/other.txt"), + ), + ( + "file:///from/other.txt", + "file:///to/other.txt", + Some("../to/other.txt"), + ), + ( + "https://deno.land/x/a/b/d.ts", + "https://deno.land/x/a/b/c.ts", + Some("./c.ts"), + ), + ( + "https://deno.land/x/a/b/d.ts", + "https://deno.land/x/a/c.ts", + Some("../c.ts"), + ), + ( + "https://deno.land/x/a/b/d.ts", + "https://deno.land/x/a/b/c/d.ts", + Some("./c/d.ts"), + ), + ( + "https://deno.land/x/a/b/c/", + "https://deno.land/x/a/b/c/d.ts", + Some("./d.ts"), + ), + ( + "https://deno.land/x/a/b/c/", + "https://deno.land/x/a/b/c/d/e.ts", + Some("./d/e.ts"), + ), + ( + "https://deno.land/x/a/b/c/f.ts", + "https://deno.land/x/a/b/c/d/e.ts", + Some("./d/e.ts"), + ), + ( + "https://deno.land/x/a/b/d.ts", + "https://deno.land/x/a/c.ts?foo=bar", + Some("../c.ts?foo=bar"), + ), + ( + "https://deno.land/x/a/b/d.ts?foo=bar", + "https://deno.land/x/a/b/c.ts", + Some("./c.ts"), + ), + ("file:///a/b/d.ts", "file:///a/b/c.ts", Some("./c.ts")), + ("https://deno.land/x/a/b/c.ts", "file:///a/b/c.ts", None), + ( + "https://deno.land/", + "https://deno.land/x/a/b/c.ts", + Some("./x/a/b/c.ts"), + ), + ( + "https://deno.land/x/d/e/f.ts", + "https://deno.land/x/a/b/c.ts", + Some("../../a/b/c.ts"), + ), + ]; + for (from_str, to_str, expected) in fixtures { + let from = ModuleSpecifier::parse(from_str).unwrap(); + let to = ModuleSpecifier::parse(to_str).unwrap(); + let actual = relative_specifier(&from, &to); + assert_eq!( + actual.as_deref(), + expected, + "from: \"{}\" to: \"{}\"", + from_str, + to_str + ); + } + } + + #[test] + fn test_path_has_trailing_slash() { + #[cfg(not(windows))] + { + run_test("/Users/johndoe/Desktop/deno-project/target/", true); + run_test(r"/Users/johndoe/deno-project/target//", true); + run_test("/Users/johndoe/Desktop/deno-project", false); + run_test(r"/Users/johndoe/deno-project\", false); + } + + #[cfg(windows)] + { + run_test(r"C:\test\deno-project\", true); + run_test(r"C:\test\deno-project\\", true); + run_test(r"C:\test\file.txt", false); + run_test(r"C:\test\file.txt/", false); + } + + fn run_test(path_str: &str, expected: bool) { + 
let path = Path::new(path_str); + let result = path_has_trailing_slash(path); + assert_eq!(result, expected); + } + } + + #[test] + fn test_path_with_stem_suffix() { + assert_eq!( + path_with_stem_suffix(&PathBuf::from("/"), "_2"), + PathBuf::from("/_2") + ); + assert_eq!( + path_with_stem_suffix(&PathBuf::from("/test"), "_2"), + PathBuf::from("/test_2") + ); + assert_eq!( + path_with_stem_suffix(&PathBuf::from("/test.txt"), "_2"), + PathBuf::from("/test_2.txt") + ); + assert_eq!( + path_with_stem_suffix(&PathBuf::from("/test/subdir"), "_2"), + PathBuf::from("/test/subdir_2") + ); + assert_eq!( + path_with_stem_suffix(&PathBuf::from("/test/subdir.other.txt"), "_2"), + PathBuf::from("/test/subdir.other_2.txt") + ); + assert_eq!( + path_with_stem_suffix(&PathBuf::from("/test.d.ts"), "_2"), + PathBuf::from("/test_2.d.ts") + ); + assert_eq!( + path_with_stem_suffix(&PathBuf::from("/test.D.TS"), "_2"), + PathBuf::from("/test_2.D.TS") + ); + assert_eq!( + path_with_stem_suffix(&PathBuf::from("/test.d.mts"), "_2"), + PathBuf::from("/test_2.d.mts") + ); + assert_eq!( + path_with_stem_suffix(&PathBuf::from("/test.d.cts"), "_2"), + PathBuf::from("/test_2.d.cts") + ); + } +} diff --git a/cli/util/progress_bar.rs b/cli/util/progress_bar.rs new file mode 100644 index 000000000..5b49fb279 --- /dev/null +++ b/cli/util/progress_bar.rs @@ -0,0 +1,143 @@ +// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. + +use crate::colors; +use deno_core::parking_lot::Mutex; +use indexmap::IndexSet; +use std::sync::Arc; +use std::time::Duration; + +#[derive(Clone, Debug, Default)] +pub struct ProgressBar(Arc>); + +#[derive(Debug)] +struct ProgressBarInner { + pb: Option, + is_tty: bool, + in_flight: IndexSet, +} + +impl Default for ProgressBarInner { + fn default() -> Self { + Self { + pb: None, + is_tty: colors::is_tty(), + in_flight: IndexSet::default(), + } + } +} + +impl ProgressBarInner { + fn get_or_create_pb(&mut self) -> indicatif::ProgressBar { + if let Some(pb) = self.pb.as_ref() { + return pb.clone(); + } + + let pb = indicatif::ProgressBar::new_spinner(); + pb.enable_steady_tick(Duration::from_millis(120)); + pb.set_prefix("Download"); + pb.set_style( + indicatif::ProgressStyle::with_template( + "{prefix:.green} {spinner:.green} {msg}", + ) + .unwrap() + .tick_strings(&["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"]), + ); + self.pb = Some(pb); + self.pb.as_ref().unwrap().clone() + } + + fn add_in_flight(&mut self, msg: &str) { + if self.in_flight.contains(msg) { + return; + } + + self.in_flight.insert(msg.to_string()); + } + + /// Returns if removed "in-flight" was last entry and progress + /// bar needs to be updated. 
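+  /// Returns `false` when `msg` was never tracked as in-flight.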
+ fn remove_in_flight(&mut self, msg: &str) -> bool { + if !self.in_flight.contains(msg) { + return false; + } + + let mut is_last = false; + if let Some(last) = self.in_flight.last() { + is_last = last == msg; + } + self.in_flight.remove(msg); + is_last + } + + fn update_progress_bar(&mut self) { + let pb = self.get_or_create_pb(); + if let Some(msg) = self.in_flight.last() { + pb.set_message(msg.clone()); + } + } +} + +pub struct UpdateGuard { + pb: ProgressBar, + msg: String, + noop: bool, +} + +impl Drop for UpdateGuard { + fn drop(&mut self) { + if self.noop { + return; + } + + let mut inner = self.pb.0.lock(); + if inner.remove_in_flight(&self.msg) { + inner.update_progress_bar(); + } + } +} + +impl ProgressBar { + pub fn update(&self, msg: &str) -> UpdateGuard { + let mut guard = UpdateGuard { + pb: self.clone(), + msg: msg.to_string(), + noop: false, + }; + let mut inner = self.0.lock(); + + // If we're not running in TTY we're just gonna fallback + // to using logger crate. + if !inner.is_tty { + log::log!(log::Level::Info, "{} {}", colors::green("Download"), msg); + guard.noop = true; + return guard; + } + + inner.add_in_flight(msg); + inner.update_progress_bar(); + guard + } + + pub fn clear(&self) { + let mut inner = self.0.lock(); + + if let Some(pb) = inner.pb.as_ref() { + pb.finish_and_clear(); + inner.pb = None; + } + } + + pub fn clear_guard(&self) -> ClearGuard { + ClearGuard { pb: self.clone() } + } +} + +pub struct ClearGuard { + pb: ProgressBar, +} + +impl Drop for ClearGuard { + fn drop(&mut self) { + self.pb.clear(); + } +} diff --git a/cli/util/text_encoding.rs b/cli/util/text_encoding.rs new file mode 100644 index 000000000..c16a1289d --- /dev/null +++ b/cli/util/text_encoding.rs @@ -0,0 +1,162 @@ +// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. + +use encoding_rs::*; +use std::borrow::Cow; +use std::io::Error; +use std::io::ErrorKind; + +pub const BOM_CHAR: char = '\u{FEFF}'; + +/// Attempts to detect the character encoding of the provided bytes. +/// +/// Supports UTF-8, UTF-16 Little Endian and UTF-16 Big Endian. +pub fn detect_charset(bytes: &'_ [u8]) -> &'static str { + const UTF16_LE_BOM: &[u8] = b"\xFF\xFE"; + const UTF16_BE_BOM: &[u8] = b"\xFE\xFF"; + + if bytes.starts_with(UTF16_LE_BOM) { + "utf-16le" + } else if bytes.starts_with(UTF16_BE_BOM) { + "utf-16be" + } else { + // Assume everything else is utf-8 + "utf-8" + } +} + +/// Attempts to convert the provided bytes to a UTF-8 string. +/// +/// Supports all encodings supported by the encoding_rs crate, which includes +/// all encodings specified in the WHATWG Encoding Standard, and only those +/// encodings (see: ). +pub fn convert_to_utf8<'a>( + bytes: &'a [u8], + charset: &'_ str, +) -> Result, Error> { + match Encoding::for_label(charset.as_bytes()) { + Some(encoding) => encoding + .decode_without_bom_handling_and_without_replacement(bytes) + .ok_or_else(|| ErrorKind::InvalidData.into()), + None => Err(Error::new( + ErrorKind::InvalidInput, + format!("Unsupported charset: {}", charset), + )), + } +} + +/// Strips the byte order mark from the provided text if it exists. +pub fn strip_bom(text: &str) -> &str { + if text.starts_with(BOM_CHAR) { + &text[BOM_CHAR.len_utf8()..] 
+ } else { + text + } +} + +static SOURCE_MAP_PREFIX: &str = + "//# sourceMappingURL=data:application/json;base64,"; + +pub fn source_map_from_code(code: &str) -> Option> { + let last_line = code.rsplit(|u| u == '\n').next()?; + if last_line.starts_with(SOURCE_MAP_PREFIX) { + let input = last_line.split_at(SOURCE_MAP_PREFIX.len()).1; + let decoded_map = base64::decode(input) + .expect("Unable to decode source map from emitted file."); + Some(decoded_map) + } else { + None + } +} + +pub fn code_without_source_map(mut code: String) -> String { + if let Some(last_line_index) = code.rfind('\n') { + if code[last_line_index + 1..].starts_with(SOURCE_MAP_PREFIX) { + code.truncate(last_line_index + 1); + code + } else { + code + } + } else { + code + } +} + +#[cfg(test)] +mod tests { + use super::*; + + fn test_detection(test_data: &[u8], expected_charset: &str) { + let detected_charset = detect_charset(test_data); + assert_eq!( + expected_charset.to_lowercase(), + detected_charset.to_lowercase() + ); + } + + #[test] + fn test_detection_utf8_no_bom() { + let test_data = "Hello UTF-8 it is \u{23F0} for Deno!" + .to_owned() + .into_bytes(); + test_detection(&test_data, "utf-8"); + } + + #[test] + fn test_detection_utf16_little_endian() { + let test_data = b"\xFF\xFEHello UTF-16LE".to_owned().to_vec(); + test_detection(&test_data, "utf-16le"); + } + + #[test] + fn test_detection_utf16_big_endian() { + let test_data = b"\xFE\xFFHello UTF-16BE".to_owned().to_vec(); + test_detection(&test_data, "utf-16be"); + } + + #[test] + fn test_decoding_unsupported_charset() { + let test_data = Vec::new(); + let result = convert_to_utf8(&test_data, "utf-32le"); + assert!(result.is_err()); + let err = result.expect_err("Err expected"); + assert!(err.kind() == ErrorKind::InvalidInput); + } + + #[test] + fn test_decoding_invalid_utf8() { + let test_data = b"\xFE\xFE\xFF\xFF".to_vec(); + let result = convert_to_utf8(&test_data, "utf-8"); + assert!(result.is_err()); + let err = result.expect_err("Err expected"); + assert!(err.kind() == ErrorKind::InvalidData); + } + + #[test] + fn test_source_without_source_map() { + run_test("", ""); + run_test("\n", "\n"); + run_test("\r\n", "\r\n"); + run_test("a", "a"); + run_test("a\n", "a\n"); + run_test("a\r\n", "a\r\n"); + run_test("a\r\nb", "a\r\nb"); + run_test("a\nb\n", "a\nb\n"); + run_test("a\r\nb\r\n", "a\r\nb\r\n"); + run_test( + "test\n//# sourceMappingURL=data:application/json;base64,test", + "test\n", + ); + run_test( + "test\r\n//# sourceMappingURL=data:application/json;base64,test", + "test\r\n", + ); + run_test( + "\n//# sourceMappingURL=data:application/json;base64,test", + "\n", + ); + + fn run_test(input: &str, output: &str) { + assert_eq!(code_without_source_map(input.to_string()), output); + } + } +} diff --git a/cli/util/unix.rs b/cli/util/unix.rs new file mode 100644 index 000000000..f282f6cfe --- /dev/null +++ b/cli/util/unix.rs @@ -0,0 +1,45 @@ +// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. + +/// Raise soft file descriptor limit to hard file descriptor limit. +/// This is the difference between `ulimit -n` and `ulimit -n -H`. +pub fn raise_fd_limit() { + #[cfg(unix)] + // TODO(bartlomieju): + #[allow(clippy::undocumented_unsafe_blocks)] + unsafe { + let mut limits = libc::rlimit { + rlim_cur: 0, + rlim_max: 0, + }; + + if 0 != libc::getrlimit(libc::RLIMIT_NOFILE, &mut limits) { + return; + } + + if limits.rlim_cur == libc::RLIM_INFINITY { + return; + } + + // No hard limit? Do a binary search for the effective soft limit. 
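+    // (setrlimit may reject values even when the hard limit reads as unlimited,
+    // so keep the largest soft limit the kernel actually accepts.)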
+ if limits.rlim_max == libc::RLIM_INFINITY { + let mut min = limits.rlim_cur; + let mut max = 1 << 20; + + while min + 1 < max { + limits.rlim_cur = min + (max - min) / 2; + match libc::setrlimit(libc::RLIMIT_NOFILE, &limits) { + 0 => min = limits.rlim_cur, + _ => max = limits.rlim_cur, + } + } + + return; + } + + // Raise the soft limit to the hard limit. + if limits.rlim_cur < limits.rlim_max { + limits.rlim_cur = limits.rlim_max; + libc::setrlimit(libc::RLIMIT_NOFILE, &limits); + } + } +} diff --git a/cli/util/windows.rs b/cli/util/windows.rs new file mode 100644 index 000000000..0801ff2f5 --- /dev/null +++ b/cli/util/windows.rs @@ -0,0 +1,90 @@ +// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. + +/// Ensures that stdin, stdout, and stderr are open and have valid HANDLEs +/// associated with them. There are many places where a `std::fs::File` is +/// constructed from a stdio handle; if the handle is null this causes a panic. +pub fn ensure_stdio_open() { + #[cfg(windows)] + // SAFETY: winapi calls + unsafe { + use std::mem::size_of; + use winapi::shared::minwindef::DWORD; + use winapi::shared::minwindef::FALSE; + use winapi::shared::minwindef::TRUE; + use winapi::shared::ntdef::NULL; + use winapi::shared::winerror::ERROR_INVALID_HANDLE; + use winapi::um::errhandlingapi::GetLastError; + use winapi::um::fileapi::CreateFileA; + use winapi::um::fileapi::OPEN_EXISTING; + use winapi::um::handleapi::GetHandleInformation; + use winapi::um::handleapi::INVALID_HANDLE_VALUE; + use winapi::um::minwinbase::SECURITY_ATTRIBUTES; + use winapi::um::processenv::GetStdHandle; + use winapi::um::processenv::SetStdHandle; + use winapi::um::winbase::STD_ERROR_HANDLE; + use winapi::um::winbase::STD_INPUT_HANDLE; + use winapi::um::winbase::STD_OUTPUT_HANDLE; + use winapi::um::winnt::FILE_ATTRIBUTE_NORMAL; + use winapi::um::winnt::FILE_GENERIC_READ; + use winapi::um::winnt::FILE_GENERIC_WRITE; + use winapi::um::winnt::FILE_READ_ATTRIBUTES; + use winapi::um::winnt::FILE_SHARE_READ; + use winapi::um::winnt::FILE_SHARE_WRITE; + + for std_handle in [STD_INPUT_HANDLE, STD_OUTPUT_HANDLE, STD_ERROR_HANDLE] { + // Check whether stdio handle is open. + let is_valid = match GetStdHandle(std_handle) { + NULL | INVALID_HANDLE_VALUE => false, + handle => { + // The stdio handle is open; check whether its handle is valid. + let mut flags: DWORD = 0; + match GetHandleInformation(handle, &mut flags) { + TRUE => true, + FALSE if GetLastError() == ERROR_INVALID_HANDLE => false, + FALSE => { + panic!("GetHandleInformation failed (error {})", GetLastError()); + } + _ => unreachable!(), + } + } + }; + + if !is_valid { + // Open NUL device. + let desired_access = match std_handle { + STD_INPUT_HANDLE => FILE_GENERIC_READ, + _ => FILE_GENERIC_WRITE | FILE_READ_ATTRIBUTES, + }; + let security_attributes = SECURITY_ATTRIBUTES { + nLength: size_of::() as DWORD, + lpSecurityDescriptor: NULL, + bInheritHandle: TRUE, + }; + let file_handle = CreateFileA( + b"\\\\?\\NUL\0" as *const _ as *mut _, + desired_access, + FILE_SHARE_READ | FILE_SHARE_WRITE, + &security_attributes as *const _ as *mut _, + OPEN_EXISTING, + FILE_ATTRIBUTE_NORMAL, + NULL, + ); + match file_handle { + NULL => unreachable!(), + INVALID_HANDLE_VALUE => { + panic!("Could not open NUL device (error {})", GetLastError()); + } + _ => {} + } + + // Assign the opened NUL handle to the missing stdio handle. 
+ let success = SetStdHandle(std_handle, file_handle); + match success { + TRUE => {} + FALSE => panic!("SetStdHandle failed (error {})", GetLastError()), + _ => unreachable!(), + } + } + } + } +} diff --git a/cli/windows_util.rs b/cli/windows_util.rs deleted file mode 100644 index 0801ff2f5..000000000 --- a/cli/windows_util.rs +++ /dev/null @@ -1,90 +0,0 @@ -// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. - -/// Ensures that stdin, stdout, and stderr are open and have valid HANDLEs -/// associated with them. There are many places where a `std::fs::File` is -/// constructed from a stdio handle; if the handle is null this causes a panic. -pub fn ensure_stdio_open() { - #[cfg(windows)] - // SAFETY: winapi calls - unsafe { - use std::mem::size_of; - use winapi::shared::minwindef::DWORD; - use winapi::shared::minwindef::FALSE; - use winapi::shared::minwindef::TRUE; - use winapi::shared::ntdef::NULL; - use winapi::shared::winerror::ERROR_INVALID_HANDLE; - use winapi::um::errhandlingapi::GetLastError; - use winapi::um::fileapi::CreateFileA; - use winapi::um::fileapi::OPEN_EXISTING; - use winapi::um::handleapi::GetHandleInformation; - use winapi::um::handleapi::INVALID_HANDLE_VALUE; - use winapi::um::minwinbase::SECURITY_ATTRIBUTES; - use winapi::um::processenv::GetStdHandle; - use winapi::um::processenv::SetStdHandle; - use winapi::um::winbase::STD_ERROR_HANDLE; - use winapi::um::winbase::STD_INPUT_HANDLE; - use winapi::um::winbase::STD_OUTPUT_HANDLE; - use winapi::um::winnt::FILE_ATTRIBUTE_NORMAL; - use winapi::um::winnt::FILE_GENERIC_READ; - use winapi::um::winnt::FILE_GENERIC_WRITE; - use winapi::um::winnt::FILE_READ_ATTRIBUTES; - use winapi::um::winnt::FILE_SHARE_READ; - use winapi::um::winnt::FILE_SHARE_WRITE; - - for std_handle in [STD_INPUT_HANDLE, STD_OUTPUT_HANDLE, STD_ERROR_HANDLE] { - // Check whether stdio handle is open. - let is_valid = match GetStdHandle(std_handle) { - NULL | INVALID_HANDLE_VALUE => false, - handle => { - // The stdio handle is open; check whether its handle is valid. - let mut flags: DWORD = 0; - match GetHandleInformation(handle, &mut flags) { - TRUE => true, - FALSE if GetLastError() == ERROR_INVALID_HANDLE => false, - FALSE => { - panic!("GetHandleInformation failed (error {})", GetLastError()); - } - _ => unreachable!(), - } - } - }; - - if !is_valid { - // Open NUL device. - let desired_access = match std_handle { - STD_INPUT_HANDLE => FILE_GENERIC_READ, - _ => FILE_GENERIC_WRITE | FILE_READ_ATTRIBUTES, - }; - let security_attributes = SECURITY_ATTRIBUTES { - nLength: size_of::() as DWORD, - lpSecurityDescriptor: NULL, - bInheritHandle: TRUE, - }; - let file_handle = CreateFileA( - b"\\\\?\\NUL\0" as *const _ as *mut _, - desired_access, - FILE_SHARE_READ | FILE_SHARE_WRITE, - &security_attributes as *const _ as *mut _, - OPEN_EXISTING, - FILE_ATTRIBUTE_NORMAL, - NULL, - ); - match file_handle { - NULL => unreachable!(), - INVALID_HANDLE_VALUE => { - panic!("Could not open NUL device (error {})", GetLastError()); - } - _ => {} - } - - // Assign the opened NUL handle to the missing stdio handle. 
- let success = SetStdHandle(std_handle, file_handle); - match success { - TRUE => {} - FALSE => panic!("SetStdHandle failed (error {})", GetLastError()), - _ => unreachable!(), - } - } - } - } -} diff --git a/cli/worker.rs b/cli/worker.rs index ad7b4e8ed..4762bab1f 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -24,7 +24,6 @@ use deno_runtime::worker::WorkerOptions; use deno_runtime::BootstrapOptions; use crate::args::DenoSubcommand; -use crate::checksum; use crate::errors; use crate::module_loader::CliModuleLoader; use crate::node; @@ -34,6 +33,7 @@ use crate::proc_state::ProcState; use crate::tools; use crate::tools::coverage::CoverageCollector; use crate::tools::test::TestMode; +use crate::util::checksum; use crate::version; pub struct CliMainWorker { -- cgit v1.2.3
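
For orientation, here is a minimal sketch of what a call site looks like once the helpers live under cli/util. The module names are the ones declared in cli/util/mod.rs and the signatures are the ones introduced above (`collect_files`, `is_supported_ext`, `human_elapsed`); `print_walk_summary` itself is a hypothetical function used only for illustration, and the snippet assumes it is compiled as part of the `cli` crate so the `crate::util::…` paths resolve.

use std::path::PathBuf;

use deno_core::error::AnyError;

use crate::util::display::human_elapsed;
use crate::util::fs::collect_files;
use crate::util::path::is_supported_ext;

/// Hypothetical helper: walk `roots`, keep only extensions Deno supports,
/// and log how long the walk took, using the relocated util modules.
fn print_walk_summary(roots: &[PathBuf], elapsed_ms: u128) -> Result<(), AnyError> {
  // `collect_files` recursively walks `roots`, skipping paths in the ignore
  // list (empty here) and keeping entries accepted by the predicate.
  let files = collect_files(roots, &[], |path| is_supported_ext(path))?;
  log::info!("collected {} files in {}", files.len(), human_elapsed(elapsed_ms));
  Ok(())
}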