From f9f10229a41d000ae9e96f0907ca321f9ffdeea7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?=
Date: Fri, 8 May 2020 16:18:00 +0200
Subject: refactor: Remove cli::compilers module (#5138)

This PR removes "cli/compilers/" directory.
"cli/compilers/ts.rs" has been renamed to "cli/tsc.rs"
---
 cli/compilers/compiler_worker.rs |  66 ---
 cli/compilers/js.rs              |  21 -
 cli/compilers/mod.rs             |  25 -
 cli/compilers/ts.rs              | 910 -------------------------------------
 cli/global_state.rs              |  19 +-
 cli/lib.rs                       |   4 +-
 cli/lockfile.rs                  |   2 +-
 cli/ops/runtime_compiler.rs      |   4 +-
 cli/state.rs                     |   2 +-
 cli/tsc.rs                       | 960 +++++++++++++++++++++++++++++++++++++++
 10 files changed, 977 insertions(+), 1036 deletions(-)
 delete mode 100644 cli/compilers/compiler_worker.rs
 delete mode 100644 cli/compilers/js.rs
 delete mode 100644 cli/compilers/mod.rs
 delete mode 100644 cli/compilers/ts.rs
 create mode 100644 cli/tsc.rs

(limited to 'cli')

diff --git a/cli/compilers/compiler_worker.rs b/cli/compilers/compiler_worker.rs
deleted file mode 100644
index aa84c8695..000000000
--- a/cli/compilers/compiler_worker.rs
+++ /dev/null
@@ -1,66 +0,0 @@
-// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
-use crate::ops;
-use crate::state::State;
-use crate::web_worker::WebWorker;
-use core::task::Context;
-use deno_core::ErrBox;
-use deno_core::StartupData;
-use futures::future::Future;
-use futures::future::FutureExt;
-use std::ops::Deref;
-use std::ops::DerefMut;
-use std::pin::Pin;
-use std::task::Poll;
-
-/// This worker is used to host TypeScript and WASM compilers.
-///
-/// It provides minimal set of ops that are necessary to facilitate
-/// compilation.
-///
-/// NOTE: This worker is considered priveleged, because it may
-/// access file system without permission check.
-///
-/// At the moment this worker is meant to be single-use - after
-/// performing single compilation/bundling it should be destroyed.
-///
-/// TODO(bartlomieju): add support to reuse the worker - or in other
-/// words support stateful TS compiler
-pub struct CompilerWorker(WebWorker);
-
-impl CompilerWorker {
-  pub fn new(name: String, startup_data: StartupData, state: State) -> Self {
-    let state_ = state.clone();
-    let mut worker = WebWorker::new(name, startup_data, state_, false);
-    {
-      let isolate = &mut worker.isolate;
-      ops::compiler::init(isolate, &state);
-      // TODO(bartlomieju): CompilerWorker should not
-      // depend on those ops
-      ops::os::init(isolate, &state);
-      ops::fs::init(isolate, &state);
-    }
-    Self(worker)
-  }
-}
-
-impl Deref for CompilerWorker {
-  type Target = WebWorker;
-  fn deref(&self) -> &Self::Target {
-    &self.0
-  }
-}
-
-impl DerefMut for CompilerWorker {
-  fn deref_mut(&mut self) -> &mut Self::Target {
-    &mut self.0
-  }
-}
-
-impl Future for CompilerWorker {
-  type Output = Result<(), ErrBox>;
-
-  fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
-    let inner = self.get_mut();
-    inner.0.poll_unpin(cx)
-  }
-}
diff --git a/cli/compilers/js.rs b/cli/compilers/js.rs
deleted file mode 100644
index d90960bfc..000000000
--- a/cli/compilers/js.rs
+++ /dev/null
@@ -1,21 +0,0 @@
-// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
-use crate::compilers::CompiledModule; -use crate::file_fetcher::SourceFile; -use deno_core::ErrBox; -use std::str; - -pub struct JsCompiler {} - -impl JsCompiler { - pub async fn compile( - &self, - source_file: SourceFile, - ) -> Result { - Ok(CompiledModule { - code: str::from_utf8(&source_file.source_code) - .unwrap() - .to_string(), - name: source_file.url.to_string(), - }) - } -} diff --git a/cli/compilers/mod.rs b/cli/compilers/mod.rs deleted file mode 100644 index bc3bfade2..000000000 --- a/cli/compilers/mod.rs +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. -use crate::ops::JsonResult; -use deno_core::ErrBox; -use futures::Future; - -mod compiler_worker; -mod js; -mod ts; - -pub use js::JsCompiler; -pub use ts::runtime_compile; -pub use ts::runtime_transpile; -pub use ts::TargetLib; -pub use ts::TsCompiler; - -pub type CompilationResultFuture = dyn Future; - -#[derive(Debug, Clone)] -pub struct CompiledModule { - pub code: String, - pub name: String, -} - -pub type CompiledModuleFuture = - dyn Future>; diff --git a/cli/compilers/ts.rs b/cli/compilers/ts.rs deleted file mode 100644 index 832d5e6df..000000000 --- a/cli/compilers/ts.rs +++ /dev/null @@ -1,910 +0,0 @@ -// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. -use super::compiler_worker::CompilerWorker; -use crate::colors; -use crate::compilers::CompiledModule; -use crate::diagnostics::Diagnostic; -use crate::diagnostics::DiagnosticItem; -use crate::disk_cache::DiskCache; -use crate::file_fetcher::SourceFile; -use crate::file_fetcher::SourceFileFetcher; -use crate::fmt; -use crate::fs as deno_fs; -use crate::global_state::GlobalState; -use crate::msg; -use crate::op_error::OpError; -use crate::source_maps::SourceMapGetter; -use crate::startup_data; -use crate::state::*; -use crate::tokio_util; -use crate::version; -use crate::web_worker::WebWorkerHandle; -use crate::worker::WorkerEvent; -use deno_core::Buf; -use deno_core::ErrBox; -use deno_core::ModuleSpecifier; -use log::info; -use regex::Regex; -use serde::Deserialize; -use serde_json::json; -use serde_json::Value; -use std::collections::HashMap; -use std::collections::HashSet; -use std::fs; -use std::hash::BuildHasher; -use std::io; -use std::ops::Deref; -use std::path::PathBuf; -use std::str; -use std::sync::atomic::Ordering; -use std::sync::Arc; -use std::sync::Mutex; -use url::Url; - -lazy_static! { - static ref CHECK_JS_RE: Regex = - Regex::new(r#""checkJs"\s*?:\s*?true"#).unwrap(); -} - -#[derive(Clone)] -pub enum TargetLib { - Main, - Worker, -} - -/// Struct which represents the state of the compiler -/// configuration where the first is canonical name for the configuration file, -/// second is a vector of the bytes of the contents of the configuration file, -/// third is bytes of the hash of contents. -#[derive(Clone)] -pub struct CompilerConfig { - pub path: Option, - pub content: Option>, - pub hash: Vec, - pub compile_js: bool, -} - -impl CompilerConfig { - /// Take the passed flag and resolve the file name relative to the cwd. - pub fn load(config_path: Option) -> Result { - let config_file = match &config_path { - Some(config_file_name) => { - debug!("Compiler config file: {}", config_file_name); - let cwd = std::env::current_dir().unwrap(); - Some(cwd.join(config_file_name)) - } - _ => None, - }; - - // Convert the PathBuf to a canonicalized string. This is needed by the - // compiler to properly deal with the configuration. 
- let config_path = match &config_file { - Some(config_file) => Some(config_file.canonicalize().map_err(|_| { - io::Error::new( - io::ErrorKind::InvalidInput, - format!( - "Could not find the config file: {}", - config_file.to_string_lossy() - ), - ) - })), - _ => None, - }; - - // Load the contents of the configuration file - let config = match &config_file { - Some(config_file) => { - debug!("Attempt to load config: {}", config_file.to_str().unwrap()); - let config = fs::read(&config_file)?; - Some(config) - } - _ => None, - }; - - let config_hash = match &config { - Some(bytes) => bytes.clone(), - _ => b"".to_vec(), - }; - - // If `checkJs` is set to true in `compilerOptions` then we're gonna be compiling - // JavaScript files as well - let compile_js = if let Some(config_content) = config.clone() { - let config_str = std::str::from_utf8(&config_content)?; - CHECK_JS_RE.is_match(config_str) - } else { - false - }; - - let ts_config = Self { - path: config_path.unwrap_or_else(|| Ok(PathBuf::new())).ok(), - content: config, - hash: config_hash, - compile_js, - }; - - Ok(ts_config) - } -} - -/// Information associated with compiled file in cache. -/// Includes source code path and state hash. -/// version_hash is used to validate versions of the file -/// and could be used to remove stale file in cache. -pub struct CompiledFileMetadata { - pub source_path: PathBuf, - pub version_hash: String, -} - -static SOURCE_PATH: &str = "source_path"; -static VERSION_HASH: &str = "version_hash"; - -impl CompiledFileMetadata { - pub fn from_json_string(metadata_string: String) -> Option { - // TODO: use serde for deserialization - let maybe_metadata_json: serde_json::Result = - serde_json::from_str(&metadata_string); - - if let Ok(metadata_json) = maybe_metadata_json { - let source_path = metadata_json[SOURCE_PATH].as_str().map(PathBuf::from); - let version_hash = metadata_json[VERSION_HASH].as_str().map(String::from); - - if source_path.is_none() || version_hash.is_none() { - return None; - } - - return Some(CompiledFileMetadata { - source_path: source_path.unwrap(), - version_hash: version_hash.unwrap(), - }); - } - - None - } - - pub fn to_json_string(&self) -> Result { - let mut value_map = serde_json::map::Map::new(); - - value_map.insert(SOURCE_PATH.to_owned(), json!(&self.source_path)); - value_map.insert(VERSION_HASH.to_string(), json!(&self.version_hash)); - serde_json::to_string(&value_map) - } -} -/// Creates the JSON message send to compiler.ts's onmessage. -fn req( - request_type: msg::CompilerRequestType, - root_names: Vec, - compiler_config: CompilerConfig, - target: &str, - bundle: bool, - unstable: bool, -) -> Buf { - let cwd = std::env::current_dir().unwrap(); - let j = match (compiler_config.path, compiler_config.content) { - (Some(config_path), Some(config_data)) => json!({ - "type": request_type as i32, - "target": target, - "rootNames": root_names, - "bundle": bundle, - "unstable": unstable, - "configPath": config_path, - "config": str::from_utf8(&config_data).unwrap(), - "cwd": cwd, - }), - _ => json!({ - "type": request_type as i32, - "target": target, - "rootNames": root_names, - "bundle": bundle, - "unstable": unstable, - "cwd": cwd, - }), - }; - - j.to_string().into_boxed_str().into_boxed_bytes() -} - -/// Emit a SHA256 hash based on source code, deno version and TS config. -/// Used to check if a recompilation for source code is needed. 
-pub fn source_code_version_hash( - source_code: &[u8], - version: &str, - config_hash: &[u8], -) -> String { - crate::checksum::gen(vec![source_code, version.as_bytes(), config_hash]) -} - -pub struct TsCompilerInner { - pub file_fetcher: SourceFileFetcher, - pub config: CompilerConfig, - pub disk_cache: DiskCache, - /// Set of all URLs that have been compiled. This prevents double - /// compilation of module. - pub compiled: Mutex>, - /// This setting is controlled by `--reload` flag. Unless the flag - /// is provided disk cache is used. - pub use_disk_cache: bool, - /// This setting is controlled by `compilerOptions.checkJs` - pub compile_js: bool, -} - -#[derive(Clone)] -pub struct TsCompiler(Arc); - -impl Deref for TsCompiler { - type Target = TsCompilerInner; - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -#[derive(Deserialize)] -#[serde(rename_all = "camelCase")] -struct EmittedSource { - filename: String, - contents: String, -} - -#[derive(Deserialize)] -#[serde(rename_all = "camelCase")] -struct BundleResponse { - diagnostics: Diagnostic, - bundle_output: String, -} - -#[derive(Deserialize)] -#[serde(rename_all = "camelCase")] -struct CompileResponse { - diagnostics: Diagnostic, - emit_map: HashMap, -} - -// TODO(bartlomieju): possible deduplicate once TS refactor is stabilized -#[derive(Deserialize)] -#[serde(rename_all = "camelCase")] -#[allow(unused)] -struct RuntimeBundleResponse { - diagnostics: Vec, - output: String, -} - -#[derive(Deserialize)] -#[serde(rename_all = "camelCase")] -struct RuntimeCompileResponse { - diagnostics: Vec, - emit_map: HashMap, -} - -impl TsCompiler { - pub fn new( - file_fetcher: SourceFileFetcher, - disk_cache: DiskCache, - use_disk_cache: bool, - config_path: Option, - ) -> Result { - let config = CompilerConfig::load(config_path)?; - Ok(TsCompiler(Arc::new(TsCompilerInner { - file_fetcher, - disk_cache, - compile_js: config.compile_js, - config, - compiled: Mutex::new(HashSet::new()), - use_disk_cache, - }))) - } - - /// Create a new V8 worker with snapshot of TS compiler and setup compiler's - /// runtime. - fn setup_worker(global_state: GlobalState) -> CompilerWorker { - let entry_point = - ModuleSpecifier::resolve_url_or_path("./__$deno$ts_compiler.ts").unwrap(); - let worker_state = - State::new(global_state.clone(), None, entry_point, DebugType::Internal) - .expect("Unable to create worker state"); - - // Count how many times we start the compiler worker. - global_state.compiler_starts.fetch_add(1, Ordering::SeqCst); - - let mut worker = CompilerWorker::new( - "TS".to_string(), - startup_data::compiler_isolate_init(), - worker_state, - ); - worker.execute("bootstrap.tsCompilerRuntime()").unwrap(); - worker - } - - pub async fn bundle( - &self, - global_state: GlobalState, - module_name: String, - out_file: Option, - ) -> Result<(), ErrBox> { - debug!( - "Invoking the compiler to bundle. 
module_name: {}", - module_name - ); - eprintln!("Bundling {}", module_name); - - let root_names = vec![module_name]; - let req_msg = req( - msg::CompilerRequestType::Compile, - root_names, - self.config.clone(), - "main", - true, - global_state.flags.unstable, - ); - - let msg = execute_in_thread(global_state.clone(), req_msg).await?; - let json_str = std::str::from_utf8(&msg).unwrap(); - debug!("Message: {}", json_str); - - let bundle_response: BundleResponse = serde_json::from_str(json_str)?; - - if !bundle_response.diagnostics.items.is_empty() { - return Err(ErrBox::from(bundle_response.diagnostics)); - } - - let output_string = fmt::format_text(&bundle_response.bundle_output)?; - - if let Some(out_file_) = out_file.as_ref() { - eprintln!("Emitting bundle to {:?}", out_file_); - - let output_bytes = output_string.as_bytes(); - let output_len = output_bytes.len(); - - deno_fs::write_file(out_file_, output_bytes, 0o666)?; - // TODO(bartlomieju): add "humanFileSize" method - eprintln!("{} bytes emmited.", output_len); - } else { - println!("{}", output_string); - } - - Ok(()) - } - - /// Mark given module URL as compiled to avoid multiple compilations of same - /// module in single run. - fn mark_compiled(&self, url: &Url) { - let mut c = self.compiled.lock().unwrap(); - c.insert(url.clone()); - } - - /// Check if given module URL has already been compiled and can be fetched - /// directly from disk. - fn has_compiled(&self, url: &Url) -> bool { - let c = self.compiled.lock().unwrap(); - c.contains(url) - } - - /// Asynchronously compile module and all it's dependencies. - /// - /// This method compiled every module at most once. - /// - /// If `--reload` flag was provided then compiler will not on-disk cache and - /// force recompilation. - /// - /// If compilation is required then new V8 worker is spawned with fresh TS - /// compiler. - pub async fn compile( - &self, - global_state: GlobalState, - source_file: &SourceFile, - target: TargetLib, - ) -> Result { - if self.has_compiled(&source_file.url) { - return self.get_compiled_module(&source_file.url); - } - - if self.use_disk_cache { - // Try to load cached version: - // 1. check if there's 'meta' file - if let Some(metadata) = self.get_metadata(&source_file.url) { - // 2. 
compare version hashes - // TODO: it would probably be good idea to make it method implemented on SourceFile - let version_hash_to_validate = source_code_version_hash( - &source_file.source_code, - version::DENO, - &self.config.hash, - ); - - if metadata.version_hash == version_hash_to_validate { - debug!("load_cache metadata version hash match"); - if let Ok(compiled_module) = - self.get_compiled_module(&source_file.url) - { - self.mark_compiled(&source_file.url); - return Ok(compiled_module); - } - } - } - } - let source_file_ = source_file.clone(); - let module_url = source_file.url.clone(); - let target = match target { - TargetLib::Main => "main", - TargetLib::Worker => "worker", - }; - let root_names = vec![module_url.to_string()]; - let req_msg = req( - msg::CompilerRequestType::Compile, - root_names, - self.config.clone(), - target, - false, - global_state.flags.unstable, - ); - - let ts_compiler = self.clone(); - - info!( - "{} {}", - colors::green("Compile".to_string()), - module_url.to_string() - ); - - let msg = execute_in_thread(global_state.clone(), req_msg).await?; - let json_str = std::str::from_utf8(&msg).unwrap(); - - let compile_response: CompileResponse = serde_json::from_str(json_str)?; - - if !compile_response.diagnostics.items.is_empty() { - return Err(ErrBox::from(compile_response.diagnostics)); - } - - self.cache_emitted_files(compile_response.emit_map)?; - ts_compiler.get_compiled_module(&source_file_.url) - } - - /// Get associated `CompiledFileMetadata` for given module if it exists. - pub fn get_metadata(&self, url: &Url) -> Option { - // Try to load cached version: - // 1. check if there's 'meta' file - let cache_key = self - .disk_cache - .get_cache_filename_with_extension(url, "meta"); - if let Ok(metadata_bytes) = self.disk_cache.get(&cache_key) { - if let Ok(metadata) = std::str::from_utf8(&metadata_bytes) { - if let Some(read_metadata) = - CompiledFileMetadata::from_json_string(metadata.to_string()) - { - return Some(read_metadata); - } - } - } - - None - } - - fn cache_emitted_files( - &self, - emit_map: HashMap, - ) -> std::io::Result<()> { - for (emitted_name, source) in emit_map.iter() { - let specifier = ModuleSpecifier::resolve_url(&source.filename) - .expect("Should be a valid module specifier"); - - if emitted_name.ends_with(".map") { - self.cache_source_map(&specifier, &source.contents)?; - } else if emitted_name.ends_with(".js") { - self.cache_compiled_file(&specifier, &source.contents)?; - } else { - panic!("Trying to cache unknown file type {}", emitted_name); - } - } - - Ok(()) - } - - pub fn get_compiled_module( - &self, - module_url: &Url, - ) -> Result { - let compiled_source_file = self.get_compiled_source_file(module_url)?; - - let compiled_module = CompiledModule { - code: str::from_utf8(&compiled_source_file.source_code) - .unwrap() - .to_string(), - name: module_url.to_string(), - }; - - Ok(compiled_module) - } - - /// Return compiled JS file for given TS module. 
- // TODO: ideally we shouldn't construct SourceFile by hand, but it should be delegated to - // SourceFileFetcher - pub fn get_compiled_source_file( - &self, - module_url: &Url, - ) -> Result { - let cache_key = self - .disk_cache - .get_cache_filename_with_extension(&module_url, "js"); - let compiled_code = self.disk_cache.get(&cache_key)?; - let compiled_code_filename = self.disk_cache.location.join(cache_key); - debug!("compiled filename: {:?}", compiled_code_filename); - - let compiled_module = SourceFile { - url: module_url.clone(), - filename: compiled_code_filename, - media_type: msg::MediaType::JavaScript, - source_code: compiled_code, - types_url: None, - }; - - Ok(compiled_module) - } - - /// Save compiled JS file for given TS module to on-disk cache. - /// - /// Along compiled file a special metadata file is saved as well containing - /// hash that can be validated to avoid unnecessary recompilation. - fn cache_compiled_file( - &self, - module_specifier: &ModuleSpecifier, - contents: &str, - ) -> std::io::Result<()> { - let source_file = self - .file_fetcher - .fetch_cached_source_file(&module_specifier) - .expect("Source file not found"); - - // NOTE: JavaScript files are only cached to disk if `checkJs` - // option in on - if source_file.media_type == msg::MediaType::JavaScript && !self.compile_js - { - return Ok(()); - } - - let js_key = self - .disk_cache - .get_cache_filename_with_extension(module_specifier.as_url(), "js"); - self.disk_cache.set(&js_key, contents.as_bytes())?; - self.mark_compiled(module_specifier.as_url()); - - let version_hash = source_code_version_hash( - &source_file.source_code, - version::DENO, - &self.config.hash, - ); - - let compiled_file_metadata = CompiledFileMetadata { - source_path: source_file.filename, - version_hash, - }; - let meta_key = self - .disk_cache - .get_cache_filename_with_extension(module_specifier.as_url(), "meta"); - self.disk_cache.set( - &meta_key, - compiled_file_metadata.to_json_string()?.as_bytes(), - ) - } - - /// Return associated source map file for given TS module. - // TODO: ideally we shouldn't construct SourceFile by hand, but it should be delegated to - // SourceFileFetcher - pub fn get_source_map_file( - &self, - module_specifier: &ModuleSpecifier, - ) -> Result { - let cache_key = self - .disk_cache - .get_cache_filename_with_extension(module_specifier.as_url(), "js.map"); - let source_code = self.disk_cache.get(&cache_key)?; - let source_map_filename = self.disk_cache.location.join(cache_key); - debug!("source map filename: {:?}", source_map_filename); - - let source_map_file = SourceFile { - url: module_specifier.as_url().to_owned(), - filename: source_map_filename, - media_type: msg::MediaType::JavaScript, - source_code, - types_url: None, - }; - - Ok(source_map_file) - } - - /// Save source map file for given TS module to on-disk cache. 
- fn cache_source_map( - &self, - module_specifier: &ModuleSpecifier, - contents: &str, - ) -> std::io::Result<()> { - let source_file = self - .file_fetcher - .fetch_cached_source_file(&module_specifier) - .expect("Source file not found"); - - // NOTE: JavaScript files are only cached to disk if `checkJs` - // option in on - if source_file.media_type == msg::MediaType::JavaScript && !self.compile_js - { - return Ok(()); - } - - let source_map_key = self - .disk_cache - .get_cache_filename_with_extension(module_specifier.as_url(), "js.map"); - self.disk_cache.set(&source_map_key, contents.as_bytes()) - } -} - -impl SourceMapGetter for TsCompiler { - fn get_source_map(&self, script_name: &str) -> Option> { - self - .try_to_resolve_and_get_source_map(script_name) - .map(|out| out.source_code) - } - - fn get_source_line(&self, script_name: &str, line: usize) -> Option { - self - .try_resolve_and_get_source_file(script_name) - .and_then(|out| { - str::from_utf8(&out.source_code).ok().and_then(|v| { - // Do NOT use .lines(): it skips the terminating empty line. - // (due to internally using .split_terminator() instead of .split()) - let lines: Vec<&str> = v.split('\n').collect(); - assert!(lines.len() > line); - Some(lines[line].to_string()) - }) - }) - } -} - -// `SourceMapGetter` related methods -impl TsCompiler { - fn try_to_resolve(&self, script_name: &str) -> Option { - // if `script_name` can't be resolved to ModuleSpecifier it's probably internal - // script (like `gen/cli/bundle/compiler.js`) so we won't be - // able to get source for it anyway - ModuleSpecifier::resolve_url(script_name).ok() - } - - fn try_resolve_and_get_source_file( - &self, - script_name: &str, - ) -> Option { - if let Some(module_specifier) = self.try_to_resolve(script_name) { - return self - .file_fetcher - .fetch_cached_source_file(&module_specifier); - } - - None - } - - fn try_to_resolve_and_get_source_map( - &self, - script_name: &str, - ) -> Option { - if let Some(module_specifier) = self.try_to_resolve(script_name) { - return match self.get_source_map_file(&module_specifier) { - Ok(out) => Some(out), - Err(_) => None, - }; - } - - None - } -} - -async fn execute_in_thread( - global_state: GlobalState, - req: Buf, -) -> Result { - let (handle_sender, handle_receiver) = - std::sync::mpsc::sync_channel::>(1); - let builder = - std::thread::Builder::new().name("deno-ts-compiler".to_string()); - let join_handle = builder.spawn(move || { - let worker = TsCompiler::setup_worker(global_state.clone()); - handle_sender.send(Ok(worker.thread_safe_handle())).unwrap(); - drop(handle_sender); - tokio_util::run_basic(worker).expect("Panic in event loop"); - })?; - let handle = handle_receiver.recv().unwrap()?; - handle.post_message(req)?; - let event = handle.get_event().await.expect("Compiler didn't respond"); - let buf = match event { - WorkerEvent::Message(buf) => Ok(buf), - WorkerEvent::Error(error) => Err(error), - WorkerEvent::TerminalError(error) => Err(error), - }?; - // Shutdown worker and wait for thread to finish - handle.terminate(); - join_handle.join().unwrap(); - Ok(buf) -} - -/// This function is used by `Deno.compile()` and `Deno.bundle()` APIs. 
-pub async fn runtime_compile( - global_state: GlobalState, - root_name: &str, - sources: &Option>, - bundle: bool, - options: &Option, -) -> Result { - let req_msg = json!({ - "type": msg::CompilerRequestType::RuntimeCompile as i32, - "target": "runtime", - "rootName": root_name, - "sources": sources, - "options": options, - "bundle": bundle, - "unstable": global_state.flags.unstable, - }) - .to_string() - .into_boxed_str() - .into_boxed_bytes(); - - let compiler = global_state.ts_compiler.clone(); - - let msg = execute_in_thread(global_state, req_msg).await?; - let json_str = std::str::from_utf8(&msg).unwrap(); - - // TODO(bartlomieju): factor `bundle` path into separate function `runtime_bundle` - if bundle { - let _response: RuntimeBundleResponse = serde_json::from_str(json_str)?; - return Ok(serde_json::from_str::(json_str).unwrap()); - } - - let response: RuntimeCompileResponse = serde_json::from_str(json_str)?; - - if response.diagnostics.is_empty() && sources.is_none() { - compiler.cache_emitted_files(response.emit_map)?; - } - - // We're returning `Ok()` instead of `Err()` because it's not runtime - // error if there were diagnostics produces; we want to let user handle - // diagnostics in the runtime. - Ok(serde_json::from_str::(json_str).unwrap()) -} - -/// This function is used by `Deno.transpileOnly()` API. -pub async fn runtime_transpile( - global_state: GlobalState, - sources: &HashMap, - options: &Option, -) -> Result { - let req_msg = json!({ - "type": msg::CompilerRequestType::RuntimeTranspile as i32, - "sources": sources, - "options": options, - }) - .to_string() - .into_boxed_str() - .into_boxed_bytes(); - - let msg = execute_in_thread(global_state, req_msg).await?; - let json_str = std::str::from_utf8(&msg).unwrap(); - let v = serde_json::from_str::(json_str) - .expect("Error decoding JSON string."); - Ok(v) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::fs as deno_fs; - use deno_core::ModuleSpecifier; - use std::path::PathBuf; - use tempfile::TempDir; - - #[tokio::test] - async fn test_compile() { - let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) - .parent() - .unwrap() - .join("cli/tests/002_hello.ts"); - let specifier = - ModuleSpecifier::resolve_url_or_path(p.to_str().unwrap()).unwrap(); - let out = SourceFile { - url: specifier.as_url().clone(), - filename: PathBuf::from(p.to_str().unwrap().to_string()), - media_type: msg::MediaType::TypeScript, - source_code: include_bytes!("../tests/002_hello.ts").to_vec(), - types_url: None, - }; - let mock_state = - GlobalState::mock(vec![String::from("deno"), String::from("hello.js")]); - let result = mock_state - .ts_compiler - .compile(mock_state.clone(), &out, TargetLib::Main) - .await; - assert!(result.is_ok()); - assert!(result - .unwrap() - .code - .as_bytes() - .starts_with(b"\"use strict\";\nconsole.log(\"Hello World\");")); - } - - #[tokio::test] - async fn test_bundle() { - let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) - .parent() - .unwrap() - .join("cli/tests/002_hello.ts"); - use deno_core::ModuleSpecifier; - let module_name = ModuleSpecifier::resolve_url_or_path(p.to_str().unwrap()) - .unwrap() - .to_string(); - - let state = GlobalState::mock(vec![ - String::from("deno"), - p.to_string_lossy().into(), - String::from("$deno$/bundle.js"), - ]); - - let result = state - .ts_compiler - .bundle(state.clone(), module_name, None) - .await; - assert!(result.is_ok()); - } - - #[test] - fn test_source_code_version_hash() { - assert_eq!( - 
"0185b42de0686b4c93c314daaa8dee159f768a9e9a336c2a5e3d5b8ca6c4208c", - source_code_version_hash(b"1+2", "0.4.0", b"{}") - ); - // Different source_code should result in different hash. - assert_eq!( - "e58631f1b6b6ce2b300b133ec2ad16a8a5ba6b7ecf812a8c06e59056638571ac", - source_code_version_hash(b"1", "0.4.0", b"{}") - ); - // Different version should result in different hash. - assert_eq!( - "307e6200347a88dbbada453102deb91c12939c65494e987d2d8978f6609b5633", - source_code_version_hash(b"1", "0.1.0", b"{}") - ); - // Different config should result in different hash. - assert_eq!( - "195eaf104a591d1d7f69fc169c60a41959c2b7a21373cd23a8f675f877ec385f", - source_code_version_hash(b"1", "0.4.0", b"{\"compilerOptions\": {}}") - ); - } - - #[test] - fn test_compile_js() { - let temp_dir = TempDir::new().expect("tempdir fail"); - let temp_dir_path = temp_dir.path(); - - let test_cases = vec![ - // valid JSON - (r#"{ "compilerOptions": { "checkJs": true } } "#, true), - // JSON with comment - ( - r#"{ "compilerOptions": { // force .js file compilation by Deno "checkJs": true } } "#, - true, - ), - // invalid JSON - (r#"{ "compilerOptions": { "checkJs": true },{ } "#, true), - // without content - ("", false), - ]; - - let path = temp_dir_path.join("tsconfig.json"); - let path_str = path.to_str().unwrap().to_string(); - - for (json_str, expected) in test_cases { - deno_fs::write_file(&path, json_str.as_bytes(), 0o666).unwrap(); - let config = CompilerConfig::load(Some(path_str.clone())).unwrap(); - assert_eq!(config.compile_js, expected); - } - } - - #[test] - fn test_compiler_config_load() { - let temp_dir = TempDir::new().expect("tempdir fail"); - let temp_dir_path = temp_dir.path(); - let path = temp_dir_path.join("doesnotexist.json"); - let path_str = path.to_str().unwrap().to_string(); - let res = CompilerConfig::load(Some(path_str)); - assert!(res.is_err()); - } -} diff --git a/cli/global_state.rs b/cli/global_state.rs index 460203364..b91ba5b10 100644 --- a/cli/global_state.rs +++ b/cli/global_state.rs @@ -1,8 +1,4 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
-use crate::compilers::CompiledModule; -use crate::compilers::JsCompiler; -use crate::compilers::TargetLib; -use crate::compilers::TsCompiler; use crate::deno_dir; use crate::file_fetcher::SourceFileFetcher; use crate::flags; @@ -10,6 +6,9 @@ use crate::http_cache; use crate::lockfile::Lockfile; use crate::msg; use crate::permissions::Permissions; +use crate::tsc::CompiledModule; +use crate::tsc::TargetLib; +use crate::tsc::TsCompiler; use deno_core::ErrBox; use deno_core::ModuleSpecifier; use std::env; @@ -33,7 +32,6 @@ pub struct GlobalStateInner { pub permissions: Permissions, pub dir: deno_dir::DenoDir, pub file_fetcher: SourceFileFetcher, - pub js_compiler: JsCompiler, pub ts_compiler: TsCompiler, pub lockfile: Option>, pub compiler_starts: AtomicUsize, @@ -84,7 +82,6 @@ impl GlobalState { flags, file_fetcher, ts_compiler, - js_compiler: JsCompiler {}, lockfile, compiler_starts: AtomicUsize::new(0), compile_lock: AsyncMutex::new(()), @@ -140,10 +137,16 @@ impl GlobalState { .ok(); }; - state1.js_compiler.compile(out).await + Ok(CompiledModule { + code: String::from_utf8(out.source_code)?, + name: out.url.to_string(), + }) } } - _ => state1.js_compiler.compile(out).await, + _ => Ok(CompiledModule { + code: String::from_utf8(out.source_code)?, + name: out.url.to_string(), + }), }?; drop(compile_lock); diff --git a/cli/lib.rs b/cli/lib.rs index 673340cb9..060c86b55 100644 --- a/cli/lib.rs +++ b/cli/lib.rs @@ -23,7 +23,6 @@ extern crate url; mod checksum; pub mod colors; -pub mod compilers; pub mod deno_dir; pub mod diagnostics; mod disk_cache; @@ -57,6 +56,7 @@ mod swc_util; mod test_runner; pub mod test_util; mod tokio_util; +mod tsc; mod upgrade; pub mod version; mod web_worker; @@ -66,7 +66,6 @@ pub use dprint_plugin_typescript::swc_common; pub use dprint_plugin_typescript::swc_ecma_ast; pub use dprint_plugin_typescript::swc_ecma_parser; -use crate::compilers::TargetLib; use crate::doc::parser::DocFileLoader; use crate::file_fetcher::SourceFile; use crate::file_fetcher::SourceFileFetcher; @@ -76,6 +75,7 @@ use crate::op_error::OpError; use crate::ops::io::get_stdio; use crate::state::DebugType; use crate::state::State; +use crate::tsc::TargetLib; use crate::worker::MainWorker; use deno_core::v8_set_flags; use deno_core::ErrBox; diff --git a/cli/lockfile.rs b/cli/lockfile.rs index f8700dac1..5e43e3420 100644 --- a/cli/lockfile.rs +++ b/cli/lockfile.rs @@ -1,4 +1,4 @@ -use crate::compilers::CompiledModule; +use crate::tsc::CompiledModule; use serde_json::json; pub use serde_json::Value; use std::collections::HashMap; diff --git a/cli/ops/runtime_compiler.rs b/cli/ops/runtime_compiler.rs index c7225b944..e44d6fa8b 100644 --- a/cli/ops/runtime_compiler.rs +++ b/cli/ops/runtime_compiler.rs @@ -1,10 +1,10 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. use super::dispatch_json::{Deserialize, JsonOp, Value}; -use crate::compilers::runtime_compile; -use crate::compilers::runtime_transpile; use crate::futures::FutureExt; use crate::op_error::OpError; use crate::state::State; +use crate::tsc::runtime_compile; +use crate::tsc::runtime_transpile; use deno_core::CoreIsolate; use deno_core::ZeroCopyBuf; use std::collections::HashMap; diff --git a/cli/state.rs b/cli/state.rs index 9fd719646..8c425d700 100644 --- a/cli/state.rs +++ b/cli/state.rs @@ -1,5 +1,4 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
-use crate::compilers::TargetLib; use crate::file_fetcher::SourceFileFetcher; use crate::global_state::GlobalState; use crate::global_timer::GlobalTimer; @@ -9,6 +8,7 @@ use crate::op_error::OpError; use crate::ops::JsonOp; use crate::ops::MinimalOp; use crate::permissions::Permissions; +use crate::tsc::TargetLib; use crate::web_worker::WebWorkerHandle; use deno_core::Buf; use deno_core::ErrBox; diff --git a/cli/tsc.rs b/cli/tsc.rs new file mode 100644 index 000000000..f0b617246 --- /dev/null +++ b/cli/tsc.rs @@ -0,0 +1,960 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. +use crate::colors; +use crate::diagnostics::Diagnostic; +use crate::diagnostics::DiagnosticItem; +use crate::disk_cache::DiskCache; +use crate::file_fetcher::SourceFile; +use crate::file_fetcher::SourceFileFetcher; +use crate::fmt; +use crate::fs as deno_fs; +use crate::global_state::GlobalState; +use crate::msg; +use crate::op_error::OpError; +use crate::ops; +use crate::source_maps::SourceMapGetter; +use crate::startup_data; +use crate::state::State; +use crate::state::*; +use crate::tokio_util; +use crate::version; +use crate::web_worker::WebWorker; +use crate::web_worker::WebWorkerHandle; +use crate::worker::WorkerEvent; +use core::task::Context; +use deno_core::Buf; +use deno_core::ErrBox; +use deno_core::ModuleSpecifier; +use deno_core::StartupData; +use futures::future::Future; +use futures::future::FutureExt; +use log::info; +use regex::Regex; +use serde::Deserialize; +use serde_json::json; +use serde_json::Value; +use std::collections::HashMap; +use std::collections::HashSet; +use std::fs; +use std::hash::BuildHasher; +use std::io; +use std::ops::Deref; +use std::ops::DerefMut; +use std::path::PathBuf; +use std::pin::Pin; +use std::str; +use std::sync::atomic::Ordering; +use std::sync::Arc; +use std::sync::Mutex; +use std::task::Poll; +use url::Url; + +#[derive(Debug, Clone)] +pub struct CompiledModule { + pub code: String, + pub name: String, +} + +pub struct CompilerWorker(WebWorker); + +impl CompilerWorker { + pub fn new(name: String, startup_data: StartupData, state: State) -> Self { + let state_ = state.clone(); + let mut worker = WebWorker::new(name, startup_data, state_, false); + { + let isolate = &mut worker.isolate; + ops::compiler::init(isolate, &state); + } + Self(worker) + } +} + +impl Deref for CompilerWorker { + type Target = WebWorker; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for CompilerWorker { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl Future for CompilerWorker { + type Output = Result<(), ErrBox>; + + fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll { + let inner = self.get_mut(); + inner.0.poll_unpin(cx) + } +} + +lazy_static! { + static ref CHECK_JS_RE: Regex = + Regex::new(r#""checkJs"\s*?:\s*?true"#).unwrap(); +} + +#[derive(Clone)] +pub enum TargetLib { + Main, + Worker, +} + +/// Struct which represents the state of the compiler +/// configuration where the first is canonical name for the configuration file, +/// second is a vector of the bytes of the contents of the configuration file, +/// third is bytes of the hash of contents. +#[derive(Clone)] +pub struct CompilerConfig { + pub path: Option, + pub content: Option>, + pub hash: Vec, + pub compile_js: bool, +} + +impl CompilerConfig { + /// Take the passed flag and resolve the file name relative to the cwd. 
+ pub fn load(config_path: Option) -> Result { + let config_file = match &config_path { + Some(config_file_name) => { + debug!("Compiler config file: {}", config_file_name); + let cwd = std::env::current_dir().unwrap(); + Some(cwd.join(config_file_name)) + } + _ => None, + }; + + // Convert the PathBuf to a canonicalized string. This is needed by the + // compiler to properly deal with the configuration. + let config_path = match &config_file { + Some(config_file) => Some(config_file.canonicalize().map_err(|_| { + io::Error::new( + io::ErrorKind::InvalidInput, + format!( + "Could not find the config file: {}", + config_file.to_string_lossy() + ), + ) + })), + _ => None, + }; + + // Load the contents of the configuration file + let config = match &config_file { + Some(config_file) => { + debug!("Attempt to load config: {}", config_file.to_str().unwrap()); + let config = fs::read(&config_file)?; + Some(config) + } + _ => None, + }; + + let config_hash = match &config { + Some(bytes) => bytes.clone(), + _ => b"".to_vec(), + }; + + // If `checkJs` is set to true in `compilerOptions` then we're gonna be compiling + // JavaScript files as well + let compile_js = if let Some(config_content) = config.clone() { + let config_str = std::str::from_utf8(&config_content)?; + CHECK_JS_RE.is_match(config_str) + } else { + false + }; + + let ts_config = Self { + path: config_path.unwrap_or_else(|| Ok(PathBuf::new())).ok(), + content: config, + hash: config_hash, + compile_js, + }; + + Ok(ts_config) + } +} + +/// Information associated with compiled file in cache. +/// Includes source code path and state hash. +/// version_hash is used to validate versions of the file +/// and could be used to remove stale file in cache. +pub struct CompiledFileMetadata { + pub source_path: PathBuf, + pub version_hash: String, +} + +static SOURCE_PATH: &str = "source_path"; +static VERSION_HASH: &str = "version_hash"; + +impl CompiledFileMetadata { + pub fn from_json_string(metadata_string: String) -> Option { + // TODO: use serde for deserialization + let maybe_metadata_json: serde_json::Result = + serde_json::from_str(&metadata_string); + + if let Ok(metadata_json) = maybe_metadata_json { + let source_path = metadata_json[SOURCE_PATH].as_str().map(PathBuf::from); + let version_hash = metadata_json[VERSION_HASH].as_str().map(String::from); + + if source_path.is_none() || version_hash.is_none() { + return None; + } + + return Some(CompiledFileMetadata { + source_path: source_path.unwrap(), + version_hash: version_hash.unwrap(), + }); + } + + None + } + + pub fn to_json_string(&self) -> Result { + let mut value_map = serde_json::map::Map::new(); + + value_map.insert(SOURCE_PATH.to_owned(), json!(&self.source_path)); + value_map.insert(VERSION_HASH.to_string(), json!(&self.version_hash)); + serde_json::to_string(&value_map) + } +} +/// Creates the JSON message send to compiler.ts's onmessage. 
+fn req( + request_type: msg::CompilerRequestType, + root_names: Vec, + compiler_config: CompilerConfig, + target: &str, + bundle: bool, + unstable: bool, +) -> Buf { + let cwd = std::env::current_dir().unwrap(); + let j = match (compiler_config.path, compiler_config.content) { + (Some(config_path), Some(config_data)) => json!({ + "type": request_type as i32, + "target": target, + "rootNames": root_names, + "bundle": bundle, + "unstable": unstable, + "configPath": config_path, + "config": str::from_utf8(&config_data).unwrap(), + "cwd": cwd, + }), + _ => json!({ + "type": request_type as i32, + "target": target, + "rootNames": root_names, + "bundle": bundle, + "unstable": unstable, + "cwd": cwd, + }), + }; + + j.to_string().into_boxed_str().into_boxed_bytes() +} + +/// Emit a SHA256 hash based on source code, deno version and TS config. +/// Used to check if a recompilation for source code is needed. +pub fn source_code_version_hash( + source_code: &[u8], + version: &str, + config_hash: &[u8], +) -> String { + crate::checksum::gen(vec![source_code, version.as_bytes(), config_hash]) +} + +pub struct TsCompilerInner { + pub file_fetcher: SourceFileFetcher, + pub config: CompilerConfig, + pub disk_cache: DiskCache, + /// Set of all URLs that have been compiled. This prevents double + /// compilation of module. + pub compiled: Mutex>, + /// This setting is controlled by `--reload` flag. Unless the flag + /// is provided disk cache is used. + pub use_disk_cache: bool, + /// This setting is controlled by `compilerOptions.checkJs` + pub compile_js: bool, +} + +#[derive(Clone)] +pub struct TsCompiler(Arc); + +impl Deref for TsCompiler { + type Target = TsCompilerInner; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct EmittedSource { + filename: String, + contents: String, +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct BundleResponse { + diagnostics: Diagnostic, + bundle_output: String, +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct CompileResponse { + diagnostics: Diagnostic, + emit_map: HashMap, +} + +// TODO(bartlomieju): possible deduplicate once TS refactor is stabilized +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +#[allow(unused)] +struct RuntimeBundleResponse { + diagnostics: Vec, + output: String, +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct RuntimeCompileResponse { + diagnostics: Vec, + emit_map: HashMap, +} + +impl TsCompiler { + pub fn new( + file_fetcher: SourceFileFetcher, + disk_cache: DiskCache, + use_disk_cache: bool, + config_path: Option, + ) -> Result { + let config = CompilerConfig::load(config_path)?; + Ok(TsCompiler(Arc::new(TsCompilerInner { + file_fetcher, + disk_cache, + compile_js: config.compile_js, + config, + compiled: Mutex::new(HashSet::new()), + use_disk_cache, + }))) + } + + /// Create a new V8 worker with snapshot of TS compiler and setup compiler's + /// runtime. + fn setup_worker(global_state: GlobalState) -> CompilerWorker { + let entry_point = + ModuleSpecifier::resolve_url_or_path("./__$deno$ts_compiler.ts").unwrap(); + let worker_state = + State::new(global_state.clone(), None, entry_point, DebugType::Internal) + .expect("Unable to create worker state"); + + // Count how many times we start the compiler worker. 
+ global_state.compiler_starts.fetch_add(1, Ordering::SeqCst); + + let mut worker = CompilerWorker::new( + "TS".to_string(), + startup_data::compiler_isolate_init(), + worker_state, + ); + worker.execute("bootstrap.tsCompilerRuntime()").unwrap(); + worker + } + + pub async fn bundle( + &self, + global_state: GlobalState, + module_name: String, + out_file: Option, + ) -> Result<(), ErrBox> { + debug!( + "Invoking the compiler to bundle. module_name: {}", + module_name + ); + eprintln!("Bundling {}", module_name); + + let root_names = vec![module_name]; + let req_msg = req( + msg::CompilerRequestType::Compile, + root_names, + self.config.clone(), + "main", + true, + global_state.flags.unstable, + ); + + let msg = execute_in_thread(global_state.clone(), req_msg).await?; + let json_str = std::str::from_utf8(&msg).unwrap(); + debug!("Message: {}", json_str); + + let bundle_response: BundleResponse = serde_json::from_str(json_str)?; + + if !bundle_response.diagnostics.items.is_empty() { + return Err(ErrBox::from(bundle_response.diagnostics)); + } + + let output_string = fmt::format_text(&bundle_response.bundle_output)?; + + if let Some(out_file_) = out_file.as_ref() { + eprintln!("Emitting bundle to {:?}", out_file_); + + let output_bytes = output_string.as_bytes(); + let output_len = output_bytes.len(); + + deno_fs::write_file(out_file_, output_bytes, 0o666)?; + // TODO(bartlomieju): add "humanFileSize" method + eprintln!("{} bytes emmited.", output_len); + } else { + println!("{}", output_string); + } + + Ok(()) + } + + /// Mark given module URL as compiled to avoid multiple compilations of same + /// module in single run. + fn mark_compiled(&self, url: &Url) { + let mut c = self.compiled.lock().unwrap(); + c.insert(url.clone()); + } + + /// Check if given module URL has already been compiled and can be fetched + /// directly from disk. + fn has_compiled(&self, url: &Url) -> bool { + let c = self.compiled.lock().unwrap(); + c.contains(url) + } + + /// Asynchronously compile module and all it's dependencies. + /// + /// This method compiled every module at most once. + /// + /// If `--reload` flag was provided then compiler will not on-disk cache and + /// force recompilation. + /// + /// If compilation is required then new V8 worker is spawned with fresh TS + /// compiler. + pub async fn compile( + &self, + global_state: GlobalState, + source_file: &SourceFile, + target: TargetLib, + ) -> Result { + if self.has_compiled(&source_file.url) { + return self.get_compiled_module(&source_file.url); + } + + if self.use_disk_cache { + // Try to load cached version: + // 1. check if there's 'meta' file + if let Some(metadata) = self.get_metadata(&source_file.url) { + // 2. 
compare version hashes + // TODO: it would probably be good idea to make it method implemented on SourceFile + let version_hash_to_validate = source_code_version_hash( + &source_file.source_code, + version::DENO, + &self.config.hash, + ); + + if metadata.version_hash == version_hash_to_validate { + debug!("load_cache metadata version hash match"); + if let Ok(compiled_module) = + self.get_compiled_module(&source_file.url) + { + self.mark_compiled(&source_file.url); + return Ok(compiled_module); + } + } + } + } + let source_file_ = source_file.clone(); + let module_url = source_file.url.clone(); + let target = match target { + TargetLib::Main => "main", + TargetLib::Worker => "worker", + }; + let root_names = vec![module_url.to_string()]; + let req_msg = req( + msg::CompilerRequestType::Compile, + root_names, + self.config.clone(), + target, + false, + global_state.flags.unstable, + ); + + let ts_compiler = self.clone(); + + info!( + "{} {}", + colors::green("Compile".to_string()), + module_url.to_string() + ); + + let msg = execute_in_thread(global_state.clone(), req_msg).await?; + let json_str = std::str::from_utf8(&msg).unwrap(); + + let compile_response: CompileResponse = serde_json::from_str(json_str)?; + + if !compile_response.diagnostics.items.is_empty() { + return Err(ErrBox::from(compile_response.diagnostics)); + } + + self.cache_emitted_files(compile_response.emit_map)?; + ts_compiler.get_compiled_module(&source_file_.url) + } + + /// Get associated `CompiledFileMetadata` for given module if it exists. + pub fn get_metadata(&self, url: &Url) -> Option { + // Try to load cached version: + // 1. check if there's 'meta' file + let cache_key = self + .disk_cache + .get_cache_filename_with_extension(url, "meta"); + if let Ok(metadata_bytes) = self.disk_cache.get(&cache_key) { + if let Ok(metadata) = std::str::from_utf8(&metadata_bytes) { + if let Some(read_metadata) = + CompiledFileMetadata::from_json_string(metadata.to_string()) + { + return Some(read_metadata); + } + } + } + + None + } + + fn cache_emitted_files( + &self, + emit_map: HashMap, + ) -> std::io::Result<()> { + for (emitted_name, source) in emit_map.iter() { + let specifier = ModuleSpecifier::resolve_url(&source.filename) + .expect("Should be a valid module specifier"); + + if emitted_name.ends_with(".map") { + self.cache_source_map(&specifier, &source.contents)?; + } else if emitted_name.ends_with(".js") { + self.cache_compiled_file(&specifier, &source.contents)?; + } else { + panic!("Trying to cache unknown file type {}", emitted_name); + } + } + + Ok(()) + } + + pub fn get_compiled_module( + &self, + module_url: &Url, + ) -> Result { + let compiled_source_file = self.get_compiled_source_file(module_url)?; + + let compiled_module = CompiledModule { + code: str::from_utf8(&compiled_source_file.source_code) + .unwrap() + .to_string(), + name: module_url.to_string(), + }; + + Ok(compiled_module) + } + + /// Return compiled JS file for given TS module. 
+ // TODO: ideally we shouldn't construct SourceFile by hand, but it should be delegated to + // SourceFileFetcher + pub fn get_compiled_source_file( + &self, + module_url: &Url, + ) -> Result { + let cache_key = self + .disk_cache + .get_cache_filename_with_extension(&module_url, "js"); + let compiled_code = self.disk_cache.get(&cache_key)?; + let compiled_code_filename = self.disk_cache.location.join(cache_key); + debug!("compiled filename: {:?}", compiled_code_filename); + + let compiled_module = SourceFile { + url: module_url.clone(), + filename: compiled_code_filename, + media_type: msg::MediaType::JavaScript, + source_code: compiled_code, + types_url: None, + }; + + Ok(compiled_module) + } + + /// Save compiled JS file for given TS module to on-disk cache. + /// + /// Along compiled file a special metadata file is saved as well containing + /// hash that can be validated to avoid unnecessary recompilation. + fn cache_compiled_file( + &self, + module_specifier: &ModuleSpecifier, + contents: &str, + ) -> std::io::Result<()> { + let source_file = self + .file_fetcher + .fetch_cached_source_file(&module_specifier) + .expect("Source file not found"); + + // NOTE: JavaScript files are only cached to disk if `checkJs` + // option in on + if source_file.media_type == msg::MediaType::JavaScript && !self.compile_js + { + return Ok(()); + } + + let js_key = self + .disk_cache + .get_cache_filename_with_extension(module_specifier.as_url(), "js"); + self.disk_cache.set(&js_key, contents.as_bytes())?; + self.mark_compiled(module_specifier.as_url()); + + let version_hash = source_code_version_hash( + &source_file.source_code, + version::DENO, + &self.config.hash, + ); + + let compiled_file_metadata = CompiledFileMetadata { + source_path: source_file.filename, + version_hash, + }; + let meta_key = self + .disk_cache + .get_cache_filename_with_extension(module_specifier.as_url(), "meta"); + self.disk_cache.set( + &meta_key, + compiled_file_metadata.to_json_string()?.as_bytes(), + ) + } + + /// Return associated source map file for given TS module. + // TODO: ideally we shouldn't construct SourceFile by hand, but it should be delegated to + // SourceFileFetcher + pub fn get_source_map_file( + &self, + module_specifier: &ModuleSpecifier, + ) -> Result { + let cache_key = self + .disk_cache + .get_cache_filename_with_extension(module_specifier.as_url(), "js.map"); + let source_code = self.disk_cache.get(&cache_key)?; + let source_map_filename = self.disk_cache.location.join(cache_key); + debug!("source map filename: {:?}", source_map_filename); + + let source_map_file = SourceFile { + url: module_specifier.as_url().to_owned(), + filename: source_map_filename, + media_type: msg::MediaType::JavaScript, + source_code, + types_url: None, + }; + + Ok(source_map_file) + } + + /// Save source map file for given TS module to on-disk cache. 
+ fn cache_source_map( + &self, + module_specifier: &ModuleSpecifier, + contents: &str, + ) -> std::io::Result<()> { + let source_file = self + .file_fetcher + .fetch_cached_source_file(&module_specifier) + .expect("Source file not found"); + + // NOTE: JavaScript files are only cached to disk if `checkJs` + // option in on + if source_file.media_type == msg::MediaType::JavaScript && !self.compile_js + { + return Ok(()); + } + + let source_map_key = self + .disk_cache + .get_cache_filename_with_extension(module_specifier.as_url(), "js.map"); + self.disk_cache.set(&source_map_key, contents.as_bytes()) + } +} + +impl SourceMapGetter for TsCompiler { + fn get_source_map(&self, script_name: &str) -> Option> { + self + .try_to_resolve_and_get_source_map(script_name) + .map(|out| out.source_code) + } + + fn get_source_line(&self, script_name: &str, line: usize) -> Option { + self + .try_resolve_and_get_source_file(script_name) + .and_then(|out| { + str::from_utf8(&out.source_code).ok().and_then(|v| { + // Do NOT use .lines(): it skips the terminating empty line. + // (due to internally using .split_terminator() instead of .split()) + let lines: Vec<&str> = v.split('\n').collect(); + assert!(lines.len() > line); + Some(lines[line].to_string()) + }) + }) + } +} + +// `SourceMapGetter` related methods +impl TsCompiler { + fn try_to_resolve(&self, script_name: &str) -> Option { + // if `script_name` can't be resolved to ModuleSpecifier it's probably internal + // script (like `gen/cli/bundle/compiler.js`) so we won't be + // able to get source for it anyway + ModuleSpecifier::resolve_url(script_name).ok() + } + + fn try_resolve_and_get_source_file( + &self, + script_name: &str, + ) -> Option { + if let Some(module_specifier) = self.try_to_resolve(script_name) { + return self + .file_fetcher + .fetch_cached_source_file(&module_specifier); + } + + None + } + + fn try_to_resolve_and_get_source_map( + &self, + script_name: &str, + ) -> Option { + if let Some(module_specifier) = self.try_to_resolve(script_name) { + return match self.get_source_map_file(&module_specifier) { + Ok(out) => Some(out), + Err(_) => None, + }; + } + + None + } +} + +async fn execute_in_thread( + global_state: GlobalState, + req: Buf, +) -> Result { + let (handle_sender, handle_receiver) = + std::sync::mpsc::sync_channel::>(1); + let builder = + std::thread::Builder::new().name("deno-ts-compiler".to_string()); + let join_handle = builder.spawn(move || { + let worker = TsCompiler::setup_worker(global_state.clone()); + handle_sender.send(Ok(worker.thread_safe_handle())).unwrap(); + drop(handle_sender); + tokio_util::run_basic(worker).expect("Panic in event loop"); + })?; + let handle = handle_receiver.recv().unwrap()?; + handle.post_message(req)?; + let event = handle.get_event().await.expect("Compiler didn't respond"); + let buf = match event { + WorkerEvent::Message(buf) => Ok(buf), + WorkerEvent::Error(error) => Err(error), + WorkerEvent::TerminalError(error) => Err(error), + }?; + // Shutdown worker and wait for thread to finish + handle.terminate(); + join_handle.join().unwrap(); + Ok(buf) +} + +/// This function is used by `Deno.compile()` and `Deno.bundle()` APIs. 
+pub async fn runtime_compile( + global_state: GlobalState, + root_name: &str, + sources: &Option>, + bundle: bool, + options: &Option, +) -> Result { + let req_msg = json!({ + "type": msg::CompilerRequestType::RuntimeCompile as i32, + "target": "runtime", + "rootName": root_name, + "sources": sources, + "options": options, + "bundle": bundle, + "unstable": global_state.flags.unstable, + }) + .to_string() + .into_boxed_str() + .into_boxed_bytes(); + + let compiler = global_state.ts_compiler.clone(); + + let msg = execute_in_thread(global_state, req_msg).await?; + let json_str = std::str::from_utf8(&msg).unwrap(); + + // TODO(bartlomieju): factor `bundle` path into separate function `runtime_bundle` + if bundle { + let _response: RuntimeBundleResponse = serde_json::from_str(json_str)?; + return Ok(serde_json::from_str::(json_str).unwrap()); + } + + let response: RuntimeCompileResponse = serde_json::from_str(json_str)?; + + if response.diagnostics.is_empty() && sources.is_none() { + compiler.cache_emitted_files(response.emit_map)?; + } + + // We're returning `Ok()` instead of `Err()` because it's not runtime + // error if there were diagnostics produces; we want to let user handle + // diagnostics in the runtime. + Ok(serde_json::from_str::(json_str).unwrap()) +} + +/// This function is used by `Deno.transpileOnly()` API. +pub async fn runtime_transpile( + global_state: GlobalState, + sources: &HashMap, + options: &Option, +) -> Result { + let req_msg = json!({ + "type": msg::CompilerRequestType::RuntimeTranspile as i32, + "sources": sources, + "options": options, + }) + .to_string() + .into_boxed_str() + .into_boxed_bytes(); + + let msg = execute_in_thread(global_state, req_msg).await?; + let json_str = std::str::from_utf8(&msg).unwrap(); + let v = serde_json::from_str::(json_str) + .expect("Error decoding JSON string."); + Ok(v) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::fs as deno_fs; + use deno_core::ModuleSpecifier; + use std::path::PathBuf; + use tempfile::TempDir; + + #[tokio::test] + async fn test_compile() { + let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .parent() + .unwrap() + .join("cli/tests/002_hello.ts"); + let specifier = + ModuleSpecifier::resolve_url_or_path(p.to_str().unwrap()).unwrap(); + let out = SourceFile { + url: specifier.as_url().clone(), + filename: PathBuf::from(p.to_str().unwrap().to_string()), + media_type: msg::MediaType::TypeScript, + source_code: include_bytes!("./tests/002_hello.ts").to_vec(), + types_url: None, + }; + let mock_state = + GlobalState::mock(vec![String::from("deno"), String::from("hello.js")]); + let result = mock_state + .ts_compiler + .compile(mock_state.clone(), &out, TargetLib::Main) + .await; + assert!(result.is_ok()); + assert!(result + .unwrap() + .code + .as_bytes() + .starts_with(b"\"use strict\";\nconsole.log(\"Hello World\");")); + } + + #[tokio::test] + async fn test_bundle() { + let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .parent() + .unwrap() + .join("cli/tests/002_hello.ts"); + use deno_core::ModuleSpecifier; + let module_name = ModuleSpecifier::resolve_url_or_path(p.to_str().unwrap()) + .unwrap() + .to_string(); + + let state = GlobalState::mock(vec![ + String::from("deno"), + p.to_string_lossy().into(), + String::from("$deno$/bundle.js"), + ]); + + let result = state + .ts_compiler + .bundle(state.clone(), module_name, None) + .await; + assert!(result.is_ok()); + } + + #[test] + fn test_source_code_version_hash() { + assert_eq!( + 
"0185b42de0686b4c93c314daaa8dee159f768a9e9a336c2a5e3d5b8ca6c4208c", + source_code_version_hash(b"1+2", "0.4.0", b"{}") + ); + // Different source_code should result in different hash. + assert_eq!( + "e58631f1b6b6ce2b300b133ec2ad16a8a5ba6b7ecf812a8c06e59056638571ac", + source_code_version_hash(b"1", "0.4.0", b"{}") + ); + // Different version should result in different hash. + assert_eq!( + "307e6200347a88dbbada453102deb91c12939c65494e987d2d8978f6609b5633", + source_code_version_hash(b"1", "0.1.0", b"{}") + ); + // Different config should result in different hash. + assert_eq!( + "195eaf104a591d1d7f69fc169c60a41959c2b7a21373cd23a8f675f877ec385f", + source_code_version_hash(b"1", "0.4.0", b"{\"compilerOptions\": {}}") + ); + } + + #[test] + fn test_compile_js() { + let temp_dir = TempDir::new().expect("tempdir fail"); + let temp_dir_path = temp_dir.path(); + + let test_cases = vec![ + // valid JSON + (r#"{ "compilerOptions": { "checkJs": true } } "#, true), + // JSON with comment + ( + r#"{ "compilerOptions": { // force .js file compilation by Deno "checkJs": true } } "#, + true, + ), + // invalid JSON + (r#"{ "compilerOptions": { "checkJs": true },{ } "#, true), + // without content + ("", false), + ]; + + let path = temp_dir_path.join("tsconfig.json"); + let path_str = path.to_str().unwrap().to_string(); + + for (json_str, expected) in test_cases { + deno_fs::write_file(&path, json_str.as_bytes(), 0o666).unwrap(); + let config = CompilerConfig::load(Some(path_str.clone())).unwrap(); + assert_eq!(config.compile_js, expected); + } + } + + #[test] + fn test_compiler_config_load() { + let temp_dir = TempDir::new().expect("tempdir fail"); + let temp_dir_path = temp_dir.path(); + let path = temp_dir_path.join("doesnotexist.json"); + let path_str = path.to_str().unwrap().to_string(); + let res = CompilerConfig::load(Some(path_str)); + assert!(res.is_err()); + } +} -- cgit v1.2.3