-rw-r--r--  cli/compilers/js.rs                                   25
-rw-r--r--  cli/compilers/json.rs                                 26
-rw-r--r--  cli/compilers/mod.rs                                  20
-rw-r--r--  cli/compilers/ts.rs (renamed from cli/compiler.rs)   255
-rw-r--r--  cli/deno_dir.rs                                        2
-rw-r--r--  cli/disk_cache.rs                                      4
-rw-r--r--  cli/file_fetcher.rs                                   21
-rw-r--r--  cli/main.rs                                           28
-rw-r--r--  cli/state.rs                                         115
-rw-r--r--  js/compiler.ts                                        25
-rw-r--r--  tests/038_checkjs.js                                   6
-rw-r--r--  tests/038_checkjs.js.out                              15
-rw-r--r--  tests/038_checkjs.test                                 5
-rw-r--r--  tests/038_checkjs.tsconfig.json                        5
14 files changed, 353 insertions, 199 deletions
diff --git a/cli/compilers/js.rs b/cli/compilers/js.rs
new file mode 100644
index 000000000..56c9b672e
--- /dev/null
+++ b/cli/compilers/js.rs
@@ -0,0 +1,25 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use crate::compilers::CompiledModule;
+use crate::compilers::CompiledModuleFuture;
+use crate::file_fetcher::SourceFile;
+use crate::state::ThreadSafeState;
+use std::str;
+
+pub struct JsCompiler {}
+
+impl JsCompiler {
+ pub fn compile_async(
+ self: &Self,
+ _state: ThreadSafeState,
+ source_file: &SourceFile,
+ ) -> Box<CompiledModuleFuture> {
+ let module = CompiledModule {
+ code: str::from_utf8(&source_file.source_code)
+ .unwrap()
+ .to_string(),
+ name: source_file.url.to_string(),
+ };
+
+ Box::new(futures::future::ok(module))
+ }
+}
diff --git a/cli/compilers/json.rs b/cli/compilers/json.rs
new file mode 100644
index 000000000..57e44d354
--- /dev/null
+++ b/cli/compilers/json.rs
@@ -0,0 +1,26 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use crate::compilers::CompiledModule;
+use crate::compilers::CompiledModuleFuture;
+use crate::file_fetcher::SourceFile;
+use crate::state::ThreadSafeState;
+use std::str;
+
+pub struct JsonCompiler {}
+
+impl JsonCompiler {
+ pub fn compile_async(
+ self: &Self,
+ _state: ThreadSafeState,
+ source_file: &SourceFile,
+ ) -> Box<CompiledModuleFuture> {
+ let module = CompiledModule {
+ code: format!(
+ "export default {};",
+ str::from_utf8(&source_file.source_code).unwrap()
+ ),
+ name: source_file.url.to_string(),
+ };
+
+ Box::new(futures::future::ok(module))
+ }
+}
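
For context, the JsonCompiler above turns a .json source into an ES module simply by prefixing the raw JSON text with `export default`. A minimal standalone sketch of that transform, using only the Rust standard library (`wrap_json_module` is a hypothetical helper name, not part of this change):

// Minimal sketch of the JSON-to-ES-module transform performed by JsonCompiler.
// `wrap_json_module` is a hypothetical helper, not part of the Deno codebase.
fn wrap_json_module(source_bytes: &[u8]) -> Result<String, std::str::Utf8Error> {
    let json_text = std::str::from_utf8(source_bytes)?;
    Ok(format!("export default {};", json_text))
}

fn main() {
    let source = br#"{ "name": "deno", "checkJs": true }"#;
    let module = wrap_json_module(source).unwrap();
    assert_eq!(module, r#"export default { "name": "deno", "checkJs": true };"#);
}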
diff --git a/cli/compilers/mod.rs b/cli/compilers/mod.rs
new file mode 100644
index 000000000..fdc18d2bc
--- /dev/null
+++ b/cli/compilers/mod.rs
@@ -0,0 +1,20 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use deno::ErrBox;
+use futures::Future;
+
+mod js;
+mod json;
+mod ts;
+
+pub use js::JsCompiler;
+pub use json::JsonCompiler;
+pub use ts::TsCompiler;
+
+#[derive(Debug, Clone)]
+pub struct CompiledModule {
+ pub code: String,
+ pub name: String,
+}
+
+pub type CompiledModuleFuture =
+ dyn Future<Item = CompiledModule, Error = ErrBox> + Send;
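
The new cli/compilers module defines a single output type, CompiledModule, plus a boxed futures-0.1 trait object that every compiler's compile_async returns. A standalone sketch of that interface shape, assuming the futures 0.1 crate and substituting std::io::Error for Deno's internal ErrBox (PassThroughCompiler is a hypothetical stand-in for JsCompiler):

// Standalone sketch of the compiler interface defined in cli/compilers/mod.rs:
// each compiler returns a boxed futures-0.1 future resolving to a CompiledModule.
// Assumes futures = "0.1"; std::io::Error stands in for Deno's ErrBox here.
use futures::Future;

#[derive(Debug, Clone)]
pub struct CompiledModule {
    pub code: String,
    pub name: String,
}

pub type CompiledModuleFuture =
    dyn Future<Item = CompiledModule, Error = std::io::Error> + Send;

// Hypothetical pass-through compiler, analogous to JsCompiler above.
struct PassThroughCompiler;

impl PassThroughCompiler {
    fn compile_async(&self, name: &str, code: &str) -> Box<CompiledModuleFuture> {
        Box::new(futures::future::ok(CompiledModule {
            code: code.to_string(),
            name: name.to_string(),
        }))
    }
}

fn main() {
    let compiler = PassThroughCompiler;
    let module = compiler
        .compile_async("file:///example.js", "console.log(1);")
        .wait() // futures 0.1: block on the future, for demonstration only
        .unwrap();
    assert_eq!(module.name, "file:///example.js");
    assert_eq!(module.code, "console.log(1);");
}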
diff --git a/cli/compiler.rs b/cli/compilers/ts.rs
index f90337d02..bbfe33461 100644
--- a/cli/compiler.rs
+++ b/cli/compilers/ts.rs
@@ -1,9 +1,13 @@
// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use crate::compilers::CompiledModule;
+use crate::compilers::CompiledModuleFuture;
+use crate::deno_error::DenoError;
use crate::diagnostics::Diagnostic;
use crate::disk_cache::DiskCache;
use crate::file_fetcher::SourceFile;
use crate::file_fetcher::SourceFileFetcher;
use crate::msg;
+use crate::msg::ErrorKind;
use crate::resources;
use crate::source_maps::SourceMapGetter;
use crate::startup_data;
@@ -13,7 +17,6 @@ use crate::worker::Worker;
use deno::Buf;
use deno::ErrBox;
use deno::ModuleSpecifier;
-use futures::future::Either;
use futures::Future;
use futures::Stream;
use ring;
@@ -26,10 +29,78 @@ use std::sync::atomic::Ordering;
use std::sync::Mutex;
use url::Url;
-/// Optional tuple which represents the state of the compiler
-/// configuration where the first is canonical name for the configuration file
-/// and a vector of the bytes of the contents of the configuration file.
-type CompilerConfig = Option<(PathBuf, Vec<u8>)>;
+/// Struct that represents the state of the compiler configuration:
+/// the canonical path of the configuration file, the bytes of its
+/// contents, and a hash of those contents.
+#[derive(Clone)]
+pub struct CompilerConfig {
+ pub path: Option<PathBuf>,
+ pub content: Option<Vec<u8>>,
+ pub hash: Vec<u8>,
+}
+
+impl CompilerConfig {
+ /// Take the passed flag and resolve the file name relative to the cwd.
+ pub fn load(config_path: Option<String>) -> Result<Self, ErrBox> {
+ let config_file = match &config_path {
+ Some(config_file_name) => {
+ debug!("Compiler config file: {}", config_file_name);
+ let cwd = std::env::current_dir().unwrap();
+ Some(cwd.join(config_file_name))
+ }
+ _ => None,
+ };
+
+ // Canonicalize the path. The compiler needs the resolved path to deal
+ // with the configuration properly.
+ let config_path = match &config_file {
+ Some(config_file) => Some(config_file.canonicalize().unwrap().to_owned()),
+ _ => None,
+ };
+
+ // Load the contents of the configuration file
+ let config = match &config_file {
+ Some(config_file) => {
+ debug!("Attempt to load config: {}", config_file.to_str().unwrap());
+ let config = fs::read(&config_file)?;
+ Some(config)
+ }
+ _ => None,
+ };
+
+ let config_hash = match &config {
+ Some(bytes) => bytes.clone(),
+ _ => b"".to_vec(),
+ };
+
+ let ts_config = Self {
+ path: config_path,
+ content: config,
+ hash: config_hash,
+ };
+
+ Ok(ts_config)
+ }
+
+ pub fn json(self: &Self) -> Result<serde_json::Value, ErrBox> {
+ if self.content.is_none() {
+ return Ok(serde_json::Value::Null);
+ }
+
+ let bytes = self.content.clone().unwrap();
+ let json_string = std::str::from_utf8(&bytes)?;
+ match serde_json::from_str(&json_string) {
+ Ok(json_map) => Ok(json_map),
+ Err(_) => Err(
+ DenoError::new(
+ ErrorKind::InvalidInput,
+ "Compiler config is not a valid JSON".to_string(),
+ ).into(),
+ ),
+ }
+ }
+}
/// Information associated with compiled file in cache.
/// Includes source code path and state hash.
@@ -80,19 +151,19 @@ fn req(
compiler_config: CompilerConfig,
bundle: Option<String>,
) -> Buf {
- let j = if let Some((config_path, config_data)) = compiler_config {
- json!({
- "rootNames": root_names,
- "bundle": bundle,
- "configPath": config_path,
- "config": str::from_utf8(&config_data).unwrap(),
- })
- } else {
- json!({
- "rootNames": root_names,
- "bundle": bundle,
- })
+ let j = match (compiler_config.path, compiler_config.content) {
+ (Some(config_path), Some(config_data)) => json!({
+ "rootNames": root_names,
+ "bundle": bundle,
+ "configPath": config_path,
+ "config": str::from_utf8(&config_data).unwrap(),
+ }),
+ _ => json!({
+ "rootNames": root_names,
+ "bundle": bundle,
+ }),
};
+
j.to_string().into_boxed_str().into_boxed_bytes()
}
@@ -120,48 +191,9 @@ pub fn source_code_version_hash(
gen_hash(vec![source_code, version.as_bytes(), config_hash])
}
-fn load_config_file(
- config_path: Option<String>,
-) -> (Option<PathBuf>, Option<Vec<u8>>) {
- // take the passed flag and resolve the file name relative to the cwd
- let config_file = match &config_path {
- Some(config_file_name) => {
- debug!("Compiler config file: {}", config_file_name);
- let cwd = std::env::current_dir().unwrap();
- Some(cwd.join(config_file_name))
- }
- _ => None,
- };
-
- // Convert the PathBuf to a canonicalized string. This is needed by the
- // compiler to properly deal with the configuration.
- let config_path = match &config_file {
- Some(config_file) => Some(config_file.canonicalize().unwrap().to_owned()),
- _ => None,
- };
-
- // Load the contents of the configuration file
- let config = match &config_file {
- Some(config_file) => {
- debug!("Attempt to load config: {}", config_file.to_str().unwrap());
- match fs::read(&config_file) {
- Ok(config_data) => Some(config_data.to_owned()),
- _ => panic!(
- "Error retrieving compiler config file at \"{}\"",
- config_file.to_str().unwrap()
- ),
- }
- }
- _ => None,
- };
-
- (config_path, config)
-}
-
pub struct TsCompiler {
pub file_fetcher: SourceFileFetcher,
pub config: CompilerConfig,
- pub config_hash: Vec<u8>,
pub disk_cache: DiskCache,
/// Set of all URLs that have been compiled. This prevents double
/// compilation of module.
@@ -169,6 +201,8 @@ pub struct TsCompiler {
/// This setting is controlled by `--reload` flag. Unless the flag
/// is provided disk cache is used.
pub use_disk_cache: bool,
+ /// This setting is controlled by `compilerOptions.checkJs`
+ pub compile_js: bool,
}
impl TsCompiler {
@@ -177,25 +211,30 @@ impl TsCompiler {
disk_cache: DiskCache,
use_disk_cache: bool,
config_path: Option<String>,
- ) -> Self {
- let compiler_config = match load_config_file(config_path) {
- (Some(config_path), Some(config)) => Some((config_path, config.to_vec())),
- _ => None,
- };
-
- let config_bytes = match &compiler_config {
- Some((_, config)) => config.clone(),
- _ => b"".to_vec(),
+ ) -> Result<Self, ErrBox> {
+ let config = CompilerConfig::load(config_path)?;
+
+ // If `checkJs` is set to true in `compilerOptions`, JavaScript files
+ // are compiled as well.
+ let config_json = config.json()?;
+ let compile_js = match &config_json.get("compilerOptions") {
+ Some(serde_json::Value::Object(m)) => match m.get("checkJs") {
+ Some(serde_json::Value::Bool(bool_)) => *bool_,
+ _ => false,
+ },
+ _ => false,
};
- Self {
+ let compiler = Self {
file_fetcher,
disk_cache,
- config: compiler_config,
- config_hash: config_bytes,
+ config,
compiled: Mutex::new(HashSet::new()),
use_disk_cache,
- }
+ compile_js,
+ };
+
+ Ok(compiler)
}
/// Create a new V8 worker with snapshot of TS compiler and setup compiler's runtime.
@@ -290,22 +329,12 @@ impl TsCompiler {
self: &Self,
state: ThreadSafeState,
source_file: &SourceFile,
- ) -> impl Future<Item = SourceFile, Error = ErrBox> {
- // TODO: maybe fetching of original SourceFile should be done here?
-
- if source_file.media_type != msg::MediaType::TypeScript {
- return Either::A(futures::future::ok(source_file.clone()));
- }
-
+ ) -> Box<CompiledModuleFuture> {
if self.has_compiled(&source_file.url) {
- match self.get_compiled_source_file(&source_file) {
- Ok(compiled_module) => {
- return Either::A(futures::future::ok(compiled_module));
- }
- Err(err) => {
- return Either::A(futures::future::err(err));
- }
- }
+ return match self.get_compiled_module(&source_file.url) {
+ Ok(compiled) => Box::new(futures::future::ok(compiled)),
+ Err(err) => Box::new(futures::future::err(err)),
+ };
}
if self.use_disk_cache {
@@ -317,20 +346,16 @@ impl TsCompiler {
let version_hash_to_validate = source_code_version_hash(
&source_file.source_code,
version::DENO,
- &self.config_hash,
+ &self.config.hash,
);
if metadata.version_hash == version_hash_to_validate {
debug!("load_cache metadata version hash match");
if let Ok(compiled_module) =
- self.get_compiled_source_file(&source_file)
+ self.get_compiled_module(&source_file.url)
{
- debug!(
- "found cached compiled module: {:?}",
- compiled_module.clone().filename
- );
- // TODO: store in in-process cache for subsequent access
- return Either::A(futures::future::ok(compiled_module));
+ self.mark_compiled(&source_file.url);
+ return Box::new(futures::future::ok(compiled_module));
}
}
}
@@ -388,19 +413,18 @@ impl TsCompiler {
}).and_then(move |_| {
// if we are this far it means compilation was successful and we can
// load the compiled file from disk
- // TODO: can this be somehow called using `self.`?
state_
.ts_compiler
- .get_compiled_source_file(&source_file_)
+ .get_compiled_module(&source_file_.url)
.map_err(|e| {
// TODO: this situation shouldn't happen
panic!("Expected to find compiled file: {}", e)
})
- }).and_then(move |source_file_after_compile| {
+ }).and_then(move |compiled_module| {
// Explicit drop to keep reference alive until future completes.
drop(compiling_job);
- Ok(source_file_after_compile)
+ Ok(compiled_module)
}).then(move |r| {
debug!(">>>>> compile_sync END");
// TODO(ry) do this in worker's destructor.
@@ -408,7 +432,7 @@ impl TsCompiler {
r
});
- Either::B(fut)
+ Box::new(fut)
}
/// Get associated `CompiledFileMetadata` for given module if it exists.
@@ -431,22 +455,38 @@ impl TsCompiler {
None
}
+ pub fn get_compiled_module(
+ self: &Self,
+ module_url: &Url,
+ ) -> Result<CompiledModule, ErrBox> {
+ let compiled_source_file = self.get_compiled_source_file(module_url)?;
+
+ let compiled_module = CompiledModule {
+ code: str::from_utf8(&compiled_source_file.source_code)
+ .unwrap()
+ .to_string(),
+ name: module_url.to_string(),
+ };
+
+ Ok(compiled_module)
+ }
+
/// Return compiled JS file for given TS module.
// TODO: ideally we shouldn't construct SourceFile by hand, but it should be delegated to
// SourceFileFetcher
pub fn get_compiled_source_file(
self: &Self,
- source_file: &SourceFile,
+ module_url: &Url,
) -> Result<SourceFile, ErrBox> {
let cache_key = self
.disk_cache
- .get_cache_filename_with_extension(&source_file.url, "js");
+ .get_cache_filename_with_extension(&module_url, "js");
let compiled_code = self.disk_cache.get(&cache_key)?;
let compiled_code_filename = self.disk_cache.location.join(cache_key);
debug!("compiled filename: {:?}", compiled_code_filename);
let compiled_module = SourceFile {
- url: source_file.url.clone(),
+ url: module_url.clone(),
filename: compiled_code_filename,
media_type: msg::MediaType::JavaScript,
source_code: compiled_code,
@@ -481,7 +521,7 @@ impl TsCompiler {
let version_hash = source_code_version_hash(
&source_file.source_code,
version::DENO,
- &self.config_hash,
+ &self.config.hash,
);
let compiled_file_metadata = CompiledFileMetadata {
@@ -619,7 +659,7 @@ mod tests {
self: &Self,
state: ThreadSafeState,
source_file: &SourceFile,
- ) -> Result<SourceFile, ErrBox> {
+ ) -> Result<CompiledModule, ErrBox> {
tokio_util::block_on(self.compile_async(state, source_file))
}
}
@@ -630,24 +670,25 @@ mod tests {
let specifier =
ModuleSpecifier::resolve_url_or_path("./tests/002_hello.ts").unwrap();
- let mut out = SourceFile {
+ let out = SourceFile {
url: specifier.as_url().clone(),
filename: PathBuf::from("/tests/002_hello.ts"),
media_type: msg::MediaType::TypeScript,
- source_code: include_bytes!("../tests/002_hello.ts").to_vec(),
+ source_code: include_bytes!("../../tests/002_hello.ts").to_vec(),
};
let mock_state = ThreadSafeState::mock(vec![
String::from("./deno"),
String::from("hello.js"),
]);
- out = mock_state
+ let compiled = mock_state
.ts_compiler
.compile_sync(mock_state.clone(), &out)
.unwrap();
assert!(
- out
- .source_code
+ compiled
+ .code
+ .as_bytes()
.starts_with("console.log(\"Hello World\");".as_bytes())
);
})
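
The checkJs detection added to TsCompiler::new above is a two-level lookup into the parsed tsconfig JSON. A standalone sketch of that lookup, assuming the serde_json crate:

// Standalone sketch of the `compilerOptions.checkJs` lookup performed in
// TsCompiler::new above. Assumes serde_json = "1".
fn check_js_enabled(config: &serde_json::Value) -> bool {
    match config.get("compilerOptions") {
        Some(serde_json::Value::Object(options)) => match options.get("checkJs") {
            Some(serde_json::Value::Bool(value)) => *value,
            _ => false,
        },
        _ => false,
    }
}

fn main() {
    // Mirrors tests/038_checkjs.tsconfig.json from this change.
    let config: serde_json::Value =
        serde_json::from_str(r#"{ "compilerOptions": { "checkJs": true } }"#).unwrap();
    assert!(check_js_enabled(&config));
    // An absent config (CompilerConfig::json returns Null) disables JS checking.
    assert!(!check_js_enabled(&serde_json::Value::Null));
}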
diff --git a/cli/deno_dir.rs b/cli/deno_dir.rs
index c600b06fc..ac35922eb 100644
--- a/cli/deno_dir.rs
+++ b/cli/deno_dir.rs
@@ -17,8 +17,6 @@ pub struct DenoDir {
}
impl DenoDir {
- // Must be called before using any function from this module.
- // https://github.com/denoland/deno/blob/golang/deno_dir.go#L99-L111
pub fn new(custom_root: Option<PathBuf>) -> std::io::Result<Self> {
// Only setup once.
let home_dir = dirs::home_dir().expect("Could not get home directory.");
diff --git a/cli/disk_cache.rs b/cli/disk_cache.rs
index 808cfe675..fdbe2cbd5 100644
--- a/cli/disk_cache.rs
+++ b/cli/disk_cache.rs
@@ -18,6 +18,10 @@ impl DiskCache {
}
}
+ // TODO(bartlomieju) this method is not working properly for Windows paths,
+ // Example: file:///C:/deno/js/unit_test_runner.ts
+ // would produce: C:deno\\js\\unit_test_runner.ts
+ // it should produce: file\deno\js\unit_test_runner.ts
pub fn get_cache_filename(self: &Self, url: &Url) -> PathBuf {
let mut out = PathBuf::new();
diff --git a/cli/file_fetcher.rs b/cli/file_fetcher.rs
index 656ecfff0..79d6ede00 100644
--- a/cli/file_fetcher.rs
+++ b/cli/file_fetcher.rs
@@ -39,27 +39,6 @@ pub struct SourceFile {
pub source_code: Vec<u8>,
}
-impl SourceFile {
- // TODO(bartlomieju): this method should be implemented on new `CompiledSourceFile`
- // trait and should be handled by "compiler pipeline"
- pub fn js_source(&self) -> String {
- if self.media_type == msg::MediaType::TypeScript {
- panic!("TypeScript module has no JS source, did you forget to run it through compiler?");
- }
-
- // TODO: this should be done by compiler and JS module should be returned
- if self.media_type == msg::MediaType::Json {
- return format!(
- "export default {};",
- str::from_utf8(&self.source_code).unwrap()
- );
- }
-
- // it's either JS or Unknown media type
- str::from_utf8(&self.source_code).unwrap().to_string()
- }
-}
-
pub type SourceFileFuture =
dyn Future<Item = SourceFile, Error = ErrBox> + Send;
diff --git a/cli/main.rs b/cli/main.rs
index 452cdfa65..fb34a2c76 100644
--- a/cli/main.rs
+++ b/cli/main.rs
@@ -16,7 +16,7 @@ extern crate rand;
extern crate url;
mod ansi;
-pub mod compiler;
+pub mod compilers;
pub mod deno_dir;
pub mod deno_error;
pub mod diagnostics;
@@ -99,6 +99,7 @@ fn js_check(r: Result<(), ErrBox>) {
}
}
+// TODO: we might want to rethink how this method works
pub fn print_file_info(
worker: Worker,
module_specifier: &ModuleSpecifier,
@@ -110,7 +111,7 @@ pub fn print_file_info(
.file_fetcher
.fetch_source_file_async(&module_specifier)
.map_err(|err| println!("{}", err))
- .and_then(move |out| {
+ .and_then(|out| {
println!(
"{} {}",
ansi::bold("local:".to_string()),
@@ -125,18 +126,25 @@ pub fn print_file_info(
state_
.clone()
- .ts_compiler
- .compile_async(state_.clone(), &out)
+ .fetch_compiled_module(&module_specifier_)
.map_err(|e| {
debug!("compiler error exiting!");
eprintln!("\n{}", e.to_string());
std::process::exit(1);
}).and_then(move |compiled| {
- if out.media_type == msg::MediaType::TypeScript {
+ if out.media_type == msg::MediaType::TypeScript
+ || (out.media_type == msg::MediaType::JavaScript
+ && state_.ts_compiler.compile_js)
+ {
+ let compiled_source_file = state_
+ .ts_compiler
+ .get_compiled_source_file(&out.url)
+ .unwrap();
+
println!(
"{} {}",
ansi::bold("compiled:".to_string()),
- compiled.filename.to_str().unwrap(),
+ compiled_source_file.filename.to_str().unwrap(),
);
}
@@ -152,12 +160,8 @@ pub fn print_file_info(
);
}
- if let Some(deps) = worker
- .state
- .modules
- .lock()
- .unwrap()
- .deps(&compiled.url.to_string())
+ if let Some(deps) =
+ worker.state.modules.lock().unwrap().deps(&compiled.name)
{
println!("{}{}", ansi::bold("deps:\n".to_string()), deps.name);
if let Some(ref depsdeps) = deps.deps {
diff --git a/cli/state.rs b/cli/state.rs
index cd7c1269d..047e2b7ed 100644
--- a/cli/state.rs
+++ b/cli/state.rs
@@ -1,11 +1,14 @@
// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
-use crate::compiler::TsCompiler;
+use crate::compilers::CompiledModule;
+use crate::compilers::JsCompiler;
+use crate::compilers::JsonCompiler;
+use crate::compilers::TsCompiler;
use crate::deno_dir;
-use crate::file_fetcher::SourceFile;
use crate::file_fetcher::SourceFileFetcher;
use crate::flags;
use crate::global_timer::GlobalTimer;
use crate::import_map::ImportMap;
+use crate::msg;
use crate::ops;
use crate::permissions::DenoPermissions;
use crate::progress::Progress;
@@ -26,6 +29,7 @@ use std;
use std::collections::HashMap;
use std::env;
use std::ops::Deref;
+use std::str;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use std::sync::Mutex;
@@ -77,6 +81,8 @@ pub struct State {
pub seeded_rng: Option<Mutex<StdRng>>,
pub file_fetcher: SourceFileFetcher,
+ pub js_compiler: JsCompiler,
+ pub json_compiler: JsonCompiler,
pub ts_compiler: TsCompiler,
}
@@ -103,28 +109,6 @@ impl ThreadSafeState {
}
}
-pub fn fetch_source_file_and_maybe_compile_async(
- state: &ThreadSafeState,
- module_specifier: &ModuleSpecifier,
-) -> impl Future<Item = SourceFile, Error = ErrBox> {
- let state_ = state.clone();
-
- state_
- .file_fetcher
- .fetch_source_file_async(&module_specifier)
- .and_then(move |out| {
- state_
- .clone()
- .ts_compiler
- .compile_async(state_.clone(), &out)
- .map_err(|e| {
- debug!("compiler error exiting!");
- eprintln!("\n{}", e.to_string());
- std::process::exit(1);
- })
- })
-}
-
impl Loader for ThreadSafeState {
fn resolve(
&self,
@@ -150,16 +134,14 @@ impl Loader for ThreadSafeState {
module_specifier: &ModuleSpecifier,
) -> Box<deno::SourceCodeInfoFuture> {
self.metrics.resolve_count.fetch_add(1, Ordering::SeqCst);
- Box::new(
- fetch_source_file_and_maybe_compile_async(self, module_specifier).map(
- |source_file| deno::SourceCodeInfo {
- // Real module name, might be different from initial specifier
- // due to redirections.
- code: source_file.js_source(),
- module_name: source_file.url.to_string(),
- },
- ),
- )
+ Box::new(self.fetch_compiled_module(module_specifier).map(
+ |compiled_module| deno::SourceCodeInfo {
+ // Real module name, might be different from initial specifier
+ // due to redirections.
+ code: compiled_module.code,
+ module_name: compiled_module.name,
+ },
+ ))
}
}
@@ -192,36 +174,26 @@ impl ThreadSafeState {
dir.gen_cache.clone(),
!flags.reload,
flags.config_path.clone(),
- );
+ )?;
let main_module: Option<ModuleSpecifier> = if argv_rest.len() <= 1 {
None
} else {
let root_specifier = argv_rest[1].clone();
- match ModuleSpecifier::resolve_url_or_path(&root_specifier) {
- Ok(specifier) => Some(specifier),
- Err(e) => {
- // TODO: handle unresolvable specifier
- panic!("Unable to resolve root specifier: {:?}", e);
- }
- }
+ Some(ModuleSpecifier::resolve_url_or_path(&root_specifier)?)
};
- let mut import_map = None;
- if let Some(file_name) = &flags.import_map_path {
- let base_url = match &main_module {
- Some(module_specifier) => module_specifier.clone(),
- None => unreachable!(),
- };
-
- match ImportMap::load(&base_url.to_string(), file_name) {
- Ok(map) => import_map = Some(map),
- Err(err) => {
- println!("{:?}", err);
- panic!("Error parsing import map");
- }
+ let import_map: Option<ImportMap> = match &flags.import_map_path {
+ None => None,
+ Some(file_name) => {
+ let base_url = match &main_module {
+ Some(module_specifier) => module_specifier.clone(),
+ None => unreachable!(),
+ };
+ let import_map = ImportMap::load(&base_url.to_string(), file_name)?;
+ Some(import_map)
}
- }
+ };
let mut seeded_rng = None;
if let Some(seed) = flags.seed {
@@ -249,11 +221,42 @@ impl ThreadSafeState {
seeded_rng,
file_fetcher,
ts_compiler,
+ js_compiler: JsCompiler {},
+ json_compiler: JsonCompiler {},
};
Ok(ThreadSafeState(Arc::new(state)))
}
+ pub fn fetch_compiled_module(
+ self: &Self,
+ module_specifier: &ModuleSpecifier,
+ ) -> impl Future<Item = CompiledModule, Error = ErrBox> {
+ let state_ = self.clone();
+
+ self
+ .file_fetcher
+ .fetch_source_file_async(&module_specifier)
+ .and_then(move |out| match out.media_type {
+ msg::MediaType::Unknown => {
+ state_.js_compiler.compile_async(state_.clone(), &out)
+ }
+ msg::MediaType::Json => {
+ state_.json_compiler.compile_async(state_.clone(), &out)
+ }
+ msg::MediaType::TypeScript => {
+ state_.ts_compiler.compile_async(state_.clone(), &out)
+ }
+ msg::MediaType::JavaScript => {
+ if state_.ts_compiler.compile_js {
+ state_.ts_compiler.compile_async(state_.clone(), &out)
+ } else {
+ state_.js_compiler.compile_async(state_.clone(), &out)
+ }
+ }
+ })
+ }
+
/// Read main module from argv
pub fn main_module(&self) -> Option<ModuleSpecifier> {
match &self.main_module {
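
fetch_compiled_module above routes each fetched source file to a compiler based on its media type; JavaScript only goes through the TypeScript compiler when checkJs is enabled. A simplified, synchronous sketch of that dispatch, using string labels instead of the real compiler structs:

// Simplified sketch of the media-type dispatch in
// ThreadSafeState::fetch_compiled_module above. Synchronous and using string
// labels instead of the real compilers, purely for illustration.
#[derive(Debug)]
enum MediaType {
    JavaScript,
    TypeScript,
    Json,
    Unknown,
}

fn pick_compiler(media_type: &MediaType, compile_js: bool) -> &'static str {
    match media_type {
        MediaType::Json => "JsonCompiler",
        MediaType::TypeScript => "TsCompiler",
        MediaType::JavaScript if compile_js => "TsCompiler", // checkJs is on
        MediaType::JavaScript | MediaType::Unknown => "JsCompiler",
    }
}

fn main() {
    assert_eq!(pick_compiler(&MediaType::TypeScript, false), "TsCompiler");
    assert_eq!(pick_compiler(&MediaType::Json, false), "JsonCompiler");
    assert_eq!(pick_compiler(&MediaType::JavaScript, false), "JsCompiler");
    assert_eq!(pick_compiler(&MediaType::JavaScript, true), "TsCompiler");
}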
diff --git a/js/compiler.ts b/js/compiler.ts
index 34ac2f482..4203f753b 100644
--- a/js/compiler.ts
+++ b/js/compiler.ts
@@ -219,6 +219,8 @@ function getExtension(
}
class Host implements ts.CompilerHost {
+ extensionCache: Record<string, ts.Extension> = {};
+
private readonly _options: ts.CompilerOptions = {
allowJs: true,
allowNonTsExtensions: true,
@@ -370,10 +372,16 @@ class Host implements ts.CompilerHost {
// This flags to the compiler to not go looking to transpile functional
// code, anything that is in `/$asset$/` is just library code
const isExternalLibraryImport = moduleName.startsWith(ASSETS);
+ const extension = getExtension(
+ resolvedFileName,
+ SourceFile.mediaType
+ );
+ this.extensionCache[resolvedFileName] = extension;
+
const r = {
resolvedFileName,
isExternalLibraryImport,
- extension: getExtension(resolvedFileName, SourceFile.mediaType)
+ extension
};
return r;
} else {
@@ -401,6 +409,21 @@ class Host implements ts.CompilerHost {
} else {
assert(sourceFiles != null && sourceFiles.length == 1);
const sourceFileName = sourceFiles![0].fileName;
+ const maybeExtension = this.extensionCache[sourceFileName];
+
+ if (maybeExtension) {
+ // NOTE: If it's a `.json` file we don't want to write it to disk.
+ // JSON files are loaded and used by the TS compiler to check types, but we
+ // don't emit them to disk because the output would be identical to the input.
+ if (maybeExtension === ts.Extension.Json) {
+ return;
+ }
+
+ // NOTE: JavaScript files are only emitted to disk if the `checkJs` option is on
+ if (maybeExtension === ts.Extension.Js && !this._options.checkJs) {
+ return;
+ }
+ }
if (fileName.endsWith(".map")) {
// Source Map
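
The writeFile changes to js/compiler.ts above skip emitting .json outputs entirely and skip .js outputs unless checkJs is enabled. A sketch of that decision expressed in Rust, to stay consistent with the other sketches here (the Extension enum only loosely mirrors ts.Extension):

// Sketch of the emit decision added to the compiler host's writeFile above.
// Returns true when the output file should not be written to disk.
#[derive(Debug)]
enum Extension {
    Js,
    Json,
    Ts,
}

fn should_skip_emit(extension: Option<&Extension>, check_js: bool) -> bool {
    match extension {
        // JSON inputs are only used for type checking; emitting would just
        // rewrite the input file.
        Some(Extension::Json) => true,
        // JavaScript is only emitted when compilerOptions.checkJs is on.
        Some(Extension::Js) => !check_js,
        _ => false,
    }
}

fn main() {
    assert!(should_skip_emit(Some(&Extension::Json), false));
    assert!(should_skip_emit(Some(&Extension::Js), false));
    assert!(!should_skip_emit(Some(&Extension::Js), true));
    assert!(!should_skip_emit(Some(&Extension::Ts), false));
    assert!(!should_skip_emit(None, false));
}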
diff --git a/tests/038_checkjs.js b/tests/038_checkjs.js
new file mode 100644
index 000000000..628d3e376
--- /dev/null
+++ b/tests/038_checkjs.js
@@ -0,0 +1,6 @@
+// console.log intentionally misspelled to trigger a type error
+consol.log("hello world!");
+
+// the lint error on the next line is suppressed and not output to the console
+// eslint-disable-next-line
+const foo = new Foo();
diff --git a/tests/038_checkjs.js.out b/tests/038_checkjs.js.out
new file mode 100644
index 000000000..deaf77211
--- /dev/null
+++ b/tests/038_checkjs.js.out
@@ -0,0 +1,15 @@
+[WILDCARD]
+error TS2552: Cannot find name 'consol'. Did you mean 'console'?
+
+[WILDCARD]tests/038_checkjs.js:2:1
+
+2 consol.log("hello world!");
+[WILDCARD]
+error TS2552: Cannot find name 'Foo'. Did you mean 'foo'?
+
+[WILDCARD]tests/038_checkjs.js:6:17
+
+6 const foo = new Foo();
+[WILDCARD]
+Found 2 errors.
+[WILDCARD] \ No newline at end of file
diff --git a/tests/038_checkjs.test b/tests/038_checkjs.test
new file mode 100644
index 000000000..6385c9bb7
--- /dev/null
+++ b/tests/038_checkjs.test
@@ -0,0 +1,5 @@
+# check that the JS file is run through the TS compiler when checkJs is enabled
+args: run --reload --config tests/038_checkjs.tsconfig.json tests/038_checkjs.js
+check_stderr: true
+exit_code: 1
+output: tests/038_checkjs.js.out
diff --git a/tests/038_checkjs.tsconfig.json b/tests/038_checkjs.tsconfig.json
new file mode 100644
index 000000000..08ac60b6c
--- /dev/null
+++ b/tests/038_checkjs.tsconfig.json
@@ -0,0 +1,5 @@
+{
+ "compilerOptions": {
+ "checkJs": true
+ }
+}