summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorKitson Kelly <me@kitsonkelly.com>2020-06-26 22:23:25 +1000
committerGitHub <noreply@github.com>2020-06-26 08:23:25 -0400
commit70463bac7d0327027f4650e3b4ab810e76604e2b (patch)
tree03ba8bea5f6bb7cafceeac3cf6deda4d804442c9
parent4817c153e47975b0b4390ec41b68240c4257f599 (diff)
fix: Omit buildinfo when --reload passed (#6489)
-rw-r--r--cli/tests/integration_tests.rs31
-rw-r--r--cli/tsc.rs109
2 files changed, 93 insertions, 47 deletions
diff --git a/cli/tests/integration_tests.rs b/cli/tests/integration_tests.rs
index b4bc47e08..239d1ca52 100644
--- a/cli/tests/integration_tests.rs
+++ b/cli/tests/integration_tests.rs
@@ -611,6 +611,37 @@ fn ts_dependency_recompilation() {
}
#[test]
+fn ts_reload() {
+ let hello_ts = util::root_path().join("cli/tests/002_hello.ts");
+ assert!(hello_ts.is_file());
+ let mut initial = util::deno_cmd()
+ .current_dir(util::root_path())
+ .arg("cache")
+ .arg("--reload")
+ .arg(hello_ts.clone())
+ .spawn()
+ .expect("failed to spawn script");
+ let status_initial =
+ initial.wait().expect("failed to wait for child process");
+ assert!(status_initial.success());
+
+ let output = util::deno_cmd()
+ .current_dir(util::root_path())
+ .arg("cache")
+ .arg("--reload")
+ .arg("-L")
+ .arg("debug")
+ .arg(hello_ts)
+ .output()
+ .expect("failed to spawn script");
+ // check the output of the cache program.
+ assert!(std::str::from_utf8(&output.stdout)
+ .unwrap()
+ .trim()
+ .contains("compiler::host.writeFile deno://002_hello.js"));
+}
+
+#[test]
fn bundle_exports() {
// First we have to generate a bundle of some module that has exports.
let mod1 = util::root_path().join("cli/tests/subdir/mod1.ts");
diff --git a/cli/tsc.rs b/cli/tsc.rs
index 45c77f0c4..2ef620a6f 100644
--- a/cli/tsc.rs
+++ b/cli/tsc.rs
@@ -422,8 +422,8 @@ impl TsCompiler {
c.contains(url)
}
- /// Check if there is compiled source in cache that is valid
- /// and can be used again.
+ /// Check if there is compiled source in cache that is valid and can be used
+ /// again.
fn has_compiled_source(
&self,
file_fetcher: &SourceFileFetcher,
@@ -450,6 +450,55 @@ impl TsCompiler {
false
}
+ fn has_valid_cache(
+ &self,
+ file_fetcher: &SourceFileFetcher,
+ url: &Url,
+ build_info: &Option<String>,
+ ) -> Result<bool, ErrBox> {
+ if let Some(build_info_str) = build_info.as_ref() {
+ let build_inf_json: Value = serde_json::from_str(build_info_str)?;
+ let program_val = build_inf_json["program"].as_object().unwrap();
+ let file_infos = program_val["fileInfos"].as_object().unwrap();
+
+ if !self.has_compiled_source(file_fetcher, url) {
+ return Ok(false);
+ }
+
+ for (filename, file_info) in file_infos.iter() {
+ if filename.starts_with("asset://") {
+ continue;
+ }
+
+ let url = Url::parse(&filename).expect("Filename is not a valid url");
+ let specifier = ModuleSpecifier::from(url);
+
+ if let Some(source_file) = file_fetcher
+ .fetch_cached_source_file(&specifier, Permissions::allow_all())
+ {
+ let existing_hash = crate::checksum::gen(vec![
+ &source_file.source_code,
+ version::DENO.as_bytes(),
+ ]);
+ let expected_hash =
+ file_info["version"].as_str().unwrap().to_string();
+ if existing_hash != expected_hash {
+ // hashes don't match, something has changed
+ return Ok(false);
+ }
+ } else {
+ // no cached source file
+ return Ok(false);
+ }
+ }
+ } else {
+ // no build info
+ return Ok(false);
+ }
+
+ Ok(true)
+ }
+
/// Asynchronously compile module and all it's dependencies.
///
/// This method compiles every module at most once.
@@ -468,58 +517,24 @@ impl TsCompiler {
module_graph: ModuleGraph,
allow_js: bool,
) -> Result<(), ErrBox> {
- let mut has_cached_version = false;
-
let module_url = source_file.url.clone();
let build_info_key = self
.disk_cache
.get_cache_filename_with_extension(&module_url, "buildinfo");
-
let build_info = match self.disk_cache.get(&build_info_key) {
Ok(bytes) => Some(String::from_utf8(bytes)?),
Err(_) => None,
};
- let file_fetcher = global_state.file_fetcher.clone();
-
- // Only use disk cache if `--reload` flag was not used or
- // this file has already been compiled during current process
- // lifetime.
- if self.use_disk_cache || self.has_compiled(&source_file.url) {
- if let Some(build_info_str) = build_info.as_ref() {
- let build_inf_json: Value = serde_json::from_str(build_info_str)?;
- let program_val = build_inf_json["program"].as_object().unwrap();
- let file_infos = program_val["fileInfos"].as_object().unwrap();
-
- has_cached_version = true;
-
- has_cached_version &= self
- .has_compiled_source(&global_state.file_fetcher, &source_file.url);
-
- for (filename, file_info) in file_infos.iter() {
- if filename.starts_with("asset://") {
- continue;
- }
-
- let url = Url::parse(&filename).expect("Filename is not a valid url");
- let specifier = ModuleSpecifier::from(url);
- if let Some(source_file) = file_fetcher
- .fetch_cached_source_file(&specifier, Permissions::allow_all())
- {
- let existing_hash = crate::checksum::gen(vec![
- &source_file.source_code,
- version::DENO.as_bytes(),
- ]);
- let expected_hash =
- file_info["version"].as_str().unwrap().to_string();
- has_cached_version &= existing_hash == expected_hash
- } else {
- has_cached_version &= false
- }
- }
- }
- }
- if has_cached_version {
+ // Only use disk cache if `--reload` flag was not used or this file has
+ // already been compiled during current process lifetime.
+ if (self.use_disk_cache || self.has_compiled(&source_file.url))
+ && self.has_valid_cache(
+ &global_state.file_fetcher,
+ &source_file.url,
+ &build_info,
+ )?
+ {
return Ok(());
}
@@ -550,7 +565,7 @@ impl TsCompiler {
"config": str::from_utf8(&config_data).unwrap(),
"cwd": cwd,
"sourceFileMap": module_graph_json,
- "buildInfo": build_info,
+ "buildInfo": if self.use_disk_cache { build_info } else { None },
}),
_ => json!({
"type": msg::CompilerRequestType::Compile,
@@ -561,7 +576,7 @@ impl TsCompiler {
"performance": performance,
"cwd": cwd,
"sourceFileMap": module_graph_json,
- "buildInfo": build_info,
+ "buildInfo": if self.use_disk_cache { build_info } else { None },
}),
};