Diffstat (limited to 'cli')
-rw-r--r--  cli/global_state.rs                            9
-rw-r--r--  cli/js/compiler.ts                           148
-rw-r--r--  cli/module_graph.rs                           12
-rw-r--r--  cli/tests/054_info_local_imports.out           1
-rw-r--r--  cli/tests/integration_tests.rs                 2
-rw-r--r--  cli/tests/single_compile_with_reload.ts        4
-rw-r--r--  cli/tests/single_compile_with_reload.ts.out    2
-rw-r--r--  cli/tests/single_compile_with_reload_dyn.ts   11
-rw-r--r--  cli/tsc.rs                                   244
9 files changed, 245 insertions, 188 deletions
diff --git a/cli/global_state.rs b/cli/global_state.rs
index 3c7f23435..f3a35be3c 100644
--- a/cli/global_state.rs
+++ b/cli/global_state.rs
@@ -327,6 +327,7 @@ fn test_should_allow_js() {
redirect: None,
filename: "some/file.ts".to_string(),
imports: vec![],
+ version_hash: "1".to_string(),
referenced_files: vec![],
lib_directives: vec![],
types_directives: vec![],
@@ -339,6 +340,7 @@ fn test_should_allow_js() {
url: "file:///some/file1.js".to_string(),
redirect: None,
filename: "some/file1.js".to_string(),
+ version_hash: "1".to_string(),
imports: vec![ImportDescriptor {
specifier: "./file.ts".to_string(),
resolved_specifier: ModuleSpecifier::resolve_url(
@@ -369,6 +371,7 @@ fn test_should_allow_js() {
redirect: None,
filename: "some/file.jsx".to_string(),
imports: vec![],
+ version_hash: "1".to_string(),
referenced_files: vec![],
lib_directives: vec![],
types_directives: vec![],
@@ -381,6 +384,7 @@ fn test_should_allow_js() {
url: "file:///some/file.ts".to_string(),
redirect: None,
filename: "some/file.ts".to_string(),
+ version_hash: "1".to_string(),
imports: vec![ImportDescriptor {
specifier: "./file.jsx".to_string(),
resolved_specifier: ModuleSpecifier::resolve_url(
@@ -414,6 +418,7 @@ fn test_should_allow_js() {
referenced_files: vec![],
lib_directives: vec![],
types_directives: vec![],
+ version_hash: "1".to_string(),
type_headers: vec![],
media_type: MediaType::JavaScript,
source_code: "function foo() {}".to_string(),
@@ -440,6 +445,7 @@ fn test_should_allow_js() {
referenced_files: vec![],
lib_directives: vec![],
types_directives: vec![],
+ version_hash: "1".to_string(),
type_headers: vec![],
media_type: MediaType::JavaScript,
source_code: "function foo() {}".to_string(),
@@ -462,6 +468,7 @@ fn test_needs_compilation() {
lib_directives: vec![],
types_directives: vec![],
type_headers: vec![],
+ version_hash: "1".to_string(),
media_type: MediaType::JavaScript,
source_code: "function foo() {}".to_string(),
}],
@@ -487,6 +494,7 @@ fn test_needs_compilation() {
types_directives: vec![],
type_headers: vec![],
media_type: MediaType::TypeScript,
+ version_hash: "1".to_string(),
source_code: "function foo() {}".to_string(),
},
&ModuleGraphFile {
@@ -499,6 +507,7 @@ fn test_needs_compilation() {
lib_directives: vec![],
types_directives: vec![],
type_headers: vec![],
+ version_hash: "1".to_string(),
media_type: MediaType::JavaScript,
source_code: "function foo() {}".to_string(),
},
diff --git a/cli/js/compiler.ts b/cli/js/compiler.ts
index d86109278..af95390e7 100644
--- a/cli/js/compiler.ts
+++ b/cli/js/compiler.ts
@@ -32,8 +32,16 @@ function getAsset(name: string): string {
// Constants used by `normalizeString` and `resolvePath`
const CHAR_DOT = 46; /* . */
const CHAR_FORWARD_SLASH = 47; /* / */
-const ASSETS = "$asset$";
-const OUT_DIR = "$deno$";
+// Using incremental compile APIs requires that all
+// paths be either relative or absolute. Since
+// analysis in Rust operates on fully resolved URLs,
+// it makes sense to use the same scheme here.
+const ASSETS = "asset://";
+const OUT_DIR = "deno://";
+// This constant is passed to compiler settings when
+// doing incremental compiles. Contents of this
+// file are passed back to Rust and saved to $DENO_DIR.
+const TS_BUILD_INFO = "cache:///tsbuildinfo.json";
// TODO(Bartlomieju): this check should be done in Rust
const IGNORED_COMPILER_OPTIONS: readonly string[] = [
@@ -104,6 +112,24 @@ const DEFAULT_BUNDLER_OPTIONS: ts.CompilerOptions = {
sourceMap: false,
};
+const DEFAULT_INCREMENTAL_COMPILE_OPTIONS: ts.CompilerOptions = {
+ allowJs: false,
+ allowNonTsExtensions: true,
+ checkJs: false,
+ esModuleInterop: true,
+ incremental: true,
+ inlineSourceMap: true,
+ jsx: ts.JsxEmit.React,
+ module: ts.ModuleKind.ESNext,
+ outDir: OUT_DIR,
+ resolveJsonModule: true,
+ sourceMap: false,
+ strict: true,
+ stripComments: true,
+ target: ts.ScriptTarget.ESNext,
+ tsBuildInfoFile: TS_BUILD_INFO,
+};
+
const DEFAULT_COMPILE_OPTIONS: ts.CompilerOptions = {
allowJs: false,
allowNonTsExtensions: true,
@@ -142,6 +168,12 @@ interface CompilerHostOptions {
target: CompilerHostTarget;
unstable?: boolean;
writeFile: WriteFileCallback;
+ incremental?: boolean;
+}
+
+interface IncrementalCompilerHostOptions extends CompilerHostOptions {
+ rootNames?: string[];
+ buildInfo?: string;
}
interface ConfigureResponse {
@@ -166,6 +198,7 @@ interface SourceFileJson {
filename: string;
mediaType: MediaType;
sourceCode: string;
+ versionHash: string;
}
function getExtension(fileName: string, mediaType: MediaType): ts.Extension {
@@ -274,19 +307,20 @@ function getAssetInternal(filename: string): SourceFile {
url,
filename: `${ASSETS}/${name}`,
mediaType: MediaType.TypeScript,
+ versionHash: "1",
sourceCode,
});
}
class Host implements ts.CompilerHost {
- readonly #options = DEFAULT_COMPILE_OPTIONS;
+ protected _options = DEFAULT_COMPILE_OPTIONS;
#target: CompilerHostTarget;
#writeFile: WriteFileCallback;
-
/* Deno specific APIs */
constructor({
bundle = false,
+ incremental = false,
target,
unstable,
writeFile,
@@ -295,10 +329,12 @@ class Host implements ts.CompilerHost {
this.#writeFile = writeFile;
if (bundle) {
// options we need to change when we are generating a bundle
- Object.assign(this.#options, DEFAULT_BUNDLER_OPTIONS);
+ Object.assign(this._options, DEFAULT_BUNDLER_OPTIONS);
+ } else if (incremental) {
+ Object.assign(this._options, DEFAULT_INCREMENTAL_COMPILE_OPTIONS);
}
if (unstable) {
- this.#options.lib = [
+ this._options.lib = [
target === CompilerHostTarget.Worker
? "lib.deno.worker.d.ts"
: "lib.deno.window.d.ts",
@@ -308,7 +344,7 @@ class Host implements ts.CompilerHost {
}
get options(): ts.CompilerOptions {
- return this.#options;
+ return this._options;
}
configure(
@@ -333,13 +369,13 @@ class Host implements ts.CompilerHost {
for (const key of Object.keys(options)) {
if (
IGNORED_COMPILER_OPTIONS.includes(key) &&
- (!(key in this.#options) || options[key] !== this.#options[key])
+ (!(key in this._options) || options[key] !== this._options[key])
) {
ignoredOptions.push(key);
delete options[key];
}
}
- Object.assign(this.#options, options);
+ Object.assign(this._options, options);
return {
ignoredOptions: ignoredOptions.length ? ignoredOptions : undefined,
diagnostics: errors.length ? errors : undefined,
@@ -347,8 +383,8 @@ class Host implements ts.CompilerHost {
}
mergeOptions(...options: ts.CompilerOptions[]): ts.CompilerOptions {
- Object.assign(this.#options, ...options);
- return Object.assign({}, this.#options);
+ Object.assign(this._options, ...options);
+ return Object.assign({}, this._options);
}
/* TypeScript CompilerHost APIs */
@@ -363,7 +399,7 @@ class Host implements ts.CompilerHost {
getCompilationSettings(): ts.CompilerOptions {
log("compiler::host.getCompilationSettings()");
- return this.#options;
+ return this._options;
}
getCurrentDirectory(): string {
@@ -409,6 +445,8 @@ class Host implements ts.CompilerHost {
sourceFile.sourceCode,
languageVersion
);
+ //@ts-ignore
+ sourceFile.tsSourceFile.version = sourceFile.versionHash;
delete sourceFile.sourceCode;
}
return sourceFile.tsSourceFile;
@@ -480,6 +518,25 @@ class Host implements ts.CompilerHost {
}
}
+class IncrementalCompileHost extends Host {
+ #buildInfo: undefined | string = undefined;
+
+ constructor(options: IncrementalCompilerHostOptions) {
+ super(options);
+ const { buildInfo } = options;
+ if (buildInfo) {
+ this.#buildInfo = buildInfo;
+ }
+ }
+
+ readFile(fileName: string): string | undefined {
+ if (fileName == TS_BUILD_INFO) {
+ return this.#buildInfo;
+ }
+ throw new Error("unreachable");
+ }
+}
+
// NOTE: target doesn't really matter here,
// this is in fact a mock host created just to
// load all type definitions and snapshot them.
@@ -547,6 +604,7 @@ function buildLocalSourceFileCache(
filename: entry.url,
mediaType: entry.mediaType,
sourceCode: entry.sourceCode,
+ versionHash: entry.versionHash,
});
for (const importDesc of entry.imports) {
@@ -598,6 +656,7 @@ function buildSourceFileCache(
filename: entry.url,
mediaType: entry.mediaType,
sourceCode: entry.sourceCode,
+ versionHash: entry.versionHash,
});
for (const importDesc of entry.imports) {
@@ -663,6 +722,7 @@ type WriteFileCallback = (
interface CompileWriteFileState {
rootNames: string[];
emitMap: Record<string, EmittedSource>;
+ buildInfo?: string;
}
interface BundleWriteFileState {
@@ -708,7 +768,15 @@ function createCompileWriteFile(
data: string,
sourceFiles?: readonly ts.SourceFile[]
): void {
- assert(sourceFiles != null);
+ const isBuildInfo = fileName === TS_BUILD_INFO;
+
+ if (isBuildInfo) {
+ assert(isBuildInfo);
+ state.buildInfo = data;
+ return;
+ }
+
+ assert(sourceFiles);
assert(sourceFiles.length === 1);
state.emitMap[fileName] = {
filename: sourceFiles[0].fileName,
@@ -717,6 +785,22 @@ function createCompileWriteFile(
};
}
+function createRuntimeCompileWriteFile(
+ state: CompileWriteFileState
+): WriteFileCallback {
+ return function writeFile(
+ fileName: string,
+ data: string,
+ sourceFiles?: readonly ts.SourceFile[]
+ ): void {
+ assert(sourceFiles);
+ assert(sourceFiles.length === 1);
+ state.emitMap[fileName] = {
+ filename: sourceFiles[0].fileName,
+ contents: data,
+ };
+ };
+}
interface ConvertCompilerOptionsResult {
files?: string[];
options: ts.CompilerOptions;
@@ -888,7 +972,6 @@ function performanceEnd(): Stats {
}
// TODO(Bartlomieju): this check should be done in Rust; there should be no
-// console.log here
function processConfigureResponse(
configResult: ConfigureResponse,
configPath: string
@@ -1106,6 +1189,7 @@ interface SourceFileMapEntry {
libDirectives: ReferenceDescriptor[];
typesDirectives: ReferenceDescriptor[];
typeHeaders: ReferenceDescriptor[];
+ versionHash: string;
}
/** Used when "deno run" is invoked */
@@ -1121,6 +1205,7 @@ interface CompileRequest {
cwd: string;
// key value is fully resolved URL
sourceFileMap: Record<string, SourceFileMapEntry>;
+ buildInfo?: string;
}
/** Used when "deno bundle" is invoked */
@@ -1174,6 +1259,7 @@ type CompilerRequest =
interface CompileResponse {
emitMap: Record<string, EmittedSource>;
diagnostics: Diagnostic;
+ buildInfo: undefined | string;
stats?: Stats;
}
@@ -1195,19 +1281,16 @@ interface RuntimeBundleResponse {
function compile({
allowJs,
+ buildInfo,
config,
configPath,
rootNames,
target,
unstable,
- performance,
cwd,
sourceFileMap,
type,
}: CompileRequest): CompileResponse {
- if (performance) {
- performanceStart();
- }
log(">>> compile start", { rootNames, type: CompilerRequestType[type] });
// When a program is emitted, TypeScript will call `writeFile` with
@@ -1218,11 +1301,14 @@ function compile({
rootNames,
emitMap: {},
};
- const host = new Host({
+ const host = new IncrementalCompileHost({
bundle: false,
target,
unstable,
+ incremental: true,
writeFile: createCompileWriteFile(state),
+ rootNames,
+ buildInfo,
});
let diagnostics: readonly ts.Diagnostic[] = [];
@@ -1239,15 +1325,23 @@ function compile({
// to generate the program and possibly emit it.
if (diagnostics.length === 0) {
const options = host.getCompilationSettings();
- const program = ts.createProgram({
+ const program = ts.createIncrementalProgram({
rootNames,
options,
host,
});
- diagnostics = ts
- .getPreEmitDiagnostics(program)
- .filter(({ code }) => !ignoredDiagnostics.includes(code));
+ // TODO(bartlomieju): check if this is ok
+ diagnostics = [
+ ...program.getConfigFileParsingDiagnostics(),
+ ...program.getSyntacticDiagnostics(),
+ ...program.getOptionsDiagnostics(),
+ ...program.getGlobalDiagnostics(),
+ ...program.getSemanticDiagnostics(),
+ ];
+ diagnostics = diagnostics.filter(
+ ({ code }) => !ignoredDiagnostics.includes(code)
+ );
// We will only proceed with the emit if there are no diagnostics.
if (diagnostics.length === 0) {
@@ -1265,18 +1359,14 @@ function compile({
// without casting.
diagnostics = emitResult.diagnostics;
}
- if (performance) {
- performanceProgram(program);
- }
}
log("<<< compile end", { rootNames, type: CompilerRequestType[type] });
- const stats = performance ? performanceEnd() : undefined;
return {
emitMap: state.emitMap,
+ buildInfo: state.buildInfo,
diagnostics: fromTypeScriptDiagnostic(diagnostics),
- stats,
};
}
@@ -1402,7 +1492,7 @@ function runtimeCompile(
const host = new Host({
bundle: false,
target,
- writeFile: createCompileWriteFile(state),
+ writeFile: createRuntimeCompileWriteFile(state),
});
const compilerOptions = [DEFAULT_RUNTIME_COMPILE_OPTIONS];
if (convertedOptions) {
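The compiler.ts changes above amount to three things: the compile options gain incremental: true and a tsBuildInfoFile path, the Rust-computed versionHash is copied onto each ts.SourceFile.version so TypeScript can tell which files changed, and ts.createProgram is swapped for ts.createIncrementalProgram with a host that serves the cached build info. The following is only a minimal sketch of that wiring, outside of Deno and with hypothetical names (cachedBuildInfo, collectOutput); the real Host also resolves the asset:// and deno:// schemes.

import * as ts from "typescript";

const BUILD_INFO = "cache:///tsbuildinfo.json";
let cachedBuildInfo: string | undefined; // loaded from a previous run, if any
const collectOutput: Record<string, string> = {};

const options: ts.CompilerOptions = {
  incremental: true,
  tsBuildInfoFile: BUILD_INFO,
  module: ts.ModuleKind.ESNext,
  target: ts.ScriptTarget.ESNext,
};

const host = ts.createIncrementalCompilerHost(options);
const defaultReadFile = host.readFile.bind(host);
// Serve the previously cached buildinfo instead of touching the file system.
host.readFile = (fileName) =>
  fileName === BUILD_INFO ? cachedBuildInfo : defaultReadFile(fileName);
// Capture emitted files and the refreshed buildinfo in memory.
host.writeFile = (fileName, data) => {
  if (fileName === BUILD_INFO) {
    cachedBuildInfo = data;
  } else {
    collectOutput[fileName] = data;
  }
};

const program = ts.createIncrementalProgram({
  rootNames: ["main.ts"],
  options,
  host,
});
const diagnostics = [
  ...program.getConfigFileParsingDiagnostics(),
  ...program.getSyntacticDiagnostics(),
  ...program.getOptionsDiagnostics(),
  ...program.getGlobalDiagnostics(),
  ...program.getSemanticDiagnostics(),
];
if (diagnostics.length === 0) {
  program.emit(); // writes the .js output and the refreshed buildinfo
}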
diff --git a/cli/module_graph.rs b/cli/module_graph.rs
index b5bde1a19..24a20fa43 100644
--- a/cli/module_graph.rs
+++ b/cli/module_graph.rs
@@ -1,5 +1,5 @@
// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
-
+use crate::checksum;
use crate::doc::Location;
use crate::file_fetcher::map_file_extension;
use crate::file_fetcher::SourceFile;
@@ -13,6 +13,7 @@ use crate::tsc::ImportDesc;
use crate::tsc::TsReferenceDesc;
use crate::tsc::TsReferenceKind;
use crate::tsc::AVAILABLE_LIBS;
+use crate::version;
use deno_core::ErrBox;
use deno_core::ModuleSpecifier;
use futures::stream::FuturesUnordered;
@@ -228,6 +229,7 @@ pub struct ModuleGraphFile {
pub url: String,
pub redirect: Option<String>,
pub filename: String,
+ pub version_hash: String,
pub imports: Vec<ImportDescriptor>,
pub referenced_files: Vec<ReferenceDescriptor>,
pub lib_directives: Vec<ReferenceDescriptor>,
@@ -369,6 +371,7 @@ impl ModuleGraphLoader {
specifier: specifier.to_string(),
url: specifier.to_string(),
redirect: None,
+ version_hash: "".to_string(),
media_type: map_file_extension(&PathBuf::from(specifier.clone())),
filename: specifier,
source_code,
@@ -454,6 +457,10 @@ impl ModuleGraphLoader {
url: module_specifier.to_string(),
redirect: Some(source_file.url.to_string()),
filename: source_file.filename.to_str().unwrap().to_string(),
+ version_hash: checksum::gen(vec![
+ &source_file.source_code,
+ version::DENO.as_bytes(),
+ ]),
media_type: source_file.media_type,
source_code: "".to_string(),
imports: vec![],
@@ -466,6 +473,8 @@ impl ModuleGraphLoader {
}
let module_specifier = ModuleSpecifier::from(source_file.url.clone());
+ let version_hash =
+ checksum::gen(vec![&source_file.source_code, version::DENO.as_bytes()]);
let source_code = String::from_utf8(source_file.source_code)?;
if SUPPORTED_MEDIA_TYPES.contains(&source_file.media_type) {
@@ -553,6 +562,7 @@ impl ModuleGraphLoader {
specifier: module_specifier.to_string(),
url: module_specifier.to_string(),
redirect: None,
+ version_hash,
filename: source_file.filename.to_str().unwrap().to_string(),
media_type: source_file.media_type,
source_code,
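module_graph.rs now stamps every fetched file with a version_hash: a SHA-256 over the file's source bytes plus the current Deno version (checksum::gen), so the hash changes whenever either the file or the CLI is updated. A rough TypeScript sketch of the same computation, assuming a Web Crypto environment; versionHash is an illustrative name and denoVersion stands in for version::DENO.

async function versionHash(
  sourceCode: string,
  denoVersion: string,
): Promise<string> {
  // Hash the source concatenated with the Deno version, hex-encode the digest.
  const data = new TextEncoder().encode(sourceCode + denoVersion);
  const digest = await crypto.subtle.digest("SHA-256", data);
  return Array.from(new Uint8Array(digest))
    .map((b) => b.toString(16).padStart(2, "0"))
    .join("");
}

Only equality of this field is ever checked, which is why the test fixtures in global_state.rs can simply pin it to "1".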
diff --git a/cli/tests/054_info_local_imports.out b/cli/tests/054_info_local_imports.out
index 9794e4ede..32cfd8525 100644
--- a/cli/tests/054_info_local_imports.out
+++ b/cli/tests/054_info_local_imports.out
@@ -1,7 +1,6 @@
local: [WILDCARD]005_more_imports.ts
type: TypeScript
compiled: [WILDCARD]005_more_imports.ts.js
-map: [WILDCARD]005_more_imports.ts.js.map
deps:
file://[WILDCARD]/005_more_imports.ts
└─┬ file://[WILDCARD]/subdir/mod1.ts
diff --git a/cli/tests/integration_tests.rs b/cli/tests/integration_tests.rs
index d5b4016c1..890249072 100644
--- a/cli/tests/integration_tests.rs
+++ b/cli/tests/integration_tests.rs
@@ -2049,7 +2049,7 @@ itest!(single_compile_with_reload {
});
itest!(performance_stats {
- args: "run --reload --log-level debug 002_hello.ts",
+ args: "bundle --log-level debug 002_hello.ts",
output: "performance_stats.out",
});
diff --git a/cli/tests/single_compile_with_reload.ts b/cli/tests/single_compile_with_reload.ts
index 3dd728366..a4d6d0341 100644
--- a/cli/tests/single_compile_with_reload.ts
+++ b/cli/tests/single_compile_with_reload.ts
@@ -1,4 +1,4 @@
-await import("./005_more_imports.ts");
+await import("./single_compile_with_reload_dyn.ts");
console.log("1");
-await import("./005_more_imports.ts");
+await import("./single_compile_with_reload_dyn.ts");
console.log("2");
diff --git a/cli/tests/single_compile_with_reload.ts.out b/cli/tests/single_compile_with_reload.ts.out
index 2cdd71673..c3e87e7d3 100644
--- a/cli/tests/single_compile_with_reload.ts.out
+++ b/cli/tests/single_compile_with_reload.ts.out
@@ -1,5 +1,5 @@
Compile [WILDCARD]single_compile_with_reload.ts
-Compile [WILDCARD]005_more_imports.ts
+Compile [WILDCARD]single_compile_with_reload_dyn.ts
Hello
1
2
diff --git a/cli/tests/single_compile_with_reload_dyn.ts b/cli/tests/single_compile_with_reload_dyn.ts
new file mode 100644
index 000000000..52dd1df7b
--- /dev/null
+++ b/cli/tests/single_compile_with_reload_dyn.ts
@@ -0,0 +1,11 @@
+import { returnsHi, returnsFoo2, printHello3 } from "./subdir/mod1.ts";
+
+printHello3();
+
+if (returnsHi() !== "Hi") {
+ throw Error("Unexpected");
+}
+
+if (returnsFoo2() !== "Foo") {
+ throw Error("Unexpected");
+}
diff --git a/cli/tsc.rs b/cli/tsc.rs
index bb6544961..45c77f0c4 100644
--- a/cli/tsc.rs
+++ b/cli/tsc.rs
@@ -302,28 +302,6 @@ impl CompiledFileMetadata {
}
}
-/// Information associated with compilation of a "module graph",
-/// ie. entry point and all its dependencies.
-/// It's used to perform cache invalidation if content of any
-/// dependency changes.
-#[derive(Deserialize, Serialize)]
-pub struct GraphFileMetadata {
- pub deps: Vec<String>,
- pub version_hash: String,
-}
-
-impl GraphFileMetadata {
- pub fn from_json_string(
- metadata_string: String,
- ) -> Result<Self, serde_json::Error> {
- serde_json::from_str::<Self>(&metadata_string)
- }
-
- pub fn to_json_string(&self) -> Result<String, serde_json::Error> {
- serde_json::to_string(self)
- }
-}
-
/// Emit a SHA256 hash based on source code, deno version and TS config.
/// Used to check if a recompilation for source code is needed.
pub fn source_code_version_hash(
@@ -367,7 +345,7 @@ impl Deref for TsCompiler {
}
}
-#[derive(Deserialize)]
+#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct Stat {
key: String,
@@ -394,6 +372,7 @@ struct BundleResponse {
struct CompileResponse {
diagnostics: Diagnostic,
emit_map: HashMap<String, EmittedSource>,
+ build_info: Option<String>,
stats: Option<Vec<Stat>>,
}
@@ -491,36 +470,59 @@ impl TsCompiler {
) -> Result<(), ErrBox> {
let mut has_cached_version = false;
+ let module_url = source_file.url.clone();
+ let build_info_key = self
+ .disk_cache
+ .get_cache_filename_with_extension(&module_url, "buildinfo");
+
+ let build_info = match self.disk_cache.get(&build_info_key) {
+ Ok(bytes) => Some(String::from_utf8(bytes)?),
+ Err(_) => None,
+ };
+
+ let file_fetcher = global_state.file_fetcher.clone();
+
// Only use disk cache if `--reload` flag was not used or
// this file has already been compiled during current process
// lifetime.
if self.use_disk_cache || self.has_compiled(&source_file.url) {
- if let Some(metadata) = self.get_graph_metadata(&source_file.url) {
- has_cached_version = true;
+ if let Some(build_info_str) = build_info.as_ref() {
+ let build_inf_json: Value = serde_json::from_str(build_info_str)?;
+ let program_val = build_inf_json["program"].as_object().unwrap();
+ let file_infos = program_val["fileInfos"].as_object().unwrap();
- let version_hash = crate::checksum::gen(vec![
- version::DENO.as_bytes(),
- &self.config.hash,
- ]);
+ has_cached_version = true;
- has_cached_version &= metadata.version_hash == version_hash;
has_cached_version &= self
.has_compiled_source(&global_state.file_fetcher, &source_file.url);
- for dep in metadata.deps {
- let url = Url::parse(&dep).expect("Dep is not a valid url");
- has_cached_version &=
- self.has_compiled_source(&global_state.file_fetcher, &url);
+ for (filename, file_info) in file_infos.iter() {
+ if filename.starts_with("asset://") {
+ continue;
+ }
+
+ let url = Url::parse(&filename).expect("Filename is not a valid url");
+ let specifier = ModuleSpecifier::from(url);
+ if let Some(source_file) = file_fetcher
+ .fetch_cached_source_file(&specifier, Permissions::allow_all())
+ {
+ let existing_hash = crate::checksum::gen(vec![
+ &source_file.source_code,
+ version::DENO.as_bytes(),
+ ]);
+ let expected_hash =
+ file_info["version"].as_str().unwrap().to_string();
+ has_cached_version &= existing_hash == expected_hash
+ } else {
+ has_cached_version &= false
+ }
}
}
}
-
if has_cached_version {
return Ok(());
}
- let module_url = source_file.url.clone();
-
let module_graph_json =
serde_json::to_value(module_graph).expect("Failed to serialize data");
let target = match target {
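The new cache check above replaces the old .graph metadata: instead of a hand-maintained dependency list, tsc.rs parses the cached .tsbuildinfo, walks program.fileInfos, and skips compilation only if every non-asset file's recorded version still matches a freshly computed hash. A compact sketch of that logic; the helpers loadSource and versionHash are hypothetical stand-ins for the file fetcher and checksum::gen.

interface BuildInfo {
  program: { fileInfos: Record<string, { version: string }> };
}

async function isCacheValid(
  buildInfoJson: string,
  loadSource: (url: string) => string | undefined,
  versionHash: (source: string) => Promise<string>,
): Promise<boolean> {
  const { program }: BuildInfo = JSON.parse(buildInfoJson);
  for (const [url, info] of Object.entries(program.fileInfos)) {
    // Bundled type libraries use the asset:// scheme and never change.
    if (url.startsWith("asset://")) continue;
    const source = loadSource(url);
    if (source === undefined) return false; // not cached => recompile
    if ((await versionHash(source)) !== info.version) return false;
  }
  return true;
}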
@@ -535,6 +537,7 @@ impl TsCompiler {
};
let compiler_config = self.config.clone();
let cwd = std::env::current_dir().unwrap();
+
let j = match (compiler_config.path, compiler_config.content) {
(Some(config_path), Some(config_data)) => json!({
"type": msg::CompilerRequestType::Compile,
@@ -547,6 +550,7 @@ impl TsCompiler {
"config": str::from_utf8(&config_data).unwrap(),
"cwd": cwd,
"sourceFileMap": module_graph_json,
+ "buildInfo": build_info,
}),
_ => json!({
"type": msg::CompilerRequestType::Compile,
@@ -557,6 +561,7 @@ impl TsCompiler {
"performance": performance,
"cwd": cwd,
"sourceFileMap": module_graph_json,
+ "buildInfo": build_info,
}),
};
@@ -583,69 +588,13 @@ impl TsCompiler {
maybe_log_stats(compile_response.stats);
- self.set_graph_metadata(
- source_file.url.clone(),
- &compile_response.emit_map,
- )?;
+ if let Some(build_info) = compile_response.build_info {
+ self.cache_build_info(&module_url, build_info)?;
+ }
self.cache_emitted_files(compile_response.emit_map)?;
Ok(())
}
- fn get_graph_metadata(&self, url: &Url) -> Option<GraphFileMetadata> {
- let cache_key = self
- .disk_cache
- .get_cache_filename_with_extension(url, "graph");
- if let Ok(metadata_bytes) = self.disk_cache.get(&cache_key) {
- if let Ok(metadata) = std::str::from_utf8(&metadata_bytes) {
- if let Ok(read_metadata) =
- GraphFileMetadata::from_json_string(metadata.to_string())
- {
- return Some(read_metadata);
- }
- }
- }
-
- None
- }
-
- fn set_graph_metadata(
- &self,
- url: Url,
- emit_map: &HashMap<String, EmittedSource>,
- ) -> std::io::Result<()> {
- let version_hash =
- crate::checksum::gen(vec![version::DENO.as_bytes(), &self.config.hash]);
- let mut deps = vec![];
-
- for (_emitted_name, source) in emit_map.iter() {
- let specifier = ModuleSpecifier::resolve_url(&source.filename)
- .expect("Should be a valid module specifier");
-
- let source_file = self
- .file_fetcher
- .fetch_cached_source_file(&specifier, Permissions::allow_all())
- .expect("Source file not found");
-
- // NOTE: JavaScript files are only cached to disk if `checkJs`
- // option in on
- if source_file.media_type == msg::MediaType::JavaScript
- && !self.compile_js
- {
- continue;
- }
-
- deps.push(specifier.to_string());
- }
-
- let graph_metadata = GraphFileMetadata { deps, version_hash };
- let meta_key = self
- .disk_cache
- .get_cache_filename_with_extension(&url, "graph");
- self
- .disk_cache
- .set(&meta_key, graph_metadata.to_json_string()?.as_bytes())
- }
-
/// Get associated `CompiledFileMetadata` for given module if it exists.
pub fn get_metadata(&self, url: &Url) -> Option<CompiledFileMetadata> {
// Try to load cached version:
@@ -666,6 +615,19 @@ impl TsCompiler {
None
}
+ fn cache_build_info(
+ &self,
+ url: &Url,
+ build_info: String,
+ ) -> std::io::Result<()> {
+ let js_key = self
+ .disk_cache
+ .get_cache_filename_with_extension(url, "buildinfo");
+ self.disk_cache.set(&js_key, build_info.as_bytes())?;
+
+ Ok(())
+ }
+
fn cache_emitted_files(
&self,
emit_map: HashMap<String, EmittedSource>,
@@ -750,33 +712,6 @@ impl TsCompiler {
source_file: SourceFile,
contents: &str,
) -> std::io::Result<()> {
- // By default TSC output source map url that is relative; we need
- // to substitute it manually to correct file URL in DENO_DIR.
- let mut content_lines = contents
- .split('\n')
- .map(|s| s.to_string())
- .collect::<Vec<String>>();
-
- if !content_lines.is_empty() {
- let last_line = content_lines.pop().unwrap();
- if last_line.starts_with("//# sourceMappingURL=") {
- let source_map_key = self.disk_cache.get_cache_filename_with_extension(
- module_specifier.as_url(),
- "js.map",
- );
- let source_map_path = self.disk_cache.location.join(source_map_key);
- let source_map_file_url = Url::from_file_path(source_map_path)
- .expect("Bad file URL for source map");
- let new_last_line =
- format!("//# sourceMappingURL={}", source_map_file_url.to_string());
- content_lines.push(new_last_line);
- } else {
- content_lines.push(last_line);
- }
- }
-
- let contents = content_lines.join("\n");
-
let js_key = self
.disk_cache
.get_cache_filename_with_extension(module_specifier.as_url(), "js");
@@ -856,9 +791,7 @@ impl TsCompiler {
impl SourceMapGetter for TsCompiler {
fn get_source_map(&self, script_name: &str) -> Option<Vec<u8>> {
- self
- .try_to_resolve_and_get_source_map(script_name)
- .map(|out| out.source_code)
+ self.try_to_resolve_and_get_source_map(script_name)
}
fn get_source_line(&self, script_name: &str, line: usize) -> Option<String> {
@@ -901,11 +834,38 @@ impl TsCompiler {
fn try_to_resolve_and_get_source_map(
&self,
script_name: &str,
- ) -> Option<SourceFile> {
+ ) -> Option<Vec<u8>> {
if let Some(module_specifier) = self.try_to_resolve(script_name) {
return match self.get_source_map_file(&module_specifier) {
- Ok(out) => Some(out),
- Err(_) => None,
+ Ok(out) => Some(out.source_code),
+ Err(_) => {
+ // Check if map is inlined
+ if let Ok(compiled_source) =
+ self.get_compiled_module(module_specifier.as_url())
+ {
+ let mut content_lines = compiled_source
+ .code
+ .split('\n')
+ .map(|s| s.to_string())
+ .collect::<Vec<String>>();
+
+ if !content_lines.is_empty() {
+ let last_line = content_lines.pop().unwrap();
+ if last_line.starts_with(
+ "//# sourceMappingURL=data:application/json;base64,",
+ ) {
+ let encoded = last_line.trim_start_matches(
+ "//# sourceMappingURL=data:application/json;base64,",
+ );
+ let decoded_map =
+ base64::decode(encoded).expect("failed to parse source map");
+ return Some(decoded_map);
+ }
+ }
+ }
+
+ None
+ }
};
}
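Because the incremental options set inlineSourceMap: true and no longer emit a separate .js.map (which is also why the 054_info_local_imports.out expectation loses its map: line), get_source_map now falls back to pulling the map out of the compiled module itself. A small TypeScript sketch of the same fallback, illustrative only:

const SOURCE_MAP_PREFIX = "//# sourceMappingURL=data:application/json;base64,";

function extractInlineSourceMap(compiledCode: string): string | undefined {
  // tsc appends the inline map as the last line of the emitted JavaScript.
  const lines = compiledCode.split("\n");
  const lastLine = lines.pop() ?? "";
  if (!lastLine.startsWith(SOURCE_MAP_PREFIX)) return undefined;
  // Decode the base64 payload back into the source map's JSON text.
  return atob(lastLine.slice(SOURCE_MAP_PREFIX.length));
}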
@@ -1560,30 +1520,8 @@ mod tests {
let mut lines: Vec<String> =
source_code.split('\n').map(|s| s.to_string()).collect();
let last_line = lines.pop().unwrap();
- assert!(last_line.starts_with("//# sourceMappingURL=file://"));
-
- // Get source map file and assert it has proper URLs
- let source_map = mock_state
- .ts_compiler
- .get_source_map_file(&specifier)
- .expect("Source map not found");
- let source_str = String::from_utf8(source_map.source_code).unwrap();
- let source_json: Value = serde_json::from_str(&source_str).unwrap();
-
- let js_key = mock_state
- .ts_compiler
- .disk_cache
- .get_cache_filename_with_extension(specifier.as_url(), "js");
- let js_path = mock_state.ts_compiler.disk_cache.location.join(js_key);
- let js_file_url = Url::from_file_path(js_path).unwrap();
-
- let file_str = source_json.get("file").unwrap().as_str().unwrap();
- assert_eq!(file_str, js_file_url.to_string());
-
- let sources = source_json.get("sources").unwrap().as_array().unwrap();
- assert_eq!(sources.len(), 1);
- let source = sources.get(0).unwrap().as_str().unwrap();
- assert_eq!(source, specifier.to_string());
+ assert!(last_line
+ .starts_with("//# sourceMappingURL=data:application/json;base64"));
}
#[tokio::test]