author     Bartek Iwaƈczuk <biwanczuk@gmail.com>     2020-06-19 12:27:15 +0200
committer  GitHub <noreply@github.com>               2020-06-19 12:27:15 +0200
commit     826a3135b41bdaeb8c8cd27a4652563971b04baa (patch)
tree       e8baaca1b5560e5825e19f5b0c6872d781d767a3
parent     345a5b3dff3a333d156bf4aff9f7e2a355d59746 (diff)
refactor(compiler): split code paths for compile and bundle (#6304)
* refactor "compile" and "runtimeCompile" in "compiler.ts" and factor out separate methods for "compile" and "bundle" operations
* remove noisy debug output from "compiler.ts"
* provide "Serialize" implementations for enums in "msg.rs"
* rename "analyze_dependencies_and_references" to "pre_process_file" and move it to "tsc.rs"
* refactor ModuleGraph to use more concrete types and properly annotate locations where errors occur
* remove dead code from "file_fetcher.rs" - "SourceFile.types_url" is no longer needed, as type reference parsing is done in "ModuleGraph"
* remove unneeded field "source_path" from ".meta" files stored for compiled source file (towards #6080)
-rw-r--r--  cli/file_fetcher.rs                                    170
-rw-r--r--  cli/global_state.rs                                     50
-rw-r--r--  cli/js/compiler.ts                                     370
-rw-r--r--  cli/main.rs                                              3
-rw-r--r--  cli/module_graph.rs                                    509
-rw-r--r--  cli/msg.rs                                              47
-rw-r--r--  cli/ops/runtime_compiler.rs                             31
-rw-r--r--  cli/swc_util.rs                                        439
-rw-r--r--  cli/tests/error_004_missing_module.ts.out                1
-rw-r--r--  cli/tests/error_006_import_ext_failure.ts.out            1
-rw-r--r--  cli/tests/error_016_dynamic_import_permissions2.out      1
-rw-r--r--  cli/tsc.rs                                             441
-rw-r--r--  cli/web_worker.rs                                        1
13 files changed, 1062 insertions, 1002 deletions
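
The first bullet of the commit message describes replacing a single compile path that branched on a `bundle` flag with separate request types and handlers. Below is a minimal sketch of that pattern, heavily simplified from the interfaces this commit adds to cli/js/compiler.ts; the field lists are trimmed, and `compile`/`bundle` here are stand-in stubs, not the real implementations.

// Simplified sketch of the request split adopted in cli/js/compiler.ts.
// Not part of the diff; real requests carry many more fields.
enum CompilerRequestType {
  Compile = 0,
  Bundle = 1,
  RuntimeCompile = 2,
  RuntimeBundle = 3,
  RuntimeTranspile = 4,
}

interface CompileRequest {
  type: CompilerRequestType.Compile;
  rootNames: string[];
}

interface BundleRequest {
  type: CompilerRequestType.Bundle;
  rootNames: string[];
}

type CompilerRequest = CompileRequest | BundleRequest;

// Stubs standing in for the real handlers in compiler.ts.
declare function compile(request: CompileRequest): void;
declare function bundle(request: BundleRequest): void;

function onMessage(request: CompilerRequest): void {
  // The `type` discriminant lets TypeScript narrow `request`, so each
  // handler receives exactly the fields it needs instead of one shared
  // shape full of optional, bundle-only properties.
  switch (request.type) {
    case CompilerRequestType.Compile:
      compile(request);
      break;
    case CompilerRequestType.Bundle:
      bundle(request);
      break;
  }
}

The numeric values mirror CompilerRequestType in cli/msg.rs, which is why both files carry the "Update carefully!" warning seen in the diff below.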
diff --git a/cli/file_fetcher.rs b/cli/file_fetcher.rs
index 1613117a1..4da0f8b86 100644
--- a/cli/file_fetcher.rs
+++ b/cli/file_fetcher.rs
@@ -11,7 +11,6 @@ use deno_core::ErrBox;
use deno_core::ModuleSpecifier;
use futures::future::FutureExt;
use log::info;
-use regex::Regex;
use std::collections::HashMap;
use std::fs;
use std::future::Future;
@@ -33,7 +32,6 @@ use url::Url;
pub struct SourceFile {
pub url: Url,
pub filename: PathBuf,
- pub types_url: Option<Url>,
pub types_header: Option<String>,
pub media_type: msg::MediaType,
pub source_code: Vec<u8>,
@@ -316,18 +314,11 @@ impl SourceFileFetcher {
};
let media_type = map_content_type(&filepath, None);
- let types_url = match media_type {
- msg::MediaType::JavaScript | msg::MediaType::JSX => {
- get_types_url(&module_url, &source_code, None)
- }
- _ => None,
- };
Ok(SourceFile {
url: module_url.clone(),
filename: filepath,
media_type,
source_code,
- types_url,
types_header: None,
})
}
@@ -394,20 +385,11 @@ impl SourceFileFetcher {
headers.get("content-type").map(|e| e.as_str()),
);
let types_header = headers.get("x-typescript-types").map(|e| e.to_string());
- let types_url = match media_type {
- msg::MediaType::JavaScript | msg::MediaType::JSX => get_types_url(
- &module_url,
- &source_code,
- headers.get("x-typescript-types").map(|e| e.as_str()),
- ),
- _ => None,
- };
Ok(Some(SourceFile {
url: module_url.clone(),
filename: cache_filename,
media_type,
source_code,
- types_url,
types_header,
}))
}
@@ -519,21 +501,12 @@ impl SourceFileFetcher {
let types_header =
headers.get("x-typescript-types").map(String::to_string);
- let types_url = match media_type {
- msg::MediaType::JavaScript | msg::MediaType::JSX => get_types_url(
- &module_url,
- &source,
- headers.get("x-typescript-types").map(String::as_str),
- ),
- _ => None,
- };
let source_file = SourceFile {
url: module_url.clone(),
filename: cache_filepath,
media_type,
source_code: source,
- types_url,
types_header,
};
@@ -617,41 +590,6 @@ fn map_js_like_extension(
}
}
-/// Take a module URL and source code and determines if the source code contains
-/// a type directive, and if so, returns the parsed URL for that type directive.
-fn get_types_url(
- module_url: &Url,
- source_code: &[u8],
- maybe_types_header: Option<&str>,
-) -> Option<Url> {
- lazy_static! {
- /// Matches reference type directives in strings, which provide
- /// type files that should be used by the compiler instead of the
- /// JavaScript file.
- static ref DIRECTIVE_TYPES: Regex = Regex::new(
- r#"(?m)^/{3}\s*<reference\s+types\s*=\s*["']([^"']+)["']\s*/>"#
- )
- .unwrap();
- }
-
- match maybe_types_header {
- Some(types_header) => match Url::parse(&types_header) {
- Ok(url) => Some(url),
- _ => Some(module_url.join(&types_header).unwrap()),
- },
- _ => match DIRECTIVE_TYPES.captures(str::from_utf8(source_code).unwrap()) {
- Some(cap) => {
- let val = cap.get(1).unwrap().as_str();
- match Url::parse(&val) {
- Ok(url) => Some(url),
- _ => Some(module_url.join(&val).unwrap()),
- }
- }
- _ => None,
- },
- }
-}
-
fn filter_shebang(bytes: Vec<u8>) -> Vec<u8> {
let string = str::from_utf8(&bytes).unwrap();
if let Some(i) = string.find('\n') {
@@ -1868,85 +1806,6 @@ mod tests {
drop(http_server_guard);
}
- #[test]
- fn test_get_types_url_1() {
- let module_url = Url::parse("https://example.com/mod.js").unwrap();
- let source_code = b"console.log(\"foo\");".to_owned();
- let result = get_types_url(&module_url, &source_code, None);
- assert_eq!(result, None);
- }
-
- #[test]
- fn test_get_types_url_2() {
- let module_url = Url::parse("https://example.com/mod.js").unwrap();
- let source_code = r#"/// <reference types="./mod.d.ts" />
- console.log("foo");"#
- .as_bytes()
- .to_owned();
- let result = get_types_url(&module_url, &source_code, None);
- assert_eq!(
- result,
- Some(Url::parse("https://example.com/mod.d.ts").unwrap())
- );
- }
-
- #[test]
- fn test_get_types_url_3() {
- let module_url = Url::parse("https://example.com/mod.js").unwrap();
- let source_code = r#"/// <reference types="https://deno.land/mod.d.ts" />
- console.log("foo");"#
- .as_bytes()
- .to_owned();
- let result = get_types_url(&module_url, &source_code, None);
- assert_eq!(
- result,
- Some(Url::parse("https://deno.land/mod.d.ts").unwrap())
- );
- }
-
- #[test]
- fn test_get_types_url_4() {
- let module_url = Url::parse("file:///foo/bar/baz.js").unwrap();
- let source_code = r#"/// <reference types="../qat/baz.d.ts" />
- console.log("foo");"#
- .as_bytes()
- .to_owned();
- let result = get_types_url(&module_url, &source_code, None);
- assert_eq!(
- result,
- Some(Url::parse("file:///foo/qat/baz.d.ts").unwrap())
- );
- }
-
- #[test]
- fn test_get_types_url_5() {
- let module_url = Url::parse("https://example.com/mod.js").unwrap();
- let source_code = b"console.log(\"foo\");".to_owned();
- let result = get_types_url(&module_url, &source_code, Some("./mod.d.ts"));
- assert_eq!(
- result,
- Some(Url::parse("https://example.com/mod.d.ts").unwrap())
- );
- }
-
- #[test]
- fn test_get_types_url_6() {
- let module_url = Url::parse("https://example.com/mod.js").unwrap();
- let source_code = r#"/// <reference types="./mod.d.ts" />
- console.log("foo");"#
- .as_bytes()
- .to_owned();
- let result = get_types_url(
- &module_url,
- &source_code,
- Some("https://deno.land/mod.d.ts"),
- );
- assert_eq!(
- result,
- Some(Url::parse("https://deno.land/mod.d.ts").unwrap())
- );
- }
-
#[tokio::test]
async fn test_fetch_with_types_header() {
let http_server_guard = test_util::http_server();
@@ -1967,33 +1826,8 @@ mod tests {
assert_eq!(source.source_code, b"export const foo = 'foo';");
assert_eq!(&(source.media_type), &msg::MediaType::JavaScript);
assert_eq!(
- source.types_url,
- Some(Url::parse("http://127.0.0.1:4545/xTypeScriptTypes.d.ts").unwrap())
- );
- drop(http_server_guard);
- }
-
- #[tokio::test]
- async fn test_fetch_with_types_reference() {
- let http_server_guard = test_util::http_server();
- let (_temp_dir, fetcher) = test_setup();
- let module_url =
- Url::parse("http://127.0.0.1:4545/referenceTypes.js").unwrap();
- let source = fetcher
- .fetch_remote_source(
- &module_url,
- false,
- false,
- 1,
- &Permissions::allow_all(),
- )
- .await;
- assert!(source.is_ok());
- let source = source.unwrap();
- assert_eq!(&(source.media_type), &msg::MediaType::JavaScript);
- assert_eq!(
- source.types_url,
- Some(Url::parse("http://127.0.0.1:4545/xTypeScriptTypes.d.ts").unwrap())
+ source.types_header,
+ Some("./xTypeScriptTypes.d.ts".to_string())
);
drop(http_server_guard);
}
diff --git a/cli/global_state.rs b/cli/global_state.rs
index 959d794ca..3c7f23435 100644
--- a/cli/global_state.rs
+++ b/cli/global_state.rs
@@ -260,9 +260,9 @@ impl GlobalState {
/// - JSX import
fn should_allow_js(module_graph_files: &[&ModuleGraphFile]) -> bool {
module_graph_files.iter().any(|module_file| {
- if module_file.media_type == (MediaType::JSX as i32) {
+ if module_file.media_type == MediaType::JSX {
true
- } else if module_file.media_type == (MediaType::JavaScript as i32) {
+ } else if module_file.media_type == MediaType::JavaScript {
module_file.imports.iter().any(|import_desc| {
let import_file = module_graph_files
.iter()
@@ -271,9 +271,9 @@ fn should_allow_js(module_graph_files: &[&ModuleGraphFile]) -> bool {
})
.expect("Failed to find imported file");
let media_type = import_file.media_type;
- media_type == (MediaType::TypeScript as i32)
- || media_type == (MediaType::TSX as i32)
- || media_type == (MediaType::JSX as i32)
+ media_type == MediaType::TypeScript
+ || media_type == MediaType::TSX
+ || media_type == MediaType::JSX
})
} else {
false
@@ -301,9 +301,9 @@ fn needs_compilation(
needs_compilation |= module_graph_files.iter().any(|module_file| {
let media_type = module_file.media_type;
- media_type == (MediaType::TypeScript as i32)
- || media_type == (MediaType::TSX as i32)
- || media_type == (MediaType::JSX as i32)
+ media_type == (MediaType::TypeScript)
+ || media_type == (MediaType::TSX)
+ || media_type == (MediaType::JSX)
});
needs_compilation
@@ -317,6 +317,7 @@ fn thread_safe() {
#[test]
fn test_should_allow_js() {
+ use crate::doc::Location;
use crate::module_graph::ImportDescriptor;
assert!(should_allow_js(&[
@@ -330,7 +331,7 @@ fn test_should_allow_js() {
lib_directives: vec![],
types_directives: vec![],
type_headers: vec![],
- media_type: MediaType::TypeScript as i32,
+ media_type: MediaType::TypeScript,
source_code: "function foo() {}".to_string(),
},
&ModuleGraphFile {
@@ -346,12 +347,17 @@ fn test_should_allow_js() {
.unwrap(),
type_directive: None,
resolved_type_directive: None,
+ location: Location {
+ filename: "file:///some/file1.js".to_string(),
+ line: 0,
+ col: 0,
+ },
}],
referenced_files: vec![],
lib_directives: vec![],
types_directives: vec![],
type_headers: vec![],
- media_type: MediaType::JavaScript as i32,
+ media_type: MediaType::JavaScript,
source_code: "function foo() {}".to_string(),
},
],));
@@ -367,7 +373,7 @@ fn test_should_allow_js() {
lib_directives: vec![],
types_directives: vec![],
type_headers: vec![],
- media_type: MediaType::JSX as i32,
+ media_type: MediaType::JSX,
source_code: "function foo() {}".to_string(),
},
&ModuleGraphFile {
@@ -383,12 +389,17 @@ fn test_should_allow_js() {
.unwrap(),
type_directive: None,
resolved_type_directive: None,
+ location: Location {
+ filename: "file:///some/file1.ts".to_string(),
+ line: 0,
+ col: 0,
+ },
}],
referenced_files: vec![],
lib_directives: vec![],
types_directives: vec![],
type_headers: vec![],
- media_type: MediaType::TypeScript as i32,
+ media_type: MediaType::TypeScript,
source_code: "function foo() {}".to_string(),
},
]));
@@ -404,7 +415,7 @@ fn test_should_allow_js() {
lib_directives: vec![],
types_directives: vec![],
type_headers: vec![],
- media_type: MediaType::JavaScript as i32,
+ media_type: MediaType::JavaScript,
source_code: "function foo() {}".to_string(),
},
&ModuleGraphFile {
@@ -420,12 +431,17 @@ fn test_should_allow_js() {
.unwrap(),
type_directive: None,
resolved_type_directive: None,
+ location: Location {
+ filename: "file:///some/file.js".to_string(),
+ line: 0,
+ col: 0,
+ },
}],
referenced_files: vec![],
lib_directives: vec![],
types_directives: vec![],
type_headers: vec![],
- media_type: MediaType::JavaScript as i32,
+ media_type: MediaType::JavaScript,
source_code: "function foo() {}".to_string(),
},
],));
@@ -446,7 +462,7 @@ fn test_needs_compilation() {
lib_directives: vec![],
types_directives: vec![],
type_headers: vec![],
- media_type: MediaType::JavaScript as i32,
+ media_type: MediaType::JavaScript,
source_code: "function foo() {}".to_string(),
}],
));
@@ -470,7 +486,7 @@ fn test_needs_compilation() {
lib_directives: vec![],
types_directives: vec![],
type_headers: vec![],
- media_type: MediaType::TypeScript as i32,
+ media_type: MediaType::TypeScript,
source_code: "function foo() {}".to_string(),
},
&ModuleGraphFile {
@@ -483,7 +499,7 @@ fn test_needs_compilation() {
lib_directives: vec![],
types_directives: vec![],
type_headers: vec![],
- media_type: MediaType::JavaScript as i32,
+ media_type: MediaType::JavaScript,
source_code: "function foo() {}".to_string(),
},
],
diff --git a/cli/js/compiler.ts b/cli/js/compiler.ts
index 46dbfcaf9..fa2c5fd41 100644
--- a/cli/js/compiler.ts
+++ b/cli/js/compiler.ts
@@ -441,7 +441,6 @@ class Host implements ts.CompilerHost {
specifier,
containingFile,
maybeUrl,
- sf: SourceFile.getCached(maybeUrl!),
});
let sourceFile: SourceFile | undefined = undefined;
@@ -657,26 +656,28 @@ type WriteFileCallback = (
sourceFiles?: readonly ts.SourceFile[]
) => void;
-interface WriteFileState {
- type: CompilerRequestType;
- bundle?: boolean;
- bundleOutput?: string;
+interface CompileWriteFileState {
+ rootNames: string[];
+ emitMap: Record<string, EmittedSource>;
+}
+
+interface BundleWriteFileState {
host?: Host;
+ bundleOutput: undefined | string;
rootNames: string[];
- emitMap?: Record<string, EmittedSource>;
- sources?: Record<string, string>;
}
// Warning! The values in this enum are duplicated in `cli/msg.rs`
// Update carefully!
enum CompilerRequestType {
Compile = 0,
- RuntimeCompile = 1,
- RuntimeTranspile = 2,
+ Bundle = 1,
+ RuntimeCompile = 2,
+ RuntimeBundle = 3,
+ RuntimeTranspile = 4,
}
-// TODO(bartlomieju): probably could be defined inline?
-function createBundleWriteFile(state: WriteFileState): WriteFileCallback {
+function createBundleWriteFile(state: BundleWriteFileState): WriteFileCallback {
return function writeFile(
_fileName: string,
data: string,
@@ -684,8 +685,6 @@ function createBundleWriteFile(state: WriteFileState): WriteFileCallback {
): void {
assert(sourceFiles != null);
assert(state.host);
- assert(state.emitMap);
- assert(state.bundle);
// we only support single root names for bundles
assert(state.rootNames.length === 1);
state.bundleOutput = buildBundle(
@@ -697,17 +696,15 @@ function createBundleWriteFile(state: WriteFileState): WriteFileCallback {
};
}
-// TODO(bartlomieju): probably could be defined inline?
-function createCompileWriteFile(state: WriteFileState): WriteFileCallback {
+function createCompileWriteFile(
+ state: CompileWriteFileState
+): WriteFileCallback {
return function writeFile(
fileName: string,
data: string,
sourceFiles?: readonly ts.SourceFile[]
): void {
assert(sourceFiles != null);
- assert(state.host);
- assert(state.emitMap);
- assert(!state.bundle);
assert(sourceFiles.length === 1);
state.emitMap[fileName] = {
filename: sourceFiles[0].fileName,
@@ -1067,7 +1064,8 @@ interface SourceFileMapEntry {
typeHeaders: ReferenceDescriptor[];
}
-interface CompilerRequestCompile {
+/** Used when "deno run" is invoked */
+interface CompileRequest {
type: CompilerRequestType.Compile;
allowJs: boolean;
target: CompilerHostTarget;
@@ -1075,53 +1073,81 @@ interface CompilerRequestCompile {
configPath?: string;
config?: string;
unstable: boolean;
- bundle: boolean;
cwd: string;
// key value is fully resolved URL
sourceFileMap: Record<string, SourceFileMapEntry>;
}
-interface CompilerRequestRuntimeCompile {
+/** Used when "deno bundle" is invoked */
+interface BundleRequest {
+ type: CompilerRequestType.Bundle;
+ target: CompilerHostTarget;
+ rootNames: string[];
+ configPath?: string;
+ config?: string;
+ unstable: boolean;
+ cwd: string;
+ // key value is fully resolved URL
+ sourceFileMap: Record<string, SourceFileMapEntry>;
+}
+
+/** Used when "Deno.compile()" API is called */
+interface RuntimeCompileRequest {
type: CompilerRequestType.RuntimeCompile;
target: CompilerHostTarget;
rootNames: string[];
sourceFileMap: Record<string, SourceFileMapEntry>;
unstable?: boolean;
- bundle?: boolean;
options?: string;
}
-interface CompilerRequestRuntimeTranspile {
+/** Used when "Deno.bundle()" API is called */
+interface RuntimeBundleRequest {
+ type: CompilerRequestType.RuntimeBundle;
+ target: CompilerHostTarget;
+ rootNames: string[];
+ sourceFileMap: Record<string, SourceFileMapEntry>;
+ unstable?: boolean;
+ options?: string;
+}
+
+/** Used when "Deno.transpileOnly()" API is called */
+interface RuntimeTranspileRequest {
type: CompilerRequestType.RuntimeTranspile;
sources: Record<string, string>;
options?: string;
}
type CompilerRequest =
- | CompilerRequestCompile
- | CompilerRequestRuntimeCompile
- | CompilerRequestRuntimeTranspile;
+ | CompileRequest
+ | BundleRequest
+ | RuntimeCompileRequest
+ | RuntimeBundleRequest
+ | RuntimeTranspileRequest;
-interface CompileResult {
- emitMap?: Record<string, EmittedSource>;
+interface CompileResponse {
+ emitMap: Record<string, EmittedSource>;
+ diagnostics: Diagnostic;
+}
+
+interface BundleResponse {
bundleOutput?: string;
diagnostics: Diagnostic;
}
-interface RuntimeCompileResult {
+interface RuntimeCompileResponse {
emitMap: Record<string, EmittedSource>;
diagnostics: DiagnosticItem[];
}
-interface RuntimeBundleResult {
- output: string;
+interface RuntimeBundleResponse {
+ output?: string;
diagnostics: DiagnosticItem[];
}
-function compile(request: CompilerRequestCompile): CompileResult {
+function compile(request: CompileRequest): CompileResponse {
const {
allowJs,
- bundle,
config,
configPath,
rootNames,
@@ -1139,30 +1165,19 @@ function compile(request: CompilerRequestCompile): CompileResult {
// each file that needs to be emitted. The Deno compiler host delegates
// this, to make it easier to perform the right actions, which vary
// based a lot on the request.
- const state: WriteFileState = {
- type: request.type,
- emitMap: {},
- bundle,
- host: undefined,
+ const state: CompileWriteFileState = {
rootNames,
+ emitMap: {},
};
- let writeFile: WriteFileCallback;
- if (bundle) {
- writeFile = createBundleWriteFile(state);
- } else {
- writeFile = createCompileWriteFile(state);
- }
- const host = (state.host = new Host({
- bundle,
+ const host = new Host({
+ bundle: false,
target,
- writeFile,
unstable,
- }));
+ writeFile: createCompileWriteFile(state),
+ });
let diagnostics: readonly ts.Diagnostic[] = [];
- if (!bundle) {
- host.mergeOptions({ allowJs });
- }
+ host.mergeOptions({ allowJs });
// if there is a configuration supplied, we need to parse that
if (config && config.length && configPath) {
@@ -1186,24 +1201,12 @@ function compile(request: CompilerRequestCompile): CompileResult {
.filter(({ code }) => !ignoredDiagnostics.includes(code));
// We will only proceed with the emit if there are no diagnostics.
- if (diagnostics && diagnostics.length === 0) {
- if (bundle) {
- // we only support a single root module when bundling
- assert(rootNames.length === 1);
- setRootExports(program, rootNames[0]);
- }
+ if (diagnostics.length === 0) {
const emitResult = program.emit();
// If `checkJs` is off we still might be compiling entry point JavaScript file
// (if it has `.ts` imports), but it won't be emitted. In that case we skip
// assertion.
- if (!bundle) {
- if (options.checkJs) {
- assert(
- emitResult.emitSkipped === false,
- "Unexpected skip of the emit."
- );
- }
- } else {
+ if (options.checkJs) {
assert(
emitResult.emitSkipped === false,
"Unexpected skip of the emit."
@@ -1215,21 +1218,96 @@ function compile(request: CompilerRequestCompile): CompileResult {
}
}
- let bundleOutput = undefined;
+ log("<<< compile end", {
+ rootNames,
+ type: CompilerRequestType[request.type],
+ });
+
+ return {
+ emitMap: state.emitMap,
+ diagnostics: fromTypeScriptDiagnostic(diagnostics),
+ };
+}
+
+function bundle(request: BundleRequest): BundleResponse {
+ const {
+ config,
+ configPath,
+ rootNames,
+ target,
+ unstable,
+ cwd,
+ sourceFileMap,
+ } = request;
+ log(">>> start start", {
+ rootNames,
+ type: CompilerRequestType[request.type],
+ });
+
+  // When a program is emitted, TypeScript will call `writeFile` with
+ // each file that needs to be emitted. The Deno compiler host delegates
+ // this, to make it easier to perform the right actions, which vary
+ // based a lot on the request.
+ const state: BundleWriteFileState = {
+ rootNames,
+ bundleOutput: undefined,
+ };
+ const host = new Host({
+ bundle: true,
+ target,
+ unstable,
+ writeFile: createBundleWriteFile(state),
+ });
+ state.host = host;
+ let diagnostics: readonly ts.Diagnostic[] = [];
+
+ // if there is a configuration supplied, we need to parse that
+ if (config && config.length && configPath) {
+ const configResult = host.configure(cwd, configPath, config);
+ diagnostics = processConfigureResponse(configResult, configPath) || [];
+ }
+
+ buildSourceFileCache(sourceFileMap);
+ // if there was a configuration and no diagnostics with it, we will continue
+ // to generate the program and possibly emit it.
+ if (diagnostics.length === 0) {
+ const options = host.getCompilationSettings();
+ const program = ts.createProgram({
+ rootNames,
+ options,
+ host,
+ });
+
+ diagnostics = ts
+ .getPreEmitDiagnostics(program)
+ .filter(({ code }) => !ignoredDiagnostics.includes(code));
+
+ // We will only proceed with the emit if there are no diagnostics.
+ if (diagnostics.length === 0) {
+ // we only support a single root module when bundling
+ assert(rootNames.length === 1);
+ setRootExports(program, rootNames[0]);
+ const emitResult = program.emit();
+ assert(emitResult.emitSkipped === false, "Unexpected skip of the emit.");
+ // emitResult.diagnostics is `readonly` in TS3.5+ and can't be assigned
+ // without casting.
+ diagnostics = emitResult.diagnostics;
+ }
+ }
+
+ let bundleOutput;
- if (diagnostics && diagnostics.length === 0 && bundle) {
+ if (diagnostics.length === 0) {
assert(state.bundleOutput);
bundleOutput = state.bundleOutput;
}
- assert(state.emitMap);
- const result: CompileResult = {
- emitMap: state.emitMap,
+ const result: BundleResponse = {
bundleOutput,
diagnostics: fromTypeScriptDiagnostic(diagnostics),
};
- log("<<< compile end", {
+ log("<<< bundle end", {
rootNames,
type: CompilerRequestType[request.type],
});
@@ -1238,20 +1316,12 @@ function compile(request: CompilerRequestCompile): CompileResult {
}
function runtimeCompile(
- request: CompilerRequestRuntimeCompile
-): RuntimeCompileResult | RuntimeBundleResult {
- const {
- bundle,
- options,
- rootNames,
- target,
- unstable,
- sourceFileMap,
- } = request;
+ request: RuntimeCompileRequest
+): RuntimeCompileResponse {
+ const { options, rootNames, target, unstable, sourceFileMap } = request;
log(">>> runtime compile start", {
rootNames,
- bundle,
});
// if there are options, convert them into TypeScript compiler options,
@@ -1264,26 +1334,15 @@ function runtimeCompile(
buildLocalSourceFileCache(sourceFileMap);
- const state: WriteFileState = {
- type: request.type,
- bundle,
- host: undefined,
+ const state: CompileWriteFileState = {
rootNames,
emitMap: {},
- bundleOutput: undefined,
};
- let writeFile: WriteFileCallback;
- if (bundle) {
- writeFile = createBundleWriteFile(state);
- } else {
- writeFile = createCompileWriteFile(state);
- }
-
- const host = (state.host = new Host({
- bundle,
+ const host = new Host({
+ bundle: false,
target,
- writeFile,
- }));
+ writeFile: createCompileWriteFile(state),
+ });
const compilerOptions = [DEFAULT_RUNTIME_COMPILE_OPTIONS];
if (convertedOptions) {
compilerOptions.push(convertedOptions);
@@ -1296,9 +1355,7 @@ function runtimeCompile(
],
});
}
- if (bundle) {
- compilerOptions.push(DEFAULT_BUNDLER_OPTIONS);
- }
+
host.mergeOptions(...compilerOptions);
const program = ts.createProgram({
@@ -1307,10 +1364,6 @@ function runtimeCompile(
host,
});
- if (bundle) {
- setRootExports(program, rootNames[0]);
- }
-
const diagnostics = ts
.getPreEmitDiagnostics(program)
.filter(({ code }) => !ignoredDiagnostics.includes(code));
@@ -1319,10 +1372,8 @@ function runtimeCompile(
assert(emitResult.emitSkipped === false, "Unexpected skip of the emit.");
- assert(state.emitMap);
log("<<< runtime compile finish", {
rootNames,
- bundle,
emitMap: Object.keys(state.emitMap),
});
@@ -1330,21 +1381,86 @@ function runtimeCompile(
? fromTypeScriptDiagnostic(diagnostics).items
: [];
- if (bundle) {
- return {
- diagnostics: maybeDiagnostics,
- output: state.bundleOutput,
- } as RuntimeBundleResult;
- } else {
- return {
- diagnostics: maybeDiagnostics,
- emitMap: state.emitMap,
- } as RuntimeCompileResult;
+ return {
+ diagnostics: maybeDiagnostics,
+ emitMap: state.emitMap,
+ };
+}
+
+function runtimeBundle(request: RuntimeBundleRequest): RuntimeBundleResponse {
+ const { options, rootNames, target, unstable, sourceFileMap } = request;
+
+ log(">>> runtime bundle start", {
+ rootNames,
+ });
+
+ // if there are options, convert them into TypeScript compiler options,
+ // and resolve any external file references
+ let convertedOptions: ts.CompilerOptions | undefined;
+ if (options) {
+ const result = convertCompilerOptions(options);
+ convertedOptions = result.options;
}
+
+ buildLocalSourceFileCache(sourceFileMap);
+
+ const state: BundleWriteFileState = {
+ rootNames,
+ bundleOutput: undefined,
+ };
+ const host = new Host({
+ bundle: true,
+ target,
+ writeFile: createBundleWriteFile(state),
+ });
+ state.host = host;
+
+ const compilerOptions = [DEFAULT_RUNTIME_COMPILE_OPTIONS];
+ if (convertedOptions) {
+ compilerOptions.push(convertedOptions);
+ }
+ if (unstable) {
+ compilerOptions.push({
+ lib: [
+ "deno.unstable",
+ ...((convertedOptions && convertedOptions.lib) || ["deno.window"]),
+ ],
+ });
+ }
+ compilerOptions.push(DEFAULT_BUNDLER_OPTIONS);
+ host.mergeOptions(...compilerOptions);
+
+ const program = ts.createProgram({
+ rootNames,
+ options: host.getCompilationSettings(),
+ host,
+ });
+
+ setRootExports(program, rootNames[0]);
+ const diagnostics = ts
+ .getPreEmitDiagnostics(program)
+ .filter(({ code }) => !ignoredDiagnostics.includes(code));
+
+ const emitResult = program.emit();
+
+ assert(emitResult.emitSkipped === false, "Unexpected skip of the emit.");
+
+ log("<<< runtime bundle finish", {
+ rootNames,
+ });
+
+ const maybeDiagnostics = diagnostics.length
+ ? fromTypeScriptDiagnostic(diagnostics).items
+ : [];
+
+ return {
+ diagnostics: maybeDiagnostics,
+ output: state.bundleOutput,
+ };
}
function runtimeTranspile(
- request: CompilerRequestRuntimeTranspile
+ request: RuntimeTranspileRequest
): Promise<Record<string, TranspileOnlyResult>> {
const result: Record<string, TranspileOnlyResult> = {};
const { sources, options } = request;
@@ -1376,19 +1492,27 @@ async function tsCompilerOnMessage({
}): Promise<void> {
switch (request.type) {
case CompilerRequestType.Compile: {
- const result = compile(request as CompilerRequestCompile);
+ const result = compile(request as CompileRequest);
+ globalThis.postMessage(result);
+ break;
+ }
+ case CompilerRequestType.Bundle: {
+ const result = bundle(request as BundleRequest);
globalThis.postMessage(result);
break;
}
case CompilerRequestType.RuntimeCompile: {
- const result = runtimeCompile(request as CompilerRequestRuntimeCompile);
+ const result = runtimeCompile(request as RuntimeCompileRequest);
+ globalThis.postMessage(result);
+ break;
+ }
+ case CompilerRequestType.RuntimeBundle: {
+ const result = runtimeBundle(request as RuntimeBundleRequest);
globalThis.postMessage(result);
break;
}
case CompilerRequestType.RuntimeTranspile: {
- const result = await runtimeTranspile(
- request as CompilerRequestRuntimeTranspile
- );
+ const result = await runtimeTranspile(request as RuntimeTranspileRequest);
globalThis.postMessage(result);
break;
}
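
Aside, not part of the diff: the RuntimeCompileRequest and RuntimeBundleRequest shapes above back the unstable Deno.compile() and Deno.bundle() runtime APIs that this compiler worker serves. A hypothetical usage sketch follows, assuming the tuple-returning signatures those unstable APIs had around this release (run with `deno run --unstable`); after this refactor the first call is handled by runtimeCompile() and the second by runtimeBundle().

// Hypothetical example, not part of the commit. Assumes the unstable
// runtime compiler API of this era:
//   Deno.compile() -> [diagnostics, emitMap]
//   Deno.bundle()  -> [diagnostics, bundledSource]
const [compileDiagnostics, emitMap] = await Deno.compile("/main.ts", {
  "/main.ts": `import { greet } from "./greet.ts"; greet();`,
  "/greet.ts": `export function greet(): void { console.log("hi"); }`,
});
console.log(compileDiagnostics, Object.keys(emitMap));

const [bundleDiagnostics, bundleOutput] = await Deno.bundle("/main.ts", {
  "/main.ts": `console.log("bundled");`,
});
console.log(bundleDiagnostics, bundleOutput.length);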
diff --git a/cli/main.rs b/cli/main.rs
index 1749a38cf..c94aed244 100644
--- a/cli/main.rs
+++ b/cli/main.rs
@@ -383,7 +383,6 @@ async fn eval_command(
let source_file = SourceFile {
filename: main_module_url.to_file_path().unwrap(),
url: main_module_url,
- types_url: None,
types_header: None,
media_type: if as_typescript {
MediaType::TypeScript
@@ -588,7 +587,6 @@ async fn run_command(flags: Flags, script: String) -> Result<(), ErrBox> {
let source_file = SourceFile {
filename: main_module_url.to_file_path().unwrap(),
url: main_module_url,
- types_url: None,
types_header: None,
media_type: MediaType::TypeScript,
source_code: source,
@@ -646,7 +644,6 @@ async fn test_command(
let source_file = SourceFile {
filename: test_file_url.to_file_path().unwrap(),
url: test_file_url,
- types_url: None,
types_header: None,
media_type: MediaType::TypeScript,
source_code: test_file.clone().into_bytes(),
diff --git a/cli/module_graph.rs b/cli/module_graph.rs
index 9cded48a0..b5bde1a19 100644
--- a/cli/module_graph.rs
+++ b/cli/module_graph.rs
@@ -8,8 +8,10 @@ use crate::import_map::ImportMap;
use crate::msg::MediaType;
use crate::op_error::OpError;
use crate::permissions::Permissions;
-use crate::swc_util::analyze_dependencies_and_references;
-use crate::swc_util::TsReferenceKind;
+use crate::tsc::pre_process_file;
+use crate::tsc::ImportDesc;
+use crate::tsc::TsReferenceDesc;
+use crate::tsc::TsReferenceKind;
use crate::tsc::AVAILABLE_LIBS;
use deno_core::ErrBox;
use deno_core::ModuleSpecifier;
@@ -21,20 +23,142 @@ use serde::Serialize;
use serde::Serializer;
use std::collections::HashMap;
use std::collections::HashSet;
-use std::hash::BuildHasher;
use std::path::PathBuf;
use std::pin::Pin;
// TODO(bartlomieju): it'd be great if this function returned
// more structured data and possibly format the same as TS diagnostics.
/// Decorate error with location of import that caused the error.
-fn err_with_location(e: ErrBox, location: &Location) -> ErrBox {
- let location_str = format!(
- "\nImported from \"{}:{}\"",
- location.filename, location.line
- );
- let err_str = e.to_string();
- OpError::other(format!("{}{}", err_str, location_str)).into()
+fn err_with_location(e: ErrBox, maybe_location: Option<&Location>) -> ErrBox {
+ if let Some(location) = maybe_location {
+ let location_str = format!(
+ "\nImported from \"{}:{}\"",
+ location.filename, location.line
+ );
+ let err_str = e.to_string();
+ OpError::other(format!("{}{}", err_str, location_str)).into()
+ } else {
+ e
+ }
+}
+
+/// Disallow http:// imports from modules loaded over https://
+fn validate_no_downgrade(
+ module_specifier: &ModuleSpecifier,
+ maybe_referrer: Option<&ModuleSpecifier>,
+ maybe_location: Option<&Location>,
+) -> Result<(), ErrBox> {
+ if let Some(referrer) = maybe_referrer.as_ref() {
+ if let "https" = referrer.as_url().scheme() {
+ if let "http" = module_specifier.as_url().scheme() {
+ let e = OpError::permission_denied(
+ "Modules loaded over https:// are not allowed to import modules over http://".to_string()
+ );
+ return Err(err_with_location(e.into(), maybe_location));
+ };
+ };
+ };
+
+ Ok(())
+}
+
+/// Verify that remote file doesn't try to statically import local file.
+fn validate_no_file_from_remote(
+ module_specifier: &ModuleSpecifier,
+ maybe_referrer: Option<&ModuleSpecifier>,
+ maybe_location: Option<&Location>,
+) -> Result<(), ErrBox> {
+ if let Some(referrer) = maybe_referrer.as_ref() {
+ let referrer_url = referrer.as_url();
+ match referrer_url.scheme() {
+ "http" | "https" => {
+ let specifier_url = module_specifier.as_url();
+ match specifier_url.scheme() {
+ "http" | "https" => {}
+ _ => {
+ let e = OpError::permission_denied(
+ "Remote modules are not allowed to statically import local modules. Use dynamic import instead.".to_string()
+ );
+ return Err(err_with_location(e.into(), maybe_location));
+ }
+ }
+ }
+ _ => {}
+ }
+ }
+
+ Ok(())
+}
+
+// TODO(bartlomieju): handle imports/references in ambient contexts/TS modules
+// https://github.com/denoland/deno/issues/6133
+fn resolve_imports_and_references(
+ referrer: ModuleSpecifier,
+ maybe_import_map: Option<&ImportMap>,
+ import_descs: Vec<ImportDesc>,
+ ref_descs: Vec<TsReferenceDesc>,
+) -> Result<(Vec<ImportDescriptor>, Vec<ReferenceDescriptor>), ErrBox> {
+ let mut imports = vec![];
+ let mut references = vec![];
+
+ for import_desc in import_descs {
+ let maybe_resolved = if let Some(import_map) = maybe_import_map.as_ref() {
+ import_map.resolve(&import_desc.specifier, &referrer.to_string())?
+ } else {
+ None
+ };
+
+ let resolved_specifier = if let Some(resolved) = maybe_resolved {
+ resolved
+ } else {
+ ModuleSpecifier::resolve_import(
+ &import_desc.specifier,
+ &referrer.to_string(),
+ )?
+ };
+
+ let resolved_type_directive =
+ if let Some(types_specifier) = import_desc.deno_types.as_ref() {
+ Some(ModuleSpecifier::resolve_import(
+ &types_specifier,
+ &referrer.to_string(),
+ )?)
+ } else {
+ None
+ };
+
+ let import_descriptor = ImportDescriptor {
+ specifier: import_desc.specifier.to_string(),
+ resolved_specifier,
+ type_directive: import_desc.deno_types.clone(),
+ resolved_type_directive,
+ location: import_desc.location,
+ };
+
+ imports.push(import_descriptor);
+ }
+
+ for ref_desc in ref_descs {
+ if AVAILABLE_LIBS.contains(&ref_desc.specifier.as_str()) {
+ continue;
+ }
+
+ let resolved_specifier = ModuleSpecifier::resolve_import(
+ &ref_desc.specifier,
+ &referrer.to_string(),
+ )?;
+
+ let reference_descriptor = ReferenceDescriptor {
+ specifier: ref_desc.specifier.to_string(),
+ resolved_specifier,
+ kind: ref_desc.kind,
+ location: ref_desc.location,
+ };
+
+ references.push(reference_descriptor);
+ }
+
+ Ok((imports, references))
}
fn serialize_module_specifier<S>(
@@ -68,8 +192,7 @@ const SUPPORTED_MEDIA_TYPES: [MediaType; 4] = [
MediaType::TSX,
];
-#[derive(Debug, Serialize)]
-pub struct ModuleGraph(HashMap<String, ModuleGraphFile>);
+pub type ModuleGraph = HashMap<String, ModuleGraphFile>;
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
@@ -82,6 +205,8 @@ pub struct ImportDescriptor {
pub type_directive: Option<String>,
#[serde(serialize_with = "serialize_option_module_specifier")]
pub resolved_type_directive: Option<ModuleSpecifier>,
+ #[serde(skip)]
+ pub location: Location,
}
#[derive(Debug, Serialize)]
@@ -90,6 +215,10 @@ pub struct ReferenceDescriptor {
pub specifier: String,
#[serde(serialize_with = "serialize_module_specifier")]
pub resolved_specifier: ModuleSpecifier,
+ #[serde(skip)]
+ pub kind: TsReferenceKind,
+ #[serde(skip)]
+ pub location: Location,
}
#[derive(Debug, Serialize)]
@@ -104,7 +233,7 @@ pub struct ModuleGraphFile {
pub lib_directives: Vec<ReferenceDescriptor>,
pub types_directives: Vec<ReferenceDescriptor>,
pub type_headers: Vec<ReferenceDescriptor>,
- pub media_type: i32,
+ pub media_type: MediaType,
pub source_code: String,
}
@@ -117,7 +246,7 @@ pub struct ModuleGraphLoader {
maybe_import_map: Option<ImportMap>,
pending_downloads: FuturesUnordered<SourceFileFuture>,
has_downloaded: HashSet<ModuleSpecifier>,
- pub graph: ModuleGraph,
+ graph: ModuleGraph,
is_dyn_import: bool,
analyze_dynamic_imports: bool,
}
@@ -136,7 +265,7 @@ impl ModuleGraphLoader {
maybe_import_map,
pending_downloads: FuturesUnordered::new(),
has_downloaded: HashSet::new(),
- graph: ModuleGraph(HashMap::new()),
+ graph: ModuleGraph::new(),
is_dyn_import,
analyze_dynamic_imports,
}
@@ -153,7 +282,7 @@ impl ModuleGraphLoader {
specifier: &ModuleSpecifier,
maybe_referrer: Option<ModuleSpecifier>,
) -> Result<(), ErrBox> {
- self.download_module(specifier.clone(), maybe_referrer)?;
+ self.download_module(specifier.clone(), maybe_referrer, None)?;
loop {
let (specifier, source_file) =
@@ -170,10 +299,10 @@ impl ModuleGraphLoader {
/// This method is used to create a graph from in-memory files stored in
/// a hash map. Useful for creating module graph for code received from
/// the runtime.
- pub fn build_local_graph<S: BuildHasher>(
+ pub fn build_local_graph(
&mut self,
_root_name: &str,
- source_map: &HashMap<String, String, S>,
+ source_map: &HashMap<String, String>,
) -> Result<(), ErrBox> {
for (spec, source_code) in source_map.iter() {
self.visit_memory_module(spec.to_string(), source_code.to_string())?;
@@ -183,8 +312,8 @@ impl ModuleGraphLoader {
}
/// Consumes the loader and returns created graph.
- pub fn get_graph(self) -> HashMap<String, ModuleGraphFile> {
- self.graph.0
+ pub fn get_graph(self) -> ModuleGraph {
+ self.graph
}
fn visit_memory_module(
@@ -192,7 +321,6 @@ impl ModuleGraphLoader {
specifier: String,
source_code: String,
) -> Result<(), ErrBox> {
- let mut imports = vec![];
let mut referenced_files = vec![];
let mut lib_directives = vec![];
let mut types_directives = vec![];
@@ -208,87 +336,40 @@ impl ModuleGraphLoader {
ModuleSpecifier::resolve_url(&format!("memory://{}", specifier))?
};
- let (import_descs, ref_descs) = analyze_dependencies_and_references(
- &specifier,
+ let (raw_imports, raw_references) = pre_process_file(
+ &module_specifier.to_string(),
map_file_extension(&PathBuf::from(&specifier)),
&source_code,
self.analyze_dynamic_imports,
)?;
+ let (imports, references) = resolve_imports_and_references(
+ module_specifier.clone(),
+ self.maybe_import_map.as_ref(),
+ raw_imports,
+ raw_references,
+ )?;
- for import_desc in import_descs {
- let maybe_resolved =
- if let Some(import_map) = self.maybe_import_map.as_ref() {
- import_map
- .resolve(&import_desc.specifier, &module_specifier.to_string())?
- } else {
- None
- };
-
- let resolved_specifier = if let Some(resolved) = maybe_resolved {
- resolved
- } else {
- ModuleSpecifier::resolve_import(
- &import_desc.specifier,
- &module_specifier.to_string(),
- )?
- };
-
- let resolved_type_directive =
- if let Some(types_specifier) = import_desc.deno_types.as_ref() {
- Some(ModuleSpecifier::resolve_import(
- &types_specifier,
- &module_specifier.to_string(),
- )?)
- } else {
- None
- };
-
- let import_descriptor = ImportDescriptor {
- specifier: import_desc.specifier.to_string(),
- resolved_specifier,
- type_directive: import_desc.deno_types,
- resolved_type_directive,
- };
-
- imports.push(import_descriptor);
- }
-
- for ref_desc in ref_descs {
- if AVAILABLE_LIBS.contains(&ref_desc.specifier.as_str()) {
- continue;
- }
-
- let resolved_specifier = ModuleSpecifier::resolve_import(
- &ref_desc.specifier,
- &module_specifier.to_string(),
- )?;
-
- let reference_descriptor = ReferenceDescriptor {
- specifier: ref_desc.specifier.to_string(),
- resolved_specifier,
- };
-
- match ref_desc.kind {
+ for ref_descriptor in references {
+ match ref_descriptor.kind {
TsReferenceKind::Lib => {
- lib_directives.push(reference_descriptor);
+ lib_directives.push(ref_descriptor);
}
TsReferenceKind::Types => {
- types_directives.push(reference_descriptor);
+ types_directives.push(ref_descriptor);
}
TsReferenceKind::Path => {
- referenced_files.push(reference_descriptor);
+ referenced_files.push(ref_descriptor);
}
}
}
- self.graph.0.insert(
+ self.graph.insert(
module_specifier.to_string(),
ModuleGraphFile {
specifier: specifier.to_string(),
url: specifier.to_string(),
redirect: None,
- media_type: map_file_extension(&PathBuf::from(specifier.clone()))
- as i32,
+ media_type: map_file_extension(&PathBuf::from(specifier.clone())),
filename: specifier,
source_code,
imports,
@@ -307,43 +388,24 @@ impl ModuleGraphLoader {
&mut self,
module_specifier: ModuleSpecifier,
maybe_referrer: Option<ModuleSpecifier>,
+ maybe_location: Option<Location>,
) -> Result<(), ErrBox> {
if self.has_downloaded.contains(&module_specifier) {
return Ok(());
}
- // Disallow http:// imports from modules loaded over https://
- if let Some(referrer) = maybe_referrer.as_ref() {
- if let "https" = referrer.as_url().scheme() {
- if let "http" = module_specifier.as_url().scheme() {
- let e = OpError::permission_denied(
- "Modules loaded over https:// are not allowed to import modules over http://".to_string()
- );
- return Err(e.into());
- };
- };
- };
+ validate_no_downgrade(
+ &module_specifier,
+ maybe_referrer.as_ref(),
+ maybe_location.as_ref(),
+ )?;
if !self.is_dyn_import {
- // Verify that remote file doesn't try to statically import local file.
- if let Some(referrer) = maybe_referrer.as_ref() {
- let referrer_url = referrer.as_url();
- match referrer_url.scheme() {
- "http" | "https" => {
- let specifier_url = module_specifier.as_url();
- match specifier_url.scheme() {
- "http" | "https" => {}
- _ => {
- let e = OpError::permission_denied(
- "Remote modules are not allowed to statically import local modules. Use dynamic import instead.".to_string()
- );
- return Err(e.into());
- }
- }
- }
- _ => {}
- }
- }
+ validate_no_file_from_remote(
+ &module_specifier,
+ maybe_referrer.as_ref(),
+ maybe_location.as_ref(),
+ )?;
}
self.has_downloaded.insert(module_specifier.clone());
@@ -355,7 +417,9 @@ impl ModuleGraphLoader {
let spec_ = spec.clone();
let source_file = file_fetcher
.fetch_source_file(&spec_, maybe_referrer, perms)
- .await?;
+ .await
+ .map_err(|e| err_with_location(e, maybe_location.as_ref()))?;
+
Ok((spec_.clone(), source_file))
}
.boxed_local();
@@ -383,14 +447,14 @@ impl ModuleGraphLoader {
// for proper URL point to redirect target.
if module_specifier.as_url() != &source_file.url {
// TODO(bartlomieju): refactor, this is a band-aid
- self.graph.0.insert(
+ self.graph.insert(
module_specifier.to_string(),
ModuleGraphFile {
specifier: module_specifier.to_string(),
url: module_specifier.to_string(),
redirect: Some(source_file.url.to_string()),
filename: source_file.filename.to_str().unwrap().to_string(),
- media_type: source_file.media_type as i32,
+ media_type: source_file.media_type,
source_code: "".to_string(),
imports: vec![],
referenced_files: vec![],
@@ -412,121 +476,85 @@ impl ModuleGraphLoader {
&types_specifier,
&module_specifier.to_string(),
)?,
+ kind: TsReferenceKind::Types,
+ // TODO(bartlomieju): location is not needed in here and constructing
+ // location by hand is bad
+ location: Location {
+ filename: module_specifier.to_string(),
+ line: 0,
+ col: 0,
+ },
};
self.download_module(
type_header.resolved_specifier.clone(),
Some(module_specifier.clone()),
+ None,
)?;
type_headers.push(type_header);
}
- let (import_descs, ref_descs) = analyze_dependencies_and_references(
+ let (raw_imports, raw_refs) = pre_process_file(
&module_specifier.to_string(),
source_file.media_type,
&source_code,
self.analyze_dynamic_imports,
)?;
+ let (imports_, references) = resolve_imports_and_references(
+ module_specifier.clone(),
+ self.maybe_import_map.as_ref(),
+ raw_imports,
+ raw_refs,
+ )?;
- for import_desc in import_descs {
- let maybe_resolved =
- if let Some(import_map) = self.maybe_import_map.as_ref() {
- import_map
- .resolve(&import_desc.specifier, &module_specifier.to_string())?
- } else {
- None
- };
-
- let resolved_specifier = if let Some(resolved) = maybe_resolved {
- resolved
- } else {
- ModuleSpecifier::resolve_import(
- &import_desc.specifier,
- &module_specifier.to_string(),
- )?
- };
-
- let resolved_type_directive =
- if let Some(types_specifier) = import_desc.deno_types.as_ref() {
- Some(ModuleSpecifier::resolve_import(
- &types_specifier,
- &module_specifier.to_string(),
- )?)
- } else {
- None
- };
-
- let import_descriptor = ImportDescriptor {
- specifier: import_desc.specifier.to_string(),
- resolved_specifier,
- type_directive: import_desc.deno_types.clone(),
- resolved_type_directive,
- };
-
- self
- .download_module(
- import_descriptor.resolved_specifier.clone(),
- Some(module_specifier.clone()),
- )
- .map_err(|e| err_with_location(e, &import_desc.location))?;
+ for import_descriptor in imports_ {
+ self.download_module(
+ import_descriptor.resolved_specifier.clone(),
+ Some(module_specifier.clone()),
+ Some(import_descriptor.location.clone()),
+ )?;
if let Some(type_dir_url) =
import_descriptor.resolved_type_directive.as_ref()
{
- self
- .download_module(
- type_dir_url.clone(),
- Some(module_specifier.clone()),
- )
- .map_err(|e| err_with_location(e, &import_desc.location))?;
+ self.download_module(
+ type_dir_url.clone(),
+ Some(module_specifier.clone()),
+ Some(import_descriptor.location.clone()),
+ )?;
}
imports.push(import_descriptor);
}
- for ref_desc in ref_descs {
- if AVAILABLE_LIBS.contains(&ref_desc.specifier.as_str()) {
- continue;
- }
-
- let resolved_specifier = ModuleSpecifier::resolve_import(
- &ref_desc.specifier,
- &module_specifier.to_string(),
+ for ref_descriptor in references {
+ self.download_module(
+ ref_descriptor.resolved_specifier.clone(),
+ Some(module_specifier.clone()),
+ Some(ref_descriptor.location.clone()),
)?;
- let reference_descriptor = ReferenceDescriptor {
- specifier: ref_desc.specifier.to_string(),
- resolved_specifier,
- };
-
- self
- .download_module(
- reference_descriptor.resolved_specifier.clone(),
- Some(module_specifier.clone()),
- )
- .map_err(|e| err_with_location(e, &ref_desc.location))?;
-
- match ref_desc.kind {
+ match ref_descriptor.kind {
TsReferenceKind::Lib => {
- lib_directives.push(reference_descriptor);
+ lib_directives.push(ref_descriptor);
}
TsReferenceKind::Types => {
- types_directives.push(reference_descriptor);
+ types_directives.push(ref_descriptor);
}
TsReferenceKind::Path => {
- referenced_files.push(reference_descriptor);
+ referenced_files.push(ref_descriptor);
}
}
}
}
- self.graph.0.insert(
+ self.graph.insert(
module_specifier.to_string(),
ModuleGraphFile {
specifier: module_specifier.to_string(),
url: module_specifier.to_string(),
redirect: None,
filename: source_file.filename.to_str().unwrap().to_string(),
- media_type: source_file.media_type as i32,
+ media_type: source_file.media_type,
source_code,
imports,
referenced_files,
@@ -546,7 +574,7 @@ mod tests {
async fn build_graph(
module_specifier: &ModuleSpecifier,
- ) -> Result<HashMap<String, ModuleGraphFile>, ErrBox> {
+ ) -> Result<ModuleGraph, ErrBox> {
let global_state = GlobalState::new(Default::default()).unwrap();
let mut graph_loader = ModuleGraphLoader::new(
global_state.file_fetcher.clone(),
@@ -824,3 +852,102 @@ mod tests {
drop(http_server_guard);
}
}
+
+// TODO(bartlomieju): use baseline tests from TSC to ensure
+// compatibility
+#[test]
+fn test_pre_process_file() {
+ let source = r#"
+// This comment is placed to make sure that directives are parsed
+// even when they start on non-first line
+
+/// <reference lib="dom" />
+/// <reference types="./type_reference.d.ts" />
+/// <reference path="./type_reference/dep.ts" />
+// @deno-types="./type_definitions/foo.d.ts"
+import { foo } from "./type_definitions/foo.js";
+// @deno-types="./type_definitions/fizz.d.ts"
+import "./type_definitions/fizz.js";
+
+/// <reference path="./type_reference/dep2.ts" />
+
+import * as qat from "./type_definitions/qat.ts";
+
+console.log(foo);
+console.log(fizz);
+console.log(qat.qat);
+"#;
+
+ let (imports, references) =
+ pre_process_file("some/file.ts", MediaType::TypeScript, source, true)
+ .expect("Failed to parse");
+
+ assert_eq!(
+ imports,
+ vec![
+ ImportDesc {
+ specifier: "./type_definitions/foo.js".to_string(),
+ deno_types: Some("./type_definitions/foo.d.ts".to_string()),
+ location: Location {
+ filename: "some/file.ts".to_string(),
+ line: 9,
+ col: 0,
+ },
+ },
+ ImportDesc {
+ specifier: "./type_definitions/fizz.js".to_string(),
+ deno_types: Some("./type_definitions/fizz.d.ts".to_string()),
+ location: Location {
+ filename: "some/file.ts".to_string(),
+ line: 11,
+ col: 0,
+ },
+ },
+ ImportDesc {
+ specifier: "./type_definitions/qat.ts".to_string(),
+ deno_types: None,
+ location: Location {
+ filename: "some/file.ts".to_string(),
+ line: 15,
+ col: 0,
+ },
+ },
+ ]
+ );
+
+ // According to TS docs (https://www.typescriptlang.org/docs/handbook/triple-slash-directives.html)
+ // directives that are not at the top of the file are ignored, so only
+ // 3 references should be captured instead of 4.
+ assert_eq!(
+ references,
+ vec![
+ TsReferenceDesc {
+ specifier: "dom".to_string(),
+ kind: TsReferenceKind::Lib,
+ location: Location {
+ filename: "some/file.ts".to_string(),
+ line: 5,
+ col: 0,
+ },
+ },
+ TsReferenceDesc {
+ specifier: "./type_reference.d.ts".to_string(),
+ kind: TsReferenceKind::Types,
+ location: Location {
+ filename: "some/file.ts".to_string(),
+ line: 6,
+ col: 0,
+ },
+ },
+ TsReferenceDesc {
+ specifier: "./type_reference/dep.ts".to_string(),
+ kind: TsReferenceKind::Path,
+ location: Location {
+ filename: "some/file.ts".to_string(),
+ line: 7,
+ col: 0,
+ },
+ },
+ ]
+ );
+}
diff --git a/cli/msg.rs b/cli/msg.rs
index 186fde42c..3e5000296 100644
--- a/cli/msg.rs
+++ b/cli/msg.rs
@@ -3,10 +3,11 @@
// Warning! The values in this enum are duplicated in js/compiler.ts
// Update carefully!
use serde::Serialize;
+use serde::Serializer;
#[allow(non_camel_case_types)]
-#[repr(i8)]
-#[derive(Clone, Copy, PartialEq, Debug, Serialize)]
+#[repr(i32)]
+#[derive(Clone, Copy, PartialEq, Debug)]
pub enum MediaType {
JavaScript = 0,
JSX = 1,
@@ -17,6 +18,24 @@ pub enum MediaType {
Unknown = 6,
}
+impl Serialize for MediaType {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ let value: i32 = match self {
+ MediaType::JavaScript => 0 as i32,
+ MediaType::JSX => 1 as i32,
+ MediaType::TypeScript => 2 as i32,
+ MediaType::TSX => 3 as i32,
+ MediaType::Json => 4 as i32,
+ MediaType::Wasm => 5 as i32,
+ MediaType::Unknown => 6 as i32,
+ };
+ Serialize::serialize(&value, serializer)
+ }
+}
+
pub fn enum_name_media_type(mt: MediaType) -> &'static str {
match mt {
MediaType::JavaScript => "JavaScript",
@@ -32,10 +51,28 @@ pub fn enum_name_media_type(mt: MediaType) -> &'static str {
// Warning! The values in this enum are duplicated in js/compiler.ts
// Update carefully!
#[allow(non_camel_case_types)]
-#[repr(i8)]
+#[repr(i32)]
#[derive(Clone, Copy, PartialEq, Debug)]
pub enum CompilerRequestType {
Compile = 0,
- RuntimeCompile = 1,
- RuntimeTranspile = 2,
+ Bundle = 1,
+ RuntimeCompile = 2,
+ RuntimeBundle = 3,
+ RuntimeTranspile = 4,
+}
+
+impl Serialize for CompilerRequestType {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ let value: i32 = match self {
+ CompilerRequestType::Compile => 0 as i32,
+ CompilerRequestType::Bundle => 1 as i32,
+ CompilerRequestType::RuntimeCompile => 2 as i32,
+ CompilerRequestType::RuntimeBundle => 3 as i32,
+ CompilerRequestType::RuntimeTranspile => 4 as i32,
+ };
+ Serialize::serialize(&value, serializer)
+ }
}
diff --git a/cli/ops/runtime_compiler.rs b/cli/ops/runtime_compiler.rs
index 97102ef81..e70b69de7 100644
--- a/cli/ops/runtime_compiler.rs
+++ b/cli/ops/runtime_compiler.rs
@@ -3,6 +3,7 @@ use super::dispatch_json::{Deserialize, JsonOp, Value};
use crate::futures::FutureExt;
use crate::op_error::OpError;
use crate::state::State;
+use crate::tsc::runtime_bundle;
use crate::tsc::runtime_compile;
use crate::tsc::runtime_transpile;
use deno_core::CoreIsolate;
@@ -34,15 +35,27 @@ fn op_compile(
let global_state = s.global_state.clone();
let permissions = s.permissions.clone();
let fut = async move {
- runtime_compile(
- global_state,
- permissions,
- &args.root_name,
- &args.sources,
- args.bundle,
- &args.options,
- )
- .await
+ let fut = if args.bundle {
+ runtime_bundle(
+ global_state,
+ permissions,
+ &args.root_name,
+ &args.sources,
+ &args.options,
+ )
+ .boxed_local()
+ } else {
+ runtime_compile(
+ global_state,
+ permissions,
+ &args.root_name,
+ &args.sources,
+ &args.options,
+ )
+ .boxed_local()
+ };
+
+ fut.await
}
.boxed_local();
Ok(JsonOp::Async(fut))
diff --git a/cli/swc_util.rs b/cli/swc_util.rs
index 77ac6d083..906d9f9bd 100644
--- a/cli/swc_util.rs
+++ b/cli/swc_util.rs
@@ -1,8 +1,6 @@
// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
-use crate::doc::Location;
use crate::msg::MediaType;
use crate::swc_common;
-use crate::swc_common::comments::CommentKind;
use crate::swc_common::comments::Comments;
use crate::swc_common::errors::Diagnostic;
use crate::swc_common::errors::DiagnosticBuilder;
@@ -26,8 +24,6 @@ use std::error::Error;
use std::fmt;
use std::sync::Arc;
use std::sync::RwLock;
-use swc_ecma_visit::Node;
-use swc_ecma_visit::Visit;
fn get_default_es_config() -> EsConfig {
let mut config = EsConfig::default();
@@ -231,438 +227,3 @@ impl AstParser {
}
}
}
-
-struct DependencyVisitor {
- dependencies: Vec<String>,
- analyze_dynamic_imports: bool,
-}
-
-impl Visit for DependencyVisitor {
- fn visit_import_decl(
- &mut self,
- import_decl: &swc_ecma_ast::ImportDecl,
- _parent: &dyn Node,
- ) {
- let src_str = import_decl.src.value.to_string();
- self.dependencies.push(src_str);
- }
-
- fn visit_named_export(
- &mut self,
- named_export: &swc_ecma_ast::NamedExport,
- _parent: &dyn Node,
- ) {
- if let Some(src) = &named_export.src {
- let src_str = src.value.to_string();
- self.dependencies.push(src_str);
- }
- }
-
- fn visit_export_all(
- &mut self,
- export_all: &swc_ecma_ast::ExportAll,
- _parent: &dyn Node,
- ) {
- let src_str = export_all.src.value.to_string();
- self.dependencies.push(src_str);
- }
-
- fn visit_call_expr(
- &mut self,
- call_expr: &swc_ecma_ast::CallExpr,
- _parent: &dyn Node,
- ) {
- if !self.analyze_dynamic_imports {
- return;
- }
-
- use swc_ecma_ast::Expr::*;
- use swc_ecma_ast::ExprOrSuper::*;
-
- let boxed_expr = match call_expr.callee.clone() {
- Super(_) => return,
- Expr(boxed) => boxed,
- };
-
- match &*boxed_expr {
- Ident(ident) => {
- if &ident.sym.to_string() != "import" {
- return;
- }
- }
- _ => return,
- };
-
- if let Some(arg) = call_expr.args.get(0) {
- match &*arg.expr {
- Lit(lit) => {
- if let swc_ecma_ast::Lit::Str(str_) = lit {
- let src_str = str_.value.to_string();
- self.dependencies.push(src_str);
- }
- }
- _ => return,
- }
- }
- }
-}
-
-#[derive(Clone, Debug, PartialEq)]
-enum DependencyKind {
- Import,
- DynamicImport,
- Export,
-}
-
-#[derive(Clone, Debug, PartialEq)]
-struct DependencyDescriptor {
- span: Span,
- specifier: String,
- kind: DependencyKind,
-}
-
-struct NewDependencyVisitor {
- dependencies: Vec<DependencyDescriptor>,
-}
-
-impl Visit for NewDependencyVisitor {
- fn visit_import_decl(
- &mut self,
- import_decl: &swc_ecma_ast::ImportDecl,
- _parent: &dyn Node,
- ) {
- let src_str = import_decl.src.value.to_string();
- self.dependencies.push(DependencyDescriptor {
- specifier: src_str,
- kind: DependencyKind::Import,
- span: import_decl.span,
- });
- }
-
- fn visit_named_export(
- &mut self,
- named_export: &swc_ecma_ast::NamedExport,
- _parent: &dyn Node,
- ) {
- if let Some(src) = &named_export.src {
- let src_str = src.value.to_string();
- self.dependencies.push(DependencyDescriptor {
- specifier: src_str,
- kind: DependencyKind::Export,
- span: named_export.span,
- });
- }
- }
-
- fn visit_export_all(
- &mut self,
- export_all: &swc_ecma_ast::ExportAll,
- _parent: &dyn Node,
- ) {
- let src_str = export_all.src.value.to_string();
- self.dependencies.push(DependencyDescriptor {
- specifier: src_str,
- kind: DependencyKind::Export,
- span: export_all.span,
- });
- }
-
- fn visit_ts_import_type(
- &mut self,
- ts_import_type: &swc_ecma_ast::TsImportType,
- _parent: &dyn Node,
- ) {
- // TODO(bartlomieju): possibly add separate DependencyKind
- let src_str = ts_import_type.arg.value.to_string();
- self.dependencies.push(DependencyDescriptor {
- specifier: src_str,
- kind: DependencyKind::Import,
- span: ts_import_type.arg.span,
- });
- }
-
- fn visit_call_expr(
- &mut self,
- call_expr: &swc_ecma_ast::CallExpr,
- parent: &dyn Node,
- ) {
- use swc_ecma_ast::Expr::*;
- use swc_ecma_ast::ExprOrSuper::*;
-
- swc_ecma_visit::visit_call_expr(self, call_expr, parent);
- let boxed_expr = match call_expr.callee.clone() {
- Super(_) => return,
- Expr(boxed) => boxed,
- };
-
- match &*boxed_expr {
- Ident(ident) => {
- if &ident.sym.to_string() != "import" {
- return;
- }
- }
- _ => return,
- };
-
- if let Some(arg) = call_expr.args.get(0) {
- match &*arg.expr {
- Lit(lit) => {
- if let swc_ecma_ast::Lit::Str(str_) = lit {
- let src_str = str_.value.to_string();
- self.dependencies.push(DependencyDescriptor {
- specifier: src_str,
- kind: DependencyKind::DynamicImport,
- span: call_expr.span,
- });
- }
- }
- _ => return,
- }
- }
- }
-}
-
-fn get_deno_types(parser: &AstParser, span: Span) -> Option<String> {
- let comments = parser.get_span_comments(span);
-
- if comments.is_empty() {
- return None;
- }
-
- // @deno-types must directly prepend import statement - hence
- // checking last comment for span
- let last = comments.last().unwrap();
- let comment = last.text.trim_start();
-
- if comment.starts_with("@deno-types") {
- let split: Vec<String> =
- comment.split('=').map(|s| s.to_string()).collect();
- assert_eq!(split.len(), 2);
- let specifier_in_quotes = split.get(1).unwrap().to_string();
- let specifier = specifier_in_quotes
- .trim_start_matches('\"')
- .trim_start_matches('\'')
- .trim_end_matches('\"')
- .trim_end_matches('\'')
- .to_string();
- return Some(specifier);
- }
-
- None
-}
-
-#[derive(Clone, Debug, PartialEq)]
-pub struct ImportDescriptor {
- pub specifier: String,
- pub deno_types: Option<String>,
- pub location: Location,
-}
-
-#[derive(Clone, Debug, PartialEq)]
-pub enum TsReferenceKind {
- Lib,
- Types,
- Path,
-}
-
-#[derive(Clone, Debug, PartialEq)]
-pub struct TsReferenceDescriptor {
- pub kind: TsReferenceKind,
- pub specifier: String,
- pub location: Location,
-}
-
-pub fn analyze_dependencies_and_references(
- file_name: &str,
- media_type: MediaType,
- source_code: &str,
- analyze_dynamic_imports: bool,
-) -> Result<
- (Vec<ImportDescriptor>, Vec<TsReferenceDescriptor>),
- SwcDiagnosticBuffer,
-> {
- let parser = AstParser::new();
- parser.parse_module(file_name, media_type, source_code, |parse_result| {
- let module = parse_result?;
- let mut collector = NewDependencyVisitor {
- dependencies: vec![],
- };
- let module_span = module.span;
- collector.visit_module(&module, &module);
-
- let dependency_descriptors = collector.dependencies;
-
- // for each import check if there's relevant @deno-types directive
- let imports = dependency_descriptors
- .iter()
- .filter(|desc| {
- if analyze_dynamic_imports {
- return true;
- }
-
- desc.kind != DependencyKind::DynamicImport
- })
- .map(|desc| {
- let location = parser.get_span_location(desc.span);
- let deno_types = get_deno_types(&parser, desc.span);
- ImportDescriptor {
- specifier: desc.specifier.to_string(),
- deno_types,
- location: location.into(),
- }
- })
- .collect();
-
- // analyze comment from beginning of the file and find TS directives
- let comments = parser
- .comments
- .take_leading_comments(module_span.lo())
- .unwrap_or_else(Vec::new);
-
- let mut references = vec![];
- for comment in comments {
- if comment.kind != CommentKind::Line {
- continue;
- }
-
- // TODO(bartlomieju): you can do better than that...
- let text = comment.text.to_string();
- let (kind, specifier_in_quotes) =
- if text.starts_with("/ <reference path=") {
- (
- TsReferenceKind::Path,
- text.trim_start_matches("/ <reference path="),
- )
- } else if text.starts_with("/ <reference lib=") {
- (
- TsReferenceKind::Lib,
- text.trim_start_matches("/ <reference lib="),
- )
- } else if text.starts_with("/ <reference types=") {
- (
- TsReferenceKind::Types,
- text.trim_start_matches("/ <reference types="),
- )
- } else {
- continue;
- };
- let specifier = specifier_in_quotes
- .trim_end_matches("/>")
- .trim_end()
- .trim_start_matches('\"')
- .trim_start_matches('\'')
- .trim_end_matches('\"')
- .trim_end_matches('\'')
- .to_string();
-
- let location = parser.get_span_location(comment.span);
- references.push(TsReferenceDescriptor {
- kind,
- specifier,
- location: location.into(),
- });
- }
- Ok((imports, references))
- })
-}
-
-#[test]
-fn test_analyze_dependencies_and_directives() {
- let source = r#"
-// This comment is placed to make sure that directives are parsed
-// even when they start on non-first line
-
-/// <reference lib="dom" />
-/// <reference types="./type_reference.d.ts" />
-/// <reference path="./type_reference/dep.ts" />
-// @deno-types="./type_definitions/foo.d.ts"
-import { foo } from "./type_definitions/foo.js";
-// @deno-types="./type_definitions/fizz.d.ts"
-import "./type_definitions/fizz.js";
-
-/// <reference path="./type_reference/dep2.ts" />
-
-import * as qat from "./type_definitions/qat.ts";
-
-console.log(foo);
-console.log(fizz);
-console.log(qat.qat);
-"#;
-
- let (imports, references) = analyze_dependencies_and_references(
- "some/file.ts",
- MediaType::TypeScript,
- source,
- true,
- )
- .expect("Failed to parse");
-
- assert_eq!(
- imports,
- vec![
- ImportDescriptor {
- specifier: "./type_definitions/foo.js".to_string(),
- deno_types: Some("./type_definitions/foo.d.ts".to_string()),
- location: Location {
- filename: "some/file.ts".to_string(),
- line: 9,
- col: 0,
- },
- },
- ImportDescriptor {
- specifier: "./type_definitions/fizz.js".to_string(),
- deno_types: Some("./type_definitions/fizz.d.ts".to_string()),
- location: Location {
- filename: "some/file.ts".to_string(),
- line: 11,
- col: 0,
- },
- },
- ImportDescriptor {
- specifier: "./type_definitions/qat.ts".to_string(),
- deno_types: None,
- location: Location {
- filename: "some/file.ts".to_string(),
- line: 15,
- col: 0,
- },
- },
- ]
- );
-
- // According to TS docs (https://www.typescriptlang.org/docs/handbook/triple-slash-directives.html)
- // directives that are not at the top of the file are ignored, so only
- // 3 references should be captured instead of 4.
- assert_eq!(
- references,
- vec![
- TsReferenceDescriptor {
- specifier: "dom".to_string(),
- kind: TsReferenceKind::Lib,
- location: Location {
- filename: "some/file.ts".to_string(),
- line: 5,
- col: 0,
- },
- },
- TsReferenceDescriptor {
- specifier: "./type_reference.d.ts".to_string(),
- kind: TsReferenceKind::Types,
- location: Location {
- filename: "some/file.ts".to_string(),
- line: 6,
- col: 0,
- },
- },
- TsReferenceDescriptor {
- specifier: "./type_reference/dep.ts".to_string(),
- kind: TsReferenceKind::Path,
- location: Location {
- filename: "some/file.ts".to_string(),
- line: 7,
- col: 0,
- },
- },
- ]
- );
-}
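The analysis removed above does not disappear: it moves into cli/tsc.rs as `pre_process_file`, with `ImportDescriptor`/`TsReferenceDescriptor` renamed to `ImportDesc`/`TsReferenceDesc` (see the tsc.rs hunks later in this patch). A minimal caller-side sketch, assuming a crate-internal call site; `list_static_imports` is an illustrative helper and not part of the patch:

    use crate::msg::MediaType;
    use crate::swc_util::SwcDiagnosticBuffer;
    use crate::tsc::pre_process_file;

    /// Collects static import specifiers the way callers of the removed
    /// `analyze_dependencies_and_references` used to; passing `false` skips
    /// dynamic `import()` specifiers, mirroring the old
    /// `analyze_dynamic_imports` flag.
    fn list_static_imports(
      file_name: &str,
      source: &str,
    ) -> Result<Vec<String>, SwcDiagnosticBuffer> {
      let (imports, _references) =
        pre_process_file(file_name, MediaType::TypeScript, source, false)?;
      Ok(imports.into_iter().map(|import| import.specifier).collect())
    }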
diff --git a/cli/tests/error_004_missing_module.ts.out b/cli/tests/error_004_missing_module.ts.out
index d851882eb..121555868 100644
--- a/cli/tests/error_004_missing_module.ts.out
+++ b/cli/tests/error_004_missing_module.ts.out
@@ -1 +1,2 @@
[WILDCARD]error: Cannot resolve module "[WILDCARD]/bad-module.ts" from "[WILDCARD]/error_004_missing_module.ts"
+Imported from "[WILDCARD]/error_004_missing_module.ts:2"
diff --git a/cli/tests/error_006_import_ext_failure.ts.out b/cli/tests/error_006_import_ext_failure.ts.out
index c44d5e746..9e1c99970 100644
--- a/cli/tests/error_006_import_ext_failure.ts.out
+++ b/cli/tests/error_006_import_ext_failure.ts.out
@@ -1 +1,2 @@
[WILDCARD]error: Cannot resolve module "[WILDCARD]/non-existent" from "[WILDCARD]/error_006_import_ext_failure.ts"
+Imported from "[WILDCARD]/error_006_import_ext_failure.ts:1"
diff --git a/cli/tests/error_016_dynamic_import_permissions2.out b/cli/tests/error_016_dynamic_import_permissions2.out
index 8f561f130..2babfbf9f 100644
--- a/cli/tests/error_016_dynamic_import_permissions2.out
+++ b/cli/tests/error_016_dynamic_import_permissions2.out
@@ -1,2 +1,3 @@
[WILDCARD]
error: Uncaught TypeError: read access to "[WILDCARD]passwd", run again with the --allow-read flag
+Imported from "[WILDCARD]evil_remote_import.js:3"
diff --git a/cli/tsc.rs b/cli/tsc.rs
index 8d0f0d5de..c1b15bbf0 100644
--- a/cli/tsc.rs
+++ b/cli/tsc.rs
@@ -3,19 +3,26 @@ use crate::colors;
use crate::diagnostics::Diagnostic;
use crate::diagnostics::DiagnosticItem;
use crate::disk_cache::DiskCache;
+use crate::doc::Location;
use crate::file_fetcher::SourceFile;
use crate::file_fetcher::SourceFileFetcher;
use crate::global_state::GlobalState;
use crate::import_map::ImportMap;
-use crate::module_graph::ModuleGraphFile;
+use crate::module_graph::ModuleGraph;
use crate::module_graph::ModuleGraphLoader;
use crate::msg;
+use crate::msg::MediaType;
use crate::op_error::OpError;
use crate::ops;
use crate::permissions::Permissions;
use crate::source_maps::SourceMapGetter;
use crate::startup_data;
use crate::state::State;
+use crate::swc_common::comments::CommentKind;
+use crate::swc_common::Span;
+use crate::swc_ecma_ast;
+use crate::swc_util::AstParser;
+use crate::swc_util::SwcDiagnosticBuffer;
use crate::version;
use crate::web_worker::WebWorker;
use crate::worker::WorkerEvent;
@@ -37,7 +44,6 @@ use sourcemap::SourceMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::fs;
-use std::hash::BuildHasher;
use std::io;
use std::ops::Deref;
use std::ops::DerefMut;
@@ -48,6 +54,8 @@ use std::sync::atomic::Ordering;
use std::sync::Arc;
use std::sync::Mutex;
use std::task::Poll;
+use swc_ecma_visit::Node;
+use swc_ecma_visit::Visit;
use url::Url;
pub const AVAILABLE_LIBS: &[&str] = &[
@@ -273,12 +281,10 @@ impl CompilerConfig {
}
/// Information associated with compiled file in cache.
-/// Includes source code path and state hash.
/// version_hash is used to validate versions of the file
/// and could be used to remove stale file in cache.
#[derive(Deserialize, Serialize)]
pub struct CompiledFileMetadata {
- pub source_path: PathBuf,
pub version_hash: String,
}
@@ -419,7 +425,6 @@ impl TsCompiler {
/// Check if there is compiled source in cache that is valid
/// and can be used again.
- // TODO(bartlomieju): there should be check that cached file actually exists
fn has_compiled_source(
&self,
file_fetcher: &SourceFileFetcher,
@@ -430,8 +435,7 @@ impl TsCompiler {
.fetch_cached_source_file(&specifier, Permissions::allow_all())
{
if let Some(metadata) = self.get_metadata(&url) {
- // 2. compare version hashes
- // TODO: it would probably be good idea to make it method implemented on SourceFile
+ // Compare version hashes
let version_hash_to_validate = source_code_version_hash(
&source_file.source_code,
version::DENO,
@@ -462,7 +466,7 @@ impl TsCompiler {
source_file: &SourceFile,
target: TargetLib,
permissions: Permissions,
- module_graph: HashMap<String, ModuleGraphFile>,
+ module_graph: ModuleGraph,
allow_js: bool,
) -> Result<(), ErrBox> {
let mut has_cached_version = false;
@@ -504,17 +508,15 @@ impl TsCompiler {
TargetLib::Worker => "worker",
};
let root_names = vec![module_url.to_string()];
- let bundle = false;
let unstable = global_state.flags.unstable;
let compiler_config = self.config.clone();
let cwd = std::env::current_dir().unwrap();
let j = match (compiler_config.path, compiler_config.content) {
(Some(config_path), Some(config_data)) => json!({
- "type": msg::CompilerRequestType::Compile as i32,
+ "type": msg::CompilerRequestType::Compile,
"allowJs": allow_js,
"target": target,
"rootNames": root_names,
- "bundle": bundle,
"unstable": unstable,
"configPath": config_path,
"config": str::from_utf8(&config_data).unwrap(),
@@ -522,11 +524,10 @@ impl TsCompiler {
"sourceFileMap": module_graph_json,
}),
_ => json!({
- "type": msg::CompilerRequestType::Compile as i32,
+ "type": msg::CompilerRequestType::Compile,
"allowJs": allow_js,
"target": target,
"rootNames": root_names,
- "bundle": bundle,
"unstable": unstable,
"cwd": cwd,
"sourceFileMap": module_graph_json,
@@ -563,8 +564,6 @@ impl TsCompiler {
}
fn get_graph_metadata(&self, url: &Url) -> Option<GraphFileMetadata> {
- // Try to load cached version:
- // 1. check if there's 'meta' file
let cache_key = self
.disk_cache
.get_cache_filename_with_extension(url, "graph");
@@ -707,7 +706,6 @@ impl TsCompiler {
filename: compiled_code_filename,
media_type: msg::MediaType::JavaScript,
source_code: compiled_code,
- types_url: None,
types_header: None,
};
@@ -763,10 +761,7 @@ impl TsCompiler {
&self.config.hash,
);
- let compiled_file_metadata = CompiledFileMetadata {
- source_path: source_file.filename,
- version_hash,
- };
+ let compiled_file_metadata = CompiledFileMetadata { version_hash };
let meta_key = self
.disk_cache
.get_cache_filename_with_extension(module_specifier.as_url(), "meta");
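With `source_path` removed, the `.meta` entry written for a compiled source file carries nothing but the version hash (towards #6080). A standalone sketch of the resulting shape, assuming the cache keeps serializing the struct with serde_json as the rest of this file does:

    use serde::{Deserialize, Serialize};

    // Mirror of the trimmed-down struct above; the field list is exactly
    // what ends up in the cached ".meta" file.
    #[derive(Deserialize, Serialize)]
    pub struct CompiledFileMetadata {
      pub version_hash: String,
    }

    fn main() {
      let meta = CompiledFileMetadata {
        // The hash value here is illustrative only.
        version_hash: "a1b2c3d4".to_string(),
      };
      // Prints: {"version_hash":"a1b2c3d4"} -- no "source_path" key anymore.
      println!("{}", serde_json::to_string(&meta).unwrap());
    }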
@@ -795,7 +790,6 @@ impl TsCompiler {
filename: source_map_filename,
media_type: msg::MediaType::JavaScript,
source_code,
- types_url: None,
types_header: None,
};
@@ -953,7 +947,6 @@ pub async fn bundle(
serde_json::to_value(module_graph).expect("Failed to serialize data");
let root_names = vec![module_specifier.to_string()];
- let bundle = true;
let target = "main";
let cwd = std::env::current_dir().unwrap();
@@ -961,10 +954,9 @@ pub async fn bundle(
// be optional
let j = match (compiler_config.path, compiler_config.content) {
(Some(config_path), Some(config_data)) => json!({
- "type": msg::CompilerRequestType::Compile as i32,
+ "type": msg::CompilerRequestType::Bundle,
"target": target,
"rootNames": root_names,
- "bundle": bundle,
"unstable": unstable,
"configPath": config_path,
"config": str::from_utf8(&config_data).unwrap(),
@@ -972,10 +964,9 @@ pub async fn bundle(
"sourceFileMap": module_graph_json,
}),
_ => json!({
- "type": msg::CompilerRequestType::Compile as i32,
+ "type": msg::CompilerRequestType::Bundle,
"target": target,
"rootNames": root_names,
- "bundle": bundle,
"unstable": unstable,
"cwd": cwd,
"sourceFileMap": module_graph_json,
@@ -1000,20 +991,18 @@ pub async fn bundle(
Ok(output)
}
-/// This function is used by `Deno.compile()` and `Deno.bundle()` APIs.
-pub async fn runtime_compile<S: BuildHasher>(
+async fn create_runtime_module_graph(
global_state: GlobalState,
permissions: Permissions,
root_name: &str,
- sources: &Option<HashMap<String, String, S>>,
- bundle: bool,
+ sources: &Option<HashMap<String, String>>,
maybe_options: &Option<String>,
-) -> Result<Value, OpError> {
+) -> Result<(Vec<String>, ModuleGraph), OpError> {
let mut root_names = vec![];
let mut module_graph_loader = ModuleGraphLoader::new(
global_state.file_fetcher.clone(),
None,
- permissions.clone(),
+ permissions,
false,
false,
);
@@ -1050,17 +1039,34 @@ pub async fn runtime_compile<S: BuildHasher>(
}
}
- let module_graph = module_graph_loader.get_graph();
+ Ok((root_names, module_graph_loader.get_graph()))
+}
+
+/// This function is used by `Deno.compile()` API.
+pub async fn runtime_compile(
+ global_state: GlobalState,
+ permissions: Permissions,
+ root_name: &str,
+ sources: &Option<HashMap<String, String>>,
+ maybe_options: &Option<String>,
+) -> Result<Value, OpError> {
+ let (root_names, module_graph) = create_runtime_module_graph(
+ global_state.clone(),
+ permissions.clone(),
+ root_name,
+ sources,
+ maybe_options,
+ )
+ .await?;
let module_graph_json =
serde_json::to_value(module_graph).expect("Failed to serialize data");
let req_msg = json!({
- "type": msg::CompilerRequestType::RuntimeCompile as i32,
+ "type": msg::CompilerRequestType::RuntimeCompile,
"target": "runtime",
"rootNames": root_names,
"sourceFileMap": module_graph_json,
"options": maybe_options,
- "bundle": bundle,
"unstable": global_state.flags.unstable,
})
.to_string()
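The request no longer carries a `bundle` flag, and `"type"` is now embedded through the serde `Serialize` implementations added to the enums in cli/msg.rs; that file's diff is not part of this excerpt, so the exact wire representation is defined there. A small standalone sketch of the mechanism, using a stand-in enum rather than the real `msg::CompilerRequestType`:

    use serde::Serialize;
    use serde_json::json;

    // Stand-in for msg::CompilerRequestType; the real serde representation
    // (variant name vs. numeric tag) lives in cli/msg.rs.
    #[allow(dead_code)]
    #[derive(Serialize)]
    enum CompilerRequestType {
      Compile,
      RuntimeCompile,
      RuntimeBundle,
    }

    fn main() {
      // Any Serialize value can be dropped into json!() directly, which is
      // why the explicit `as i32` casts disappear in this patch.
      let req = json!({
        "type": CompilerRequestType::RuntimeCompile,
        "target": "runtime"
      });
      // With a plain derive, "type" serializes as the variant name string.
      println!("{}", req);
    }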
@@ -1072,12 +1078,6 @@ pub async fn runtime_compile<S: BuildHasher>(
let msg = execute_in_same_thread(global_state, permissions, req_msg).await?;
let json_str = std::str::from_utf8(&msg).unwrap();
- // TODO(bartlomieju): factor `bundle` path into separate function `runtime_bundle`
- if bundle {
- let _response: RuntimeBundleResponse = serde_json::from_str(json_str)?;
- return Ok(serde_json::from_str::<Value>(json_str).unwrap());
- }
-
let response: RuntimeCompileResponse = serde_json::from_str(json_str)?;
if response.diagnostics.is_empty() && sources.is_none() {
@@ -1085,20 +1085,60 @@ pub async fn runtime_compile<S: BuildHasher>(
}
// We're returning `Ok()` instead of `Err()` because it's not runtime
- // error if there were diagnostics produces; we want to let user handle
+  // error if there were diagnostics produced; we want to let the user handle
+ // diagnostics in the runtime.
+ Ok(serde_json::from_str::<Value>(json_str).unwrap())
+}
+
+/// This function is used by `Deno.bundle()` API.
+pub async fn runtime_bundle(
+ global_state: GlobalState,
+ permissions: Permissions,
+ root_name: &str,
+ sources: &Option<HashMap<String, String>>,
+ maybe_options: &Option<String>,
+) -> Result<Value, OpError> {
+ let (root_names, module_graph) = create_runtime_module_graph(
+ global_state.clone(),
+ permissions.clone(),
+ root_name,
+ sources,
+ maybe_options,
+ )
+ .await?;
+ let module_graph_json =
+ serde_json::to_value(module_graph).expect("Failed to serialize data");
+
+ let req_msg = json!({
+ "type": msg::CompilerRequestType::RuntimeBundle,
+ "target": "runtime",
+ "rootNames": root_names,
+ "sourceFileMap": module_graph_json,
+ "options": maybe_options,
+ "unstable": global_state.flags.unstable,
+ })
+ .to_string()
+ .into_boxed_str()
+ .into_boxed_bytes();
+
+ let msg = execute_in_same_thread(global_state, permissions, req_msg).await?;
+ let json_str = std::str::from_utf8(&msg).unwrap();
+ let _response: RuntimeBundleResponse = serde_json::from_str(json_str)?;
+ // We're returning `Ok()` instead of `Err()` because it's not runtime
+  // error if there were diagnostics produced; we want to let the user handle
// diagnostics in the runtime.
Ok(serde_json::from_str::<Value>(json_str).unwrap())
}
/// This function is used by `Deno.transpileOnly()` API.
-pub async fn runtime_transpile<S: BuildHasher>(
+pub async fn runtime_transpile(
global_state: GlobalState,
permissions: Permissions,
- sources: &HashMap<String, String, S>,
+ sources: &HashMap<String, String>,
options: &Option<String>,
) -> Result<Value, OpError> {
let req_msg = json!({
- "type": msg::CompilerRequestType::RuntimeTranspile as i32,
+ "type": msg::CompilerRequestType::RuntimeTranspile,
"sources": sources,
"options": options,
})
@@ -1113,6 +1153,278 @@ pub async fn runtime_transpile<S: BuildHasher>(
Ok(v)
}
+#[derive(Clone, Debug, PartialEq)]
+enum DependencyKind {
+ Import,
+ DynamicImport,
+ Export,
+}
+
+#[derive(Clone, Debug, PartialEq)]
+struct DependencyDescriptor {
+ span: Span,
+ specifier: String,
+ kind: DependencyKind,
+}
+
+struct DependencyVisitor {
+ dependencies: Vec<DependencyDescriptor>,
+}
+
+impl Visit for DependencyVisitor {
+ fn visit_import_decl(
+ &mut self,
+ import_decl: &swc_ecma_ast::ImportDecl,
+ _parent: &dyn Node,
+ ) {
+ let src_str = import_decl.src.value.to_string();
+ self.dependencies.push(DependencyDescriptor {
+ specifier: src_str,
+ kind: DependencyKind::Import,
+ span: import_decl.span,
+ });
+ }
+
+ fn visit_named_export(
+ &mut self,
+ named_export: &swc_ecma_ast::NamedExport,
+ _parent: &dyn Node,
+ ) {
+ if let Some(src) = &named_export.src {
+ let src_str = src.value.to_string();
+ self.dependencies.push(DependencyDescriptor {
+ specifier: src_str,
+ kind: DependencyKind::Export,
+ span: named_export.span,
+ });
+ }
+ }
+
+ fn visit_export_all(
+ &mut self,
+ export_all: &swc_ecma_ast::ExportAll,
+ _parent: &dyn Node,
+ ) {
+ let src_str = export_all.src.value.to_string();
+ self.dependencies.push(DependencyDescriptor {
+ specifier: src_str,
+ kind: DependencyKind::Export,
+ span: export_all.span,
+ });
+ }
+
+ fn visit_ts_import_type(
+ &mut self,
+ ts_import_type: &swc_ecma_ast::TsImportType,
+ _parent: &dyn Node,
+ ) {
+ // TODO(bartlomieju): possibly add separate DependencyKind
+ let src_str = ts_import_type.arg.value.to_string();
+ self.dependencies.push(DependencyDescriptor {
+ specifier: src_str,
+ kind: DependencyKind::Import,
+ span: ts_import_type.arg.span,
+ });
+ }
+
+ fn visit_call_expr(
+ &mut self,
+ call_expr: &swc_ecma_ast::CallExpr,
+ parent: &dyn Node,
+ ) {
+ use swc_ecma_ast::Expr::*;
+ use swc_ecma_ast::ExprOrSuper::*;
+
+ swc_ecma_visit::visit_call_expr(self, call_expr, parent);
+ let boxed_expr = match call_expr.callee.clone() {
+ Super(_) => return,
+ Expr(boxed) => boxed,
+ };
+
+ match &*boxed_expr {
+ Ident(ident) => {
+ if &ident.sym.to_string() != "import" {
+ return;
+ }
+ }
+ _ => return,
+ };
+
+ if let Some(arg) = call_expr.args.get(0) {
+ match &*arg.expr {
+ Lit(lit) => {
+ if let swc_ecma_ast::Lit::Str(str_) = lit {
+ let src_str = str_.value.to_string();
+ self.dependencies.push(DependencyDescriptor {
+ specifier: src_str,
+ kind: DependencyKind::DynamicImport,
+ span: call_expr.span,
+ });
+ }
+ }
+ _ => return,
+ }
+ }
+ }
+}
+
+#[derive(Clone, Debug, PartialEq)]
+pub struct ImportDesc {
+ pub specifier: String,
+ pub deno_types: Option<String>,
+ pub location: Location,
+}
+
+#[derive(Clone, Debug, PartialEq)]
+pub enum TsReferenceKind {
+ Lib,
+ Types,
+ Path,
+}
+
+#[derive(Clone, Debug, PartialEq)]
+pub struct TsReferenceDesc {
+ pub kind: TsReferenceKind,
+ pub specifier: String,
+ pub location: Location,
+}
+
+// TODO(bartlomieju): handle imports in ambient contexts/TS modules
+/// This function is a port of `ts.preProcessFile()`
+///
+/// Additionally it captures `@deno-types` references directly
+/// preceding `import .. from` and `export .. from` statements.
+pub fn pre_process_file(
+ file_name: &str,
+ media_type: MediaType,
+ source_code: &str,
+ analyze_dynamic_imports: bool,
+) -> Result<(Vec<ImportDesc>, Vec<TsReferenceDesc>), SwcDiagnosticBuffer> {
+ let parser = AstParser::new();
+ parser.parse_module(file_name, media_type, source_code, |parse_result| {
+ let module = parse_result?;
+ let mut collector = DependencyVisitor {
+ dependencies: vec![],
+ };
+ let module_span = module.span;
+ collector.visit_module(&module, &module);
+
+ let dependency_descriptors = collector.dependencies;
+
+ // for each import check if there's relevant @deno-types directive
+ let imports = dependency_descriptors
+ .iter()
+ .filter(|desc| {
+ if analyze_dynamic_imports {
+ return true;
+ }
+
+ desc.kind != DependencyKind::DynamicImport
+ })
+ .map(|desc| {
+ let location = parser.get_span_location(desc.span);
+ let deno_types = get_deno_types(&parser, desc.span);
+ ImportDesc {
+ specifier: desc.specifier.to_string(),
+ deno_types,
+ location: location.into(),
+ }
+ })
+ .collect();
+
+ // analyze comment from beginning of the file and find TS directives
+ let comments = parser
+ .comments
+ .take_leading_comments(module_span.lo())
+ .unwrap_or_else(Vec::new);
+
+ let mut references = vec![];
+ for comment in comments {
+ if comment.kind != CommentKind::Line {
+ continue;
+ }
+
+ let text = comment.text.to_string();
+ if let Some((kind, specifier)) = parse_ts_reference(text.trim()) {
+ let location = parser.get_span_location(comment.span);
+ references.push(TsReferenceDesc {
+ kind,
+ specifier,
+ location: location.into(),
+ });
+ }
+ }
+ Ok((imports, references))
+ })
+}
+
+fn get_deno_types(parser: &AstParser, span: Span) -> Option<String> {
+ let comments = parser.get_span_comments(span);
+
+ if comments.is_empty() {
+ return None;
+ }
+
+  // @deno-types must directly precede the import statement, so only the
+  // last comment before the span is checked
+ let last = comments.last().unwrap();
+ let comment = last.text.trim_start();
+ parse_deno_types(&comment)
+}
+
+// TODO(bartlomieju): refactor
+fn parse_ts_reference(comment: &str) -> Option<(TsReferenceKind, String)> {
+ let (kind, specifier_in_quotes) = if comment.starts_with("/ <reference path=")
+ {
+ (
+ TsReferenceKind::Path,
+ comment.trim_start_matches("/ <reference path="),
+ )
+ } else if comment.starts_with("/ <reference lib=") {
+ (
+ TsReferenceKind::Lib,
+ comment.trim_start_matches("/ <reference lib="),
+ )
+ } else if comment.starts_with("/ <reference types=") {
+ (
+ TsReferenceKind::Types,
+ comment.trim_start_matches("/ <reference types="),
+ )
+ } else {
+ return None;
+ };
+
+ let specifier = specifier_in_quotes
+ .trim_end_matches("/>")
+ .trim_end()
+ .trim_start_matches('\"')
+ .trim_start_matches('\'')
+ .trim_end_matches('\"')
+ .trim_end_matches('\'')
+ .to_string();
+
+ Some((kind, specifier))
+}
+
+fn parse_deno_types(comment: &str) -> Option<String> {
+ if comment.starts_with("@deno-types") {
+ let split: Vec<String> =
+ comment.split('=').map(|s| s.to_string()).collect();
+ assert_eq!(split.len(), 2);
+ let specifier_in_quotes = split.get(1).unwrap().to_string();
+ let specifier = specifier_in_quotes
+ .trim()
+ .trim_start_matches('\"')
+ .trim_start_matches('\'')
+ .trim_end_matches('\"')
+ .trim_end_matches('\'')
+ .to_string();
+ return Some(specifier);
+ }
+
+ None
+}
+
#[cfg(test)]
mod tests {
use super::*;
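The hunk above is the new home of the analysis deleted from cli/swc_util.rs earlier in this patch. A short sketch of what `pre_process_file` yields for a small module, mirroring the behavior demonstrated by the test removed there; it assumes placement alongside the other tests in this module so that `pre_process_file`, `MediaType` and `TsReferenceKind` are in scope:

    #[test]
    fn pre_process_file_example() {
      let source = r#"
    /// <reference lib="dom" />
    // @deno-types="./foo.d.ts"
    import { foo } from "./foo.js";
    console.log(foo);
    "#;

      let (imports, references) =
        pre_process_file("some/file.ts", MediaType::TypeScript, source, true)
          .expect("Failed to parse");

      // The @deno-types comment directly preceding the import is attached to it.
      assert_eq!(imports.len(), 1);
      assert_eq!(imports[0].specifier, "./foo.js");
      assert_eq!(imports[0].deno_types, Some("./foo.d.ts".to_string()));

      // The triple-slash directive at the top of the file becomes a reference.
      assert_eq!(references.len(), 1);
      assert_eq!(references[0].kind, TsReferenceKind::Lib);
      assert_eq!(references[0].specifier, "dom");
    }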
@@ -1121,6 +1433,44 @@ mod tests {
use std::path::PathBuf;
use tempfile::TempDir;
+ #[test]
+ fn test_parse_deno_types() {
+ assert_eq!(
+ parse_deno_types("@deno-types=./a/b/c.d.ts"),
+ Some("./a/b/c.d.ts".to_string())
+ );
+ assert_eq!(
+      parse_deno_types("@deno-types = https://deno.land/x/some/package/a.d.ts"),
+      Some("https://deno.land/x/some/package/a.d.ts".to_string())
+ );
+ assert_eq!(
+ parse_deno_types("@deno-types = ./a/b/c.d.ts"),
+ Some("./a/b/c.d.ts".to_string())
+ );
+ assert!(parse_deno_types("asdf").is_none());
+ assert!(parse_deno_types("// deno-types = fooo").is_none());
+ }
+
+ #[test]
+ fn test_parse_ts_reference() {
+ assert_eq!(
+ parse_ts_reference(r#"/ <reference lib="deno.shared_globals" />"#),
+ Some((TsReferenceKind::Lib, "deno.shared_globals".to_string()))
+ );
+ assert_eq!(
+ parse_ts_reference(r#"/ <reference path="./type/reference/dep.ts" />"#),
+ Some((TsReferenceKind::Path, "./type/reference/dep.ts".to_string()))
+ );
+ assert_eq!(
+ parse_ts_reference(r#"/ <reference types="./type/reference.d.ts" />"#),
+ Some((TsReferenceKind::Types, "./type/reference.d.ts".to_string()))
+ );
+ assert!(parse_ts_reference("asdf").is_none());
+ assert!(
+ parse_ts_reference(r#"/ <reference unknown="unknown" />"#).is_none()
+ );
+ }
+
#[tokio::test]
async fn test_compile() {
let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
@@ -1134,7 +1484,6 @@ mod tests {
filename: PathBuf::from(p.to_str().unwrap().to_string()),
media_type: msg::MediaType::TypeScript,
source_code: include_bytes!("./tests/002_hello.ts").to_vec(),
- types_url: None,
types_header: None,
};
let mock_state =
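With the split above, the choice between compiling and bundling moves out of `runtime_compile` and into its callers. A hedged sketch of a call site that picks between the two new entry points; the `bundle` flag and the surrounding function are illustrative, not part of the patch:

    use std::collections::HashMap;

    use crate::global_state::GlobalState;
    use crate::op_error::OpError;
    use crate::permissions::Permissions;
    use crate::tsc::{runtime_bundle, runtime_compile};
    use serde_json::Value;

    // Illustrative only: callers now select the entry point explicitly
    // instead of threading a `bundle` boolean through runtime_compile.
    async fn compile_or_bundle(
      global_state: GlobalState,
      permissions: Permissions,
      root_name: &str,
      sources: &Option<HashMap<String, String>>,
      maybe_options: &Option<String>,
      bundle: bool,
    ) -> Result<Value, OpError> {
      if bundle {
        runtime_bundle(global_state, permissions, root_name, sources, maybe_options)
          .await
      } else {
        runtime_compile(global_state, permissions, root_name, sources, maybe_options)
          .await
      }
    }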
diff --git a/cli/web_worker.rs b/cli/web_worker.rs
index e060a157d..de4cd91f9 100644
--- a/cli/web_worker.rs
+++ b/cli/web_worker.rs
@@ -212,7 +212,6 @@ impl Future for WebWorker {
match r {
Some(msg) => {
let msg = String::from_utf8(msg.to_vec()).unwrap();
- debug!("received message from host: {}", msg);
let script = format!("workerMessageRecvCallback({})", msg);
if let Err(e) = worker.execute(&script) {