 cli/args/mod.rs                     |   5
 cli/cache/check.rs                  |   6
 cli/cache/common.rs                 |  35
 cli/cache/disk_cache.rs             |  78
 cli/cache/emit.rs                   | 232
 cli/cache/incremental.rs            |  15
 cli/cache/mod.rs                    |  53
 cli/deno_dir.rs                     |   2
 cli/emit.rs                         | 154
 cli/graph_util.rs                   |   3
 cli/main.rs                         |  45
 cli/module_loader.rs                | 154
 cli/proc_state.rs                   | 242
 cli/tests/integration/info_tests.rs |   1
 cli/text_encoding.rs                |  57
 cli/tools/coverage/mod.rs           |  20
 core/source_map.rs                  |  18
 17 files changed, 582 insertions(+), 538 deletions(-)
diff --git a/cli/args/mod.rs b/cli/args/mod.rs
index badfdc39d..7c1502ad6 100644
--- a/cli/args/mod.rs
+++ b/cli/args/mod.rs
@@ -286,6 +286,11 @@ impl CliOptions {
self.flags.enable_testing_features
}
+  /// Returns true if the --inspect or --inspect-brk flags are used.
+ pub fn is_inspecting(&self) -> bool {
+ self.flags.inspect.is_some() || self.flags.inspect_brk.is_some()
+ }
+
pub fn inspect_brk(&self) -> Option<SocketAddr> {
self.flags.inspect_brk
}
diff --git a/cli/cache/check.rs b/cli/cache/check.rs
index 4e0f8d912..6f3c41950 100644
--- a/cli/cache/check.rs
+++ b/cli/cache/check.rs
@@ -22,7 +22,7 @@ impl TypeCheckCache {
Err(err) => {
log::debug!(
concat!(
- "Failed creating internal type checking cache. ",
+ "Failed loading internal type checking cache. ",
"Recreating...\n\nError details:\n{:#}",
),
err
@@ -35,7 +35,7 @@ impl TypeCheckCache {
Err(err) => {
log::debug!(
concat!(
- "Unable to create internal cache for type checking. ",
+ "Unable to load internal cache for type checking. ",
"This will reduce the performance of type checking.\n\n",
"Error details:\n{:#}",
),
@@ -233,7 +233,7 @@ mod test {
cache.set_tsbuildinfo(&specifier1, "test");
assert_eq!(cache.get_tsbuildinfo(&specifier1), Some("test".to_string()));
- // recreating the cache should not remove the data because the CLI version and state hash is the same
+ // recreating the cache should not remove the data because the CLI version is the same
let conn = cache.0.unwrap();
let cache =
TypeCheckCache::from_connection(conn, "2.0.0".to_string()).unwrap();
diff --git a/cli/cache/common.rs b/cli/cache/common.rs
index c01c1ab9a..b536d6cb2 100644
--- a/cli/cache/common.rs
+++ b/cli/cache/common.rs
@@ -1,16 +1,37 @@
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
+use std::hash::Hasher;
+
use deno_core::error::AnyError;
use deno_runtime::deno_webstorage::rusqlite::Connection;
-/// Very fast non-cryptographically secure hash.
-pub fn fast_insecure_hash(bytes: &[u8]) -> u64 {
- use std::hash::Hasher;
- use twox_hash::XxHash64;
+/// A very fast insecure hasher that uses the xxHash algorithm.
+#[derive(Default)]
+pub struct FastInsecureHasher(twox_hash::XxHash64);
+
+impl FastInsecureHasher {
+ pub fn new() -> Self {
+ Self::default()
+ }
+
+ pub fn write_str(&mut self, text: &str) -> &mut Self {
+ self.write(text.as_bytes());
+ self
+ }
+
+ pub fn write(&mut self, bytes: &[u8]) -> &mut Self {
+ self.0.write(bytes);
+ self
+ }
+
+ pub fn write_u64(&mut self, value: u64) -> &mut Self {
+ self.0.write_u64(value);
+ self
+ }
- let mut hasher = XxHash64::default();
- hasher.write(bytes);
- hasher.finish()
+ pub fn finish(&self) -> u64 {
+ self.0.finish()
+ }
}
/// Runs the common sqlite pragma.
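The removed free function fast_insecure_hash(bytes) is replaced by the builder-style FastInsecureHasher above, which lets callers fold several inputs into one 64-bit value. A minimal usage sketch inside the cli crate (relying on the pub use common::FastInsecureHasher re-export added in cli/cache/mod.rs further down in this patch):

use crate::cache::FastInsecureHasher;

// Equivalent of the removed `fast_insecure_hash(bytes)` helper.
fn hash_bytes(bytes: &[u8]) -> u64 {
  FastInsecureHasher::new().write(bytes).finish()
}

// Several inputs can now be chained into a single hash, as the
// incremental cache and emit cache below do.
fn hash_text_and_seed(text: &str, seed: u64) -> u64 {
  FastInsecureHasher::new()
    .write_str(text)
    .write_u64(seed)
    .finish()
}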
diff --git a/cli/cache/disk_cache.rs b/cli/cache/disk_cache.rs
index 01352c398..5a2f11e3c 100644
--- a/cli/cache/disk_cache.rs
+++ b/cli/cache/disk_cache.rs
@@ -3,13 +3,6 @@
use crate::fs_util;
use crate::http_cache::url_to_filename;
-use super::CacheType;
-use super::Cacher;
-use super::EmitMetadata;
-
-use deno_ast::ModuleSpecifier;
-use deno_core::error::AnyError;
-use deno_core::serde_json;
use deno_core::url::Host;
use deno_core::url::Url;
use std::ffi::OsStr;
@@ -154,77 +147,6 @@ impl DiskCache {
fs_util::atomic_write_file(&path, data, crate::http_cache::CACHE_PERM)
.map_err(|e| with_io_context(&e, format!("{:#?}", &path)))
}
-
- fn get_emit_metadata(
- &self,
- specifier: &ModuleSpecifier,
- ) -> Option<EmitMetadata> {
- let filename = self.get_cache_filename_with_extension(specifier, "meta")?;
- let bytes = self.get(&filename).ok()?;
- serde_json::from_slice(&bytes).ok()
- }
-
- fn set_emit_metadata(
- &self,
- specifier: &ModuleSpecifier,
- data: EmitMetadata,
- ) -> Result<(), AnyError> {
- let filename = self
- .get_cache_filename_with_extension(specifier, "meta")
- .unwrap();
- let bytes = serde_json::to_vec(&data)?;
- self.set(&filename, &bytes).map_err(|e| e.into())
- }
-}
-
-// todo(13302): remove and replace with sqlite database
-impl Cacher for DiskCache {
- fn get(
- &self,
- cache_type: CacheType,
- specifier: &ModuleSpecifier,
- ) -> Option<String> {
- let extension = match cache_type {
- CacheType::Emit => "js",
- CacheType::SourceMap => "js.map",
- CacheType::Version => {
- return self.get_emit_metadata(specifier).map(|d| d.version_hash)
- }
- };
- let filename =
- self.get_cache_filename_with_extension(specifier, extension)?;
- self
- .get(&filename)
- .ok()
- .and_then(|b| String::from_utf8(b).ok())
- }
-
- fn set(
- &self,
- cache_type: CacheType,
- specifier: &ModuleSpecifier,
- value: String,
- ) -> Result<(), AnyError> {
- let extension = match cache_type {
- CacheType::Emit => "js",
- CacheType::SourceMap => "js.map",
- CacheType::Version => {
- let data = if let Some(mut data) = self.get_emit_metadata(specifier) {
- data.version_hash = value;
- data
- } else {
- EmitMetadata {
- version_hash: value,
- }
- };
- return self.set_emit_metadata(specifier, data);
- }
- };
- let filename = self
- .get_cache_filename_with_extension(specifier, extension)
- .unwrap();
- self.set(&filename, value.as_bytes()).map_err(|e| e.into())
- }
}
#[cfg(test)]
diff --git a/cli/cache/emit.rs b/cli/cache/emit.rs
index e1469b862..61039a966 100644
--- a/cli/cache/emit.rs
+++ b/cli/cache/emit.rs
@@ -1,71 +1,209 @@
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
+use std::path::PathBuf;
+
use deno_ast::ModuleSpecifier;
+use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
+use deno_core::serde_json;
+use serde::Deserialize;
+use serde::Serialize;
-use super::CacheType;
-use super::Cacher;
+use super::DiskCache;
+use super::FastInsecureHasher;
-/// Emit cache for a single file.
-#[derive(Debug, Clone, PartialEq)]
-pub struct SpecifierEmitCacheData {
+#[derive(Debug, Deserialize, Serialize)]
+struct EmitMetadata {
pub source_hash: String,
- pub text: String,
- pub map: Option<String>,
+ pub emit_hash: String,
+ // purge the cache between cli versions
+ pub cli_version: String,
}
-pub trait EmitCache {
- /// Gets the emit data from the cache.
- fn get_emit_data(
- &self,
- specifier: &ModuleSpecifier,
- ) -> Option<SpecifierEmitCacheData>;
- /// Sets the emit data in the cache.
- fn set_emit_data(
- &self,
- specifier: ModuleSpecifier,
- data: SpecifierEmitCacheData,
- ) -> Result<(), AnyError>;
- /// Gets the stored hash of the source of the provider specifier
- /// to tell if the emit is out of sync with the source.
- /// TODO(13302): this is actually not reliable and should be removed
- /// once switching to an sqlite db
- fn get_source_hash(&self, specifier: &ModuleSpecifier) -> Option<String>;
- /// Gets the emitted JavaScript of the TypeScript source.
- /// TODO(13302): remove this once switching to an sqlite db
- fn get_emit_text(&self, specifier: &ModuleSpecifier) -> Option<String>;
+/// The cache that stores previously emitted files.
+#[derive(Clone)]
+pub struct EmitCache {
+ disk_cache: DiskCache,
+ cli_version: String,
}
-impl<T: Cacher> EmitCache for T {
- fn get_emit_data(
+impl EmitCache {
+ pub fn new(disk_cache: DiskCache) -> Self {
+ Self {
+ disk_cache,
+ cli_version: crate::version::deno(),
+ }
+ }
+
+ /// Gets the emitted code with embedded sourcemap from the cache.
+ ///
+  /// The expected source hash is used to verify that the value
+  /// retrieved from the cache actually corresponds to the
+  /// provided source.
+  ///
+  /// Cached emits from previous CLI releases, or emits that do
+  /// not match the provided source, will not be returned.
+ pub fn get_emit_code(
&self,
specifier: &ModuleSpecifier,
- ) -> Option<SpecifierEmitCacheData> {
- Some(SpecifierEmitCacheData {
- source_hash: self.get_source_hash(specifier)?,
- text: self.get_emit_text(specifier)?,
- map: self.get(CacheType::SourceMap, specifier),
- })
+ expected_source_hash: u64,
+ ) -> Option<String> {
+ let meta_filename = self.get_meta_filename(specifier)?;
+ let emit_filename = self.get_emit_filename(specifier)?;
+
+    // load and verify that the metadata file is for this source and CLI version
+ let bytes = self.disk_cache.get(&meta_filename).ok()?;
+ let meta: EmitMetadata = serde_json::from_slice(&bytes).ok()?;
+ if meta.source_hash != expected_source_hash.to_string()
+ || meta.cli_version != self.cli_version
+ {
+ return None;
+ }
+
+    // load and verify that the emit matches the metadata
+ let emit_bytes = self.disk_cache.get(&emit_filename).ok()?;
+ if meta.emit_hash != compute_emit_hash(&emit_bytes) {
+ return None;
+ }
+
+ // everything looks good, return it
+ let emit_text = String::from_utf8(emit_bytes).ok()?;
+ Some(emit_text)
}
- fn get_source_hash(&self, specifier: &ModuleSpecifier) -> Option<String> {
- self.get(CacheType::Version, specifier)
+ /// Gets the filepath which stores the emit.
+ pub fn get_emit_filepath(
+ &self,
+ specifier: &ModuleSpecifier,
+ ) -> Option<PathBuf> {
+ Some(
+ self
+ .disk_cache
+ .location
+ .join(self.get_emit_filename(specifier)?),
+ )
}
- fn get_emit_text(&self, specifier: &ModuleSpecifier) -> Option<String> {
- self.get(CacheType::Emit, specifier)
+ /// Sets the emit code in the cache.
+ pub fn set_emit_code(
+ &self,
+ specifier: &ModuleSpecifier,
+ source_hash: u64,
+ code: &str,
+ ) {
+ if let Err(err) = self.set_emit_code_result(specifier, source_hash, code) {
+ // should never error here, but if it ever does don't fail
+ if cfg!(debug_assertions) {
+ panic!("Error saving emit data ({}): {}", specifier, err);
+ } else {
+        log::debug!("Error saving emit data ({}): {}", specifier, err);
+ }
+ }
}
- fn set_emit_data(
+ fn set_emit_code_result(
&self,
- specifier: ModuleSpecifier,
- data: SpecifierEmitCacheData,
+ specifier: &ModuleSpecifier,
+ source_hash: u64,
+ code: &str,
) -> Result<(), AnyError> {
- self.set(CacheType::Version, &specifier, data.source_hash)?;
- self.set(CacheType::Emit, &specifier, data.text)?;
- if let Some(map) = data.map {
- self.set(CacheType::SourceMap, &specifier, map)?;
- }
+ let meta_filename = self
+ .get_meta_filename(specifier)
+ .ok_or_else(|| anyhow!("Could not get meta filename."))?;
+ let emit_filename = self
+ .get_emit_filename(specifier)
+ .ok_or_else(|| anyhow!("Could not get emit filename."))?;
+
+ // save the metadata
+ let metadata = EmitMetadata {
+ cli_version: self.cli_version.to_string(),
+ source_hash: source_hash.to_string(),
+ emit_hash: compute_emit_hash(code.as_bytes()),
+ };
+ self
+ .disk_cache
+ .set(&meta_filename, &serde_json::to_vec(&metadata)?)?;
+
+ // save the emit source
+ self.disk_cache.set(&emit_filename, code.as_bytes())?;
+
Ok(())
}
+
+ fn get_meta_filename(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> {
+ self
+ .disk_cache
+ .get_cache_filename_with_extension(specifier, "meta")
+ }
+
+ fn get_emit_filename(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> {
+ self
+ .disk_cache
+ .get_cache_filename_with_extension(specifier, "js")
+ }
+}
+
+fn compute_emit_hash(bytes: &[u8]) -> String {
+ // it's ok to use an insecure hash here because
+  // if someone can modify the emitted code in the cache then they
+  // can also modify the emit hash stored in the metadata file
+ FastInsecureHasher::new().write(bytes).finish().to_string()
+}
+
+#[cfg(test)]
+mod test {
+ use test_util::TempDir;
+
+ use super::*;
+
+ #[test]
+ pub fn emit_cache_general_use() {
+ let temp_dir = TempDir::new();
+ let disk_cache = DiskCache::new(temp_dir.path());
+ let cache = EmitCache {
+ disk_cache: disk_cache.clone(),
+ cli_version: "1.0.0".to_string(),
+ };
+
+ let specifier1 =
+ ModuleSpecifier::from_file_path(temp_dir.path().join("file1.ts"))
+ .unwrap();
+ let specifier2 =
+ ModuleSpecifier::from_file_path(temp_dir.path().join("file2.ts"))
+ .unwrap();
+ assert_eq!(cache.get_emit_code(&specifier1, 1), None);
+ let emit_code1 = "text1".to_string();
+ let emit_code2 = "text2".to_string();
+ cache.set_emit_code(&specifier1, 10, &emit_code1);
+ cache.set_emit_code(&specifier2, 2, &emit_code2);
+ // providing the incorrect source hash
+ assert_eq!(cache.get_emit_code(&specifier1, 5), None);
+ // providing the correct source hash
+ assert_eq!(
+ cache.get_emit_code(&specifier1, 10),
+ Some(emit_code1.clone()),
+ );
+ assert_eq!(cache.get_emit_code(&specifier2, 2), Some(emit_code2),);
+
+ // try changing the cli version (should not load previous ones)
+ let cache = EmitCache {
+ disk_cache: disk_cache.clone(),
+ cli_version: "2.0.0".to_string(),
+ };
+ assert_eq!(cache.get_emit_code(&specifier1, 10), None);
+ cache.set_emit_code(&specifier1, 5, &emit_code1);
+
+ // recreating the cache should still load the data because the CLI version is the same
+ let cache = EmitCache {
+ disk_cache,
+ cli_version: "2.0.0".to_string(),
+ };
+ assert_eq!(cache.get_emit_code(&specifier1, 5), Some(emit_code1));
+
+    // adding an entry that already exists should not cause issues
+ let emit_code3 = "asdf".to_string();
+ cache.set_emit_code(&specifier1, 20, &emit_code3);
+ assert_eq!(cache.get_emit_code(&specifier1, 5), None);
+ assert_eq!(cache.get_emit_code(&specifier1, 20), Some(emit_code3));
+ }
}
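Each module now gets two artifacts in the gen cache: a .meta JSON record (source hash, emit hash, CLI version) and the emitted .js with an embedded source map, and a read only succeeds when all three checks pass. The test above exercises this; a condensed round-trip sketch, assuming a scratch directory for the DiskCache:

use deno_ast::ModuleSpecifier;
use crate::cache::{DiskCache, EmitCache};

fn emit_cache_round_trip(scratch_dir: &std::path::Path) {
  let cache = EmitCache::new(DiskCache::new(scratch_dir));
  let specifier = ModuleSpecifier::parse("file:///mod.ts").unwrap();
  let source_hash = 42_u64; // normally produced by emit::get_source_hash

  cache.set_emit_code(&specifier, source_hash, "console.log(1);");
  // Same source hash (and same CLI version): cache hit.
  assert!(cache.get_emit_code(&specifier, source_hash).is_some());
  // Different source hash: miss, which forces a fresh transpile.
  assert!(cache.get_emit_code(&specifier, source_hash + 1).is_none());
}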
diff --git a/cli/cache/incremental.rs b/cli/cache/incremental.rs
index b5fff0734..da832a8b5 100644
--- a/cli/cache/incremental.rs
+++ b/cli/cache/incremental.rs
@@ -12,8 +12,8 @@ use deno_runtime::deno_webstorage::rusqlite::Connection;
use serde::Serialize;
use tokio::task::JoinHandle;
-use super::common::fast_insecure_hash;
use super::common::run_sqlite_pragma;
+use super::common::FastInsecureHasher;
/// Cache used to skip formatting/linting a file again when we
/// know it is already formatted or has no lint diagnostics.
@@ -79,8 +79,9 @@ impl IncrementalCacheInner {
state: &TState,
initial_file_paths: &[PathBuf],
) -> Result<Self, AnyError> {
- let state_hash =
- fast_insecure_hash(serde_json::to_string(state).unwrap().as_bytes());
+ let state_hash = FastInsecureHasher::new()
+ .write_str(&serde_json::to_string(state).unwrap())
+ .finish();
let sql_cache = SqlIncrementalCache::new(db_file_path, state_hash)?;
Ok(Self::from_sql_incremental_cache(
sql_cache,
@@ -123,13 +124,15 @@ impl IncrementalCacheInner {
pub fn is_file_same(&self, file_path: &Path, file_text: &str) -> bool {
match self.previous_hashes.get(file_path) {
- Some(hash) => *hash == fast_insecure_hash(file_text.as_bytes()),
+ Some(hash) => {
+ *hash == FastInsecureHasher::new().write_str(file_text).finish()
+ }
None => false,
}
}
pub fn update_file(&self, file_path: &Path, file_text: &str) {
- let hash = fast_insecure_hash(file_text.as_bytes());
+ let hash = FastInsecureHasher::new().write_str(file_text).finish();
if let Some(previous_hash) = self.previous_hashes.get(file_path) {
if *previous_hash == hash {
return; // do not bother updating the db file because nothing has changed
@@ -334,7 +337,7 @@ mod test {
.unwrap();
let file_path = PathBuf::from("/mod.ts");
let file_text = "test";
- let file_hash = fast_insecure_hash(file_text.as_bytes());
+ let file_hash = FastInsecureHasher::new().write_str(file_text).finish();
sql_cache.set_source_hash(&file_path, file_hash).unwrap();
let cache = IncrementalCacheInner::from_sql_incremental_cache(
sql_cache,
diff --git a/cli/cache/mod.rs b/cli/cache/mod.rs
index f363d8fa8..7482826cf 100644
--- a/cli/cache/mod.rs
+++ b/cli/cache/mod.rs
@@ -3,10 +3,7 @@
use crate::errors::get_error_class_name;
use crate::file_fetcher::FileFetcher;
-use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
-use deno_core::serde::Deserialize;
-use deno_core::serde::Serialize;
use deno_core::ModuleSpecifier;
use deno_graph::source::CacheInfo;
use deno_graph::source::LoadFuture;
@@ -22,44 +19,15 @@ mod emit;
mod incremental;
pub use check::TypeCheckCache;
+pub use common::FastInsecureHasher;
pub use disk_cache::DiskCache;
pub use emit::EmitCache;
-pub use emit::SpecifierEmitCacheData;
pub use incremental::IncrementalCache;
-#[derive(Debug, Deserialize, Serialize)]
-pub struct EmitMetadata {
- pub version_hash: String,
-}
-
-pub enum CacheType {
- Emit,
- SourceMap,
- Version,
-}
-
-/// A trait which provides a concise implementation to getting and setting
-/// values in a cache.
-pub trait Cacher {
- /// Get a value from the cache.
- fn get(
- &self,
- cache_type: CacheType,
- specifier: &ModuleSpecifier,
- ) -> Option<String>;
- /// Set a value in the cache.
- fn set(
- &self,
- cache_type: CacheType,
- specifier: &ModuleSpecifier,
- value: String,
- ) -> Result<(), AnyError>;
-}
-
/// A "wrapper" for the FileFetcher and DiskCache for the Deno CLI that provides
/// a concise interface to the DENO_DIR when building module graphs.
pub struct FetchCacher {
- disk_cache: DiskCache,
+ emit_cache: EmitCache,
dynamic_permissions: Permissions,
file_fetcher: Arc<FileFetcher>,
root_permissions: Permissions,
@@ -67,7 +35,7 @@ pub struct FetchCacher {
impl FetchCacher {
pub fn new(
- disk_cache: DiskCache,
+ emit_cache: EmitCache,
file_fetcher: FileFetcher,
root_permissions: Permissions,
dynamic_permissions: Permissions,
@@ -75,7 +43,7 @@ impl FetchCacher {
let file_fetcher = Arc::new(file_fetcher);
Self {
- disk_cache,
+ emit_cache,
dynamic_permissions,
file_fetcher,
root_permissions,
@@ -87,21 +55,14 @@ impl Loader for FetchCacher {
fn get_cache_info(&self, specifier: &ModuleSpecifier) -> Option<CacheInfo> {
let local = self.file_fetcher.get_local_path(specifier)?;
if local.is_file() {
- let location = &self.disk_cache.location;
let emit = self
- .disk_cache
- .get_cache_filename_with_extension(specifier, "js")
- .map(|p| location.join(p))
- .filter(|p| p.is_file());
- let map = self
- .disk_cache
- .get_cache_filename_with_extension(specifier, "js.map")
- .map(|p| location.join(p))
+ .emit_cache
+ .get_emit_filepath(specifier)
.filter(|p| p.is_file());
Some(CacheInfo {
local: Some(local),
emit,
- map,
+ map: None,
})
} else {
None
diff --git a/cli/deno_dir.rs b/cli/deno_dir.rs
index 8ca705691..303ad2c11 100644
--- a/cli/deno_dir.rs
+++ b/cli/deno_dir.rs
@@ -58,7 +58,7 @@ impl DenoDir {
self.root.join("lint_incremental_cache_v1")
}
- /// Path for the incremental cache used for linting.
+ /// Path for the cache used for type checking.
pub fn type_checking_cache_db_file_path(&self) -> PathBuf {
// bump this version name to invalidate the entire cache
self.root.join("check_cache_v1")
diff --git a/cli/emit.rs b/cli/emit.rs
index 584593869..1d8ecca43 100644
--- a/cli/emit.rs
+++ b/cli/emit.rs
@@ -10,7 +10,7 @@ use crate::args::EmitConfigOptions;
use crate::args::TsConfig;
use crate::args::TypeCheckMode;
use crate::cache::EmitCache;
-use crate::cache::SpecifierEmitCacheData;
+use crate::cache::FastInsecureHasher;
use crate::cache::TypeCheckCache;
use crate::colors;
use crate::diagnostics::Diagnostics;
@@ -22,6 +22,7 @@ use crate::version;
use deno_ast::swc::bundler::Hook;
use deno_ast::swc::bundler::ModuleRecord;
use deno_ast::swc::common::Span;
+use deno_ast::ParsedSource;
use deno_core::error::AnyError;
use deno_core::parking_lot::RwLock;
use deno_core::serde::Deserialize;
@@ -32,14 +33,11 @@ use deno_core::serde_json;
use deno_core::serde_json::json;
use deno_core::ModuleSpecifier;
use deno_graph::MediaType;
-use deno_graph::ModuleGraph;
use deno_graph::ModuleGraphError;
use deno_graph::ModuleKind;
use deno_graph::ResolutionError;
-use std::collections::HashSet;
use std::fmt;
use std::sync::Arc;
-use std::time::Instant;
/// A structure representing stats from an emit operation for a graph.
#[derive(Clone, Debug, Default, Eq, PartialEq)]
@@ -116,8 +114,8 @@ pub enum TsConfigType {
/// Return a configuration for bundling, using swc to emit the bundle. This is
/// independent of type checking.
Bundle,
- /// Return a configuration to use tsc to type check and optionally emit. This
- /// is independent of either bundling or just emitting via swc
+ /// Return a configuration to use tsc to type check. This
+ /// is independent of either bundling or emitting via swc.
Check { lib: TsTypeLib },
/// Return a configuration to use swc to emit single module files.
Emit,
@@ -234,31 +232,30 @@ fn get_tsc_roots(
/// A hashing function that takes the source code, version and optionally a
/// user provided config and generates a string hash which can be stored to
/// determine if the cached emit is valid or not.
-fn get_version(source_bytes: &[u8], config_bytes: &[u8]) -> String {
- crate::checksum::gen(&[
- source_bytes,
- version::deno().as_bytes(),
- config_bytes,
- ])
+pub fn get_source_hash(source_text: &str, emit_options_hash: u64) -> u64 {
+ FastInsecureHasher::new()
+ .write_str(source_text)
+ .write_u64(emit_options_hash)
+ .finish()
}
-/// Determine if a given module kind and media type is emittable or not.
-pub fn is_emittable(
- kind: &ModuleKind,
- media_type: &MediaType,
- include_js: bool,
-) -> bool {
- if matches!(kind, ModuleKind::Synthetic) {
- return false;
- }
- match &media_type {
- MediaType::TypeScript
- | MediaType::Mts
- | MediaType::Cts
- | MediaType::Tsx
- | MediaType::Jsx => true,
- MediaType::JavaScript | MediaType::Mjs | MediaType::Cjs => include_js,
- _ => false,
+pub fn emit_parsed_source(
+ cache: &EmitCache,
+ specifier: &ModuleSpecifier,
+ parsed_source: &ParsedSource,
+ emit_options: &deno_ast::EmitOptions,
+ emit_config_hash: u64,
+) -> Result<String, AnyError> {
+ let source_hash =
+ get_source_hash(parsed_source.text_info().text_str(), emit_config_hash);
+
+ if let Some(emit_code) = cache.get_emit_code(specifier, source_hash) {
+ Ok(emit_code)
+ } else {
+ let transpiled_source = parsed_source.transpile(emit_options)?;
+ debug_assert!(transpiled_source.source_map.is_none());
+ cache.set_emit_code(specifier, source_hash, &transpiled_source.text);
+ Ok(transpiled_source.text)
}
}
@@ -376,72 +373,6 @@ pub fn check(
})
}
-pub struct EmitOptions {
- pub ts_config: TsConfig,
- pub reload: bool,
- pub reload_exclusions: HashSet<ModuleSpecifier>,
-}
-
-/// Given a module graph, emit any appropriate modules and cache them.
-// TODO(nayeemrmn): This would ideally take `GraphData` like
-// `check()`, but the AST isn't stored in that. Cleanup.
-pub fn emit(
- graph: &ModuleGraph,
- cache: &dyn EmitCache,
- options: EmitOptions,
-) -> Result<CheckResult, AnyError> {
- let start = Instant::now();
- let config_bytes = options.ts_config.as_bytes();
- let include_js = options.ts_config.get_check_js();
- let emit_options = options.ts_config.into();
-
- let mut emit_count = 0_u32;
- let mut file_count = 0_u32;
- for module in graph.modules() {
- file_count += 1;
- if !is_emittable(&module.kind, &module.media_type, include_js) {
- continue;
- }
- let needs_reload =
- options.reload && !options.reload_exclusions.contains(&module.specifier);
- let version = get_version(
- module.maybe_source.as_ref().map(|s| s.as_bytes()).unwrap(),
- &config_bytes,
- );
- let is_valid = cache
- .get_source_hash(&module.specifier)
- .map_or(false, |v| v == version);
- if is_valid && !needs_reload {
- continue;
- }
- let transpiled_source = module
- .maybe_parsed_source
- .as_ref()
- .map(|source| source.transpile(&emit_options))
- .unwrap()?;
- emit_count += 1;
- cache.set_emit_data(
- module.specifier.clone(),
- SpecifierEmitCacheData {
- source_hash: version,
- text: transpiled_source.text,
- map: transpiled_source.source_map,
- },
- )?;
- }
-
- let stats = Stats(vec![
- ("Files".to_string(), file_count),
- ("Emitted".to_string(), emit_count),
- ("Total time".to_string(), start.elapsed().as_millis() as u32),
- ]);
-
- Ok(CheckResult {
- diagnostics: Diagnostics::default(),
- stats,
- })
-}
-
enum CheckHashResult {
Hash(u64),
NoFiles,
@@ -624,36 +555,3 @@ impl From<TsConfig> for deno_ast::EmitOptions {
}
}
}
-
-#[cfg(test)]
-mod tests {
- use super::*;
-
- #[test]
- fn test_is_emittable() {
- assert!(is_emittable(
- &ModuleKind::Esm,
- &MediaType::TypeScript,
- false
- ));
- assert!(!is_emittable(
- &ModuleKind::Synthetic,
- &MediaType::TypeScript,
- false
- ));
- assert!(!is_emittable(&ModuleKind::Esm, &MediaType::Dts, false));
- assert!(!is_emittable(&ModuleKind::Esm, &MediaType::Dcts, false));
- assert!(!is_emittable(&ModuleKind::Esm, &MediaType::Dmts, false));
- assert!(is_emittable(&ModuleKind::Esm, &MediaType::Tsx, false));
- assert!(!is_emittable(
- &ModuleKind::Esm,
- &MediaType::JavaScript,
- false
- ));
- assert!(!is_emittable(&ModuleKind::Esm, &MediaType::Cjs, false));
- assert!(!is_emittable(&ModuleKind::Esm, &MediaType::Mjs, false));
- assert!(is_emittable(&ModuleKind::Esm, &MediaType::JavaScript, true));
- assert!(is_emittable(&ModuleKind::Esm, &MediaType::Jsx, false));
- assert!(!is_emittable(&ModuleKind::Esm, &MediaType::Json, false));
- }
-}
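Because the new cache key (get_source_hash above) folds the module source together with a hash of the resolved emit options, either editing a file or changing the emit configuration produces a different key and forces a re-transpile, replacing the old eager emit() pass over the whole graph. A small illustration of that invalidation property; the option hashes below are made-up placeholders for ProcState::emit_options_hash:

use crate::emit::get_source_hash;

fn source_hash_invalidation() {
  let default_options = 1_u64; // placeholder emit options hash
  let changed_options = 2_u64; // e.g. after a jsx setting changes

  let original = get_source_hash("const x = 1;", default_options);
  let edited = get_source_hash("const x = 2;", default_options);
  let reconfigured = get_source_hash("const x = 1;", changed_options);

  // Practically guaranteed to differ with xxHash64.
  assert_ne!(original, edited);
  assert_ne!(original, reconfigured);
}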
diff --git a/cli/graph_util.rs b/cli/graph_util.rs
index de418edd7..4f9c66138 100644
--- a/cli/graph_util.rs
+++ b/cli/graph_util.rs
@@ -4,6 +4,7 @@ use crate::colors;
use crate::emit::TsTypeLib;
use crate::errors::get_error_class_name;
+use deno_ast::ParsedSource;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::ModuleSpecifier;
@@ -38,6 +39,7 @@ pub fn contains_specifier(
pub enum ModuleEntry {
Module {
code: Arc<str>,
+ maybe_parsed_source: Option<ParsedSource>,
dependencies: BTreeMap<String, Dependency>,
media_type: MediaType,
/// Whether or not this is a JS/JSX module with a `@ts-check` directive.
@@ -146,6 +148,7 @@ impl GraphData {
};
let module_entry = ModuleEntry::Module {
code,
+ maybe_parsed_source: module.maybe_parsed_source.clone(),
dependencies: module.dependencies.clone(),
ts_check,
media_type,
diff --git a/cli/main.rs b/cli/main.rs
index 55416e0e4..adc719c83 100644
--- a/cli/main.rs
+++ b/cli/main.rs
@@ -168,11 +168,11 @@ fn create_web_worker_callback(
.map(ToOwned::to_owned),
root_cert_store: Some(ps.root_cert_store.clone()),
seed: ps.options.seed(),
- module_loader,
create_web_worker_cb,
preload_module_cb,
format_js_error_fn: Some(Arc::new(format_js_error)),
- source_map_getter: Some(Box::new(ps.clone())),
+ source_map_getter: Some(Box::new(module_loader.clone())),
+ module_loader,
worker_type: args.worker_type,
maybe_inspector_server,
get_error_class_fn: Some(&errors::get_error_class_name),
@@ -248,7 +248,7 @@ pub fn create_main_worker(
.map(ToOwned::to_owned),
root_cert_store: Some(ps.root_cert_store.clone()),
seed: ps.options.seed(),
- source_map_getter: Some(Box::new(ps.clone())),
+ source_map_getter: Some(Box::new(module_loader.clone())),
format_js_error_fn: Some(Arc::new(format_js_error)),
create_web_worker_cb,
web_worker_preload_module_cb,
@@ -518,10 +518,28 @@ async fn cache_command(
cache_flags: CacheFlags,
) -> Result<i32, AnyError> {
let ps = ProcState::build(flags).await?;
+ load_and_type_check(&ps, &cache_flags.files).await?;
+ ps.cache_module_emits()?;
+ Ok(0)
+}
+
+async fn check_command(
+ flags: Flags,
+ check_flags: CheckFlags,
+) -> Result<i32, AnyError> {
+ let ps = ProcState::build(flags).await?;
+ load_and_type_check(&ps, &check_flags.files).await?;
+ Ok(0)
+}
+
+async fn load_and_type_check(
+ ps: &ProcState,
+ files: &Vec<String>,
+) -> Result<(), AnyError> {
let lib = ps.options.ts_type_lib_window();
- for file in cache_flags.files {
- let specifier = resolve_url_or_path(&file)?;
+ for file in files {
+ let specifier = resolve_url_or_path(file)?;
ps.prepare_module_load(
vec![specifier],
false,
@@ -533,20 +551,7 @@ async fn cache_command(
.await?;
}
- Ok(0)
-}
-
-async fn check_command(
- flags: Flags,
- check_flags: CheckFlags,
-) -> Result<i32, AnyError> {
- cache_command(
- flags,
- CacheFlags {
- files: check_flags.files,
- },
- )
- .await
+ Ok(())
}
async fn eval_command(
@@ -609,7 +614,7 @@ async fn create_graph_and_maybe_check(
debug: bool,
) -> Result<Arc<deno_graph::ModuleGraph>, AnyError> {
let mut cache = cache::FetchCacher::new(
- ps.dir.gen_cache.clone(),
+ ps.emit_cache.clone(),
ps.file_fetcher.clone(),
Permissions::allow_all(),
Permissions::allow_all(),
diff --git a/cli/module_loader.rs b/cli/module_loader.rs
index cf94a4767..8da205907 100644
--- a/cli/module_loader.rs
+++ b/cli/module_loader.rs
@@ -1,20 +1,36 @@
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
+use crate::emit::emit_parsed_source;
use crate::emit::TsTypeLib;
+use crate::graph_util::ModuleEntry;
use crate::proc_state::ProcState;
+use crate::text_encoding::code_without_source_map;
+use crate::text_encoding::source_map_from_code;
+use deno_ast::MediaType;
+use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::futures::future::FutureExt;
use deno_core::futures::Future;
+use deno_core::resolve_url;
use deno_core::ModuleLoader;
+use deno_core::ModuleSource;
use deno_core::ModuleSpecifier;
+use deno_core::ModuleType;
use deno_core::OpState;
+use deno_core::SourceMapGetter;
use deno_runtime::permissions::Permissions;
use std::cell::RefCell;
use std::pin::Pin;
use std::rc::Rc;
use std::str;
+struct ModuleCodeSource {
+ pub code: String,
+ pub found_url: ModuleSpecifier,
+ pub media_type: MediaType,
+}
+
pub struct CliModuleLoader {
pub lib: TsTypeLib,
/// The initial set of permissions used to resolve the static imports in the
@@ -40,6 +56,65 @@ impl CliModuleLoader {
ps,
})
}
+
+ fn load_prepared_module(
+ &self,
+ specifier: &ModuleSpecifier,
+ ) -> Result<ModuleCodeSource, AnyError> {
+ let graph_data = self.ps.graph_data.read();
+ let found_url = graph_data.follow_redirect(specifier);
+ match graph_data.get(&found_url) {
+ Some(ModuleEntry::Module {
+ code,
+ media_type,
+ maybe_parsed_source,
+ ..
+ }) => {
+ let code = match media_type {
+ MediaType::JavaScript
+ | MediaType::Unknown
+ | MediaType::Cjs
+ | MediaType::Mjs
+ | MediaType::Json => {
+ if let Some(source) = graph_data.get_cjs_esm_translation(specifier)
+ {
+ source.to_owned()
+ } else {
+ code.to_string()
+ }
+ }
+ MediaType::Dts | MediaType::Dcts | MediaType::Dmts => "".to_string(),
+ MediaType::TypeScript
+ | MediaType::Mts
+ | MediaType::Cts
+ | MediaType::Jsx
+ | MediaType::Tsx => {
+ // get emit text
+ let parsed_source = maybe_parsed_source.as_ref().unwrap(); // should always be set
+ emit_parsed_source(
+ &self.ps.emit_cache,
+ &found_url,
+ parsed_source,
+ &self.ps.emit_options,
+ self.ps.emit_options_hash,
+ )?
+ }
+ MediaType::TsBuildInfo | MediaType::Wasm | MediaType::SourceMap => {
+ panic!("Unexpected media type {} for {}", media_type, found_url)
+ }
+ };
+ Ok(ModuleCodeSource {
+ code,
+ found_url,
+ media_type: *media_type,
+ })
+ }
+ _ => Err(anyhow!(
+ "Loading unprepared module: {}",
+ specifier.to_string()
+ )),
+ }
+ }
}
impl ModuleLoader for CliModuleLoader {
@@ -54,18 +129,35 @@ impl ModuleLoader for CliModuleLoader {
fn load(
&self,
- module_specifier: &ModuleSpecifier,
- maybe_referrer: Option<ModuleSpecifier>,
- is_dynamic: bool,
+ specifier: &ModuleSpecifier,
+ _maybe_referrer: Option<ModuleSpecifier>,
+ _is_dynamic: bool,
) -> Pin<Box<deno_core::ModuleSourceFuture>> {
- let module_specifier = module_specifier.clone();
- let ps = self.ps.clone();
-
// NOTE: this block is async only because of `deno_core` interface
// requirements; module was already loaded when constructing module graph
-    // during call to `prepare_load`.
- async move { ps.load(module_specifier, maybe_referrer, is_dynamic) }
- .boxed_local()
+    // during call to `prepare_load` so we can load it synchronously.
+ let result = self.load_prepared_module(specifier).map(|code_source| {
+ let code = if self.ps.options.is_inspecting() {
+ // we need the code with the source map in order for
+ // it to work with --inspect or --inspect-brk
+ code_source.code
+ } else {
+ // reduce memory and throw away the source map
+ // because we don't need it
+ code_without_source_map(code_source.code)
+ };
+ ModuleSource {
+ code: code.into_bytes().into_boxed_slice(),
+ module_url_specified: specifier.to_string(),
+ module_url_found: code_source.found_url.to_string(),
+ module_type: match code_source.media_type {
+ MediaType::Json => ModuleType::Json,
+ _ => ModuleType::JavaScript,
+ },
+ }
+ });
+
+ Box::pin(deno_core::futures::future::ready(result))
}
fn prepare_load(
@@ -103,3 +195,47 @@ impl ModuleLoader for CliModuleLoader {
.boxed_local()
}
}
+
+impl SourceMapGetter for CliModuleLoader {
+ fn get_source_map(&self, file_name: &str) -> Option<Vec<u8>> {
+ if let Ok(specifier) = resolve_url(file_name) {
+ match specifier.scheme() {
+ // we should only be looking for emits for schemes that denote external
+ // modules, which the disk_cache supports
+ "wasm" | "file" | "http" | "https" | "data" | "blob" => (),
+ _ => return None,
+ }
+ if let Ok(source) = self.load_prepared_module(&specifier) {
+ source_map_from_code(&source.code)
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+ }
+
+ fn get_source_line(
+ &self,
+ file_name: &str,
+ line_number: usize,
+ ) -> Option<String> {
+ let graph_data = self.ps.graph_data.read();
+ let specifier = graph_data.follow_redirect(&resolve_url(file_name).ok()?);
+ let code = match graph_data.get(&specifier) {
+ Some(ModuleEntry::Module { code, .. }) => code,
+ _ => return None,
+ };
+ // Do NOT use .lines(): it skips the terminating empty line.
+    // (due to internally using .split_terminator() instead of .split())
+ let lines: Vec<&str> = code.split('\n').collect();
+ if line_number >= lines.len() {
+ Some(format!(
+ "{} Couldn't format source line: Line {} is out of bounds (source may have changed at runtime)",
+ crate::colors::yellow("Warning"), line_number + 1,
+ ))
+ } else {
+ Some(lines[line_number].to_string())
+ }
+ }
+}
diff --git a/cli/proc_state.rs b/cli/proc_state.rs
index bb51b6f1a..4d9a4a779 100644
--- a/cli/proc_state.rs
+++ b/cli/proc_state.rs
@@ -6,11 +6,13 @@ use crate::args::Flags;
use crate::args::TypeCheckMode;
use crate::cache;
use crate::cache::EmitCache;
+use crate::cache::FastInsecureHasher;
use crate::cache::TypeCheckCache;
use crate::compat;
use crate::compat::NodeEsmResolver;
use crate::deno_dir;
use crate::emit;
+use crate::emit::emit_parsed_source;
use crate::emit::TsConfigType;
use crate::emit::TsTypeLib;
use crate::file_fetcher::FileFetcher;
@@ -23,7 +25,6 @@ use crate::lockfile::Lockfile;
use crate::resolver::ImportMapResolver;
use crate::resolver::JsxResolver;
-use deno_ast::MediaType;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::Context;
use deno_core::error::custom_error;
@@ -31,14 +32,10 @@ use deno_core::error::AnyError;
use deno_core::futures;
use deno_core::parking_lot::Mutex;
use deno_core::parking_lot::RwLock;
-use deno_core::resolve_url;
use deno_core::url::Url;
use deno_core::CompiledWasmModuleStore;
-use deno_core::ModuleSource;
use deno_core::ModuleSpecifier;
-use deno_core::ModuleType;
use deno_core::SharedArrayBufferStore;
-use deno_core::SourceMapGetter;
use deno_graph::create_graph;
use deno_graph::source::CacheInfo;
use deno_graph::source::LoadFuture;
@@ -70,7 +67,10 @@ pub struct Inner {
pub coverage_dir: Option<String>,
pub file_fetcher: FileFetcher,
pub options: Arc<CliOptions>,
- graph_data: Arc<RwLock<GraphData>>,
+ pub emit_cache: EmitCache,
+ pub emit_options: deno_ast::EmitOptions,
+ pub emit_options_hash: u64,
+ pub graph_data: Arc<RwLock<GraphData>>,
pub lockfile: Option<Arc<Mutex<Lockfile>>>,
pub maybe_import_map: Option<Arc<ImportMap>>,
pub maybe_inspector_server: Option<Arc<InspectorServer>>,
@@ -211,10 +211,23 @@ impl ProcState {
file_paths: Arc::new(Mutex::new(vec![])),
});
+ let ts_config_result =
+ cli_options.resolve_ts_config_for_emit(TsConfigType::Emit)?;
+ if let Some(ignored_options) = ts_config_result.maybe_ignored_options {
+ warn!("{}", ignored_options);
+ }
+ let emit_cache = EmitCache::new(dir.gen_cache.clone());
+
Ok(ProcState(Arc::new(Inner {
dir,
coverage_dir,
options: cli_options,
+ emit_cache,
+ emit_options_hash: FastInsecureHasher::new()
+ // todo(dsherret): use hash of emit options instead as it's more specific
+ .write(&ts_config_result.ts_config.as_bytes())
+ .finish(),
+ emit_options: ts_config_result.ts_config.into(),
file_fetcher,
graph_data: Default::default(),
lockfile,
@@ -300,7 +313,7 @@ impl ProcState {
}
}
let mut cache = cache::FetchCacher::new(
- self.dir.gen_cache.clone(),
+ self.emit_cache.clone(),
self.file_fetcher.clone(),
root_permissions.clone(),
dynamic_permissions.clone(),
@@ -411,58 +424,31 @@ impl ProcState {
.unwrap()?;
}
- let config_type = if self.options.type_check_mode() == TypeCheckMode::None {
- TsConfigType::Emit
- } else {
- TsConfigType::Check { lib }
- };
-
- let ts_config_result =
- self.options.resolve_ts_config_for_emit(config_type)?;
-
- if let Some(ignored_options) = ts_config_result.maybe_ignored_options {
- warn!("{}", ignored_options);
- }
-
- // start type checking if necessary
- let type_checking_task =
- if self.options.type_check_mode() != TypeCheckMode::None {
- let maybe_config_specifier = self.options.maybe_config_file_specifier();
- let roots = roots.clone();
- let options = emit::CheckOptions {
- type_check_mode: self.options.type_check_mode(),
- debug: self.options.log_level() == Some(log::Level::Debug),
- maybe_config_specifier,
- ts_config: ts_config_result.ts_config.clone(),
- log_checks: true,
- reload: self.options.reload_flag()
- && !roots.iter().all(|r| reload_exclusions.contains(&r.0)),
- };
- // todo(THIS PR): don't use a cache on failure
- let check_cache =
- TypeCheckCache::new(&self.dir.type_checking_cache_db_file_path());
- let graph_data = self.graph_data.clone();
- Some(tokio::task::spawn_blocking(move || {
- emit::check(&roots, graph_data, &check_cache, options)
- }))
- } else {
- None
+ // type check if necessary
+ if self.options.type_check_mode() != TypeCheckMode::None {
+ let maybe_config_specifier = self.options.maybe_config_file_specifier();
+ let roots = roots.clone();
+ let options = emit::CheckOptions {
+ type_check_mode: self.options.type_check_mode(),
+ debug: self.options.log_level() == Some(log::Level::Debug),
+ maybe_config_specifier,
+ ts_config: self
+ .options
+ .resolve_ts_config_for_emit(TsConfigType::Check { lib })?
+ .ts_config,
+ log_checks: true,
+ reload: self.options.reload_flag()
+ && !roots.iter().all(|r| reload_exclusions.contains(&r.0)),
};
-
- let options = emit::EmitOptions {
- ts_config: ts_config_result.ts_config,
- reload: self.options.reload_flag(),
- reload_exclusions,
- };
- let emit_result = emit::emit(&graph, &self.dir.gen_cache, options)?;
- log::debug!("{}", emit_result.stats);
-
- if let Some(type_checking_task) = type_checking_task {
- let type_check_result = type_checking_task.await??;
- if !type_check_result.diagnostics.is_empty() {
- return Err(anyhow!(type_check_result.diagnostics));
+ let check_cache =
+ TypeCheckCache::new(&self.dir.type_checking_cache_db_file_path());
+ let graph_data = self.graph_data.clone();
+ let check_result =
+ emit::check(&roots, graph_data, &check_cache, options)?;
+ if !check_result.diagnostics.is_empty() {
+ return Err(anyhow!(check_result.diagnostics));
}
- log::debug!("{}", type_check_result.stats);
+ log::debug!("{}", check_result.stats);
}
if self.options.type_check_mode() != TypeCheckMode::None {
@@ -531,72 +517,24 @@ impl ProcState {
}
}
- pub fn load(
- &self,
- specifier: ModuleSpecifier,
- maybe_referrer: Option<ModuleSpecifier>,
- is_dynamic: bool,
- ) -> Result<ModuleSource, AnyError> {
- log::debug!(
- "specifier: {} maybe_referrer: {} is_dynamic: {}",
- specifier,
- maybe_referrer
- .as_ref()
- .map(|s| s.to_string())
- .unwrap_or_else(|| "<none>".to_string()),
- is_dynamic
- );
-
+ pub fn cache_module_emits(&self) -> Result<(), AnyError> {
let graph_data = self.graph_data.read();
- let found_url = graph_data.follow_redirect(&specifier);
- match graph_data.get(&found_url) {
- Some(ModuleEntry::Module {
- code, media_type, ..
- }) => {
- let code = match media_type {
- MediaType::JavaScript
- | MediaType::Unknown
- | MediaType::Cjs
- | MediaType::Mjs
- | MediaType::Json => {
- if let Some(source) = graph_data.get_cjs_esm_translation(&specifier)
- {
- source.to_owned()
- } else {
- code.to_string()
- }
- }
- MediaType::Dts | MediaType::Dcts | MediaType::Dmts => "".to_string(),
- MediaType::TypeScript
- | MediaType::Mts
- | MediaType::Cts
- | MediaType::Jsx
- | MediaType::Tsx => {
- let cached_text = self.dir.gen_cache.get_emit_text(&found_url);
- match cached_text {
- Some(text) => text,
- None => unreachable!("Unexpected missing emit: {}\n\nTry reloading with the --reload CLI flag or deleting your DENO_DIR.", found_url),
- }
- }
- MediaType::TsBuildInfo | MediaType::Wasm | MediaType::SourceMap => {
- panic!("Unexpected media type {} for {}", media_type, found_url)
- }
- };
- Ok(ModuleSource {
- code: code.into_bytes().into_boxed_slice(),
- module_url_specified: specifier.to_string(),
- module_url_found: found_url.to_string(),
- module_type: match media_type {
- MediaType::Json => ModuleType::Json,
- _ => ModuleType::JavaScript,
- },
- })
+ for (specifier, entry) in graph_data.entries() {
+ if let ModuleEntry::Module {
+ maybe_parsed_source: Some(parsed_source),
+ ..
+ } = entry
+ {
+ emit_parsed_source(
+ &self.emit_cache,
+ specifier,
+ parsed_source,
+ &self.emit_options,
+ self.emit_options_hash,
+ )?;
}
- _ => Err(anyhow!(
- "Loading unprepared module: {}",
- specifier.to_string()
- )),
}
+ Ok(())
}
pub async fn create_graph(
@@ -604,7 +542,7 @@ impl ProcState {
roots: Vec<(ModuleSpecifier, ModuleKind)>,
) -> Result<deno_graph::ModuleGraph, AnyError> {
let mut cache = cache::FetchCacher::new(
- self.dir.gen_cache.clone(),
+ self.emit_cache.clone(),
self.file_fetcher.clone(),
Permissions::allow_all(),
Permissions::allow_all(),
@@ -641,55 +579,6 @@ impl ProcState {
}
}
-// TODO(@kitsonk) this is only temporary, but should be refactored to somewhere
-// else, like a refactored file_fetcher.
-impl SourceMapGetter for ProcState {
- fn get_source_map(&self, file_name: &str) -> Option<Vec<u8>> {
- if let Ok(specifier) = resolve_url(file_name) {
- match specifier.scheme() {
- // we should only be looking for emits for schemes that denote external
- // modules, which the disk_cache supports
- "wasm" | "file" | "http" | "https" | "data" | "blob" => (),
- _ => return None,
- }
- if let Some(cache_data) = self.dir.gen_cache.get_emit_data(&specifier) {
- source_map_from_code(cache_data.text.as_bytes())
- .or_else(|| cache_data.map.map(|t| t.into_bytes()))
- } else if let Ok(source) = self.load(specifier, None, false) {
- source_map_from_code(&source.code)
- } else {
- None
- }
- } else {
- None
- }
- }
-
- fn get_source_line(
- &self,
- file_name: &str,
- line_number: usize,
- ) -> Option<String> {
- let graph_data = self.graph_data.read();
- let specifier = graph_data.follow_redirect(&resolve_url(file_name).ok()?);
- let code = match graph_data.get(&specifier) {
- Some(ModuleEntry::Module { code, .. }) => code,
- _ => return None,
- };
- // Do NOT use .lines(): it skips the terminating empty line.
- // (due to internally using_terminator() instead of .split())
- let lines: Vec<&str> = code.split('\n').collect();
- if line_number >= lines.len() {
- Some(format!(
- "{} Couldn't format source line: Line {} is out of bounds (source may have changed at runtime)",
- crate::colors::yellow("Warning"), line_number + 1,
- ))
- } else {
- Some(lines[line_number].to_string())
- }
- }
-}
-
pub fn import_map_from_text(
specifier: &Url,
json_text: &str,
@@ -714,19 +603,6 @@ pub fn import_map_from_text(
Ok(result.import_map)
}
-fn source_map_from_code(code: &[u8]) -> Option<Vec<u8>> {
- static PREFIX: &[u8] = b"//# sourceMappingURL=data:application/json;base64,";
- let last_line = code.rsplitn(2, |u| u == &b'\n').next().unwrap();
- if last_line.starts_with(PREFIX) {
- let input = last_line.split_at(PREFIX.len()).1;
- let decoded_map = base64::decode(input)
- .expect("Unable to decode source map from emitted file.");
- Some(decoded_map)
- } else {
- None
- }
-}
-
#[derive(Debug)]
struct FileWatcherReporter {
sender: tokio::sync::mpsc::UnboundedSender<Vec<PathBuf>>,
diff --git a/cli/tests/integration/info_tests.rs b/cli/tests/integration/info_tests.rs
index e02bb8ff2..aa678cb6d 100644
--- a/cli/tests/integration/info_tests.rs
+++ b/cli/tests/integration/info_tests.rs
@@ -1,6 +1,7 @@
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
use crate::itest;
+
use test_util as util;
use test_util::TempDir;
diff --git a/cli/text_encoding.rs b/cli/text_encoding.rs
index 392bab7b8..2bb45beb0 100644
--- a/cli/text_encoding.rs
+++ b/cli/text_encoding.rs
@@ -54,6 +54,34 @@ pub fn strip_bom(text: &str) -> &str {
}
}
+static SOURCE_MAP_PREFIX: &str =
+ "//# sourceMappingURL=data:application/json;base64,";
+
+pub fn source_map_from_code(code: &str) -> Option<Vec<u8>> {
+ let last_line = code.rsplit(|u| u == '\n').next()?;
+ if last_line.starts_with(SOURCE_MAP_PREFIX) {
+ let input = last_line.split_at(SOURCE_MAP_PREFIX.len()).1;
+ let decoded_map = base64::decode(input)
+ .expect("Unable to decode source map from emitted file.");
+ Some(decoded_map)
+ } else {
+ None
+ }
+}
+
+pub fn code_without_source_map(mut code: String) -> String {
+ if let Some(last_line_index) = code.rfind('\n') {
+ if code[last_line_index + 1..].starts_with(SOURCE_MAP_PREFIX) {
+ code.truncate(last_line_index + 1);
+ code
+ } else {
+ code
+ }
+ } else {
+ code
+ }
+}
+
#[cfg(test)]
mod tests {
use super::*;
@@ -103,4 +131,33 @@ mod tests {
let err = result.expect_err("Err expected");
assert!(err.kind() == ErrorKind::InvalidData);
}
+
+ #[test]
+ fn test_source_without_source_map() {
+ run_test("", "");
+ run_test("\n", "\n");
+ run_test("\r\n", "\r\n");
+ run_test("a", "a");
+ run_test("a\n", "a\n");
+ run_test("a\r\n", "a\r\n");
+ run_test("a\r\nb", "a\r\nb");
+ run_test("a\nb\n", "a\nb\n");
+ run_test("a\r\nb\r\n", "a\r\nb\r\n");
+ run_test(
+ "test\n//# sourceMappingURL=data:application/json;base64,test",
+ "test\n",
+ );
+ run_test(
+ "test\r\n//# sourceMappingURL=data:application/json;base64,test",
+ "test\r\n",
+ );
+ run_test(
+ "\n//# sourceMappingURL=data:application/json;base64,test",
+ "\n",
+ );
+
+ fn run_test(input: &str, output: &str) {
+ assert_eq!(code_without_source_map(input.to_string()), output);
+ }
+ }
}
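A short sketch of the two helpers added above, using a dummy inline source map ("e30=" is base64 for "{}"): the module loader strips the trailing comment to save memory when no inspector is attached, while coverage and the SourceMapGetter path decode it.

use crate::text_encoding::{code_without_source_map, source_map_from_code};

fn source_map_helpers_example() {
  let emitted = "console.log(1);\n\
                 //# sourceMappingURL=data:application/json;base64,e30="
    .to_string();

  // Decode the inline map (as cover_files and get_source_map do).
  assert_eq!(source_map_from_code(&emitted), Some(b"{}".to_vec()));

  // Or drop the trailing comment when --inspect is not in use.
  assert_eq!(code_without_source_map(emitted), "console.log(1);\n");
}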
diff --git a/cli/tools/coverage/mod.rs b/cli/tools/coverage/mod.rs
index d2c6c1894..1ff7f2bc2 100644
--- a/cli/tools/coverage/mod.rs
+++ b/cli/tools/coverage/mod.rs
@@ -2,10 +2,11 @@
use crate::args::CoverageFlags;
use crate::args::Flags;
-use crate::cache::EmitCache;
use crate::colors;
+use crate::emit::get_source_hash;
use crate::fs_util::collect_files;
use crate::proc_state::ProcState;
+use crate::text_encoding::source_map_from_code;
use crate::tools::fmt::format_json;
use deno_ast::MediaType;
@@ -17,7 +18,6 @@ use deno_core::serde_json;
use deno_core::sourcemap::SourceMap;
use deno_core::url::Url;
use deno_core::LocalInspectorSession;
-use deno_core::SourceMapGetter;
use regex::Regex;
use std::fs;
use std::fs::File;
@@ -665,7 +665,8 @@ pub async fn cover_files(
})?;
// Check if file was transpiled
- let transpiled_source = match file.media_type {
+ let original_source = &file.source;
+ let transpiled_code = match file.media_type {
MediaType::JavaScript
| MediaType::Unknown
| MediaType::Cjs
@@ -677,8 +678,10 @@ pub async fn cover_files(
| MediaType::Mts
| MediaType::Cts
| MediaType::Tsx => {
- match ps.dir.gen_cache.get_emit_text(&file.specifier) {
- Some(source) => source,
+ let source_hash =
+ get_source_hash(original_source, ps.emit_options_hash);
+ match ps.emit_cache.get_emit_code(&file.specifier, source_hash) {
+ Some(code) => code,
None => {
return Err(anyhow!(
"Missing transpiled source code for: \"{}\".
@@ -693,13 +696,10 @@ pub async fn cover_files(
}
};
- let original_source = &file.source;
- let maybe_source_map = ps.get_source_map(&script_coverage.url);
-
let coverage_report = generate_coverage_report(
&script_coverage,
- &transpiled_source,
- &maybe_source_map,
+ &transpiled_code,
+ &source_map_from_code(&transpiled_code),
&out_mode,
);
diff --git a/core/source_map.rs b/core/source_map.rs
index 6a261fa7d..0df58c4be 100644
--- a/core/source_map.rs
+++ b/core/source_map.rs
@@ -5,6 +5,7 @@
use crate::resolve_url;
pub use sourcemap::SourceMap;
use std::collections::HashMap;
+use std::rc::Rc;
use std::str;
pub trait SourceMapGetter {
@@ -17,6 +18,23 @@ pub trait SourceMapGetter {
) -> Option<String>;
}
+impl<T> SourceMapGetter for Rc<T>
+where
+ T: SourceMapGetter,
+{
+ fn get_source_map(&self, file_name: &str) -> Option<Vec<u8>> {
+ (**self).get_source_map(file_name)
+ }
+
+ fn get_source_line(
+ &self,
+ file_name: &str,
+ line_number: usize,
+ ) -> Option<String> {
+ (**self).get_source_line(file_name, line_number)
+ }
+}
+
#[derive(Debug, Default)]
pub struct SourceMapCache {
maps: HashMap<String, Option<SourceMap>>,
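This blanket impl is what lets cli/main.rs above clone a single Rc<CliModuleLoader> into both the module_loader and source_map_getter slots of the worker options. A self-contained sketch with a toy getter, showing that an Rc of an implementor satisfies the trait and can be boxed as a trait object:

use std::rc::Rc;

use deno_core::SourceMapGetter;

struct NoopGetter;

impl SourceMapGetter for NoopGetter {
  fn get_source_map(&self, _file_name: &str) -> Option<Vec<u8>> {
    None
  }

  fn get_source_line(
    &self,
    _file_name: &str,
    _line_number: usize,
  ) -> Option<String> {
    None
  }
}

fn main() {
  let getter = Rc::new(NoopGetter);
  // The Rc itself implements SourceMapGetter via the blanket impl,
  // so it can be boxed for the worker while a clone keeps serving
  // as the module loader.
  let boxed: Box<dyn SourceMapGetter> = Box::new(getter.clone());
  assert!(boxed.get_source_map("file:///mod.ts").is_none());
}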