summaryrefslogtreecommitdiff
path: root/ext/node
diff options
context:
space:
mode:
Diffstat (limited to 'ext/node')
-rw-r--r--ext/node/Cargo.toml4
-rw-r--r--ext/node/lib.rs127
-rw-r--r--ext/node/ops/blocklist.rs53
-rw-r--r--ext/node/ops/crypto/cipher.rs145
-rw-r--r--ext/node/ops/crypto/digest.rs30
-rw-r--r--ext/node/ops/crypto/keys.rs683
-rw-r--r--ext/node/ops/crypto/mod.rs416
-rw-r--r--ext/node/ops/crypto/sign.rs168
-rw-r--r--ext/node/ops/crypto/x509.rs66
-rw-r--r--ext/node/ops/fs.rs61
-rw-r--r--ext/node/ops/http.rs91
-rw-r--r--ext/node/ops/http2.rs83
-rw-r--r--ext/node/ops/idna.rs47
-rw-r--r--ext/node/ops/inspector.rs161
-rw-r--r--ext/node/ops/ipc.rs59
-rw-r--r--ext/node/ops/mod.rs2
-rw-r--r--ext/node/ops/os/mod.rs194
-rw-r--r--ext/node/ops/os/priority.rs30
-rw-r--r--ext/node/ops/perf_hooks.rs135
-rw-r--r--ext/node/ops/process.rs3
-rw-r--r--ext/node/ops/require.rs200
-rw-r--r--ext/node/ops/util.rs3
-rw-r--r--ext/node/ops/v8.rs25
-rw-r--r--ext/node/ops/winerror.rs3
-rw-r--r--ext/node/ops/worker_threads.rs77
-rw-r--r--ext/node/ops/zlib/brotli.rs77
-rw-r--r--ext/node/ops/zlib/mod.rs86
-rw-r--r--ext/node/ops/zlib/mode.rs21
-rw-r--r--ext/node/polyfills/01_require.js68
-rw-r--r--ext/node/polyfills/_fs/_fs_common.ts1
-rw-r--r--ext/node/polyfills/_fs/_fs_copy.ts6
-rw-r--r--ext/node/polyfills/_fs/_fs_open.ts4
-rw-r--r--ext/node/polyfills/_fs/_fs_readFile.ts10
-rw-r--r--ext/node/polyfills/_fs/_fs_readlink.ts33
-rw-r--r--ext/node/polyfills/_fs/_fs_readv.ts1
-rw-r--r--ext/node/polyfills/_fs/_fs_stat.ts24
-rw-r--r--ext/node/polyfills/_next_tick.ts5
-rw-r--r--ext/node/polyfills/_process/streams.mjs9
-rw-r--r--ext/node/polyfills/_tls_wrap.ts16
-rw-r--r--ext/node/polyfills/_utils.ts4
-rw-r--r--ext/node/polyfills/_zlib.mjs7
-rw-r--r--ext/node/polyfills/child_process.ts2
-rw-r--r--ext/node/polyfills/http.ts533
-rw-r--r--ext/node/polyfills/http2.ts12
-rw-r--r--ext/node/polyfills/inspector.js210
-rw-r--r--ext/node/polyfills/inspector.ts82
-rw-r--r--ext/node/polyfills/inspector/promises.js20
-rw-r--r--ext/node/polyfills/internal/buffer.mjs575
-rw-r--r--ext/node/polyfills/internal/child_process.ts27
-rw-r--r--ext/node/polyfills/internal/crypto/_randomInt.ts26
-rw-r--r--ext/node/polyfills/internal/crypto/keygen.ts21
-rw-r--r--ext/node/polyfills/internal/crypto/random.ts1
-rw-r--r--ext/node/polyfills/internal/errors.ts49
-rw-r--r--ext/node/polyfills/internal/net.ts1
-rw-r--r--ext/node/polyfills/internal/util/inspect.mjs13
-rw-r--r--ext/node/polyfills/internal_binding/_timingSafeEqual.ts21
-rw-r--r--ext/node/polyfills/internal_binding/http_parser.ts160
-rw-r--r--ext/node/polyfills/internal_binding/mod.ts3
-rw-r--r--ext/node/polyfills/internal_binding/tcp_wrap.ts6
-rw-r--r--ext/node/polyfills/internal_binding/uv.ts2
-rw-r--r--ext/node/polyfills/net.ts490
-rw-r--r--ext/node/polyfills/os.ts53
-rw-r--r--ext/node/polyfills/perf_hooks.ts10
-rw-r--r--ext/node/polyfills/process.ts20
-rw-r--r--ext/node/polyfills/timers.ts88
-rw-r--r--ext/node/polyfills/vm.js1
-rw-r--r--ext/node/polyfills/zlib.ts53
67 files changed, 4027 insertions, 1690 deletions
diff --git a/ext/node/Cargo.toml b/ext/node/Cargo.toml
index c5f07210b..36910a844 100644
--- a/ext/node/Cargo.toml
+++ b/ext/node/Cargo.toml
@@ -2,7 +2,7 @@
[package]
name = "deno_node"
-version = "0.108.0"
+version = "0.114.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
@@ -22,6 +22,7 @@ aes.workspace = true
async-trait.workspace = true
base64.workspace = true
blake2 = "0.10.6"
+boxed_error.workspace = true
brotli.workspace = true
bytes.workspace = true
cbc.workspace = true
@@ -94,6 +95,7 @@ spki.workspace = true
stable_deref_trait = "1.2.0"
thiserror.workspace = true
tokio.workspace = true
+tokio-eld = "0.2"
url.workspace = true
webpki-root-certs.workspace = true
winapi.workspace = true
diff --git a/ext/node/lib.rs b/ext/node/lib.rs
index 03462f36f..63f5794b7 100644
--- a/ext/node/lib.rs
+++ b/ext/node/lib.rs
@@ -9,25 +9,23 @@ use std::path::Path;
use std::path::PathBuf;
use deno_core::error::AnyError;
-use deno_core::located_script_name;
use deno_core::op2;
use deno_core::url::Url;
#[allow(unused_imports)]
use deno_core::v8;
use deno_core::v8::ExternalReference;
-use deno_core::JsRuntime;
-use deno_fs::sync::MaybeSend;
-use deno_fs::sync::MaybeSync;
-use node_resolver::NpmResolverRc;
+use node_resolver::errors::ClosestPkgJsonError;
+use node_resolver::NpmPackageFolderResolverRc;
use once_cell::sync::Lazy;
extern crate libz_sys as zlib;
mod global;
-mod ops;
+pub mod ops;
mod polyfill;
pub use deno_package_json::PackageJson;
+use deno_permissions::PermissionCheckError;
pub use node_resolver::PathClean;
pub use ops::ipc::ChildPipeFd;
pub use ops::ipc::IpcJsonStreamResource;
@@ -49,10 +47,18 @@ pub trait NodePermissions {
&mut self,
url: &Url,
api_name: &str,
- ) -> Result<(), AnyError>;
+ ) -> Result<(), PermissionCheckError>;
+ fn check_net(
+ &mut self,
+ host: (&str, Option<u16>),
+ api_name: &str,
+ ) -> Result<(), PermissionCheckError>;
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
#[inline(always)]
- fn check_read(&mut self, path: &str) -> Result<PathBuf, AnyError> {
+ fn check_read(
+ &mut self,
+ path: &str,
+ ) -> Result<PathBuf, PermissionCheckError> {
self.check_read_with_api_name(path, None)
}
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
@@ -60,20 +66,24 @@ pub trait NodePermissions {
&mut self,
path: &str,
api_name: Option<&str>,
- ) -> Result<PathBuf, AnyError>;
+ ) -> Result<PathBuf, PermissionCheckError>;
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
fn check_read_path<'a>(
&mut self,
path: &'a Path,
- ) -> Result<Cow<'a, Path>, AnyError>;
+ ) -> Result<Cow<'a, Path>, PermissionCheckError>;
fn query_read_all(&mut self) -> bool;
- fn check_sys(&mut self, kind: &str, api_name: &str) -> Result<(), AnyError>;
+ fn check_sys(
+ &mut self,
+ kind: &str,
+ api_name: &str,
+ ) -> Result<(), PermissionCheckError>;
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
fn check_write_with_api_name(
&mut self,
path: &str,
api_name: Option<&str>,
- ) -> Result<PathBuf, AnyError>;
+ ) -> Result<PathBuf, PermissionCheckError>;
}
impl NodePermissions for deno_permissions::PermissionsContainer {
@@ -82,16 +92,24 @@ impl NodePermissions for deno_permissions::PermissionsContainer {
&mut self,
url: &Url,
api_name: &str,
- ) -> Result<(), AnyError> {
+ ) -> Result<(), PermissionCheckError> {
deno_permissions::PermissionsContainer::check_net_url(self, url, api_name)
}
+ fn check_net(
+ &mut self,
+ host: (&str, Option<u16>),
+ api_name: &str,
+ ) -> Result<(), PermissionCheckError> {
+ deno_permissions::PermissionsContainer::check_net(self, &host, api_name)
+ }
+
#[inline(always)]
fn check_read_with_api_name(
&mut self,
path: &str,
api_name: Option<&str>,
- ) -> Result<PathBuf, AnyError> {
+ ) -> Result<PathBuf, PermissionCheckError> {
deno_permissions::PermissionsContainer::check_read_with_api_name(
self, path, api_name,
)
@@ -100,7 +118,7 @@ impl NodePermissions for deno_permissions::PermissionsContainer {
fn check_read_path<'a>(
&mut self,
path: &'a Path,
- ) -> Result<Cow<'a, Path>, AnyError> {
+ ) -> Result<Cow<'a, Path>, PermissionCheckError> {
deno_permissions::PermissionsContainer::check_read_path(self, path, None)
}
@@ -113,28 +131,37 @@ impl NodePermissions for deno_permissions::PermissionsContainer {
&mut self,
path: &str,
api_name: Option<&str>,
- ) -> Result<PathBuf, AnyError> {
+ ) -> Result<PathBuf, PermissionCheckError> {
deno_permissions::PermissionsContainer::check_write_with_api_name(
self, path, api_name,
)
}
- fn check_sys(&mut self, kind: &str, api_name: &str) -> Result<(), AnyError> {
+ fn check_sys(
+ &mut self,
+ kind: &str,
+ api_name: &str,
+ ) -> Result<(), PermissionCheckError> {
deno_permissions::PermissionsContainer::check_sys(self, kind, api_name)
}
}
#[allow(clippy::disallowed_types)]
-pub type NodeRequireResolverRc =
- deno_fs::sync::MaybeArc<dyn NodeRequireResolver>;
+pub type NodeRequireLoaderRc = std::rc::Rc<dyn NodeRequireLoader>;
-pub trait NodeRequireResolver: std::fmt::Debug + MaybeSend + MaybeSync {
+pub trait NodeRequireLoader {
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn NodePermissions,
path: &'a Path,
) -> Result<Cow<'a, Path>, AnyError>;
+
+ fn load_text_file_lossy(&self, path: &Path) -> Result<String, AnyError>;
+
+ /// Get if the module kind is maybe CJS and loading should determine
+ /// if its CJS or ESM.
+ fn is_maybe_cjs(&self, specifier: &Url) -> Result<bool, ClosestPkgJsonError>;
}
pub static NODE_ENV_VAR_ALLOWLIST: Lazy<HashSet<String>> = Lazy::new(|| {
@@ -152,10 +179,12 @@ fn op_node_build_os() -> String {
env!("TARGET").split('-').nth(2).unwrap().to_string()
}
+#[derive(Clone)]
pub struct NodeExtInitServices {
- pub node_require_resolver: NodeRequireResolverRc,
+ pub node_require_loader: NodeRequireLoaderRc,
pub node_resolver: NodeResolverRc,
- pub npm_resolver: NpmResolverRc,
+ pub npm_resolver: NpmPackageFolderResolverRc,
+ pub pkg_json_resolver: PackageJsonResolverRc,
}
deno_core::extension!(deno_node,
@@ -321,6 +350,7 @@ deno_core::extension!(deno_node,
ops::zlib::op_zlib_write,
ops::zlib::op_zlib_init,
ops::zlib::op_zlib_reset,
+ ops::zlib::op_zlib_crc32,
ops::zlib::brotli::op_brotli_compress,
ops::zlib::brotli::op_brotli_compress_async,
ops::zlib::brotli::op_create_brotli_compress,
@@ -348,7 +378,7 @@ deno_core::extension!(deno_node,
ops::http2::op_http2_send_response,
ops::os::op_node_os_get_priority<P>,
ops::os::op_node_os_set_priority<P>,
- ops::os::op_node_os_username<P>,
+ ops::os::op_node_os_user_info<P>,
ops::os::op_geteuid<P>,
ops::os::op_getegid<P>,
ops::os::op_cpus<P>,
@@ -360,6 +390,7 @@ deno_core::extension!(deno_node,
ops::require::op_require_proxy_path,
ops::require::op_require_is_deno_dir_package,
ops::require::op_require_resolve_deno_dir,
+ ops::require::op_require_is_maybe_cjs,
ops::require::op_require_is_request_relative,
ops::require::op_require_resolve_lookup_paths,
ops::require::op_require_try_self_parent_path<P>,
@@ -373,7 +404,6 @@ deno_core::extension!(deno_node,
ops::require::op_require_read_file<P>,
ops::require::op_require_as_file_path,
ops::require::op_require_resolve_exports<P>,
- ops::require::op_require_read_closest_package_json<P>,
ops::require::op_require_read_package_scope<P>,
ops::require::op_require_package_imports_resolve<P>,
ops::require::op_require_break_on_next_statement,
@@ -387,6 +417,18 @@ deno_core::extension!(deno_node,
ops::process::op_node_process_kill,
ops::process::op_process_abort,
ops::tls::op_get_root_certificates,
+ ops::inspector::op_inspector_open<P>,
+ ops::inspector::op_inspector_close,
+ ops::inspector::op_inspector_url,
+ ops::inspector::op_inspector_wait,
+ ops::inspector::op_inspector_connect<P>,
+ ops::inspector::op_inspector_dispatch,
+ ops::inspector::op_inspector_disconnect,
+ ops::inspector::op_inspector_emit_protocol_event,
+ ops::inspector::op_inspector_enabled,
+ ],
+ objects = [
+ ops::perf_hooks::EldHistogram
],
esm_entry_point = "ext:deno_node/02_init.js",
esm = [
@@ -469,6 +511,7 @@ deno_core::extension!(deno_node,
"internal_binding/constants.ts",
"internal_binding/crypto.ts",
"internal_binding/handle_wrap.ts",
+ "internal_binding/http_parser.ts",
"internal_binding/mod.ts",
"internal_binding/node_file.ts",
"internal_binding/node_options.ts",
@@ -594,8 +637,8 @@ deno_core::extension!(deno_node,
"node:http" = "http.ts",
"node:http2" = "http2.ts",
"node:https" = "https.ts",
- "node:inspector" = "inspector.ts",
- "node:inspector/promises" = "inspector.ts",
+ "node:inspector" = "inspector.js",
+ "node:inspector/promises" = "inspector/promises.js",
"node:module" = "01_require.js",
"node:net" = "net.ts",
"node:os" = "os.ts",
@@ -638,9 +681,10 @@ deno_core::extension!(deno_node,
state.put(options.fs.clone());
if let Some(init) = &options.maybe_init {
- state.put(init.node_require_resolver.clone());
+ state.put(init.node_require_loader.clone());
state.put(init.node_resolver.clone());
state.put(init.npm_resolver.clone());
+ state.put(init.pkg_json_resolver.clone());
}
},
global_template_middleware = global_template_middleware,
@@ -760,33 +804,16 @@ deno_core::extension!(deno_node,
},
);
-pub fn load_cjs_module(
- js_runtime: &mut JsRuntime,
- module: &str,
- main: bool,
- inspect_brk: bool,
-) -> Result<(), AnyError> {
- fn escape_for_single_quote_string(text: &str) -> String {
- text.replace('\\', r"\\").replace('\'', r"\'")
- }
-
- let source_code = format!(
- r#"(function loadCjsModule(moduleName, isMain, inspectBrk) {{
- Deno[Deno.internal].node.loadCjsModule(moduleName, isMain, inspectBrk);
- }})('{module}', {main}, {inspect_brk});"#,
- main = main,
- module = escape_for_single_quote_string(module),
- inspect_brk = inspect_brk,
- );
-
- js_runtime.execute_script(located_script_name!(), source_code)?;
- Ok(())
-}
-
pub type NodeResolver = node_resolver::NodeResolver<DenoFsNodeResolverEnv>;
#[allow(clippy::disallowed_types)]
pub type NodeResolverRc =
deno_fs::sync::MaybeArc<node_resolver::NodeResolver<DenoFsNodeResolverEnv>>;
+pub type PackageJsonResolver =
+ node_resolver::PackageJsonResolver<DenoFsNodeResolverEnv>;
+#[allow(clippy::disallowed_types)]
+pub type PackageJsonResolverRc = deno_fs::sync::MaybeArc<
+ node_resolver::PackageJsonResolver<DenoFsNodeResolverEnv>,
+>;
#[derive(Debug)]
pub struct DenoFsNodeResolverEnv {
diff --git a/ext/node/ops/blocklist.rs b/ext/node/ops/blocklist.rs
index 332cdda8f..6c64d68ec 100644
--- a/ext/node/ops/blocklist.rs
+++ b/ext/node/ops/blocklist.rs
@@ -7,9 +7,6 @@ use std::net::Ipv4Addr;
use std::net::Ipv6Addr;
use std::net::SocketAddr;
-use deno_core::anyhow::anyhow;
-use deno_core::anyhow::bail;
-use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::OpState;
@@ -27,13 +24,25 @@ impl deno_core::GarbageCollected for BlockListResource {}
#[derive(Serialize)]
struct SocketAddressSerialization(String, String);
+#[derive(Debug, thiserror::Error)]
+pub enum BlocklistError {
+ #[error("{0}")]
+ AddrParse(#[from] std::net::AddrParseError),
+ #[error("{0}")]
+ IpNetwork(#[from] ipnetwork::IpNetworkError),
+ #[error("Invalid address")]
+ InvalidAddress,
+ #[error("IP version mismatch between start and end addresses")]
+ IpVersionMismatch,
+}
+
#[op2(fast)]
pub fn op_socket_address_parse(
state: &mut OpState,
#[string] addr: &str,
#[smi] port: u16,
#[string] family: &str,
-) -> Result<bool, AnyError> {
+) -> Result<bool, BlocklistError> {
let ip = addr.parse::<IpAddr>()?;
let parsed: SocketAddr = SocketAddr::new(ip, port);
let parsed_ip_str = parsed.ip().to_string();
@@ -52,7 +61,7 @@ pub fn op_socket_address_parse(
Ok(false)
}
} else {
- Err(anyhow!("Invalid address"))
+ Err(BlocklistError::InvalidAddress)
}
}
@@ -60,8 +69,8 @@ pub fn op_socket_address_parse(
#[serde]
pub fn op_socket_address_get_serialization(
state: &mut OpState,
-) -> Result<SocketAddressSerialization, AnyError> {
- Ok(state.take::<SocketAddressSerialization>())
+) -> SocketAddressSerialization {
+ state.take::<SocketAddressSerialization>()
}
#[op2]
@@ -77,7 +86,7 @@ pub fn op_blocklist_new() -> BlockListResource {
pub fn op_blocklist_add_address(
#[cppgc] wrap: &BlockListResource,
#[string] addr: &str,
-) -> Result<(), AnyError> {
+) -> Result<(), BlocklistError> {
wrap.blocklist.borrow_mut().add_address(addr)
}
@@ -86,7 +95,7 @@ pub fn op_blocklist_add_range(
#[cppgc] wrap: &BlockListResource,
#[string] start: &str,
#[string] end: &str,
-) -> Result<bool, AnyError> {
+) -> Result<bool, BlocklistError> {
wrap.blocklist.borrow_mut().add_range(start, end)
}
@@ -95,7 +104,7 @@ pub fn op_blocklist_add_subnet(
#[cppgc] wrap: &BlockListResource,
#[string] addr: &str,
#[smi] prefix: u8,
-) -> Result<(), AnyError> {
+) -> Result<(), BlocklistError> {
wrap.blocklist.borrow_mut().add_subnet(addr, prefix)
}
@@ -104,7 +113,7 @@ pub fn op_blocklist_check(
#[cppgc] wrap: &BlockListResource,
#[string] addr: &str,
#[string] r#type: &str,
-) -> Result<bool, AnyError> {
+) -> Result<bool, BlocklistError> {
wrap.blocklist.borrow().check(addr, r#type)
}
@@ -123,7 +132,7 @@ impl BlockList {
&mut self,
addr: IpAddr,
prefix: Option<u8>,
- ) -> Result<(), AnyError> {
+ ) -> Result<(), BlocklistError> {
match addr {
IpAddr::V4(addr) => {
let ipv4_prefix = prefix.unwrap_or(32);
@@ -154,7 +163,7 @@ impl BlockList {
Ok(())
}
- pub fn add_address(&mut self, address: &str) -> Result<(), AnyError> {
+ pub fn add_address(&mut self, address: &str) -> Result<(), BlocklistError> {
let ip: IpAddr = address.parse()?;
self.map_addr_add_network(ip, None)?;
Ok(())
@@ -164,7 +173,7 @@ impl BlockList {
&mut self,
start: &str,
end: &str,
- ) -> Result<bool, AnyError> {
+ ) -> Result<bool, BlocklistError> {
let start_ip: IpAddr = start.parse()?;
let end_ip: IpAddr = end.parse()?;
@@ -193,25 +202,33 @@ impl BlockList {
self.map_addr_add_network(IpAddr::V6(addr), None)?;
}
}
- _ => bail!("IP version mismatch between start and end addresses"),
+ _ => return Err(BlocklistError::IpVersionMismatch),
}
Ok(true)
}
- pub fn add_subnet(&mut self, addr: &str, prefix: u8) -> Result<(), AnyError> {
+ pub fn add_subnet(
+ &mut self,
+ addr: &str,
+ prefix: u8,
+ ) -> Result<(), BlocklistError> {
let ip: IpAddr = addr.parse()?;
self.map_addr_add_network(ip, Some(prefix))?;
Ok(())
}
- pub fn check(&self, addr: &str, r#type: &str) -> Result<bool, AnyError> {
+ pub fn check(
+ &self,
+ addr: &str,
+ r#type: &str,
+ ) -> Result<bool, BlocklistError> {
let addr: IpAddr = addr.parse()?;
let family = r#type.to_lowercase();
if family == "ipv4" && addr.is_ipv4() || family == "ipv6" && addr.is_ipv6()
{
Ok(self.rules.iter().any(|net| net.contains(addr)))
} else {
- Err(anyhow!("Invalid address"))
+ Err(BlocklistError::InvalidAddress)
}
}
}
diff --git a/ext/node/ops/crypto/cipher.rs b/ext/node/ops/crypto/cipher.rs
index b80aa33fe..ec45146b4 100644
--- a/ext/node/ops/crypto/cipher.rs
+++ b/ext/node/ops/crypto/cipher.rs
@@ -4,9 +4,6 @@ use aes::cipher::block_padding::Pkcs7;
use aes::cipher::BlockDecryptMut;
use aes::cipher::BlockEncryptMut;
use aes::cipher::KeyIvInit;
-use deno_core::error::range_error;
-use deno_core::error::type_error;
-use deno_core::error::AnyError;
use deno_core::Resource;
use digest::generic_array::GenericArray;
use digest::KeyInit;
@@ -50,8 +47,22 @@ pub struct DecipherContext {
decipher: Rc<RefCell<Decipher>>,
}
+#[derive(Debug, thiserror::Error)]
+pub enum CipherContextError {
+ #[error("Cipher context is already in use")]
+ ContextInUse,
+ #[error("{0}")]
+ Resource(deno_core::error::AnyError),
+ #[error(transparent)]
+ Cipher(#[from] CipherError),
+}
+
impl CipherContext {
- pub fn new(algorithm: &str, key: &[u8], iv: &[u8]) -> Result<Self, AnyError> {
+ pub fn new(
+ algorithm: &str,
+ key: &[u8],
+ iv: &[u8],
+ ) -> Result<Self, CipherContextError> {
Ok(Self {
cipher: Rc::new(RefCell::new(Cipher::new(algorithm, key, iv)?)),
})
@@ -74,16 +85,31 @@ impl CipherContext {
auto_pad: bool,
input: &[u8],
output: &mut [u8],
- ) -> Result<Tag, AnyError> {
+ ) -> Result<Tag, CipherContextError> {
Rc::try_unwrap(self.cipher)
- .map_err(|_| type_error("Cipher context is already in use"))?
+ .map_err(|_| CipherContextError::ContextInUse)?
.into_inner()
.r#final(auto_pad, input, output)
+ .map_err(Into::into)
}
}
+#[derive(Debug, thiserror::Error)]
+pub enum DecipherContextError {
+ #[error("Decipher context is already in use")]
+ ContextInUse,
+ #[error("{0}")]
+ Resource(deno_core::error::AnyError),
+ #[error(transparent)]
+ Decipher(#[from] DecipherError),
+}
+
impl DecipherContext {
- pub fn new(algorithm: &str, key: &[u8], iv: &[u8]) -> Result<Self, AnyError> {
+ pub fn new(
+ algorithm: &str,
+ key: &[u8],
+ iv: &[u8],
+ ) -> Result<Self, DecipherContextError> {
Ok(Self {
decipher: Rc::new(RefCell::new(Decipher::new(algorithm, key, iv)?)),
})
@@ -103,11 +129,12 @@ impl DecipherContext {
input: &[u8],
output: &mut [u8],
auth_tag: &[u8],
- ) -> Result<(), AnyError> {
+ ) -> Result<(), DecipherContextError> {
Rc::try_unwrap(self.decipher)
- .map_err(|_| type_error("Decipher context is already in use"))?
+ .map_err(|_| DecipherContextError::ContextInUse)?
.into_inner()
.r#final(auto_pad, input, output, auth_tag)
+ .map_err(Into::into)
}
}
@@ -123,12 +150,26 @@ impl Resource for DecipherContext {
}
}
+#[derive(Debug, thiserror::Error)]
+pub enum CipherError {
+ #[error("IV length must be 12 bytes")]
+ InvalidIvLength,
+ #[error("Invalid key length")]
+ InvalidKeyLength,
+ #[error("Invalid initialization vector")]
+ InvalidInitializationVector,
+ #[error("Cannot pad the input data")]
+ CannotPadInputData,
+ #[error("Unknown cipher {0}")]
+ UnknownCipher(String),
+}
+
impl Cipher {
fn new(
algorithm_name: &str,
key: &[u8],
iv: &[u8],
- ) -> Result<Self, AnyError> {
+ ) -> Result<Self, CipherError> {
use Cipher::*;
Ok(match algorithm_name {
"aes-128-cbc" => {
@@ -139,7 +180,7 @@ impl Cipher {
"aes-256-ecb" => Aes256Ecb(Box::new(ecb::Encryptor::new(key.into()))),
"aes-128-gcm" => {
if iv.len() != 12 {
- return Err(type_error("IV length must be 12 bytes"));
+ return Err(CipherError::InvalidIvLength);
}
let cipher =
@@ -149,7 +190,7 @@ impl Cipher {
}
"aes-256-gcm" => {
if iv.len() != 12 {
- return Err(type_error("IV length must be 12 bytes"));
+ return Err(CipherError::InvalidIvLength);
}
let cipher =
@@ -159,15 +200,15 @@ impl Cipher {
}
"aes256" | "aes-256-cbc" => {
if key.len() != 32 {
- return Err(range_error("Invalid key length"));
+ return Err(CipherError::InvalidKeyLength);
}
if iv.len() != 16 {
- return Err(type_error("Invalid initialization vector"));
+ return Err(CipherError::InvalidInitializationVector);
}
Aes256Cbc(Box::new(cbc::Encryptor::new(key.into(), iv.into())))
}
- _ => return Err(type_error(format!("Unknown cipher {algorithm_name}"))),
+ _ => return Err(CipherError::UnknownCipher(algorithm_name.to_string())),
})
}
@@ -235,14 +276,14 @@ impl Cipher {
auto_pad: bool,
input: &[u8],
output: &mut [u8],
- ) -> Result<Tag, AnyError> {
+ ) -> Result<Tag, CipherError> {
assert!(input.len() < 16);
use Cipher::*;
match (self, auto_pad) {
(Aes128Cbc(encryptor), true) => {
let _ = (*encryptor)
.encrypt_padded_b2b_mut::<Pkcs7>(input, output)
- .map_err(|_| type_error("Cannot pad the input data"))?;
+ .map_err(|_| CipherError::CannotPadInputData)?;
Ok(None)
}
(Aes128Cbc(mut encryptor), false) => {
@@ -255,7 +296,7 @@ impl Cipher {
(Aes128Ecb(encryptor), true) => {
let _ = (*encryptor)
.encrypt_padded_b2b_mut::<Pkcs7>(input, output)
- .map_err(|_| type_error("Cannot pad the input data"))?;
+ .map_err(|_| CipherError::CannotPadInputData)?;
Ok(None)
}
(Aes128Ecb(mut encryptor), false) => {
@@ -268,7 +309,7 @@ impl Cipher {
(Aes192Ecb(encryptor), true) => {
let _ = (*encryptor)
.encrypt_padded_b2b_mut::<Pkcs7>(input, output)
- .map_err(|_| type_error("Cannot pad the input data"))?;
+ .map_err(|_| CipherError::CannotPadInputData)?;
Ok(None)
}
(Aes192Ecb(mut encryptor), false) => {
@@ -281,7 +322,7 @@ impl Cipher {
(Aes256Ecb(encryptor), true) => {
let _ = (*encryptor)
.encrypt_padded_b2b_mut::<Pkcs7>(input, output)
- .map_err(|_| type_error("Cannot pad the input data"))?;
+ .map_err(|_| CipherError::CannotPadInputData)?;
Ok(None)
}
(Aes256Ecb(mut encryptor), false) => {
@@ -296,7 +337,7 @@ impl Cipher {
(Aes256Cbc(encryptor), true) => {
let _ = (*encryptor)
.encrypt_padded_b2b_mut::<Pkcs7>(input, output)
- .map_err(|_| type_error("Cannot pad the input data"))?;
+ .map_err(|_| CipherError::CannotPadInputData)?;
Ok(None)
}
(Aes256Cbc(mut encryptor), false) => {
@@ -319,12 +360,32 @@ impl Cipher {
}
}
+#[derive(Debug, thiserror::Error)]
+pub enum DecipherError {
+ #[error("IV length must be 12 bytes")]
+ InvalidIvLength,
+ #[error("Invalid key length")]
+ InvalidKeyLength,
+ #[error("Invalid initialization vector")]
+ InvalidInitializationVector,
+ #[error("Cannot unpad the input data")]
+ CannotUnpadInputData,
+ #[error("Failed to authenticate data")]
+ DataAuthenticationFailed,
+ #[error("setAutoPadding(false) not supported for Aes128Gcm yet")]
+ SetAutoPaddingFalseAes128GcmUnsupported,
+ #[error("setAutoPadding(false) not supported for Aes256Gcm yet")]
+ SetAutoPaddingFalseAes256GcmUnsupported,
+ #[error("Unknown cipher {0}")]
+ UnknownCipher(String),
+}
+
impl Decipher {
fn new(
algorithm_name: &str,
key: &[u8],
iv: &[u8],
- ) -> Result<Self, AnyError> {
+ ) -> Result<Self, DecipherError> {
use Decipher::*;
Ok(match algorithm_name {
"aes-128-cbc" => {
@@ -335,7 +396,7 @@ impl Decipher {
"aes-256-ecb" => Aes256Ecb(Box::new(ecb::Decryptor::new(key.into()))),
"aes-128-gcm" => {
if iv.len() != 12 {
- return Err(type_error("IV length must be 12 bytes"));
+ return Err(DecipherError::InvalidIvLength);
}
let decipher =
@@ -345,7 +406,7 @@ impl Decipher {
}
"aes-256-gcm" => {
if iv.len() != 12 {
- return Err(type_error("IV length must be 12 bytes"));
+ return Err(DecipherError::InvalidIvLength);
}
let decipher =
@@ -355,15 +416,17 @@ impl Decipher {
}
"aes256" | "aes-256-cbc" => {
if key.len() != 32 {
- return Err(range_error("Invalid key length"));
+ return Err(DecipherError::InvalidKeyLength);
}
if iv.len() != 16 {
- return Err(type_error("Invalid initialization vector"));
+ return Err(DecipherError::InvalidInitializationVector);
}
Aes256Cbc(Box::new(cbc::Decryptor::new(key.into(), iv.into())))
}
- _ => return Err(type_error(format!("Unknown cipher {algorithm_name}"))),
+ _ => {
+ return Err(DecipherError::UnknownCipher(algorithm_name.to_string()))
+ }
})
}
@@ -432,14 +495,14 @@ impl Decipher {
input: &[u8],
output: &mut [u8],
auth_tag: &[u8],
- ) -> Result<(), AnyError> {
+ ) -> Result<(), DecipherError> {
use Decipher::*;
match (self, auto_pad) {
(Aes128Cbc(decryptor), true) => {
assert!(input.len() == 16);
let _ = (*decryptor)
.decrypt_padded_b2b_mut::<Pkcs7>(input, output)
- .map_err(|_| type_error("Cannot unpad the input data"))?;
+ .map_err(|_| DecipherError::CannotUnpadInputData)?;
Ok(())
}
(Aes128Cbc(mut decryptor), false) => {
@@ -453,7 +516,7 @@ impl Decipher {
assert!(input.len() == 16);
let _ = (*decryptor)
.decrypt_padded_b2b_mut::<Pkcs7>(input, output)
- .map_err(|_| type_error("Cannot unpad the input data"))?;
+ .map_err(|_| DecipherError::CannotUnpadInputData)?;
Ok(())
}
(Aes128Ecb(mut decryptor), false) => {
@@ -467,7 +530,7 @@ impl Decipher {
assert!(input.len() == 16);
let _ = (*decryptor)
.decrypt_padded_b2b_mut::<Pkcs7>(input, output)
- .map_err(|_| type_error("Cannot unpad the input data"))?;
+ .map_err(|_| DecipherError::CannotUnpadInputData)?;
Ok(())
}
(Aes192Ecb(mut decryptor), false) => {
@@ -481,7 +544,7 @@ impl Decipher {
assert!(input.len() == 16);
let _ = (*decryptor)
.decrypt_padded_b2b_mut::<Pkcs7>(input, output)
- .map_err(|_| type_error("Cannot unpad the input data"))?;
+ .map_err(|_| DecipherError::CannotUnpadInputData)?;
Ok(())
}
(Aes256Ecb(mut decryptor), false) => {
@@ -496,28 +559,28 @@ impl Decipher {
if tag.as_slice() == auth_tag {
Ok(())
} else {
- Err(type_error("Failed to authenticate data"))
+ Err(DecipherError::DataAuthenticationFailed)
}
}
- (Aes128Gcm(_), false) => Err(type_error(
- "setAutoPadding(false) not supported for Aes256Gcm yet",
- )),
+ (Aes128Gcm(_), false) => {
+ Err(DecipherError::SetAutoPaddingFalseAes128GcmUnsupported)
+ }
(Aes256Gcm(decipher), true) => {
let tag = decipher.finish();
if tag.as_slice() == auth_tag {
Ok(())
} else {
- Err(type_error("Failed to authenticate data"))
+ Err(DecipherError::DataAuthenticationFailed)
}
}
- (Aes256Gcm(_), false) => Err(type_error(
- "setAutoPadding(false) not supported for Aes256Gcm yet",
- )),
+ (Aes256Gcm(_), false) => {
+ Err(DecipherError::SetAutoPaddingFalseAes256GcmUnsupported)
+ }
(Aes256Cbc(decryptor), true) => {
assert!(input.len() == 16);
let _ = (*decryptor)
.decrypt_padded_b2b_mut::<Pkcs7>(input, output)
- .map_err(|_| type_error("Cannot unpad the input data"))?;
+ .map_err(|_| DecipherError::CannotUnpadInputData)?;
Ok(())
}
(Aes256Cbc(mut decryptor), false) => {
diff --git a/ext/node/ops/crypto/digest.rs b/ext/node/ops/crypto/digest.rs
index 293e8e063..a7d8fb51f 100644
--- a/ext/node/ops/crypto/digest.rs
+++ b/ext/node/ops/crypto/digest.rs
@@ -1,6 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-use deno_core::error::generic_error;
-use deno_core::error::AnyError;
use deno_core::GarbageCollected;
use digest::Digest;
use digest::DynDigest;
@@ -19,7 +17,7 @@ impl Hasher {
pub fn new(
algorithm: &str,
output_length: Option<usize>,
- ) -> Result<Self, AnyError> {
+ ) -> Result<Self, HashError> {
let hash = Hash::new(algorithm, output_length)?;
Ok(Self {
@@ -44,7 +42,7 @@ impl Hasher {
pub fn clone_inner(
&self,
output_length: Option<usize>,
- ) -> Result<Option<Self>, AnyError> {
+ ) -> Result<Option<Self>, HashError> {
let hash = self.hash.borrow();
let Some(hash) = hash.as_ref() else {
return Ok(None);
@@ -184,11 +182,19 @@ pub enum Hash {
use Hash::*;
+#[derive(Debug, thiserror::Error)]
+pub enum HashError {
+ #[error("Output length mismatch for non-extendable algorithm")]
+ OutputLengthMismatch,
+ #[error("Digest method not supported: {0}")]
+ DigestMethodUnsupported(String),
+}
+
impl Hash {
pub fn new(
algorithm_name: &str,
output_length: Option<usize>,
- ) -> Result<Self, AnyError> {
+ ) -> Result<Self, HashError> {
match algorithm_name {
"shake128" => return Ok(Shake128(Default::default(), output_length)),
"shake256" => return Ok(Shake256(Default::default(), output_length)),
@@ -201,17 +207,13 @@ impl Hash {
let digest: D = Digest::new();
if let Some(length) = output_length {
if length != digest.output_size() {
- return Err(generic_error(
- "Output length mismatch for non-extendable algorithm",
- ));
+ return Err(HashError::OutputLengthMismatch);
}
}
FixedSize(Box::new(digest))
},
_ => {
- return Err(generic_error(format!(
- "Digest method not supported: {algorithm_name}"
- )))
+ return Err(HashError::DigestMethodUnsupported(algorithm_name.to_string()))
}
);
@@ -243,14 +245,12 @@ impl Hash {
pub fn clone_hash(
&self,
output_length: Option<usize>,
- ) -> Result<Self, AnyError> {
+ ) -> Result<Self, HashError> {
let hash = match self {
FixedSize(context) => {
if let Some(length) = output_length {
if length != context.output_size() {
- return Err(generic_error(
- "Output length mismatch for non-extendable algorithm",
- ));
+ return Err(HashError::OutputLengthMismatch);
}
}
FixedSize(context.box_clone())
diff --git a/ext/node/ops/crypto/keys.rs b/ext/node/ops/crypto/keys.rs
index 867b34e04..f164972d4 100644
--- a/ext/node/ops/crypto/keys.rs
+++ b/ext/node/ops/crypto/keys.rs
@@ -4,9 +4,7 @@ use std::borrow::Cow;
use std::cell::RefCell;
use base64::Engine;
-use deno_core::error::generic_error;
use deno_core::error::type_error;
-use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::serde_v8::BigInt as V8BigInt;
use deno_core::unsync::spawn_blocking;
@@ -46,6 +44,7 @@ use spki::der::Reader as _;
use spki::DecodePublicKey as _;
use spki::EncodePublicKey as _;
use spki::SubjectPublicKeyInfoRef;
+use x509_parser::error::X509Error;
use x509_parser::x509;
use super::dh;
@@ -236,9 +235,11 @@ impl RsaPssPrivateKey {
}
impl EcPublicKey {
- pub fn to_jwk(&self) -> Result<elliptic_curve::JwkEcKey, AnyError> {
+ pub fn to_jwk(&self) -> Result<JwkEcKey, AsymmetricPublicKeyJwkError> {
match self {
- EcPublicKey::P224(_) => Err(type_error("Unsupported JWK EC curve: P224")),
+ EcPublicKey::P224(_) => {
+ Err(AsymmetricPublicKeyJwkError::UnsupportedJwkEcCurveP224)
+ }
EcPublicKey::P256(key) => Ok(key.to_jwk()),
EcPublicKey::P384(key) => Ok(key.to_jwk()),
}
@@ -363,49 +364,201 @@ impl<'a> TryFrom<rsa::pkcs8::der::asn1::AnyRef<'a>> for RsaPssParameters<'a> {
}
}
+#[derive(Debug, thiserror::Error)]
+pub enum X509PublicKeyError {
+ #[error(transparent)]
+ X509(#[from] x509_parser::error::X509Error),
+ #[error(transparent)]
+ Rsa(#[from] rsa::Error),
+ #[error(transparent)]
+ Asn1(#[from] x509_parser::der_parser::asn1_rs::Error),
+ #[error(transparent)]
+ Ec(#[from] elliptic_curve::Error),
+ #[error("unsupported ec named curve")]
+ UnsupportedEcNamedCurve,
+ #[error("missing ec parameters")]
+ MissingEcParameters,
+ #[error("malformed DSS public key")]
+ MalformedDssPublicKey,
+ #[error("unsupported x509 public key type")]
+ UnsupportedX509KeyType,
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum RsaJwkError {
+ #[error(transparent)]
+ Base64(#[from] base64::DecodeError),
+ #[error(transparent)]
+ Rsa(#[from] rsa::Error),
+ #[error("missing RSA private component")]
+ MissingRsaPrivateComponent,
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum EcJwkError {
+ #[error(transparent)]
+ Ec(#[from] elliptic_curve::Error),
+ #[error("unsupported curve: {0}")]
+ UnsupportedCurve(String),
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum EdRawError {
+ #[error(transparent)]
+ Ed25519Signature(#[from] ed25519_dalek::SignatureError),
+ #[error("invalid Ed25519 key")]
+ InvalidEd25519Key,
+ #[error("unsupported curve")]
+ UnsupportedCurve,
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum AsymmetricPrivateKeyError {
+ #[error("invalid PEM private key: not valid utf8 starting at byte {0}")]
+ InvalidPemPrivateKeyInvalidUtf8(usize),
+ #[error("invalid encrypted PEM private key")]
+ InvalidEncryptedPemPrivateKey,
+ #[error("invalid PEM private key")]
+ InvalidPemPrivateKey,
+ #[error("encrypted private key requires a passphrase to decrypt")]
+ EncryptedPrivateKeyRequiresPassphraseToDecrypt,
+ #[error("invalid PKCS#1 private key")]
+ InvalidPkcs1PrivateKey,
+ #[error("invalid SEC1 private key")]
+ InvalidSec1PrivateKey,
+ #[error("unsupported PEM label: {0}")]
+ UnsupportedPemLabel(String),
+ #[error(transparent)]
+ RsaPssParamsParse(#[from] RsaPssParamsParseError),
+ #[error("invalid encrypted PKCS#8 private key")]
+ InvalidEncryptedPkcs8PrivateKey,
+ #[error("invalid PKCS#8 private key")]
+ InvalidPkcs8PrivateKey,
+ #[error("PKCS#1 private key does not support encryption with passphrase")]
+ Pkcs1PrivateKeyDoesNotSupportEncryptionWithPassphrase,
+ #[error("SEC1 private key does not support encryption with passphrase")]
+ Sec1PrivateKeyDoesNotSupportEncryptionWithPassphrase,
+ #[error("unsupported ec named curve")]
+ UnsupportedEcNamedCurve,
+ #[error("invalid private key")]
+ InvalidPrivateKey,
+ #[error("invalid DSA private key")]
+ InvalidDsaPrivateKey,
+ #[error("malformed or missing named curve in ec parameters")]
+ MalformedOrMissingNamedCurveInEcParameters,
+ #[error("unsupported key type: {0}")]
+ UnsupportedKeyType(String),
+ #[error("unsupported key format: {0}")]
+ UnsupportedKeyFormat(String),
+ #[error("invalid x25519 private key")]
+ InvalidX25519PrivateKey,
+ #[error("x25519 private key is the wrong length")]
+ X25519PrivateKeyIsWrongLength,
+ #[error("invalid Ed25519 private key")]
+ InvalidEd25519PrivateKey,
+ #[error("missing dh parameters")]
+ MissingDhParameters,
+ #[error("unsupported private key oid")]
+ UnsupportedPrivateKeyOid,
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum AsymmetricPublicKeyError {
+ #[error("invalid PEM private key: not valid utf8 starting at byte {0}")]
+ InvalidPemPrivateKeyInvalidUtf8(usize),
+ #[error("invalid PEM public key")]
+ InvalidPemPublicKey,
+ #[error("invalid PKCS#1 public key")]
+ InvalidPkcs1PublicKey,
+ #[error(transparent)]
+ AsymmetricPrivateKey(#[from] AsymmetricPrivateKeyError),
+ #[error("invalid x509 certificate")]
+ InvalidX509Certificate,
+ #[error(transparent)]
+ X509(#[from] x509_parser::nom::Err<X509Error>),
+ #[error(transparent)]
+ X509PublicKey(#[from] X509PublicKeyError),
+ #[error("unsupported PEM label: {0}")]
+ UnsupportedPemLabel(String),
+ #[error("invalid SPKI public key")]
+ InvalidSpkiPublicKey,
+ #[error("unsupported key type: {0}")]
+ UnsupportedKeyType(String),
+ #[error("unsupported key format: {0}")]
+ UnsupportedKeyFormat(String),
+ #[error(transparent)]
+ Spki(#[from] spki::Error),
+ #[error(transparent)]
+ Pkcs1(#[from] rsa::pkcs1::Error),
+ #[error(transparent)]
+ RsaPssParamsParse(#[from] RsaPssParamsParseError),
+ #[error("malformed DSS public key")]
+ MalformedDssPublicKey,
+ #[error("malformed or missing named curve in ec parameters")]
+ MalformedOrMissingNamedCurveInEcParameters,
+ #[error("malformed or missing public key in ec spki")]
+ MalformedOrMissingPublicKeyInEcSpki,
+ #[error(transparent)]
+ Ec(#[from] elliptic_curve::Error),
+ #[error("unsupported ec named curve")]
+ UnsupportedEcNamedCurve,
+ #[error("malformed or missing public key in x25519 spki")]
+ MalformedOrMissingPublicKeyInX25519Spki,
+ #[error("x25519 public key is too short")]
+ X25519PublicKeyIsTooShort,
+ #[error("invalid Ed25519 public key")]
+ InvalidEd25519PublicKey,
+ #[error("missing dh parameters")]
+ MissingDhParameters,
+ #[error("malformed dh parameters")]
+ MalformedDhParameters,
+ #[error("malformed or missing public key in dh spki")]
+ MalformedOrMissingPublicKeyInDhSpki,
+ #[error("unsupported private key oid")]
+ UnsupportedPrivateKeyOid,
+}
+
impl KeyObjectHandle {
pub fn new_asymmetric_private_key_from_js(
key: &[u8],
format: &str,
typ: &str,
passphrase: Option<&[u8]>,
- ) -> Result<KeyObjectHandle, AnyError> {
+ ) -> Result<KeyObjectHandle, AsymmetricPrivateKeyError> {
let document = match format {
"pem" => {
let pem = std::str::from_utf8(key).map_err(|err| {
- type_error(format!(
- "invalid PEM private key: not valid utf8 starting at byte {}",
- err.valid_up_to()
- ))
+ AsymmetricPrivateKeyError::InvalidPemPrivateKeyInvalidUtf8(
+ err.valid_up_to(),
+ )
})?;
if let Some(passphrase) = passphrase {
- SecretDocument::from_pkcs8_encrypted_pem(pem, passphrase)
- .map_err(|_| type_error("invalid encrypted PEM private key"))?
+ SecretDocument::from_pkcs8_encrypted_pem(pem, passphrase).map_err(
+ |_| AsymmetricPrivateKeyError::InvalidEncryptedPemPrivateKey,
+ )?
} else {
let (label, doc) = SecretDocument::from_pem(pem)
- .map_err(|_| type_error("invalid PEM private key"))?;
+ .map_err(|_| AsymmetricPrivateKeyError::InvalidPemPrivateKey)?;
match label {
EncryptedPrivateKeyInfo::PEM_LABEL => {
- return Err(type_error(
- "encrypted private key requires a passphrase to decrypt",
- ))
+ return Err(AsymmetricPrivateKeyError::EncryptedPrivateKeyRequiresPassphraseToDecrypt);
}
PrivateKeyInfo::PEM_LABEL => doc,
rsa::pkcs1::RsaPrivateKey::PEM_LABEL => {
- SecretDocument::from_pkcs1_der(doc.as_bytes())
- .map_err(|_| type_error("invalid PKCS#1 private key"))?
+ SecretDocument::from_pkcs1_der(doc.as_bytes()).map_err(|_| {
+ AsymmetricPrivateKeyError::InvalidPkcs1PrivateKey
+ })?
}
sec1::EcPrivateKey::PEM_LABEL => {
SecretDocument::from_sec1_der(doc.as_bytes())
- .map_err(|_| type_error("invalid SEC1 private key"))?
+ .map_err(|_| AsymmetricPrivateKeyError::InvalidSec1PrivateKey)?
}
_ => {
- return Err(type_error(format!(
- "unsupported PEM label: {}",
- label
- )))
+ return Err(AsymmetricPrivateKeyError::UnsupportedPemLabel(
+ label.to_string(),
+ ))
}
}
}
@@ -413,54 +566,57 @@ impl KeyObjectHandle {
"der" => match typ {
"pkcs8" => {
if let Some(passphrase) = passphrase {
- SecretDocument::from_pkcs8_encrypted_der(key, passphrase)
- .map_err(|_| type_error("invalid encrypted PKCS#8 private key"))?
+ SecretDocument::from_pkcs8_encrypted_der(key, passphrase).map_err(
+ |_| AsymmetricPrivateKeyError::InvalidEncryptedPkcs8PrivateKey,
+ )?
} else {
SecretDocument::from_pkcs8_der(key)
- .map_err(|_| type_error("invalid PKCS#8 private key"))?
+ .map_err(|_| AsymmetricPrivateKeyError::InvalidPkcs8PrivateKey)?
}
}
"pkcs1" => {
if passphrase.is_some() {
- return Err(type_error(
- "PKCS#1 private key does not support encryption with passphrase",
- ));
+ return Err(AsymmetricPrivateKeyError::Pkcs1PrivateKeyDoesNotSupportEncryptionWithPassphrase);
}
SecretDocument::from_pkcs1_der(key)
- .map_err(|_| type_error("invalid PKCS#1 private key"))?
+ .map_err(|_| AsymmetricPrivateKeyError::InvalidPkcs1PrivateKey)?
}
"sec1" => {
if passphrase.is_some() {
- return Err(type_error(
- "SEC1 private key does not support encryption with passphrase",
- ));
+ return Err(AsymmetricPrivateKeyError::Sec1PrivateKeyDoesNotSupportEncryptionWithPassphrase);
}
SecretDocument::from_sec1_der(key)
- .map_err(|_| type_error("invalid SEC1 private key"))?
+ .map_err(|_| AsymmetricPrivateKeyError::InvalidSec1PrivateKey)?
+ }
+ _ => {
+ return Err(AsymmetricPrivateKeyError::UnsupportedKeyType(
+ typ.to_string(),
+ ))
}
- _ => return Err(type_error(format!("unsupported key type: {}", typ))),
},
_ => {
- return Err(type_error(format!("unsupported key format: {}", format)))
+ return Err(AsymmetricPrivateKeyError::UnsupportedKeyFormat(
+ format.to_string(),
+ ))
}
};
let pk_info = PrivateKeyInfo::try_from(document.as_bytes())
- .map_err(|_| type_error("invalid private key"))?;
+ .map_err(|_| AsymmetricPrivateKeyError::InvalidPrivateKey)?;
let alg = pk_info.algorithm.oid;
let private_key = match alg {
RSA_ENCRYPTION_OID => {
let private_key =
rsa::RsaPrivateKey::from_pkcs1_der(pk_info.private_key)
- .map_err(|_| type_error("invalid PKCS#1 private key"))?;
+ .map_err(|_| AsymmetricPrivateKeyError::InvalidPkcs1PrivateKey)?;
AsymmetricPrivateKey::Rsa(private_key)
}
RSASSA_PSS_OID => {
let details = parse_rsa_pss_params(pk_info.algorithm.parameters)?;
let private_key =
rsa::RsaPrivateKey::from_pkcs1_der(pk_info.private_key)
- .map_err(|_| type_error("invalid PKCS#1 private key"))?;
+ .map_err(|_| AsymmetricPrivateKeyError::InvalidPkcs1PrivateKey)?;
AsymmetricPrivateKey::RsaPss(RsaPssPrivateKey {
key: private_key,
details,
@@ -468,40 +624,43 @@ impl KeyObjectHandle {
}
DSA_OID => {
let private_key = dsa::SigningKey::try_from(pk_info)
- .map_err(|_| type_error("invalid DSA private key"))?;
+ .map_err(|_| AsymmetricPrivateKeyError::InvalidDsaPrivateKey)?;
AsymmetricPrivateKey::Dsa(private_key)
}
EC_OID => {
let named_curve = pk_info.algorithm.parameters_oid().map_err(|_| {
- type_error("malformed or missing named curve in ec parameters")
+ AsymmetricPrivateKeyError::MalformedOrMissingNamedCurveInEcParameters
})?;
match named_curve {
ID_SECP224R1_OID => {
- let secret_key =
- p224::SecretKey::from_sec1_der(pk_info.private_key)
- .map_err(|_| type_error("invalid SEC1 private key"))?;
+ let secret_key = p224::SecretKey::from_sec1_der(
+ pk_info.private_key,
+ )
+ .map_err(|_| AsymmetricPrivateKeyError::InvalidSec1PrivateKey)?;
AsymmetricPrivateKey::Ec(EcPrivateKey::P224(secret_key))
}
ID_SECP256R1_OID => {
- let secret_key =
- p256::SecretKey::from_sec1_der(pk_info.private_key)
- .map_err(|_| type_error("invalid SEC1 private key"))?;
+ let secret_key = p256::SecretKey::from_sec1_der(
+ pk_info.private_key,
+ )
+ .map_err(|_| AsymmetricPrivateKeyError::InvalidSec1PrivateKey)?;
AsymmetricPrivateKey::Ec(EcPrivateKey::P256(secret_key))
}
ID_SECP384R1_OID => {
- let secret_key =
- p384::SecretKey::from_sec1_der(pk_info.private_key)
- .map_err(|_| type_error("invalid SEC1 private key"))?;
+ let secret_key = p384::SecretKey::from_sec1_der(
+ pk_info.private_key,
+ )
+ .map_err(|_| AsymmetricPrivateKeyError::InvalidSec1PrivateKey)?;
AsymmetricPrivateKey::Ec(EcPrivateKey::P384(secret_key))
}
- _ => return Err(type_error("unsupported ec named curve")),
+ _ => return Err(AsymmetricPrivateKeyError::UnsupportedEcNamedCurve),
}
}
X25519_OID => {
let string_ref = OctetStringRef::from_der(pk_info.private_key)
- .map_err(|_| type_error("invalid x25519 private key"))?;
+ .map_err(|_| AsymmetricPrivateKeyError::InvalidX25519PrivateKey)?;
if string_ref.as_bytes().len() != 32 {
- return Err(type_error("x25519 private key is the wrong length"));
+ return Err(AsymmetricPrivateKeyError::X25519PrivateKeyIsWrongLength);
}
let mut bytes = [0; 32];
bytes.copy_from_slice(string_ref.as_bytes());
@@ -509,22 +668,22 @@ impl KeyObjectHandle {
}
ED25519_OID => {
let signing_key = ed25519_dalek::SigningKey::try_from(pk_info)
- .map_err(|_| type_error("invalid Ed25519 private key"))?;
+ .map_err(|_| AsymmetricPrivateKeyError::InvalidEd25519PrivateKey)?;
AsymmetricPrivateKey::Ed25519(signing_key)
}
DH_KEY_AGREEMENT_OID => {
let params = pk_info
.algorithm
.parameters
- .ok_or_else(|| type_error("missing dh parameters"))?;
+ .ok_or(AsymmetricPrivateKeyError::MissingDhParameters)?;
let params = pkcs3::DhParameter::from_der(&params.to_der().unwrap())
- .map_err(|_| type_error("malformed dh parameters"))?;
+ .map_err(|_| AsymmetricPrivateKeyError::MissingDhParameters)?;
AsymmetricPrivateKey::Dh(DhPrivateKey {
key: dh::PrivateKey::from_bytes(pk_info.private_key),
params,
})
}
- _ => return Err(type_error("unsupported private key oid")),
+ _ => return Err(AsymmetricPrivateKeyError::UnsupportedPrivateKeyOid),
};
Ok(KeyObjectHandle::AsymmetricPrivate(private_key))
@@ -532,7 +691,7 @@ impl KeyObjectHandle {
pub fn new_x509_public_key(
spki: &x509::SubjectPublicKeyInfo,
- ) -> Result<KeyObjectHandle, AnyError> {
+ ) -> Result<KeyObjectHandle, X509PublicKeyError> {
use x509_parser::der_parser::asn1_rs::oid;
use x509_parser::public_key::PublicKey;
@@ -565,18 +724,18 @@ impl KeyObjectHandle {
let public_key = p384::PublicKey::from_sec1_bytes(data)?;
AsymmetricPublicKey::Ec(EcPublicKey::P384(public_key))
}
- _ => return Err(type_error("unsupported ec named curve")),
+ _ => return Err(X509PublicKeyError::UnsupportedEcNamedCurve),
}
} else {
- return Err(type_error("missing ec parameters"));
+ return Err(X509PublicKeyError::MissingEcParameters);
}
}
PublicKey::DSA(_) => {
let verifying_key = dsa::VerifyingKey::from_public_key_der(spki.raw)
- .map_err(|_| type_error("malformed DSS public key"))?;
+ .map_err(|_| X509PublicKeyError::MalformedDssPublicKey)?;
AsymmetricPublicKey::Dsa(verifying_key)
}
- _ => return Err(type_error("unsupported x509 public key type")),
+ _ => return Err(X509PublicKeyError::UnsupportedX509KeyType),
};
Ok(KeyObjectHandle::AsymmetricPublic(key))
@@ -585,7 +744,7 @@ impl KeyObjectHandle {
pub fn new_rsa_jwk(
jwk: RsaJwkKey,
is_public: bool,
- ) -> Result<KeyObjectHandle, AnyError> {
+ ) -> Result<KeyObjectHandle, RsaJwkError> {
use base64::prelude::BASE64_URL_SAFE_NO_PAD;
let n = BASE64_URL_SAFE_NO_PAD.decode(jwk.n.as_bytes())?;
@@ -604,19 +763,19 @@ impl KeyObjectHandle {
let d = BASE64_URL_SAFE_NO_PAD.decode(
jwk
.d
- .ok_or_else(|| type_error("missing RSA private component"))?
+ .ok_or(RsaJwkError::MissingRsaPrivateComponent)?
.as_bytes(),
)?;
let p = BASE64_URL_SAFE_NO_PAD.decode(
jwk
.p
- .ok_or_else(|| type_error("missing RSA private component"))?
+ .ok_or(RsaJwkError::MissingRsaPrivateComponent)?
.as_bytes(),
)?;
let q = BASE64_URL_SAFE_NO_PAD.decode(
jwk
.q
- .ok_or_else(|| type_error("missing RSA private component"))?
+ .ok_or(RsaJwkError::MissingRsaPrivateComponent)?
.as_bytes(),
)?;
@@ -640,7 +799,7 @@ impl KeyObjectHandle {
pub fn new_ec_jwk(
jwk: &JwkEcKey,
is_public: bool,
- ) -> Result<KeyObjectHandle, AnyError> {
+ ) -> Result<KeyObjectHandle, EcJwkError> {
// https://datatracker.ietf.org/doc/html/rfc7518#section-6.2.1.1
let handle = match jwk.crv() {
"P-256" if is_public => {
@@ -660,7 +819,7 @@ impl KeyObjectHandle {
EcPrivateKey::P384(p384::SecretKey::from_jwk(jwk)?),
)),
_ => {
- return Err(type_error(format!("unsupported curve: {}", jwk.crv())));
+ return Err(EcJwkError::UnsupportedCurve(jwk.crv().to_string()));
}
};
@@ -671,12 +830,11 @@ impl KeyObjectHandle {
curve: &str,
data: &[u8],
is_public: bool,
- ) -> Result<KeyObjectHandle, AnyError> {
+ ) -> Result<KeyObjectHandle, EdRawError> {
match curve {
"Ed25519" => {
- let data = data
- .try_into()
- .map_err(|_| type_error("invalid Ed25519 key"))?;
+ let data =
+ data.try_into().map_err(|_| EdRawError::InvalidEd25519Key)?;
if !is_public {
Ok(KeyObjectHandle::AsymmetricPrivate(
AsymmetricPrivateKey::Ed25519(
@@ -692,9 +850,8 @@ impl KeyObjectHandle {
}
}
"X25519" => {
- let data: [u8; 32] = data
- .try_into()
- .map_err(|_| type_error("invalid x25519 key"))?;
+ let data: [u8; 32] =
+ data.try_into().map_err(|_| EdRawError::InvalidEd25519Key)?;
if !is_public {
Ok(KeyObjectHandle::AsymmetricPrivate(
AsymmetricPrivateKey::X25519(x25519_dalek::StaticSecret::from(
@@ -707,7 +864,7 @@ impl KeyObjectHandle {
))
}
}
- _ => Err(type_error("unsupported curve")),
+ _ => Err(EdRawError::UnsupportedCurve),
}
}
@@ -716,24 +873,23 @@ impl KeyObjectHandle {
format: &str,
typ: &str,
passphrase: Option<&[u8]>,
- ) -> Result<KeyObjectHandle, AnyError> {
+ ) -> Result<KeyObjectHandle, AsymmetricPublicKeyError> {
let document = match format {
"pem" => {
let pem = std::str::from_utf8(key).map_err(|err| {
- type_error(format!(
- "invalid PEM public key: not valid utf8 starting at byte {}",
- err.valid_up_to()
- ))
+ AsymmetricPublicKeyError::InvalidPemPrivateKeyInvalidUtf8(
+ err.valid_up_to(),
+ )
})?;
let (label, document) = Document::from_pem(pem)
- .map_err(|_| type_error("invalid PEM public key"))?;
+ .map_err(|_| AsymmetricPublicKeyError::InvalidPemPublicKey)?;
match label {
SubjectPublicKeyInfoRef::PEM_LABEL => document,
rsa::pkcs1::RsaPublicKey::PEM_LABEL => {
Document::from_pkcs1_der(document.as_bytes())
- .map_err(|_| type_error("invalid PKCS#1 public key"))?
+ .map_err(|_| AsymmetricPublicKeyError::InvalidPkcs1PublicKey)?
}
EncryptedPrivateKeyInfo::PEM_LABEL
| PrivateKeyInfo::PEM_LABEL
@@ -754,27 +910,36 @@ impl KeyObjectHandle {
}
"CERTIFICATE" => {
let (_, pem) = x509_parser::pem::parse_x509_pem(pem.as_bytes())
- .map_err(|_| type_error("invalid x509 certificate"))?;
+ .map_err(|_| AsymmetricPublicKeyError::InvalidX509Certificate)?;
let cert = pem.parse_x509()?;
let public_key = cert.tbs_certificate.subject_pki;
- return KeyObjectHandle::new_x509_public_key(&public_key);
+ return KeyObjectHandle::new_x509_public_key(&public_key)
+ .map_err(Into::into);
}
_ => {
- return Err(type_error(format!("unsupported PEM label: {}", label)))
+ return Err(AsymmetricPublicKeyError::UnsupportedPemLabel(
+ label.to_string(),
+ ))
}
}
}
"der" => match typ {
"pkcs1" => Document::from_pkcs1_der(key)
- .map_err(|_| type_error("invalid PKCS#1 public key"))?,
+ .map_err(|_| AsymmetricPublicKeyError::InvalidPkcs1PublicKey)?,
"spki" => Document::from_public_key_der(key)
- .map_err(|_| type_error("invalid SPKI public key"))?,
- _ => return Err(type_error(format!("unsupported key type: {}", typ))),
+ .map_err(|_| AsymmetricPublicKeyError::InvalidSpkiPublicKey)?,
+ _ => {
+ return Err(AsymmetricPublicKeyError::UnsupportedKeyType(
+ typ.to_string(),
+ ))
+ }
},
_ => {
- return Err(type_error(format!("unsupported key format: {}", format)))
+ return Err(AsymmetricPublicKeyError::UnsupportedKeyType(
+ format.to_string(),
+ ))
}
};
@@ -799,16 +964,16 @@ impl KeyObjectHandle {
}
DSA_OID => {
let verifying_key = dsa::VerifyingKey::try_from(spki)
- .map_err(|_| type_error("malformed DSS public key"))?;
+ .map_err(|_| AsymmetricPublicKeyError::MalformedDssPublicKey)?;
AsymmetricPublicKey::Dsa(verifying_key)
}
EC_OID => {
let named_curve = spki.algorithm.parameters_oid().map_err(|_| {
- type_error("malformed or missing named curve in ec parameters")
- })?;
- let data = spki.subject_public_key.as_bytes().ok_or_else(|| {
- type_error("malformed or missing public key in ec spki")
+ AsymmetricPublicKeyError::MalformedOrMissingNamedCurveInEcParameters
})?;
+ let data = spki.subject_public_key.as_bytes().ok_or(
+ AsymmetricPublicKeyError::MalformedOrMissingPublicKeyInEcSpki,
+ )?;
match named_curve {
ID_SECP224R1_OID => {
@@ -823,54 +988,68 @@ impl KeyObjectHandle {
let public_key = p384::PublicKey::from_sec1_bytes(data)?;
AsymmetricPublicKey::Ec(EcPublicKey::P384(public_key))
}
- _ => return Err(type_error("unsupported ec named curve")),
+ _ => return Err(AsymmetricPublicKeyError::UnsupportedEcNamedCurve),
}
}
X25519_OID => {
let mut bytes = [0; 32];
- let data = spki.subject_public_key.as_bytes().ok_or_else(|| {
- type_error("malformed or missing public key in x25519 spki")
- })?;
+ let data = spki.subject_public_key.as_bytes().ok_or(
+ AsymmetricPublicKeyError::MalformedOrMissingPublicKeyInX25519Spki,
+ )?;
if data.len() < 32 {
- return Err(type_error("x25519 public key is too short"));
+ return Err(AsymmetricPublicKeyError::X25519PublicKeyIsTooShort);
}
bytes.copy_from_slice(&data[0..32]);
AsymmetricPublicKey::X25519(x25519_dalek::PublicKey::from(bytes))
}
ED25519_OID => {
let verifying_key = ed25519_dalek::VerifyingKey::try_from(spki)
- .map_err(|_| type_error("invalid Ed25519 private key"))?;
+ .map_err(|_| AsymmetricPublicKeyError::InvalidEd25519PublicKey)?;
AsymmetricPublicKey::Ed25519(verifying_key)
}
DH_KEY_AGREEMENT_OID => {
let params = spki
.algorithm
.parameters
- .ok_or_else(|| type_error("missing dh parameters"))?;
+ .ok_or(AsymmetricPublicKeyError::MissingDhParameters)?;
let params = pkcs3::DhParameter::from_der(&params.to_der().unwrap())
- .map_err(|_| type_error("malformed dh parameters"))?;
+ .map_err(|_| AsymmetricPublicKeyError::MalformedDhParameters)?;
let Some(subject_public_key) = spki.subject_public_key.as_bytes()
else {
- return Err(type_error("malformed or missing public key in dh spki"));
+ return Err(
+ AsymmetricPublicKeyError::MalformedOrMissingPublicKeyInDhSpki,
+ );
};
AsymmetricPublicKey::Dh(DhPublicKey {
key: dh::PublicKey::from_bytes(subject_public_key),
params,
})
}
- _ => return Err(type_error("unsupported public key oid")),
+ _ => return Err(AsymmetricPublicKeyError::UnsupportedPrivateKeyOid),
};
Ok(KeyObjectHandle::AsymmetricPublic(public_key))
}
}
+#[derive(Debug, thiserror::Error)]
+pub enum RsaPssParamsParseError {
+ #[error("malformed pss private key parameters")]
+ MalformedPssPrivateKeyParameters,
+ #[error("unsupported pss hash algorithm")]
+ UnsupportedPssHashAlgorithm,
+ #[error("unsupported pss mask gen algorithm")]
+ UnsupportedPssMaskGenAlgorithm,
+ #[error("malformed or missing pss mask gen algorithm parameters")]
+ MalformedOrMissingPssMaskGenAlgorithm,
+}
+
fn parse_rsa_pss_params(
parameters: Option<AnyRef<'_>>,
-) -> Result<Option<RsaPssDetails>, deno_core::anyhow::Error> {
+) -> Result<Option<RsaPssDetails>, RsaPssParamsParseError> {
let details = if let Some(parameters) = parameters {
let params = RsaPssParameters::try_from(parameters)
- .map_err(|_| type_error("malformed pss private key parameters"))?;
+ .map_err(|_| RsaPssParamsParseError::MalformedPssPrivateKeyParameters)?;
let hash_algorithm = match params.hash_algorithm.map(|k| k.oid) {
Some(ID_SHA1_OID) => RsaPssHashAlgorithm::Sha1,
@@ -881,16 +1060,16 @@ fn parse_rsa_pss_params(
Some(ID_SHA512_224_OID) => RsaPssHashAlgorithm::Sha512_224,
Some(ID_SHA512_256_OID) => RsaPssHashAlgorithm::Sha512_256,
None => RsaPssHashAlgorithm::Sha1,
- _ => return Err(type_error("unsupported pss hash algorithm")),
+ _ => return Err(RsaPssParamsParseError::UnsupportedPssHashAlgorithm),
};
let mf1_hash_algorithm = match params.mask_gen_algorithm {
Some(alg) => {
if alg.oid != ID_MFG1 {
- return Err(type_error("unsupported pss mask gen algorithm"));
+ return Err(RsaPssParamsParseError::UnsupportedPssMaskGenAlgorithm);
}
let params = alg.parameters_oid().map_err(|_| {
- type_error("malformed or missing pss mask gen algorithm parameters")
+ RsaPssParamsParseError::MalformedOrMissingPssMaskGenAlgorithm
})?;
match params {
ID_SHA1_OID => RsaPssHashAlgorithm::Sha1,
@@ -900,7 +1079,9 @@ fn parse_rsa_pss_params(
ID_SHA512_OID => RsaPssHashAlgorithm::Sha512,
ID_SHA512_224_OID => RsaPssHashAlgorithm::Sha512_224,
ID_SHA512_256_OID => RsaPssHashAlgorithm::Sha512_256,
- _ => return Err(type_error("unsupported pss mask gen algorithm")),
+ _ => {
+ return Err(RsaPssParamsParseError::UnsupportedPssMaskGenAlgorithm)
+ }
}
}
None => hash_algorithm,
@@ -921,14 +1102,49 @@ fn parse_rsa_pss_params(
Ok(details)
}
-use base64::prelude::BASE64_URL_SAFE_NO_PAD;
-
fn bytes_to_b64(bytes: &[u8]) -> String {
+ use base64::prelude::BASE64_URL_SAFE_NO_PAD;
BASE64_URL_SAFE_NO_PAD.encode(bytes)
}
+#[derive(Debug, thiserror::Error)]
+pub enum AsymmetricPublicKeyJwkError {
+ #[error("key is not an asymmetric public key")]
+ KeyIsNotAsymmetricPublicKey,
+ #[error("Unsupported JWK EC curve: P224")]
+ UnsupportedJwkEcCurveP224,
+ #[error("jwk export not implemented for this key type")]
+ JwkExportNotImplementedForKeyType,
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum AsymmetricPublicKeyDerError {
+ #[error("key is not an asymmetric public key")]
+ KeyIsNotAsymmetricPublicKey,
+ #[error("invalid RSA public key")]
+ InvalidRsaPublicKey,
+ #[error("exporting non-RSA public key as PKCS#1 is not supported")]
+ ExportingNonRsaPublicKeyAsPkcs1Unsupported,
+ #[error("invalid EC public key")]
+ InvalidEcPublicKey,
+ #[error("exporting RSA-PSS public key as SPKI is not supported yet")]
+ ExportingNonRsaPssPublicKeyAsSpkiUnsupported,
+ #[error("invalid DSA public key")]
+ InvalidDsaPublicKey,
+ #[error("invalid X25519 public key")]
+ InvalidX25519PublicKey,
+ #[error("invalid Ed25519 public key")]
+ InvalidEd25519PublicKey,
+ #[error("invalid DH public key")]
+ InvalidDhPublicKey,
+ #[error("unsupported key type: {0}")]
+ UnsupportedKeyType(String),
+}
+
impl AsymmetricPublicKey {
- fn export_jwk(&self) -> Result<deno_core::serde_json::Value, AnyError> {
+ fn export_jwk(
+ &self,
+ ) -> Result<deno_core::serde_json::Value, AsymmetricPublicKeyJwkError> {
match self {
AsymmetricPublicKey::Ec(key) => {
let jwk = key.to_jwk()?;
@@ -974,40 +1190,39 @@ impl AsymmetricPublicKey {
});
Ok(jwk)
}
- _ => Err(type_error("jwk export not implemented for this key type")),
+ _ => Err(AsymmetricPublicKeyJwkError::JwkExportNotImplementedForKeyType),
}
}
- fn export_der(&self, typ: &str) -> Result<Box<[u8]>, AnyError> {
+ fn export_der(
+ &self,
+ typ: &str,
+ ) -> Result<Box<[u8]>, AsymmetricPublicKeyDerError> {
match typ {
"pkcs1" => match self {
AsymmetricPublicKey::Rsa(key) => {
let der = key
.to_pkcs1_der()
- .map_err(|_| type_error("invalid RSA public key"))?
+ .map_err(|_| AsymmetricPublicKeyDerError::InvalidRsaPublicKey)?
.into_vec()
.into_boxed_slice();
Ok(der)
}
- _ => Err(type_error(
- "exporting non-RSA public key as PKCS#1 is not supported",
- )),
+ _ => Err(AsymmetricPublicKeyDerError::ExportingNonRsaPublicKeyAsPkcs1Unsupported),
},
"spki" => {
let der = match self {
AsymmetricPublicKey::Rsa(key) => key
.to_public_key_der()
- .map_err(|_| type_error("invalid RSA public key"))?
+ .map_err(|_| AsymmetricPublicKeyDerError::InvalidRsaPublicKey)?
.into_vec()
.into_boxed_slice(),
AsymmetricPublicKey::RsaPss(_key) => {
- return Err(generic_error(
- "exporting RSA-PSS public key as SPKI is not supported yet",
- ))
+ return Err(AsymmetricPublicKeyDerError::ExportingNonRsaPssPublicKeyAsSpkiUnsupported)
}
AsymmetricPublicKey::Dsa(key) => key
.to_public_key_der()
- .map_err(|_| type_error("invalid DSA public key"))?
+ .map_err(|_| AsymmetricPublicKeyDerError::InvalidDsaPublicKey)?
.into_vec()
.into_boxed_slice(),
AsymmetricPublicKey::Ec(key) => {
@@ -1023,12 +1238,12 @@ impl AsymmetricPublicKey {
parameters: Some(asn1::AnyRef::from(&oid)),
},
subject_public_key: BitStringRef::from_bytes(&sec1)
- .map_err(|_| type_error("invalid EC public key"))?,
+ .map_err(|_| AsymmetricPublicKeyDerError::InvalidEcPublicKey)?,
};
spki
.to_der()
- .map_err(|_| type_error("invalid EC public key"))?
+ .map_err(|_| AsymmetricPublicKeyDerError::InvalidEcPublicKey)?
.into_boxed_slice()
}
AsymmetricPublicKey::X25519(key) => {
@@ -1038,12 +1253,12 @@ impl AsymmetricPublicKey {
parameters: None,
},
subject_public_key: BitStringRef::from_bytes(key.as_bytes())
- .map_err(|_| type_error("invalid X25519 public key"))?,
+ .map_err(|_| AsymmetricPublicKeyDerError::InvalidX25519PublicKey)?,
};
spki
.to_der()
- .map_err(|_| type_error("invalid X25519 public key"))?
+ .map_err(|_| AsymmetricPublicKeyDerError::InvalidX25519PublicKey)?
.into_boxed_slice()
}
AsymmetricPublicKey::Ed25519(key) => {
@@ -1053,12 +1268,12 @@ impl AsymmetricPublicKey {
parameters: None,
},
subject_public_key: BitStringRef::from_bytes(key.as_bytes())
- .map_err(|_| type_error("invalid Ed25519 public key"))?,
+ .map_err(|_| AsymmetricPublicKeyDerError::InvalidEd25519PublicKey)?,
};
spki
.to_der()
- .map_err(|_| type_error("invalid Ed25519 public key"))?
+ .map_err(|_| AsymmetricPublicKeyDerError::InvalidEd25519PublicKey)?
.into_boxed_slice()
}
AsymmetricPublicKey::Dh(key) => {
@@ -1071,43 +1286,67 @@ impl AsymmetricPublicKey {
},
subject_public_key: BitStringRef::from_bytes(&public_key_bytes)
.map_err(|_| {
- type_error("invalid DH public key")
+ AsymmetricPublicKeyDerError::InvalidDhPublicKey
})?,
};
spki
.to_der()
- .map_err(|_| type_error("invalid DH public key"))?
+ .map_err(|_| AsymmetricPublicKeyDerError::InvalidDhPublicKey)?
.into_boxed_slice()
}
};
Ok(der)
}
- _ => Err(type_error(format!("unsupported key type: {}", typ))),
+ _ => Err(AsymmetricPublicKeyDerError::UnsupportedKeyType(typ.to_string())),
}
}
}
+#[derive(Debug, thiserror::Error)]
+pub enum AsymmetricPrivateKeyDerError {
+ #[error("key is not an asymmetric private key")]
+ KeyIsNotAsymmetricPrivateKey,
+ #[error("invalid RSA private key")]
+ InvalidRsaPrivateKey,
+ #[error("exporting non-RSA private key as PKCS#1 is not supported")]
+ ExportingNonRsaPrivateKeyAsPkcs1Unsupported,
+ #[error("invalid EC private key")]
+ InvalidEcPrivateKey,
+ #[error("exporting non-EC private key as SEC1 is not supported")]
+ ExportingNonEcPrivateKeyAsSec1Unsupported,
+ #[error("exporting RSA-PSS private key as PKCS#8 is not supported yet")]
+ ExportingNonRsaPssPrivateKeyAsPkcs8Unsupported,
+ #[error("invalid DSA private key")]
+ InvalidDsaPrivateKey,
+ #[error("invalid X25519 private key")]
+ InvalidX25519PrivateKey,
+ #[error("invalid Ed25519 private key")]
+ InvalidEd25519PrivateKey,
+ #[error("invalid DH private key")]
+ InvalidDhPrivateKey,
+ #[error("unsupported key type: {0}")]
+ UnsupportedKeyType(String),
+}
+
impl AsymmetricPrivateKey {
fn export_der(
&self,
typ: &str,
// cipher: Option<&str>,
// passphrase: Option<&str>,
- ) -> Result<Box<[u8]>, AnyError> {
+ ) -> Result<Box<[u8]>, AsymmetricPrivateKeyDerError> {
match typ {
"pkcs1" => match self {
AsymmetricPrivateKey::Rsa(key) => {
let der = key
.to_pkcs1_der()
- .map_err(|_| type_error("invalid RSA private key"))?
+ .map_err(|_| AsymmetricPrivateKeyDerError::InvalidRsaPrivateKey)?
.to_bytes()
.to_vec()
.into_boxed_slice();
Ok(der)
}
- _ => Err(type_error(
- "exporting non-RSA private key as PKCS#1 is not supported",
- )),
+ _ => Err(AsymmetricPrivateKeyDerError::ExportingNonRsaPrivateKeyAsPkcs1Unsupported),
},
"sec1" => match self {
AsymmetricPrivateKey::Ec(key) => {
@@ -1116,30 +1355,26 @@ impl AsymmetricPrivateKey {
EcPrivateKey::P256(key) => key.to_sec1_der(),
EcPrivateKey::P384(key) => key.to_sec1_der(),
}
- .map_err(|_| type_error("invalid EC private key"))?;
+ .map_err(|_| AsymmetricPrivateKeyDerError::InvalidEcPrivateKey)?;
Ok(sec1.to_vec().into_boxed_slice())
}
- _ => Err(type_error(
- "exporting non-EC private key as SEC1 is not supported",
- )),
+ _ => Err(AsymmetricPrivateKeyDerError::ExportingNonEcPrivateKeyAsSec1Unsupported),
},
"pkcs8" => {
let der = match self {
AsymmetricPrivateKey::Rsa(key) => {
let document = key
.to_pkcs8_der()
- .map_err(|_| type_error("invalid RSA private key"))?;
+ .map_err(|_| AsymmetricPrivateKeyDerError::InvalidRsaPrivateKey)?;
document.to_bytes().to_vec().into_boxed_slice()
}
AsymmetricPrivateKey::RsaPss(_key) => {
- return Err(generic_error(
- "exporting RSA-PSS private key as PKCS#8 is not supported yet",
- ))
+ return Err(AsymmetricPrivateKeyDerError::ExportingNonRsaPssPrivateKeyAsPkcs8Unsupported)
}
AsymmetricPrivateKey::Dsa(key) => {
let document = key
.to_pkcs8_der()
- .map_err(|_| type_error("invalid DSA private key"))?;
+ .map_err(|_| AsymmetricPrivateKeyDerError::InvalidDsaPrivateKey)?;
document.to_bytes().to_vec().into_boxed_slice()
}
AsymmetricPrivateKey::Ec(key) => {
@@ -1148,14 +1383,14 @@ impl AsymmetricPrivateKey {
EcPrivateKey::P256(key) => key.to_pkcs8_der(),
EcPrivateKey::P384(key) => key.to_pkcs8_der(),
}
- .map_err(|_| type_error("invalid EC private key"))?;
+ .map_err(|_| AsymmetricPrivateKeyDerError::InvalidEcPrivateKey)?;
document.to_bytes().to_vec().into_boxed_slice()
}
AsymmetricPrivateKey::X25519(key) => {
let private_key = OctetStringRef::new(key.as_bytes())
- .map_err(|_| type_error("invalid X25519 private key"))?
+ .map_err(|_| AsymmetricPrivateKeyDerError::InvalidX25519PrivateKey)?
.to_der()
- .map_err(|_| type_error("invalid X25519 private key"))?;
+ .map_err(|_| AsymmetricPrivateKeyDerError::InvalidX25519PrivateKey)?;
let private_key = PrivateKeyInfo {
algorithm: rsa::pkcs8::AlgorithmIdentifierRef {
@@ -1168,15 +1403,15 @@ impl AsymmetricPrivateKey {
let der = private_key
.to_der()
- .map_err(|_| type_error("invalid X25519 private key"))?
+ .map_err(|_| AsymmetricPrivateKeyDerError::InvalidX25519PrivateKey)?
.into_boxed_slice();
return Ok(der);
}
AsymmetricPrivateKey::Ed25519(key) => {
let private_key = OctetStringRef::new(key.as_bytes())
- .map_err(|_| type_error("invalid Ed25519 private key"))?
+ .map_err(|_| AsymmetricPrivateKeyDerError::InvalidEd25519PrivateKey)?
.to_der()
- .map_err(|_| type_error("invalid Ed25519 private key"))?;
+ .map_err(|_| AsymmetricPrivateKeyDerError::InvalidEd25519PrivateKey)?;
let private_key = PrivateKeyInfo {
algorithm: rsa::pkcs8::AlgorithmIdentifierRef {
@@ -1189,7 +1424,7 @@ impl AsymmetricPrivateKey {
private_key
.to_der()
- .map_err(|_| type_error("invalid ED25519 private key"))?
+ .map_err(|_| AsymmetricPrivateKeyDerError::InvalidEd25519PrivateKey)?
.into_boxed_slice()
}
AsymmetricPrivateKey::Dh(key) => {
@@ -1206,14 +1441,14 @@ impl AsymmetricPrivateKey {
private_key
.to_der()
- .map_err(|_| type_error("invalid DH private key"))?
+ .map_err(|_| AsymmetricPrivateKeyDerError::InvalidDhPrivateKey)?
.into_boxed_slice()
}
};
Ok(der)
}
- _ => Err(type_error(format!("unsupported key type: {}", typ))),
+ _ => Err(AsymmetricPrivateKeyDerError::UnsupportedKeyType(typ.to_string())),
}
}
}
@@ -1225,7 +1460,7 @@ pub fn op_node_create_private_key(
#[string] format: &str,
#[string] typ: &str,
#[buffer] passphrase: Option<&[u8]>,
-) -> Result<KeyObjectHandle, AnyError> {
+) -> Result<KeyObjectHandle, AsymmetricPrivateKeyError> {
KeyObjectHandle::new_asymmetric_private_key_from_js(
key, format, typ, passphrase,
)
@@ -1237,7 +1472,7 @@ pub fn op_node_create_ed_raw(
#[string] curve: &str,
#[buffer] key: &[u8],
is_public: bool,
-) -> Result<KeyObjectHandle, AnyError> {
+) -> Result<KeyObjectHandle, EdRawError> {
KeyObjectHandle::new_ed_raw(curve, key, is_public)
}
@@ -1255,16 +1490,16 @@ pub struct RsaJwkKey {
pub fn op_node_create_rsa_jwk(
#[serde] jwk: RsaJwkKey,
is_public: bool,
-) -> Result<KeyObjectHandle, AnyError> {
+) -> Result<KeyObjectHandle, RsaJwkError> {
KeyObjectHandle::new_rsa_jwk(jwk, is_public)
}
#[op2]
#[cppgc]
pub fn op_node_create_ec_jwk(
- #[serde] jwk: elliptic_curve::JwkEcKey,
+ #[serde] jwk: JwkEcKey,
is_public: bool,
-) -> Result<KeyObjectHandle, AnyError> {
+) -> Result<KeyObjectHandle, EcJwkError> {
KeyObjectHandle::new_ec_jwk(&jwk, is_public)
}
@@ -1275,7 +1510,7 @@ pub fn op_node_create_public_key(
#[string] format: &str,
#[string] typ: &str,
#[buffer] passphrase: Option<&[u8]>,
-) -> Result<KeyObjectHandle, AnyError> {
+) -> Result<KeyObjectHandle, AsymmetricPublicKeyError> {
KeyObjectHandle::new_asymmetric_public_key_from_js(
key, format, typ, passphrase,
)
@@ -1293,7 +1528,7 @@ pub fn op_node_create_secret_key(
#[string]
pub fn op_node_get_asymmetric_key_type(
#[cppgc] handle: &KeyObjectHandle,
-) -> Result<&'static str, AnyError> {
+) -> Result<&'static str, deno_core::error::AnyError> {
match handle {
KeyObjectHandle::AsymmetricPrivate(AsymmetricPrivateKey::Rsa(_))
| KeyObjectHandle::AsymmetricPublic(AsymmetricPublicKey::Rsa(_)) => {
@@ -1364,7 +1599,7 @@ pub enum AsymmetricKeyDetails {
#[serde]
pub fn op_node_get_asymmetric_key_details(
#[cppgc] handle: &KeyObjectHandle,
-) -> Result<AsymmetricKeyDetails, AnyError> {
+) -> Result<AsymmetricKeyDetails, deno_core::error::AnyError> {
match handle {
KeyObjectHandle::AsymmetricPrivate(private_key) => match private_key {
AsymmetricPrivateKey::Rsa(key) => {
@@ -1482,12 +1717,10 @@ pub fn op_node_get_asymmetric_key_details(
#[smi]
pub fn op_node_get_symmetric_key_size(
#[cppgc] handle: &KeyObjectHandle,
-) -> Result<usize, AnyError> {
+) -> Result<usize, deno_core::error::AnyError> {
match handle {
- KeyObjectHandle::AsymmetricPrivate(_) => {
- Err(type_error("asymmetric key is not a symmetric key"))
- }
- KeyObjectHandle::AsymmetricPublic(_) => {
+ KeyObjectHandle::AsymmetricPrivate(_)
+ | KeyObjectHandle::AsymmetricPublic(_) => {
Err(type_error("asymmetric key is not a symmetric key"))
}
KeyObjectHandle::Secret(key) => Ok(key.len() * 8),
@@ -1592,13 +1825,17 @@ pub async fn op_node_generate_rsa_key_async(
.unwrap()
}
+#[derive(Debug, thiserror::Error)]
+#[error("digest not allowed for RSA-PSS keys{}", .0.as_ref().map(|digest| format!(": {digest}")).unwrap_or_default())]
+pub struct GenerateRsaPssError(Option<String>);
+
fn generate_rsa_pss(
modulus_length: usize,
public_exponent: usize,
hash_algorithm: Option<&str>,
mf1_hash_algorithm: Option<&str>,
salt_length: Option<u32>,
-) -> Result<KeyObjectHandlePair, AnyError> {
+) -> Result<KeyObjectHandlePair, GenerateRsaPssError> {
let key = RsaPrivateKey::new_with_exp(
&mut thread_rng(),
modulus_length,
@@ -1617,25 +1854,19 @@ fn generate_rsa_pss(
let hash_algorithm = match_fixed_digest_with_oid!(
hash_algorithm,
fn (algorithm: Option<RsaPssHashAlgorithm>) {
- algorithm.ok_or_else(|| type_error("digest not allowed for RSA-PSS keys: {}"))?
+ algorithm.ok_or(GenerateRsaPssError(None))?
},
_ => {
- return Err(type_error(format!(
- "digest not allowed for RSA-PSS keys: {}",
- hash_algorithm
- )))
+ return Err(GenerateRsaPssError(Some(hash_algorithm.to_string())))
}
);
let mf1_hash_algorithm = match_fixed_digest_with_oid!(
mf1_hash_algorithm,
fn (algorithm: Option<RsaPssHashAlgorithm>) {
- algorithm.ok_or_else(|| type_error("digest not allowed for RSA-PSS keys: {}"))?
+ algorithm.ok_or(GenerateRsaPssError(None))?
},
_ => {
- return Err(type_error(format!(
- "digest not allowed for RSA-PSS keys: {}",
- mf1_hash_algorithm
- )))
+ return Err(GenerateRsaPssError(Some(mf1_hash_algorithm.to_string())))
}
);
let salt_length =
@@ -1663,7 +1894,7 @@ pub fn op_node_generate_rsa_pss_key(
  #[string] hash_algorithm: Option<String>, // todo: Option<&str> not supported in ops yet
  #[string] mf1_hash_algorithm: Option<String>, // todo: Option<&str> not supported in ops yet
#[smi] salt_length: Option<u32>,
-) -> Result<KeyObjectHandlePair, AnyError> {
+) -> Result<KeyObjectHandlePair, GenerateRsaPssError> {
generate_rsa_pss(
modulus_length,
public_exponent,
@@ -1681,7 +1912,7 @@ pub async fn op_node_generate_rsa_pss_key_async(
  #[string] hash_algorithm: Option<String>, // todo: Option<&str> not supported in ops yet
  #[string] mf1_hash_algorithm: Option<String>, // todo: Option<&str> not supported in ops yet
#[smi] salt_length: Option<u32>,
-) -> Result<KeyObjectHandlePair, AnyError> {
+) -> Result<KeyObjectHandlePair, GenerateRsaPssError> {
spawn_blocking(move || {
generate_rsa_pss(
modulus_length,
@@ -1698,7 +1929,7 @@ pub async fn op_node_generate_rsa_pss_key_async(
fn dsa_generate(
modulus_length: usize,
divisor_length: usize,
-) -> Result<KeyObjectHandlePair, AnyError> {
+) -> Result<KeyObjectHandlePair, deno_core::error::AnyError> {
let mut rng = rand::thread_rng();
use dsa::Components;
use dsa::KeySize;
@@ -1729,7 +1960,7 @@ fn dsa_generate(
pub fn op_node_generate_dsa_key(
#[smi] modulus_length: usize,
#[smi] divisor_length: usize,
-) -> Result<KeyObjectHandlePair, AnyError> {
+) -> Result<KeyObjectHandlePair, deno_core::error::AnyError> {
dsa_generate(modulus_length, divisor_length)
}
@@ -1738,13 +1969,15 @@ pub fn op_node_generate_dsa_key(
pub async fn op_node_generate_dsa_key_async(
#[smi] modulus_length: usize,
#[smi] divisor_length: usize,
-) -> Result<KeyObjectHandlePair, AnyError> {
+) -> Result<KeyObjectHandlePair, deno_core::error::AnyError> {
spawn_blocking(move || dsa_generate(modulus_length, divisor_length))
.await
.unwrap()
}
-fn ec_generate(named_curve: &str) -> Result<KeyObjectHandlePair, AnyError> {
+fn ec_generate(
+ named_curve: &str,
+) -> Result<KeyObjectHandlePair, deno_core::error::AnyError> {
let mut rng = rand::thread_rng();
// TODO(@littledivy): Support public key point encoding.
// Default is uncompressed.
@@ -1776,7 +2009,7 @@ fn ec_generate(named_curve: &str) -> Result<KeyObjectHandlePair, AnyError> {
#[cppgc]
pub fn op_node_generate_ec_key(
#[string] named_curve: &str,
-) -> Result<KeyObjectHandlePair, AnyError> {
+) -> Result<KeyObjectHandlePair, deno_core::error::AnyError> {
ec_generate(named_curve)
}
@@ -1784,7 +2017,7 @@ pub fn op_node_generate_ec_key(
#[cppgc]
pub async fn op_node_generate_ec_key_async(
#[string] named_curve: String,
-) -> Result<KeyObjectHandlePair, AnyError> {
+) -> Result<KeyObjectHandlePair, deno_core::error::AnyError> {
spawn_blocking(move || ec_generate(&named_curve))
.await
.unwrap()
@@ -1840,7 +2073,7 @@ fn u32_slice_to_u8_slice(slice: &[u32]) -> &[u8] {
fn dh_group_generate(
group_name: &str,
-) -> Result<KeyObjectHandlePair, AnyError> {
+) -> Result<KeyObjectHandlePair, deno_core::error::AnyError> {
let (dh, prime, generator) = match group_name {
"modp5" => (
dh::DiffieHellman::group::<dh::Modp1536>(),
@@ -1895,7 +2128,7 @@ fn dh_group_generate(
#[cppgc]
pub fn op_node_generate_dh_group_key(
#[string] group_name: &str,
-) -> Result<KeyObjectHandlePair, AnyError> {
+) -> Result<KeyObjectHandlePair, deno_core::error::AnyError> {
dh_group_generate(group_name)
}
@@ -1903,7 +2136,7 @@ pub fn op_node_generate_dh_group_key(
#[cppgc]
pub async fn op_node_generate_dh_group_key_async(
#[string] group_name: String,
-) -> Result<KeyObjectHandlePair, AnyError> {
+) -> Result<KeyObjectHandlePair, deno_core::error::AnyError> {
spawn_blocking(move || dh_group_generate(&group_name))
.await
.unwrap()
@@ -1913,7 +2146,7 @@ fn dh_generate(
prime: Option<&[u8]>,
prime_len: usize,
generator: usize,
-) -> Result<KeyObjectHandlePair, AnyError> {
+) -> KeyObjectHandlePair {
let prime = prime
.map(|p| p.into())
.unwrap_or_else(|| Prime::generate(prime_len));
@@ -1923,7 +2156,7 @@ fn dh_generate(
base: asn1::Int::new(generator.to_be_bytes().as_slice()).unwrap(),
private_value_length: None,
};
- Ok(KeyObjectHandlePair::new(
+ KeyObjectHandlePair::new(
AsymmetricPrivateKey::Dh(DhPrivateKey {
key: dh.private_key,
params: params.clone(),
@@ -1932,7 +2165,7 @@ fn dh_generate(
key: dh.public_key,
params,
}),
- ))
+ )
}
#[op2]
@@ -1941,7 +2174,7 @@ pub fn op_node_generate_dh_key(
#[buffer] prime: Option<&[u8]>,
#[smi] prime_len: usize,
#[smi] generator: usize,
-) -> Result<KeyObjectHandlePair, AnyError> {
+) -> KeyObjectHandlePair {
dh_generate(prime, prime_len, generator)
}
@@ -1951,7 +2184,7 @@ pub async fn op_node_generate_dh_key_async(
#[buffer(copy)] prime: Option<Box<[u8]>>,
#[smi] prime_len: usize,
#[smi] generator: usize,
-) -> Result<KeyObjectHandlePair, AnyError> {
+) -> KeyObjectHandlePair {
spawn_blocking(move || dh_generate(prime.as_deref(), prime_len, generator))
.await
.unwrap()
@@ -1963,21 +2196,21 @@ pub fn op_node_dh_keys_generate_and_export(
#[buffer] prime: Option<&[u8]>,
#[smi] prime_len: usize,
#[smi] generator: usize,
-) -> Result<(ToJsBuffer, ToJsBuffer), AnyError> {
+) -> (ToJsBuffer, ToJsBuffer) {
let prime = prime
.map(|p| p.into())
.unwrap_or_else(|| Prime::generate(prime_len));
let dh = dh::DiffieHellman::new(prime, generator);
let private_key = dh.private_key.into_vec().into_boxed_slice();
let public_key = dh.public_key.into_vec().into_boxed_slice();
- Ok((private_key.into(), public_key.into()))
+ (private_key.into(), public_key.into())
}
#[op2]
#[buffer]
pub fn op_node_export_secret_key(
#[cppgc] handle: &KeyObjectHandle,
-) -> Result<Box<[u8]>, AnyError> {
+) -> Result<Box<[u8]>, deno_core::error::AnyError> {
let key = handle
.as_secret_key()
.ok_or_else(|| type_error("key is not a secret key"))?;
@@ -1988,7 +2221,7 @@ pub fn op_node_export_secret_key(
#[string]
pub fn op_node_export_secret_key_b64url(
#[cppgc] handle: &KeyObjectHandle,
-) -> Result<String, AnyError> {
+) -> Result<String, deno_core::error::AnyError> {
let key = handle
.as_secret_key()
.ok_or_else(|| type_error("key is not a secret key"))?;
@@ -1999,23 +2232,33 @@ pub fn op_node_export_secret_key_b64url(
#[serde]
pub fn op_node_export_public_key_jwk(
#[cppgc] handle: &KeyObjectHandle,
-) -> Result<deno_core::serde_json::Value, AnyError> {
+) -> Result<deno_core::serde_json::Value, AsymmetricPublicKeyJwkError> {
let public_key = handle
.as_public_key()
- .ok_or_else(|| type_error("key is not an asymmetric public key"))?;
+ .ok_or(AsymmetricPublicKeyJwkError::KeyIsNotAsymmetricPublicKey)?;
public_key.export_jwk()
}
+#[derive(Debug, thiserror::Error)]
+pub enum ExportPublicKeyPemError {
+ #[error(transparent)]
+ AsymmetricPublicKeyDer(#[from] AsymmetricPublicKeyDerError),
+ #[error("very large data")]
+ VeryLargeData,
+ #[error(transparent)]
+ Der(#[from] der::Error),
+}
+
#[op2]
#[string]
pub fn op_node_export_public_key_pem(
#[cppgc] handle: &KeyObjectHandle,
#[string] typ: &str,
-) -> Result<String, AnyError> {
+) -> Result<String, ExportPublicKeyPemError> {
let public_key = handle
.as_public_key()
- .ok_or_else(|| type_error("key is not an asymmetric public key"))?;
+ .ok_or(AsymmetricPublicKeyDerError::KeyIsNotAsymmetricPublicKey)?;
let data = public_key.export_der(typ)?;
let label = match typ {
@@ -2024,7 +2267,9 @@ pub fn op_node_export_public_key_pem(
_ => unreachable!("export_der would have errored"),
};
- let mut out = vec![0; 2048];
+ let pem_len = der::pem::encapsulated_len(label, LineEnding::LF, data.len())
+ .map_err(|_| ExportPublicKeyPemError::VeryLargeData)?;
+ let mut out = vec![0; pem_len];
let mut writer = PemWriter::new(label, LineEnding::LF, &mut out)?;
writer.write(&data)?;
let len = writer.finish()?;
@@ -2038,22 +2283,32 @@ pub fn op_node_export_public_key_pem(
pub fn op_node_export_public_key_der(
#[cppgc] handle: &KeyObjectHandle,
#[string] typ: &str,
-) -> Result<Box<[u8]>, AnyError> {
+) -> Result<Box<[u8]>, AsymmetricPublicKeyDerError> {
let public_key = handle
.as_public_key()
- .ok_or_else(|| type_error("key is not an asymmetric public key"))?;
+ .ok_or(AsymmetricPublicKeyDerError::KeyIsNotAsymmetricPublicKey)?;
public_key.export_der(typ)
}
+#[derive(Debug, thiserror::Error)]
+pub enum ExportPrivateKeyPemError {
+ #[error(transparent)]
+ AsymmetricPublicKeyDer(#[from] AsymmetricPrivateKeyDerError),
+ #[error("very large data")]
+ VeryLargeData,
+ #[error(transparent)]
+ Der(#[from] der::Error),
+}
+
#[op2]
#[string]
pub fn op_node_export_private_key_pem(
#[cppgc] handle: &KeyObjectHandle,
#[string] typ: &str,
-) -> Result<String, AnyError> {
+) -> Result<String, ExportPrivateKeyPemError> {
let private_key = handle
.as_private_key()
- .ok_or_else(|| type_error("key is not an asymmetric private key"))?;
+ .ok_or(AsymmetricPrivateKeyDerError::KeyIsNotAsymmetricPrivateKey)?;
let data = private_key.export_der(typ)?;
let label = match typ {
@@ -2063,7 +2318,9 @@ pub fn op_node_export_private_key_pem(
_ => unreachable!("export_der would have errored"),
};
- let mut out = vec![0; 2048];
+ let pem_len = der::pem::encapsulated_len(label, LineEnding::LF, data.len())
+ .map_err(|_| ExportPrivateKeyPemError::VeryLargeData)?;
+ let mut out = vec![0; pem_len];
let mut writer = PemWriter::new(label, LineEnding::LF, &mut out)?;
writer.write(&data)?;
let len = writer.finish()?;
@@ -2077,10 +2334,10 @@ pub fn op_node_export_private_key_pem(
pub fn op_node_export_private_key_der(
#[cppgc] handle: &KeyObjectHandle,
#[string] typ: &str,
-) -> Result<Box<[u8]>, AnyError> {
+) -> Result<Box<[u8]>, AsymmetricPrivateKeyDerError> {
let private_key = handle
.as_private_key()
- .ok_or_else(|| type_error("key is not an asymmetric private key"))?;
+ .ok_or(AsymmetricPrivateKeyDerError::KeyIsNotAsymmetricPrivateKey)?;
private_key.export_der(typ)
}
@@ -2098,7 +2355,7 @@ pub fn op_node_key_type(#[cppgc] handle: &KeyObjectHandle) -> &'static str {
#[cppgc]
pub fn op_node_derive_public_key_from_private_key(
#[cppgc] handle: &KeyObjectHandle,
-) -> Result<KeyObjectHandle, AnyError> {
+) -> Result<KeyObjectHandle, deno_core::error::AnyError> {
let Some(private_key) = handle.as_private_key() else {
return Err(type_error("expected private key"));
};
diff --git a/ext/node/ops/crypto/mod.rs b/ext/node/ops/crypto/mod.rs
index 600d31558..e90e82090 100644
--- a/ext/node/ops/crypto/mod.rs
+++ b/ext/node/ops/crypto/mod.rs
@@ -1,7 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_core::error::generic_error;
use deno_core::error::type_error;
-use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::unsync::spawn_blocking;
use deno_core::JsBuffer;
@@ -34,14 +33,14 @@ use rsa::Pkcs1v15Encrypt;
use rsa::RsaPrivateKey;
use rsa::RsaPublicKey;
-mod cipher;
+pub mod cipher;
mod dh;
-mod digest;
+pub mod digest;
pub mod keys;
mod md5_sha1;
mod pkcs3;
mod primes;
-mod sign;
+pub mod sign;
pub mod x509;
use self::digest::match_fixed_digest_with_eager_block_buffer;
@@ -58,38 +57,31 @@ pub fn op_node_check_prime(
pub fn op_node_check_prime_bytes(
#[anybuffer] bytes: &[u8],
#[number] checks: usize,
-) -> Result<bool, AnyError> {
+) -> bool {
let candidate = BigInt::from_bytes_be(num_bigint::Sign::Plus, bytes);
- Ok(primes::is_probably_prime(&candidate, checks))
+ primes::is_probably_prime(&candidate, checks)
}
#[op2(async)]
pub async fn op_node_check_prime_async(
#[bigint] num: i64,
#[number] checks: usize,
-) -> Result<bool, AnyError> {
+) -> Result<bool, tokio::task::JoinError> {
// TODO(@littledivy): use rayon for CPU-bound tasks
- Ok(
- spawn_blocking(move || {
- primes::is_probably_prime(&BigInt::from(num), checks)
- })
- .await?,
- )
+ spawn_blocking(move || primes::is_probably_prime(&BigInt::from(num), checks))
+ .await
}
#[op2(async)]
pub fn op_node_check_prime_bytes_async(
#[anybuffer] bytes: &[u8],
#[number] checks: usize,
-) -> Result<impl Future<Output = Result<bool, AnyError>>, AnyError> {
+) -> impl Future<Output = Result<bool, tokio::task::JoinError>> {
let candidate = BigInt::from_bytes_be(num_bigint::Sign::Plus, bytes);
// TODO(@littledivy): use rayon for CPU-bound tasks
- Ok(async move {
- Ok(
- spawn_blocking(move || primes::is_probably_prime(&candidate, checks))
- .await?,
- )
- })
+ async move {
+ spawn_blocking(move || primes::is_probably_prime(&candidate, checks)).await
+ }
}
#[op2]
@@ -97,7 +89,7 @@ pub fn op_node_check_prime_bytes_async(
pub fn op_node_create_hash(
#[string] algorithm: &str,
output_length: Option<u32>,
-) -> Result<digest::Hasher, AnyError> {
+) -> Result<digest::Hasher, digest::HashError> {
digest::Hasher::new(algorithm, output_length.map(|l| l as usize))
}
@@ -145,17 +137,31 @@ pub fn op_node_hash_digest_hex(
pub fn op_node_hash_clone(
#[cppgc] hasher: &digest::Hasher,
output_length: Option<u32>,
-) -> Result<Option<digest::Hasher>, AnyError> {
+) -> Result<Option<digest::Hasher>, digest::HashError> {
hasher.clone_inner(output_length.map(|l| l as usize))
}
+#[derive(Debug, thiserror::Error)]
+pub enum PrivateEncryptDecryptError {
+ #[error(transparent)]
+ Pkcs8(#[from] pkcs8::Error),
+ #[error(transparent)]
+ Spki(#[from] spki::Error),
+ #[error(transparent)]
+ Utf8(#[from] std::str::Utf8Error),
+ #[error(transparent)]
+ Rsa(#[from] rsa::Error),
+ #[error("Unknown padding")]
+ UnknownPadding,
+}
+
#[op2]
#[serde]
pub fn op_node_private_encrypt(
#[serde] key: StringOrBuffer,
#[serde] msg: StringOrBuffer,
#[smi] padding: u32,
-) -> Result<ToJsBuffer, AnyError> {
+) -> Result<ToJsBuffer, PrivateEncryptDecryptError> {
let key = RsaPrivateKey::from_pkcs8_pem((&key).try_into()?)?;
let mut rng = rand::thread_rng();
@@ -172,7 +178,7 @@ pub fn op_node_private_encrypt(
.encrypt(&mut rng, Oaep::new::<sha1::Sha1>(), &msg)?
.into(),
),
- _ => Err(type_error("Unknown padding")),
+ _ => Err(PrivateEncryptDecryptError::UnknownPadding),
}
}
@@ -182,13 +188,13 @@ pub fn op_node_private_decrypt(
#[serde] key: StringOrBuffer,
#[serde] msg: StringOrBuffer,
#[smi] padding: u32,
-) -> Result<ToJsBuffer, AnyError> {
+) -> Result<ToJsBuffer, PrivateEncryptDecryptError> {
let key = RsaPrivateKey::from_pkcs8_pem((&key).try_into()?)?;
match padding {
1 => Ok(key.decrypt(Pkcs1v15Encrypt, &msg)?.into()),
4 => Ok(key.decrypt(Oaep::new::<sha1::Sha1>(), &msg)?.into()),
- _ => Err(type_error("Unknown padding")),
+ _ => Err(PrivateEncryptDecryptError::UnknownPadding),
}
}
@@ -198,7 +204,7 @@ pub fn op_node_public_encrypt(
#[serde] key: StringOrBuffer,
#[serde] msg: StringOrBuffer,
#[smi] padding: u32,
-) -> Result<ToJsBuffer, AnyError> {
+) -> Result<ToJsBuffer, PrivateEncryptDecryptError> {
let key = RsaPublicKey::from_public_key_pem((&key).try_into()?)?;
let mut rng = rand::thread_rng();
@@ -209,7 +215,7 @@ pub fn op_node_public_encrypt(
.encrypt(&mut rng, Oaep::new::<sha1::Sha1>(), &msg)?
.into(),
),
- _ => Err(type_error("Unknown padding")),
+ _ => Err(PrivateEncryptDecryptError::UnknownPadding),
}
}
@@ -220,7 +226,7 @@ pub fn op_node_create_cipheriv(
#[string] algorithm: &str,
#[buffer] key: &[u8],
#[buffer] iv: &[u8],
-) -> Result<u32, AnyError> {
+) -> Result<u32, cipher::CipherContextError> {
let context = cipher::CipherContext::new(algorithm, key, iv)?;
Ok(state.resource_table.add(context))
}
@@ -262,11 +268,14 @@ pub fn op_node_cipheriv_final(
auto_pad: bool,
#[buffer] input: &[u8],
#[anybuffer] output: &mut [u8],
-) -> Result<Option<Vec<u8>>, AnyError> {
- let context = state.resource_table.take::<cipher::CipherContext>(rid)?;
+) -> Result<Option<Vec<u8>>, cipher::CipherContextError> {
+ let context = state
+ .resource_table
+ .take::<cipher::CipherContext>(rid)
+ .map_err(cipher::CipherContextError::Resource)?;
let context = Rc::try_unwrap(context)
- .map_err(|_| type_error("Cipher context is already in use"))?;
- context.r#final(auto_pad, input, output)
+ .map_err(|_| cipher::CipherContextError::ContextInUse)?;
+ context.r#final(auto_pad, input, output).map_err(Into::into)
}
#[op2]
@@ -274,10 +283,13 @@ pub fn op_node_cipheriv_final(
pub fn op_node_cipheriv_take(
state: &mut OpState,
#[smi] rid: u32,
-) -> Result<Option<Vec<u8>>, AnyError> {
- let context = state.resource_table.take::<cipher::CipherContext>(rid)?;
+) -> Result<Option<Vec<u8>>, cipher::CipherContextError> {
+ let context = state
+ .resource_table
+ .take::<cipher::CipherContext>(rid)
+ .map_err(cipher::CipherContextError::Resource)?;
let context = Rc::try_unwrap(context)
- .map_err(|_| type_error("Cipher context is already in use"))?;
+ .map_err(|_| cipher::CipherContextError::ContextInUse)?;
Ok(context.take_tag())
}
@@ -288,7 +300,7 @@ pub fn op_node_create_decipheriv(
#[string] algorithm: &str,
#[buffer] key: &[u8],
#[buffer] iv: &[u8],
-) -> Result<u32, AnyError> {
+) -> Result<u32, cipher::DecipherContextError> {
let context = cipher::DecipherContext::new(algorithm, key, iv)?;
Ok(state.resource_table.add(context))
}
@@ -326,10 +338,13 @@ pub fn op_node_decipheriv_decrypt(
pub fn op_node_decipheriv_take(
state: &mut OpState,
#[smi] rid: u32,
-) -> Result<(), AnyError> {
- let context = state.resource_table.take::<cipher::DecipherContext>(rid)?;
+) -> Result<(), cipher::DecipherContextError> {
+ let context = state
+ .resource_table
+ .take::<cipher::DecipherContext>(rid)
+ .map_err(cipher::DecipherContextError::Resource)?;
Rc::try_unwrap(context)
- .map_err(|_| type_error("Cipher context is already in use"))?;
+ .map_err(|_| cipher::DecipherContextError::ContextInUse)?;
Ok(())
}
@@ -341,11 +356,16 @@ pub fn op_node_decipheriv_final(
#[buffer] input: &[u8],
#[anybuffer] output: &mut [u8],
#[buffer] auth_tag: &[u8],
-) -> Result<(), AnyError> {
- let context = state.resource_table.take::<cipher::DecipherContext>(rid)?;
+) -> Result<(), cipher::DecipherContextError> {
+ let context = state
+ .resource_table
+ .take::<cipher::DecipherContext>(rid)
+ .map_err(cipher::DecipherContextError::Resource)?;
let context = Rc::try_unwrap(context)
- .map_err(|_| type_error("Cipher context is already in use"))?;
- context.r#final(auto_pad, input, output, auth_tag)
+ .map_err(|_| cipher::DecipherContextError::ContextInUse)?;
+ context
+ .r#final(auto_pad, input, output, auth_tag)
+ .map_err(Into::into)
}
#[op2]
@@ -356,7 +376,7 @@ pub fn op_node_sign(
#[string] digest_type: &str,
#[smi] pss_salt_length: Option<u32>,
#[smi] dsa_signature_encoding: u32,
-) -> Result<Box<[u8]>, AnyError> {
+) -> Result<Box<[u8]>, sign::KeyObjectHandlePrehashedSignAndVerifyError> {
handle.sign_prehashed(
digest_type,
digest,
@@ -373,7 +393,7 @@ pub fn op_node_verify(
#[buffer] signature: &[u8],
#[smi] pss_salt_length: Option<u32>,
#[smi] dsa_signature_encoding: u32,
-) -> Result<bool, AnyError> {
+) -> Result<bool, sign::KeyObjectHandlePrehashedSignAndVerifyError> {
handle.verify_prehashed(
digest_type,
digest,
@@ -383,13 +403,21 @@ pub fn op_node_verify(
)
}
+#[derive(Debug, thiserror::Error)]
+pub enum Pbkdf2Error {
+ #[error("unsupported digest: {0}")]
+ UnsupportedDigest(String),
+ #[error(transparent)]
+ Join(#[from] tokio::task::JoinError),
+}
+
fn pbkdf2_sync(
password: &[u8],
salt: &[u8],
iterations: u32,
algorithm_name: &str,
derived_key: &mut [u8],
-) -> Result<(), AnyError> {
+) -> Result<(), Pbkdf2Error> {
match_fixed_digest_with_eager_block_buffer!(
algorithm_name,
fn <D>() {
@@ -397,10 +425,7 @@ fn pbkdf2_sync(
Ok(())
},
_ => {
- Err(type_error(format!(
- "unsupported digest: {}",
- algorithm_name
- )))
+ Err(Pbkdf2Error::UnsupportedDigest(algorithm_name.to_string()))
}
)
}
@@ -424,7 +449,7 @@ pub async fn op_node_pbkdf2_async(
#[smi] iterations: u32,
#[string] digest: String,
#[number] keylen: usize,
-) -> Result<ToJsBuffer, AnyError> {
+) -> Result<ToJsBuffer, Pbkdf2Error> {
spawn_blocking(move || {
let mut derived_key = vec![0; keylen];
pbkdf2_sync(&password, &salt, iterations, &digest, &mut derived_key)
@@ -450,15 +475,27 @@ pub async fn op_node_fill_random_async(#[smi] len: i32) -> ToJsBuffer {
.unwrap()
}
+#[derive(Debug, thiserror::Error)]
+pub enum HkdfError {
+ #[error("expected secret key")]
+ ExpectedSecretKey,
+ #[error("HKDF-Expand failed")]
+ HkdfExpandFailed,
+ #[error("Unsupported digest: {0}")]
+ UnsupportedDigest(String),
+ #[error(transparent)]
+ Join(#[from] tokio::task::JoinError),
+}
+
fn hkdf_sync(
digest_algorithm: &str,
handle: &KeyObjectHandle,
salt: &[u8],
info: &[u8],
okm: &mut [u8],
-) -> Result<(), AnyError> {
+) -> Result<(), HkdfError> {
let Some(ikm) = handle.as_secret_key() else {
- return Err(type_error("expected secret key"));
+ return Err(HkdfError::ExpectedSecretKey);
};
match_fixed_digest_with_eager_block_buffer!(
@@ -466,10 +503,10 @@ fn hkdf_sync(
fn <D>() {
let hk = Hkdf::<D>::new(Some(salt), ikm);
hk.expand(info, okm)
- .map_err(|_| type_error("HKDF-Expand failed"))
+ .map_err(|_| HkdfError::HkdfExpandFailed)
},
_ => {
- Err(type_error(format!("Unsupported digest: {}", digest_algorithm)))
+ Err(HkdfError::UnsupportedDigest(digest_algorithm.to_string()))
}
)
}
@@ -481,7 +518,7 @@ pub fn op_node_hkdf(
#[buffer] salt: &[u8],
#[buffer] info: &[u8],
#[buffer] okm: &mut [u8],
-) -> Result<(), AnyError> {
+) -> Result<(), HkdfError> {
hkdf_sync(digest_algorithm, handle, salt, info, okm)
}
@@ -493,7 +530,7 @@ pub async fn op_node_hkdf_async(
#[buffer] salt: JsBuffer,
#[buffer] info: JsBuffer,
#[number] okm_len: usize,
-) -> Result<ToJsBuffer, AnyError> {
+) -> Result<ToJsBuffer, HkdfError> {
let handle = handle.clone();
spawn_blocking(move || {
let mut okm = vec![0u8; okm_len];
@@ -509,27 +546,24 @@ pub fn op_node_dh_compute_secret(
#[buffer] prime: JsBuffer,
#[buffer] private_key: JsBuffer,
#[buffer] their_public_key: JsBuffer,
-) -> Result<ToJsBuffer, AnyError> {
+) -> ToJsBuffer {
let pubkey: BigUint = BigUint::from_bytes_be(their_public_key.as_ref());
let privkey: BigUint = BigUint::from_bytes_be(private_key.as_ref());
let primei: BigUint = BigUint::from_bytes_be(prime.as_ref());
let shared_secret: BigUint = pubkey.modpow(&privkey, &primei);
- Ok(shared_secret.to_bytes_be().into())
+ shared_secret.to_bytes_be().into()
}
#[op2(fast)]
-#[smi]
-pub fn op_node_random_int(
- #[smi] min: i32,
- #[smi] max: i32,
-) -> Result<i32, AnyError> {
+#[number]
+pub fn op_node_random_int(#[number] min: i64, #[number] max: i64) -> i64 {
let mut rng = rand::thread_rng();
// Uniform distribution is required to avoid Modulo Bias
// https://en.wikipedia.org/wiki/Fisher–Yates_shuffle#Modulo_bias
let dist = Uniform::from(min..max);
- Ok(dist.sample(&mut rng))
+ dist.sample(&mut rng)
}
#[allow(clippy::too_many_arguments)]
@@ -542,7 +576,7 @@ fn scrypt(
parallelization: u32,
_maxmem: u32,
output_buffer: &mut [u8],
-) -> Result<(), AnyError> {
+) -> Result<(), deno_core::error::AnyError> {
// Construct Params
let params = scrypt::Params::new(
cost as u8,
@@ -573,7 +607,7 @@ pub fn op_node_scrypt_sync(
#[smi] parallelization: u32,
#[smi] maxmem: u32,
#[anybuffer] output_buffer: &mut [u8],
-) -> Result<(), AnyError> {
+) -> Result<(), deno_core::error::AnyError> {
scrypt(
password,
salt,
@@ -586,6 +620,14 @@ pub fn op_node_scrypt_sync(
)
}
+#[derive(Debug, thiserror::Error)]
+pub enum ScryptAsyncError {
+ #[error(transparent)]
+ Join(#[from] tokio::task::JoinError),
+ #[error(transparent)]
+ Other(deno_core::error::AnyError),
+}
+
#[op2(async)]
#[serde]
pub async fn op_node_scrypt_async(
@@ -596,10 +638,11 @@ pub async fn op_node_scrypt_async(
#[smi] block_size: u32,
#[smi] parallelization: u32,
#[smi] maxmem: u32,
-) -> Result<ToJsBuffer, AnyError> {
+) -> Result<ToJsBuffer, ScryptAsyncError> {
spawn_blocking(move || {
let mut output_buffer = vec![0u8; keylen as usize];
- let res = scrypt(
+
+ scrypt(
password,
salt,
keylen,
@@ -608,25 +651,30 @@ pub async fn op_node_scrypt_async(
parallelization,
maxmem,
&mut output_buffer,
- );
-
- if res.is_ok() {
- Ok(output_buffer.into())
- } else {
- // TODO(lev): rethrow the error?
- Err(generic_error("scrypt failure"))
- }
+ )
+ .map(|_| output_buffer.into())
+ .map_err(ScryptAsyncError::Other)
})
.await?
}
+#[derive(Debug, thiserror::Error)]
+pub enum EcdhEncodePubKey {
+ #[error("Invalid public key")]
+ InvalidPublicKey,
+ #[error("Unsupported curve")]
+ UnsupportedCurve,
+ #[error(transparent)]
+ Sec1(#[from] sec1::Error),
+}
+
#[op2]
#[buffer]
pub fn op_node_ecdh_encode_pubkey(
#[string] curve: &str,
#[buffer] pubkey: &[u8],
compress: bool,
-) -> Result<Vec<u8>, AnyError> {
+) -> Result<Vec<u8>, EcdhEncodePubKey> {
use elliptic_curve::sec1::FromEncodedPoint;
match curve {
@@ -639,7 +687,7 @@ pub fn op_node_ecdh_encode_pubkey(
);
// CtOption does not expose its variants.
if pubkey.is_none().into() {
- return Err(type_error("Invalid public key"));
+ return Err(EcdhEncodePubKey::InvalidPublicKey);
}
let pubkey = pubkey.unwrap();
@@ -652,7 +700,7 @@ pub fn op_node_ecdh_encode_pubkey(
);
// CtOption does not expose its variants.
if pubkey.is_none().into() {
- return Err(type_error("Invalid public key"));
+ return Err(EcdhEncodePubKey::InvalidPublicKey);
}
let pubkey = pubkey.unwrap();
@@ -665,7 +713,7 @@ pub fn op_node_ecdh_encode_pubkey(
);
// CtOption does not expose its variants.
if pubkey.is_none().into() {
- return Err(type_error("Invalid public key"));
+ return Err(EcdhEncodePubKey::InvalidPublicKey);
}
let pubkey = pubkey.unwrap();
@@ -678,14 +726,14 @@ pub fn op_node_ecdh_encode_pubkey(
);
// CtOption does not expose its variants.
if pubkey.is_none().into() {
- return Err(type_error("Invalid public key"));
+ return Err(EcdhEncodePubKey::InvalidPublicKey);
}
let pubkey = pubkey.unwrap();
Ok(pubkey.to_encoded_point(compress).as_ref().to_vec())
}
- &_ => Err(type_error("Unsupported curve")),
+ &_ => Err(EcdhEncodePubKey::UnsupportedCurve),
}
}
@@ -695,7 +743,7 @@ pub fn op_node_ecdh_generate_keys(
#[buffer] pubbuf: &mut [u8],
#[buffer] privbuf: &mut [u8],
#[string] format: &str,
-) -> Result<(), AnyError> {
+) -> Result<(), deno_core::error::AnyError> {
let mut rng = rand::thread_rng();
let compress = format == "compressed";
match curve {
@@ -742,7 +790,7 @@ pub fn op_node_ecdh_compute_secret(
#[buffer] this_priv: Option<JsBuffer>,
#[buffer] their_pub: &mut [u8],
#[buffer] secret: &mut [u8],
-) -> Result<(), AnyError> {
+) {
match curve {
"secp256k1" => {
let their_public_key =
@@ -760,8 +808,6 @@ pub fn op_node_ecdh_compute_secret(
their_public_key.as_affine(),
);
secret.copy_from_slice(shared_secret.raw_secret_bytes());
-
- Ok(())
}
"prime256v1" | "secp256r1" => {
let their_public_key =
@@ -776,8 +822,6 @@ pub fn op_node_ecdh_compute_secret(
their_public_key.as_affine(),
);
secret.copy_from_slice(shared_secret.raw_secret_bytes());
-
- Ok(())
}
"secp384r1" => {
let their_public_key =
@@ -792,8 +836,6 @@ pub fn op_node_ecdh_compute_secret(
their_public_key.as_affine(),
);
secret.copy_from_slice(shared_secret.raw_secret_bytes());
-
- Ok(())
}
"secp224r1" => {
let their_public_key =
@@ -808,8 +850,6 @@ pub fn op_node_ecdh_compute_secret(
their_public_key.as_affine(),
);
secret.copy_from_slice(shared_secret.raw_secret_bytes());
-
- Ok(())
}
&_ => todo!(),
}
@@ -820,7 +860,7 @@ pub fn op_node_ecdh_compute_public_key(
#[string] curve: &str,
#[buffer] privkey: &[u8],
#[buffer] pubkey: &mut [u8],
-) -> Result<(), AnyError> {
+) {
match curve {
"secp256k1" => {
let this_private_key =
@@ -828,8 +868,6 @@ pub fn op_node_ecdh_compute_public_key(
.expect("bad private key");
let public_key = this_private_key.public_key();
pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref());
-
- Ok(())
}
"prime256v1" | "secp256r1" => {
let this_private_key =
@@ -837,7 +875,6 @@ pub fn op_node_ecdh_compute_public_key(
.expect("bad private key");
let public_key = this_private_key.public_key();
pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref());
- Ok(())
}
"secp384r1" => {
let this_private_key =
@@ -845,7 +882,6 @@ pub fn op_node_ecdh_compute_public_key(
.expect("bad private key");
let public_key = this_private_key.public_key();
pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref());
- Ok(())
}
"secp224r1" => {
let this_private_key =
@@ -853,7 +889,6 @@ pub fn op_node_ecdh_compute_public_key(
.expect("bad private key");
let public_key = this_private_key.public_key();
pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref());
- Ok(())
}
&_ => todo!(),
}
@@ -874,8 +909,20 @@ pub fn op_node_gen_prime(#[number] size: usize) -> ToJsBuffer {
#[serde]
pub async fn op_node_gen_prime_async(
#[number] size: usize,
-) -> Result<ToJsBuffer, AnyError> {
- Ok(spawn_blocking(move || gen_prime(size)).await?)
+) -> Result<ToJsBuffer, tokio::task::JoinError> {
+ spawn_blocking(move || gen_prime(size)).await
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum DiffieHellmanError {
+ #[error("Expected private key")]
+ ExpectedPrivateKey,
+ #[error("Expected public key")]
+ ExpectedPublicKey,
+ #[error("DH parameters mismatch")]
+ DhParametersMismatch,
+ #[error("Unsupported key type for diffie hellman, or key type mismatch")]
+ UnsupportedKeyTypeForDiffieHellmanOrKeyTypeMismatch,
}
#[op2]
@@ -883,117 +930,134 @@ pub async fn op_node_gen_prime_async(
pub fn op_node_diffie_hellman(
#[cppgc] private: &KeyObjectHandle,
#[cppgc] public: &KeyObjectHandle,
-) -> Result<Box<[u8]>, AnyError> {
+) -> Result<Box<[u8]>, DiffieHellmanError> {
let private = private
.as_private_key()
- .ok_or_else(|| type_error("Expected private key"))?;
+ .ok_or(DiffieHellmanError::ExpectedPrivateKey)?;
let public = public
.as_public_key()
- .ok_or_else(|| type_error("Expected public key"))?;
-
- let res = match (private, &*public) {
- (
- AsymmetricPrivateKey::Ec(EcPrivateKey::P224(private)),
- AsymmetricPublicKey::Ec(EcPublicKey::P224(public)),
- ) => p224::ecdh::diffie_hellman(
- private.to_nonzero_scalar(),
- public.as_affine(),
- )
- .raw_secret_bytes()
- .to_vec()
- .into_boxed_slice(),
- (
- AsymmetricPrivateKey::Ec(EcPrivateKey::P256(private)),
- AsymmetricPublicKey::Ec(EcPublicKey::P256(public)),
- ) => p256::ecdh::diffie_hellman(
- private.to_nonzero_scalar(),
- public.as_affine(),
- )
- .raw_secret_bytes()
- .to_vec()
- .into_boxed_slice(),
- (
- AsymmetricPrivateKey::Ec(EcPrivateKey::P384(private)),
- AsymmetricPublicKey::Ec(EcPublicKey::P384(public)),
- ) => p384::ecdh::diffie_hellman(
- private.to_nonzero_scalar(),
- public.as_affine(),
- )
- .raw_secret_bytes()
- .to_vec()
- .into_boxed_slice(),
- (
- AsymmetricPrivateKey::X25519(private),
- AsymmetricPublicKey::X25519(public),
- ) => private
- .diffie_hellman(public)
- .to_bytes()
- .into_iter()
- .collect(),
- (AsymmetricPrivateKey::Dh(private), AsymmetricPublicKey::Dh(public)) => {
- if private.params.prime != public.params.prime
- || private.params.base != public.params.base
- {
- return Err(type_error("DH parameters mismatch"));
+ .ok_or(DiffieHellmanError::ExpectedPublicKey)?;
+
+ let res =
+ match (private, &*public) {
+ (
+ AsymmetricPrivateKey::Ec(EcPrivateKey::P224(private)),
+ AsymmetricPublicKey::Ec(EcPublicKey::P224(public)),
+ ) => p224::ecdh::diffie_hellman(
+ private.to_nonzero_scalar(),
+ public.as_affine(),
+ )
+ .raw_secret_bytes()
+ .to_vec()
+ .into_boxed_slice(),
+ (
+ AsymmetricPrivateKey::Ec(EcPrivateKey::P256(private)),
+ AsymmetricPublicKey::Ec(EcPublicKey::P256(public)),
+ ) => p256::ecdh::diffie_hellman(
+ private.to_nonzero_scalar(),
+ public.as_affine(),
+ )
+ .raw_secret_bytes()
+ .to_vec()
+ .into_boxed_slice(),
+ (
+ AsymmetricPrivateKey::Ec(EcPrivateKey::P384(private)),
+ AsymmetricPublicKey::Ec(EcPublicKey::P384(public)),
+ ) => p384::ecdh::diffie_hellman(
+ private.to_nonzero_scalar(),
+ public.as_affine(),
+ )
+ .raw_secret_bytes()
+ .to_vec()
+ .into_boxed_slice(),
+ (
+ AsymmetricPrivateKey::X25519(private),
+ AsymmetricPublicKey::X25519(public),
+ ) => private
+ .diffie_hellman(public)
+ .to_bytes()
+ .into_iter()
+ .collect(),
+ (AsymmetricPrivateKey::Dh(private), AsymmetricPublicKey::Dh(public)) => {
+ if private.params.prime != public.params.prime
+ || private.params.base != public.params.base
+ {
+ return Err(DiffieHellmanError::DhParametersMismatch);
+ }
+
+ // OSIP - Octet-String-to-Integer primitive
+ let public_key = public.key.clone().into_vec();
+ let pubkey = BigUint::from_bytes_be(&public_key);
+
+ // Exponentiation (z = y^x mod p)
+ let prime = BigUint::from_bytes_be(private.params.prime.as_bytes());
+ let private_key = private.key.clone().into_vec();
+ let private_key = BigUint::from_bytes_be(&private_key);
+ let shared_secret = pubkey.modpow(&private_key, &prime);
+
+ shared_secret.to_bytes_be().into()
}
-
- // OSIP - Octet-String-to-Integer primitive
- let public_key = public.key.clone().into_vec();
- let pubkey = BigUint::from_bytes_be(&public_key);
-
- // Exponentiation (z = y^x mod p)
- let prime = BigUint::from_bytes_be(private.params.prime.as_bytes());
- let private_key = private.key.clone().into_vec();
- let private_key = BigUint::from_bytes_be(&private_key);
- let shared_secret = pubkey.modpow(&private_key, &prime);
-
- shared_secret.to_bytes_be().into()
- }
- _ => {
- return Err(type_error(
- "Unsupported key type for diffie hellman, or key type mismatch",
- ))
- }
- };
+ _ => return Err(
+ DiffieHellmanError::UnsupportedKeyTypeForDiffieHellmanOrKeyTypeMismatch,
+ ),
+ };
Ok(res)
}
+#[derive(Debug, thiserror::Error)]
+pub enum SignEd25519Error {
+ #[error("Expected private key")]
+ ExpectedPrivateKey,
+ #[error("Expected Ed25519 private key")]
+ ExpectedEd25519PrivateKey,
+ #[error("Invalid Ed25519 private key")]
+ InvalidEd25519PrivateKey,
+}
+
#[op2(fast)]
pub fn op_node_sign_ed25519(
#[cppgc] key: &KeyObjectHandle,
#[buffer] data: &[u8],
#[buffer] signature: &mut [u8],
-) -> Result<(), AnyError> {
+) -> Result<(), SignEd25519Error> {
let private = key
.as_private_key()
- .ok_or_else(|| type_error("Expected private key"))?;
+ .ok_or(SignEd25519Error::ExpectedPrivateKey)?;
let ed25519 = match private {
AsymmetricPrivateKey::Ed25519(private) => private,
- _ => return Err(type_error("Expected Ed25519 private key")),
+ _ => return Err(SignEd25519Error::ExpectedEd25519PrivateKey),
};
let pair = Ed25519KeyPair::from_seed_unchecked(ed25519.as_bytes().as_slice())
- .map_err(|_| type_error("Invalid Ed25519 private key"))?;
+ .map_err(|_| SignEd25519Error::InvalidEd25519PrivateKey)?;
signature.copy_from_slice(pair.sign(data).as_ref());
Ok(())
}
+#[derive(Debug, thiserror::Error)]
+pub enum VerifyEd25519Error {
+ #[error("Expected public key")]
+ ExpectedPublicKey,
+ #[error("Expected Ed25519 public key")]
+ ExpectedEd25519PublicKey,
+}
+
#[op2(fast)]
pub fn op_node_verify_ed25519(
#[cppgc] key: &KeyObjectHandle,
#[buffer] data: &[u8],
#[buffer] signature: &[u8],
-) -> Result<bool, AnyError> {
+) -> Result<bool, VerifyEd25519Error> {
let public = key
.as_public_key()
- .ok_or_else(|| type_error("Expected public key"))?;
+ .ok_or(VerifyEd25519Error::ExpectedPublicKey)?;
let ed25519 = match &*public {
AsymmetricPublicKey::Ed25519(public) => public,
- _ => return Err(type_error("Expected Ed25519 public key")),
+ _ => return Err(VerifyEd25519Error::ExpectedEd25519PublicKey),
};
let verified = ring::signature::UnparsedPublicKey::new(
diff --git a/ext/node/ops/crypto/sign.rs b/ext/node/ops/crypto/sign.rs
index b7779a5d8..30094c076 100644
--- a/ext/node/ops/crypto/sign.rs
+++ b/ext/node/ops/crypto/sign.rs
@@ -1,7 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-use deno_core::error::generic_error;
-use deno_core::error::type_error;
-use deno_core::error::AnyError;
use rand::rngs::OsRng;
use rsa::signature::hazmat::PrehashSigner as _;
use rsa::signature::hazmat::PrehashVerifier as _;
@@ -26,7 +23,7 @@ use elliptic_curve::FieldBytesSize;
fn dsa_signature<C: elliptic_curve::PrimeCurve>(
encoding: u32,
signature: ecdsa::Signature<C>,
-) -> Result<Box<[u8]>, AnyError>
+) -> Result<Box<[u8]>, KeyObjectHandlePrehashedSignAndVerifyError>
where
MaxSize<C>: ArrayLength<u8>,
<FieldBytesSize<C> as Add>::Output: Add<MaxOverhead> + ArrayLength<u8>,
@@ -36,10 +33,54 @@ where
0 => Ok(signature.to_der().to_bytes().to_vec().into_boxed_slice()),
// IEEE P1363
1 => Ok(signature.to_bytes().to_vec().into_boxed_slice()),
- _ => Err(type_error("invalid DSA signature encoding")),
+ _ => Err(
+ KeyObjectHandlePrehashedSignAndVerifyError::InvalidDsaSignatureEncoding,
+ ),
}
}
+#[derive(Debug, thiserror::Error)]
+pub enum KeyObjectHandlePrehashedSignAndVerifyError {
+ #[error("invalid DSA signature encoding")]
+ InvalidDsaSignatureEncoding,
+ #[error("key is not a private key")]
+ KeyIsNotPrivate,
+ #[error("digest not allowed for RSA signature: {0}")]
+ DigestNotAllowedForRsaSignature(String),
+ #[error("failed to sign digest with RSA")]
+ FailedToSignDigestWithRsa,
+ #[error("digest not allowed for RSA-PSS signature: {0}")]
+ DigestNotAllowedForRsaPssSignature(String),
+ #[error("failed to sign digest with RSA-PSS")]
+ FailedToSignDigestWithRsaPss,
+ #[error("failed to sign digest with DSA")]
+ FailedToSignDigestWithDsa,
+ #[error("rsa-pss with different mf1 hash algorithm and hash algorithm is not supported")]
+ RsaPssHashAlgorithmUnsupported,
+ #[error(
+ "private key does not allow {actual} to be used, expected {expected}"
+ )]
+ PrivateKeyDisallowsUsage { actual: String, expected: String },
+ #[error("failed to sign digest")]
+ FailedToSignDigest,
+ #[error("x25519 key cannot be used for signing")]
+ X25519KeyCannotBeUsedForSigning,
+ #[error("Ed25519 key cannot be used for prehashed signing")]
+ Ed25519KeyCannotBeUsedForPrehashedSigning,
+ #[error("DH key cannot be used for signing")]
+ DhKeyCannotBeUsedForSigning,
+ #[error("key is not a public or private key")]
+ KeyIsNotPublicOrPrivate,
+ #[error("Invalid DSA signature")]
+ InvalidDsaSignature,
+ #[error("x25519 key cannot be used for verification")]
+ X25519KeyCannotBeUsedForVerification,
+ #[error("Ed25519 key cannot be used for prehashed verification")]
+ Ed25519KeyCannotBeUsedForPrehashedVerification,
+ #[error("DH key cannot be used for verification")]
+ DhKeyCannotBeUsedForVerification,
+}
+
impl KeyObjectHandle {
pub fn sign_prehashed(
&self,
@@ -47,10 +88,10 @@ impl KeyObjectHandle {
digest: &[u8],
pss_salt_length: Option<u32>,
dsa_signature_encoding: u32,
- ) -> Result<Box<[u8]>, AnyError> {
+ ) -> Result<Box<[u8]>, KeyObjectHandlePrehashedSignAndVerifyError> {
let private_key = self
.as_private_key()
- .ok_or_else(|| type_error("key is not a private key"))?;
+ .ok_or(KeyObjectHandlePrehashedSignAndVerifyError::KeyIsNotPrivate)?;
match private_key {
AsymmetricPrivateKey::Rsa(key) => {
@@ -63,34 +104,26 @@ impl KeyObjectHandle {
rsa::pkcs1v15::Pkcs1v15Sign::new::<D>()
},
_ => {
- return Err(type_error(format!(
- "digest not allowed for RSA signature: {}",
- digest_type
- )))
+ return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaSignature(digest_type.to_string()))
}
)
};
let signature = signer
.sign(Some(&mut OsRng), key, digest)
- .map_err(|_| generic_error("failed to sign digest with RSA"))?;
+ .map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithRsa)?;
Ok(signature.into())
}
AsymmetricPrivateKey::RsaPss(key) => {
let mut hash_algorithm = None;
let mut salt_length = None;
- match &key.details {
- Some(details) => {
- if details.hash_algorithm != details.mf1_hash_algorithm {
- return Err(type_error(
- "rsa-pss with different mf1 hash algorithm and hash algorithm is not supported",
- ));
- }
- hash_algorithm = Some(details.hash_algorithm);
- salt_length = Some(details.salt_length as usize);
+ if let Some(details) = &key.details {
+ if details.hash_algorithm != details.mf1_hash_algorithm {
+ return Err(KeyObjectHandlePrehashedSignAndVerifyError::RsaPssHashAlgorithmUnsupported);
}
- None => {}
- };
+ hash_algorithm = Some(details.hash_algorithm);
+ salt_length = Some(details.salt_length as usize);
+ }
if let Some(s) = pss_salt_length {
salt_length = Some(s as usize);
}
@@ -99,10 +132,10 @@ impl KeyObjectHandle {
fn <D>(algorithm: Option<RsaPssHashAlgorithm>) {
if let Some(hash_algorithm) = hash_algorithm.take() {
if Some(hash_algorithm) != algorithm {
- return Err(type_error(format!(
- "private key does not allow {} to be used, expected {}",
- digest_type, hash_algorithm.as_str()
- )));
+ return Err(KeyObjectHandlePrehashedSignAndVerifyError::PrivateKeyDisallowsUsage {
+ actual: digest_type.to_string(),
+ expected: hash_algorithm.as_str().to_string(),
+ });
}
}
if let Some(salt_length) = salt_length {
@@ -112,15 +145,12 @@ impl KeyObjectHandle {
}
},
_ => {
- return Err(type_error(format!(
- "digest not allowed for RSA-PSS signature: {}",
- digest_type
- )))
+ return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaPssSignature(digest_type.to_string()));
}
);
let signature = pss
.sign(Some(&mut OsRng), &key.key, digest)
- .map_err(|_| generic_error("failed to sign digest with RSA-PSS"))?;
+ .map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithRsaPss)?;
Ok(signature.into())
}
AsymmetricPrivateKey::Dsa(key) => {
@@ -130,15 +160,12 @@ impl KeyObjectHandle {
key.sign_prehashed_rfc6979::<D>(digest)
},
_ => {
- return Err(type_error(format!(
- "digest not allowed for RSA signature: {}",
- digest_type
- )))
+ return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaSignature(digest_type.to_string()))
}
);
let signature =
- res.map_err(|_| generic_error("failed to sign digest with DSA"))?;
+ res.map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithDsa)?;
Ok(signature.into())
}
AsymmetricPrivateKey::Ec(key) => match key {
@@ -146,7 +173,7 @@ impl KeyObjectHandle {
let signing_key = p224::ecdsa::SigningKey::from(key);
let signature: p224::ecdsa::Signature = signing_key
.sign_prehash(digest)
- .map_err(|_| type_error("failed to sign digest"))?;
+ .map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigest)?;
dsa_signature(dsa_signature_encoding, signature)
}
@@ -154,7 +181,7 @@ impl KeyObjectHandle {
let signing_key = p256::ecdsa::SigningKey::from(key);
let signature: p256::ecdsa::Signature = signing_key
.sign_prehash(digest)
- .map_err(|_| type_error("failed to sign digest"))?;
+ .map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigest)?;
dsa_signature(dsa_signature_encoding, signature)
}
@@ -162,19 +189,17 @@ impl KeyObjectHandle {
let signing_key = p384::ecdsa::SigningKey::from(key);
let signature: p384::ecdsa::Signature = signing_key
.sign_prehash(digest)
- .map_err(|_| type_error("failed to sign digest"))?;
+ .map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigest)?;
dsa_signature(dsa_signature_encoding, signature)
}
},
AsymmetricPrivateKey::X25519(_) => {
- Err(type_error("x25519 key cannot be used for signing"))
+ Err(KeyObjectHandlePrehashedSignAndVerifyError::X25519KeyCannotBeUsedForSigning)
}
- AsymmetricPrivateKey::Ed25519(_) => Err(type_error(
- "Ed25519 key cannot be used for prehashed signing",
- )),
+ AsymmetricPrivateKey::Ed25519(_) => Err(KeyObjectHandlePrehashedSignAndVerifyError::Ed25519KeyCannotBeUsedForPrehashedSigning),
AsymmetricPrivateKey::Dh(_) => {
- Err(type_error("DH key cannot be used for signing"))
+ Err(KeyObjectHandlePrehashedSignAndVerifyError::DhKeyCannotBeUsedForSigning)
}
}
}
@@ -186,10 +211,10 @@ impl KeyObjectHandle {
signature: &[u8],
pss_salt_length: Option<u32>,
dsa_signature_encoding: u32,
- ) -> Result<bool, AnyError> {
- let public_key = self
- .as_public_key()
- .ok_or_else(|| type_error("key is not a public or private key"))?;
+ ) -> Result<bool, KeyObjectHandlePrehashedSignAndVerifyError> {
+ let public_key = self.as_public_key().ok_or(
+ KeyObjectHandlePrehashedSignAndVerifyError::KeyIsNotPublicOrPrivate,
+ )?;
match &*public_key {
AsymmetricPublicKey::Rsa(key) => {
@@ -202,10 +227,7 @@ impl KeyObjectHandle {
rsa::pkcs1v15::Pkcs1v15Sign::new::<D>()
},
_ => {
- return Err(type_error(format!(
- "digest not allowed for RSA signature: {}",
- digest_type
- )))
+ return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaSignature(digest_type.to_string()))
}
)
};
@@ -215,18 +237,13 @@ impl KeyObjectHandle {
AsymmetricPublicKey::RsaPss(key) => {
let mut hash_algorithm = None;
let mut salt_length = None;
- match &key.details {
- Some(details) => {
- if details.hash_algorithm != details.mf1_hash_algorithm {
- return Err(type_error(
- "rsa-pss with different mf1 hash algorithm and hash algorithm is not supported",
- ));
- }
- hash_algorithm = Some(details.hash_algorithm);
- salt_length = Some(details.salt_length as usize);
+ if let Some(details) = &key.details {
+ if details.hash_algorithm != details.mf1_hash_algorithm {
+ return Err(KeyObjectHandlePrehashedSignAndVerifyError::RsaPssHashAlgorithmUnsupported);
}
- None => {}
- };
+ hash_algorithm = Some(details.hash_algorithm);
+ salt_length = Some(details.salt_length as usize);
+ }
if let Some(s) = pss_salt_length {
salt_length = Some(s as usize);
}
@@ -235,10 +252,10 @@ impl KeyObjectHandle {
fn <D>(algorithm: Option<RsaPssHashAlgorithm>) {
if let Some(hash_algorithm) = hash_algorithm.take() {
if Some(hash_algorithm) != algorithm {
- return Err(type_error(format!(
- "private key does not allow {} to be used, expected {}",
- digest_type, hash_algorithm.as_str()
- )));
+ return Err(KeyObjectHandlePrehashedSignAndVerifyError::PrivateKeyDisallowsUsage {
+ actual: digest_type.to_string(),
+ expected: hash_algorithm.as_str().to_string(),
+ });
}
}
if let Some(salt_length) = salt_length {
@@ -248,17 +265,14 @@ impl KeyObjectHandle {
}
},
_ => {
- return Err(type_error(format!(
- "digest not allowed for RSA-PSS signature: {}",
- digest_type
- )))
+ return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaPssSignature(digest_type.to_string()));
}
);
Ok(pss.verify(&key.key, digest, signature).is_ok())
}
AsymmetricPublicKey::Dsa(key) => {
let signature = dsa::Signature::from_der(signature)
- .map_err(|_| type_error("Invalid DSA signature"))?;
+ .map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::InvalidDsaSignature)?;
Ok(key.verify_prehash(digest, &signature).is_ok())
}
AsymmetricPublicKey::Ec(key) => match key {
@@ -300,13 +314,11 @@ impl KeyObjectHandle {
}
},
AsymmetricPublicKey::X25519(_) => {
- Err(type_error("x25519 key cannot be used for verification"))
+ Err(KeyObjectHandlePrehashedSignAndVerifyError::X25519KeyCannotBeUsedForVerification)
}
- AsymmetricPublicKey::Ed25519(_) => Err(type_error(
- "Ed25519 key cannot be used for prehashed verification",
- )),
+ AsymmetricPublicKey::Ed25519(_) => Err(KeyObjectHandlePrehashedSignAndVerifyError::Ed25519KeyCannotBeUsedForPrehashedVerification),
AsymmetricPublicKey::Dh(_) => {
- Err(type_error("DH key cannot be used for verification"))
+ Err(KeyObjectHandlePrehashedSignAndVerifyError::DhKeyCannotBeUsedForVerification)
}
}
}
diff --git a/ext/node/ops/crypto/x509.rs b/ext/node/ops/crypto/x509.rs
index b44ff3a4b..ab8e52f70 100644
--- a/ext/node/ops/crypto/x509.rs
+++ b/ext/node/ops/crypto/x509.rs
@@ -1,11 +1,11 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-use deno_core::error::AnyError;
use deno_core::op2;
use x509_parser::der_parser::asn1_rs::Any;
use x509_parser::der_parser::asn1_rs::Tag;
use x509_parser::der_parser::oid::Oid;
+pub use x509_parser::error::X509Error;
use x509_parser::extensions;
use x509_parser::pem;
use x509_parser::prelude::*;
@@ -65,7 +65,7 @@ impl<'a> Deref for CertificateView<'a> {
#[cppgc]
pub fn op_node_x509_parse(
#[buffer] buf: &[u8],
-) -> Result<Certificate, AnyError> {
+) -> Result<Certificate, X509Error> {
let source = match pem::parse_x509_pem(buf) {
Ok((_, pem)) => CertificateSources::Pem(pem),
Err(_) => CertificateSources::Der(buf.to_vec().into_boxed_slice()),
@@ -81,7 +81,7 @@ pub fn op_node_x509_parse(
X509Certificate::from_der(buf).map(|(_, cert)| cert)?
}
};
- Ok::<_, AnyError>(CertificateView { cert })
+ Ok::<_, X509Error>(CertificateView { cert })
},
)?;
@@ -89,23 +89,23 @@ pub fn op_node_x509_parse(
}
#[op2(fast)]
-pub fn op_node_x509_ca(#[cppgc] cert: &Certificate) -> Result<bool, AnyError> {
+pub fn op_node_x509_ca(#[cppgc] cert: &Certificate) -> bool {
let cert = cert.inner.get().deref();
- Ok(cert.is_ca())
+ cert.is_ca()
}
#[op2(fast)]
pub fn op_node_x509_check_email(
#[cppgc] cert: &Certificate,
#[string] email: &str,
-) -> Result<bool, AnyError> {
+) -> bool {
let cert = cert.inner.get().deref();
let subject = cert.subject();
if subject
.iter_email()
.any(|e| e.as_str().unwrap_or("") == email)
{
- return Ok(true);
+ return true;
}
let subject_alt = cert
@@ -121,62 +121,60 @@ pub fn op_node_x509_check_email(
for name in &subject_alt.general_names {
if let extensions::GeneralName::RFC822Name(n) = name {
if *n == email {
- return Ok(true);
+ return true;
}
}
}
}
- Ok(false)
+ false
}
#[op2]
#[string]
-pub fn op_node_x509_fingerprint(
- #[cppgc] cert: &Certificate,
-) -> Result<Option<String>, AnyError> {
- Ok(cert.fingerprint::<sha1::Sha1>())
+pub fn op_node_x509_fingerprint(#[cppgc] cert: &Certificate) -> Option<String> {
+ cert.fingerprint::<sha1::Sha1>()
}
#[op2]
#[string]
pub fn op_node_x509_fingerprint256(
#[cppgc] cert: &Certificate,
-) -> Result<Option<String>, AnyError> {
- Ok(cert.fingerprint::<sha2::Sha256>())
+) -> Option<String> {
+ cert.fingerprint::<sha2::Sha256>()
}
#[op2]
#[string]
pub fn op_node_x509_fingerprint512(
#[cppgc] cert: &Certificate,
-) -> Result<Option<String>, AnyError> {
- Ok(cert.fingerprint::<sha2::Sha512>())
+) -> Option<String> {
+ cert.fingerprint::<sha2::Sha512>()
}
#[op2]
#[string]
pub fn op_node_x509_get_issuer(
#[cppgc] cert: &Certificate,
-) -> Result<String, AnyError> {
+) -> Result<String, X509Error> {
let cert = cert.inner.get().deref();
- Ok(x509name_to_string(cert.issuer(), oid_registry())?)
+ x509name_to_string(cert.issuer(), oid_registry())
}
#[op2]
#[string]
pub fn op_node_x509_get_subject(
#[cppgc] cert: &Certificate,
-) -> Result<String, AnyError> {
+) -> Result<String, X509Error> {
let cert = cert.inner.get().deref();
- Ok(x509name_to_string(cert.subject(), oid_registry())?)
+ x509name_to_string(cert.subject(), oid_registry())
}
#[op2]
#[cppgc]
pub fn op_node_x509_public_key(
#[cppgc] cert: &Certificate,
-) -> Result<KeyObjectHandle, AnyError> {
+) -> Result<KeyObjectHandle, super::keys::X509PublicKeyError> {
let cert = cert.inner.get().deref();
let public_key = &cert.tbs_certificate.subject_pki;
@@ -245,37 +243,29 @@ fn x509name_to_string(
#[op2]
#[string]
-pub fn op_node_x509_get_valid_from(
- #[cppgc] cert: &Certificate,
-) -> Result<String, AnyError> {
+pub fn op_node_x509_get_valid_from(#[cppgc] cert: &Certificate) -> String {
let cert = cert.inner.get().deref();
- Ok(cert.validity().not_before.to_string())
+ cert.validity().not_before.to_string()
}
#[op2]
#[string]
-pub fn op_node_x509_get_valid_to(
- #[cppgc] cert: &Certificate,
-) -> Result<String, AnyError> {
+pub fn op_node_x509_get_valid_to(#[cppgc] cert: &Certificate) -> String {
let cert = cert.inner.get().deref();
- Ok(cert.validity().not_after.to_string())
+ cert.validity().not_after.to_string()
}
#[op2]
#[string]
-pub fn op_node_x509_get_serial_number(
- #[cppgc] cert: &Certificate,
-) -> Result<String, AnyError> {
+pub fn op_node_x509_get_serial_number(#[cppgc] cert: &Certificate) -> String {
let cert = cert.inner.get().deref();
let mut s = cert.serial.to_str_radix(16);
s.make_ascii_uppercase();
- Ok(s)
+ s
}
#[op2(fast)]
-pub fn op_node_x509_key_usage(
- #[cppgc] cert: &Certificate,
-) -> Result<u16, AnyError> {
+pub fn op_node_x509_key_usage(#[cppgc] cert: &Certificate) -> u16 {
let cert = cert.inner.get().deref();
let key_usage = cert
.extensions()
@@ -286,5 +276,5 @@ pub fn op_node_x509_key_usage(
_ => None,
});
- Ok(key_usage.map(|k| k.flags).unwrap_or(0))
+ key_usage.map(|k| k.flags).unwrap_or(0)
}
diff --git a/ext/node/ops/fs.rs b/ext/node/ops/fs.rs
index 6253f32d0..9c0e4e1cc 100644
--- a/ext/node/ops/fs.rs
+++ b/ext/node/ops/fs.rs
@@ -3,7 +3,6 @@
use std::cell::RefCell;
use std::rc::Rc;
-use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::OpState;
use deno_fs::FileSystemRc;
@@ -11,11 +10,27 @@ use serde::Serialize;
use crate::NodePermissions;
+#[derive(Debug, thiserror::Error)]
+pub enum FsError {
+ #[error(transparent)]
+ Permission(#[from] deno_permissions::PermissionCheckError),
+ #[error("{0}")]
+ Io(#[from] std::io::Error),
+ #[cfg(windows)]
+ #[error("Path has no root.")]
+ PathHasNoRoot,
+ #[cfg(not(any(unix, windows)))]
+ #[error("Unsupported platform.")]
+ UnsupportedPlatform,
+ #[error(transparent)]
+ Fs(#[from] deno_io::fs::FsError),
+}
+
#[op2(fast)]
pub fn op_node_fs_exists_sync<P>(
state: &mut OpState,
#[string] path: String,
-) -> Result<bool, AnyError>
+) -> Result<bool, deno_core::error::AnyError>
where
P: NodePermissions + 'static,
{
@@ -30,7 +45,7 @@ where
pub async fn op_node_fs_exists<P>(
state: Rc<RefCell<OpState>>,
#[string] path: String,
-) -> Result<bool, AnyError>
+) -> Result<bool, FsError>
where
P: NodePermissions + 'static,
{
@@ -50,7 +65,7 @@ pub fn op_node_cp_sync<P>(
state: &mut OpState,
#[string] path: &str,
#[string] new_path: &str,
-) -> Result<(), AnyError>
+) -> Result<(), FsError>
where
P: NodePermissions + 'static,
{
@@ -71,7 +86,7 @@ pub async fn op_node_cp<P>(
state: Rc<RefCell<OpState>>,
#[string] path: String,
#[string] new_path: String,
-) -> Result<(), AnyError>
+) -> Result<(), FsError>
where
P: NodePermissions + 'static,
{
@@ -108,7 +123,7 @@ pub fn op_node_statfs<P>(
state: Rc<RefCell<OpState>>,
#[string] path: String,
bigint: bool,
-) -> Result<StatFs, AnyError>
+) -> Result<StatFs, FsError>
where
P: NodePermissions + 'static,
{
@@ -130,13 +145,21 @@ where
let mut cpath = path.as_bytes().to_vec();
cpath.push(0);
if bigint {
- #[cfg(not(target_os = "macos"))]
+ #[cfg(not(any(
+ target_os = "macos",
+ target_os = "freebsd",
+ target_os = "openbsd"
+ )))]
// SAFETY: `cpath` is NUL-terminated and result is pointer to valid statfs memory.
let (code, result) = unsafe {
let mut result: libc::statfs64 = std::mem::zeroed();
(libc::statfs64(cpath.as_ptr() as _, &mut result), result)
};
- #[cfg(target_os = "macos")]
+ #[cfg(any(
+ target_os = "macos",
+ target_os = "freebsd",
+ target_os = "openbsd"
+ ))]
// SAFETY: `cpath` is NUL-terminated and result is pointer to valid statfs memory.
let (code, result) = unsafe {
let mut result: libc::statfs = std::mem::zeroed();
@@ -146,7 +169,10 @@ where
return Err(std::io::Error::last_os_error().into());
}
Ok(StatFs {
+ #[cfg(not(target_os = "openbsd"))]
typ: result.f_type as _,
+ #[cfg(target_os = "openbsd")]
+ typ: 0 as _,
bsize: result.f_bsize as _,
blocks: result.f_blocks as _,
bfree: result.f_bfree as _,
@@ -164,7 +190,10 @@ where
return Err(std::io::Error::last_os_error().into());
}
Ok(StatFs {
+ #[cfg(not(target_os = "openbsd"))]
typ: result.f_type as _,
+ #[cfg(target_os = "openbsd")]
+ typ: 0 as _,
bsize: result.f_bsize as _,
blocks: result.f_blocks as _,
bfree: result.f_bfree as _,
@@ -176,7 +205,6 @@ where
}
#[cfg(windows)]
{
- use deno_core::anyhow::anyhow;
use std::ffi::OsStr;
use std::os::windows::ffi::OsStrExt;
use windows_sys::Win32::Storage::FileSystem::GetDiskFreeSpaceW;
@@ -186,10 +214,7 @@ where
// call below.
#[allow(clippy::disallowed_methods)]
let path = path.canonicalize()?;
- let root = path
- .ancestors()
- .last()
- .ok_or(anyhow!("Path has no root."))?;
+ let root = path.ancestors().last().ok_or(FsError::PathHasNoRoot)?;
let mut root = OsStr::new(root).encode_wide().collect::<Vec<_>>();
root.push(0);
let mut sectors_per_cluster = 0;
@@ -229,7 +254,7 @@ where
{
let _ = path;
let _ = bigint;
- Err(anyhow!("Unsupported platform."))
+ Err(FsError::UnsupportedPlatform)
}
}
@@ -241,7 +266,7 @@ pub fn op_node_lutimes_sync<P>(
#[smi] atime_nanos: u32,
#[number] mtime_secs: i64,
#[smi] mtime_nanos: u32,
-) -> Result<(), AnyError>
+) -> Result<(), FsError>
where
P: NodePermissions + 'static,
{
@@ -262,7 +287,7 @@ pub async fn op_node_lutimes<P>(
#[smi] atime_nanos: u32,
#[number] mtime_secs: i64,
#[smi] mtime_nanos: u32,
-) -> Result<(), AnyError>
+) -> Result<(), FsError>
where
P: NodePermissions + 'static,
{
@@ -286,7 +311,7 @@ pub fn op_node_lchown_sync<P>(
#[string] path: String,
uid: Option<u32>,
gid: Option<u32>,
-) -> Result<(), AnyError>
+) -> Result<(), FsError>
where
P: NodePermissions + 'static,
{
@@ -304,7 +329,7 @@ pub async fn op_node_lchown<P>(
#[string] path: String,
uid: Option<u32>,
gid: Option<u32>,
-) -> Result<(), AnyError>
+) -> Result<(), FsError>
where
P: NodePermissions + 'static,
{
diff --git a/ext/node/ops/http.rs b/ext/node/ops/http.rs
index 773902ded..69571078f 100644
--- a/ext/node/ops/http.rs
+++ b/ext/node/ops/http.rs
@@ -8,14 +8,12 @@ use std::task::Context;
use std::task::Poll;
use bytes::Bytes;
-use deno_core::anyhow;
-use deno_core::error::type_error;
-use deno_core::error::AnyError;
use deno_core::futures::stream::Peekable;
use deno_core::futures::Future;
use deno_core::futures::FutureExt;
use deno_core::futures::Stream;
use deno_core::futures::StreamExt;
+use deno_core::futures::TryFutureExt;
use deno_core::op2;
use deno_core::serde::Serialize;
use deno_core::unsync::spawn;
@@ -33,6 +31,7 @@ use deno_core::Resource;
use deno_core::ResourceId;
use deno_fetch::get_or_create_client_from_state;
use deno_fetch::FetchCancelHandle;
+use deno_fetch::FetchError;
use deno_fetch::FetchRequestResource;
use deno_fetch::FetchReturn;
use deno_fetch::HttpClientResource;
@@ -59,12 +58,15 @@ pub fn op_node_http_request<P>(
#[serde] headers: Vec<(ByteString, ByteString)>,
#[smi] client_rid: Option<u32>,
#[smi] body: Option<ResourceId>,
-) -> Result<FetchReturn, AnyError>
+) -> Result<FetchReturn, FetchError>
where
P: crate::NodePermissions + 'static,
{
let client = if let Some(rid) = client_rid {
- let r = state.resource_table.get::<HttpClientResource>(rid)?;
+ let r = state
+ .resource_table
+ .get::<HttpClientResource>(rid)
+ .map_err(FetchError::Resource)?;
r.client.clone()
} else {
get_or_create_client_from_state(state)?
@@ -81,10 +83,8 @@ where
let mut header_map = HeaderMap::new();
for (key, value) in headers {
- let name = HeaderName::from_bytes(&key)
- .map_err(|err| type_error(err.to_string()))?;
- let v = HeaderValue::from_bytes(&value)
- .map_err(|err| type_error(err.to_string()))?;
+ let name = HeaderName::from_bytes(&key)?;
+ let v = HeaderValue::from_bytes(&value)?;
header_map.append(name, v);
}
@@ -92,7 +92,10 @@ where
let (body, con_len) = if let Some(body) = body {
(
BodyExt::boxed(NodeHttpResourceToBodyAdapter::new(
- state.resource_table.take_any(body)?,
+ state
+ .resource_table
+ .take_any(body)
+ .map_err(FetchError::Resource)?,
)),
None,
)
@@ -117,7 +120,7 @@ where
*request.uri_mut() = url
.as_str()
.parse()
- .map_err(|_| type_error("Invalid URL"))?;
+ .map_err(|_| FetchError::InvalidUrl(url.clone()))?;
*request.headers_mut() = header_map;
if let Some((username, password)) = maybe_authority {
@@ -136,9 +139,9 @@ where
let fut = async move {
client
.send(request)
+ .map_err(Into::into)
.or_cancel(cancel_handle_)
.await
- .map(|res| res.map_err(|err| type_error(err.to_string())))
};
let request_rid = state.resource_table.add(FetchRequestResource {
@@ -174,11 +177,12 @@ pub struct NodeHttpFetchResponse {
pub async fn op_node_http_fetch_send(
state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId,
-) -> Result<NodeHttpFetchResponse, AnyError> {
+) -> Result<NodeHttpFetchResponse, FetchError> {
let request = state
.borrow_mut()
.resource_table
- .take::<FetchRequestResource>(rid)?;
+ .take::<FetchRequestResource>(rid)
+ .map_err(FetchError::Resource)?;
let request = Rc::try_unwrap(request)
.ok()
@@ -191,22 +195,23 @@ pub async fn op_node_http_fetch_send(
// If any error in the chain is a hyper body error, return that as a special result we can use to
// reconstruct an error chain (eg: `new TypeError(..., { cause: new Error(...) })`).
// TODO(mmastrac): it would be a lot easier if we just passed a v8::Global through here instead
- let mut err_ref: &dyn std::error::Error = err.as_ref();
- while let Some(err) = std::error::Error::source(err_ref) {
- if let Some(err) = err.downcast_ref::<hyper::Error>() {
- if let Some(err) = std::error::Error::source(err) {
- return Ok(NodeHttpFetchResponse {
- error: Some(err.to_string()),
- ..Default::default()
- });
+
+ if let FetchError::ClientSend(err_src) = &err {
+ if let Some(client_err) = std::error::Error::source(&err_src.source) {
+ if let Some(err_src) = client_err.downcast_ref::<hyper::Error>() {
+ if let Some(err_src) = std::error::Error::source(err_src) {
+ return Ok(NodeHttpFetchResponse {
+ error: Some(err_src.to_string()),
+ ..Default::default()
+ });
+ }
}
}
- err_ref = err;
}
- return Err(type_error(err.to_string()));
+ return Err(err);
}
- Err(_) => return Err(type_error("request was cancelled")),
+ Err(_) => return Err(FetchError::RequestCanceled),
};
let status = res.status();
@@ -250,11 +255,12 @@ pub async fn op_node_http_fetch_send(
pub async fn op_node_http_fetch_response_upgrade(
state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId,
-) -> Result<ResourceId, AnyError> {
+) -> Result<ResourceId, FetchError> {
let raw_response = state
.borrow_mut()
.resource_table
- .take::<NodeHttpFetchResponseResource>(rid)?;
+ .take::<NodeHttpFetchResponseResource>(rid)
+ .map_err(FetchError::Resource)?;
let raw_response = Rc::try_unwrap(raw_response)
.expect("Someone is holding onto NodeHttpFetchResponseResource");
@@ -277,7 +283,7 @@ pub async fn op_node_http_fetch_response_upgrade(
}
read_tx.write_all(&buf[..read]).await?;
}
- Ok::<_, AnyError>(())
+ Ok::<_, FetchError>(())
});
spawn(async move {
let mut buf = [0; 1024];
@@ -288,7 +294,7 @@ pub async fn op_node_http_fetch_response_upgrade(
}
upgraded_tx.write_all(&buf[..read]).await?;
}
- Ok::<_, AnyError>(())
+ Ok::<_, FetchError>(())
});
}
@@ -318,23 +324,26 @@ impl UpgradeStream {
}
}
- async fn read(self: Rc<Self>, buf: &mut [u8]) -> Result<usize, AnyError> {
+ async fn read(
+ self: Rc<Self>,
+ buf: &mut [u8],
+ ) -> Result<usize, std::io::Error> {
let cancel_handle = RcRef::map(self.clone(), |this| &this.cancel_handle);
async {
let read = RcRef::map(self, |this| &this.read);
let mut read = read.borrow_mut().await;
- Ok(Pin::new(&mut *read).read(buf).await?)
+ Pin::new(&mut *read).read(buf).await
}
.try_or_cancel(cancel_handle)
.await
}
- async fn write(self: Rc<Self>, buf: &[u8]) -> Result<usize, AnyError> {
+ async fn write(self: Rc<Self>, buf: &[u8]) -> Result<usize, std::io::Error> {
let cancel_handle = RcRef::map(self.clone(), |this| &this.cancel_handle);
async {
let write = RcRef::map(self, |this| &this.write);
let mut write = write.borrow_mut().await;
- Ok(Pin::new(&mut *write).write(buf).await?)
+ Pin::new(&mut *write).write(buf).await
}
.try_or_cancel(cancel_handle)
.await
@@ -387,7 +396,7 @@ impl NodeHttpFetchResponseResource {
}
}
- pub async fn upgrade(self) -> Result<hyper::upgrade::Upgraded, AnyError> {
+ pub async fn upgrade(self) -> Result<hyper::upgrade::Upgraded, hyper::Error> {
let reader = self.response_reader.into_inner();
match reader {
NodeHttpFetchResponseReader::Start(resp) => {
@@ -445,7 +454,9 @@ impl Resource for NodeHttpFetchResponseResource {
// safely call `await` on it without creating a race condition.
Some(_) => match reader.as_mut().next().await.unwrap() {
Ok(chunk) => assert!(chunk.is_empty()),
- Err(err) => break Err(type_error(err.to_string())),
+ Err(err) => {
+ break Err(deno_core::error::type_error(err.to_string()))
+ }
},
None => break Ok(BufView::empty()),
}
@@ -453,7 +464,7 @@ impl Resource for NodeHttpFetchResponseResource {
};
let cancel_handle = RcRef::map(self, |r| &r.cancel);
- fut.try_or_cancel(cancel_handle).await
+ fut.try_or_cancel(cancel_handle).await.map_err(Into::into)
})
}
@@ -469,7 +480,9 @@ impl Resource for NodeHttpFetchResponseResource {
#[allow(clippy::type_complexity)]
pub struct NodeHttpResourceToBodyAdapter(
Rc<dyn Resource>,
- Option<Pin<Box<dyn Future<Output = Result<BufView, anyhow::Error>>>>>,
+ Option<
+ Pin<Box<dyn Future<Output = Result<BufView, deno_core::anyhow::Error>>>>,
+ >,
);
impl NodeHttpResourceToBodyAdapter {
@@ -485,7 +498,7 @@ unsafe impl Send for NodeHttpResourceToBodyAdapter {}
unsafe impl Sync for NodeHttpResourceToBodyAdapter {}
impl Stream for NodeHttpResourceToBodyAdapter {
- type Item = Result<Bytes, anyhow::Error>;
+ type Item = Result<Bytes, deno_core::anyhow::Error>;
fn poll_next(
self: Pin<&mut Self>,
@@ -515,7 +528,7 @@ impl Stream for NodeHttpResourceToBodyAdapter {
impl hyper::body::Body for NodeHttpResourceToBodyAdapter {
type Data = Bytes;
- type Error = anyhow::Error;
+ type Error = deno_core::anyhow::Error;
fn poll_frame(
self: Pin<&mut Self>,
diff --git a/ext/node/ops/http2.rs b/ext/node/ops/http2.rs
index 9595cb33d..53dada9f4 100644
--- a/ext/node/ops/http2.rs
+++ b/ext/node/ops/http2.rs
@@ -7,7 +7,6 @@ use std::rc::Rc;
use std::task::Poll;
use bytes::Bytes;
-use deno_core::error::AnyError;
use deno_core::futures::future::poll_fn;
use deno_core::op2;
use deno_core::serde::Serialize;
@@ -110,17 +109,28 @@ impl Resource for Http2ServerSendResponse {
}
}
+#[derive(Debug, thiserror::Error)]
+pub enum Http2Error {
+ #[error(transparent)]
+ Resource(deno_core::error::AnyError),
+ #[error(transparent)]
+ UrlParse(#[from] url::ParseError),
+ #[error(transparent)]
+ H2(#[from] h2::Error),
+}
+
#[op2(async)]
#[serde]
pub async fn op_http2_connect(
state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId,
#[string] url: String,
-) -> Result<(ResourceId, ResourceId), AnyError> {
+) -> Result<(ResourceId, ResourceId), Http2Error> {
// No permission check necessary because we're using an existing connection
let network_stream = {
let mut state = state.borrow_mut();
- take_network_stream_resource(&mut state.resource_table, rid)?
+ take_network_stream_resource(&mut state.resource_table, rid)
+ .map_err(Http2Error::Resource)?
};
let url = Url::parse(&url)?;
@@ -144,9 +154,10 @@ pub async fn op_http2_connect(
pub async fn op_http2_listen(
state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId,
-) -> Result<ResourceId, AnyError> {
+) -> Result<ResourceId, Http2Error> {
let stream =
- take_network_stream_resource(&mut state.borrow_mut().resource_table, rid)?;
+ take_network_stream_resource(&mut state.borrow_mut().resource_table, rid)
+ .map_err(Http2Error::Resource)?;
let conn = h2::server::Builder::new().handshake(stream).await?;
Ok(
@@ -166,12 +177,13 @@ pub async fn op_http2_accept(
#[smi] rid: ResourceId,
) -> Result<
Option<(Vec<(ByteString, ByteString)>, ResourceId, ResourceId)>,
- AnyError,
+ Http2Error,
> {
let resource = state
.borrow()
.resource_table
- .get::<Http2ServerConnection>(rid)?;
+ .get::<Http2ServerConnection>(rid)
+ .map_err(Http2Error::Resource)?;
let mut conn = RcRef::map(&resource, |r| &r.conn).borrow_mut().await;
if let Some(res) = conn.accept().await {
let (req, resp) = res?;
@@ -233,11 +245,12 @@ pub async fn op_http2_send_response(
#[smi] rid: ResourceId,
#[smi] status: u16,
#[serde] headers: Vec<(ByteString, ByteString)>,
-) -> Result<(ResourceId, u32), AnyError> {
+) -> Result<(ResourceId, u32), Http2Error> {
let resource = state
.borrow()
.resource_table
- .get::<Http2ServerSendResponse>(rid)?;
+ .get::<Http2ServerSendResponse>(rid)
+ .map_err(Http2Error::Resource)?;
let mut send_response = RcRef::map(resource, |r| &r.send_response)
.borrow_mut()
.await;
@@ -262,8 +275,12 @@ pub async fn op_http2_send_response(
pub async fn op_http2_poll_client_connection(
state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId,
-) -> Result<(), AnyError> {
- let resource = state.borrow().resource_table.get::<Http2ClientConn>(rid)?;
+) -> Result<(), Http2Error> {
+ let resource = state
+ .borrow()
+ .resource_table
+ .get::<Http2ClientConn>(rid)
+ .map_err(Http2Error::Resource)?;
let cancel_handle = RcRef::map(resource.clone(), |this| &this.cancel_handle);
let mut conn = RcRef::map(resource, |this| &this.conn).borrow_mut().await;
@@ -289,11 +306,12 @@ pub async fn op_http2_client_request(
// 4 strings of keys?
#[serde] mut pseudo_headers: HashMap<String, String>,
#[serde] headers: Vec<(ByteString, ByteString)>,
-) -> Result<(ResourceId, u32), AnyError> {
+) -> Result<(ResourceId, u32), Http2Error> {
let resource = state
.borrow()
.resource_table
- .get::<Http2Client>(client_rid)?;
+ .get::<Http2Client>(client_rid)
+ .map_err(Http2Error::Resource)?;
let url = resource.url.clone();
@@ -326,7 +344,10 @@ pub async fn op_http2_client_request(
let resource = {
let state = state.borrow();
- state.resource_table.get::<Http2Client>(client_rid)?
+ state
+ .resource_table
+ .get::<Http2Client>(client_rid)
+ .map_err(Http2Error::Resource)?
};
let mut client = RcRef::map(&resource, |r| &r.client).borrow_mut().await;
poll_fn(|cx| client.poll_ready(cx)).await?;
@@ -345,11 +366,12 @@ pub async fn op_http2_client_send_data(
#[smi] stream_rid: ResourceId,
#[buffer] data: JsBuffer,
end_of_stream: bool,
-) -> Result<(), AnyError> {
+) -> Result<(), Http2Error> {
let resource = state
.borrow()
.resource_table
- .get::<Http2ClientStream>(stream_rid)?;
+ .get::<Http2ClientStream>(stream_rid)
+ .map_err(Http2Error::Resource)?;
let mut stream = RcRef::map(&resource, |r| &r.stream).borrow_mut().await;
stream.send_data(data.to_vec().into(), end_of_stream)?;
@@ -361,7 +383,7 @@ pub async fn op_http2_client_reset_stream(
state: Rc<RefCell<OpState>>,
#[smi] stream_rid: ResourceId,
#[smi] code: u32,
-) -> Result<(), AnyError> {
+) -> Result<(), deno_core::error::AnyError> {
let resource = state
.borrow()
.resource_table
@@ -376,11 +398,12 @@ pub async fn op_http2_client_send_trailers(
state: Rc<RefCell<OpState>>,
#[smi] stream_rid: ResourceId,
#[serde] trailers: Vec<(ByteString, ByteString)>,
-) -> Result<(), AnyError> {
+) -> Result<(), Http2Error> {
let resource = state
.borrow()
.resource_table
- .get::<Http2ClientStream>(stream_rid)?;
+ .get::<Http2ClientStream>(stream_rid)
+ .map_err(Http2Error::Resource)?;
let mut stream = RcRef::map(&resource, |r| &r.stream).borrow_mut().await;
let mut trailers_map = http::HeaderMap::new();
@@ -408,11 +431,12 @@ pub struct Http2ClientResponse {
pub async fn op_http2_client_get_response(
state: Rc<RefCell<OpState>>,
#[smi] stream_rid: ResourceId,
-) -> Result<(Http2ClientResponse, bool), AnyError> {
+) -> Result<(Http2ClientResponse, bool), Http2Error> {
let resource = state
.borrow()
.resource_table
- .get::<Http2ClientStream>(stream_rid)?;
+ .get::<Http2ClientStream>(stream_rid)
+ .map_err(Http2Error::Resource)?;
let mut response_future =
RcRef::map(&resource, |r| &r.response).borrow_mut().await;
@@ -478,23 +502,22 @@ fn poll_data_or_trailers(
pub async fn op_http2_client_get_response_body_chunk(
state: Rc<RefCell<OpState>>,
#[smi] body_rid: ResourceId,
-) -> Result<(Option<Vec<u8>>, bool, bool), AnyError> {
+) -> Result<(Option<Vec<u8>>, bool, bool), Http2Error> {
let resource = state
.borrow()
.resource_table
- .get::<Http2ClientResponseBody>(body_rid)?;
+ .get::<Http2ClientResponseBody>(body_rid)
+ .map_err(Http2Error::Resource)?;
let mut body = RcRef::map(&resource, |r| &r.body).borrow_mut().await;
loop {
let result = poll_fn(|cx| poll_data_or_trailers(cx, &mut body)).await;
if let Err(err) = result {
- let reason = err.reason();
- if let Some(reason) = reason {
- if reason == Reason::CANCEL {
- return Ok((None, false, true));
- }
+ match err.reason() {
+ Some(Reason::NO_ERROR) => return Ok((None, true, false)),
+ Some(Reason::CANCEL) => return Ok((None, false, true)),
+ _ => return Err(err.into()),
}
- return Err(err.into());
}
match result.unwrap() {
DataOrTrailers::Data(data) => {
@@ -527,7 +550,7 @@ pub async fn op_http2_client_get_response_body_chunk(
pub async fn op_http2_client_get_response_trailers(
state: Rc<RefCell<OpState>>,
#[smi] body_rid: ResourceId,
-) -> Result<Option<Vec<(ByteString, ByteString)>>, AnyError> {
+) -> Result<Option<Vec<(ByteString, ByteString)>>, deno_core::error::AnyError> {
let resource = state
.borrow()
.resource_table
diff --git a/ext/node/ops/idna.rs b/ext/node/ops/idna.rs
index 9c9450c70..a3d85e77c 100644
--- a/ext/node/ops/idna.rs
+++ b/ext/node/ops/idna.rs
@@ -1,7 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-use deno_core::anyhow::Error;
-use deno_core::error::range_error;
use deno_core::op2;
use std::borrow::Cow;
@@ -11,19 +9,21 @@ use std::borrow::Cow;
const PUNY_PREFIX: &str = "xn--";
-fn invalid_input_err() -> Error {
- range_error("Invalid input")
-}
-
-fn not_basic_err() -> Error {
- range_error("Illegal input >= 0x80 (not a basic code point)")
+#[derive(Debug, thiserror::Error)]
+pub enum IdnaError {
+ #[error("Invalid input")]
+ InvalidInput,
+ #[error("Input would take more than 63 characters to encode")]
+ InputTooLong,
+ #[error("Illegal input >= 0x80 (not a basic code point)")]
+ IllegalInput,
}
/// map a domain by mapping each label with the given function
-fn map_domain<E>(
+fn map_domain(
domain: &str,
- f: impl Fn(&str) -> Result<Cow<'_, str>, E>,
-) -> Result<String, E> {
+ f: impl Fn(&str) -> Result<Cow<'_, str>, IdnaError>,
+) -> Result<String, IdnaError> {
let mut result = String::with_capacity(domain.len());
let mut domain = domain;
@@ -48,7 +48,7 @@ fn map_domain<E>(
/// Maps a unicode domain to ascii by punycode encoding each label
///
/// Note this is not IDNA2003 or IDNA2008 compliant, rather it matches node.js's punycode implementation
-fn to_ascii(input: &str) -> Result<String, Error> {
+fn to_ascii(input: &str) -> Result<String, IdnaError> {
if input.is_ascii() {
return Ok(input.into());
}
@@ -61,9 +61,7 @@ fn to_ascii(input: &str) -> Result<String, Error> {
} else {
idna::punycode::encode_str(label)
.map(|encoded| [PUNY_PREFIX, &encoded].join("").into()) // add the prefix
- .ok_or_else(|| {
- Error::msg("Input would take more than 63 characters to encode") // only error possible per the docs
- })
+ .ok_or(IdnaError::InputTooLong) // only error possible per the docs
}
})?;
@@ -74,13 +72,13 @@ fn to_ascii(input: &str) -> Result<String, Error> {
/// Maps an ascii domain to unicode by punycode decoding each label
///
/// Note this is not IDNA2003 or IDNA2008 compliant, rather it matches node.js's punycode implementation
-fn to_unicode(input: &str) -> Result<String, Error> {
+fn to_unicode(input: &str) -> Result<String, IdnaError> {
map_domain(input, |s| {
if let Some(puny) = s.strip_prefix(PUNY_PREFIX) {
// it's a punycode encoded label
Ok(
idna::punycode::decode_to_string(&puny.to_lowercase())
- .ok_or_else(invalid_input_err)?
+ .ok_or(IdnaError::InvalidInput)?
.into(),
)
} else {
@@ -95,7 +93,7 @@ fn to_unicode(input: &str) -> Result<String, Error> {
#[string]
pub fn op_node_idna_punycode_to_ascii(
#[string] domain: String,
-) -> Result<String, Error> {
+) -> Result<String, IdnaError> {
to_ascii(&domain)
}
@@ -105,7 +103,7 @@ pub fn op_node_idna_punycode_to_ascii(
#[string]
pub fn op_node_idna_punycode_to_unicode(
#[string] domain: String,
-) -> Result<String, Error> {
+) -> Result<String, IdnaError> {
to_unicode(&domain)
}
@@ -115,8 +113,8 @@ pub fn op_node_idna_punycode_to_unicode(
#[string]
pub fn op_node_idna_domain_to_ascii(
#[string] domain: String,
-) -> Result<String, Error> {
- idna::domain_to_ascii(&domain).map_err(|e| e.into())
+) -> Result<String, idna::Errors> {
+ idna::domain_to_ascii(&domain)
}
/// Converts a domain to Unicode as per the IDNA spec
@@ -131,7 +129,7 @@ pub fn op_node_idna_domain_to_unicode(#[string] domain: String) -> String {
#[string]
pub fn op_node_idna_punycode_decode(
#[string] domain: String,
-) -> Result<String, Error> {
+) -> Result<String, IdnaError> {
if domain.is_empty() {
return Ok(domain);
}
@@ -147,11 +145,10 @@ pub fn op_node_idna_punycode_decode(
.unwrap_or(domain.len() - 1);
if !domain[..last_dash].is_ascii() {
- return Err(not_basic_err());
+ return Err(IdnaError::IllegalInput);
}
- idna::punycode::decode_to_string(&domain)
- .ok_or_else(|| deno_core::error::range_error("Invalid input"))
+ idna::punycode::decode_to_string(&domain).ok_or(IdnaError::InvalidInput)
}
#[op2]
diff --git a/ext/node/ops/inspector.rs b/ext/node/ops/inspector.rs
new file mode 100644
index 000000000..34a7e004c
--- /dev/null
+++ b/ext/node/ops/inspector.rs
@@ -0,0 +1,161 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use crate::NodePermissions;
+use deno_core::anyhow::Error;
+use deno_core::error::generic_error;
+use deno_core::futures::channel::mpsc;
+use deno_core::op2;
+use deno_core::v8;
+use deno_core::GarbageCollected;
+use deno_core::InspectorSessionKind;
+use deno_core::InspectorSessionOptions;
+use deno_core::JsRuntimeInspector;
+use deno_core::OpState;
+use std::cell::RefCell;
+use std::rc::Rc;
+
+#[op2(fast)]
+pub fn op_inspector_enabled() -> bool {
+ // TODO: hook up to InspectorServer
+ false
+}
+
+#[op2]
+pub fn op_inspector_open<P>(
+ _state: &mut OpState,
+ _port: Option<u16>,
+ #[string] _host: Option<String>,
+) -> Result<(), Error>
+where
+ P: NodePermissions + 'static,
+{
+ // TODO: hook up to InspectorServer
+ /*
+ let server = state.borrow_mut::<InspectorServer>();
+ if let Some(host) = host {
+ server.set_host(host);
+ }
+ if let Some(port) = port {
+ server.set_port(port);
+ }
+ state
+ .borrow_mut::<P>()
+ .check_net((server.host(), Some(server.port())), "inspector.open")?;
+ */
+
+ Ok(())
+}
+
+#[op2(fast)]
+pub fn op_inspector_close() {
+ // TODO: hook up to InspectorServer
+}
+
+#[op2]
+#[string]
+pub fn op_inspector_url() -> Option<String> {
+ // TODO: hook up to InspectorServer
+ None
+}
+
+#[op2(fast)]
+pub fn op_inspector_wait(state: &OpState) -> bool {
+ match state.try_borrow::<Rc<RefCell<JsRuntimeInspector>>>() {
+ Some(inspector) => {
+ inspector
+ .borrow_mut()
+ .wait_for_session_and_break_on_next_statement();
+ true
+ }
+ None => false,
+ }
+}
+
+#[op2(fast)]
+pub fn op_inspector_emit_protocol_event(
+ #[string] _event_name: String,
+ #[string] _params: String,
+) {
+ // TODO: inspector channel & protocol notifications
+}
+
+struct JSInspectorSession {
+ tx: RefCell<Option<mpsc::UnboundedSender<String>>>,
+}
+
+impl GarbageCollected for JSInspectorSession {}
+
+#[op2]
+#[cppgc]
+pub fn op_inspector_connect<'s, P>(
+ isolate: *mut v8::Isolate,
+ scope: &mut v8::HandleScope<'s>,
+ state: &mut OpState,
+ connect_to_main_thread: bool,
+ callback: v8::Local<'s, v8::Function>,
+) -> Result<JSInspectorSession, Error>
+where
+ P: NodePermissions + 'static,
+{
+ state
+ .borrow_mut::<P>()
+ .check_sys("inspector", "inspector.Session.connect")?;
+
+ if connect_to_main_thread {
+ return Err(generic_error("connectToMainThread not supported"));
+ }
+
+ let context = scope.get_current_context();
+ let context = v8::Global::new(scope, context);
+ let callback = v8::Global::new(scope, callback);
+
+ let inspector = state
+ .borrow::<Rc<RefCell<JsRuntimeInspector>>>()
+ .borrow_mut();
+
+ let tx = inspector.create_raw_session(
+ InspectorSessionOptions {
+ kind: InspectorSessionKind::NonBlocking {
+ wait_for_disconnect: false,
+ },
+ },
+ // The inspector connection does not keep the event loop alive but
+ // when the inspector sends a message to the frontend, the JS that
+ // that runs may keep the event loop alive so we have to call back
+ // synchronously, instead of using the usual LocalInspectorSession
+ // UnboundedReceiver<InspectorMsg> API.
+ Box::new(move |message| {
+ // SAFETY: This function is called directly by the inspector, so
+ // 1) The isolate is still valid
+ // 2) We are on the same thread as the Isolate
+ let scope = unsafe { &mut v8::CallbackScope::new(&mut *isolate) };
+ let context = v8::Local::new(scope, context.clone());
+ let scope = &mut v8::ContextScope::new(scope, context);
+ let scope = &mut v8::TryCatch::new(scope);
+ let recv = v8::undefined(scope);
+ if let Some(message) = v8::String::new(scope, &message.content) {
+ let callback = v8::Local::new(scope, callback.clone());
+ callback.call(scope, recv.into(), &[message.into()]);
+ }
+ }),
+ );
+
+ Ok(JSInspectorSession {
+ tx: RefCell::new(Some(tx)),
+ })
+}
+
+#[op2(fast)]
+pub fn op_inspector_dispatch(
+ #[cppgc] session: &JSInspectorSession,
+ #[string] message: String,
+) {
+ if let Some(tx) = &*session.tx.borrow() {
+ let _ = tx.unbounded_send(message);
+ }
+}
+
+#[op2(fast)]
+pub fn op_inspector_disconnect(#[cppgc] session: &JSInspectorSession) {
+ drop(session.tx.borrow_mut().take());
+}
diff --git a/ext/node/ops/ipc.rs b/ext/node/ops/ipc.rs
index 59b6fece1..672cf0d70 100644
--- a/ext/node/ops/ipc.rs
+++ b/ext/node/ops/ipc.rs
@@ -17,8 +17,6 @@ mod impl_ {
use std::task::Context;
use std::task::Poll;
- use deno_core::error::bad_resource_id;
- use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::serde;
use deno_core::serde::Serializer;
@@ -167,7 +165,7 @@ mod impl_ {
#[smi]
pub fn op_node_child_ipc_pipe(
state: &mut OpState,
- ) -> Result<Option<ResourceId>, AnyError> {
+ ) -> Result<Option<ResourceId>, io::Error> {
let fd = match state.try_borrow_mut::<crate::ChildPipeFd>() {
Some(child_pipe_fd) => child_pipe_fd.0,
None => return Ok(None),
@@ -180,6 +178,18 @@ mod impl_ {
))
}
+ #[derive(Debug, thiserror::Error)]
+ pub enum IpcError {
+ #[error(transparent)]
+ Resource(deno_core::error::AnyError),
+ #[error(transparent)]
+ IpcJsonStream(#[from] IpcJsonStreamError),
+ #[error(transparent)]
+ Canceled(#[from] deno_core::Canceled),
+ #[error("failed to serialize json value: {0}")]
+ SerdeJson(serde_json::Error),
+ }
+
#[op2(async)]
pub fn op_node_ipc_write<'a>(
scope: &mut v8::HandleScope<'a>,
@@ -192,34 +202,37 @@ mod impl_ {
// ideally we would just return `Result<(impl Future, bool), ..>`, but that's not
// supported by `op2` currently.
queue_ok: v8::Local<'a, v8::Array>,
- ) -> Result<impl Future<Output = Result<(), AnyError>>, AnyError> {
+ ) -> Result<impl Future<Output = Result<(), io::Error>>, IpcError> {
let mut serialized = Vec::with_capacity(64);
let mut ser = serde_json::Serializer::new(&mut serialized);
- serialize_v8_value(scope, value, &mut ser).map_err(|e| {
- deno_core::error::type_error(format!(
- "failed to serialize json value: {e}"
- ))
- })?;
+ serialize_v8_value(scope, value, &mut ser).map_err(IpcError::SerdeJson)?;
serialized.push(b'\n');
let stream = state
.borrow()
.resource_table
.get::<IpcJsonStreamResource>(rid)
- .map_err(|_| bad_resource_id())?;
+ .map_err(IpcError::Resource)?;
let old = stream
.queued_bytes
.fetch_add(serialized.len(), std::sync::atomic::Ordering::Relaxed);
if old + serialized.len() > 2 * INITIAL_CAPACITY {
// sending messages too fast
- let v = false.to_v8(scope)?;
+ let v = false.to_v8(scope).unwrap(); // Infallible
queue_ok.set_index(scope, 0, v);
}
Ok(async move {
- stream.clone().write_msg_bytes(&serialized).await?;
+ let cancel = stream.cancel.clone();
+ let result = stream
+ .clone()
+ .write_msg_bytes(&serialized)
+ .or_cancel(cancel)
+ .await;
+ // adjust count even on error
stream
.queued_bytes
.fetch_sub(serialized.len(), std::sync::atomic::Ordering::Relaxed);
+ result??;
Ok(())
})
}
@@ -239,12 +252,12 @@ mod impl_ {
pub async fn op_node_ipc_read(
state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId,
- ) -> Result<serde_json::Value, AnyError> {
+ ) -> Result<serde_json::Value, IpcError> {
let stream = state
.borrow()
.resource_table
.get::<IpcJsonStreamResource>(rid)
- .map_err(|_| bad_resource_id())?;
+ .map_err(IpcError::Resource)?;
let cancel = stream.cancel.clone();
let mut stream = RcRef::map(stream, |r| &r.read_half).borrow_mut().await;
@@ -400,7 +413,7 @@ mod impl_ {
async fn write_msg_bytes(
self: Rc<Self>,
msg: &[u8],
- ) -> Result<(), AnyError> {
+ ) -> Result<(), io::Error> {
let mut write_half =
RcRef::map(self, |r| &r.write_half).borrow_mut().await;
write_half.write_all(msg).await?;
@@ -455,6 +468,14 @@ mod impl_ {
}
}
+ #[derive(Debug, thiserror::Error)]
+ pub enum IpcJsonStreamError {
+ #[error("{0}")]
+ Io(#[source] std::io::Error),
+ #[error("{0}")]
+ SimdJson(#[source] simd_json::Error),
+ }
+
// JSON serialization stream over IPC pipe.
//
// `\n` is used as a delimiter between messages.
@@ -475,7 +496,7 @@ mod impl_ {
async fn read_msg(
&mut self,
- ) -> Result<Option<serde_json::Value>, AnyError> {
+ ) -> Result<Option<serde_json::Value>, IpcJsonStreamError> {
let mut json = None;
let nread = read_msg_inner(
&mut self.pipe,
@@ -483,7 +504,8 @@ mod impl_ {
&mut json,
&mut self.read_buffer,
)
- .await?;
+ .await
+ .map_err(IpcJsonStreamError::Io)?;
if nread == 0 {
// EOF.
return Ok(None);
@@ -493,7 +515,8 @@ mod impl_ {
Some(v) => v,
None => {
// Took more than a single read and some buffering.
- simd_json::from_slice(&mut self.buffer[..nread])?
+ simd_json::from_slice(&mut self.buffer[..nread])
+ .map_err(IpcJsonStreamError::SimdJson)?
}
};
diff --git a/ext/node/ops/mod.rs b/ext/node/ops/mod.rs
index b562261f3..e5ea8b417 100644
--- a/ext/node/ops/mod.rs
+++ b/ext/node/ops/mod.rs
@@ -7,8 +7,10 @@ pub mod fs;
pub mod http;
pub mod http2;
pub mod idna;
+pub mod inspector;
pub mod ipc;
pub mod os;
+pub mod perf_hooks;
pub mod process;
pub mod require;
pub mod tls;
diff --git a/ext/node/ops/os/mod.rs b/ext/node/ops/os/mod.rs
index ca91895f2..d291277ad 100644
--- a/ext/node/ops/os/mod.rs
+++ b/ext/node/ops/os/mod.rs
@@ -1,19 +1,31 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+use std::mem::MaybeUninit;
+
use crate::NodePermissions;
-use deno_core::error::type_error;
-use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::OpState;
mod cpus;
-mod priority;
+pub mod priority;
+
+#[derive(Debug, thiserror::Error)]
+pub enum OsError {
+ #[error(transparent)]
+ Priority(priority::PriorityError),
+ #[error(transparent)]
+ Permission(#[from] deno_permissions::PermissionCheckError),
+ #[error("Failed to get cpu info")]
+ FailedToGetCpuInfo,
+ #[error("Failed to get user info")]
+ FailedToGetUserInfo(#[source] std::io::Error),
+}
#[op2(fast)]
pub fn op_node_os_get_priority<P>(
state: &mut OpState,
pid: u32,
-) -> Result<i32, AnyError>
+) -> Result<i32, OsError>
where
P: NodePermissions + 'static,
{
@@ -22,7 +34,7 @@ where
permissions.check_sys("getPriority", "node:os.getPriority()")?;
}
- priority::get_priority(pid)
+ priority::get_priority(pid).map_err(OsError::Priority)
}
#[op2(fast)]
@@ -30,7 +42,7 @@ pub fn op_node_os_set_priority<P>(
state: &mut OpState,
pid: u32,
priority: i32,
-) -> Result<(), AnyError>
+) -> Result<(), OsError>
where
P: NodePermissions + 'static,
{
@@ -39,25 +51,171 @@ where
permissions.check_sys("setPriority", "node:os.setPriority()")?;
}
- priority::set_priority(pid, priority)
+ priority::set_priority(pid, priority).map_err(OsError::Priority)
+}
+
+#[derive(serde::Serialize)]
+pub struct UserInfo {
+ username: String,
+ homedir: String,
+ shell: Option<String>,
+}
+
+#[cfg(unix)]
+fn get_user_info(uid: u32) -> Result<UserInfo, OsError> {
+ use std::ffi::CStr;
+ let mut pw: MaybeUninit<libc::passwd> = MaybeUninit::uninit();
+ let mut result: *mut libc::passwd = std::ptr::null_mut();
+ // SAFETY: libc call, no invariants
+ let max_buf_size = unsafe { libc::sysconf(libc::_SC_GETPW_R_SIZE_MAX) };
+ let buf_size = if max_buf_size < 0 {
+ // from the man page
+ 16_384
+ } else {
+ max_buf_size as usize
+ };
+ let mut buf = {
+ let mut b = Vec::<MaybeUninit<libc::c_char>>::with_capacity(buf_size);
+ // SAFETY: MaybeUninit has no initialization invariants, and len == cap
+ unsafe {
+ b.set_len(buf_size);
+ }
+ b
+ };
+ // SAFETY: libc call, args are correct
+ let s = unsafe {
+ libc::getpwuid_r(
+ uid,
+ pw.as_mut_ptr(),
+ buf.as_mut_ptr().cast(),
+ buf_size,
+ std::ptr::addr_of_mut!(result),
+ )
+ };
+ if result.is_null() {
+ if s != 0 {
+ return Err(
+ OsError::FailedToGetUserInfo(std::io::Error::last_os_error()),
+ );
+ } else {
+ return Err(OsError::FailedToGetUserInfo(std::io::Error::from(
+ std::io::ErrorKind::NotFound,
+ )));
+ }
+ }
+ // SAFETY: pw was initialized by the call to `getpwuid_r` above
+ let pw = unsafe { pw.assume_init() };
+ // SAFETY: initialized above, pw alive until end of function, nul terminated
+ let username = unsafe { CStr::from_ptr(pw.pw_name) };
+ // SAFETY: initialized above, pw alive until end of function, nul terminated
+ let homedir = unsafe { CStr::from_ptr(pw.pw_dir) };
+ // SAFETY: initialized above, pw alive until end of function, nul terminated
+ let shell = unsafe { CStr::from_ptr(pw.pw_shell) };
+ Ok(UserInfo {
+ username: username.to_string_lossy().into_owned(),
+ homedir: homedir.to_string_lossy().into_owned(),
+ shell: Some(shell.to_string_lossy().into_owned()),
+ })
+}
+
+#[cfg(windows)]
+fn get_user_info(_uid: u32) -> Result<UserInfo, OsError> {
+ use std::ffi::OsString;
+ use std::os::windows::ffi::OsStringExt;
+
+ use windows_sys::Win32::Foundation::CloseHandle;
+ use windows_sys::Win32::Foundation::GetLastError;
+ use windows_sys::Win32::Foundation::ERROR_INSUFFICIENT_BUFFER;
+ use windows_sys::Win32::Foundation::HANDLE;
+ use windows_sys::Win32::System::Threading::GetCurrentProcess;
+ use windows_sys::Win32::System::Threading::OpenProcessToken;
+ use windows_sys::Win32::UI::Shell::GetUserProfileDirectoryW;
+ struct Handle(HANDLE);
+ impl Drop for Handle {
+ fn drop(&mut self) {
+ // SAFETY: win32 call
+ unsafe {
+ CloseHandle(self.0);
+ }
+ }
+ }
+ let mut token: MaybeUninit<HANDLE> = MaybeUninit::uninit();
+
+ // Get a handle to the current process
+ // SAFETY: win32 call
+ unsafe {
+ if OpenProcessToken(
+ GetCurrentProcess(),
+ windows_sys::Win32::Security::TOKEN_READ,
+ token.as_mut_ptr(),
+ ) == 0
+ {
+ return Err(
+ OsError::FailedToGetUserInfo(std::io::Error::last_os_error()),
+ );
+ }
+ }
+
+ // SAFETY: initialized by call above
+ let token = Handle(unsafe { token.assume_init() });
+
+ let mut bufsize = 0;
+ // get the size for the homedir buf (it'll end up in `bufsize`)
+ // SAFETY: win32 call
+ unsafe {
+ GetUserProfileDirectoryW(token.0, std::ptr::null_mut(), &mut bufsize);
+ let err = GetLastError();
+ if err != ERROR_INSUFFICIENT_BUFFER {
+ return Err(OsError::FailedToGetUserInfo(
+ std::io::Error::from_raw_os_error(err as i32),
+ ));
+ }
+ }
+ let mut path = vec![0; bufsize as usize];
+ // Actually get the homedir
+ // SAFETY: path is `bufsize` elements
+ unsafe {
+ if GetUserProfileDirectoryW(token.0, path.as_mut_ptr(), &mut bufsize) == 0 {
+ return Err(
+ OsError::FailedToGetUserInfo(std::io::Error::last_os_error()),
+ );
+ }
+ }
+ // remove trailing nul
+ path.pop();
+ let homedir_wide = OsString::from_wide(&path);
+ let homedir = homedir_wide.to_string_lossy().into_owned();
+
+ Ok(UserInfo {
+ username: deno_whoami::username(),
+ homedir,
+ shell: None,
+ })
}
#[op2]
-#[string]
-pub fn op_node_os_username<P>(state: &mut OpState) -> Result<String, AnyError>
+#[serde]
+pub fn op_node_os_user_info<P>(
+ state: &mut OpState,
+ #[smi] uid: u32,
+) -> Result<UserInfo, OsError>
where
P: NodePermissions + 'static,
{
{
let permissions = state.borrow_mut::<P>();
- permissions.check_sys("username", "node:os.userInfo()")?;
+ permissions
+ .check_sys("userInfo", "node:os.userInfo()")
+ .map_err(OsError::Permission)?;
}
- Ok(deno_whoami::username())
+ get_user_info(uid)
}
#[op2(fast)]
-pub fn op_geteuid<P>(state: &mut OpState) -> Result<u32, AnyError>
+pub fn op_geteuid<P>(
+ state: &mut OpState,
+) -> Result<u32, deno_core::error::AnyError>
where
P: NodePermissions + 'static,
{
@@ -76,7 +234,9 @@ where
}
#[op2(fast)]
-pub fn op_getegid<P>(state: &mut OpState) -> Result<u32, AnyError>
+pub fn op_getegid<P>(
+ state: &mut OpState,
+) -> Result<u32, deno_core::error::AnyError>
where
P: NodePermissions + 'static,
{
@@ -96,7 +256,7 @@ where
#[op2]
#[serde]
-pub fn op_cpus<P>(state: &mut OpState) -> Result<Vec<cpus::CpuInfo>, AnyError>
+pub fn op_cpus<P>(state: &mut OpState) -> Result<Vec<cpus::CpuInfo>, OsError>
where
P: NodePermissions + 'static,
{
@@ -105,12 +265,14 @@ where
permissions.check_sys("cpus", "node:os.cpus()")?;
}
- cpus::cpu_info().ok_or_else(|| type_error("Failed to get cpu info"))
+ cpus::cpu_info().ok_or(OsError::FailedToGetCpuInfo)
}
#[op2]
#[string]
-pub fn op_homedir<P>(state: &mut OpState) -> Result<Option<String>, AnyError>
+pub fn op_homedir<P>(
+ state: &mut OpState,
+) -> Result<Option<String>, deno_core::error::AnyError>
where
P: NodePermissions + 'static,
{
diff --git a/ext/node/ops/os/priority.rs b/ext/node/ops/os/priority.rs
index 043928e2a..9a1ebcca7 100644
--- a/ext/node/ops/os/priority.rs
+++ b/ext/node/ops/os/priority.rs
@@ -1,12 +1,18 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-use deno_core::error::AnyError;
-
pub use impl_::*;
+#[derive(Debug, thiserror::Error)]
+pub enum PriorityError {
+ #[error("{0}")]
+ Io(#[from] std::io::Error),
+ #[cfg(windows)]
+ #[error("Invalid priority")]
+ InvalidPriority,
+}
+
#[cfg(unix)]
mod impl_ {
- use super::*;
use errno::errno;
use errno::set_errno;
use errno::Errno;
@@ -16,7 +22,7 @@ mod impl_ {
const PRIORITY_HIGH: i32 = -14;
// Ref: https://github.com/libuv/libuv/blob/55376b044b74db40772e8a6e24d67a8673998e02/src/unix/core.c#L1533-L1547
- pub fn get_priority(pid: u32) -> Result<i32, AnyError> {
+ pub fn get_priority(pid: u32) -> Result<i32, super::PriorityError> {
set_errno(Errno(0));
match (
// SAFETY: libc::getpriority is unsafe
@@ -29,7 +35,10 @@ mod impl_ {
}
}
- pub fn set_priority(pid: u32, priority: i32) -> Result<(), AnyError> {
+ pub fn set_priority(
+ pid: u32,
+ priority: i32,
+ ) -> Result<(), super::PriorityError> {
// SAFETY: libc::setpriority is unsafe
match unsafe { libc::setpriority(PRIO_PROCESS, pid as id_t, priority) } {
-1 => Err(std::io::Error::last_os_error().into()),
@@ -40,8 +49,6 @@ mod impl_ {
#[cfg(windows)]
mod impl_ {
- use super::*;
- use deno_core::error::type_error;
use winapi::shared::minwindef::DWORD;
use winapi::shared::minwindef::FALSE;
use winapi::shared::ntdef::NULL;
@@ -67,7 +74,7 @@ mod impl_ {
const PRIORITY_HIGHEST: i32 = -20;
// Ported from: https://github.com/libuv/libuv/blob/a877ca2435134ef86315326ef4ef0c16bdbabf17/src/win/util.c#L1649-L1685
- pub fn get_priority(pid: u32) -> Result<i32, AnyError> {
+ pub fn get_priority(pid: u32) -> Result<i32, super::PriorityError> {
// SAFETY: Windows API calls
unsafe {
let handle = if pid == 0 {
@@ -95,7 +102,10 @@ mod impl_ {
}
// Ported from: https://github.com/libuv/libuv/blob/a877ca2435134ef86315326ef4ef0c16bdbabf17/src/win/util.c#L1688-L1719
- pub fn set_priority(pid: u32, priority: i32) -> Result<(), AnyError> {
+ pub fn set_priority(
+ pid: u32,
+ priority: i32,
+ ) -> Result<(), super::PriorityError> {
// SAFETY: Windows API calls
unsafe {
let handle = if pid == 0 {
@@ -109,7 +119,7 @@ mod impl_ {
#[allow(clippy::manual_range_contains)]
let priority_class =
if priority < PRIORITY_HIGHEST || priority > PRIORITY_LOW {
- return Err(type_error("Invalid priority"));
+ return Err(super::PriorityError::InvalidPriority);
} else if priority < PRIORITY_HIGH {
REALTIME_PRIORITY_CLASS
} else if priority < PRIORITY_ABOVE_NORMAL {
diff --git a/ext/node/ops/perf_hooks.rs b/ext/node/ops/perf_hooks.rs
new file mode 100644
index 000000000..636d0b2ad
--- /dev/null
+++ b/ext/node/ops/perf_hooks.rs
@@ -0,0 +1,135 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use deno_core::op2;
+use deno_core::GarbageCollected;
+
+use std::cell::Cell;
+
+#[derive(Debug, thiserror::Error)]
+pub enum PerfHooksError {
+ #[error(transparent)]
+ TokioEld(#[from] tokio_eld::Error),
+}
+
+pub struct EldHistogram {
+ eld: tokio_eld::EldHistogram<u64>,
+ started: Cell<bool>,
+}
+
+impl GarbageCollected for EldHistogram {}
+
+#[op2]
+impl EldHistogram {
+ // Creates an interval EldHistogram object that samples and reports the event
+ // loop delay over time.
+ //
+ // The delays will be reported in nanoseconds.
+ #[constructor]
+ #[cppgc]
+ pub fn new(#[smi] resolution: u32) -> Result<EldHistogram, PerfHooksError> {
+ Ok(EldHistogram {
+ eld: tokio_eld::EldHistogram::new(resolution as usize)?,
+ started: Cell::new(false),
+ })
+ }
+
+  // Enables the update interval timer.
+  //
+  // Returns true if the timer was started, false if it was already started.
+ #[fast]
+ fn enable(&self) -> bool {
+ if self.started.get() {
+ return false;
+ }
+
+ self.eld.start();
+ self.started.set(true);
+
+ true
+ }
+
+  // Disables the update interval timer.
+  //
+  // Returns true if the timer was stopped, false if it was already stopped.
+ #[fast]
+ fn disable(&self) -> bool {
+ if !self.started.get() {
+ return false;
+ }
+
+ self.eld.stop();
+ self.started.set(false);
+
+ true
+ }
+
+ // Returns the value at the given percentile.
+ //
+ // `percentile` ∈ (0, 100]
+ #[fast]
+ #[number]
+ fn percentile(&self, percentile: f64) -> u64 {
+ self.eld.value_at_percentile(percentile)
+ }
+
+ // Returns the value at the given percentile as a bigint.
+ #[fast]
+ #[bigint]
+ fn percentile_big_int(&self, percentile: f64) -> u64 {
+ self.eld.value_at_percentile(percentile)
+ }
+
+ // The number of samples recorded by the histogram.
+ #[getter]
+ #[number]
+ fn count(&self) -> u64 {
+ self.eld.len()
+ }
+
+ // The number of samples recorded by the histogram as a bigint.
+ #[getter]
+ #[bigint]
+ fn count_big_int(&self) -> u64 {
+ self.eld.len()
+ }
+
+ // The maximum recorded event loop delay.
+ #[getter]
+ #[number]
+ fn max(&self) -> u64 {
+ self.eld.max()
+ }
+
+ // The maximum recorded event loop delay as a bigint.
+ #[getter]
+ #[bigint]
+ fn max_big_int(&self) -> u64 {
+ self.eld.max()
+ }
+
+ // The mean of the recorded event loop delays.
+ #[getter]
+ fn mean(&self) -> f64 {
+ self.eld.mean()
+ }
+
+ // The minimum recorded event loop delay.
+ #[getter]
+ #[number]
+ fn min(&self) -> u64 {
+ self.eld.min()
+ }
+
+ // The minimum recorded event loop delay as a bigint.
+ #[getter]
+ #[bigint]
+ fn min_big_int(&self) -> u64 {
+ self.eld.min()
+ }
+
+ // The standard deviation of the recorded event loop delays.
+ #[getter]
+ fn stddev(&self) -> f64 {
+ self.eld.stdev()
+ }
+}
diff --git a/ext/node/ops/process.rs b/ext/node/ops/process.rs
index 0992c46c6..282567226 100644
--- a/ext/node/ops/process.rs
+++ b/ext/node/ops/process.rs
@@ -1,6 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::OpState;
use deno_permissions::PermissionsContainer;
@@ -51,7 +50,7 @@ pub fn op_node_process_kill(
state: &mut OpState,
#[smi] pid: i32,
#[smi] sig: i32,
-) -> Result<i32, AnyError> {
+) -> Result<i32, deno_core::error::AnyError> {
state
.borrow_mut::<PermissionsContainer>()
.check_run_all("process.kill")?;
diff --git a/ext/node/ops/require.rs b/ext/node/ops/require.rs
index 547336981..06c034fd5 100644
--- a/ext/node/ops/require.rs
+++ b/ext/node/ops/require.rs
@@ -1,18 +1,19 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-use deno_core::anyhow::Context;
-use deno_core::error::generic_error;
+use boxed_error::Boxed;
use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::url::Url;
use deno_core::v8;
use deno_core::JsRuntimeInspector;
-use deno_core::ModuleSpecifier;
use deno_core::OpState;
use deno_fs::FileSystemRc;
+use deno_package_json::NodeModuleKind;
use deno_package_json::PackageJsonRc;
use deno_path_util::normalize_path;
-use node_resolver::NodeModuleKind;
+use deno_path_util::url_from_file_path;
+use deno_path_util::url_to_file_path;
+use node_resolver::errors::ClosestPkgJsonError;
use node_resolver::NodeResolutionMode;
use node_resolver::REQUIRE_CONDITIONS;
use std::borrow::Cow;
@@ -22,21 +23,55 @@ use std::path::PathBuf;
use std::rc::Rc;
use crate::NodePermissions;
-use crate::NodeRequireResolverRc;
+use crate::NodeRequireLoaderRc;
use crate::NodeResolverRc;
-use crate::NpmResolverRc;
+use crate::NpmPackageFolderResolverRc;
+use crate::PackageJsonResolverRc;
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
fn ensure_read_permission<'a, P>(
state: &mut OpState,
file_path: &'a Path,
-) -> Result<Cow<'a, Path>, AnyError>
+) -> Result<Cow<'a, Path>, deno_core::error::AnyError>
where
P: NodePermissions + 'static,
{
- let resolver = state.borrow::<NodeRequireResolverRc>().clone();
+ let loader = state.borrow::<NodeRequireLoaderRc>().clone();
let permissions = state.borrow_mut::<P>();
- resolver.ensure_read_permission(permissions, file_path)
+ loader.ensure_read_permission(permissions, file_path)
+}
+
+#[derive(Debug, Boxed)]
+pub struct RequireError(pub Box<RequireErrorKind>);
+
+#[derive(Debug, thiserror::Error)]
+pub enum RequireErrorKind {
+ #[error(transparent)]
+ UrlParse(#[from] url::ParseError),
+ #[error(transparent)]
+ Permission(deno_core::error::AnyError),
+ #[error(transparent)]
+ PackageExportsResolve(
+ #[from] node_resolver::errors::PackageExportsResolveError,
+ ),
+ #[error(transparent)]
+ PackageJsonLoad(#[from] node_resolver::errors::PackageJsonLoadError),
+ #[error(transparent)]
+ ClosestPkgJson(#[from] node_resolver::errors::ClosestPkgJsonError),
+ #[error(transparent)]
+ PackageImportsResolve(
+ #[from] node_resolver::errors::PackageImportsResolveError,
+ ),
+ #[error(transparent)]
+ FilePathConversion(#[from] deno_path_util::UrlToFilePathError),
+ #[error(transparent)]
+ UrlConversion(#[from] deno_path_util::PathToUrlError),
+ #[error(transparent)]
+ Fs(#[from] deno_io::fs::FsError),
+ #[error(transparent)]
+ ReadModule(deno_core::error::AnyError),
+ #[error("Unable to get CWD: {0}")]
+ UnableToGetCwd(deno_io::fs::FsError),
}
#[op2]
@@ -95,7 +130,7 @@ pub fn op_require_init_paths() -> Vec<String> {
pub fn op_require_node_module_paths<P>(
state: &mut OpState,
#[string] from: String,
-) -> Result<Vec<String>, AnyError>
+) -> Result<Vec<String>, RequireError>
where
P: NodePermissions + 'static,
{
@@ -104,13 +139,10 @@ where
let from = if from.starts_with("file:///") {
url_to_file_path(&Url::parse(&from)?)?
} else {
- let current_dir =
- &(fs.cwd().map_err(AnyError::from)).context("Unable to get CWD")?;
- deno_path_util::normalize_path(current_dir.join(from))
+ let current_dir = &fs.cwd().map_err(RequireErrorKind::UnableToGetCwd)?;
+ normalize_path(current_dir.join(from))
};
- let from = ensure_read_permission::<P>(state, &from)?;
-
if cfg!(windows) {
// return root node_modules when path is 'D:\\'.
let from_str = from.to_str().unwrap();
@@ -131,7 +163,7 @@ where
}
let mut paths = Vec::with_capacity(from.components().count());
- let mut current_path = from.as_ref();
+ let mut current_path = from.as_path();
let mut maybe_parent = Some(current_path);
while let Some(parent) = maybe_parent {
if !parent.ends_with("node_modules") {
@@ -191,17 +223,17 @@ pub fn op_require_resolve_deno_dir(
state: &mut OpState,
#[string] request: String,
#[string] parent_filename: String,
-) -> Option<String> {
- let resolver = state.borrow::<NpmResolverRc>();
- resolver
- .resolve_package_folder_from_package(
- &request,
- &ModuleSpecifier::from_file_path(&parent_filename).unwrap_or_else(|_| {
- panic!("Url::from_file_path: [{:?}]", parent_filename)
- }),
- )
- .ok()
- .map(|p| p.to_string_lossy().into_owned())
+) -> Result<Option<String>, AnyError> {
+ let resolver = state.borrow::<NpmPackageFolderResolverRc>();
+ Ok(
+ resolver
+ .resolve_package_folder_from_package(
+ &request,
+ &url_from_file_path(&PathBuf::from(parent_filename))?,
+ )
+ .ok()
+ .map(|p| p.to_string_lossy().into_owned()),
+ )
}
#[op2(fast)]
@@ -209,8 +241,11 @@ pub fn op_require_is_deno_dir_package(
state: &mut OpState,
#[string] path: String,
) -> bool {
- let resolver = state.borrow::<NpmResolverRc>();
- resolver.in_npm_package_at_file_path(&PathBuf::from(path))
+ let resolver = state.borrow::<NodeResolverRc>();
+ match deno_path_util::url_from_file_path(&PathBuf::from(path)) {
+ Ok(specifier) => resolver.in_npm_package(&specifier),
+ Err(_) => false,
+ }
}
#[op2]
@@ -264,7 +299,7 @@ pub fn op_require_path_is_absolute(#[string] p: String) -> bool {
pub fn op_require_stat<P>(
state: &mut OpState,
#[string] path: String,
-) -> Result<i32, AnyError>
+) -> Result<i32, deno_core::error::AnyError>
where
P: NodePermissions + 'static,
{
@@ -287,15 +322,16 @@ where
pub fn op_require_real_path<P>(
state: &mut OpState,
#[string] request: String,
-) -> Result<String, AnyError>
+) -> Result<String, RequireError>
where
P: NodePermissions + 'static,
{
let path = PathBuf::from(request);
- let path = ensure_read_permission::<P>(state, &path)?;
+ let path = ensure_read_permission::<P>(state, &path)
+ .map_err(RequireErrorKind::Permission)?;
let fs = state.borrow::<FileSystemRc>();
let canonicalized_path =
- deno_core::strip_unc_prefix(fs.realpath_sync(&path)?);
+ deno_path_util::strip_unc_prefix(fs.realpath_sync(&path)?);
Ok(canonicalized_path.to_string_lossy().into_owned())
}
@@ -319,12 +355,14 @@ pub fn op_require_path_resolve(#[serde] parts: Vec<String>) -> String {
#[string]
pub fn op_require_path_dirname(
#[string] request: String,
-) -> Result<String, AnyError> {
+) -> Result<String, deno_core::error::AnyError> {
let p = PathBuf::from(request);
if let Some(parent) = p.parent() {
Ok(parent.to_string_lossy().into_owned())
} else {
- Err(generic_error("Path doesn't have a parent"))
+ Err(deno_core::error::generic_error(
+ "Path doesn't have a parent",
+ ))
}
}
@@ -332,12 +370,14 @@ pub fn op_require_path_dirname(
#[string]
pub fn op_require_path_basename(
#[string] request: String,
-) -> Result<String, AnyError> {
+) -> Result<String, deno_core::error::AnyError> {
let p = PathBuf::from(request);
if let Some(path) = p.file_name() {
Ok(path.to_string_lossy().into_owned())
} else {
- Err(generic_error("Path doesn't have a file name"))
+ Err(deno_core::error::generic_error(
+ "Path doesn't have a file name",
+ ))
}
}
@@ -348,7 +388,7 @@ pub fn op_require_try_self_parent_path<P>(
has_parent: bool,
#[string] maybe_parent_filename: Option<String>,
#[string] maybe_parent_id: Option<String>,
-) -> Result<Option<String>, AnyError>
+) -> Result<Option<String>, deno_core::error::AnyError>
where
P: NodePermissions + 'static,
{
@@ -378,7 +418,7 @@ pub fn op_require_try_self<P>(
state: &mut OpState,
#[string] parent_path: Option<String>,
#[string] request: String,
-) -> Result<Option<String>, AnyError>
+) -> Result<Option<String>, RequireError>
where
P: NodePermissions + 'static,
{
@@ -386,8 +426,8 @@ where
return Ok(None);
}
- let node_resolver = state.borrow::<NodeResolverRc>();
- let pkg = node_resolver
+ let pkg_json_resolver = state.borrow::<PackageJsonResolverRc>();
+ let pkg = pkg_json_resolver
.get_closest_package_json_from_path(&PathBuf::from(parent_path.unwrap()))
.ok()
.flatten();
@@ -416,6 +456,7 @@ where
let referrer = deno_core::url::Url::from_file_path(&pkg.path).unwrap();
if let Some(exports) = &pkg.exports {
+ let node_resolver = state.borrow::<NodeResolverRc>();
let r = node_resolver.package_exports_resolve(
&pkg.path,
&expansion,
@@ -440,14 +481,18 @@ where
pub fn op_require_read_file<P>(
state: &mut OpState,
#[string] file_path: String,
-) -> Result<String, AnyError>
+) -> Result<String, RequireError>
where
P: NodePermissions + 'static,
{
let file_path = PathBuf::from(file_path);
- let file_path = ensure_read_permission::<P>(state, &file_path)?;
- let fs = state.borrow::<FileSystemRc>();
- Ok(fs.read_text_file_lossy_sync(&file_path, None)?)
+ // todo(dsherret): there's multiple borrows to NodeRequireLoaderRc here
+ let file_path = ensure_read_permission::<P>(state, &file_path)
+ .map_err(RequireErrorKind::Permission)?;
+ let loader = state.borrow::<NodeRequireLoaderRc>();
+ loader
+ .load_text_file_lossy(&file_path)
+ .map_err(|e| RequireErrorKind::ReadModule(e).into_box())
}
#[op2]
@@ -472,16 +517,17 @@ pub fn op_require_resolve_exports<P>(
#[string] name: String,
#[string] expansion: String,
#[string] parent_path: String,
-) -> Result<Option<String>, AnyError>
+) -> Result<Option<String>, RequireError>
where
P: NodePermissions + 'static,
{
let fs = state.borrow::<FileSystemRc>();
- let npm_resolver = state.borrow::<NpmResolverRc>();
let node_resolver = state.borrow::<NodeResolverRc>();
+ let pkg_json_resolver = state.borrow::<PackageJsonResolverRc>();
let modules_path = PathBuf::from(&modules_path_str);
- let pkg_path = if npm_resolver.in_npm_package_at_file_path(&modules_path)
+ let modules_specifier = deno_path_util::url_from_file_path(&modules_path)?;
+ let pkg_path = if node_resolver.in_npm_package(&modules_specifier)
&& !uses_local_node_modules_dir
{
modules_path
@@ -495,7 +541,7 @@ where
}
};
let Some(pkg) =
- node_resolver.load_package_json(&pkg_path.join("package.json"))?
+ pkg_json_resolver.load_package_json(&pkg_path.join("package.json"))?
else {
return Ok(None);
};
@@ -503,12 +549,16 @@ where
return Ok(None);
};
- let referrer = Url::from_file_path(parent_path).unwrap();
+ let referrer = if parent_path.is_empty() {
+ None
+ } else {
+ Some(Url::from_file_path(parent_path).unwrap())
+ };
let r = node_resolver.package_exports_resolve(
&pkg.path,
&format!(".{expansion}"),
exports,
- Some(&referrer),
+ referrer.as_ref(),
NodeModuleKind::Cjs,
REQUIRE_CONDITIONS,
NodeResolutionMode::Execution,
@@ -520,21 +570,17 @@ where
}))
}
-#[op2]
-#[serde]
-pub fn op_require_read_closest_package_json<P>(
+#[op2(fast)]
+pub fn op_require_is_maybe_cjs(
state: &mut OpState,
#[string] filename: String,
-) -> Result<Option<PackageJsonRc>, AnyError>
-where
- P: NodePermissions + 'static,
-{
+) -> Result<bool, ClosestPkgJsonError> {
let filename = PathBuf::from(filename);
- // permissions: allow reading the closest package.json files
- let node_resolver = state.borrow::<NodeResolverRc>().clone();
- node_resolver
- .get_closest_package_json_from_path(&filename)
- .map_err(AnyError::from)
+ let Ok(url) = url_from_file_path(&filename) else {
+ return Ok(false);
+ };
+ let loader = state.borrow::<NodeRequireLoaderRc>();
+ loader.is_maybe_cjs(&url)
}
#[op2]
@@ -546,13 +592,13 @@ pub fn op_require_read_package_scope<P>(
where
P: NodePermissions + 'static,
{
- let node_resolver = state.borrow::<NodeResolverRc>().clone();
+ let pkg_json_resolver = state.borrow::<PackageJsonResolverRc>();
let package_json_path = PathBuf::from(package_json_path);
if package_json_path.file_name() != Some("package.json".as_ref()) {
// permissions: do not allow reading a non-package.json file
return None;
}
- node_resolver
+ pkg_json_resolver
.load_package_json(&package_json_path)
.ok()
.flatten()
@@ -564,22 +610,23 @@ pub fn op_require_package_imports_resolve<P>(
state: &mut OpState,
#[string] referrer_filename: String,
#[string] request: String,
-) -> Result<Option<String>, AnyError>
+) -> Result<Option<String>, RequireError>
where
P: NodePermissions + 'static,
{
let referrer_path = PathBuf::from(&referrer_filename);
- let referrer_path = ensure_read_permission::<P>(state, &referrer_path)?;
- let node_resolver = state.borrow::<NodeResolverRc>();
+ let referrer_path = ensure_read_permission::<P>(state, &referrer_path)
+ .map_err(RequireErrorKind::Permission)?;
+ let pkg_json_resolver = state.borrow::<PackageJsonResolverRc>();
let Some(pkg) =
- node_resolver.get_closest_package_json_from_path(&referrer_path)?
+ pkg_json_resolver.get_closest_package_json_from_path(&referrer_path)?
else {
return Ok(None);
};
if pkg.imports.is_some() {
- let referrer_url =
- deno_core::url::Url::from_file_path(&referrer_filename).unwrap();
+ let node_resolver = state.borrow::<NodeResolverRc>();
+ let referrer_url = Url::from_file_path(&referrer_filename).unwrap();
let url = node_resolver.package_imports_resolve(
&request,
Some(&referrer_url),
@@ -604,20 +651,11 @@ pub fn op_require_break_on_next_statement(state: Rc<RefCell<OpState>>) {
inspector.wait_for_session_and_break_on_next_statement()
}
-fn url_to_file_path_string(url: &Url) -> Result<String, AnyError> {
+fn url_to_file_path_string(url: &Url) -> Result<String, RequireError> {
let file_path = url_to_file_path(url)?;
Ok(file_path.to_string_lossy().into_owned())
}
-fn url_to_file_path(url: &Url) -> Result<PathBuf, AnyError> {
- match url.to_file_path() {
- Ok(file_path) => Ok(file_path),
- Err(()) => {
- deno_core::anyhow::bail!("failed to convert '{}' to file path", url)
- }
- }
-}
-
#[op2(fast)]
pub fn op_require_can_parse_as_esm(
scope: &mut v8::HandleScope,
diff --git a/ext/node/ops/util.rs b/ext/node/ops/util.rs
index 533d51c92..1c177ac04 100644
--- a/ext/node/ops/util.rs
+++ b/ext/node/ops/util.rs
@@ -1,6 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::OpState;
use deno_core::ResourceHandle;
@@ -22,7 +21,7 @@ enum HandleType {
pub fn op_node_guess_handle_type(
state: &mut OpState,
rid: u32,
-) -> Result<u32, AnyError> {
+) -> Result<u32, deno_core::error::AnyError> {
let handle = state.resource_table.get_handle(rid)?;
let handle_type = match handle {
diff --git a/ext/node/ops/v8.rs b/ext/node/ops/v8.rs
index 8813d2e18..61f67f11f 100644
--- a/ext/node/ops/v8.rs
+++ b/ext/node/ops/v8.rs
@@ -1,7 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-use deno_core::error::generic_error;
-use deno_core::error::type_error;
-use deno_core::error::AnyError;
+
use deno_core::op2;
use deno_core::v8;
use deno_core::FastString;
@@ -206,10 +204,9 @@ pub fn op_v8_write_value(
scope: &mut v8::HandleScope,
#[cppgc] ser: &Serializer,
value: v8::Local<v8::Value>,
-) -> Result<(), AnyError> {
+) {
let context = scope.get_current_context();
ser.inner.write_value(context, value);
- Ok(())
}
struct DeserBuffer {
@@ -271,11 +268,13 @@ pub fn op_v8_new_deserializer(
scope: &mut v8::HandleScope,
obj: v8::Local<v8::Object>,
buffer: v8::Local<v8::ArrayBufferView>,
-) -> Result<Deserializer<'static>, AnyError> {
+) -> Result<Deserializer<'static>, deno_core::error::AnyError> {
let offset = buffer.byte_offset();
let len = buffer.byte_length();
let backing_store = buffer.get_backing_store().ok_or_else(|| {
- generic_error("deserialization buffer has no backing store")
+ deno_core::error::generic_error(
+ "deserialization buffer has no backing store",
+ )
})?;
let (buf_slice, buf_ptr) = if let Some(data) = backing_store.data() {
// SAFETY: the offset is valid for the underlying buffer because we're getting it directly from v8
@@ -317,10 +316,10 @@ pub fn op_v8_transfer_array_buffer_de(
#[op2(fast)]
pub fn op_v8_read_double(
#[cppgc] deser: &Deserializer,
-) -> Result<f64, AnyError> {
+) -> Result<f64, deno_core::error::AnyError> {
let mut double = 0f64;
if !deser.inner.read_double(&mut double) {
- return Err(type_error("ReadDouble() failed"));
+ return Err(deno_core::error::type_error("ReadDouble() failed"));
}
Ok(double)
}
@@ -355,10 +354,10 @@ pub fn op_v8_read_raw_bytes(
#[op2(fast)]
pub fn op_v8_read_uint32(
#[cppgc] deser: &Deserializer,
-) -> Result<u32, AnyError> {
+) -> Result<u32, deno_core::error::AnyError> {
let mut value = 0;
if !deser.inner.read_uint32(&mut value) {
- return Err(type_error("ReadUint32() failed"));
+ return Err(deno_core::error::type_error("ReadUint32() failed"));
}
Ok(value)
@@ -368,10 +367,10 @@ pub fn op_v8_read_uint32(
#[serde]
pub fn op_v8_read_uint64(
#[cppgc] deser: &Deserializer,
-) -> Result<(u32, u32), AnyError> {
+) -> Result<(u32, u32), deno_core::error::AnyError> {
let mut val = 0;
if !deser.inner.read_uint64(&mut val) {
- return Err(type_error("ReadUint64() failed"));
+ return Err(deno_core::error::type_error("ReadUint64() failed"));
}
Ok(((val >> 32) as u32, val as u32))
diff --git a/ext/node/ops/winerror.rs b/ext/node/ops/winerror.rs
index c0d66f7d0..cb053774e 100644
--- a/ext/node/ops/winerror.rs
+++ b/ext/node/ops/winerror.rs
@@ -62,10 +62,11 @@ pub fn op_node_sys_to_uv_error(err: i32) -> String {
WSAEHOSTUNREACH => "EHOSTUNREACH",
ERROR_INSUFFICIENT_BUFFER => "EINVAL",
ERROR_INVALID_DATA => "EINVAL",
- ERROR_INVALID_NAME => "EINVAL",
+ ERROR_INVALID_NAME => "ENOENT",
ERROR_INVALID_PARAMETER => "EINVAL",
WSAEINVAL => "EINVAL",
WSAEPFNOSUPPORT => "EINVAL",
+ ERROR_NOT_A_REPARSE_POINT => "EINVAL",
ERROR_BEGINNING_OF_MEDIA => "EIO",
ERROR_BUS_RESET => "EIO",
ERROR_CRC => "EIO",
diff --git a/ext/node/ops/worker_threads.rs b/ext/node/ops/worker_threads.rs
index 4c50092f2..d2e575882 100644
--- a/ext/node/ops/worker_threads.rs
+++ b/ext/node/ops/worker_threads.rs
@@ -1,39 +1,56 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-use deno_core::error::generic_error;
-use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::url::Url;
use deno_core::OpState;
use deno_fs::FileSystemRc;
-use node_resolver::NodeResolution;
use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use crate::NodePermissions;
-use crate::NodeRequireResolverRc;
-use crate::NodeResolverRc;
+use crate::NodeRequireLoaderRc;
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
fn ensure_read_permission<'a, P>(
state: &mut OpState,
file_path: &'a Path,
-) -> Result<Cow<'a, Path>, AnyError>
+) -> Result<Cow<'a, Path>, deno_core::error::AnyError>
where
P: NodePermissions + 'static,
{
- let resolver = state.borrow::<NodeRequireResolverRc>().clone();
+ let loader = state.borrow::<NodeRequireLoaderRc>().clone();
let permissions = state.borrow_mut::<P>();
- resolver.ensure_read_permission(permissions, file_path)
+ loader.ensure_read_permission(permissions, file_path)
}
+#[derive(Debug, thiserror::Error)]
+pub enum WorkerThreadsFilenameError {
+ #[error(transparent)]
+ Permission(deno_core::error::AnyError),
+ #[error("{0}")]
+ UrlParse(#[from] url::ParseError),
+ #[error("Relative path entries must start with '.' or '..'")]
+ InvalidRelativeUrl,
+ #[error("URL from Path-String")]
+ UrlFromPathString,
+ #[error("URL to Path-String")]
+ UrlToPathString,
+ #[error("URL to Path")]
+ UrlToPath,
+ #[error("File not found [{0:?}]")]
+ FileNotFound(PathBuf),
+ #[error(transparent)]
+ Fs(#[from] deno_io::fs::FsError),
+}
+
+// todo(dsherret): we should remove this and do all this work inside op_create_worker
#[op2]
#[string]
pub fn op_worker_threads_filename<P>(
state: &mut OpState,
#[string] specifier: String,
-) -> Result<String, AnyError>
+) -> Result<String, WorkerThreadsFilenameError>
where
P: NodePermissions + 'static,
{
@@ -45,44 +62,26 @@ where
} else {
let path = PathBuf::from(&specifier);
if path.is_relative() && !specifier.starts_with('.') {
- return Err(generic_error(
- "Relative path entries must start with '.' or '..'",
- ));
+ return Err(WorkerThreadsFilenameError::InvalidRelativeUrl);
}
- let path = ensure_read_permission::<P>(state, &path)?;
+ let path = ensure_read_permission::<P>(state, &path)
+ .map_err(WorkerThreadsFilenameError::Permission)?;
let fs = state.borrow::<FileSystemRc>();
let canonicalized_path =
- deno_core::strip_unc_prefix(fs.realpath_sync(&path)?);
+ deno_path_util::strip_unc_prefix(fs.realpath_sync(&path)?);
Url::from_file_path(canonicalized_path)
- .map_err(|e| generic_error(format!("URL from Path-String: {:#?}", e)))?
+ .map_err(|_| WorkerThreadsFilenameError::UrlFromPathString)?
};
let url_path = url
.to_file_path()
- .map_err(|e| generic_error(format!("URL to Path-String: {:#?}", e)))?;
- let url_path = ensure_read_permission::<P>(state, &url_path)?;
+ .map_err(|_| WorkerThreadsFilenameError::UrlToPathString)?;
+ let url_path = ensure_read_permission::<P>(state, &url_path)
+ .map_err(WorkerThreadsFilenameError::Permission)?;
let fs = state.borrow::<FileSystemRc>();
if !fs.exists_sync(&url_path) {
- return Err(generic_error(format!("File not found [{:?}]", url_path)));
- }
- let node_resolver = state.borrow::<NodeResolverRc>();
- match node_resolver.url_to_node_resolution(url)? {
- NodeResolution::Esm(u) => Ok(u.to_string()),
- NodeResolution::CommonJs(u) => wrap_cjs(u),
- NodeResolution::BuiltIn(_) => Err(generic_error("Neither ESM nor CJS")),
+ return Err(WorkerThreadsFilenameError::FileNotFound(
+ url_path.to_path_buf(),
+ ));
}
-}
-
-///
-/// Wrap a CJS file-URL and the required setup in a stringified `data:`-URL
-///
-fn wrap_cjs(url: Url) -> Result<String, AnyError> {
- let path = url
- .to_file_path()
- .map_err(|e| generic_error(format!("URL to Path: {:#?}", e)))?;
- let filename = path.file_name().unwrap().to_string_lossy();
- Ok(format!(
- "data:text/javascript,import {{ createRequire }} from \"node:module\";\
- const require = createRequire(\"{}\"); require(\"./{}\");",
- url, filename,
- ))
+ Ok(url.to_string())
}
diff --git a/ext/node/ops/zlib/brotli.rs b/ext/node/ops/zlib/brotli.rs
index 3e3905fc3..1a681ff7f 100644
--- a/ext/node/ops/zlib/brotli.rs
+++ b/ext/node/ops/zlib/brotli.rs
@@ -9,8 +9,6 @@ use brotli::BrotliDecompressStream;
use brotli::BrotliResult;
use brotli::BrotliState;
use brotli::Decompressor;
-use deno_core::error::type_error;
-use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::JsBuffer;
use deno_core::OpState;
@@ -19,7 +17,23 @@ use deno_core::ToJsBuffer;
use std::cell::RefCell;
use std::io::Read;
-fn encoder_mode(mode: u32) -> Result<BrotliEncoderMode, AnyError> {
+#[derive(Debug, thiserror::Error)]
+pub enum BrotliError {
+ #[error("Invalid encoder mode")]
+ InvalidEncoderMode,
+ #[error("Failed to compress")]
+ CompressFailed,
+ #[error("Failed to decompress")]
+ DecompressFailed,
+ #[error(transparent)]
+ Join(#[from] tokio::task::JoinError),
+ #[error(transparent)]
+ Resource(deno_core::error::AnyError),
+ #[error("{0}")]
+ Io(std::io::Error),
+}
+
+fn encoder_mode(mode: u32) -> Result<BrotliEncoderMode, BrotliError> {
Ok(match mode {
0 => BrotliEncoderMode::BROTLI_MODE_GENERIC,
1 => BrotliEncoderMode::BROTLI_MODE_TEXT,
@@ -28,7 +42,7 @@ fn encoder_mode(mode: u32) -> Result<BrotliEncoderMode, AnyError> {
4 => BrotliEncoderMode::BROTLI_FORCE_MSB_PRIOR,
5 => BrotliEncoderMode::BROTLI_FORCE_UTF8_PRIOR,
6 => BrotliEncoderMode::BROTLI_FORCE_SIGNED_PRIOR,
- _ => return Err(type_error("Invalid encoder mode")),
+ _ => return Err(BrotliError::InvalidEncoderMode),
})
}
@@ -40,7 +54,7 @@ pub fn op_brotli_compress(
#[smi] quality: i32,
#[smi] lgwin: i32,
#[smi] mode: u32,
-) -> Result<usize, AnyError> {
+) -> Result<usize, BrotliError> {
let mode = encoder_mode(mode)?;
let mut out_size = out.len();
@@ -57,7 +71,7 @@ pub fn op_brotli_compress(
&mut |_, _, _, _| (),
);
if result != 1 {
- return Err(type_error("Failed to compress"));
+ return Err(BrotliError::CompressFailed);
}
Ok(out_size)
@@ -87,7 +101,7 @@ pub async fn op_brotli_compress_async(
#[smi] quality: i32,
#[smi] lgwin: i32,
#[smi] mode: u32,
-) -> Result<ToJsBuffer, AnyError> {
+) -> Result<ToJsBuffer, BrotliError> {
let mode = encoder_mode(mode)?;
tokio::task::spawn_blocking(move || {
let input = &*input;
@@ -107,7 +121,7 @@ pub async fn op_brotli_compress_async(
&mut |_, _, _, _| (),
);
if result != 1 {
- return Err(type_error("Failed to compress"));
+ return Err(BrotliError::CompressFailed);
}
out.truncate(out_size);
@@ -151,8 +165,11 @@ pub fn op_brotli_compress_stream(
#[smi] rid: u32,
#[buffer] input: &[u8],
#[buffer] output: &mut [u8],
-) -> Result<usize, AnyError> {
- let ctx = state.resource_table.get::<BrotliCompressCtx>(rid)?;
+) -> Result<usize, BrotliError> {
+ let ctx = state
+ .resource_table
+ .get::<BrotliCompressCtx>(rid)
+ .map_err(BrotliError::Resource)?;
let mut inst = ctx.inst.borrow_mut();
let mut output_offset = 0;
@@ -168,7 +185,7 @@ pub fn op_brotli_compress_stream(
&mut |_, _, _, _| (),
);
if !result {
- return Err(type_error("Failed to compress"));
+ return Err(BrotliError::CompressFailed);
}
Ok(output_offset)
@@ -180,8 +197,11 @@ pub fn op_brotli_compress_stream_end(
state: &mut OpState,
#[smi] rid: u32,
#[buffer] output: &mut [u8],
-) -> Result<usize, AnyError> {
- let ctx = state.resource_table.get::<BrotliCompressCtx>(rid)?;
+) -> Result<usize, BrotliError> {
+ let ctx = state
+ .resource_table
+ .get::<BrotliCompressCtx>(rid)
+ .map_err(BrotliError::Resource)?;
let mut inst = ctx.inst.borrow_mut();
let mut output_offset = 0;
@@ -197,13 +217,13 @@ pub fn op_brotli_compress_stream_end(
&mut |_, _, _, _| (),
);
if !result {
- return Err(type_error("Failed to compress"));
+ return Err(BrotliError::CompressFailed);
}
Ok(output_offset)
}
-fn brotli_decompress(buffer: &[u8]) -> Result<ToJsBuffer, AnyError> {
+fn brotli_decompress(buffer: &[u8]) -> Result<ToJsBuffer, std::io::Error> {
let mut output = Vec::with_capacity(4096);
let mut decompressor = Decompressor::new(buffer, buffer.len());
decompressor.read_to_end(&mut output)?;
@@ -214,7 +234,7 @@ fn brotli_decompress(buffer: &[u8]) -> Result<ToJsBuffer, AnyError> {
#[serde]
pub fn op_brotli_decompress(
#[buffer] buffer: &[u8],
-) -> Result<ToJsBuffer, AnyError> {
+) -> Result<ToJsBuffer, std::io::Error> {
brotli_decompress(buffer)
}
@@ -222,8 +242,11 @@ pub fn op_brotli_decompress(
#[serde]
pub async fn op_brotli_decompress_async(
#[buffer] buffer: JsBuffer,
-) -> Result<ToJsBuffer, AnyError> {
- tokio::task::spawn_blocking(move || brotli_decompress(&buffer)).await?
+) -> Result<ToJsBuffer, BrotliError> {
+ tokio::task::spawn_blocking(move || {
+ brotli_decompress(&buffer).map_err(BrotliError::Io)
+ })
+ .await?
}
struct BrotliDecompressCtx {
@@ -252,8 +275,11 @@ pub fn op_brotli_decompress_stream(
#[smi] rid: u32,
#[buffer] input: &[u8],
#[buffer] output: &mut [u8],
-) -> Result<usize, AnyError> {
- let ctx = state.resource_table.get::<BrotliDecompressCtx>(rid)?;
+) -> Result<usize, BrotliError> {
+ let ctx = state
+ .resource_table
+ .get::<BrotliDecompressCtx>(rid)
+ .map_err(BrotliError::Resource)?;
let mut inst = ctx.inst.borrow_mut();
let mut output_offset = 0;
@@ -268,7 +294,7 @@ pub fn op_brotli_decompress_stream(
&mut inst,
);
if matches!(result, BrotliResult::ResultFailure) {
- return Err(type_error("Failed to decompress"));
+ return Err(BrotliError::DecompressFailed);
}
Ok(output_offset)
@@ -280,8 +306,11 @@ pub fn op_brotli_decompress_stream_end(
state: &mut OpState,
#[smi] rid: u32,
#[buffer] output: &mut [u8],
-) -> Result<usize, AnyError> {
- let ctx = state.resource_table.get::<BrotliDecompressCtx>(rid)?;
+) -> Result<usize, BrotliError> {
+ let ctx = state
+ .resource_table
+ .get::<BrotliDecompressCtx>(rid)
+ .map_err(BrotliError::Resource)?;
let mut inst = ctx.inst.borrow_mut();
let mut output_offset = 0;
@@ -296,7 +325,7 @@ pub fn op_brotli_decompress_stream_end(
&mut inst,
);
if matches!(result, BrotliResult::ResultFailure) {
- return Err(type_error("Failed to decompress"));
+ return Err(BrotliError::DecompressFailed);
}
Ok(output_offset)
diff --git a/ext/node/ops/zlib/mod.rs b/ext/node/ops/zlib/mod.rs
index b1d6d21d2..991c0925d 100644
--- a/ext/node/ops/zlib/mod.rs
+++ b/ext/node/ops/zlib/mod.rs
@@ -1,14 +1,14 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-use deno_core::error::type_error;
-use deno_core::error::AnyError;
+
use deno_core::op2;
+use libc::c_ulong;
use std::borrow::Cow;
use std::cell::RefCell;
use zlib::*;
mod alloc;
pub mod brotli;
-mod mode;
+pub mod mode;
mod stream;
use mode::Flush;
@@ -17,11 +17,11 @@ use mode::Mode;
use self::stream::StreamWrapper;
#[inline]
-fn check(condition: bool, msg: &str) -> Result<(), AnyError> {
+fn check(condition: bool, msg: &str) -> Result<(), deno_core::error::AnyError> {
if condition {
Ok(())
} else {
- Err(type_error(msg.to_string()))
+ Err(deno_core::error::type_error(msg.to_string()))
}
}
@@ -56,7 +56,7 @@ impl ZlibInner {
out_off: u32,
out_len: u32,
flush: Flush,
- ) -> Result<(), AnyError> {
+ ) -> Result<(), deno_core::error::AnyError> {
check(self.init_done, "write before init")?;
check(!self.write_in_progress, "write already in progress")?;
check(!self.pending_close, "close already in progress")?;
@@ -65,11 +65,11 @@ impl ZlibInner {
let next_in = input
.get(in_off as usize..in_off as usize + in_len as usize)
- .ok_or_else(|| type_error("invalid input range"))?
+ .ok_or_else(|| deno_core::error::type_error("invalid input range"))?
.as_ptr() as *mut _;
let next_out = out
.get_mut(out_off as usize..out_off as usize + out_len as usize)
- .ok_or_else(|| type_error("invalid output range"))?
+ .ok_or_else(|| deno_core::error::type_error("invalid output range"))?
.as_mut_ptr();
self.strm.avail_in = in_len;
@@ -81,7 +81,10 @@ impl ZlibInner {
Ok(())
}
- fn do_write(&mut self, flush: Flush) -> Result<(), AnyError> {
+ fn do_write(
+ &mut self,
+ flush: Flush,
+ ) -> Result<(), deno_core::error::AnyError> {
self.flush = flush;
match self.mode {
Mode::Deflate | Mode::Gzip | Mode::DeflateRaw => {
@@ -127,7 +130,7 @@ impl ZlibInner {
self.mode = Mode::Inflate;
}
} else if next_expected_header_byte.is_some() {
- return Err(type_error(
+ return Err(deno_core::error::type_error(
"invalid number of gzip magic number bytes read",
));
}
@@ -181,7 +184,7 @@ impl ZlibInner {
Ok(())
}
- fn init_stream(&mut self) -> Result<(), AnyError> {
+ fn init_stream(&mut self) -> Result<(), deno_core::error::AnyError> {
match self.mode {
Mode::Gzip | Mode::Gunzip => self.window_bits += 16,
Mode::Unzip => self.window_bits += 32,
@@ -199,7 +202,7 @@ impl ZlibInner {
Mode::Inflate | Mode::Gunzip | Mode::InflateRaw | Mode::Unzip => {
self.strm.inflate_init(self.window_bits)
}
- Mode::None => return Err(type_error("Unknown mode")),
+ Mode::None => return Err(deno_core::error::type_error("Unknown mode")),
};
self.write_in_progress = false;
@@ -208,7 +211,7 @@ impl ZlibInner {
Ok(())
}
- fn close(&mut self) -> Result<bool, AnyError> {
+ fn close(&mut self) -> Result<bool, deno_core::error::AnyError> {
if self.write_in_progress {
self.pending_close = true;
return Ok(false);
@@ -222,10 +225,8 @@ impl ZlibInner {
Ok(true)
}
- fn reset_stream(&mut self) -> Result<(), AnyError> {
+ fn reset_stream(&mut self) {
self.err = self.strm.reset(self.mode);
-
- Ok(())
}
}
@@ -243,7 +244,7 @@ impl deno_core::Resource for Zlib {
#[op2]
#[cppgc]
-pub fn op_zlib_new(#[smi] mode: i32) -> Result<Zlib, AnyError> {
+pub fn op_zlib_new(#[smi] mode: i32) -> Result<Zlib, mode::ModeError> {
let mode = Mode::try_from(mode)?;
let inner = ZlibInner {
@@ -256,12 +257,20 @@ pub fn op_zlib_new(#[smi] mode: i32) -> Result<Zlib, AnyError> {
})
}
+#[derive(Debug, thiserror::Error)]
+pub enum ZlibError {
+ #[error("zlib not initialized")]
+ NotInitialized,
+ #[error(transparent)]
+ Mode(#[from] mode::ModeError),
+ #[error(transparent)]
+ Other(#[from] deno_core::error::AnyError),
+}
+
#[op2(fast)]
-pub fn op_zlib_close(#[cppgc] resource: &Zlib) -> Result<(), AnyError> {
+pub fn op_zlib_close(#[cppgc] resource: &Zlib) -> Result<(), ZlibError> {
let mut resource = resource.inner.borrow_mut();
- let zlib = resource
- .as_mut()
- .ok_or_else(|| type_error("zlib not initialized"))?;
+ let zlib = resource.as_mut().ok_or(ZlibError::NotInitialized)?;
// If there is a pending write, defer the close until the write is done.
zlib.close()?;
@@ -282,11 +291,9 @@ pub fn op_zlib_write(
#[smi] out_off: u32,
#[smi] out_len: u32,
#[buffer] result: &mut [u32],
-) -> Result<i32, AnyError> {
+) -> Result<i32, ZlibError> {
let mut zlib = resource.inner.borrow_mut();
- let zlib = zlib
- .as_mut()
- .ok_or_else(|| type_error("zlib not initialized"))?;
+ let zlib = zlib.as_mut().ok_or(ZlibError::NotInitialized)?;
let flush = Flush::try_from(flush)?;
zlib.start_write(input, in_off, in_len, out, out_off, out_len, flush)?;
@@ -307,11 +314,9 @@ pub fn op_zlib_init(
#[smi] mem_level: i32,
#[smi] strategy: i32,
#[buffer] dictionary: &[u8],
-) -> Result<i32, AnyError> {
+) -> Result<i32, ZlibError> {
let mut zlib = resource.inner.borrow_mut();
- let zlib = zlib
- .as_mut()
- .ok_or_else(|| type_error("zlib not initialized"))?;
+ let zlib = zlib.as_mut().ok_or(ZlibError::NotInitialized)?;
check((8..=15).contains(&window_bits), "invalid windowBits")?;
check((-1..=9).contains(&level), "invalid level")?;
@@ -348,13 +353,11 @@ pub fn op_zlib_init(
#[op2(fast)]
#[smi]
-pub fn op_zlib_reset(#[cppgc] resource: &Zlib) -> Result<i32, AnyError> {
+pub fn op_zlib_reset(#[cppgc] resource: &Zlib) -> Result<i32, ZlibError> {
let mut zlib = resource.inner.borrow_mut();
- let zlib = zlib
- .as_mut()
- .ok_or_else(|| type_error("zlib not initialized"))?;
+ let zlib = zlib.as_mut().ok_or(ZlibError::NotInitialized)?;
- zlib.reset_stream()?;
+ zlib.reset_stream();
Ok(zlib.err)
}
@@ -362,12 +365,10 @@ pub fn op_zlib_reset(#[cppgc] resource: &Zlib) -> Result<i32, AnyError> {
#[op2(fast)]
pub fn op_zlib_close_if_pending(
#[cppgc] resource: &Zlib,
-) -> Result<(), AnyError> {
+) -> Result<(), ZlibError> {
let pending_close = {
let mut zlib = resource.inner.borrow_mut();
- let zlib = zlib
- .as_mut()
- .ok_or_else(|| type_error("zlib not initialized"))?;
+ let zlib = zlib.as_mut().ok_or(ZlibError::NotInitialized)?;
zlib.write_in_progress = false;
zlib.pending_close
@@ -381,6 +382,15 @@ pub fn op_zlib_close_if_pending(
Ok(())
}
+#[op2(fast)]
+#[smi]
+pub fn op_zlib_crc32(#[buffer] data: &[u8], #[smi] value: u32) -> u32 {
+ // SAFETY: `data` is a valid buffer.
+ unsafe {
+ zlib::crc32(value as c_ulong, data.as_ptr(), data.len() as u32) as u32
+ }
+}
+
#[cfg(test)]
mod tests {
use super::*;
diff --git a/ext/node/ops/zlib/mode.rs b/ext/node/ops/zlib/mode.rs
index 753300cc4..41565f9b1 100644
--- a/ext/node/ops/zlib/mode.rs
+++ b/ext/node/ops/zlib/mode.rs
@@ -1,19 +1,8 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-#[derive(Debug)]
-pub enum Error {
- BadArgument,
-}
-
-impl std::fmt::Display for Error {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- match self {
- Error::BadArgument => write!(f, "bad argument"),
- }
- }
-}
-
-impl std::error::Error for Error {}
+#[derive(Debug, thiserror::Error)]
+#[error("bad argument")]
+pub struct ModeError;
macro_rules! repr_i32 {
($(#[$meta:meta])* $vis:vis enum $name:ident {
@@ -25,12 +14,12 @@ macro_rules! repr_i32 {
}
impl core::convert::TryFrom<i32> for $name {
- type Error = Error;
+ type Error = ModeError;
fn try_from(v: i32) -> Result<Self, Self::Error> {
match v {
$(x if x == $name::$vname as i32 => Ok($name::$vname),)*
- _ => Err(Error::BadArgument),
+ _ => Err(ModeError),
}
}
}
diff --git a/ext/node/polyfills/01_require.js b/ext/node/polyfills/01_require.js
index 5b0980c31..083d4e49b 100644
--- a/ext/node/polyfills/01_require.js
+++ b/ext/node/polyfills/01_require.js
@@ -11,6 +11,7 @@ import {
op_require_can_parse_as_esm,
op_require_init_paths,
op_require_is_deno_dir_package,
+ op_require_is_maybe_cjs,
op_require_is_request_relative,
op_require_node_module_paths,
op_require_package_imports_resolve,
@@ -19,7 +20,6 @@ import {
op_require_path_is_absolute,
op_require_path_resolve,
op_require_proxy_path,
- op_require_read_closest_package_json,
op_require_read_file,
op_require_read_package_scope,
op_require_real_path,
@@ -523,17 +523,13 @@ function resolveExports(
return;
}
- if (!parentPath) {
- return false;
- }
-
return op_require_resolve_exports(
usesLocalNodeModulesDir,
modulesPath,
request,
name,
expansion,
- parentPath,
+ parentPath ?? "",
) ?? false;
}
@@ -1064,23 +1060,22 @@ Module.prototype._compile = function (content, filename, format) {
return result;
};
-Module._extensions[".js"] = function (module, filename) {
- const content = op_require_read_file(filename);
-
- let format;
- if (StringPrototypeEndsWith(filename, ".js")) {
- const pkg = op_require_read_closest_package_json(filename);
- if (pkg?.type === "module") {
- format = "module";
- } else if (pkg?.type === "commonjs") {
- format = "commonjs";
- }
- } else if (StringPrototypeEndsWith(filename, ".cjs")) {
- format = "commonjs";
- }
-
- module._compile(content, filename, format);
-};
+Module._extensions[".js"] =
+ Module._extensions[".ts"] =
+ Module._extensions[".jsx"] =
+ Module._extensions[".tsx"] =
+ function (module, filename) {
+ const content = op_require_read_file(filename);
+ const format = op_require_is_maybe_cjs(filename) ? undefined : "module";
+ module._compile(content, filename, format);
+ };
+
+Module._extensions[".cjs"] =
+ Module._extensions[".cts"] =
+ function (module, filename) {
+ const content = op_require_read_file(filename);
+ module._compile(content, filename, "commonjs");
+ };
function loadESMFromCJS(module, filename, code) {
const namespace = op_import_sync(
@@ -1091,7 +1086,10 @@ function loadESMFromCJS(module, filename, code) {
module.exports = namespace;
}
-Module._extensions[".mjs"] = function (module, filename) {
+Module._extensions[".mjs"] = Module._extensions[".mts"] = function (
+ module,
+ filename,
+) {
loadESMFromCJS(module, filename);
};
@@ -1212,6 +1210,24 @@ function isBuiltin(moduleName) {
!StringPrototypeStartsWith(moduleName, "internal/");
}
+function getBuiltinModule(id) {
+ if (!isBuiltin(id)) {
+ return undefined;
+ }
+
+ if (StringPrototypeStartsWith(id, "node:")) {
+ // Slice 'node:' prefix
+ id = StringPrototypeSlice(id, 5);
+ }
+
+ const mod = loadNativeModule(id, id);
+ if (mod) {
+ return mod.exports;
+ }
+
+ return undefined;
+}
+
Module.isBuiltin = isBuiltin;
Module.createRequire = createRequire;
@@ -1291,6 +1307,8 @@ export function findSourceMap(_path) {
return undefined;
}
+Module.findSourceMap = findSourceMap;
+
/**
* @param {string | URL} _specifier
* @param {string | URL} _parentUrl
@@ -1304,7 +1322,7 @@ export function register(_specifier, _parentUrl, _options) {
return undefined;
}
-export { builtinModules, createRequire, isBuiltin, Module };
+export { builtinModules, createRequire, getBuiltinModule, isBuiltin, Module };
export const _cache = Module._cache;
export const _extensions = Module._extensions;
export const _findPath = Module._findPath;
diff --git a/ext/node/polyfills/_fs/_fs_common.ts b/ext/node/polyfills/_fs/_fs_common.ts
index ac0bf5a55..a29548bb3 100644
--- a/ext/node/polyfills/_fs/_fs_common.ts
+++ b/ext/node/polyfills/_fs/_fs_common.ts
@@ -20,6 +20,7 @@ import {
notImplemented,
TextEncodings,
} from "ext:deno_node/_utils.ts";
+import { type Buffer } from "node:buffer";
export type CallbackWithError = (err: ErrnoException | null) => void;
diff --git a/ext/node/polyfills/_fs/_fs_copy.ts b/ext/node/polyfills/_fs/_fs_copy.ts
index 2f8ddf4fc..0434bff4d 100644
--- a/ext/node/polyfills/_fs/_fs_copy.ts
+++ b/ext/node/polyfills/_fs/_fs_copy.ts
@@ -53,8 +53,9 @@ export function copyFile(
}, (e) => {
if (e instanceof Deno.errors.NotFound) {
Deno.copyFile(srcStr, destStr).then(() => cb(null), cb);
+ } else {
+ cb(e);
}
- cb(e);
});
} else {
Deno.copyFile(srcStr, destStr).then(() => cb(null), cb);
@@ -83,8 +84,9 @@ export function copyFileSync(
} catch (e) {
if (e instanceof Deno.errors.NotFound) {
Deno.copyFileSync(srcStr, destStr);
+ } else {
+ throw e;
}
- throw e;
}
} else {
Deno.copyFileSync(srcStr, destStr);
diff --git a/ext/node/polyfills/_fs/_fs_open.ts b/ext/node/polyfills/_fs/_fs_open.ts
index 8bd989790..31ca4bb61 100644
--- a/ext/node/polyfills/_fs/_fs_open.ts
+++ b/ext/node/polyfills/_fs/_fs_open.ts
@@ -147,8 +147,8 @@ export function open(
export function openPromise(
path: string | Buffer | URL,
- flags?: openFlags = "r",
- mode? = 0o666,
+ flags: openFlags = "r",
+ mode = 0o666,
): Promise<FileHandle> {
return new Promise((resolve, reject) => {
open(path, flags, mode, (err, fd) => {
diff --git a/ext/node/polyfills/_fs/_fs_readFile.ts b/ext/node/polyfills/_fs/_fs_readFile.ts
index 0f05ee167..cf7e0305d 100644
--- a/ext/node/polyfills/_fs/_fs_readFile.ts
+++ b/ext/node/polyfills/_fs/_fs_readFile.ts
@@ -19,6 +19,7 @@ import {
TextEncodings,
} from "ext:deno_node/_utils.ts";
import { FsFile } from "ext:deno_fs/30_fs.js";
+import { denoErrorToNodeError } from "ext:deno_node/internal/errors.ts";
function maybeDecode(data: Uint8Array, encoding: TextEncodings): string;
function maybeDecode(
@@ -87,7 +88,7 @@ export function readFile(
}
const buffer = maybeDecode(data, encoding);
(cb as BinaryCallback)(null, buffer);
- }, (err) => cb && cb(err));
+ }, (err) => cb && cb(denoErrorToNodeError(err)));
}
}
@@ -117,7 +118,12 @@ export function readFileSync(
opt?: FileOptionsArgument,
): string | Buffer {
path = path instanceof URL ? pathFromURL(path) : path;
- const data = Deno.readFileSync(path);
+ let data;
+ try {
+ data = Deno.readFileSync(path);
+ } catch (err) {
+ throw denoErrorToNodeError(err);
+ }
const encoding = getEncoding(opt);
if (encoding && encoding !== "binary") {
const text = maybeDecode(data, encoding);
diff --git a/ext/node/polyfills/_fs/_fs_readlink.ts b/ext/node/polyfills/_fs/_fs_readlink.ts
index 5f2312798..08bea843f 100644
--- a/ext/node/polyfills/_fs/_fs_readlink.ts
+++ b/ext/node/polyfills/_fs/_fs_readlink.ts
@@ -4,13 +4,10 @@
// deno-lint-ignore-file prefer-primordials
import { TextEncoder } from "ext:deno_web/08_text_encoding.js";
-import {
- intoCallbackAPIWithIntercept,
- MaybeEmpty,
- notImplemented,
-} from "ext:deno_node/_utils.ts";
+import { MaybeEmpty, notImplemented } from "ext:deno_node/_utils.ts";
import { pathFromURL } from "ext:deno_web/00_infra.js";
import { promisify } from "ext:deno_node/internal/util.mjs";
+import { denoErrorToNodeError } from "ext:deno_node/internal/errors.ts";
type ReadlinkCallback = (
err: MaybeEmpty<Error>,
@@ -69,12 +66,17 @@ export function readlink(
const encoding = getEncoding(optOrCallback);
- intoCallbackAPIWithIntercept<string, Uint8Array | string>(
- Deno.readLink,
- (data: string): string | Uint8Array => maybeEncode(data, encoding),
- cb,
- path,
- );
+ Deno.readLink(path).then((data: string) => {
+ const res = maybeEncode(data, encoding);
+ if (cb) cb(null, res);
+ }, (err: Error) => {
+ if (cb) {
+ (cb as (e: Error) => void)(denoErrorToNodeError(err, {
+ syscall: "readlink",
+ path,
+ }));
+ }
+ });
}
export const readlinkPromise = promisify(readlink) as (
@@ -88,5 +90,12 @@ export function readlinkSync(
): string | Uint8Array {
path = path instanceof URL ? pathFromURL(path) : path;
- return maybeEncode(Deno.readLinkSync(path), getEncoding(opt));
+ try {
+ return maybeEncode(Deno.readLinkSync(path), getEncoding(opt));
+ } catch (error) {
+ throw denoErrorToNodeError(error, {
+ syscall: "readlink",
+ path,
+ });
+ }
}
diff --git a/ext/node/polyfills/_fs/_fs_readv.ts b/ext/node/polyfills/_fs/_fs_readv.ts
index 384f5e319..2259f029a 100644
--- a/ext/node/polyfills/_fs/_fs_readv.ts
+++ b/ext/node/polyfills/_fs/_fs_readv.ts
@@ -15,6 +15,7 @@ import { maybeCallback } from "ext:deno_node/_fs/_fs_common.ts";
import { validateInteger } from "ext:deno_node/internal/validators.mjs";
import * as io from "ext:deno_io/12_io.js";
import { op_fs_seek_async, op_fs_seek_sync } from "ext:core/ops";
+import process from "node:process";
type Callback = (
err: ErrnoException | null,
diff --git a/ext/node/polyfills/_fs/_fs_stat.ts b/ext/node/polyfills/_fs/_fs_stat.ts
index c4ed82d57..507cb05ea 100644
--- a/ext/node/polyfills/_fs/_fs_stat.ts
+++ b/ext/node/polyfills/_fs/_fs_stat.ts
@@ -290,8 +290,8 @@ export function convertFileInfoToStats(origin: Deno.FileInfo): Stats {
isFIFO: () => false,
isCharacterDevice: () => false,
isSocket: () => false,
- ctime: origin.mtime,
- ctimeMs: origin.mtime?.getTime() || null,
+ ctime: origin.ctime,
+ ctimeMs: origin.ctime?.getTime() || null,
});
return stats;
@@ -336,9 +336,9 @@ export function convertFileInfoToBigIntStats(
isFIFO: () => false,
isCharacterDevice: () => false,
isSocket: () => false,
- ctime: origin.mtime,
- ctimeMs: origin.mtime ? BigInt(origin.mtime.getTime()) : null,
- ctimeNs: origin.mtime ? BigInt(origin.mtime.getTime()) * 1000000n : null,
+ ctime: origin.ctime,
+ ctimeMs: origin.ctime ? BigInt(origin.ctime.getTime()) : null,
+ ctimeNs: origin.ctime ? BigInt(origin.ctime.getTime()) * 1000000n : null,
});
return stats;
}
@@ -383,7 +383,10 @@ export function stat(
Deno.stat(path).then(
(stat) => callback(null, CFISBIS(stat, options.bigint)),
- (err) => callback(denoErrorToNodeError(err, { syscall: "stat" })),
+ (err) =>
+ callback(
+ denoErrorToNodeError(err, { syscall: "stat", path: getPathname(path) }),
+ ),
);
}
@@ -417,9 +420,16 @@ export function statSync(
return;
}
if (err instanceof Error) {
- throw denoErrorToNodeError(err, { syscall: "stat" });
+ throw denoErrorToNodeError(err, {
+ syscall: "stat",
+ path: getPathname(path),
+ });
} else {
throw err;
}
}
}
+
+function getPathname(path: string | URL) {
+ return typeof path === "string" ? path : path.pathname;
+}
diff --git a/ext/node/polyfills/_next_tick.ts b/ext/node/polyfills/_next_tick.ts
index 5ee27728d..af306a29c 100644
--- a/ext/node/polyfills/_next_tick.ts
+++ b/ext/node/polyfills/_next_tick.ts
@@ -62,6 +62,8 @@ export function processTicksAndRejections() {
callback(...args);
}
}
+ } catch (e) {
+ reportError(e);
} finally {
// FIXME(bartlomieju): Deno currently doesn't support async hooks
// if (destroyHooksExist())
@@ -87,8 +89,7 @@ export function runNextTicks() {
// runMicrotasks();
// if (!hasTickScheduled() && !hasRejectionToWarn())
// return;
- if (!core.hasTickScheduled()) {
- core.runMicrotasks();
+ if (queue.isEmpty() || !core.hasTickScheduled()) {
return true;
}
diff --git a/ext/node/polyfills/_process/streams.mjs b/ext/node/polyfills/_process/streams.mjs
index 7936e82aa..3573956c9 100644
--- a/ext/node/polyfills/_process/streams.mjs
+++ b/ext/node/polyfills/_process/streams.mjs
@@ -66,14 +66,19 @@ export function createWritableStdioStream(writer, name, warmup = false) {
// We cannot call `writer?.isTerminal()` eagerly here
let getIsTTY = () => writer?.isTerminal();
+ const getColumns = () =>
+ stream._columns ||
+ (writer?.isTerminal() ? Deno.consoleSize?.().columns : undefined);
ObjectDefineProperties(stream, {
columns: {
__proto__: null,
enumerable: true,
configurable: true,
- get: () =>
- writer?.isTerminal() ? Deno.consoleSize?.().columns : undefined,
+ get: () => getColumns(),
+ set: (value) => {
+ stream._columns = value;
+ },
},
rows: {
__proto__: null,
diff --git a/ext/node/polyfills/_tls_wrap.ts b/ext/node/polyfills/_tls_wrap.ts
index a614b45df..e36fc637e 100644
--- a/ext/node/polyfills/_tls_wrap.ts
+++ b/ext/node/polyfills/_tls_wrap.ts
@@ -68,6 +68,7 @@ export class TLSSocket extends net.Socket {
secureConnecting: boolean;
_SNICallback: any;
servername: string | null;
+ alpnProtocol: string | boolean | null;
alpnProtocols: string[] | null;
authorized: boolean;
authorizationError: any;
@@ -114,6 +115,7 @@ export class TLSSocket extends net.Socket {
this.secureConnecting = true;
this._SNICallback = null;
this.servername = null;
+ this.alpnProtocol = null;
this.alpnProtocols = tlsOptions.ALPNProtocols;
this.authorized = false;
this.authorizationError = null;
@@ -151,10 +153,21 @@ export class TLSSocket extends net.Socket {
handle.afterConnect = async (req: any, status: number) => {
try {
const conn = await Deno.startTls(handle[kStreamBaseField], options);
+ try {
+ const hs = await conn.handshake();
+ if (hs.alpnProtocol) {
+ tlssock.alpnProtocol = hs.alpnProtocol;
+ } else {
+ tlssock.alpnProtocol = false;
+ }
+ } catch {
+ // Don't interrupt "secure" event to let the first read/write
+ // operation emit the error.
+ }
handle[kStreamBaseField] = conn;
tlssock.emit("secure");
tlssock.removeListener("end", onConnectEnd);
- } catch {
+ } catch (_) {
// TODO(kt3k): Handle this
}
return afterConnect.call(handle, req, status);
@@ -269,6 +282,7 @@ export class ServerImpl extends EventEmitter {
// Creates TCP handle and socket directly from Deno.TlsConn.
// This works as TLS socket. We don't use TLSSocket class for doing
// this because Deno.startTls only supports client side tcp connection.
+ // TODO(@satyarohith): set TLSSocket.alpnProtocol when we use TLSSocket class.
const handle = new TCP(TCPConstants.SOCKET, await listener.accept());
const socket = new net.Socket({ handle });
this.emit("secureConnection", socket);
diff --git a/ext/node/polyfills/_utils.ts b/ext/node/polyfills/_utils.ts
index b50c113e1..79d84e00f 100644
--- a/ext/node/polyfills/_utils.ts
+++ b/ext/node/polyfills/_utils.ts
@@ -17,6 +17,7 @@ const {
import { TextDecoder, TextEncoder } from "ext:deno_web/08_text_encoding.js";
import { errorMap } from "ext:deno_node/internal_binding/uv.ts";
import { codes } from "ext:deno_node/internal/error_codes.ts";
+import { ERR_NOT_IMPLEMENTED } from "ext:deno_node/internal/errors.ts";
export type BinaryEncodings = "binary";
@@ -34,8 +35,7 @@ export type TextEncodings =
export type Encodings = BinaryEncodings | TextEncodings;
export function notImplemented(msg: string): never {
- const message = msg ? `Not implemented: ${msg}` : "Not implemented";
- throw new Error(message);
+ throw new ERR_NOT_IMPLEMENTED(msg);
}
export function warnNotImplemented(msg?: string) {
diff --git a/ext/node/polyfills/_zlib.mjs b/ext/node/polyfills/_zlib.mjs
index 851bd602f..07fc440ef 100644
--- a/ext/node/polyfills/_zlib.mjs
+++ b/ext/node/polyfills/_zlib.mjs
@@ -14,6 +14,7 @@ import { nextTick } from "ext:deno_node/_next_tick.ts";
import {
isAnyArrayBuffer,
isArrayBufferView,
+ isUint8Array,
} from "ext:deno_node/internal/util/types.ts";
var kRangeErrorMessage = "Cannot create final Buffer. It would be larger " +
@@ -158,6 +159,12 @@ export const inflateRawSync = function (buffer, opts) {
function sanitizeInput(input) {
if (typeof input === "string") input = Buffer.from(input);
+ if (isArrayBufferView(input) && !isUint8Array(input)) {
+ input = Buffer.from(input.buffer, input.byteOffset, input.byteLength);
+ } else if (isAnyArrayBuffer(input)) {
+ input = Buffer.from(input);
+ }
+
if (
!Buffer.isBuffer(input) &&
(input.buffer && !input.buffer.constructor === ArrayBuffer)
diff --git a/ext/node/polyfills/child_process.ts b/ext/node/polyfills/child_process.ts
index c37dfc410..eda718ff3 100644
--- a/ext/node/polyfills/child_process.ts
+++ b/ext/node/polyfills/child_process.ts
@@ -132,6 +132,8 @@ export function fork(
rm = 2;
}
execArgv.splice(index, rm);
+ } else if (flag.startsWith("--no-warnings")) {
+ execArgv[index] = "--quiet";
} else {
index++;
}
diff --git a/ext/node/polyfills/http.ts b/ext/node/polyfills/http.ts
index f3f6f86ed..9a920adee 100644
--- a/ext/node/polyfills/http.ts
+++ b/ext/node/polyfills/http.ts
@@ -34,6 +34,7 @@ import {
finished,
Readable as NodeReadable,
Writable as NodeWritable,
+ WritableOptions as NodeWritableOptions,
} from "node:stream";
import {
kUniqueHeaders,
@@ -66,12 +67,13 @@ import { headersEntries } from "ext:deno_fetch/20_headers.js";
import { timerId } from "ext:deno_web/03_abort_signal.js";
import { clearTimeout as webClearTimeout } from "ext:deno_web/02_timers.js";
import { resourceForReadableStream } from "ext:deno_web/06_streams.js";
-import { TcpConn } from "ext:deno_net/01_net.js";
+import { UpgradedConn } from "ext:deno_net/01_net.js";
import { STATUS_CODES } from "node:_http_server";
import { methods as METHODS } from "node:_http_common";
+import { deprecate } from "node:util";
const { internalRidSymbol } = core;
-const { ArrayIsArray } = primordials;
+const { ArrayIsArray, StringPrototypeToLowerCase } = primordials;
type Chunk = string | Buffer | Uint8Array;
@@ -516,7 +518,7 @@ class ClientRequest extends OutgoingMessage {
);
assert(typeof res.remoteAddrIp !== "undefined");
assert(typeof res.remoteAddrIp !== "undefined");
- const conn = new TcpConn(
+ const conn = new UpgradedConn(
upgradeRid,
{
transport: "tcp",
@@ -1183,49 +1185,95 @@ function onError(self, error, cb) {
}
}
-export class ServerResponse extends NodeWritable {
- statusCode = 200;
- statusMessage?: string = undefined;
- #headers: Record<string, string | string[]> = { __proto__: null };
- #hasNonStringHeaders: boolean = false;
- #readable: ReadableStream;
- override writable = true;
- // used by `npm:on-finished`
- finished = false;
- headersSent = false;
- #resolve: (value: Response | PromiseLike<Response>) => void;
+export type ServerResponse = {
+ statusCode: number;
+ statusMessage?: string;
+
+ _headers: Record<string, string | string[]>;
+ _hasNonStringHeaders: boolean;
+
+ _readable: ReadableStream;
+ finished: boolean;
+ headersSent: boolean;
+ _resolve: (value: Response | PromiseLike<Response>) => void;
+ // deno-lint-ignore no-explicit-any
+ _socketOverride: any | null;
// deno-lint-ignore no-explicit-any
- #socketOverride: any | null = null;
+ socket: any | null;
- static #enqueue(controller: ReadableStreamDefaultController, chunk: Chunk) {
- try {
- if (typeof chunk === "string") {
- controller.enqueue(ENCODER.encode(chunk));
- } else {
- controller.enqueue(chunk);
- }
- } catch (_) {
- // The stream might have been closed. Ignore the error.
- }
- }
+ setHeader(name: string, value: string | string[]): void;
+ appendHeader(name: string, value: string | string[]): void;
+ getHeader(name: string): string | string[];
+ removeHeader(name: string): void;
+ getHeaderNames(): string[];
+ getHeaders(): Record<string, string | number | string[]>;
+ hasHeader(name: string): boolean;
- /** Returns true if the response body should be null with the given
- * http status code */
- static #bodyShouldBeNull(status: number) {
- return status === 101 || status === 204 || status === 205 || status === 304;
- }
+ writeHead(
+ status: number,
+ statusMessage?: string,
+ headers?:
+ | Record<string, string | number | string[]>
+ | Array<[string, string]>,
+ ): void;
+ writeHead(
+ status: number,
+ headers?:
+ | Record<string, string | number | string[]>
+ | Array<[string, string]>,
+ ): void;
- constructor(
+ _ensureHeaders(singleChunk?: Chunk): void;
+
+ respond(final: boolean, singleChunk?: Chunk): void;
+ // deno-lint-ignore no-explicit-any
+ end(chunk?: any, encoding?: any, cb?: any): void;
+
+ flushHeaders(): void;
+ _implicitHeader(): void;
+
+ // Undocumented field used by `npm:light-my-request`.
+ _header: string;
+
+ assignSocket(socket): void;
+ detachSocket(socket): void;
+} & { -readonly [K in keyof NodeWritable]: NodeWritable[K] };
+
+type ServerResponseStatic = {
+ new (
resolve: (value: Response | PromiseLike<Response>) => void,
socket: FakeSocket,
- ) {
- let controller: ReadableByteStreamController;
- const readable = new ReadableStream({
- start(c) {
- controller = c as ReadableByteStreamController;
- },
- });
- super({
+ ): ServerResponse;
+ _enqueue(controller: ReadableStreamDefaultController, chunk: Chunk): void;
+ _bodyShouldBeNull(statusCode: number): boolean;
+};
+
+export const ServerResponse = function (
+ this: ServerResponse,
+ resolve: (value: Response | PromiseLike<Response>) => void,
+ socket: FakeSocket,
+) {
+ this.statusCode = 200;
+ this.statusMessage = undefined;
+ this._headers = { __proto__: null };
+ this._hasNonStringHeaders = false;
+ this.writable = true;
+
+ // used by `npm:on-finished`
+ this.finished = false;
+ this.headersSent = false;
+ this._socketOverride = null;
+
+ let controller: ReadableByteStreamController;
+ const readable = new ReadableStream({
+ start(c) {
+ controller = c as ReadableByteStreamController;
+ },
+ });
+
+ NodeWritable.call(
+ this,
+ {
autoDestroy: true,
defaultEncoding: "utf-8",
emitClose: true,
@@ -1234,16 +1282,16 @@ export class ServerResponse extends NodeWritable {
write: (chunk, encoding, cb) => {
// Writes chunks are directly written to the socket if
// one is assigned via assignSocket()
- if (this.#socketOverride && this.#socketOverride.writable) {
- this.#socketOverride.write(chunk, encoding);
+ if (this._socketOverride && this._socketOverride.writable) {
+ this._socketOverride.write(chunk, encoding);
return cb();
}
if (!this.headersSent) {
- ServerResponse.#enqueue(controller, chunk);
+ ServerResponse._enqueue(controller, chunk);
this.respond(false);
return cb();
}
- ServerResponse.#enqueue(controller, chunk);
+ ServerResponse._enqueue(controller, chunk);
return cb();
},
final: (cb) => {
@@ -1259,192 +1307,269 @@ export class ServerResponse extends NodeWritable {
}
return cb(null);
},
- });
- this.#readable = readable;
- this.#resolve = resolve;
- this.socket = socket;
+ } satisfies NodeWritableOptions,
+ );
+
+ this._readable = readable;
+ this._resolve = resolve;
+ this.socket = socket;
+
+ this._header = "";
+} as unknown as ServerResponseStatic;
+
+Object.setPrototypeOf(ServerResponse.prototype, NodeWritable.prototype);
+Object.setPrototypeOf(ServerResponse, NodeWritable);
+
+ServerResponse._enqueue = function (
+ this: ServerResponse,
+ controller: ReadableStreamDefaultController,
+ chunk: Chunk,
+) {
+ try {
+ if (typeof chunk === "string") {
+ controller.enqueue(ENCODER.encode(chunk));
+ } else {
+ controller.enqueue(chunk);
+ }
+ } catch (_) {
+ // The stream might have been closed. Ignore the error.
}
+};
- setHeader(name: string, value: string | string[]) {
- if (Array.isArray(value)) {
- this.#hasNonStringHeaders = true;
- }
- this.#headers[name] = value;
- return this;
+/** Returns true if the response body should be null with the given
+ * http status code */
+ServerResponse._bodyShouldBeNull = function (
+ this: ServerResponse,
+ status: number,
+) {
+ return status === 101 || status === 204 || status === 205 || status === 304;
+};
+
+ServerResponse.prototype.setHeader = function (
+ this: ServerResponse,
+ name: string,
+ value: string | string[],
+) {
+ if (Array.isArray(value)) {
+ this._hasNonStringHeaders = true;
}
+ this._headers[StringPrototypeToLowerCase(name)] = value;
+ return this;
+};
- appendHeader(name: string, value: string | string[]) {
- if (this.#headers[name] === undefined) {
- if (Array.isArray(value)) this.#hasNonStringHeaders = true;
- this.#headers[name] = value;
+ServerResponse.prototype.appendHeader = function (
+ this: ServerResponse,
+ name: string,
+ value: string | string[],
+) {
+ const key = StringPrototypeToLowerCase(name);
+ if (this._headers[key] === undefined) {
+ if (Array.isArray(value)) this._hasNonStringHeaders = true;
+ this._headers[key] = value;
+ } else {
+ this._hasNonStringHeaders = true;
+ if (!Array.isArray(this._headers[key])) {
+ this._headers[key] = [this._headers[key]];
+ }
+ const header = this._headers[key];
+ if (Array.isArray(value)) {
+ header.push(...value);
} else {
- this.#hasNonStringHeaders = true;
- if (!Array.isArray(this.#headers[name])) {
- this.#headers[name] = [this.#headers[name]];
- }
- const header = this.#headers[name];
- if (Array.isArray(value)) {
- header.push(...value);
- } else {
- header.push(value);
- }
+ header.push(value);
}
- return this;
}
+ return this;
+};
- getHeader(name: string) {
- return this.#headers[name];
- }
- removeHeader(name: string) {
- delete this.#headers[name];
- }
- getHeaderNames() {
- return Object.keys(this.#headers);
- }
- getHeaders(): Record<string, string | number | string[]> {
- // @ts-ignore Ignore null __proto__
- return { __proto__: null, ...this.#headers };
- }
- hasHeader(name: string) {
- return Object.hasOwn(this.#headers, name);
- }
+ServerResponse.prototype.getHeader = function (
+ this: ServerResponse,
+ name: string,
+) {
+ return this._headers[StringPrototypeToLowerCase(name)];
+};
- writeHead(
- status: number,
- statusMessage?: string,
- headers?:
- | Record<string, string | number | string[]>
- | Array<[string, string]>,
- ): this;
- writeHead(
- status: number,
- headers?:
- | Record<string, string | number | string[]>
- | Array<[string, string]>,
- ): this;
- writeHead(
- status: number,
- statusMessageOrHeaders?:
- | string
- | Record<string, string | number | string[]>
- | Array<[string, string]>,
- maybeHeaders?:
- | Record<string, string | number | string[]>
- | Array<[string, string]>,
- ): this {
- this.statusCode = status;
-
- let headers = null;
- if (typeof statusMessageOrHeaders === "string") {
- this.statusMessage = statusMessageOrHeaders;
- if (maybeHeaders !== undefined) {
- headers = maybeHeaders;
- }
- } else if (statusMessageOrHeaders !== undefined) {
- headers = statusMessageOrHeaders;
- }
+ServerResponse.prototype.removeHeader = function (
+ this: ServerResponse,
+ name: string,
+) {
+ delete this._headers[StringPrototypeToLowerCase(name)];
+};
- if (headers !== null) {
- if (ArrayIsArray(headers)) {
- headers = headers as Array<[string, string]>;
- for (let i = 0; i < headers.length; i++) {
- this.appendHeader(headers[i][0], headers[i][1]);
- }
- } else {
- headers = headers as Record<string, string>;
- for (const k in headers) {
- if (Object.hasOwn(headers, k)) {
- this.setHeader(k, headers[k]);
- }
+ServerResponse.prototype.getHeaderNames = function (this: ServerResponse) {
+ return Object.keys(this._headers);
+};
+
+ServerResponse.prototype.getHeaders = function (
+ this: ServerResponse,
+): Record<string, string | number | string[]> {
+ return { __proto__: null, ...this._headers };
+};
+
+ServerResponse.prototype.hasHeader = function (
+ this: ServerResponse,
+ name: string,
+) {
+ return Object.hasOwn(this._headers, name);
+};
+
+ServerResponse.prototype.writeHead = function (
+ this: ServerResponse,
+ status: number,
+ statusMessageOrHeaders?:
+ | string
+ | Record<string, string | number | string[]>
+ | Array<[string, string]>,
+ maybeHeaders?:
+ | Record<string, string | number | string[]>
+ | Array<[string, string]>,
+) {
+ this.statusCode = status;
+
+ let headers = null;
+ if (typeof statusMessageOrHeaders === "string") {
+ this.statusMessage = statusMessageOrHeaders;
+ if (maybeHeaders !== undefined) {
+ headers = maybeHeaders;
+ }
+ } else if (statusMessageOrHeaders !== undefined) {
+ headers = statusMessageOrHeaders;
+ }
+
+ if (headers !== null) {
+ if (ArrayIsArray(headers)) {
+ headers = headers as Array<[string, string]>;
+ for (let i = 0; i < headers.length; i++) {
+ this.appendHeader(headers[i][0], headers[i][1]);
+ }
+ } else {
+ headers = headers as Record<string, string>;
+ for (const k in headers) {
+ if (Object.hasOwn(headers, k)) {
+ this.setHeader(k, headers[k]);
}
}
}
+ }
- return this;
+ return this;
+};
+
+ServerResponse.prototype._ensureHeaders = function (
+ this: ServerResponse,
+ singleChunk?: Chunk,
+) {
+ if (this.statusCode === 200 && this.statusMessage === undefined) {
+ this.statusMessage = "OK";
}
+ if (typeof singleChunk === "string" && !this.hasHeader("content-type")) {
+ this.setHeader("content-type", "text/plain;charset=UTF-8");
+ }
+};
- #ensureHeaders(singleChunk?: Chunk) {
- if (this.statusCode === 200 && this.statusMessage === undefined) {
- this.statusMessage = "OK";
- }
- if (
- typeof singleChunk === "string" &&
- !this.hasHeader("content-type")
- ) {
- this.setHeader("content-type", "text/plain;charset=UTF-8");
- }
- }
-
- respond(final: boolean, singleChunk?: Chunk) {
- this.headersSent = true;
- this.#ensureHeaders(singleChunk);
- let body = singleChunk ?? (final ? null : this.#readable);
- if (ServerResponse.#bodyShouldBeNull(this.statusCode)) {
- body = null;
- }
- let headers: Record<string, string> | [string, string][] = this
- .#headers as Record<string, string>;
- if (this.#hasNonStringHeaders) {
- headers = [];
- // Guard is not needed as this is a null prototype object.
- // deno-lint-ignore guard-for-in
- for (const key in this.#headers) {
- const entry = this.#headers[key];
- if (Array.isArray(entry)) {
- for (const value of entry) {
- headers.push([key, value]);
- }
- } else {
- headers.push([key, entry]);
+ServerResponse.prototype.respond = function (
+ this: ServerResponse,
+ final: boolean,
+ singleChunk?: Chunk,
+) {
+ this.headersSent = true;
+ this._ensureHeaders(singleChunk);
+ let body = singleChunk ?? (final ? null : this._readable);
+ if (ServerResponse._bodyShouldBeNull(this.statusCode)) {
+ body = null;
+ }
+ let headers: Record<string, string> | [string, string][] = this
+ ._headers as Record<string, string>;
+ if (this._hasNonStringHeaders) {
+ headers = [];
+ // Guard is not needed as this is a null prototype object.
+ // deno-lint-ignore guard-for-in
+ for (const key in this._headers) {
+ const entry = this._headers[key];
+ if (Array.isArray(entry)) {
+ for (const value of entry) {
+ headers.push([key, value]);
}
+ } else {
+ headers.push([key, entry]);
}
}
- this.#resolve(
- new Response(body, {
- headers,
- status: this.statusCode,
- statusText: this.statusMessage,
- }),
- );
}
+ this._resolve(
+ new Response(body, {
+ headers,
+ status: this.statusCode,
+ statusText: this.statusMessage,
+ }),
+ );
+};
+ServerResponse.prototype.end = function (
+ this: ServerResponse,
// deno-lint-ignore no-explicit-any
- override end(chunk?: any, encoding?: any, cb?: any): this {
- this.finished = true;
- if (!chunk && "transfer-encoding" in this.#headers) {
- // FIXME(bnoordhuis) Node sends a zero length chunked body instead, i.e.,
- // the trailing "0\r\n", but respondWith() just hangs when I try that.
- this.#headers["content-length"] = "0";
- delete this.#headers["transfer-encoding"];
- }
+ chunk?: any,
+ // deno-lint-ignore no-explicit-any
+ encoding?: any,
+ // deno-lint-ignore no-explicit-any
+ cb?: any,
+) {
+ this.finished = true;
+ if (!chunk && "transfer-encoding" in this._headers) {
+ // FIXME(bnoordhuis) Node sends a zero length chunked body instead, i.e.,
+ // the trailing "0\r\n", but respondWith() just hangs when I try that.
+ this._headers["content-length"] = "0";
+ delete this._headers["transfer-encoding"];
+ }
+
+ // @ts-expect-error The signature for cb is stricter than the one implemented here
+ NodeWritable.prototype.end.call(this, chunk, encoding, cb);
+};
- // @ts-expect-error The signature for cb is stricter than the one implemented here
- return super.end(chunk, encoding, cb);
- }
+ServerResponse.prototype.flushHeaders = function (this: ServerResponse) {
+ // no-op
+};
- flushHeaders() {
- // no-op
- }
+// Undocumented API used by `npm:compression`.
+ServerResponse.prototype._implicitHeader = function (this: ServerResponse) {
+ this.writeHead(this.statusCode);
+};
- // Undocumented API used by `npm:compression`.
- _implicitHeader() {
- this.writeHead(this.statusCode);
+ServerResponse.prototype.assignSocket = function (
+ this: ServerResponse,
+ socket,
+) {
+ if (socket._httpMessage) {
+ throw new ERR_HTTP_SOCKET_ASSIGNED();
}
+ socket._httpMessage = this;
+ this._socketOverride = socket;
+};
- assignSocket(socket) {
- if (socket._httpMessage) {
- throw new ERR_HTTP_SOCKET_ASSIGNED();
- }
- socket._httpMessage = this;
- this.#socketOverride = socket;
- }
+ServerResponse.prototype.detachSocket = function (
+ this: ServerResponse,
+ socket,
+) {
+ assert(socket._httpMessage === this);
+ socket._httpMessage = null;
+ this._socketOverride = null;
+};
- detachSocket(socket) {
- assert(socket._httpMessage === this);
- socket._httpMessage = null;
- this.#socketOverride = null;
- }
-}
+Object.defineProperty(ServerResponse.prototype, "connection", {
+ get: deprecate(
+ function (this: ServerResponse) {
+ return this._socketOverride;
+ },
+ "ServerResponse.prototype.connection is deprecated",
+ "DEP0066",
+ ),
+ set: deprecate(
+ // deno-lint-ignore no-explicit-any
+ function (this: ServerResponse, socket: any) {
+ this._socketOverride = socket;
+ },
+ "ServerResponse.prototype.connection is deprecated",
+ "DEP0066",
+ ),
+});
// TODO(@AaronO): optimize
export class IncomingMessageForServer extends NodeReadable {
@@ -1677,6 +1802,8 @@ export class ServerImpl extends EventEmitter {
this.#server.ref();
}
this.#unref = false;
+
+ return this;
}
unref() {
@@ -1684,6 +1811,8 @@ export class ServerImpl extends EventEmitter {
this.#server.unref();
}
this.#unref = true;
+
+ return this;
}
close(cb?: (err?: Error) => void): this {
diff --git a/ext/node/polyfills/http2.ts b/ext/node/polyfills/http2.ts
index a9ced2bd9..dc2379aeb 100644
--- a/ext/node/polyfills/http2.ts
+++ b/ext/node/polyfills/http2.ts
@@ -882,6 +882,7 @@ export class ClientHttp2Stream extends Duplex {
trailersReady: false,
endAfterHeaders: false,
shutdownWritableCalled: false,
+ serverEndedCall: false,
};
this[kDenoResponse] = undefined;
this[kDenoRid] = undefined;
@@ -1109,7 +1110,9 @@ export class ClientHttp2Stream extends Duplex {
}
debugHttp2(">>> chunk", chunk, finished, this[kDenoResponse].bodyRid);
- if (chunk === null) {
+ if (finished || chunk === null) {
+ this[kState].serverEndedCall = true;
+
const trailerList = await op_http2_client_get_response_trailers(
this[kDenoResponse].bodyRid,
);
@@ -1237,7 +1240,9 @@ export class ClientHttp2Stream extends Duplex {
this[kSession] = undefined;
session[kMaybeDestroy]();
- callback(err);
+ if (callback) {
+ callback(err);
+ }
}
[kMaybeDestroy](code = constants.NGHTTP2_NO_ERROR) {
@@ -1280,6 +1285,9 @@ function shutdownWritable(stream, callback, streamRid) {
if (state.flags & STREAM_FLAGS_HAS_TRAILERS) {
onStreamTrailers(stream);
callback();
+ } else if (state.serverEndedCall) {
+ debugHttp2(">>> stream finished");
+ callback();
} else {
op_http2_client_send_data(streamRid, new Uint8Array(), true)
.then(() => {
diff --git a/ext/node/polyfills/inspector.js b/ext/node/polyfills/inspector.js
new file mode 100644
index 000000000..7eb15ce91
--- /dev/null
+++ b/ext/node/polyfills/inspector.js
@@ -0,0 +1,210 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright Joyent and Node contributors. All rights reserved. MIT license.
+
+import process from "node:process";
+import { EventEmitter } from "node:events";
+import { primordials } from "ext:core/mod.js";
+import {
+ op_get_extras_binding_object,
+ op_inspector_close,
+ op_inspector_connect,
+ op_inspector_disconnect,
+ op_inspector_dispatch,
+ op_inspector_emit_protocol_event,
+ op_inspector_enabled,
+ op_inspector_open,
+ op_inspector_url,
+ op_inspector_wait,
+} from "ext:core/ops";
+import {
+ isUint32,
+ validateFunction,
+ validateInt32,
+ validateObject,
+ validateString,
+} from "ext:deno_node/internal/validators.mjs";
+import {
+ ERR_INSPECTOR_ALREADY_ACTIVATED,
+ ERR_INSPECTOR_ALREADY_CONNECTED,
+ ERR_INSPECTOR_CLOSED,
+ ERR_INSPECTOR_COMMAND,
+ ERR_INSPECTOR_NOT_ACTIVE,
+ ERR_INSPECTOR_NOT_CONNECTED,
+ ERR_INSPECTOR_NOT_WORKER,
+} from "ext:deno_node/internal/errors.ts";
+
+const {
+ SymbolDispose,
+ JSONParse,
+ JSONStringify,
+ SafeMap,
+} = primordials;
+
+class Session extends EventEmitter {
+ #connection = null;
+ #nextId = 1;
+ #messageCallbacks = new SafeMap();
+
+ connect() {
+ if (this.#connection) {
+ throw new ERR_INSPECTOR_ALREADY_CONNECTED("The inspector session");
+ }
+ this.#connection = op_inspector_connect(false, (m) => this.#onMessage(m));
+ }
+
+ connectToMainThread() {
+ if (isMainThread) {
+ throw new ERR_INSPECTOR_NOT_WORKER();
+ }
+ if (this.#connection) {
+ throw new ERR_INSPECTOR_ALREADY_CONNECTED("The inspector session");
+ }
+ this.#connection = op_inspector_connect(true, (m) => this.#onMessage(m));
+ }
+
+ #onMessage(message) {
+ const parsed = JSONParse(message);
+ try {
+ if (parsed.id) {
+ const callback = this.#messageCallbacks.get(parsed.id);
+ this.#messageCallbacks.delete(parsed.id);
+ if (callback) {
+ if (parsed.error) {
+ return callback(
+ new ERR_INSPECTOR_COMMAND(
+ parsed.error.code,
+ parsed.error.message,
+ ),
+ );
+ }
+
+ callback(null, parsed.result);
+ }
+ } else {
+ this.emit(parsed.method, parsed);
+ this.emit("inspectorNotification", parsed);
+ }
+ } catch (error) {
+ process.emitWarning(error);
+ }
+ }
+
+ post(method, params, callback) {
+ validateString(method, "method");
+ if (!callback && typeof params === "function") {
+ callback = params;
+ params = null;
+ }
+ if (params) {
+ validateObject(params, "params");
+ }
+ if (callback) {
+ validateFunction(callback, "callback");
+ }
+
+ if (!this.#connection) {
+ throw new ERR_INSPECTOR_NOT_CONNECTED();
+ }
+ const id = this.#nextId++;
+ const message = { id, method };
+ if (params) {
+ message.params = params;
+ }
+ if (callback) {
+ this.#messageCallbacks.set(id, callback);
+ }
+ op_inspector_dispatch(this.#connection, JSONStringify(message));
+ }
+
+ disconnect() {
+ if (!this.#connection) {
+ return;
+ }
+ op_inspector_disconnect(this.#connection);
+ this.#connection = null;
+ // deno-lint-ignore prefer-primordials
+ for (const callback of this.#messageCallbacks.values()) {
+ process.nextTick(callback, new ERR_INSPECTOR_CLOSED());
+ }
+ this.#messageCallbacks.clear();
+ this.#nextId = 1;
+ }
+}
+
+function open(port, host, wait) {
+ if (op_inspector_enabled()) {
+ throw new ERR_INSPECTOR_ALREADY_ACTIVATED();
+ }
+ // inspectorOpen() currently does not typecheck its arguments and adding
+ // such checks would be a potentially breaking change. However, the native
+ // open() function requires the port to fit into a 16-bit unsigned integer,
+ // causing an integer overflow otherwise, so we at least need to prevent that.
+ if (isUint32(port)) {
+ validateInt32(port, "port", 0, 65535);
+ } else {
+ // equiv of handling args[0]->IsUint32()
+ port = undefined;
+ }
+ if (typeof host !== "string") {
+ // equiv of handling args[1]->IsString()
+ host = undefined;
+ }
+ op_inspector_open(port, host);
+ if (wait) {
+ op_inspector_wait();
+ }
+
+ return {
+ __proto__: null,
+ [SymbolDispose]() {
+ _debugEnd();
+ },
+ };
+}
+
+function close() {
+ op_inspector_close();
+}
+
+function url() {
+ return op_inspector_url();
+}
+
+function waitForDebugger() {
+ if (!op_inspector_wait()) {
+ throw new ERR_INSPECTOR_NOT_ACTIVE();
+ }
+}
+
+function broadcastToFrontend(eventName, params) {
+ validateString(eventName, "eventName");
+ if (params) {
+ validateObject(params, "params");
+ }
+ op_inspector_emit_protocol_event(eventName, JSONStringify(params ?? {}));
+}
+
+const Network = {
+ requestWillBeSent: (params) =>
+ broadcastToFrontend("Network.requestWillBeSent", params),
+ responseReceived: (params) =>
+ broadcastToFrontend("Network.responseReceived", params),
+ loadingFinished: (params) =>
+ broadcastToFrontend("Network.loadingFinished", params),
+ loadingFailed: (params) =>
+ broadcastToFrontend("Network.loadingFailed", params),
+};
+
+const console = op_get_extras_binding_object().console;
+
+export { close, console, Network, open, Session, url, waitForDebugger };
+
+export default {
+ open,
+ close,
+ url,
+ waitForDebugger,
+ console,
+ Session,
+ Network,
+};
diff --git a/ext/node/polyfills/inspector.ts b/ext/node/polyfills/inspector.ts
deleted file mode 100644
index 9de86ab14..000000000
--- a/ext/node/polyfills/inspector.ts
+++ /dev/null
@@ -1,82 +0,0 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-// Copyright Joyent and Node contributors. All rights reserved. MIT license.
-
-import { EventEmitter } from "node:events";
-import { notImplemented } from "ext:deno_node/_utils.ts";
-import { primordials } from "ext:core/mod.js";
-
-const {
- SafeMap,
-} = primordials;
-
-class Session extends EventEmitter {
- #connection = null;
- #nextId = 1;
- #messageCallbacks = new SafeMap();
-
- /** Connects the session to the inspector back-end. */
- connect() {
- notImplemented("inspector.Session.prototype.connect");
- }
-
- /** Connects the session to the main thread
- * inspector back-end. */
- connectToMainThread() {
- notImplemented("inspector.Session.prototype.connectToMainThread");
- }
-
- /** Posts a message to the inspector back-end. */
- post(
- _method: string,
- _params?: Record<string, unknown>,
- _callback?: (...args: unknown[]) => void,
- ) {
- notImplemented("inspector.Session.prototype.post");
- }
-
- /** Immediately closes the session, all pending
- * message callbacks will be called with an
- * error.
- */
- disconnect() {
- notImplemented("inspector.Session.prototype.disconnect");
- }
-}
-
-/** Activates inspector on host and port.
- * See https://nodejs.org/api/inspector.html#inspectoropenport-host-wait */
-function open(_port?: number, _host?: string, _wait?: boolean) {
- notImplemented("inspector.Session.prototype.open");
-}
-
-/** Deactivate the inspector. Blocks until there are no active connections.
- * See https://nodejs.org/api/inspector.html#inspectorclose */
-function close() {
- notImplemented("inspector.Session.prototype.close");
-}
-
-/** Return the URL of the active inspector, or undefined if there is none.
- * See https://nodejs.org/api/inspector.html#inspectorurl */
-function url() {
- // TODO(kt3k): returns undefined for now, which means the inspector is not activated.
- return undefined;
-}
-
-/** Blocks until a client (existing or connected later) has sent Runtime.runIfWaitingForDebugger command.
- * See https://nodejs.org/api/inspector.html#inspectorwaitfordebugger */
-function waitForDebugger() {
- notImplemented("inspector.wairForDebugger");
-}
-
-const console = globalThis.console;
-
-export { close, console, open, Session, url, waitForDebugger };
-
-export default {
- close,
- console,
- open,
- Session,
- url,
- waitForDebugger,
-};
diff --git a/ext/node/polyfills/inspector/promises.js b/ext/node/polyfills/inspector/promises.js
new file mode 100644
index 000000000..3483e53f5
--- /dev/null
+++ b/ext/node/polyfills/inspector/promises.js
@@ -0,0 +1,20 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright Joyent and Node contributors. All rights reserved. MIT license.
+
+import inspector from "node:inspector";
+import { promisify } from "ext:deno_node/internal/util.mjs";
+
+class Session extends inspector.Session {
+ constructor() {
+ super();
+ }
+}
+Session.prototype.post = promisify(inspector.Session.prototype.post);
+
+export * from "node:inspector";
+export { Session };
+
+export default {
+ ...inspector,
+ Session,
+};
diff --git a/ext/node/polyfills/internal/buffer.mjs b/ext/node/polyfills/internal/buffer.mjs
index 6687f7394..dd549221f 100644
--- a/ext/node/polyfills/internal/buffer.mjs
+++ b/ext/node/polyfills/internal/buffer.mjs
@@ -2,10 +2,59 @@
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
// Copyright Feross Aboukhadijeh, and other contributors. All rights reserved. MIT license.
-// TODO(petamoriken): enable prefer-primordials for node polyfills
-// deno-lint-ignore-file prefer-primordials
-
-import { core } from "ext:core/mod.js";
+import { core, primordials } from "ext:core/mod.js";
+const {
+ isAnyArrayBuffer,
+ isArrayBuffer,
+ isDataView,
+ isSharedArrayBuffer,
+ isTypedArray,
+} = core;
+const {
+ ArrayBufferPrototypeGetByteLength,
+ ArrayBufferPrototypeGetDetached,
+ ArrayIsArray,
+ ArrayPrototypeSlice,
+ BigInt,
+ DataViewPrototypeGetByteLength,
+ Float32Array,
+ Float64Array,
+ MathFloor,
+ MathMin,
+ Number,
+ NumberIsInteger,
+ NumberIsNaN,
+ NumberMAX_SAFE_INTEGER,
+ NumberMIN_SAFE_INTEGER,
+ NumberPrototypeToString,
+ ObjectCreate,
+ ObjectDefineProperty,
+ ObjectPrototypeIsPrototypeOf,
+ ObjectSetPrototypeOf,
+ RangeError,
+ SafeRegExp,
+ String,
+ StringFromCharCode,
+ StringPrototypeCharCodeAt,
+ StringPrototypeIncludes,
+ StringPrototypeReplace,
+ StringPrototypeToLowerCase,
+ StringPrototypeTrim,
+ SymbolFor,
+ SymbolToPrimitive,
+ TypeError,
+ TypeErrorPrototype,
+ TypedArrayPrototypeCopyWithin,
+ TypedArrayPrototypeFill,
+ TypedArrayPrototypeGetBuffer,
+ TypedArrayPrototypeGetByteLength,
+ TypedArrayPrototypeGetByteOffset,
+ TypedArrayPrototypeSet,
+ TypedArrayPrototypeSlice,
+ TypedArrayPrototypeSubarray,
+ Uint8Array,
+ Uint8ArrayPrototype,
+} = primordials;
import { op_is_ascii, op_is_utf8, op_transcode } from "ext:core/ops";
import { TextDecoder, TextEncoder } from "ext:deno_web/08_text_encoding.js";
@@ -24,11 +73,6 @@ import {
hexToBytes,
utf16leToBytes,
} from "ext:deno_node/internal_binding/_utils.ts";
-import {
- isAnyArrayBuffer,
- isArrayBufferView,
- isTypedArray,
-} from "ext:deno_node/internal/util/types.ts";
import { normalizeEncoding } from "ext:deno_node/internal/util.mjs";
import { validateBuffer } from "ext:deno_node/internal/validators.mjs";
import { isUint8Array } from "ext:deno_node/internal/util/types.ts";
@@ -50,9 +94,13 @@ const utf8Encoder = new TextEncoder();
// Temporary buffers to convert numbers.
const float32Array = new Float32Array(1);
-const uInt8Float32Array = new Uint8Array(float32Array.buffer);
+const uInt8Float32Array = new Uint8Array(
+ TypedArrayPrototypeGetBuffer(float32Array),
+);
const float64Array = new Float64Array(1);
-const uInt8Float64Array = new Uint8Array(float64Array.buffer);
+const uInt8Float64Array = new Uint8Array(
+ TypedArrayPrototypeGetBuffer(float64Array),
+);
// Check endianness.
float32Array[0] = -1; // 0xBF800000
@@ -64,10 +112,7 @@ export const kMaxLength = 2147483647;
export const kStringMaxLength = 536870888;
const MAX_UINT32 = 2 ** 32;
-const customInspectSymbol =
- typeof Symbol === "function" && typeof Symbol["for"] === "function"
- ? Symbol["for"]("nodejs.util.inspect.custom")
- : null;
+const customInspectSymbol = SymbolFor("nodejs.util.inspect.custom");
export const INSPECT_MAX_BYTES = 50;
@@ -76,23 +121,25 @@ export const constants = {
MAX_STRING_LENGTH: kStringMaxLength,
};
-Object.defineProperty(Buffer.prototype, "parent", {
+ObjectDefineProperty(Buffer.prototype, "parent", {
+ __proto__: null,
enumerable: true,
get: function () {
- if (!Buffer.isBuffer(this)) {
+ if (!BufferIsBuffer(this)) {
return void 0;
}
- return this.buffer;
+ return TypedArrayPrototypeGetBuffer(this);
},
});
-Object.defineProperty(Buffer.prototype, "offset", {
+ObjectDefineProperty(Buffer.prototype, "offset", {
+ __proto__: null,
enumerable: true,
get: function () {
- if (!Buffer.isBuffer(this)) {
+ if (!BufferIsBuffer(this)) {
return void 0;
}
- return this.byteOffset;
+ return TypedArrayPrototypeGetByteOffset(this);
},
});
@@ -103,10 +150,21 @@ function createBuffer(length) {
);
}
const buf = new Uint8Array(length);
- Object.setPrototypeOf(buf, Buffer.prototype);
+ ObjectSetPrototypeOf(buf, BufferPrototype);
return buf;
}
+/**
+ * @param {ArrayBufferLike} O
+ * @returns {boolean}
+ */
+function isDetachedBuffer(O) {
+ if (isSharedArrayBuffer(O)) {
+ return false;
+ }
+ return ArrayBufferPrototypeGetDetached(O);
+}
+
export function Buffer(arg, encodingOrOffset, length) {
if (typeof arg === "number") {
if (typeof encodingOrOffset === "string") {
@@ -133,6 +191,7 @@ function _from(value, encodingOrOffset, length) {
return fromArrayBuffer(value, encodingOrOffset, length);
}
+ // deno-lint-ignore prefer-primordials
const valueOf = value.valueOf && value.valueOf();
if (
valueOf != null &&
@@ -147,8 +206,8 @@ function _from(value, encodingOrOffset, length) {
return b;
}
- if (typeof value[Symbol.toPrimitive] === "function") {
- const primitive = value[Symbol.toPrimitive]("string");
+ if (typeof value[SymbolToPrimitive] === "function") {
+ const primitive = value[SymbolToPrimitive]("string");
if (typeof primitive === "string") {
return fromString(primitive, encodingOrOffset);
}
@@ -162,13 +221,19 @@ function _from(value, encodingOrOffset, length) {
);
}
-Buffer.from = function from(value, encodingOrOffset, length) {
+const BufferFrom = Buffer.from = function from(
+ value,
+ encodingOrOffset,
+ length,
+) {
return _from(value, encodingOrOffset, length);
};
-Object.setPrototypeOf(Buffer.prototype, Uint8Array.prototype);
+const BufferPrototype = Buffer.prototype;
+
+ObjectSetPrototypeOf(Buffer.prototype, Uint8ArrayPrototype);
-Object.setPrototypeOf(Buffer, Uint8Array);
+ObjectSetPrototypeOf(Buffer, Uint8Array);
function assertSize(size) {
validateNumber(size, "size", 0, kMaxLength);
@@ -186,6 +251,7 @@ function _alloc(size, fill, encoding) {
encoding,
);
}
+ // deno-lint-ignore prefer-primordials
return buffer.fill(fill, encoding);
}
return buffer;
@@ -212,13 +278,14 @@ function fromString(string, encoding) {
if (typeof encoding !== "string" || encoding === "") {
encoding = "utf8";
}
- if (!Buffer.isEncoding(encoding)) {
+ if (!BufferIsEncoding(encoding)) {
throw new codes.ERR_UNKNOWN_ENCODING(encoding);
}
const length = byteLength(string, encoding) | 0;
let buf = createBuffer(length);
const actual = buf.write(string, encoding);
if (actual !== length) {
+ // deno-lint-ignore prefer-primordials
buf = buf.slice(0, actual);
}
return buf;
@@ -226,11 +293,12 @@ function fromString(string, encoding) {
function fromArrayLike(obj) {
const buf = new Uint8Array(obj);
- Object.setPrototypeOf(buf, Buffer.prototype);
+ ObjectSetPrototypeOf(buf, BufferPrototype);
return buf;
}
function fromObject(obj) {
+ // deno-lint-ignore prefer-primordials
if (obj.length !== undefined || isAnyArrayBuffer(obj.buffer)) {
if (typeof obj.length !== "number") {
return createBuffer(0);
@@ -239,7 +307,7 @@ function fromObject(obj) {
return fromArrayLike(obj);
}
- if (obj.type === "Buffer" && Array.isArray(obj.data)) {
+ if (obj.type === "Buffer" && ArrayIsArray(obj.data)) {
return fromArrayLike(obj.data);
}
}
@@ -248,7 +316,7 @@ function checked(length) {
if (length >= kMaxLength) {
throw new RangeError(
"Attempt to allocate Buffer larger than maximum size: 0x" +
- kMaxLength.toString(16) + " bytes",
+ NumberPrototypeToString(kMaxLength, 16) + " bytes",
);
}
return length | 0;
@@ -256,25 +324,33 @@ function checked(length) {
export function SlowBuffer(length) {
assertSize(length);
- return Buffer.alloc(+length);
+ return _alloc(+length);
}
-Object.setPrototypeOf(SlowBuffer.prototype, Uint8Array.prototype);
+ObjectSetPrototypeOf(SlowBuffer.prototype, Uint8ArrayPrototype);
-Object.setPrototypeOf(SlowBuffer, Uint8Array);
+ObjectSetPrototypeOf(SlowBuffer, Uint8Array);
-Buffer.isBuffer = function isBuffer(b) {
- return b != null && b._isBuffer === true && b !== Buffer.prototype;
+const BufferIsBuffer = Buffer.isBuffer = function isBuffer(b) {
+ return b != null && b._isBuffer === true && b !== BufferPrototype;
};
-Buffer.compare = function compare(a, b) {
- if (isInstance(a, Uint8Array)) {
- a = Buffer.from(a, a.offset, a.byteLength);
+const BufferCompare = Buffer.compare = function compare(a, b) {
+ if (isUint8Array(a)) {
+ a = BufferFrom(
+ a,
+ TypedArrayPrototypeGetByteOffset(a),
+ TypedArrayPrototypeGetByteLength(a),
+ );
}
- if (isInstance(b, Uint8Array)) {
- b = Buffer.from(b, b.offset, b.byteLength);
+ if (isUint8Array(b)) {
+ b = BufferFrom(
+ b,
+ TypedArrayPrototypeGetByteOffset(b),
+ TypedArrayPrototypeGetByteLength(b),
+ );
}
- if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) {
+ if (!BufferIsBuffer(a) || !BufferIsBuffer(b)) {
throw new TypeError(
'The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array',
);
@@ -284,7 +360,7 @@ Buffer.compare = function compare(a, b) {
}
let x = a.length;
let y = b.length;
- for (let i = 0, len = Math.min(x, y); i < len; ++i) {
+ for (let i = 0, len = MathMin(x, y); i < len; ++i) {
if (a[i] !== b[i]) {
x = a[i];
y = b[i];
@@ -300,18 +376,18 @@ Buffer.compare = function compare(a, b) {
return 0;
};
-Buffer.isEncoding = function isEncoding(encoding) {
+const BufferIsEncoding = Buffer.isEncoding = function isEncoding(encoding) {
return typeof encoding === "string" && encoding.length !== 0 &&
normalizeEncoding(encoding) !== undefined;
};
Buffer.concat = function concat(list, length) {
- if (!Array.isArray(list)) {
+ if (!ArrayIsArray(list)) {
throw new codes.ERR_INVALID_ARG_TYPE("list", "Array", list);
}
if (list.length === 0) {
- return Buffer.alloc(0);
+ return _alloc(0);
}
if (length === undefined) {
@@ -325,7 +401,7 @@ Buffer.concat = function concat(list, length) {
validateOffset(length, "length");
}
- const buffer = Buffer.allocUnsafe(length);
+ const buffer = _allocUnsafe(length);
let pos = 0;
for (let i = 0; i < list.length; i++) {
const buf = list[i];
@@ -346,7 +422,7 @@ Buffer.concat = function concat(list, length) {
// Zero-fill the remaining bytes if the specified `length` was more than
// the actual total length, i.e. if we have some remaining allocated bytes
// there were not initialized.
- buffer.fill(0, pos, length);
+ TypedArrayPrototypeFill(buffer, 0, pos, length);
}
return buffer;
@@ -354,7 +430,18 @@ Buffer.concat = function concat(list, length) {
function byteLength(string, encoding) {
if (typeof string !== "string") {
- if (isArrayBufferView(string) || isAnyArrayBuffer(string)) {
+ if (isTypedArray(string)) {
+ return TypedArrayPrototypeGetByteLength(string);
+ }
+ if (isDataView(string)) {
+ return DataViewPrototypeGetByteLength(string);
+ }
+ if (isArrayBuffer(string)) {
+ return ArrayBufferPrototypeGetByteLength(string);
+ }
+ if (isSharedArrayBuffer(string)) {
+ // TODO(petamoriken): add SharedArayBuffer to primordials
+ // deno-lint-ignore prefer-primordials
return string.byteLength;
}
@@ -463,6 +550,7 @@ Buffer.prototype.toString = function toString(encoding, start, end) {
throw new codes.ERR_UNKNOWN_ENCODING(encoding);
}
+ // deno-lint-ignore prefer-primordials
return ops.slice(this, start, end);
};
@@ -479,22 +567,29 @@ Buffer.prototype.equals = function equals(b) {
if (this === b) {
return true;
}
- return Buffer.compare(this, b) === 0;
+ return BufferCompare(this, b) === 0;
};
-Buffer.prototype.inspect = function inspect() {
- let str = "";
- const max = INSPECT_MAX_BYTES;
- str = this.toString("hex", 0, max).replace(/(.{2})/g, "$1 ").trim();
- if (this.length > max) {
- str += " ... ";
- }
- return "<Buffer " + str + ">";
-};
+const SPACER_PATTERN = new SafeRegExp(/(.{2})/g);
-if (customInspectSymbol) {
- Buffer.prototype[customInspectSymbol] = Buffer.prototype.inspect;
-}
+Buffer.prototype[customInspectSymbol] =
+ Buffer.prototype.inspect =
+ function inspect() {
+ let str = "";
+ const max = INSPECT_MAX_BYTES;
+ str = StringPrototypeTrim(
+ StringPrototypeReplace(
+ // deno-lint-ignore prefer-primordials
+ this.toString("hex", 0, max),
+ SPACER_PATTERN,
+ "$1 ",
+ ),
+ );
+ if (this.length > max) {
+ str += " ... ";
+ }
+ return "<Buffer " + str + ">";
+ };
Buffer.prototype.compare = function compare(
target,
@@ -503,10 +598,14 @@ Buffer.prototype.compare = function compare(
thisStart,
thisEnd,
) {
- if (isInstance(target, Uint8Array)) {
- target = Buffer.from(target, target.offset, target.byteLength);
+ if (isUint8Array(target)) {
+ target = BufferFrom(
+ target,
+ TypedArrayPrototypeGetByteOffset(target),
+ TypedArrayPrototypeGetByteLength(target),
+ );
}
- if (!Buffer.isBuffer(target)) {
+ if (!BufferIsBuffer(target)) {
throw new codes.ERR_INVALID_ARG_TYPE(
"target",
["Buffer", "Uint8Array"],
@@ -563,8 +662,9 @@ Buffer.prototype.compare = function compare(
}
let x = thisEnd - thisStart;
let y = end - start;
- const len = Math.min(x, y);
- const thisCopy = this.slice(thisStart, thisEnd);
+ const len = MathMin(x, y);
+ const thisCopy = TypedArrayPrototypeSlice(this, thisStart, thisEnd);
+ // deno-lint-ignore prefer-primordials
const targetCopy = target.slice(start, end);
for (let i = 0; i < len; ++i) {
if (thisCopy[i] !== targetCopy[i]) {
@@ -594,7 +694,8 @@ function bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
byteOffset = -0x80000000;
}
byteOffset = +byteOffset;
- if (Number.isNaN(byteOffset)) {
+ if (NumberIsNaN(byteOffset)) {
+ // deno-lint-ignore prefer-primordials
byteOffset = dir ? 0 : (buffer.length || buffer.byteLength);
}
dir = !!dir;
@@ -614,6 +715,7 @@ function bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
if (ops === undefined) {
throw new codes.ERR_UNKNOWN_ENCODING(encoding);
}
+ // deno-lint-ignore prefer-primordials
return ops.indexOf(buffer, val, byteOffset, dir);
}
@@ -630,6 +732,7 @@ function bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
}
Buffer.prototype.includes = function includes(val, byteOffset, encoding) {
+ // deno-lint-ignore prefer-primordials
return this.indexOf(val, byteOffset, encoding) !== -1;
};
@@ -649,7 +752,7 @@ Buffer.prototype.asciiSlice = function asciiSlice(offset, length) {
if (offset === 0 && length === this.length) {
return bytesToAscii(this);
} else {
- return bytesToAscii(this.slice(offset, length));
+ return bytesToAscii(TypedArrayPrototypeSlice(this, offset, length));
}
};
@@ -664,7 +767,9 @@ Buffer.prototype.base64Slice = function base64Slice(
if (offset === 0 && length === this.length) {
return forgivingBase64Encode(this);
} else {
- return forgivingBase64Encode(this.slice(offset, length));
+ return forgivingBase64Encode(
+ TypedArrayPrototypeSlice(this, offset, length),
+ );
}
};
@@ -683,7 +788,9 @@ Buffer.prototype.base64urlSlice = function base64urlSlice(
if (offset === 0 && length === this.length) {
return forgivingBase64UrlEncode(this);
} else {
- return forgivingBase64UrlEncode(this.slice(offset, length));
+ return forgivingBase64UrlEncode(
+ TypedArrayPrototypeSlice(this, offset, length),
+ );
}
};
@@ -728,7 +835,7 @@ Buffer.prototype.ucs2Slice = function ucs2Slice(offset, length) {
if (offset === 0 && length === this.length) {
return bytesToUtf16le(this);
} else {
- return bytesToUtf16le(this.slice(offset, length));
+ return bytesToUtf16le(TypedArrayPrototypeSlice(this, offset, length));
}
};
@@ -747,9 +854,9 @@ Buffer.prototype.utf8Slice = function utf8Slice(string, offset, length) {
Buffer.prototype.utf8Write = function utf8Write(string, offset, length) {
offset = offset || 0;
- const maxLength = Math.min(length || Infinity, this.length - offset);
+ const maxLength = MathMin(length || Infinity, this.length - offset);
const buf = offset || maxLength < this.length
- ? this.subarray(offset, maxLength + offset)
+ ? TypedArrayPrototypeSubarray(this, offset, maxLength + offset)
: this;
return utf8Encoder.encodeInto(string, buf).written;
};
@@ -801,7 +908,7 @@ Buffer.prototype.write = function write(string, offset, length, encoding) {
Buffer.prototype.toJSON = function toJSON() {
return {
type: "Buffer",
- data: Array.prototype.slice.call(this._arr || this, 0),
+ data: ArrayPrototypeSlice(this._arr || this, 0),
};
};
function fromArrayBuffer(obj, byteOffset, length) {
@@ -810,11 +917,12 @@ function fromArrayBuffer(obj, byteOffset, length) {
byteOffset = 0;
} else {
byteOffset = +byteOffset;
- if (Number.isNaN(byteOffset)) {
+ if (NumberIsNaN(byteOffset)) {
byteOffset = 0;
}
}
+ // deno-lint-ignore prefer-primordials
const maxLength = obj.byteLength - byteOffset;
if (maxLength < 0) {
@@ -836,7 +944,7 @@ function fromArrayBuffer(obj, byteOffset, length) {
}
const buffer = new Uint8Array(obj, byteOffset, length);
- Object.setPrototypeOf(buffer, Buffer.prototype);
+ ObjectSetPrototypeOf(buffer, BufferPrototype);
return buffer;
}
@@ -844,6 +952,7 @@ function _base64Slice(buf, start, end) {
if (start === 0 && end === buf.length) {
return forgivingBase64Encode(buf);
} else {
+ // deno-lint-ignore prefer-primordials
return forgivingBase64Encode(buf.slice(start, end));
}
}
@@ -852,9 +961,10 @@ const decoder = new TextDecoder();
function _utf8Slice(buf, start, end) {
try {
+ // deno-lint-ignore prefer-primordials
return decoder.decode(buf.slice(start, end));
} catch (err) {
- if (err instanceof TypeError) {
+ if (ObjectPrototypeIsPrototypeOf(TypeErrorPrototype, err)) {
throw new NodeError("ERR_STRING_TOO_LONG", "String too long");
}
throw err;
@@ -863,9 +973,9 @@ function _utf8Slice(buf, start, end) {
function _latin1Slice(buf, start, end) {
let ret = "";
- end = Math.min(buf.length, end);
+ end = MathMin(buf.length, end);
for (let i = start; i < end; ++i) {
- ret += String.fromCharCode(buf[i]);
+ ret += StringFromCharCode(buf[i]);
}
return ret;
}
@@ -994,42 +1104,38 @@ Buffer.prototype.readUint32BE = Buffer.prototype.readUInt32BE = readUInt32BE;
Buffer.prototype.readBigUint64LE =
Buffer.prototype.readBigUInt64LE =
- defineBigIntMethod(
- function readBigUInt64LE(offset) {
- offset = offset >>> 0;
- validateNumber(offset, "offset");
- const first = this[offset];
- const last = this[offset + 7];
- if (first === void 0 || last === void 0) {
- boundsError(offset, this.length - 8);
- }
- const lo = first + this[++offset] * 2 ** 8 +
- this[++offset] * 2 ** 16 +
- this[++offset] * 2 ** 24;
- const hi = this[++offset] + this[++offset] * 2 ** 8 +
- this[++offset] * 2 ** 16 + last * 2 ** 24;
- return BigInt(lo) + (BigInt(hi) << BigInt(32));
- },
- );
+ function readBigUInt64LE(offset) {
+ offset = offset >>> 0;
+ validateNumber(offset, "offset");
+ const first = this[offset];
+ const last = this[offset + 7];
+ if (first === void 0 || last === void 0) {
+ boundsError(offset, this.length - 8);
+ }
+ const lo = first + this[++offset] * 2 ** 8 +
+ this[++offset] * 2 ** 16 +
+ this[++offset] * 2 ** 24;
+ const hi = this[++offset] + this[++offset] * 2 ** 8 +
+ this[++offset] * 2 ** 16 + last * 2 ** 24;
+ return BigInt(lo) + (BigInt(hi) << 32n);
+ };
Buffer.prototype.readBigUint64BE =
Buffer.prototype.readBigUInt64BE =
- defineBigIntMethod(
- function readBigUInt64BE(offset) {
- offset = offset >>> 0;
- validateNumber(offset, "offset");
- const first = this[offset];
- const last = this[offset + 7];
- if (first === void 0 || last === void 0) {
- boundsError(offset, this.length - 8);
- }
- const hi = first * 2 ** 24 + this[++offset] * 2 ** 16 +
- this[++offset] * 2 ** 8 + this[++offset];
- const lo = this[++offset] * 2 ** 24 + this[++offset] * 2 ** 16 +
- this[++offset] * 2 ** 8 + last;
- return (BigInt(hi) << BigInt(32)) + BigInt(lo);
- },
- );
+ function readBigUInt64BE(offset) {
+ offset = offset >>> 0;
+ validateNumber(offset, "offset");
+ const first = this[offset];
+ const last = this[offset + 7];
+ if (first === void 0 || last === void 0) {
+ boundsError(offset, this.length - 8);
+ }
+ const hi = first * 2 ** 24 + this[++offset] * 2 ** 16 +
+ this[++offset] * 2 ** 8 + this[++offset];
+ const lo = this[++offset] * 2 ** 24 + this[++offset] * 2 ** 16 +
+ this[++offset] * 2 ** 8 + last;
+ return (BigInt(hi) << 32n) + BigInt(lo);
+ };
Buffer.prototype.readIntLE = function readIntLE(
offset,
@@ -1148,43 +1254,39 @@ Buffer.prototype.readInt32BE = function readInt32BE(offset = 0) {
last;
};
-Buffer.prototype.readBigInt64LE = defineBigIntMethod(
- function readBigInt64LE(offset) {
- offset = offset >>> 0;
- validateNumber(offset, "offset");
- const first = this[offset];
- const last = this[offset + 7];
- if (first === void 0 || last === void 0) {
- boundsError(offset, this.length - 8);
- }
- const val = this[offset + 4] + this[offset + 5] * 2 ** 8 +
- this[offset + 6] * 2 ** 16 + (last << 24);
- return (BigInt(val) << BigInt(32)) +
- BigInt(
- first + this[++offset] * 2 ** 8 + this[++offset] * 2 ** 16 +
- this[++offset] * 2 ** 24,
- );
- },
-);
+Buffer.prototype.readBigInt64LE = function readBigInt64LE(offset) {
+ offset = offset >>> 0;
+ validateNumber(offset, "offset");
+ const first = this[offset];
+ const last = this[offset + 7];
+ if (first === void 0 || last === void 0) {
+ boundsError(offset, this.length - 8);
+ }
+ const val = this[offset + 4] + this[offset + 5] * 2 ** 8 +
+ this[offset + 6] * 2 ** 16 + (last << 24);
+ return (BigInt(val) << 32n) +
+ BigInt(
+ first + this[++offset] * 2 ** 8 + this[++offset] * 2 ** 16 +
+ this[++offset] * 2 ** 24,
+ );
+};
-Buffer.prototype.readBigInt64BE = defineBigIntMethod(
- function readBigInt64BE(offset) {
- offset = offset >>> 0;
- validateNumber(offset, "offset");
- const first = this[offset];
- const last = this[offset + 7];
- if (first === void 0 || last === void 0) {
- boundsError(offset, this.length - 8);
- }
- const val = (first << 24) + this[++offset] * 2 ** 16 +
- this[++offset] * 2 ** 8 + this[++offset];
- return (BigInt(val) << BigInt(32)) +
- BigInt(
- this[++offset] * 2 ** 24 + this[++offset] * 2 ** 16 +
- this[++offset] * 2 ** 8 + last,
- );
- },
-);
+Buffer.prototype.readBigInt64BE = function readBigInt64BE(offset) {
+ offset = offset >>> 0;
+ validateNumber(offset, "offset");
+ const first = this[offset];
+ const last = this[offset + 7];
+ if (first === void 0 || last === void 0) {
+ boundsError(offset, this.length - 8);
+ }
+ const val = (first << 24) + this[++offset] * 2 ** 16 +
+ this[++offset] * 2 ** 8 + this[++offset];
+ return (BigInt(val) << 32n) +
+ BigInt(
+ this[++offset] * 2 ** 24 + this[++offset] * 2 ** 16 +
+ this[++offset] * 2 ** 8 + last,
+ );
+};
Buffer.prototype.readFloatLE = function readFloatLE(offset) {
return bigEndian
@@ -1293,7 +1395,7 @@ Buffer.prototype.writeUint32BE =
function wrtBigUInt64LE(buf, value, offset, min, max) {
checkIntBI(value, min, max, buf, offset, 7);
- let lo = Number(value & BigInt(4294967295));
+ let lo = Number(value & 4294967295n);
buf[offset++] = lo;
lo = lo >> 8;
buf[offset++] = lo;
@@ -1301,7 +1403,7 @@ function wrtBigUInt64LE(buf, value, offset, min, max) {
buf[offset++] = lo;
lo = lo >> 8;
buf[offset++] = lo;
- let hi = Number(value >> BigInt(32) & BigInt(4294967295));
+ let hi = Number(value >> 32n & 4294967295n);
buf[offset++] = hi;
hi = hi >> 8;
buf[offset++] = hi;
@@ -1314,7 +1416,7 @@ function wrtBigUInt64LE(buf, value, offset, min, max) {
function wrtBigUInt64BE(buf, value, offset, min, max) {
checkIntBI(value, min, max, buf, offset, 7);
- let lo = Number(value & BigInt(4294967295));
+ let lo = Number(value & 4294967295n);
buf[offset + 7] = lo;
lo = lo >> 8;
buf[offset + 6] = lo;
@@ -1322,7 +1424,7 @@ function wrtBigUInt64BE(buf, value, offset, min, max) {
buf[offset + 5] = lo;
lo = lo >> 8;
buf[offset + 4] = lo;
- let hi = Number(value >> BigInt(32) & BigInt(4294967295));
+ let hi = Number(value >> 32n & 4294967295n);
buf[offset + 3] = hi;
hi = hi >> 8;
buf[offset + 2] = hi;
@@ -1335,31 +1437,27 @@ function wrtBigUInt64BE(buf, value, offset, min, max) {
Buffer.prototype.writeBigUint64LE =
Buffer.prototype.writeBigUInt64LE =
- defineBigIntMethod(
- function writeBigUInt64LE(value, offset = 0) {
- return wrtBigUInt64LE(
- this,
- value,
- offset,
- BigInt(0),
- BigInt("0xffffffffffffffff"),
- );
- },
- );
+ function writeBigUInt64LE(value, offset = 0) {
+ return wrtBigUInt64LE(
+ this,
+ value,
+ offset,
+ 0n,
+ 0xffffffffffffffffn,
+ );
+ };
Buffer.prototype.writeBigUint64BE =
Buffer.prototype.writeBigUInt64BE =
- defineBigIntMethod(
- function writeBigUInt64BE(value, offset = 0) {
- return wrtBigUInt64BE(
- this,
- value,
- offset,
- BigInt(0),
- BigInt("0xffffffffffffffff"),
- );
- },
- );
+ function writeBigUInt64BE(value, offset = 0) {
+ return wrtBigUInt64BE(
+ this,
+ value,
+ offset,
+ 0n,
+ 0xffffffffffffffffn,
+ );
+ };
Buffer.prototype.writeIntLE = function writeIntLE(
value,
@@ -1450,29 +1548,25 @@ Buffer.prototype.writeInt32BE = function writeInt32BE(value, offset = 0) {
return writeU_Int32BE(this, value, offset, -0x80000000, 0x7fffffff);
};
-Buffer.prototype.writeBigInt64LE = defineBigIntMethod(
- function writeBigInt64LE(value, offset = 0) {
- return wrtBigUInt64LE(
- this,
- value,
- offset,
- -BigInt("0x8000000000000000"),
- BigInt("0x7fffffffffffffff"),
- );
- },
-);
+Buffer.prototype.writeBigInt64LE = function writeBigInt64LE(value, offset = 0) {
+ return wrtBigUInt64LE(
+ this,
+ value,
+ offset,
+ -0x8000000000000000n,
+ 0x7fffffffffffffffn,
+ );
+};
-Buffer.prototype.writeBigInt64BE = defineBigIntMethod(
- function writeBigInt64BE(value, offset = 0) {
- return wrtBigUInt64BE(
- this,
- value,
- offset,
- -BigInt("0x8000000000000000"),
- BigInt("0x7fffffffffffffff"),
- );
- },
-);
+Buffer.prototype.writeBigInt64BE = function writeBigInt64BE(value, offset = 0) {
+ return wrtBigUInt64BE(
+ this,
+ value,
+ offset,
+ -0x8000000000000000n,
+ 0x7fffffffffffffffn,
+ );
+};
Buffer.prototype.writeFloatLE = function writeFloatLE(
value,
@@ -1600,14 +1694,12 @@ Buffer.prototype.copy = function copy(
}
const len = sourceEnd - sourceStart;
- if (
- this === target && typeof Uint8Array.prototype.copyWithin === "function"
- ) {
- this.copyWithin(targetStart, sourceStart, sourceEnd);
+ if (this === target) {
+ TypedArrayPrototypeCopyWithin(this, targetStart, sourceStart, sourceEnd);
} else {
- Uint8Array.prototype.set.call(
+ TypedArrayPrototypeSet(
target,
- this.subarray(sourceStart, sourceEnd),
+ TypedArrayPrototypeSubarray(this, sourceStart, sourceEnd),
targetStart,
);
}
@@ -1627,11 +1719,11 @@ Buffer.prototype.fill = function fill(val, start, end, encoding) {
if (encoding !== void 0 && typeof encoding !== "string") {
throw new TypeError("encoding must be a string");
}
- if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
+ if (typeof encoding === "string" && !BufferIsEncoding(encoding)) {
throw new TypeError("Unknown encoding: " + encoding);
}
if (val.length === 1) {
- const code = val.charCodeAt(0);
+ const code = StringPrototypeCharCodeAt(val, 0);
if (encoding === "utf8" && code < 128 || encoding === "latin1") {
val = code;
}
@@ -1658,7 +1750,7 @@ Buffer.prototype.fill = function fill(val, start, end, encoding) {
this[i] = val;
}
} else {
- const bytes = Buffer.isBuffer(val) ? val : Buffer.from(val, encoding);
+ const bytes = BufferIsBuffer(val) ? val : BufferFrom(val, encoding);
const len = bytes.length;
if (len === 0) {
throw new codes.ERR_INVALID_ARG_VALUE(
@@ -1685,7 +1777,7 @@ function checkIntBI(value, min, max, buf, offset, byteLength2) {
const n = typeof min === "bigint" ? "n" : "";
let range;
if (byteLength2 > 3) {
- if (min === 0 || min === BigInt(0)) {
+ if (min === 0 || min === 0n) {
range = `>= 0${n} and < 2${n} ** ${(byteLength2 + 1) * 8}${n}`;
} else {
range = `>= -(2${n} ** ${(byteLength2 + 1) * 8 - 1}${n}) and < 2 ** ${
@@ -1710,7 +1802,7 @@ function checkIntBI(value, min, max, buf, offset, byteLength2) {
function blitBuffer(src, dst, offset, byteLength = Infinity) {
const srcLength = src.length;
// Establish the number of bytes to be written
- const bytesToWrite = Math.min(
+ const bytesToWrite = MathMin(
// If byte length is defined in the call, then it sets an upper bound,
// otherwise it is Infinity and is never chosen.
byteLength,
@@ -1730,15 +1822,9 @@ function blitBuffer(src, dst, offset, byteLength = Infinity) {
return bytesToWrite;
}
-function isInstance(obj, type) {
- return obj instanceof type ||
- obj != null && obj.constructor != null &&
- obj.constructor.name != null && obj.constructor.name === type.name;
-}
-
const hexSliceLookupTable = function () {
const alphabet = "0123456789abcdef";
- const table = new Array(256);
+ const table = [];
for (let i = 0; i < 16; ++i) {
const i16 = i * 16;
for (let j = 0; j < 16; ++j) {
@@ -1748,14 +1834,6 @@ const hexSliceLookupTable = function () {
return table;
}();
-function defineBigIntMethod(fn) {
- return typeof BigInt === "undefined" ? BufferBigIntNotDefined : fn;
-}
-
-function BufferBigIntNotDefined() {
- throw new Error("BigInt not supported");
-}
-
export function readUInt48LE(buf, offset = 0) {
validateNumber(offset, "offset");
const first = buf[offset];
@@ -2079,10 +2157,10 @@ export function byteLengthUtf8(str) {
function base64ByteLength(str, bytes) {
// Handle padding
- if (str.charCodeAt(bytes - 1) === 0x3D) {
+ if (StringPrototypeCharCodeAt(str, bytes - 1) === 0x3D) {
bytes--;
}
- if (bytes > 1 && str.charCodeAt(bytes - 1) === 0x3D) {
+ if (bytes > 1 && StringPrototypeCharCodeAt(str, bytes - 1) === 0x3D) {
bytes--;
}
@@ -2090,7 +2168,7 @@ function base64ByteLength(str, bytes) {
return (bytes * 3) >>> 2;
}
-export const encodingsMap = Object.create(null);
+export const encodingsMap = ObjectCreate(null);
for (let i = 0; i < encodings.length; ++i) {
encodingsMap[encodings[i]] = i;
}
@@ -2220,7 +2298,7 @@ export const encodingOps = {
};
export function getEncodingOps(encoding) {
- encoding = String(encoding).toLowerCase();
+ encoding = StringPrototypeToLowerCase(String(encoding));
switch (encoding.length) {
case 4:
if (encoding === "utf8") return encodingOps.utf8;
@@ -2260,6 +2338,14 @@ export function getEncodingOps(encoding) {
}
}
+/**
+ * @param {Buffer} source
+ * @param {Buffer} target
+ * @param {number} targetStart
+ * @param {number} sourceStart
+ * @param {number} sourceEnd
+ * @returns {number}
+ */
export function _copyActual(
source,
target,
@@ -2278,6 +2364,7 @@ export function _copyActual(
}
if (sourceStart !== 0 || sourceEnd < source.length) {
+ // deno-lint-ignore prefer-primordials
source = new Uint8Array(source.buffer, source.byteOffset + sourceStart, nb);
}
@@ -2287,7 +2374,7 @@ export function _copyActual(
}
export function boundsError(value, length, type) {
- if (Math.floor(value) !== value) {
+ if (MathFloor(value) !== value) {
validateNumber(value, type);
throw new codes.ERR_OUT_OF_RANGE(type || "offset", "an integer", value);
}
@@ -2310,7 +2397,7 @@ export function validateNumber(value, name, min = undefined, max) {
if (
(min != null && value < min) || (max != null && value > max) ||
- ((min != null || max != null) && Number.isNaN(value))
+ ((min != null || max != null) && NumberIsNaN(value))
) {
throw new codes.ERR_OUT_OF_RANGE(
name,
@@ -2344,11 +2431,11 @@ function checkInt(value, min, max, buf, offset, byteLength) {
export function toInteger(n, defaultVal) {
n = +n;
if (
- !Number.isNaN(n) &&
- n >= Number.MIN_SAFE_INTEGER &&
- n <= Number.MAX_SAFE_INTEGER
+ !NumberIsNaN(n) &&
+ n >= NumberMIN_SAFE_INTEGER &&
+ n <= NumberMAX_SAFE_INTEGER
) {
- return ((n % 1) === 0 ? n : Math.floor(n));
+ return ((n % 1) === 0 ? n : MathFloor(n));
}
return defaultVal;
}
@@ -2421,7 +2508,7 @@ export function writeU_Int48BE(buf, value, offset, min, max) {
value = +value;
checkInt(value, min, max, buf, offset, 5);
- const newVal = Math.floor(value * 2 ** -32);
+ const newVal = MathFloor(value * 2 ** -32);
buf[offset++] = newVal >>> 8;
buf[offset++] = newVal;
buf[offset + 3] = value;
@@ -2439,7 +2526,7 @@ export function writeU_Int40BE(buf, value, offset, min, max) {
value = +value;
checkInt(value, min, max, buf, offset, 4);
- buf[offset++] = Math.floor(value * 2 ** -32);
+ buf[offset++] = MathFloor(value * 2 ** -32);
buf[offset + 3] = value;
value = value >>> 8;
buf[offset + 2] = value;
@@ -2482,12 +2569,12 @@ export function validateOffset(
value,
name,
min = 0,
- max = Number.MAX_SAFE_INTEGER,
+ max = NumberMAX_SAFE_INTEGER,
) {
if (typeof value !== "number") {
throw new codes.ERR_INVALID_ARG_TYPE(name, "number", value);
}
- if (!Number.isInteger(value)) {
+ if (!NumberIsInteger(value)) {
throw new codes.ERR_OUT_OF_RANGE(name, "an integer", value);
}
if (value < min || value > max) {
@@ -2500,7 +2587,7 @@ export function writeU_Int48LE(buf, value, offset, min, max) {
value = +value;
checkInt(value, min, max, buf, offset, 5);
- const newVal = Math.floor(value * 2 ** -32);
+ const newVal = MathFloor(value * 2 ** -32);
buf[offset++] = value;
value = value >>> 8;
buf[offset++] = value;
@@ -2526,7 +2613,7 @@ export function writeU_Int40LE(buf, value, offset, min, max) {
buf[offset++] = value;
value = value >>> 8;
buf[offset++] = value;
- buf[offset++] = Math.floor(newVal * 2 ** -32);
+ buf[offset++] = MathFloor(newVal * 2 ** -32);
return offset;
}
@@ -2560,14 +2647,14 @@ export function writeU_Int24LE(buf, value, offset, min, max) {
export function isUtf8(input) {
if (isTypedArray(input)) {
- if (input.buffer.detached) {
+ if (isDetachedBuffer(TypedArrayPrototypeGetBuffer(input))) {
throw new ERR_INVALID_STATE("Cannot validate on a detached buffer");
}
return op_is_utf8(input);
}
if (isAnyArrayBuffer(input)) {
- if (input.detached) {
+ if (isDetachedBuffer(input)) {
throw new ERR_INVALID_STATE("Cannot validate on a detached buffer");
}
return op_is_utf8(new Uint8Array(input));
@@ -2582,14 +2669,14 @@ export function isUtf8(input) {
export function isAscii(input) {
if (isTypedArray(input)) {
- if (input.buffer.detached) {
+ if (isDetachedBuffer(TypedArrayPrototypeGetBuffer(input))) {
throw new ERR_INVALID_STATE("Cannot validate on a detached buffer");
}
return op_is_ascii(input);
}
if (isAnyArrayBuffer(input)) {
- if (input.detached) {
+ if (isDetachedBuffer(input)) {
throw new ERR_INVALID_STATE("Cannot validate on a detached buffer");
}
return op_is_ascii(new Uint8Array(input));
@@ -2636,7 +2723,7 @@ export function transcode(source, fromEnco, toEnco) {
const result = op_transcode(new Uint8Array(source), fromEnco, toEnco);
return Buffer.from(result, toEnco);
} catch (err) {
- if (err.message.includes("Unable to transcode Buffer")) {
+ if (StringPrototypeIncludes(err.message, "Unable to transcode Buffer")) {
throw illegalArgumentError;
} else {
throw err;
diff --git a/ext/node/polyfills/internal/child_process.ts b/ext/node/polyfills/internal/child_process.ts
index 6f209b719..cfff1079f 100644
--- a/ext/node/polyfills/internal/child_process.ts
+++ b/ext/node/polyfills/internal/child_process.ts
@@ -1191,8 +1191,12 @@ function toDenoArgs(args: string[]): string[] {
}
if (flagInfo === undefined) {
- // Not a known flag that expects a value. Just copy it to the output.
- denoArgs.push(arg);
+ if (arg === "--no-warnings") {
+ denoArgs.push("--quiet");
+ } else {
+ // Not a known flag that expects a value. Just copy it to the output.
+ denoArgs.push(arg);
+ }
continue;
}
@@ -1335,7 +1339,7 @@ export function setupChannel(target: any, ipc: number) {
}
}
- process.nextTick(handleMessage, msg);
+ nextTick(handleMessage, msg);
}
} catch (err) {
if (
@@ -1396,7 +1400,7 @@ export function setupChannel(target: any, ipc: number) {
if (!target.connected) {
const err = new ERR_IPC_CHANNEL_CLOSED();
if (typeof callback === "function") {
- process.nextTick(callback, err);
+ nextTick(callback, err);
} else {
nextTick(() => target.emit("error", err));
}
@@ -1412,7 +1416,18 @@ export function setupChannel(target: any, ipc: number) {
.then(() => {
control.unrefCounted();
if (callback) {
- process.nextTick(callback, null);
+ nextTick(callback, null);
+ }
+ }, (err: Error) => {
+ control.unrefCounted();
+ if (err instanceof Deno.errors.Interrupted) {
+ // Channel closed on us mid-write.
+ } else {
+ if (typeof callback === "function") {
+ nextTick(callback, err);
+ } else {
+ nextTick(() => target.emit("error", err));
+ }
}
});
return queueOk[0];
@@ -1429,7 +1444,7 @@ export function setupChannel(target: any, ipc: number) {
target.connected = false;
target[kCanDisconnect] = false;
control[kControlDisconnect]();
- process.nextTick(() => {
+ nextTick(() => {
target.channel = null;
core.close(ipc);
target.emit("disconnect");
diff --git a/ext/node/polyfills/internal/crypto/_randomInt.ts b/ext/node/polyfills/internal/crypto/_randomInt.ts
index 7f4d703ad..e08b3e963 100644
--- a/ext/node/polyfills/internal/crypto/_randomInt.ts
+++ b/ext/node/polyfills/internal/crypto/_randomInt.ts
@@ -1,9 +1,15 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-// TODO(petamoriken): enable prefer-primordials for node polyfills
-// deno-lint-ignore-file prefer-primordials
-
import { op_node_random_int } from "ext:core/ops";
+import { primordials } from "ext:core/mod.js";
+const {
+ Error,
+ MathCeil,
+ MathFloor,
+ MathPow,
+ NumberIsSafeInteger,
+ RangeError,
+} = primordials;
export default function randomInt(max: number): number;
export default function randomInt(min: number, max: number): number;
@@ -23,7 +29,9 @@ export default function randomInt(
cb?: (err: Error | null, n?: number) => void,
): number | void {
if (typeof max === "number" && typeof min === "number") {
- [max, min] = [min, max];
+ const temp = max;
+ max = min;
+ min = temp;
}
if (min === undefined) min = 0;
else if (typeof min === "function") {
@@ -32,13 +40,13 @@ export default function randomInt(
}
if (
- !Number.isSafeInteger(min) ||
- typeof max === "number" && !Number.isSafeInteger(max)
+ !NumberIsSafeInteger(min) ||
+ typeof max === "number" && !NumberIsSafeInteger(max)
) {
throw new Error("max or min is not a Safe Number");
}
- if (max - min > Math.pow(2, 48)) {
+ if (max - min > MathPow(2, 48)) {
throw new RangeError("max - min should be less than 2^48!");
}
@@ -46,8 +54,8 @@ export default function randomInt(
throw new Error("Min is bigger than Max!");
}
- min = Math.ceil(min);
- max = Math.floor(max);
+ min = MathCeil(min);
+ max = MathFloor(max);
const result = op_node_random_int(min, max);
if (cb) {
diff --git a/ext/node/polyfills/internal/crypto/keygen.ts b/ext/node/polyfills/internal/crypto/keygen.ts
index a40c76c0d..b023ab106 100644
--- a/ext/node/polyfills/internal/crypto/keygen.ts
+++ b/ext/node/polyfills/internal/crypto/keygen.ts
@@ -29,6 +29,8 @@ import {
} from "ext:deno_node/internal/validators.mjs";
import { Buffer } from "node:buffer";
import { KeyFormat, KeyType } from "ext:deno_node/internal/crypto/types.ts";
+import process from "node:process";
+import { promisify } from "node:util";
import {
op_node_generate_dh_group_key,
@@ -569,7 +571,15 @@ export function generateKeyPair(
privateKey: any,
) => void,
) {
- createJob(kAsync, type, options).then((pair) => {
+ _generateKeyPair(type, options)
+ .then(
+ (res) => callback(null, res.publicKey, res.privateKey),
+ (err) => callback(err, null, null),
+ );
+}
+
+function _generateKeyPair(type: string, options: unknown) {
+ return createJob(kAsync, type, options).then((pair) => {
const privateKeyHandle = op_node_get_private_key_from_pair(pair);
const publicKeyHandle = op_node_get_public_key_from_pair(pair);
@@ -588,12 +598,15 @@ export function generateKeyPair(
}
}
- callback(null, publicKey, privateKey);
- }).catch((err) => {
- callback(err, null, null);
+ return { publicKey, privateKey };
});
}
+Object.defineProperty(generateKeyPair, promisify.custom, {
+ enumerable: false,
+ value: _generateKeyPair,
+});
+
export interface KeyPairKeyObjectResult {
publicKey: KeyObject;
privateKey: KeyObject;
diff --git a/ext/node/polyfills/internal/crypto/random.ts b/ext/node/polyfills/internal/crypto/random.ts
index 4219414dc..a41b86819 100644
--- a/ext/node/polyfills/internal/crypto/random.ts
+++ b/ext/node/polyfills/internal/crypto/random.ts
@@ -38,6 +38,7 @@ import {
ERR_INVALID_ARG_TYPE,
ERR_OUT_OF_RANGE,
} from "ext:deno_node/internal/errors.ts";
+import { Buffer } from "node:buffer";
export { default as randomBytes } from "ext:deno_node/internal/crypto/_randomBytes.ts";
export {
diff --git a/ext/node/polyfills/internal/errors.ts b/ext/node/polyfills/internal/errors.ts
index 51bd7a025..61b53fa96 100644
--- a/ext/node/polyfills/internal/errors.ts
+++ b/ext/node/polyfills/internal/errors.ts
@@ -18,7 +18,7 @@
*/
import { primordials } from "ext:core/mod.js";
-const { JSONStringify, SymbolFor } = primordials;
+const { JSONStringify, SafeArrayIterator, SymbolFor } = primordials;
import { format, inspect } from "ext:deno_node/internal/util/inspect.mjs";
import { codes } from "ext:deno_node/internal/error_codes.ts";
import {
@@ -1874,6 +1874,11 @@ export class ERR_SOCKET_CLOSED extends NodeError {
super("ERR_SOCKET_CLOSED", `Socket is closed`);
}
}
+export class ERR_SOCKET_CONNECTION_TIMEOUT extends NodeError {
+ constructor() {
+ super("ERR_SOCKET_CONNECTION_TIMEOUT", `Socket connection timeout`);
+ }
+}
export class ERR_SOCKET_DGRAM_IS_CONNECTED extends NodeError {
constructor() {
super("ERR_SOCKET_DGRAM_IS_CONNECTED", `Already connected`);
@@ -2385,6 +2390,15 @@ export class ERR_INVALID_RETURN_VALUE extends NodeTypeError {
}
}
+export class ERR_NOT_IMPLEMENTED extends NodeError {
+ constructor(message?: string) {
+ super(
+ "ERR_NOT_IMPLEMENTED",
+ message ? `Not implemented: ${message}` : "Not implemented",
+ );
+ }
+}
+
export class ERR_INVALID_URL extends NodeTypeError {
input: string;
constructor(input: string) {
@@ -2558,19 +2572,6 @@ export class ERR_FS_RMDIR_ENOTDIR extends NodeSystemError {
}
}
-export class ERR_OS_NO_HOMEDIR extends NodeSystemError {
- constructor() {
- const code = isWindows ? "ENOENT" : "ENOTDIR";
- const ctx: NodeSystemErrorCtx = {
- message: "not a directory",
- syscall: "home",
- code,
- errno: isWindows ? osConstants.errno.ENOENT : osConstants.errno.ENOTDIR,
- };
- super(code, ctx, "Path is not a directory");
- }
-}
-
export class ERR_HTTP_SOCKET_ASSIGNED extends NodeError {
constructor() {
super(
@@ -2646,11 +2647,30 @@ export function aggregateTwoErrors(
}
return innerError || outerError;
}
+
+export class NodeAggregateError extends AggregateError {
+ code: string;
+ constructor(errors, message) {
+ super(new SafeArrayIterator(errors), message);
+ this.code = errors[0]?.code;
+ }
+
+ get [kIsNodeError]() {
+ return true;
+ }
+
+ // deno-lint-ignore adjacent-overload-signatures
+ get ["constructor"]() {
+ return AggregateError;
+ }
+}
+
codes.ERR_IPC_CHANNEL_CLOSED = ERR_IPC_CHANNEL_CLOSED;
codes.ERR_INVALID_ARG_TYPE = ERR_INVALID_ARG_TYPE;
codes.ERR_INVALID_ARG_VALUE = ERR_INVALID_ARG_VALUE;
codes.ERR_OUT_OF_RANGE = ERR_OUT_OF_RANGE;
codes.ERR_SOCKET_BAD_PORT = ERR_SOCKET_BAD_PORT;
+codes.ERR_SOCKET_CONNECTION_TIMEOUT = ERR_SOCKET_CONNECTION_TIMEOUT;
codes.ERR_BUFFER_OUT_OF_BOUNDS = ERR_BUFFER_OUT_OF_BOUNDS;
codes.ERR_UNKNOWN_ENCODING = ERR_UNKNOWN_ENCODING;
codes.ERR_PARSE_ARGS_INVALID_OPTION_VALUE = ERR_PARSE_ARGS_INVALID_OPTION_VALUE;
@@ -2851,6 +2871,7 @@ export default {
ERR_INVALID_SYNC_FORK_INPUT,
ERR_INVALID_THIS,
ERR_INVALID_TUPLE,
+ ERR_NOT_IMPLEMENTED,
ERR_INVALID_URI,
ERR_INVALID_URL,
ERR_INVALID_URL_SCHEME,
diff --git a/ext/node/polyfills/internal/net.ts b/ext/node/polyfills/internal/net.ts
index 144612626..a3dcb3ed2 100644
--- a/ext/node/polyfills/internal/net.ts
+++ b/ext/node/polyfills/internal/net.ts
@@ -95,4 +95,5 @@ export function makeSyncWrite(fd: number) {
};
}
+export const kReinitializeHandle = Symbol("kReinitializeHandle");
export const normalizedArgsSymbol = Symbol("normalizedArgs");
diff --git a/ext/node/polyfills/internal/util/inspect.mjs b/ext/node/polyfills/internal/util/inspect.mjs
index 3a61c387c..ae797449b 100644
--- a/ext/node/polyfills/internal/util/inspect.mjs
+++ b/ext/node/polyfills/internal/util/inspect.mjs
@@ -565,6 +565,19 @@ export function stripVTControlCharacters(str) {
export function styleText(format, text) {
validateString(text, "text");
+
+ if (Array.isArray(format)) {
+ for (let i = 0; i < format.length; i++) {
+ const item = format[i];
+ const formatCodes = inspect.colors[item];
+ if (formatCodes == null) {
+ validateOneOf(item, "format", Object.keys(inspect.colors));
+ }
+ text = `\u001b[${formatCodes[0]}m${text}\u001b[${formatCodes[1]}m`;
+ }
+ return text;
+ }
+
const formatCodes = inspect.colors[format];
if (formatCodes == null) {
validateOneOf(format, "format", Object.keys(inspect.colors));
diff --git a/ext/node/polyfills/internal_binding/_timingSafeEqual.ts b/ext/node/polyfills/internal_binding/_timingSafeEqual.ts
index ff141fdbf..559b7685b 100644
--- a/ext/node/polyfills/internal_binding/_timingSafeEqual.ts
+++ b/ext/node/polyfills/internal_binding/_timingSafeEqual.ts
@@ -5,10 +5,11 @@
import { Buffer } from "node:buffer";
-function assert(cond) {
- if (!cond) {
- throw new Error("assertion failed");
+function toDataView(ab: ArrayBufferLike | ArrayBufferView): DataView {
+ if (ArrayBuffer.isView(ab)) {
+ return new DataView(ab.buffer, ab.byteOffset, ab.byteLength);
}
+ return new DataView(ab);
}
/** Compare two array buffers or data views in a way that timing-based attacks
@@ -21,13 +22,11 @@ function stdTimingSafeEqual(
return false;
}
if (!(a instanceof DataView)) {
- a = new DataView(ArrayBuffer.isView(a) ? a.buffer : a);
+ a = toDataView(a);
}
if (!(b instanceof DataView)) {
- b = new DataView(ArrayBuffer.isView(b) ? b.buffer : b);
+ b = toDataView(b);
}
- assert(a instanceof DataView);
- assert(b instanceof DataView);
const length = a.byteLength;
let out = 0;
let i = -1;
@@ -41,7 +40,11 @@ export const timingSafeEqual = (
a: Buffer | DataView | ArrayBuffer,
b: Buffer | DataView | ArrayBuffer,
): boolean => {
- if (a instanceof Buffer) a = new DataView(a.buffer);
- if (a instanceof Buffer) b = new DataView(a.buffer);
+ if (a instanceof Buffer) {
+ a = new DataView(a.buffer, a.byteOffset, a.byteLength);
+ }
+ if (b instanceof Buffer) {
+ b = new DataView(b.buffer, b.byteOffset, b.byteLength);
+ }
return stdTimingSafeEqual(a, b);
};
diff --git a/ext/node/polyfills/internal_binding/http_parser.ts b/ext/node/polyfills/internal_binding/http_parser.ts
new file mode 100644
index 000000000..bad10d985
--- /dev/null
+++ b/ext/node/polyfills/internal_binding/http_parser.ts
@@ -0,0 +1,160 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+import { primordials } from "ext:core/mod.js";
+import { AsyncWrap } from "ext:deno_node/internal_binding/async_wrap.ts";
+
+const {
+ ObjectDefineProperty,
+ ObjectEntries,
+ ObjectSetPrototypeOf,
+ SafeArrayIterator,
+} = primordials;
+
+export const methods = [
+ "DELETE",
+ "GET",
+ "HEAD",
+ "POST",
+ "PUT",
+ "CONNECT",
+ "OPTIONS",
+ "TRACE",
+ "COPY",
+ "LOCK",
+ "MKCOL",
+ "MOVE",
+ "PROPFIND",
+ "PROPPATCH",
+ "SEARCH",
+ "UNLOCK",
+ "BIND",
+ "REBIND",
+ "UNBIND",
+ "ACL",
+ "REPORT",
+ "MKACTIVITY",
+ "CHECKOUT",
+ "MERGE",
+ "M-SEARCH",
+ "NOTIFY",
+ "SUBSCRIBE",
+ "UNSUBSCRIBE",
+ "PATCH",
+ "PURGE",
+ "MKCALENDAR",
+ "LINK",
+ "UNLINK",
+ "SOURCE",
+ "QUERY",
+];
+
+export const allMethods = [
+ "DELETE",
+ "GET",
+ "HEAD",
+ "POST",
+ "PUT",
+ "CONNECT",
+ "OPTIONS",
+ "TRACE",
+ "COPY",
+ "LOCK",
+ "MKCOL",
+ "MOVE",
+ "PROPFIND",
+ "PROPPATCH",
+ "SEARCH",
+ "UNLOCK",
+ "BIND",
+ "REBIND",
+ "UNBIND",
+ "ACL",
+ "REPORT",
+ "MKACTIVITY",
+ "CHECKOUT",
+ "MERGE",
+ "M-SEARCH",
+ "NOTIFY",
+ "SUBSCRIBE",
+ "UNSUBSCRIBE",
+ "PATCH",
+ "PURGE",
+ "MKCALENDAR",
+ "LINK",
+ "UNLINK",
+ "SOURCE",
+ "PRI",
+ "DESCRIBE",
+ "ANNOUNCE",
+ "SETUP",
+ "PLAY",
+ "PAUSE",
+ "TEARDOWN",
+ "GET_PARAMETER",
+ "SET_PARAMETER",
+ "REDIRECT",
+ "RECORD",
+ "FLUSH",
+ "QUERY",
+];
+
+export function HTTPParser() {
+}
+
+ObjectSetPrototypeOf(HTTPParser.prototype, AsyncWrap.prototype);
+
+function defineProps(obj: object, props: Record<string, unknown>) {
+ for (const entry of new SafeArrayIterator(ObjectEntries(props))) {
+ ObjectDefineProperty(obj, entry[0], {
+ __proto__: null,
+ value: entry[1],
+ enumerable: true,
+ writable: true,
+ configurable: true,
+ });
+ }
+}
+
+defineProps(HTTPParser, {
+ REQUEST: 1,
+ RESPONSE: 2,
+ kOnMessageBegin: 0,
+ kOnHeaders: 1,
+ kOnHeadersComplete: 2,
+ kOnBody: 3,
+ kOnMessageComplete: 4,
+ kOnExecute: 5,
+ kOnTimeout: 6,
+ kLenientNone: 0,
+ kLenientHeaders: 1,
+ kLenientChunkedLength: 2,
+ kLenientKeepAlive: 4,
+ kLenientTransferEncoding: 8,
+ kLenientVersion: 16,
+ kLenientDataAfterClose: 32,
+ kLenientOptionalLFAfterCR: 64,
+ kLenientOptionalCRLFAfterChunk: 128,
+ kLenientOptionalCRBeforeLF: 256,
+ kLenientSpacesAfterChunkSize: 512,
+ kLenientAll: 1023,
+});
diff --git a/ext/node/polyfills/internal_binding/mod.ts b/ext/node/polyfills/internal_binding/mod.ts
index f2d7f55bc..ebbfc629f 100644
--- a/ext/node/polyfills/internal_binding/mod.ts
+++ b/ext/node/polyfills/internal_binding/mod.ts
@@ -17,6 +17,7 @@ import * as types from "ext:deno_node/internal_binding/types.ts";
import * as udpWrap from "ext:deno_node/internal_binding/udp_wrap.ts";
import * as util from "ext:deno_node/internal_binding/util.ts";
import * as uv from "ext:deno_node/internal_binding/uv.ts";
+import * as httpParser from "ext:deno_node/internal_binding/http_parser.ts";
const modules = {
"async_wrap": asyncWrap,
@@ -32,7 +33,7 @@ const modules = {
"fs_dir": {},
"fs_event_wrap": {},
"heap_utils": {},
- "http_parser": {},
+ "http_parser": httpParser,
icu: {},
inspector: {},
"js_stream": {},
diff --git a/ext/node/polyfills/internal_binding/tcp_wrap.ts b/ext/node/polyfills/internal_binding/tcp_wrap.ts
index 973a1d1c0..d9f1c5356 100644
--- a/ext/node/polyfills/internal_binding/tcp_wrap.ts
+++ b/ext/node/polyfills/internal_binding/tcp_wrap.ts
@@ -299,8 +299,10 @@ export class TCP extends ConnectionWrap {
* @param noDelay
* @return An error status code.
*/
- setNoDelay(_noDelay: boolean): number {
- // TODO(bnoordhuis) https://github.com/denoland/deno/pull/13103
+ setNoDelay(noDelay: boolean): number {
+ if (this[kStreamBaseField] && "setNoDelay" in this[kStreamBaseField]) {
+ this[kStreamBaseField].setNoDelay(noDelay);
+ }
return 0;
}
diff --git a/ext/node/polyfills/internal_binding/uv.ts b/ext/node/polyfills/internal_binding/uv.ts
index aa468a0a5..6cd70a7e8 100644
--- a/ext/node/polyfills/internal_binding/uv.ts
+++ b/ext/node/polyfills/internal_binding/uv.ts
@@ -530,10 +530,12 @@ export function mapSysErrnoToUvErrno(sysErrno: number): number {
export const UV_EAI_MEMORY = codeMap.get("EAI_MEMORY")!;
export const UV_EBADF = codeMap.get("EBADF")!;
+export const UV_ECANCELED = codeMap.get("ECANCELED")!;
export const UV_EEXIST = codeMap.get("EEXIST");
export const UV_EINVAL = codeMap.get("EINVAL")!;
export const UV_ENOENT = codeMap.get("ENOENT");
export const UV_ENOTSOCK = codeMap.get("ENOTSOCK")!;
+export const UV_ETIMEDOUT = codeMap.get("ETIMEDOUT")!;
export const UV_UNKNOWN = codeMap.get("UNKNOWN")!;
export function errname(errno: number): string {
diff --git a/ext/node/polyfills/net.ts b/ext/node/polyfills/net.ts
index 48e1d0de8..2b0112519 100644
--- a/ext/node/polyfills/net.ts
+++ b/ext/node/polyfills/net.ts
@@ -31,6 +31,7 @@ import {
isIP,
isIPv4,
isIPv6,
+ kReinitializeHandle,
normalizedArgsSymbol,
} from "ext:deno_node/internal/net.ts";
import { Duplex } from "node:stream";
@@ -50,9 +51,11 @@ import {
ERR_SERVER_ALREADY_LISTEN,
ERR_SERVER_NOT_RUNNING,
ERR_SOCKET_CLOSED,
+ ERR_SOCKET_CONNECTION_TIMEOUT,
errnoException,
exceptionWithHostPort,
genericNodeError,
+ NodeAggregateError,
uvExceptionWithHostPort,
} from "ext:deno_node/internal/errors.ts";
import type { ErrnoException } from "ext:deno_node/internal/errors.ts";
@@ -80,6 +83,7 @@ import { Buffer } from "node:buffer";
import type { LookupOneOptions } from "ext:deno_node/internal/dns/utils.ts";
import {
validateAbortSignal,
+ validateBoolean,
validateFunction,
validateInt32,
validateNumber,
@@ -100,13 +104,25 @@ import { ShutdownWrap } from "ext:deno_node/internal_binding/stream_wrap.ts";
import { assert } from "ext:deno_node/_util/asserts.ts";
import { isWindows } from "ext:deno_node/_util/os.ts";
import { ADDRCONFIG, lookup as dnsLookup } from "node:dns";
-import { codeMap } from "ext:deno_node/internal_binding/uv.ts";
+import {
+ codeMap,
+ UV_ECANCELED,
+ UV_ETIMEDOUT,
+} from "ext:deno_node/internal_binding/uv.ts";
import { guessHandleType } from "ext:deno_node/internal_binding/util.ts";
import { debuglog } from "ext:deno_node/internal/util/debuglog.ts";
import type { DuplexOptions } from "ext:deno_node/_stream.d.ts";
import type { BufferEncoding } from "ext:deno_node/_global.d.ts";
import type { Abortable } from "ext:deno_node/_events.d.ts";
import { channel } from "node:diagnostics_channel";
+import { primordials } from "ext:core/mod.js";
+
+const {
+ ArrayPrototypeIncludes,
+ ArrayPrototypePush,
+ FunctionPrototypeBind,
+ MathMax,
+} = primordials;
let debug = debuglog("net", (fn) => {
debug = fn;
@@ -120,6 +136,9 @@ const kBytesWritten = Symbol("kBytesWritten");
const DEFAULT_IPV4_ADDR = "0.0.0.0";
const DEFAULT_IPV6_ADDR = "::";
+let autoSelectFamilyDefault = true;
+let autoSelectFamilyAttemptTimeoutDefault = 250;
+
type Handle = TCP | Pipe;
interface HandleOptions {
@@ -214,6 +233,8 @@ interface TcpSocketConnectOptions extends ConnectOptions {
hints?: number;
family?: number;
lookup?: LookupFunction;
+ autoSelectFamily?: boolean | undefined;
+ autoSelectFamilyAttemptTimeout?: number | undefined;
}
interface IpcSocketConnectOptions extends ConnectOptions {
@@ -316,12 +337,6 @@ export function _normalizeArgs(args: unknown[]): NormalizedArgs {
return arr;
}
-function _isTCPConnectWrap(
- req: TCPConnectWrap | PipeConnectWrap,
-): req is TCPConnectWrap {
- return "localAddress" in req && "localPort" in req;
-}
-
function _afterConnect(
status: number,
// deno-lint-ignore no-explicit-any
@@ -372,7 +387,7 @@ function _afterConnect(
socket.connecting = false;
let details;
- if (_isTCPConnectWrap(req)) {
+ if (req.localAddress && req.localPort) {
details = req.localAddress + ":" + req.localPort;
}
@@ -384,7 +399,7 @@ function _afterConnect(
details,
);
- if (_isTCPConnectWrap(req)) {
+ if (details) {
ex.localAddress = req.localAddress;
ex.localPort = req.localPort;
}
@@ -393,6 +408,107 @@ function _afterConnect(
}
}
+function _createConnectionError(req, status) {
+ let details;
+
+ if (req.localAddress && req.localPort) {
+ details = req.localAddress + ":" + req.localPort;
+ }
+
+ const ex = exceptionWithHostPort(
+ status,
+ "connect",
+ req.address,
+ req.port,
+ details,
+ );
+ if (details) {
+ ex.localAddress = req.localAddress;
+ ex.localPort = req.localPort;
+ }
+
+ return ex;
+}
+
+function _afterConnectMultiple(
+ context,
+ current,
+ status,
+ handle,
+ req,
+ readable,
+ writable,
+) {
+ debug(
+ "connect/multiple: connection attempt to %s:%s completed with status %s",
+ req.address,
+ req.port,
+ status,
+ );
+
+ // Make sure another connection is not spawned
+ clearTimeout(context[kTimeout]);
+
+ // One of the connections has completed and was correctly dispatched, but after the timeout; ignore this one
+ if (status === 0 && current !== context.current - 1) {
+ debug(
+ "connect/multiple: ignoring successful but timedout connection to %s:%s",
+ req.address,
+ req.port,
+ );
+ handle.close();
+ return;
+ }
+
+ const self = context.socket;
+
+ // Some error occurred, add to the list of exceptions
+ if (status !== 0) {
+ const ex = _createConnectionError(req, status);
+ ArrayPrototypePush(context.errors, ex);
+
+ self.emit(
+ "connectionAttemptFailed",
+ req.address,
+ req.port,
+ req.addressType,
+ ex,
+ );
+
+ // Try the next address, unless we were aborted
+ if (context.socket.connecting) {
+ _internalConnectMultiple(context, status === UV_ECANCELED);
+ }
+
+ return;
+ }
+
+ _afterConnect(status, self._handle, req, readable, writable);
+}
+
+function _internalConnectMultipleTimeout(context, req, handle) {
+ debug(
+ "connect/multiple: connection to %s:%s timed out",
+ req.address,
+ req.port,
+ );
+ context.socket.emit(
+ "connectionAttemptTimeout",
+ req.address,
+ req.port,
+ req.addressType,
+ );
+
+ req.oncomplete = undefined;
+ ArrayPrototypePush(context.errors, _createConnectionError(req, UV_ETIMEDOUT));
+ handle.close();
+
+ // Try the next address, unless we were aborted
+ if (context.socket.connecting) {
+ _internalConnectMultiple(context);
+ }
+}
+
function _checkBindError(err: number, port: number, handle: TCP) {
// EADDRINUSE may not be reported until we call `listen()` or `connect()`.
// To complicate matters, a failed `bind()` followed by `listen()` or `connect()`
@@ -495,6 +611,131 @@ function _internalConnect(
}
}
+function _internalConnectMultiple(context, canceled?: boolean) {
+ clearTimeout(context[kTimeout]);
+ const self = context.socket;
+
+ // We were requested to abort. Stop all operations
+ if (self._aborted) {
+ return;
+ }
+
+ // All connections have been tried without success, destroy with error
+ if (canceled || context.current === context.addresses.length) {
+ if (context.errors.length === 0) {
+ self.destroy(new ERR_SOCKET_CONNECTION_TIMEOUT());
+ return;
+ }
+
+ self.destroy(new NodeAggregateError(context.errors));
+ return;
+ }
+
+ assert(self.connecting);
+
+ const current = context.current++;
+
+ if (current > 0) {
+ self[kReinitializeHandle](new TCP(TCPConstants.SOCKET));
+ }
+
+ const { localPort, port, flags } = context;
+ const { address, family: addressType } = context.addresses[current];
+ let localAddress;
+ let err;
+
+ if (localPort) {
+ if (addressType === 4) {
+ localAddress = DEFAULT_IPV4_ADDR;
+ err = self._handle.bind(localAddress, localPort);
+ } else { // addressType === 6
+ localAddress = DEFAULT_IPV6_ADDR;
+ err = self._handle.bind6(localAddress, localPort, flags);
+ }
+
+ debug(
+ "connect/multiple: binding to localAddress: %s and localPort: %d (addressType: %d)",
+ localAddress,
+ localPort,
+ addressType,
+ );
+
+ err = _checkBindError(err, localPort, self._handle);
+ if (err) {
+ ArrayPrototypePush(
+ context.errors,
+ exceptionWithHostPort(err, "bind", localAddress, localPort),
+ );
+ _internalConnectMultiple(context);
+ return;
+ }
+ }
+
+ debug(
+ "connect/multiple: attempting to connect to %s:%d (addressType: %d)",
+ address,
+ port,
+ addressType,
+ );
+ self.emit("connectionAttempt", address, port, addressType);
+
+ const req = new TCPConnectWrap();
+ req.oncomplete = FunctionPrototypeBind(
+ _afterConnectMultiple,
+ undefined,
+ context,
+ current,
+ );
+ req.address = address;
+ req.port = port;
+ req.localAddress = localAddress;
+ req.localPort = localPort;
+ req.addressType = addressType;
+
+ ArrayPrototypePush(
+ self.autoSelectFamilyAttemptedAddresses,
+ `${address}:${port}`,
+ );
+
+ if (addressType === 4) {
+ err = self._handle.connect(req, address, port);
+ } else {
+ err = self._handle.connect6(req, address, port);
+ }
+
+ if (err) {
+ const sockname = self._getsockname();
+ let details;
+
+ if (sockname) {
+ details = sockname.address + ":" + sockname.port;
+ }
+
+ const ex = exceptionWithHostPort(err, "connect", address, port, details);
+ ArrayPrototypePush(context.errors, ex);
+
+ self.emit("connectionAttemptFailed", address, port, addressType, ex);
+ _internalConnectMultiple(context);
+ return;
+ }
+
+ if (current < context.addresses.length - 1) {
+ debug(
+ "connect/multiple: setting the attempt timeout to %d ms",
+ context.timeout,
+ );
+
+ // If the attempt has not returned an error, start the connection timer
+ context[kTimeout] = setTimeout(
+ _internalConnectMultipleTimeout,
+ context.timeout,
+ context,
+ req,
+ self._handle,
+ );
+ }
+}
+
// Provide a better error message when we call end() as a result
// of the other side sending a FIN. The standard "write after end"
// is overly vague, and makes it seem like the user's code is to blame.
@@ -597,7 +838,7 @@ function _lookupAndConnect(
) {
const { localAddress, localPort } = options;
const host = options.host || "localhost";
- let { port } = options;
+ let { port, autoSelectFamilyAttemptTimeout, autoSelectFamily } = options;
if (localAddress && !isIP(localAddress)) {
throw new ERR_INVALID_IP_ADDRESS(localAddress);
@@ -621,6 +862,22 @@ function _lookupAndConnect(
port |= 0;
+ if (autoSelectFamily != null) {
+ validateBoolean(autoSelectFamily, "options.autoSelectFamily");
+ } else {
+ autoSelectFamily = autoSelectFamilyDefault;
+ }
+
+ if (autoSelectFamilyAttemptTimeout !== undefined) {
+ validateInt32(autoSelectFamilyAttemptTimeout);
+
+ if (autoSelectFamilyAttemptTimeout < 10) {
+ autoSelectFamilyAttemptTimeout = 10;
+ }
+ } else {
+ autoSelectFamilyAttemptTimeout = autoSelectFamilyAttemptTimeoutDefault;
+ }
+
// If host is an IP, skip performing a lookup
const addressType = isIP(host);
if (addressType) {
@@ -649,6 +906,7 @@ function _lookupAndConnect(
const dnsOpts = {
family: options.family,
hints: options.hints || 0,
+ all: false,
};
if (
@@ -665,6 +923,31 @@ function _lookupAndConnect(
self._host = host;
const lookup = options.lookup || dnsLookup;
+ if (
+ dnsOpts.family !== 4 && dnsOpts.family !== 6 && !localAddress &&
+ autoSelectFamily
+ ) {
+ debug("connect: autodetecting");
+
+ dnsOpts.all = true;
+ defaultTriggerAsyncIdScope(self[asyncIdSymbol], function () {
+ _lookupAndConnectMultiple(
+ self,
+ asyncIdSymbol,
+ lookup,
+ host,
+ options,
+ dnsOpts,
+ port,
+ localAddress,
+ localPort,
+ autoSelectFamilyAttemptTimeout,
+ );
+ });
+
+ return;
+ }
+
defaultTriggerAsyncIdScope(self[asyncIdSymbol], function () {
lookup(
host,
@@ -719,6 +1002,143 @@ function _lookupAndConnect(
});
}
+function _lookupAndConnectMultiple(
+ self: Socket,
+ asyncIdSymbol: number,
+ // deno-lint-ignore no-explicit-any
+ lookup: any,
+ host: string,
+ options: TcpSocketConnectOptions,
+ dnsopts,
+ port: number,
+ localAddress: string,
+ localPort: number,
+ timeout: number | undefined,
+) {
+ defaultTriggerAsyncIdScope(self[asyncIdSymbol], function emitLookup() {
+ lookup(host, dnsopts, function emitLookup(err, addresses) {
+ // It's possible we were destroyed while looking this up.
+ // XXX it would be great if we could cancel the promise returned by
+ // the lookup.
+ if (!self.connecting) {
+ return;
+ } else if (err) {
+ self.emit("lookup", err, undefined, undefined, host);
+
+ // net.createConnection() creates a net.Socket object and immediately
+ // calls net.Socket.connect() on it (that's us). There are no event
+ // listeners registered yet so defer the error event to the next tick.
+ nextTick(_connectErrorNT, self, err);
+ return;
+ }
+
+ // Filter addresses by only keeping the ones which are either IPv4 or IPv6.
+ // The first valid address determines which group has preference on the
+ // alternate family sorting which happens later.
+ const validAddresses = [[], []];
+ const validIps = [[], []];
+ let destinations;
+ for (let i = 0, l = addresses.length; i < l; i++) {
+ const address = addresses[i];
+ const { address: ip, family: addressType } = address;
+ self.emit("lookup", err, ip, addressType, host);
+ // It's possible we were destroyed while looking this up.
+ if (!self.connecting) {
+ return;
+ }
+ if (isIP(ip) && (addressType === 4 || addressType === 6)) {
+ destinations ||= addressType === 6 ? { 6: 0, 4: 1 } : { 4: 0, 6: 1 };
+
+ const destination = destinations[addressType];
+
+ // Only try an address once
+ if (!ArrayPrototypeIncludes(validIps[destination], ip)) {
+ ArrayPrototypePush(validAddresses[destination], address);
+ ArrayPrototypePush(validIps[destination], ip);
+ }
+ }
+ }
+
+ // When no AAAA or A records are available, fail on the first one
+ if (!validAddresses[0].length && !validAddresses[1].length) {
+ const { address: firstIp, family: firstAddressType } = addresses[0];
+
+ if (!isIP(firstIp)) {
+ err = new ERR_INVALID_IP_ADDRESS(firstIp);
+ nextTick(_connectErrorNT, self, err);
+ } else if (firstAddressType !== 4 && firstAddressType !== 6) {
+ err = new ERR_INVALID_ADDRESS_FAMILY(
+ firstAddressType,
+ options.host,
+ options.port,
+ );
+ nextTick(_connectErrorNT, self, err);
+ }
+
+ return;
+ }
+
+ // Sort addresses alternating families
+ const toAttempt = [];
+ for (
+ let i = 0,
+ l = MathMax(validAddresses[0].length, validAddresses[1].length);
+ i < l;
+ i++
+ ) {
+ if (i in validAddresses[0]) {
+ ArrayPrototypePush(toAttempt, validAddresses[0][i]);
+ }
+ if (i in validAddresses[1]) {
+ ArrayPrototypePush(toAttempt, validAddresses[1][i]);
+ }
+ }
+
+ if (toAttempt.length === 1) {
+ debug(
+ "connect/multiple: only one address found, switching back to single connection",
+ );
+ const { address: ip, family: addressType } = toAttempt[0];
+
+ self._unrefTimer();
+ defaultTriggerAsyncIdScope(
+ self[asyncIdSymbol],
+ _internalConnect,
+ self,
+ ip,
+ port,
+ addressType,
+ localAddress,
+ localPort,
+ );
+
+ return;
+ }
+
+ self.autoSelectFamilyAttemptedAddresses = [];
+ debug("connect/multiple: will try the following addresses", toAttempt);
+
+ const context = {
+ socket: self,
+ addresses: toAttempt,
+ current: 0,
+ port,
+ localPort,
+ timeout,
+ [kTimeout]: null,
+ errors: [],
+ };
+
+ self._unrefTimer();
+ defaultTriggerAsyncIdScope(
+ self[asyncIdSymbol],
+ _internalConnectMultiple,
+ context,
+ );
+ });
+ });
+}
+
function _afterShutdown(this: ShutdownWrap<TCP>) {
// deno-lint-ignore no-explicit-any
const self: any = this.handle[ownerSymbol];
@@ -777,6 +1197,7 @@ export class Socket extends Duplex {
_host: string | null = null;
// deno-lint-ignore no-explicit-any
_parent: any = null;
+ autoSelectFamilyAttemptedAddresses: AddressInfo[] | undefined = undefined;
constructor(options: SocketOptions | number) {
if (typeof options === "number") {
@@ -1546,6 +1967,16 @@ export class Socket extends Duplex {
set _handle(v: Handle | null) {
this[kHandle] = v;
}
+
+ // deno-lint-ignore no-explicit-any
+ [kReinitializeHandle](handle: any) {
+ this._handle?.close();
+
+ this._handle = handle;
+ this._handle[ownerSymbol] = this;
+
+ _initSocketHandle(this);
+ }
}
export const Stream = Socket;
@@ -1593,6 +2024,33 @@ export function connect(...args: unknown[]) {
export const createConnection = connect;
+/** https://docs.deno.com/api/node/net/#namespace_getdefaultautoselectfamily */
+export function getDefaultAutoSelectFamily() {
+ return autoSelectFamilyDefault;
+}
+
+/** https://docs.deno.com/api/node/net/#namespace_setdefaultautoselectfamily */
+export function setDefaultAutoSelectFamily(value: boolean) {
+ validateBoolean(value, "value");
+ autoSelectFamilyDefault = value;
+}
+
+/** https://docs.deno.com/api/node/net/#namespace_getdefaultautoselectfamilyattempttimeout */
+export function getDefaultAutoSelectFamilyAttemptTimeout() {
+ return autoSelectFamilyAttemptTimeoutDefault;
+}
+
+/** https://docs.deno.com/api/node/net/#namespace_setdefaultautoselectfamilyattempttimeout */
+export function setDefaultAutoSelectFamilyAttemptTimeout(value: number) {
+ validateInt32(value, "value", 1);
+
+ if (value < 10) {
+ value = 10;
+ }
+
+ autoSelectFamilyAttemptTimeoutDefault = value;
+}
+
export interface ListenOptions extends Abortable {
fd?: number;
port?: number | undefined;
@@ -2478,15 +2936,19 @@ export { BlockList, isIP, isIPv4, isIPv6, SocketAddress };
export default {
_createServerHandle,
_normalizeArgs,
- isIP,
- isIPv4,
- isIPv6,
BlockList,
- SocketAddress,
connect,
createConnection,
createServer,
+ getDefaultAutoSelectFamily,
+ getDefaultAutoSelectFamilyAttemptTimeout,
+ isIP,
+ isIPv4,
+ isIPv6,
Server,
+ setDefaultAutoSelectFamily,
+ setDefaultAutoSelectFamilyAttemptTimeout,
Socket,
+ SocketAddress,
Stream,
};
diff --git a/ext/node/polyfills/os.ts b/ext/node/polyfills/os.ts
index e47e8679e..edc89ed2c 100644
--- a/ext/node/polyfills/os.ts
+++ b/ext/node/polyfills/os.ts
@@ -28,16 +28,17 @@ import {
op_homedir,
op_node_os_get_priority,
op_node_os_set_priority,
- op_node_os_username,
+ op_node_os_user_info,
} from "ext:core/ops";
import { validateIntegerRange } from "ext:deno_node/_utils.ts";
import process from "node:process";
import { isWindows } from "ext:deno_node/_util/os.ts";
-import { ERR_OS_NO_HOMEDIR } from "ext:deno_node/internal/errors.ts";
import { os } from "ext:deno_node/internal_binding/constants.ts";
import { osUptime } from "ext:runtime/30_os.js";
import { Buffer } from "ext:deno_node/internal/buffer.mjs";
+import { primordials } from "ext:core/mod.js";
+const { StringPrototypeEndsWith, StringPrototypeSlice } = primordials;
export const constants = os;
@@ -136,6 +137,8 @@ export function arch(): string {
(uptime as any)[Symbol.toPrimitive] = (): number => uptime();
// deno-lint-ignore no-explicit-any
(machine as any)[Symbol.toPrimitive] = (): string => machine();
+// deno-lint-ignore no-explicit-any
+(tmpdir as any)[Symbol.toPrimitive] = (): string | null => tmpdir();
export function cpus(): CPUCoreInfo[] {
return op_cpus();
@@ -268,26 +271,27 @@ export function setPriority(pid: number, priority?: number) {
export function tmpdir(): string | null {
/* This follows the node js implementation, but has a few
differences:
- * On windows, if none of the environment variables are defined,
- we return null.
- * On unix we use a plain Deno.env.get, instead of safeGetenv,
+ * We use a plain Deno.env.get, instead of safeGetenv,
which special cases setuid binaries.
- * Node removes a single trailing / or \, we remove all.
*/
if (isWindows) {
- const temp = Deno.env.get("TEMP") || Deno.env.get("TMP");
- if (temp) {
- return temp.replace(/(?<!:)[/\\]*$/, "");
- }
- const base = Deno.env.get("SYSTEMROOT") || Deno.env.get("WINDIR");
- if (base) {
- return base + "\\temp";
+ let temp = Deno.env.get("TEMP") || Deno.env.get("TMP") ||
+ (Deno.env.get("SystemRoot") || Deno.env.get("windir")) + "\\temp";
+ if (
+ temp.length > 1 && StringPrototypeEndsWith(temp, "\\") &&
+ !StringPrototypeEndsWith(temp, ":\\")
+ ) {
+ temp = StringPrototypeSlice(temp, 0, -1);
}
- return null;
+
+ return temp;
} else { // !isWindows
- const temp = Deno.env.get("TMPDIR") || Deno.env.get("TMP") ||
+ let temp = Deno.env.get("TMPDIR") || Deno.env.get("TMP") ||
Deno.env.get("TEMP") || "/tmp";
- return temp.replace(/(?<!^)\/*$/, "");
+ if (temp.length > 1 && StringPrototypeEndsWith(temp, "/")) {
+ temp = StringPrototypeSlice(temp, 0, -1);
+ }
+ return temp;
}
}
@@ -320,7 +324,6 @@ export function uptime(): number {
return osUptime();
}
-/** Not yet implemented */
export function userInfo(
options: UserInfoOptions = { encoding: "utf-8" },
): UserInfo {
@@ -331,20 +334,10 @@ export function userInfo(
uid = -1;
gid = -1;
}
-
- // TODO(@crowlKats): figure out how to do this correctly:
- // The value of homedir returned by os.userInfo() is provided by the operating system.
- // This differs from the result of os.homedir(), which queries environment
- // variables for the home directory before falling back to the operating system response.
- let _homedir = homedir();
- if (!_homedir) {
- throw new ERR_OS_NO_HOMEDIR();
- }
- let shell = isWindows ? null : (Deno.env.get("SHELL") || null);
- let username = op_node_os_username();
+ let { username, homedir, shell } = op_node_os_user_info(uid);
if (options?.encoding === "buffer") {
- _homedir = _homedir ? Buffer.from(_homedir) : _homedir;
+ homedir = homedir ? Buffer.from(homedir) : homedir;
shell = shell ? Buffer.from(shell) : shell;
username = Buffer.from(username);
}
@@ -352,7 +345,7 @@ export function userInfo(
return {
uid,
gid,
- homedir: _homedir,
+ homedir,
shell,
username,
};
diff --git a/ext/node/polyfills/perf_hooks.ts b/ext/node/polyfills/perf_hooks.ts
index d92b925b5..ec76b3ce2 100644
--- a/ext/node/polyfills/perf_hooks.ts
+++ b/ext/node/polyfills/perf_hooks.ts
@@ -8,6 +8,7 @@ import {
performance as shimPerformance,
PerformanceEntry,
} from "ext:deno_web/15_performance.js";
+import { EldHistogram } from "ext:core/ops";
class PerformanceObserver {
static supportedEntryTypes: string[] = [];
@@ -89,10 +90,11 @@ const performance:
) => shimPerformance.dispatchEvent(...args),
};
-const monitorEventLoopDelay = () =>
- notImplemented(
- "monitorEventLoopDelay from performance",
- );
+function monitorEventLoopDelay(options = {}) {
+ const { resolution = 10 } = options;
+
+ return new EldHistogram(resolution);
+}
export default {
performance,
diff --git a/ext/node/polyfills/process.ts b/ext/node/polyfills/process.ts
index 3dc6ce61a..647376d5c 100644
--- a/ext/node/polyfills/process.ts
+++ b/ext/node/polyfills/process.ts
@@ -15,7 +15,7 @@ import {
import { warnNotImplemented } from "ext:deno_node/_utils.ts";
import { EventEmitter } from "node:events";
-import Module from "node:module";
+import Module, { getBuiltinModule } from "node:module";
import { report } from "ext:deno_node/internal/process/report.ts";
import { validateString } from "ext:deno_node/internal/validators.mjs";
import {
@@ -38,7 +38,15 @@ import {
versions,
} from "ext:deno_node/_process/process.ts";
import { _exiting } from "ext:deno_node/_process/exiting.ts";
-export { _nextTick as nextTick, chdir, cwd, env, version, versions };
+export {
+ _nextTick as nextTick,
+ chdir,
+ cwd,
+ env,
+ getBuiltinModule,
+ version,
+ versions,
+};
import {
createWritableStdioStream,
initStdin,
@@ -520,9 +528,7 @@ Process.prototype.on = function (
} else if (
event !== "SIGBREAK" && event !== "SIGINT" && Deno.build.os === "windows"
) {
- // Ignores all signals except SIGBREAK and SIGINT on windows.
- // deno-lint-ignore no-console
- console.warn(`Ignoring signal "${event}" on Windows`);
+ // TODO(#26331): Ignores all signals except SIGBREAK and SIGINT on windows.
} else {
EventEmitter.prototype.on.call(this, event, listener);
Deno.addSignalListener(event as Deno.Signal, listener);
@@ -730,6 +736,8 @@ Process.prototype.getegid = getegid;
/** This method is removed on Windows */
Process.prototype.geteuid = geteuid;
+Process.prototype.getBuiltinModule = getBuiltinModule;
+
// TODO(kt3k): Implement this when we added -e option to node compat mode
Process.prototype._eval = undefined;
@@ -911,7 +919,7 @@ Object.defineProperty(argv, "1", {
if (Deno.mainModule?.startsWith("file:")) {
return pathFromURL(new URL(Deno.mainModule));
} else {
- return join(Deno.cwd(), "$deno$node.js");
+ return join(Deno.cwd(), "$deno$node.mjs");
}
},
});
diff --git a/ext/node/polyfills/timers.ts b/ext/node/polyfills/timers.ts
index 02f69466e..e826416ed 100644
--- a/ext/node/polyfills/timers.ts
+++ b/ext/node/polyfills/timers.ts
@@ -15,10 +15,16 @@ import {
setUnrefTimeout,
Timeout,
} from "ext:deno_node/internal/timers.mjs";
-import { validateFunction } from "ext:deno_node/internal/validators.mjs";
+import {
+ validateAbortSignal,
+ validateBoolean,
+ validateFunction,
+ validateObject,
+} from "ext:deno_node/internal/validators.mjs";
import { promisify } from "ext:deno_node/internal/util.mjs";
export { setUnrefTimeout } from "ext:deno_node/internal/timers.mjs";
import * as timers from "ext:deno_web/02_timers.js";
+import { AbortError } from "ext:deno_node/internal/errors.ts";
const clearTimeout_ = timers.clearTimeout;
const clearInterval_ = timers.clearInterval;
@@ -89,10 +95,88 @@ export function clearImmediate(immediate: Immediate) {
clearTimeout_(immediate._immediateId);
}
+async function* setIntervalAsync(
+ after: number,
+ value: number,
+ options: { signal?: AbortSignal; ref?: boolean } = { __proto__: null },
+) {
+ validateObject(options, "options");
+
+ if (typeof options?.signal !== "undefined") {
+ validateAbortSignal(options.signal, "options.signal");
+ }
+
+ if (typeof options?.ref !== "undefined") {
+ validateBoolean(options.ref, "options.ref");
+ }
+
+ const { signal, ref = true } = options;
+
+ if (signal?.aborted) {
+ throw new AbortError(undefined, { cause: signal?.reason });
+ }
+
+ let onCancel: (() => void) | undefined = undefined;
+ let interval: Timeout | undefined = undefined;
+ try {
+ let notYielded = 0;
+ let callback: ((value?: object) => void) | undefined = undefined;
+ let rejectCallback: ((message?: string) => void) | undefined = undefined;
+ interval = new Timeout(
+ () => {
+ notYielded++;
+ if (callback) {
+ callback();
+ callback = undefined;
+ rejectCallback = undefined;
+ }
+ },
+ after,
+ [],
+ true,
+ ref,
+ );
+ if (signal) {
+ onCancel = () => {
+ clearInterval(interval);
+ if (rejectCallback) {
+ rejectCallback(signal.reason);
+ callback = undefined;
+ rejectCallback = undefined;
+ }
+ };
+ signal.addEventListener("abort", onCancel, { once: true });
+ }
+ while (!signal?.aborted) {
+ if (notYielded === 0) {
+ await new Promise((resolve: () => void, reject: () => void) => {
+ callback = resolve;
+ rejectCallback = reject;
+ });
+ }
+ for (; notYielded > 0; notYielded--) {
+ yield value;
+ }
+ }
+ } catch (error) {
+ if (signal?.aborted) {
+ throw new AbortError(undefined, { cause: signal?.reason });
+ }
+ throw error;
+ } finally {
+ if (interval) {
+ clearInterval(interval);
+ }
+ if (onCancel) {
+ signal?.removeEventListener("abort", onCancel);
+ }
+ }
+}
+
export const promises = {
setTimeout: promisify(setTimeout),
setImmediate: promisify(setImmediate),
- setInterval: promisify(setInterval),
+ setInterval: setIntervalAsync,
};
promises.scheduler = {
diff --git a/ext/node/polyfills/vm.js b/ext/node/polyfills/vm.js
index 183ddad2f..b64c847c5 100644
--- a/ext/node/polyfills/vm.js
+++ b/ext/node/polyfills/vm.js
@@ -182,6 +182,7 @@ function getContextOptions(options) {
let defaultContextNameIndex = 1;
export function createContext(
+ // deno-lint-ignore prefer-primordials
contextObject = {},
options = { __proto__: null },
) {
diff --git a/ext/node/polyfills/zlib.ts b/ext/node/polyfills/zlib.ts
index 3fe5f8bbd..6e5d02b5b 100644
--- a/ext/node/polyfills/zlib.ts
+++ b/ext/node/polyfills/zlib.ts
@@ -40,6 +40,58 @@ import {
createBrotliCompress,
createBrotliDecompress,
} from "ext:deno_node/_brotli.js";
+import { ERR_INVALID_ARG_TYPE } from "ext:deno_node/internal/errors.ts";
+import { validateUint32 } from "ext:deno_node/internal/validators.mjs";
+import { op_zlib_crc32 } from "ext:core/ops";
+import { core, primordials } from "ext:core/mod.js";
+import { TextEncoder } from "ext:deno_web/08_text_encoding.js";
+const {
+ Uint8Array,
+ TypedArrayPrototypeGetBuffer,
+ TypedArrayPrototypeGetByteLength,
+ TypedArrayPrototypeGetByteOffset,
+ DataViewPrototypeGetBuffer,
+ DataViewPrototypeGetByteLength,
+ DataViewPrototypeGetByteOffset,
+} = primordials;
+const { isTypedArray, isDataView } = core;
+
+const enc = new TextEncoder();
+const toU8 = (input) => {
+ if (typeof input === "string") {
+ return enc.encode(input);
+ }
+
+ if (isTypedArray(input)) {
+ return new Uint8Array(
+ TypedArrayPrototypeGetBuffer(input),
+ TypedArrayPrototypeGetByteOffset(input),
+ TypedArrayPrototypeGetByteLength(input),
+ );
+ } else if (isDataView(input)) {
+ return new Uint8Array(
+ DataViewPrototypeGetBuffer(input),
+ DataViewPrototypeGetByteOffset(input),
+ DataViewPrototypeGetByteLength(input),
+ );
+ }
+
+ return input;
+};
+
+export function crc32(data, value = 0) {
+ if (typeof data !== "string" && !isArrayBufferView(data)) {
+ throw new ERR_INVALID_ARG_TYPE("data", [
+ "Buffer",
+ "TypedArray",
+ "DataView",
+ "string",
+ ], data);
+ }
+ validateUint32(value, "value");
+
+ return op_zlib_crc32(toU8(data), value);
+}
export class Options {
constructor() {
@@ -87,6 +139,7 @@ export default {
BrotliOptions,
codes,
constants,
+ crc32,
createBrotliCompress,
createBrotliDecompress,
createDeflate,