summary refs log tree commit diff
path: root/ext
diff options
context:
space:
mode:
authorLuca Casonato <hello@lcas.dev>2024-06-24 11:47:12 +0200
committerGitHub <noreply@github.com>2024-06-24 11:47:12 +0200
commit1e8a6b94b1dcf98a2ae4de97b3e98e7b3e4e8f7f (patch)
tree89ae2bc343dea6bf17ca9d512ea80b51540347ca /ext
parentff535061077d2b67e20154a7dfefe8ca92502c5a (diff)
fix(ext/node): rewrite crypto.Hash (#24302)
Changes in this PR: - Added new fixed size hash algorithms (blake2b512, blake2s256, sha512-224, sha512-256, sha3-224, sha3-256, sha3-384, sha3-512, sm3) - Added variable size hash algorithms (the concept), with the algorithms shake128 and shake256 - Use cppgc instead of resources for the hasher - Enable Node's crypto.Hash tests and fix found bugs
Diffstat (limited to 'ext')
-rw-r--r--ext/node/Cargo.toml3
-rw-r--r--ext/node/ops/crypto/digest.rs290
-rw-r--r--ext/node/ops/crypto/mod.rs73
-rw-r--r--ext/node/polyfills/internal/crypto/hash.ts232
4 files changed, 394 insertions, 204 deletions
diff --git a/ext/node/Cargo.toml b/ext/node/Cargo.toml
index 2a6fd758d..8b5895bc7 100644
--- a/ext/node/Cargo.toml
+++ b/ext/node/Cargo.toml
@@ -17,6 +17,7 @@ path = "lib.rs"
aead-gcm-stream = "0.1"
aes.workspace = true
async-trait.workspace = true
+blake2 = "0.10.6"
brotli.workspace = true
bytes.workspace = true
cbc.workspace = true
@@ -70,8 +71,10 @@ sec1 = "0.7"
serde = "1.0.149"
sha1.workspace = true
sha2.workspace = true
+sha3 = "0.10.8"
signature.workspace = true
simd-json = "0.13.4"
+sm3 = "0.4.2"
spki.workspace = true
tokio.workspace = true
url.workspace = true
diff --git a/ext/node/ops/crypto/digest.rs b/ext/node/ops/crypto/digest.rs
index 25bb0ab98..588ea7425 100644
--- a/ext/node/ops/crypto/digest.rs
+++ b/ext/node/ops/crypto/digest.rs
@@ -1,107 +1,293 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-use deno_core::error::type_error;
+use deno_core::error::generic_error;
use deno_core::error::AnyError;
-use deno_core::Resource;
+use deno_core::GcResource;
use digest::Digest;
use digest::DynDigest;
-use std::borrow::Cow;
+use digest::ExtendableOutput;
+use digest::Update;
use std::cell::RefCell;
use std::rc::Rc;
-pub enum Hash {
- Md4(Box<md4::Md4>),
- Md5(Box<md5::Md5>),
- Ripemd160(Box<ripemd::Ripemd160>),
- Sha1(Box<sha1::Sha1>),
- Sha224(Box<sha2::Sha224>),
- Sha256(Box<sha2::Sha256>),
- Sha384(Box<sha2::Sha384>),
- Sha512(Box<sha2::Sha512>),
+pub struct Hasher {
+ pub hash: Rc<RefCell<Option<Hash>>>,
}
-pub struct Context {
- pub hash: Rc<RefCell<Hash>>,
-}
+impl GcResource for Hasher {}
+
+impl Hasher {
+ pub fn new(
+ algorithm: &str,
+ output_length: Option<usize>,
+ ) -> Result<Self, AnyError> {
+ let hash = Hash::new(algorithm, output_length)?;
-impl Context {
- pub fn new(algorithm: &str) -> Result<Self, AnyError> {
Ok(Self {
- hash: Rc::new(RefCell::new(Hash::new(algorithm)?)),
+ hash: Rc::new(RefCell::new(Some(hash))),
})
}
- pub fn update(&self, data: &[u8]) {
- self.hash.borrow_mut().update(data);
+ pub fn update(&self, data: &[u8]) -> bool {
+ if let Some(hash) = self.hash.borrow_mut().as_mut() {
+ hash.update(data);
+ true
+ } else {
+ false
+ }
}
- pub fn digest(self) -> Result<Box<[u8]>, AnyError> {
- let hash = Rc::try_unwrap(self.hash)
- .map_err(|_| type_error("Hash context is already in use"))?;
-
- let hash = hash.into_inner();
- Ok(hash.digest_and_drop())
+ pub fn digest(&self) -> Option<Box<[u8]>> {
+ let hash = self.hash.borrow_mut().take()?;
+ Some(hash.digest_and_drop())
}
-}
-impl Clone for Context {
- fn clone(&self) -> Self {
- Self {
- hash: Rc::new(RefCell::new(self.hash.borrow().clone())),
- }
+ pub fn clone_inner(
+ &self,
+ output_length: Option<usize>,
+ ) -> Result<Option<Self>, AnyError> {
+ let hash = self.hash.borrow();
+ let Some(hash) = hash.as_ref() else {
+ return Ok(None);
+ };
+ let hash = hash.clone_hash(output_length)?;
+ Ok(Some(Self {
+ hash: Rc::new(RefCell::new(Some(hash))),
+ }))
}
}
-impl Resource for Context {
- fn name(&self) -> Cow<str> {
- "cryptoDigest".into()
- }
+pub enum Hash {
+ Blake2b512(Box<blake2::Blake2b512>),
+ Blake2s256(Box<blake2::Blake2s256>),
+
+ Md4(Box<md4::Md4>),
+ Md5(Box<md5::Md5>),
+
+ Ripemd160(Box<ripemd::Ripemd160>),
+
+ Sha1(Box<sha1::Sha1>),
+
+ Sha224(Box<sha2::Sha224>),
+ Sha256(Box<sha2::Sha256>),
+ Sha384(Box<sha2::Sha384>),
+ Sha512(Box<sha2::Sha512>),
+ Sha512_224(Box<sha2::Sha512_224>),
+ Sha512_256(Box<sha2::Sha512_256>),
+
+ Sha3_224(Box<sha3::Sha3_224>),
+ Sha3_256(Box<sha3::Sha3_256>),
+ Sha3_384(Box<sha3::Sha3_384>),
+ Sha3_512(Box<sha3::Sha3_512>),
+
+ Sm3(Box<sm3::Sm3>),
+
+ Shake128(Box<sha3::Shake128>, /* output_length: */ Option<usize>),
+ Shake256(Box<sha3::Shake256>, /* output_length: */ Option<usize>),
}
use Hash::*;
impl Hash {
- pub fn new(algorithm_name: &str) -> Result<Self, AnyError> {
- Ok(match algorithm_name {
+ pub fn new(
+ algorithm_name: &str,
+ output_length: Option<usize>,
+ ) -> Result<Self, AnyError> {
+ match algorithm_name {
+ "shake128" => return Ok(Shake128(Default::default(), output_length)),
+ "shake256" => return Ok(Shake256(Default::default(), output_length)),
+ _ => {}
+ }
+
+ let algorithm = match algorithm_name {
+ "blake2b512" => Blake2b512(Default::default()),
+ "blake2s256" => Blake2s256(Default::default()),
+
"md4" => Md4(Default::default()),
"md5" => Md5(Default::default()),
+
"ripemd160" => Ripemd160(Default::default()),
+
"sha1" => Sha1(Default::default()),
"sha224" => Sha224(Default::default()),
"sha256" => Sha256(Default::default()),
"sha384" => Sha384(Default::default()),
"sha512" => Sha512(Default::default()),
- _ => return Err(type_error("unsupported algorithm")),
- })
+ "sha512-224" => Sha512_224(Default::default()),
+ "sha512-256" => Sha512_256(Default::default()),
+
+ "sha3-224" => Sha3_224(Default::default()),
+ "sha3-256" => Sha3_256(Default::default()),
+ "sha3-384" => Sha3_384(Default::default()),
+ "sha3-512" => Sha3_512(Default::default()),
+
+ "sm3" => Sm3(Default::default()),
+
+ _ => {
+ return Err(generic_error(format!(
+ "Digest method not supported: {algorithm_name}"
+ )))
+ }
+ };
+ if let Some(length) = output_length {
+ if length != algorithm.output_length() {
+ return Err(generic_error(
+ "Output length mismatch for non-extendable algorithm",
+ ));
+ }
+ }
+ Ok(algorithm)
+ }
+
+ pub fn output_length(&self) -> usize {
+ match self {
+ Blake2b512(context) => context.output_size(),
+ Blake2s256(context) => context.output_size(),
+
+ Md4(context) => context.output_size(),
+ Md5(context) => context.output_size(),
+
+ Ripemd160(context) => context.output_size(),
+
+ Sha1(context) => context.output_size(),
+ Sha224(context) => context.output_size(),
+ Sha256(context) => context.output_size(),
+ Sha384(context) => context.output_size(),
+ Sha512(context) => context.output_size(),
+ Sha512_224(context) => context.output_size(),
+ Sha512_256(context) => context.output_size(),
+
+ Sha3_224(context) => context.output_size(),
+ Sha3_256(context) => context.output_size(),
+ Sha3_384(context) => context.output_size(),
+ Sha3_512(context) => context.output_size(),
+
+ Sm3(context) => context.output_size(),
+
+ Shake128(_, _) => unreachable!(
+ "output_length() should not be called on extendable algorithms"
+ ),
+ Shake256(_, _) => unreachable!(
+ "output_length() should not be called on extendable algorithms"
+ ),
+ }
}
pub fn update(&mut self, data: &[u8]) {
match self {
+ Blake2b512(context) => Digest::update(&mut **context, data),
+ Blake2s256(context) => Digest::update(&mut **context, data),
+
Md4(context) => Digest::update(&mut **context, data),
Md5(context) => Digest::update(&mut **context, data),
+
Ripemd160(context) => Digest::update(&mut **context, data),
+
Sha1(context) => Digest::update(&mut **context, data),
Sha224(context) => Digest::update(&mut **context, data),
Sha256(context) => Digest::update(&mut **context, data),
Sha384(context) => Digest::update(&mut **context, data),
Sha512(context) => Digest::update(&mut **context, data),
+ Sha512_224(context) => Digest::update(&mut **context, data),
+ Sha512_256(context) => Digest::update(&mut **context, data),
+
+ Sha3_224(context) => Digest::update(&mut **context, data),
+ Sha3_256(context) => Digest::update(&mut **context, data),
+ Sha3_384(context) => Digest::update(&mut **context, data),
+ Sha3_512(context) => Digest::update(&mut **context, data),
+
+ Sm3(context) => Digest::update(&mut **context, data),
+
+ Shake128(context, _) => Update::update(&mut **context, data),
+ Shake256(context, _) => Update::update(&mut **context, data),
};
}
pub fn digest_and_drop(self) -> Box<[u8]> {
match self {
+ Blake2b512(context) => context.finalize(),
+ Blake2s256(context) => context.finalize(),
+
Md4(context) => context.finalize(),
Md5(context) => context.finalize(),
+
Ripemd160(context) => context.finalize(),
+
Sha1(context) => context.finalize(),
Sha224(context) => context.finalize(),
Sha256(context) => context.finalize(),
Sha384(context) => context.finalize(),
Sha512(context) => context.finalize(),
+ Sha512_224(context) => context.finalize(),
+ Sha512_256(context) => context.finalize(),
+
+ Sha3_224(context) => context.finalize(),
+ Sha3_256(context) => context.finalize(),
+ Sha3_384(context) => context.finalize(),
+ Sha3_512(context) => context.finalize(),
+
+ Sm3(context) => context.finalize(),
+
+ // The default output lengths align with Node.js
+ Shake128(context, output_length) => {
+ context.finalize_boxed(output_length.unwrap_or(16))
+ }
+ Shake256(context, output_length) => {
+ context.finalize_boxed(output_length.unwrap_or(32))
+ }
+ }
+ }
+
+ pub fn clone_hash(
+ &self,
+ output_length: Option<usize>,
+ ) -> Result<Self, AnyError> {
+ let hash = match self {
+ Shake128(context, _) => {
+ return Ok(Shake128(context.clone(), output_length))
+ }
+ Shake256(context, _) => {
+ return Ok(Shake256(context.clone(), output_length))
+ }
+
+ Blake2b512(context) => Blake2b512(context.clone()),
+ Blake2s256(context) => Blake2s256(context.clone()),
+
+ Md4(context) => Md4(context.clone()),
+ Md5(context) => Md5(context.clone()),
+
+ Ripemd160(context) => Ripemd160(context.clone()),
+
+ Sha1(context) => Sha1(context.clone()),
+ Sha224(context) => Sha224(context.clone()),
+ Sha256(context) => Sha256(context.clone()),
+ Sha384(context) => Sha384(context.clone()),
+ Sha512(context) => Sha512(context.clone()),
+ Sha512_224(context) => Sha512_224(context.clone()),
+ Sha512_256(context) => Sha512_256(context.clone()),
+
+ Sha3_224(context) => Sha3_224(context.clone()),
+ Sha3_256(context) => Sha3_256(context.clone()),
+ Sha3_384(context) => Sha3_384(context.clone()),
+ Sha3_512(context) => Sha3_512(context.clone()),
+
+ Sm3(context) => Sm3(context.clone()),
+ };
+
+ if let Some(length) = output_length {
+ if length != hash.output_length() {
+ return Err(generic_error(
+ "Output length mismatch for non-extendable algorithm",
+ ));
+ }
}
+
+ Ok(hash)
}
pub fn get_hashes() -> Vec<&'static str> {
vec![
+ "blake2s256",
+ "blake2b512",
"md4",
"md5",
"ripemd160",
@@ -110,21 +296,15 @@ impl Hash {
"sha256",
"sha384",
"sha512",
+ "sha512-224",
+ "sha512-256",
+ "sha3-224",
+ "sha3-256",
+ "sha3-384",
+ "sha3-512",
+ "shake128",
+ "shake256",
+ "sm3",
]
}
}
-
-impl Clone for Hash {
- fn clone(&self) -> Self {
- match self {
- Md4(_) => Md4(Default::default()),
- Md5(_) => Md5(Default::default()),
- Ripemd160(_) => Ripemd160(Default::default()),
- Sha1(_) => Sha1(Default::default()),
- Sha224(_) => Sha224(Default::default()),
- Sha256(_) => Sha256(Default::default()),
- Sha384(_) => Sha384(Default::default()),
- Sha512(_) => Sha512(Default::default()),
- }
- }
-}
diff --git a/ext/node/ops/crypto/mod.rs b/ext/node/ops/crypto/mod.rs
index 666ce8409..8ded3420a 100644
--- a/ext/node/ops/crypto/mod.rs
+++ b/ext/node/ops/crypto/mod.rs
@@ -7,7 +7,6 @@ use deno_core::serde_v8::BigInt as V8BigInt;
use deno_core::unsync::spawn_blocking;
use deno_core::JsBuffer;
use deno_core::OpState;
-use deno_core::ResourceId;
use deno_core::StringOrBuffer;
use deno_core::ToJsBuffer;
use elliptic_curve::sec1::ToEncodedPoint;
@@ -96,18 +95,13 @@ pub fn op_node_check_prime_bytes_async(
})
}
-#[op2(fast)]
-#[smi]
+#[op2]
+#[cppgc]
pub fn op_node_create_hash(
- state: &mut OpState,
#[string] algorithm: &str,
-) -> u32 {
- state
- .resource_table
- .add(match digest::Context::new(algorithm) {
- Ok(context) => context,
- Err(_) => return 0,
- })
+ output_length: Option<u32>,
+) -> Result<digest::Hasher, AnyError> {
+ digest::Hasher::new(algorithm, output_length.map(|l| l as usize))
}
#[op2]
@@ -118,65 +112,44 @@ pub fn op_node_get_hashes() -> Vec<&'static str> {
#[op2(fast)]
pub fn op_node_hash_update(
- state: &mut OpState,
- #[smi] rid: u32,
+ #[cppgc] hasher: &digest::Hasher,
#[buffer] data: &[u8],
) -> bool {
- let context = match state.resource_table.get::<digest::Context>(rid) {
- Ok(context) => context,
- _ => return false,
- };
- context.update(data);
- true
+ hasher.update(data)
}
#[op2(fast)]
pub fn op_node_hash_update_str(
- state: &mut OpState,
- #[smi] rid: u32,
+ #[cppgc] hasher: &digest::Hasher,
#[string] data: &str,
) -> bool {
- let context = match state.resource_table.get::<digest::Context>(rid) {
- Ok(context) => context,
- _ => return false,
- };
- context.update(data.as_bytes());
- true
+ hasher.update(data.as_bytes())
}
#[op2]
-#[serde]
+#[buffer]
pub fn op_node_hash_digest(
- state: &mut OpState,
- #[smi] rid: ResourceId,
-) -> Result<ToJsBuffer, AnyError> {
- let context = state.resource_table.take::<digest::Context>(rid)?;
- let context = Rc::try_unwrap(context)
- .map_err(|_| type_error("Hash context is already in use"))?;
- Ok(context.digest()?.into())
+ #[cppgc] hasher: &digest::Hasher,
+) -> Option<Box<[u8]>> {
+ hasher.digest()
}
#[op2]
#[string]
pub fn op_node_hash_digest_hex(
- state: &mut OpState,
- #[smi] rid: ResourceId,
-) -> Result<String, AnyError> {
- let context = state.resource_table.take::<digest::Context>(rid)?;
- let context = Rc::try_unwrap(context)
- .map_err(|_| type_error("Hash context is already in use"))?;
- let digest = context.digest()?;
- Ok(faster_hex::hex_string(&digest))
+ #[cppgc] hasher: &digest::Hasher,
+) -> Option<String> {
+ let digest = hasher.digest()?;
+ Some(faster_hex::hex_string(&digest))
}
-#[op2(fast)]
-#[smi]
+#[op2]
+#[cppgc]
pub fn op_node_hash_clone(
- state: &mut OpState,
- #[smi] rid: ResourceId,
-) -> Result<ResourceId, AnyError> {
- let context = state.resource_table.get::<digest::Context>(rid)?;
- Ok(state.resource_table.add(context.as_ref().clone()))
+ #[cppgc] hasher: &digest::Hasher,
+ output_length: Option<u32>,
+) -> Result<Option<digest::Hasher>, AnyError> {
+ hasher.clone_inner(output_length.map(|l| l as usize))
}
#[op2]
diff --git a/ext/node/polyfills/internal/crypto/hash.ts b/ext/node/polyfills/internal/crypto/hash.ts
index a1d61f953..2e040be25 100644
--- a/ext/node/polyfills/internal/crypto/hash.ts
+++ b/ext/node/polyfills/internal/crypto/hash.ts
@@ -13,8 +13,8 @@ import {
op_node_hash_update,
op_node_hash_update_str,
} from "ext:core/ops";
+import { primordials } from "ext:core/mod.js";
-import { TextEncoder } from "ext:deno_web/08_text_encoding.js";
import { Buffer } from "node:buffer";
import { Transform } from "node:stream";
import {
@@ -22,7 +22,11 @@ import {
forgivingBase64UrlEncode as encodeToBase64Url,
} from "ext:deno_web/00_infra.js";
import type { TransformOptions } from "ext:deno_node/_stream.d.ts";
-import { validateString } from "ext:deno_node/internal/validators.mjs";
+import {
+ validateEncoding,
+ validateString,
+ validateUint32,
+} from "ext:deno_node/internal/validators.mjs";
import type {
BinaryToTextEncoding,
Encoding,
@@ -32,119 +36,148 @@ import {
KeyObject,
prepareSecretKey,
} from "ext:deno_node/internal/crypto/keys.ts";
+import {
+ ERR_CRYPTO_HASH_FINALIZED,
+ ERR_INVALID_ARG_TYPE,
+ NodeError,
+} from "ext:deno_node/internal/errors.ts";
+import LazyTransform from "ext:deno_node/internal/streams/lazy_transform.mjs";
+import {
+ getDefaultEncoding,
+ toBuf,
+} from "ext:deno_node/internal/crypto/util.ts";
+import { isArrayBufferView } from "ext:deno_node/internal/util/types.ts";
+
+const { ReflectApply, ObjectSetPrototypeOf } = primordials;
-// TODO(@littledivy): Use Result<T, E> instead of boolean when
-// https://bugs.chromium.org/p/v8/issues/detail?id=13600 is fixed.
function unwrapErr(ok: boolean) {
- if (!ok) {
- throw new Error("Context is not initialized");
- }
+ if (!ok) throw new ERR_CRYPTO_HASH_FINALIZED();
}
-const coerceToBytes = (data: string | BufferSource): Uint8Array => {
- if (data instanceof Uint8Array) {
- return data;
- } else if (typeof data === "string") {
- // This assumes UTF-8, which may not be correct.
- return new TextEncoder().encode(data);
- } else if (ArrayBuffer.isView(data)) {
- return new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
- } else if (data instanceof ArrayBuffer) {
- return new Uint8Array(data);
- } else {
- throw new TypeError("expected data to be string | BufferSource");
- }
-};
+declare const __hasher: unique symbol;
+type Hasher = { __hasher: typeof __hasher };
-/**
- * The Hash class is a utility for creating hash digests of data. It can be used in one of two ways:
- *
- * - As a stream that is both readable and writable, where data is written to produce a computed hash digest on the readable side, or
- * - Using the hash.update() and hash.digest() methods to produce the computed hash.
- *
- * The crypto.createHash() method is used to create Hash instances. Hash objects are not to be created directly using the new keyword.
- */
-export class Hash extends Transform {
- #context: number;
+const kHandle = Symbol("kHandle");
- constructor(
- algorithm: string | number,
- _opts?: TransformOptions,
- ) {
- super({
- transform(chunk: string, _encoding: string, callback: () => void) {
- op_node_hash_update(context, coerceToBytes(chunk));
- callback();
- },
- flush(callback: () => void) {
- this.push(this.digest(undefined));
- callback();
- },
- });
+export function Hash(
+ this: Hash,
+ algorithm: string | Hasher,
+ options?: { outputLength?: number },
+): Hash {
+ if (!(this instanceof Hash)) {
+ return new Hash(algorithm, options);
+ }
+ if (!(typeof algorithm === "object")) {
+ validateString(algorithm, "algorithm");
+ }
+ const xofLen = typeof options === "object" && options !== null
+ ? options.outputLength
+ : undefined;
+ if (xofLen !== undefined) {
+ validateUint32(xofLen, "options.outputLength");
+ }
- if (typeof algorithm === "string") {
- this.#context = op_node_create_hash(
- algorithm.toLowerCase(),
+ try {
+ this[kHandle] = typeof algorithm === "object"
+ ? op_node_hash_clone(algorithm, xofLen)
+ : op_node_create_hash(algorithm.toLowerCase(), xofLen);
+ } catch (err) {
+ // TODO(lucacasonato): don't do this
+ if (err.message === "Output length mismatch for non-extendable algorithm") {
+ throw new NodeError(
+ "ERR_OSSL_EVP_NOT_XOF_OR_INVALID_LENGTH",
+ "Invalid XOF digest length",
);
- if (this.#context === 0) {
- throw new TypeError(`Unknown hash algorithm: ${algorithm}`);
- }
} else {
- this.#context = algorithm;
+ throw err;
}
+ }
+
+ if (this[kHandle] === null) throw new ERR_CRYPTO_HASH_FINALIZED();
+
+ ReflectApply(LazyTransform, this, [options]);
+}
- const context = this.#context;
+interface Hash {
+ [kHandle]: object;
+}
+
+ObjectSetPrototypeOf(Hash.prototype, LazyTransform.prototype);
+ObjectSetPrototypeOf(Hash, LazyTransform);
+
+Hash.prototype.copy = function copy(options?: { outputLength: number }) {
+ return new Hash(this[kHandle], options);
+};
+
+Hash.prototype._transform = function _transform(
+ chunk: string | Buffer,
+ encoding: Encoding | "buffer",
+ callback: () => void,
+) {
+ this.update(chunk, encoding);
+ callback();
+};
+
+Hash.prototype._flush = function _flush(callback: () => void) {
+ this.push(this.digest());
+ callback();
+};
+
+Hash.prototype.update = function update(
+ data: string | Buffer,
+ encoding: Encoding | "buffer",
+) {
+ encoding = encoding || getDefaultEncoding();
+
+ if (typeof data === "string") {
+ validateEncoding(data, encoding);
+ } else if (!isArrayBufferView(data)) {
+ throw new ERR_INVALID_ARG_TYPE(
+ "data",
+ ["string", "Buffer", "TypedArray", "DataView"],
+ data,
+ );
}
- copy(): Hash {
- return new Hash(op_node_hash_clone(this.#context));
+ if (
+ typeof data === "string" && (encoding === "utf8" || encoding === "buffer")
+ ) {
+ unwrapErr(op_node_hash_update_str(this[kHandle], data));
+ } else {
+ unwrapErr(op_node_hash_update(this[kHandle], toBuf(data, encoding)));
}
- /**
- * Updates the hash content with the given data.
- */
- update(data: string | ArrayBuffer, _encoding?: string): this {
- if (typeof data === "string") {
- unwrapErr(op_node_hash_update_str(this.#context, data));
- } else {
- unwrapErr(op_node_hash_update(this.#context, coerceToBytes(data)));
- }
+ return this;
+};
- return this;
- }
+Hash.prototype.digest = function digest(outputEncoding: Encoding | "buffer") {
+ outputEncoding = outputEncoding || getDefaultEncoding();
+ outputEncoding = `${outputEncoding}`;
- /**
- * Calculates the digest of all of the data.
- *
- * If encoding is provided a string will be returned; otherwise a Buffer is returned.
- *
- * Supported encodings are currently 'hex', 'binary', 'base64', 'base64url'.
- */
- digest(encoding?: string): Buffer | string {
- if (encoding === "hex") {
- return op_node_hash_digest_hex(this.#context);
- }
+ if (outputEncoding === "hex") {
+ const result = op_node_hash_digest_hex(this[kHandle]);
+ if (result === null) throw new ERR_CRYPTO_HASH_FINALIZED();
+ return result;
+ }
- const digest = op_node_hash_digest(this.#context);
- if (encoding === undefined) {
+ const digest = op_node_hash_digest(this[kHandle]);
+ if (digest === null) throw new ERR_CRYPTO_HASH_FINALIZED();
+
+ // TODO(@littedivy): Fast paths for below encodings.
+ switch (outputEncoding) {
+ case "binary":
+ return String.fromCharCode(...digest);
+ case "base64":
+ return encodeToBase64(digest);
+ case "base64url":
+ return encodeToBase64Url(digest);
+ case undefined:
+ case "buffer":
return Buffer.from(digest);
- }
-
- // TODO(@littedivy): Fast paths for below encodings.
- switch (encoding) {
- case "binary":
- return String.fromCharCode(...digest);
- case "base64":
- return encodeToBase64(digest);
- case "base64url":
- return encodeToBase64Url(digest);
- case "buffer":
- return Buffer.from(digest);
- default:
- return Buffer.from(digest).toString(encoding);
- }
+ default:
+ return Buffer.from(digest).toString(outputEncoding);
}
-}
+};
export function Hmac(
hmac: string,
@@ -171,7 +204,7 @@ class HmacImpl extends Transform {
super({
transform(chunk: string, encoding: string, callback: () => void) {
// deno-lint-ignore no-explicit-any
- self.update(coerceToBytes(chunk), encoding as any);
+ self.update(Buffer.from(chunk), encoding as any);
callback();
},
flush(callback: () => void) {
@@ -219,9 +252,10 @@ class HmacImpl extends Transform {
digest(encoding?: BinaryToTextEncoding): Buffer | string {
const result = this.#hash.digest();
- return new Hash(this.#algorithm).update(this.#opad).update(result).digest(
- encoding,
- );
+ return new Hash(this.#algorithm).update(this.#opad).update(result)
+ .digest(
+ encoding,
+ );
}
update(data: string | ArrayBuffer, inputEncoding?: Encoding): this {