summaryrefslogtreecommitdiff
path: root/ext/web
diff options
context:
space:
mode:
authorBartek IwaƄczuk <biwanczuk@gmail.com>2023-10-05 14:34:38 +0200
committerGitHub <noreply@github.com>2023-10-05 14:34:38 +0200
commit5d98a544b421e2b0bc3f99318fe44d1fed6d95d9 (patch)
tree871b510abadb2746a532ba7a13988f7dc437d932 /ext/web
parent551a08145098e95022efb778308d677db60a67cc (diff)
refactor: rewrite several extension ops to op2 (#20457)
Rewrites following extensions: - `ext/web` - `ext/url` - `ext/webstorage` - `ext/io` --------- Co-authored-by: Matt Mastracci <matthew@mastracci.com>
Diffstat (limited to 'ext/web')
-rw-r--r--ext/web/blob.rs44
-rw-r--r--ext/web/compression.rs19
-rw-r--r--ext/web/lib.rs75
-rw-r--r--ext/web/timers.rs8
4 files changed, 84 insertions, 62 deletions
diff --git a/ext/web/blob.rs b/ext/web/blob.rs
index 3481f6178..97974caf0 100644
--- a/ext/web/blob.rs
+++ b/ext/web/blob.rs
@@ -9,7 +9,7 @@ use std::sync::Arc;
use async_trait::async_trait;
use deno_core::error::type_error;
use deno_core::error::AnyError;
-use deno_core::op;
+use deno_core::op2;
use deno_core::parking_lot::Mutex;
use deno_core::url::Url;
use deno_core::JsBuffer;
@@ -165,8 +165,12 @@ impl BlobPart for SlicedBlobPart {
}
}
-#[op]
-pub fn op_blob_create_part(state: &mut OpState, data: JsBuffer) -> Uuid {
+#[op2]
+#[serde]
+pub fn op_blob_create_part(
+ state: &mut OpState,
+ #[buffer] data: JsBuffer,
+) -> Uuid {
let blob_store = state.borrow::<Arc<BlobStore>>();
let part = InMemoryBlobPart(data.to_vec());
blob_store.insert_part(Arc::new(part))
@@ -179,11 +183,12 @@ pub struct SliceOptions {
len: usize,
}
-#[op]
+#[op2]
+#[serde]
pub fn op_blob_slice_part(
state: &mut OpState,
- id: Uuid,
- options: SliceOptions,
+ #[serde] id: Uuid,
+ #[serde] options: SliceOptions,
) -> Result<Uuid, AnyError> {
let blob_store = state.borrow::<Arc<BlobStore>>();
let part = blob_store
@@ -205,10 +210,11 @@ pub fn op_blob_slice_part(
Ok(id)
}
-#[op]
+#[op2(async)]
+#[serde]
pub async fn op_blob_read_part(
state: Rc<RefCell<OpState>>,
- id: Uuid,
+ #[serde] id: Uuid,
) -> Result<ToJsBuffer, AnyError> {
let part = {
let state = state.borrow();
@@ -220,17 +226,18 @@ pub async fn op_blob_read_part(
Ok(ToJsBuffer::from(buf.to_vec()))
}
-#[op]
-pub fn op_blob_remove_part(state: &mut OpState, id: Uuid) {
+#[op2]
+pub fn op_blob_remove_part(state: &mut OpState, #[serde] id: Uuid) {
let blob_store = state.borrow::<Arc<BlobStore>>();
blob_store.remove_part(&id);
}
-#[op]
+#[op2]
+#[string]
pub fn op_blob_create_object_url(
state: &mut OpState,
- media_type: String,
- part_ids: Vec<Uuid>,
+ #[string] media_type: String,
+ #[serde] part_ids: Vec<Uuid>,
) -> Result<String, AnyError> {
let mut parts = Vec::with_capacity(part_ids.len());
let blob_store = state.borrow::<Arc<BlobStore>>();
@@ -252,10 +259,10 @@ pub fn op_blob_create_object_url(
Ok(url.to_string())
}
-#[op]
+#[op2(fast)]
pub fn op_blob_revoke_object_url(
- state: &mut deno_core::OpState,
- url: &str,
+ state: &mut OpState,
+ #[string] url: &str,
) -> Result<(), AnyError> {
let url = Url::parse(url)?;
let blob_store = state.borrow::<Arc<BlobStore>>();
@@ -275,10 +282,11 @@ pub struct ReturnBlobPart {
pub size: usize,
}
-#[op]
+#[op2]
+#[serde]
pub fn op_blob_from_object_url(
state: &mut OpState,
- url: String,
+ #[string] url: String,
) -> Result<Option<ReturnBlob>, AnyError> {
let url = Url::parse(&url)?;
if url.scheme() != "blob" {
diff --git a/ext/web/compression.rs b/ext/web/compression.rs
index 1ebb453b8..ff84b7971 100644
--- a/ext/web/compression.rs
+++ b/ext/web/compression.rs
@@ -2,7 +2,7 @@
use deno_core::error::type_error;
use deno_core::error::AnyError;
-use deno_core::op;
+use deno_core::op2;
use deno_core::OpState;
use deno_core::Resource;
use deno_core::ResourceId;
@@ -39,10 +39,11 @@ impl Resource for CompressionResource {
}
}
-#[op]
+#[op2(fast)]
+#[smi]
pub fn op_compression_new(
state: &mut OpState,
- format: &str,
+ #[string] format: &str,
is_decoder: bool,
) -> ResourceId {
let w = Vec::new();
@@ -65,11 +66,12 @@ pub fn op_compression_new(
state.resource_table.add(resource)
}
-#[op]
+#[op2]
+#[serde]
pub fn op_compression_write(
state: &mut OpState,
- rid: ResourceId,
- input: &[u8],
+ #[smi] rid: ResourceId,
+ #[anybuffer] input: &[u8],
) -> Result<ToJsBuffer, AnyError> {
let resource = state.resource_table.get::<CompressionResource>(rid)?;
let mut inner = resource.0.borrow_mut();
@@ -109,10 +111,11 @@ pub fn op_compression_write(
Ok(out.into())
}
-#[op]
+#[op2]
+#[serde]
pub fn op_compression_finish(
state: &mut OpState,
- rid: ResourceId,
+ #[smi] rid: ResourceId,
) -> Result<ToJsBuffer, AnyError> {
let resource = state.resource_table.take::<CompressionResource>(rid)?;
let resource = Rc::try_unwrap(resource).unwrap();
diff --git a/ext/web/lib.rs b/ext/web/lib.rs
index ebdb6b39e..f4789123b 100644
--- a/ext/web/lib.rs
+++ b/ext/web/lib.rs
@@ -11,6 +11,7 @@ use deno_core::error::range_error;
use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::op;
+use deno_core::op2;
use deno_core::serde_v8;
use deno_core::url::Url;
use deno_core::v8;
@@ -131,16 +132,18 @@ deno_core::extension!(deno_web,
}
);
-#[op]
-fn op_base64_decode(input: String) -> Result<ToJsBuffer, AnyError> {
+#[op2]
+#[serde]
+fn op_base64_decode(#[string] input: String) -> Result<ToJsBuffer, AnyError> {
let mut s = input.into_bytes();
let decoded_len = forgiving_base64_decode_inplace(&mut s)?;
s.truncate(decoded_len);
Ok(s.into())
}
-#[op]
-fn op_base64_atob(mut s: ByteString) -> Result<ByteString, AnyError> {
+#[op2]
+#[serde]
+fn op_base64_atob(#[serde] mut s: ByteString) -> Result<ByteString, AnyError> {
let decoded_len = forgiving_base64_decode_inplace(&mut s)?;
s.truncate(decoded_len);
Ok(s)
@@ -158,13 +161,15 @@ fn forgiving_base64_decode_inplace(
Ok(decoded.len())
}
-#[op]
-fn op_base64_encode(s: &[u8]) -> String {
+#[op2]
+#[string]
+fn op_base64_encode(#[buffer] s: &[u8]) -> String {
forgiving_base64_encode(s)
}
-#[op]
-fn op_base64_btoa(s: ByteString) -> String {
+#[op2]
+#[string]
+fn op_base64_btoa(#[serde] s: ByteString) -> String {
forgiving_base64_encode(s.as_ref())
}
@@ -174,8 +179,11 @@ fn forgiving_base64_encode(s: &[u8]) -> String {
base64_simd::STANDARD.encode_to_string(s)
}
-#[op]
-fn op_encoding_normalize_label(label: String) -> Result<String, AnyError> {
+#[op2]
+#[string]
+fn op_encoding_normalize_label(
+ #[string] label: String,
+) -> Result<String, AnyError> {
let encoding = Encoding::for_label_no_replacement(label.as_bytes())
.ok_or_else(|| {
range_error(format!(
@@ -185,12 +193,12 @@ fn op_encoding_normalize_label(label: String) -> Result<String, AnyError> {
Ok(encoding.name().to_lowercase())
}
-#[op(v8)]
+#[op2]
fn op_encoding_decode_utf8<'a>(
scope: &mut v8::HandleScope<'a>,
- zero_copy: &[u8],
+ #[anybuffer] zero_copy: &[u8],
ignore_bom: bool,
-) -> Result<serde_v8::Value<'a>, AnyError> {
+) -> Result<v8::Local<'a, v8::String>, AnyError> {
let buf = &zero_copy;
let buf = if !ignore_bom
@@ -213,15 +221,16 @@ fn op_encoding_decode_utf8<'a>(
// - https://github.com/denoland/deno/issues/6649
// - https://github.com/v8/v8/blob/d68fb4733e39525f9ff0a9222107c02c28096e2a/include/v8.h#L3277-L3278
match v8::String::new_from_utf8(scope, buf, v8::NewStringType::Normal) {
- Some(text) => Ok(serde_v8::from_v8(scope, text.into())?),
+ Some(text) => Ok(text),
None => Err(type_error("buffer exceeds maximum length")),
}
}
-#[op]
+#[op2]
+#[serde]
fn op_encoding_decode_single(
- data: &[u8],
- label: String,
+ #[anybuffer] data: &[u8],
+ #[string] label: String,
fatal: bool,
ignore_bom: bool,
) -> Result<U16String, AnyError> {
@@ -271,10 +280,11 @@ fn op_encoding_decode_single(
}
}
-#[op]
+#[op2(fast)]
+#[smi]
fn op_encoding_new_decoder(
state: &mut OpState,
- label: &str,
+ #[string] label: &str,
fatal: bool,
ignore_bom: bool,
) -> Result<ResourceId, AnyError> {
@@ -298,11 +308,12 @@ fn op_encoding_new_decoder(
Ok(rid)
}
-#[op]
+#[op2]
+#[serde]
fn op_encoding_decode(
state: &mut OpState,
- data: &[u8],
- rid: ResourceId,
+ #[anybuffer] data: &[u8],
+ #[smi] rid: ResourceId,
stream: bool,
) -> Result<U16String, AnyError> {
let resource = state.resource_table.get::<TextDecoderResource>(rid)?;
@@ -415,30 +426,28 @@ fn op_encoding_encode_into(
out_buf[1] = boundary as u32;
}
-#[op(v8)]
+#[op2]
fn op_transfer_arraybuffer<'a>(
scope: &mut v8::HandleScope<'a>,
- input: serde_v8::Value<'a>,
-) -> Result<serde_v8::Value<'a>, AnyError> {
- let ab = v8::Local::<v8::ArrayBuffer>::try_from(input.v8_value)?;
+ ab: &v8::ArrayBuffer,
+) -> Result<v8::Local<'a, v8::ArrayBuffer>, AnyError> {
if !ab.is_detachable() {
return Err(type_error("ArrayBuffer is not detachable"));
}
let bs = ab.get_backing_store();
ab.detach(None);
- let ab = v8::ArrayBuffer::with_backing_store(scope, &bs);
- Ok(serde_v8::Value {
- v8_value: ab.into(),
- })
+ Ok(v8::ArrayBuffer::with_backing_store(scope, &bs))
}
-#[op]
-fn op_encode_binary_string(s: &[u8]) -> ByteString {
+#[op2]
+#[serde]
+fn op_encode_binary_string(#[buffer] s: &[u8]) -> ByteString {
ByteString::from(s)
}
/// Creates a [`CancelHandle`] resource that can be used to cancel invocations of certain ops.
-#[op(fast)]
+#[op2(fast)]
+#[smi]
pub fn op_cancel_handle(state: &mut OpState) -> u32 {
state.resource_table.add(CancelHandle::new())
}
diff --git a/ext/web/timers.rs b/ext/web/timers.rs
index 6e0759a98..17b46c2be 100644
--- a/ext/web/timers.rs
+++ b/ext/web/timers.rs
@@ -5,6 +5,7 @@
use crate::hr_timer_lock::hr_timer_lock;
use deno_core::error::AnyError;
use deno_core::op;
+use deno_core::op2;
use deno_core::CancelFuture;
use deno_core::CancelHandle;
use deno_core::OpState;
@@ -27,8 +28,8 @@ pub type StartTime = Instant;
// since the start time of the deno runtime.
// If the High precision flag is not set, the
// nanoseconds are rounded on 2ms.
-#[op(fast)]
-pub fn op_now<TP>(state: &mut OpState, buf: &mut [u8])
+#[op2(fast)]
+pub fn op_now<TP>(state: &mut OpState, #[buffer] buf: &mut [u8])
where
TP: TimersPermission + 'static,
{
@@ -68,7 +69,8 @@ impl Resource for TimerHandle {
/// Creates a [`TimerHandle`] resource that can be used to cancel invocations of
/// [`op_sleep`].
-#[op]
+#[op2(fast)]
+#[smi]
pub fn op_timer_handle(state: &mut OpState) -> ResourceId {
state
.resource_table