From 4c9eee3ebe383f0aa8f082dd6831f609cd5d5abb Mon Sep 17 00:00:00 2001 From: David Sherret Date: Mon, 14 Oct 2024 23:25:18 -0400 Subject: perf(http): cache webidl.converters lookups in ext/fetch/23_response.js (#26256) --- ext/fetch/23_response.js | 105 ++++++++++++++++++++++++++--------------------- 1 file changed, 58 insertions(+), 47 deletions(-) (limited to 'ext/fetch') diff --git a/ext/fetch/23_response.js b/ext/fetch/23_response.js index ff4ad5fac..278dcb7de 100644 --- a/ext/fetch/23_response.js +++ b/ext/fetch/23_response.js @@ -61,6 +61,15 @@ const _mimeType = Symbol("mime type"); const _body = Symbol("body"); const _brand = webidl.brand; +// it's slightly faster to cache these +const webidlConvertersBodyInitDomString = + webidl.converters["BodyInit_DOMString?"]; +const webidlConvertersUSVString = webidl.converters["USVString"]; +const webidlConvertersUnsignedShort = webidl.converters["unsigned short"]; +const webidlConvertersAny = webidl.converters["any"]; +const webidlConvertersByteString = webidl.converters["ByteString"]; +const webidlConvertersHeadersInit = webidl.converters["HeadersInit"]; + /** * @typedef InnerResponse * @property {"basic" | "cors" | "default" | "error" | "opaque" | "opaqueredirect"} type @@ -259,8 +268,8 @@ class Response { */ static redirect(url, status = 302) { const prefix = "Failed to execute 'Response.redirect'"; - url = webidl.converters["USVString"](url, prefix, "Argument 1"); - status = webidl.converters["unsigned short"](status, prefix, "Argument 2"); + url = webidlConvertersUSVString(url, prefix, "Argument 1"); + status = webidlConvertersUnsignedShort(status, prefix, "Argument 2"); const baseURL = getLocationHref(); const parsedURL = new URL(url, baseURL); @@ -286,8 +295,8 @@ class Response { */ static json(data = undefined, init = { __proto__: null }) { const prefix = "Failed to execute 'Response.json'"; - data = webidl.converters.any(data); - init = webidl.converters["ResponseInit_fast"](init, prefix, "Argument 2"); + data = webidlConvertersAny(data); + init = webidlConvertersResponseInitFast(init, prefix, "Argument 2"); const str = serializeJSValueToJSONString(data); const res = extractBody(str); @@ -313,8 +322,8 @@ class Response { } const prefix = "Failed to construct 'Response'"; - body = webidl.converters["BodyInit_DOMString?"](body, prefix, "Argument 1"); - init = webidl.converters["ResponseInit_fast"](init, prefix, "Argument 2"); + body = webidlConvertersBodyInitDomString(body, prefix, "Argument 1"); + init = webidlConvertersResponseInitFast(init, prefix, "Argument 2"); this[_response] = newInnerResponse(); this[_headers] = headersFromHeaderList( @@ -443,47 +452,49 @@ webidl.converters["Response"] = webidl.createInterfaceConverter( "Response", ResponsePrototype, ); -webidl.converters["ResponseInit"] = webidl.createDictionaryConverter( - "ResponseInit", - [{ - key: "status", - defaultValue: 200, - converter: webidl.converters["unsigned short"], - }, { - key: "statusText", - defaultValue: "", - converter: webidl.converters["ByteString"], - }, { - key: "headers", - converter: webidl.converters["HeadersInit"], - }], -); -webidl.converters["ResponseInit_fast"] = function ( - init, - prefix, - context, - opts, -) { - if (init === undefined || init === null) { - return { status: 200, statusText: "", headers: undefined }; - } - // Fast path, if not a proxy - if (typeof init === "object" && !core.isProxy(init)) { - // Not a proxy fast path - const status = init.status !== undefined - ? 
webidl.converters["unsigned short"](init.status) - : 200; - const statusText = init.statusText !== undefined - ? webidl.converters["ByteString"](init.statusText) - : ""; - const headers = init.headers !== undefined - ? webidl.converters["HeadersInit"](init.headers) - : undefined; - return { status, statusText, headers }; - } - // Slow default path - return webidl.converters["ResponseInit"](init, prefix, context, opts); -}; +const webidlConvertersResponseInit = webidl.converters["ResponseInit"] = webidl + .createDictionaryConverter( + "ResponseInit", + [{ + key: "status", + defaultValue: 200, + converter: webidlConvertersUnsignedShort, + }, { + key: "statusText", + defaultValue: "", + converter: webidlConvertersByteString, + }, { + key: "headers", + converter: webidlConvertersHeadersInit, + }], + ); +const webidlConvertersResponseInitFast = webidl + .converters["ResponseInit_fast"] = function ( + init, + prefix, + context, + opts, + ) { + if (init === undefined || init === null) { + return { status: 200, statusText: "", headers: undefined }; + } + // Fast path, if not a proxy + if (typeof init === "object" && !core.isProxy(init)) { + // Not a proxy fast path + const status = init.status !== undefined + ? webidlConvertersUnsignedShort(init.status) + : 200; + const statusText = init.statusText !== undefined + ? webidlConvertersByteString(init.statusText) + : ""; + const headers = init.headers !== undefined + ? webidlConvertersHeadersInit(init.headers) + : undefined; + return { status, statusText, headers }; + } + // Slow default path + return webidlConvertersResponseInit(init, prefix, context, opts); + }; /** * @param {Response} response -- cgit v1.2.3 From 3385d1252e4eae093234d0a075f4a564308ba48e Mon Sep 17 00:00:00 2001 From: denobot <33910674+denobot@users.noreply.github.com> Date: Wed, 16 Oct 2024 19:48:42 -0400 Subject: chore: forward v2.0.1 release commit to main (#26338) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This is the release commit being forwarded back to main for 2.0.1 Co-authored-by: bartlomieju Co-authored-by: Bartek Iwańczuk --- ext/fetch/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'ext/fetch') diff --git a/ext/fetch/Cargo.toml b/ext/fetch/Cargo.toml index cc9e4f03d..c8e2c858b 100644 --- a/ext/fetch/Cargo.toml +++ b/ext/fetch/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fetch" -version = "0.195.0" +version = "0.196.0" authors.workspace = true edition.workspace = true license.workspace = true -- cgit v1.2.3 From 3ae10a01e0c8b9c425276a33b98f661c1473cd59 Mon Sep 17 00:00:00 2001 From: denobot <33910674+denobot@users.noreply.github.com> Date: Thu, 17 Oct 2024 21:12:49 -0400 Subject: chore: forward v2.0.2 release commit to main (#26376) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This is the release commit being forwarded back to main for 2.0.2 Co-authored-by: bartlomieju Co-authored-by: Bartek Iwańczuk --- ext/fetch/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'ext/fetch') diff --git a/ext/fetch/Cargo.toml b/ext/fetch/Cargo.toml index c8e2c858b..ddb58a3f3 100644 --- a/ext/fetch/Cargo.toml +++ b/ext/fetch/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fetch" -version = "0.196.0" +version = "0.197.0" authors.workspace = true edition.workspace = true license.workspace = true -- cgit v1.2.3 From 6c4ef11f04cc35b61417bf08d5e7592d44197c75 Mon Sep 17 00:00:00 2001 From: Leo Kettmeir Date: Fri, 18 Oct 2024 18:20:58 -0700 
Subject: refactor(ext/fetch): use concrete error types (#26220) --- ext/fetch/Cargo.toml | 1 + ext/fetch/fs_fetch_handler.rs | 5 +- ext/fetch/lib.rs | 244 ++++++++++++++++++++++++++---------------- 3 files changed, 156 insertions(+), 94 deletions(-) (limited to 'ext/fetch') diff --git a/ext/fetch/Cargo.toml b/ext/fetch/Cargo.toml index ddb58a3f3..afffd3ffb 100644 --- a/ext/fetch/Cargo.toml +++ b/ext/fetch/Cargo.toml @@ -32,6 +32,7 @@ percent-encoding.workspace = true rustls-webpki.workspace = true serde.workspace = true serde_json.workspace = true +thiserror.workspace = true tokio.workspace = true tokio-rustls.workspace = true tokio-socks.workspace = true diff --git a/ext/fetch/fs_fetch_handler.rs b/ext/fetch/fs_fetch_handler.rs index 4c2b81f35..c236dd9c6 100644 --- a/ext/fetch/fs_fetch_handler.rs +++ b/ext/fetch/fs_fetch_handler.rs @@ -4,7 +4,6 @@ use crate::CancelHandle; use crate::CancelableResponseFuture; use crate::FetchHandler; -use deno_core::error::type_error; use deno_core::futures::FutureExt; use deno_core::futures::TryFutureExt; use deno_core::futures::TryStreamExt; @@ -42,9 +41,7 @@ impl FetchHandler for FsFetchHandler { .map_err(|_| ())?; Ok::<_, ()>(response) } - .map_err(move |_| { - type_error("NetworkError when attempting to fetch resource") - }) + .map_err(move |_| super::FetchError::NetworkError) .or_cancel(&cancel_handle) .boxed_local(); diff --git a/ext/fetch/lib.rs b/ext/fetch/lib.rs index 88f303852..4df8dc3d7 100644 --- a/ext/fetch/lib.rs +++ b/ext/fetch/lib.rs @@ -17,10 +17,6 @@ use std::sync::Arc; use std::task::Context; use std::task::Poll; -use deno_core::anyhow::anyhow; -use deno_core::anyhow::Error; -use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::futures::stream::Peekable; use deno_core::futures::Future; use deno_core::futures::FutureExt; @@ -28,6 +24,7 @@ use deno_core::futures::Stream; use deno_core::futures::StreamExt; use deno_core::futures::TryFutureExt; use deno_core::op2; +use deno_core::url; use deno_core::url::Url; use deno_core::AsyncRefCell; use deno_core::AsyncResult; @@ -87,15 +84,18 @@ pub struct Options { pub root_cert_store_provider: Option>, pub proxy: Option, #[allow(clippy::type_complexity)] - pub request_builder_hook: - Option) -> Result<(), AnyError>>, + pub request_builder_hook: Option< + fn(&mut http::Request) -> Result<(), deno_core::error::AnyError>, + >, pub unsafely_ignore_certificate_errors: Option>, pub client_cert_chain_and_key: TlsKeys, pub file_fetch_handler: Rc, } impl Options { - pub fn root_cert_store(&self) -> Result, AnyError> { + pub fn root_cert_store( + &self, + ) -> Result, deno_core::error::AnyError> { Ok(match &self.root_cert_store_provider { Some(provider) => Some(provider.get_or_try_init()?.clone()), None => None, @@ -144,6 +144,51 @@ deno_core::extension!(deno_fetch, }, ); +#[derive(Debug, thiserror::Error)] +pub enum FetchError { + #[error(transparent)] + Resource(deno_core::error::AnyError), + #[error(transparent)] + Permission(deno_core::error::AnyError), + #[error("NetworkError when attempting to fetch resource")] + NetworkError, + #[error("Fetching files only supports the GET method: received {0}")] + FsNotGet(Method), + #[error("Invalid URL {0}")] + InvalidUrl(Url), + #[error(transparent)] + InvalidHeaderName(#[from] http::header::InvalidHeaderName), + #[error(transparent)] + InvalidHeaderValue(#[from] http::header::InvalidHeaderValue), + #[error("{0:?}")] + DataUrl(data_url::DataUrlError), + #[error("{0:?}")] + Base64(data_url::forgiving_base64::InvalidBase64), + 
#[error("Blob for the given URL not found.")] + BlobNotFound, + #[error("Url scheme '{0}' not supported")] + SchemeNotSupported(String), + #[error("Request was cancelled")] + RequestCanceled, + #[error(transparent)] + Http(#[from] http::Error), + #[error(transparent)] + ClientCreate(#[from] HttpClientCreateError), + #[error(transparent)] + Url(#[from] url::ParseError), + #[error(transparent)] + Method(#[from] http::method::InvalidMethod), + #[error(transparent)] + ClientSend(#[from] ClientSendError), + #[error(transparent)] + RequestBuilderHook(deno_core::error::AnyError), + #[error(transparent)] + Io(#[from] std::io::Error), + // Only used for node upgrade + #[error(transparent)] + Hyper(#[from] hyper::Error), +} + pub type CancelableResponseFuture = Pin>>; @@ -170,11 +215,7 @@ impl FetchHandler for DefaultFileFetchHandler { _state: &mut OpState, _url: &Url, ) -> (CancelableResponseFuture, Option>) { - let fut = async move { - Ok(Err(type_error( - "NetworkError when attempting to fetch resource", - ))) - }; + let fut = async move { Ok(Err(FetchError::NetworkError)) }; (Box::pin(fut), None) } } @@ -191,7 +232,7 @@ pub struct FetchReturn { pub fn get_or_create_client_from_state( state: &mut OpState, -) -> Result { +) -> Result { if let Some(client) = state.try_borrow::() { Ok(client.clone()) } else { @@ -204,11 +245,13 @@ pub fn get_or_create_client_from_state( pub fn create_client_from_options( options: &Options, -) -> Result { +) -> Result { create_http_client( &options.user_agent, CreateHttpClientOptions { - root_cert_store: options.root_cert_store()?, + root_cert_store: options + .root_cert_store() + .map_err(HttpClientCreateError::RootCertStore)?, ca_certs: vec![], proxy: options.proxy.clone(), unsafely_ignore_certificate_errors: options @@ -230,7 +273,9 @@ pub fn create_client_from_options( #[allow(clippy::type_complexity)] pub struct ResourceToBodyAdapter( Rc, - Option>>>>, + Option< + Pin>>>, + >, ); impl ResourceToBodyAdapter { @@ -246,7 +291,7 @@ unsafe impl Send for ResourceToBodyAdapter {} unsafe impl Sync for ResourceToBodyAdapter {} impl Stream for ResourceToBodyAdapter { - type Item = Result; + type Item = Result; fn poll_next( self: Pin<&mut Self>, @@ -276,7 +321,7 @@ impl Stream for ResourceToBodyAdapter { impl hyper::body::Body for ResourceToBodyAdapter { type Data = Bytes; - type Error = Error; + type Error = deno_core::error::AnyError; fn poll_frame( self: Pin<&mut Self>, @@ -301,13 +346,13 @@ pub trait FetchPermissions { &mut self, url: &Url, api_name: &str, - ) -> Result<(), AnyError>; + ) -> Result<(), deno_core::error::AnyError>; #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"] fn check_read<'a>( &mut self, p: &'a Path, api_name: &str, - ) -> Result, AnyError>; + ) -> Result, deno_core::error::AnyError>; } impl FetchPermissions for deno_permissions::PermissionsContainer { @@ -316,7 +361,7 @@ impl FetchPermissions for deno_permissions::PermissionsContainer { &mut self, url: &Url, api_name: &str, - ) -> Result<(), AnyError> { + ) -> Result<(), deno_core::error::AnyError> { deno_permissions::PermissionsContainer::check_net_url(self, url, api_name) } @@ -325,7 +370,7 @@ impl FetchPermissions for deno_permissions::PermissionsContainer { &mut self, path: &'a Path, api_name: &str, - ) -> Result, AnyError> { + ) -> Result, deno_core::error::AnyError> { deno_permissions::PermissionsContainer::check_read_path( self, path, @@ -346,12 +391,15 @@ pub fn op_fetch( has_body: bool, #[buffer] data: Option, #[smi] resource: Option, -) -> Result 
+) -> Result where FP: FetchPermissions + 'static, { let (client, allow_host) = if let Some(rid) = client_rid { - let r = state.resource_table.get::(rid)?; + let r = state + .resource_table + .get::(rid) + .map_err(FetchError::Resource)?; (r.client.clone(), r.allow_host) } else { (get_or_create_client_from_state(state)?, false) @@ -364,20 +412,18 @@ where let scheme = url.scheme(); let (request_rid, cancel_handle_rid) = match scheme { "file" => { - let path = url.to_file_path().map_err(|_| { - type_error("NetworkError when attempting to fetch resource") - })?; + let path = url.to_file_path().map_err(|_| FetchError::NetworkError)?; let permissions = state.borrow_mut::(); - let path = permissions.check_read(&path, "fetch()")?; + let path = permissions + .check_read(&path, "fetch()") + .map_err(FetchError::Permission)?; let url = match path { Cow::Owned(path) => Url::from_file_path(path).unwrap(), Cow::Borrowed(_) => url, }; if method != Method::GET { - return Err(type_error(format!( - "Fetching files only supports the GET method: received {method}" - ))); + return Err(FetchError::FsNotGet(method)); } let Options { @@ -396,13 +442,15 @@ where } "http" | "https" => { let permissions = state.borrow_mut::(); - permissions.check_net_url(&url, "fetch()")?; + permissions + .check_net_url(&url, "fetch()") + .map_err(FetchError::Resource)?; let maybe_authority = extract_authority(&mut url); let uri = url .as_str() .parse::() - .map_err(|_| type_error(format!("Invalid URL {url}")))?; + .map_err(|_| FetchError::InvalidUrl(url.clone()))?; let mut con_len = None; let body = if has_body { @@ -416,7 +464,10 @@ where .boxed() } (_, Some(resource)) => { - let resource = state.resource_table.take_any(resource)?; + let resource = state + .resource_table + .take_any(resource) + .map_err(FetchError::Resource)?; match resource.size_hint() { (body_size, Some(n)) if body_size == n && body_size > 0 => { con_len = Some(body_size); @@ -453,10 +504,8 @@ where } for (key, value) in headers { - let name = HeaderName::from_bytes(&key) - .map_err(|err| type_error(err.to_string()))?; - let v = HeaderValue::from_bytes(&value) - .map_err(|err| type_error(err.to_string()))?; + let name = HeaderName::from_bytes(&key)?; + let v = HeaderValue::from_bytes(&value)?; if (name != HOST || allow_host) && name != CONTENT_LENGTH { request.headers_mut().append(name, v); @@ -474,20 +523,18 @@ where let options = state.borrow::(); if let Some(request_builder_hook) = options.request_builder_hook { request_builder_hook(&mut request) - .map_err(|err| type_error(err.to_string()))?; + .map_err(FetchError::RequestBuilderHook)?; } let cancel_handle = CancelHandle::new_rc(); let cancel_handle_ = cancel_handle.clone(); - let fut = { - async move { - client - .send(request) - .map_err(Into::into) - .or_cancel(cancel_handle_) - .await - } + let fut = async move { + client + .send(request) + .map_err(Into::into) + .or_cancel(cancel_handle_) + .await }; let request_rid = state.resource_table.add(FetchRequestResource { @@ -501,12 +548,10 @@ where (request_rid, Some(cancel_handle_rid)) } "data" => { - let data_url = DataUrl::process(url.as_str()) - .map_err(|e| type_error(format!("{e:?}")))?; + let data_url = + DataUrl::process(url.as_str()).map_err(FetchError::DataUrl)?; - let (body, _) = data_url - .decode_to_vec() - .map_err(|e| type_error(format!("{e:?}")))?; + let (body, _) = data_url.decode_to_vec().map_err(FetchError::Base64)?; let body = http_body_util::Full::new(body.into()) .map_err(|never| match never {}) .boxed(); @@ -528,11 +573,9 @@ where 
"blob" => { // Blob URL resolution happens in the JS side of fetch. If we got here is // because the URL isn't an object URL. - return Err(type_error("Blob for the given URL not found.")); - } - _ => { - return Err(type_error(format!("Url scheme '{scheme}' not supported"))) + return Err(FetchError::BlobNotFound); } + _ => return Err(FetchError::SchemeNotSupported(scheme.to_string())), }; Ok(FetchReturn { @@ -564,11 +607,12 @@ pub struct FetchResponse { pub async fn op_fetch_send( state: Rc>, #[smi] rid: ResourceId, -) -> Result { +) -> Result { let request = state .borrow_mut() .resource_table - .take::(rid)?; + .take::(rid) + .map_err(FetchError::Resource)?; let request = Rc::try_unwrap(request) .ok() @@ -581,22 +625,23 @@ pub async fn op_fetch_send( // If any error in the chain is a hyper body error, return that as a special result we can use to // reconstruct an error chain (eg: `new TypeError(..., { cause: new Error(...) })`). // TODO(mmastrac): it would be a lot easier if we just passed a v8::Global through here instead - let mut err_ref: &dyn std::error::Error = err.as_ref(); - while let Some(err_src) = std::error::Error::source(err_ref) { - if let Some(err_src) = err_src.downcast_ref::() { - if let Some(err_src) = std::error::Error::source(err_src) { - return Ok(FetchResponse { - error: Some((err.to_string(), err_src.to_string())), - ..Default::default() - }); + + if let FetchError::ClientSend(err_src) = &err { + if let Some(client_err) = std::error::Error::source(&err_src.source) { + if let Some(err_src) = client_err.downcast_ref::() { + if let Some(err_src) = std::error::Error::source(err_src) { + return Ok(FetchResponse { + error: Some((err.to_string(), err_src.to_string())), + ..Default::default() + }); + } } } - err_ref = err_src; } - return Err(type_error(err.to_string())); + return Err(err); } - Err(_) => return Err(type_error("Request was cancelled")), + Err(_) => return Err(FetchError::RequestCanceled), }; let status = res.status(); @@ -636,7 +681,7 @@ pub async fn op_fetch_send( } type CancelableResponseResult = - Result, AnyError>, Canceled>; + Result, FetchError>, Canceled>; pub struct FetchRequestResource { pub future: Pin>>, @@ -691,7 +736,7 @@ impl FetchResponseResource { } } - pub async fn upgrade(self) -> Result { + pub async fn upgrade(self) -> Result { let reader = self.response_reader.into_inner(); match reader { FetchResponseReader::Start(resp) => Ok(hyper::upgrade::on(resp).await?), @@ -746,7 +791,9 @@ impl Resource for FetchResponseResource { // safely call `await` on it without creating a race condition. 
Some(_) => match reader.as_mut().next().await.unwrap() { Ok(chunk) => assert!(chunk.is_empty()), - Err(err) => break Err(type_error(err.to_string())), + Err(err) => { + break Err(deno_core::error::type_error(err.to_string())) + } }, None => break Ok(BufView::empty()), } @@ -809,14 +856,16 @@ pub fn op_fetch_custom_client( state: &mut OpState, #[serde] args: CreateHttpClientArgs, #[cppgc] tls_keys: &TlsKeysHolder, -) -> Result +) -> Result where FP: FetchPermissions + 'static, { if let Some(proxy) = args.proxy.clone() { let permissions = state.borrow_mut::(); let url = Url::parse(&proxy.url)?; - permissions.check_net_url(&url, "Deno.createHttpClient()")?; + permissions + .check_net_url(&url, "Deno.createHttpClient()") + .map_err(FetchError::Permission)?; } let options = state.borrow::(); @@ -829,7 +878,9 @@ where let client = create_http_client( &options.user_agent, CreateHttpClientOptions { - root_cert_store: options.root_cert_store()?, + root_cert_store: options + .root_cert_store() + .map_err(HttpClientCreateError::RootCertStore)?, ca_certs, proxy: args.proxy, unsafely_ignore_certificate_errors: options @@ -887,19 +938,34 @@ impl Default for CreateHttpClientOptions { } } +#[derive(Debug, thiserror::Error)] +pub enum HttpClientCreateError { + #[error(transparent)] + Tls(deno_tls::TlsError), + #[error("Illegal characters in User-Agent: received {0}")] + InvalidUserAgent(String), + #[error("invalid proxy url")] + InvalidProxyUrl, + #[error("Cannot create Http Client: either `http1` or `http2` needs to be set to true")] + HttpVersionSelectionInvalid, + #[error(transparent)] + RootCertStore(deno_core::error::AnyError), +} + /// Create new instance of async Client. This client supports /// proxies and doesn't follow redirects. pub fn create_http_client( user_agent: &str, options: CreateHttpClientOptions, -) -> Result { +) -> Result { let mut tls_config = deno_tls::create_client_config( options.root_cert_store, options.ca_certs, options.unsafely_ignore_certificate_errors, options.client_cert_chain_and_key.into(), deno_tls::SocketUse::Http, - )?; + ) + .map_err(HttpClientCreateError::Tls)?; // Proxy TLS should not send ALPN tls_config.alpn_protocols.clear(); @@ -919,9 +985,7 @@ pub fn create_http_client( http_connector.enforce_http(false); let user_agent = user_agent.parse::().map_err(|_| { - type_error(format!( - "Illegal characters in User-Agent: received {user_agent}" - )) + HttpClientCreateError::InvalidUserAgent(user_agent.to_string()) })?; let mut builder = @@ -932,7 +996,7 @@ pub fn create_http_client( let mut proxies = proxy::from_env(); if let Some(proxy) = options.proxy { let mut intercept = proxy::Intercept::all(&proxy.url) - .ok_or_else(|| type_error("invalid proxy url"))?; + .ok_or_else(|| HttpClientCreateError::InvalidProxyUrl)?; if let Some(basic_auth) = &proxy.basic_auth { intercept.set_auth(&basic_auth.username, &basic_auth.password); } @@ -964,7 +1028,7 @@ pub fn create_http_client( } (true, true) => {} (false, false) => { - return Err(type_error("Cannot create Http Client: either `http1` or `http2` needs to be set to true")) + return Err(HttpClientCreateError::HttpVersionSelectionInvalid) } } @@ -980,10 +1044,8 @@ pub fn create_http_client( #[op2] #[serde] -pub fn op_utf8_to_byte_string( - #[string] input: String, -) -> Result { - Ok(input.into()) +pub fn op_utf8_to_byte_string(#[string] input: String) -> ByteString { + input.into() } #[derive(Clone, Debug)] @@ -1003,7 +1065,7 @@ const STAR_STAR: HeaderValue = HeaderValue::from_static("*/*"); #[derive(Debug)] pub struct 
ClientSendError { uri: Uri, - source: hyper_util::client::legacy::Error, + pub source: hyper_util::client::legacy::Error, } impl ClientSendError { @@ -1075,12 +1137,14 @@ impl Client { .oneshot(req) .await .map_err(|e| ClientSendError { uri, source: e })?; - Ok(resp.map(|b| b.map_err(|e| anyhow!(e)).boxed())) + Ok(resp.map(|b| b.map_err(|e| deno_core::anyhow::anyhow!(e)).boxed())) } } -pub type ReqBody = http_body_util::combinators::BoxBody; -pub type ResBody = http_body_util::combinators::BoxBody; +pub type ReqBody = + http_body_util::combinators::BoxBody; +pub type ResBody = + http_body_util::combinators::BoxBody; /// Copied from https://github.com/seanmonstar/reqwest/blob/b9d62a0323d96f11672a61a17bf8849baec00275/src/async_impl/request.rs#L572 /// Check the request URL for a "username:password" type authority, and if -- cgit v1.2.3 From 730331622ee17cf603447f4eb53631b9cfd7bef1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Fri, 25 Oct 2024 14:57:40 +0100 Subject: chore: forward v2.0.3 commit to main (#26535) Forwarding v2.0.3 commit to `main` Co-authored-by: denobot <33910674+denobot@users.noreply.github.com> Co-authored-by: bartlomieju --- ext/fetch/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'ext/fetch') diff --git a/ext/fetch/Cargo.toml b/ext/fetch/Cargo.toml index afffd3ffb..316a6eea8 100644 --- a/ext/fetch/Cargo.toml +++ b/ext/fetch/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fetch" -version = "0.197.0" +version = "0.198.0" authors.workspace = true edition.workspace = true license.workspace = true -- cgit v1.2.3 From a1473d82c5612676c50af00ded0467dbb29bc0a8 Mon Sep 17 00:00:00 2001 From: denobot <33910674+denobot@users.noreply.github.com> Date: Wed, 30 Oct 2024 08:46:31 -0400 Subject: chore: forward v2.0.4 release commit to main (#26636) This is the release commit being forwarded back to main for 2.0.4 Co-authored-by: bartlomieju --- ext/fetch/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'ext/fetch') diff --git a/ext/fetch/Cargo.toml b/ext/fetch/Cargo.toml index 316a6eea8..93fc88ae6 100644 --- a/ext/fetch/Cargo.toml +++ b/ext/fetch/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fetch" -version = "0.198.0" +version = "0.199.0" authors.workspace = true edition.workspace = true license.workspace = true -- cgit v1.2.3 From fe9f0ee5934871175758857899fe64e56c397fd5 Mon Sep 17 00:00:00 2001 From: Leo Kettmeir Date: Mon, 4 Nov 2024 09:17:21 -0800 Subject: refactor(runtime/permissions): use concrete error types (#26464) --- ext/fetch/lib.rs | 23 +++++++++-------------- 1 file changed, 9 insertions(+), 14 deletions(-) (limited to 'ext/fetch') diff --git a/ext/fetch/lib.rs b/ext/fetch/lib.rs index 4df8dc3d7..7ef26431c 100644 --- a/ext/fetch/lib.rs +++ b/ext/fetch/lib.rs @@ -39,6 +39,7 @@ use deno_core::OpState; use deno_core::RcRef; use deno_core::Resource; use deno_core::ResourceId; +use deno_permissions::PermissionCheckError; use deno_tls::rustls::RootCertStore; use deno_tls::Proxy; use deno_tls::RootCertStoreProvider; @@ -149,7 +150,7 @@ pub enum FetchError { #[error(transparent)] Resource(deno_core::error::AnyError), #[error(transparent)] - Permission(deno_core::error::AnyError), + Permission(#[from] PermissionCheckError), #[error("NetworkError when attempting to fetch resource")] NetworkError, #[error("Fetching files only supports the GET method: received {0}")] @@ -346,13 +347,13 @@ pub trait FetchPermissions { &mut self, url: &Url, api_name: &str, - ) -> Result<(), 
deno_core::error::AnyError>; + ) -> Result<(), PermissionCheckError>; #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"] fn check_read<'a>( &mut self, p: &'a Path, api_name: &str, - ) -> Result, deno_core::error::AnyError>; + ) -> Result, PermissionCheckError>; } impl FetchPermissions for deno_permissions::PermissionsContainer { @@ -361,7 +362,7 @@ impl FetchPermissions for deno_permissions::PermissionsContainer { &mut self, url: &Url, api_name: &str, - ) -> Result<(), deno_core::error::AnyError> { + ) -> Result<(), PermissionCheckError> { deno_permissions::PermissionsContainer::check_net_url(self, url, api_name) } @@ -370,7 +371,7 @@ impl FetchPermissions for deno_permissions::PermissionsContainer { &mut self, path: &'a Path, api_name: &str, - ) -> Result, deno_core::error::AnyError> { + ) -> Result, PermissionCheckError> { deno_permissions::PermissionsContainer::check_read_path( self, path, @@ -414,9 +415,7 @@ where "file" => { let path = url.to_file_path().map_err(|_| FetchError::NetworkError)?; let permissions = state.borrow_mut::(); - let path = permissions - .check_read(&path, "fetch()") - .map_err(FetchError::Permission)?; + let path = permissions.check_read(&path, "fetch()")?; let url = match path { Cow::Owned(path) => Url::from_file_path(path).unwrap(), Cow::Borrowed(_) => url, @@ -442,9 +441,7 @@ where } "http" | "https" => { let permissions = state.borrow_mut::(); - permissions - .check_net_url(&url, "fetch()") - .map_err(FetchError::Resource)?; + permissions.check_net_url(&url, "fetch()")?; let maybe_authority = extract_authority(&mut url); let uri = url @@ -863,9 +860,7 @@ where if let Some(proxy) = args.proxy.clone() { let permissions = state.borrow_mut::(); let url = Url::parse(&proxy.url)?; - permissions - .check_net_url(&url, "Deno.createHttpClient()") - .map_err(FetchError::Permission)?; + permissions.check_net_url(&url, "Deno.createHttpClient()")?; } let options = state.borrow::(); -- cgit v1.2.3 From ef7432c03f83ad9e9ca2812d0ab5653e87fa5259 Mon Sep 17 00:00:00 2001 From: denobot <33910674+denobot@users.noreply.github.com> Date: Tue, 5 Nov 2024 20:27:14 -0500 Subject: chore: forward v2.0.5 release commit to main (#26755) This is the release commit being forwarded back to main for 2.0.5 Co-authored-by: bartlomieju --- ext/fetch/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'ext/fetch') diff --git a/ext/fetch/Cargo.toml b/ext/fetch/Cargo.toml index 93fc88ae6..0b15d05b8 100644 --- a/ext/fetch/Cargo.toml +++ b/ext/fetch/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fetch" -version = "0.199.0" +version = "0.200.0" authors.workspace = true edition.workspace = true license.workspace = true -- cgit v1.2.3 From b9262130fec34137e38c922015c6b671c0fa9396 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Thu, 7 Nov 2024 17:12:13 +0530 Subject: feat(ext/http): abort signal when request is cancelled (#26761) Closes https://github.com/denoland/deno/issues/21653 --- ext/fetch/23_request.js | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) (limited to 'ext/fetch') diff --git a/ext/fetch/23_request.js b/ext/fetch/23_request.js index 6211e927d..22c17d6d2 100644 --- a/ext/fetch/23_request.js +++ b/ext/fetch/23_request.js @@ -269,19 +269,25 @@ class Request { /** @type {AbortSignal} */ get [_signal]() { const signal = this[_signalCache]; - // This signal not been created yet, and the request is still in progress - if (signal === undefined) { + // This signal has not been created yet, but the 
request has already completed + if (signal === false) { const signal = newSignal(); this[_signalCache] = signal; + signal[signalAbort](signalAbortError); return signal; } - // This signal has not been created yet, but the request has already completed - if (signal === false) { + + // This signal not been created yet, and the request is still in progress + if (signal === undefined) { const signal = newSignal(); this[_signalCache] = signal; - signal[signalAbort](signalAbortError); return signal; } + + if (!signal.aborted && this[_request].isCancelled) { + signal[signalAbort](signalAbortError); + } + return signal; } get [_mimeType]() { -- cgit v1.2.3 From b482a50299ae4f636a186038460e54af65e2b627 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Fri, 8 Nov 2024 18:46:11 +0530 Subject: feat(ext/http): abort event when request is cancelled (#26781) ```js Deno.serve(async (req) => { const { promise, resolve } = Promise.withResolvers(); req.signal.addEventListener("abort", () => { resolve(); }); await promise; return new Response("Ok"); }); ``` --- ext/fetch/23_request.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) (limited to 'ext/fetch') diff --git a/ext/fetch/23_request.js b/ext/fetch/23_request.js index 22c17d6d2..61cac22d2 100644 --- a/ext/fetch/23_request.js +++ b/ext/fetch/23_request.js @@ -281,11 +281,11 @@ class Request { if (signal === undefined) { const signal = newSignal(); this[_signalCache] = signal; - return signal; - } + this[_request].onCancel?.(() => { + signal[signalAbort](signalAbortError); + }); - if (!signal.aborted && this[_request].isCancelled) { - signal[signalAbort](signalAbortError); + return signal; } return signal; -- cgit v1.2.3 From e1b40a69c0241a9be7249b64118eae8741e24268 Mon Sep 17 00:00:00 2001 From: denobot <33910674+denobot@users.noreply.github.com> Date: Sun, 10 Nov 2024 02:42:18 -0500 Subject: chore: forward v2.0.6 release commit to main (#26804) This is the release commit being forwarded back to main for 2.0.6 Signed-off-by: Divy Srivastava Co-authored-by: Divy Srivastava --- ext/fetch/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'ext/fetch') diff --git a/ext/fetch/Cargo.toml b/ext/fetch/Cargo.toml index 0b15d05b8..56d416bbb 100644 --- a/ext/fetch/Cargo.toml +++ b/ext/fetch/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fetch" -version = "0.200.0" +version = "0.201.0" authors.workspace = true edition.workspace = true license.workspace = true -- cgit v1.2.3 From 032ae7fb19bd01c1de28515facd5c3b2ce821924 Mon Sep 17 00:00:00 2001 From: Sahand Akbarzadeh Date: Fri, 15 Nov 2024 14:14:11 +0330 Subject: feat(ext/fetch): allow embedders to use `hickory_dns_resolver` instead of default `GaiResolver` (#26740) Allows embedders to use `hickory-dns-resolver` instead of threaded "getaddrinfo" resolver in the `fetch()` implementation. 
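A minimal sketch of how an embedder might opt into this, assuming the `dns::Resolver` API introduced in the diff below and crate defaults for everything else (this is illustrative, not part of the patch):

```rust
// Sketch: build deno_fetch::Options with the userspace hickory resolver.
// Assumes the dns::Resolver type added in this patch; all other fields
// use the crate's Default implementation.
fn fetch_options() -> deno_fetch::Options {
    let resolver = deno_fetch::dns::Resolver::hickory()
        // If reading the system resolver configuration fails, fall back to
        // the default getaddrinfo-based resolver.
        .unwrap_or_default();
    deno_fetch::Options {
        resolver,
        ..Default::default()
    }
}
```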
--- ext/fetch/Cargo.toml | 1 + ext/fetch/dns.rs | 116 +++++++++++++++++++++++++++++++++++++++++++++++++++ ext/fetch/lib.rs | 20 ++++++++- ext/fetch/tests.rs | 79 ++++++++++++++++++++++++++++++++--- 4 files changed, 209 insertions(+), 7 deletions(-) create mode 100644 ext/fetch/dns.rs (limited to 'ext/fetch') diff --git a/ext/fetch/Cargo.toml b/ext/fetch/Cargo.toml index 56d416bbb..00c85f2aa 100644 --- a/ext/fetch/Cargo.toml +++ b/ext/fetch/Cargo.toml @@ -22,6 +22,7 @@ deno_permissions.workspace = true deno_tls.workspace = true dyn-clone = "1" error_reporter = "1" +hickory-resolver.workspace = true http.workspace = true http-body-util.workspace = true hyper.workspace = true diff --git a/ext/fetch/dns.rs b/ext/fetch/dns.rs new file mode 100644 index 000000000..9e21a4c34 --- /dev/null +++ b/ext/fetch/dns.rs @@ -0,0 +1,116 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. +use std::future::Future; +use std::io; +use std::net::SocketAddr; +use std::pin::Pin; +use std::task::Poll; +use std::task::{self}; +use std::vec; + +use hickory_resolver::error::ResolveError; +use hickory_resolver::name_server::GenericConnector; +use hickory_resolver::name_server::TokioRuntimeProvider; +use hickory_resolver::AsyncResolver; +use hyper_util::client::legacy::connect::dns::GaiResolver; +use hyper_util::client::legacy::connect::dns::Name; +use tokio::task::JoinHandle; +use tower::Service; + +#[derive(Clone, Debug)] +pub enum Resolver { + /// A resolver using blocking `getaddrinfo` calls in a threadpool. + Gai(GaiResolver), + /// hickory-resolver's userspace resolver. + Hickory(AsyncResolver>), +} + +impl Default for Resolver { + fn default() -> Self { + Self::gai() + } +} + +impl Resolver { + pub fn gai() -> Self { + Self::Gai(GaiResolver::new()) + } + + /// Create a [`AsyncResolver`] from system conf. 
+ pub fn hickory() -> Result { + Ok(Self::Hickory( + hickory_resolver::AsyncResolver::tokio_from_system_conf()?, + )) + } + + pub fn hickory_from_async_resolver( + resolver: AsyncResolver>, + ) -> Self { + Self::Hickory(resolver) + } +} + +type SocketAddrs = vec::IntoIter; + +pub struct ResolveFut { + inner: JoinHandle>, +} + +impl Future for ResolveFut { + type Output = Result; + + fn poll( + mut self: Pin<&mut Self>, + cx: &mut task::Context<'_>, + ) -> Poll { + Pin::new(&mut self.inner).poll(cx).map(|res| match res { + Ok(Ok(addrs)) => Ok(addrs), + Ok(Err(e)) => Err(e), + Err(join_err) => { + if join_err.is_cancelled() { + Err(io::Error::new(io::ErrorKind::Interrupted, join_err)) + } else { + Err(io::Error::new(io::ErrorKind::Other, join_err)) + } + } + }) + } +} + +impl Service for Resolver { + type Response = SocketAddrs; + type Error = io::Error; + type Future = ResolveFut; + + fn poll_ready( + &mut self, + _cx: &mut task::Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + + fn call(&mut self, name: Name) -> Self::Future { + let task = match self { + Resolver::Gai(gai_resolver) => { + let mut resolver = gai_resolver.clone(); + tokio::spawn(async move { + let result = resolver.call(name).await?; + let x: Vec<_> = result.into_iter().collect(); + let iter: SocketAddrs = x.into_iter(); + Ok(iter) + }) + } + Resolver::Hickory(async_resolver) => { + let resolver = async_resolver.clone(); + tokio::spawn(async move { + let result = resolver.lookup_ip(name.as_str()).await?; + + let x: Vec<_> = + result.into_iter().map(|x| SocketAddr::new(x, 0)).collect(); + let iter: SocketAddrs = x.into_iter(); + Ok(iter) + }) + } + }; + ResolveFut { inner: task } + } +} diff --git a/ext/fetch/lib.rs b/ext/fetch/lib.rs index 7ef26431c..5949f9f75 100644 --- a/ext/fetch/lib.rs +++ b/ext/fetch/lib.rs @@ -1,5 +1,6 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. +pub mod dns; mod fs_fetch_handler; mod proxy; #[cfg(test)] @@ -91,6 +92,7 @@ pub struct Options { pub unsafely_ignore_certificate_errors: Option>, pub client_cert_chain_and_key: TlsKeys, pub file_fetch_handler: Rc, + pub resolver: dns::Resolver, } impl Options { @@ -114,6 +116,7 @@ impl Default for Options { unsafely_ignore_certificate_errors: None, client_cert_chain_and_key: TlsKeys::Null, file_fetch_handler: Rc::new(DefaultFileFetchHandler), + resolver: dns::Resolver::default(), } } } @@ -255,6 +258,7 @@ pub fn create_client_from_options( .map_err(HttpClientCreateError::RootCertStore)?, ca_certs: vec![], proxy: options.proxy.clone(), + dns_resolver: options.resolver.clone(), unsafely_ignore_certificate_errors: options .unsafely_ignore_certificate_errors .clone(), @@ -835,6 +839,8 @@ pub struct CreateHttpClientArgs { proxy: Option, pool_max_idle_per_host: Option, pool_idle_timeout: Option, + #[serde(default)] + use_hickory_resolver: bool, #[serde(default = "default_true")] http1: bool, #[serde(default = "default_true")] @@ -878,6 +884,13 @@ where .map_err(HttpClientCreateError::RootCertStore)?, ca_certs, proxy: args.proxy, + dns_resolver: if args.use_hickory_resolver { + dns::Resolver::hickory() + .map_err(deno_core::error::AnyError::new) + .map_err(FetchError::Resource)? 
+ } else { + dns::Resolver::default() + }, unsafely_ignore_certificate_errors: options .unsafely_ignore_certificate_errors .clone(), @@ -909,6 +922,7 @@ pub struct CreateHttpClientOptions { pub root_cert_store: Option, pub ca_certs: Vec>, pub proxy: Option, + pub dns_resolver: dns::Resolver, pub unsafely_ignore_certificate_errors: Option>, pub client_cert_chain_and_key: Option, pub pool_max_idle_per_host: Option, @@ -923,6 +937,7 @@ impl Default for CreateHttpClientOptions { root_cert_store: None, ca_certs: vec![], proxy: None, + dns_resolver: dns::Resolver::default(), unsafely_ignore_certificate_errors: None, client_cert_chain_and_key: None, pool_max_idle_per_host: None, @@ -976,7 +991,8 @@ pub fn create_http_client( tls_config.alpn_protocols = alpn_protocols; let tls_config = Arc::from(tls_config); - let mut http_connector = HttpConnector::new(); + let mut http_connector = + HttpConnector::new_with_resolver(options.dns_resolver.clone()); http_connector.enforce_http(false); let user_agent = user_agent.parse::().map_err(|_| { @@ -1051,7 +1067,7 @@ pub struct Client { user_agent: HeaderValue, } -type Connector = proxy::ProxyConnector; +type Connector = proxy::ProxyConnector>; // clippy is wrong here #[allow(clippy::declare_interior_mutable_const)] diff --git a/ext/fetch/tests.rs b/ext/fetch/tests.rs index dad1b34a9..5cd1a35a5 100644 --- a/ext/fetch/tests.rs +++ b/ext/fetch/tests.rs @@ -1,6 +1,8 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. use std::net::SocketAddr; +use std::sync::atomic::AtomicUsize; +use std::sync::atomic::Ordering::SeqCst; use std::sync::Arc; use bytes::Bytes; @@ -10,6 +12,8 @@ use http_body_util::BodyExt; use tokio::io::AsyncReadExt; use tokio::io::AsyncWriteExt; +use crate::dns; + use super::create_http_client; use super::CreateHttpClientOptions; @@ -17,6 +21,53 @@ static EXAMPLE_CRT: &[u8] = include_bytes!("../tls/testdata/example1_cert.der"); static EXAMPLE_KEY: &[u8] = include_bytes!("../tls/testdata/example1_prikey.der"); +#[test] +fn test_userspace_resolver() { + let thread_counter = Arc::new(AtomicUsize::new(0)); + + let thread_counter_ref = thread_counter.clone(); + let rt = tokio::runtime::Builder::new_current_thread() + .enable_all() + .on_thread_start(move || { + thread_counter_ref.fetch_add(1, SeqCst); + }) + .build() + .unwrap(); + + rt.block_on(async move { + assert_eq!(thread_counter.load(SeqCst), 0); + let src_addr = create_https_server(true).await; + assert_eq!(src_addr.ip().to_string(), "127.0.0.1"); + // use `localhost` to ensure dns step happens. 
+ let addr = format!("localhost:{}", src_addr.port()); + + let hickory = hickory_resolver::AsyncResolver::tokio( + Default::default(), + Default::default(), + ); + + assert_eq!(thread_counter.load(SeqCst), 0); + rust_test_client_with_resolver( + None, + addr.clone(), + "https", + http::Version::HTTP_2, + dns::Resolver::hickory_from_async_resolver(hickory), + ) + .await; + assert_eq!(thread_counter.load(SeqCst), 0, "userspace resolver shouldn't spawn new threads."); + rust_test_client_with_resolver( + None, + addr.clone(), + "https", + http::Version::HTTP_2, + dns::Resolver::gai(), + ) + .await; + assert_eq!(thread_counter.load(SeqCst), 1, "getaddrinfo is called inside spawn_blocking, so tokio spawn a new worker thread for it."); + }); +} + #[tokio::test] async fn test_https_proxy_http11() { let src_addr = create_https_server(false).await; @@ -52,25 +103,27 @@ async fn test_socks_proxy_h2() { run_test_client(prx_addr, src_addr, "socks5", http::Version::HTTP_2).await; } -async fn run_test_client( - prx_addr: SocketAddr, - src_addr: SocketAddr, +async fn rust_test_client_with_resolver( + prx_addr: Option, + src_addr: String, proto: &str, ver: http::Version, + resolver: dns::Resolver, ) { let client = create_http_client( "fetch/test", CreateHttpClientOptions { root_cert_store: None, ca_certs: vec![], - proxy: Some(deno_tls::Proxy { - url: format!("{}://{}", proto, prx_addr), + proxy: prx_addr.map(|p| deno_tls::Proxy { + url: format!("{}://{}", proto, p), basic_auth: None, }), unsafely_ignore_certificate_errors: Some(vec![]), client_cert_chain_and_key: None, pool_max_idle_per_host: None, pool_idle_timeout: None, + dns_resolver: resolver, http1: true, http2: true, }, @@ -92,6 +145,22 @@ async fn run_test_client( assert_eq!(hello, "hello from server"); } +async fn run_test_client( + prx_addr: SocketAddr, + src_addr: SocketAddr, + proto: &str, + ver: http::Version, +) { + rust_test_client_with_resolver( + Some(prx_addr), + src_addr.to_string(), + proto, + ver, + Default::default(), + ) + .await +} + async fn create_https_server(allow_h2: bool) -> SocketAddr { let mut tls_config = deno_tls::rustls::server::ServerConfig::builder() .with_no_client_auth() -- cgit v1.2.3 From b8cf2599242a9d85d03b57d3649ccdf8bce1530e Mon Sep 17 00:00:00 2001 From: Luca Casonato Date: Fri, 15 Nov 2024 15:54:28 +0100 Subject: feat(fetch): accept async iterables for body (#26882) Reland of #24623, but with a fix for `String` objects. 
Co-authored-by: crowlkats --- ext/fetch/22_body.js | 14 ++++++++++++++ ext/fetch/lib.deno_fetch.d.ts | 2 ++ 2 files changed, 16 insertions(+) (limited to 'ext/fetch') diff --git a/ext/fetch/22_body.js b/ext/fetch/22_body.js index 61a06b4af..c7e977c0b 100644 --- a/ext/fetch/22_body.js +++ b/ext/fetch/22_body.js @@ -15,6 +15,7 @@ import { core, primordials } from "ext:core/mod.js"; const { isAnyArrayBuffer, isArrayBuffer, + isStringObject, } = core; const { ArrayBufferIsView, @@ -466,6 +467,8 @@ function extractBody(object) { if (object.locked || isReadableStreamDisturbed(object)) { throw new TypeError("ReadableStream is locked or disturbed"); } + } else if (object[webidl.AsyncIterable] === webidl.AsyncIterable) { + stream = ReadableStream.from(object.open()); } if (typeof source === "string") { // WARNING: this deviates from spec (expects length to be set) @@ -483,6 +486,9 @@ function extractBody(object) { return { body, contentType }; } +webidl.converters["async iterable"] = webidl + .createAsyncIterableConverter(webidl.converters.Uint8Array); + webidl.converters["BodyInit_DOMString"] = (V, prefix, context, opts) => { // Union for (ReadableStream or Blob or ArrayBufferView or ArrayBuffer or FormData or URLSearchParams or USVString) if (ObjectPrototypeIsPrototypeOf(ReadableStreamPrototype, V)) { @@ -501,6 +507,14 @@ webidl.converters["BodyInit_DOMString"] = (V, prefix, context, opts) => { if (ArrayBufferIsView(V)) { return webidl.converters["ArrayBufferView"](V, prefix, context, opts); } + if (webidl.isAsyncIterable(V) && !isStringObject(V)) { + return webidl.converters["async iterable"]( + V, + prefix, + context, + opts, + ); + } } // BodyInit conversion is passed to extractBody(), which calls core.encode(). // core.encode() will UTF-8 encode strings with replacement, being equivalent to the USV normalization. diff --git a/ext/fetch/lib.deno_fetch.d.ts b/ext/fetch/lib.deno_fetch.d.ts index d219a3859..8614dec89 100644 --- a/ext/fetch/lib.deno_fetch.d.ts +++ b/ext/fetch/lib.deno_fetch.d.ts @@ -163,6 +163,8 @@ type BodyInit = | FormData | URLSearchParams | ReadableStream + | Iterable + | AsyncIterable | string; /** @category Fetch */ type RequestDestination = -- cgit v1.2.3 From 0e2f6e38e7b93e42099f546ef2c01629964d095a Mon Sep 17 00:00:00 2001 From: Yusuke Tanaka Date: Tue, 19 Nov 2024 10:48:57 +0900 Subject: feat(ext/fetch): Make fetch client parameters configurable (#26909) This commit makes HTTP client parameters used in `fetch` API configurable on the extension initialization via a callback `client_builder_hook` that users can provide. The main motivation behind this change is to allow `deno_fetch` users to tune the HTTP/2 client to suit their needs, although Deno CLI users will not benefit from it as no JavaScript interface is exposed to set these parameters currently. It is up to users whether to provide a hook function. If not provided, the default configuration from hyper crate will be used. 
Ref #26785 --- ext/fetch/lib.rs | 23 +++++++++++++++++++++-- ext/fetch/tests.rs | 1 + 2 files changed, 22 insertions(+), 2 deletions(-) (limited to 'ext/fetch') diff --git a/ext/fetch/lib.rs b/ext/fetch/lib.rs index 5949f9f75..c8e93b9fe 100644 --- a/ext/fetch/lib.rs +++ b/ext/fetch/lib.rs @@ -67,6 +67,7 @@ use http_body_util::BodyExt; use hyper::body::Frame; use hyper_util::client::legacy::connect::HttpConnector; use hyper_util::client::legacy::connect::HttpInfo; +use hyper_util::client::legacy::Builder as HyperClientBuilder; use hyper_util::rt::TokioExecutor; use hyper_util::rt::TokioTimer; use serde::Deserialize; @@ -85,6 +86,16 @@ pub struct Options { pub user_agent: String, pub root_cert_store_provider: Option>, pub proxy: Option, + /// A callback to customize HTTP client configuration. + /// + /// The settings applied with this hook may be overridden by the options + /// provided through `Deno.createHttpClient()` API. For instance, if the hook + /// calls [`hyper_util::client::legacy::Builder::pool_max_idle_per_host`] with + /// a value of 99, and a user calls `Deno.createHttpClient({ poolMaxIdlePerHost: 42 })`, + /// the value that will take effect is 42. + /// + /// For more info on what can be configured, see [`hyper_util::client::legacy::Builder`]. + pub client_builder_hook: Option HyperClientBuilder>, #[allow(clippy::type_complexity)] pub request_builder_hook: Option< fn(&mut http::Request) -> Result<(), deno_core::error::AnyError>, @@ -112,6 +123,7 @@ impl Default for Options { user_agent: "".to_string(), root_cert_store_provider: None, proxy: None, + client_builder_hook: None, request_builder_hook: None, unsafely_ignore_certificate_errors: None, client_cert_chain_and_key: TlsKeys::Null, @@ -271,6 +283,7 @@ pub fn create_client_from_options( pool_idle_timeout: None, http1: true, http2: true, + client_builder_hook: options.client_builder_hook, }, ) } @@ -908,6 +921,7 @@ where ), http1: args.http1, http2: args.http2, + client_builder_hook: options.client_builder_hook, }, )?; @@ -929,6 +943,7 @@ pub struct CreateHttpClientOptions { pub pool_idle_timeout: Option>, pub http1: bool, pub http2: bool, + pub client_builder_hook: Option HyperClientBuilder>, } impl Default for CreateHttpClientOptions { @@ -944,6 +959,7 @@ impl Default for CreateHttpClientOptions { pool_idle_timeout: None, http1: true, http2: true, + client_builder_hook: None, } } } @@ -999,11 +1015,14 @@ pub fn create_http_client( HttpClientCreateError::InvalidUserAgent(user_agent.to_string()) })?; - let mut builder = - hyper_util::client::legacy::Builder::new(TokioExecutor::new()); + let mut builder = HyperClientBuilder::new(TokioExecutor::new()); builder.timer(TokioTimer::new()); builder.pool_timer(TokioTimer::new()); + if let Some(client_builder_hook) = options.client_builder_hook { + builder = client_builder_hook(builder); + } + let mut proxies = proxy::from_env(); if let Some(proxy) = options.proxy { let mut intercept = proxy::Intercept::all(&proxy.url) diff --git a/ext/fetch/tests.rs b/ext/fetch/tests.rs index 5cd1a35a5..e053c6b1c 100644 --- a/ext/fetch/tests.rs +++ b/ext/fetch/tests.rs @@ -126,6 +126,7 @@ async fn rust_test_client_with_resolver( dns_resolver: resolver, http1: true, http2: true, + client_builder_hook: None, }, ) .unwrap(); -- cgit v1.2.3
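For illustration, an embedder might use the `client_builder_hook` introduced in the patch above roughly as follows. This is a minimal sketch: the hook shape (a function taking and returning the hyper-util legacy `Builder`) and the `pool_max_idle_per_host` setting are taken from the diff and its doc comment, while the surrounding setup is an assumption:

```rust
use hyper_util::client::legacy::Builder as HyperClientBuilder;

// Sketch of a hook matching the builder-in, builder-out signature added above.
// Values set here act as embedder defaults; per the doc comment in the patch,
// Deno.createHttpClient() options on the JS side can still override them.
fn tune_client(mut builder: HyperClientBuilder) -> HyperClientBuilder {
    // Example setting (assumed value): cap idle connections kept per host.
    builder.pool_max_idle_per_host(16);
    builder
}

fn fetch_options() -> deno_fetch::Options {
    deno_fetch::Options {
        client_builder_hook: Some(tune_client),
        ..Default::default()
    }
}
```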