summary | refs | log | tree | commit | diff
path: root/ext/cache
diff options: context | space | mode
authorSatya Rohith <me@satyarohith.com>2022-10-03 10:52:54 +0530
committerGitHub <noreply@github.com>2022-10-03 10:52:54 +0530
commiteacd6a7f295a9a8ce4f4ca38cbf3e9905c4a5d02 (patch)
treec42049ce8a6360c5d72cd1f13795d9f41f8b4dd5 /ext/cache
parente2990be264776d4d17e0fa982f74e1ad54624d0d (diff)
chore(ext/cache): make helper functions public (#16117)
Diffstat (limited to 'ext/cache')
-rw-r--r--ext/cache/Cargo.toml2
-rw-r--r--ext/cache/lib.rs199
-rw-r--r--ext/cache/sqlite.rs87
3 files changed, 179 insertions, 109 deletions
diff --git a/ext/cache/Cargo.toml b/ext/cache/Cargo.toml
index 38e08356d..ea824de46 100644
--- a/ext/cache/Cargo.toml
+++ b/ext/cache/Cargo.toml
@@ -2,7 +2,7 @@
[package]
name = "deno_cache"
-version = "0.2.0"
+version = "0.3.0"
authors = ["the Deno authors"]
edition = "2021"
license = "MIT"
diff --git a/ext/cache/lib.rs b/ext/cache/lib.rs
index 350efbc38..e6fdaa764 100644
--- a/ext/cache/lib.rs
+++ b/ext/cache/lib.rs
@@ -20,6 +20,38 @@ use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
+#[derive(Clone)]
+pub struct CreateCache<C: Cache + 'static>(pub Arc<dyn Fn() -> C>);
+
+pub fn init<CA: Cache + 'static>(
+ maybe_create_cache: Option<CreateCache<CA>>,
+) -> Extension {
+ Extension::builder()
+ .js(include_js_files!(
+ prefix "deno:ext/cache",
+ "01_cache.js",
+ ))
+ .ops(vec![
+ op_cache_storage_open::decl::<CA>(),
+ op_cache_storage_has::decl::<CA>(),
+ op_cache_storage_delete::decl::<CA>(),
+ op_cache_put::decl::<CA>(),
+ op_cache_match::decl::<CA>(),
+ op_cache_delete::decl::<CA>(),
+ ])
+ .state(move |state| {
+ if let Some(create_cache) = maybe_create_cache.clone() {
+ state.put(create_cache);
+ }
+ Ok(())
+ })
+ .build()
+}
+
+pub fn get_declaration() -> PathBuf {
+ PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("lib.deno_cache.d.ts")
+}
+
#[derive(Deserialize, Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct CachePutRequest {
@@ -181,34 +213,151 @@ where
}
}
-#[derive(Clone)]
-pub struct CreateCache<C: Cache + 'static>(pub Arc<dyn Fn() -> C>);
+/// Check if headers, mentioned in the vary header, of query request
+/// and cached request are equal.
+pub fn vary_header_matches(
+ vary_header: &ByteString,
+ query_request_headers: &[(ByteString, ByteString)],
+ cached_request_headers: &[(ByteString, ByteString)],
+) -> bool {
+ let vary_header = match std::str::from_utf8(vary_header) {
+ Ok(vary_header) => vary_header,
+ Err(_) => return false,
+ };
+ let headers = get_headers_from_vary_header(vary_header);
+ for header in headers {
+ let query_header = get_header(&header, query_request_headers);
+ let cached_header = get_header(&header, cached_request_headers);
+ if query_header != cached_header {
+ return false;
+ }
+ }
+ true
+}
-pub fn init<CA: Cache + 'static>(
- maybe_create_cache: Option<CreateCache<CA>>,
-) -> Extension {
- Extension::builder()
- .js(include_js_files!(
- prefix "deno:ext/cache",
- "01_cache.js",
- ))
- .ops(vec![
- op_cache_storage_open::decl::<CA>(),
- op_cache_storage_has::decl::<CA>(),
- op_cache_storage_delete::decl::<CA>(),
- op_cache_put::decl::<CA>(),
- op_cache_match::decl::<CA>(),
- op_cache_delete::decl::<CA>(),
- ])
- .state(move |state| {
- if let Some(create_cache) = maybe_create_cache.clone() {
- state.put(create_cache);
+#[test]
+fn test_vary_header_matches() {
+ let vary_header = ByteString::from("accept-encoding");
+ let query_request_headers = vec![(
+ ByteString::from("accept-encoding"),
+ ByteString::from("gzip"),
+ )];
+ let cached_request_headers = vec![(
+ ByteString::from("accept-encoding"),
+ ByteString::from("gzip"),
+ )];
+ assert!(vary_header_matches(
+ &vary_header,
+ &query_request_headers,
+ &cached_request_headers
+ ));
+ let vary_header = ByteString::from("accept-encoding");
+ let query_request_headers = vec![(
+ ByteString::from("accept-encoding"),
+ ByteString::from("gzip"),
+ )];
+ let cached_request_headers =
+ vec![(ByteString::from("accept-encoding"), ByteString::from("br"))];
+ assert!(!vary_header_matches(
+ &vary_header,
+ &query_request_headers,
+ &cached_request_headers
+ ));
+}
+
+/// Get headers from the vary header.
+pub fn get_headers_from_vary_header(vary_header: &str) -> Vec<String> {
+ vary_header
+ .split(',')
+ .map(|s| s.trim().to_lowercase())
+ .collect()
+}
+
+#[test]
+fn test_get_headers_from_vary_header() {
+ let headers = get_headers_from_vary_header("accept-encoding");
+ assert_eq!(headers, vec!["accept-encoding"]);
+ let headers = get_headers_from_vary_header("accept-encoding, user-agent");
+ assert_eq!(headers, vec!["accept-encoding", "user-agent"]);
+}
+
+/// Get value for the header with the given name.
+pub fn get_header(
+ name: &str,
+ headers: &[(ByteString, ByteString)],
+) -> Option<ByteString> {
+ headers
+ .iter()
+ .find(|(k, _)| {
+ if let Ok(k) = std::str::from_utf8(k) {
+ k.eq_ignore_ascii_case(name)
+ } else {
+ false
}
- Ok(())
})
- .build()
+ .map(|(_, v)| v.to_owned())
}
-pub fn get_declaration() -> PathBuf {
- PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("lib.deno_cache.d.ts")
+#[test]
+fn test_get_header() {
+ let headers = vec![
+ (
+ ByteString::from("accept-encoding"),
+ ByteString::from("gzip"),
+ ),
+ (
+ ByteString::from("content-type"),
+ ByteString::from("application/json"),
+ ),
+ (
+ ByteString::from("vary"),
+ ByteString::from("accept-encoding"),
+ ),
+ ];
+ let value = get_header("accept-encoding", &headers);
+ assert_eq!(value, Some(ByteString::from("gzip")));
+ let value = get_header("content-type", &headers);
+ assert_eq!(value, Some(ByteString::from("application/json")));
+ let value = get_header("vary", &headers);
+ assert_eq!(value, Some(ByteString::from("accept-encoding")));
+}
+
+/// Serialize headers into bytes.
+pub fn serialize_headers(headers: &[(ByteString, ByteString)]) -> Vec<u8> {
+ let mut serialized_headers = Vec::new();
+ for (name, value) in headers {
+ serialized_headers.extend_from_slice(name);
+ serialized_headers.extend_from_slice(b"\r\n");
+ serialized_headers.extend_from_slice(value);
+ serialized_headers.extend_from_slice(b"\r\n");
+ }
+ serialized_headers
+}
+
+/// Deserialize bytes into headers.
+pub fn deserialize_headers(
+ serialized_headers: &[u8],
+) -> Vec<(ByteString, ByteString)> {
+ let mut headers = Vec::new();
+ let mut piece = None;
+ let mut start = 0;
+ for (i, byte) in serialized_headers.iter().enumerate() {
+ if byte == &b'\r' && serialized_headers.get(i + 1) == Some(&b'\n') {
+ if piece.is_none() {
+ piece = Some(start..i);
+ } else {
+ let name = piece.unwrap();
+ let value = start..i;
+ headers.push((
+ ByteString::from(&serialized_headers[name]),
+ ByteString::from(&serialized_headers[value]),
+ ));
+ piece = None;
+ }
+ start = i + 2;
+ }
+ }
+ assert!(piece.is_none());
+ assert_eq!(start, serialized_headers.len());
+ headers
}
diff --git a/ext/cache/sqlite.rs b/ext/cache/sqlite.rs
index 1e5591839..7e97fb563 100644
--- a/ext/cache/sqlite.rs
+++ b/ext/cache/sqlite.rs
@@ -21,6 +21,10 @@ use std::sync::Arc;
use std::time::SystemTime;
use std::time::UNIX_EPOCH;
+use crate::deserialize_headers;
+use crate::get_header;
+use crate::serialize_headers;
+use crate::vary_header_matches;
use crate::Cache;
use crate::CacheDeleteRequest;
use crate::CacheMatchRequest;
@@ -328,51 +332,6 @@ fn get_responses_dir(cache_storage_dir: PathBuf, cache_id: i64) -> PathBuf {
.join("responses")
}
-/// Check if the headers provided in the vary_header match
-/// the query request headers and the cached request headers.
-fn vary_header_matches(
- vary_header: &ByteString,
- query_request_headers: &[(ByteString, ByteString)],
- cached_request_headers: &[(ByteString, ByteString)],
-) -> bool {
- let vary_header = match std::str::from_utf8(vary_header) {
- Ok(vary_header) => vary_header,
- Err(_) => return false,
- };
- let headers = get_headers_from_vary_header(vary_header);
- for header in headers {
- let query_header = get_header(&header, query_request_headers);
- let cached_header = get_header(&header, cached_request_headers);
- if query_header != cached_header {
- return false;
- }
- }
- true
-}
-
-fn get_headers_from_vary_header(vary_header: &str) -> Vec<String> {
- vary_header
- .split(',')
- .map(|s| s.trim().to_lowercase())
- .collect()
-}
-
-fn get_header(
- name: &str,
- headers: &[(ByteString, ByteString)],
-) -> Option<ByteString> {
- headers
- .iter()
- .find(|(k, _)| {
- if let Ok(k) = std::str::from_utf8(k) {
- k.eq_ignore_ascii_case(name)
- } else {
- false
- }
- })
- .map(|(_, v)| v.to_owned())
-}
-
impl deno_core::Resource for SqliteBackedCache {
fn name(&self) -> std::borrow::Cow<str> {
"SqliteBackedCache".into()
@@ -463,41 +422,3 @@ pub fn hash(token: &str) -> String {
use sha2::Digest;
format!("{:x}", sha2::Sha256::digest(token.as_bytes()))
}
-
-fn serialize_headers(headers: &[(ByteString, ByteString)]) -> Vec<u8> {
- let mut serialized_headers = Vec::new();
- for (name, value) in headers {
- serialized_headers.extend_from_slice(name);
- serialized_headers.extend_from_slice(b"\r\n");
- serialized_headers.extend_from_slice(value);
- serialized_headers.extend_from_slice(b"\r\n");
- }
- serialized_headers
-}
-
-fn deserialize_headers(
- serialized_headers: &[u8],
-) -> Vec<(ByteString, ByteString)> {
- let mut headers = Vec::new();
- let mut piece = None;
- let mut start = 0;
- for (i, byte) in serialized_headers.iter().enumerate() {
- if byte == &b'\r' && serialized_headers.get(i + 1) == Some(&b'\n') {
- if piece.is_none() {
- piece = Some(start..i);
- } else {
- let name = piece.unwrap();
- let value = start..i;
- headers.push((
- ByteString::from(&serialized_headers[name]),
- ByteString::from(&serialized_headers[value]),
- ));
- piece = None;
- }
- start = i + 2;
- }
- }
- assert!(piece.is_none());
- assert_eq!(start, serialized_headers.len());
- headers
-}