From ff6b21abb4dd80a05d168f8f401471de62cdafc8 Mon Sep 17 00:00:00 2001 From: Manuel Raimann Date: Thu, 19 Sep 2024 12:01:16 +0200 Subject: [PATCH 1/4] Add sync_writes_by_key --- README.md | 6 ++--- cached_proc_macro/src/cached.rs | 48 ++++++++++++++++++++++++++++++++- cached_proc_macro/src/lib.rs | 1 + examples/async_std.rs | 2 +- src/lib.rs | 6 ++--- src/proc_macro.rs | 4 +-- tests/cached.rs | 45 ++++++++++++++++++++++++++++++- 7 files changed, 101 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index e2bb7ef..ab00c4a 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ function-cache wrapped in a mutex/rwlock, or externally synchronized in the case By default, the function-cache is **not** locked for the duration of the function's execution, so initial (on an empty cache) concurrent calls of long-running functions with the same arguments will each execute fully and each overwrite the memoized value as they complete. This mirrors the behavior of Python's `functools.lru_cache`. To synchronize the execution and caching -of un-cached arguments, specify `#[cached(sync_writes = true)]` / `#[once(sync_writes = true)]` (not supported by `#[io_cached]`. +of un-cached arguments, specify `#[cached(sync_writes = "default")]` / `#[once(sync_writes = "default")]` (not supported by `#[io_cached]`. - See [`cached::stores` docs](https://docs.rs/cached/latest/cached/stores/index.html) cache stores available. - See [`proc_macro`](https://docs.rs/cached/latest/cached/proc_macro/index.html) for more procedural macro examples. @@ -93,7 +93,7 @@ use cached::proc_macro::once; /// When no (or expired) cache, concurrent calls /// will synchronize (`sync_writes`) so the function /// is only executed once. -#[once(time=10, option = true, sync_writes = true)] +#[once(time=10, option = true, sync_writes = "default")] fn keyed(a: String) -> Option { if a == "a" { Some(a.len()) @@ -112,7 +112,7 @@ use cached::proc_macro::cached; #[cached( result = true, time = 1, - sync_writes = true, + sync_writes = "default", result_fallback = true )] fn doesnt_compile() -> Result { diff --git a/cached_proc_macro/src/cached.rs b/cached_proc_macro/src/cached.rs index fc63555..0dda2f8 100644 --- a/cached_proc_macro/src/cached.rs +++ b/cached_proc_macro/src/cached.rs @@ -29,6 +29,8 @@ struct MacroArgs { #[darling(default)] sync_writes: bool, #[darling(default)] + sync_writes_by_key: bool, + #[darling(default)] with_cached_flag: bool, #[darling(default)] ty: Option, @@ -92,6 +94,13 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { Some(ref name) => Ident::new(name, fn_ident.span()), None => Ident::new(&fn_ident.to_string().to_uppercase(), fn_ident.span()), }; + let cache_ident_key = match args.name { + Some(ref name) => Ident::new(&format!("{}_key", name), fn_ident.span()), + None => Ident::new( + &format!("{}_key", fn_ident.to_string().to_uppercase()), + fn_ident.span(), + ), + }; let (cache_key_ty, key_convert_block) = make_cache_key_type(&args.key, &args.convert, &args.ty, input_tys, &input_names); @@ -194,6 +203,14 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { panic!("the result_fallback and sync_writes attributes are mutually exclusive"); } + if args.result_fallback && args.sync_writes_by_key { + panic!("the result_fallback and sync_writes_by_key attributes are mutually exclusive"); + } + + if args.sync_writes && args.sync_writes_by_key { + panic!("the sync_writes and sync_writes_by_key attributes are mutually exclusive"); + } + let set_cache_and_return = quote! 
{ #set_cache_block result @@ -202,6 +219,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { let no_cache_fn_ident = Ident::new(&format!("{}_no_cache", &fn_ident), fn_ident.span()); let lock; + let lock_key; let function_no_cache; let function_call; let ty; @@ -210,6 +228,16 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { let mut cache = #cache_ident.lock().await; }; + lock_key = quote! { + let mut locks = #cache_ident_key.lock().await; + let lock = locks + .entry(key.clone()) + .or_insert_with(|| std::sync::Arc::new(::cached::async_sync::Mutex::new(#cache_create))) + .clone(); + drop(locks); + let mut cache = lock.lock().await; + }; + function_no_cache = quote! { async fn #no_cache_fn_ident #generics (#inputs) #output #body }; @@ -220,12 +248,20 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { ty = quote! { #visibility static #cache_ident: ::cached::once_cell::sync::Lazy<::cached::async_sync::Mutex<#cache_ty>> = ::cached::once_cell::sync::Lazy::new(|| ::cached::async_sync::Mutex::new(#cache_create)); + #visibility static #cache_ident_key: ::cached::once_cell::sync::Lazy<::cached::async_sync::Mutex>>>> = ::cached::once_cell::sync::Lazy::new(|| ::cached::async_sync::Mutex::new(std::collections::HashMap::new())); }; } else { lock = quote! { let mut cache = #cache_ident.lock().unwrap(); }; + lock_key = quote! { + let mut locks = #cache_ident_key.lock().unwrap(); + let lock = locks.entry(key.clone()).or_insert_with(|| std::sync::Arc::new(std::sync::Mutex::new(#cache_create))).clone(); + drop(locks); + let mut cache = lock.lock().unwrap(); + }; + function_no_cache = quote! { fn #no_cache_fn_ident #generics (#inputs) #output #body }; @@ -236,6 +272,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { ty = quote! { #visibility static #cache_ident: ::cached::once_cell::sync::Lazy> = ::cached::once_cell::sync::Lazy::new(|| std::sync::Mutex::new(#cache_create)); + #visibility static #cache_ident_key: ::cached::once_cell::sync::Lazy>>>> = ::cached::once_cell::sync::Lazy::new(|| std::sync::Mutex::new(std::collections::HashMap::new())); }; } @@ -247,7 +284,16 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { #set_cache_and_return }; - let do_set_return_block = if args.sync_writes { + let do_set_return_block = if args.sync_writes_by_key { + quote! { + #lock_key + if let Some(result) = cache.cache_get(&key) { + #return_cache_block + } + #function_call + #set_cache_and_return + } + } else if args.sync_writes { quote! { #lock if let Some(result) = cache.cache_get(&key) { diff --git a/cached_proc_macro/src/lib.rs b/cached_proc_macro/src/lib.rs index 83d6d39..5985017 100644 --- a/cached_proc_macro/src/lib.rs +++ b/cached_proc_macro/src/lib.rs @@ -14,6 +14,7 @@ use proc_macro::TokenStream; /// - `time`: (optional, u64) specify a cache TTL in seconds, implies the cache type is a `TimedCache` or `TimedSizedCache`. /// - `time_refresh`: (optional, bool) specify whether to refresh the TTL on cache hits. /// - `sync_writes`: (optional, bool) specify whether to synchronize the execution of writing of uncached values. +/// - `sync_writes_by_key`: (optional, bool) specify whether to synchronize the execution of writing of uncached values by key. /// - `ty`: (optional, string type) The cache store type to use. Defaults to `UnboundCache`. When `unbound` is /// specified, defaults to `UnboundCache`. When `size` is specified, defaults to `SizedCache`. 
/// When `time` is specified, defaults to `TimedCached`. diff --git a/examples/async_std.rs b/examples/async_std.rs index a619993..53740e8 100644 --- a/examples/async_std.rs +++ b/examples/async_std.rs @@ -86,7 +86,7 @@ async fn only_cached_once_per_second(s: String) -> Vec { /// _one_ call will be "executed" and all others will be synchronized /// to return the cached result of the one call instead of all /// concurrently un-cached tasks executing and writing concurrently. -#[once(time = 2, sync_writes = true)] +#[once(time = 2, sync_writes = "default")] async fn only_cached_once_per_second_sync_writes(s: String) -> Vec { vec![s] } diff --git a/src/lib.rs b/src/lib.rs index 163f322..67d8f9b 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -13,7 +13,7 @@ function-cache wrapped in a mutex/rwlock, or externally synchronized in the case By default, the function-cache is **not** locked for the duration of the function's execution, so initial (on an empty cache) concurrent calls of long-running functions with the same arguments will each execute fully and each overwrite the memoized value as they complete. This mirrors the behavior of Python's `functools.lru_cache`. To synchronize the execution and caching -of un-cached arguments, specify `#[cached(sync_writes = true)]` / `#[once(sync_writes = true)]` (not supported by `#[io_cached]`. +of un-cached arguments, specify `#[cached(sync_writes = "default")]` / `#[once(sync_writes = "default")]` (not supported by `#[io_cached]`. - See [`cached::stores` docs](https://docs.rs/cached/latest/cached/stores/index.html) cache stores available. - See [`proc_macro`](https://docs.rs/cached/latest/cached/proc_macro/index.html) for more procedural macro examples. @@ -94,7 +94,7 @@ use cached::proc_macro::once; /// When no (or expired) cache, concurrent calls /// will synchronize (`sync_writes`) so the function /// is only executed once. -#[once(time=10, option = true, sync_writes = true)] +#[once(time=10, option = true, sync_writes = "default")] fn keyed(a: String) -> Option { if a == "a" { Some(a.len()) @@ -114,7 +114,7 @@ use cached::proc_macro::cached; #[cached( result = true, time = 1, - sync_writes = true, + sync_writes = "default", result_fallback = true )] fn doesnt_compile() -> Result { diff --git a/src/proc_macro.rs b/src/proc_macro.rs index b61a702..9453d8d 100644 --- a/src/proc_macro.rs +++ b/src/proc_macro.rs @@ -115,7 +115,7 @@ use cached::proc_macro::cached; /// When called concurrently, duplicate argument-calls will be /// synchronized so as to only run once - the remaining concurrent /// calls return a cached value. -#[cached(size=1, option = true, sync_writes = true)] +#[cached(size=1, option = true, sync_writes = "default")] fn keyed(a: String) -> Option { if a == "a" { Some(a.len()) @@ -233,7 +233,7 @@ use cached::proc_macro::once; /// When no (or expired) cache, concurrent calls /// will synchronize (`sync_writes`) so the function /// is only executed once. -#[once(time=10, option = true, sync_writes = true)] +#[once(time=10, option = true, sync_writes = "default")] fn keyed(a: String) -> Option { if a == "a" { Some(a.len()) diff --git a/tests/cached.rs b/tests/cached.rs index 8a161bd..a2a26bc 100644 --- a/tests/cached.rs +++ b/tests/cached.rs @@ -10,7 +10,7 @@ use cached::{ }; use serial_test::serial; use std::thread::{self, sleep}; -use std::time::Duration; +use std::time::{Duration, Instant}; cached! 
{ UNBOUND_FIB; @@ -898,6 +898,49 @@ async fn test_cached_sync_writes_a() { assert_eq!(a, c.await.unwrap()); } +#[cached(time = 2, sync_writes_by_key = true, key = "u32", convert = "{ 1 }")] +fn cached_sync_writes_by_key(s: String) -> Vec { + sleep(Duration::new(1, 0)); + vec![s] +} + +#[test] +fn test_cached_sync_writes_by_key() { + let a = std::thread::spawn(|| cached_sync_writes_by_key("a".to_string())); + let b = std::thread::spawn(|| cached_sync_writes_by_key("b".to_string())); + let c = std::thread::spawn(|| cached_sync_writes_by_key("c".to_string())); + let start = Instant::now(); + let a = a.join().unwrap(); + let b = b.join().unwrap(); + let c = c.join().unwrap(); + assert!(start.elapsed() < Duration::from_secs(2)); +} + +#[cfg(feature = "async")] +#[cached( + time = 5, + sync_writes_by_key = true, + key = "String", + convert = r#"{ format!("{}", s) }"# +)] +async fn cached_sync_writes_by_key_a(s: String) -> Vec { + tokio::time::sleep(Duration::from_secs(1)).await; + vec![s] +} + +#[cfg(feature = "async")] +#[tokio::test] +async fn test_cached_sync_writes_by_key_a() { + let a = tokio::spawn(cached_sync_writes_by_key_a("a".to_string())); + let b = tokio::spawn(cached_sync_writes_by_key_a("b".to_string())); + let c = tokio::spawn(cached_sync_writes_by_key_a("c".to_string())); + let start = Instant::now(); + a.await.unwrap(); + b.await.unwrap(); + c.await.unwrap(); + assert!(start.elapsed() < Duration::from_secs(2)); +} + #[cfg(feature = "async")] #[once(sync_writes = true)] async fn once_sync_writes_a(s: &tokio::sync::Mutex) -> String { From b352cdd3e8fb3d83aeaeb637823ff8b5caa48065 Mon Sep 17 00:00:00 2001 From: Manuel Raimann Date: Thu, 19 Sep 2024 16:16:50 +0200 Subject: [PATCH 2/4] Merge lock_key into lock --- cached_proc_macro/src/cached.rs | 89 ++++++++++++++++----------------- 1 file changed, 43 insertions(+), 46 deletions(-) diff --git a/cached_proc_macro/src/cached.rs b/cached_proc_macro/src/cached.rs index 0dda2f8..21fbe2b 100644 --- a/cached_proc_macro/src/cached.rs +++ b/cached_proc_macro/src/cached.rs @@ -94,13 +94,6 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { Some(ref name) => Ident::new(name, fn_ident.span()), None => Ident::new(&fn_ident.to_string().to_uppercase(), fn_ident.span()), }; - let cache_ident_key = match args.name { - Some(ref name) => Ident::new(&format!("{}_key", name), fn_ident.span()), - None => Ident::new( - &format!("{}_key", fn_ident.to_string().to_uppercase()), - fn_ident.span(), - ), - }; let (cache_key_ty, key_convert_block) = make_cache_key_type(&args.key, &args.convert, &args.ty, input_tys, &input_names); @@ -219,23 +212,24 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { let no_cache_fn_ident = Ident::new(&format!("{}_no_cache", &fn_ident), fn_ident.span()); let lock; - let lock_key; let function_no_cache; let function_call; let ty; if asyncness.is_some() { - lock = quote! { - let mut cache = #cache_ident.lock().await; - }; - - lock_key = quote! { - let mut locks = #cache_ident_key.lock().await; - let lock = locks - .entry(key.clone()) - .or_insert_with(|| std::sync::Arc::new(::cached::async_sync::Mutex::new(#cache_create))) - .clone(); - drop(locks); - let mut cache = lock.lock().await; + lock = if args.sync_writes_by_key { + quote! 
{ + let mut locks = #cache_ident.lock().await; + let lock = locks + .entry(key.clone()) + .or_insert_with(|| std::sync::Arc::new(::cached::async_sync::Mutex::new(#cache_create))) + .clone(); + drop(locks); + let mut cache = lock.lock().await; + } + } else { + quote! { + let mut cache = #cache_ident.lock().await; + } }; function_no_cache = quote! { @@ -246,20 +240,27 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { let result = #no_cache_fn_ident(#(#input_names),*).await; }; - ty = quote! { - #visibility static #cache_ident: ::cached::once_cell::sync::Lazy<::cached::async_sync::Mutex<#cache_ty>> = ::cached::once_cell::sync::Lazy::new(|| ::cached::async_sync::Mutex::new(#cache_create)); - #visibility static #cache_ident_key: ::cached::once_cell::sync::Lazy<::cached::async_sync::Mutex>>>> = ::cached::once_cell::sync::Lazy::new(|| ::cached::async_sync::Mutex::new(std::collections::HashMap::new())); + ty = if args.sync_writes_by_key { + quote! { + #visibility static #cache_ident: ::cached::once_cell::sync::Lazy<::cached::async_sync::Mutex>>>> = ::cached::once_cell::sync::Lazy::new(|| ::cached::async_sync::Mutex::new(std::collections::HashMap::new())); + } + } else { + quote! { + #visibility static #cache_ident: ::cached::once_cell::sync::Lazy<::cached::async_sync::Mutex<#cache_ty>> = ::cached::once_cell::sync::Lazy::new(|| ::cached::async_sync::Mutex::new(#cache_create)); + } }; } else { - lock = quote! { - let mut cache = #cache_ident.lock().unwrap(); - }; - - lock_key = quote! { - let mut locks = #cache_ident_key.lock().unwrap(); - let lock = locks.entry(key.clone()).or_insert_with(|| std::sync::Arc::new(std::sync::Mutex::new(#cache_create))).clone(); - drop(locks); - let mut cache = lock.lock().unwrap(); + lock = if args.sync_writes_by_key { + quote! { + let mut locks = #cache_ident.lock().unwrap(); + let lock = locks.entry(key.clone()).or_insert_with(|| std::sync::Arc::new(std::sync::Mutex::new(#cache_create))).clone(); + drop(locks); + let mut cache = lock.lock().unwrap(); + } + } else { + quote! { + let mut cache = #cache_ident.lock().unwrap(); + } }; function_no_cache = quote! { @@ -270,10 +271,15 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { let result = #no_cache_fn_ident(#(#input_names),*); }; - ty = quote! { - #visibility static #cache_ident: ::cached::once_cell::sync::Lazy> = ::cached::once_cell::sync::Lazy::new(|| std::sync::Mutex::new(#cache_create)); - #visibility static #cache_ident_key: ::cached::once_cell::sync::Lazy>>>> = ::cached::once_cell::sync::Lazy::new(|| std::sync::Mutex::new(std::collections::HashMap::new())); - }; + ty = if args.sync_writes_by_key { + quote! { + #visibility static #cache_ident: ::cached::once_cell::sync::Lazy>>>> = ::cached::once_cell::sync::Lazy::new(|| std::sync::Mutex::new(std::collections::HashMap::new())); + } + } else { + quote! { + #visibility static #cache_ident: ::cached::once_cell::sync::Lazy> = ::cached::once_cell::sync::Lazy::new(|| std::sync::Mutex::new(#cache_create)); + } + } } let prime_do_set_return_block = quote! { @@ -284,16 +290,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { #set_cache_and_return }; - let do_set_return_block = if args.sync_writes_by_key { - quote! { - #lock_key - if let Some(result) = cache.cache_get(&key) { - #return_cache_block - } - #function_call - #set_cache_and_return - } - } else if args.sync_writes { + let do_set_return_block = if args.sync_writes_by_key || args.sync_writes { quote! 
{ #lock if let Some(result) = cache.cache_get(&key) { From f70e20602db8b9dee999418f141cac91e7acd804 Mon Sep 17 00:00:00 2001 From: Manuel Raimann Date: Thu, 19 Sep 2024 16:40:45 +0200 Subject: [PATCH 3/4] Use enum to configure sync write mode --- cached_proc_macro/src/cached.rs | 71 +++++++++++++++------------------ cached_proc_macro/src/once.rs | 20 ++++++---- tests/cached.rs | 12 +++--- 3 files changed, 52 insertions(+), 51 deletions(-) diff --git a/cached_proc_macro/src/cached.rs b/cached_proc_macro/src/cached.rs index 21fbe2b..b4025d5 100644 --- a/cached_proc_macro/src/cached.rs +++ b/cached_proc_macro/src/cached.rs @@ -3,9 +3,18 @@ use darling::ast::NestedMeta; use darling::FromMeta; use proc_macro::TokenStream; use quote::quote; +use std::cmp::PartialEq; use syn::spanned::Spanned; use syn::{parse_macro_input, parse_str, Block, Ident, ItemFn, ReturnType, Type}; +#[derive(Debug, Default, FromMeta, Eq, PartialEq)] +enum SyncWriteMode { + #[default] + Disabled, + Default, + ByKey, +} + #[derive(FromMeta)] struct MacroArgs { #[darling(default)] @@ -27,9 +36,7 @@ struct MacroArgs { #[darling(default)] option: bool, #[darling(default)] - sync_writes: bool, - #[darling(default)] - sync_writes_by_key: bool, + sync_writes: SyncWriteMode, #[darling(default)] with_cached_flag: bool, #[darling(default)] @@ -192,16 +199,8 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { _ => panic!("the result and option attributes are mutually exclusive"), }; - if args.result_fallback && args.sync_writes { - panic!("the result_fallback and sync_writes attributes are mutually exclusive"); - } - - if args.result_fallback && args.sync_writes_by_key { - panic!("the result_fallback and sync_writes_by_key attributes are mutually exclusive"); - } - - if args.sync_writes && args.sync_writes_by_key { - panic!("the sync_writes and sync_writes_by_key attributes are mutually exclusive"); + if args.result_fallback && args.sync_writes != SyncWriteMode::Disabled { + panic!("result_fallback and sync_writes are mutually exclusive"); } let set_cache_and_return = quote! { @@ -216,8 +215,8 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { let function_call; let ty; if asyncness.is_some() { - lock = if args.sync_writes_by_key { - quote! { + lock = match args.sync_writes { + SyncWriteMode::ByKey => quote! { let mut locks = #cache_ident.lock().await; let lock = locks .entry(key.clone()) @@ -225,11 +224,10 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { .clone(); drop(locks); let mut cache = lock.lock().await; - } - } else { - quote! { + }, + _ => quote! { let mut cache = #cache_ident.lock().await; - } + }, }; function_no_cache = quote! { @@ -240,27 +238,25 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { let result = #no_cache_fn_ident(#(#input_names),*).await; }; - ty = if args.sync_writes_by_key { - quote! { + ty = match args.sync_writes { + SyncWriteMode::ByKey => quote! { #visibility static #cache_ident: ::cached::once_cell::sync::Lazy<::cached::async_sync::Mutex>>>> = ::cached::once_cell::sync::Lazy::new(|| ::cached::async_sync::Mutex::new(std::collections::HashMap::new())); - } - } else { - quote! { + }, + _ => quote! { #visibility static #cache_ident: ::cached::once_cell::sync::Lazy<::cached::async_sync::Mutex<#cache_ty>> = ::cached::once_cell::sync::Lazy::new(|| ::cached::async_sync::Mutex::new(#cache_create)); - } + }, }; } else { - lock = if args.sync_writes_by_key { - quote! 
{ + lock = match args.sync_writes { + SyncWriteMode::ByKey => quote! { let mut locks = #cache_ident.lock().unwrap(); let lock = locks.entry(key.clone()).or_insert_with(|| std::sync::Arc::new(std::sync::Mutex::new(#cache_create))).clone(); drop(locks); let mut cache = lock.lock().unwrap(); - } - } else { - quote! { + }, + _ => quote! { let mut cache = #cache_ident.lock().unwrap(); - } + }, }; function_no_cache = quote! { @@ -271,14 +267,13 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { let result = #no_cache_fn_ident(#(#input_names),*); }; - ty = if args.sync_writes_by_key { - quote! { + ty = match args.sync_writes { + SyncWriteMode::ByKey => quote! { #visibility static #cache_ident: ::cached::once_cell::sync::Lazy>>>> = ::cached::once_cell::sync::Lazy::new(|| std::sync::Mutex::new(std::collections::HashMap::new())); - } - } else { - quote! { + }, + _ => quote! { #visibility static #cache_ident: ::cached::once_cell::sync::Lazy> = ::cached::once_cell::sync::Lazy::new(|| std::sync::Mutex::new(#cache_create)); - } + }, } } @@ -290,7 +285,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { #set_cache_and_return }; - let do_set_return_block = if args.sync_writes_by_key || args.sync_writes { + let do_set_return_block = if args.sync_writes != SyncWriteMode::Disabled { quote! { #lock if let Some(result) = cache.cache_get(&key) { diff --git a/cached_proc_macro/src/once.rs b/cached_proc_macro/src/once.rs index 70d5617..fd2ac07 100644 --- a/cached_proc_macro/src/once.rs +++ b/cached_proc_macro/src/once.rs @@ -6,6 +6,13 @@ use quote::quote; use syn::spanned::Spanned; use syn::{parse_macro_input, Ident, ItemFn, ReturnType}; +#[derive(Debug, Default, FromMeta)] +enum SyncWriteMode { + #[default] + Disabled, + Default, +} + #[derive(FromMeta)] struct OnceMacroArgs { #[darling(default)] @@ -13,7 +20,7 @@ struct OnceMacroArgs { #[darling(default)] time: Option, #[darling(default)] - sync_writes: bool, + sync_writes: SyncWriteMode, #[darling(default)] result: bool, #[darling(default)] @@ -220,8 +227,8 @@ pub fn once(args: TokenStream, input: TokenStream) -> TokenStream { } }; - let do_set_return_block = if args.sync_writes { - quote! { + let do_set_return_block = match args.sync_writes { + SyncWriteMode::Default => quote! { #r_lock_return_cache_block #w_lock if let Some(result) = &*cached { @@ -229,14 +236,13 @@ pub fn once(args: TokenStream, input: TokenStream) -> TokenStream { } #function_call #set_cache_and_return - } - } else { - quote! { + }, + SyncWriteMode::Disabled => quote! { #r_lock_return_cache_block #function_call #w_lock #set_cache_and_return - } + }, }; let signature_no_muts = get_mut_signature(signature); diff --git a/tests/cached.rs b/tests/cached.rs index a2a26bc..0d33e04 100644 --- a/tests/cached.rs +++ b/tests/cached.rs @@ -848,7 +848,7 @@ async fn test_only_cached_option_once_per_second_a() { /// to return the cached result of the one call instead of all /// concurrently un-cached tasks executing and writing concurrently. 
#[cfg(feature = "async")] -#[once(time = 2, sync_writes = true)] +#[once(time = 2, sync_writes = "default")] async fn only_cached_once_per_second_sync_writes(s: String) -> Vec { vec![s] } @@ -862,7 +862,7 @@ async fn test_only_cached_once_per_second_sync_writes() { assert_eq!(a.await.unwrap(), b.await.unwrap()); } -#[cached(time = 2, sync_writes = true, key = "u32", convert = "{ 1 }")] +#[cached(time = 2, sync_writes = "default", key = "u32", convert = "{ 1 }")] fn cached_sync_writes(s: String) -> Vec { vec![s] } @@ -881,7 +881,7 @@ fn test_cached_sync_writes() { } #[cfg(feature = "async")] -#[cached(time = 2, sync_writes = true, key = "u32", convert = "{ 1 }")] +#[cached(time = 2, sync_writes = "default", key = "u32", convert = "{ 1 }")] async fn cached_sync_writes_a(s: String) -> Vec { vec![s] } @@ -898,7 +898,7 @@ async fn test_cached_sync_writes_a() { assert_eq!(a, c.await.unwrap()); } -#[cached(time = 2, sync_writes_by_key = true, key = "u32", convert = "{ 1 }")] +#[cached(time = 2, sync_writes = "by_key", key = "u32", convert = "{ 1 }")] fn cached_sync_writes_by_key(s: String) -> Vec { sleep(Duration::new(1, 0)); vec![s] @@ -919,7 +919,7 @@ fn test_cached_sync_writes_by_key() { #[cfg(feature = "async")] #[cached( time = 5, - sync_writes_by_key = true, + sync_writes = "by_key", key = "String", convert = r#"{ format!("{}", s) }"# )] @@ -942,7 +942,7 @@ async fn test_cached_sync_writes_by_key_a() { } #[cfg(feature = "async")] -#[once(sync_writes = true)] +#[once(sync_writes = "default")] async fn once_sync_writes_a(s: &tokio::sync::Mutex) -> String { let mut guard = s.lock().await; let results: String = (*guard).clone().to_string(); From 1e8e0d8b59e933cff2d1ed00eccca185f2f67340 Mon Sep 17 00:00:00 2001 From: Manuel Raimann Date: Thu, 19 Sep 2024 17:04:51 +0200 Subject: [PATCH 4/4] Replace Disabled SyncWriteMode with Option --- cached_proc_macro/src/cached.rs | 15 +++++++-------- cached_proc_macro/src/once.rs | 7 +++---- 2 files changed, 10 insertions(+), 12 deletions(-) diff --git a/cached_proc_macro/src/cached.rs b/cached_proc_macro/src/cached.rs index b4025d5..4eec8b8 100644 --- a/cached_proc_macro/src/cached.rs +++ b/cached_proc_macro/src/cached.rs @@ -10,7 +10,6 @@ use syn::{parse_macro_input, parse_str, Block, Ident, ItemFn, ReturnType, Type}; #[derive(Debug, Default, FromMeta, Eq, PartialEq)] enum SyncWriteMode { #[default] - Disabled, Default, ByKey, } @@ -36,7 +35,7 @@ struct MacroArgs { #[darling(default)] option: bool, #[darling(default)] - sync_writes: SyncWriteMode, + sync_writes: Option, #[darling(default)] with_cached_flag: bool, #[darling(default)] @@ -199,7 +198,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { _ => panic!("the result and option attributes are mutually exclusive"), }; - if args.result_fallback && args.sync_writes != SyncWriteMode::Disabled { + if args.result_fallback && args.sync_writes.is_some() { panic!("result_fallback and sync_writes are mutually exclusive"); } @@ -216,7 +215,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { let ty; if asyncness.is_some() { lock = match args.sync_writes { - SyncWriteMode::ByKey => quote! { + Some(SyncWriteMode::ByKey) => quote! { let mut locks = #cache_ident.lock().await; let lock = locks .entry(key.clone()) @@ -239,7 +238,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { }; ty = match args.sync_writes { - SyncWriteMode::ByKey => quote! { + Some(SyncWriteMode::ByKey) => quote! 
{ #visibility static #cache_ident: ::cached::once_cell::sync::Lazy<::cached::async_sync::Mutex>>>> = ::cached::once_cell::sync::Lazy::new(|| ::cached::async_sync::Mutex::new(std::collections::HashMap::new())); }, _ => quote! { @@ -248,7 +247,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { }; } else { lock = match args.sync_writes { - SyncWriteMode::ByKey => quote! { + Some(SyncWriteMode::ByKey) => quote! { let mut locks = #cache_ident.lock().unwrap(); let lock = locks.entry(key.clone()).or_insert_with(|| std::sync::Arc::new(std::sync::Mutex::new(#cache_create))).clone(); drop(locks); @@ -268,7 +267,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { }; ty = match args.sync_writes { - SyncWriteMode::ByKey => quote! { + Some(SyncWriteMode::ByKey) => quote! { #visibility static #cache_ident: ::cached::once_cell::sync::Lazy>>>> = ::cached::once_cell::sync::Lazy::new(|| std::sync::Mutex::new(std::collections::HashMap::new())); }, _ => quote! { @@ -285,7 +284,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream { #set_cache_and_return }; - let do_set_return_block = if args.sync_writes != SyncWriteMode::Disabled { + let do_set_return_block = if args.sync_writes.is_some() { quote! { #lock if let Some(result) = cache.cache_get(&key) { diff --git a/cached_proc_macro/src/once.rs b/cached_proc_macro/src/once.rs index fd2ac07..907f74b 100644 --- a/cached_proc_macro/src/once.rs +++ b/cached_proc_macro/src/once.rs @@ -9,7 +9,6 @@ use syn::{parse_macro_input, Ident, ItemFn, ReturnType}; #[derive(Debug, Default, FromMeta)] enum SyncWriteMode { #[default] - Disabled, Default, } @@ -20,7 +19,7 @@ struct OnceMacroArgs { #[darling(default)] time: Option, #[darling(default)] - sync_writes: SyncWriteMode, + sync_writes: Option, #[darling(default)] result: bool, #[darling(default)] @@ -228,7 +227,7 @@ pub fn once(args: TokenStream, input: TokenStream) -> TokenStream { }; let do_set_return_block = match args.sync_writes { - SyncWriteMode::Default => quote! { + Some(SyncWriteMode::Default) => quote! { #r_lock_return_cache_block #w_lock if let Some(result) = &*cached { @@ -237,7 +236,7 @@ pub fn once(args: TokenStream, input: TokenStream) -> TokenStream { #function_call #set_cache_and_return }, - SyncWriteMode::Disabled => quote! { + None => quote! { #r_lock_return_cache_block #function_call #w_lock
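// Illustrative usage sketch, not part of this patch: the two `sync_writes` modes
// introduced above, mirroring the attribute forms exercised by the tests in this
// series. Function names below are hypothetical examples.
use cached::proc_macro::cached;
use std::time::Duration;

// "default": concurrent callers of an uncached value share one cache-wide lock,
// so only the first call executes and the rest return its cached result.
#[cached(time = 2, sync_writes = "default")]
fn fetch_shared(s: String) -> Vec<String> {
    std::thread::sleep(Duration::from_secs(1));
    vec![s]
}

// "by_key": callers are synchronized through a HashMap of per-key mutexes,
// so only calls racing on the *same* key block each other; calls with
// different arguments still run concurrently.
#[cached(time = 2, sync_writes = "by_key")]
fn fetch_per_key(s: String) -> Vec<String> {
    std::thread::sleep(Duration::from_secs(1));
    vec![s]
}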