diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 3d7d291..a9c7a94 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -23,7 +23,7 @@ jobs:
           target: wasm32-unknown-unknown
 
       - name: Cache cargo dir
-        uses: actions/cache@v2
+        uses: actions/cache@v4
         env:
           cache-name: cache-cargo-dir
         with:
@@ -35,7 +35,7 @@ jobs:
             ${{ runner.os }}-
 
       - name: Cache target dir
-        uses: actions/cache@v2
+        uses: actions/cache@v4
         env:
           cache-name: cache-target-dir
         with:
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6cc9efa..3cc772c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,7 @@
 ## [Unreleased]
 ## Added
 ## Changed
+- Refactor how attributes are parsed
 ## Removed
 
 ## [0.55.1 / [cached_proc_macro[0.24.0]]]
diff --git a/cached_proc_macro/Cargo.toml b/cached_proc_macro/Cargo.toml
index a5a518b..bc33b4e 100644
--- a/cached_proc_macro/Cargo.toml
+++ b/cached_proc_macro/Cargo.toml
@@ -18,6 +18,7 @@ proc-macro = true
 
 [dependencies]
 quote = "1.0.6"
-darling = "0.20.8"
 proc-macro2 = "1.0.49"
 syn = "2.0.52"
+strum-lite = "0.1.1"
+attrs = "0.2.5"
diff --git a/cached_proc_macro/src/cached.rs b/cached_proc_macro/src/cached.rs
index 4eec8b8..d739371 100644
--- a/cached_proc_macro/src/cached.rs
+++ b/cached_proc_macro/src/cached.rs
@@ -1,78 +1,80 @@
 use crate::helpers::*;
-use darling::ast::NestedMeta;
-use darling::FromMeta;
+use attrs::*;
 use proc_macro::TokenStream;
 use quote::quote;
 use std::cmp::PartialEq;
-use syn::spanned::Spanned;
-use syn::{parse_macro_input, parse_str, Block, Ident, ItemFn, ReturnType, Type};
-
-#[derive(Debug, Default, FromMeta, Eq, PartialEq)]
-enum SyncWriteMode {
-    #[default]
-    Default,
-    ByKey,
-}
-
-#[derive(FromMeta)]
-struct MacroArgs {
-    #[darling(default)]
-    name: Option<String>,
-    #[darling(default)]
-    unbound: bool,
-    #[darling(default)]
-    size: Option<usize>,
-    #[darling(default)]
-    time: Option<u64>,
-    #[darling(default)]
-    time_refresh: bool,
-    #[darling(default)]
-    key: Option<String>,
-    #[darling(default)]
-    convert: Option<String>,
-    #[darling(default)]
-    result: bool,
-    #[darling(default)]
-    option: bool,
-    #[darling(default)]
-    sync_writes: Option<SyncWriteMode>,
-    #[darling(default)]
-    with_cached_flag: bool,
-    #[darling(default)]
-    ty: Option<String>,
-    #[darling(default)]
-    create: Option<String>,
-    #[darling(default)]
-    result_fallback: bool,
+use syn::{parse::Parser as _, spanned::Spanned as _};
+use syn::{parse_macro_input, parse_str, Block, Ident, ItemFn, ReturnType, Signature, Token, Type};
+
+strum_lite::strum! {
+    #[derive(Debug, Default, Eq, PartialEq)]
+    enum SyncWriteMode {
+        #[default]
+        Default = "default",
+        ByKey = "by_key",
+    }
 }
 
 pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
-    let attr_args = match NestedMeta::parse_meta_list(args.into()) {
-        Ok(v) => v,
-        Err(e) => {
-            return TokenStream::from(darling::Error::from(e).write_errors());
-        }
-    };
-    let args = match MacroArgs::from_list(&attr_args) {
-        Ok(v) => v,
-        Err(e) => {
-            return TokenStream::from(e.write_errors());
-        }
-    };
-    let input = parse_macro_input!(input as ItemFn);
+    let mut time = None::<u64>;
+    let mut size = None::<usize>;
+    let mut name = None::<String>;
+    let mut key = None::<String>;
+    let mut convert = None::<String>;
+    let mut ty = None::<String>;
+    let mut create = None::<String>;
+    let mut sync_writes = None::<SyncWriteMode>;
+    let mut unbound = false;
+    let mut time_refresh = false;
+    let mut result = false;
+    let mut option = false;
+    let mut with_cached_flag = false;
+    let mut result_fallback = false;
+
+    match Attrs::new()
+        .once("time", with::eq(set::lit(&mut time)))
+        .once("size", with::eq(set::lit(&mut size)))
+        .once("name", with::eq(set::from_str(&mut name)))
+        .once("key", with::eq(set::from_str(&mut key)))
+        .once("convert", with::eq(set::from_str(&mut convert)))
+        .once("ty", with::eq(set::from_str(&mut ty)))
+        .once("create", with::eq(set::from_str(&mut create)))
+        .once("sync_writes", |input| match input.peek(Token![=]) {
+            true => with::eq(set::from_str(&mut sync_writes))(input),
+            false => {
+                sync_writes = Some(SyncWriteMode::Default);
+                Ok(())
+            }
+        })
+        .once("unbound", with::eq(on::lit(&mut unbound)))
+        .once("time_refresh", with::eq(on::lit(&mut time_refresh)))
+        .once("result", with::eq(on::lit(&mut result)))
+        .once("option", with::eq(on::lit(&mut option)))
+        .once("with_cached_flag", with::eq(on::lit(&mut with_cached_flag)))
+        .once("result_fallback", with::eq(on::lit(&mut result_fallback)))
+        .parse(args)
+    {
+        Ok(()) => {}
+        Err(e) => return e.into_compile_error().into(),
+    }
 
-    // pull out the parts of the input
-    let mut attributes = input.attrs;
-    let visibility = input.vis;
-    let signature = input.sig;
-    let body = input.block;
+    let ItemFn {
+        attrs: mut attributes,
+        vis: visibility,
+        sig,
+        block: body,
+    } = parse_macro_input!(input as _);
 
-    // pull out the parts of the function signature
-    let fn_ident = signature.ident.clone();
-    let inputs = signature.inputs.clone();
-    let output = signature.output.clone();
-    let asyncness = signature.asyncness;
-    let generics = signature.generics.clone();
+    let signature_no_muts = get_mut_signature(sig.clone());
+
+    let Signature {
+        ident: fn_ident,
+        inputs,
+        output,
+        asyncness,
+        generics,
+        ..
+    } = sig;
 
     let input_tys = get_input_types(&inputs);
     let input_names = get_input_names(&inputs);
@@ -89,30 +91,23 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
     let output_string = output_parts.join("::");
     let output_type_display = output_ts.to_string().replace(' ', "");
 
-    if check_with_cache_flag(args.with_cached_flag, output_string) {
+    if check_with_cache_flag(with_cached_flag, output_string) {
         return with_cache_flag_error(output_span, output_type_display);
     }
 
-    let cache_value_ty = find_value_type(args.result, args.option, &output, output_ty);
+    let cache_value_ty = find_value_type(result, option, &output, output_ty);
 
     // make the cache identifier
-    let cache_ident = match args.name {
+    let cache_ident = match name {
         Some(ref name) => Ident::new(name, fn_ident.span()),
         None => Ident::new(&fn_ident.to_string().to_uppercase(), fn_ident.span()),
     };
 
     let (cache_key_ty, key_convert_block) =
-        make_cache_key_type(&args.key, &args.convert, &args.ty, input_tys, &input_names);
+        make_cache_key_type(&key, &convert, &ty, input_tys, &input_names);
 
     // make the cache type and create statement
-    let (cache_ty, cache_create) = match (
-        &args.unbound,
-        &args.size,
-        &args.time,
-        &args.ty,
-        &args.create,
-        &args.time_refresh,
-    ) {
+    let (cache_ty, cache_create) = match (&unbound, &size, &time, &ty, &create, &time_refresh) {
         (true, None, None, None, None, _) => {
             let cache_ty = quote! {cached::UnboundCache<#cache_key_ty, #cache_value_ty>};
             let cache_create = quote! {cached::UnboundCache::new()};
@@ -159,10 +154,10 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
     };
 
     // make the set cache and return cache blocks
-    let (set_cache_block, return_cache_block) = match (&args.result, &args.option) {
+    let (set_cache_block, return_cache_block) = match (&result, &option) {
         (false, false) => {
             let set_cache_block = quote! { cache.cache_set(key, result.clone()); };
-            let return_cache_block = if args.with_cached_flag {
+            let return_cache_block = if with_cached_flag {
                 quote! { let mut r = result.to_owned(); r.was_cached = true; return r }
             } else {
                 quote! { return result.to_owned() }
@@ -175,7 +170,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
                     cache.cache_set(key, result.clone());
                 }
             };
-            let return_cache_block = if args.with_cached_flag {
+            let return_cache_block = if with_cached_flag {
                 quote! { let mut r = result.to_owned(); r.was_cached = true; return Ok(r) }
             } else {
                 quote! { return Ok(result.to_owned()) }
@@ -188,7 +183,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
                     cache.cache_set(key, result.clone());
                 }
             };
-            let return_cache_block = if args.with_cached_flag {
+            let return_cache_block = if with_cached_flag {
                 quote! { let mut r = result.to_owned(); r.was_cached = true; return Some(r) }
             } else {
                 quote! { return Some(result.clone()) }
@@ -198,7 +193,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
         _ => panic!("the result and option attributes are mutually exclusive"),
     };
 
-    if args.result_fallback && args.sync_writes.is_some() {
+    if result_fallback && sync_writes.is_some() {
         panic!("result_fallback and sync_writes are mutually exclusive");
     }
 
@@ -214,7 +209,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
     let function_call;
     let ty;
     if asyncness.is_some() {
-        lock = match args.sync_writes {
+        lock = match sync_writes {
             Some(SyncWriteMode::ByKey) => quote! {
                 let mut locks = #cache_ident.lock().await;
                 let lock = locks
@@ -237,7 +232,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
             let result = #no_cache_fn_ident(#(#input_names),*).await;
         };
 
-        ty = match args.sync_writes {
+        ty = match sync_writes {
             Some(SyncWriteMode::ByKey) => quote! {
                 #visibility static #cache_ident: ::cached::once_cell::sync::Lazy<::cached::async_sync::Mutex<std::collections::HashMap<#cache_key_ty, std::sync::Arc<::cached::async_sync::Mutex<#cache_ty>>>>> = ::cached::once_cell::sync::Lazy::new(|| ::cached::async_sync::Mutex::new(std::collections::HashMap::new()));
             },
@@ -246,7 +241,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
             },
         };
     } else {
-        lock = match args.sync_writes {
+        lock = match sync_writes {
             Some(SyncWriteMode::ByKey) => quote! {
                 let mut locks = #cache_ident.lock().unwrap();
                 let lock = locks.entry(key.clone()).or_insert_with(|| std::sync::Arc::new(std::sync::Mutex::new(#cache_create))).clone();
@@ -266,7 +261,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
             let result = #no_cache_fn_ident(#(#input_names),*);
         };
 
-        ty = match args.sync_writes {
+        ty = match sync_writes {
             Some(SyncWriteMode::ByKey) => quote! {
                 #visibility static #cache_ident: ::cached::once_cell::sync::Lazy<std::sync::Mutex<std::collections::HashMap<#cache_key_ty, std::sync::Arc<std::sync::Mutex<#cache_ty>>>>> = ::cached::once_cell::sync::Lazy::new(|| std::sync::Mutex::new(std::collections::HashMap::new()));
             },
@@ -284,7 +279,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
         #set_cache_and_return
     };
 
-    let do_set_return_block = if args.sync_writes.is_some() {
+    let do_set_return_block = if sync_writes.is_some() {
         quote! {
             #lock
             if let Some(result) = cache.cache_get(&key) {
@@ -293,7 +288,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
             #function_call
             #set_cache_and_return
         }
-    } else if args.result_fallback {
+    } else if result_fallback {
         quote! {
             let old_val = {
                 #lock
@@ -327,8 +322,6 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
         }
     };
 
-    let signature_no_muts = get_mut_signature(signature);
-
     // create a signature for the cache-priming function
     let prime_fn_ident = Ident::new(&format!("{}_prime_cache", &fn_ident), fn_ident.span());
     let mut prime_sig = signature_no_muts.clone();
diff --git a/cached_proc_macro/src/io_cached.rs b/cached_proc_macro/src/io_cached.rs
index 2750eee..ac7bd36 100644
--- a/cached_proc_macro/src/io_cached.rs
+++ b/cached_proc_macro/src/io_cached.rs
@@ -1,73 +1,86 @@
 use crate::helpers::*;
-use darling::ast::NestedMeta;
-use darling::FromMeta;
+use attrs::*;
 use proc_macro::TokenStream;
+use proc_macro2::Span;
 use quote::quote;
-use syn::spanned::Spanned;
+use syn::{parse::Parser as _, spanned::Spanned as _};
 use syn::{
     parse_macro_input, parse_str, Block, Expr, ExprClosure, GenericArgument, Ident, ItemFn,
-    PathArguments, ReturnType, Type,
+    PathArguments, ReturnType, Signature, Type,
 };
 
-#[derive(FromMeta)]
-struct IOMacroArgs {
-    map_error: String,
-    #[darling(default)]
-    disk: bool,
-    #[darling(default)]
-    disk_dir: Option<String>,
-    #[darling(default)]
-    redis: bool,
-    #[darling(default)]
-    cache_prefix_block: Option<String>,
-    #[darling(default)]
-    name: Option<String>,
-    #[darling(default)]
-    time: Option<u64>,
-    #[darling(default)]
-    time_refresh: Option<bool>,
-    #[darling(default)]
-    key: Option<String>,
-    #[darling(default)]
-    convert: Option<String>,
-    #[darling(default)]
-    with_cached_flag: bool,
-    #[darling(default)]
-    ty: Option<String>,
-    #[darling(default)]
-    create: Option<String>,
-    #[darling(default)]
-    sync_to_disk_on_cache_change: Option<bool>,
-    #[darling(default)]
-    connection_config: Option<String>,
-}
-
 pub fn io_cached(args: TokenStream, input: TokenStream) -> TokenStream {
-    let attr_args = match NestedMeta::parse_meta_list(args.into()) {
-        Ok(v) => v,
-        Err(e) => {
-            return TokenStream::from(darling::Error::from(e).write_errors());
-        }
-    };
-    let args = match IOMacroArgs::from_list(&attr_args) {
-        Ok(v) => v,
-        Err(e) => {
-            return TokenStream::from(e.write_errors());
-        }
+    let mut map_error = None::<String>;
+    let mut time = None::<u64>;
+
+    let mut time_refresh = None::<bool>;
+    let mut sync_to_disk_on_cache_change = None::<bool>;
+
+    let mut with_cached_flag = false;
+    let mut disk = false;
+    let mut redis = false;
+
+    let mut disk_dir = None::<String>;
+    let mut cache_prefix_block = None::<String>;
+    let mut name = None::<String>;
+    let mut key = None::<String>;
+    let mut convert = None::<String>;
+    let mut ty = None::<String>;
+    let mut create = None::<String>;
+    let mut connection_config = None::<String>;
+
+    match Attrs::new()
+        .once("map_error", with::eq(set::lit(&mut map_error)))
+        .once("time", with::eq(set::lit(&mut time)))
+        .once("time_refresh", with::eq(set::lit(&mut time_refresh)))
+        .once(
+            "sync_to_disk_on_cache_change",
+            with::eq(set::lit(&mut sync_to_disk_on_cache_change)),
+        )
+        .once("with_cached_flag", with::eq(on::lit(&mut with_cached_flag)))
+        .once("disk", with::eq(on::lit(&mut disk)))
+        .once("redis", with::eq(on::lit(&mut redis)))
+        .once("disk_dir", with::eq(set::lit(&mut disk_dir)))
+        .once(
+            "cache_prefix_block",
+            with::eq(set::lit(&mut cache_prefix_block)),
+        )
+        .once("name", with::eq(set::lit(&mut name)))
+        .once("key", with::eq(set::lit(&mut key)))
+        .once("convert", with::eq(set::lit(&mut convert)))
+        .once("ty", with::eq(set::lit(&mut ty)))
+        .once("create", with::eq(set::lit(&mut create)))
+        .once(
+            "connection_config",
+            with::eq(set::lit(&mut connection_config)),
+        )
+        .parse(args)
+    {
+        Ok(()) => {}
+        Err(e) => return e.into_compile_error().into(),
+    }
+    let Some(map_error) = map_error else {
+        return syn::Error::new(Span::call_site(), "Argument `map_error` is required")
+            .into_compile_error()
+            .into();
     };
-    let input = parse_macro_input!(input as ItemFn);
 
-    // pull out the parts of the input
-    let mut attributes = input.attrs;
-    let visibility = input.vis;
-    let signature = input.sig;
-    let body = input.block;
+    let ItemFn {
+        attrs: mut attributes,
+        vis: visibility,
+        sig: signature,
+        block: body,
+    } = parse_macro_input!(input as _);
 
-    // pull out the parts of the function signature
-    let fn_ident = signature.ident.clone();
-    let inputs = signature.inputs.clone();
-    let output = signature.output.clone();
-    let asyncness = signature.asyncness;
+    let signature_no_muts = get_mut_signature(signature.clone());
+
+    let Signature {
+        ident: fn_ident,
+        inputs,
+        output,
+        asyncness,
+        ..
+    } = signature;
 
     let input_tys = get_input_types(&inputs);
 
@@ -88,7 +101,7 @@ pub fn io_cached(args: TokenStream, input: TokenStream) -> TokenStream {
     // if `with_cached_flag = true`, then enforce that the return type
     // is something wrapped in `Return`. Either `Return` or the
     // fully qualified `cached::Return`
-    if args.with_cached_flag
+    if with_cached_flag
         && !output_string.contains("Return")
         && !output_string.contains("cached::Return")
     {
@@ -159,26 +172,26 @@ pub fn io_cached(args: TokenStream, input: TokenStream) -> TokenStream {
     };
 
     // make the cache identifier
-    let cache_ident = match args.name {
+    let cache_ident = match name {
         Some(ref name) => Ident::new(name, fn_ident.span()),
         None => Ident::new(&fn_ident.to_string().to_uppercase(), fn_ident.span()),
     };
     let cache_name = cache_ident.to_string();
 
     let (cache_key_ty, key_convert_block) =
-        make_cache_key_type(&args.key, &args.convert, &args.ty, input_tys, &input_names);
+        make_cache_key_type(&key, &convert, &ty, input_tys, &input_names);
 
     // make the cache type and create statement
     let (cache_ty, cache_create) = match (
-        &args.redis,
-        &args.disk,
-        &args.time,
-        &args.time_refresh,
-        &args.cache_prefix_block,
-        &args.ty,
-        &args.create,
-        &args.sync_to_disk_on_cache_change,
-        &args.connection_config,
+        &redis,
+        &disk,
+        &time,
+        &time_refresh,
+        &cache_prefix_block,
+        &ty,
+        &create,
+        &sync_to_disk_on_cache_change,
+        &connection_config,
     ) {
         // redis
         (true, false, time, time_refresh, cache_prefix, ty, cache_create, _, _) => {
@@ -325,7 +338,7 @@ pub fn io_cached(args: TokenStream, input: TokenStream) -> TokenStream {
                     }
                 }
             };
-            let create = match args.disk_dir {
+            let create = match disk_dir {
                 None => create,
                 Some(disk_dir) => {
                     quote! { (#create).set_disk_directory(#disk_dir) }
@@ -364,14 +377,14 @@ pub fn io_cached(args: TokenStream, input: TokenStream) -> TokenStream {
         _ => panic!("#[io_cached] cache types cache type could not be determined"),
     };
 
-    let map_error = &args.map_error;
+    let map_error = &map_error;
     let map_error = parse_str::<ExprClosure>(map_error).expect("unable to parse map_error block");
 
    // make the set cache and return cache blocks
     let (set_cache_block, return_cache_block) = {
-        let (set_cache_block, return_cache_block) = if args.with_cached_flag {
+        let (set_cache_block, return_cache_block) = if with_cached_flag {
             (
-                if asyncness.is_some() && !args.disk {
+                if asyncness.is_some() && !disk {
                     quote! {
                         if let Ok(result) = &result {
                             cache.cache_set(key, result.value.clone()).await.map_err(#map_error)?;
@@ -388,7 +401,7 @@ pub fn io_cached(args: TokenStream, input: TokenStream) -> TokenStream {
             )
         } else {
             (
-                if asyncness.is_some() && !args.disk {
+                if asyncness.is_some() && !disk {
                     quote! {
                         if let Ok(result) = &result {
                             cache.cache_set(key, result.clone()).await.map_err(#map_error)?;
@@ -427,8 +440,6 @@ pub fn io_cached(args: TokenStream, input: TokenStream) -> TokenStream {
         }
     };
 
-    let signature_no_muts = get_mut_signature(signature);
-
     // create a signature for the cache-priming function
     let prime_fn_ident = Ident::new(&format!("{}_prime_cache", &fn_ident), fn_ident.span());
     let mut prime_sig = signature_no_muts.clone();
@@ -443,7 +454,7 @@ pub fn io_cached(args: TokenStream, input: TokenStream) -> TokenStream {
     );
     fill_in_attributes(&mut attributes, cache_fn_doc_extra);
 
-    let async_trait = if asyncness.is_some() && !args.disk {
+    let async_trait = if asyncness.is_some() && !disk {
         quote! {
             use cached::IOCachedAsync;
         }
@@ -453,7 +464,7 @@ pub fn io_cached(args: TokenStream, input: TokenStream) -> TokenStream {
         }
     };
 
-    let async_cache_get_return = if asyncness.is_some() && !args.disk {
+    let async_cache_get_return = if asyncness.is_some() && !disk {
         quote! {
             if let Some(result) = cache.cache_get(&key).await.map_err(#map_error)? {
                 #return_cache_block
diff --git a/cached_proc_macro/src/once.rs b/cached_proc_macro/src/once.rs
index 70d5617..98a1f20 100644
--- a/cached_proc_macro/src/once.rs
+++ b/cached_proc_macro/src/once.rs
@@ -1,53 +1,45 @@
 use crate::helpers::*;
-use darling::ast::NestedMeta;
-use darling::FromMeta;
+use attrs::*;
 use proc_macro::TokenStream;
 use quote::quote;
-use syn::spanned::Spanned;
-use syn::{parse_macro_input, Ident, ItemFn, ReturnType};
-
-#[derive(FromMeta)]
-struct OnceMacroArgs {
-    #[darling(default)]
-    name: Option<String>,
-    #[darling(default)]
-    time: Option<u64>,
-    #[darling(default)]
-    sync_writes: bool,
-    #[darling(default)]
-    result: bool,
-    #[darling(default)]
-    option: bool,
-    #[darling(default)]
-    with_cached_flag: bool,
-}
+use syn::{parse::Parser as _, spanned::Spanned as _};
+use syn::{parse_macro_input, Ident, ItemFn, ReturnType, Signature};
 
 pub fn once(args: TokenStream, input: TokenStream) -> TokenStream {
-    let attr_args = match NestedMeta::parse_meta_list(args.into()) {
-        Ok(v) => v,
-        Err(e) => {
-            return TokenStream::from(darling::Error::from(e).write_errors());
-        }
-    };
-    let args = match OnceMacroArgs::from_list(&attr_args) {
-        Ok(v) => v,
-        Err(e) => {
-            return TokenStream::from(e.write_errors());
-        }
-    };
-    let input = parse_macro_input!(input as ItemFn);
+    let mut name = None::<String>;
+    let mut time = None::<u64>;
+    let mut sync_writes = false;
+    let mut result = false;
+    let mut option = false;
+    let mut with_cached_flag = false;
+    match Attrs::new()
+        .once("name", with::eq(set::lit(&mut name)))
+        .once("time", with::eq(set::lit(&mut time)))
+        .once("sync_writes", flag::or_eq(&mut sync_writes))
+        .once("result", with::eq(on::lit(&mut result)))
+        .once("option", with::eq(on::lit(&mut option)))
+        .once("with_cached_flag", with::eq(on::lit(&mut with_cached_flag)))
+        .parse(args)
+    {
+        Ok(()) => {}
+        Err(e) => return e.into_compile_error().into(),
+    }
+    let ItemFn {
+        attrs: mut attributes,
+        vis: visibility,
+        sig,
+        block: body,
+    } = parse_macro_input!(input as ItemFn);
 
-    // pull out the parts of the input
-    let mut attributes = input.attrs;
-    let visibility = input.vis;
-    let signature = input.sig;
-    let body = input.block;
+    let signature_no_muts = get_mut_signature(sig.clone());
 
-    // pull out the parts of the function signature
-    let fn_ident = signature.ident.clone();
-    let inputs = signature.inputs.clone();
-    let output = signature.output.clone();
-    let asyncness = signature.asyncness;
+    let Signature {
+        ident: fn_ident,
+        inputs,
+        output,
+        asyncness,
+        ..
+    } = sig;
 
     // pull out the names and types of the function inputs
     let input_names = get_input_names(&inputs);
@@ -64,20 +56,20 @@ pub fn once(args: TokenStream, input: TokenStream) -> TokenStream {
     let output_string = output_parts.join("::");
     let output_type_display = output_ts.to_string().replace(' ', "");
 
-    if check_with_cache_flag(args.with_cached_flag, output_string) {
+    if check_with_cache_flag(with_cached_flag, output_string) {
         return with_cache_flag_error(output_span, output_type_display);
     }
 
-    let cache_value_ty = find_value_type(args.result, args.option, &output, output_ty);
+    let cache_value_ty = find_value_type(result, option, &output, output_ty);
 
     // make the cache identifier
-    let cache_ident = match args.name {
+    let cache_ident = match name {
         Some(name) => Ident::new(&name, fn_ident.span()),
         None => Ident::new(&fn_ident.to_string().to_uppercase(), fn_ident.span()),
     };
 
     // make the cache type and create statement
-    let (cache_ty, cache_create) = match &args.time {
+    let (cache_ty, cache_create) = match &time {
         None => (quote! { Option<#cache_value_ty> }, quote! { None }),
         Some(_) => (
             quote! { Option<(::cached::web_time::Instant, #cache_value_ty)> },
@@ -86,9 +78,9 @@ pub fn once(args: TokenStream, input: TokenStream) -> TokenStream {
     };
 
     // make the set cache and return cache blocks
-    let (set_cache_block, return_cache_block) = match (&args.result, &args.option) {
+    let (set_cache_block, return_cache_block) = match (&result, &option) {
         (false, false) => {
-            let set_cache_block = if args.time.is_some() {
+            let set_cache_block = if time.is_some() {
                 quote! {
                     *cached = Some((now, result.clone()));
                 }
@@ -98,16 +90,16 @@ pub fn once(args: TokenStream, input: TokenStream) -> TokenStream {
                 }
             };
 
-            let return_cache_block = if args.with_cached_flag {
+            let return_cache_block = if with_cached_flag {
                 quote! { let mut r = result.clone(); r.was_cached = true; return r }
             } else {
                 quote! { return result.clone() }
             };
-            let return_cache_block = gen_return_cache_block(args.time, return_cache_block);
+            let return_cache_block = gen_return_cache_block(time, return_cache_block);
             (set_cache_block, return_cache_block)
         }
         (true, false) => {
-            let set_cache_block = if args.time.is_some() {
+            let set_cache_block = if time.is_some() {
                 quote! {
                     if let Ok(result) = &result {
                         *cached = Some((now, result.clone()));
@@ -121,16 +113,16 @@ pub fn once(args: TokenStream, input: TokenStream) -> TokenStream {
                 }
             };
 
-            let return_cache_block = if args.with_cached_flag {
+            let return_cache_block = if with_cached_flag {
                 quote! { let mut r = result.clone(); r.was_cached = true; return Ok(r) }
             } else {
                 quote! { return Ok(result.clone()) }
            };
-            let return_cache_block = gen_return_cache_block(args.time, return_cache_block);
+            let return_cache_block = gen_return_cache_block(time, return_cache_block);
             (set_cache_block, return_cache_block)
         }
         (false, true) => {
-            let set_cache_block = if args.time.is_some() {
+            let set_cache_block = if time.is_some() {
                 quote! {
                     if let Some(result) = &result {
                         *cached = Some((now, result.clone()));
@@ -144,12 +136,12 @@ pub fn once(args: TokenStream, input: TokenStream) -> TokenStream {
                 }
             };
 
-            let return_cache_block = if args.with_cached_flag {
+            let return_cache_block = if with_cached_flag {
                 quote! { let mut r = result.clone(); r.was_cached = true; return Some(r) }
             } else {
                 quote! { return Some(result.clone()) }
             };
-            let return_cache_block = gen_return_cache_block(args.time, return_cache_block);
+            let return_cache_block = gen_return_cache_block(time, return_cache_block);
             (set_cache_block, return_cache_block)
         }
         _ => panic!("the result and option attributes are mutually exclusive"),
@@ -220,7 +212,7 @@ pub fn once(args: TokenStream, input: TokenStream) -> TokenStream {
         }
     };
 
-    let do_set_return_block = if args.sync_writes {
+    let do_set_return_block = if sync_writes {
         quote! {
             #r_lock_return_cache_block
             #w_lock
@@ -239,8 +231,6 @@ pub fn once(args: TokenStream, input: TokenStream) -> TokenStream {
         }
     };
 
-    let signature_no_muts = get_mut_signature(signature);
-
     let prime_fn_ident = Ident::new(&format!("{}_prime_cache", &fn_ident), fn_ident.span());
     let mut prime_sig = signature_no_muts.clone();
     prime_sig.ident = prime_fn_ident;