// omega_cache_macros/lib.rs
use crate::ast::{BackoffInput, CacheInput, EngineInput, MetricsInput};
use proc_macro::TokenStream;
use quote::{ToTokens, quote};
use syn::parse_macro_input;

mod ast;
mod parse;
9#[proc_macro]
10pub fn cache(input: TokenStream) -> TokenStream {
11 let input = parse_macro_input!(input as CacheInput);
12
13 let expanded = quote! {#input};
14
15 TokenStream::from(expanded)
16}
17
18impl ToTokens for CacheInput {
19 fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
20 let engine = &self.engine;
21 let backoff_config = &self.backoff;
22
23 tokens.extend(quote! {
24 ::omega_cache::Cache::new(#engine, #backoff_config)
25 });
26 }
27}
28
29impl ToTokens for EngineInput {
30 fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
31 match self {
32 EngineInput::Clock(clock) => {
33 let capacity = &clock.capacity;
34 let metrics = &clock.metrics;
35
36 tokens.extend(quote! {
37 ::omega_cache::clock::ClockCache::new(#capacity, #metrics)
38 });
39 }
40 EngineInput::S3FIFO(s3fifo) => {
41 let capacity = &s3fifo.capacity;
42 let metrics = &s3fifo.metrics;
43
44 tokens.extend(quote! {
45 ::omega_cache::s3fifo::S3FIFOCache::new(#capacity, #metrics)
46 });
47 }
48 }
49 }
50}
51
52impl ToTokens for BackoffInput {
53 fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
54 let policy = &self.policy;
55 let limit = &self.limit;
56
57 let extend = quote! {
58 ::omega_cache::core::backoff::BackoffConfig { policy: #policy, limit: #limit }
59 };
60
61 tokens.extend(extend);
62 }
63}
64
65impl ToTokens for MetricsInput {
66 fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
67 let shards = &self.shards;
68 let latency_samples = &self.latency_samples;
69
70 tokens.extend(quote! {
71 ::omega_cache::metrics::MetricsConfig::new(#shards, #latency_samples)
72 });
73 }
74}