// dory_pcs/backends/arkworks/ark_cache.rs
//! Prepared point cache for BN254 pairing optimization.
//!
//! This module provides a global cache for prepared G1/G2 points that are reused
//! across multiple pairing operations. Prepared points skip the affine conversion
//! and preprocessing steps, providing roughly 20-30% speedup for repeated pairings.
//!
//! The cache supports smart re-initialization: if a larger setup is needed, the
//! cache is automatically replaced; smaller or equal setups reuse the existing cache.
10use super::ark_group::{ArkG1, ArkG2};
11use ark_bn254::{Bn254, G1Affine, G2Affine};
12use ark_ec::pairing::Pairing;
13use std::sync::{Arc, RwLock};
14
/// Global cache of prepared BN254 points, shared process-wide via the module's
/// `CACHE` slot and handed out as an `Arc` so readers outlive replacements.
#[derive(Debug, Clone)]
pub struct PreparedCache {
    /// Prepared G1 points for efficient pairing operations.
    pub g1_prepared: Vec<<Bn254 as Pairing>::G1Prepared>,
    /// Prepared G2 points for efficient pairing operations.
    pub g2_prepared: Vec<<Bn254 as Pairing>::G2Prepared>,
}
23
24static CACHE: RwLock<Option<Arc<PreparedCache>>> = RwLock::new(None);
25
26/// Initialize the global cache with G1 and G2 vectors.
27///
28/// This function implements smart re-initialization:
29/// - If the cache doesn't exist, it creates one
30/// - If the cache exists but is too small, it replaces it with a larger one
31/// - If the cache exists and is large enough, it does nothing (reuses existing)
32///
33/// This allows multiple proofs with different setup sizes to run in the same process.
34///
35/// # Arguments
36/// * `g1_vec` - Vector of G1 points to prepare and cache
37/// * `g2_vec` - Vector of G2 points to prepare and cache
38///
39/// # Panics
40/// Panics if the internal `RwLock` is poisoned.
41///
42/// # Example
43/// ```ignore
44/// use dory_pcs::backends::arkworks::{init_cache, BN254};
45/// use dory_pcs::setup::ProverSetup;
46///
47/// let setup = ProverSetup::<BN254>::new(max_log_n);
48/// init_cache(&setup.g1_vec, &setup.g2_vec);
49/// ```
50pub fn init_cache(g1_vec: &[ArkG1], g2_vec: &[ArkG2]) {
51 // Fast path: check if existing cache is sufficient (read lock only)
52 {
53 let read_guard = CACHE.read().unwrap();
54 if let Some(ref cache) = *read_guard {
55 if cache.g1_prepared.len() >= g1_vec.len() && cache.g2_prepared.len() >= g2_vec.len() {
56 return; // Existing cache is large enough
57 }
58 }
59 }
60
61 // Slow path: need to initialize or grow the cache
62 let mut write_guard = CACHE.write().unwrap();
63
64 // Double-check after acquiring write lock (another thread may have initialized)
65 if let Some(ref cache) = *write_guard {
66 if cache.g1_prepared.len() >= g1_vec.len() && cache.g2_prepared.len() >= g2_vec.len() {
67 return; // Another thread initialized a sufficient cache
68 }
69 }
70
71 // Prepare the new cache
72 let g1_prepared: Vec<<Bn254 as Pairing>::G1Prepared> = g1_vec
73 .iter()
74 .map(|g| {
75 let affine: G1Affine = g.0.into();
76 affine.into()
77 })
78 .collect();
79
80 let g2_prepared: Vec<<Bn254 as Pairing>::G2Prepared> = g2_vec
81 .iter()
82 .map(|g| {
83 let affine: G2Affine = g.0.into();
84 affine.into()
85 })
86 .collect();
87
88 *write_guard = Some(Arc::new(PreparedCache {
89 g1_prepared,
90 g2_prepared,
91 }));
92}
93
94/// Get a shared reference to the prepared cache.
95///
96/// Returns `None` if cache has not been initialized.
97/// The returned `Arc` keeps the cache data alive even if the cache is replaced.
98///
99/// # Panics
100/// Panics if the internal `RwLock` is poisoned.
101///
102/// # Returns
103/// Arc-wrapped cache, or `None` if uninitialized.
104pub fn get_prepared_cache() -> Option<Arc<PreparedCache>> {
105 CACHE.read().unwrap().clone()
106}
107
108/// Check if cache is initialized.
109///
110/// # Panics
111/// Panics if the internal `RwLock` is poisoned.
112///
113/// # Returns
114/// `true` if cache has been initialized, `false` otherwise.
115pub fn is_cached() -> bool {
116 CACHE.read().unwrap().is_some()
117}