//! synaptic_lark/store/cache.rs

1use async_trait::async_trait;
2use serde_json::json;
3use synaptic_core::{ChatResponse, LlmCache, SynapticError};
4
5use crate::{api::bitable::BitableApi, LarkConfig};
6
/// A team-shared LLM response cache stored in a Feishu Bitable table.
///
/// Each row represents one cached response, keyed by `cache_key`. Hit counts
/// are tracked in a `hit_count` field and are visible directly in the Feishu
/// spreadsheet, making cache utilisation observable without additional tooling.
///
/// # Bitable table schema
///
/// | Field name      | Type   | Notes                         |
/// |-----------------|--------|-------------------------------|
/// | `cache_key`     | Text   | Unique cache key              |
/// | `response_json` | Text   | Serialised `ChatResponse`     |
/// | `hit_count`     | Text   | Number of cache hits (string) |
/// | `created_at`    | Text   | Unix timestamp (seconds)      |
pub struct LarkBitableLlmCache {
    // Low-level Bitable client used for all record search/create/update/delete calls.
    api: BitableApi,
    // Token identifying the Bitable application that hosts the cache table.
    app_token: String,
    // ID of the table whose rows follow the schema documented above.
    table_id: String,
}
26
27impl LarkBitableLlmCache {
28    /// Create a new cache backed by the given Bitable table.
29    pub fn new(
30        config: LarkConfig,
31        app_token: impl Into<String>,
32        table_id: impl Into<String>,
33    ) -> Self {
34        Self {
35            api: BitableApi::new(config),
36            app_token: app_token.into(),
37            table_id: table_id.into(),
38        }
39    }
40
41    /// Return the Bitable application token.
42    pub fn app_token(&self) -> &str {
43        &self.app_token
44    }
45
46    /// Return the Bitable table ID.
47    pub fn table_id(&self) -> &str {
48        &self.table_id
49    }
50}
51
52#[async_trait]
53impl LlmCache for LarkBitableLlmCache {
54    async fn get(&self, key: &str) -> Result<Option<ChatResponse>, SynapticError> {
55        let body = json!({
56            "page_size": 1,
57            "filter": {
58                "conjunction": "and",
59                "conditions": [{
60                    "field_name": "cache_key",
61                    "operator": "is",
62                    "value": [key]
63                }]
64            }
65        });
66        let items = self
67            .api
68            .search_records(&self.app_token, &self.table_id, body)
69            .await
70            .map_err(|e| SynapticError::Cache(e.to_string()))?;
71
72        let rec = match items.into_iter().next() {
73            None => return Ok(None),
74            Some(r) => r,
75        };
76
77        let json_str = rec["fields"]["response_json"].as_str().unwrap_or("{}");
78        let response: ChatResponse = serde_json::from_str(json_str)
79            .map_err(|e| SynapticError::Cache(format!("deserialize cache: {e}")))?;
80
81        // Increment hit_count — fire-and-forget; errors ignored so a counter
82        // update failure never breaks the caller.
83        let record_id = rec["record_id"].as_str().unwrap_or("").to_string();
84        let hit = rec["fields"]["hit_count"]
85            .as_str()
86            .and_then(|s| s.parse::<u64>().ok())
87            .unwrap_or(0)
88            + 1;
89        let _ = self
90            .api
91            .update_record(
92                &self.app_token,
93                &self.table_id,
94                &record_id,
95                json!({ "hit_count": hit.to_string() }),
96            )
97            .await;
98
99        Ok(Some(response))
100    }
101
102    async fn put(&self, key: &str, response: &ChatResponse) -> Result<(), SynapticError> {
103        let json_str = serde_json::to_string(response)
104            .map_err(|e| SynapticError::Cache(format!("serialize cache: {e}")))?;
105        let records = vec![json!({
106            "fields": {
107                "cache_key": key,
108                "response_json": json_str,
109                "hit_count": "0",
110                "created_at": now_ts(),
111            }
112        })];
113        self.api
114            .batch_create_records(&self.app_token, &self.table_id, records)
115            .await
116            .map_err(|e| SynapticError::Cache(e.to_string()))?;
117        Ok(())
118    }
119
120    async fn clear(&self) -> Result<(), SynapticError> {
121        let body = json!({ "page_size": 500 });
122        let items = self
123            .api
124            .search_records(&self.app_token, &self.table_id, body)
125            .await
126            .map_err(|e| SynapticError::Cache(e.to_string()))?;
127
128        let ids: Vec<String> = items
129            .iter()
130            .filter_map(|r| r["record_id"].as_str().map(String::from))
131            .collect();
132
133        if ids.is_empty() {
134            return Ok(());
135        }
136
137        self.api
138            .batch_delete_records(&self.app_token, &self.table_id, ids)
139            .await
140            .map_err(|e| SynapticError::Cache(e.to_string()))
141    }
142}
143
/// Current Unix time in whole seconds, rendered as a decimal string.
///
/// Falls back to "0" if the system clock reports a time before the Unix
/// epoch (the only way `duration_since` can fail here).
fn now_ts() -> String {
    use std::time::{SystemTime, UNIX_EPOCH};

    let secs = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map(|d| d.as_secs())
        .unwrap_or(0);
    secs.to_string()
}