use crate::plugins::core::{
    ClientPlugin, PluginConfig, PluginContext, PluginResult, RequestContext, ResponseContext,
};
use async_trait::async_trait;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_json::{Value, json};
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
use std::time::{Duration, Instant};
use tracing::{debug, info, warn};

/// Aggregate request/response metrics collected by the metrics plugin.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MetricsData {
    /// Total number of requests observed
    pub total_requests: u64,

    /// Number of successful responses
    pub successful_responses: u64,

    /// Number of failed responses
    pub failed_responses: u64,

    /// Average response time in milliseconds
    pub avg_response_time_ms: f64,

    /// Minimum response time in milliseconds
    pub min_response_time_ms: u64,

    /// Maximum response time in milliseconds
    pub max_response_time_ms: u64,

    /// Requests observed over the last 60 seconds
    pub requests_per_minute: f64,

    /// Per-method metrics, keyed by method name
    pub method_metrics: HashMap<String, MethodMetrics>,

    /// When metrics collection started
    pub start_time: DateTime<Utc>,

    /// When metrics were last reset
    pub last_reset: DateTime<Utc>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MethodMetrics {
    pub count: u64,
    pub avg_duration_ms: f64,
    pub success_count: u64,
    pub error_count: u64,
}

impl Default for MetricsData {
    fn default() -> Self {
        let now = Utc::now();
        Self {
            total_requests: 0,
            successful_responses: 0,
            failed_responses: 0,
            avg_response_time_ms: 0.0,
            min_response_time_ms: u64::MAX,
            max_response_time_ms: 0,
            requests_per_minute: 0.0,
            method_metrics: HashMap::new(),
            start_time: now,
            last_reset: now,
        }
    }
}

/// Plugin that collects request/response metrics and performance data.
#[derive(Debug)]
pub struct MetricsPlugin {
    /// Shared metrics state
    metrics: Arc<Mutex<MetricsData>>,

    /// Start instants for in-flight requests, keyed by request id
    request_times: Arc<Mutex<HashMap<String, Instant>>>,

    /// Timestamps of recent requests, used to compute requests-per-minute
    recent_requests: Arc<Mutex<Vec<Instant>>>,
}

impl MetricsPlugin {
    pub fn new(_config: PluginConfig) -> Self {
        Self {
            metrics: Arc::new(Mutex::new(MetricsData::default())),
            request_times: Arc::new(Mutex::new(HashMap::new())),
            recent_requests: Arc::new(Mutex::new(Vec::new())),
        }
    }

    /// Return a snapshot of the currently collected metrics.
    pub fn get_metrics(&self) -> MetricsData {
        self.metrics.lock().unwrap().clone()
    }

    /// Reset all counters while keeping the original start time.
    pub fn reset_metrics(&self) {
        let mut metrics = self.metrics.lock().unwrap();
        let now = Utc::now();
        *metrics = MetricsData {
            start_time: metrics.start_time,
            last_reset: now,
            ..MetricsData::default()
        };
        self.request_times.lock().unwrap().clear();
        self.recent_requests.lock().unwrap().clear();
    }

    fn update_request_rate(&self) {
        let mut recent = self.recent_requests.lock().unwrap();
        let now = Instant::now();

        // Drop timestamps older than 60 seconds, then record this request.
        recent.retain(|&timestamp| now.duration_since(timestamp).as_secs() < 60);
        recent.push(now);

        let mut metrics = self.metrics.lock().unwrap();
        metrics.requests_per_minute = recent.len() as f64;
    }

    fn update_method_metrics(&self, method: &str, duration: Duration, is_success: bool) {
        let mut metrics = self.metrics.lock().unwrap();
        let entry = metrics
            .method_metrics
            .entry(method.to_string())
            .or_insert(MethodMetrics {
                count: 0,
                avg_duration_ms: 0.0,
                success_count: 0,
                error_count: 0,
            });

        entry.count += 1;
        if is_success {
            entry.success_count += 1;
        } else {
            entry.error_count += 1;
        }

        let duration_ms = duration.as_millis() as f64;
        entry.avg_duration_ms =
            (entry.avg_duration_ms * (entry.count - 1) as f64 + duration_ms) / entry.count as f64;
    }
}

#[async_trait]
impl ClientPlugin for MetricsPlugin {
    fn name(&self) -> &str {
        "metrics"
    }

    fn version(&self) -> &str {
        "1.0.0"
    }

    fn description(&self) -> Option<&str> {
        Some("Collects request/response metrics and performance data")
    }

    async fn initialize(&self, context: &PluginContext) -> PluginResult<()> {
        info!(
            "Metrics plugin initialized for client: {}",
            context.client_name
        );
        Ok(())
    }

    async fn before_request(&self, context: &mut RequestContext) -> PluginResult<()> {
        let request_id = context.request.id.to_string();

        self.request_times
            .lock()
            .unwrap()
            .insert(request_id.clone(), Instant::now());

        {
            let mut metrics = self.metrics.lock().unwrap();
            metrics.total_requests += 1;
        }

        self.update_request_rate();

        context.add_metadata("metrics.request_id".to_string(), json!(request_id));
        context.add_metadata(
            "metrics.start_time".to_string(),
            json!(Utc::now().to_rfc3339()),
        );

        debug!(
            "Metrics: Recorded request start for method: {}",
            context.method()
        );
        Ok(())
    }

    async fn after_response(&self, context: &mut ResponseContext) -> PluginResult<()> {
        let request_id = context.request_context.request.id.to_string();

        let duration =
            if let Some(start_time) = self.request_times.lock().unwrap().remove(&request_id) {
                start_time.elapsed()
            } else {
                context.duration
            };

        let is_success = context.is_success();
        let method = context.method().to_string();
        let duration_ms = duration.as_millis();

        {
            let mut metrics = self.metrics.lock().unwrap();

            if is_success {
                metrics.successful_responses += 1;
            } else {
                metrics.failed_responses += 1;
            }

            let duration_ms_u64 = duration_ms as u64;

            if duration_ms_u64 < metrics.min_response_time_ms {
                metrics.min_response_time_ms = duration_ms_u64;
            }
            if duration_ms_u64 > metrics.max_response_time_ms {
                metrics.max_response_time_ms = duration_ms_u64;
            }

            let total_responses = metrics.successful_responses + metrics.failed_responses;
            metrics.avg_response_time_ms =
                (metrics.avg_response_time_ms * (total_responses - 1) as f64 + duration_ms as f64)
                    / total_responses as f64;
        }

        self.update_method_metrics(&method, duration, is_success);

        context.add_metadata("metrics.duration_ms".to_string(), json!(duration_ms));
        context.add_metadata("metrics.success".to_string(), json!(is_success));

        debug!(
            "Metrics: Recorded response for method: {} ({}ms, success: {})",
            method, duration_ms, is_success
        );

        Ok(())
    }

    async fn handle_custom(
        &self,
        method: &str,
        params: Option<Value>,
    ) -> PluginResult<Option<Value>> {
        match method {
            "metrics.get_stats" => {
                let metrics = self.get_metrics();
                Ok(Some(serde_json::to_value(metrics).unwrap()))
            }
            "metrics.reset" => {
                self.reset_metrics();
                info!("Metrics reset");
                Ok(Some(json!({"status": "reset"})))
            }
            "metrics.get_method_stats" => {
                if let Some(params) = params {
                    if let Some(method_name) = params.get("method").and_then(|v| v.as_str()) {
                        let metrics = self.metrics.lock().unwrap();
                        if let Some(method_metrics) = metrics.method_metrics.get(method_name) {
                            Ok(Some(serde_json::to_value(method_metrics).unwrap()))
                        } else {
                            Ok(Some(json!({"error": "Method not found"})))
                        }
                    } else {
                        Ok(Some(json!({"error": "Method parameter required"})))
                    }
                } else {
                    Ok(Some(json!({"error": "Parameters required"})))
                }
            }
            _ => Ok(None),
        }
    }
}

/// Configuration for the retry plugin.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RetryConfig {
    /// Maximum number of retry attempts
    pub max_retries: u32,

    /// Base delay before the first retry, in milliseconds
    pub base_delay_ms: u64,

    /// Upper bound on the retry delay, in milliseconds
    pub max_delay_ms: u64,

    /// Multiplier applied to the delay on each successive attempt
    pub backoff_multiplier: f64,

    /// Whether to retry on timeout errors
    pub retry_on_timeout: bool,

    /// Whether to retry on connection/transport errors
    pub retry_on_connection_error: bool,
}

impl Default for RetryConfig {
    fn default() -> Self {
        Self {
            max_retries: 3,
            base_delay_ms: 100,
            max_delay_ms: 5000,
            backoff_multiplier: 2.0,
            retry_on_timeout: true,
            retry_on_connection_error: true,
        }
    }
}

/// Plugin that tracks retry attempts and computes exponential backoff delays.
#[derive(Debug)]
pub struct RetryPlugin {
    config: RetryConfig,
    /// Current retry counts, keyed by request id
    retry_stats: Arc<Mutex<HashMap<String, u32>>>,
}

impl RetryPlugin {
    pub fn new(config: PluginConfig) -> Self {
        let retry_config = match config {
            PluginConfig::Retry(config) => config,
            _ => RetryConfig::default(),
        };

        Self {
            config: retry_config,
            retry_stats: Arc::new(Mutex::new(HashMap::new())),
        }
    }

    fn should_retry(&self, error: &turbomcp_core::Error) -> bool {
        let error_string = error.to_string().to_lowercase();

        if self.config.retry_on_connection_error
            && (error_string.contains("transport") || error_string.contains("connection"))
        {
            return true;
        }

        if self.config.retry_on_timeout && error_string.contains("timeout") {
            return true;
        }

        false
    }

    fn calculate_delay(&self, attempt: u32) -> Duration {
        let delay_ms = (self.config.base_delay_ms as f64
            * self.config.backoff_multiplier.powi(attempt as i32)) as u64;
        Duration::from_millis(delay_ms.min(self.config.max_delay_ms))
    }

    fn get_retry_count(&self, request_id: &str) -> u32 {
        self.retry_stats
            .lock()
            .unwrap()
            .get(request_id)
            .copied()
            .unwrap_or(0)
    }

    fn increment_retry_count(&self, request_id: &str) {
        let mut stats = self.retry_stats.lock().unwrap();
        let count = stats.entry(request_id.to_string()).or_insert(0);
        *count += 1;
    }

    fn clear_retry_count(&self, request_id: &str) {
        self.retry_stats.lock().unwrap().remove(request_id);
    }
}

#[async_trait]
impl ClientPlugin for RetryPlugin {
    fn name(&self) -> &str {
        "retry"
    }

    fn version(&self) -> &str {
        "1.0.0"
    }

    fn description(&self) -> Option<&str> {
        Some("Automatic retry with exponential backoff for failed requests")
    }

    async fn initialize(&self, context: &PluginContext) -> PluginResult<()> {
        info!(
            "Retry plugin initialized for client: {} (max_retries: {}, base_delay: {}ms)",
            context.client_name, self.config.max_retries, self.config.base_delay_ms
        );
        Ok(())
    }

    async fn before_request(&self, context: &mut RequestContext) -> PluginResult<()> {
        let request_id = context.request.id.to_string();
        let retry_count = self.get_retry_count(&request_id);

        context.add_metadata("retry.attempt".to_string(), json!(retry_count + 1));
        context.add_metadata(
            "retry.max_attempts".to_string(),
            json!(self.config.max_retries + 1),
        );

        if retry_count > 0 {
            debug!(
                "Retry: Attempt {} for request {} (method: {})",
                retry_count + 1,
                request_id,
                context.method()
            );
        }

        Ok(())
    }

    async fn after_response(&self, context: &mut ResponseContext) -> PluginResult<()> {
        let request_id = context.request_context.request.id.to_string();

        if context.is_success() {
            self.clear_retry_count(&request_id);
            debug!("Retry: Request {} succeeded", request_id);
        } else if let Some(error) = &context.error {
            let retry_count = self.get_retry_count(&request_id);

            if self.should_retry(error) && retry_count < self.config.max_retries {
                self.increment_retry_count(&request_id);

                let delay = self.calculate_delay(retry_count);
                warn!(
                    "Retry: Request {} failed (attempt {}), will retry after {:?}",
                    request_id,
                    retry_count + 1,
                    delay
                );

                // The plugin only records retry intent as metadata; re-sending the
                // request is left to the caller that drives the plugin chain.
                context.add_metadata("retry.will_retry".to_string(), json!(true));
                context.add_metadata("retry.delay_ms".to_string(), json!(delay.as_millis()));
                context.add_metadata("retry.next_attempt".to_string(), json!(retry_count + 2));
            } else {
                self.clear_retry_count(&request_id);
                if retry_count >= self.config.max_retries {
                    warn!("Retry: Request {} exhausted all retry attempts", request_id);
                } else {
                    debug!("Retry: Error not retryable for request {}", request_id);
                }
                context.add_metadata("retry.will_retry".to_string(), json!(false));
                context.add_metadata(
                    "retry.reason".to_string(),
                    json!(if retry_count >= self.config.max_retries {
                        "max_retries_reached"
                    } else {
                        "error_not_retryable"
                    }),
                );
            }
        }

        Ok(())
    }

    async fn handle_custom(
        &self,
        method: &str,
        _params: Option<Value>,
    ) -> PluginResult<Option<Value>> {
        match method {
            "retry.get_config" => Ok(Some(serde_json::to_value(&self.config).unwrap())),
            "retry.get_stats" => {
                let stats = self.retry_stats.lock().unwrap();
                Ok(Some(json!({
                    "active_retries": stats.len(),
                    "retry_counts": stats.clone()
                })))
            }
            "retry.clear_stats" => {
                self.retry_stats.lock().unwrap().clear();
                info!("Retry stats cleared");
                Ok(Some(json!({"status": "cleared"})))
            }
            _ => Ok(None),
        }
    }
}

/// Configuration for the cache plugin.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CacheConfig {
    /// Maximum number of cached entries
    pub max_entries: usize,

    /// Time-to-live for cached entries, in seconds
    pub ttl_seconds: u64,

    /// Cache generic responses
    pub cache_responses: bool,

    /// Cache resource responses (methods under `resources/`)
    pub cache_resources: bool,

    /// Cache tool responses (methods under `tools/`)
    pub cache_tools: bool,
}

impl Default for CacheConfig {
    fn default() -> Self {
        Self {
            max_entries: 1000,
            ttl_seconds: 300, // 5 minutes
            cache_responses: true,
            cache_resources: true,
            cache_tools: true,
        }
    }
}

#[derive(Debug, Clone)]
struct CacheEntry {
    data: Value,
    timestamp: Instant,
    access_count: u64,
}

impl CacheEntry {
    fn new(data: Value) -> Self {
        Self {
            data,
            timestamp: Instant::now(),
            access_count: 0,
        }
    }

    fn is_expired(&self, ttl: Duration) -> bool {
        self.timestamp.elapsed() > ttl
    }

    fn access(&mut self) -> &Value {
        self.access_count += 1;
        &self.data
    }
}

/// Plugin that caches successful responses with TTL-based expiration.
#[derive(Debug)]
pub struct CachePlugin {
    config: CacheConfig,
    cache: Arc<Mutex<HashMap<String, CacheEntry>>>,
    stats: Arc<Mutex<CacheStats>>,
}

#[derive(Debug, Default)]
struct CacheStats {
    hits: u64,
    misses: u64,
    evictions: u64,
    total_entries: u64,
}

impl CachePlugin {
    pub fn new(config: PluginConfig) -> Self {
        let cache_config = match config {
            PluginConfig::Cache(config) => config,
            _ => CacheConfig::default(),
        };

        Self {
            config: cache_config,
            cache: Arc::new(Mutex::new(HashMap::new())),
            stats: Arc::new(Mutex::new(CacheStats::default())),
        }
    }

    fn should_cache_method(&self, method: &str) -> bool {
        match method {
            m if m.starts_with("tools/") && self.config.cache_tools => true,
            m if m.starts_with("resources/") && self.config.cache_resources => true,
            _ if self.config.cache_responses => true,
            _ => false,
        }
    }

    fn generate_cache_key(&self, context: &RequestContext) -> String {
        let params_hash = if let Some(params) = &context.request.params {
            format!("{:x}", fxhash::hash64(params))
        } else {
            "no_params".to_string()
        };
        format!("{}:{}", context.method(), params_hash)
    }

    fn get_cached(&self, key: &str) -> Option<Value> {
        let mut cache = self.cache.lock().unwrap();
        let ttl = Duration::from_secs(self.config.ttl_seconds);

        if let Some(entry) = cache.get_mut(key) {
            if !entry.is_expired(ttl) {
                let mut stats = self.stats.lock().unwrap();
                stats.hits += 1;
                return Some(entry.access().clone());
            } else {
                // Entry expired: evict it and fall through to the miss path.
                cache.remove(key);
                let mut stats = self.stats.lock().unwrap();
                stats.evictions += 1;
            }
        }

        let mut stats = self.stats.lock().unwrap();
        stats.misses += 1;
        None
    }

    fn store_cached(&self, key: String, data: Value) {
        let mut cache = self.cache.lock().unwrap();

        if cache.len() >= self.config.max_entries {
            // At capacity: evict the oldest entries (by insertion timestamp) first.
            let evict_keys: Vec<_> = {
                let mut entries: Vec<_> = cache
                    .iter()
                    .map(|(k, v)| (k.clone(), v.timestamp))
                    .collect();
                entries.sort_by_key(|(_, timestamp)| *timestamp);

                let evict_count = (cache.len() - self.config.max_entries + 1).min(cache.len() / 2);
                entries
                    .into_iter()
                    .take(evict_count)
                    .map(|(key, _)| key)
                    .collect()
            };

            let evict_count = evict_keys.len();
            for key in evict_keys {
                cache.remove(&key);
            }

            let mut stats = self.stats.lock().unwrap();
            stats.evictions += evict_count as u64;
        }

        cache.insert(key, CacheEntry::new(data));
        let mut stats = self.stats.lock().unwrap();
        stats.total_entries += 1;
    }

    fn cleanup_expired(&self) {
        let mut cache = self.cache.lock().unwrap();
        let ttl = Duration::from_secs(self.config.ttl_seconds);

        let expired_keys: Vec<_> = cache
            .iter()
            .filter(|(_, entry)| entry.is_expired(ttl))
            .map(|(key, _)| key.clone())
            .collect();

        let eviction_count = expired_keys.len();
        for key in expired_keys {
            cache.remove(&key);
        }

        if eviction_count > 0 {
            let mut stats = self.stats.lock().unwrap();
            stats.evictions += eviction_count as u64;
            debug!("Cache: Cleaned up {} expired entries", eviction_count);
        }
    }
}

#[async_trait]
impl ClientPlugin for CachePlugin {
    fn name(&self) -> &str {
        "cache"
    }

    fn version(&self) -> &str {
        "1.0.0"
    }

    fn description(&self) -> Option<&str> {
        Some("Response caching with TTL and LRU eviction")
    }

    async fn initialize(&self, context: &PluginContext) -> PluginResult<()> {
        info!(
            "Cache plugin initialized for client: {} (max_entries: {}, ttl: {}s)",
            context.client_name, self.config.max_entries, self.config.ttl_seconds
        );
        Ok(())
    }

    async fn before_request(&self, context: &mut RequestContext) -> PluginResult<()> {
        if !self.should_cache_method(context.method()) {
            return Ok(());
        }

        let cache_key = self.generate_cache_key(context);

        if let Some(_cached_data) = self.get_cached(&cache_key) {
            debug!(
                "Cache: Hit for method {} (key: {})",
                context.method(),
                cache_key
            );
            context.add_metadata("cache.hit".to_string(), json!(true));
            context.add_metadata("cache.key".to_string(), json!(cache_key));
        } else {
            debug!(
                "Cache: Miss for method {} (key: {})",
                context.method(),
                cache_key
            );
            context.add_metadata("cache.hit".to_string(), json!(false));
            context.add_metadata("cache.key".to_string(), json!(cache_key));
        }

        Ok(())
    }

    async fn after_response(&self, context: &mut ResponseContext) -> PluginResult<()> {
        if !self.should_cache_method(context.method()) || !context.is_success() {
            return Ok(());
        }

        if let Some(cache_key) = context
            .request_context
            .get_metadata("cache.key")
            .and_then(|v| v.as_str())
            && let Some(response_data) = &context.response
        {
            self.store_cached(cache_key.to_string(), response_data.clone());
            debug!(
                "Cache: Stored response for method {} (key: {})",
                context.method(),
                cache_key
            );
            context.add_metadata("cache.stored".to_string(), json!(true));
        }

        self.cleanup_expired();

        Ok(())
    }

    async fn handle_custom(
        &self,
        method: &str,
        params: Option<Value>,
    ) -> PluginResult<Option<Value>> {
        match method {
            "cache.get_stats" => {
                let stats = self.stats.lock().unwrap();
                let cache_size = self.cache.lock().unwrap().len();

                Ok(Some(json!({
                    "hits": stats.hits,
                    "misses": stats.misses,
                    "evictions": stats.evictions,
                    "total_entries": stats.total_entries,
                    "current_size": cache_size,
                    "hit_rate": if stats.hits + stats.misses > 0 {
                        stats.hits as f64 / (stats.hits + stats.misses) as f64
                    } else {
                        0.0
                    }
                })))
            }
            "cache.clear" => {
                let mut cache = self.cache.lock().unwrap();
                let cleared_count = cache.len();
                cache.clear();
                info!("Cache: Cleared {} entries", cleared_count);
                Ok(Some(json!({"cleared_entries": cleared_count})))
            }
            "cache.get_config" => Ok(Some(serde_json::to_value(&self.config).unwrap())),
            "cache.cleanup" => {
                self.cleanup_expired();
                let cache_size = self.cache.lock().unwrap().len();
                Ok(Some(json!({"remaining_entries": cache_size})))
            }
            "cache.get" => {
                if let Some(params) = params {
                    if let Some(key) = params.get("key").and_then(|v| v.as_str()) {
                        if let Some(data) = self.get_cached(key) {
                            Ok(Some(json!({"found": true, "data": data})))
                        } else {
                            Ok(Some(json!({"found": false})))
                        }
                    } else {
                        Ok(Some(json!({"error": "Key parameter required"})))
                    }
                } else {
                    Ok(Some(json!({"error": "Parameters required"})))
                }
            }
            _ => Ok(None),
        }
    }
}

/// Minimal helper for hashing JSON parameters into cache keys.
/// Despite the name, this uses the standard library hasher rather than the
/// external `fxhash` crate.
mod fxhash {
    use serde_json::Value;
    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    pub fn hash64(value: &Value) -> u64 {
        let mut hasher = DefaultHasher::new();

        // Hash the serialized JSON representation of the params.
        let json_str = value.to_string();
        json_str.hash(&mut hasher);

        hasher.finish()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[tokio::test]
    async fn test_metrics_plugin_creation() {
        let plugin = MetricsPlugin::new(PluginConfig::Metrics);
        assert_eq!(plugin.name(), "metrics");

        let metrics = plugin.get_metrics();
        assert_eq!(metrics.total_requests, 0);
        assert_eq!(metrics.successful_responses, 0);
    }
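
    // Added test sketch (not in the original): exercises the private
    // update_method_metrics helper and reset_metrics, relying on the test
    // module's access to private items in this file.
    #[tokio::test]
    async fn test_metrics_method_tracking_and_reset() {
        let plugin = MetricsPlugin::new(PluginConfig::Metrics);

        plugin.update_method_metrics("tools/call", Duration::from_millis(100), true);
        plugin.update_method_metrics("tools/call", Duration::from_millis(300), false);

        let before = plugin.get_metrics();
        let method = before.method_metrics.get("tools/call").unwrap();
        assert_eq!(method.count, 2);
        assert_eq!(method.success_count, 1);
        assert_eq!(method.error_count, 1);
        // Rolling average of 100ms and 300ms is 200ms.
        assert!((method.avg_duration_ms - 200.0).abs() < 1e-9);

        plugin.reset_metrics();
        let after = plugin.get_metrics();
        assert!(after.method_metrics.is_empty());
        // start_time is preserved across resets; last_reset moves forward.
        assert_eq!(after.start_time, before.start_time);
        assert!(after.last_reset >= before.last_reset);
    }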

    #[tokio::test]
    async fn test_retry_plugin_creation() {
        let config = RetryConfig {
            max_retries: 5,
            base_delay_ms: 200,
            max_delay_ms: 2000,
            backoff_multiplier: 1.5,
            retry_on_timeout: true,
            retry_on_connection_error: false,
        };

        let plugin = RetryPlugin::new(PluginConfig::Retry(config.clone()));
        assert_eq!(plugin.name(), "retry");
        assert_eq!(plugin.config.max_retries, 5);
        assert_eq!(plugin.config.base_delay_ms, 200);
    }
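
    // Added test sketch (not in the original): covers the retry-count
    // bookkeeping helpers (get/increment/clear), which are private but
    // visible to this module.
    #[test]
    fn test_retry_count_tracking() {
        let plugin = RetryPlugin::new(PluginConfig::Retry(RetryConfig::default()));

        assert_eq!(plugin.get_retry_count("req-1"), 0);
        plugin.increment_retry_count("req-1");
        plugin.increment_retry_count("req-1");
        assert_eq!(plugin.get_retry_count("req-1"), 2);

        plugin.clear_retry_count("req-1");
        assert_eq!(plugin.get_retry_count("req-1"), 0);
    }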

    #[tokio::test]
    async fn test_cache_plugin_creation() {
        let config = CacheConfig {
            max_entries: 500,
            ttl_seconds: 600,
            cache_responses: true,
            cache_resources: false,
            cache_tools: true,
        };

        let plugin = CachePlugin::new(PluginConfig::Cache(config.clone()));
        assert_eq!(plugin.name(), "cache");
        assert_eq!(plugin.config.max_entries, 500);
        assert_eq!(plugin.config.ttl_seconds, 600);
    }
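
    // Added test sketch (not in the original): checks should_cache_method
    // against the per-category flags in CacheConfig.
    #[test]
    fn test_cache_method_selection() {
        let config = CacheConfig {
            max_entries: 10,
            ttl_seconds: 60,
            cache_responses: false,
            cache_resources: false,
            cache_tools: true,
        };
        let plugin = CachePlugin::new(PluginConfig::Cache(config));

        assert!(plugin.should_cache_method("tools/call"));
        assert!(!plugin.should_cache_method("resources/read"));
        assert!(!plugin.should_cache_method("prompts/list"));
    }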

    #[test]
    fn test_retry_delay_calculation() {
        let config = RetryConfig {
            max_retries: 3,
            base_delay_ms: 100,
            max_delay_ms: 1000,
            backoff_multiplier: 2.0,
            retry_on_timeout: true,
            retry_on_connection_error: true,
        };

        let plugin = RetryPlugin::new(PluginConfig::Retry(config));

        assert_eq!(plugin.calculate_delay(0), Duration::from_millis(100));
        assert_eq!(plugin.calculate_delay(1), Duration::from_millis(200));
        assert_eq!(plugin.calculate_delay(2), Duration::from_millis(400));
        assert_eq!(plugin.calculate_delay(3), Duration::from_millis(800));
        assert_eq!(plugin.calculate_delay(4), Duration::from_millis(1000)); // capped at max_delay_ms
    }
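
    // Added test sketch (not in the original): round-trips a value through
    // the private store_cached/get_cached helpers and checks miss behavior.
    #[test]
    fn test_cache_store_and_get() {
        let plugin = CachePlugin::new(PluginConfig::Cache(CacheConfig::default()));

        assert_eq!(plugin.get_cached("tools/list:no_params"), None);
        plugin.store_cached("tools/list:no_params".to_string(), json!({"tools": []}));
        assert_eq!(
            plugin.get_cached("tools/list:no_params"),
            Some(json!({"tools": []}))
        );
    }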

    #[test]
    fn test_cache_entry_expiration() {
        let entry = CacheEntry::new(json!({"test": "data"}));
        assert!(!entry.is_expired(Duration::from_secs(1)));
        // Exercising actual expiry here would require sleeping past the TTL.
    }
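
    // Added test sketch (not in the original): hash64 should be deterministic
    // for equal JSON values within a process, which the cache-key scheme
    // relies on.
    #[test]
    fn test_param_hashing_is_deterministic() {
        let a = json!({"path": "/tmp/file", "limit": 10});
        let b = json!({"path": "/tmp/file", "limit": 10});
        assert_eq!(fxhash::hash64(&a), fxhash::hash64(&b));
    }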
}