//! Built-in client plugins: request/response metrics collection, automatic
//! retry with exponential backoff, and response caching.

use crate::plugins::core::{
    ClientPlugin, PluginConfig, PluginContext, PluginResult, RequestContext, ResponseContext,
};
use async_trait::async_trait;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_json::{Value, json};
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
use std::time::{Duration, Instant};
use tracing::{debug, info, warn};

/// Aggregated request/response metrics collected by [`MetricsPlugin`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MetricsData {
    /// Total number of requests observed.
    pub total_requests: u64,

    /// Number of successful responses.
    pub successful_responses: u64,

    /// Number of failed responses.
    pub failed_responses: u64,

    /// Rolling average response time in milliseconds.
    pub avg_response_time_ms: f64,

    /// Minimum observed response time in milliseconds.
    pub min_response_time_ms: u64,

    /// Maximum observed response time in milliseconds.
    pub max_response_time_ms: u64,

    /// Number of requests observed during the last 60 seconds.
    pub requests_per_minute: f64,

    /// Per-method statistics keyed by method name.
    pub method_metrics: HashMap<String, MethodMetrics>,

    /// When metrics collection started.
    pub start_time: DateTime<Utc>,

    /// When the metrics were last reset.
    pub last_reset: DateTime<Utc>,
}

/// Call statistics for a single method.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MethodMetrics {
    pub count: u64,
    pub avg_duration_ms: f64,
    pub success_count: u64,
    pub error_count: u64,
}

impl Default for MetricsData {
    fn default() -> Self {
        let now = Utc::now();
        Self {
            total_requests: 0,
            successful_responses: 0,
            failed_responses: 0,
            avg_response_time_ms: 0.0,
            min_response_time_ms: u64::MAX,
            max_response_time_ms: 0,
            requests_per_minute: 0.0,
            method_metrics: HashMap::new(),
            start_time: now,
            last_reset: now,
        }
    }
}

/// Plugin that records request/response metrics for a client.
#[derive(Debug)]
pub struct MetricsPlugin {
    /// Aggregated metrics.
    metrics: Arc<Mutex<MetricsData>>,

    /// Start instants for in-flight requests, keyed by request id.
    request_times: Arc<Mutex<HashMap<String, Instant>>>,

    /// Timestamps of requests seen within the last minute.
    recent_requests: Arc<Mutex<Vec<Instant>>>,
}

impl MetricsPlugin {
    /// Create a new metrics plugin. The configuration is currently unused.
    pub fn new(_config: PluginConfig) -> Self {
        Self {
            metrics: Arc::new(Mutex::new(MetricsData::default())),
            request_times: Arc::new(Mutex::new(HashMap::new())),
            recent_requests: Arc::new(Mutex::new(Vec::new())),
        }
    }

    /// Return a snapshot of the current metrics.
    pub fn get_metrics(&self) -> MetricsData {
        self.metrics.lock().unwrap().clone()
    }

    /// Reset all counters while preserving the original start time.
    pub fn reset_metrics(&self) {
        let mut metrics = self.metrics.lock().unwrap();
        let now = Utc::now();
        *metrics = MetricsData {
            start_time: metrics.start_time,
            last_reset: now,
            ..MetricsData::default()
        };
        self.request_times.lock().unwrap().clear();
        self.recent_requests.lock().unwrap().clear();
    }

    fn update_request_rate(&self) {
        let mut recent = self.recent_requests.lock().unwrap();
        let now = Instant::now();

        // Drop timestamps older than one minute, then record this request.
        recent.retain(|&timestamp| now.duration_since(timestamp).as_secs() < 60);
        recent.push(now);

        let mut metrics = self.metrics.lock().unwrap();
        metrics.requests_per_minute = recent.len() as f64;
    }

    fn update_method_metrics(&self, method: &str, duration: Duration, is_success: bool) {
        let mut metrics = self.metrics.lock().unwrap();
        let entry = metrics
            .method_metrics
            .entry(method.to_string())
            .or_insert(MethodMetrics {
                count: 0,
                avg_duration_ms: 0.0,
                success_count: 0,
                error_count: 0,
            });

        entry.count += 1;
        if is_success {
            entry.success_count += 1;
        } else {
            entry.error_count += 1;
        }

        // Incremental rolling average: new_avg = (old_avg * (n - 1) + sample) / n.
        let duration_ms = duration.as_millis() as f64;
        entry.avg_duration_ms =
            (entry.avg_duration_ms * (entry.count - 1) as f64 + duration_ms) / entry.count as f64;
    }
}

#[async_trait]
impl ClientPlugin for MetricsPlugin {
    fn name(&self) -> &str {
        "metrics"
    }

    fn version(&self) -> &str {
        "1.0.0"
    }

    fn description(&self) -> Option<&str> {
        Some("Collects request/response metrics and performance data")
    }

    async fn initialize(&self, context: &PluginContext) -> PluginResult<()> {
        info!(
            "Metrics plugin initialized for client: {}",
            context.client_name
        );
        Ok(())
    }

    async fn before_request(&self, context: &mut RequestContext) -> PluginResult<()> {
        let request_id = context.request.id.to_string();

        self.request_times
            .lock()
            .unwrap()
            .insert(request_id.clone(), Instant::now());

        {
            let mut metrics = self.metrics.lock().unwrap();
            metrics.total_requests += 1;
        }

        self.update_request_rate();

        context.add_metadata("metrics.request_id".to_string(), json!(request_id));
        context.add_metadata(
            "metrics.start_time".to_string(),
            json!(Utc::now().to_rfc3339()),
        );

        debug!(
            "Metrics: Recorded request start for method: {}",
            context.method()
        );
        Ok(())
    }

    async fn after_response(&self, context: &mut ResponseContext) -> PluginResult<()> {
        let request_id = context.request_context.request.id.to_string();

        let duration =
            if let Some(start_time) = self.request_times.lock().unwrap().remove(&request_id) {
                start_time.elapsed()
            } else {
                context.duration
            };

        let is_success = context.is_success();
        let method = context.method().to_string();
        let duration_ms = duration.as_millis();

        {
            let mut metrics = self.metrics.lock().unwrap();

            if is_success {
                metrics.successful_responses += 1;
            } else {
                metrics.failed_responses += 1;
            }

            let duration_ms_u64 = duration_ms as u64;

            if duration_ms_u64 < metrics.min_response_time_ms {
                metrics.min_response_time_ms = duration_ms_u64;
            }
            if duration_ms_u64 > metrics.max_response_time_ms {
                metrics.max_response_time_ms = duration_ms_u64;
            }

            let total_responses = metrics.successful_responses + metrics.failed_responses;
            metrics.avg_response_time_ms =
                (metrics.avg_response_time_ms * (total_responses - 1) as f64 + duration_ms as f64)
                    / total_responses as f64;
        }

        self.update_method_metrics(&method, duration, is_success);

        context.add_metadata("metrics.duration_ms".to_string(), json!(duration_ms));
        context.add_metadata("metrics.success".to_string(), json!(is_success));

        debug!(
            "Metrics: Recorded response for method: {} ({}ms, success: {})",
            method, duration_ms, is_success
        );

        Ok(())
    }

    async fn handle_custom(
        &self,
        method: &str,
        params: Option<Value>,
    ) -> PluginResult<Option<Value>> {
        match method {
            "metrics.get_stats" => {
                let metrics = self.get_metrics();
                Ok(Some(serde_json::to_value(metrics).unwrap()))
            }
            "metrics.reset" => {
                self.reset_metrics();
                info!("Metrics reset");
                Ok(Some(json!({"status": "reset"})))
            }
            "metrics.get_method_stats" => {
                if let Some(params) = params {
                    if let Some(method_name) = params.get("method").and_then(|v| v.as_str()) {
                        let metrics = self.metrics.lock().unwrap();
                        if let Some(method_metrics) = metrics.method_metrics.get(method_name) {
                            Ok(Some(serde_json::to_value(method_metrics).unwrap()))
                        } else {
                            Ok(Some(json!({"error": "Method not found"})))
                        }
                    } else {
                        Ok(Some(json!({"error": "Method parameter required"})))
                    }
                } else {
                    Ok(Some(json!({"error": "Parameters required"})))
                }
            }
            _ => Ok(None),
        }
    }
}

/// Configuration for the retry plugin.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RetryConfig {
    /// Maximum number of retry attempts after the initial request.
    pub max_retries: u32,

    /// Base delay before the first retry, in milliseconds.
    pub base_delay_ms: u64,

    /// Upper bound on the retry delay, in milliseconds.
    pub max_delay_ms: u64,

    /// Multiplier applied to the delay for each successive attempt.
    pub backoff_multiplier: f64,

    /// Whether timeout errors should be retried.
    pub retry_on_timeout: bool,

    /// Whether connection/transport errors should be retried.
    pub retry_on_connection_error: bool,
}

impl Default for RetryConfig {
    fn default() -> Self {
        Self {
            max_retries: 3,
            base_delay_ms: 100,
            max_delay_ms: 5000,
            backoff_multiplier: 2.0,
            retry_on_timeout: true,
            retry_on_connection_error: true,
        }
    }
}

/// Plugin that tracks retry attempts and recommends retries with exponential backoff.
#[derive(Debug)]
pub struct RetryPlugin {
    config: RetryConfig,
    retry_stats: Arc<Mutex<HashMap<String, u32>>>,
}

impl RetryPlugin {
    /// Create a retry plugin from the given configuration, falling back to defaults.
    pub fn new(config: PluginConfig) -> Self {
        let retry_config = match config {
            PluginConfig::Retry(config) => config,
            _ => RetryConfig::default(),
        };

        Self {
            config: retry_config,
            retry_stats: Arc::new(Mutex::new(HashMap::new())),
        }
    }

    /// Decide whether an error is retryable under the current configuration.
    fn should_retry(&self, error: &turbomcp_core::Error) -> bool {
        let error_string = error.to_string().to_lowercase();

        if self.config.retry_on_connection_error
            && (error_string.contains("transport") || error_string.contains("connection"))
        {
            return true;
        }

        if self.config.retry_on_timeout && error_string.contains("timeout") {
            return true;
        }

        false
    }

    /// Exponential backoff: base_delay * multiplier^attempt, capped at max_delay.
    fn calculate_delay(&self, attempt: u32) -> Duration {
        let delay_ms = (self.config.base_delay_ms as f64
            * self.config.backoff_multiplier.powi(attempt as i32)) as u64;
        Duration::from_millis(delay_ms.min(self.config.max_delay_ms))
    }

    fn get_retry_count(&self, request_id: &str) -> u32 {
        self.retry_stats
            .lock()
            .unwrap()
            .get(request_id)
            .copied()
            .unwrap_or(0)
    }

    fn increment_retry_count(&self, request_id: &str) {
        let mut stats = self.retry_stats.lock().unwrap();
        let count = stats.entry(request_id.to_string()).or_insert(0);
        *count += 1;
    }

    fn clear_retry_count(&self, request_id: &str) {
        self.retry_stats.lock().unwrap().remove(request_id);
    }
}

#[async_trait]
impl ClientPlugin for RetryPlugin {
    fn name(&self) -> &str {
        "retry"
    }

    fn version(&self) -> &str {
        "1.0.0"
    }

    fn description(&self) -> Option<&str> {
        Some("Automatic retry with exponential backoff for failed requests")
    }

    async fn initialize(&self, context: &PluginContext) -> PluginResult<()> {
        info!(
            "Retry plugin initialized for client: {} (max_retries: {}, base_delay: {}ms)",
            context.client_name, self.config.max_retries, self.config.base_delay_ms
        );
        Ok(())
    }

    async fn before_request(&self, context: &mut RequestContext) -> PluginResult<()> {
        let request_id = context.request.id.to_string();
        let retry_count = self.get_retry_count(&request_id);

        context.add_metadata("retry.attempt".to_string(), json!(retry_count + 1));
        context.add_metadata(
            "retry.max_attempts".to_string(),
            json!(self.config.max_retries + 1),
        );

        if retry_count > 0 {
            debug!(
                "Retry: Attempt {} for request {} (method: {})",
                retry_count + 1,
                request_id,
                context.method()
            );
        }

        Ok(())
    }

    async fn after_response(&self, context: &mut ResponseContext) -> PluginResult<()> {
        let request_id = context.request_context.request.id.to_string();

        if context.is_success() {
            self.clear_retry_count(&request_id);
            debug!("Retry: Request {} succeeded", request_id);
        } else if let Some(error) = &context.error {
            let retry_count = self.get_retry_count(&request_id);

            if self.should_retry(error) && retry_count < self.config.max_retries {
                self.increment_retry_count(&request_id);

                let delay = self.calculate_delay(retry_count);
                warn!(
                    "Retry: Request {} failed (attempt {}), will retry after {:?}",
                    request_id,
                    retry_count + 1,
                    delay
                );

                context.add_metadata("retry.will_retry".to_string(), json!(true));
                context.add_metadata("retry.delay_ms".to_string(), json!(delay.as_millis()));
                context.add_metadata("retry.next_attempt".to_string(), json!(retry_count + 2));

                context.add_metadata("retry.should_retry".to_string(), json!(true));
                context.add_metadata(
                    "retry.recommended_action".to_string(),
                    json!("retry_request"),
                );
            } else {
                self.clear_retry_count(&request_id);
                if retry_count >= self.config.max_retries {
                    warn!("Retry: Request {} exhausted all retry attempts", request_id);
                } else {
                    debug!("Retry: Error not retryable for request {}", request_id);
                }
                context.add_metadata("retry.will_retry".to_string(), json!(false));
                context.add_metadata(
                    "retry.reason".to_string(),
                    json!(if retry_count >= self.config.max_retries {
                        "max_retries_reached"
                    } else {
                        "error_not_retryable"
                    }),
                );
            }
        }

        Ok(())
    }

    async fn handle_custom(
        &self,
        method: &str,
        _params: Option<Value>,
    ) -> PluginResult<Option<Value>> {
        match method {
            "retry.get_config" => Ok(Some(serde_json::to_value(&self.config).unwrap())),
            "retry.get_stats" => {
                let stats = self.retry_stats.lock().unwrap();
                Ok(Some(json!({
                    "active_retries": stats.len(),
                    "retry_counts": stats.clone()
                })))
            }
            "retry.clear_stats" => {
                self.retry_stats.lock().unwrap().clear();
                info!("Retry stats cleared");
                Ok(Some(json!({"status": "cleared"})))
            }
            _ => Ok(None),
        }
    }
}

/// Configuration for the cache plugin.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CacheConfig {
    /// Maximum number of cached entries before eviction kicks in.
    pub max_entries: usize,

    /// Time-to-live for cached entries, in seconds.
    pub ttl_seconds: u64,

    /// Whether to cache generic responses.
    pub cache_responses: bool,

    /// Whether to cache resource responses.
    pub cache_resources: bool,

    /// Whether to cache tool responses.
    pub cache_tools: bool,
}

impl Default for CacheConfig {
    fn default() -> Self {
        Self {
            max_entries: 1000,
            ttl_seconds: 300,
            cache_responses: true,
            cache_resources: true,
            cache_tools: true,
        }
    }
}

/// A single cached response with its insertion time and access count.
#[derive(Debug, Clone)]
struct CacheEntry {
    data: Value,
    timestamp: Instant,
    access_count: u64,
}

impl CacheEntry {
    fn new(data: Value) -> Self {
        Self {
            data,
            timestamp: Instant::now(),
            access_count: 0,
        }
    }

    fn is_expired(&self, ttl: Duration) -> bool {
        self.timestamp.elapsed() > ttl
    }

    fn access(&mut self) -> &Value {
        self.access_count += 1;
        &self.data
    }
}

/// Plugin that caches successful responses with TTL-based expiry.
#[derive(Debug)]
pub struct CachePlugin {
    config: CacheConfig,
    cache: Arc<Mutex<HashMap<String, CacheEntry>>>,
    stats: Arc<Mutex<CacheStats>>,
}

#[derive(Debug, Default)]
struct CacheStats {
    hits: u64,
    misses: u64,
    evictions: u64,
    total_entries: u64,
}

impl CachePlugin {
    /// Create a cache plugin from the given configuration, falling back to defaults.
    pub fn new(config: PluginConfig) -> Self {
        let cache_config = match config {
            PluginConfig::Cache(config) => config,
            _ => CacheConfig::default(),
        };

        Self {
            config: cache_config,
            cache: Arc::new(Mutex::new(HashMap::new())),
            stats: Arc::new(Mutex::new(CacheStats::default())),
        }
    }

    fn should_cache_method(&self, method: &str) -> bool {
        match method {
            m if m.starts_with("tools/") && self.config.cache_tools => true,
            m if m.starts_with("resources/") && self.config.cache_resources => true,
            _ if self.config.cache_responses => true,
            _ => false,
        }
    }

    fn generate_cache_key(&self, context: &RequestContext) -> String {
        let params_hash = if let Some(params) = &context.request.params {
            format!("{:x}", fxhash::hash64(params))
        } else {
            "no_params".to_string()
        };
        format!("{}:{}", context.method(), params_hash)
    }

    fn get_cached(&self, key: &str) -> Option<Value> {
        let mut cache = self.cache.lock().unwrap();
        let ttl = Duration::from_secs(self.config.ttl_seconds);

        if let Some(entry) = cache.get_mut(key) {
            if !entry.is_expired(ttl) {
                let mut stats = self.stats.lock().unwrap();
                stats.hits += 1;
                return Some(entry.access().clone());
            } else {
                cache.remove(key);
                let mut stats = self.stats.lock().unwrap();
                stats.evictions += 1;
            }
        }

        let mut stats = self.stats.lock().unwrap();
        stats.misses += 1;
        None
    }

    fn store_cached(&self, key: String, data: Value) {
        let mut cache = self.cache.lock().unwrap();

        // When the cache is full, evict the oldest entries first.
        if cache.len() >= self.config.max_entries {
            let evict_keys: Vec<_> = {
                let mut entries: Vec<_> = cache
                    .iter()
                    .map(|(k, v)| (k.clone(), v.timestamp))
                    .collect();
                entries.sort_by_key(|(_, timestamp)| *timestamp);

                let evict_count = (cache.len() - self.config.max_entries + 1).min(cache.len() / 2);
                entries
                    .into_iter()
                    .take(evict_count)
                    .map(|(key, _)| key)
                    .collect()
            };

            let evict_count = evict_keys.len();
            for key in evict_keys {
                cache.remove(&key);
            }

            let mut stats = self.stats.lock().unwrap();
            stats.evictions += evict_count as u64;
        }

        cache.insert(key, CacheEntry::new(data));
        let mut stats = self.stats.lock().unwrap();
        stats.total_entries += 1;
    }

    fn cleanup_expired(&self) {
        let mut cache = self.cache.lock().unwrap();
        let ttl = Duration::from_secs(self.config.ttl_seconds);

        let expired_keys: Vec<_> = cache
            .iter()
            .filter(|(_, entry)| entry.is_expired(ttl))
            .map(|(key, _)| key.clone())
            .collect();

        let eviction_count = expired_keys.len();
        for key in expired_keys {
            cache.remove(&key);
        }

        if eviction_count > 0 {
            let mut stats = self.stats.lock().unwrap();
            stats.evictions += eviction_count as u64;
            debug!("Cache: Cleaned up {} expired entries", eviction_count);
        }
    }
}

#[async_trait]
impl ClientPlugin for CachePlugin {
    fn name(&self) -> &str {
        "cache"
    }

    fn version(&self) -> &str {
        "1.0.0"
    }

    fn description(&self) -> Option<&str> {
        Some("Response caching with TTL and LRU eviction")
    }

    async fn initialize(&self, context: &PluginContext) -> PluginResult<()> {
        info!(
            "Cache plugin initialized for client: {} (max_entries: {}, ttl: {}s)",
            context.client_name, self.config.max_entries, self.config.ttl_seconds
        );
        Ok(())
    }

    async fn before_request(&self, context: &mut RequestContext) -> PluginResult<()> {
        if !self.should_cache_method(context.method()) {
            return Ok(());
        }

        let cache_key = self.generate_cache_key(context);

        if let Some(cached_data) = self.get_cached(&cache_key) {
            debug!(
                "Cache: Hit for method {} (key: {})",
                context.method(),
                cache_key
            );
            context.add_metadata("cache.hit".to_string(), json!(true));
            context.add_metadata("cache.key".to_string(), json!(cache_key));
            context.add_metadata("cache.response_source".to_string(), json!("cache"));
            context.add_metadata("cache.response_data".to_string(), cached_data.clone());
            context.add_metadata("cache.should_skip_request".to_string(), json!(true));
        } else {
            debug!(
                "Cache: Miss for method {} (key: {})",
                context.method(),
                cache_key
            );
            context.add_metadata("cache.hit".to_string(), json!(false));
            context.add_metadata("cache.key".to_string(), json!(cache_key));
            context.add_metadata("cache.should_skip_request".to_string(), json!(false));
        }

        Ok(())
    }

    async fn after_response(&self, context: &mut ResponseContext) -> PluginResult<()> {
        if let Some(cached_response_data) =
            context.request_context.get_metadata("cache.response_data")
        {
            context.response = Some(cached_response_data.clone());
            debug!(
                "Cache: Used cached response for method {}",
                context.method()
            );
            return Ok(());
        }

        if !self.should_cache_method(context.method()) || !context.is_success() {
            return Ok(());
        }

        if let Some(cache_key) = context
            .request_context
            .get_metadata("cache.key")
            .and_then(|v| v.as_str())
            && let Some(response_data) = &context.response
        {
            self.store_cached(cache_key.to_string(), response_data.clone());
            debug!(
                "Cache: Stored response for method {} (key: {})",
                context.method(),
                cache_key
            );
            context.add_metadata("cache.stored".to_string(), json!(true));
        }

        self.cleanup_expired();

        Ok(())
    }

    async fn handle_custom(
        &self,
        method: &str,
        params: Option<Value>,
    ) -> PluginResult<Option<Value>> {
        match method {
            "cache.get_stats" => {
                let stats = self.stats.lock().unwrap();
                let cache_size = self.cache.lock().unwrap().len();

                Ok(Some(json!({
                    "hits": stats.hits,
                    "misses": stats.misses,
                    "evictions": stats.evictions,
                    "total_entries": stats.total_entries,
                    "current_size": cache_size,
                    "hit_rate": if stats.hits + stats.misses > 0 {
                        stats.hits as f64 / (stats.hits + stats.misses) as f64
                    } else {
                        0.0
                    }
                })))
            }
            "cache.clear" => {
                let mut cache = self.cache.lock().unwrap();
                let cleared_count = cache.len();
                cache.clear();
                info!("Cache: Cleared {} entries", cleared_count);
                Ok(Some(json!({"cleared_entries": cleared_count})))
            }
            "cache.get_config" => Ok(Some(serde_json::to_value(&self.config).unwrap())),
            "cache.cleanup" => {
                self.cleanup_expired();
                let cache_size = self.cache.lock().unwrap().len();
                Ok(Some(json!({"remaining_entries": cache_size})))
            }
            "cache.get" => {
                if let Some(params) = params {
                    if let Some(key) = params.get("key").and_then(|v| v.as_str()) {
                        if let Some(data) = self.get_cached(key) {
                            Ok(Some(json!({"found": true, "data": data})))
                        } else {
                            Ok(Some(json!({"found": false})))
                        }
                    } else {
                        Ok(Some(json!({"error": "Key parameter required"})))
                    }
                } else {
                    Ok(Some(json!({"error": "Parameters required"})))
                }
            }
            _ => Ok(None),
        }
    }
}

/// Minimal JSON hashing helper used to build cache keys from request params.
mod fxhash {
    use serde_json::Value;
    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    pub fn hash64(value: &Value) -> u64 {
        let mut hasher = DefaultHasher::new();

        // Hash the JSON serialization of the value.
        let json_str = value.to_string();
        json_str.hash(&mut hasher);

        hasher.finish()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[tokio::test]
    async fn test_metrics_plugin_creation() {
        let plugin = MetricsPlugin::new(PluginConfig::Metrics);
        assert_eq!(plugin.name(), "metrics");

        let metrics = plugin.get_metrics();
        assert_eq!(metrics.total_requests, 0);
        assert_eq!(metrics.successful_responses, 0);
    }
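
    // Added sketch (not part of the original file): drives the metrics plugin's
    // handle_custom entry points using only APIs defined in this module.
    #[tokio::test]
    async fn test_metrics_custom_handlers() {
        let plugin = MetricsPlugin::new(PluginConfig::Metrics);

        // Unknown custom methods are not handled by this plugin.
        let unhandled = plugin.handle_custom("unknown.method", None).await.unwrap();
        assert!(unhandled.is_none());

        // "metrics.get_stats" returns the serialized MetricsData snapshot.
        let stats = plugin
            .handle_custom("metrics.get_stats", None)
            .await
            .unwrap()
            .unwrap();
        assert_eq!(stats["total_requests"], json!(0));

        // "metrics.reset" clears the counters and reports its status.
        let reset = plugin
            .handle_custom("metrics.reset", None)
            .await
            .unwrap()
            .unwrap();
        assert_eq!(reset["status"], json!("reset"));
    }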

    #[tokio::test]
    async fn test_retry_plugin_creation() {
        let config = RetryConfig {
            max_retries: 5,
            base_delay_ms: 200,
            max_delay_ms: 2000,
            backoff_multiplier: 1.5,
            retry_on_timeout: true,
            retry_on_connection_error: false,
        };

        let plugin = RetryPlugin::new(PluginConfig::Retry(config.clone()));
        assert_eq!(plugin.name(), "retry");
        assert_eq!(plugin.config.max_retries, 5);
        assert_eq!(plugin.config.base_delay_ms, 200);
    }
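
    // Added sketch (not part of the original file): round-trips the private retry
    // counter helpers that back before_request/after_response.
    #[test]
    fn test_retry_count_tracking() {
        let plugin = RetryPlugin::new(PluginConfig::Retry(RetryConfig::default()));

        assert_eq!(plugin.get_retry_count("req-1"), 0);
        plugin.increment_retry_count("req-1");
        plugin.increment_retry_count("req-1");
        assert_eq!(plugin.get_retry_count("req-1"), 2);

        plugin.clear_retry_count("req-1");
        assert_eq!(plugin.get_retry_count("req-1"), 0);
    }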

    #[tokio::test]
    async fn test_cache_plugin_creation() {
        let config = CacheConfig {
            max_entries: 500,
            ttl_seconds: 600,
            cache_responses: true,
            cache_resources: false,
            cache_tools: true,
        };

        let plugin = CachePlugin::new(PluginConfig::Cache(config.clone()));
        assert_eq!(plugin.name(), "cache");
        assert_eq!(plugin.config.max_entries, 500);
        assert_eq!(plugin.config.ttl_seconds, 600);
    }
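
    // Added sketch (not part of the original file): stores and reads back a
    // response through the private cache helpers used by the plugin hooks.
    #[test]
    fn test_cache_store_and_get() {
        let plugin = CachePlugin::new(PluginConfig::Cache(CacheConfig::default()));
        let key = "tools/list:no_params".to_string();

        // Miss before anything is stored.
        assert!(plugin.get_cached(&key).is_none());

        plugin.store_cached(key.clone(), json!({"tools": []}));
        assert_eq!(plugin.get_cached(&key), Some(json!({"tools": []})));
    }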

    #[test]
    fn test_retry_delay_calculation() {
        let config = RetryConfig {
            max_retries: 3,
            base_delay_ms: 100,
            max_delay_ms: 1000,
            backoff_multiplier: 2.0,
            retry_on_timeout: true,
            retry_on_connection_error: true,
        };

        let plugin = RetryPlugin::new(PluginConfig::Retry(config));

        assert_eq!(plugin.calculate_delay(0), Duration::from_millis(100));
        assert_eq!(plugin.calculate_delay(1), Duration::from_millis(200));
        assert_eq!(plugin.calculate_delay(2), Duration::from_millis(400));
        assert_eq!(plugin.calculate_delay(3), Duration::from_millis(800));
        // Capped at max_delay_ms.
        assert_eq!(plugin.calculate_delay(4), Duration::from_millis(1000));
    }

    #[test]
    fn test_cache_entry_expiration() {
        let entry = CacheEntry::new(json!({"test": "data"}));
        assert!(!entry.is_expired(Duration::from_secs(1)));
    }
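
    // Added sketch (not part of the original file): exercises CacheEntry's access
    // counter and checks that the local fxhash helper is deterministic.
    #[test]
    fn test_cache_entry_access_and_param_hash() {
        let mut entry = CacheEntry::new(json!({"test": "data"}));
        assert_eq!(entry.access_count, 0);

        // `access` returns the cached value and records the access.
        assert_eq!(entry.access(), &json!({"test": "data"}));
        assert_eq!(entry.access_count, 1);

        // Hashing the same params twice must yield the same cache-key component.
        let params = json!({"name": "echo", "arguments": {"msg": "hi"}});
        assert_eq!(super::fxhash::hash64(&params), super::fxhash::hash64(&params));
    }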
}