// rustfs_mcp/server.rs
// Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
14
use anyhow::Result;
use rmcp::{
    ErrorData, RoleServer, ServerHandler,
    handler::server::{router::tool::ToolRouter, wrapper::Parameters},
    model::{Implementation, ProtocolVersion, ServerCapabilities, ServerInfo},
    service::{NotificationContext, RequestContext},
    tool, tool_handler, tool_router,
};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use tracing::{debug, error, info};

use crate::config::Config;
use crate::s3_client::{
    DetectedFileType, GetObjectOptions, ListObjectsOptions, S3Client, UploadFileOptions,
};
31
/// Request payload for listing objects in a bucket.
#[derive(Serialize, Deserialize, JsonSchema)]
pub struct ListObjectsRequest {
    /// Name of the S3 bucket to list objects from.
    pub bucket_name: String,
    /// Optional key prefix; only objects whose keys start with it are listed.
    #[serde(default)]
    #[schemars(description = "Optional prefix to filter objects")]
    pub prefix: Option<String>,
}
40
/// Request payload for uploading a file to S3.
#[derive(Serialize, Deserialize, JsonSchema)]
pub struct UploadFileRequest {
    /// Path to the local file to upload.
    #[schemars(description = "Path to the local file to upload")]
    pub local_file_path: String,
    /// Name of the destination S3 bucket.
    #[schemars(description = "Name of the S3 bucket to upload to")]
    pub bucket_name: String,
    /// Destination object key (path/filename within the bucket).
    #[schemars(description = "S3 object key (path/filename in the bucket)")]
    pub object_key: String,
    /// MIME type override; auto-detected when omitted.
    #[serde(default)]
    #[schemars(description = "Optional content type (auto-detected if not specified)")]
    pub content_type: Option<String>,
    /// Storage class override (e.g. STANDARD, REDUCED_REDUNDANCY).
    #[serde(default)]
    #[schemars(description = "Optional storage class (STANDARD, REDUCED_REDUNDANCY, etc.)")]
    pub storage_class: Option<String>,
    /// Cache-Control header value to associate with the stored object.
    #[serde(default)]
    #[schemars(description = "Optional cache control header")]
    pub cache_control: Option<String>,
}
60
61/// Request payload for creating a bucket.
62#[derive(Serialize, Deserialize, JsonSchema)]
63pub struct CreateBucketReqeust {
64    #[schemars(description = "Name of the S3 bucket to create")]
65    pub bucket_name: String,
66}
67
68/// Request payload for deleting a bucket.
69#[derive(Serialize, Deserialize, JsonSchema)]
70pub struct DeleteBucketReqeust {
71    #[schemars(description = "Name of the S3 bucket to delete")]
72    pub bucket_name: String,
73}
74
/// Request payload for reading/downloading an object.
#[derive(Serialize, Deserialize, JsonSchema)]
pub struct GetObjectRequest {
    /// Name of the S3 bucket holding the object.
    #[schemars(description = "Name of the S3 bucket")]
    pub bucket_name: String,
    /// Object key (path/filename within the bucket).
    #[schemars(description = "S3 object key (path/filename in the bucket)")]
    pub object_key: String,
    /// Specific version to fetch on versioned buckets; latest when omitted.
    #[serde(default)]
    #[schemars(description = "Optional version ID for versioned objects")]
    pub version_id: Option<String>,
    /// Operation mode; defaults to `read` when omitted from the JSON payload.
    #[serde(default = "default_operation_mode")]
    #[schemars(
        description = "Operation mode: read (return content) or download (save to local file)"
    )]
    pub mode: GetObjectMode,
    /// Destination path; required only when `mode` is `download`.
    #[serde(default)]
    #[schemars(description = "Local file path for download mode (required when mode is download)")]
    pub local_path: Option<String>,
    /// Upper bound on bytes returned in read mode; defaults to 1 MiB.
    #[serde(default = "default_max_content_size")]
    #[schemars(description = "Maximum content size to read in bytes for read mode (default: 1MB)")]
    pub max_content_size: usize,
}
97
98#[derive(Serialize, Deserialize, JsonSchema, Debug, Clone, PartialEq)]
99pub enum GetObjectMode {
100    /// Read object content directly and return it in response text.
101    #[serde(rename = "read")]
102    Read,
103    /// Download object content to a local file path.
104    #[serde(rename = "download")]
105    Download,
106}
107
/// Serde default for `GetObjectRequest::mode`: read mode.
fn default_operation_mode() -> GetObjectMode {
    GetObjectMode::Read
}
/// Serde default for `GetObjectRequest::max_content_size`: 1 MiB.
fn default_max_content_size() -> usize {
    // 1 MiB expressed as a shifted power of two (== 1024 * 1024).
    1usize << 20
}
114
/// MCP server exposing S3 operations as callable tools.
#[derive(Debug, Clone)]
pub struct RustfsMcpServer {
    // Shared S3 client used by every tool handler below.
    s3_client: S3Client,
    // Validated configuration retained after construction; the underscore
    // prefix signals it is not currently read again.
    _config: Config,
    // Router generated by `#[tool_router]`; dispatches tool calls to the
    // `#[tool]`-annotated methods.
    tool_router: ToolRouter<Self>,
}
121
122#[tool_router(router = tool_router)]
123impl RustfsMcpServer {
124    /// Build a server instance from validated configuration.
125    pub async fn new(config: Config) -> Result<Self> {
126        info!("Creating RustFS MCP Server");
127
128        let s3_client = S3Client::new(&config).await?;
129
130        Ok(Self {
131            s3_client,
132            _config: config,
133            tool_router: Self::tool_router(),
134        })
135    }
136
137    #[tool(description = "Create a new S3 bucket with the specified name")]
138    pub async fn create_bucket(&self, Parameters(req): Parameters<CreateBucketReqeust>) -> String {
139        info!(
140            "Executing create_bucket tool for bucket: {}",
141            req.bucket_name
142        );
143
144        match self.s3_client.create_bucket(&req.bucket_name).await {
145            Ok(_) => {
146                format!("Successfully created bucket: {}", req.bucket_name)
147            }
148            Err(e) => {
149                format!("Failed to create bucket '{}': {:?}", req.bucket_name, e)
150            }
151        }
152    }
153
154    #[tool(description = "Delete an existing S3 bucket with the specified name")]
155    pub async fn delete_bucket(&self, Parameters(req): Parameters<DeleteBucketReqeust>) -> String {
156        info!(
157            "Executing delete_bucket tool for bucket: {}",
158            req.bucket_name
159        );
160
161        // check if bucket is empty, if not, can not delete bucket directly.
162        let object_result = match self
163            .s3_client
164            .list_objects_v2(&req.bucket_name, ListObjectsOptions::default())
165            .await
166        {
167            Ok(result) => result,
168            Err(e) => {
169                error!(
170                    "Failed to list objects in bucket '{}': {:?}",
171                    req.bucket_name, e
172                );
173                return format!(
174                    "Failed to list objects in bucket '{}': {:?}",
175                    req.bucket_name, e
176                );
177            }
178        };
179
180        if !object_result.objects.is_empty() {
181            error!("Bucket '{}' is not empty", req.bucket_name);
182            return format!(
183                "Failed to delete bucket '{}': bucket is not empty",
184                req.bucket_name
185            );
186        }
187
188        // delete the bucket.
189        match self.s3_client.delete_bucket(&req.bucket_name).await {
190            Ok(_) => {
191                format!("Successfully deleted bucket: {}", req.bucket_name)
192            }
193            Err(e) => {
194                format!("Failed to delete bucket '{}': {:?}", req.bucket_name, e)
195            }
196        }
197    }
198
199    #[tool(description = "List all S3 buckets accessible with the configured credentials")]
200    pub async fn list_buckets(&self) -> String {
201        info!("Executing list_buckets tool");
202
203        match self.s3_client.list_buckets().await {
204            Ok(buckets) => {
205                debug!("Successfully retrieved {} buckets", buckets.len());
206
207                if buckets.is_empty() {
208                    return "No S3 buckets found. The AWS credentials may not have access to any buckets, or no buckets exist in this account.".to_string();
209                }
210
211                let mut result_text = format!("Found {} S3 bucket(s):\n\n", buckets.len());
212
213                for (index, bucket) in buckets.iter().enumerate() {
214                    result_text.push_str(&format!("{}. **{}**", index + 1, bucket.name));
215
216                    if let Some(ref creation_date) = bucket.creation_date {
217                        result_text.push_str(&format!("\n   - Created: {creation_date}"));
218                    }
219                    result_text.push_str("\n\n");
220                }
221
222                result_text.push_str("---\n");
223                result_text.push_str(&format!("Total buckets: {}\n", buckets.len()));
224                result_text.push_str(
225                    "Note: Only buckets accessible with the current AWS credentials are shown.",
226                );
227
228                info!("list_buckets tool executed successfully");
229                result_text
230            }
231            Err(e) => {
232                error!("Failed to list buckets: {:?}", e);
233
234                format!(
235                    "Failed to list S3 buckets: {e}\n\nPossible causes:\n\
236                     • AWS credentials are not set or invalid\n\
237                     • Network connectivity issues\n\
238                     • AWS region is not set correctly\n\
239                     • Insufficient permissions to list buckets\n\
240                     • Custom endpoint is misconfigured\n\n\
241                     Please verify your AWS configuration and try again."
242                )
243            }
244        }
245    }
246
    /// Tool handler: list objects under an optional prefix and render the
    /// result as markdown, including pagination hints when truncated.
    #[tool(description = "List objects in a specific S3 bucket with optional prefix filtering")]
    pub async fn list_objects(&self, Parameters(req): Parameters<ListObjectsRequest>) -> String {
        info!(
            "Executing list_objects tool for bucket: {}",
            req.bucket_name
        );

        // Single listing call capped at 1000 keys; no delimiter is set here.
        let options = ListObjectsOptions {
            prefix: req.prefix.clone(),
            delimiter: None,
            max_keys: Some(1000),
            ..ListObjectsOptions::default()
        };

        match self
            .s3_client
            .list_objects_v2(&req.bucket_name, options)
            .await
        {
            Ok(result) => {
                debug!(
                    "Successfully retrieved {} objects and {} common prefixes from bucket '{}'",
                    result.objects.len(),
                    result.common_prefixes.len(),
                    req.bucket_name
                );

                // Nothing matched: return an explanation rather than an empty listing.
                if result.objects.is_empty() && result.common_prefixes.is_empty() {
                    let prefix_msg = req
                        .prefix
                        .as_ref()
                        .map(|p| format!(" with prefix '{p}'"))
                        .unwrap_or_default();
                    return format!(
                        "No objects found in bucket '{}'{prefix_msg}. The bucket may be empty or the prefix may not match any objects.",
                        req.bucket_name
                    );
                }

                let mut result_text = format!(
                    "Found {} object(s) in bucket **{}**",
                    result.key_count, req.bucket_name
                );

                if let Some(ref p) = req.prefix {
                    result_text.push_str(&format!(" with prefix '{p}'"));
                }
                result_text.push_str(":\n\n");

                // Directory-like common prefixes are listed first.
                if !result.common_prefixes.is_empty() {
                    result_text.push_str("**Directories:**\n");
                    for (index, prefix) in result.common_prefixes.iter().enumerate() {
                        result_text.push_str(&format!("{}. 📁 {prefix}\n", index + 1));
                    }
                    result_text.push('\n');
                }

                // Then individual objects, with whatever metadata is present.
                if !result.objects.is_empty() {
                    result_text.push_str("**Objects:**\n");
                    for (index, obj) in result.objects.iter().enumerate() {
                        result_text.push_str(&format!("{}. **{}**\n", index + 1, obj.key));

                        if let Some(size) = obj.size {
                            result_text.push_str(&format!("   - Size: {size} bytes\n"));
                        }

                        if let Some(ref last_modified) = obj.last_modified {
                            result_text.push_str(&format!("   - Last Modified: {last_modified}\n"));
                        }

                        if let Some(ref etag) = obj.etag {
                            result_text.push_str(&format!("   - ETag: {etag}\n"));
                        }

                        if let Some(ref storage_class) = obj.storage_class {
                            result_text.push_str(&format!("   - Storage Class: {storage_class}\n"));
                        }

                        result_text.push('\n');
                    }
                }

                // Surface pagination info when the listing was cut off.
                if result.is_truncated {
                    result_text.push_str("**Note:** Results are truncated. ");
                    if let Some(ref token) = result.next_continuation_token {
                        result_text.push_str(&format!(
                            "Use continuation token '{token}' to get more results.\n"
                        ));
                    }
                    result_text.push('\n');
                }

                result_text.push_str("---\n");
                result_text.push_str(&format!(
                    "Total: {} object(s), {} directory/ies",
                    result.objects.len(),
                    result.common_prefixes.len()
                ));

                if let Some(max_keys) = result.max_keys {
                    result_text.push_str(&format!(", Max keys: {max_keys}"));
                }

                info!(
                    "list_objects tool executed successfully for bucket '{}'",
                    req.bucket_name
                );
                result_text
            }
            Err(e) => {
                error!(
                    "Failed to list objects in bucket '{}': {:?}",
                    req.bucket_name, e
                );

                format!(
                    "Failed to list objects in S3 bucket '{}': {}\n\nPossible causes:\n\
                     • Bucket does not exist or is not accessible\n\
                     • AWS credentials lack permissions to list objects in this bucket\n\
                     • Network connectivity issues\n\
                     • Custom endpoint is misconfigured\n\
                     • Bucket name contains invalid characters\n\n\
                     Please verify the bucket name, your AWS configuration, and permissions.",
                    req.bucket_name, e
                )
            }
        }
    }
375
376    #[tool(
377        description = "Get/download an object from an S3 bucket - supports read mode for text files and download mode for all files"
378    )]
379    pub async fn get_object(&self, Parameters(req): Parameters<GetObjectRequest>) -> String {
380        info!(
381            "Executing get_object tool: s3://{}/{} (mode: {:?})",
382            req.bucket_name, req.object_key, req.mode
383        );
384
385        match req.mode {
386            GetObjectMode::Read => self.handle_read_mode(req).await,
387            GetObjectMode::Download => self.handle_download_mode(req).await,
388        }
389    }
390
    /// Fetch an object and return its contents inline when it decodes as
    /// UTF-8 text; otherwise describe the file and point at download mode.
    async fn handle_read_mode(&self, req: GetObjectRequest) -> String {
        let options = GetObjectOptions {
            version_id: req.version_id.clone(),
            // Bound how much data is pulled into memory for inline display.
            max_content_size: Some(req.max_content_size),
            ..GetObjectOptions::default()
        };

        match self
            .s3_client
            .get_object(&req.bucket_name, &req.object_key, options)
            .await
        {
            Ok(result) => {
                debug!(
                    "Successfully retrieved object s3://{}/{} ({} bytes)",
                    req.bucket_name, req.object_key, result.content_length
                );

                match result.detected_type {
                    // Text detected: inline the decoded content, or explain
                    // why decoding failed and suggest download mode.
                    DetectedFileType::Text => {
                        if let Some(ref text_content) = result.text_content {
                            format!(
                                "✅ **Text file content retrieved!**\n\n\
                                 **S3 Location:** s3://{}/{}\n\
                                 **File Size:** {} bytes\n\
                                 **Content Type:** {}\n\n\
                                 **Content:**\n```\n{}\n```",
                                result.bucket,
                                result.key,
                                result.content_length,
                                result.content_type,
                                text_content
                            )
                        } else {
                            format!(
                                "⚠️ **Text file detected but content could not be decoded!**\n\n\
                                 **S3 Location:** s3://{}/{}\n\
                                 **File Size:** {} bytes\n\
                                 **Content Type:** {}\n\n\
                                 **Note:** Could not decode file as UTF-8 text. \
                                 Try using download mode instead.",
                                result.bucket,
                                result.key,
                                result.content_length,
                                result.content_type
                            )
                        }
                    }
                    // Non-text: classify by MIME prefix for a friendlier
                    // message; the binary content itself is not returned.
                    DetectedFileType::NonText(ref mime_type) => {
                        let file_category = if mime_type.starts_with("image/") {
                            "Image"
                        } else if mime_type.starts_with("audio/") {
                            "Audio"
                        } else if mime_type.starts_with("video/") {
                            "Video"
                        } else {
                            "Binary"
                        };

                        format!(
                            "⚠️ **Non-text file detected!**\n\n\
                             **S3 Location:** s3://{}/{}\n\
                             **File Type:** {} ({})\n\
                             **File Size:** {} bytes ({:.2} MB)\n\n\
                             **Note:** This file type cannot be displayed as text.\n\
                             Please use download mode to save it to a local file:\n\n\
                             ```json\n{{\n  \"mode\": \"download\",\n  \"local_path\": \"/path/to/save/file\"\n}}\n```",
                            result.bucket,
                            result.key,
                            file_category,
                            mime_type,
                            result.content_length,
                            result.content_length as f64 / 1_048_576.0
                        )
                    }
                }
            }
            Err(e) => {
                error!(
                    "Failed to read object s3://{}/{}: {:?}",
                    req.bucket_name, req.object_key, e
                );
                self.format_error_message(&req, e)
            }
        }
    }
477
478    async fn handle_download_mode(&self, req: GetObjectRequest) -> String {
479        let local_path = match req.local_path {
480            Some(ref path) => path,
481            None => {
482                return "❌ **Error:** local_path is required when using download mode.\n\n\
483                        **Example:**\n```json\n{\n  \"mode\": \"download\",\n  \"local_path\": \"/path/to/save/file.ext\"\n}\n```"
484                    .to_string();
485            }
486        };
487
488        let options = GetObjectOptions {
489            version_id: req.version_id.clone(),
490            ..GetObjectOptions::default()
491        };
492
493        match self
494            .s3_client
495            .download_object_to_file(&req.bucket_name, &req.object_key, local_path, options)
496            .await
497        {
498            Ok((bytes_downloaded, absolute_path)) => {
499                info!(
500                    "Successfully downloaded object s3://{}/{} to {} ({} bytes)",
501                    req.bucket_name, req.object_key, absolute_path, bytes_downloaded
502                );
503
504                format!(
505                    "✅ **File downloaded successfully!**\n\n\
506                     **S3 Location:** s3://{}/{}\n\
507                     **Local Path (requested):** {}\n\
508                     **Absolute Path:** {}\n\
509                     **File Size:** {} bytes ({:.2} MB)\n\n\
510                     **✨ File saved successfully!** You can now access it at:\n\
511                     `{}`",
512                    req.bucket_name,
513                    req.object_key,
514                    local_path,
515                    absolute_path,
516                    bytes_downloaded,
517                    bytes_downloaded as f64 / 1_048_576.0,
518                    absolute_path
519                )
520            }
521            Err(e) => {
522                error!(
523                    "Failed to download object s3://{}/{} to {}: {:?}",
524                    req.bucket_name, req.object_key, local_path, e
525                );
526
527                format!(
528                    "❌ **Failed to download file from S3**\n\n\
529                     **S3 Location:** s3://{}/{}\n\
530                     **Local Path:** {}\n\
531                     **Error:** {}\n\n\
532                     **Possible causes:**\n\
533                     • Object does not exist in the specified bucket\n\
534                     • AWS credentials lack permissions to read this object\n\
535                     • Cannot write to the specified local path\n\
536                     • Insufficient disk space\n\
537                     • Network connectivity issues\n\n\
538                     **Troubleshooting steps:**\n\
539                     1. Verify the object exists using list_objects\n\
540                     2. Check your AWS credentials and permissions\n\
541                     3. Ensure the local directory exists and is writable\n\
542                     4. Check available disk space",
543                    req.bucket_name, req.object_key, local_path, e
544                )
545            }
546        }
547    }
548
    /// Build the detailed failure message shared by get_object error paths,
    /// listing likely causes and troubleshooting steps for the caller.
    fn format_error_message(&self, req: &GetObjectRequest, error: anyhow::Error) -> String {
        format!(
            "❌ **Failed to get object from S3 bucket '{}'**\n\n\
             **Object Key:** {}\n\
             **Mode:** {:?}\n\
             **Error:** {}\n\n\
             **Possible causes:**\n\
             • Object does not exist in the specified bucket\n\
             • AWS credentials lack permissions to read this object\n\
             • Network connectivity issues\n\
             • Object key contains invalid characters\n\
             • Bucket does not exist or is not accessible\n\
             • Object is in a different AWS region\n\
             • Version ID is invalid (for versioned objects)\n\n\
             **Troubleshooting steps:**\n\
             1. Verify the object exists using list_objects\n\
             2. Check your AWS credentials and permissions\n\
             3. Ensure the bucket name and object key are correct\n\
             4. Try with a different object to test connectivity\n\
             5. Check if the bucket has versioning enabled",
            req.bucket_name, req.object_key, req.mode, error
        )
    }
572
    /// Tool handler: upload a local file to a bucket and summarize the result
    /// (size, content type, ETag, version) as markdown.
    #[tool(description = "Upload a local file to an S3 bucket")]
    pub async fn upload_file(&self, Parameters(req): Parameters<UploadFileRequest>) -> String {
        info!(
            "Executing upload_file tool: '{}' -> s3://{}/{}",
            req.local_file_path, req.bucket_name, req.object_key
        );

        // Forward the optional overrides; unspecified fields keep defaults.
        let options = UploadFileOptions {
            content_type: req.content_type.clone(),
            storage_class: req.storage_class.clone(),
            cache_control: req.cache_control.clone(),
            ..UploadFileOptions::default()
        };

        match self
            .s3_client
            .upload_file(
                &req.local_file_path,
                &req.bucket_name,
                &req.object_key,
                options,
            )
            .await
        {
            Ok(result) => {
                debug!(
                    "Successfully uploaded file '{}' to s3://{}/{} ({} bytes)",
                    req.local_file_path, req.bucket_name, req.object_key, result.file_size
                );

                let mut result_text = format!(
                    "✅ **File uploaded successfully!**\n\n\
                     **Local File:** {}\n\
                     **S3 Location:** s3://{}/{}\n\
                     **File Size:** {} bytes ({:.2} MB)\n\
                     **Content Type:** {}\n\
                     **ETag:** {}\n",
                    req.local_file_path,
                    result.bucket,
                    result.key,
                    result.file_size,
                    result.file_size as f64 / 1_048_576.0,
                    result.content_type,
                    result.etag
                );

                // Version ID is only present when the bucket has versioning.
                if let Some(ref version_id) = result.version_id {
                    result_text.push_str(&format!("**Version ID:** {version_id}\n"));
                }

                result_text.push_str("\n---\n");
                result_text.push_str("**Upload Summary:**\n");
                result_text.push_str(&format!("• Source: {}\n", req.local_file_path));
                result_text.push_str(&format!("• Destination: {}\n", result.location));
                result_text.push_str(&format!("• Size: {} bytes\n", result.file_size));
                result_text.push_str(&format!("• Type: {}\n", result.content_type));

                // Files over 5 MiB get an advisory note about multipart uploads.
                if result.file_size > 5 * 1024 * 1024 {
                    result_text.push_str("\n💡 **Note:** Large file uploaded successfully. Consider using multipart upload for files larger than 100MB for better performance and reliability.");
                }

                info!(
                    "upload_file tool executed successfully: {} bytes uploaded to s3://{}/{}",
                    result.file_size, req.bucket_name, req.object_key
                );
                result_text
            }
            Err(e) => {
                error!(
                    "Failed to upload file '{}' to s3://{}/{}: {:?}",
                    req.local_file_path, req.bucket_name, req.object_key, e
                );

                format!(
                    "❌ **Failed to upload file '{}' to S3 bucket '{}'**\n\n\
                     **Error:** {}\n\n\
                     **Possible causes:**\n\
                     • Local file does not exist or is not readable\n\
                     • AWS credentials lack permissions to upload to this bucket\n\
                     • S3 bucket does not exist or is not accessible\n\
                     • Network connectivity issues\n\
                     • File path contains invalid characters or is too long\n\
                     • Insufficient disk space or memory\n\
                     • Custom endpoint is misconfigured\n\
                     • File is locked by another process\n\n\
                     **Troubleshooting steps:**\n\
                     1. Verify the local file exists and is readable\n\
                     2. Check your AWS credentials and permissions\n\
                     3. Ensure the bucket name is correct and accessible\n\
                     4. Try with a smaller file to test connectivity\n\
                     5. Check the file path for special characters\n\n\
                     **File:** {}\n\
                     **Bucket:** {}\n\
                     **Object Key:** {}",
                    req.local_file_path,
                    req.bucket_name,
                    e,
                    req.local_file_path,
                    req.bucket_name,
                    req.object_key
                )
            }
        }
    }
677}
678
// MCP protocol plumbing: `#[tool_handler]` wires incoming tool-call requests
// to the router built by `#[tool_router]` above.
#[tool_handler(router = self.tool_router)]
impl ServerHandler for RustfsMcpServer {
    /// Advertise server identity, capabilities (tools enabled), and protocol
    /// version to connecting clients.
    fn get_info(&self) -> ServerInfo {
        ServerInfo::new(ServerCapabilities::builder().enable_tools().build())
            .with_instructions(
                "RustFS MCP Server providing S3 operations through Model Context Protocol",
            )
            .with_server_info(Implementation::new(
                "rustfs-mcp-server",
                env!("CARGO_PKG_VERSION"),
            ))
            .with_protocol_version(ProtocolVersion::LATEST)
    }

    /// Liveness check; always succeeds.
    async fn ping(&self, _ctx: RequestContext<RoleServer>) -> Result<(), ErrorData> {
        info!("Received ping request");
        Ok(())
    }

    /// Notification hook invoked once the client completes initialization.
    async fn on_initialized(&self, _ctx: NotificationContext<RoleServer>) {
        info!("Client initialized successfully");
    }
}
702
#[cfg(test)]
mod tests {
    use super::*;

    /// Smoke test: building a server from a plausible config must not panic.
    ///
    /// Whether construction succeeds depends on the environment, so the
    /// outcome is deliberately not asserted — the previous
    /// `assert!(result.is_err() || result.is_ok())` was a tautology that
    /// could never fail and has been removed.
    #[tokio::test]
    async fn test_server_creation() {
        let config = Config {
            access_key_id: Some("test_key".to_string()),
            secret_access_key: Some("test_secret".to_string()),
            endpoint_url: Some("http://127.0.0.1:9000".to_string()),
            force_path_style: true,
            ..Config::default()
        };

        // Success criterion: no panic while constructing the server.
        let _result = RustfsMcpServer::new(config).await;
    }

    /// The default helpers must match the documented defaults on
    /// `GetObjectRequest` (read mode, 1 MiB cap).
    #[test]
    fn test_get_object_request_defaults() {
        let request = GetObjectRequest {
            bucket_name: "test-bucket".to_string(),
            object_key: "test-key".to_string(),
            version_id: None,
            mode: default_operation_mode(),
            local_path: None,
            max_content_size: default_max_content_size(),
        };

        assert_eq!(request.bucket_name, "test-bucket");
        assert_eq!(request.object_key, "test-key");
        assert!(request.version_id.is_none());
        assert_eq!(request.mode, GetObjectMode::Read);
        assert!(request.local_path.is_none());
        assert_eq!(request.max_content_size, 1024 * 1024);
    }

    /// A JSON round-trip must preserve every field of `GetObjectRequest`.
    #[test]
    fn test_get_object_request_serialization() {
        let request = GetObjectRequest {
            bucket_name: "test-bucket".to_string(),
            object_key: "test-key".to_string(),
            version_id: Some("version123".to_string()),
            mode: GetObjectMode::Download,
            local_path: Some("/path/to/file".to_string()),
            max_content_size: 2048,
        };

        let json = serde_json::to_string(&request).unwrap();
        let deserialized: GetObjectRequest = serde_json::from_str(&json).unwrap();

        assert_eq!(request.bucket_name, deserialized.bucket_name);
        assert_eq!(request.object_key, deserialized.object_key);
        assert_eq!(request.version_id, deserialized.version_id);
        assert_eq!(request.mode, deserialized.mode);
        assert_eq!(request.local_path, deserialized.local_path);
        assert_eq!(request.max_content_size, deserialized.max_content_size);
    }

    /// Omitted optional fields must fall back to the serde defaults.
    #[test]
    fn test_get_object_request_serde_with_defaults() {
        let json = r#"{
            "bucket_name": "test-bucket",
            "object_key": "test-key"
        }"#;

        let request: GetObjectRequest = serde_json::from_str(json).unwrap();
        assert_eq!(request.bucket_name, "test-bucket");
        assert_eq!(request.object_key, "test-key");
        assert!(request.version_id.is_none());
        assert_eq!(request.mode, GetObjectMode::Read);
        assert!(request.local_path.is_none());
        assert_eq!(request.max_content_size, 1024 * 1024);
    }

    /// The standalone default helpers return the documented values.
    #[test]
    fn test_default_functions() {
        assert_eq!(default_operation_mode(), GetObjectMode::Read);
        assert_eq!(default_max_content_size(), 1024 * 1024);
    }

    /// `GetObjectMode` must (de)serialize as the lowercase strings
    /// "read" / "download" declared via `#[serde(rename)]`.
    #[test]
    fn test_get_object_mode_serialization() {
        let read_mode = GetObjectMode::Read;
        let download_mode = GetObjectMode::Download;

        let read_json = serde_json::to_string(&read_mode).unwrap();
        let download_json = serde_json::to_string(&download_mode).unwrap();

        assert_eq!(read_json, r#""read""#);
        assert_eq!(download_json, r#""download""#);

        let read_mode_deser: GetObjectMode = serde_json::from_str(r#""read""#).unwrap();
        let download_mode_deser: GetObjectMode = serde_json::from_str(r#""download""#).unwrap();

        assert_eq!(read_mode_deser, GetObjectMode::Read);
        assert_eq!(download_mode_deser, GetObjectMode::Download);
    }

    /// Construction sanity check for the create-bucket request payload.
    #[test]
    fn test_bucket_creation() {
        let request = CreateBucketReqeust {
            bucket_name: "test-bucket".to_string(),
        };
        assert_eq!(request.bucket_name, "test-bucket");
    }

    /// Construction sanity check for the delete-bucket request payload.
    #[test]
    fn test_bucket_deletion() {
        let request = DeleteBucketReqeust {
            bucket_name: "test-bucket".to_string(),
        };
        assert_eq!(request.bucket_name, "test-bucket");
    }
}