// composio_sdk/models/files.rs
//! File handling for Composio SDK
//!
//! This module provides functionality for uploading and downloading files
//! to/from S3, as well as processing tool schemas that involve file parameters.
//!
//! # Features
//!
//! - Upload files from local paths or public URLs
//! - Download files from S3 to a local directory
//! - Transform tool schemas for file upload/download
//! - MD5 hash calculation for file integrity
//! - Security features: size limits, timeout handling, redirect protection
//!
//! # Examples
//!
//! ```rust,no_run
//! use composio_sdk::models::files::{FileUploadable, FileHelper};
//! use composio_sdk::client::ComposioClient;
//! use std::path::Path;
//!
//! # async fn example() -> Result<(), Box<dyn std::error::Error>> {
//! let client = ComposioClient::builder()
//!     .api_key("your-api-key")
//!     .build()?;
//!
//! // Upload a local file
//! let uploadable = FileUploadable::from_path(
//!     &client,
//!     Path::new("document.pdf"),
//!     "GMAIL_SEND_EMAIL",
//!     "gmail"
//! ).await?;
//!
//! println!("Uploaded to S3: {}", uploadable.s3key);
//! # Ok(())
//! # }
//! ```

use serde::{Deserialize, Serialize};
use serde_json::Value as JsonValue;
use std::collections::HashMap;
use std::path::{Path, PathBuf};

// ============================================================================
// Constants
// ============================================================================

/// Default chunk size for streamed file reads (1 MiB).
const DEFAULT_CHUNK_SIZE: usize = 1024 * 1024;

/// Maximum filename length to prevent issues with long URLs.
const MAX_FILENAME_LENGTH: usize = 100;

/// Maximum response size when fetching files from URLs (100 MiB).
const MAX_RESPONSE_SIZE: usize = 100 * 1024 * 1024;

/// Connection timeout in seconds for fetching files from URLs.
const CONNECT_TIMEOUT_SECS: u64 = 5;

/// Read timeout in seconds for fetching files from URLs.
const READ_TIMEOUT_SECS: u64 = 60;

/// Local cache directory name (created under the user's home directory).
const LOCAL_CACHE_DIRECTORY_NAME: &str = ".composio";

/// Environment variable that overrides the cache directory location.
const ENV_LOCAL_CACHE_DIRECTORY: &str = "COMPOSIO_CACHE_DIR";

/// Name of the output-file directory inside the cache directory.
const LOCAL_OUTPUT_FILE_DIRECTORY: &str = "outputs";

// ============================================================================
// Helper Functions
// ============================================================================

76/// Get the local cache directory path
77pub fn get_local_cache_directory() -> PathBuf {
78    if let Ok(cache_dir) = std::env::var(ENV_LOCAL_CACHE_DIRECTORY) {
79        PathBuf::from(cache_dir)
80    } else {
81        dirs::home_dir()
82            .unwrap_or_else(|| PathBuf::from("."))
83            .join(LOCAL_CACHE_DIRECTORY_NAME)
84    }
85}
86
87/// Get the local output file directory path
88pub fn get_local_output_directory() -> PathBuf {
89    get_local_cache_directory().join(LOCAL_OUTPUT_FILE_DIRECTORY)
90}
91
92/// Calculate MD5 hash of a file
93///
94/// Note: MD5 is used for file integrity checking and deduplication,
95/// not for cryptographic security. The Composio API requires MD5 hashes.
96pub fn calculate_md5(file_path: &Path) -> Result<String, std::io::Error> {
97    use std::fs::File;
98    use std::io::Read;
99
100    let mut file = File::open(file_path)?;
101    let mut hasher = md5::Context::new();
102    let mut buffer = vec![0; DEFAULT_CHUNK_SIZE];
103
104    loop {
105        let bytes_read = file.read(&mut buffer)?;
106        if bytes_read == 0 {
107            break;
108        }
109        hasher.consume(&buffer[..bytes_read]);
110    }
111
112    Ok(format!("{:x}", hasher.compute()))
113}
114
/// Check if a string is an HTTP or HTTPS URL (by scheme prefix).
pub fn is_url(value: &str) -> bool {
    ["http://", "https://"]
        .iter()
        .any(|scheme| value.starts_with(scheme))
}

// ============================================================================
// Data Structures
// ============================================================================

/// Query parameters for listing files.
///
/// Fields left as `None` are omitted from the serialized query.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct FileListParams {
    /// Cursor token for pagination.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cursor: Option<String>,
    /// Page size limit.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub limit: Option<u32>,
    /// Optional tool slug filter.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_slug: Option<String>,
    /// Optional toolkit slug filter.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub toolkit_slug: Option<String>,
}

/// A file list item returned by the API.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileListItem {
    /// File name.
    pub filename: String,
    /// MD5 checksum of the file contents.
    pub md5: String,
    /// MIME type.
    pub mimetype: String,
    /// Slug of the tool the file is associated with.
    pub tool_slug: String,
    /// Slug of the toolkit the file is associated with.
    pub toolkit_slug: String,
}

/// Paginated file listing response.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileListResponse {
    /// Current page number.
    pub current_page: u32,
    /// Total number of items across all pages.
    pub total_items: u32,
    /// Total number of pages.
    pub total_pages: u32,
    /// Cursor for the next page, when more results exist.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub next_cursor: Option<String>,
    /// File items on this page.
    pub items: Vec<FileListItem>,
}

/// Request payload for creating a file upload (presigned URL) request.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileCreatePresignedUrlParams {
    /// File name.
    pub filename: String,
    /// MD5 checksum of the file contents.
    pub md5: String,
    /// MIME type.
    pub mimetype: String,
    /// Tool slug the upload belongs to.
    pub tool_slug: String,
    /// Toolkit slug the upload belongs to.
    pub toolkit_slug: String,
}

/// Storage backend used for uploaded file metadata.
///
/// Serialized in snake_case: `"s3"` / `"azure_blob_storage"`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum FileStorageBackend {
    /// AWS S3 backend.
    S3,
    /// Azure Blob Storage backend.
    AzureBlobStorage,
}

/// Metadata included in a presigned upload response.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileCreatePresignedUrlMetadata {
    /// Backing storage provider.
    pub storage_backend: FileStorageBackend,
}

/// Response from a file upload (presigned URL) request.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileUploadResponse {
    /// File ID.
    pub id: String,
    /// S3 object key of the file.
    pub key: String,
    /// File type.
    #[serde(rename = "type")]
    pub file_type: String,
    /// Presigned URL to upload the file contents to.
    /// Also accepted from the wire under the `newPresignedUrl` key.
    #[serde(alias = "newPresignedUrl")]
    pub new_presigned_url: String,
    /// Additional metadata about the generated upload URL.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub metadata: Option<FileCreatePresignedUrlMetadata>,
}

/// Alias for endpoint parity naming.
pub type FileCreatePresignedUrlResponse = FileUploadResponse;

/// File that has been (or can be) uploaded to S3.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileUploadable {
    /// Filename.
    pub name: String,
    /// MIME type.
    pub mimetype: String,
    /// S3 object key assigned after upload.
    pub s3key: String,
}

/// File that can be downloaded from S3.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileDownloadable {
    /// Filename.
    pub name: String,
    /// MIME type.
    pub mimetype: String,
    /// S3 URL to download the file from.
    pub s3url: String,
}

247impl FileUploadable {
248    /// Create a FileUploadable from a local file path or public URL
249    ///
250    /// If the file parameter is a URL (starts with http:// or https://),
251    /// it will fetch the file content from the URL and upload it to S3.
252    /// Otherwise, it treats it as a local file path.
253    ///
254    /// # Arguments
255    ///
256    /// * `client` - The HTTP client for API calls
257    /// * `file` - Local file path or public URL
258    /// * `tool` - The tool slug
259    /// * `toolkit` - The toolkit slug
260    ///
261    /// # Returns
262    ///
263    /// FileUploadable instance with S3 key
264    ///
265    /// # Errors
266    ///
267    /// Returns error if file doesn't exist, is not readable, or upload fails
268    pub async fn from_path(
269        client: &crate::client::ComposioClient,
270        file: &Path,
271        tool: &str,
272        toolkit: &str,
273    ) -> Result<Self, crate::error::ComposioError> {
274        // Check if it's a URL
275        if let Some(file_str) = file.to_str() {
276            if is_url(file_str) {
277                return Self::from_url(client, file_str, tool, toolkit).await;
278            }
279        }
280
281        // Handle as local file path
282        if !file.exists() {
283            return Err(crate::error::ComposioError::FileNotFound(
284                file.display().to_string(),
285            ));
286        }
287
288        if !file.is_file() {
289            return Err(crate::error::ComposioError::InvalidFile(format!(
290                "Not a file: {}",
291                file.display()
292            )));
293        }
294
295        let filename = file.file_name().and_then(|n| n.to_str()).ok_or_else(|| {
296            crate::error::ComposioError::InvalidFile("Invalid filename".to_string())
297        })?;
298
299        let mimetype = crate::utils::mimetypes::guess_mime_type(file);
300
301        let md5_hash = calculate_md5(file)?;
302
303        // Request presigned URL from API
304        let upload_response =
305            Self::request_upload_url(client, &md5_hash, filename, &mimetype, tool, toolkit).await?;
306
307        // Upload file to S3
308        Self::upload_to_s3(&upload_response.new_presigned_url, file).await?;
309
310        Ok(Self {
311            name: filename.to_string(),
312            mimetype,
313            s3key: upload_response.key,
314        })
315    }
316
317    /// Create a FileUploadable from a public URL
318    pub async fn from_url(
319        client: &crate::client::ComposioClient,
320        url: &str,
321        tool: &str,
322        toolkit: &str,
323    ) -> Result<Self, crate::error::ComposioError> {
324        // Fetch file from URL
325        let (filename, content, mimetype) = Self::fetch_from_url(url).await?;
326
327        // Upload bytes to S3
328        let s3key =
329            Self::upload_bytes_to_s3(client, &filename, &content, &mimetype, tool, toolkit).await?;
330
331        Ok(Self {
332            name: filename,
333            mimetype,
334            s3key,
335        })
336    }
337
338    /// Request presigned URL from Composio API
339    async fn request_upload_url(
340        client: &crate::client::ComposioClient,
341        md5: &str,
342        filename: &str,
343        mimetype: &str,
344        tool: &str,
345        toolkit: &str,
346    ) -> Result<FileUploadResponse, crate::error::ComposioError> {
347        let params = FileCreatePresignedUrlParams {
348            filename: filename.to_string(),
349            md5: md5.to_string(),
350            mimetype: mimetype.to_string(),
351            tool_slug: tool.to_string(),
352            toolkit_slug: toolkit.to_string(),
353        };
354
355        client.create_file_upload_request(params).await
356    }
357
358    /// Upload file to S3 using presigned URL
359    async fn upload_to_s3(url: &str, file: &Path) -> Result<(), crate::error::ComposioError> {
360        let file_content = tokio::fs::read(file).await?;
361
362        let response = reqwest::Client::new()
363            .put(url)
364            .body(file_content)
365            .send()
366            .await?;
367
368        if response.status() != 200 {
369            return Err(crate::error::ComposioError::UploadFailed(format!(
370                "S3 upload failed with status: {}",
371                response.status()
372            )));
373        }
374
375        Ok(())
376    }
377
378    /// Fetch file from URL with security protections
379    async fn fetch_from_url(
380        url: &str,
381    ) -> Result<(String, Vec<u8>, String), crate::error::ComposioError> {
382        use reqwest::redirect::Policy;
383
384        let client = reqwest::Client::builder()
385            .redirect(Policy::none()) // Disable redirects for security
386            .connect_timeout(std::time::Duration::from_secs(CONNECT_TIMEOUT_SECS))
387            .timeout(std::time::Duration::from_secs(READ_TIMEOUT_SECS))
388            .build()?;
389
390        let response = client.get(url).send().await?;
391
392        // Reject redirects
393        if response.status().is_redirection() {
394            return Err(crate::error::ComposioError::UploadFailed(
395                "URL returned redirect. Please provide a direct URL to the file.".to_string(),
396            ));
397        }
398
399        if !response.status().is_success() {
400            return Err(crate::error::ComposioError::UploadFailed(format!(
401                "Failed to fetch file from URL. Status: {}",
402                response.status()
403            )));
404        }
405
406        // Check Content-Length header
407        if let Some(content_length) = response.content_length() {
408            if content_length as usize > MAX_RESPONSE_SIZE {
409                return Err(crate::error::ComposioError::FileTooLarge(format!(
410                    "File size ({} bytes) exceeds maximum ({} bytes)",
411                    content_length, MAX_RESPONSE_SIZE
412                )));
413            }
414        }
415
416        // Extract mimetype before consuming response
417        let mimetype = response
418            .headers()
419            .get("content-type")
420            .and_then(|v| v.to_str().ok())
421            .map(|ct| crate::utils::mimetypes::extract_from_content_type(ct))
422            .unwrap_or_else(|| crate::utils::mimetypes::DEFAULT_MIME_TYPE.to_string());
423
424        // Download with size tracking
425        let bytes = response.bytes().await?;
426        if bytes.len() > MAX_RESPONSE_SIZE {
427            return Err(crate::error::ComposioError::FileTooLarge(format!(
428                "Response size exceeds maximum ({} bytes)",
429                MAX_RESPONSE_SIZE
430            )));
431        }
432
433        // Extract filename from URL
434        let filename = Self::extract_filename_from_url(url, &mimetype);
435
436        Ok((filename, bytes.to_vec(), mimetype))
437    }
438
439    /// Extract filename from URL or generate one
440    fn extract_filename_from_url(url: &str, mimetype: &str) -> String {
441        use url::Url;
442
443        if let Ok(parsed) = Url::parse(url) {
444            if let Some(segments) = parsed.path_segments() {
445                if let Some(last) = segments.last() {
446                    let decoded = urlencoding::decode(last).unwrap_or_default();
447                    if !decoded.is_empty() {
448                        return Self::truncate_filename(&decoded);
449                    }
450                }
451            }
452        }
453
454        // Generate timestamped filename
455        Self::generate_timestamped_filename(mimetype)
456    }
457
458    /// Truncate filename if too long
459    fn truncate_filename(filename: &str) -> String {
460        if filename.len() <= MAX_FILENAME_LENGTH {
461            return filename.to_string();
462        }
463
464        // Extract extension
465        let extension = if let Some(pos) = filename.rfind('.') {
466            &filename[pos..]
467        } else {
468            ""
469        };
470
471        Self::generate_timestamped_filename(extension)
472    }
473
474    /// Generate a unique filename with timestamp
475    fn generate_timestamped_filename(extension: &str) -> String {
476        use chrono::Utc;
477        use uuid::Uuid;
478
479        let timestamp = Utc::now().format("%Y%m%d_%H%M%S");
480        let unique_id = &Uuid::new_v4().to_string()[..8];
481
482        format!("file_{}_{}{}", timestamp, unique_id, extension)
483    }
484
485    /// Upload bytes to S3
486    async fn upload_bytes_to_s3(
487        client: &crate::client::ComposioClient,
488        filename: &str,
489        content: &[u8],
490        mimetype: &str,
491        tool: &str,
492        toolkit: &str,
493    ) -> Result<String, crate::error::ComposioError> {
494        let md5_hash = format!("{:x}", md5::compute(content));
495
496        let upload_response =
497            Self::request_upload_url(client, &md5_hash, filename, mimetype, tool, toolkit).await?;
498
499        // Upload to S3
500        let response = reqwest::Client::new()
501            .put(&upload_response.new_presigned_url)
502            .header("Content-Type", mimetype)
503            .body(content.to_vec())
504            .send()
505            .await?;
506
507        if response.status() != 200 {
508            return Err(crate::error::ComposioError::UploadFailed(format!(
509                "S3 upload failed with status: {}",
510                response.status()
511            )));
512        }
513
514        Ok(upload_response.key)
515    }
516}
517
518impl FileDownloadable {
519    /// Download file from S3 to local directory
520    ///
521    /// # Arguments
522    ///
523    /// * `outdir` - Output directory path
524    ///
525    /// # Returns
526    ///
527    /// Path to the downloaded file
528    pub async fn download(&self, outdir: &Path) -> Result<PathBuf, crate::error::ComposioError> {
529        tokio::fs::create_dir_all(outdir).await?;
530
531        let outfile = outdir.join(&self.name);
532
533        let response = reqwest::get(&self.s3url).await?;
534
535        if response.status() != 200 {
536            return Err(crate::error::ComposioError::DownloadFailed(format!(
537                "Failed to download file. Status: {}",
538                response.status()
539            )));
540        }
541
542        let bytes = response.bytes().await?;
543        tokio::fs::write(&outfile, bytes).await?;
544
545        Ok(outfile)
546    }
547}
548
// ============================================================================
// FileHelper - Schema Processing and File Substitution
// ============================================================================

/// Helper for processing file uploads/downloads in tool schemas.
pub struct FileHelper {
    // Output directory for file downloads. Stored but not read by any method
    // in this module (hence the underscore prefix) — presumably reserved for
    // future download substitution; confirm before removing.
    _outdir: PathBuf,
}

558impl FileHelper {
559    /// Create a new FileHelper
560    pub fn new(outdir: Option<PathBuf>) -> Self {
561        let outdir = outdir.unwrap_or_else(get_local_output_directory);
562        Self { _outdir: outdir }
563    }
564
565    /// Check if a schema has a specific file property
566    fn has_file_property(&self, schema: &JsonValue, property_name: &str) -> bool {
567        if let Some(obj) = schema.as_object() {
568            // Direct property check
569            if obj
570                .get(property_name)
571                .and_then(|v| v.as_bool())
572                .unwrap_or(false)
573            {
574                return true;
575            }
576
577            // Check anyOf variants
578            if let Some(any_of) = obj.get("anyOf").and_then(|v| v.as_array()) {
579                for variant in any_of {
580                    if self.has_file_property(variant, property_name) {
581                        return true;
582                    }
583                }
584            }
585
586            // Check oneOf variants
587            if let Some(one_of) = obj.get("oneOf").and_then(|v| v.as_array()) {
588                for variant in one_of {
589                    if self.has_file_property(variant, property_name) {
590                        return true;
591                    }
592                }
593            }
594
595            // Check allOf variants
596            if let Some(all_of) = obj.get("allOf").and_then(|v| v.as_array()) {
597                for variant in all_of {
598                    if self.has_file_property(variant, property_name) {
599                        return true;
600                    }
601                }
602            }
603
604            // Check nested properties
605            if let Some(properties) = obj.get("properties").and_then(|v| v.as_object()) {
606                for prop in properties.values() {
607                    if self.has_file_property(prop, property_name) {
608                        return true;
609                    }
610                }
611            }
612
613            // Check array items
614            if let Some(items) = obj.get("items") {
615                if self.has_file_property(items, property_name) {
616                    return true;
617                }
618            }
619        }
620
621        false
622    }
623
624    /// Check if schema has file_uploadable property
625    pub fn is_file_uploadable(&self, schema: &JsonValue) -> bool {
626        self.has_file_property(schema, "file_uploadable")
627    }
628
629    /// Check if schema has file_downloadable property
630    pub fn is_file_downloadable(&self, schema: &JsonValue) -> bool {
631        self.has_file_property(schema, "file_downloadable")
632    }
633
634    /// Transform schema for file upload (convert file_uploadable to path format)
635    pub fn transform_schema_for_file_upload(&self, schema: JsonValue) -> JsonValue {
636        if let Some(mut obj) = schema.as_object().cloned() {
637            // Direct file_uploadable - transform it
638            if obj
639                .get("file_uploadable")
640                .and_then(|v| v.as_bool())
641                .unwrap_or(false)
642            {
643                return serde_json::json!({
644                    "type": "string",
645                    "format": "path",
646                    "description": obj.get("description")
647                        .and_then(|v| v.as_str())
648                        .unwrap_or("Path to file."),
649                    "title": obj.get("title"),
650                    "file_uploadable": true,
651                });
652            }
653
654            // Transform anyOf variants
655            if let Some(any_of) = obj.get("anyOf").and_then(|v| v.as_array()) {
656                let transformed: Vec<JsonValue> = any_of
657                    .iter()
658                    .map(|v| self.transform_schema_for_file_upload(v.clone()))
659                    .collect();
660                obj.insert("anyOf".to_string(), JsonValue::Array(transformed));
661            }
662
663            // Transform oneOf variants
664            if let Some(one_of) = obj.get("oneOf").and_then(|v| v.as_array()) {
665                let transformed: Vec<JsonValue> = one_of
666                    .iter()
667                    .map(|v| self.transform_schema_for_file_upload(v.clone()))
668                    .collect();
669                obj.insert("oneOf".to_string(), JsonValue::Array(transformed));
670            }
671
672            // Transform allOf variants
673            if let Some(all_of) = obj.get("allOf").and_then(|v| v.as_array()) {
674                let transformed: Vec<JsonValue> = all_of
675                    .iter()
676                    .map(|v| self.transform_schema_for_file_upload(v.clone()))
677                    .collect();
678                obj.insert("allOf".to_string(), JsonValue::Array(transformed));
679            }
680
681            // Transform nested properties
682            if let Some(properties) = obj.get("properties").and_then(|v| v.as_object()) {
683                let transformed: HashMap<String, JsonValue> = properties
684                    .iter()
685                    .map(|(k, v)| (k.clone(), self.transform_schema_for_file_upload(v.clone())))
686                    .collect();
687                obj.insert(
688                    "properties".to_string(),
689                    serde_json::to_value(transformed).unwrap(),
690                );
691            }
692
693            // Transform array items
694            if let Some(items) = obj.get("items") {
695                let transformed = self.transform_schema_for_file_upload(items.clone());
696                obj.insert("items".to_string(), transformed);
697            }
698
699            return JsonValue::Object(obj);
700        }
701
702        schema
703    }
704
705    /// Process file_uploadable fields in schema
706    pub fn process_file_uploadable_schema(&self, mut schema: JsonValue) -> JsonValue {
707        if let Some(obj) = schema.as_object_mut() {
708            if let Some(properties) = obj.get("properties").cloned() {
709                let transformed = self.transform_schema_for_file_upload(properties);
710                obj.insert("properties".to_string(), transformed);
711            }
712        }
713        schema
714    }
715
716    /// Enhance schema descriptions with type hints and required notes
717    pub fn enhance_schema_descriptions(&self, mut schema: JsonValue) -> JsonValue {
718        if let Some(obj) = schema.as_object_mut() {
719            let required = obj
720                .get("required")
721                .and_then(|v| v.as_array())
722                .map(|arr| {
723                    arr.iter()
724                        .filter_map(|v| v.as_str())
725                        .map(|s| s.to_string())
726                        .collect::<Vec<_>>()
727                })
728                .unwrap_or_default();
729
730            if let Some(properties) = obj.get_mut("properties").and_then(|v| v.as_object_mut()) {
731                for (param, prop_schema) in properties.iter_mut() {
732                    if let Some(prop_obj) = prop_schema.as_object_mut() {
733                        // Add type hint
734                        if let Some(type_str) = prop_obj.get("type").and_then(|v| v.as_str()) {
735                            if matches!(type_str, "string" | "integer" | "number" | "boolean") {
736                                let desc = prop_obj
737                                    .get("description")
738                                    .and_then(|v| v.as_str())
739                                    .unwrap_or("")
740                                    .trim_end_matches('.');
741
742                                let ext = format!("Please provide a value of type {}.", type_str);
743                                let new_desc = if desc.is_empty() {
744                                    ext
745                                } else {
746                                    format!("{}. {}", desc, ext)
747                                };
748
749                                prop_obj
750                                    .insert("description".to_string(), JsonValue::String(new_desc));
751                            }
752                        }
753
754                        // Add required note
755                        if required.contains(param) {
756                            let desc = prop_obj
757                                .get("description")
758                                .and_then(|v| v.as_str())
759                                .unwrap_or("")
760                                .trim_end_matches('.');
761
762                            let new_desc = if desc.is_empty() {
763                                "This parameter is required.".to_string()
764                            } else {
765                                format!("{}. This parameter is required.", desc)
766                            };
767
768                            prop_obj.insert("description".to_string(), JsonValue::String(new_desc));
769                        }
770                    }
771                }
772            }
773        }
774        schema
775    }
776
777    /// Process schema for both file handling and description enhancements
778    pub fn process_schema_recursively(&self, schema: JsonValue) -> JsonValue {
779        let schema = self.process_file_uploadable_schema(schema);
780        self.enhance_schema_descriptions(schema)
781    }
782}