1use serde::{Deserialize, Serialize};
40use serde_json::Value as JsonValue;
41use std::collections::HashMap;
42use std::path::{Path, PathBuf};
43
/// Buffer size (1 MiB) used when streaming a file through the MD5 hasher.
const DEFAULT_CHUNK_SIZE: usize = 1024 * 1024;

/// Longest filename kept as-is; longer names are replaced by a generated one.
const MAX_FILENAME_LENGTH: usize = 100;

/// Maximum number of bytes accepted when fetching a file from a URL (100 MiB).
const MAX_RESPONSE_SIZE: usize = 100 * 1024 * 1024;

/// TCP connect timeout, in seconds, for URL fetches.
const CONNECT_TIMEOUT_SECS: u64 = 5;

/// Overall request timeout, in seconds, for URL fetches.
const READ_TIMEOUT_SECS: u64 = 60;

/// Directory name (under the home directory) used for the local cache.
const LOCAL_CACHE_DIRECTORY_NAME: &str = ".composio";

/// Environment variable that overrides the local cache directory.
const ENV_LOCAL_CACHE_DIRECTORY: &str = "COMPOSIO_CACHE_DIR";

/// Subdirectory of the cache directory where downloaded outputs are stored.
const LOCAL_OUTPUT_FILE_DIRECTORY: &str = "outputs";
72pub fn get_local_cache_directory() -> PathBuf {
78 if let Ok(cache_dir) = std::env::var(ENV_LOCAL_CACHE_DIRECTORY) {
79 PathBuf::from(cache_dir)
80 } else {
81 dirs::home_dir()
82 .unwrap_or_else(|| PathBuf::from("."))
83 .join(LOCAL_CACHE_DIRECTORY_NAME)
84 }
85}
86
87pub fn get_local_output_directory() -> PathBuf {
89 get_local_cache_directory().join(LOCAL_OUTPUT_FILE_DIRECTORY)
90}
91
92pub fn calculate_md5(file_path: &Path) -> Result<String, std::io::Error> {
97 use std::fs::File;
98 use std::io::Read;
99
100 let mut file = File::open(file_path)?;
101 let mut hasher = md5::Context::new();
102 let mut buffer = vec![0; DEFAULT_CHUNK_SIZE];
103
104 loop {
105 let bytes_read = file.read(&mut buffer)?;
106 if bytes_read == 0 {
107 break;
108 }
109 hasher.consume(&buffer[..bytes_read]);
110 }
111
112 Ok(format!("{:x}", hasher.compute()))
113}
114
/// Return `true` when `value` begins with an HTTP(S) scheme. This is a cheap
/// prefix check, not full URL validation.
pub fn is_url(value: &str) -> bool {
    ["http://", "https://"]
        .iter()
        .any(|scheme| value.starts_with(scheme))
}
119
/// Optional filters and pagination parameters for a file-list request.
/// `None` fields are omitted from the serialized query.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct FileListParams {
    // Pagination cursor returned by a previous list call.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cursor: Option<String>,
    // Maximum number of items to return per page.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub limit: Option<u32>,
    // Restrict results to files associated with this tool.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_slug: Option<String>,
    // Restrict results to files associated with this toolkit.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub toolkit_slug: Option<String>,
}
140
/// One file entry within a `FileListResponse`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileListItem {
    // Name of the file.
    pub filename: String,
    // MD5 digest of the file contents.
    pub md5: String,
    // MIME type of the file.
    pub mimetype: String,
    // Slug of the tool the file is associated with.
    pub tool_slug: String,
    // Slug of the toolkit the file is associated with.
    pub toolkit_slug: String,
}
155
/// Paginated response from a file-list request.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileListResponse {
    // Index of the page contained in `items`.
    pub current_page: u32,
    // Total number of items across all pages.
    pub total_items: u32,
    // Total number of pages available.
    pub total_pages: u32,
    // Cursor for fetching the next page, when more pages exist.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub next_cursor: Option<String>,
    // Items on the current page.
    pub items: Vec<FileListItem>,
}
171
/// Request body sent when asking the API for a presigned upload URL.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileCreatePresignedUrlParams {
    // Name of the file to be uploaded.
    pub filename: String,
    // MD5 digest of the file contents.
    pub md5: String,
    // MIME type of the file.
    pub mimetype: String,
    // Slug of the tool the upload is for.
    pub tool_slug: String,
    // Slug of the toolkit the upload is for.
    pub toolkit_slug: String,
}
186
/// Storage backend that receives the uploaded file.
/// Serialized in `snake_case` (e.g. `"s3"`, `"azure_blob_storage"`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum FileStorageBackend {
    // Amazon S3 (or S3-compatible) storage.
    S3,
    // Azure Blob Storage.
    AzureBlobStorage,
}
196
/// Metadata attached to a presigned-URL response.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileCreatePresignedUrlMetadata {
    // Which storage backend the presigned URL targets.
    pub storage_backend: FileStorageBackend,
}
203
/// Server response for a presigned-URL / file-upload request.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileUploadResponse {
    // Server-assigned identifier for the file record.
    pub id: String,
    // Object key under which the file is (or will be) stored.
    pub key: String,
    // File type as reported by the server; serialized as `type`.
    #[serde(rename = "type")]
    pub file_type: String,
    // Presigned URL to PUT the file contents to. Accepts the camelCase
    // `newPresignedUrl` spelling on deserialization.
    #[serde(alias = "newPresignedUrl")]
    pub new_presigned_url: String,
    // Optional backend metadata for the upload.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub metadata: Option<FileCreatePresignedUrlMetadata>,
}
221
/// The presigned-URL creation response shares the upload-response shape.
pub type FileCreatePresignedUrlResponse = FileUploadResponse;
224
/// A file that has been uploaded and can be referenced by its storage key.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileUploadable {
    // Name of the file.
    pub name: String,
    // MIME type of the file.
    pub mimetype: String,
    // Object key the file was uploaded under.
    pub s3key: String,
}
235
/// A remote file that can be downloaded from its storage URL.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileDownloadable {
    // File name used when saving the download locally.
    pub name: String,
    // MIME type of the file.
    pub mimetype: String,
    // URL the file contents can be fetched from.
    pub s3url: String,
}
246
247impl FileUploadable {
248 pub async fn from_path(
269 client: &crate::client::ComposioClient,
270 file: &Path,
271 tool: &str,
272 toolkit: &str,
273 ) -> Result<Self, crate::error::ComposioError> {
274 if let Some(file_str) = file.to_str() {
276 if is_url(file_str) {
277 return Self::from_url(client, file_str, tool, toolkit).await;
278 }
279 }
280
281 if !file.exists() {
283 return Err(crate::error::ComposioError::FileNotFound(
284 file.display().to_string(),
285 ));
286 }
287
288 if !file.is_file() {
289 return Err(crate::error::ComposioError::InvalidFile(format!(
290 "Not a file: {}",
291 file.display()
292 )));
293 }
294
295 let filename = file.file_name().and_then(|n| n.to_str()).ok_or_else(|| {
296 crate::error::ComposioError::InvalidFile("Invalid filename".to_string())
297 })?;
298
299 let mimetype = crate::utils::mimetypes::guess_mime_type(file);
300
301 let md5_hash = calculate_md5(file)?;
302
303 let upload_response =
305 Self::request_upload_url(client, &md5_hash, filename, &mimetype, tool, toolkit).await?;
306
307 Self::upload_to_s3(&upload_response.new_presigned_url, file).await?;
309
310 Ok(Self {
311 name: filename.to_string(),
312 mimetype,
313 s3key: upload_response.key,
314 })
315 }
316
317 pub async fn from_url(
319 client: &crate::client::ComposioClient,
320 url: &str,
321 tool: &str,
322 toolkit: &str,
323 ) -> Result<Self, crate::error::ComposioError> {
324 let (filename, content, mimetype) = Self::fetch_from_url(url).await?;
326
327 let s3key =
329 Self::upload_bytes_to_s3(client, &filename, &content, &mimetype, tool, toolkit).await?;
330
331 Ok(Self {
332 name: filename,
333 mimetype,
334 s3key,
335 })
336 }
337
338 async fn request_upload_url(
340 client: &crate::client::ComposioClient,
341 md5: &str,
342 filename: &str,
343 mimetype: &str,
344 tool: &str,
345 toolkit: &str,
346 ) -> Result<FileUploadResponse, crate::error::ComposioError> {
347 let params = FileCreatePresignedUrlParams {
348 filename: filename.to_string(),
349 md5: md5.to_string(),
350 mimetype: mimetype.to_string(),
351 tool_slug: tool.to_string(),
352 toolkit_slug: toolkit.to_string(),
353 };
354
355 client.create_file_upload_request(params).await
356 }
357
358 async fn upload_to_s3(url: &str, file: &Path) -> Result<(), crate::error::ComposioError> {
360 let file_content = tokio::fs::read(file).await?;
361
362 let response = reqwest::Client::new()
363 .put(url)
364 .body(file_content)
365 .send()
366 .await?;
367
368 if response.status() != 200 {
369 return Err(crate::error::ComposioError::UploadFailed(format!(
370 "S3 upload failed with status: {}",
371 response.status()
372 )));
373 }
374
375 Ok(())
376 }
377
378 async fn fetch_from_url(
380 url: &str,
381 ) -> Result<(String, Vec<u8>, String), crate::error::ComposioError> {
382 use reqwest::redirect::Policy;
383
384 let client = reqwest::Client::builder()
385 .redirect(Policy::none()) .connect_timeout(std::time::Duration::from_secs(CONNECT_TIMEOUT_SECS))
387 .timeout(std::time::Duration::from_secs(READ_TIMEOUT_SECS))
388 .build()?;
389
390 let response = client.get(url).send().await?;
391
392 if response.status().is_redirection() {
394 return Err(crate::error::ComposioError::UploadFailed(
395 "URL returned redirect. Please provide a direct URL to the file.".to_string(),
396 ));
397 }
398
399 if !response.status().is_success() {
400 return Err(crate::error::ComposioError::UploadFailed(format!(
401 "Failed to fetch file from URL. Status: {}",
402 response.status()
403 )));
404 }
405
406 if let Some(content_length) = response.content_length() {
408 if content_length as usize > MAX_RESPONSE_SIZE {
409 return Err(crate::error::ComposioError::FileTooLarge(format!(
410 "File size ({} bytes) exceeds maximum ({} bytes)",
411 content_length, MAX_RESPONSE_SIZE
412 )));
413 }
414 }
415
416 let mimetype = response
418 .headers()
419 .get("content-type")
420 .and_then(|v| v.to_str().ok())
421 .map(|ct| crate::utils::mimetypes::extract_from_content_type(ct))
422 .unwrap_or_else(|| crate::utils::mimetypes::DEFAULT_MIME_TYPE.to_string());
423
424 let bytes = response.bytes().await?;
426 if bytes.len() > MAX_RESPONSE_SIZE {
427 return Err(crate::error::ComposioError::FileTooLarge(format!(
428 "Response size exceeds maximum ({} bytes)",
429 MAX_RESPONSE_SIZE
430 )));
431 }
432
433 let filename = Self::extract_filename_from_url(url, &mimetype);
435
436 Ok((filename, bytes.to_vec(), mimetype))
437 }
438
439 fn extract_filename_from_url(url: &str, mimetype: &str) -> String {
441 use url::Url;
442
443 if let Ok(parsed) = Url::parse(url) {
444 if let Some(segments) = parsed.path_segments() {
445 if let Some(last) = segments.last() {
446 let decoded = urlencoding::decode(last).unwrap_or_default();
447 if !decoded.is_empty() {
448 return Self::truncate_filename(&decoded);
449 }
450 }
451 }
452 }
453
454 Self::generate_timestamped_filename(mimetype)
456 }
457
458 fn truncate_filename(filename: &str) -> String {
460 if filename.len() <= MAX_FILENAME_LENGTH {
461 return filename.to_string();
462 }
463
464 let extension = if let Some(pos) = filename.rfind('.') {
466 &filename[pos..]
467 } else {
468 ""
469 };
470
471 Self::generate_timestamped_filename(extension)
472 }
473
474 fn generate_timestamped_filename(extension: &str) -> String {
476 use chrono::Utc;
477 use uuid::Uuid;
478
479 let timestamp = Utc::now().format("%Y%m%d_%H%M%S");
480 let unique_id = &Uuid::new_v4().to_string()[..8];
481
482 format!("file_{}_{}{}", timestamp, unique_id, extension)
483 }
484
485 async fn upload_bytes_to_s3(
487 client: &crate::client::ComposioClient,
488 filename: &str,
489 content: &[u8],
490 mimetype: &str,
491 tool: &str,
492 toolkit: &str,
493 ) -> Result<String, crate::error::ComposioError> {
494 let md5_hash = format!("{:x}", md5::compute(content));
495
496 let upload_response =
497 Self::request_upload_url(client, &md5_hash, filename, mimetype, tool, toolkit).await?;
498
499 let response = reqwest::Client::new()
501 .put(&upload_response.new_presigned_url)
502 .header("Content-Type", mimetype)
503 .body(content.to_vec())
504 .send()
505 .await?;
506
507 if response.status() != 200 {
508 return Err(crate::error::ComposioError::UploadFailed(format!(
509 "S3 upload failed with status: {}",
510 response.status()
511 )));
512 }
513
514 Ok(upload_response.key)
515 }
516}
517
518impl FileDownloadable {
519 pub async fn download(&self, outdir: &Path) -> Result<PathBuf, crate::error::ComposioError> {
529 tokio::fs::create_dir_all(outdir).await?;
530
531 let outfile = outdir.join(&self.name);
532
533 let response = reqwest::get(&self.s3url).await?;
534
535 if response.status() != 200 {
536 return Err(crate::error::ComposioError::DownloadFailed(format!(
537 "Failed to download file. Status: {}",
538 response.status()
539 )));
540 }
541
542 let bytes = response.bytes().await?;
543 tokio::fs::write(&outfile, bytes).await?;
544
545 Ok(outfile)
546 }
547}
548
/// Helper for JSON-schema manipulation around file-upload/download markers.
///
/// NOTE(review): `_outdir` is stored but never read by any method visible in
/// this file — presumably reserved for future download handling; confirm.
pub struct FileHelper {
    // Output directory for downloaded files (currently unused).
    _outdir: PathBuf,
}
557
558impl FileHelper {
559 pub fn new(outdir: Option<PathBuf>) -> Self {
561 let outdir = outdir.unwrap_or_else(get_local_output_directory);
562 Self { _outdir: outdir }
563 }
564
565 fn has_file_property(&self, schema: &JsonValue, property_name: &str) -> bool {
567 if let Some(obj) = schema.as_object() {
568 if obj
570 .get(property_name)
571 .and_then(|v| v.as_bool())
572 .unwrap_or(false)
573 {
574 return true;
575 }
576
577 if let Some(any_of) = obj.get("anyOf").and_then(|v| v.as_array()) {
579 for variant in any_of {
580 if self.has_file_property(variant, property_name) {
581 return true;
582 }
583 }
584 }
585
586 if let Some(one_of) = obj.get("oneOf").and_then(|v| v.as_array()) {
588 for variant in one_of {
589 if self.has_file_property(variant, property_name) {
590 return true;
591 }
592 }
593 }
594
595 if let Some(all_of) = obj.get("allOf").and_then(|v| v.as_array()) {
597 for variant in all_of {
598 if self.has_file_property(variant, property_name) {
599 return true;
600 }
601 }
602 }
603
604 if let Some(properties) = obj.get("properties").and_then(|v| v.as_object()) {
606 for prop in properties.values() {
607 if self.has_file_property(prop, property_name) {
608 return true;
609 }
610 }
611 }
612
613 if let Some(items) = obj.get("items") {
615 if self.has_file_property(items, property_name) {
616 return true;
617 }
618 }
619 }
620
621 false
622 }
623
    /// `true` when the schema (or any nested sub-schema) is marked
    /// `file_uploadable`.
    pub fn is_file_uploadable(&self, schema: &JsonValue) -> bool {
        self.has_file_property(schema, "file_uploadable")
    }
628
    /// `true` when the schema (or any nested sub-schema) is marked
    /// `file_downloadable`.
    pub fn is_file_downloadable(&self, schema: &JsonValue) -> bool {
        self.has_file_property(schema, "file_downloadable")
    }
633
634 pub fn transform_schema_for_file_upload(&self, schema: JsonValue) -> JsonValue {
636 if let Some(mut obj) = schema.as_object().cloned() {
637 if obj
639 .get("file_uploadable")
640 .and_then(|v| v.as_bool())
641 .unwrap_or(false)
642 {
643 return serde_json::json!({
644 "type": "string",
645 "format": "path",
646 "description": obj.get("description")
647 .and_then(|v| v.as_str())
648 .unwrap_or("Path to file."),
649 "title": obj.get("title"),
650 "file_uploadable": true,
651 });
652 }
653
654 if let Some(any_of) = obj.get("anyOf").and_then(|v| v.as_array()) {
656 let transformed: Vec<JsonValue> = any_of
657 .iter()
658 .map(|v| self.transform_schema_for_file_upload(v.clone()))
659 .collect();
660 obj.insert("anyOf".to_string(), JsonValue::Array(transformed));
661 }
662
663 if let Some(one_of) = obj.get("oneOf").and_then(|v| v.as_array()) {
665 let transformed: Vec<JsonValue> = one_of
666 .iter()
667 .map(|v| self.transform_schema_for_file_upload(v.clone()))
668 .collect();
669 obj.insert("oneOf".to_string(), JsonValue::Array(transformed));
670 }
671
672 if let Some(all_of) = obj.get("allOf").and_then(|v| v.as_array()) {
674 let transformed: Vec<JsonValue> = all_of
675 .iter()
676 .map(|v| self.transform_schema_for_file_upload(v.clone()))
677 .collect();
678 obj.insert("allOf".to_string(), JsonValue::Array(transformed));
679 }
680
681 if let Some(properties) = obj.get("properties").and_then(|v| v.as_object()) {
683 let transformed: HashMap<String, JsonValue> = properties
684 .iter()
685 .map(|(k, v)| (k.clone(), self.transform_schema_for_file_upload(v.clone())))
686 .collect();
687 obj.insert(
688 "properties".to_string(),
689 serde_json::to_value(transformed).unwrap(),
690 );
691 }
692
693 if let Some(items) = obj.get("items") {
695 let transformed = self.transform_schema_for_file_upload(items.clone());
696 obj.insert("items".to_string(), transformed);
697 }
698
699 return JsonValue::Object(obj);
700 }
701
702 schema
703 }
704
705 pub fn process_file_uploadable_schema(&self, mut schema: JsonValue) -> JsonValue {
707 if let Some(obj) = schema.as_object_mut() {
708 if let Some(properties) = obj.get("properties").cloned() {
709 let transformed = self.transform_schema_for_file_upload(properties);
710 obj.insert("properties".to_string(), transformed);
711 }
712 }
713 schema
714 }
715
    /// Append standard guidance to each top-level property description:
    /// a "Please provide a value of type …" sentence for primitive-typed
    /// properties, then "This parameter is required." for properties listed
    /// in the schema's `required` array (a required primitive gets both).
    ///
    /// NOTE(review): `trim_end_matches('.')` strips ALL trailing periods, so
    /// a description ending in "..." collapses before the suffix is appended
    /// — confirm that is acceptable.
    pub fn enhance_schema_descriptions(&self, mut schema: JsonValue) -> JsonValue {
        if let Some(obj) = schema.as_object_mut() {
            // Snapshot the required-parameter names up front, before taking
            // the mutable borrow of `properties` below.
            let required = obj
                .get("required")
                .and_then(|v| v.as_array())
                .map(|arr| {
                    arr.iter()
                        .filter_map(|v| v.as_str())
                        .map(|s| s.to_string())
                        .collect::<Vec<_>>()
                })
                .unwrap_or_default();

            if let Some(properties) = obj.get_mut("properties").and_then(|v| v.as_object_mut()) {
                for (param, prop_schema) in properties.iter_mut() {
                    if let Some(prop_obj) = prop_schema.as_object_mut() {
                        // Type hint: only for primitive JSON types.
                        if let Some(type_str) = prop_obj.get("type").and_then(|v| v.as_str()) {
                            if matches!(type_str, "string" | "integer" | "number" | "boolean") {
                                let desc = prop_obj
                                    .get("description")
                                    .and_then(|v| v.as_str())
                                    .unwrap_or("")
                                    .trim_end_matches('.');

                                let ext = format!("Please provide a value of type {}.", type_str);
                                let new_desc = if desc.is_empty() {
                                    ext
                                } else {
                                    format!("{}. {}", desc, ext)
                                };

                                prop_obj
                                    .insert("description".to_string(), JsonValue::String(new_desc));
                            }
                        }

                        // Required hint: re-reads the (possibly just updated)
                        // description so both suffixes can accumulate.
                        if required.contains(param) {
                            let desc = prop_obj
                                .get("description")
                                .and_then(|v| v.as_str())
                                .unwrap_or("")
                                .trim_end_matches('.');

                            let new_desc = if desc.is_empty() {
                                "This parameter is required.".to_string()
                            } else {
                                format!("{}. This parameter is required.", desc)
                            };

                            prop_obj.insert("description".to_string(), JsonValue::String(new_desc));
                        }
                    }
                }
            }
        }
        schema
    }
776
777 pub fn process_schema_recursively(&self, schema: JsonValue) -> JsonValue {
779 let schema = self.process_file_uploadable_schema(schema);
780 self.enhance_schema_descriptions(schema)
781 }
782}