// boundless_market/storage/providers/file.rs
use crate::storage::{
20 StorageDownloader, StorageError, StorageUploader, StorageUploaderConfig, StorageUploaderType,
21};
22use async_trait::async_trait;
23use std::{path::Path, sync::Arc};
24use tempfile::TempDir;
25use url::Url;
26
/// Uploader that writes blobs into a process-local temporary directory and
/// returns `file://` URLs pointing at the written files.
#[derive(Clone, Debug)]
pub struct FileStorageUploader {
    // Shared ownership of the temp dir: clones of the uploader all write to
    // the same directory, and `TempDir` deletes it (with its contents) once
    // the last `Arc` is dropped.
    path: Arc<TempDir>,
}
42
43impl FileStorageUploader {
44 pub fn new() -> Result<Self, StorageError> {
46 Ok(Self { path: Arc::new(tempfile::tempdir()?) })
47 }
48
49 pub fn with_path(path: impl AsRef<Path>) -> Result<Self, StorageError> {
51 Ok(Self { path: Arc::new(tempfile::tempdir_in(path)?) })
52 }
53
54 pub fn from_config(config: &StorageUploaderConfig) -> Result<Self, StorageError> {
56 assert_eq!(config.storage_uploader, StorageUploaderType::File);
57
58 let uploader = match &config.file_path {
59 Some(path) => Self::with_path(path)?,
60 None => Self::new()?,
61 };
62 Ok(uploader)
63 }
64
65 async fn save_file(&self, data: &[u8], filename: &str) -> Result<Url, StorageError> {
66 let file_path = self.path.path().join(filename);
67 tokio::fs::write(&file_path, data).await?;
68
69 Url::from_file_path(&file_path).map_err(|()| {
70 StorageError::Other(anyhow::anyhow!(
71 "failed to convert file path to URL: {:?}",
72 file_path
73 ))
74 })
75 }
76}
77
#[async_trait]
impl StorageUploader for FileStorageUploader {
    /// Stores `data` under `key` inside the uploader's temp directory and
    /// returns a `file://` URL to the resulting file.
    async fn upload_bytes(&self, data: &[u8], key: &str) -> Result<Url, StorageError> {
        self.save_file(data, key).await
    }
}
84
/// Downloader that reads blobs back from `file://` URLs on the local
/// filesystem. Stateless; `Clone` and `Default` are free.
#[derive(Clone, Debug, Default)]
pub struct FileStorageDownloader {}
88
89impl FileStorageDownloader {
90 pub fn new() -> Self {
92 Self::default()
93 }
94}
95
96#[async_trait]
97impl StorageDownloader for FileStorageDownloader {
98 async fn download_url_with_limit(
99 &self,
100 url: Url,
101 limit: usize,
102 ) -> Result<Vec<u8>, StorageError> {
103 if url.scheme() != "file" {
104 return Err(StorageError::UnsupportedScheme(url.scheme().to_string()));
105 }
106
107 tracing::debug!(%url, "downloading from file");
108
109 let path = Path::new(url.path());
110
111 let metadata = tokio::fs::metadata(path).await?;
113 let size = metadata.len() as usize;
114 if size > limit {
115 return Err(StorageError::SizeLimitExceeded { size, limit });
116 }
117
118 let data = tokio::fs::read(path).await?;
119 tracing::trace!(size = data.len(), %url, "downloaded from file");
120
121 Ok(data)
122 }
123
124 async fn download_url(&self, url: Url) -> Result<Vec<u8>, StorageError> {
125 self.download_url_with_limit(url, usize::MAX).await
127 }
128}
129
#[cfg(test)]
mod tests {
    use super::*;

    /// Upload a payload, then fetch it back through the downloader and
    /// verify the bytes survive the round trip.
    #[tokio::test]
    async fn roundtrip() {
        let uploader = FileStorageUploader::new().unwrap();
        let payload = b"test input data";

        let url = uploader.upload_input(payload).await.unwrap();
        assert_eq!(url.scheme(), "file", "expected file:// URL");

        let fetched = FileStorageDownloader::new().download_url(url).await.unwrap();
        assert_eq!(fetched, payload);
    }

    /// A 10-byte limit must reject this payload, which is longer.
    #[tokio::test]
    async fn rejects_oversized_file() {
        let uploader = FileStorageUploader::new().unwrap();
        let payload = b"this is more than 10 bytes";

        let url = uploader.upload_input(payload).await.unwrap();
        let result = FileStorageDownloader::new().download_url_with_limit(url, 10).await;

        assert!(matches!(result, Err(StorageError::SizeLimitExceeded { .. })));
    }
}