1use crate::{Error, http_client::HttpClient};
6use crate::generated::api_core::{GeneratedDbApi, ApiPaths};
7use serde_json::{json, Value};
8use std::sync::Arc;
9
/// Entry point for the storage API: hands out per-bucket handles
/// ([`StorageBucket`]) that share one HTTP transport.
pub struct StorageClient {
    // Shared transport; an `Arc` clone of it is handed to every bucket handle.
    http: Arc<HttpClient>,
}
14
15impl StorageClient {
16 pub fn new(http: Arc<HttpClient>) -> Self { Self { http } }
17
18 pub fn bucket(&self, name: &str) -> StorageBucket {
20 StorageBucket { http: self.http.clone(), name: name.to_string() }
21 }
22}
23
/// Handle for file operations scoped to a single named bucket.
pub struct StorageBucket {
    // Shared transport, cloned from the owning `StorageClient`.
    http: Arc<HttpClient>,
    /// Bucket name; interpolated (un-encoded) into request paths and bodies.
    pub name: String,
}
29
30impl StorageBucket {
31 fn core(&self) -> GeneratedDbApi<'_> {
32 GeneratedDbApi::new(&self.http)
33 }
34
35 fn base(&self) -> String {
36 ApiPaths::list_files(&self.name)
37 }
38
39 pub fn get_url(&self, key: &str) -> String {
41 format!("{}/api/storage/{}/{}", self.http.base_url(), self.name, urlencoded(key))
42 }
43
44 pub async fn upload(&self, key: &str, data: Vec<u8>, content_type: &str) -> Result<Value, Error> {
47 self.http.upload_multipart(
48 &format!("{}/upload", self.base()), key, data, content_type
49 ).await
50 }
51
52 pub async fn download(&self, key: &str) -> Result<Vec<u8>, Error> {
55 self.http.download_raw(&format!("{}/{}", self.base(), urlencoded(key))).await
56 }
57
58 pub async fn delete(&self, key: &str) -> Result<Value, Error> {
60 self.core().delete_file(&self.name, key).await
61 }
62
63 pub async fn list(&self, prefix: &str, limit: u32, offset: u32) -> Result<Value, Error> {
65 self.http.get(&format!(
66 "{}?prefix={}&limit={}&offset={}",
67 self.base(), urlencoded(prefix), limit, offset
68 )).await
69 }
70
71 pub async fn get_metadata(&self, key: &str) -> Result<Value, Error> {
73 self.core().get_file_metadata(&self.name, key).await
74 }
75
76 pub async fn update_metadata(&self, key: &str, metadata: &Value) -> Result<Value, Error> {
78 self.core().update_file_metadata(&self.name, key, metadata).await
79 }
80
81 pub async fn create_signed_url(&self, key: &str, expires_in: &str) -> Result<Value, Error> {
83 let body = json!({ "key": key, "expiresIn": expires_in });
84 self.core().create_signed_download_url(&self.name, &body).await
85 }
86
87 pub async fn create_signed_upload_url(&self, key: &str, expires_in: &str) -> Result<Value, Error> {
89 self.create_signed_upload_url_with_options(key, expires_in, None).await
90 }
91
92 pub async fn create_signed_upload_url_with_options(
94 &self,
95 key: &str,
96 expires_in: &str,
97 max_file_size: Option<&str>,
98 ) -> Result<Value, Error> {
99 let mut body = json!({ "key": key, "expiresIn": expires_in });
100 if let Some(max_file_size) = max_file_size {
101 body["maxFileSize"] = json!(max_file_size);
102 }
103 self.core().create_signed_upload_url(&self.name, &body).await
104 }
105
106 pub async fn upload_string(
109 &self, key: &str, data: &str, encoding: &str, content_type: &str,
110 ) -> Result<Value, Error> {
111 use base64::Engine as _;
112 let (raw, ct) = match encoding {
113 "base64" => {
114 let bytes = base64::engine::general_purpose::STANDARD.decode(data)
115 .map_err(|e| Error::Api { status: 400, message: format!("Invalid base64: {e}") })?;
116 (bytes, content_type.to_string())
117 }
118 "base64url" => {
119 let bytes = base64::engine::general_purpose::URL_SAFE_NO_PAD.decode(data)
120 .map_err(|e| Error::Api { status: 400, message: format!("Invalid base64url: {e}") })?;
121 (bytes, content_type.to_string())
122 }
123 "data_url" => {
124 let comma = data.find(',')
125 .ok_or_else(|| Error::Api { status: 400, message: "Invalid data URL".into() })?;
126 let header = &data[..comma];
127 let body = &data[comma + 1..];
128 let ct = if content_type.is_empty() || content_type == "application/octet-stream" {
129 header.strip_prefix("data:").unwrap_or("")
130 .split(';').next().unwrap_or("application/octet-stream").to_string()
131 } else {
132 content_type.to_string()
133 };
134 let bytes = base64::engine::general_purpose::STANDARD.decode(body)
135 .map_err(|e| Error::Api { status: 400, message: format!("Invalid data URL base64: {e}") })?;
136 (bytes, ct)
137 }
138 _ => { let ct = if content_type.is_empty() { "text/plain".to_string() } else { content_type.to_string() };
140 (data.as_bytes().to_vec(), ct)
141 }
142 };
143 self.upload(key, raw, &ct).await
144 }
145
146 pub async fn initiate_resumable_upload(&self, key: &str, content_type: &str) -> Result<String, Error> {
148 let mut body = json!({ "key": key });
149 if !content_type.is_empty() {
150 body["contentType"] = json!(content_type);
151 }
152 let resp = self.core().create_multipart_upload(&self.name, &body).await?;
153 resp["uploadId"].as_str()
154 .map(|s: &str| s.to_string())
155 .ok_or_else(|| Error::Api { status: 500, message: "Missing uploadId".into() })
156 }
157
158 pub async fn upload_part(
161 &self, key: &str, upload_id: &str, part_number: u32, data: Vec<u8>,
162 ) -> Result<Value, Error> {
163 let path = format!(
164 "{}/multipart/upload-part?uploadId={}&partNumber={}&key={}",
165 self.base(), urlencoded(upload_id), part_number, urlencoded(key)
166 );
167 self.http.post_bytes(&path, data, "application/octet-stream").await
168 }
169
170 pub async fn complete_resumable_upload(
172 &self, key: &str, upload_id: &str, parts: Vec<Value>,
173 ) -> Result<Value, Error> {
174 let body = json!({ "uploadId": upload_id, "key": key, "parts": parts });
175 self.core().complete_multipart_upload(&self.name, &body).await
176 }
177
178 pub async fn abort_resumable_upload(
180 &self, key: &str, upload_id: &str,
181 ) -> Result<Value, Error> {
182 let body = json!({ "uploadId": upload_id, "key": key });
183 self.core().abort_multipart_upload(&self.name, &body).await
184 }
185
186 pub async fn resume_upload(
189 &self, key: &str, upload_id: &str, chunk: Vec<u8>, part_number: usize, is_last_chunk: bool,
190 ) -> Result<Value, Error> {
191 let pn = (part_number + 1) as u32; let part = self.upload_part(key, upload_id, pn, chunk).await?;
193 if is_last_chunk {
194 let parts = vec![part.clone()];
197 self.complete_resumable_upload(key, upload_id, parts).await
198 } else {
199 Ok(part)
200 }
201 }
202
203}
204
205
206
/// Percent-encodes `s` for use in a URL path or query component.
///
/// Every byte outside the RFC 3986 "unreserved" set (ASCII alphanumerics
/// plus `-`, `_`, `.`, `~`) is emitted as `%XX` with uppercase hex — the
/// same output the `urlencoding::encode` call this replaces produced,
/// without the extra `Cow` round-trip or the third-party call.
fn urlencoded(s: &str) -> String {
    const HEX: &[u8; 16] = b"0123456789ABCDEF";
    let mut out = String::with_capacity(s.len());
    for &b in s.as_bytes() {
        match b {
            b'A'..=b'Z' | b'a'..=b'z' | b'0'..=b'9' | b'-' | b'_' | b'.' | b'~' => {
                out.push(b as char);
            }
            _ => {
                out.push('%');
                out.push(HEX[usize::from(b >> 4)] as char);
                out.push(HEX[usize::from(b & 0x0F)] as char);
            }
        }
    }
    out
}