pub struct Storage { /* private fields */ }
Expand description
Storage client for file operations
Implementations§
Source§impl Storage
impl Storage
Sourcepub fn new(
config: Arc<SupabaseConfig>,
http_client: Arc<HttpClient>,
) -> Result<Self>
pub fn new( config: Arc<SupabaseConfig>, http_client: Arc<HttpClient>, ) -> Result<Self>
Create a new Storage instance
Sourcepub async fn list_buckets(&self) -> Result<Vec<Bucket>>
pub async fn list_buckets(&self) -> Result<Vec<Bucket>>
List all storage buckets
Sourcepub async fn get_bucket(&self, bucket_id: &str) -> Result<Bucket>
pub async fn get_bucket(&self, bucket_id: &str) -> Result<Bucket>
Get bucket information
Sourcepub async fn create_bucket(
&self,
id: &str,
name: &str,
public: bool,
) -> Result<Bucket>
pub async fn create_bucket( &self, id: &str, name: &str, public: bool, ) -> Result<Bucket>
Create a new storage bucket
Sourcepub async fn update_bucket(&self, id: &str, public: Option<bool>) -> Result<()>
pub async fn update_bucket(&self, id: &str, public: Option<bool>) -> Result<()>
Update bucket settings
Sourcepub async fn delete_bucket(&self, id: &str) -> Result<()>
pub async fn delete_bucket(&self, id: &str) -> Result<()>
Delete a storage bucket
Sourcepub async fn list(
&self,
bucket_id: &str,
path: Option<&str>,
) -> Result<Vec<FileObject>>
pub async fn list( &self, bucket_id: &str, path: Option<&str>, ) -> Result<Vec<FileObject>>
List files in a bucket
Sourcepub async fn list_with_auth(
&self,
bucket_id: &str,
path: Option<&str>,
user_token: Option<&str>,
) -> Result<Vec<FileObject>>
pub async fn list_with_auth( &self, bucket_id: &str, path: Option<&str>, user_token: Option<&str>, ) -> Result<Vec<FileObject>>
List files in a bucket with authentication token
This method allows passing a user authentication token for listing files in protected buckets with Row Level Security policies.
§Arguments
- `bucket_id` — The bucket identifier
- `path` — Optional path prefix to filter files
- `user_token` — Optional user JWT token for authenticated requests
§Examples
let auth_response = client.auth().sign_in_with_email_and_password("user@example.com", "password").await?;
let token = &auth_response.session.as_ref().unwrap().access_token;
let files = client.storage()
.list_with_auth("private-bucket", Some("user-files/"), Some(token))
.await?;Sourcepub async fn upload(
&self,
bucket_id: &str,
path: &str,
file_body: Bytes,
options: Option<FileOptions>,
) -> Result<UploadResponse>
pub async fn upload( &self, bucket_id: &str, path: &str, file_body: Bytes, options: Option<FileOptions>, ) -> Result<UploadResponse>
Upload a file from bytes
Sourcepub async fn upload_with_auth(
&self,
bucket_id: &str,
path: &str,
file_body: Bytes,
options: Option<FileOptions>,
user_token: Option<&str>,
) -> Result<UploadResponse>
pub async fn upload_with_auth( &self, bucket_id: &str, path: &str, file_body: Bytes, options: Option<FileOptions>, user_token: Option<&str>, ) -> Result<UploadResponse>
Upload a file with authentication token
This method allows passing a user authentication token for operations on protected resources (e.g., when Row Level Security policies require authentication).
§Arguments
- `bucket_id` — The bucket identifier
- `path` — The file path in the bucket
- `file_body` — The file content as bytes
- `options` — Optional file options (content type, cache control, upsert)
- `user_token` — Optional user JWT token for authenticated requests
§Examples
// Get user session token after authentication
let auth_response = client.auth().sign_in_with_email_and_password("user@example.com", "password").await?;
let token = &auth_response.session.as_ref().unwrap().access_token;
// Upload to protected bucket
let file_data = Bytes::from("file content");
let response = client.storage()
.upload_with_auth("private-bucket", "user-files/document.txt", file_data, None, Some(token))
.await?;Sourcepub async fn upload_file<P: AsRef<Path>>(
&self,
bucket_id: &str,
path: &str,
file_path: P,
options: Option<FileOptions>,
) -> Result<UploadResponse>
pub async fn upload_file<P: AsRef<Path>>( &self, bucket_id: &str, path: &str, file_path: P, options: Option<FileOptions>, ) -> Result<UploadResponse>
Upload a file from local filesystem (Native only, requires tokio)
Sourcepub async fn download_with_auth(
&self,
bucket_id: &str,
path: &str,
user_token: Option<&str>,
) -> Result<Bytes>
pub async fn download_with_auth( &self, bucket_id: &str, path: &str, user_token: Option<&str>, ) -> Result<Bytes>
Download a file with authentication token
This method allows passing a user authentication token for downloading files from protected buckets with Row Level Security policies.
§Arguments
- `bucket_id` — The bucket identifier
- `path` — The file path in the bucket
- `user_token` — Optional user JWT token for authenticated requests
§Examples
let auth_response = client.auth().sign_in_with_email_and_password("user@example.com", "password").await?;
let token = &auth_response.session.as_ref().unwrap().access_token;
let file_data = client.storage()
.download_with_auth("private-bucket", "user-files/document.txt", Some(token))
.await?;Sourcepub async fn remove_with_auth(
&self,
bucket_id: &str,
paths: &[&str],
user_token: Option<&str>,
) -> Result<()>
pub async fn remove_with_auth( &self, bucket_id: &str, paths: &[&str], user_token: Option<&str>, ) -> Result<()>
Remove/delete files with authentication token
This method allows passing a user authentication token for deleting files from protected buckets with Row Level Security policies.
§Arguments
- `bucket_id` — The bucket identifier
- `paths` — Array of file paths to delete
- `user_token` — Optional user JWT token for authenticated requests
§Examples
let auth_response = client.auth().sign_in_with_email_and_password("user@example.com", "password").await?;
let token = &auth_response.session.as_ref().unwrap().access_token;
client.storage()
.remove_with_auth("private-bucket", &["user-files/old-doc.txt"], Some(token))
.await?;
Sourcepub async fn r#move(
&self,
bucket_id: &str,
from_path: &str,
to_path: &str,
) -> Result<()>
pub async fn r#move( &self, bucket_id: &str, from_path: &str, to_path: &str, ) -> Result<()>
Move a file
Sourcepub async fn copy(
&self,
bucket_id: &str,
from_path: &str,
to_path: &str,
) -> Result<()>
pub async fn copy( &self, bucket_id: &str, from_path: &str, to_path: &str, ) -> Result<()>
Copy a file
Sourcepub fn get_public_url(&self, bucket_id: &str, path: &str) -> String
pub fn get_public_url(&self, bucket_id: &str, path: &str) -> String
Get public URL for a file
Sourcepub async fn create_signed_url(
&self,
bucket_id: &str,
path: &str,
expires_in: u32,
transform: Option<TransformOptions>,
) -> Result<String>
pub async fn create_signed_url( &self, bucket_id: &str, path: &str, expires_in: u32, transform: Option<TransformOptions>, ) -> Result<String>
Get signed URL for private file access
Sourcepub fn get_public_url_transformed(
&self,
bucket_id: &str,
path: &str,
options: TransformOptions,
) -> Result<String>
pub fn get_public_url_transformed( &self, bucket_id: &str, path: &str, options: TransformOptions, ) -> Result<String>
Get transformed image URL
Sourcepub async fn start_resumable_upload(
&self,
bucket_id: &str,
path: &str,
total_size: u64,
config: Option<ResumableUploadConfig>,
options: Option<FileOptions>,
) -> Result<UploadSession>
pub async fn start_resumable_upload( &self, bucket_id: &str, path: &str, total_size: u64, config: Option<ResumableUploadConfig>, options: Option<FileOptions>, ) -> Result<UploadSession>
Start a resumable upload session for large files
§Examples
use supabase::storage::{ResumableUploadConfig, FileOptions};
let config = ResumableUploadConfig::default();
let file_opts = FileOptions {
content_type: Some("video/mp4".to_string()),
..Default::default()
};
let session = storage.start_resumable_upload(
"videos",
"my-large-video.mp4",
1024 * 1024 * 100, // 100MB
Some(config),
Some(file_opts)
).await?;
println!("Started upload session: {}", session.upload_id);Sourcepub async fn upload_chunk(
&self,
session: &UploadSession,
part_number: u32,
chunk_data: Bytes,
) -> Result<UploadedPart>
pub async fn upload_chunk( &self, session: &UploadSession, part_number: u32, chunk_data: Bytes, ) -> Result<UploadedPart>
Upload a chunk for resumable upload
§Examples
use bytes::Bytes;
let chunk_data = Bytes::from(vec![0u8; 1024 * 1024]); // 1MB chunk
let part = storage.upload_chunk(
session,
1, // part number
chunk_data
).await?;
println!("Uploaded part: {} etag: {}", part.part_number, part.etag);Sourcepub async fn complete_resumable_upload(
&self,
session: &UploadSession,
) -> Result<UploadResponse>
pub async fn complete_resumable_upload( &self, session: &UploadSession, ) -> Result<UploadResponse>
Complete a resumable upload after all chunks are uploaded
§Examples
// ... upload all chunks and collect parts ...
let response = storage.complete_resumable_upload(&session).await?;
println!("Upload completed: {}", response.key);Sourcepub async fn upload_large_file<P: AsRef<Path>>(
&self,
bucket_id: &str,
path: &str,
file_path: P,
config: Option<ResumableUploadConfig>,
options: Option<FileOptions>,
progress_callback: Option<UploadProgressCallback>,
) -> Result<UploadResponse>
pub async fn upload_large_file<P: AsRef<Path>>( &self, bucket_id: &str, path: &str, file_path: P, config: Option<ResumableUploadConfig>, options: Option<FileOptions>, progress_callback: Option<UploadProgressCallback>, ) -> Result<UploadResponse>
Upload a large file with automatic chunking and resume capability
This is a high-level method that handles the entire resumable upload process.
§Examples
use supabase::storage::{ResumableUploadConfig, FileOptions};
use std::sync::Arc;
let config = ResumableUploadConfig::default();
let file_opts = FileOptions {
content_type: Some("video/mp4".to_string()),
..Default::default()
};
let progress_callback = Arc::new(|uploaded: u64, total: u64| {
println!("Progress: {:.1}%", (uploaded as f64 / total as f64) * 100.0);
});
let response = storage.upload_large_file(
"videos",
"my-large-video.mp4",
"/path/to/large-video.mp4",
Some(config),
Some(file_opts),
Some(progress_callback)
).await?;
println!("Upload completed: {}", response.key);Sourcepub async fn get_upload_session(&self, upload_id: &str) -> Result<UploadSession>
pub async fn get_upload_session(&self, upload_id: &str) -> Result<UploadSession>
Get resumable upload session status
Sourcepub async fn cancel_upload_session(&self, upload_id: &str) -> Result<()>
pub async fn cancel_upload_session(&self, upload_id: &str) -> Result<()>
Cancel a resumable upload session
Sourcepub async fn update_file_metadata(
&self,
bucket_id: &str,
path: &str,
metadata: &FileMetadata,
) -> Result<()>
pub async fn update_file_metadata( &self, bucket_id: &str, path: &str, metadata: &FileMetadata, ) -> Result<()>
Update file metadata with tags and custom metadata
§Examples
use std::collections::HashMap;
use supabase::storage::FileMetadata;
let mut tags = HashMap::new();
tags.insert("category".to_string(), "documents".to_string());
tags.insert("project".to_string(), "web-app".to_string());
let mut custom_data = HashMap::new();
custom_data.insert("author".to_string(), serde_json::Value::String("john_doe".to_string()));
custom_data.insert("version".to_string(), serde_json::Value::Number(serde_json::Number::from(1)));
let metadata = FileMetadata {
tags: Some(tags),
custom_metadata: Some(custom_data),
description: Some("Project documentation".to_string()),
category: Some("documents".to_string()),
searchable_content: Some("documentation project guide".to_string()),
};
storage.update_file_metadata("documents", "guide.pdf", &metadata).await?;Sourcepub async fn search_files(
&self,
bucket_id: &str,
search_options: &SearchOptions,
) -> Result<Vec<FileObject>>
pub async fn search_files( &self, bucket_id: &str, search_options: &SearchOptions, ) -> Result<Vec<FileObject>>
Search files by metadata
§Examples
use std::collections::HashMap;
use supabase::storage::SearchOptions;
let mut tag_filter = HashMap::new();
tag_filter.insert("category".to_string(), "documents".to_string());
let search_options = SearchOptions {
tags: Some(tag_filter),
category: Some("documents".to_string()),
content_search: Some("project guide".to_string()),
limit: Some(20),
offset: Some(0),
};
let files = storage.search_files("documents", &search_options).await?;
println!("Found {} files", files.len());Sourcepub async fn create_policy(&self, policy: &StoragePolicy) -> Result<()>
pub async fn create_policy(&self, policy: &StoragePolicy) -> Result<()>
Create a storage policy for Row Level Security (RLS)
§Examples
use supabase::storage::{StoragePolicy, PolicyOperation};
let policy = StoragePolicy {
name: "user_files_policy".to_string(),
bucket_id: "user-files".to_string(),
operation: PolicyOperation::Select,
definition: "auth.uid()::text = (storage.foldername(name))[1]".to_string(),
check: None,
};
storage.create_policy(&policy).await?;Sourcepub async fn update_policy(&self, policy: &StoragePolicy) -> Result<()>
pub async fn update_policy(&self, policy: &StoragePolicy) -> Result<()>
Update an existing storage policy
§Examples
use supabase::storage::{StoragePolicy, PolicyOperation};
let updated_policy = StoragePolicy {
name: "user_files_policy".to_string(),
bucket_id: "user-files".to_string(),
operation: PolicyOperation::All,
definition: "auth.uid()::text = (storage.foldername(name))[1] OR auth.role() = 'admin'".to_string(),
check: Some("auth.uid() IS NOT NULL".to_string()),
};
storage.update_policy(&updated_policy).await?;Sourcepub async fn list_policies(&self, bucket_id: &str) -> Result<Vec<StoragePolicy>>
pub async fn list_policies(&self, bucket_id: &str) -> Result<Vec<StoragePolicy>>
List all storage policies for a bucket
§Examples
let policies = storage.list_policies("user-files").await?;
println!("Found {} policies", policies.len());Sourcepub async fn test_policy_access(
&self,
bucket_id: &str,
object_path: &str,
operation: PolicyOperation,
user_id: &str,
) -> Result<bool>
pub async fn test_policy_access( &self, bucket_id: &str, object_path: &str, operation: PolicyOperation, user_id: &str, ) -> Result<bool>
Test if a user can access a file based on current policies
§Examples
use supabase::storage::PolicyOperation;
let can_access = storage.test_policy_access(
"user-files",
"user123/document.pdf",
PolicyOperation::Select,
"user123"
).await?;
if can_access {
println!("User can access the file");
} else {
println!("Access denied");
}Sourcepub fn generate_policy_template(
&self,
bucket_id: &str,
policy_name: &str,
template: PolicyTemplate,
) -> StoragePolicy
pub fn generate_policy_template( &self, bucket_id: &str, policy_name: &str, template: PolicyTemplate, ) -> StoragePolicy
Generate a policy template for common use cases
§Examples
use supabase::storage::PolicyTemplate;
let policy = storage.generate_policy_template(
"user-files",
"user_files_access",
PolicyTemplate::UserFolderAccess
);
println!("Generated policy: {:?}", policy);Trait Implementations§
Auto Trait Implementations§
impl Freeze for Storage
impl !RefUnwindSafe for Storage
impl Send for Storage
impl Sync for Storage
impl Unpin for Storage
impl !UnwindSafe for Storage
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T where
T: ?Sized,
impl<T> BorrowMut<T> for T where
T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Source§impl<T> CloneToUninit for T where
T: Clone,
impl<T> CloneToUninit for T where
T: Clone,
Source§impl<T> Instrument for T
impl<T> Instrument for T
Source§fn instrument(self, span: Span) -> Instrumented<Self>
fn instrument(self, span: Span) -> Instrumented<Self>
Source§fn in_current_span(self) -> Instrumented<Self>
fn in_current_span(self) -> Instrumented<Self>
Source§impl<T> IntoEither for T
impl<T> IntoEither for T
Source§fn into_either(self, into_left: bool) -> Either<Self, Self>
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self>
if into_left is true.
Converts self into a Right variant of Either<Self, Self>
otherwise. Read more
Source§fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self>
if into_left(&self) returns true.
Converts self into a Right variant of Either<Self, Self>
otherwise. Read more