pub struct Bucket {
pub name: String,
pub region: Region,
pub extra_headers: HeaderMap,
pub extra_query: Query,
pub request_timeout: Option<Duration>,
/* private fields */
}
Instantiate an existing Bucket
§Example
use s3::bucket::Bucket;
use s3::creds::Credentials;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let credentials = Credentials::default().unwrap();
let bucket = Bucket::new(bucket_name, region, credentials);
Fields§
§name: String
§region: Region
§extra_headers: HeaderMap
§extra_query: Query
§request_timeout: Option<Duration>
Implementations§
impl Bucket
pub async fn credentials_refresh(&self) -> Result<(), S3Error>
Credential refreshing is done automatically, but can be manually triggered.
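A minimal sketch of forcing a refresh by hand before a long-running transfer, assuming credentials that support refreshing (for example profile or STS credentials):
use anyhow::Result;
use s3::bucket::Bucket;
use s3::creds::Credentials;

#[tokio::main]
async fn main() -> Result<()> {
    let bucket = Bucket::new("rust-s3-test", "us-east-1".parse()?, Credentials::default()?)?;
    // Refresh the cached credentials explicitly; normally this happens automatically
    // before a request when the credential provider supports it.
    bucket.credentials_refresh().await?;
    Ok(())
}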
pub fn http_client(&self) -> Arc<Client<HttpsConnector<HttpConnector>>>
impl Bucket
pub async fn presign_get<S: AsRef<str>>(
    &self,
    path: S,
    expiry_secs: u32,
    custom_queries: Option<HashMap<String, String>>,
) -> Result<String, S3Error>
Get a presigned URL for getting an object at a given path.
§Example:
use std::collections::HashMap;
use s3::bucket::Bucket;
use s3::creds::Credentials;
#[tokio::main]
async fn main() {
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let credentials = Credentials::default().unwrap();
let bucket = Bucket::new(bucket_name, region, credentials).unwrap();
// Add optional custom queries
let mut custom_queries = HashMap::new();
custom_queries.insert(
"response-content-disposition".into(),
"attachment; filename=\"test.png\"".into(),
);
let url = bucket.presign_get("/test.file", 86400, Some(custom_queries)).await.unwrap();
println!("Presigned url: {}", url);
}
pub async fn presign_post<'a>(
    &self,
    post_policy: PostPolicy<'a>,
) -> Result<PresignedPost, S3Error>
Get a presigned url for posting an object to a given path
§Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use s3::post_policy::*;
use std::borrow::Cow;
#[tokio::main]
async fn main() {
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let credentials = Credentials::default().unwrap();
let bucket = Bucket::new(bucket_name, region, credentials).unwrap();
let post_policy = PostPolicy::new(86400).condition(
PostPolicyField::Key,
PostPolicyValue::StartsWith(Cow::from("user/user1/"))
).unwrap();
let presigned_post = bucket.presign_post(post_policy).await.unwrap();
println!("Presigned url: {}, fields: {:?}", presigned_post.url, presigned_post.fields);
}
pub async fn presign_put<S: AsRef<str>>(
    &self,
    path: S,
    expiry_secs: u32,
    custom_headers: Option<HeaderMap>,
    custom_queries: Option<HashMap<String, String>>,
) -> Result<String, S3Error>
Get a presigned URL for putting an object at a given path.
§Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use http::HeaderMap;
use http::header::HeaderName;
#[tokio::main]
async fn main() {
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let credentials = Credentials::default().unwrap();
let bucket = Bucket::new(bucket_name, region, credentials).unwrap();
// Add optional custom headers
let mut custom_headers = HeaderMap::new();
custom_headers.insert(
HeaderName::from_static("custom_header"),
"custom_value".parse().unwrap(),
);
let url = bucket.presign_put("/test.file", 86400, Some(custom_headers), None).await.unwrap();
println!("Presigned url: {}", url);
}
pub async fn presign_delete<S: AsRef<str>>(
    &self,
    path: S,
    expiry_secs: u32,
) -> Result<String, S3Error>
Get a presigned URL for deleting an object at a given path.
§Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
#[tokio::main]
async fn main() {
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let credentials = Credentials::default().unwrap();
let bucket = Bucket::new(bucket_name, region, credentials).unwrap();
let url = bucket.presign_delete("/test.file", 86400).await.unwrap();
println!("Presigned url: {}", url);
}
pub async fn create(
    name: &str,
    region: Region,
    credentials: Credentials,
    config: BucketConfiguration,
) -> Result<CreateBucketResponse, S3Error>
Create a new Bucket and instantiate it.
use s3::{Bucket, BucketConfiguration};
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let config = BucketConfiguration::default();
// Async variant with `tokio` or `async-std` features
let create_bucket_response = Bucket::create(bucket_name, region, credentials, config).await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let create_bucket_response = Bucket::create(bucket_name, region, credentials, config)?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let create_bucket_response = Bucket::create_blocking(bucket_name, region, credentials, config)?;
pub async fn list_buckets(
    region: Region,
    credentials: Credentials,
) -> Result<ListBucketsResponse, S3Error>
Get a list of all existing buckets in the region that are accessible by the given credentials.
use s3::{Bucket, BucketConfiguration};
use s3::creds::Credentials;
use s3::region::Region;
use anyhow::Result;
let region = Region::Custom {
region: "eu-central-1".to_owned(),
endpoint: "http://localhost:9000".to_owned()
};
let credentials = Credentials::default()?;
// Async variant with `tokio` or `async-std` features
let response = Bucket::list_buckets(region, credentials).await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let response = Bucket::list_buckets(region, credentials)?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let response = Bucket::list_buckets_blocking(region, credentials)?;
let found_buckets = response.bucket_names().collect::<Vec<String>>();
println!("found buckets: {:#?}", found_buckets);
pub async fn exists(&self) -> Result<bool, S3Error>
Determine whether the instantiated bucket exists.
use s3::{Bucket, BucketConfiguration};
use s3::creds::Credentials;
use s3::region::Region;
use anyhow::Result;
let bucket_name = "some-bucket-that-is-known-to-exist";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let exists = bucket.exists().await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let exists = bucket.exists()?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let exists = bucket.exists_blocking()?;
assert_eq!(exists, true);
pub async fn create_with_path_style(
    name: &str,
    region: Region,
    credentials: Credentials,
    config: BucketConfiguration,
) -> Result<CreateBucketResponse, S3Error>
Create a new Bucket with path style and instantiate it.
use s3::{Bucket, BucketConfiguration};
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let config = BucketConfiguration::default();
// Async variant with `tokio` or `async-std` features
let create_bucket_response = Bucket::create_with_path_style(bucket_name, region, credentials, config).await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let create_bucket_response = Bucket::create_with_path_style(bucket_name, region, credentials, config)?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let create_bucket_response = Bucket::create_with_path_style_blocking(bucket_name, region, credentials, config)?;
pub async fn delete(&self) -> Result<u16, S3Error>
Delete existing Bucket
§Example
use s3::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let credentials = Credentials::default().unwrap();
let bucket = Bucket::new(bucket_name, region, credentials).unwrap();
// Async variant with `tokio` or `async-std` features
bucket.delete().await.unwrap();
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
bucket.delete().unwrap();
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
bucket.delete_blocking().unwrap();
pub fn new(
    name: &str,
    region: Region,
    credentials: Credentials,
) -> Result<Box<Bucket>, S3Error>
Instantiate an existing Bucket.
§Example
use s3::bucket::Bucket;
use s3::creds::Credentials;
// Fake credentials so we don't access user's real credentials in tests
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let credentials = Credentials::default().unwrap();
let bucket = Bucket::new(bucket_name, region, credentials).unwrap();
pub fn new_public(name: &str, region: Region) -> Result<Bucket, S3Error>
Instantiate a public existing Bucket.
§Example
use s3::bucket::Bucket;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let bucket = Bucket::new_public(bucket_name, region).unwrap();
pub fn with_path_style(&self) -> Box<Bucket>
pub fn with_extra_headers( &self, extra_headers: HeaderMap, ) -> Result<Bucket, S3Error>
pub fn with_extra_query( &self, extra_query: HashMap<String, String>, ) -> Result<Bucket, S3Error>
pub fn with_request_timeout( &self, request_timeout: Duration, ) -> Result<Box<Bucket>, S3Error>
pub fn with_listobjects_v1(&self) -> Bucket
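These builder-style helpers return a reconfigured copy of the bucket, so they can be chained when instantiating; a minimal sketch (the 60-second timeout is an illustrative value):
use anyhow::Result;
use s3::bucket::Bucket;
use s3::creds::Credentials;
use std::time::Duration;

fn main() -> Result<()> {
    let bucket = Bucket::new("rust-s3-test", "us-east-1".parse()?, Credentials::default()?)?
        // Use path-style URLs (often needed for MinIO and other S3-compatible stores).
        .with_path_style()
        // Apply a per-request timeout to every HTTP request made through this handle.
        .with_request_timeout(Duration::from_secs(60))?;
    assert!(bucket.is_path_style());
    Ok(())
}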
pub async fn copy_object_internal<F: AsRef<str>, T: AsRef<str>>(
    &self,
    from: F,
    to: T,
) -> Result<u16, S3Error>
Copy a file from one S3 path to another within the same bucket.
§Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let code = bucket.copy_object_internal("/from.file", "/to.file").await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let code = bucket.copy_object_internal("/from.file", "/to.file")?;
pub async fn get_object<S: AsRef<str>>(
    &self,
    path: S,
) -> Result<ResponseData, S3Error>
Gets a file from an S3 path.
§Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let response_data = bucket.get_object("/test.file").await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let response_data = bucket.get_object("/test.file")?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let response_data = bucket.get_object_blocking("/test.file")?;
pub async fn put_bucket_cors( &self, cors_config: CorsConfiguration, ) -> Result<ResponseData, S3Error>
pub async fn get_bucket_lifecycle( &self, ) -> Result<BucketLifecycleConfiguration, S3Error>
pub async fn put_bucket_lifecycle( &self, lifecycle_config: BucketLifecycleConfiguration, ) -> Result<ResponseData, S3Error>
pub async fn delete_bucket_lifecycle(&self) -> Result<ResponseData, S3Error>
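The lifecycle helpers take and return the crate's configuration types directly; a minimal sketch of reading and then removing a bucket's lifecycle configuration (building CorsConfiguration or BucketLifecycleConfiguration payloads is not shown here):
use anyhow::Result;
use s3::bucket::Bucket;
use s3::creds::Credentials;

#[tokio::main]
async fn main() -> Result<()> {
    let bucket = Bucket::new("rust-s3-test", "us-east-1".parse()?, Credentials::default()?)?;
    // Fetch the current lifecycle configuration, if the bucket has one.
    let _lifecycle = bucket.get_bucket_lifecycle().await?;
    // ... inspect it, or build a new BucketLifecycleConfiguration and write it
    //     back with put_bucket_lifecycle ...
    // Remove the lifecycle configuration entirely.
    bucket.delete_bucket_lifecycle().await?;
    Ok(())
}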
pub async fn get_object_torrent<S: AsRef<str>>(
    &self,
    path: S,
) -> Result<ResponseData, S3Error>
Gets a torrent from an S3 path.
§Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let response_data = bucket.get_object_torrent("/test.file").await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let response_data = bucket.get_object_torrent("/test.file")?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let response_data = bucket.get_object_torrent_blocking("/test.file")?;
pub async fn get_object_range<S: AsRef<str>>(
    &self,
    path: S,
    start: u64,
    end: Option<u64>,
) -> Result<ResponseData, S3Error>
Gets specified inclusive byte range of file from an S3 path.
§Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let response_data = bucket.get_object_range("/test.file", 0, Some(31)).await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let response_data = bucket.get_object_range("/test.file", 0, Some(31))?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let response_data = bucket.get_object_range_blocking("/test.file", 0, Some(31))?;
pub async fn get_object_range_to_writer<T, S>(
    &self,
    path: S,
    start: u64,
    end: Option<u64>,
    writer: &mut T,
) -> Result<u16, S3Error>
Stream range of bytes from S3 path to a local file, generic over T: Write.
§Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
use std::fs::File;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let mut output_file = File::create("output_file").expect("Unable to create file");
#[cfg(feature = "with-tokio")]
let mut async_output_file = tokio::fs::File::create("async_output_file").await.expect("Unable to create file");
#[cfg(feature = "with-async-std")]
let mut async_output_file = async_std::fs::File::create("async_output_file").await.expect("Unable to create file");
let start = 0;
let end = Some(1024);
// Async variant with `tokio` or `async-std` features
let status_code = bucket.get_object_range_to_writer("/test.file", start, end, &mut async_output_file).await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let status_code = bucket.get_object_range_to_writer("/test.file", start, end, &mut output_file)?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features. Based on the async branch
#[cfg(feature = "blocking")]
let status_code = bucket.get_object_range_to_writer_blocking("/test.file", start, end, &mut async_output_file)?;
pub async fn get_object_to_writer<T: AsyncWrite + Send + Unpin + ?Sized, S: AsRef<str>>(
    &self,
    path: S,
    writer: &mut T,
) -> Result<u16, S3Error>
Stream file from S3 path to a local file, generic over T: Write.
§Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
use std::fs::File;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let mut output_file = File::create("output_file").expect("Unable to create file");
#[cfg(feature = "with-tokio")]
let mut async_output_file = tokio::fs::File::create("async_output_file").await.expect("Unable to create file");
#[cfg(feature = "with-async-std")]
let mut async_output_file = async_std::fs::File::create("async_output_file").await.expect("Unable to create file");
// Async variant with `tokio` or `async-std` features
let status_code = bucket.get_object_to_writer("/test.file", &mut async_output_file).await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let status_code = bucket.get_object_to_writer("/test.file", &mut output_file)?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features. Based on the async branch
#[cfg(feature = "blocking")]
let status_code = bucket.get_object_to_writer_blocking("/test.file", &mut async_output_file)?;
pub async fn get_object_stream<S: AsRef<str>>(
    &self,
    path: S,
) -> Result<ResponseDataStream, S3Error>
Stream file from S3 path to a local file using an async stream.
§Example
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
#[cfg(feature = "with-tokio")]
use tokio_stream::StreamExt;
#[cfg(feature = "with-tokio")]
use tokio::io::AsyncWriteExt;
#[cfg(feature = "with-async-std")]
use futures_util::StreamExt;
#[cfg(feature = "with-async-std")]
use futures_util::AsyncWriteExt;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let path = "path";
let mut response_data_stream = bucket.get_object_stream(path).await?;
#[cfg(feature = "with-tokio")]
let mut async_output_file = tokio::fs::File::create("async_output_file").await.expect("Unable to create file");
#[cfg(feature = "with-async-std")]
let mut async_output_file = async_std::fs::File::create("async_output_file").await.expect("Unable to create file");
while let Some(chunk) = response_data_stream.bytes().next().await {
async_output_file.write_all(&chunk.unwrap()).await?;
}
pub async fn put_object_stream<R: AsyncRead + Unpin + ?Sized>(
    &self,
    reader: &mut R,
    s3_path: impl AsRef<str>,
) -> Result<PutStreamResponse, S3Error>
Stream a file from a local source to S3, generic over the reader R.
§Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
use std::fs::File;
use std::io::Write;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let path = "path";
let test: Vec<u8> = (0..1000).map(|_| 42).collect();
let mut file = File::create(path)?;
// tokio open file
#[cfg(feature = "with-tokio")]
let mut async_output_file = tokio::fs::File::create("async_output_file").await.expect("Unable to create file");
file.write_all(&test)?;
// Generic over std::io::Read
#[cfg(feature = "with-tokio")]
let status_code = bucket.put_object_stream(&mut async_output_file, "/path").await?;
#[cfg(feature = "with-async-std")]
let mut async_output_file = async_std::fs::File::create("async_output_file").await.expect("Unable to create file");
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
// Generic over std::io::Read
let status_code = bucket.put_object_stream(&mut File::open(path)?, "/path")?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let status_code = bucket.put_object_stream_blocking(&mut File::open(path)?, "/path")?;
pub async fn put_object_stream_with_content_type<R: AsyncRead + Unpin>(
    &self,
    reader: &mut R,
    s3_path: impl AsRef<str>,
    content_type: impl AsRef<str>,
) -> Result<PutStreamResponse, S3Error>
Stream a file from a local source to S3 with an explicit content type, generic over the reader R.
§Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
use std::fs::File;
use std::io::Write;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let path = "path";
let test: Vec<u8> = (0..1000).map(|_| 42).collect();
let mut file = File::create(path)?;
file.write_all(&test)?;
#[cfg(feature = "with-tokio")]
let mut async_output_file = tokio::fs::File::create("async_output_file").await.expect("Unable to create file");
#[cfg(feature = "with-async-std")]
let mut async_output_file = async_std::fs::File::create("async_output_file").await.expect("Unable to create file");
// Async variant with `tokio` or `async-std` features
// Generic over std::io::Read
let status_code = bucket
.put_object_stream_with_content_type(&mut async_output_file, "/path", "application/octet-stream")
.await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
// Generic over std::io::Read
let status_code = bucket
.put_object_stream_with_content_type(&mut File::open(path)?, "/path", "application/octet-stream")?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let status_code = bucket
.put_object_stream_with_content_type_blocking(&mut File::open(path)?, "/path", "application/octet-stream")?;
pub async fn initiate_multipart_upload(
    &self,
    s3_path: &str,
    content_type: &str,
) -> Result<InitiateMultipartUploadResponse, S3Error>
Initiate multipart upload to s3.
pub async fn put_multipart_stream<R: Read + Unpin>(
    &self,
    reader: &mut R,
    path: &str,
    part_number: u32,
    upload_id: &str,
    content_type: &str,
) -> Result<Part, S3Error>
Upload a streamed multipart chunk to s3 using a previously initiated multipart upload
pub async fn put_multipart_chunk(
    &self,
    chunk: Vec<u8>,
    path: &str,
    part_number: u32,
    upload_id: &str,
    content_type: &str,
) -> Result<Part, S3Error>
Upload a buffered multipart chunk to s3 using a previously initiated multipart upload
pub async fn complete_multipart_upload(
    &self,
    path: &str,
    upload_id: &str,
    parts: Vec<Part>,
) -> Result<ResponseData, S3Error>
Completes a previously initiated multipart upload, with optional final data chunks
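The four multipart methods are meant to be used together; a minimal sketch of a manual multipart upload, assuming InitiateMultipartUploadResponse exposes the upload id as an upload_id field (the chunk below is illustrative — every part except the last must be at least 5 MiB):
use anyhow::Result;
use s3::bucket::Bucket;
use s3::creds::Credentials;

#[tokio::main]
async fn main() -> Result<()> {
    let bucket = Bucket::new("rust-s3-test", "us-east-1".parse()?, Credentials::default()?)?;
    let path = "/multipart.bin";
    let content_type = "application/octet-stream";

    // 1. Start the upload and remember the upload id.
    let init = bucket.initiate_multipart_upload(path, content_type).await?;

    // 2. Upload each chunk with an increasing 1-based part number, collecting the parts.
    let chunk: Vec<u8> = vec![42u8; 5 * 1024 * 1024]; // illustrative 5 MiB chunk
    let part = bucket
        .put_multipart_chunk(chunk, path, 1, &init.upload_id, content_type)
        .await?;

    // 3. Complete the upload with the collected parts (or call abort_upload on failure).
    bucket
        .complete_multipart_upload(path, &init.upload_id, vec![part])
        .await?;
    Ok(())
}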
pub async fn location(&self) -> Result<(Region, u16), S3Error>
Get Bucket location.
§Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let (region, status_code) = bucket.location().await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let (region, status_code) = bucket.location()?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let (region, status_code) = bucket.location_blocking()?;
pub async fn delete_object<S: AsRef<str>>(
    &self,
    path: S,
) -> Result<ResponseData, S3Error>
Delete a file from an S3 path.
§Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let response_data = bucket.delete_object("/test.file").await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let response_data = bucket.delete_object("/test.file")?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let response_data = bucket.delete_object_blocking("/test.file")?;
pub async fn head_object<S: AsRef<str>>(
    &self,
    path: S,
) -> Result<(HeadObjectResult, u16), S3Error>
Head object from S3.
§Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let (head_object_result, code) = bucket.head_object("/test.png").await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let (head_object_result, code) = bucket.head_object("/test.png")?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let (head_object_result, code) = bucket.head_object_blocking("/test.png")?;
pub async fn put_object_with_content_type<S: AsRef<str>>(
    &self,
    path: S,
    content: &[u8],
    content_type: &str,
) -> Result<ResponseData, S3Error>
Put into an S3 bucket, with explicit content-type.
§Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let content = "I want to go to S3".as_bytes();
// Async variant with `tokio` or `async-std` features
let response_data = bucket.put_object_with_content_type("/test.file", content, "text/plain").await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let response_data = bucket.put_object_with_content_type("/test.file", content, "text/plain")?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let response_data = bucket.put_object_with_content_type_blocking("/test.file", content, "text/plain")?;
pub async fn put_object<S: AsRef<str>>(
    &self,
    path: S,
    content: &[u8],
) -> Result<ResponseData, S3Error>
Put into an S3 bucket.
§Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let content = "I want to go to S3".as_bytes();
// Async variant with `tokio` or `async-std` features
let response_data = bucket.put_object("/test.file", content).await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let response_data = bucket.put_object("/test.file", content)?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let response_data = bucket.put_object_blocking("/test.file", content)?;
pub async fn put_object_tagging<S: AsRef<str>>(
    &self,
    path: &str,
    tags: &[(S, S)],
) -> Result<ResponseData, S3Error>
Tag an S3 object.
§Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let response_data = bucket.put_object_tagging("/test.file", &[("Tag1", "Value1"), ("Tag2", "Value2")]).await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let response_data = bucket.put_object_tagging("/test.file", &[("Tag1", "Value1"), ("Tag2", "Value2")])?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let response_data = bucket.put_object_tagging_blocking("/test.file", &[("Tag1", "Value1"), ("Tag2", "Value2")])?;
pub async fn delete_object_tagging<S: AsRef<str>>(
    &self,
    path: S,
) -> Result<ResponseData, S3Error>
Delete tags from an S3 object.
§Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let response_data = bucket.delete_object_tagging("/test.file").await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let response_data = bucket.delete_object_tagging("/test.file")?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let response_data = bucket.delete_object_tagging_blocking("/test.file")?;
pub async fn get_object_tagging<S: AsRef<str>>(
    &self,
    path: S,
) -> Result<(Vec<Tag>, u16), S3Error>
Retrieve the list of tags on an S3 object.
§Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let response_data = bucket.get_object_tagging("/test.file").await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let response_data = bucket.get_object_tagging("/test.file")?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let response_data = bucket.get_object_tagging_blocking("/test.file")?;
pub async fn list_page( &self, prefix: String, delimiter: Option<String>, continuation_token: Option<String>, start_after: Option<String>, max_keys: Option<usize>, ) -> Result<(ListBucketResult, u16), S3Error>
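list_page returns a single page of results plus the HTTP status code; a sketch of manual pagination, assuming ListBucketResult exposes contents and next_continuation_token fields as in the crate's serde types (list below wraps this same loop for you):
use anyhow::Result;
use s3::bucket::Bucket;
use s3::creds::Credentials;

#[tokio::main]
async fn main() -> Result<()> {
    let bucket = Bucket::new("rust-s3-test", "us-east-1".parse()?, Credentials::default()?)?;
    let mut continuation_token = None;
    loop {
        // Fetch at most 100 keys per page under the given prefix.
        let (page, _status_code) = bucket
            .list_page("images/".to_string(), Some("/".to_string()), continuation_token, None, Some(100))
            .await?;
        for object in &page.contents {
            println!("{}", object.key);
        }
        // Keep paginating until S3 stops returning a continuation token.
        continuation_token = page.next_continuation_token;
        if continuation_token.is_none() {
            break;
        }
    }
    Ok(())
}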
pub async fn list(
    &self,
    prefix: String,
    delimiter: Option<String>,
) -> Result<Vec<ListBucketResult>, S3Error>
List the contents of an S3 bucket.
§Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let results = bucket.list("/".to_string(), Some("/".to_string())).await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let results = bucket.list("/".to_string(), Some("/".to_string()))?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let results = bucket.list_blocking("/".to_string(), Some("/".to_string()))?;
pub async fn list_multiparts_uploads_page( &self, prefix: Option<&str>, delimiter: Option<&str>, key_marker: Option<String>, max_uploads: Option<usize>, ) -> Result<(ListMultipartUploadsResult, u16), S3Error>
pub async fn list_multiparts_uploads(
    &self,
    prefix: Option<&str>,
    delimiter: Option<&str>,
) -> Result<Vec<ListMultipartUploadsResult>, S3Error>
List the ongoing multipart uploads of an S3 bucket. This may be useful to clean up failed uploads, together with crate::bucket::Bucket::abort_upload.
§Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let results = bucket.list_multiparts_uploads(Some("/"), Some("/")).await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let results = bucket.list_multiparts_uploads(Some("/"), Some("/"))?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let results = bucket.list_multiparts_uploads_blocking(Some("/"), Some("/"))?;
pub async fn abort_upload(
    &self,
    key: &str,
    upload_id: &str,
) -> Result<(), S3Error>
Abort a running multipart upload.
§Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let results = bucket.abort_upload("/some/file.txt", "ZDFjM2I0YmEtMzU3ZC00OTQ1LTlkNGUtMTgxZThjYzIwNjA2").await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let results = bucket.abort_upload("/some/file.txt", "ZDFjM2I0YmEtMzU3ZC00OTQ1LTlkNGUtMTgxZThjYzIwNjA2")?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let results = bucket.abort_upload_blocking("/some/file.txt", "ZDFjM2I0YmEtMzU3ZC00OTQ1LTlkNGUtMTgxZThjYzIwNjA2")?;
pub fn is_path_style(&self) -> bool
Get the path_style field of the Bucket struct
pub fn is_subdomain_style(&self) -> bool
Get the negated path_style field of the Bucket struct
pub fn set_path_style(&mut self)
Configure bucket to use path-style urls and headers
pub fn set_subdomain_style(&mut self)
Configure bucket to use subdomain style urls and headers [default]
pub fn set_request_timeout(&mut self, timeout: Option<Duration>)
Configure bucket to apply this request timeout to all HTTP requests, or no (infinite) timeout if None. Defaults to 30 seconds.
Only the attohttpc and the hyper backends obey this option; async code may instead await with a timeout.
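A minimal sketch of tightening the timeout on an existing bucket, or disabling it entirely (the 10-second value is illustrative):
use anyhow::Result;
use s3::bucket::Bucket;
use s3::creds::Credentials;
use std::time::Duration;

fn main() -> Result<()> {
    let mut bucket = Bucket::new("rust-s3-test", "us-east-1".parse()?, Credentials::default()?)?;
    // Fail requests that take longer than 10 seconds instead of the 30-second default ...
    bucket.set_request_timeout(Some(Duration::from_secs(10)));
    // ... or wait indefinitely.
    bucket.set_request_timeout(None);
    Ok(())
}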
pub fn set_listobjects_v1(&mut self)
Configure bucket to use the older ListObjects API
If your provider doesn’t support the ListObjectsV2 interface, set this to use the v1 ListObjects interface instead. This is currently needed at least for Google Cloud Storage.
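For such providers, switch the bucket over before listing; a minimal sketch:
use anyhow::Result;
use s3::bucket::Bucket;
use s3::creds::Credentials;

#[tokio::main]
async fn main() -> Result<()> {
    let mut bucket = Bucket::new("rust-s3-test", "us-east-1".parse()?, Credentials::default()?)?;
    // Fall back to the older ListObjects (v1) API for providers without ListObjectsV2 support.
    bucket.set_listobjects_v1();
    let results = bucket.list("/".to_string(), Some("/".to_string())).await?;
    println!("pages returned: {}", results.len());
    Ok(())
}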
pub fn set_listobjects_v2(&mut self)
Configure bucket to use the newer ListObjectsV2 API
pub fn host(&self) -> String
pub fn url(&self) -> String
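host and url are derived from the region and the path-style setting; a small sketch printing them for a bucket in us-east-1 (the exact hostnames depend on the region and style in use):
use anyhow::Result;
use s3::bucket::Bucket;
use s3::creds::Credentials;

fn main() -> Result<()> {
    let bucket = Bucket::new("rust-s3-test", "us-east-1".parse()?, Credentials::default()?)?;
    // With the default subdomain style this is typically "rust-s3-test.s3.amazonaws.com".
    println!("host: {}", bucket.host());
    // Full endpoint URL including the scheme, e.g. "https://rust-s3-test.s3.amazonaws.com".
    println!("url: {}", bucket.url());
    Ok(())
}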
pub fn path_style_host(&self) -> String
Get a path-style reference to the hostname of the S3 API endpoint.
pub fn subdomain_style_host(&self) -> String
pub fn scheme(&self) -> String
pub async fn access_key(&self) -> Result<Option<String>, S3Error>
Get a reference to the AWS access key.
pub async fn secret_key(&self) -> Result<Option<String>, S3Error>
Get a reference to the AWS secret key.
pub async fn security_token(&self) -> Result<Option<String>, S3Error>
Get a reference to the AWS security token.
pub async fn session_token(&self) -> Result<Option<String>, S3Error>
Get a reference to the AWS session token.
pub async fn credentials(&self) -> Result<Credentials, S3Error>
Get a reference to the full Credentials object used by this Bucket.
pub fn set_credentials(&mut self, credentials: Credentials)
Change the credentials used by the Bucket.
pub fn add_header(&mut self, key: &str, value: &str)
Add an extra header to send with requests to S3.
Note that the library already sets a number of headers itself; headers set with this method will be overridden by the following library-set headers (see the sketch after this list):
- Host
- Content-Type
- Date
- Content-Length
- Authorization
- X-Amz-Content-Sha256
- X-Amz-Date
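A small sketch of adding a custom metadata header before uploading (the header name and value are illustrative):
use anyhow::Result;
use s3::bucket::Bucket;
use s3::creds::Credentials;

#[tokio::main]
async fn main() -> Result<()> {
    let mut bucket = Bucket::new("rust-s3-test", "us-east-1".parse()?, Credentials::default()?)?;
    // Sent with every subsequent request made through this bucket handle.
    bucket.add_header("x-amz-meta-project", "rust-s3-demo");
    bucket.put_object("/test.file", b"I want to go to S3").await?;
    Ok(())
}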
pub fn extra_headers(&self) -> &HeaderMap
Get a reference to the extra headers to be passed to the S3 API.
pub fn extra_headers_mut(&mut self) -> &mut HeaderMap
Get a mutable reference to the extra headers to be passed to the S3 API.
pub fn add_query(&mut self, key: &str, value: &str)
Add an extra query pair to the URL used for S3 API access.
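A small sketch of adding a fixed query pair that is appended to every request URL made through this bucket handle (the pair shown is illustrative):
use anyhow::Result;
use s3::bucket::Bucket;
use s3::creds::Credentials;

fn main() -> Result<()> {
    let mut bucket = Bucket::new("rust-s3-test", "us-east-1".parse()?, Credentials::default()?)?;
    // Appended to the query string of every subsequent request.
    bucket.add_query("x-id", "GetObject");
    Ok(())
}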
pub fn extra_query(&self) -> &Query
Get a reference to the extra query pairs to be passed to the S3 API.
pub fn extra_query_mut(&mut self) -> &mut Query
Get a mutable reference to the extra query pairs to be passed to the S3 API.