pub struct Bucket {
pub name: String,
pub region: Region,
pub credentials: Arc<RwLock<Credentials>>,
pub extra_headers: HeaderMap,
pub extra_query: Query,
pub request_timeout: Option<Duration>,
/* private fields */
}
Expand description
Instantiate an existing Bucket
Example
use s3::bucket::Bucket;
use s3::creds::Credentials;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let credentials = Credentials::default().unwrap();
let bucket = Bucket::new(bucket_name, region, credentials).unwrap();
Fields
name: String
region: Region
credentials: Arc<RwLock<Credentials>>
extra_headers: HeaderMap
extra_query: Query
request_timeout: Option<Duration>
Implementations
sourceimpl Bucket
impl Bucket
sourcepub fn presign_get<S: AsRef<str>>(
&self,
path: S,
expiry_secs: u32,
custom_queries: Option<HashMap<String, String>>
) -> Result<String, S3Error>
pub fn presign_get<S: AsRef<str>>(
&self,
path: S,
expiry_secs: u32,
custom_queries: Option<HashMap<String, String>>
) -> Result<String, S3Error>
Get a presigned url for getting object on a given path
Example:
use std::collections::HashMap;
use s3::bucket::Bucket;
use s3::creds::Credentials;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let credentials = Credentials::default().unwrap();
let bucket = Bucket::new(bucket_name, region, credentials).unwrap();
// Add optional custom queries
let mut custom_queries = HashMap::new();
custom_queries.insert(
"response-content-disposition".into(),
"attachment; filename=\"test.png\"".into(),
);
let url = bucket.presign_get("/test.file", 86400, Some(custom_queries)).unwrap();
println!("Presigned url: {}", url);
sourcepub fn presign_post<S: AsRef<str>>(
&self,
path: S,
expiry_secs: u32,
post_policy: String
) -> Result<String, S3Error>
pub fn presign_post<S: AsRef<str>>(
&self,
path: S,
expiry_secs: u32,
post_policy: String
) -> Result<String, S3Error>
Get a presigned url for posting an object to a given path
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use http::HeaderMap;
use http::header::HeaderName;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let credentials = Credentials::default().unwrap();
let bucket = Bucket::new(bucket_name, region, credentials).unwrap();
let post_policy = "eyAiZXhwaXJhdGlvbiI6ICIyMDE1LTEyLTMwVDEyOjAwOjAwLjAwMFoiLA0KICAiY29uZGl0aW9ucyI6IFsNCiAgICB7ImJ1Y2tldCI6ICJzaWd2NGV4YW1wbGVidWNrZXQifSwNCiAgICBbInN0YXJ0cy13aXRoIiwgIiRrZXkiLCAidXNlci91c2VyMS8iXSwNCiAgICB7ImFjbCI6ICJwdWJsaWMtcmVhZCJ9LA0KICAgIHsic3VjY2Vzc19hY3Rpb25fcmVkaXJlY3QiOiAiaHR0cDovL3NpZ3Y0ZXhhbXBsZWJ1Y2tldC5zMy5hbWF6b25hd3MuY29tL3N1Y2Nlc3NmdWxfdXBsb2FkLmh0bWwifSwNCiAgICBbInN0YXJ0cy13aXRoIiwgIiRDb250ZW50LVR5cGUiLCAiaW1hZ2UvIl0sDQogICAgeyJ4LWFtei1tZXRhLXV1aWQiOiAiMTQzNjUxMjM2NTEyNzQifSwNCiAgICB7IngtYW16LXNlcnZlci1zaWRlLWVuY3J5cHRpb24iOiAiQUVTMjU2In0sDQogICAgWyJzdGFydHMtd2l0aCIsICIkeC1hbXotbWV0YS10YWciLCAiIl0sDQoNCiAgICB7IngtYW16LWNyZWRlbnRpYWwiOiAiQUtJQUlPU0ZPRE5ON0VYQU1QTEUvMjAxNTEyMjkvdXMtZWFzdC0xL3MzL2F3czRfcmVxdWVzdCJ9LA0KICAgIHsieC1hbXotYWxnb3JpdGhtIjogIkFXUzQtSE1BQy1TSEEyNTYifSwNCiAgICB7IngtYW16LWRhdGUiOiAiMjAxNTEyMjlUMDAwMDAwWiIgfQ0KICBdDQp9";
let url = bucket.presign_post("/test.file", 86400, post_policy.to_string()).unwrap();
println!("Presigned url: {}", url);
sourcepub fn presign_put<S: AsRef<str>>(
&self,
path: S,
expiry_secs: u32,
custom_headers: Option<HeaderMap>
) -> Result<String, S3Error>
pub fn presign_put<S: AsRef<str>>(
&self,
path: S,
expiry_secs: u32,
custom_headers: Option<HeaderMap>
) -> Result<String, S3Error>
Get a presigned url for putting object to a given path
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use http::HeaderMap;
use http::header::HeaderName;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let credentials = Credentials::default().unwrap();
let bucket = Bucket::new(bucket_name, region, credentials).unwrap();
// Add optional custom headers
let mut custom_headers = HeaderMap::new();
custom_headers.insert(
HeaderName::from_static("custom_header"),
"custom_value".parse().unwrap(),
);
let url = bucket.presign_put("/test.file", 86400, Some(custom_headers)).unwrap();
println!("Presigned url: {}", url);
sourcepub fn presign_delete<S: AsRef<str>>(
&self,
path: S,
expiry_secs: u32
) -> Result<String, S3Error>
pub fn presign_delete<S: AsRef<str>>(
&self,
path: S,
expiry_secs: u32
) -> Result<String, S3Error>
Get a presigned url for deleting object on a given path
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let credentials = Credentials::default().unwrap();
let bucket = Bucket::new(bucket_name, region, credentials).unwrap();
let url = bucket.presign_delete("/test.file", 86400).unwrap();
println!("Presigned url: {}", url);
sourcepub async fn create(
name: &str,
region: Region,
credentials: Credentials,
config: BucketConfiguration
) -> Result<CreateBucketResponse, S3Error>
pub async fn create(
name: &str,
region: Region,
credentials: Credentials,
config: BucketConfiguration
) -> Result<CreateBucketResponse, S3Error>
Create a new Bucket
and instantiate it
use s3::{Bucket, BucketConfiguration};
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let config = BucketConfiguration::default();
// Async variant with `tokio` or `async-std` features
let create_bucket_response = Bucket::create(bucket_name, region, credentials, config).await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let create_bucket_response = Bucket::create(bucket_name, region, credentials, config)?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let create_bucket_response = Bucket::create_blocking(bucket_name, region, credentials, config)?;
sourcepub async fn create_with_path_style(
name: &str,
region: Region,
credentials: Credentials,
config: BucketConfiguration
) -> Result<CreateBucketResponse, S3Error>
pub async fn create_with_path_style(
name: &str,
region: Region,
credentials: Credentials,
config: BucketConfiguration
) -> Result<CreateBucketResponse, S3Error>
Create a new Bucket
with path style and instantiate it
use s3::{Bucket, BucketConfiguration};
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let config = BucketConfiguration::default();
// Async variant with `tokio` or `async-std` features
let create_bucket_response = Bucket::create_with_path_style(bucket_name, region, credentials, config).await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let create_bucket_response = Bucket::create_with_path_style(bucket_name, region, credentials, config)?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let create_bucket_response = Bucket::create_with_path_style_blocking(bucket_name, region, credentials, config)?;
sourcepub async fn delete(&self) -> Result<u16, S3Error>
pub async fn delete(&self) -> Result<u16, S3Error>
Delete existing Bucket
Example
use s3::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let credentials = Credentials::default().unwrap();
let bucket = Bucket::new(bucket_name, region, credentials).unwrap();
// Async variant with `tokio` or `async-std` features
bucket.delete().await.unwrap();
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
bucket.delete().unwrap();
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
bucket.delete_blocking().unwrap();
sourcepub fn new(
name: &str,
region: Region,
credentials: Credentials
) -> Result<Bucket, S3Error>
pub fn new(
name: &str,
region: Region,
credentials: Credentials
) -> Result<Bucket, S3Error>
Instantiate an existing Bucket
.
Example
use s3::bucket::Bucket;
use s3::creds::Credentials;
// Fake credentials so we don't access user's real credentials in tests
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let credentials = Credentials::default().unwrap();
let bucket = Bucket::new(bucket_name, region, credentials).unwrap();
sourcepub fn new_public(name: &str, region: Region) -> Result<Bucket, S3Error>
pub fn new_public(name: &str, region: Region) -> Result<Bucket, S3Error>
Instantiate a public existing Bucket
.
Example
use s3::bucket::Bucket;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let bucket = Bucket::new_public(bucket_name, region).unwrap();
pub fn with_path_style(&self) -> Bucket
pub fn with_extra_headers(&self, extra_headers: HeaderMap) -> Bucket
pub fn with_extra_query(&self, extra_query: HashMap<String, String>) -> Bucket
pub fn with_request_timeout(&self, request_timeout: Duration) -> Bucket
pub fn with_listobjects_v1(&self) -> Bucket
sourcepub async fn copy_object_internal<F: AsRef<str>, T: AsRef<str>>(
&self,
from: F,
to: T
) -> Result<u16, S3Error>
pub async fn copy_object_internal<F: AsRef<str>, T: AsRef<str>>(
&self,
from: F,
to: T
) -> Result<u16, S3Error>
Copy file from an S3 path, internally within the same bucket.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let code = bucket.copy_object_internal("/from.file", "/to.file").await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let code = bucket.copy_object_internal("/from.file", "/to.file")?;
sourcepub async fn get_object<S: AsRef<str>>(
&self,
path: S
) -> Result<ResponseData, S3Error>
pub async fn get_object<S: AsRef<str>>(
&self,
path: S
) -> Result<ResponseData, S3Error>
Gets file from an S3 path.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let response_data = bucket.get_object("/test.file").await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let response_data = bucket.get_object("/test.file")?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let response_data = bucket.get_object_blocking("/test.file")?;
sourcepub async fn get_object_torrent<S: AsRef<str>>(
&self,
path: S
) -> Result<ResponseData, S3Error>
pub async fn get_object_torrent<S: AsRef<str>>(
&self,
path: S
) -> Result<ResponseData, S3Error>
Gets torrent from an S3 path.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let response_data = bucket.get_object_torrent("/test.file").await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let response_data = bucket.get_object_torrent("/test.file")?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let response_data = bucket.get_object_torrent_blocking("/test.file")?;
sourcepub async fn get_object_range<S: AsRef<str>>(
&self,
path: S,
start: u64,
end: Option<u64>
) -> Result<ResponseData, S3Error>
pub async fn get_object_range<S: AsRef<str>>(
&self,
path: S,
start: u64,
end: Option<u64>
) -> Result<ResponseData, S3Error>
Gets specified inclusive byte range of file from an S3 path.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let response_data = bucket.get_object_range("/test.file", 0, Some(31)).await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let response_data = bucket.get_object_range("/test.file", 0, Some(31))?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let response_data = bucket.get_object_range_blocking("/test.file", 0, Some(31))?;
sourcepub async fn get_object_range_to_writer<T: AsyncWrite + Send + Unpin, S: AsRef<str>>(
&self,
path: S,
start: u64,
end: Option<u64>,
writer: &mut T
) -> Result<u16, S3Error>
pub async fn get_object_range_to_writer<T: AsyncWrite + Send + Unpin, S: AsRef<str>>(
&self,
path: S,
start: u64,
end: Option<u64>,
writer: &mut T
) -> Result<u16, S3Error>
Stream range of bytes from S3 path to a local file, generic over T: AsyncWrite.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
use std::fs::File;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let mut output_file = File::create("output_file").expect("Unable to create file");
let mut async_output_file = tokio::fs::File::create("async_output_file").await.expect("Unable to create file");
#[cfg(feature = "with-async-std")]
let mut async_output_file = async_std::fs::File::create("async_output_file").await.expect("Unable to create file");
let start = 0;
let end = Some(1024);
// Async variant with `tokio` or `async-std` features
let status_code = bucket.get_object_range_to_writer("/test.file", start, end, &mut async_output_file).await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let status_code = bucket.get_object_range_to_writer("/test.file", start, end, &mut output_file)?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features. Based on the async branch
#[cfg(feature = "blocking")]
let status_code = bucket.get_object_range_to_writer_blocking("/test.file", start, end, &mut async_output_file)?;
sourcepub async fn get_object_to_writer<T: AsyncWrite + Send + Unpin, S: AsRef<str>>(
&self,
path: S,
writer: &mut T
) -> Result<u16, S3Error>
pub async fn get_object_to_writer<T: AsyncWrite + Send + Unpin, S: AsRef<str>>(
&self,
path: S,
writer: &mut T
) -> Result<u16, S3Error>
Stream file from S3 path to a local file, generic over T: AsyncWrite.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
use std::fs::File;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let mut output_file = File::create("output_file").expect("Unable to create file");
let mut async_output_file = tokio::fs::File::create("async_output_file").await.expect("Unable to create file");
#[cfg(feature = "with-async-std")]
let mut async_output_file = async_std::fs::File::create("async_output_file").await.expect("Unable to create file");
// Async variant with `tokio` or `async-std` features
let status_code = bucket.get_object_to_writer("/test.file", &mut async_output_file).await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let status_code = bucket.get_object_to_writer("/test.file", &mut output_file)?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features. Based on the async branch
#[cfg(feature = "blocking")]
let status_code = bucket.get_object_to_writer_blocking("/test.file", &mut async_output_file)?;
sourcepub async fn get_object_stream<S: AsRef<str>>(
&self,
path: S
) -> Result<ResponseDataStream, S3Error>
pub async fn get_object_stream<S: AsRef<str>>(
&self,
path: S
) -> Result<ResponseDataStream, S3Error>
Stream file from S3 path to a local file using an async stream.
Example
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
#[cfg(feature = "with-tokio")]
use tokio_stream::StreamExt;
#[cfg(feature = "with-tokio")]
use tokio::io::AsyncWriteExt;
#[cfg(feature = "with-async-std")]
use futures_util::StreamExt;
#[cfg(feature = "with-async-std")]
use futures_util::AsyncWriteExt;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let path = "path";
let mut response_data_stream = bucket.get_object_stream(path).await?;
#[cfg(feature = "with-tokio")]
let mut async_output_file = tokio::fs::File::create("async_output_file").await.expect("Unable to create file");
#[cfg(feature = "with-async-std")]
let mut async_output_file = async_std::fs::File::create("async_output_file").await.expect("Unable to create file");
while let Some(chunk) = response_data_stream.bytes().next().await {
async_output_file.write_all(&chunk).await?;
}
sourcepub async fn put_object_stream<R: AsyncRead + Unpin>(
&self,
reader: &mut R,
s3_path: impl AsRef<str>
) -> Result<u16, S3Error>
pub async fn put_object_stream<R: AsyncRead + Unpin>(
&self,
reader: &mut R,
s3_path: impl AsRef<str>
) -> Result<u16, S3Error>
Stream file from local path to s3, generic over R: AsyncRead.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
use std::fs::File;
use std::io::Write;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let path = "path";
let test: Vec<u8> = (0..1000).map(|_| 42).collect();
let mut file = File::create(path)?;
file.write_all(&test)?;
// Generic over std::io::Read
let status_code = bucket.put_object_stream(&mut file, "/path").await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
// Generic over std::io::Read
let status_code = bucket.put_object_stream(&mut file, "/path")?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let status_code = bucket.put_object_stream_blocking(&mut file, "/path")?;
sourcepub async fn put_object_stream_with_content_type<R: AsyncRead + Unpin>(
&self,
reader: &mut R,
s3_path: impl AsRef<str>,
content_type: impl AsRef<str>
) -> Result<u16, S3Error>
pub async fn put_object_stream_with_content_type<R: AsyncRead + Unpin>(
&self,
reader: &mut R,
s3_path: impl AsRef<str>,
content_type: impl AsRef<str>
) -> Result<u16, S3Error>
Stream file from local path to s3, generic over R: AsyncRead, with explicit content type.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
use std::fs::File;
use std::io::Write;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let path = "path";
let test: Vec<u8> = (0..1000).map(|_| 42).collect();
let mut file = File::create(path)?;
file.write_all(&test)?;
// Async variant with `tokio` or `async-std` features
// Generic over std::io::Read
let status_code = bucket
.put_object_stream_with_content_type(&mut file, "/path", "application/octet-stream")
.await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
// Generic over std::io::Read
let status_code = bucket
.put_object_stream_with_content_type(&mut file, "/path", "application/octet-stream")?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let status_code = bucket
.put_object_stream_with_content_type_blocking(&mut file, "/path", "application/octet-stream")?;
sourcepub async fn initiate_multipart_upload(
&self,
s3_path: &str,
content_type: &str
) -> Result<InitiateMultipartUploadResponse, S3Error>
pub async fn initiate_multipart_upload(
&self,
s3_path: &str,
content_type: &str
) -> Result<InitiateMultipartUploadResponse, S3Error>
Initiate multipart upload to s3.
sourcepub async fn put_multipart_stream<R: Read + Unpin>(
&self,
reader: &mut R,
path: &str,
part_number: u32,
upload_id: &str,
content_type: &str
) -> Result<Part, S3Error>
pub async fn put_multipart_stream<R: Read + Unpin>(
&self,
reader: &mut R,
path: &str,
part_number: u32,
upload_id: &str,
content_type: &str
) -> Result<Part, S3Error>
Upload a streamed multipart chunk to s3 using a previously initiated multipart upload
sourcepub async fn put_multipart_chunk(
&self,
chunk: Vec<u8>,
path: &str,
part_number: u32,
upload_id: &str,
content_type: &str
) -> Result<Part, S3Error>
pub async fn put_multipart_chunk(
&self,
chunk: Vec<u8>,
path: &str,
part_number: u32,
upload_id: &str,
content_type: &str
) -> Result<Part, S3Error>
Upload a buffered multipart chunk to s3 using a previously initiated multipart upload
sourcepub async fn complete_multipart_upload(
&self,
path: &str,
upload_id: &str,
parts: Vec<Part>
) -> Result<ResponseData, S3Error>
pub async fn complete_multipart_upload(
&self,
path: &str,
upload_id: &str,
parts: Vec<Part>
) -> Result<ResponseData, S3Error>
Completes a previously initiated multipart upload, with optional final data chunks
sourcepub async fn location(&self) -> Result<(Region, u16), S3Error>
pub async fn location(&self) -> Result<(Region, u16), S3Error>
Get Bucket location.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let (region, status_code) = bucket.location().await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let (region, status_code) = bucket.location()?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let (region, status_code) = bucket.location_blocking()?;
sourcepub async fn delete_object<S: AsRef<str>>(
&self,
path: S
) -> Result<ResponseData, S3Error>
pub async fn delete_object<S: AsRef<str>>(
&self,
path: S
) -> Result<ResponseData, S3Error>
Delete file from an S3 path.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let response_data = bucket.delete_object("/test.file").await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let response_data = bucket.delete_object("/test.file")?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let response_data = bucket.delete_object_blocking("/test.file")?;
sourcepub async fn head_object<S: AsRef<str>>(
&self,
path: S
) -> Result<(HeadObjectResult, u16), S3Error>
pub async fn head_object<S: AsRef<str>>(
&self,
path: S
) -> Result<(HeadObjectResult, u16), S3Error>
Head object from S3.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let (head_object_result, code) = bucket.head_object("/test.png").await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let (head_object_result, code) = bucket.head_object("/test.png")?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let (head_object_result, code) = bucket.head_object_blocking("/test.png")?;
sourcepub async fn put_object_with_content_type<S: AsRef<str>>(
&self,
path: S,
content: &[u8],
content_type: &str
) -> Result<ResponseData, S3Error>
pub async fn put_object_with_content_type<S: AsRef<str>>(
&self,
path: S,
content: &[u8],
content_type: &str
) -> Result<ResponseData, S3Error>
Put into an S3 bucket, with explicit content-type.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let content = "I want to go to S3".as_bytes();
// Async variant with `tokio` or `async-std` features
let response_data = bucket.put_object_with_content_type("/test.file", content, "text/plain").await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let response_data = bucket.put_object_with_content_type("/test.file", content, "text/plain")?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let response_data = bucket.put_object_with_content_type_blocking("/test.file", content, "text/plain")?;
sourcepub async fn put_object<S: AsRef<str>>(
&self,
path: S,
content: &[u8]
) -> Result<ResponseData, S3Error>
pub async fn put_object<S: AsRef<str>>(
&self,
path: S,
content: &[u8]
) -> Result<ResponseData, S3Error>
Put into an S3 bucket.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let content = "I want to go to S3".as_bytes();
// Async variant with `tokio` or `async-std` features
let response_data = bucket.put_object("/test.file", content).await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let response_data = bucket.put_object("/test.file", content)?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let response_data = bucket.put_object_blocking("/test.file", content)?;
sourcepub async fn put_object_tagging<S: AsRef<str>>(
&self,
path: &str,
tags: &[(S, S)]
) -> Result<ResponseData, S3Error>
pub async fn put_object_tagging<S: AsRef<str>>(
&self,
path: &str,
tags: &[(S, S)]
) -> Result<ResponseData, S3Error>
Tag an S3 object.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let response_data = bucket.put_object_tagging("/test.file", &[("Tag1", "Value1"), ("Tag2", "Value2")]).await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let response_data = bucket.put_object_tagging("/test.file", &[("Tag1", "Value1"), ("Tag2", "Value2")])?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let response_data = bucket.put_object_tagging_blocking("/test.file", &[("Tag1", "Value1"), ("Tag2", "Value2")])?;
sourcepub async fn delete_object_tagging<S: AsRef<str>>(
&self,
path: S
) -> Result<ResponseData, S3Error>
pub async fn delete_object_tagging<S: AsRef<str>>(
&self,
path: S
) -> Result<ResponseData, S3Error>
Delete tags from an S3 object.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let response_data = bucket.delete_object_tagging("/test.file").await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let response_data = bucket.delete_object_tagging("/test.file")?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let response_data = bucket.delete_object_tagging_blocking("/test.file")?;
sourcepub async fn get_object_tagging<S: AsRef<str>>(
&self,
path: S
) -> Result<(Vec<Tag>, u16), S3Error>
pub async fn get_object_tagging<S: AsRef<str>>(
&self,
path: S
) -> Result<(Vec<Tag>, u16), S3Error>
Retrieve an S3 object list of tags.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let response_data = bucket.get_object_tagging("/test.file").await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let response_data = bucket.get_object_tagging("/test.file")?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let response_data = bucket.get_object_tagging_blocking("/test.file")?;
pub async fn list_page(
&self,
prefix: String,
delimiter: Option<String>,
continuation_token: Option<String>,
start_after: Option<String>,
max_keys: Option<usize>
) -> Result<(ListBucketResult, u16), S3Error>
sourcepub async fn list(
&self,
prefix: String,
delimiter: Option<String>
) -> Result<Vec<ListBucketResult>, S3Error>
pub async fn list(
&self,
prefix: String,
delimiter: Option<String>
) -> Result<Vec<ListBucketResult>, S3Error>
List the contents of an S3 bucket.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let results = bucket.list("/".to_string(), Some("/".to_string())).await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let results = bucket.list("/".to_string(), Some("/".to_string()))?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let results = bucket.list_blocking("/".to_string(), Some("/".to_string()))?;
pub async fn list_multiparts_uploads_page(
&self,
prefix: Option<&str>,
delimiter: Option<&str>,
key_marker: Option<String>,
max_uploads: Option<usize>
) -> Result<(ListMultipartUploadsResult, u16), S3Error>
sourcepub async fn list_multiparts_uploads(
&self,
prefix: Option<&str>,
delimiter: Option<&str>
) -> Result<Vec<ListMultipartUploadsResult>, S3Error>
pub async fn list_multiparts_uploads(
&self,
prefix: Option<&str>,
delimiter: Option<&str>
) -> Result<Vec<ListMultipartUploadsResult>, S3Error>
List the ongoing multipart uploads of an S3 bucket. This may be useful to clean up failed
uploads, together with crate::bucket::Bucket::abort_upload
.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let results = bucket.list_multiparts_uploads(Some("/"), Some("/")).await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let results = bucket.list_multiparts_uploads(Some("/"), Some("/"))?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let results = bucket.list_multiparts_uploads_blocking(Some("/"), Some("/"))?;
sourcepub async fn abort_upload(
&self,
key: &str,
upload_id: &str
) -> Result<(), S3Error>
pub async fn abort_upload(
&self,
key: &str,
upload_id: &str
) -> Result<(), S3Error>
Abort a running multipart upload.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
// Async variant with `tokio` or `async-std` features
let results = bucket.abort_upload("/some/file.txt", "ZDFjM2I0YmEtMzU3ZC00OTQ1LTlkNGUtMTgxZThjYzIwNjA2").await?;
// `sync` feature will produce an identical method
#[cfg(feature = "sync")]
let results = bucket.abort_upload("/some/file.txt", "ZDFjM2I0YmEtMzU3ZC00OTQ1LTlkNGUtMTgxZThjYzIwNjA2")?;
// Blocking variant, generated with `blocking` feature in combination
// with `tokio` or `async-std` features.
#[cfg(feature = "blocking")]
let results = bucket.abort_upload_blocking("/some/file.txt", "ZDFjM2I0YmEtMzU3ZC00OTQ1LTlkNGUtMTgxZThjYzIwNjA2")?;
sourcepub fn is_path_style(&self) -> bool
pub fn is_path_style(&self) -> bool
Get path_style field of the Bucket struct
sourcepub fn is_subdomain_style(&self) -> bool
pub fn is_subdomain_style(&self) -> bool
Get the negation of the path_style field of the Bucket struct (i.e. whether subdomain style is in use)
sourcepub fn set_path_style(&mut self)
pub fn set_path_style(&mut self)
Configure bucket to use path-style urls and headers
sourcepub fn set_subdomain_style(&mut self)
pub fn set_subdomain_style(&mut self)
Configure bucket to use subdomain style urls and headers [default]
sourcepub fn set_request_timeout(&mut self, timeout: Option<Duration>)
pub fn set_request_timeout(&mut self, timeout: Option<Duration>)
Configure bucket to apply this request timeout to all HTTP
requests, or no (infinite) timeout if `None`. Defaults to
30 seconds.
Only the attohttpc and the Reqwest backends obey this option; async code may instead await with a timeout.
sourcepub fn set_listobjects_v1(&mut self)
pub fn set_listobjects_v1(&mut self)
Configure bucket to use the older ListObjects API
If your provider doesn’t support the ListObjectsV2 interface, set this to use the v1 ListObjects interface instead. This is currently needed at least for Google Cloud Storage.
sourcepub fn set_listobjects_v2(&mut self)
pub fn set_listobjects_v2(&mut self)
Configure bucket to use the newer ListObjectsV2 API
pub fn host(&self) -> String
pub fn url(&self) -> String
sourcepub fn path_style_host(&self) -> String
pub fn path_style_host(&self) -> String
Get a path-style reference to the hostname of the S3 API endpoint.
pub fn subdomain_style_host(&self) -> String
pub fn scheme(&self) -> String
sourcepub fn access_key(&self) -> Result<Option<String>, S3Error>
pub fn access_key(&self) -> Result<Option<String>, S3Error>
Get a reference to the AWS access key.
sourcepub fn secret_key(&self) -> Result<Option<String>, S3Error>
pub fn secret_key(&self) -> Result<Option<String>, S3Error>
Get a reference to the AWS secret key.
sourcepub fn security_token(&self) -> Result<Option<String>, S3Error>
pub fn security_token(&self) -> Result<Option<String>, S3Error>
Get a reference to the AWS security token.
sourcepub fn session_token(&self) -> Result<Option<String>, S3Error>
pub fn session_token(&self) -> Result<Option<String>, S3Error>
Get a reference to the AWS session token.
sourcepub fn credentials(&self) -> Arc<RwLock<Credentials>>
pub fn credentials(&self) -> Arc<RwLock<Credentials>>
Get a reference to the full Credentials
object used by this Bucket
.
sourcepub fn set_credentials(&mut self, credentials: Credentials)
pub fn set_credentials(&mut self, credentials: Credentials)
Change the credentials used by the Bucket.
sourcepub fn add_header(&mut self, key: &str, value: &str)
pub fn add_header(&mut self, key: &str, value: &str)
Add an extra header to send with requests to S3.
Add an extra header to send with requests. Note that the library already sets a number of headers - headers set with this method will be overridden by the library headers:
- Host
- Content-Type
- Date
- Content-Length
- Authorization
- X-Amz-Content-Sha256
- X-Amz-Date
sourcepub fn extra_headers(&self) -> &HeaderMap
pub fn extra_headers(&self) -> &HeaderMap
Get a reference to the extra headers to be passed to the S3 API.
sourcepub fn extra_headers_mut(&mut self) -> &mut HeaderMap
pub fn extra_headers_mut(&mut self) -> &mut HeaderMap
Get a mutable reference to the extra headers to be passed to the S3 API.
sourcepub fn add_query(&mut self, key: &str, value: &str)
pub fn add_query(&mut self, key: &str, value: &str)
Add an extra query pair to the URL used for S3 API access.
sourcepub fn extra_query(&self) -> &Query
pub fn extra_query(&self) -> &Query
Get a reference to the extra query pairs to be passed to the S3 API.
sourcepub fn extra_query_mut(&mut self) -> &mut Query
pub fn extra_query_mut(&mut self) -> &mut Query
Get a mutable reference to the extra query pairs to be passed to the S3 API.