pub struct Bucket {
pub name: String,
pub region: Region,
pub credentials: Arc<RwLock<Credentials>>,
pub extra_headers: HeaderMap,
pub extra_query: Query,
pub request_timeout: Option<Duration>,
/* private fields */
}
Expand description
Instantiate an existing Bucket
Example
use s3::bucket::Bucket;
use s3::creds::Credentials;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let credentials = Credentials::default().unwrap();
let bucket = Bucket::new(bucket_name, region, credentials).unwrap();
Fields§
§name: String
§region: Region
§credentials: Arc<RwLock<Credentials>>
§extra_headers: HeaderMap
§extra_query: Query
§request_timeout: Option<Duration>
Implementations§
source§impl Bucket
impl Bucket
sourcepub fn presign_get<S: AsRef<str>>(
&self,
path: S,
expiry_secs: u32,
custom_queries: Option<HashMap<String, String>>
) -> Result<String, S3Error>
pub fn presign_get<S: AsRef<str>>( &self, path: S, expiry_secs: u32, custom_queries: Option<HashMap<String, String>> ) -> Result<String, S3Error>
Get a presigned url for getting an object on a given path
Example:
use std::collections::HashMap;
use s3::bucket::Bucket;
use s3::creds::Credentials;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let credentials = Credentials::default().unwrap();
let bucket = Bucket::new(bucket_name, region, credentials).unwrap();
// Add optional custom queries
let mut custom_queries = HashMap::new();
custom_queries.insert(
"response-content-disposition".into(),
"attachment; filename=\"test.png\"".into(),
);
let url = bucket.presign_get("/test.file", 86400, Some(custom_queries)).unwrap();
println!("Presigned url: {}", url);
sourcepub fn presign_post<S: AsRef<str>>(
&self,
path: S,
expiry_secs: u32,
post_policy: String
) -> Result<String, S3Error>
pub fn presign_post<S: AsRef<str>>( &self, path: S, expiry_secs: u32, post_policy: String ) -> Result<String, S3Error>
Get a presigned url for posting an object to a given path
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use http::HeaderMap;
use http::header::HeaderName;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let credentials = Credentials::default().unwrap();
let bucket = Bucket::new(bucket_name, region, credentials).unwrap();
let post_policy = "eyAiZXhwaXJhdGlvbiI6ICIyMDE1LTEyLTMwVDEyOjAwOjAwLjAwMFoiLA0KICAiY29uZGl0aW9ucyI6IFsNCiAgICB7ImJ1Y2tldCI6ICJzaWd2NGV4YW1wbGVidWNrZXQifSwNCiAgICBbInN0YXJ0cy13aXRoIiwgIiRrZXkiLCAidXNlci91c2VyMS8iXSwNCiAgICB7ImFjbCI6ICJwdWJsaWMtcmVhZCJ9LA0KICAgIHsic3VjY2Vzc19hY3Rpb25fcmVkaXJlY3QiOiAiaHR0cDovL3NpZ3Y0ZXhhbXBsZWJ1Y2tldC5zMy5hbWF6b25hd3MuY29tL3N1Y2Nlc3NmdWxfdXBsb2FkLmh0bWwifSwNCiAgICBbInN0YXJ0cy13aXRoIiwgIiRDb250ZW50LVR5cGUiLCAiaW1hZ2UvIl0sDQogICAgeyJ4LWFtei1tZXRhLXV1aWQiOiAiMTQzNjUxMjM2NTEyNzQifSwNCiAgICB7IngtYW16LXNlcnZlci1zaWRlLWVuY3J5cHRpb24iOiAiQUVTMjU2In0sDQogICAgWyJzdGFydHMtd2l0aCIsICIkeC1hbXotbWV0YS10YWciLCAiIl0sDQoNCiAgICB7IngtYW16LWNyZWRlbnRpYWwiOiAiQUtJQUlPU0ZPRE5ON0VYQU1QTEUvMjAxNTEyMjkvdXMtZWFzdC0xL3MzL2F3czRfcmVxdWVzdCJ9LA0KICAgIHsieC1hbXotYWxnb3JpdGhtIjogIkFXUzQtSE1BQy1TSEEyNTYifSwNCiAgICB7IngtYW16LWRhdGUiOiAiMjAxNTEyMjlUMDAwMDAwWiIgfQ0KICBdDQp9";
let url = bucket.presign_post("/test.file", 86400, post_policy.to_string()).unwrap();
println!("Presigned url: {}", url);
sourcepub fn presign_put<S: AsRef<str>>(
&self,
path: S,
expiry_secs: u32,
custom_headers: Option<HeaderMap>
) -> Result<String, S3Error>
pub fn presign_put<S: AsRef<str>>( &self, path: S, expiry_secs: u32, custom_headers: Option<HeaderMap> ) -> Result<String, S3Error>
Get a presigned url for putting an object to a given path
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use http::HeaderMap;
use http::header::HeaderName;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let credentials = Credentials::default().unwrap();
let bucket = Bucket::new(bucket_name, region, credentials).unwrap();
// Add optional custom headers
let mut custom_headers = HeaderMap::new();
custom_headers.insert(
HeaderName::from_static("custom_header"),
"custom_value".parse().unwrap(),
);
let url = bucket.presign_put("/test.file", 86400, Some(custom_headers)).unwrap();
println!("Presigned url: {}", url);
sourcepub fn presign_delete<S: AsRef<str>>(
&self,
path: S,
expiry_secs: u32
) -> Result<String, S3Error>
pub fn presign_delete<S: AsRef<str>>( &self, path: S, expiry_secs: u32 ) -> Result<String, S3Error>
Get a presigned url for deleting an object on a given path
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let credentials = Credentials::default().unwrap();
let bucket = Bucket::new(bucket_name, region, credentials).unwrap();
let url = bucket.presign_delete("/test.file", 86400).unwrap();
println!("Presigned url: {}", url);
source§impl Bucket
impl Bucket
sourcepub async fn create(
name: &str,
region: Region,
credentials: Credentials,
config: BucketConfiguration
) -> Result<CreateBucketResponse, S3Error>
pub async fn create( name: &str, region: Region, credentials: Credentials, config: BucketConfiguration ) -> Result<CreateBucketResponse, S3Error>
Create a new Bucket
and instantiate it
use s3::{Bucket, BucketConfiguration};
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let config = BucketConfiguration::default();
let create_bucket_response = Bucket::create(bucket_name, region, credentials, config).await?;
sourcepub async fn create_with_path_style(
name: &str,
region: Region,
credentials: Credentials,
config: BucketConfiguration
) -> Result<CreateBucketResponse, S3Error>
pub async fn create_with_path_style( name: &str, region: Region, credentials: Credentials, config: BucketConfiguration ) -> Result<CreateBucketResponse, S3Error>
Create a new Bucket
with path style and instantiate it
use s3::{Bucket, BucketConfiguration};
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let config = BucketConfiguration::default();
let create_bucket_response = Bucket::create_with_path_style(bucket_name, region, credentials, config).await?;
sourcepub fn new(
name: &str,
region: Region,
credentials: Credentials
) -> Result<Bucket, S3Error>
pub fn new( name: &str, region: Region, credentials: Credentials ) -> Result<Bucket, S3Error>
Instantiate an existing Bucket
.
Example
use s3::bucket::Bucket;
use s3::creds::Credentials;
// NOTE: Credentials::default() loads real credentials from the environment/profile;
// in tests, construct fake credentials explicitly instead.
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let credentials = Credentials::default().unwrap();
let bucket = Bucket::new(bucket_name, region, credentials).unwrap();
source§impl Bucket
impl Bucket
sourcepub async fn list_buckets(
region: Region,
credentials: Credentials
) -> Result<ListBucketsResponse, S3Error>
pub async fn list_buckets( region: Region, credentials: Credentials ) -> Result<ListBucketsResponse, S3Error>
Get a list of all existing buckets in the region that are accessible by the given credentials.
use s3::{Bucket, BucketConfiguration};
use s3::creds::Credentials;
use s3::region::Region;
use anyhow::Result;
let region = Region::Custom {
region: "eu-central-1".to_owned(),
endpoint: "http://localhost:9000".to_owned()
};
let credentials = Credentials::default()?;
let response = Bucket::list_buckets(region, credentials).await?;
let found_buckets = response.bucket_names().collect::<Vec<String>>();
println!("found buckets: {:#?}", found_buckets);
sourcepub async fn exists(&self) -> Result<bool, S3Error>
pub async fn exists(&self) -> Result<bool, S3Error>
Determine whether the instantiated bucket exists.
use s3::{Bucket, BucketConfiguration};
use s3::creds::Credentials;
use s3::region::Region;
use anyhow::Result;
let bucket_name = "some-bucket-that-is-known-to-exist";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let exists = bucket.exists().await?;
assert_eq!(exists, true);
pub async fn list_page( &self, prefix: String, delimiter: Option<String>, continuation_token: Option<String>, start_after: Option<String>, max_keys: Option<usize> ) -> Result<(ListBucketResult, u16), S3Error>
sourcepub async fn list(
&self,
prefix: String,
delimiter: Option<String>
) -> Result<Vec<ListBucketResult>, S3Error>
pub async fn list( &self, prefix: String, delimiter: Option<String> ) -> Result<Vec<ListBucketResult>, S3Error>
List the contents of an S3 bucket.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let results = bucket.list("/".to_string(), Some("/".to_string())).await?;
pub async fn list_multiparts_uploads_page( &self, prefix: Option<&str>, delimiter: Option<&str>, key_marker: Option<String>, max_uploads: Option<usize> ) -> Result<(ListMultipartUploadsResult, u16), S3Error>
sourcepub async fn list_multiparts_uploads(
&self,
prefix: Option<&str>,
delimiter: Option<&str>
) -> Result<Vec<ListMultipartUploadsResult>, S3Error>
pub async fn list_multiparts_uploads( &self, prefix: Option<&str>, delimiter: Option<&str> ) -> Result<Vec<ListMultipartUploadsResult>, S3Error>
List the ongoing multipart uploads of an S3 bucket. This may be useful to clean up failed
uploads, together with crate::bucket::Bucket::abort_upload
.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let results = bucket.list_multiparts_uploads(Some("/"), Some("/")).await?;
source§impl Bucket
impl Bucket
sourcepub async fn delete(&self) -> Result<u16, S3Error>
pub async fn delete(&self) -> Result<u16, S3Error>
Delete existing Bucket
Example
use s3::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse().unwrap();
let credentials = Credentials::default().unwrap();
let bucket = Bucket::new(bucket_name, region, credentials).unwrap();
bucket.delete().await.unwrap();
sourcepub async fn delete_object<S: AsRef<str>>(
&self,
path: S
) -> Result<ResponseData, S3Error>
pub async fn delete_object<S: AsRef<str>>( &self, path: S ) -> Result<ResponseData, S3Error>
Delete file from an S3 path.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let response_data = bucket.delete_object("/test.file").await?;
sourcepub async fn delete_object_tagging<S: AsRef<str>>(
&self,
path: S
) -> Result<ResponseData, S3Error>
pub async fn delete_object_tagging<S: AsRef<str>>( &self, path: S ) -> Result<ResponseData, S3Error>
Delete tags from an S3 object.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let response_data = bucket.delete_object_tagging("/test.file").await?;
source§impl Bucket
impl Bucket
sourcepub async fn copy_object_internal<F: AsRef<str>, T: AsRef<str>>(
&self,
from: F,
to: T
) -> Result<u16, S3Error>
pub async fn copy_object_internal<F: AsRef<str>, T: AsRef<str>>( &self, from: F, to: T ) -> Result<u16, S3Error>
Copy file from an S3 path, internally within the same bucket.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let code = bucket.copy_object_internal("/from.file", "/to.file").await?;
source§impl Bucket
impl Bucket
sourcepub async fn get_object<S: AsRef<str>>(
&self,
path: S
) -> Result<ResponseData, S3Error>
pub async fn get_object<S: AsRef<str>>( &self, path: S ) -> Result<ResponseData, S3Error>
Gets file from an S3 path.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let response_data = bucket.get_object("/test.file").await?;
sourcepub async fn get_object_torrent<S: AsRef<str>>(
&self,
path: S
) -> Result<ResponseData, S3Error>
pub async fn get_object_torrent<S: AsRef<str>>( &self, path: S ) -> Result<ResponseData, S3Error>
Gets torrent from an S3 path.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let response_data = bucket.get_object_torrent("/test.file").await?;
sourcepub async fn get_object_range<S: AsRef<str>>(
&self,
path: S,
start: u64,
end: Option<u64>
) -> Result<ResponseData, S3Error>
pub async fn get_object_range<S: AsRef<str>>( &self, path: S, start: u64, end: Option<u64> ) -> Result<ResponseData, S3Error>
Gets specified inclusive byte range of file from an S3 path.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let response_data = bucket.get_object_range("/test.file", 0, Some(31)).await?;
sourcepub async fn get_object_range_to_writer<T: AsyncWrite + Send + Unpin, S: AsRef<str>>(
&self,
path: S,
start: u64,
end: Option<u64>,
writer: &mut T
) -> Result<u16, S3Error>
pub async fn get_object_range_to_writer<T: AsyncWrite + Send + Unpin, S: AsRef<str>>( &self, path: S, start: u64, end: Option<u64>, writer: &mut T ) -> Result<u16, S3Error>
Stream range of bytes from S3 path to a local file, generic over T: AsyncWrite.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
use std::fs::File;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let mut output_file = File::create("output_file").expect("Unable to create file");
let mut async_output_file = tokio::fs::File::create("async_output_file").await.expect("Unable to create file");
#[cfg(feature = "with-async-std")]
let mut async_output_file = async_std::fs::File::create("async_output_file").await.expect("Unable to create file");
let start = 0;
let end = Some(1024);
let status_code = bucket.get_object_range_to_writer("/test.file", start, end, &mut async_output_file).await?;
sourcepub async fn get_object_to_writer<T: AsyncWrite + Send + Unpin, S: AsRef<str>>(
&self,
path: S,
writer: &mut T
) -> Result<u16, S3Error>
pub async fn get_object_to_writer<T: AsyncWrite + Send + Unpin, S: AsRef<str>>( &self, path: S, writer: &mut T ) -> Result<u16, S3Error>
Stream file from S3 path to a local file, generic over T: AsyncWrite.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
use std::fs::File;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let mut output_file = File::create("output_file").expect("Unable to create file");
let mut async_output_file = tokio::fs::File::create("async_output_file").await.expect("Unable to create file");
#[cfg(feature = "with-async-std")]
let mut async_output_file = async_std::fs::File::create("async_output_file").await.expect("Unable to create file");
let status_code = bucket.get_object_to_writer("/test.file", &mut async_output_file).await?;
sourcepub async fn get_object_stream<S: AsRef<str>>(
&self,
path: S
) -> Result<ResponseDataStream, S3Error>
pub async fn get_object_stream<S: AsRef<str>>( &self, path: S ) -> Result<ResponseDataStream, S3Error>
Stream file from S3 path to a local file using an async stream.
Example
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
use tokio_stream::StreamExt;
use tokio::io::AsyncWriteExt;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let path = "path";
let mut response_data_stream = bucket.get_object_stream(path).await?;
let mut async_output_file = tokio::fs::File::create("async_output_file").await.expect("Unable to create file");
while let Some(chunk) = response_data_stream.bytes().next().await {
async_output_file.write_all(&chunk.unwrap()).await?;
}
sourcepub async fn get_object_tagging<S: AsRef<str>>(
&self,
path: S
) -> Result<(Vec<Tag>, u16), S3Error>
pub async fn get_object_tagging<S: AsRef<str>>( &self, path: S ) -> Result<(Vec<Tag>, u16), S3Error>
Retrieve an S3 object list of tags.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let response_data = bucket.get_object_tagging("/test.file").await?;
source§impl Bucket
impl Bucket
pub async fn put_bucket_cors( &self, cors_config: CorsConfiguration ) -> Result<ResponseData, S3Error>
sourcepub async fn put_object_stream<R: AsyncRead + Unpin>(
&self,
reader: &mut R,
s3_path: impl AsRef<str>
) -> Result<PutStreamResponse, S3Error>
pub async fn put_object_stream<R: AsyncRead + Unpin>( &self, reader: &mut R, s3_path: impl AsRef<str> ) -> Result<PutStreamResponse, S3Error>
Stream file from a local source to S3, generic over R: AsyncRead.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
use std::fs::File;
use std::io::Write;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let path = "path";
let test: Vec<u8> = (0..1000).map(|_| 42).collect();
let mut file = File::create(path)?;
// tokio open file
let mut async_output_file = tokio::fs::File::create("async_output_file").await.expect("Unable to create file");
file.write_all(&test)?;
let status_code = bucket.put_object_stream(&mut async_output_file, "/path").await?;
sourcepub async fn put_object_stream_with_content_type<R: AsyncRead + Unpin>(
&self,
reader: &mut R,
s3_path: impl AsRef<str>,
content_type: impl AsRef<str>
) -> Result<PutStreamResponse, S3Error>
pub async fn put_object_stream_with_content_type<R: AsyncRead + Unpin>( &self, reader: &mut R, s3_path: impl AsRef<str>, content_type: impl AsRef<str> ) -> Result<PutStreamResponse, S3Error>
Stream file from a local source to S3, generic over R: AsyncRead, with explicit content type.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
use std::fs::File;
use std::io::Write;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let path = "path";
let test: Vec<u8> = (0..1000).map(|_| 42).collect();
let mut file = File::create(path)?;
file.write_all(&test)?;
let mut async_output_file = tokio::fs::File::create("async_output_file").await.expect("Unable to create file");
let status_code = bucket
.put_object_stream_with_content_type(&mut async_output_file, "/path", "application/octet-stream")
.await?;
sourcepub async fn initiate_multipart_upload(
&self,
s3_path: &str,
content_type: &str
) -> Result<InitiateMultipartUploadResponse, S3Error>
pub async fn initiate_multipart_upload( &self, s3_path: &str, content_type: &str ) -> Result<InitiateMultipartUploadResponse, S3Error>
Initiate multipart upload to s3.
sourcepub async fn put_multipart_stream<R: Read + Unpin>(
&self,
reader: &mut R,
path: &str,
part_number: u32,
upload_id: &str,
content_type: &str
) -> Result<Part, S3Error>
pub async fn put_multipart_stream<R: Read + Unpin>( &self, reader: &mut R, path: &str, part_number: u32, upload_id: &str, content_type: &str ) -> Result<Part, S3Error>
Upload a streamed multipart chunk to s3 using a previously initiated multipart upload
sourcepub async fn put_multipart_chunk(
&self,
chunk: Vec<u8>,
path: &str,
part_number: u32,
upload_id: &str,
content_type: &str
) -> Result<Part, S3Error>
pub async fn put_multipart_chunk( &self, chunk: Vec<u8>, path: &str, part_number: u32, upload_id: &str, content_type: &str ) -> Result<Part, S3Error>
Upload a buffered multipart chunk to s3 using a previously initiated multipart upload
sourcepub async fn complete_multipart_upload(
&self,
path: &str,
upload_id: &str,
parts: Vec<Part>
) -> Result<ResponseData, S3Error>
pub async fn complete_multipart_upload( &self, path: &str, upload_id: &str, parts: Vec<Part> ) -> Result<ResponseData, S3Error>
Completes a previously initiated multipart upload, with optional final data chunks
sourcepub async fn put_object_with_content_type<S: AsRef<str>>(
&self,
path: S,
content: &[u8],
content_type: &str
) -> Result<ResponseData, S3Error>
pub async fn put_object_with_content_type<S: AsRef<str>>( &self, path: S, content: &[u8], content_type: &str ) -> Result<ResponseData, S3Error>
Put into an S3 bucket, with explicit content-type.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let content = "I want to go to S3".as_bytes();
let response_data = bucket.put_object_with_content_type("/test.file", content, "text/plain").await?;
sourcepub async fn put_object<S: AsRef<str>>(
&self,
path: S,
content: &[u8]
) -> Result<ResponseData, S3Error>
pub async fn put_object<S: AsRef<str>>( &self, path: S, content: &[u8] ) -> Result<ResponseData, S3Error>
Put into an S3 bucket.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let content = "I want to go to S3".as_bytes();
let response_data = bucket.put_object("/test.file", content).await?;
sourcepub async fn put_object_tagging<S: AsRef<str>>(
&self,
path: &str,
tags: &[(S, S)]
) -> Result<ResponseData, S3Error>
pub async fn put_object_tagging<S: AsRef<str>>( &self, path: &str, tags: &[(S, S)] ) -> Result<ResponseData, S3Error>
Tag an S3 object.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let response_data = bucket.put_object_tagging("/test.file", &[("Tag1", "Value1"), ("Tag2", "Value2")]).await?;
sourcepub async fn abort_upload(
&self,
key: &str,
upload_id: &str
) -> Result<(), S3Error>
pub async fn abort_upload( &self, key: &str, upload_id: &str ) -> Result<(), S3Error>
Abort a running multipart upload.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let results = bucket.abort_upload("/some/file.txt", "ZDFjM2I0YmEtMzU3ZC00OTQ1LTlkNGUtMTgxZThjYzIwNjA2").await?;
source§impl Bucket
impl Bucket
sourcepub async fn head_object<S: AsRef<str>>(
&self,
path: S
) -> Result<(HeadObjectResult, u16), S3Error>
pub async fn head_object<S: AsRef<str>>( &self, path: S ) -> Result<(HeadObjectResult, u16), S3Error>
Head object from S3.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let (head_object_result, code) = bucket.head_object("/test.png").await?;
source§impl Bucket
impl Bucket
sourcepub fn is_path_style(&self) -> bool
pub fn is_path_style(&self) -> bool
Get path_style field of the Bucket struct
sourcepub fn is_subdomain_style(&self) -> bool
pub fn is_subdomain_style(&self) -> bool
Get negated path_style field of the Bucket struct
sourcepub fn set_path_style(&mut self)
pub fn set_path_style(&mut self)
Configure bucket to use path-style urls and headers
sourcepub fn set_subdomain_style(&mut self)
pub fn set_subdomain_style(&mut self)
Configure bucket to use subdomain style urls and headers [default]
sourcepub fn set_request_timeout(&mut self, timeout: Option<Duration>)
pub fn set_request_timeout(&mut self, timeout: Option<Duration>)
Configure bucket to apply this request timeout to all HTTP
requests, or no (infinite) timeout if None. Defaults to
30 seconds.
Only the hyper
backend obeys this option;
async code may instead await with a timeout.
sourcepub fn set_listobjects_v1(&mut self)
pub fn set_listobjects_v1(&mut self)
Configure bucket to use the older ListObjects API
If your provider doesn’t support the ListObjectsV2 interface, set this to use the v1 ListObjects interface instead. This is currently needed at least for Google Cloud Storage.
sourcepub fn set_listobjects_v2(&mut self)
pub fn set_listobjects_v2(&mut self)
Configure bucket to use the newer ListObjectsV2 API
pub fn host(&self) -> String
pub fn url(&self) -> String
sourcepub fn path_style_host(&self) -> String
pub fn path_style_host(&self) -> String
Get a path-style reference to the hostname of the S3 API endpoint.
pub fn subdomain_style_host(&self) -> String
pub fn scheme(&self) -> String
sourcepub fn access_key(&self) -> Result<Option<String>, S3Error>
pub fn access_key(&self) -> Result<Option<String>, S3Error>
Get a reference to the AWS access key.
sourcepub fn secret_key(&self) -> Result<Option<String>, S3Error>
pub fn secret_key(&self) -> Result<Option<String>, S3Error>
Get a reference to the AWS secret key.
sourcepub fn security_token(&self) -> Result<Option<String>, S3Error>
pub fn security_token(&self) -> Result<Option<String>, S3Error>
Get a reference to the AWS security token.
sourcepub fn session_token(&self) -> Result<Option<String>, S3Error>
pub fn session_token(&self) -> Result<Option<String>, S3Error>
Get a reference to the AWS session token.
sourcepub fn credentials(&self) -> Arc<RwLock<Credentials>>
pub fn credentials(&self) -> Arc<RwLock<Credentials>>
Get a reference to the full Credentials
object used by this Bucket
.
sourcepub fn set_credentials(&mut self, credentials: Credentials)
pub fn set_credentials(&mut self, credentials: Credentials)
Change the credentials used by the Bucket.
sourcepub fn add_header(&mut self, key: &str, value: &str)
pub fn add_header(&mut self, key: &str, value: &str)
Add an extra header to send with requests to S3.
Add an extra header to send with requests. Note that the library already sets a number of headers - headers set with this method will be overridden by the library headers:
- Host
- Content-Type
- Date
- Content-Length
- Authorization
- X-Amz-Content-Sha256
- X-Amz-Date
sourcepub fn extra_headers(&self) -> &HeaderMap
pub fn extra_headers(&self) -> &HeaderMap
Get a reference to the extra headers to be passed to the S3 API.
sourcepub fn extra_headers_mut(&mut self) -> &mut HeaderMap
pub fn extra_headers_mut(&mut self) -> &mut HeaderMap
Get a mutable reference to the extra headers to be passed to the S3 API.
sourcepub fn add_query(&mut self, key: &str, value: &str)
pub fn add_query(&mut self, key: &str, value: &str)
Add an extra query pair to the URL used for S3 API access.
sourcepub fn extra_query(&self) -> &Query
pub fn extra_query(&self) -> &Query
Get a reference to the extra query pairs to be passed to the S3 API.
sourcepub fn extra_query_mut(&mut self) -> &mut Query
pub fn extra_query_mut(&mut self) -> &mut Query
Get a mutable reference to the extra query pairs to be passed to the S3 API.
pub fn request_timeout(&self) -> Option<Duration>
sourcepub async fn location(&self) -> Result<(Region, u16), S3Error>
pub async fn location(&self) -> Result<(Region, u16), S3Error>
Get Bucket location.
Example:
use s3::bucket::Bucket;
use s3::creds::Credentials;
use anyhow::Result;
let bucket_name = "rust-s3-test";
let region = "us-east-1".parse()?;
let credentials = Credentials::default()?;
let bucket = Bucket::new(bucket_name, region, credentials)?;
let (region, status_code) = bucket.location().await?;