aws_sdk_rekognition/operation/detect_faces/builders.rs
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
pub use crate::operation::detect_faces::_detect_faces_output::DetectFacesOutputBuilder;

pub use crate::operation::detect_faces::_detect_faces_input::DetectFacesInputBuilder;

impl crate::operation::detect_faces::builders::DetectFacesInputBuilder {
    /// Sends a request with this input using the given client.
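    ///
    /// A minimal sketch of building the input separately and sending it with an
    /// existing client; the bucket and object key are placeholders, and the
    /// `face_details` accessor is assumed from the generated output type.
    ///
    /// ```no_run
    /// # async fn example(client: &aws_sdk_rekognition::Client) -> Result<(), Box<dyn std::error::Error>> {
    /// use aws_sdk_rekognition::operation::detect_faces::DetectFacesInput;
    /// use aws_sdk_rekognition::types::{Image, S3Object};
    ///
    /// // Build the input first, then hand it to an already-configured client.
    /// let builder = DetectFacesInput::builder().image(
    ///     Image::builder()
    ///         .s3_object(S3Object::builder().bucket("my-bucket").name("photo.jpg").build())
    ///         .build(),
    /// );
    /// let output = builder.send_with(client).await?;
    /// println!("faces detected: {}", output.face_details().len());
    /// # Ok(())
    /// # }
    /// ```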
    pub async fn send_with(
        self,
        client: &crate::Client,
    ) -> ::std::result::Result<
        crate::operation::detect_faces::DetectFacesOutput,
        ::aws_smithy_runtime_api::client::result::SdkError<
            crate::operation::detect_faces::DetectFacesError,
            ::aws_smithy_runtime_api::client::orchestrator::HttpResponse,
        >,
    > {
        let mut fluent_builder = client.detect_faces();
        fluent_builder.inner = self;
        fluent_builder.send().await
    }
}
/// Fluent builder constructing a request to `DetectFaces`.
///
/// <p>Detects faces within an image that is provided as input.</p>
/// <p><code>DetectFaces</code> detects the 100 largest faces in the image. For each face detected, the operation returns face details. These details include a bounding box of the face, a confidence value indicating that the bounding box contains a face, and a fixed set of attributes such as facial landmarks (for example, coordinates of eye and mouth), pose, presence of facial occlusion, and so on.</p>
/// <p>The face-detection algorithm is most effective on frontal faces. For non-frontal or obscured faces, the algorithm might not detect the faces or might detect faces with lower confidence.</p>
/// <p>You pass the input image either as base64-encoded image bytes or as a reference to an image in an Amazon S3 bucket. If you use the AWS CLI to call Amazon Rekognition operations, passing image bytes is not supported. The image must be either a PNG or JPEG formatted file.</p><note>
/// <p>This is a stateless API operation. That is, the operation does not persist any data.</p>
/// </note>
/// <p>This operation requires permissions to perform the <code>rekognition:DetectFaces</code> action.</p>
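///
/// A minimal usage sketch; the S3 bucket/key are placeholders and the output accessors
/// (`face_details`, `confidence`) are assumed from the generated output types.
///
/// ```no_run
/// # async fn example(client: &aws_sdk_rekognition::Client) -> Result<(), Box<dyn std::error::Error>> {
/// use aws_sdk_rekognition::types::{Attribute, Image, S3Object};
///
/// let response = client
///     .detect_faces()
///     .image(
///         Image::builder()
///             .s3_object(S3Object::builder().bucket("my-bucket").name("photo.jpg").build())
///             .build(),
///     )
///     .attributes(Attribute::All)
///     .send()
///     .await?;
///
/// for face in response.face_details() {
///     println!("confidence: {:?}", face.confidence());
/// }
/// # Ok(())
/// # }
/// ```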
#[derive(::std::clone::Clone, ::std::fmt::Debug)]
pub struct DetectFacesFluentBuilder {
    handle: ::std::sync::Arc<crate::client::Handle>,
    inner: crate::operation::detect_faces::builders::DetectFacesInputBuilder,
    config_override: ::std::option::Option<crate::config::Builder>,
}
impl
    crate::client::customize::internal::CustomizableSend<
        crate::operation::detect_faces::DetectFacesOutput,
        crate::operation::detect_faces::DetectFacesError,
    > for DetectFacesFluentBuilder
{
    fn send(
        self,
        config_override: crate::config::Builder,
    ) -> crate::client::customize::internal::BoxFuture<
        crate::client::customize::internal::SendResult<
            crate::operation::detect_faces::DetectFacesOutput,
            crate::operation::detect_faces::DetectFacesError,
        >,
    > {
        ::std::boxed::Box::pin(async move { self.config_override(config_override).send().await })
    }
}
impl DetectFacesFluentBuilder {
    /// Creates a new `DetectFacesFluentBuilder`.
    pub(crate) fn new(handle: ::std::sync::Arc<crate::client::Handle>) -> Self {
        Self {
            handle,
            inner: ::std::default::Default::default(),
            config_override: ::std::option::Option::None,
        }
    }
    /// Access the DetectFaces input builder as a reference.
    pub fn as_input(&self) -> &crate::operation::detect_faces::builders::DetectFacesInputBuilder {
        &self.inner
    }
    /// Sends the request and returns the response.
    ///
    /// If an error occurs, an `SdkError` will be returned with additional details that
    /// can be matched against.
    ///
    /// By default, any retryable failures will be retried twice. Retry behavior
    /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
    /// set when configuring the client.
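    ///
    /// A sketch of matching on the returned error; `into_service_error` converts the
    /// `SdkError` into the modeled `DetectFacesError` (non-service failures become its
    /// unhandled variant), which is an assumption about the current runtime API.
    ///
    /// ```no_run
    /// # async fn example(client: &aws_sdk_rekognition::Client, image: aws_sdk_rekognition::types::Image) {
    /// match client.detect_faces().image(image).send().await {
    ///     Ok(output) => println!("detected {} faces", output.face_details().len()),
    ///     Err(err) => {
    ///         // Inspect the service-level error, e.g. to log or branch on its kind.
    ///         let service_err = err.into_service_error();
    ///         eprintln!("DetectFaces failed: {service_err}");
    ///     }
    /// }
    /// # }
    /// ```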
    pub async fn send(
        self,
    ) -> ::std::result::Result<
        crate::operation::detect_faces::DetectFacesOutput,
        ::aws_smithy_runtime_api::client::result::SdkError<
            crate::operation::detect_faces::DetectFacesError,
            ::aws_smithy_runtime_api::client::orchestrator::HttpResponse,
        >,
    > {
        let input = self
            .inner
            .build()
            .map_err(::aws_smithy_runtime_api::client::result::SdkError::construction_failure)?;
        let runtime_plugins = crate::operation::detect_faces::DetectFaces::operation_runtime_plugins(
            self.handle.runtime_plugins.clone(),
            &self.handle.conf,
            self.config_override,
        );
        crate::operation::detect_faces::DetectFaces::orchestrate(&runtime_plugins, input).await
    }

    /// Consumes this builder, creating a customizable operation that can be modified before being sent.
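    ///
    /// A sketch of a customization, assuming the `mutate_request` hook on
    /// `CustomizableOperation` in current SDK versions; the header name and value
    /// are placeholders.
    ///
    /// ```no_run
    /// # async fn example(client: &aws_sdk_rekognition::Client, image: aws_sdk_rekognition::types::Image) -> Result<(), Box<dyn std::error::Error>> {
    /// let output = client
    ///     .detect_faces()
    ///     .image(image)
    ///     .customize()
    ///     // Adjust the serialized HTTP request before it is dispatched.
    ///     .mutate_request(|req| {
    ///         req.headers_mut().insert("x-example-trace-id", "12345");
    ///     })
    ///     .send()
    ///     .await?;
    /// # let _ = output;
    /// # Ok(())
    /// # }
    /// ```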
    pub fn customize(
        self,
    ) -> crate::client::customize::CustomizableOperation<
        crate::operation::detect_faces::DetectFacesOutput,
        crate::operation::detect_faces::DetectFacesError,
        Self,
    > {
        crate::client::customize::CustomizableOperation::new(self)
    }
    pub(crate) fn config_override(mut self, config_override: impl ::std::convert::Into<crate::config::Builder>) -> Self {
        self.set_config_override(::std::option::Option::Some(config_override.into()));
        self
    }

    pub(crate) fn set_config_override(&mut self, config_override: ::std::option::Option<crate::config::Builder>) -> &mut Self {
        self.config_override = config_override;
        self
    }
    /// <p>The input image as base64-encoded bytes or an S3 object. If you use the AWS CLI to call Amazon Rekognition operations, passing base64-encoded image bytes is not supported.</p>
    /// <p>If you are using an AWS SDK to call Amazon Rekognition, you might not need to base64-encode image bytes passed using the <code>Bytes</code> field. For more information, see Images in the Amazon Rekognition developer guide.</p>
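    ///
    /// A sketch of the two ways to supply the image, assuming the `Image`/`S3Object`
    /// builders and the `Blob` primitive; the bucket and file names are placeholders.
    ///
    /// ```no_run
    /// # fn example() -> Result<(), Box<dyn std::error::Error>> {
    /// use aws_sdk_rekognition::primitives::Blob;
    /// use aws_sdk_rekognition::types::{Image, S3Object};
    ///
    /// // Reference an image stored in S3.
    /// let from_s3 = Image::builder()
    ///     .s3_object(S3Object::builder().bucket("my-bucket").name("photo.jpg").build())
    ///     .build();
    ///
    /// // Or pass raw image bytes; the SDK handles the base64 encoding on the wire.
    /// let from_bytes = Image::builder()
    ///     .bytes(Blob::new(std::fs::read("photo.jpg")?))
    ///     .build();
    /// # let _ = (from_s3, from_bytes);
    /// # Ok(())
    /// # }
    /// ```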
    pub fn image(mut self, input: crate::types::Image) -> Self {
        self.inner = self.inner.image(input);
        self
    }
    /// <p>The input image as base64-encoded bytes or an S3 object. If you use the AWS CLI to call Amazon Rekognition operations, passing base64-encoded image bytes is not supported.</p>
    /// <p>If you are using an AWS SDK to call Amazon Rekognition, you might not need to base64-encode image bytes passed using the <code>Bytes</code> field. For more information, see Images in the Amazon Rekognition developer guide.</p>
    pub fn set_image(mut self, input: ::std::option::Option<crate::types::Image>) -> Self {
        self.inner = self.inner.set_image(input);
        self
    }
    /// <p>The input image as base64-encoded bytes or an S3 object. If you use the AWS CLI to call Amazon Rekognition operations, passing base64-encoded image bytes is not supported.</p>
    /// <p>If you are using an AWS SDK to call Amazon Rekognition, you might not need to base64-encode image bytes passed using the <code>Bytes</code> field. For more information, see Images in the Amazon Rekognition developer guide.</p>
    pub fn get_image(&self) -> &::std::option::Option<crate::types::Image> {
        self.inner.get_image()
    }
    ///
    /// Appends an item to `Attributes`.
    ///
    /// To override the contents of this collection use [`set_attributes`](Self::set_attributes).
    ///
    /// <p>An array of facial attributes you want to be returned. A DEFAULT subset of facial attributes - BoundingBox, Confidence, Pose, Quality, and Landmarks - will always be returned. You can request specific facial attributes (in addition to the default list) by using \["DEFAULT", "FACE_OCCLUDED"\] or just \["FACE_OCCLUDED"\]. You can request all facial attributes by using \["ALL"\]. Requesting more attributes may increase response time.</p>
    /// <p>If you provide both <code>"ALL"</code> and <code>"DEFAULT"</code>, the service uses a logical "AND" operator to determine which attributes to return (in this case, all attributes).</p>
    /// <p>Note that while the FaceOccluded and EyeDirection attributes are supported when using <code>DetectFaces</code>, they aren't supported when analyzing videos with <code>StartFaceDetection</code> and <code>GetFaceDetection</code>.</p>
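    ///
    /// A sketch of requesting extra attributes; each call to `attributes` appends one
    /// item, and the `Attribute::Default`/`Attribute::FaceOccluded` variant names are
    /// assumed from the generated enum.
    ///
    /// ```no_run
    /// # async fn example(client: &aws_sdk_rekognition::Client, image: aws_sdk_rekognition::types::Image) -> Result<(), Box<dyn std::error::Error>> {
    /// use aws_sdk_rekognition::types::Attribute;
    ///
    /// let output = client
    ///     .detect_faces()
    ///     .image(image)
    ///     .attributes(Attribute::Default)
    ///     .attributes(Attribute::FaceOccluded)
    ///     .send()
    ///     .await?;
    /// # let _ = output;
    /// # Ok(())
    /// # }
    /// ```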
    pub fn attributes(mut self, input: crate::types::Attribute) -> Self {
        self.inner = self.inner.attributes(input);
        self
    }
    /// <p>An array of facial attributes you want to be returned. A DEFAULT subset of facial attributes - BoundingBox, Confidence, Pose, Quality, and Landmarks - will always be returned. You can request specific facial attributes (in addition to the default list) by using \["DEFAULT", "FACE_OCCLUDED"\] or just \["FACE_OCCLUDED"\]. You can request all facial attributes by using \["ALL"\]. Requesting more attributes may increase response time.</p>
    /// <p>If you provide both <code>"ALL"</code> and <code>"DEFAULT"</code>, the service uses a logical "AND" operator to determine which attributes to return (in this case, all attributes).</p>
    /// <p>Note that while the FaceOccluded and EyeDirection attributes are supported when using <code>DetectFaces</code>, they aren't supported when analyzing videos with <code>StartFaceDetection</code> and <code>GetFaceDetection</code>.</p>
    pub fn set_attributes(mut self, input: ::std::option::Option<::std::vec::Vec<crate::types::Attribute>>) -> Self {
        self.inner = self.inner.set_attributes(input);
        self
    }
    /// <p>An array of facial attributes you want to be returned. A DEFAULT subset of facial attributes - BoundingBox, Confidence, Pose, Quality, and Landmarks - will always be returned. You can request specific facial attributes (in addition to the default list) by using \["DEFAULT", "FACE_OCCLUDED"\] or just \["FACE_OCCLUDED"\]. You can request all facial attributes by using \["ALL"\]. Requesting more attributes may increase response time.</p>
    /// <p>If you provide both <code>"ALL"</code> and <code>"DEFAULT"</code>, the service uses a logical "AND" operator to determine which attributes to return (in this case, all attributes).</p>
    /// <p>Note that while the FaceOccluded and EyeDirection attributes are supported when using <code>DetectFaces</code>, they aren't supported when analyzing videos with <code>StartFaceDetection</code> and <code>GetFaceDetection</code>.</p>
    pub fn get_attributes(&self) -> &::std::option::Option<::std::vec::Vec<crate::types::Attribute>> {
        self.inner.get_attributes()
    }
}