aws_sdk_rekognition/operation/start_text_detection/builders.rs
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
pub use crate::operation::start_text_detection::_start_text_detection_output::StartTextDetectionOutputBuilder;

pub use crate::operation::start_text_detection::_start_text_detection_input::StartTextDetectionInputBuilder;

impl crate::operation::start_text_detection::builders::StartTextDetectionInputBuilder {
    /// Sends a request with this input using the given client.
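    ///
    /// A minimal usage sketch (the bucket and object key below are placeholders): build the
    /// input separately, then send it with an existing client.
    ///
    /// ```no_run
    /// # async fn example(client: &aws_sdk_rekognition::Client) -> Result<(), aws_sdk_rekognition::Error> {
    /// use aws_sdk_rekognition::operation::start_text_detection::StartTextDetectionInput;
    /// use aws_sdk_rekognition::types::{S3Object, Video};
    ///
    /// // "my-bucket" and "my-video.mp4" are placeholder values.
    /// let input_builder = StartTextDetectionInput::builder().video(
    ///     Video::builder()
    ///         .s3_object(S3Object::builder().bucket("my-bucket").name("my-video.mp4").build())
    ///         .build(),
    /// );
    /// let output = input_builder.send_with(client).await?;
    /// println!("JobId: {:?}", output.job_id());
    /// # Ok(())
    /// # }
    /// ```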
    pub async fn send_with(
        self,
        client: &crate::Client,
    ) -> ::std::result::Result<
        crate::operation::start_text_detection::StartTextDetectionOutput,
        ::aws_smithy_runtime_api::client::result::SdkError<
            crate::operation::start_text_detection::StartTextDetectionError,
            ::aws_smithy_runtime_api::client::orchestrator::HttpResponse,
        >,
    > {
        let mut fluent_builder = client.start_text_detection();
        fluent_builder.inner = self;
        fluent_builder.send().await
    }
}
/// Fluent builder constructing a request to `StartTextDetection`.
///
/// <p>Starts asynchronous detection of text in a stored video.</p>
/// <p>Amazon Rekognition Video can detect text in a video stored in an Amazon S3 bucket. Use <code>Video</code> to specify the bucket name and the filename of the video. <code>StartTextDetection</code> returns a job identifier (<code>JobId</code>) which you use to get the results of the operation. When text detection is finished, Amazon Rekognition Video publishes a completion status to the Amazon Simple Notification Service topic that you specify in <code>NotificationChannel</code>.</p>
/// <p>To get the results of the text detection operation, first check that the status value published to the Amazon SNS topic is <code>SUCCEEDED</code>. If so, call <code>GetTextDetection</code> and pass the job identifier (<code>JobId</code>) from the initial call to <code>StartTextDetection</code>.</p>
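///
/// A minimal usage sketch (bucket, object key, and job tag below are placeholders). The detected
/// text itself is retrieved later with `GetTextDetection`, after the SNS topic reports a
/// `SUCCEEDED` status:
///
/// ```no_run
/// # async fn example(client: &aws_sdk_rekognition::Client) -> Result<(), aws_sdk_rekognition::Error> {
/// use aws_sdk_rekognition::types::{S3Object, Video};
///
/// let start = client
///     .start_text_detection()
///     .video(
///         // "my-bucket" and "my-video.mp4" are placeholder values.
///         Video::builder()
///             .s3_object(S3Object::builder().bucket("my-bucket").name("my-video.mp4").build())
///             .build(),
///     )
///     .job_tag("my-text-detection-run") // placeholder tag echoed back in the SNS notification
///     .send()
///     .await?;
/// // Pass this JobId to GetTextDetection once the completion notification arrives.
/// println!("JobId: {:?}", start.job_id());
/// # Ok(())
/// # }
/// ```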
#[derive(::std::clone::Clone, ::std::fmt::Debug)]
pub struct StartTextDetectionFluentBuilder {
    handle: ::std::sync::Arc<crate::client::Handle>,
    inner: crate::operation::start_text_detection::builders::StartTextDetectionInputBuilder,
    config_override: ::std::option::Option<crate::config::Builder>,
}
impl
    crate::client::customize::internal::CustomizableSend<
        crate::operation::start_text_detection::StartTextDetectionOutput,
        crate::operation::start_text_detection::StartTextDetectionError,
    > for StartTextDetectionFluentBuilder
{
    fn send(
        self,
        config_override: crate::config::Builder,
    ) -> crate::client::customize::internal::BoxFuture<
        crate::client::customize::internal::SendResult<
            crate::operation::start_text_detection::StartTextDetectionOutput,
            crate::operation::start_text_detection::StartTextDetectionError,
        >,
    > {
        ::std::boxed::Box::pin(async move { self.config_override(config_override).send().await })
    }
}
impl StartTextDetectionFluentBuilder {
    /// Creates a new `StartTextDetectionFluentBuilder`.
    pub(crate) fn new(handle: ::std::sync::Arc<crate::client::Handle>) -> Self {
        Self {
            handle,
            inner: ::std::default::Default::default(),
            config_override: ::std::option::Option::None,
        }
    }
    /// Access the `StartTextDetection` input builder as a reference.
    pub fn as_input(&self) -> &crate::operation::start_text_detection::builders::StartTextDetectionInputBuilder {
        &self.inner
    }
    /// Sends the request and returns the response.
    ///
    /// If an error occurs, an `SdkError` will be returned with additional details that
    /// can be matched against.
    ///
    /// By default, any retryable failures will be retried twice. Retry behavior
    /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
    /// set when configuring the client.
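    ///
    /// A minimal error-handling sketch (the `video` value is assumed to be built elsewhere):
    ///
    /// ```no_run
    /// # async fn example(client: &aws_sdk_rekognition::Client, video: aws_sdk_rekognition::types::Video) {
    /// match client.start_text_detection().video(video).send().await {
    ///     Ok(output) => println!("JobId: {:?}", output.job_id()),
    ///     Err(err) => {
    ///         // into_service_error() strips the transport layers so the modeled
    ///         // StartTextDetectionError variants can be matched against.
    ///         let service_err = err.into_service_error();
    ///         eprintln!("StartTextDetection failed: {}", service_err);
    ///     }
    /// }
    /// # }
    /// ```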
    pub async fn send(
        self,
    ) -> ::std::result::Result<
        crate::operation::start_text_detection::StartTextDetectionOutput,
        ::aws_smithy_runtime_api::client::result::SdkError<
            crate::operation::start_text_detection::StartTextDetectionError,
            ::aws_smithy_runtime_api::client::orchestrator::HttpResponse,
        >,
    > {
        let input = self
            .inner
            .build()
            .map_err(::aws_smithy_runtime_api::client::result::SdkError::construction_failure)?;
        let runtime_plugins = crate::operation::start_text_detection::StartTextDetection::operation_runtime_plugins(
            self.handle.runtime_plugins.clone(),
            &self.handle.conf,
            self.config_override,
        );
        crate::operation::start_text_detection::StartTextDetection::orchestrate(&runtime_plugins, input).await
    }

    /// Consumes this builder, creating a customizable operation that can be modified before being sent.
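    ///
    /// A minimal sketch of a per-request configuration override (the region is a placeholder):
    ///
    /// ```no_run
    /// # async fn example(client: &aws_sdk_rekognition::Client, video: aws_sdk_rekognition::types::Video) -> Result<(), aws_sdk_rekognition::Error> {
    /// use aws_sdk_rekognition::config::Region;
    ///
    /// let output = client
    ///     .start_text_detection()
    ///     .video(video)
    ///     .customize()
    ///     // Applies only to this invocation; the client's default configuration is untouched.
    ///     .config_override(aws_sdk_rekognition::config::Config::builder().region(Region::new("us-west-2")))
    ///     .send()
    ///     .await?;
    /// println!("JobId: {:?}", output.job_id());
    /// # Ok(())
    /// # }
    /// ```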
    pub fn customize(
        self,
    ) -> crate::client::customize::CustomizableOperation<
        crate::operation::start_text_detection::StartTextDetectionOutput,
        crate::operation::start_text_detection::StartTextDetectionError,
        Self,
    > {
        crate::client::customize::CustomizableOperation::new(self)
    }
    pub(crate) fn config_override(mut self, config_override: impl ::std::convert::Into<crate::config::Builder>) -> Self {
        self.set_config_override(::std::option::Option::Some(config_override.into()));
        self
    }

    pub(crate) fn set_config_override(&mut self, config_override: ::std::option::Option<crate::config::Builder>) -> &mut Self {
        self.config_override = config_override;
        self
    }
    /// <p>Video file stored in an Amazon S3 bucket. Amazon Rekognition video start operations such as <code>StartLabelDetection</code> use <code>Video</code> to specify a video for analysis. The supported file formats are .mp4, .mov and .avi.</p>
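    ///
    /// A minimal sketch of building the `Video` argument from an S3 object (bucket and
    /// object key are placeholders):
    ///
    /// ```
    /// use aws_sdk_rekognition::types::{S3Object, Video};
    ///
    /// let video = Video::builder()
    ///     .s3_object(
    ///         S3Object::builder()
    ///             .bucket("my-bucket") // placeholder bucket name
    ///             .name("my-video.mp4") // placeholder object key
    ///             .build(),
    ///     )
    ///     .build();
    /// assert!(video.s3_object().is_some());
    /// ```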
    pub fn video(mut self, input: crate::types::Video) -> Self {
        self.inner = self.inner.video(input);
        self
    }
    /// <p>Video file stored in an Amazon S3 bucket. Amazon Rekognition video start operations such as <code>StartLabelDetection</code> use <code>Video</code> to specify a video for analysis. The supported file formats are .mp4, .mov and .avi.</p>
    pub fn set_video(mut self, input: ::std::option::Option<crate::types::Video>) -> Self {
        self.inner = self.inner.set_video(input);
        self
    }
    /// <p>Video file stored in an Amazon S3 bucket. Amazon Rekognition video start operations such as <code>StartLabelDetection</code> use <code>Video</code> to specify a video for analysis. The supported file formats are .mp4, .mov and .avi.</p>
    pub fn get_video(&self) -> &::std::option::Option<crate::types::Video> {
        self.inner.get_video()
    }
    /// <p>Idempotent token used to identify the start request. If you use the same token with multiple <code>StartTextDetection</code> requests, the same <code>JobId</code> is returned. Use <code>ClientRequestToken</code> to prevent the same job from being accidentally started more than once.</p>
    pub fn client_request_token(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.inner = self.inner.client_request_token(input.into());
        self
    }
    /// <p>Idempotent token used to identify the start request. If you use the same token with multiple <code>StartTextDetection</code> requests, the same <code>JobId</code> is returned. Use <code>ClientRequestToken</code> to prevent the same job from being accidentally started more than once.</p>
    pub fn set_client_request_token(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.inner = self.inner.set_client_request_token(input);
        self
    }
    /// <p>Idempotent token used to identify the start request. If you use the same token with multiple <code>StartTextDetection</code> requests, the same <code>JobId</code> is returned. Use <code>ClientRequestToken</code> to prevent the same job from being accidentally started more than once.</p>
    pub fn get_client_request_token(&self) -> &::std::option::Option<::std::string::String> {
        self.inner.get_client_request_token()
    }
    /// <p>The Amazon Simple Notification Service topic to which Amazon Rekognition publishes the completion status of a video analysis operation. For more information, see <a href="https://docs.aws.amazon.com/rekognition/latest/dg/api-video.html">Calling Amazon Rekognition Video operations</a>. Note that the Amazon SNS topic must have a topic name that begins with <i>AmazonRekognition</i> if you are using the AmazonRekognitionServiceRole permissions policy to access the topic. For more information, see <a href="https://docs.aws.amazon.com/rekognition/latest/dg/api-video-roles.html#api-video-roles-all-topics">Giving access to multiple Amazon SNS topics</a>.</p>
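    ///
    /// A minimal sketch of building the channel (the topic and role ARNs are placeholders, and
    /// `build()` is treated as fallible here because both fields are required):
    ///
    /// ```no_run
    /// # fn example() -> Result<(), Box<dyn std::error::Error>> {
    /// use aws_sdk_rekognition::types::NotificationChannel;
    ///
    /// let channel = NotificationChannel::builder()
    ///     // Placeholder ARNs; the topic name starts with "AmazonRekognition" to satisfy the
    ///     // AmazonRekognitionServiceRole policy mentioned above.
    ///     .sns_topic_arn("arn:aws:sns:us-east-1:111122223333:AmazonRekognitionTextDetection")
    ///     .role_arn("arn:aws:iam::111122223333:role/RekognitionVideoSnsRole")
    ///     .build()?;
    /// # let _ = channel;
    /// # Ok(())
    /// # }
    /// ```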
    pub fn notification_channel(mut self, input: crate::types::NotificationChannel) -> Self {
        self.inner = self.inner.notification_channel(input);
        self
    }
    /// <p>The Amazon Simple Notification Service topic to which Amazon Rekognition publishes the completion status of a video analysis operation. For more information, see <a href="https://docs.aws.amazon.com/rekognition/latest/dg/api-video.html">Calling Amazon Rekognition Video operations</a>. Note that the Amazon SNS topic must have a topic name that begins with <i>AmazonRekognition</i> if you are using the AmazonRekognitionServiceRole permissions policy to access the topic. For more information, see <a href="https://docs.aws.amazon.com/rekognition/latest/dg/api-video-roles.html#api-video-roles-all-topics">Giving access to multiple Amazon SNS topics</a>.</p>
    pub fn set_notification_channel(mut self, input: ::std::option::Option<crate::types::NotificationChannel>) -> Self {
        self.inner = self.inner.set_notification_channel(input);
        self
    }
    /// <p>The Amazon Simple Notification Service topic to which Amazon Rekognition publishes the completion status of a video analysis operation. For more information, see <a href="https://docs.aws.amazon.com/rekognition/latest/dg/api-video.html">Calling Amazon Rekognition Video operations</a>. Note that the Amazon SNS topic must have a topic name that begins with <i>AmazonRekognition</i> if you are using the AmazonRekognitionServiceRole permissions policy to access the topic. For more information, see <a href="https://docs.aws.amazon.com/rekognition/latest/dg/api-video-roles.html#api-video-roles-all-topics">Giving access to multiple Amazon SNS topics</a>.</p>
    pub fn get_notification_channel(&self) -> &::std::option::Option<crate::types::NotificationChannel> {
        self.inner.get_notification_channel()
    }
    /// <p>An identifier returned in the completion status published by your Amazon Simple Notification Service topic. For example, you can use <code>JobTag</code> to group related jobs and identify them in the completion notification.</p>
    pub fn job_tag(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.inner = self.inner.job_tag(input.into());
        self
    }
    /// <p>An identifier returned in the completion status published by your Amazon Simple Notification Service topic. For example, you can use <code>JobTag</code> to group related jobs and identify them in the completion notification.</p>
    pub fn set_job_tag(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.inner = self.inner.set_job_tag(input);
        self
    }
    /// <p>An identifier returned in the completion status published by your Amazon Simple Notification Service topic. For example, you can use <code>JobTag</code> to group related jobs and identify them in the completion notification.</p>
    pub fn get_job_tag(&self) -> &::std::option::Option<::std::string::String> {
        self.inner.get_job_tag()
    }
    /// <p>Optional parameters that let you set criteria the text must meet to be included in your response.</p>
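    ///
    /// A minimal sketch of a word filter (the 80.0 confidence threshold is an arbitrary
    /// placeholder value):
    ///
    /// ```
    /// use aws_sdk_rekognition::types::{DetectionFilter, StartTextDetectionFilters};
    ///
    /// let filters = StartTextDetectionFilters::builder()
    ///     .word_filter(
    ///         DetectionFilter::builder()
    ///             .min_confidence(80.0) // drop words detected with confidence below 80%
    ///             .build(),
    ///     )
    ///     .build();
    /// assert!(filters.word_filter().is_some());
    /// ```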
    pub fn filters(mut self, input: crate::types::StartTextDetectionFilters) -> Self {
        self.inner = self.inner.filters(input);
        self
    }
    /// <p>Optional parameters that let you set criteria the text must meet to be included in your response.</p>
    pub fn set_filters(mut self, input: ::std::option::Option<crate::types::StartTextDetectionFilters>) -> Self {
        self.inner = self.inner.set_filters(input);
        self
    }
    /// <p>Optional parameters that let you set criteria the text must meet to be included in your response.</p>
    pub fn get_filters(&self) -> &::std::option::Option<crate::types::StartTextDetectionFilters> {
        self.inner.get_filters()
    }
}