aws_sdk_rekognition/operation/start_segment_detection/builders.rs
1// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
2pub use crate::operation::start_segment_detection::_start_segment_detection_output::StartSegmentDetectionOutputBuilder;
3
4pub use crate::operation::start_segment_detection::_start_segment_detection_input::StartSegmentDetectionInputBuilder;
5
6impl crate::operation::start_segment_detection::builders::StartSegmentDetectionInputBuilder {
7 /// Sends a request with this input using the given client.
8 pub async fn send_with(
9 self,
10 client: &crate::Client,
11 ) -> ::std::result::Result<
12 crate::operation::start_segment_detection::StartSegmentDetectionOutput,
13 ::aws_smithy_runtime_api::client::result::SdkError<
14 crate::operation::start_segment_detection::StartSegmentDetectionError,
15 ::aws_smithy_runtime_api::client::orchestrator::HttpResponse,
16 >,
17 > {
18 let mut fluent_builder = client.start_segment_detection();
19 fluent_builder.inner = self;
20 fluent_builder.send().await
21 }
22}
/// Fluent builder constructing a request to `StartSegmentDetection`.
///
/// <p>Starts asynchronous detection of segment detection in a stored video.</p>
/// <p>Amazon Rekognition Video can detect segments in a video stored in an Amazon S3 bucket. Use <code>Video</code> to specify the bucket name and the filename of the video. <code>StartSegmentDetection</code> returns a job identifier (<code>JobId</code>) which you use to get the results of the operation. When segment detection is finished, Amazon Rekognition Video publishes a completion status to the Amazon Simple Notification Service topic that you specify in <code>NotificationChannel</code>.</p>
/// <p>You can use the <code>Filters</code> (<code>StartSegmentDetectionFilters</code>) input parameter to specify the minimum detection confidence returned in the response. Within <code>Filters</code>, use <code>ShotFilter</code> (<code>StartShotDetectionFilter</code>) to filter detected shots. Use <code>TechnicalCueFilter</code> (<code>StartTechnicalCueDetectionFilter</code>) to filter technical cues.</p>
/// <p>To get the results of the segment detection operation, first check that the status value published to the Amazon SNS topic is <code>SUCCEEDED</code>. if so, call <code>GetSegmentDetection</code> and pass the job identifier (<code>JobId</code>) from the initial call to <code>StartSegmentDetection</code>.</p>
/// <p>For more information, see Detecting video segments in stored video in the Amazon Rekognition Developer Guide.</p>
#[derive(::std::clone::Clone, ::std::fmt::Debug)]
pub struct StartSegmentDetectionFluentBuilder {
    /// Shared client handle; `send` reads its `runtime_plugins` and `conf` to build the operation's runtime plugins.
    handle: ::std::sync::Arc<crate::client::Handle>,
    /// Accumulates the operation input as the fluent setters are called; finalized via `build()` in `send`.
    inner: crate::operation::start_segment_detection::builders::StartSegmentDetectionInputBuilder,
    /// Optional per-operation config override, passed into `operation_runtime_plugins` when sending.
    config_override: ::std::option::Option<crate::config::Builder>,
}
36impl
37 crate::client::customize::internal::CustomizableSend<
38 crate::operation::start_segment_detection::StartSegmentDetectionOutput,
39 crate::operation::start_segment_detection::StartSegmentDetectionError,
40 > for StartSegmentDetectionFluentBuilder
41{
42 fn send(
43 self,
44 config_override: crate::config::Builder,
45 ) -> crate::client::customize::internal::BoxFuture<
46 crate::client::customize::internal::SendResult<
47 crate::operation::start_segment_detection::StartSegmentDetectionOutput,
48 crate::operation::start_segment_detection::StartSegmentDetectionError,
49 >,
50 > {
51 ::std::boxed::Box::pin(async move { self.config_override(config_override).send().await })
52 }
53}
54impl StartSegmentDetectionFluentBuilder {
55 /// Creates a new `StartSegmentDetectionFluentBuilder`.
56 pub(crate) fn new(handle: ::std::sync::Arc<crate::client::Handle>) -> Self {
57 Self {
58 handle,
59 inner: ::std::default::Default::default(),
60 config_override: ::std::option::Option::None,
61 }
62 }
63 /// Access the StartSegmentDetection as a reference.
64 pub fn as_input(&self) -> &crate::operation::start_segment_detection::builders::StartSegmentDetectionInputBuilder {
65 &self.inner
66 }
67 /// Sends the request and returns the response.
68 ///
69 /// If an error occurs, an `SdkError` will be returned with additional details that
70 /// can be matched against.
71 ///
72 /// By default, any retryable failures will be retried twice. Retry behavior
73 /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
74 /// set when configuring the client.
75 pub async fn send(
76 self,
77 ) -> ::std::result::Result<
78 crate::operation::start_segment_detection::StartSegmentDetectionOutput,
79 ::aws_smithy_runtime_api::client::result::SdkError<
80 crate::operation::start_segment_detection::StartSegmentDetectionError,
81 ::aws_smithy_runtime_api::client::orchestrator::HttpResponse,
82 >,
83 > {
84 let input = self
85 .inner
86 .build()
87 .map_err(::aws_smithy_runtime_api::client::result::SdkError::construction_failure)?;
88 let runtime_plugins = crate::operation::start_segment_detection::StartSegmentDetection::operation_runtime_plugins(
89 self.handle.runtime_plugins.clone(),
90 &self.handle.conf,
91 self.config_override,
92 );
93 crate::operation::start_segment_detection::StartSegmentDetection::orchestrate(&runtime_plugins, input).await
94 }
95
96 /// Consumes this builder, creating a customizable operation that can be modified before being sent.
97 pub fn customize(
98 self,
99 ) -> crate::client::customize::CustomizableOperation<
100 crate::operation::start_segment_detection::StartSegmentDetectionOutput,
101 crate::operation::start_segment_detection::StartSegmentDetectionError,
102 Self,
103 > {
104 crate::client::customize::CustomizableOperation::new(self)
105 }
106 pub(crate) fn config_override(mut self, config_override: impl ::std::convert::Into<crate::config::Builder>) -> Self {
107 self.set_config_override(::std::option::Option::Some(config_override.into()));
108 self
109 }
110
111 pub(crate) fn set_config_override(&mut self, config_override: ::std::option::Option<crate::config::Builder>) -> &mut Self {
112 self.config_override = config_override;
113 self
114 }
115 /// <p>Video file stored in an Amazon S3 bucket. Amazon Rekognition video start operations such as <code>StartLabelDetection</code> use <code>Video</code> to specify a video for analysis. The supported file formats are .mp4, .mov and .avi.</p>
116 pub fn video(mut self, input: crate::types::Video) -> Self {
117 self.inner = self.inner.video(input);
118 self
119 }
120 /// <p>Video file stored in an Amazon S3 bucket. Amazon Rekognition video start operations such as <code>StartLabelDetection</code> use <code>Video</code> to specify a video for analysis. The supported file formats are .mp4, .mov and .avi.</p>
121 pub fn set_video(mut self, input: ::std::option::Option<crate::types::Video>) -> Self {
122 self.inner = self.inner.set_video(input);
123 self
124 }
125 /// <p>Video file stored in an Amazon S3 bucket. Amazon Rekognition video start operations such as <code>StartLabelDetection</code> use <code>Video</code> to specify a video for analysis. The supported file formats are .mp4, .mov and .avi.</p>
126 pub fn get_video(&self) -> &::std::option::Option<crate::types::Video> {
127 self.inner.get_video()
128 }
129 /// <p>Idempotent token used to identify the start request. If you use the same token with multiple <code>StartSegmentDetection</code> requests, the same <code>JobId</code> is returned. Use <code>ClientRequestToken</code> to prevent the same job from being accidently started more than once.</p>
130 pub fn client_request_token(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
131 self.inner = self.inner.client_request_token(input.into());
132 self
133 }
134 /// <p>Idempotent token used to identify the start request. If you use the same token with multiple <code>StartSegmentDetection</code> requests, the same <code>JobId</code> is returned. Use <code>ClientRequestToken</code> to prevent the same job from being accidently started more than once.</p>
135 pub fn set_client_request_token(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
136 self.inner = self.inner.set_client_request_token(input);
137 self
138 }
139 /// <p>Idempotent token used to identify the start request. If you use the same token with multiple <code>StartSegmentDetection</code> requests, the same <code>JobId</code> is returned. Use <code>ClientRequestToken</code> to prevent the same job from being accidently started more than once.</p>
140 pub fn get_client_request_token(&self) -> &::std::option::Option<::std::string::String> {
141 self.inner.get_client_request_token()
142 }
143 /// <p>The ARN of the Amazon SNS topic to which you want Amazon Rekognition Video to publish the completion status of the segment detection operation. Note that the Amazon SNS topic must have a topic name that begins with <i>AmazonRekognition</i> if you are using the AmazonRekognitionServiceRole permissions policy to access the topic.</p>
144 pub fn notification_channel(mut self, input: crate::types::NotificationChannel) -> Self {
145 self.inner = self.inner.notification_channel(input);
146 self
147 }
148 /// <p>The ARN of the Amazon SNS topic to which you want Amazon Rekognition Video to publish the completion status of the segment detection operation. Note that the Amazon SNS topic must have a topic name that begins with <i>AmazonRekognition</i> if you are using the AmazonRekognitionServiceRole permissions policy to access the topic.</p>
149 pub fn set_notification_channel(mut self, input: ::std::option::Option<crate::types::NotificationChannel>) -> Self {
150 self.inner = self.inner.set_notification_channel(input);
151 self
152 }
153 /// <p>The ARN of the Amazon SNS topic to which you want Amazon Rekognition Video to publish the completion status of the segment detection operation. Note that the Amazon SNS topic must have a topic name that begins with <i>AmazonRekognition</i> if you are using the AmazonRekognitionServiceRole permissions policy to access the topic.</p>
154 pub fn get_notification_channel(&self) -> &::std::option::Option<crate::types::NotificationChannel> {
155 self.inner.get_notification_channel()
156 }
157 /// <p>An identifier you specify that's returned in the completion notification that's published to your Amazon Simple Notification Service topic. For example, you can use <code>JobTag</code> to group related jobs and identify them in the completion notification.</p>
158 pub fn job_tag(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
159 self.inner = self.inner.job_tag(input.into());
160 self
161 }
162 /// <p>An identifier you specify that's returned in the completion notification that's published to your Amazon Simple Notification Service topic. For example, you can use <code>JobTag</code> to group related jobs and identify them in the completion notification.</p>
163 pub fn set_job_tag(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
164 self.inner = self.inner.set_job_tag(input);
165 self
166 }
167 /// <p>An identifier you specify that's returned in the completion notification that's published to your Amazon Simple Notification Service topic. For example, you can use <code>JobTag</code> to group related jobs and identify them in the completion notification.</p>
168 pub fn get_job_tag(&self) -> &::std::option::Option<::std::string::String> {
169 self.inner.get_job_tag()
170 }
171 /// <p>Filters for technical cue or shot detection.</p>
172 pub fn filters(mut self, input: crate::types::StartSegmentDetectionFilters) -> Self {
173 self.inner = self.inner.filters(input);
174 self
175 }
176 /// <p>Filters for technical cue or shot detection.</p>
177 pub fn set_filters(mut self, input: ::std::option::Option<crate::types::StartSegmentDetectionFilters>) -> Self {
178 self.inner = self.inner.set_filters(input);
179 self
180 }
181 /// <p>Filters for technical cue or shot detection.</p>
182 pub fn get_filters(&self) -> &::std::option::Option<crate::types::StartSegmentDetectionFilters> {
183 self.inner.get_filters()
184 }
185 ///
186 /// Appends an item to `SegmentTypes`.
187 ///
188 /// To override the contents of this collection use [`set_segment_types`](Self::set_segment_types).
189 ///
190 /// <p>An array of segment types to detect in the video. Valid values are TECHNICAL_CUE and SHOT.</p>
191 pub fn segment_types(mut self, input: crate::types::SegmentType) -> Self {
192 self.inner = self.inner.segment_types(input);
193 self
194 }
195 /// <p>An array of segment types to detect in the video. Valid values are TECHNICAL_CUE and SHOT.</p>
196 pub fn set_segment_types(mut self, input: ::std::option::Option<::std::vec::Vec<crate::types::SegmentType>>) -> Self {
197 self.inner = self.inner.set_segment_types(input);
198 self
199 }
200 /// <p>An array of segment types to detect in the video. Valid values are TECHNICAL_CUE and SHOT.</p>
201 pub fn get_segment_types(&self) -> &::std::option::Option<::std::vec::Vec<crate::types::SegmentType>> {
202 self.inner.get_segment_types()
203 }
204}