aws_sdk_personalize/operation/create_batch_inference_job/builders.rs

// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
pub use crate::operation::create_batch_inference_job::_create_batch_inference_job_output::CreateBatchInferenceJobOutputBuilder;

pub use crate::operation::create_batch_inference_job::_create_batch_inference_job_input::CreateBatchInferenceJobInputBuilder;

impl crate::operation::create_batch_inference_job::builders::CreateBatchInferenceJobInputBuilder {
    /// Sends a request with this input using the given client.
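    ///
    /// An illustrative sketch: it assumes a configured `aws_sdk_personalize::Client` and a
    /// populated input builder are already in scope.
    ///
    /// ```no_run
    /// # async fn example(
    /// #     client: &aws_sdk_personalize::Client,
    /// #     input_builder: aws_sdk_personalize::operation::create_batch_inference_job::builders::CreateBatchInferenceJobInputBuilder,
    /// # ) -> Result<(), Box<dyn std::error::Error>> {
    /// // Reuse an input builder that was populated elsewhere and send it with an existing client.
    /// let output = input_builder.send_with(client).await?;
    /// println!("{output:?}");
    /// # Ok(())
    /// # }
    /// ```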
    pub async fn send_with(
        self,
        client: &crate::Client,
    ) -> ::std::result::Result<
        crate::operation::create_batch_inference_job::CreateBatchInferenceJobOutput,
        ::aws_smithy_runtime_api::client::result::SdkError<
            crate::operation::create_batch_inference_job::CreateBatchInferenceJobError,
            ::aws_smithy_runtime_api::client::orchestrator::HttpResponse,
        >,
    > {
        let mut fluent_builder = client.create_batch_inference_job();
        fluent_builder.inner = self;
        fluent_builder.send().await
    }
}
/// Fluent builder constructing a request to `CreateBatchInferenceJob`.
///
/// <p>Generates batch recommendations based on a list of items or users stored in Amazon S3 and exports the recommendations to an Amazon S3 bucket.</p>
/// <p>To generate batch recommendations, specify the ARN of a solution version and an Amazon S3 URI for the input and output data. For user personalization, popular items, and personalized ranking solutions, the batch inference job generates a list of recommended items for each user ID in the input file. For related items solutions, the job generates a list of recommended items for each item ID in the input file.</p>
/// <p>For more information, see <a href="https://docs.aws.amazon.com/personalize/latest/dg/getting-batch-recommendations.html">Creating a batch inference job</a>.</p>
/// <p>If you use the Similar-Items recipe, Amazon Personalize can add descriptive themes to batch recommendations. To generate themes, set the job's mode to <code>THEME_GENERATION</code> and specify the name of the field that contains item names in the input data.</p>
/// <p>For more information about generating themes, see <a href="https://docs.aws.amazon.com/personalize/latest/dg/themed-batch-recommendations.html">Batch recommendations with themes from Content Generator</a>.</p>
/// <p>You can't get batch recommendations with the Trending-Now or Next-Best-Action recipes.</p>
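///
/// A minimal usage sketch (illustrative only: the job name, ARNs, and other values are
/// placeholders, and the required `job_input` and `job_output` Amazon S3 locations are elided):
///
/// ```no_run
/// # async fn example(client: &aws_sdk_personalize::Client) -> Result<(), Box<dyn std::error::Error>> {
/// let _output = client
///     .create_batch_inference_job()
///     // Placeholder values; substitute your own job name, solution version ARN, and IAM role ARN.
///     .job_name("my-batch-job")
///     .solution_version_arn("arn:aws:personalize:us-west-2:111122223333:solution/my-solution/version-id")
///     .role_arn("arn:aws:iam::111122223333:role/PersonalizeS3AccessRole")
///     .num_results(25)
///     // `job_input` and `job_output` (Amazon S3 input/output locations) must also be set before sending.
///     .send()
///     .await?;
/// # Ok(())
/// # }
/// ```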
#[derive(::std::clone::Clone, ::std::fmt::Debug)]
pub struct CreateBatchInferenceJobFluentBuilder {
    handle: ::std::sync::Arc<crate::client::Handle>,
    inner: crate::operation::create_batch_inference_job::builders::CreateBatchInferenceJobInputBuilder,
    config_override: ::std::option::Option<crate::config::Builder>,
}
impl
    crate::client::customize::internal::CustomizableSend<
        crate::operation::create_batch_inference_job::CreateBatchInferenceJobOutput,
        crate::operation::create_batch_inference_job::CreateBatchInferenceJobError,
    > for CreateBatchInferenceJobFluentBuilder
{
    fn send(
        self,
        config_override: crate::config::Builder,
    ) -> crate::client::customize::internal::BoxFuture<
        crate::client::customize::internal::SendResult<
            crate::operation::create_batch_inference_job::CreateBatchInferenceJobOutput,
            crate::operation::create_batch_inference_job::CreateBatchInferenceJobError,
        >,
    > {
        ::std::boxed::Box::pin(async move { self.config_override(config_override).send().await })
    }
}
impl CreateBatchInferenceJobFluentBuilder {
    /// Creates a new `CreateBatchInferenceJobFluentBuilder`.
    pub(crate) fn new(handle: ::std::sync::Arc<crate::client::Handle>) -> Self {
        Self {
            handle,
            inner: ::std::default::Default::default(),
            config_override: ::std::option::Option::None,
        }
    }
    /// Access the CreateBatchInferenceJob as a reference.
    pub fn as_input(&self) -> &crate::operation::create_batch_inference_job::builders::CreateBatchInferenceJobInputBuilder {
        &self.inner
    }
    /// Sends the request and returns the response.
    ///
    /// If an error occurs, an `SdkError` will be returned with additional details that
    /// can be matched against.
    ///
    /// By default, any retryable failures will be retried twice. Retry behavior
    /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
    /// set when configuring the client.
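    ///
    /// For example, the retry behavior can be overridden when constructing the client. An
    /// illustrative sketch (the attempt count shown is not the default):
    ///
    /// ```no_run
    /// # async fn example() -> aws_sdk_personalize::Client {
    /// use aws_smithy_types::retry::RetryConfig;
    /// // Load shared AWS configuration, then allow up to 5 attempts per request.
    /// let sdk_config = aws_config::load_defaults(aws_config::BehaviorVersion::latest()).await;
    /// let conf = aws_sdk_personalize::config::Builder::from(&sdk_config)
    ///     .retry_config(RetryConfig::standard().with_max_attempts(5))
    ///     .build();
    /// aws_sdk_personalize::Client::from_conf(conf)
    /// # }
    /// ```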
    pub async fn send(
        self,
    ) -> ::std::result::Result<
        crate::operation::create_batch_inference_job::CreateBatchInferenceJobOutput,
        ::aws_smithy_runtime_api::client::result::SdkError<
            crate::operation::create_batch_inference_job::CreateBatchInferenceJobError,
            ::aws_smithy_runtime_api::client::orchestrator::HttpResponse,
        >,
    > {
        let input = self
            .inner
            .build()
            .map_err(::aws_smithy_runtime_api::client::result::SdkError::construction_failure)?;
        let runtime_plugins = crate::operation::create_batch_inference_job::CreateBatchInferenceJob::operation_runtime_plugins(
            self.handle.runtime_plugins.clone(),
            &self.handle.conf,
            self.config_override,
        );
        crate::operation::create_batch_inference_job::CreateBatchInferenceJob::orchestrate(&runtime_plugins, input).await
    }

    /// Consumes this builder, creating a customizable operation that can be modified before being sent.
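    ///
    /// An illustrative sketch (the header name and value are placeholders, and the exact
    /// customization hooks available may vary by SDK version):
    ///
    /// ```no_run
    /// # async fn example(client: &aws_sdk_personalize::Client) -> Result<(), Box<dyn std::error::Error>> {
    /// let _output = client
    ///     .create_batch_inference_job()
    ///     .job_name("my-batch-job")
    ///     .customize()
    ///     // Mutate the HTTP request before it is sent, e.g. to attach a custom header.
    ///     .mutate_request(|req| {
    ///         req.headers_mut().insert("x-example-header", "example-value");
    ///     })
    ///     .send()
    ///     .await?;
    /// # Ok(())
    /// # }
    /// ```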
    pub fn customize(
        self,
    ) -> crate::client::customize::CustomizableOperation<
        crate::operation::create_batch_inference_job::CreateBatchInferenceJobOutput,
        crate::operation::create_batch_inference_job::CreateBatchInferenceJobError,
        Self,
    > {
        crate::client::customize::CustomizableOperation::new(self)
    }
    pub(crate) fn config_override(mut self, config_override: impl ::std::convert::Into<crate::config::Builder>) -> Self {
        self.set_config_override(::std::option::Option::Some(config_override.into()));
        self
    }

    pub(crate) fn set_config_override(&mut self, config_override: ::std::option::Option<crate::config::Builder>) -> &mut Self {
        self.config_override = config_override;
        self
    }
    /// <p>The name of the batch inference job to create.</p>
    pub fn job_name(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.inner = self.inner.job_name(input.into());
        self
    }
    /// <p>The name of the batch inference job to create.</p>
    pub fn set_job_name(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.inner = self.inner.set_job_name(input);
        self
    }
    /// <p>The name of the batch inference job to create.</p>
    pub fn get_job_name(&self) -> &::std::option::Option<::std::string::String> {
        self.inner.get_job_name()
    }
    /// <p>The Amazon Resource Name (ARN) of the solution version that will be used to generate the batch inference recommendations.</p>
    pub fn solution_version_arn(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.inner = self.inner.solution_version_arn(input.into());
        self
    }
    /// <p>The Amazon Resource Name (ARN) of the solution version that will be used to generate the batch inference recommendations.</p>
    pub fn set_solution_version_arn(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.inner = self.inner.set_solution_version_arn(input);
        self
    }
    /// <p>The Amazon Resource Name (ARN) of the solution version that will be used to generate the batch inference recommendations.</p>
    pub fn get_solution_version_arn(&self) -> &::std::option::Option<::std::string::String> {
        self.inner.get_solution_version_arn()
    }
    /// <p>The ARN of the filter to apply to the batch inference job. For more information on using filters, see <a href="https://docs.aws.amazon.com/personalize/latest/dg/filter-batch.html">Filtering batch recommendations</a>.</p>
    pub fn filter_arn(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.inner = self.inner.filter_arn(input.into());
        self
    }
    /// <p>The ARN of the filter to apply to the batch inference job. For more information on using filters, see <a href="https://docs.aws.amazon.com/personalize/latest/dg/filter-batch.html">Filtering batch recommendations</a>.</p>
    pub fn set_filter_arn(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.inner = self.inner.set_filter_arn(input);
        self
    }
    /// <p>The ARN of the filter to apply to the batch inference job. For more information on using filters, see <a href="https://docs.aws.amazon.com/personalize/latest/dg/filter-batch.html">Filtering batch recommendations</a>.</p>
    pub fn get_filter_arn(&self) -> &::std::option::Option<::std::string::String> {
        self.inner.get_filter_arn()
    }
    /// <p>The number of recommendations to retrieve.</p>
    pub fn num_results(mut self, input: i32) -> Self {
        self.inner = self.inner.num_results(input);
        self
    }
    /// <p>The number of recommendations to retrieve.</p>
    pub fn set_num_results(mut self, input: ::std::option::Option<i32>) -> Self {
        self.inner = self.inner.set_num_results(input);
        self
    }
    /// <p>The number of recommendations to retrieve.</p>
    pub fn get_num_results(&self) -> &::std::option::Option<i32> {
        self.inner.get_num_results()
    }
    /// <p>The Amazon S3 path that leads to the input file to base your recommendations on. The input material must be in JSON format.</p>
    pub fn job_input(mut self, input: crate::types::BatchInferenceJobInput) -> Self {
        self.inner = self.inner.job_input(input);
        self
    }
    /// <p>The Amazon S3 path that leads to the input file to base your recommendations on. The input material must be in JSON format.</p>
    pub fn set_job_input(mut self, input: ::std::option::Option<crate::types::BatchInferenceJobInput>) -> Self {
        self.inner = self.inner.set_job_input(input);
        self
    }
    /// <p>The Amazon S3 path that leads to the input file to base your recommendations on. The input material must be in JSON format.</p>
    pub fn get_job_input(&self) -> &::std::option::Option<crate::types::BatchInferenceJobInput> {
        self.inner.get_job_input()
    }
    /// <p>The path to the Amazon S3 bucket where the job's output will be stored.</p>
    pub fn job_output(mut self, input: crate::types::BatchInferenceJobOutput) -> Self {
        self.inner = self.inner.job_output(input);
        self
    }
    /// <p>The path to the Amazon S3 bucket where the job's output will be stored.</p>
    pub fn set_job_output(mut self, input: ::std::option::Option<crate::types::BatchInferenceJobOutput>) -> Self {
        self.inner = self.inner.set_job_output(input);
        self
    }
    /// <p>The path to the Amazon S3 bucket where the job's output will be stored.</p>
    pub fn get_job_output(&self) -> &::std::option::Option<crate::types::BatchInferenceJobOutput> {
        self.inner.get_job_output()
    }
    /// <p>The ARN of the Amazon Identity and Access Management role that has permissions to read and write to your input and output Amazon S3 buckets respectively.</p>
    pub fn role_arn(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.inner = self.inner.role_arn(input.into());
        self
    }
    /// <p>The ARN of the Amazon Identity and Access Management role that has permissions to read and write to your input and output Amazon S3 buckets respectively.</p>
    pub fn set_role_arn(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.inner = self.inner.set_role_arn(input);
        self
    }
    /// <p>The ARN of the Amazon Identity and Access Management role that has permissions to read and write to your input and output Amazon S3 buckets respectively.</p>
    pub fn get_role_arn(&self) -> &::std::option::Option<::std::string::String> {
        self.inner.get_role_arn()
    }
    /// <p>The configuration details of a batch inference job.</p>
    pub fn batch_inference_job_config(mut self, input: crate::types::BatchInferenceJobConfig) -> Self {
        self.inner = self.inner.batch_inference_job_config(input);
        self
    }
    /// <p>The configuration details of a batch inference job.</p>
    pub fn set_batch_inference_job_config(mut self, input: ::std::option::Option<crate::types::BatchInferenceJobConfig>) -> Self {
        self.inner = self.inner.set_batch_inference_job_config(input);
        self
    }
    /// <p>The configuration details of a batch inference job.</p>
    pub fn get_batch_inference_job_config(&self) -> &::std::option::Option<crate::types::BatchInferenceJobConfig> {
        self.inner.get_batch_inference_job_config()
    }
    ///
    /// Appends an item to `tags`.
    ///
    /// To override the contents of this collection use [`set_tags`](Self::set_tags).
    ///
    /// <p>A list of <a href="https://docs.aws.amazon.com/personalize/latest/dg/tagging-resources.html">tags</a> to apply to the batch inference job.</p>
    pub fn tags(mut self, input: crate::types::Tag) -> Self {
        self.inner = self.inner.tags(input);
        self
    }
    /// <p>A list of <a href="https://docs.aws.amazon.com/personalize/latest/dg/tagging-resources.html">tags</a> to apply to the batch inference job.</p>
    pub fn set_tags(mut self, input: ::std::option::Option<::std::vec::Vec<crate::types::Tag>>) -> Self {
        self.inner = self.inner.set_tags(input);
        self
    }
    /// <p>A list of <a href="https://docs.aws.amazon.com/personalize/latest/dg/tagging-resources.html">tags</a> to apply to the batch inference job.</p>
    pub fn get_tags(&self) -> &::std::option::Option<::std::vec::Vec<crate::types::Tag>> {
        self.inner.get_tags()
    }
    /// <p>The mode of the batch inference job. To generate descriptive themes for groups of similar items, set the job mode to <code>THEME_GENERATION</code>. If you don't want to generate themes, use the default <code>BATCH_INFERENCE</code>.</p>
    /// <p>When you get batch recommendations with themes, you will incur additional costs. For more information, see <a href="https://aws.amazon.com/personalize/pricing/">Amazon Personalize pricing</a>.</p>
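    ///
    /// An illustrative sketch of opting into theme generation (the builder is assumed to come
    /// from `client.create_batch_inference_job()`):
    ///
    /// ```no_run
    /// # fn example(
    /// #     builder: aws_sdk_personalize::operation::create_batch_inference_job::builders::CreateBatchInferenceJobFluentBuilder,
    /// # ) -> aws_sdk_personalize::operation::create_batch_inference_job::builders::CreateBatchInferenceJobFluentBuilder {
    /// use aws_sdk_personalize::types::BatchInferenceJobMode;
    /// // Request descriptive themes instead of the default BATCH_INFERENCE mode.
    /// builder.batch_inference_job_mode(BatchInferenceJobMode::ThemeGeneration)
    /// # }
    /// ```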
    pub fn batch_inference_job_mode(mut self, input: crate::types::BatchInferenceJobMode) -> Self {
        self.inner = self.inner.batch_inference_job_mode(input);
        self
    }
    /// <p>The mode of the batch inference job. To generate descriptive themes for groups of similar items, set the job mode to <code>THEME_GENERATION</code>. If you don't want to generate themes, use the default <code>BATCH_INFERENCE</code>.</p>
    /// <p>When you get batch recommendations with themes, you will incur additional costs. For more information, see <a href="https://aws.amazon.com/personalize/pricing/">Amazon Personalize pricing</a>.</p>
    pub fn set_batch_inference_job_mode(mut self, input: ::std::option::Option<crate::types::BatchInferenceJobMode>) -> Self {
        self.inner = self.inner.set_batch_inference_job_mode(input);
        self
    }
    /// <p>The mode of the batch inference job. To generate descriptive themes for groups of similar items, set the job mode to <code>THEME_GENERATION</code>. If you don't want to generate themes, use the default <code>BATCH_INFERENCE</code>.</p>
    /// <p>When you get batch recommendations with themes, you will incur additional costs. For more information, see <a href="https://aws.amazon.com/personalize/pricing/">Amazon Personalize pricing</a>.</p>
    pub fn get_batch_inference_job_mode(&self) -> &::std::option::Option<crate::types::BatchInferenceJobMode> {
        self.inner.get_batch_inference_job_mode()
    }
    /// <p>For theme generation jobs, specify the name of the column in your Items dataset that contains each item's name.</p>
    pub fn theme_generation_config(mut self, input: crate::types::ThemeGenerationConfig) -> Self {
        self.inner = self.inner.theme_generation_config(input);
        self
    }
    /// <p>For theme generation jobs, specify the name of the column in your Items dataset that contains each item's name.</p>
    pub fn set_theme_generation_config(mut self, input: ::std::option::Option<crate::types::ThemeGenerationConfig>) -> Self {
        self.inner = self.inner.set_theme_generation_config(input);
        self
    }
    /// <p>For theme generation jobs, specify the name of the column in your Items dataset that contains each item's name.</p>
    pub fn get_theme_generation_config(&self) -> &::std::option::Option<crate::types::ThemeGenerationConfig> {
        self.inner.get_theme_generation_config()
    }
}