// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
pub use crate::operation::create_optimization_job::_create_optimization_job_output::CreateOptimizationJobOutputBuilder;

pub use crate::operation::create_optimization_job::_create_optimization_job_input::CreateOptimizationJobInputBuilder;

impl crate::operation::create_optimization_job::builders::CreateOptimizationJobInputBuilder {
    /// Sends a request with this input using the given client.
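    ///
    /// A brief sketch of the intended usage, where the input is assembled with the standalone
    /// input builder and then dispatched with an existing client (the job name is a placeholder,
    /// and the remaining required fields are elided):
    ///
    /// ```no_run
    /// # async fn example(client: &aws_sdk_sagemaker::Client) -> Result<(), aws_sdk_sagemaker::Error> {
    /// use aws_sdk_sagemaker::operation::create_optimization_job::CreateOptimizationJobInput;
    ///
    /// let output = CreateOptimizationJobInput::builder()
    ///     .optimization_job_name("my-optimization-job")
    ///     // ... set the other required fields (role_arn, model_source, and so on) here ...
    ///     .send_with(client)
    ///     .await?;
    /// # let _ = output;
    /// # Ok(())
    /// # }
    /// ```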
    pub async fn send_with(
        self,
        client: &crate::Client,
    ) -> ::std::result::Result<
        crate::operation::create_optimization_job::CreateOptimizationJobOutput,
        ::aws_smithy_runtime_api::client::result::SdkError<
            crate::operation::create_optimization_job::CreateOptimizationJobError,
            ::aws_smithy_runtime_api::client::orchestrator::HttpResponse,
        >,
    > {
        let mut fluent_builder = client.create_optimization_job();
        fluent_builder.inner = self;
        fluent_builder.send().await
    }
}
/// Fluent builder constructing a request to `CreateOptimizationJob`.
///
/// <p>Creates a job that optimizes a model for inference performance. To create the job, you provide the location of a source model, and you provide the settings for the optimization techniques that you want the job to apply. When the job completes successfully, SageMaker uploads the new optimized model to the output destination that you specify.</p>
/// <p>For more information about how to use this action, and about the supported optimization techniques, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/model-optimize.html">Optimize model inference with Amazon SageMaker</a>.</p>
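///
/// # Example
///
/// A minimal usage sketch, assuming an already-configured `client` and configuration values
/// (`model_source`, `output_config`, and so on) built elsewhere; the job name and role ARN
/// below are placeholders, not real resources:
///
/// ```no_run
/// # use aws_sdk_sagemaker::types::{
/// #     OptimizationConfig, OptimizationJobDeploymentInstanceType, OptimizationJobModelSource,
/// #     OptimizationJobOutputConfig, StoppingCondition,
/// # };
/// # async fn example(
/// #     client: &aws_sdk_sagemaker::Client,
/// #     model_source: OptimizationJobModelSource,
/// #     deployment_instance_type: OptimizationJobDeploymentInstanceType,
/// #     optimization_config: OptimizationConfig,
/// #     output_config: OptimizationJobOutputConfig,
/// #     stopping_condition: StoppingCondition,
/// # ) -> Result<(), aws_sdk_sagemaker::Error> {
/// let response = client
///     .create_optimization_job()
///     .optimization_job_name("my-optimization-job")
///     .role_arn("arn:aws:iam::111122223333:role/my-sagemaker-role")
///     .model_source(model_source)
///     .deployment_instance_type(deployment_instance_type)
///     .optimization_configs(optimization_config)
///     .output_config(output_config)
///     .stopping_condition(stopping_condition)
///     .send()
///     .await?;
/// // The output derives `Debug`, so it can be inspected directly.
/// println!("created optimization job: {:?}", response);
/// # Ok(())
/// # }
/// ```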
#[derive(::std::clone::Clone, ::std::fmt::Debug)]
pub struct CreateOptimizationJobFluentBuilder {
    handle: ::std::sync::Arc<crate::client::Handle>,
    inner: crate::operation::create_optimization_job::builders::CreateOptimizationJobInputBuilder,
    config_override: ::std::option::Option<crate::config::Builder>,
}
impl
    crate::client::customize::internal::CustomizableSend<
        crate::operation::create_optimization_job::CreateOptimizationJobOutput,
        crate::operation::create_optimization_job::CreateOptimizationJobError,
    > for CreateOptimizationJobFluentBuilder
{
    fn send(
        self,
        config_override: crate::config::Builder,
    ) -> crate::client::customize::internal::BoxFuture<
        crate::client::customize::internal::SendResult<
            crate::operation::create_optimization_job::CreateOptimizationJobOutput,
            crate::operation::create_optimization_job::CreateOptimizationJobError,
        >,
    > {
        ::std::boxed::Box::pin(async move { self.config_override(config_override).send().await })
    }
}
impl CreateOptimizationJobFluentBuilder {
    /// Creates a new `CreateOptimizationJobFluentBuilder`.
    pub(crate) fn new(handle: ::std::sync::Arc<crate::client::Handle>) -> Self {
        Self {
            handle,
            inner: ::std::default::Default::default(),
            config_override: ::std::option::Option::None,
        }
    }
    /// Access the CreateOptimizationJob input as a reference.
    pub fn as_input(&self) -> &crate::operation::create_optimization_job::builders::CreateOptimizationJobInputBuilder {
        &self.inner
    }
    /// Sends the request and returns the response.
    ///
    /// If an error occurs, an `SdkError` will be returned with additional details that
    /// can be matched against.
    ///
    /// By default, any retryable failures will be retried twice. Retry behavior
    /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
    /// set when configuring the client.
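    ///
    /// For instance, a client built from a config with a larger retry budget might look like the
    /// following sketch (other required client settings such as credentials and region are
    /// omitted, and the attempt count is only illustrative):
    ///
    /// ```no_run
    /// use aws_sdk_sagemaker::config::retry::RetryConfig;
    ///
    /// let config = aws_sdk_sagemaker::Config::builder()
    ///     .retry_config(RetryConfig::standard().with_max_attempts(5))
    ///     .build();
    /// let client = aws_sdk_sagemaker::Client::from_conf(config);
    /// # let _ = client;
    /// ```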
    pub async fn send(
        self,
    ) -> ::std::result::Result<
        crate::operation::create_optimization_job::CreateOptimizationJobOutput,
        ::aws_smithy_runtime_api::client::result::SdkError<
            crate::operation::create_optimization_job::CreateOptimizationJobError,
            ::aws_smithy_runtime_api::client::orchestrator::HttpResponse,
        >,
    > {
        let input = self
            .inner
            .build()
            .map_err(::aws_smithy_runtime_api::client::result::SdkError::construction_failure)?;
        let runtime_plugins = crate::operation::create_optimization_job::CreateOptimizationJob::operation_runtime_plugins(
            self.handle.runtime_plugins.clone(),
            &self.handle.conf,
            self.config_override,
        );
        crate::operation::create_optimization_job::CreateOptimizationJob::orchestrate(&runtime_plugins, input).await
    }

    /// Consumes this builder, creating a customizable operation that can be modified before being sent.
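    ///
    /// As a sketch of one possible customization (the header name below is purely illustrative,
    /// and `mutate_request` is only one of the available hooks), assuming a configured `client`:
    ///
    /// ```no_run
    /// # async fn example(client: &aws_sdk_sagemaker::Client) -> Result<(), aws_sdk_sagemaker::Error> {
    /// let output = client
    ///     .create_optimization_job()
    ///     .optimization_job_name("my-optimization-job")
    ///     // ... set the remaining required fields here ...
    ///     .customize()
    ///     .mutate_request(|request| {
    ///         // Attach an extra header before the request is dispatched.
    ///         request.headers_mut().insert("x-example-header", "example-value");
    ///     })
    ///     .send()
    ///     .await?;
    /// # let _ = output;
    /// # Ok(())
    /// # }
    /// ```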
    pub fn customize(
        self,
    ) -> crate::client::customize::CustomizableOperation<
        crate::operation::create_optimization_job::CreateOptimizationJobOutput,
        crate::operation::create_optimization_job::CreateOptimizationJobError,
        Self,
    > {
        crate::client::customize::CustomizableOperation::new(self)
    }
    pub(crate) fn config_override(mut self, config_override: impl ::std::convert::Into<crate::config::Builder>) -> Self {
        self.set_config_override(::std::option::Option::Some(config_override.into()));
        self
    }

    pub(crate) fn set_config_override(&mut self, config_override: ::std::option::Option<crate::config::Builder>) -> &mut Self {
        self.config_override = config_override;
        self
    }
    /// <p>A custom name for the new optimization job.</p>
    pub fn optimization_job_name(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.inner = self.inner.optimization_job_name(input.into());
        self
    }
    /// <p>A custom name for the new optimization job.</p>
    pub fn set_optimization_job_name(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.inner = self.inner.set_optimization_job_name(input);
        self
    }
    /// <p>A custom name for the new optimization job.</p>
    pub fn get_optimization_job_name(&self) -> &::std::option::Option<::std::string::String> {
        self.inner.get_optimization_job_name()
    }
    /// <p>The Amazon Resource Name (ARN) of an IAM role that enables Amazon SageMaker to perform tasks on your behalf.</p>
    /// <p>During model optimization, Amazon SageMaker needs your permission to:</p>
    /// <ul>
    /// <li>
    /// <p>Read input data from an S3 bucket</p></li>
    /// <li>
    /// <p>Write model artifacts to an S3 bucket</p></li>
    /// <li>
    /// <p>Write logs to Amazon CloudWatch Logs</p></li>
    /// <li>
    /// <p>Publish metrics to Amazon CloudWatch</p></li>
    /// </ul>
    /// <p>You grant permissions for all of these tasks to an IAM role. To pass this role to Amazon SageMaker, the caller of this API must have the <code>iam:PassRole</code> permission. For more information, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-roles.html">Amazon SageMaker Roles</a>.</p>
    pub fn role_arn(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.inner = self.inner.role_arn(input.into());
        self
    }
    /// <p>The Amazon Resource Name (ARN) of an IAM role that enables Amazon SageMaker to perform tasks on your behalf.</p>
    /// <p>During model optimization, Amazon SageMaker needs your permission to:</p>
    /// <ul>
    /// <li>
    /// <p>Read input data from an S3 bucket</p></li>
    /// <li>
    /// <p>Write model artifacts to an S3 bucket</p></li>
    /// <li>
    /// <p>Write logs to Amazon CloudWatch Logs</p></li>
    /// <li>
    /// <p>Publish metrics to Amazon CloudWatch</p></li>
    /// </ul>
    /// <p>You grant permissions for all of these tasks to an IAM role. To pass this role to Amazon SageMaker, the caller of this API must have the <code>iam:PassRole</code> permission. For more information, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-roles.html">Amazon SageMaker Roles</a>.</p>
    pub fn set_role_arn(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.inner = self.inner.set_role_arn(input);
        self
    }
    /// <p>The Amazon Resource Name (ARN) of an IAM role that enables Amazon SageMaker to perform tasks on your behalf.</p>
    /// <p>During model optimization, Amazon SageMaker needs your permission to:</p>
    /// <ul>
    /// <li>
    /// <p>Read input data from an S3 bucket</p></li>
    /// <li>
    /// <p>Write model artifacts to an S3 bucket</p></li>
    /// <li>
    /// <p>Write logs to Amazon CloudWatch Logs</p></li>
    /// <li>
    /// <p>Publish metrics to Amazon CloudWatch</p></li>
    /// </ul>
    /// <p>You grant permissions for all of these tasks to an IAM role. To pass this role to Amazon SageMaker, the caller of this API must have the <code>iam:PassRole</code> permission. For more information, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-roles.html">Amazon SageMaker Roles</a>.</p>
    pub fn get_role_arn(&self) -> &::std::option::Option<::std::string::String> {
        self.inner.get_role_arn()
    }
    /// <p>The location of the source model to optimize with an optimization job.</p>
    pub fn model_source(mut self, input: crate::types::OptimizationJobModelSource) -> Self {
        self.inner = self.inner.model_source(input);
        self
    }
    /// <p>The location of the source model to optimize with an optimization job.</p>
    pub fn set_model_source(mut self, input: ::std::option::Option<crate::types::OptimizationJobModelSource>) -> Self {
        self.inner = self.inner.set_model_source(input);
        self
    }
    /// <p>The location of the source model to optimize with an optimization job.</p>
    pub fn get_model_source(&self) -> &::std::option::Option<crate::types::OptimizationJobModelSource> {
        self.inner.get_model_source()
    }
    /// <p>The type of instance that hosts the optimized model that you create with the optimization job.</p>
    pub fn deployment_instance_type(mut self, input: crate::types::OptimizationJobDeploymentInstanceType) -> Self {
        self.inner = self.inner.deployment_instance_type(input);
        self
    }
    /// <p>The type of instance that hosts the optimized model that you create with the optimization job.</p>
    pub fn set_deployment_instance_type(mut self, input: ::std::option::Option<crate::types::OptimizationJobDeploymentInstanceType>) -> Self {
        self.inner = self.inner.set_deployment_instance_type(input);
        self
    }
    /// <p>The type of instance that hosts the optimized model that you create with the optimization job.</p>
    pub fn get_deployment_instance_type(&self) -> &::std::option::Option<crate::types::OptimizationJobDeploymentInstanceType> {
        self.inner.get_deployment_instance_type()
    }
    ///
    /// Adds a key-value pair to `OptimizationEnvironment`.
    ///
    /// To override the contents of this collection use [`set_optimization_environment`](Self::set_optimization_environment).
    ///
    /// <p>The environment variables to set in the model container.</p>
    pub fn optimization_environment(
        mut self,
        k: impl ::std::convert::Into<::std::string::String>,
        v: impl ::std::convert::Into<::std::string::String>,
    ) -> Self {
        self.inner = self.inner.optimization_environment(k.into(), v.into());
        self
    }
    /// <p>The environment variables to set in the model container.</p>
    pub fn set_optimization_environment(
        mut self,
        input: ::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>>,
    ) -> Self {
        self.inner = self.inner.set_optimization_environment(input);
        self
    }
    /// <p>The environment variables to set in the model container.</p>
    pub fn get_optimization_environment(&self) -> &::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>> {
        self.inner.get_optimization_environment()
    }
    ///
    /// Appends an item to `OptimizationConfigs`.
    ///
    /// To override the contents of this collection use [`set_optimization_configs`](Self::set_optimization_configs).
    ///
    /// <p>Settings for each of the optimization techniques that the job applies.</p>
    pub fn optimization_configs(mut self, input: crate::types::OptimizationConfig) -> Self {
        self.inner = self.inner.optimization_configs(input);
        self
    }
    /// <p>Settings for each of the optimization techniques that the job applies.</p>
    pub fn set_optimization_configs(mut self, input: ::std::option::Option<::std::vec::Vec<crate::types::OptimizationConfig>>) -> Self {
        self.inner = self.inner.set_optimization_configs(input);
        self
    }
    /// <p>Settings for each of the optimization techniques that the job applies.</p>
    pub fn get_optimization_configs(&self) -> &::std::option::Option<::std::vec::Vec<crate::types::OptimizationConfig>> {
        self.inner.get_optimization_configs()
    }
    /// <p>Details for where to store the optimized model that you create with the optimization job.</p>
    pub fn output_config(mut self, input: crate::types::OptimizationJobOutputConfig) -> Self {
        self.inner = self.inner.output_config(input);
        self
    }
    /// <p>Details for where to store the optimized model that you create with the optimization job.</p>
    pub fn set_output_config(mut self, input: ::std::option::Option<crate::types::OptimizationJobOutputConfig>) -> Self {
        self.inner = self.inner.set_output_config(input);
        self
    }
    /// <p>Details for where to store the optimized model that you create with the optimization job.</p>
    pub fn get_output_config(&self) -> &::std::option::Option<crate::types::OptimizationJobOutputConfig> {
        self.inner.get_output_config()
    }
    /// <p>Specifies a limit to how long a job can run. When the job reaches the time limit, SageMaker ends the job. Use this API to cap costs.</p>
    /// <p>To stop a training job, SageMaker sends the algorithm the <code>SIGTERM</code> signal, which delays job termination for 120 seconds. Algorithms can use this 120-second window to save the model artifacts, so the results of training are not lost.</p>
    /// <p>The training algorithms provided by SageMaker automatically save the intermediate results of a model training job when possible. This attempt to save artifacts is only a best-effort case, as the model might not be in a state from which it can be saved. For example, if training has just started, the model might not be ready to save. When saved, this intermediate data is a valid model artifact. You can use it to create a model with <code>CreateModel</code>.</p><note>
    /// <p>The Neural Topic Model (NTM) currently does not support saving intermediate model artifacts. When training NTMs, make sure that the maximum runtime is sufficient for the training job to complete.</p>
    /// </note>
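    ///
    /// As a brief sketch, a one-hour cap could be expressed as follows (the runtime value is
    /// purely illustrative), building on a fluent builder obtained from the client:
    ///
    /// ```no_run
    /// # fn example(builder: aws_sdk_sagemaker::operation::create_optimization_job::builders::CreateOptimizationJobFluentBuilder) {
    /// use aws_sdk_sagemaker::types::StoppingCondition;
    ///
    /// // Limit the optimization job to at most 3600 seconds of runtime.
    /// let builder = builder.stopping_condition(
    ///     StoppingCondition::builder().max_runtime_in_seconds(3600).build(),
    /// );
    /// # let _ = builder;
    /// # }
    /// ```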
    pub fn stopping_condition(mut self, input: crate::types::StoppingCondition) -> Self {
        self.inner = self.inner.stopping_condition(input);
        self
    }
    /// <p>Specifies a limit to how long a job can run. When the job reaches the time limit, SageMaker ends the job. Use this API to cap costs.</p>
    /// <p>To stop a training job, SageMaker sends the algorithm the <code>SIGTERM</code> signal, which delays job termination for 120 seconds. Algorithms can use this 120-second window to save the model artifacts, so the results of training are not lost.</p>
    /// <p>The training algorithms provided by SageMaker automatically save the intermediate results of a model training job when possible. This attempt to save artifacts is only a best-effort case, as the model might not be in a state from which it can be saved. For example, if training has just started, the model might not be ready to save. When saved, this intermediate data is a valid model artifact. You can use it to create a model with <code>CreateModel</code>.</p><note>
    /// <p>The Neural Topic Model (NTM) currently does not support saving intermediate model artifacts. When training NTMs, make sure that the maximum runtime is sufficient for the training job to complete.</p>
    /// </note>
    pub fn set_stopping_condition(mut self, input: ::std::option::Option<crate::types::StoppingCondition>) -> Self {
        self.inner = self.inner.set_stopping_condition(input);
        self
    }
    /// <p>Specifies a limit to how long a job can run. When the job reaches the time limit, SageMaker ends the job. Use this API to cap costs.</p>
    /// <p>To stop a training job, SageMaker sends the algorithm the <code>SIGTERM</code> signal, which delays job termination for 120 seconds. Algorithms can use this 120-second window to save the model artifacts, so the results of training are not lost.</p>
    /// <p>The training algorithms provided by SageMaker automatically save the intermediate results of a model training job when possible. This attempt to save artifacts is only a best-effort case, as the model might not be in a state from which it can be saved. For example, if training has just started, the model might not be ready to save. When saved, this intermediate data is a valid model artifact. You can use it to create a model with <code>CreateModel</code>.</p><note>
    /// <p>The Neural Topic Model (NTM) currently does not support saving intermediate model artifacts. When training NTMs, make sure that the maximum runtime is sufficient for the training job to complete.</p>
    /// </note>
    pub fn get_stopping_condition(&self) -> &::std::option::Option<crate::types::StoppingCondition> {
        self.inner.get_stopping_condition()
    }
    ///
    /// Appends an item to `Tags`.
    ///
    /// To override the contents of this collection use [`set_tags`](Self::set_tags).
    ///
    /// <p>A list of key-value pairs associated with the optimization job. For more information, see <a href="https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html">Tagging Amazon Web Services resources</a> in the <i>Amazon Web Services General Reference Guide</i>.</p>
    pub fn tags(mut self, input: crate::types::Tag) -> Self {
        self.inner = self.inner.tags(input);
        self
    }
    /// <p>A list of key-value pairs associated with the optimization job. For more information, see <a href="https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html">Tagging Amazon Web Services resources</a> in the <i>Amazon Web Services General Reference Guide</i>.</p>
    pub fn set_tags(mut self, input: ::std::option::Option<::std::vec::Vec<crate::types::Tag>>) -> Self {
        self.inner = self.inner.set_tags(input);
        self
    }
    /// <p>A list of key-value pairs associated with the optimization job. For more information, see <a href="https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html">Tagging Amazon Web Services resources</a> in the <i>Amazon Web Services General Reference Guide</i>.</p>
    pub fn get_tags(&self) -> &::std::option::Option<::std::vec::Vec<crate::types::Tag>> {
        self.inner.get_tags()
    }
    /// <p>A VPC in Amazon VPC that your optimized model has access to.</p>
    pub fn vpc_config(mut self, input: crate::types::OptimizationVpcConfig) -> Self {
        self.inner = self.inner.vpc_config(input);
        self
    }
    /// <p>A VPC in Amazon VPC that your optimized model has access to.</p>
    pub fn set_vpc_config(mut self, input: ::std::option::Option<crate::types::OptimizationVpcConfig>) -> Self {
        self.inner = self.inner.set_vpc_config(input);
        self
    }
    /// <p>A VPC in Amazon VPC that your optimized model has access to.</p>
    pub fn get_vpc_config(&self) -> &::std::option::Option<crate::types::OptimizationVpcConfig> {
        self.inner.get_vpc_config()
    }
}