Skip to main content

aws_sdk_sagemaker/operation/create_optimization_job/_create_optimization_job_input.rs

1// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
/// Input shape for the `CreateOptimizationJob` operation: identifies the source model,
/// where to run the job, which optimization techniques to apply, and where to store the result.
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(::std::clone::Clone, ::std::cmp::PartialEq, ::std::fmt::Debug)]
pub struct CreateOptimizationJobInput {
    /// <p>A custom name for the new optimization job.</p>
    pub optimization_job_name: ::std::option::Option<::std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of an IAM role that enables Amazon SageMaker AI to perform tasks on your behalf.</p>
    /// <p>During model optimization, Amazon SageMaker AI needs your permission to:</p>
    /// <ul>
    /// <li>
    /// <p>Read input data from an S3 bucket</p></li>
    /// <li>
    /// <p>Write model artifacts to an S3 bucket</p></li>
    /// <li>
    /// <p>Write logs to Amazon CloudWatch Logs</p></li>
    /// <li>
    /// <p>Publish metrics to Amazon CloudWatch</p></li>
    /// </ul>
    /// <p>You grant permissions for all of these tasks to an IAM role. To pass this role to Amazon SageMaker AI, the caller of this API must have the <code>iam:PassRole</code> permission. For more information, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-roles.html">Amazon SageMaker AI Roles.</a></p>
    pub role_arn: ::std::option::Option<::std::string::String>,
    /// <p>The location of the source model to optimize with an optimization job.</p>
    pub model_source: ::std::option::Option<crate::types::OptimizationJobModelSource>,
    /// <p>The type of instance that hosts the optimized model that you create with the optimization job.</p>
    pub deployment_instance_type: ::std::option::Option<crate::types::OptimizationJobDeploymentInstanceType>,
    /// <p>The maximum number of instances to use for the optimization job.</p>
    pub max_instance_count: ::std::option::Option<i32>,
    /// <p>The environment variables to set in the model container.</p>
    pub optimization_environment: ::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>>,
    /// <p>Settings for each of the optimization techniques that the job applies.</p>
    pub optimization_configs: ::std::option::Option<::std::vec::Vec<crate::types::OptimizationConfig>>,
    /// <p>Details for where to store the optimized model that you create with the optimization job.</p>
    pub output_config: ::std::option::Option<crate::types::OptimizationJobOutputConfig>,
    /// <p>Specifies a limit to how long a job can run. When the job reaches the time limit, SageMaker ends the job. Use this API to cap costs.</p>
    /// <p>To stop a training job, SageMaker sends the algorithm the <code>SIGTERM</code> signal, which delays job termination for 120 seconds. Algorithms can use this 120-second window to save the model artifacts, so the results of training are not lost.</p>
    /// <p>The training algorithms provided by SageMaker automatically save the intermediate results of a model training job when possible. This attempt to save artifacts is only a best effort case as model might not be in a state from which it can be saved. For example, if training has just started, the model might not be ready to save. When saved, this intermediate data is a valid model artifact. You can use it to create a model with <code>CreateModel</code>.</p><note>
    /// <p>The Neural Topic Model (NTM) currently does not support saving intermediate model artifacts. When training NTMs, make sure that the maximum runtime is sufficient for the training job to complete.</p>
    /// </note>
    pub stopping_condition: ::std::option::Option<crate::types::StoppingCondition>,
    /// <p>A list of key-value pairs associated with the optimization job. For more information, see <a href="https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html">Tagging Amazon Web Services resources</a> in the <i>Amazon Web Services General Reference Guide</i>.</p>
    pub tags: ::std::option::Option<::std::vec::Vec<crate::types::Tag>>,
    /// <p>A VPC in Amazon VPC that your optimized model has access to.</p>
    pub vpc_config: ::std::option::Option<crate::types::OptimizationVpcConfig>,
}
45impl CreateOptimizationJobInput {
46    /// <p>A custom name for the new optimization job.</p>
47    pub fn optimization_job_name(&self) -> ::std::option::Option<&str> {
48        self.optimization_job_name.as_deref()
49    }
50    /// <p>The Amazon Resource Name (ARN) of an IAM role that enables Amazon SageMaker AI to perform tasks on your behalf.</p>
51    /// <p>During model optimization, Amazon SageMaker AI needs your permission to:</p>
52    /// <ul>
53    /// <li>
54    /// <p>Read input data from an S3 bucket</p></li>
55    /// <li>
56    /// <p>Write model artifacts to an S3 bucket</p></li>
57    /// <li>
58    /// <p>Write logs to Amazon CloudWatch Logs</p></li>
59    /// <li>
60    /// <p>Publish metrics to Amazon CloudWatch</p></li>
61    /// </ul>
62    /// <p>You grant permissions for all of these tasks to an IAM role. To pass this role to Amazon SageMaker AI, the caller of this API must have the <code>iam:PassRole</code> permission. For more information, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-roles.html">Amazon SageMaker AI Roles.</a></p>
63    pub fn role_arn(&self) -> ::std::option::Option<&str> {
64        self.role_arn.as_deref()
65    }
66    /// <p>The location of the source model to optimize with an optimization job.</p>
67    pub fn model_source(&self) -> ::std::option::Option<&crate::types::OptimizationJobModelSource> {
68        self.model_source.as_ref()
69    }
70    /// <p>The type of instance that hosts the optimized model that you create with the optimization job.</p>
71    pub fn deployment_instance_type(&self) -> ::std::option::Option<&crate::types::OptimizationJobDeploymentInstanceType> {
72        self.deployment_instance_type.as_ref()
73    }
74    /// <p>The maximum number of instances to use for the optimization job.</p>
75    pub fn max_instance_count(&self) -> ::std::option::Option<i32> {
76        self.max_instance_count
77    }
78    /// <p>The environment variables to set in the model container.</p>
79    pub fn optimization_environment(&self) -> ::std::option::Option<&::std::collections::HashMap<::std::string::String, ::std::string::String>> {
80        self.optimization_environment.as_ref()
81    }
82    /// <p>Settings for each of the optimization techniques that the job applies.</p>
83    ///
84    /// If no value was sent for this field, a default will be set. If you want to determine if no value was sent, use `.optimization_configs.is_none()`.
85    pub fn optimization_configs(&self) -> &[crate::types::OptimizationConfig] {
86        self.optimization_configs.as_deref().unwrap_or_default()
87    }
88    /// <p>Details for where to store the optimized model that you create with the optimization job.</p>
89    pub fn output_config(&self) -> ::std::option::Option<&crate::types::OptimizationJobOutputConfig> {
90        self.output_config.as_ref()
91    }
92    /// <p>Specifies a limit to how long a job can run. When the job reaches the time limit, SageMaker ends the job. Use this API to cap costs.</p>
93    /// <p>To stop a training job, SageMaker sends the algorithm the <code>SIGTERM</code> signal, which delays job termination for 120 seconds. Algorithms can use this 120-second window to save the model artifacts, so the results of training are not lost.</p>
94    /// <p>The training algorithms provided by SageMaker automatically save the intermediate results of a model training job when possible. This attempt to save artifacts is only a best effort case as model might not be in a state from which it can be saved. For example, if training has just started, the model might not be ready to save. When saved, this intermediate data is a valid model artifact. You can use it to create a model with <code>CreateModel</code>.</p><note>
95    /// <p>The Neural Topic Model (NTM) currently does not support saving intermediate model artifacts. When training NTMs, make sure that the maximum runtime is sufficient for the training job to complete.</p>
96    /// </note>
97    pub fn stopping_condition(&self) -> ::std::option::Option<&crate::types::StoppingCondition> {
98        self.stopping_condition.as_ref()
99    }
100    /// <p>A list of key-value pairs associated with the optimization job. For more information, see <a href="https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html">Tagging Amazon Web Services resources</a> in the <i>Amazon Web Services General Reference Guide</i>.</p>
101    ///
102    /// If no value was sent for this field, a default will be set. If you want to determine if no value was sent, use `.tags.is_none()`.
103    pub fn tags(&self) -> &[crate::types::Tag] {
104        self.tags.as_deref().unwrap_or_default()
105    }
106    /// <p>A VPC in Amazon VPC that your optimized model has access to.</p>
107    pub fn vpc_config(&self) -> ::std::option::Option<&crate::types::OptimizationVpcConfig> {
108        self.vpc_config.as_ref()
109    }
110}
111impl CreateOptimizationJobInput {
112    /// Creates a new builder-style object to manufacture [`CreateOptimizationJobInput`](crate::operation::create_optimization_job::CreateOptimizationJobInput).
113    pub fn builder() -> crate::operation::create_optimization_job::builders::CreateOptimizationJobInputBuilder {
114        crate::operation::create_optimization_job::builders::CreateOptimizationJobInputBuilder::default()
115    }
116}
117
/// A builder for [`CreateOptimizationJobInput`](crate::operation::create_optimization_job::CreateOptimizationJobInput).
#[derive(::std::clone::Clone, ::std::cmp::PartialEq, ::std::default::Default, ::std::fmt::Debug)]
#[non_exhaustive]
pub struct CreateOptimizationJobInputBuilder {
    // Each field mirrors the same-named field of `CreateOptimizationJobInput`.
    // All fields are `Option` so that `#[derive(Default)]` yields an empty builder;
    // `build()` moves them into the input shape unchanged.
    pub(crate) optimization_job_name: ::std::option::Option<::std::string::String>,
    pub(crate) role_arn: ::std::option::Option<::std::string::String>,
    pub(crate) model_source: ::std::option::Option<crate::types::OptimizationJobModelSource>,
    pub(crate) deployment_instance_type: ::std::option::Option<crate::types::OptimizationJobDeploymentInstanceType>,
    pub(crate) max_instance_count: ::std::option::Option<i32>,
    // Map of environment-variable name -> value for the model container.
    pub(crate) optimization_environment: ::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>>,
    pub(crate) optimization_configs: ::std::option::Option<::std::vec::Vec<crate::types::OptimizationConfig>>,
    pub(crate) output_config: ::std::option::Option<crate::types::OptimizationJobOutputConfig>,
    pub(crate) stopping_condition: ::std::option::Option<crate::types::StoppingCondition>,
    pub(crate) tags: ::std::option::Option<::std::vec::Vec<crate::types::Tag>>,
    pub(crate) vpc_config: ::std::option::Option<crate::types::OptimizationVpcConfig>,
}
134impl CreateOptimizationJobInputBuilder {
135    /// <p>A custom name for the new optimization job.</p>
136    /// This field is required.
137    pub fn optimization_job_name(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
138        self.optimization_job_name = ::std::option::Option::Some(input.into());
139        self
140    }
141    /// <p>A custom name for the new optimization job.</p>
142    pub fn set_optimization_job_name(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
143        self.optimization_job_name = input;
144        self
145    }
146    /// <p>A custom name for the new optimization job.</p>
147    pub fn get_optimization_job_name(&self) -> &::std::option::Option<::std::string::String> {
148        &self.optimization_job_name
149    }
150    /// <p>The Amazon Resource Name (ARN) of an IAM role that enables Amazon SageMaker AI to perform tasks on your behalf.</p>
151    /// <p>During model optimization, Amazon SageMaker AI needs your permission to:</p>
152    /// <ul>
153    /// <li>
154    /// <p>Read input data from an S3 bucket</p></li>
155    /// <li>
156    /// <p>Write model artifacts to an S3 bucket</p></li>
157    /// <li>
158    /// <p>Write logs to Amazon CloudWatch Logs</p></li>
159    /// <li>
160    /// <p>Publish metrics to Amazon CloudWatch</p></li>
161    /// </ul>
162    /// <p>You grant permissions for all of these tasks to an IAM role. To pass this role to Amazon SageMaker AI, the caller of this API must have the <code>iam:PassRole</code> permission. For more information, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-roles.html">Amazon SageMaker AI Roles.</a></p>
163    /// This field is required.
164    pub fn role_arn(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
165        self.role_arn = ::std::option::Option::Some(input.into());
166        self
167    }
168    /// <p>The Amazon Resource Name (ARN) of an IAM role that enables Amazon SageMaker AI to perform tasks on your behalf.</p>
169    /// <p>During model optimization, Amazon SageMaker AI needs your permission to:</p>
170    /// <ul>
171    /// <li>
172    /// <p>Read input data from an S3 bucket</p></li>
173    /// <li>
174    /// <p>Write model artifacts to an S3 bucket</p></li>
175    /// <li>
176    /// <p>Write logs to Amazon CloudWatch Logs</p></li>
177    /// <li>
178    /// <p>Publish metrics to Amazon CloudWatch</p></li>
179    /// </ul>
180    /// <p>You grant permissions for all of these tasks to an IAM role. To pass this role to Amazon SageMaker AI, the caller of this API must have the <code>iam:PassRole</code> permission. For more information, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-roles.html">Amazon SageMaker AI Roles.</a></p>
181    pub fn set_role_arn(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
182        self.role_arn = input;
183        self
184    }
185    /// <p>The Amazon Resource Name (ARN) of an IAM role that enables Amazon SageMaker AI to perform tasks on your behalf.</p>
186    /// <p>During model optimization, Amazon SageMaker AI needs your permission to:</p>
187    /// <ul>
188    /// <li>
189    /// <p>Read input data from an S3 bucket</p></li>
190    /// <li>
191    /// <p>Write model artifacts to an S3 bucket</p></li>
192    /// <li>
193    /// <p>Write logs to Amazon CloudWatch Logs</p></li>
194    /// <li>
195    /// <p>Publish metrics to Amazon CloudWatch</p></li>
196    /// </ul>
197    /// <p>You grant permissions for all of these tasks to an IAM role. To pass this role to Amazon SageMaker AI, the caller of this API must have the <code>iam:PassRole</code> permission. For more information, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-roles.html">Amazon SageMaker AI Roles.</a></p>
198    pub fn get_role_arn(&self) -> &::std::option::Option<::std::string::String> {
199        &self.role_arn
200    }
201    /// <p>The location of the source model to optimize with an optimization job.</p>
202    /// This field is required.
203    pub fn model_source(mut self, input: crate::types::OptimizationJobModelSource) -> Self {
204        self.model_source = ::std::option::Option::Some(input);
205        self
206    }
207    /// <p>The location of the source model to optimize with an optimization job.</p>
208    pub fn set_model_source(mut self, input: ::std::option::Option<crate::types::OptimizationJobModelSource>) -> Self {
209        self.model_source = input;
210        self
211    }
212    /// <p>The location of the source model to optimize with an optimization job.</p>
213    pub fn get_model_source(&self) -> &::std::option::Option<crate::types::OptimizationJobModelSource> {
214        &self.model_source
215    }
216    /// <p>The type of instance that hosts the optimized model that you create with the optimization job.</p>
217    /// This field is required.
218    pub fn deployment_instance_type(mut self, input: crate::types::OptimizationJobDeploymentInstanceType) -> Self {
219        self.deployment_instance_type = ::std::option::Option::Some(input);
220        self
221    }
222    /// <p>The type of instance that hosts the optimized model that you create with the optimization job.</p>
223    pub fn set_deployment_instance_type(mut self, input: ::std::option::Option<crate::types::OptimizationJobDeploymentInstanceType>) -> Self {
224        self.deployment_instance_type = input;
225        self
226    }
227    /// <p>The type of instance that hosts the optimized model that you create with the optimization job.</p>
228    pub fn get_deployment_instance_type(&self) -> &::std::option::Option<crate::types::OptimizationJobDeploymentInstanceType> {
229        &self.deployment_instance_type
230    }
231    /// <p>The maximum number of instances to use for the optimization job.</p>
232    pub fn max_instance_count(mut self, input: i32) -> Self {
233        self.max_instance_count = ::std::option::Option::Some(input);
234        self
235    }
236    /// <p>The maximum number of instances to use for the optimization job.</p>
237    pub fn set_max_instance_count(mut self, input: ::std::option::Option<i32>) -> Self {
238        self.max_instance_count = input;
239        self
240    }
241    /// <p>The maximum number of instances to use for the optimization job.</p>
242    pub fn get_max_instance_count(&self) -> &::std::option::Option<i32> {
243        &self.max_instance_count
244    }
245    /// Adds a key-value pair to `optimization_environment`.
246    ///
247    /// To override the contents of this collection use [`set_optimization_environment`](Self::set_optimization_environment).
248    ///
249    /// <p>The environment variables to set in the model container.</p>
250    pub fn optimization_environment(
251        mut self,
252        k: impl ::std::convert::Into<::std::string::String>,
253        v: impl ::std::convert::Into<::std::string::String>,
254    ) -> Self {
255        let mut hash_map = self.optimization_environment.unwrap_or_default();
256        hash_map.insert(k.into(), v.into());
257        self.optimization_environment = ::std::option::Option::Some(hash_map);
258        self
259    }
260    /// <p>The environment variables to set in the model container.</p>
261    pub fn set_optimization_environment(
262        mut self,
263        input: ::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>>,
264    ) -> Self {
265        self.optimization_environment = input;
266        self
267    }
268    /// <p>The environment variables to set in the model container.</p>
269    pub fn get_optimization_environment(&self) -> &::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>> {
270        &self.optimization_environment
271    }
272    /// Appends an item to `optimization_configs`.
273    ///
274    /// To override the contents of this collection use [`set_optimization_configs`](Self::set_optimization_configs).
275    ///
276    /// <p>Settings for each of the optimization techniques that the job applies.</p>
277    pub fn optimization_configs(mut self, input: crate::types::OptimizationConfig) -> Self {
278        let mut v = self.optimization_configs.unwrap_or_default();
279        v.push(input);
280        self.optimization_configs = ::std::option::Option::Some(v);
281        self
282    }
283    /// <p>Settings for each of the optimization techniques that the job applies.</p>
284    pub fn set_optimization_configs(mut self, input: ::std::option::Option<::std::vec::Vec<crate::types::OptimizationConfig>>) -> Self {
285        self.optimization_configs = input;
286        self
287    }
288    /// <p>Settings for each of the optimization techniques that the job applies.</p>
289    pub fn get_optimization_configs(&self) -> &::std::option::Option<::std::vec::Vec<crate::types::OptimizationConfig>> {
290        &self.optimization_configs
291    }
292    /// <p>Details for where to store the optimized model that you create with the optimization job.</p>
293    /// This field is required.
294    pub fn output_config(mut self, input: crate::types::OptimizationJobOutputConfig) -> Self {
295        self.output_config = ::std::option::Option::Some(input);
296        self
297    }
298    /// <p>Details for where to store the optimized model that you create with the optimization job.</p>
299    pub fn set_output_config(mut self, input: ::std::option::Option<crate::types::OptimizationJobOutputConfig>) -> Self {
300        self.output_config = input;
301        self
302    }
303    /// <p>Details for where to store the optimized model that you create with the optimization job.</p>
304    pub fn get_output_config(&self) -> &::std::option::Option<crate::types::OptimizationJobOutputConfig> {
305        &self.output_config
306    }
307    /// <p>Specifies a limit to how long a job can run. When the job reaches the time limit, SageMaker ends the job. Use this API to cap costs.</p>
308    /// <p>To stop a training job, SageMaker sends the algorithm the <code>SIGTERM</code> signal, which delays job termination for 120 seconds. Algorithms can use this 120-second window to save the model artifacts, so the results of training are not lost.</p>
309    /// <p>The training algorithms provided by SageMaker automatically save the intermediate results of a model training job when possible. This attempt to save artifacts is only a best effort case as model might not be in a state from which it can be saved. For example, if training has just started, the model might not be ready to save. When saved, this intermediate data is a valid model artifact. You can use it to create a model with <code>CreateModel</code>.</p><note>
310    /// <p>The Neural Topic Model (NTM) currently does not support saving intermediate model artifacts. When training NTMs, make sure that the maximum runtime is sufficient for the training job to complete.</p>
311    /// </note>
312    /// This field is required.
313    pub fn stopping_condition(mut self, input: crate::types::StoppingCondition) -> Self {
314        self.stopping_condition = ::std::option::Option::Some(input);
315        self
316    }
317    /// <p>Specifies a limit to how long a job can run. When the job reaches the time limit, SageMaker ends the job. Use this API to cap costs.</p>
318    /// <p>To stop a training job, SageMaker sends the algorithm the <code>SIGTERM</code> signal, which delays job termination for 120 seconds. Algorithms can use this 120-second window to save the model artifacts, so the results of training are not lost.</p>
319    /// <p>The training algorithms provided by SageMaker automatically save the intermediate results of a model training job when possible. This attempt to save artifacts is only a best effort case as model might not be in a state from which it can be saved. For example, if training has just started, the model might not be ready to save. When saved, this intermediate data is a valid model artifact. You can use it to create a model with <code>CreateModel</code>.</p><note>
320    /// <p>The Neural Topic Model (NTM) currently does not support saving intermediate model artifacts. When training NTMs, make sure that the maximum runtime is sufficient for the training job to complete.</p>
321    /// </note>
322    pub fn set_stopping_condition(mut self, input: ::std::option::Option<crate::types::StoppingCondition>) -> Self {
323        self.stopping_condition = input;
324        self
325    }
326    /// <p>Specifies a limit to how long a job can run. When the job reaches the time limit, SageMaker ends the job. Use this API to cap costs.</p>
327    /// <p>To stop a training job, SageMaker sends the algorithm the <code>SIGTERM</code> signal, which delays job termination for 120 seconds. Algorithms can use this 120-second window to save the model artifacts, so the results of training are not lost.</p>
328    /// <p>The training algorithms provided by SageMaker automatically save the intermediate results of a model training job when possible. This attempt to save artifacts is only a best effort case as model might not be in a state from which it can be saved. For example, if training has just started, the model might not be ready to save. When saved, this intermediate data is a valid model artifact. You can use it to create a model with <code>CreateModel</code>.</p><note>
329    /// <p>The Neural Topic Model (NTM) currently does not support saving intermediate model artifacts. When training NTMs, make sure that the maximum runtime is sufficient for the training job to complete.</p>
330    /// </note>
331    pub fn get_stopping_condition(&self) -> &::std::option::Option<crate::types::StoppingCondition> {
332        &self.stopping_condition
333    }
334    /// Appends an item to `tags`.
335    ///
336    /// To override the contents of this collection use [`set_tags`](Self::set_tags).
337    ///
338    /// <p>A list of key-value pairs associated with the optimization job. For more information, see <a href="https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html">Tagging Amazon Web Services resources</a> in the <i>Amazon Web Services General Reference Guide</i>.</p>
339    pub fn tags(mut self, input: crate::types::Tag) -> Self {
340        let mut v = self.tags.unwrap_or_default();
341        v.push(input);
342        self.tags = ::std::option::Option::Some(v);
343        self
344    }
345    /// <p>A list of key-value pairs associated with the optimization job. For more information, see <a href="https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html">Tagging Amazon Web Services resources</a> in the <i>Amazon Web Services General Reference Guide</i>.</p>
346    pub fn set_tags(mut self, input: ::std::option::Option<::std::vec::Vec<crate::types::Tag>>) -> Self {
347        self.tags = input;
348        self
349    }
350    /// <p>A list of key-value pairs associated with the optimization job. For more information, see <a href="https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html">Tagging Amazon Web Services resources</a> in the <i>Amazon Web Services General Reference Guide</i>.</p>
351    pub fn get_tags(&self) -> &::std::option::Option<::std::vec::Vec<crate::types::Tag>> {
352        &self.tags
353    }
354    /// <p>A VPC in Amazon VPC that your optimized model has access to.</p>
355    pub fn vpc_config(mut self, input: crate::types::OptimizationVpcConfig) -> Self {
356        self.vpc_config = ::std::option::Option::Some(input);
357        self
358    }
359    /// <p>A VPC in Amazon VPC that your optimized model has access to.</p>
360    pub fn set_vpc_config(mut self, input: ::std::option::Option<crate::types::OptimizationVpcConfig>) -> Self {
361        self.vpc_config = input;
362        self
363    }
364    /// <p>A VPC in Amazon VPC that your optimized model has access to.</p>
365    pub fn get_vpc_config(&self) -> &::std::option::Option<crate::types::OptimizationVpcConfig> {
366        &self.vpc_config
367    }
368    /// Consumes the builder and constructs a [`CreateOptimizationJobInput`](crate::operation::create_optimization_job::CreateOptimizationJobInput).
369    pub fn build(
370        self,
371    ) -> ::std::result::Result<crate::operation::create_optimization_job::CreateOptimizationJobInput, ::aws_smithy_types::error::operation::BuildError>
372    {
373        ::std::result::Result::Ok(crate::operation::create_optimization_job::CreateOptimizationJobInput {
374            optimization_job_name: self.optimization_job_name,
375            role_arn: self.role_arn,
376            model_source: self.model_source,
377            deployment_instance_type: self.deployment_instance_type,
378            max_instance_count: self.max_instance_count,
379            optimization_environment: self.optimization_environment,
380            optimization_configs: self.optimization_configs,
381            output_config: self.output_config,
382            stopping_condition: self.stopping_condition,
383            tags: self.tags,
384            vpc_config: self.vpc_config,
385        })
386    }
387}