// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(::std::clone::Clone, ::std::cmp::PartialEq, ::std::fmt::Debug)]
pub struct CreateModelInput {
    /// <p>The name of the new model.</p>
    pub model_name: ::std::option::Option<::std::string::String>,
    /// <p>The location of the primary docker image containing inference code, associated artifacts, and custom environment map that the inference code uses when the model is deployed for predictions. </p>
    pub primary_container: ::std::option::Option<crate::types::ContainerDefinition>,
    /// <p>Specifies the containers in the inference pipeline.</p>
    pub containers: ::std::option::Option<::std::vec::Vec<crate::types::ContainerDefinition>>,
    /// <p>Specifies details of how containers in a multi-container endpoint are called.</p>
    pub inference_execution_config: ::std::option::Option<crate::types::InferenceExecutionConfig>,
    /// <p>The Amazon Resource Name (ARN) of the IAM role that SageMaker can assume to access model artifacts and docker image for deployment on ML compute instances or for batch transform jobs. Deploying on ML compute instances is part of model hosting. For more information, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-roles.html">SageMaker Roles</a>. </p> <note>
    /// <p>To be able to pass this role to SageMaker, the caller of this API must have the <code>iam:PassRole</code> permission.</p>
    /// </note>
    pub execution_role_arn: ::std::option::Option<::std::string::String>,
    /// <p>An array of key-value pairs. You can use tags to categorize your Amazon Web Services resources in different ways, for example, by purpose, owner, or environment. For more information, see <a href="https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html">Tagging Amazon Web Services Resources</a>.</p>
    pub tags: ::std::option::Option<::std::vec::Vec<crate::types::Tag>>,
    /// <p>A <a href="https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_VpcConfig.html">VpcConfig</a> object that specifies the VPC that you want your model to connect to. Control access to and from your model container by configuring the VPC. <code>VpcConfig</code> is used in hosting services and in batch transform. For more information, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/host-vpc.html">Protect Endpoints by Using an Amazon Virtual Private Cloud</a> and <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/batch-vpc.html">Protect Data in Batch Transform Jobs by Using an Amazon Virtual Private Cloud</a>.</p>
    pub vpc_config: ::std::option::Option<crate::types::VpcConfig>,
    /// <p>Isolates the model container. No inbound or outbound network calls can be made to or from the model container.</p>
    pub enable_network_isolation: ::std::option::Option<bool>,
}
impl CreateModelInput {
    /// <p>The name of the new model.</p>
    pub fn model_name(&self) -> ::std::option::Option<&str> {
        self.model_name.as_deref()
    }
    /// <p>The location of the primary docker image containing inference code, associated artifacts, and custom environment map that the inference code uses when the model is deployed for predictions. </p>
    pub fn primary_container(&self) -> ::std::option::Option<&crate::types::ContainerDefinition> {
        self.primary_container.as_ref()
    }
    /// <p>Specifies the containers in the inference pipeline.</p>
    pub fn containers(&self) -> ::std::option::Option<&[crate::types::ContainerDefinition]> {
        self.containers.as_deref()
    }
    /// <p>Specifies details of how containers in a multi-container endpoint are called.</p>
    pub fn inference_execution_config(&self) -> ::std::option::Option<&crate::types::InferenceExecutionConfig> {
        self.inference_execution_config.as_ref()
    }
    /// <p>The Amazon Resource Name (ARN) of the IAM role that SageMaker can assume to access model artifacts and docker image for deployment on ML compute instances or for batch transform jobs. Deploying on ML compute instances is part of model hosting. For more information, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-roles.html">SageMaker Roles</a>. </p> <note>
    /// <p>To be able to pass this role to SageMaker, the caller of this API must have the <code>iam:PassRole</code> permission.</p>
    /// </note>
    pub fn execution_role_arn(&self) -> ::std::option::Option<&str> {
        self.execution_role_arn.as_deref()
    }
    /// <p>An array of key-value pairs. You can use tags to categorize your Amazon Web Services resources in different ways, for example, by purpose, owner, or environment. For more information, see <a href="https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html">Tagging Amazon Web Services Resources</a>.</p>
    pub fn tags(&self) -> ::std::option::Option<&[crate::types::Tag]> {
        self.tags.as_deref()
    }
    /// <p>A <a href="https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_VpcConfig.html">VpcConfig</a> object that specifies the VPC that you want your model to connect to. Control access to and from your model container by configuring the VPC. <code>VpcConfig</code> is used in hosting services and in batch transform. For more information, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/host-vpc.html">Protect Endpoints by Using an Amazon Virtual Private Cloud</a> and <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/batch-vpc.html">Protect Data in Batch Transform Jobs by Using an Amazon Virtual Private Cloud</a>.</p>
    pub fn vpc_config(&self) -> ::std::option::Option<&crate::types::VpcConfig> {
        self.vpc_config.as_ref()
    }
    /// <p>Isolates the model container. No inbound or outbound network calls can be made to or from the model container.</p>
    pub fn enable_network_isolation(&self) -> ::std::option::Option<bool> {
        self.enable_network_isolation
    }
}
impl CreateModelInput {
    /// Creates a new builder-style object to manufacture [`CreateModelInput`](crate::operation::create_model::CreateModelInput).
    pub fn builder() -> crate::operation::create_model::builders::CreateModelInputBuilder {
        crate::operation::create_model::builders::CreateModelInputBuilder::default()
    }
}
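// --- Illustrative usage sketch (not emitted by the smithy-rs code generator) ---
// A minimal example of driving the builder above: fluent setters wrap values in
// `Some`, `build()` returns a `Result`, and the accessors on `CreateModelInput`
// hand back borrowed views. The model name and role ARN are placeholder values.
#[cfg(test)]
mod create_model_input_builder_sketch {
    #[test]
    fn builds_a_minimal_input() {
        let input = crate::operation::create_model::CreateModelInput::builder()
            .model_name("example-model")
            .execution_role_arn("arn:aws:iam::123456789012:role/ExampleSageMakerRole")
            .enable_network_isolation(false)
            .build()
            .expect("no required members are enforced at build time for this input");
        // Accessors return borrowed, `Option`-wrapped views of the stored values.
        assert_eq!(input.model_name(), Some("example-model"));
        assert_eq!(input.enable_network_isolation(), Some(false));
    }
}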

/// A builder for [`CreateModelInput`](crate::operation::create_model::CreateModelInput).
#[non_exhaustive]
#[derive(::std::clone::Clone, ::std::cmp::PartialEq, ::std::default::Default, ::std::fmt::Debug)]
pub struct CreateModelInputBuilder {
    pub(crate) model_name: ::std::option::Option<::std::string::String>,
    pub(crate) primary_container: ::std::option::Option<crate::types::ContainerDefinition>,
    pub(crate) containers: ::std::option::Option<::std::vec::Vec<crate::types::ContainerDefinition>>,
    pub(crate) inference_execution_config: ::std::option::Option<crate::types::InferenceExecutionConfig>,
    pub(crate) execution_role_arn: ::std::option::Option<::std::string::String>,
    pub(crate) tags: ::std::option::Option<::std::vec::Vec<crate::types::Tag>>,
    pub(crate) vpc_config: ::std::option::Option<crate::types::VpcConfig>,
    pub(crate) enable_network_isolation: ::std::option::Option<bool>,
}
impl CreateModelInputBuilder {
    /// <p>The name of the new model.</p>
    pub fn model_name(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.model_name = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>The name of the new model.</p>
    pub fn set_model_name(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.model_name = input;
        self
    }
    /// <p>The name of the new model.</p>
    pub fn get_model_name(&self) -> &::std::option::Option<::std::string::String> {
        &self.model_name
    }
    /// <p>The location of the primary docker image containing inference code, associated artifacts, and custom environment map that the inference code uses when the model is deployed for predictions. </p>
    pub fn primary_container(mut self, input: crate::types::ContainerDefinition) -> Self {
        self.primary_container = ::std::option::Option::Some(input);
        self
    }
    /// <p>The location of the primary docker image containing inference code, associated artifacts, and custom environment map that the inference code uses when the model is deployed for predictions. </p>
    pub fn set_primary_container(mut self, input: ::std::option::Option<crate::types::ContainerDefinition>) -> Self {
        self.primary_container = input;
        self
    }
    /// <p>The location of the primary docker image containing inference code, associated artifacts, and custom environment map that the inference code uses when the model is deployed for predictions. </p>
    pub fn get_primary_container(&self) -> &::std::option::Option<crate::types::ContainerDefinition> {
        &self.primary_container
    }
    /// Appends an item to `containers`.
    ///
    /// To override the contents of this collection use [`set_containers`](Self::set_containers).
    ///
    /// <p>Specifies the containers in the inference pipeline.</p>
    pub fn containers(mut self, input: crate::types::ContainerDefinition) -> Self {
        let mut v = self.containers.unwrap_or_default();
        v.push(input);
        self.containers = ::std::option::Option::Some(v);
        self
    }
    /// <p>Specifies the containers in the inference pipeline.</p>
    pub fn set_containers(mut self, input: ::std::option::Option<::std::vec::Vec<crate::types::ContainerDefinition>>) -> Self {
        self.containers = input;
        self
    }
    /// <p>Specifies the containers in the inference pipeline.</p>
    pub fn get_containers(&self) -> &::std::option::Option<::std::vec::Vec<crate::types::ContainerDefinition>> {
        &self.containers
    }
    /// <p>Specifies details of how containers in a multi-container endpoint are called.</p>
    pub fn inference_execution_config(mut self, input: crate::types::InferenceExecutionConfig) -> Self {
        self.inference_execution_config = ::std::option::Option::Some(input);
        self
    }
    /// <p>Specifies details of how containers in a multi-container endpoint are called.</p>
    pub fn set_inference_execution_config(mut self, input: ::std::option::Option<crate::types::InferenceExecutionConfig>) -> Self {
        self.inference_execution_config = input;
        self
    }
    /// <p>Specifies details of how containers in a multi-container endpoint are called.</p>
    pub fn get_inference_execution_config(&self) -> &::std::option::Option<crate::types::InferenceExecutionConfig> {
        &self.inference_execution_config
    }
    /// <p>The Amazon Resource Name (ARN) of the IAM role that SageMaker can assume to access model artifacts and docker image for deployment on ML compute instances or for batch transform jobs. Deploying on ML compute instances is part of model hosting. For more information, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-roles.html">SageMaker Roles</a>. </p> <note>
    /// <p>To be able to pass this role to SageMaker, the caller of this API must have the <code>iam:PassRole</code> permission.</p>
    /// </note>
    pub fn execution_role_arn(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.execution_role_arn = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>The Amazon Resource Name (ARN) of the IAM role that SageMaker can assume to access model artifacts and docker image for deployment on ML compute instances or for batch transform jobs. Deploying on ML compute instances is part of model hosting. For more information, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-roles.html">SageMaker Roles</a>. </p> <note>
    /// <p>To be able to pass this role to SageMaker, the caller of this API must have the <code>iam:PassRole</code> permission.</p>
    /// </note>
    pub fn set_execution_role_arn(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.execution_role_arn = input;
        self
    }
    /// <p>The Amazon Resource Name (ARN) of the IAM role that SageMaker can assume to access model artifacts and docker image for deployment on ML compute instances or for batch transform jobs. Deploying on ML compute instances is part of model hosting. For more information, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-roles.html">SageMaker Roles</a>. </p> <note>
    /// <p>To be able to pass this role to SageMaker, the caller of this API must have the <code>iam:PassRole</code> permission.</p>
    /// </note>
    pub fn get_execution_role_arn(&self) -> &::std::option::Option<::std::string::String> {
        &self.execution_role_arn
    }
    /// Appends an item to `tags`.
    ///
    /// To override the contents of this collection use [`set_tags`](Self::set_tags).
    ///
    /// <p>An array of key-value pairs. You can use tags to categorize your Amazon Web Services resources in different ways, for example, by purpose, owner, or environment. For more information, see <a href="https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html">Tagging Amazon Web Services Resources</a>.</p>
    pub fn tags(mut self, input: crate::types::Tag) -> Self {
        let mut v = self.tags.unwrap_or_default();
        v.push(input);
        self.tags = ::std::option::Option::Some(v);
        self
    }
    /// <p>An array of key-value pairs. You can use tags to categorize your Amazon Web Services resources in different ways, for example, by purpose, owner, or environment. For more information, see <a href="https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html">Tagging Amazon Web Services Resources</a>.</p>
    pub fn set_tags(mut self, input: ::std::option::Option<::std::vec::Vec<crate::types::Tag>>) -> Self {
        self.tags = input;
        self
    }
    /// <p>An array of key-value pairs. You can use tags to categorize your Amazon Web Services resources in different ways, for example, by purpose, owner, or environment. For more information, see <a href="https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html">Tagging Amazon Web Services Resources</a>.</p>
    pub fn get_tags(&self) -> &::std::option::Option<::std::vec::Vec<crate::types::Tag>> {
        &self.tags
    }
    /// <p>A <a href="https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_VpcConfig.html">VpcConfig</a> object that specifies the VPC that you want your model to connect to. Control access to and from your model container by configuring the VPC. <code>VpcConfig</code> is used in hosting services and in batch transform. For more information, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/host-vpc.html">Protect Endpoints by Using an Amazon Virtual Private Cloud</a> and <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/batch-vpc.html">Protect Data in Batch Transform Jobs by Using an Amazon Virtual Private Cloud</a>.</p>
    pub fn vpc_config(mut self, input: crate::types::VpcConfig) -> Self {
        self.vpc_config = ::std::option::Option::Some(input);
        self
    }
    /// <p>A <a href="https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_VpcConfig.html">VpcConfig</a> object that specifies the VPC that you want your model to connect to. Control access to and from your model container by configuring the VPC. <code>VpcConfig</code> is used in hosting services and in batch transform. For more information, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/host-vpc.html">Protect Endpoints by Using an Amazon Virtual Private Cloud</a> and <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/batch-vpc.html">Protect Data in Batch Transform Jobs by Using an Amazon Virtual Private Cloud</a>.</p>
    pub fn set_vpc_config(mut self, input: ::std::option::Option<crate::types::VpcConfig>) -> Self {
        self.vpc_config = input;
        self
    }
    /// <p>A <a href="https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_VpcConfig.html">VpcConfig</a> object that specifies the VPC that you want your model to connect to. Control access to and from your model container by configuring the VPC. <code>VpcConfig</code> is used in hosting services and in batch transform. For more information, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/host-vpc.html">Protect Endpoints by Using an Amazon Virtual Private Cloud</a> and <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/batch-vpc.html">Protect Data in Batch Transform Jobs by Using an Amazon Virtual Private Cloud</a>.</p>
    pub fn get_vpc_config(&self) -> &::std::option::Option<crate::types::VpcConfig> {
        &self.vpc_config
    }
    /// <p>Isolates the model container. No inbound or outbound network calls can be made to or from the model container.</p>
    pub fn enable_network_isolation(mut self, input: bool) -> Self {
        self.enable_network_isolation = ::std::option::Option::Some(input);
        self
    }
    /// <p>Isolates the model container. No inbound or outbound network calls can be made to or from the model container.</p>
    pub fn set_enable_network_isolation(mut self, input: ::std::option::Option<bool>) -> Self {
        self.enable_network_isolation = input;
        self
    }
    /// <p>Isolates the model container. No inbound or outbound network calls can be made to or from the model container.</p>
    pub fn get_enable_network_isolation(&self) -> &::std::option::Option<bool> {
        &self.enable_network_isolation
    }
    /// Consumes the builder and constructs a [`CreateModelInput`](crate::operation::create_model::CreateModelInput).
    pub fn build(self) -> ::std::result::Result<crate::operation::create_model::CreateModelInput, ::aws_smithy_http::operation::error::BuildError> {
        ::std::result::Result::Ok(crate::operation::create_model::CreateModelInput {
            model_name: self.model_name,
            primary_container: self.primary_container,
            containers: self.containers,
            inference_execution_config: self.inference_execution_config,
            execution_role_arn: self.execution_role_arn,
            tags: self.tags,
            vpc_config: self.vpc_config,
            enable_network_isolation: self.enable_network_isolation,
        })
    }
}
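// --- Second illustrative sketch (not emitted by the code generator) ---
// The `set_*` setters take `Option` values directly, which is convenient when a
// member is only conditionally present; members that are never set remain `None`
// on the built input. Collection members behave analogously: `containers(...)`
// and `tags(...)` append one element per call, while `set_containers`/`set_tags`
// replace the whole list.
#[cfg(test)]
mod create_model_input_optional_members_sketch {
    #[test]
    fn unset_members_remain_none() {
        let input = crate::operation::create_model::CreateModelInput::builder()
            .set_model_name(Some("example-model".to_string()))
            .set_execution_role_arn(None)
            .build()
            .expect("building CreateModelInput does not fail");
        assert_eq!(input.model_name(), Some("example-model"));
        // Members left unset (or explicitly set to `None`) come back as `None`.
        assert!(input.execution_role_arn().is_none());
        assert!(input.containers().is_none());
    }
}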