// aws_sdk_mwaaserverless/operation/create_workflow/builders.rs
1// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
2pub use crate::operation::create_workflow::_create_workflow_output::CreateWorkflowOutputBuilder;
3
4pub use crate::operation::create_workflow::_create_workflow_input::CreateWorkflowInputBuilder;
5
6impl crate::operation::create_workflow::builders::CreateWorkflowInputBuilder {
7 /// Sends a request with this input using the given client.
8 pub async fn send_with(
9 self,
10 client: &crate::Client,
11 ) -> ::std::result::Result<
12 crate::operation::create_workflow::CreateWorkflowOutput,
13 ::aws_smithy_runtime_api::client::result::SdkError<
14 crate::operation::create_workflow::CreateWorkflowError,
15 ::aws_smithy_runtime_api::client::orchestrator::HttpResponse,
16 >,
17 > {
18 let mut fluent_builder = client.create_workflow();
19 fluent_builder.inner = self;
20 fluent_builder.send().await
21 }
22}
/// Fluent builder constructing a request to `CreateWorkflow`.
///
/// <p>Creates a new workflow in Amazon Managed Workflows for Apache Airflow Serverless. This operation initializes a workflow with the specified configuration including the workflow definition, execution role, and optional settings for encryption, logging, and networking. You must provide the workflow definition as a YAML file stored in Amazon S3 that defines the DAG structure using supported Amazon Web Services operators. Amazon Managed Workflows for Apache Airflow Serverless automatically creates the first version of the workflow and sets up the necessary execution environment with multi-tenant isolation and security controls.</p>
#[derive(::std::clone::Clone, ::std::fmt::Debug)]
pub struct CreateWorkflowFluentBuilder {
    // Shared client internals (runtime plugins + resolved config) consumed at send time.
    handle: ::std::sync::Arc<crate::client::Handle>,
    // Accumulates the CreateWorkflow input as the fluent setters are invoked.
    inner: crate::operation::create_workflow::builders::CreateWorkflowInputBuilder,
    // Optional per-operation configuration layered over the client-level config.
    config_override: ::std::option::Option<crate::config::Builder>,
}
32impl
33 crate::client::customize::internal::CustomizableSend<
34 crate::operation::create_workflow::CreateWorkflowOutput,
35 crate::operation::create_workflow::CreateWorkflowError,
36 > for CreateWorkflowFluentBuilder
37{
38 fn send(
39 self,
40 config_override: crate::config::Builder,
41 ) -> crate::client::customize::internal::BoxFuture<
42 crate::client::customize::internal::SendResult<
43 crate::operation::create_workflow::CreateWorkflowOutput,
44 crate::operation::create_workflow::CreateWorkflowError,
45 >,
46 > {
47 ::std::boxed::Box::pin(async move { self.config_override(config_override).send().await })
48 }
49}
50impl CreateWorkflowFluentBuilder {
51 /// Creates a new `CreateWorkflowFluentBuilder`.
52 pub(crate) fn new(handle: ::std::sync::Arc<crate::client::Handle>) -> Self {
53 Self {
54 handle,
55 inner: ::std::default::Default::default(),
56 config_override: ::std::option::Option::None,
57 }
58 }
59 /// Access the CreateWorkflow as a reference.
60 pub fn as_input(&self) -> &crate::operation::create_workflow::builders::CreateWorkflowInputBuilder {
61 &self.inner
62 }
63 /// Sends the request and returns the response.
64 ///
65 /// If an error occurs, an `SdkError` will be returned with additional details that
66 /// can be matched against.
67 ///
68 /// By default, any retryable failures will be retried twice. Retry behavior
69 /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
70 /// set when configuring the client.
71 pub async fn send(
72 self,
73 ) -> ::std::result::Result<
74 crate::operation::create_workflow::CreateWorkflowOutput,
75 ::aws_smithy_runtime_api::client::result::SdkError<
76 crate::operation::create_workflow::CreateWorkflowError,
77 ::aws_smithy_runtime_api::client::orchestrator::HttpResponse,
78 >,
79 > {
80 let input = self
81 .inner
82 .build()
83 .map_err(::aws_smithy_runtime_api::client::result::SdkError::construction_failure)?;
84 let runtime_plugins = crate::operation::create_workflow::CreateWorkflow::operation_runtime_plugins(
85 self.handle.runtime_plugins.clone(),
86 &self.handle.conf,
87 self.config_override,
88 );
89 crate::operation::create_workflow::CreateWorkflow::orchestrate(&runtime_plugins, input).await
90 }
91
92 /// Consumes this builder, creating a customizable operation that can be modified before being sent.
93 pub fn customize(
94 self,
95 ) -> crate::client::customize::CustomizableOperation<
96 crate::operation::create_workflow::CreateWorkflowOutput,
97 crate::operation::create_workflow::CreateWorkflowError,
98 Self,
99 > {
100 crate::client::customize::CustomizableOperation::new(self)
101 }
102 pub(crate) fn config_override(mut self, config_override: impl ::std::convert::Into<crate::config::Builder>) -> Self {
103 self.set_config_override(::std::option::Option::Some(config_override.into()));
104 self
105 }
106
107 pub(crate) fn set_config_override(&mut self, config_override: ::std::option::Option<crate::config::Builder>) -> &mut Self {
108 self.config_override = config_override;
109 self
110 }
111 /// <p>The name of the workflow. You must use unique workflow names within your Amazon Web Services account. The service generates a unique identifier that is appended to ensure temporal uniqueness across the account lifecycle.</p>
112 pub fn name(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
113 self.inner = self.inner.name(input.into());
114 self
115 }
116 /// <p>The name of the workflow. You must use unique workflow names within your Amazon Web Services account. The service generates a unique identifier that is appended to ensure temporal uniqueness across the account lifecycle.</p>
117 pub fn set_name(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
118 self.inner = self.inner.set_name(input);
119 self
120 }
121 /// <p>The name of the workflow. You must use unique workflow names within your Amazon Web Services account. The service generates a unique identifier that is appended to ensure temporal uniqueness across the account lifecycle.</p>
122 pub fn get_name(&self) -> &::std::option::Option<::std::string::String> {
123 self.inner.get_name()
124 }
125 /// <p>A unique, case-sensitive identifier that you provide to ensure the idempotency of the request. This token prevents duplicate workflow creation requests.</p>
126 pub fn client_token(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
127 self.inner = self.inner.client_token(input.into());
128 self
129 }
130 /// <p>A unique, case-sensitive identifier that you provide to ensure the idempotency of the request. This token prevents duplicate workflow creation requests.</p>
131 pub fn set_client_token(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
132 self.inner = self.inner.set_client_token(input);
133 self
134 }
135 /// <p>A unique, case-sensitive identifier that you provide to ensure the idempotency of the request. This token prevents duplicate workflow creation requests.</p>
136 pub fn get_client_token(&self) -> &::std::option::Option<::std::string::String> {
137 self.inner.get_client_token()
138 }
139 /// <p>The Amazon S3 location where the workflow definition file is stored. This must point to a valid YAML file that defines the workflow structure using supported Amazon Web Services operators and tasks. Amazon Managed Workflows for Apache Airflow Serverless takes a snapshot of the definition at creation time, so subsequent changes to the Amazon S3 object will not affect the workflow unless you create a new version. In your YAML definition, include task dependencies, scheduling information, and operator configurations that are compatible with the Amazon Managed Workflows for Apache Airflow Serverless execution environment.</p>
140 pub fn definition_s3_location(mut self, input: crate::types::DefinitionS3Location) -> Self {
141 self.inner = self.inner.definition_s3_location(input);
142 self
143 }
144 /// <p>The Amazon S3 location where the workflow definition file is stored. This must point to a valid YAML file that defines the workflow structure using supported Amazon Web Services operators and tasks. Amazon Managed Workflows for Apache Airflow Serverless takes a snapshot of the definition at creation time, so subsequent changes to the Amazon S3 object will not affect the workflow unless you create a new version. In your YAML definition, include task dependencies, scheduling information, and operator configurations that are compatible with the Amazon Managed Workflows for Apache Airflow Serverless execution environment.</p>
145 pub fn set_definition_s3_location(mut self, input: ::std::option::Option<crate::types::DefinitionS3Location>) -> Self {
146 self.inner = self.inner.set_definition_s3_location(input);
147 self
148 }
149 /// <p>The Amazon S3 location where the workflow definition file is stored. This must point to a valid YAML file that defines the workflow structure using supported Amazon Web Services operators and tasks. Amazon Managed Workflows for Apache Airflow Serverless takes a snapshot of the definition at creation time, so subsequent changes to the Amazon S3 object will not affect the workflow unless you create a new version. In your YAML definition, include task dependencies, scheduling information, and operator configurations that are compatible with the Amazon Managed Workflows for Apache Airflow Serverless execution environment.</p>
150 pub fn get_definition_s3_location(&self) -> &::std::option::Option<crate::types::DefinitionS3Location> {
151 self.inner.get_definition_s3_location()
152 }
153 /// <p>The Amazon Resource Name (ARN) of the IAM role that Amazon Managed Workflows for Apache Airflow Serverless assumes when executing the workflow. This role must have the necessary permissions to access the required Amazon Web Services services and resources that your workflow tasks will interact with. The role is used for task execution in the isolated, multi-tenant environment and should follow the principle of least privilege. Amazon Managed Workflows for Apache Airflow Serverless validates role access during workflow creation but runtime permission checks are performed by the target services.</p>
154 pub fn role_arn(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
155 self.inner = self.inner.role_arn(input.into());
156 self
157 }
158 /// <p>The Amazon Resource Name (ARN) of the IAM role that Amazon Managed Workflows for Apache Airflow Serverless assumes when executing the workflow. This role must have the necessary permissions to access the required Amazon Web Services services and resources that your workflow tasks will interact with. The role is used for task execution in the isolated, multi-tenant environment and should follow the principle of least privilege. Amazon Managed Workflows for Apache Airflow Serverless validates role access during workflow creation but runtime permission checks are performed by the target services.</p>
159 pub fn set_role_arn(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
160 self.inner = self.inner.set_role_arn(input);
161 self
162 }
163 /// <p>The Amazon Resource Name (ARN) of the IAM role that Amazon Managed Workflows for Apache Airflow Serverless assumes when executing the workflow. This role must have the necessary permissions to access the required Amazon Web Services services and resources that your workflow tasks will interact with. The role is used for task execution in the isolated, multi-tenant environment and should follow the principle of least privilege. Amazon Managed Workflows for Apache Airflow Serverless validates role access during workflow creation but runtime permission checks are performed by the target services.</p>
164 pub fn get_role_arn(&self) -> &::std::option::Option<::std::string::String> {
165 self.inner.get_role_arn()
166 }
167 /// <p>An optional description of the workflow that you can use to provide additional context about the workflow's purpose and functionality.</p>
168 pub fn description(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
169 self.inner = self.inner.description(input.into());
170 self
171 }
172 /// <p>An optional description of the workflow that you can use to provide additional context about the workflow's purpose and functionality.</p>
173 pub fn set_description(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
174 self.inner = self.inner.set_description(input);
175 self
176 }
177 /// <p>An optional description of the workflow that you can use to provide additional context about the workflow's purpose and functionality.</p>
178 pub fn get_description(&self) -> &::std::option::Option<::std::string::String> {
179 self.inner.get_description()
180 }
181 /// <p>The configuration for encrypting workflow data at rest and in transit. Specifies the encryption type and optional KMS key for customer-managed encryption.</p>
182 pub fn encryption_configuration(mut self, input: crate::types::EncryptionConfiguration) -> Self {
183 self.inner = self.inner.encryption_configuration(input);
184 self
185 }
186 /// <p>The configuration for encrypting workflow data at rest and in transit. Specifies the encryption type and optional KMS key for customer-managed encryption.</p>
187 pub fn set_encryption_configuration(mut self, input: ::std::option::Option<crate::types::EncryptionConfiguration>) -> Self {
188 self.inner = self.inner.set_encryption_configuration(input);
189 self
190 }
191 /// <p>The configuration for encrypting workflow data at rest and in transit. Specifies the encryption type and optional KMS key for customer-managed encryption.</p>
192 pub fn get_encryption_configuration(&self) -> &::std::option::Option<crate::types::EncryptionConfiguration> {
193 self.inner.get_encryption_configuration()
194 }
195 /// <p>The configuration for workflow logging. Specifies the CloudWatch log group where workflow execution logs are stored. Amazon Managed Workflows for Apache Airflow Serverless automatically exports worker logs and task-level information to the specified log group in your account using remote logging functionality. This provides comprehensive observability for debugging and monitoring workflow execution across the distributed, serverless environment.</p>
196 pub fn logging_configuration(mut self, input: crate::types::LoggingConfiguration) -> Self {
197 self.inner = self.inner.logging_configuration(input);
198 self
199 }
200 /// <p>The configuration for workflow logging. Specifies the CloudWatch log group where workflow execution logs are stored. Amazon Managed Workflows for Apache Airflow Serverless automatically exports worker logs and task-level information to the specified log group in your account using remote logging functionality. This provides comprehensive observability for debugging and monitoring workflow execution across the distributed, serverless environment.</p>
201 pub fn set_logging_configuration(mut self, input: ::std::option::Option<crate::types::LoggingConfiguration>) -> Self {
202 self.inner = self.inner.set_logging_configuration(input);
203 self
204 }
205 /// <p>The configuration for workflow logging. Specifies the CloudWatch log group where workflow execution logs are stored. Amazon Managed Workflows for Apache Airflow Serverless automatically exports worker logs and task-level information to the specified log group in your account using remote logging functionality. This provides comprehensive observability for debugging and monitoring workflow execution across the distributed, serverless environment.</p>
206 pub fn get_logging_configuration(&self) -> &::std::option::Option<crate::types::LoggingConfiguration> {
207 self.inner.get_logging_configuration()
208 }
209 /// <p>The version of the Amazon Managed Workflows for Apache Airflow Serverless engine that you want to use for this workflow. This determines the feature set, supported operators, and execution environment capabilities available to your workflow. Amazon Managed Workflows for Apache Airflow Serverless maintains backward compatibility across versions while introducing new features and improvements. Currently supports version 1 with plans for additional versions as the service evolves.</p>
210 pub fn engine_version(mut self, input: i32) -> Self {
211 self.inner = self.inner.engine_version(input);
212 self
213 }
214 /// <p>The version of the Amazon Managed Workflows for Apache Airflow Serverless engine that you want to use for this workflow. This determines the feature set, supported operators, and execution environment capabilities available to your workflow. Amazon Managed Workflows for Apache Airflow Serverless maintains backward compatibility across versions while introducing new features and improvements. Currently supports version 1 with plans for additional versions as the service evolves.</p>
215 pub fn set_engine_version(mut self, input: ::std::option::Option<i32>) -> Self {
216 self.inner = self.inner.set_engine_version(input);
217 self
218 }
219 /// <p>The version of the Amazon Managed Workflows for Apache Airflow Serverless engine that you want to use for this workflow. This determines the feature set, supported operators, and execution environment capabilities available to your workflow. Amazon Managed Workflows for Apache Airflow Serverless maintains backward compatibility across versions while introducing new features and improvements. Currently supports version 1 with plans for additional versions as the service evolves.</p>
220 pub fn get_engine_version(&self) -> &::std::option::Option<i32> {
221 self.inner.get_engine_version()
222 }
223 /// <p>Network configuration for the workflow execution environment, including VPC security groups and subnets for secure network access. When specified, Amazon Managed Workflows for Apache Airflow Serverless deploys ECS worker tasks in your customer VPC to provide secure connectivity to your resources. If not specified, tasks run in the service's default worker VPC with network isolation from other customers. This configuration enables secure access to VPC-only resources like RDS databases or private endpoints.</p>
224 pub fn network_configuration(mut self, input: crate::types::NetworkConfiguration) -> Self {
225 self.inner = self.inner.network_configuration(input);
226 self
227 }
228 /// <p>Network configuration for the workflow execution environment, including VPC security groups and subnets for secure network access. When specified, Amazon Managed Workflows for Apache Airflow Serverless deploys ECS worker tasks in your customer VPC to provide secure connectivity to your resources. If not specified, tasks run in the service's default worker VPC with network isolation from other customers. This configuration enables secure access to VPC-only resources like RDS databases or private endpoints.</p>
229 pub fn set_network_configuration(mut self, input: ::std::option::Option<crate::types::NetworkConfiguration>) -> Self {
230 self.inner = self.inner.set_network_configuration(input);
231 self
232 }
233 /// <p>Network configuration for the workflow execution environment, including VPC security groups and subnets for secure network access. When specified, Amazon Managed Workflows for Apache Airflow Serverless deploys ECS worker tasks in your customer VPC to provide secure connectivity to your resources. If not specified, tasks run in the service's default worker VPC with network isolation from other customers. This configuration enables secure access to VPC-only resources like RDS databases or private endpoints.</p>
234 pub fn get_network_configuration(&self) -> &::std::option::Option<crate::types::NetworkConfiguration> {
235 self.inner.get_network_configuration()
236 }
237 ///
238 /// Adds a key-value pair to `Tags`.
239 ///
240 /// To override the contents of this collection use [`set_tags`](Self::set_tags).
241 ///
242 /// <p>A map of tags to assign to the workflow resource. Tags are key-value pairs that are used for resource organization and cost allocation.</p>
243 pub fn tags(mut self, k: impl ::std::convert::Into<::std::string::String>, v: impl ::std::convert::Into<::std::string::String>) -> Self {
244 self.inner = self.inner.tags(k.into(), v.into());
245 self
246 }
247 /// <p>A map of tags to assign to the workflow resource. Tags are key-value pairs that are used for resource organization and cost allocation.</p>
248 pub fn set_tags(mut self, input: ::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>>) -> Self {
249 self.inner = self.inner.set_tags(input);
250 self
251 }
252 /// <p>A map of tags to assign to the workflow resource. Tags are key-value pairs that are used for resource organization and cost allocation.</p>
253 pub fn get_tags(&self) -> &::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>> {
254 self.inner.get_tags()
255 }
256 /// <p>The trigger mode for the workflow execution.</p>
257 pub fn trigger_mode(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
258 self.inner = self.inner.trigger_mode(input.into());
259 self
260 }
261 /// <p>The trigger mode for the workflow execution.</p>
262 pub fn set_trigger_mode(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
263 self.inner = self.inner.set_trigger_mode(input);
264 self
265 }
266 /// <p>The trigger mode for the workflow execution.</p>
267 pub fn get_trigger_mode(&self) -> &::std::option::Option<::std::string::String> {
268 self.inner.get_trigger_mode()
269 }
270}