aws_sdk_mwaa/operation/create_environment/_create_environment_input.rs

// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.

/// <p>This section contains the Amazon Managed Workflows for Apache Airflow (Amazon MWAA) API reference documentation to create an environment. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/get-started.html">Get started with Amazon Managed Workflows for Apache Airflow</a>.</p>
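///
/// # Example
///
/// A minimal sketch of building this input with its generated builder. The values are
/// illustrative only, and the sketch assumes the usual smithy-rs builder shape in which
/// `build()` returns a `Result`; constructing the input does not by itself create an
/// environment.
///
/// ```no_run
/// use aws_sdk_mwaa::operation::create_environment::CreateEnvironmentInput;
///
/// let input = CreateEnvironmentInput::builder()
///     .name("MyMWAAEnvironment")
///     .execution_role_arn("arn:aws:iam::123456789:role/my-execution-role")
///     .source_bucket_arn("arn:aws:s3:::my-airflow-bucket-unique-name")
///     .dag_s3_path("dags")
///     .airflow_version("2.10.3")
///     .max_workers(10)
///     // `network_configuration` is also required by the API; it is omitted here for brevity.
///     .build()
///     .expect("a required field was not set");
///
/// // `airflow_configuration_options` is redacted when the input is printed with `{:?}`.
/// println!("{:?}", input);
/// ```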
#[non_exhaustive]
#[derive(::std::clone::Clone, ::std::cmp::PartialEq)]
pub struct CreateEnvironmentInput {
    /// <p>The name of the Amazon MWAA environment. For example, <code>MyMWAAEnvironment</code>.</p>
    pub name: ::std::option::Option<::std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the execution role for your environment. An execution role is an Amazon Web Services Identity and Access Management (IAM) role that grants MWAA permission to access Amazon Web Services services and resources used by your environment. For example, <code>arn:aws:iam::123456789:role/my-execution-role</code>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/mwaa-create-role.html">Amazon MWAA Execution role</a>.</p>
    pub execution_role_arn: ::std::option::Option<::std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the Amazon S3 bucket where your DAG code and supporting files are stored. For example, <code>arn:aws:s3:::my-airflow-bucket-unique-name</code>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/mwaa-s3-bucket.html">Create an Amazon S3 bucket for Amazon MWAA</a>.</p>
    pub source_bucket_arn: ::std::option::Option<::std::string::String>,
    /// <p>The relative path to the DAGs folder on your Amazon S3 bucket. For example, <code>dags</code>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-dag-folder.html">Adding or updating DAGs</a>.</p>
    pub dag_s3_path: ::std::option::Option<::std::string::String>,
    /// <p>The VPC networking components used to secure and enable network traffic between the Amazon Web Services resources for your environment. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/networking-about.html">About networking on Amazon MWAA</a>.</p>
    pub network_configuration: ::std::option::Option<crate::types::NetworkConfiguration>,
    /// <p>The relative path to the <code>plugins.zip</code> file on your Amazon S3 bucket. For example, <code>plugins.zip</code>. If specified, then the <code>plugins.zip</code> version is required. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-dag-import-plugins.html">Installing custom plugins</a>.</p>
    pub plugins_s3_path: ::std::option::Option<::std::string::String>,
    /// <p>The version of the plugins.zip file on your Amazon S3 bucket. You must specify a version each time a plugins.zip file is updated. For more information, see <a href="https://docs.aws.amazon.com/AmazonS3/latest/userguide/versioning-workflows.html">How S3 Versioning works</a>.</p>
    pub plugins_s3_object_version: ::std::option::Option<::std::string::String>,
    /// <p>The relative path to the <code>requirements.txt</code> file on your Amazon S3 bucket. For example, <code>requirements.txt</code>. If specified, then a version is required. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/working-dags-dependencies.html">Installing Python dependencies</a>.</p>
    pub requirements_s3_path: ::std::option::Option<::std::string::String>,
    /// <p>The version of the <code>requirements.txt</code> file on your Amazon S3 bucket. You must specify a version each time a requirements.txt file is updated. For more information, see <a href="https://docs.aws.amazon.com/AmazonS3/latest/userguide/versioning-workflows.html">How S3 Versioning works</a>.</p>
    pub requirements_s3_object_version: ::std::option::Option<::std::string::String>,
    /// <p>The relative path to the startup shell script in your Amazon S3 bucket. For example, <code>s3://mwaa-environment/startup.sh</code>.</p>
    /// <p>Amazon MWAA runs the script as your environment starts, and before running the Apache Airflow process. You can use this script to install dependencies, modify Apache Airflow configuration options, and set environment variables. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/using-startup-script.html">Using a startup script</a>.</p>
    pub startup_script_s3_path: ::std::option::Option<::std::string::String>,
    /// <p>The version of the startup shell script in your Amazon S3 bucket. You must specify the <a href="https://docs.aws.amazon.com/AmazonS3/latest/userguide/versioning-workflows.html">version ID</a> that Amazon S3 assigns to the file every time you update the script.</p>
    /// <p>Version IDs are Unicode, UTF-8 encoded, URL-ready, opaque strings that are no more than 1,024 bytes long. The following is an example:</p>
    /// <p><code>3sL4kqtJlcpXroDTDmJ+rmSpXd3dIbrHY+MTRCxf3vjVBH40Nr8X8gdRQBpUMLUo</code></p>
    /// <p>For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/using-startup-script.html">Using a startup script</a>.</p>
    pub startup_script_s3_object_version: ::std::option::Option<::std::string::String>,
    /// <p>A list of key-value pairs containing the Apache Airflow configuration options you want to attach to your environment. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-env-variables.html">Apache Airflow configuration options</a>.</p>
    pub airflow_configuration_options: ::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>>,
    /// <p>The environment class type. Valid values: <code>mw1.micro</code>, <code>mw1.small</code>, <code>mw1.medium</code>, <code>mw1.large</code>, <code>mw1.xlarge</code>, and <code>mw1.2xlarge</code>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/environment-class.html">Amazon MWAA environment class</a>.</p>
    pub environment_class: ::std::option::Option<::std::string::String>,
    /// <p>The maximum number of workers that you want to run in your environment. MWAA scales the number of Apache Airflow workers up to the number you specify in the <code>MaxWorkers</code> field. For example, <code>20</code>. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers leaving the one worker that is included with your environment, or the number you specify in <code>MinWorkers</code>.</p>
    pub max_workers: ::std::option::Option<i32>,
    /// <p>The Amazon Web Services Key Management Service (KMS) key to encrypt the data in your environment. You can use an Amazon Web Services owned CMK, or a Customer managed CMK (advanced). For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/create-environment.html">Create an Amazon MWAA environment</a>.</p>
    pub kms_key: ::std::option::Option<::std::string::String>,
    /// <p>The Apache Airflow version for your environment. If no value is specified, it defaults to the latest version. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/airflow-versions.html">Apache Airflow versions on Amazon Managed Workflows for Apache Airflow (Amazon MWAA)</a>.</p>
    /// <p>Valid values: <code>1.10.12</code>, <code>2.0.2</code>, <code>2.2.2</code>, <code>2.4.3</code>, <code>2.5.1</code>, <code>2.6.3</code>, <code>2.7.2</code>, <code>2.8.1</code>, <code>2.9.2</code>, <code>2.10.1</code>, and <code>2.10.3</code>.</p>
    pub airflow_version: ::std::option::Option<::std::string::String>,
    /// <p>Defines the Apache Airflow logs to send to CloudWatch Logs.</p>
    pub logging_configuration: ::std::option::Option<crate::types::LoggingConfigurationInput>,
    /// <p>The day and time of the week in Coordinated Universal Time (UTC) 24-hour standard time to start weekly maintenance updates of your environment in the following format: <code>DAY:HH:MM</code>. For example: <code>TUE:03:30</code>. You can specify a start time in 30 minute increments only.</p>
    pub weekly_maintenance_window_start: ::std::option::Option<::std::string::String>,
    /// <p>The key-value tag pairs you want to associate to your environment. For example, <code>"Environment": "Staging"</code>. For more information, see <a href="https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html">Tagging Amazon Web Services resources</a>.</p>
    pub tags: ::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>>,
    /// <p>Defines the access mode for the Apache Airflow <i>web server</i>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-networking.html">Apache Airflow access modes</a>.</p>
    pub webserver_access_mode: ::std::option::Option<crate::types::WebserverAccessMode>,
    /// <p>The minimum number of workers that you want to run in your environment. MWAA scales the number of Apache Airflow workers up to the number you specify in the <code>MaxWorkers</code> field. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers leaving the worker count you specify in the <code>MinWorkers</code> field. For example, <code>2</code>.</p>
    pub min_workers: ::std::option::Option<i32>,
    /// <p>The number of Apache Airflow schedulers to run in your environment. Valid values:</p>
    /// <ul>
    /// <li>
    /// <p>v2 - For environments larger than mw1.micro, accepts values from <code>2</code> to <code>5</code>. Defaults to <code>2</code> for all environment sizes except mw1.micro, which defaults to <code>1</code>.</p></li>
    /// <li>
    /// <p>v1 - Accepts <code>1</code>.</p></li>
    /// </ul>
    pub schedulers: ::std::option::Option<i32>,
    /// <p>Defines whether the VPC endpoints configured for the environment are created, and managed, by the customer or by Amazon MWAA. If set to <code>SERVICE</code>, Amazon MWAA will create and manage the required VPC endpoints in your VPC. If set to <code>CUSTOMER</code>, you must create, and manage, the VPC endpoints for your VPC. If you choose to create an environment in a shared VPC, you must set this value to <code>CUSTOMER</code>. In a shared VPC deployment, the environment will remain in <code>PENDING</code> status until you create the VPC endpoints. If you do not take action to create the endpoints within 72 hours, the status will change to <code>CREATE_FAILED</code>. You can delete the failed environment and create a new one.</p>
    pub endpoint_management: ::std::option::Option<crate::types::EndpointManagement>,
    /// <p>The minimum number of web servers that you want to run in your environment. Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for <code>MaxWebservers</code> when you interact with your Apache Airflow environment using the Apache Airflow REST API or the Apache Airflow CLI. As the transaction-per-second rate and the network load decrease, Amazon MWAA disposes of the additional web servers, and scales down to the number set in <code>MinWebservers</code>.</p>
    /// <p>Valid values: For environments larger than mw1.micro, accepts values from <code>2</code> to <code>5</code>. Defaults to <code>2</code> for all environment sizes except mw1.micro, which defaults to <code>1</code>.</p>
    pub min_webservers: ::std::option::Option<i32>,
    /// <p>The maximum number of web servers that you want to run in your environment. Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for <code>MaxWebservers</code> when you interact with your Apache Airflow environment using the Apache Airflow REST API or the Apache Airflow CLI. For example, in scenarios where your workload requires network calls to the Apache Airflow REST API with a high transaction-per-second (TPS) rate, Amazon MWAA will increase the number of web servers up to the number set in <code>MaxWebservers</code>. As TPS rates decrease, Amazon MWAA disposes of the additional web servers, and scales down to the number set in <code>MinWebservers</code>.</p>
    /// <p>Valid values: For environments larger than mw1.micro, accepts values from <code>2</code> to <code>5</code>. Defaults to <code>2</code> for all environment sizes except mw1.micro, which defaults to <code>1</code>.</p>
    pub max_webservers: ::std::option::Option<i32>,
}
impl CreateEnvironmentInput {
    /// <p>The name of the Amazon MWAA environment. For example, <code>MyMWAAEnvironment</code>.</p>
    pub fn name(&self) -> ::std::option::Option<&str> {
        self.name.as_deref()
    }
    /// <p>The Amazon Resource Name (ARN) of the execution role for your environment. An execution role is an Amazon Web Services Identity and Access Management (IAM) role that grants MWAA permission to access Amazon Web Services services and resources used by your environment. For example, <code>arn:aws:iam::123456789:role/my-execution-role</code>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/mwaa-create-role.html">Amazon MWAA Execution role</a>.</p>
    pub fn execution_role_arn(&self) -> ::std::option::Option<&str> {
        self.execution_role_arn.as_deref()
    }
    /// <p>The Amazon Resource Name (ARN) of the Amazon S3 bucket where your DAG code and supporting files are stored. For example, <code>arn:aws:s3:::my-airflow-bucket-unique-name</code>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/mwaa-s3-bucket.html">Create an Amazon S3 bucket for Amazon MWAA</a>.</p>
    pub fn source_bucket_arn(&self) -> ::std::option::Option<&str> {
        self.source_bucket_arn.as_deref()
    }
    /// <p>The relative path to the DAGs folder on your Amazon S3 bucket. For example, <code>dags</code>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-dag-folder.html">Adding or updating DAGs</a>.</p>
    pub fn dag_s3_path(&self) -> ::std::option::Option<&str> {
        self.dag_s3_path.as_deref()
    }
    /// <p>The VPC networking components used to secure and enable network traffic between the Amazon Web Services resources for your environment. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/networking-about.html">About networking on Amazon MWAA</a>.</p>
    pub fn network_configuration(&self) -> ::std::option::Option<&crate::types::NetworkConfiguration> {
        self.network_configuration.as_ref()
    }
    /// <p>The relative path to the <code>plugins.zip</code> file on your Amazon S3 bucket. For example, <code>plugins.zip</code>. If specified, then the <code>plugins.zip</code> version is required. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-dag-import-plugins.html">Installing custom plugins</a>.</p>
    pub fn plugins_s3_path(&self) -> ::std::option::Option<&str> {
        self.plugins_s3_path.as_deref()
    }
    /// <p>The version of the plugins.zip file on your Amazon S3 bucket. You must specify a version each time a plugins.zip file is updated. For more information, see <a href="https://docs.aws.amazon.com/AmazonS3/latest/userguide/versioning-workflows.html">How S3 Versioning works</a>.</p>
    pub fn plugins_s3_object_version(&self) -> ::std::option::Option<&str> {
        self.plugins_s3_object_version.as_deref()
    }
    /// <p>The relative path to the <code>requirements.txt</code> file on your Amazon S3 bucket. For example, <code>requirements.txt</code>. If specified, then a version is required. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/working-dags-dependencies.html">Installing Python dependencies</a>.</p>
    pub fn requirements_s3_path(&self) -> ::std::option::Option<&str> {
        self.requirements_s3_path.as_deref()
    }
    /// <p>The version of the <code>requirements.txt</code> file on your Amazon S3 bucket. You must specify a version each time a requirements.txt file is updated. For more information, see <a href="https://docs.aws.amazon.com/AmazonS3/latest/userguide/versioning-workflows.html">How S3 Versioning works</a>.</p>
    pub fn requirements_s3_object_version(&self) -> ::std::option::Option<&str> {
        self.requirements_s3_object_version.as_deref()
    }
    /// <p>The relative path to the startup shell script in your Amazon S3 bucket. For example, <code>s3://mwaa-environment/startup.sh</code>.</p>
    /// <p>Amazon MWAA runs the script as your environment starts, and before running the Apache Airflow process. You can use this script to install dependencies, modify Apache Airflow configuration options, and set environment variables. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/using-startup-script.html">Using a startup script</a>.</p>
    pub fn startup_script_s3_path(&self) -> ::std::option::Option<&str> {
        self.startup_script_s3_path.as_deref()
    }
    /// <p>The version of the startup shell script in your Amazon S3 bucket. You must specify the <a href="https://docs.aws.amazon.com/AmazonS3/latest/userguide/versioning-workflows.html">version ID</a> that Amazon S3 assigns to the file every time you update the script.</p>
    /// <p>Version IDs are Unicode, UTF-8 encoded, URL-ready, opaque strings that are no more than 1,024 bytes long. The following is an example:</p>
    /// <p><code>3sL4kqtJlcpXroDTDmJ+rmSpXd3dIbrHY+MTRCxf3vjVBH40Nr8X8gdRQBpUMLUo</code></p>
    /// <p>For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/using-startup-script.html">Using a startup script</a>.</p>
    pub fn startup_script_s3_object_version(&self) -> ::std::option::Option<&str> {
        self.startup_script_s3_object_version.as_deref()
    }
    /// <p>A list of key-value pairs containing the Apache Airflow configuration options you want to attach to your environment. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-env-variables.html">Apache Airflow configuration options</a>.</p>
    pub fn airflow_configuration_options(&self) -> ::std::option::Option<&::std::collections::HashMap<::std::string::String, ::std::string::String>> {
        self.airflow_configuration_options.as_ref()
    }
    /// <p>The environment class type. Valid values: <code>mw1.micro</code>, <code>mw1.small</code>, <code>mw1.medium</code>, <code>mw1.large</code>, <code>mw1.xlarge</code>, and <code>mw1.2xlarge</code>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/environment-class.html">Amazon MWAA environment class</a>.</p>
    pub fn environment_class(&self) -> ::std::option::Option<&str> {
        self.environment_class.as_deref()
    }
    /// <p>The maximum number of workers that you want to run in your environment. MWAA scales the number of Apache Airflow workers up to the number you specify in the <code>MaxWorkers</code> field. For example, <code>20</code>. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers leaving the one worker that is included with your environment, or the number you specify in <code>MinWorkers</code>.</p>
    pub fn max_workers(&self) -> ::std::option::Option<i32> {
        self.max_workers
    }
    /// <p>The Amazon Web Services Key Management Service (KMS) key to encrypt the data in your environment. You can use an Amazon Web Services owned CMK, or a Customer managed CMK (advanced). For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/create-environment.html">Create an Amazon MWAA environment</a>.</p>
    pub fn kms_key(&self) -> ::std::option::Option<&str> {
        self.kms_key.as_deref()
    }
    /// <p>The Apache Airflow version for your environment. If no value is specified, it defaults to the latest version. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/airflow-versions.html">Apache Airflow versions on Amazon Managed Workflows for Apache Airflow (Amazon MWAA)</a>.</p>
    /// <p>Valid values: <code>1.10.12</code>, <code>2.0.2</code>, <code>2.2.2</code>, <code>2.4.3</code>, <code>2.5.1</code>, <code>2.6.3</code>, <code>2.7.2</code>, <code>2.8.1</code>, <code>2.9.2</code>, <code>2.10.1</code>, and <code>2.10.3</code>.</p>
    pub fn airflow_version(&self) -> ::std::option::Option<&str> {
        self.airflow_version.as_deref()
    }
    /// <p>Defines the Apache Airflow logs to send to CloudWatch Logs.</p>
    pub fn logging_configuration(&self) -> ::std::option::Option<&crate::types::LoggingConfigurationInput> {
        self.logging_configuration.as_ref()
    }
    /// <p>The day and time of the week in Coordinated Universal Time (UTC) 24-hour standard time to start weekly maintenance updates of your environment in the following format: <code>DAY:HH:MM</code>. For example: <code>TUE:03:30</code>. You can specify a start time in 30 minute increments only.</p>
    pub fn weekly_maintenance_window_start(&self) -> ::std::option::Option<&str> {
        self.weekly_maintenance_window_start.as_deref()
    }
    /// <p>The key-value tag pairs you want to associate to your environment. For example, <code>"Environment": "Staging"</code>. For more information, see <a href="https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html">Tagging Amazon Web Services resources</a>.</p>
    pub fn tags(&self) -> ::std::option::Option<&::std::collections::HashMap<::std::string::String, ::std::string::String>> {
        self.tags.as_ref()
    }
    /// <p>Defines the access mode for the Apache Airflow <i>web server</i>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-networking.html">Apache Airflow access modes</a>.</p>
    pub fn webserver_access_mode(&self) -> ::std::option::Option<&crate::types::WebserverAccessMode> {
        self.webserver_access_mode.as_ref()
    }
    /// <p>The minimum number of workers that you want to run in your environment. MWAA scales the number of Apache Airflow workers up to the number you specify in the <code>MaxWorkers</code> field. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers leaving the worker count you specify in the <code>MinWorkers</code> field. For example, <code>2</code>.</p>
    pub fn min_workers(&self) -> ::std::option::Option<i32> {
        self.min_workers
    }
    /// <p>The number of Apache Airflow schedulers to run in your environment. Valid values:</p>
    /// <ul>
    /// <li>
    /// <p>v2 - For environments larger than mw1.micro, accepts values from <code>2</code> to <code>5</code>. Defaults to <code>2</code> for all environment sizes except mw1.micro, which defaults to <code>1</code>.</p></li>
    /// <li>
    /// <p>v1 - Accepts <code>1</code>.</p></li>
    /// </ul>
    pub fn schedulers(&self) -> ::std::option::Option<i32> {
        self.schedulers
    }
    /// <p>Defines whether the VPC endpoints configured for the environment are created, and managed, by the customer or by Amazon MWAA. If set to <code>SERVICE</code>, Amazon MWAA will create and manage the required VPC endpoints in your VPC. If set to <code>CUSTOMER</code>, you must create, and manage, the VPC endpoints for your VPC. If you choose to create an environment in a shared VPC, you must set this value to <code>CUSTOMER</code>. In a shared VPC deployment, the environment will remain in <code>PENDING</code> status until you create the VPC endpoints. If you do not take action to create the endpoints within 72 hours, the status will change to <code>CREATE_FAILED</code>. You can delete the failed environment and create a new one.</p>
    pub fn endpoint_management(&self) -> ::std::option::Option<&crate::types::EndpointManagement> {
        self.endpoint_management.as_ref()
    }
    /// <p>The minimum number of web servers that you want to run in your environment. Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for <code>MaxWebservers</code> when you interact with your Apache Airflow environment using the Apache Airflow REST API or the Apache Airflow CLI. As the transaction-per-second rate and the network load decrease, Amazon MWAA disposes of the additional web servers, and scales down to the number set in <code>MinWebservers</code>.</p>
    /// <p>Valid values: For environments larger than mw1.micro, accepts values from <code>2</code> to <code>5</code>. Defaults to <code>2</code> for all environment sizes except mw1.micro, which defaults to <code>1</code>.</p>
    pub fn min_webservers(&self) -> ::std::option::Option<i32> {
        self.min_webservers
    }
    /// <p>The maximum number of web servers that you want to run in your environment. Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for <code>MaxWebservers</code> when you interact with your Apache Airflow environment using the Apache Airflow REST API or the Apache Airflow CLI. For example, in scenarios where your workload requires network calls to the Apache Airflow REST API with a high transaction-per-second (TPS) rate, Amazon MWAA will increase the number of web servers up to the number set in <code>MaxWebservers</code>. As TPS rates decrease, Amazon MWAA disposes of the additional web servers, and scales down to the number set in <code>MinWebservers</code>.</p>
    /// <p>Valid values: For environments larger than mw1.micro, accepts values from <code>2</code> to <code>5</code>. Defaults to <code>2</code> for all environment sizes except mw1.micro, which defaults to <code>1</code>.</p>
    pub fn max_webservers(&self) -> ::std::option::Option<i32> {
        self.max_webservers
    }
}
impl ::std::fmt::Debug for CreateEnvironmentInput {
    fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
        let mut formatter = f.debug_struct("CreateEnvironmentInput");
        formatter.field("name", &self.name);
        formatter.field("execution_role_arn", &self.execution_role_arn);
        formatter.field("source_bucket_arn", &self.source_bucket_arn);
        formatter.field("dag_s3_path", &self.dag_s3_path);
        formatter.field("network_configuration", &self.network_configuration);
        formatter.field("plugins_s3_path", &self.plugins_s3_path);
        formatter.field("plugins_s3_object_version", &self.plugins_s3_object_version);
        formatter.field("requirements_s3_path", &self.requirements_s3_path);
        formatter.field("requirements_s3_object_version", &self.requirements_s3_object_version);
        formatter.field("startup_script_s3_path", &self.startup_script_s3_path);
        formatter.field("startup_script_s3_object_version", &self.startup_script_s3_object_version);
        formatter.field("airflow_configuration_options", &"*** Sensitive Data Redacted ***");
        formatter.field("environment_class", &self.environment_class);
        formatter.field("max_workers", &self.max_workers);
        formatter.field("kms_key", &self.kms_key);
        formatter.field("airflow_version", &self.airflow_version);
        formatter.field("logging_configuration", &self.logging_configuration);
        formatter.field("weekly_maintenance_window_start", &self.weekly_maintenance_window_start);
        formatter.field("tags", &self.tags);
        formatter.field("webserver_access_mode", &self.webserver_access_mode);
        formatter.field("min_workers", &self.min_workers);
        formatter.field("schedulers", &self.schedulers);
        formatter.field("endpoint_management", &self.endpoint_management);
        formatter.field("min_webservers", &self.min_webservers);
        formatter.field("max_webservers", &self.max_webservers);
        formatter.finish()
    }
}
impl CreateEnvironmentInput {
    /// Creates a new builder-style object to manufacture [`CreateEnvironmentInput`](crate::operation::create_environment::CreateEnvironmentInput).
    pub fn builder() -> crate::operation::create_environment::builders::CreateEnvironmentInputBuilder {
        crate::operation::create_environment::builders::CreateEnvironmentInputBuilder::default()
    }
}

/// A builder for [`CreateEnvironmentInput`](crate::operation::create_environment::CreateEnvironmentInput).
#[derive(::std::clone::Clone, ::std::cmp::PartialEq, ::std::default::Default)]
#[non_exhaustive]
pub struct CreateEnvironmentInputBuilder {
    pub(crate) name: ::std::option::Option<::std::string::String>,
    pub(crate) execution_role_arn: ::std::option::Option<::std::string::String>,
    pub(crate) source_bucket_arn: ::std::option::Option<::std::string::String>,
    pub(crate) dag_s3_path: ::std::option::Option<::std::string::String>,
    pub(crate) network_configuration: ::std::option::Option<crate::types::NetworkConfiguration>,
    pub(crate) plugins_s3_path: ::std::option::Option<::std::string::String>,
    pub(crate) plugins_s3_object_version: ::std::option::Option<::std::string::String>,
    pub(crate) requirements_s3_path: ::std::option::Option<::std::string::String>,
    pub(crate) requirements_s3_object_version: ::std::option::Option<::std::string::String>,
    pub(crate) startup_script_s3_path: ::std::option::Option<::std::string::String>,
    pub(crate) startup_script_s3_object_version: ::std::option::Option<::std::string::String>,
    pub(crate) airflow_configuration_options: ::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>>,
    pub(crate) environment_class: ::std::option::Option<::std::string::String>,
    pub(crate) max_workers: ::std::option::Option<i32>,
    pub(crate) kms_key: ::std::option::Option<::std::string::String>,
    pub(crate) airflow_version: ::std::option::Option<::std::string::String>,
    pub(crate) logging_configuration: ::std::option::Option<crate::types::LoggingConfigurationInput>,
    pub(crate) weekly_maintenance_window_start: ::std::option::Option<::std::string::String>,
    pub(crate) tags: ::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>>,
    pub(crate) webserver_access_mode: ::std::option::Option<crate::types::WebserverAccessMode>,
    pub(crate) min_workers: ::std::option::Option<i32>,
    pub(crate) schedulers: ::std::option::Option<i32>,
    pub(crate) endpoint_management: ::std::option::Option<crate::types::EndpointManagement>,
    pub(crate) min_webservers: ::std::option::Option<i32>,
    pub(crate) max_webservers: ::std::option::Option<i32>,
}
impl CreateEnvironmentInputBuilder {
    /// <p>The name of the Amazon MWAA environment. For example, <code>MyMWAAEnvironment</code>.</p>
    /// This field is required.
    pub fn name(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.name = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>The name of the Amazon MWAA environment. For example, <code>MyMWAAEnvironment</code>.</p>
    pub fn set_name(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.name = input;
        self
    }
    /// <p>The name of the Amazon MWAA environment. For example, <code>MyMWAAEnvironment</code>.</p>
    pub fn get_name(&self) -> &::std::option::Option<::std::string::String> {
        &self.name
    }
    /// <p>The Amazon Resource Name (ARN) of the execution role for your environment. An execution role is an Amazon Web Services Identity and Access Management (IAM) role that grants MWAA permission to access Amazon Web Services services and resources used by your environment. For example, <code>arn:aws:iam::123456789:role/my-execution-role</code>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/mwaa-create-role.html">Amazon MWAA Execution role</a>.</p>
    /// This field is required.
    pub fn execution_role_arn(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.execution_role_arn = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>The Amazon Resource Name (ARN) of the execution role for your environment. An execution role is an Amazon Web Services Identity and Access Management (IAM) role that grants MWAA permission to access Amazon Web Services services and resources used by your environment. For example, <code>arn:aws:iam::123456789:role/my-execution-role</code>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/mwaa-create-role.html">Amazon MWAA Execution role</a>.</p>
    pub fn set_execution_role_arn(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.execution_role_arn = input;
        self
    }
    /// <p>The Amazon Resource Name (ARN) of the execution role for your environment. An execution role is an Amazon Web Services Identity and Access Management (IAM) role that grants MWAA permission to access Amazon Web Services services and resources used by your environment. For example, <code>arn:aws:iam::123456789:role/my-execution-role</code>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/mwaa-create-role.html">Amazon MWAA Execution role</a>.</p>
    pub fn get_execution_role_arn(&self) -> &::std::option::Option<::std::string::String> {
        &self.execution_role_arn
    }
    /// <p>The Amazon Resource Name (ARN) of the Amazon S3 bucket where your DAG code and supporting files are stored. For example, <code>arn:aws:s3:::my-airflow-bucket-unique-name</code>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/mwaa-s3-bucket.html">Create an Amazon S3 bucket for Amazon MWAA</a>.</p>
    /// This field is required.
    pub fn source_bucket_arn(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.source_bucket_arn = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>The Amazon Resource Name (ARN) of the Amazon S3 bucket where your DAG code and supporting files are stored. For example, <code>arn:aws:s3:::my-airflow-bucket-unique-name</code>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/mwaa-s3-bucket.html">Create an Amazon S3 bucket for Amazon MWAA</a>.</p>
    pub fn set_source_bucket_arn(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.source_bucket_arn = input;
        self
    }
    /// <p>The Amazon Resource Name (ARN) of the Amazon S3 bucket where your DAG code and supporting files are stored. For example, <code>arn:aws:s3:::my-airflow-bucket-unique-name</code>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/mwaa-s3-bucket.html">Create an Amazon S3 bucket for Amazon MWAA</a>.</p>
    pub fn get_source_bucket_arn(&self) -> &::std::option::Option<::std::string::String> {
        &self.source_bucket_arn
    }
    /// <p>The relative path to the DAGs folder on your Amazon S3 bucket. For example, <code>dags</code>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-dag-folder.html">Adding or updating DAGs</a>.</p>
    /// This field is required.
    pub fn dag_s3_path(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.dag_s3_path = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>The relative path to the DAGs folder on your Amazon S3 bucket. For example, <code>dags</code>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-dag-folder.html">Adding or updating DAGs</a>.</p>
    pub fn set_dag_s3_path(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.dag_s3_path = input;
        self
    }
    /// <p>The relative path to the DAGs folder on your Amazon S3 bucket. For example, <code>dags</code>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-dag-folder.html">Adding or updating DAGs</a>.</p>
    pub fn get_dag_s3_path(&self) -> &::std::option::Option<::std::string::String> {
        &self.dag_s3_path
    }
    /// <p>The VPC networking components used to secure and enable network traffic between the Amazon Web Services resources for your environment. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/networking-about.html">About networking on Amazon MWAA</a>.</p>
    /// This field is required.
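    ///
    /// A hypothetical sketch of supplying this value. It assumes `crate::types::NetworkConfiguration`
    /// exposes the usual generated builder with `subnet_ids` and `security_group_ids` appenders;
    /// check that type's documentation for the exact API and for which fields it requires.
    ///
    /// ```ignore
    /// // Illustrative IDs only.
    /// let network = aws_sdk_mwaa::types::NetworkConfiguration::builder()
    ///     .subnet_ids("subnet-0123456789abcdef0")
    ///     .subnet_ids("subnet-0fedcba9876543210")
    ///     .security_group_ids("sg-0123456789abcdef0")
    ///     .build();
    ///
    /// let builder = aws_sdk_mwaa::operation::create_environment::CreateEnvironmentInput::builder()
    ///     .network_configuration(network);
    /// ```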
    pub fn network_configuration(mut self, input: crate::types::NetworkConfiguration) -> Self {
        self.network_configuration = ::std::option::Option::Some(input);
        self
    }
    /// <p>The VPC networking components used to secure and enable network traffic between the Amazon Web Services resources for your environment. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/networking-about.html">About networking on Amazon MWAA</a>.</p>
    pub fn set_network_configuration(mut self, input: ::std::option::Option<crate::types::NetworkConfiguration>) -> Self {
        self.network_configuration = input;
        self
    }
    /// <p>The VPC networking components used to secure and enable network traffic between the Amazon Web Services resources for your environment. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/networking-about.html">About networking on Amazon MWAA</a>.</p>
    pub fn get_network_configuration(&self) -> &::std::option::Option<crate::types::NetworkConfiguration> {
        &self.network_configuration
    }
    /// <p>The relative path to the <code>plugins.zip</code> file on your Amazon S3 bucket. For example, <code>plugins.zip</code>. If specified, then the <code>plugins.zip</code> version is required. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-dag-import-plugins.html">Installing custom plugins</a>.</p>
    pub fn plugins_s3_path(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.plugins_s3_path = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>The relative path to the <code>plugins.zip</code> file on your Amazon S3 bucket. For example, <code>plugins.zip</code>. If specified, then the <code>plugins.zip</code> version is required. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-dag-import-plugins.html">Installing custom plugins</a>.</p>
    pub fn set_plugins_s3_path(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.plugins_s3_path = input;
        self
    }
    /// <p>The relative path to the <code>plugins.zip</code> file on your Amazon S3 bucket. For example, <code>plugins.zip</code>. If specified, then the <code>plugins.zip</code> version is required. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-dag-import-plugins.html">Installing custom plugins</a>.</p>
    pub fn get_plugins_s3_path(&self) -> &::std::option::Option<::std::string::String> {
        &self.plugins_s3_path
    }
    /// <p>The version of the plugins.zip file on your Amazon S3 bucket. You must specify a version each time a plugins.zip file is updated. For more information, see <a href="https://docs.aws.amazon.com/AmazonS3/latest/userguide/versioning-workflows.html">How S3 Versioning works</a>.</p>
    pub fn plugins_s3_object_version(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.plugins_s3_object_version = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>The version of the plugins.zip file on your Amazon S3 bucket. You must specify a version each time a plugins.zip file is updated. For more information, see <a href="https://docs.aws.amazon.com/AmazonS3/latest/userguide/versioning-workflows.html">How S3 Versioning works</a>.</p>
    pub fn set_plugins_s3_object_version(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.plugins_s3_object_version = input;
        self
    }
    /// <p>The version of the plugins.zip file on your Amazon S3 bucket. You must specify a version each time a plugins.zip file is updated. For more information, see <a href="https://docs.aws.amazon.com/AmazonS3/latest/userguide/versioning-workflows.html">How S3 Versioning works</a>.</p>
    pub fn get_plugins_s3_object_version(&self) -> &::std::option::Option<::std::string::String> {
        &self.plugins_s3_object_version
    }
    /// <p>The relative path to the <code>requirements.txt</code> file on your Amazon S3 bucket. For example, <code>requirements.txt</code>. If specified, then a version is required. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/working-dags-dependencies.html">Installing Python dependencies</a>.</p>
    pub fn requirements_s3_path(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.requirements_s3_path = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>The relative path to the <code>requirements.txt</code> file on your Amazon S3 bucket. For example, <code>requirements.txt</code>. If specified, then a version is required. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/working-dags-dependencies.html">Installing Python dependencies</a>.</p>
    pub fn set_requirements_s3_path(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.requirements_s3_path = input;
        self
    }
    /// <p>The relative path to the <code>requirements.txt</code> file on your Amazon S3 bucket. For example, <code>requirements.txt</code>. If specified, then a version is required. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/working-dags-dependencies.html">Installing Python dependencies</a>.</p>
    pub fn get_requirements_s3_path(&self) -> &::std::option::Option<::std::string::String> {
        &self.requirements_s3_path
    }
    /// <p>The version of the <code>requirements.txt</code> file on your Amazon S3 bucket. You must specify a version each time a requirements.txt file is updated. For more information, see <a href="https://docs.aws.amazon.com/AmazonS3/latest/userguide/versioning-workflows.html">How S3 Versioning works</a>.</p>
    pub fn requirements_s3_object_version(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.requirements_s3_object_version = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>The version of the <code>requirements.txt</code> file on your Amazon S3 bucket. You must specify a version each time a requirements.txt file is updated. For more information, see <a href="https://docs.aws.amazon.com/AmazonS3/latest/userguide/versioning-workflows.html">How S3 Versioning works</a>.</p>
    pub fn set_requirements_s3_object_version(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.requirements_s3_object_version = input;
        self
    }
    /// <p>The version of the <code>requirements.txt</code> file on your Amazon S3 bucket. You must specify a version each time a requirements.txt file is updated. For more information, see <a href="https://docs.aws.amazon.com/AmazonS3/latest/userguide/versioning-workflows.html">How S3 Versioning works</a>.</p>
    pub fn get_requirements_s3_object_version(&self) -> &::std::option::Option<::std::string::String> {
        &self.requirements_s3_object_version
    }
    /// <p>The relative path to the startup shell script in your Amazon S3 bucket. For example, <code>s3://mwaa-environment/startup.sh</code>.</p>
    /// <p>Amazon MWAA runs the script as your environment starts, and before running the Apache Airflow process. You can use this script to install dependencies, modify Apache Airflow configuration options, and set environment variables. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/using-startup-script.html">Using a startup script</a>.</p>
    pub fn startup_script_s3_path(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.startup_script_s3_path = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>The relative path to the startup shell script in your Amazon S3 bucket. For example, <code>s3://mwaa-environment/startup.sh</code>.</p>
    /// <p>Amazon MWAA runs the script as your environment starts, and before running the Apache Airflow process. You can use this script to install dependencies, modify Apache Airflow configuration options, and set environment variables. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/using-startup-script.html">Using a startup script</a>.</p>
    pub fn set_startup_script_s3_path(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.startup_script_s3_path = input;
        self
    }
    /// <p>The relative path to the startup shell script in your Amazon S3 bucket. For example, <code>s3://mwaa-environment/startup.sh</code>.</p>
    /// <p>Amazon MWAA runs the script as your environment starts, and before running the Apache Airflow process. You can use this script to install dependencies, modify Apache Airflow configuration options, and set environment variables. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/using-startup-script.html">Using a startup script</a>.</p>
    pub fn get_startup_script_s3_path(&self) -> &::std::option::Option<::std::string::String> {
        &self.startup_script_s3_path
    }
    /// <p>The version of the startup shell script in your Amazon S3 bucket. You must specify the <a href="https://docs.aws.amazon.com/AmazonS3/latest/userguide/versioning-workflows.html">version ID</a> that Amazon S3 assigns to the file every time you update the script.</p>
    /// <p>Version IDs are Unicode, UTF-8 encoded, URL-ready, opaque strings that are no more than 1,024 bytes long. The following is an example:</p>
    /// <p><code>3sL4kqtJlcpXroDTDmJ+rmSpXd3dIbrHY+MTRCxf3vjVBH40Nr8X8gdRQBpUMLUo</code></p>
    /// <p>For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/using-startup-script.html">Using a startup script</a>.</p>
    pub fn startup_script_s3_object_version(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.startup_script_s3_object_version = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>The version of the startup shell script in your Amazon S3 bucket. You must specify the <a href="https://docs.aws.amazon.com/AmazonS3/latest/userguide/versioning-workflows.html">version ID</a> that Amazon S3 assigns to the file every time you update the script.</p>
    /// <p>Version IDs are Unicode, UTF-8 encoded, URL-ready, opaque strings that are no more than 1,024 bytes long. The following is an example:</p>
    /// <p><code>3sL4kqtJlcpXroDTDmJ+rmSpXd3dIbrHY+MTRCxf3vjVBH40Nr8X8gdRQBpUMLUo</code></p>
    /// <p>For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/using-startup-script.html">Using a startup script</a>.</p>
    pub fn set_startup_script_s3_object_version(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.startup_script_s3_object_version = input;
        self
    }
    /// <p>The version of the startup shell script in your Amazon S3 bucket. You must specify the <a href="https://docs.aws.amazon.com/AmazonS3/latest/userguide/versioning-workflows.html">version ID</a> that Amazon S3 assigns to the file every time you update the script.</p>
    /// <p>Version IDs are Unicode, UTF-8 encoded, URL-ready, opaque strings that are no more than 1,024 bytes long. The following is an example:</p>
    /// <p><code>3sL4kqtJlcpXroDTDmJ+rmSpXd3dIbrHY+MTRCxf3vjVBH40Nr8X8gdRQBpUMLUo</code></p>
    /// <p>For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/using-startup-script.html">Using a startup script</a>.</p>
    pub fn get_startup_script_s3_object_version(&self) -> &::std::option::Option<::std::string::String> {
        &self.startup_script_s3_object_version
    }
    /// Adds a key-value pair to `airflow_configuration_options`.
    ///
    /// To override the contents of this collection use [`set_airflow_configuration_options`](Self::set_airflow_configuration_options).
    ///
    /// <p>A list of key-value pairs containing the Apache Airflow configuration options you want to attach to your environment. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-env-variables.html">Apache Airflow configuration options</a>.</p>
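    ///
    /// A minimal sketch of the appender: each call inserts one option into the map. The option
    /// names and values below are illustrative only.
    ///
    /// ```no_run
    /// use aws_sdk_mwaa::operation::create_environment::CreateEnvironmentInput;
    ///
    /// let builder = CreateEnvironmentInput::builder()
    ///     .airflow_configuration_options("core.default_task_retries", "3")
    ///     .airflow_configuration_options("celery.worker_autoscale", "5,5");
    /// ```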
431    pub fn airflow_configuration_options(
432        mut self,
433        k: impl ::std::convert::Into<::std::string::String>,
434        v: impl ::std::convert::Into<::std::string::String>,
435    ) -> Self {
436        let mut hash_map = self.airflow_configuration_options.unwrap_or_default();
437        hash_map.insert(k.into(), v.into());
438        self.airflow_configuration_options = ::std::option::Option::Some(hash_map);
439        self
440    }
441    /// <p>A list of key-value pairs containing the Apache Airflow configuration options you want to attach to your environment. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-env-variables.html">Apache Airflow configuration options</a>.</p>
442    pub fn set_airflow_configuration_options(
443        mut self,
444        input: ::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>>,
445    ) -> Self {
446        self.airflow_configuration_options = input;
447        self
448    }
449    /// <p>A list of key-value pairs containing the Apache Airflow configuration options you want to attach to your environment. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-env-variables.html">Apache Airflow configuration options</a>.</p>
450    pub fn get_airflow_configuration_options(
451        &self,
452    ) -> &::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>> {
453        &self.airflow_configuration_options
454    }
455    /// <p>The environment class type. Valid values: <code>mw1.micro</code>, <code>mw1.small</code>, <code>mw1.medium</code>, <code>mw1.large</code>, <code>mw1.xlarge</code>, and <code>mw1.2xlarge</code>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/environment-class.html">Amazon MWAA environment class</a>.</p>
456    pub fn environment_class(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
457        self.environment_class = ::std::option::Option::Some(input.into());
458        self
459    }
460    /// <p>The environment class type. Valid values: <code>mw1.micro</code>, <code>mw1.small</code>, <code>mw1.medium</code>, <code>mw1.large</code>, <code>mw1.xlarge</code>, and <code>mw1.2xlarge</code>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/environment-class.html">Amazon MWAA environment class</a>.</p>
461    pub fn set_environment_class(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
462        self.environment_class = input;
463        self
464    }
465    /// <p>The environment class type. Valid values: <code>mw1.micro</code>, <code>mw1.small</code>, <code>mw1.medium</code>, <code>mw1.large</code>, <code>mw1.xlarge</code>, and <code>mw1.2xlarge</code>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/environment-class.html">Amazon MWAA environment class</a>.</p>
466    pub fn get_environment_class(&self) -> &::std::option::Option<::std::string::String> {
467        &self.environment_class
468    }
469    /// <p>The maximum number of workers that you want to run in your environment. MWAA scales the number of Apache Airflow workers up to the number you specify in the <code>MaxWorkers</code> field. For example, <code>20</code>. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers leaving the one worker that is included with your environment, or the number you specify in <code>MinWorkers</code>.</p>
470    pub fn max_workers(mut self, input: i32) -> Self {
471        self.max_workers = ::std::option::Option::Some(input);
472        self
473    }
474    /// <p>The maximum number of workers that you want to run in your environment. MWAA scales the number of Apache Airflow workers up to the number you specify in the <code>MaxWorkers</code> field. For example, <code>20</code>. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers leaving the one worker that is included with your environment, or the number you specify in <code>MinWorkers</code>.</p>
475    pub fn set_max_workers(mut self, input: ::std::option::Option<i32>) -> Self {
476        self.max_workers = input;
477        self
478    }
479    /// <p>The maximum number of workers that you want to run in your environment. MWAA scales the number of Apache Airflow workers up to the number you specify in the <code>MaxWorkers</code> field. For example, <code>20</code>. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers leaving the one worker that is included with your environment, or the number you specify in <code>MinWorkers</code>.</p>
480    pub fn get_max_workers(&self) -> &::std::option::Option<i32> {
481        &self.max_workers
482    }
483    /// <p>The Amazon Web Services Key Management Service (KMS) key to encrypt the data in your environment. You can use an Amazon Web Services owned CMK, or a Customer managed CMK (advanced). For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/create-environment.html">Create an Amazon MWAA environment</a>.</p>
484    pub fn kms_key(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
485        self.kms_key = ::std::option::Option::Some(input.into());
486        self
487    }
488    /// <p>The Amazon Web Services Key Management Service (KMS) key to encrypt the data in your environment. You can use an Amazon Web Services owned CMK, or a Customer managed CMK (advanced). For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/create-environment.html">Create an Amazon MWAA environment</a>.</p>
489    pub fn set_kms_key(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
490        self.kms_key = input;
491        self
492    }
493    /// <p>The Amazon Web Services Key Management Service (KMS) key to encrypt the data in your environment. You can use an Amazon Web Services owned CMK, or a Customer managed CMK (advanced). For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/create-environment.html">Create an Amazon MWAA environment</a>.</p>
494    pub fn get_kms_key(&self) -> &::std::option::Option<::std::string::String> {
495        &self.kms_key
496    }
    /// <p>The Apache Airflow version for your environment. If no value is specified, it defaults to the latest version. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/airflow-versions.html">Apache Airflow versions on Amazon Managed Workflows for Apache Airflow (Amazon MWAA)</a>.</p>
    /// <p>Valid values: <code>1.10.12</code>, <code>2.0.2</code>, <code>2.2.2</code>, <code>2.4.3</code>, <code>2.5.1</code>, <code>2.6.3</code>, <code>2.7.2</code>, <code>2.8.1</code>, <code>2.9.2</code>, <code>2.10.1</code>, and <code>2.10.3</code>.</p>
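    ///
    /// A small sketch of pinning the Apache Airflow version explicitly rather than relying on the
    /// default latest version:
    ///
    /// ```ignore
    /// let input = aws_sdk_mwaa::operation::create_environment::CreateEnvironmentInput::builder()
    ///     // Use one of the documented valid values.
    ///     .airflow_version("2.10.3")
    ///     .build()?;
    /// ```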
    pub fn airflow_version(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.airflow_version = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>The Apache Airflow version for your environment. If no value is specified, it defaults to the latest version. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/airflow-versions.html">Apache Airflow versions on Amazon Managed Workflows for Apache Airflow (Amazon MWAA)</a>.</p>
    /// <p>Valid values: <code>1.10.12</code>, <code>2.0.2</code>, <code>2.2.2</code>, <code>2.4.3</code>, <code>2.5.1</code>, <code>2.6.3</code>, <code>2.7.2</code>, <code>2.8.1</code>, <code>2.9.2</code>, <code>2.10.1</code>, and <code>2.10.3</code>.</p>
    pub fn set_airflow_version(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.airflow_version = input;
        self
    }
    /// <p>The Apache Airflow version for your environment. If no value is specified, it defaults to the latest version. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/airflow-versions.html">Apache Airflow versions on Amazon Managed Workflows for Apache Airflow (Amazon MWAA)</a>.</p>
    /// <p>Valid values: <code>1.10.12</code>, <code>2.0.2</code>, <code>2.2.2</code>, <code>2.4.3</code>, <code>2.5.1</code>, <code>2.6.3</code>, <code>2.7.2</code>, <code>2.8.1</code>, <code>2.9.2</code>, <code>2.10.1</code>, and <code>2.10.3</code>.</p>
    pub fn get_airflow_version(&self) -> &::std::option::Option<::std::string::String> {
        &self.airflow_version
    }
    /// <p>Defines the Apache Airflow logs to send to CloudWatch Logs.</p>
    pub fn logging_configuration(mut self, input: crate::types::LoggingConfigurationInput) -> Self {
        self.logging_configuration = ::std::option::Option::Some(input);
        self
    }
    /// <p>Defines the Apache Airflow logs to send to CloudWatch Logs.</p>
    pub fn set_logging_configuration(mut self, input: ::std::option::Option<crate::types::LoggingConfigurationInput>) -> Self {
        self.logging_configuration = input;
        self
    }
    /// <p>Defines the Apache Airflow logs to send to CloudWatch Logs.</p>
    pub fn get_logging_configuration(&self) -> &::std::option::Option<crate::types::LoggingConfigurationInput> {
        &self.logging_configuration
    }
    /// <p>The day and time of the week in Coordinated Universal Time (UTC) 24-hour standard time to start weekly maintenance updates of your environment in the following format: <code>DAY:HH:MM</code>. For example: <code>TUE:03:30</code>. You can specify a start time in 30-minute increments only.</p>
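    ///
    /// For example, to start maintenance on Tuesdays at 03:30 UTC (sketch only):
    ///
    /// ```ignore
    /// let input = aws_sdk_mwaa::operation::create_environment::CreateEnvironmentInput::builder()
    ///     // DAY:HH:MM, 24-hour UTC, in 30-minute increments.
    ///     .weekly_maintenance_window_start("TUE:03:30")
    ///     .build()?;
    /// ```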
    pub fn weekly_maintenance_window_start(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.weekly_maintenance_window_start = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>The day and time of the week in Coordinated Universal Time (UTC) 24-hour standard time to start weekly maintenance updates of your environment in the following format: <code>DAY:HH:MM</code>. For example: <code>TUE:03:30</code>. You can specify a start time in 30-minute increments only.</p>
    pub fn set_weekly_maintenance_window_start(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.weekly_maintenance_window_start = input;
        self
    }
    /// <p>The day and time of the week in Coordinated Universal Time (UTC) 24-hour standard time to start weekly maintenance updates of your environment in the following format: <code>DAY:HH:MM</code>. For example: <code>TUE:03:30</code>. You can specify a start time in 30-minute increments only.</p>
    pub fn get_weekly_maintenance_window_start(&self) -> &::std::option::Option<::std::string::String> {
        &self.weekly_maintenance_window_start
    }
    /// Adds a key-value pair to `tags`.
    ///
    /// To override the contents of this collection, use [`set_tags`](Self::set_tags).
    ///
    /// <p>The key-value tag pairs you want to associate with your environment. For example, <code>"Environment": "Staging"</code>. For more information, see <a href="https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html">Tagging Amazon Web Services resources</a>.</p>
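    ///
    /// Repeated calls accumulate entries in the map. A minimal sketch, where the tag keys and
    /// values are arbitrary examples:
    ///
    /// ```ignore
    /// let input = aws_sdk_mwaa::operation::create_environment::CreateEnvironmentInput::builder()
    ///     .tags("Environment", "Staging")
    ///     .tags("Team", "DataPlatform")
    ///     .build()?;
    /// ```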
    pub fn tags(mut self, k: impl ::std::convert::Into<::std::string::String>, v: impl ::std::convert::Into<::std::string::String>) -> Self {
        let mut hash_map = self.tags.unwrap_or_default();
        hash_map.insert(k.into(), v.into());
        self.tags = ::std::option::Option::Some(hash_map);
        self
    }
    /// <p>The key-value tag pairs you want to associate with your environment. For example, <code>"Environment": "Staging"</code>. For more information, see <a href="https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html">Tagging Amazon Web Services resources</a>.</p>
    pub fn set_tags(mut self, input: ::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>>) -> Self {
        self.tags = input;
        self
    }
    /// <p>The key-value tag pairs you want to associate with your environment. For example, <code>"Environment": "Staging"</code>. For more information, see <a href="https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html">Tagging Amazon Web Services resources</a>.</p>
    pub fn get_tags(&self) -> &::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>> {
        &self.tags
    }
    /// <p>Defines the access mode for the Apache Airflow <i>web server</i>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-networking.html">Apache Airflow access modes</a>.</p>
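    ///
    /// A sketch of requesting a private web server endpoint; the variant name below assumes the
    /// generated `WebserverAccessMode::PrivateOnly` value:
    ///
    /// ```ignore
    /// use aws_sdk_mwaa::types::WebserverAccessMode;
    ///
    /// let input = aws_sdk_mwaa::operation::create_environment::CreateEnvironmentInput::builder()
    ///     .webserver_access_mode(WebserverAccessMode::PrivateOnly)
    ///     .build()?;
    /// ```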
    pub fn webserver_access_mode(mut self, input: crate::types::WebserverAccessMode) -> Self {
        self.webserver_access_mode = ::std::option::Option::Some(input);
        self
    }
    /// <p>Defines the access mode for the Apache Airflow <i>web server</i>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-networking.html">Apache Airflow access modes</a>.</p>
    pub fn set_webserver_access_mode(mut self, input: ::std::option::Option<crate::types::WebserverAccessMode>) -> Self {
        self.webserver_access_mode = input;
        self
    }
    /// <p>Defines the access mode for the Apache Airflow <i>web server</i>. For more information, see <a href="https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-networking.html">Apache Airflow access modes</a>.</p>
    pub fn get_webserver_access_mode(&self) -> &::std::option::Option<crate::types::WebserverAccessMode> {
        &self.webserver_access_mode
    }
    /// <p>The minimum number of workers that you want to run in your environment. MWAA scales the number of Apache Airflow workers up to the number you specify in the <code>MaxWorkers</code> field. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers, leaving the worker count you specify in the <code>MinWorkers</code> field. For example, <code>2</code>.</p>
    pub fn min_workers(mut self, input: i32) -> Self {
        self.min_workers = ::std::option::Option::Some(input);
        self
    }
    /// <p>The minimum number of workers that you want to run in your environment. MWAA scales the number of Apache Airflow workers up to the number you specify in the <code>MaxWorkers</code> field. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers, leaving the worker count you specify in the <code>MinWorkers</code> field. For example, <code>2</code>.</p>
    pub fn set_min_workers(mut self, input: ::std::option::Option<i32>) -> Self {
        self.min_workers = input;
        self
    }
    /// <p>The minimum number of workers that you want to run in your environment. MWAA scales the number of Apache Airflow workers up to the number you specify in the <code>MaxWorkers</code> field. When there are no more tasks running, and no more in the queue, MWAA disposes of the extra workers, leaving the worker count you specify in the <code>MinWorkers</code> field. For example, <code>2</code>.</p>
    pub fn get_min_workers(&self) -> &::std::option::Option<i32> {
        &self.min_workers
    }
    /// <p>The number of Apache Airflow schedulers to run in your environment. Valid values:</p>
    /// <ul>
    /// <li>
    /// <p>v2 - For environments larger than mw1.micro, accepts values from <code>2</code> to <code>5</code>. Defaults to <code>2</code> for all environment sizes except mw1.micro, which defaults to <code>1</code>.</p></li>
    /// <li>
    /// <p>v1 - Accepts <code>1</code>.</p></li>
    /// </ul>
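    ///
    /// For example, on an Apache Airflow v2 environment (sketch only):
    ///
    /// ```ignore
    /// let input = aws_sdk_mwaa::operation::create_environment::CreateEnvironmentInput::builder()
    ///     .airflow_version("2.10.3")
    ///     // Two schedulers is the default for most v2 environment sizes.
    ///     .schedulers(2)
    ///     .build()?;
    /// ```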
    pub fn schedulers(mut self, input: i32) -> Self {
        self.schedulers = ::std::option::Option::Some(input);
        self
    }
    /// <p>The number of Apache Airflow schedulers to run in your environment. Valid values:</p>
    /// <ul>
    /// <li>
    /// <p>v2 - For environments larger than mw1.micro, accepts values from <code>2</code> to <code>5</code>. Defaults to <code>2</code> for all environment sizes except mw1.micro, which defaults to <code>1</code>.</p></li>
    /// <li>
    /// <p>v1 - Accepts <code>1</code>.</p></li>
    /// </ul>
    pub fn set_schedulers(mut self, input: ::std::option::Option<i32>) -> Self {
        self.schedulers = input;
        self
    }
    /// <p>The number of Apache Airflow schedulers to run in your environment. Valid values:</p>
    /// <ul>
    /// <li>
    /// <p>v2 - For environments larger than mw1.micro, accepts values from <code>2</code> to <code>5</code>. Defaults to <code>2</code> for all environment sizes except mw1.micro, which defaults to <code>1</code>.</p></li>
    /// <li>
    /// <p>v1 - Accepts <code>1</code>.</p></li>
    /// </ul>
    pub fn get_schedulers(&self) -> &::std::option::Option<i32> {
        &self.schedulers
    }
    /// <p>Defines whether the VPC endpoints configured for the environment are created and managed by the customer or by Amazon MWAA. If set to <code>SERVICE</code>, Amazon MWAA will create and manage the required VPC endpoints in your VPC. If set to <code>CUSTOMER</code>, you must create and manage the VPC endpoints for your VPC. If you choose to create an environment in a shared VPC, you must set this value to <code>CUSTOMER</code>. In a shared VPC deployment, the environment will remain in <code>PENDING</code> status until you create the VPC endpoints. If you do not take action to create the endpoints within 72 hours, the status will change to <code>CREATE_FAILED</code>. You can delete the failed environment and create a new one.</p>
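    ///
    /// A sketch of a shared-VPC configuration, where customer-managed endpoints are required; the
    /// variant name below assumes the generated `EndpointManagement::Customer` value:
    ///
    /// ```ignore
    /// use aws_sdk_mwaa::types::EndpointManagement;
    ///
    /// let input = aws_sdk_mwaa::operation::create_environment::CreateEnvironmentInput::builder()
    ///     // With CUSTOMER, you are responsible for creating the VPC endpoints within 72 hours.
    ///     .endpoint_management(EndpointManagement::Customer)
    ///     .build()?;
    /// ```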
    pub fn endpoint_management(mut self, input: crate::types::EndpointManagement) -> Self {
        self.endpoint_management = ::std::option::Option::Some(input);
        self
    }
    /// <p>Defines whether the VPC endpoints configured for the environment are created and managed by the customer or by Amazon MWAA. If set to <code>SERVICE</code>, Amazon MWAA will create and manage the required VPC endpoints in your VPC. If set to <code>CUSTOMER</code>, you must create and manage the VPC endpoints for your VPC. If you choose to create an environment in a shared VPC, you must set this value to <code>CUSTOMER</code>. In a shared VPC deployment, the environment will remain in <code>PENDING</code> status until you create the VPC endpoints. If you do not take action to create the endpoints within 72 hours, the status will change to <code>CREATE_FAILED</code>. You can delete the failed environment and create a new one.</p>
    pub fn set_endpoint_management(mut self, input: ::std::option::Option<crate::types::EndpointManagement>) -> Self {
        self.endpoint_management = input;
        self
    }
    /// <p>Defines whether the VPC endpoints configured for the environment are created and managed by the customer or by Amazon MWAA. If set to <code>SERVICE</code>, Amazon MWAA will create and manage the required VPC endpoints in your VPC. If set to <code>CUSTOMER</code>, you must create and manage the VPC endpoints for your VPC. If you choose to create an environment in a shared VPC, you must set this value to <code>CUSTOMER</code>. In a shared VPC deployment, the environment will remain in <code>PENDING</code> status until you create the VPC endpoints. If you do not take action to create the endpoints within 72 hours, the status will change to <code>CREATE_FAILED</code>. You can delete the failed environment and create a new one.</p>
    pub fn get_endpoint_management(&self) -> &::std::option::Option<crate::types::EndpointManagement> {
        &self.endpoint_management
    }
    /// <p>The minimum number of web servers that you want to run in your environment. Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for <code>MaxWebservers</code> when you interact with your Apache Airflow environment using the Apache Airflow REST API or the Apache Airflow CLI. As the transaction-per-second rate and the network load decrease, Amazon MWAA disposes of the additional web servers and scales down to the number set in <code>MinWebservers</code>.</p>
    /// <p>Valid values: For environments larger than mw1.micro, accepts values from <code>2</code> to <code>5</code>. Defaults to <code>2</code> for all environment sizes except mw1.micro, which defaults to <code>1</code>.</p>
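    ///
    /// A minimal sketch of bounding web server autoscaling with both <code>MinWebservers</code> and <code>MaxWebservers</code>
    /// (the values shown are illustrative):
    ///
    /// ```ignore
    /// let input = aws_sdk_mwaa::operation::create_environment::CreateEnvironmentInput::builder()
    ///     .min_webservers(2)
    ///     .max_webservers(5)
    ///     .build()?;
    /// ```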
    pub fn min_webservers(mut self, input: i32) -> Self {
        self.min_webservers = ::std::option::Option::Some(input);
        self
    }
    /// <p>The minimum number of web servers that you want to run in your environment. Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for <code>MaxWebservers</code> when you interact with your Apache Airflow environment using the Apache Airflow REST API or the Apache Airflow CLI. As the transaction-per-second rate and the network load decrease, Amazon MWAA disposes of the additional web servers and scales down to the number set in <code>MinWebservers</code>.</p>
    /// <p>Valid values: For environments larger than mw1.micro, accepts values from <code>2</code> to <code>5</code>. Defaults to <code>2</code> for all environment sizes except mw1.micro, which defaults to <code>1</code>.</p>
    pub fn set_min_webservers(mut self, input: ::std::option::Option<i32>) -> Self {
        self.min_webservers = input;
        self
    }
    /// <p>The minimum number of web servers that you want to run in your environment. Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for <code>MaxWebservers</code> when you interact with your Apache Airflow environment using the Apache Airflow REST API or the Apache Airflow CLI. As the transaction-per-second rate and the network load decrease, Amazon MWAA disposes of the additional web servers and scales down to the number set in <code>MinWebservers</code>.</p>
    /// <p>Valid values: For environments larger than mw1.micro, accepts values from <code>2</code> to <code>5</code>. Defaults to <code>2</code> for all environment sizes except mw1.micro, which defaults to <code>1</code>.</p>
    pub fn get_min_webservers(&self) -> &::std::option::Option<i32> {
        &self.min_webservers
    }
    /// <p>The maximum number of web servers that you want to run in your environment. Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for <code>MaxWebservers</code> when you interact with your Apache Airflow environment using the Apache Airflow REST API or the Apache Airflow CLI. For example, in scenarios where your workload requires network calls to the Apache Airflow REST API with a high transaction-per-second (TPS) rate, Amazon MWAA will increase the number of web servers up to the number set in <code>MaxWebservers</code>. As TPS rates decrease, Amazon MWAA disposes of the additional web servers and scales down to the number set in <code>MinWebservers</code>.</p>
    /// <p>Valid values: For environments larger than mw1.micro, accepts values from <code>2</code> to <code>5</code>. Defaults to <code>2</code> for all environment sizes except mw1.micro, which defaults to <code>1</code>.</p>
    pub fn max_webservers(mut self, input: i32) -> Self {
        self.max_webservers = ::std::option::Option::Some(input);
        self
    }
    /// <p>The maximum number of web servers that you want to run in your environment. Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for <code>MaxWebservers</code> when you interact with your Apache Airflow environment using the Apache Airflow REST API or the Apache Airflow CLI. For example, in scenarios where your workload requires network calls to the Apache Airflow REST API with a high transaction-per-second (TPS) rate, Amazon MWAA will increase the number of web servers up to the number set in <code>MaxWebservers</code>. As TPS rates decrease, Amazon MWAA disposes of the additional web servers and scales down to the number set in <code>MinWebservers</code>.</p>
    /// <p>Valid values: For environments larger than mw1.micro, accepts values from <code>2</code> to <code>5</code>. Defaults to <code>2</code> for all environment sizes except mw1.micro, which defaults to <code>1</code>.</p>
    pub fn set_max_webservers(mut self, input: ::std::option::Option<i32>) -> Self {
        self.max_webservers = input;
        self
    }
    /// <p>The maximum number of web servers that you want to run in your environment. Amazon MWAA scales the number of Apache Airflow web servers up to the number you specify for <code>MaxWebservers</code> when you interact with your Apache Airflow environment using the Apache Airflow REST API or the Apache Airflow CLI. For example, in scenarios where your workload requires network calls to the Apache Airflow REST API with a high transaction-per-second (TPS) rate, Amazon MWAA will increase the number of web servers up to the number set in <code>MaxWebservers</code>. As TPS rates decrease, Amazon MWAA disposes of the additional web servers and scales down to the number set in <code>MinWebservers</code>.</p>
    /// <p>Valid values: For environments larger than mw1.micro, accepts values from <code>2</code> to <code>5</code>. Defaults to <code>2</code> for all environment sizes except mw1.micro, which defaults to <code>1</code>.</p>
    pub fn get_max_webservers(&self) -> &::std::option::Option<i32> {
        &self.max_webservers
    }
    /// Consumes the builder and constructs a [`CreateEnvironmentInput`](crate::operation::create_environment::CreateEnvironmentInput).
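    ///
    /// As generated here, `build` always returns `Ok`; fields that are required by the service but
    /// left unset are not rejected until the request is made. A minimal sketch, where the field
    /// values are placeholders:
    ///
    /// ```ignore
    /// let input = aws_sdk_mwaa::operation::create_environment::CreateEnvironmentInput::builder()
    ///     .name("MyMWAAEnvironment")
    ///     .execution_role_arn("arn:aws:iam::123456789012:role/my-execution-role")
    ///     .source_bucket_arn("arn:aws:s3:::my-airflow-bucket-unique-name")
    ///     .dag_s3_path("dags")
    ///     .build()?;
    /// ```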
    pub fn build(
        self,
    ) -> ::std::result::Result<crate::operation::create_environment::CreateEnvironmentInput, ::aws_smithy_types::error::operation::BuildError> {
        ::std::result::Result::Ok(crate::operation::create_environment::CreateEnvironmentInput {
            name: self.name,
            execution_role_arn: self.execution_role_arn,
            source_bucket_arn: self.source_bucket_arn,
            dag_s3_path: self.dag_s3_path,
            network_configuration: self.network_configuration,
            plugins_s3_path: self.plugins_s3_path,
            plugins_s3_object_version: self.plugins_s3_object_version,
            requirements_s3_path: self.requirements_s3_path,
            requirements_s3_object_version: self.requirements_s3_object_version,
            startup_script_s3_path: self.startup_script_s3_path,
            startup_script_s3_object_version: self.startup_script_s3_object_version,
            airflow_configuration_options: self.airflow_configuration_options,
            environment_class: self.environment_class,
            max_workers: self.max_workers,
            kms_key: self.kms_key,
            airflow_version: self.airflow_version,
            logging_configuration: self.logging_configuration,
            weekly_maintenance_window_start: self.weekly_maintenance_window_start,
            tags: self.tags,
            webserver_access_mode: self.webserver_access_mode,
            min_workers: self.min_workers,
            schedulers: self.schedulers,
            endpoint_management: self.endpoint_management,
            min_webservers: self.min_webservers,
            max_webservers: self.max_webservers,
        })
    }
}
impl ::std::fmt::Debug for CreateEnvironmentInputBuilder {
    fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
        let mut formatter = f.debug_struct("CreateEnvironmentInputBuilder");
        formatter.field("name", &self.name);
        formatter.field("execution_role_arn", &self.execution_role_arn);
        formatter.field("source_bucket_arn", &self.source_bucket_arn);
        formatter.field("dag_s3_path", &self.dag_s3_path);
        formatter.field("network_configuration", &self.network_configuration);
        formatter.field("plugins_s3_path", &self.plugins_s3_path);
        formatter.field("plugins_s3_object_version", &self.plugins_s3_object_version);
        formatter.field("requirements_s3_path", &self.requirements_s3_path);
        formatter.field("requirements_s3_object_version", &self.requirements_s3_object_version);
        formatter.field("startup_script_s3_path", &self.startup_script_s3_path);
        formatter.field("startup_script_s3_object_version", &self.startup_script_s3_object_version);
        formatter.field("airflow_configuration_options", &"*** Sensitive Data Redacted ***");
        formatter.field("environment_class", &self.environment_class);
        formatter.field("max_workers", &self.max_workers);
        formatter.field("kms_key", &self.kms_key);
        formatter.field("airflow_version", &self.airflow_version);
        formatter.field("logging_configuration", &self.logging_configuration);
        formatter.field("weekly_maintenance_window_start", &self.weekly_maintenance_window_start);
        formatter.field("tags", &self.tags);
        formatter.field("webserver_access_mode", &self.webserver_access_mode);
        formatter.field("min_workers", &self.min_workers);
        formatter.field("schedulers", &self.schedulers);
        formatter.field("endpoint_management", &self.endpoint_management);
        formatter.field("min_webservers", &self.min_webservers);
        formatter.field("max_webservers", &self.max_webservers);
        formatter.finish()
    }
}