Skip to main content

aws_sdk_applicationdiscovery/types/
_continuous_export_description.rs

1// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
2
/// <p>Describes a single continuous export: its unique ID, status, error detail, destination S3 bucket, start/stop timestamps, data source, and schema storage configuration.</p>
#[non_exhaustive]
#[derive(::std::clone::Clone, ::std::cmp::PartialEq, ::std::fmt::Debug)]
pub struct ContinuousExportDescription {
    /// <p>The unique ID assigned to this export.</p>
    pub export_id: ::std::option::Option<::std::string::String>,
    /// <p>Describes the status of the export. Can be one of the following values:</p>
    /// <ul>
    /// <li>
    /// <p>START_IN_PROGRESS - setting up resources to start continuous export.</p></li>
    /// <li>
    /// <p>START_FAILED - an error occurred setting up continuous export. To recover, call start-continuous-export again.</p></li>
    /// <li>
    /// <p>ACTIVE - data is being exported to the customer bucket.</p></li>
    /// <li>
    /// <p>ERROR - an error occurred during export. To fix the issue, call stop-continuous-export and start-continuous-export.</p></li>
    /// <li>
    /// <p>STOP_IN_PROGRESS - stopping the export.</p></li>
    /// <li>
    /// <p>STOP_FAILED - an error occurred stopping the export. To recover, call stop-continuous-export again.</p></li>
    /// <li>
    /// <p>INACTIVE - the continuous export has been stopped. Data is no longer being exported to the customer bucket.</p></li>
    /// </ul>
    pub status: ::std::option::Option<crate::types::ContinuousExportStatus>,
    /// <p>Contains information about any errors that have occurred. This data type can have the following values:</p>
    /// <ul>
    /// <li>
    /// <p>ACCESS_DENIED - You don’t have permission to start Data Exploration in Amazon Athena. Contact your Amazon Web Services administrator for help. For more information, see <a href="http://docs.aws.amazon.com/application-discovery/latest/userguide/setting-up.html">Setting Up Amazon Web Services Application Discovery Service</a> in the Application Discovery Service User Guide.</p></li>
    /// <li>
    /// <p>DELIVERY_STREAM_LIMIT_FAILURE - You reached the limit for Amazon Kinesis Data Firehose delivery streams. Reduce the number of streams or request a limit increase and try again. For more information, see <a href="http://docs.aws.amazon.com/streams/latest/dev/service-sizes-and-limits.html">Kinesis Data Streams Limits</a> in the Amazon Kinesis Data Streams Developer Guide.</p></li>
    /// <li>
    /// <p>FIREHOSE_ROLE_MISSING - The Data Exploration feature is in an error state because your user is missing the Amazon Web ServicesApplicationDiscoveryServiceFirehose role. Turn on Data Exploration in Amazon Athena and try again. For more information, see <a href="https://docs.aws.amazon.com/application-discovery/latest/userguide/security-iam-awsmanpol.html#security-iam-awsmanpol-create-firehose-role">Creating the Amazon Web ServicesApplicationDiscoveryServiceFirehose Role</a> in the Application Discovery Service User Guide.</p></li>
    /// <li>
    /// <p>FIREHOSE_STREAM_DOES_NOT_EXIST - The Data Exploration feature is in an error state because your user is missing one or more of the Kinesis data delivery streams.</p></li>
    /// <li>
    /// <p>INTERNAL_FAILURE - The Data Exploration feature is in an error state because of an internal failure. Try again later. If this problem persists, contact Amazon Web Services Support.</p></li>
    /// <li>
    /// <p>LAKE_FORMATION_ACCESS_DENIED - You don't have sufficient lake formation permissions to start continuous export. For more information, see <a href="http://docs.aws.amazon.com/lake-formation/latest/dg/upgrade-glue-lake-formation.html"> Upgrading Amazon Web Services Glue Data Permissions to the Amazon Web Services Lake Formation Model </a> in the Amazon Web Services <i>Lake Formation Developer Guide</i>.</p>
    /// <p>You can use one of the following two ways to resolve this issue.</p>
    /// <ol>
    /// <li>
    /// <p>If you don’t want to use the Lake Formation permission model, you can change the default Data Catalog settings to use only Amazon Web Services Identity and Access Management (IAM) access control for new databases. For more information, see <a href="https://docs.aws.amazon.com/lake-formation/latest/dg/getting-started-setup.html#setup-change-cat-settings">Change Data Catalog Settings</a> in the <i>Lake Formation Developer Guide</i>.</p></li>
    /// <li>
    /// <p>You can give the service-linked IAM roles AWSServiceRoleForApplicationDiscoveryServiceContinuousExport and AWSApplicationDiscoveryServiceFirehose the required Lake Formation permissions. For more information, see <a href="https://docs.aws.amazon.com/lake-formation/latest/dg/granting-database-permissions.html"> Granting Database Permissions</a> in the <i>Lake Formation Developer Guide</i>.</p>
    /// <ol>
    /// <li>
    /// <p>AWSServiceRoleForApplicationDiscoveryServiceContinuousExport - Grant database creator permissions, which gives the role database creation ability and implicit permissions for any created tables. For more information, see <a href="https://docs.aws.amazon.com/lake-formation/latest/dg/implicit-permissions.html"> Implicit Lake Formation Permissions </a> in the <i>Lake Formation Developer Guide</i>.</p></li>
    /// <li>
    /// <p>AWSApplicationDiscoveryServiceFirehose - Grant describe permissions for all tables in the database.</p></li>
    /// </ol></li>
    /// </ol></li>
    /// <li>
    /// <p>S3_BUCKET_LIMIT_FAILURE - You reached the limit for Amazon S3 buckets. Reduce the number of S3 buckets or request a limit increase and try again. For more information, see <a href="http://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html">Bucket Restrictions and Limitations</a> in the Amazon Simple Storage Service Developer Guide.</p></li>
    /// <li>
    /// <p>S3_NOT_SIGNED_UP - Your account is not signed up for the Amazon S3 service. You must sign up before you can use Amazon S3. You can sign up at the following URL: <a href="https://aws.amazon.com/s3">https://aws.amazon.com/s3</a>.</p></li>
    /// </ul>
    pub status_detail: ::std::option::Option<::std::string::String>,
    /// <p>The name of the s3 bucket where the export data parquet files are stored.</p>
    pub s3_bucket: ::std::option::Option<::std::string::String>,
    /// <p>The timestamp representing when the continuous export was started.</p>
    pub start_time: ::std::option::Option<::aws_smithy_types::DateTime>,
    /// <p>The timestamp that represents when this continuous export was stopped.</p>
    pub stop_time: ::std::option::Option<::aws_smithy_types::DateTime>,
    /// <p>The type of data collector used to gather this data (currently only offered for AGENT).</p>
    pub data_source: ::std::option::Option<crate::types::DataSource>,
    /// <p>An object which describes how the data is stored.</p>
    /// <ul>
    /// <li>
    /// <p><code>databaseName</code> - the name of the Glue database used to store the schema.</p></li>
    /// </ul>
    pub schema_storage_config: ::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>>,
}
75impl ContinuousExportDescription {
76    /// <p>The unique ID assigned to this export.</p>
77    pub fn export_id(&self) -> ::std::option::Option<&str> {
78        self.export_id.as_deref()
79    }
80    /// <p>Describes the status of the export. Can be one of the following values:</p>
81    /// <ul>
82    /// <li>
83    /// <p>START_IN_PROGRESS - setting up resources to start continuous export.</p></li>
84    /// <li>
85    /// <p>START_FAILED - an error occurred setting up continuous export. To recover, call start-continuous-export again.</p></li>
86    /// <li>
87    /// <p>ACTIVE - data is being exported to the customer bucket.</p></li>
88    /// <li>
89    /// <p>ERROR - an error occurred during export. To fix the issue, call stop-continuous-export and start-continuous-export.</p></li>
90    /// <li>
91    /// <p>STOP_IN_PROGRESS - stopping the export.</p></li>
92    /// <li>
93    /// <p>STOP_FAILED - an error occurred stopping the export. To recover, call stop-continuous-export again.</p></li>
94    /// <li>
95    /// <p>INACTIVE - the continuous export has been stopped. Data is no longer being exported to the customer bucket.</p></li>
96    /// </ul>
97    pub fn status(&self) -> ::std::option::Option<&crate::types::ContinuousExportStatus> {
98        self.status.as_ref()
99    }
100    /// <p>Contains information about any errors that have occurred. This data type can have the following values:</p>
101    /// <ul>
102    /// <li>
103    /// <p>ACCESS_DENIED - You don’t have permission to start Data Exploration in Amazon Athena. Contact your Amazon Web Services administrator for help. For more information, see <a href="http://docs.aws.amazon.com/application-discovery/latest/userguide/setting-up.html">Setting Up Amazon Web Services Application Discovery Service</a> in the Application Discovery Service User Guide.</p></li>
104    /// <li>
105    /// <p>DELIVERY_STREAM_LIMIT_FAILURE - You reached the limit for Amazon Kinesis Data Firehose delivery streams. Reduce the number of streams or request a limit increase and try again. For more information, see <a href="http://docs.aws.amazon.com/streams/latest/dev/service-sizes-and-limits.html">Kinesis Data Streams Limits</a> in the Amazon Kinesis Data Streams Developer Guide.</p></li>
106    /// <li>
107    /// <p>FIREHOSE_ROLE_MISSING - The Data Exploration feature is in an error state because your user is missing the Amazon Web ServicesApplicationDiscoveryServiceFirehose role. Turn on Data Exploration in Amazon Athena and try again. For more information, see <a href="https://docs.aws.amazon.com/application-discovery/latest/userguide/security-iam-awsmanpol.html#security-iam-awsmanpol-create-firehose-role">Creating the Amazon Web ServicesApplicationDiscoveryServiceFirehose Role</a> in the Application Discovery Service User Guide.</p></li>
108    /// <li>
109    /// <p>FIREHOSE_STREAM_DOES_NOT_EXIST - The Data Exploration feature is in an error state because your user is missing one or more of the Kinesis data delivery streams.</p></li>
110    /// <li>
111    /// <p>INTERNAL_FAILURE - The Data Exploration feature is in an error state because of an internal failure. Try again later. If this problem persists, contact Amazon Web Services Support.</p></li>
112    /// <li>
113    /// <p>LAKE_FORMATION_ACCESS_DENIED - You don't have sufficient lake formation permissions to start continuous export. For more information, see <a href="http://docs.aws.amazon.com/lake-formation/latest/dg/upgrade-glue-lake-formation.html"> Upgrading Amazon Web Services Glue Data Permissions to the Amazon Web Services Lake Formation Model </a> in the Amazon Web Services <i>Lake Formation Developer Guide</i>.</p>
114    /// <p>You can use one of the following two ways to resolve this issue.</p>
115    /// <ol>
116    /// <li>
117    /// <p>If you don’t want to use the Lake Formation permission model, you can change the default Data Catalog settings to use only Amazon Web Services Identity and Access Management (IAM) access control for new databases. For more information, see <a href="https://docs.aws.amazon.com/lake-formation/latest/dg/getting-started-setup.html#setup-change-cat-settings">Change Data Catalog Settings</a> in the <i>Lake Formation Developer Guide</i>.</p></li>
118    /// <li>
119    /// <p>You can give the service-linked IAM roles AWSServiceRoleForApplicationDiscoveryServiceContinuousExport and AWSApplicationDiscoveryServiceFirehose the required Lake Formation permissions. For more information, see <a href="https://docs.aws.amazon.com/lake-formation/latest/dg/granting-database-permissions.html"> Granting Database Permissions</a> in the <i>Lake Formation Developer Guide</i>.</p>
120    /// <ol>
121    /// <li>
122    /// <p>AWSServiceRoleForApplicationDiscoveryServiceContinuousExport - Grant database creator permissions, which gives the role database creation ability and implicit permissions for any created tables. For more information, see <a href="https://docs.aws.amazon.com/lake-formation/latest/dg/implicit-permissions.html"> Implicit Lake Formation Permissions </a> in the <i>Lake Formation Developer Guide</i>.</p></li>
123    /// <li>
124    /// <p>AWSApplicationDiscoveryServiceFirehose - Grant describe permissions for all tables in the database.</p></li>
125    /// </ol></li>
126    /// </ol></li>
127    /// <li>
128    /// <p>S3_BUCKET_LIMIT_FAILURE - You reached the limit for Amazon S3 buckets. Reduce the number of S3 buckets or request a limit increase and try again. For more information, see <a href="http://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html">Bucket Restrictions and Limitations</a> in the Amazon Simple Storage Service Developer Guide.</p></li>
129    /// <li>
130    /// <p>S3_NOT_SIGNED_UP - Your account is not signed up for the Amazon S3 service. You must sign up before you can use Amazon S3. You can sign up at the following URL: <a href="https://aws.amazon.com/s3">https://aws.amazon.com/s3</a>.</p></li>
131    /// </ul>
132    pub fn status_detail(&self) -> ::std::option::Option<&str> {
133        self.status_detail.as_deref()
134    }
135    /// <p>The name of the s3 bucket where the export data parquet files are stored.</p>
136    pub fn s3_bucket(&self) -> ::std::option::Option<&str> {
137        self.s3_bucket.as_deref()
138    }
139    /// <p>The timestamp representing when the continuous export was started.</p>
140    pub fn start_time(&self) -> ::std::option::Option<&::aws_smithy_types::DateTime> {
141        self.start_time.as_ref()
142    }
143    /// <p>The timestamp that represents when this continuous export was stopped.</p>
144    pub fn stop_time(&self) -> ::std::option::Option<&::aws_smithy_types::DateTime> {
145        self.stop_time.as_ref()
146    }
147    /// <p>The type of data collector used to gather this data (currently only offered for AGENT).</p>
148    pub fn data_source(&self) -> ::std::option::Option<&crate::types::DataSource> {
149        self.data_source.as_ref()
150    }
151    /// <p>An object which describes how the data is stored.</p>
152    /// <ul>
153    /// <li>
154    /// <p><code>databaseName</code> - the name of the Glue database used to store the schema.</p></li>
155    /// </ul>
156    pub fn schema_storage_config(&self) -> ::std::option::Option<&::std::collections::HashMap<::std::string::String, ::std::string::String>> {
157        self.schema_storage_config.as_ref()
158    }
159}
160impl ContinuousExportDescription {
161    /// Creates a new builder-style object to manufacture [`ContinuousExportDescription`](crate::types::ContinuousExportDescription).
162    pub fn builder() -> crate::types::builders::ContinuousExportDescriptionBuilder {
163        crate::types::builders::ContinuousExportDescriptionBuilder::default()
164    }
165}
166
/// A builder for [`ContinuousExportDescription`](crate::types::ContinuousExportDescription).
#[derive(::std::clone::Clone, ::std::cmp::PartialEq, ::std::default::Default, ::std::fmt::Debug)]
#[non_exhaustive]
pub struct ContinuousExportDescriptionBuilder {
    /// The unique ID assigned to this export.
    pub(crate) export_id: ::std::option::Option<::std::string::String>,
    /// The status of the export (e.g. START_IN_PROGRESS, ACTIVE, ERROR, INACTIVE).
    pub(crate) status: ::std::option::Option<crate::types::ContinuousExportStatus>,
    /// Detail about any error that occurred during the export.
    pub(crate) status_detail: ::std::option::Option<::std::string::String>,
    /// The name of the S3 bucket where the export data parquet files are stored.
    pub(crate) s3_bucket: ::std::option::Option<::std::string::String>,
    /// The timestamp representing when the continuous export was started.
    pub(crate) start_time: ::std::option::Option<::aws_smithy_types::DateTime>,
    /// The timestamp that represents when this continuous export was stopped.
    pub(crate) stop_time: ::std::option::Option<::aws_smithy_types::DateTime>,
    /// The type of data collector used to gather this data (currently only offered for AGENT).
    pub(crate) data_source: ::std::option::Option<crate::types::DataSource>,
    /// How the data is stored, e.g. `databaseName` -> the Glue database used to store the schema.
    pub(crate) schema_storage_config: ::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>>,
}
180impl ContinuousExportDescriptionBuilder {
181    /// <p>The unique ID assigned to this export.</p>
182    pub fn export_id(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
183        self.export_id = ::std::option::Option::Some(input.into());
184        self
185    }
186    /// <p>The unique ID assigned to this export.</p>
187    pub fn set_export_id(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
188        self.export_id = input;
189        self
190    }
191    /// <p>The unique ID assigned to this export.</p>
192    pub fn get_export_id(&self) -> &::std::option::Option<::std::string::String> {
193        &self.export_id
194    }
195    /// <p>Describes the status of the export. Can be one of the following values:</p>
196    /// <ul>
197    /// <li>
198    /// <p>START_IN_PROGRESS - setting up resources to start continuous export.</p></li>
199    /// <li>
200    /// <p>START_FAILED - an error occurred setting up continuous export. To recover, call start-continuous-export again.</p></li>
201    /// <li>
202    /// <p>ACTIVE - data is being exported to the customer bucket.</p></li>
203    /// <li>
204    /// <p>ERROR - an error occurred during export. To fix the issue, call stop-continuous-export and start-continuous-export.</p></li>
205    /// <li>
206    /// <p>STOP_IN_PROGRESS - stopping the export.</p></li>
207    /// <li>
208    /// <p>STOP_FAILED - an error occurred stopping the export. To recover, call stop-continuous-export again.</p></li>
209    /// <li>
210    /// <p>INACTIVE - the continuous export has been stopped. Data is no longer being exported to the customer bucket.</p></li>
211    /// </ul>
212    pub fn status(mut self, input: crate::types::ContinuousExportStatus) -> Self {
213        self.status = ::std::option::Option::Some(input);
214        self
215    }
216    /// <p>Describes the status of the export. Can be one of the following values:</p>
217    /// <ul>
218    /// <li>
219    /// <p>START_IN_PROGRESS - setting up resources to start continuous export.</p></li>
220    /// <li>
221    /// <p>START_FAILED - an error occurred setting up continuous export. To recover, call start-continuous-export again.</p></li>
222    /// <li>
223    /// <p>ACTIVE - data is being exported to the customer bucket.</p></li>
224    /// <li>
225    /// <p>ERROR - an error occurred during export. To fix the issue, call stop-continuous-export and start-continuous-export.</p></li>
226    /// <li>
227    /// <p>STOP_IN_PROGRESS - stopping the export.</p></li>
228    /// <li>
229    /// <p>STOP_FAILED - an error occurred stopping the export. To recover, call stop-continuous-export again.</p></li>
230    /// <li>
231    /// <p>INACTIVE - the continuous export has been stopped. Data is no longer being exported to the customer bucket.</p></li>
232    /// </ul>
233    pub fn set_status(mut self, input: ::std::option::Option<crate::types::ContinuousExportStatus>) -> Self {
234        self.status = input;
235        self
236    }
237    /// <p>Describes the status of the export. Can be one of the following values:</p>
238    /// <ul>
239    /// <li>
240    /// <p>START_IN_PROGRESS - setting up resources to start continuous export.</p></li>
241    /// <li>
242    /// <p>START_FAILED - an error occurred setting up continuous export. To recover, call start-continuous-export again.</p></li>
243    /// <li>
244    /// <p>ACTIVE - data is being exported to the customer bucket.</p></li>
245    /// <li>
246    /// <p>ERROR - an error occurred during export. To fix the issue, call stop-continuous-export and start-continuous-export.</p></li>
247    /// <li>
248    /// <p>STOP_IN_PROGRESS - stopping the export.</p></li>
249    /// <li>
250    /// <p>STOP_FAILED - an error occurred stopping the export. To recover, call stop-continuous-export again.</p></li>
251    /// <li>
252    /// <p>INACTIVE - the continuous export has been stopped. Data is no longer being exported to the customer bucket.</p></li>
253    /// </ul>
254    pub fn get_status(&self) -> &::std::option::Option<crate::types::ContinuousExportStatus> {
255        &self.status
256    }
257    /// <p>Contains information about any errors that have occurred. This data type can have the following values:</p>
258    /// <ul>
259    /// <li>
260    /// <p>ACCESS_DENIED - You don’t have permission to start Data Exploration in Amazon Athena. Contact your Amazon Web Services administrator for help. For more information, see <a href="http://docs.aws.amazon.com/application-discovery/latest/userguide/setting-up.html">Setting Up Amazon Web Services Application Discovery Service</a> in the Application Discovery Service User Guide.</p></li>
261    /// <li>
262    /// <p>DELIVERY_STREAM_LIMIT_FAILURE - You reached the limit for Amazon Kinesis Data Firehose delivery streams. Reduce the number of streams or request a limit increase and try again. For more information, see <a href="http://docs.aws.amazon.com/streams/latest/dev/service-sizes-and-limits.html">Kinesis Data Streams Limits</a> in the Amazon Kinesis Data Streams Developer Guide.</p></li>
263    /// <li>
264    /// <p>FIREHOSE_ROLE_MISSING - The Data Exploration feature is in an error state because your user is missing the Amazon Web ServicesApplicationDiscoveryServiceFirehose role. Turn on Data Exploration in Amazon Athena and try again. For more information, see <a href="https://docs.aws.amazon.com/application-discovery/latest/userguide/security-iam-awsmanpol.html#security-iam-awsmanpol-create-firehose-role">Creating the Amazon Web ServicesApplicationDiscoveryServiceFirehose Role</a> in the Application Discovery Service User Guide.</p></li>
265    /// <li>
266    /// <p>FIREHOSE_STREAM_DOES_NOT_EXIST - The Data Exploration feature is in an error state because your user is missing one or more of the Kinesis data delivery streams.</p></li>
267    /// <li>
268    /// <p>INTERNAL_FAILURE - The Data Exploration feature is in an error state because of an internal failure. Try again later. If this problem persists, contact Amazon Web Services Support.</p></li>
269    /// <li>
270    /// <p>LAKE_FORMATION_ACCESS_DENIED - You don't have sufficient lake formation permissions to start continuous export. For more information, see <a href="http://docs.aws.amazon.com/lake-formation/latest/dg/upgrade-glue-lake-formation.html"> Upgrading Amazon Web Services Glue Data Permissions to the Amazon Web Services Lake Formation Model </a> in the Amazon Web Services <i>Lake Formation Developer Guide</i>.</p>
271    /// <p>You can use one of the following two ways to resolve this issue.</p>
272    /// <ol>
273    /// <li>
274    /// <p>If you don’t want to use the Lake Formation permission model, you can change the default Data Catalog settings to use only Amazon Web Services Identity and Access Management (IAM) access control for new databases. For more information, see <a href="https://docs.aws.amazon.com/lake-formation/latest/dg/getting-started-setup.html#setup-change-cat-settings">Change Data Catalog Settings</a> in the <i>Lake Formation Developer Guide</i>.</p></li>
275    /// <li>
276    /// <p>You can give the service-linked IAM roles AWSServiceRoleForApplicationDiscoveryServiceContinuousExport and AWSApplicationDiscoveryServiceFirehose the required Lake Formation permissions. For more information, see <a href="https://docs.aws.amazon.com/lake-formation/latest/dg/granting-database-permissions.html"> Granting Database Permissions</a> in the <i>Lake Formation Developer Guide</i>.</p>
277    /// <ol>
278    /// <li>
279    /// <p>AWSServiceRoleForApplicationDiscoveryServiceContinuousExport - Grant database creator permissions, which gives the role database creation ability and implicit permissions for any created tables. For more information, see <a href="https://docs.aws.amazon.com/lake-formation/latest/dg/implicit-permissions.html"> Implicit Lake Formation Permissions </a> in the <i>Lake Formation Developer Guide</i>.</p></li>
280    /// <li>
281    /// <p>AWSApplicationDiscoveryServiceFirehose - Grant describe permissions for all tables in the database.</p></li>
282    /// </ol></li>
283    /// </ol></li>
284    /// <li>
285    /// <p>S3_BUCKET_LIMIT_FAILURE - You reached the limit for Amazon S3 buckets. Reduce the number of S3 buckets or request a limit increase and try again. For more information, see <a href="http://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html">Bucket Restrictions and Limitations</a> in the Amazon Simple Storage Service Developer Guide.</p></li>
286    /// <li>
287    /// <p>S3_NOT_SIGNED_UP - Your account is not signed up for the Amazon S3 service. You must sign up before you can use Amazon S3. You can sign up at the following URL: <a href="https://aws.amazon.com/s3">https://aws.amazon.com/s3</a>.</p></li>
288    /// </ul>
289    pub fn status_detail(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
290        self.status_detail = ::std::option::Option::Some(input.into());
291        self
292    }
293    /// <p>Contains information about any errors that have occurred. This data type can have the following values:</p>
294    /// <ul>
295    /// <li>
296    /// <p>ACCESS_DENIED - You don’t have permission to start Data Exploration in Amazon Athena. Contact your Amazon Web Services administrator for help. For more information, see <a href="http://docs.aws.amazon.com/application-discovery/latest/userguide/setting-up.html">Setting Up Amazon Web Services Application Discovery Service</a> in the Application Discovery Service User Guide.</p></li>
297    /// <li>
298    /// <p>DELIVERY_STREAM_LIMIT_FAILURE - You reached the limit for Amazon Kinesis Data Firehose delivery streams. Reduce the number of streams or request a limit increase and try again. For more information, see <a href="http://docs.aws.amazon.com/streams/latest/dev/service-sizes-and-limits.html">Kinesis Data Streams Limits</a> in the Amazon Kinesis Data Streams Developer Guide.</p></li>
299    /// <li>
300    /// <p>FIREHOSE_ROLE_MISSING - The Data Exploration feature is in an error state because your user is missing the Amazon Web ServicesApplicationDiscoveryServiceFirehose role. Turn on Data Exploration in Amazon Athena and try again. For more information, see <a href="https://docs.aws.amazon.com/application-discovery/latest/userguide/security-iam-awsmanpol.html#security-iam-awsmanpol-create-firehose-role">Creating the Amazon Web ServicesApplicationDiscoveryServiceFirehose Role</a> in the Application Discovery Service User Guide.</p></li>
301    /// <li>
302    /// <p>FIREHOSE_STREAM_DOES_NOT_EXIST - The Data Exploration feature is in an error state because your user is missing one or more of the Kinesis data delivery streams.</p></li>
303    /// <li>
304    /// <p>INTERNAL_FAILURE - The Data Exploration feature is in an error state because of an internal failure. Try again later. If this problem persists, contact Amazon Web Services Support.</p></li>
305    /// <li>
306    /// <p>LAKE_FORMATION_ACCESS_DENIED - You don't have sufficient lake formation permissions to start continuous export. For more information, see <a href="http://docs.aws.amazon.com/lake-formation/latest/dg/upgrade-glue-lake-formation.html"> Upgrading Amazon Web Services Glue Data Permissions to the Amazon Web Services Lake Formation Model </a> in the Amazon Web Services <i>Lake Formation Developer Guide</i>.</p>
307    /// <p>You can use one of the following two ways to resolve this issue.</p>
308    /// <ol>
309    /// <li>
310    /// <p>If you don’t want to use the Lake Formation permission model, you can change the default Data Catalog settings to use only Amazon Web Services Identity and Access Management (IAM) access control for new databases. For more information, see <a href="https://docs.aws.amazon.com/lake-formation/latest/dg/getting-started-setup.html#setup-change-cat-settings">Change Data Catalog Settings</a> in the <i>Lake Formation Developer Guide</i>.</p></li>
311    /// <li>
312    /// <p>You can give the service-linked IAM roles AWSServiceRoleForApplicationDiscoveryServiceContinuousExport and AWSApplicationDiscoveryServiceFirehose the required Lake Formation permissions. For more information, see <a href="https://docs.aws.amazon.com/lake-formation/latest/dg/granting-database-permissions.html"> Granting Database Permissions</a> in the <i>Lake Formation Developer Guide</i>.</p>
313    /// <ol>
314    /// <li>
315    /// <p>AWSServiceRoleForApplicationDiscoveryServiceContinuousExport - Grant database creator permissions, which gives the role database creation ability and implicit permissions for any created tables. For more information, see <a href="https://docs.aws.amazon.com/lake-formation/latest/dg/implicit-permissions.html"> Implicit Lake Formation Permissions </a> in the <i>Lake Formation Developer Guide</i>.</p></li>
316    /// <li>
317    /// <p>AWSApplicationDiscoveryServiceFirehose - Grant describe permissions for all tables in the database.</p></li>
318    /// </ol></li>
319    /// </ol></li>
320    /// <li>
321    /// <p>S3_BUCKET_LIMIT_FAILURE - You reached the limit for Amazon S3 buckets. Reduce the number of S3 buckets or request a limit increase and try again. For more information, see <a href="http://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html">Bucket Restrictions and Limitations</a> in the Amazon Simple Storage Service Developer Guide.</p></li>
322    /// <li>
323    /// <p>S3_NOT_SIGNED_UP - Your account is not signed up for the Amazon S3 service. You must sign up before you can use Amazon S3. You can sign up at the following URL: <a href="https://aws.amazon.com/s3">https://aws.amazon.com/s3</a>.</p></li>
324    /// </ul>
325    pub fn set_status_detail(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
326        self.status_detail = input;
327        self
328    }
329    /// <p>Contains information about any errors that have occurred. This data type can have the following values:</p>
330    /// <ul>
331    /// <li>
332    /// <p>ACCESS_DENIED - You don’t have permission to start Data Exploration in Amazon Athena. Contact your Amazon Web Services administrator for help. For more information, see <a href="http://docs.aws.amazon.com/application-discovery/latest/userguide/setting-up.html">Setting Up Amazon Web Services Application Discovery Service</a> in the Application Discovery Service User Guide.</p></li>
333    /// <li>
334    /// <p>DELIVERY_STREAM_LIMIT_FAILURE - You reached the limit for Amazon Kinesis Data Firehose delivery streams. Reduce the number of streams or request a limit increase and try again. For more information, see <a href="http://docs.aws.amazon.com/streams/latest/dev/service-sizes-and-limits.html">Kinesis Data Streams Limits</a> in the Amazon Kinesis Data Streams Developer Guide.</p></li>
335    /// <li>
336    /// <p>FIREHOSE_ROLE_MISSING - The Data Exploration feature is in an error state because your user is missing the Amazon Web ServicesApplicationDiscoveryServiceFirehose role. Turn on Data Exploration in Amazon Athena and try again. For more information, see <a href="https://docs.aws.amazon.com/application-discovery/latest/userguide/security-iam-awsmanpol.html#security-iam-awsmanpol-create-firehose-role">Creating the Amazon Web ServicesApplicationDiscoveryServiceFirehose Role</a> in the Application Discovery Service User Guide.</p></li>
337    /// <li>
338    /// <p>FIREHOSE_STREAM_DOES_NOT_EXIST - The Data Exploration feature is in an error state because your user is missing one or more of the Kinesis data delivery streams.</p></li>
339    /// <li>
340    /// <p>INTERNAL_FAILURE - The Data Exploration feature is in an error state because of an internal failure. Try again later. If this problem persists, contact Amazon Web Services Support.</p></li>
341    /// <li>
342    /// <p>LAKE_FORMATION_ACCESS_DENIED - You don't have sufficient lake formation permissions to start continuous export. For more information, see <a href="http://docs.aws.amazon.com/lake-formation/latest/dg/upgrade-glue-lake-formation.html"> Upgrading Amazon Web Services Glue Data Permissions to the Amazon Web Services Lake Formation Model </a> in the Amazon Web Services <i>Lake Formation Developer Guide</i>.</p>
343    /// <p>You can use one of the following two ways to resolve this issue.</p>
344    /// <ol>
345    /// <li>
346    /// <p>If you don’t want to use the Lake Formation permission model, you can change the default Data Catalog settings to use only Amazon Web Services Identity and Access Management (IAM) access control for new databases. For more information, see <a href="https://docs.aws.amazon.com/lake-formation/latest/dg/getting-started-setup.html#setup-change-cat-settings">Change Data Catalog Settings</a> in the <i>Lake Formation Developer Guide</i>.</p></li>
347    /// <li>
348    /// <p>You can give the service-linked IAM roles AWSServiceRoleForApplicationDiscoveryServiceContinuousExport and AWSApplicationDiscoveryServiceFirehose the required Lake Formation permissions. For more information, see <a href="https://docs.aws.amazon.com/lake-formation/latest/dg/granting-database-permissions.html"> Granting Database Permissions</a> in the <i>Lake Formation Developer Guide</i>.</p>
349    /// <ol>
350    /// <li>
351    /// <p>AWSServiceRoleForApplicationDiscoveryServiceContinuousExport - Grant database creator permissions, which gives the role database creation ability and implicit permissions for any created tables. For more information, see <a href="https://docs.aws.amazon.com/lake-formation/latest/dg/implicit-permissions.html"> Implicit Lake Formation Permissions </a> in the <i>Lake Formation Developer Guide</i>.</p></li>
352    /// <li>
353    /// <p>AWSApplicationDiscoveryServiceFirehose - Grant describe permissions for all tables in the database.</p></li>
354    /// </ol></li>
355    /// </ol></li>
356    /// <li>
357    /// <p>S3_BUCKET_LIMIT_FAILURE - You reached the limit for Amazon S3 buckets. Reduce the number of S3 buckets or request a limit increase and try again. For more information, see <a href="http://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html">Bucket Restrictions and Limitations</a> in the Amazon Simple Storage Service Developer Guide.</p></li>
358    /// <li>
359    /// <p>S3_NOT_SIGNED_UP - Your account is not signed up for the Amazon S3 service. You must sign up before you can use Amazon S3. You can sign up at the following URL: <a href="https://aws.amazon.com/s3">https://aws.amazon.com/s3</a>.</p></li>
360    /// </ul>
361    pub fn get_status_detail(&self) -> &::std::option::Option<::std::string::String> {
362        &self.status_detail
363    }
364    /// <p>The name of the s3 bucket where the export data parquet files are stored.</p>
365    pub fn s3_bucket(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
366        self.s3_bucket = ::std::option::Option::Some(input.into());
367        self
368    }
369    /// <p>The name of the s3 bucket where the export data parquet files are stored.</p>
370    pub fn set_s3_bucket(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
371        self.s3_bucket = input;
372        self
373    }
374    /// <p>The name of the s3 bucket where the export data parquet files are stored.</p>
375    pub fn get_s3_bucket(&self) -> &::std::option::Option<::std::string::String> {
376        &self.s3_bucket
377    }
378    /// <p>The timestamp representing when the continuous export was started.</p>
379    pub fn start_time(mut self, input: ::aws_smithy_types::DateTime) -> Self {
380        self.start_time = ::std::option::Option::Some(input);
381        self
382    }
383    /// <p>The timestamp representing when the continuous export was started.</p>
384    pub fn set_start_time(mut self, input: ::std::option::Option<::aws_smithy_types::DateTime>) -> Self {
385        self.start_time = input;
386        self
387    }
388    /// <p>The timestamp representing when the continuous export was started.</p>
389    pub fn get_start_time(&self) -> &::std::option::Option<::aws_smithy_types::DateTime> {
390        &self.start_time
391    }
392    /// <p>The timestamp that represents when this continuous export was stopped.</p>
393    pub fn stop_time(mut self, input: ::aws_smithy_types::DateTime) -> Self {
394        self.stop_time = ::std::option::Option::Some(input);
395        self
396    }
397    /// <p>The timestamp that represents when this continuous export was stopped.</p>
398    pub fn set_stop_time(mut self, input: ::std::option::Option<::aws_smithy_types::DateTime>) -> Self {
399        self.stop_time = input;
400        self
401    }
402    /// <p>The timestamp that represents when this continuous export was stopped.</p>
403    pub fn get_stop_time(&self) -> &::std::option::Option<::aws_smithy_types::DateTime> {
404        &self.stop_time
405    }
406    /// <p>The type of data collector used to gather this data (currently only offered for AGENT).</p>
407    pub fn data_source(mut self, input: crate::types::DataSource) -> Self {
408        self.data_source = ::std::option::Option::Some(input);
409        self
410    }
411    /// <p>The type of data collector used to gather this data (currently only offered for AGENT).</p>
412    pub fn set_data_source(mut self, input: ::std::option::Option<crate::types::DataSource>) -> Self {
413        self.data_source = input;
414        self
415    }
416    /// <p>The type of data collector used to gather this data (currently only offered for AGENT).</p>
417    pub fn get_data_source(&self) -> &::std::option::Option<crate::types::DataSource> {
418        &self.data_source
419    }
420    /// Adds a key-value pair to `schema_storage_config`.
421    ///
422    /// To override the contents of this collection use [`set_schema_storage_config`](Self::set_schema_storage_config).
423    ///
424    /// <p>An object which describes how the data is stored.</p>
425    /// <ul>
426    /// <li>
427    /// <p><code>databaseName</code> - the name of the Glue database used to store the schema.</p></li>
428    /// </ul>
429    pub fn schema_storage_config(
430        mut self,
431        k: impl ::std::convert::Into<::std::string::String>,
432        v: impl ::std::convert::Into<::std::string::String>,
433    ) -> Self {
434        let mut hash_map = self.schema_storage_config.unwrap_or_default();
435        hash_map.insert(k.into(), v.into());
436        self.schema_storage_config = ::std::option::Option::Some(hash_map);
437        self
438    }
439    /// <p>An object which describes how the data is stored.</p>
440    /// <ul>
441    /// <li>
442    /// <p><code>databaseName</code> - the name of the Glue database used to store the schema.</p></li>
443    /// </ul>
444    pub fn set_schema_storage_config(
445        mut self,
446        input: ::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>>,
447    ) -> Self {
448        self.schema_storage_config = input;
449        self
450    }
451    /// <p>An object which describes how the data is stored.</p>
452    /// <ul>
453    /// <li>
454    /// <p><code>databaseName</code> - the name of the Glue database used to store the schema.</p></li>
455    /// </ul>
456    pub fn get_schema_storage_config(&self) -> &::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>> {
457        &self.schema_storage_config
458    }
459    /// Consumes the builder and constructs a [`ContinuousExportDescription`](crate::types::ContinuousExportDescription).
460    pub fn build(self) -> crate::types::ContinuousExportDescription {
461        crate::types::ContinuousExportDescription {
462            export_id: self.export_id,
463            status: self.status,
464            status_detail: self.status_detail,
465            s3_bucket: self.s3_bucket,
466            start_time: self.start_time,
467            stop_time: self.stop_time,
468            data_source: self.data_source,
469            schema_storage_config: self.schema_storage_config,
470        }
471    }
472}