// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(::std::clone::Clone, ::std::cmp::PartialEq, ::std::fmt::Debug)]
pub struct AnalyzeDocumentInput {
    /// <p>The input document as base64-encoded bytes or an Amazon S3 object. If you use the AWS CLI to call Amazon Textract operations, you can't pass image bytes. The document must be an image in JPEG, PNG, PDF, or TIFF format.</p>
    /// <p>If you're using an AWS SDK to call Amazon Textract, you might not need to base64-encode image bytes that are passed using the <code>Bytes</code> field. </p>
    pub document: ::std::option::Option<crate::types::Document>,
    /// <p>A list of the types of analysis to perform. Add TABLES to the list to return information about the tables that are detected in the input document. Add FORMS to return detected form data. Add SIGNATURES to return the locations of detected signatures. To perform both forms and table analysis, add TABLES and FORMS to <code>FeatureTypes</code>. To detect signatures within form data and table data, add SIGNATURES to either TABLES or FORMS. All lines and words detected in the document are included in the response (including text that isn't related to the value of <code>FeatureTypes</code>). </p>
    pub feature_types: ::std::option::Option<::std::vec::Vec<crate::types::FeatureType>>,
    /// <p>Sets the configuration for the human-in-the-loop workflow for analyzing documents.</p>
    pub human_loop_config: ::std::option::Option<crate::types::HumanLoopConfig>,
    /// <p>Contains Queries and the alias for those Queries, as determined by the input. </p>
    pub queries_config: ::std::option::Option<crate::types::QueriesConfig>,
}
impl AnalyzeDocumentInput {
    /// <p>The input document as base64-encoded bytes or an Amazon S3 object. If you use the AWS CLI to call Amazon Textract operations, you can't pass image bytes. The document must be an image in JPEG, PNG, PDF, or TIFF format.</p>
    /// <p>If you're using an AWS SDK to call Amazon Textract, you might not need to base64-encode image bytes that are passed using the <code>Bytes</code> field. </p>
    pub fn document(&self) -> ::std::option::Option<&crate::types::Document> {
        self.document.as_ref()
    }
    /// <p>A list of the types of analysis to perform. Add TABLES to the list to return information about the tables that are detected in the input document. Add FORMS to return detected form data. Add SIGNATURES to return the locations of detected signatures. To perform both forms and table analysis, add TABLES and FORMS to <code>FeatureTypes</code>. To detect signatures within form data and table data, add SIGNATURES to either TABLES or FORMS. All lines and words detected in the document are included in the response (including text that isn't related to the value of <code>FeatureTypes</code>). </p>
    pub fn feature_types(&self) -> ::std::option::Option<&[crate::types::FeatureType]> {
        self.feature_types.as_deref()
    }
    /// <p>Sets the configuration for the human-in-the-loop workflow for analyzing documents.</p>
    pub fn human_loop_config(&self) -> ::std::option::Option<&crate::types::HumanLoopConfig> {
        self.human_loop_config.as_ref()
    }
    /// <p>Contains Queries and the alias for those Queries, as determined by the input. </p>
    pub fn queries_config(&self) -> ::std::option::Option<&crate::types::QueriesConfig> {
        self.queries_config.as_ref()
    }
}
impl AnalyzeDocumentInput {
    /// Creates a new builder-style object to manufacture [`AnalyzeDocumentInput`](crate::operation::analyze_document::AnalyzeDocumentInput).
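    ///
    /// # Examples
    ///
    /// A minimal usage sketch (not part of the service documentation), assuming this crate
    /// is consumed as `aws_sdk_textract`: build an input for a document stored in Amazon S3
    /// and request tables and forms analysis. The bucket and object names are placeholders.
    ///
    /// ```no_run
    /// use aws_sdk_textract::operation::analyze_document::AnalyzeDocumentInput;
    /// use aws_sdk_textract::types::{Document, FeatureType, S3Object};
    ///
    /// let document = Document::builder()
    ///     .s3_object(
    ///         S3Object::builder()
    ///             .bucket("amzn-s3-demo-bucket") // placeholder bucket name
    ///             .name("statement.pdf") // placeholder object key
    ///             .build(),
    ///     )
    ///     .build();
    ///
    /// let input = AnalyzeDocumentInput::builder()
    ///     .document(document)
    ///     .feature_types(FeatureType::Tables)
    ///     .feature_types(FeatureType::Forms)
    ///     .build()
    ///     .expect("a valid AnalyzeDocumentInput");
    /// # let _ = input;
    /// ```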
    pub fn builder() -> crate::operation::analyze_document::builders::AnalyzeDocumentInputBuilder {
        crate::operation::analyze_document::builders::AnalyzeDocumentInputBuilder::default()
    }
}

/// A builder for [`AnalyzeDocumentInput`](crate::operation::analyze_document::AnalyzeDocumentInput).
#[non_exhaustive]
#[derive(::std::clone::Clone, ::std::cmp::PartialEq, ::std::default::Default, ::std::fmt::Debug)]
pub struct AnalyzeDocumentInputBuilder {
    pub(crate) document: ::std::option::Option<crate::types::Document>,
    pub(crate) feature_types: ::std::option::Option<::std::vec::Vec<crate::types::FeatureType>>,
    pub(crate) human_loop_config: ::std::option::Option<crate::types::HumanLoopConfig>,
    pub(crate) queries_config: ::std::option::Option<crate::types::QueriesConfig>,
}
impl AnalyzeDocumentInputBuilder {
    /// <p>The input document as base64-encoded bytes or an Amazon S3 object. If you use the AWS CLI to call Amazon Textract operations, you can't pass image bytes. The document must be an image in JPEG, PNG, PDF, or TIFF format.</p>
    /// <p>If you're using an AWS SDK to call Amazon Textract, you might not need to base64-encode image bytes that are passed using the <code>Bytes</code> field. </p>
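    ///
    /// For example, a sketch only (it assumes this crate is used as `aws_sdk_textract` and
    /// re-exports `Blob` from its `primitives` module): raw image bytes can be passed
    /// directly through <code>Bytes</code>, and the SDK handles any base64 encoding.
    ///
    /// ```no_run
    /// use aws_sdk_textract::primitives::Blob;
    /// use aws_sdk_textract::types::Document;
    ///
    /// let bytes = std::fs::read("page-1.png").expect("readable image file"); // placeholder path
    /// let document = Document::builder().bytes(Blob::new(bytes)).build();
    /// # let _ = document;
    /// ```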
    pub fn document(mut self, input: crate::types::Document) -> Self {
        self.document = ::std::option::Option::Some(input);
        self
    }
    /// <p>The input document as base64-encoded bytes or an Amazon S3 object. If you use the AWS CLI to call Amazon Textract operations, you can't pass image bytes. The document must be an image in JPEG, PNG, PDF, or TIFF format.</p>
    /// <p>If you're using an AWS SDK to call Amazon Textract, you might not need to base64-encode image bytes that are passed using the <code>Bytes</code> field. </p>
    pub fn set_document(mut self, input: ::std::option::Option<crate::types::Document>) -> Self {
        self.document = input;
        self
    }
    /// <p>The input document as base64-encoded bytes or an Amazon S3 object. If you use the AWS CLI to call Amazon Textract operations, you can't pass image bytes. The document must be an image in JPEG, PNG, PDF, or TIFF format.</p>
    /// <p>If you're using an AWS SDK to call Amazon Textract, you might not need to base64-encode image bytes that are passed using the <code>Bytes</code> field. </p>
    pub fn get_document(&self) -> &::std::option::Option<crate::types::Document> {
        &self.document
    }
    /// Appends an item to `feature_types`.
    ///
    /// To override the contents of this collection use [`set_feature_types`](Self::set_feature_types).
    ///
    /// <p>A list of the types of analysis to perform. Add TABLES to the list to return information about the tables that are detected in the input document. Add FORMS to return detected form data. Add SIGNATURES to return the locations of detected signatures. To perform both forms and table analysis, add TABLES and FORMS to <code>FeatureTypes</code>. To detect signatures within form data and table data, add SIGNATURES to either TABLES or FORMS. All lines and words detected in the document are included in the response (including text that isn't related to the value of <code>FeatureTypes</code>). </p>
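    ///
    /// For example, a sketch only (assuming this crate is used as `aws_sdk_textract`):
    /// repeated calls append one value each, while [`set_feature_types`](Self::set_feature_types)
    /// replaces the whole list.
    ///
    /// ```no_run
    /// use aws_sdk_textract::operation::analyze_document::AnalyzeDocumentInput;
    /// use aws_sdk_textract::types::FeatureType;
    ///
    /// let builder = AnalyzeDocumentInput::builder()
    ///     .feature_types(FeatureType::Tables)
    ///     .feature_types(FeatureType::Forms); // the list is now [Tables, Forms]
    /// let builder = builder.set_feature_types(Some(vec![FeatureType::Signatures])); // list replaced
    /// # let _ = builder;
    /// ```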
    pub fn feature_types(mut self, input: crate::types::FeatureType) -> Self {
        let mut v = self.feature_types.unwrap_or_default();
        v.push(input);
        self.feature_types = ::std::option::Option::Some(v);
        self
    }
    /// <p>A list of the types of analysis to perform. Add TABLES to the list to return information about the tables that are detected in the input document. Add FORMS to return detected form data. Add SIGNATURES to return the locations of detected signatures. To perform both forms and table analysis, add TABLES and FORMS to <code>FeatureTypes</code>. To detect signatures within form data and table data, add SIGNATURES to either TABLES or FORMS. All lines and words detected in the document are included in the response (including text that isn't related to the value of <code>FeatureTypes</code>). </p>
    pub fn set_feature_types(mut self, input: ::std::option::Option<::std::vec::Vec<crate::types::FeatureType>>) -> Self {
        self.feature_types = input;
        self
    }
    /// <p>A list of the types of analysis to perform. Add TABLES to the list to return information about the tables that are detected in the input document. Add FORMS to return detected form data. Add SIGNATURES to return the locations of detected signatures. To perform both forms and table analysis, add TABLES and FORMS to <code>FeatureTypes</code>. To detect signatures within form data and table data, add SIGNATURES to either TABLES or FORMS. All lines and words detected in the document are included in the response (including text that isn't related to the value of <code>FeatureTypes</code>). </p>
    pub fn get_feature_types(&self) -> &::std::option::Option<::std::vec::Vec<crate::types::FeatureType>> {
        &self.feature_types
    }
    /// <p>Sets the configuration for the human-in-the-loop workflow for analyzing documents.</p>
    pub fn human_loop_config(mut self, input: crate::types::HumanLoopConfig) -> Self {
        self.human_loop_config = ::std::option::Option::Some(input);
        self
    }
    /// <p>Sets the configuration for the human-in-the-loop workflow for analyzing documents.</p>
    pub fn set_human_loop_config(mut self, input: ::std::option::Option<crate::types::HumanLoopConfig>) -> Self {
        self.human_loop_config = input;
        self
    }
    /// <p>Sets the configuration for the human-in-the-loop workflow for analyzing documents.</p>
    pub fn get_human_loop_config(&self) -> &::std::option::Option<crate::types::HumanLoopConfig> {
        &self.human_loop_config
    }
    /// <p>Contains Queries and the alias for those Queries, as determined by the input. </p>
    pub fn queries_config(mut self, input: crate::types::QueriesConfig) -> Self {
        self.queries_config = ::std::option::Option::Some(input);
        self
    }
    /// <p>Contains Queries and the alias for those Queries, as determined by the input. </p>
    pub fn set_queries_config(mut self, input: ::std::option::Option<crate::types::QueriesConfig>) -> Self {
        self.queries_config = input;
        self
    }
    /// <p>Contains Queries and the alias for those Queries, as determined by the input. </p>
    pub fn get_queries_config(&self) -> &::std::option::Option<crate::types::QueriesConfig> {
        &self.queries_config
    }
    /// Consumes the builder and constructs an [`AnalyzeDocumentInput`](crate::operation::analyze_document::AnalyzeDocumentInput).
    pub fn build(
        self,
    ) -> ::std::result::Result<crate::operation::analyze_document::AnalyzeDocumentInput, ::aws_smithy_http::operation::error::BuildError> {
        ::std::result::Result::Ok(crate::operation::analyze_document::AnalyzeDocumentInput {
            document: self.document,
            feature_types: self.feature_types,
            human_loop_config: self.human_loop_config,
            queries_config: self.queries_config,
        })
    }
}
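
// A small round-trip sketch added for illustration; it is not generated code. It only
// exercises items defined in this file or referenced by it: the builder's `feature_types`
// appender, `build()`, and the accessors on `AnalyzeDocumentInput`.
#[cfg(test)]
mod analyze_document_input_usage_sketch {
    #[test]
    fn builder_round_trip() {
        let input = crate::operation::analyze_document::AnalyzeDocumentInput::builder()
            .feature_types(crate::types::FeatureType::Tables)
            .feature_types(crate::types::FeatureType::Forms)
            .build()
            .expect("build() performs no validation at this layer");

        // No document was supplied, so the optional field stays `None`.
        assert!(input.document().is_none());
        // The two appended feature types come back as a slice from the accessor.
        assert_eq!(
            input.feature_types(),
            Some(&[crate::types::FeatureType::Tables, crate::types::FeatureType::Forms][..])
        );
    }
}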