use std::{fmt::Debug, net::SocketAddrV4};

use sqlx_core::Error as SqlxError;

use super::{ExaExport, ExportSource};
use crate::{
    connection::etl::RowSeparator,
    etl::{build_etl, traits::EtlJob, JobFuture, SocketFuture},
    ExaConnection,
};

/// A builder for an ETL EXPORT job.
///
/// Awaiting [`ExportBuilder::build`] outputs a future that drives the EXPORT query execution
/// and a [`Vec<ExaExport>`] of readers, which must be used concurrently to read the data
/// coming from Exasol.
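///
/// # Example
///
/// A minimal sketch of a typical EXPORT flow, marked `ignore` since it needs a live
/// connection; the `futures_util` helpers and the surrounding setup are illustrative
/// assumptions rather than part of this builder's API:
///
/// ```ignore
/// use futures_util::{future::try_join_all, AsyncReadExt};
///
/// let mut builder = ExportBuilder::new(ExportSource::Table("SOME_TABLE"));
/// let (query_fut, readers) = builder.build(&mut con).await?;
///
/// // Drain every reader concurrently while the query future runs.
/// let read_futs = readers.into_iter().map(|mut reader| async move {
///     let mut csv = String::new();
///     reader.read_to_string(&mut csv).await?;
///     Ok::<_, std::io::Error>(csv)
/// });
///
/// let (query_res, chunks) = futures_util::join!(query_fut, try_join_all(read_futs));
/// query_res?;
/// let chunks = chunks?;
/// ```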
#[derive(Debug)]
pub struct ExportBuilder<'a> {
    num_readers: usize,
    compression: Option<bool>,
    source: ExportSource<'a>,
    comment: Option<&'a str>,
    encoding: Option<&'a str>,
    null: &'a str,
    row_separator: RowSeparator,
    column_separator: &'a str,
    column_delimiter: &'a str,
    with_column_names: bool,
}

impl<'a> ExportBuilder<'a> {
    pub fn new(source: ExportSource<'a>) -> Self {
        Self {
            num_readers: 0,
            compression: None,
            source,
            comment: None,
            encoding: None,
            null: "",
            row_separator: RowSeparator::CRLF,
            column_separator: ",",
            column_delimiter: "\"",
            with_column_names: false,
        }
    }

    /// Builds the EXPORT job.
    ///
    /// This implies submitting the EXPORT query.
    /// The output is a future that awaits the result of the job,
    /// along with the workers used for the ETL I/O.
    ///
    /// # Errors
    ///
    /// Returns an error if the job could not be built and submitted.
    pub async fn build<'c>(
        &'a self,
        con: &'c mut ExaConnection,
    ) -> Result<(JobFuture<'c>, Vec<ExaExport>), SqlxError>
    where
        'c: 'a,
    {
        build_etl(self, con).await
    }

    /// Sets the number of reader jobs that will be started.
    /// If set to `0`, as many readers as possible are used (one per node).
    /// Providing a number larger than the number of nodes is equivalent to providing `0`.
    pub fn num_readers(&mut self, num_readers: usize) -> &mut Self {
        self.num_readers = num_readers;
        self
    }

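    /// Sets whether compression will be used for the data transfer.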
    #[cfg(feature = "compression")]
    pub fn compression(&mut self, enabled: bool) -> &mut Self {
        self.compression = Some(enabled);
        self
    }

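    /// Sets a comment to be prepended to the EXPORT query.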
    pub fn comment(&mut self, comment: &'a str) -> &mut Self {
        self.comment = Some(comment);
        self
    }

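    /// Sets the character encoding of the CSV data (the `ENCODING` clause).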
    pub fn encoding(&mut self, encoding: &'a str) -> &mut Self {
        self.encoding = Some(encoding);
        self
    }

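    /// Sets the string used to represent NULL values in the CSV output (the `NULL` clause).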
    pub fn null(&mut self, null: &'a str) -> &mut Self {
        self.null = null;
        self
    }

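    /// Sets the row separator of the CSV output (the `ROW SEPARATOR` clause).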
    pub fn row_separator(&mut self, separator: RowSeparator) -> &mut Self {
        self.row_separator = separator;
        self
    }

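    /// Sets the column separator of the CSV output (the `COLUMN SEPARATOR` clause).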
    pub fn column_separator(&mut self, separator: &'a str) -> &mut Self {
        self.column_separator = separator;
        self
    }

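    /// Sets the column delimiter, i.e. the character used to enclose field values
    /// (the `COLUMN DELIMITER` clause).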
    pub fn column_delimiter(&mut self, delimiter: &'a str) -> &mut Self {
        self.column_delimiter = delimiter;
        self
    }

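    /// Sets whether the column names are included as the first row of the output
    /// (the `WITH COLUMN NAMES` clause).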
    pub fn with_column_names(&mut self, flag: bool) -> &mut Self {
        self.with_column_names = flag;
        self
    }
}

impl<'a> EtlJob for ExportBuilder<'a> {
    const JOB_TYPE: &'static str = "export";

    type Worker = ExaExport;

    fn use_compression(&self) -> Option<bool> {
        self.compression
    }

    fn num_workers(&self) -> usize {
        self.num_readers
    }

    fn create_workers(
        &self,
        socket_futures: Vec<SocketFuture>,
        with_compression: bool,
    ) -> Vec<Self::Worker> {
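        // Each worker starts out in the `Setup` state, wrapping the future that will
        // yield its socket.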
        socket_futures
            .into_iter()
            .map(|f| ExaExport::Setup(f, with_compression))
            .collect()
    }

    fn query(&self, addrs: Vec<SocketAddrV4>, with_tls: bool, with_compression: bool) -> String {
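        // The generated statement roughly takes the shape below; the address and file
        // name are purely illustrative, as the `AT ... FILE ...` parts are produced by
        // `append_files` from the actual node sockets:
        //
        //   EXPORT "MY_TABLE" INTO CSV
        //   AT 'http://10.10.10.1:8080' FILE '000.csv'
        //   NULL = '' ROW SEPARATOR = 'CRLF' COLUMN SEPARATOR = ',' COLUMN DELIMITER = '"'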
        let mut query = String::new();

        if let Some(comment) = self.comment {
            Self::push_comment(&mut query, comment);
        }

        query.push_str("EXPORT ");

        match self.source {
            ExportSource::Table(tbl) => {
                Self::push_ident(&mut query, tbl);
            }
            ExportSource::Query(qr) => {
                query.push_str("(\n");
                query.push_str(qr);
                query.push_str("\n)");
            }
        };

        query.push(' ');

        query.push_str(" INTO CSV ");
        Self::append_files(&mut query, addrs, with_tls, with_compression);

        if let Some(enc) = self.encoding {
            Self::push_key_value(&mut query, "ENCODING", enc);
        }

        Self::push_key_value(&mut query, "NULL", self.null);
        Self::push_key_value(&mut query, "ROW SEPARATOR", self.row_separator.as_ref());
        Self::push_key_value(&mut query, "COLUMN SEPARATOR", self.column_separator);
        Self::push_key_value(&mut query, "COLUMN DELIMITER", self.column_delimiter);

        if self.with_column_names {
            query.push_str(" WITH COLUMN NAMES");
        }

        query
    }
}