use crate::{
ColumnarBulkInserter, CursorImpl, Error, InputParameterMapping, ParameterCollectionRef,
ResultSetMetadata,
buffers::{AnyBuffer, BufferDesc, ColumnBuffer, TextColumn},
columnar_bulk_inserter::InOrder,
execute::execute_with_parameters,
handles::{AsStatementRef, HasDataType, ParameterDescription, Statement, StatementRef},
};
/// A prepared query. Prepared queries are useful if similar queries should be executed more than
/// once. See [`crate::Connection::prepare`].
pub struct Prepared<S> {
statement: S,
}
impl<S> Prepared<S> {
pub(crate) fn new(statement: S) -> Self {
Self { statement }
}
/// Transfer ownership to the underlying statement handle.
///
/// The resulting type is one level of indirection away from the raw pointer of the ODBC API. It
/// no longer has any guarantees about bound buffers, but is still guaranteed to be a valid
/// allocated statement handle. Together with [`crate::handles::StatementImpl::into_sys`] or
/// [`crate::handles::Statement::as_sys`] this serves as an escape hatch to access functionality
/// provided by `crate::sys` which is not yet accessible through safe abstractions.
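///
/// A minimal sketch; the query is made up for illustration and the recovered statement is only
/// dropped again.
///
/// ```
/// use odbc_api::{Connection, Error};
///
/// fn take_back_the_handle(conn: &Connection<'_>) -> Result<(), Error> {
///     let prepared = conn.prepare("SELECT 42 AS answer")?;
///     // Give up the `Prepared` wrapper and take the underlying statement itself. It is still a
///     // valid, allocated statement handle and will be freed on drop.
///     let statement = prepared.into_handle();
///     // E.g. `statement.as_sys()` could now be used to interact with `odbc_api::sys` directly.
///     drop(statement);
///     Ok(())
/// }
/// ```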
pub fn into_handle(self) -> S {
self.statement
}
}
impl<S> Prepared<S>
where
S: AsStatementRef,
{
/// Execute the prepared statement.
///
/// * `params`: The parameters bound to the statement before execution. You can use `()`
/// to represent no parameters. In regard to binding arrays of parameters: should `params`
/// specify a parameter set size of `0`, nothing is executed, and `Ok(None)` is returned. See
/// the [`crate::parameter`] module level documentation for more information on how to pass
/// parameters.
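///
/// A minimal sketch of executing a prepared statement with a single parameter. Table and column
/// names are made up for illustration.
///
/// ```
/// use odbc_api::{Connection, Error, IntoParameter};
///
/// fn insert_greetings(conn: &Connection<'_>) -> Result<(), Error> {
///     let mut prepared = conn.prepare("INSERT INTO Greetings (text) VALUES (?)")?;
///     // Execute the same prepared statement twice with different parameter values.
///     prepared.execute(&"Hello, World!".into_parameter())?;
///     prepared.execute(&"Hallo, Welt!".into_parameter())?;
///     Ok(())
/// }
/// ```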
pub fn execute(
&mut self,
params: impl ParameterCollectionRef,
) -> Result<Option<CursorImpl<StatementRef<'_>>>, Error> {
let stmt = self.statement.as_stmt_ref();
execute_with_parameters(stmt, None, params)
}
/// Describes the parameter marker associated with a prepared SQL statement.
///
/// # Parameters
///
/// * `parameter_number`: Parameter marker number ordered sequentially in increasing parameter
/// order, starting at 1.
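///
/// A minimal sketch; the query is made up for illustration and the resulting description is not
/// inspected further.
///
/// ```
/// use odbc_api::{Connection, Error};
///
/// fn describe_first_placeholder(conn: &Connection<'_>) -> Result<(), Error> {
///     let mut prepared = conn.prepare("SELECT * FROM Users WHERE id = ?")?;
///     // Parameter markers are numbered starting at 1, not 0.
///     let description = prepared.describe_param(1)?;
///     // Hypothetical use: derive buffer types or sizes from `description`.
///     let _ = description;
///     Ok(())
/// }
/// ```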
pub fn describe_param(&mut self, parameter_number: u16) -> Result<ParameterDescription, Error> {
let mut stmt = self.as_stmt_ref();
stmt.describe_param(parameter_number).into_result(&stmt)
}
/// Number of placeholders which must be provided with [`Self::execute`] in order to execute
/// this statement. This is equivalent to the number of placeholders used in the SQL string
/// used to prepare the statement.
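///
/// A minimal sketch, assuming the `NationalDrink` table from the other examples exists:
///
/// ```
/// use odbc_api::{Connection, Error};
///
/// fn count_placeholders(conn: &Connection<'_>) -> Result<(), Error> {
///     let mut prepared = conn.prepare("INSERT INTO NationalDrink (country, drink) VALUES (?, ?)")?;
///     // Two `?` placeholders in the SQL text, so two parameters must be provided on execution.
///     assert_eq!(2, prepared.num_params()?);
///     Ok(())
/// }
/// ```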
pub fn num_params(&mut self) -> Result<u16, Error> {
let mut stmt = self.as_stmt_ref();
stmt.num_params().into_result(&stmt)
}
/// Describes all parameter markers associated with this prepared statement. Yields one
/// [`ParameterDescription`] for each placeholder in the SQL string used to prepare the
/// statement, in increasing parameter order.
///
/// ```
/// use odbc_api::{Connection, Error, handles::ParameterDescription};
///
/// fn collect_parameter_descriptions(
/// connection: Connection<'_>
/// ) -> Result<Vec<ParameterDescription>, Error>{
/// // Note the two `?` used as placeholders for the parameters.
/// let sql = "INSERT INTO NationalDrink (country, drink) VALUES (?, ?)";
/// let mut prepared = connection.prepare(sql)?;
///
/// let params: Vec<_> = prepared.parameter_descriptions()?.collect::<Result<_,_>>()?;
///
/// Ok(params)
/// }
/// ```
pub fn parameter_descriptions(
&mut self,
) -> Result<
impl DoubleEndedIterator<Item = Result<ParameterDescription, Error>>
+ ExactSizeIterator<Item = Result<ParameterDescription, Error>>
+ '_,
Error,
> {
Ok((1..=self.num_params()?).map(|index| self.describe_param(index)))
}
/// Unless you want to roll your own column buffer implementation, you are encouraged to use
/// [`Self::into_text_inserter`] or [`Self::into_column_inserter`] instead.
///
/// # Safety
///
/// * Parameters must all be valid for insertion. An example of an invalid parameter would be a
/// text buffer with a cell whose indicator value exceeds the maximum element length. This can
/// happen if truncation occurred while writing into the buffer.
pub unsafe fn unchecked_bind_columnar_array_parameters<C>(
self,
parameter_buffers: Vec<C>,
index_mapping: impl InputParameterMapping,
) -> Result<ColumnarBulkInserter<S, C>, Error>
where
C: ColumnBuffer + HasDataType + Send,
{
// We know that statement is a prepared statement.
unsafe { ColumnarBulkInserter::new(self.into_handle(), parameter_buffers, index_mapping) }
}
/// Use this to insert rows of string input into the database.
///
/// ```
/// use odbc_api::{Connection, Error};
///
/// fn insert_text<'e>(connection: Connection<'e>) -> Result<(), Error>{
/// // Insert six rows of text with two columns each into the database in batches of 3. In a
/// // real use case you are likely to achieve better results with a higher batch size.
///
/// // Note the two `?` used as placeholders for the parameters.
/// let prepared = connection.prepare("INSERT INTO NationalDrink (country, drink) VALUES (?, ?)")?;
/// // We assume both parameter inputs never exceed 50 bytes.
/// let mut prebound = prepared.into_text_inserter(3, [50, 50])?;
///
/// // A cell is an optional byte slice (`Option<&[u8]>`). We could use `None` to represent
/// // NULL, but we have no need to do that in this example.
/// let as_cell = |s: &'static str| { Some(s.as_bytes()) } ;
///
/// // First batch of values
/// prebound.append(["England", "Tea"].into_iter().map(as_cell))?;
/// prebound.append(["Germany", "Beer"].into_iter().map(as_cell))?;
/// prebound.append(["Russia", "Vodka"].into_iter().map(as_cell))?;
///
/// // Execute statement using values bound in buffer.
/// prebound.execute()?;
/// // Clear buffer contents, otherwise the previous values would stay in the buffer.
/// prebound.clear();
///
/// // Second batch of values
/// prebound.append(["India", "Tea"].into_iter().map(as_cell))?;
/// prebound.append(["France", "Wine"].into_iter().map(as_cell))?;
/// prebound.append(["USA", "Cola"].into_iter().map(as_cell))?;
///
/// // Send second batch to the database
/// prebound.execute()?;
///
/// Ok(())
/// }
/// ```
pub fn into_text_inserter(
self,
capacity: usize,
max_str_len: impl IntoIterator<Item = usize>,
) -> Result<ColumnarBulkInserter<S, TextColumn<u8>>, Error> {
let max_str_len = max_str_len.into_iter();
let parameter_buffers: Vec<_> = max_str_len
.map(|max_str_len| TextColumn::new(capacity, max_str_len))
.collect();
let index_mapping = InOrder::new(parameter_buffers.len());
// Text Columns are created with NULL as default, which is valid for insertion.
unsafe { self.unchecked_bind_columnar_array_parameters(parameter_buffers, index_mapping) }
}
/// A [`crate::ColumnarBulkInserter`] which takes ownership of both the statement and the bound
/// array parameter buffers.
///
/// ```no_run
/// use odbc_api::{Connection, Error, IntoParameter, buffers::BufferDesc};
///
/// fn insert_birth_years(
/// conn: &Connection,
/// names: &[&str],
/// years: &[i16]
/// ) -> Result<(), Error> {
/// // All columns must have equal length.
/// assert_eq!(names.len(), years.len());
///
/// let prepared = conn.prepare("INSERT INTO Birthdays (name, year) VALUES (?, ?)")?;
///
/// // Create a columnar buffer which fits the input parameters.
/// let buffer_description = [
/// BufferDesc::Text { max_str_len: 255 },
/// BufferDesc::I16 { nullable: false },
/// ];
/// // The capacity must be able to hold at least the largest batch. We do everything in one
/// // go, so we set it to the length of the input parameters.
/// let capacity = names.len();
/// // Allocate memory for the array column parameters and bind it to the statement.
/// let mut prebound = prepared.into_column_inserter(capacity, buffer_description)?;
/// // Length of this batch
/// prebound.set_num_rows(capacity);
///
///
/// // Fill the buffer with values column by column
/// let mut col = prebound
/// .column_mut(0)
/// .as_text_view()
/// .expect("We know the name column to hold text.");
///
/// for (index, name) in names.iter().enumerate() {
/// col.set_cell(index, Some(name.as_bytes()));
/// }
///
/// let col = prebound
/// .column_mut(1)
/// .as_slice::<i16>()
/// .expect("We know the year column to hold i16.");
/// col.copy_from_slice(years);
///
/// prebound.execute()?;
/// Ok(())
/// }
/// ```
pub fn into_column_inserter(
self,
capacity: usize,
descriptions: impl IntoIterator<Item = BufferDesc>,
) -> Result<ColumnarBulkInserter<S, AnyBuffer>, Error> {
let parameter_buffers: Vec<_> = descriptions
.into_iter()
.map(|desc| AnyBuffer::from_desc(capacity, desc))
.collect();
let index_mapping = InOrder::new(parameter_buffers.len());
// Safe: We know this to be a valid prepared statement. Also we just created the buffers
// to be bound and know them to be empty. Therefore they are valid and do not contain any
// indicator values which could trigger out of bounds access in the database drivers.
unsafe { self.unchecked_bind_columnar_array_parameters(parameter_buffers, index_mapping) }
}
/// Similar to [`Self::into_column_inserter`], but allows specifying a custom mapping between
/// columns and parameters. This is useful if e.g. the same values are bound to multiple
/// parameter placeholders.
pub fn into_column_inserter_with_mapping(
self,
capacity: usize,
descriptions: impl IntoIterator<Item = BufferDesc>,
index_mapping: impl InputParameterMapping,
) -> Result<ColumnarBulkInserter<S, AnyBuffer>, Error> {
let parameter_buffers: Vec<_> = descriptions
.into_iter()
.map(|desc| AnyBuffer::from_desc(capacity, desc))
.collect();
// Safe: We know this to be a valid prepared statement. Also we just created the buffers
// to be bound and know them to be empty. Therefore they are valid and do not contain any
// indicator values which could trigger out of bounds access in the database drivers.
unsafe { self.unchecked_bind_columnar_array_parameters(parameter_buffers, index_mapping) }
}
/// A [`crate::ColumnarBulkInserter`] which has ownership of the bound array parameter buffers
/// and borrows the statement. For most use cases [`Self::into_column_inserter`] is what you
/// want to use, yet in some instances you may want to bind new parameter buffers to the same
/// prepared statement, e.g. to grow the capacity dynamically during insertions with several
/// chunks. In such use cases you may only want to borrow the prepared statement, so it can be
/// reused with a different set of parameter buffers. A sketch of this pattern follows below.
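///
/// A minimal sketch of inserting in two chunks with different buffer capacities. The `Birthdays`
/// table is made up for illustration and is assumed to have a single 16-bit integer `year`
/// column.
///
/// ```no_run
/// use odbc_api::{Connection, Error, buffers::BufferDesc};
///
/// fn insert_in_two_chunks(conn: &Connection<'_>, years: &[i16]) -> Result<(), Error> {
///     assert!(years.len() > 2);
///     let mut prepared = conn.prepare("INSERT INTO Birthdays (year) VALUES (?)")?;
///
///     // First chunk: bind a small buffer holding only two rows.
///     {
///         let mut inserter =
///             prepared.column_inserter(2, [BufferDesc::I16 { nullable: false }])?;
///         inserter.set_num_rows(2);
///         inserter
///             .column_mut(0)
///             .as_slice::<i16>()
///             .expect("We know the buffer to hold i16.")
///             .copy_from_slice(&years[..2]);
///         inserter.execute()?;
///     }
///
///     // Second chunk: rebind a larger buffer to the same prepared statement.
///     let remaining = years.len() - 2;
///     let mut inserter =
///         prepared.column_inserter(remaining, [BufferDesc::I16 { nullable: false }])?;
///     inserter.set_num_rows(remaining);
///     inserter
///         .column_mut(0)
///         .as_slice::<i16>()
///         .expect("We know the buffer to hold i16.")
///         .copy_from_slice(&years[2..]);
///     inserter.execute()?;
///     Ok(())
/// }
/// ```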
pub fn column_inserter(
&mut self,
capacity: usize,
descriptions: impl IntoIterator<Item = BufferDesc>,
) -> Result<ColumnarBulkInserter<StatementRef<'_>, AnyBuffer>, Error> {
// Remark: We repeat the implementation here. It is hard to reuse the
// `column_inserter_with_mapping` function, because we need to know the number of parameters
// to create the `InOrder` mapping.
let stmt = self.statement.as_stmt_ref();
let parameter_buffers: Vec<_> = descriptions
.into_iter()
.map(|desc| AnyBuffer::from_desc(capacity, desc))
.collect();
let index_mapping = InOrder::new(parameter_buffers.len());
// Safe: We know that the statement is a prepared statement, and we just created the buffers
// to be bound and know them to be empty. Therefore they are valid and do not contain any
// indicator values which could trigger out of bounds access in the database drivers.
unsafe { ColumnarBulkInserter::new(stmt, parameter_buffers, index_mapping) }
}
/// Similar to [`Self::column_inserter`], but allows specifying a custom mapping between columns
/// and parameters. This is useful if e.g. the same values are bound to multiple parameter
/// placeholders.
pub fn column_inserter_with_mapping(
&mut self,
capacity: usize,
descriptions: impl IntoIterator<Item = BufferDesc>,
index_mapping: impl InputParameterMapping,
) -> Result<ColumnarBulkInserter<StatementRef<'_>, AnyBuffer>, Error> {
let stmt = self.statement.as_stmt_ref();
let parameter_buffers: Vec<_> = descriptions
.into_iter()
.map(|desc| AnyBuffer::from_desc(capacity, desc))
.collect();
// Safe: We know that the statement is a prepared statement, and we just created the buffers
// to be bound and know them to be empty. Therefore they are valid and do not contain any
// indicator values which could trigger out of bounds access in the database drivers.
unsafe { ColumnarBulkInserter::new(stmt, parameter_buffers, index_mapping) }
}
/// Number of rows affected by the last `INSERT`, `UPDATE` or `DELETE` statement. May return
/// `None` if the row count is not available. Some drivers may also allow using this to determine
/// how many rows have been returned by a `SELECT`. Most drivers, however, only know how many rows
/// have been fetched after they have actually been fetched.
///
/// ```
/// use odbc_api::{Connection, Error, IntoParameter};
///
/// /// Deletes all comments for every user in the slice. Returns the number of deleted
/// /// comments.
/// pub fn delete_all_comments_from(
/// users: &[&str],
/// conn: Connection<'_>,
/// ) -> Result<usize, Error>
/// {
/// // Store prepared query for fast repeated execution.
/// let mut prepared = conn.prepare("DELETE FROM Comments WHERE user=?")?;
/// let mut total_deleted_comments = 0;
/// for user in users {
/// prepared.execute(&user.into_parameter())?;
/// total_deleted_comments += prepared
/// .row_count()?
/// .expect("Row count must always be available for DELETE statements.");
/// }
/// Ok(total_deleted_comments)
/// }
/// ```
pub fn row_count(&mut self) -> Result<Option<usize>, Error> {
let mut stmt = self.statement.as_stmt_ref();
stmt.row_count().into_result(&stmt).map(|count| {
// ODBC returns -1 in case a row count is not available
if count == -1 {
None
} else {
Some(count.try_into().unwrap())
}
})
}
/// Use this to limit the time the query is allowed to take, before responding with data to the
/// application. The driver may replace the number of seconds you provide with a minimum or
/// maximum value. You can specify `0` to deactivate the timeout; this is the default. For this
/// to work the driver must support this feature. E.g. PostgreSQL and Microsoft SQL Server do,
/// but SQLite and MariaDB do not.
///
/// This corresponds to `SQL_ATTR_QUERY_TIMEOUT` in the ODBC C API.
///
/// See:
/// <https://learn.microsoft.com/en-us/sql/odbc/reference/syntax/sqlsetstmtattr-function>
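///
/// A minimal sketch; the query is made up for illustration.
///
/// ```
/// use odbc_api::{Connection, Error};
///
/// fn select_with_timeout(conn: &Connection<'_>) -> Result<(), Error> {
///     let mut prepared = conn.prepare("SELECT * FROM BigTable")?;
///     // Tell the driver to give up if execution takes longer than 60 seconds.
///     prepared.set_query_timeout_sec(60)?;
///     let _maybe_cursor = prepared.execute(())?;
///     Ok(())
/// }
/// ```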
pub fn set_query_timeout_sec(&mut self, timeout_sec: usize) -> Result<(), Error> {
let mut stmt = self.statement.as_stmt_ref();
stmt.set_query_timeout_sec(timeout_sec).into_result(&stmt)
}
/// The number of seconds to wait for a SQL statement to execute before returning to the
/// application. If `timeout_sec` is equal to 0 (default), there is no timeout.
///
/// This corresponds to `SQL_ATTR_QUERY_TIMEOUT` in the ODBC C API.
///
/// See:
/// <https://learn.microsoft.com/en-us/sql/odbc/reference/syntax/sqlsetstmtattr-function>
pub fn query_timeout_sec(&mut self) -> Result<usize, Error> {
let mut stmt = self.statement.as_stmt_ref();
stmt.query_timeout_sec().into_result(&stmt)
}
}
impl<S> ResultSetMetadata for Prepared<S> where S: AsStatementRef {}
impl<S> AsStatementRef for Prepared<S>
where
S: AsStatementRef,
{
fn as_stmt_ref(&mut self) -> StatementRef<'_> {
self.statement.as_stmt_ref()
}
}