use std::{convert, sync::Arc};
use super::{Error, Result, Statement};
use crate::types::{self, EnumType, FromSql, FromSqlError, ListType, ValueRef};
use arrow::{
array::{
self, Array, ArrayRef, DictionaryArray, FixedSizeBinaryArray, FixedSizeListArray, ListArray, MapArray,
StructArray,
},
datatypes::*,
};
use fallible_iterator::FallibleIterator;
use fallible_streaming_iterator::FallibleStreamingIterator;
use rust_decimal::prelude::*;
/// A lazy handle for iterating over the rows produced by an executed
/// [`Statement`].
///
/// Rows are pulled one Arrow record batch at a time via `Statement::step`,
/// and the current batch is buffered in `arr`.
#[must_use = "Rows is lazy and will do nothing unless consumed"]
pub struct Rows<'stmt> {
    /// The statement being iterated over; `None` yields no rows at all.
    pub(crate) stmt: Option<&'stmt Statement<'stmt>>,
    /// The record batch currently buffered (`None` until the first step).
    arr: Arc<Option<StructArray>>,
    /// The row most recently produced by `advance`, if any.
    row: Option<Row<'stmt>>,
    /// Absolute index of the next row across all batches.
    current_row: usize,
    /// Index of the next row within the currently buffered batch.
    current_batch_row: usize,
}
impl<'stmt> Rows<'stmt> {
    /// Clear all iteration state so a subsequent pass starts from scratch.
    #[inline]
    fn reset(&mut self) {
        self.current_row = 0;
        self.current_batch_row = 0;
        self.arr = Arc::new(None);
    }

    /// Advance to the next row, returning `Ok(None)` once the result set is
    /// exhausted.
    #[allow(clippy::should_implement_trait)]
    #[inline]
    pub fn next(&mut self) -> Result<Option<&Row<'stmt>>> {
        self.advance()?;
        Ok(self.get())
    }

    /// Number of rows in the currently buffered batch, or 0 when no batch
    /// has been loaded yet.
    #[inline]
    fn batch_row_count(&self) -> usize {
        match &*self.arr {
            Some(batch) => batch.len(),
            None => 0,
        }
    }

    /// Wrap this cursor in a [`FallibleIterator`] that applies `f` to each row.
    #[inline]
    pub fn map<F, B>(self, f: F) -> Map<'stmt, F>
    where
        F: FnMut(&Row<'_>) -> Result<B>,
    {
        Map { rows: self, f }
    }

    /// Wrap this cursor in a plain [`Iterator`] that applies `f` to each row.
    #[inline]
    pub fn mapped<F, B>(self, f: F) -> MappedRows<'stmt, F>
    where
        F: FnMut(&Row<'_>) -> Result<B>,
    {
        MappedRows { rows: self, map: f }
    }

    /// Like [`Rows::mapped`], but the closure may use its own error type `E`
    /// as long as `E: From<Error>`.
    #[inline]
    pub fn and_then<F, T, E>(self, f: F) -> AndThenRows<'stmt, F>
    where
        F: FnMut(&Row<'_>) -> Result<T, E>,
    {
        AndThenRows { rows: self, map: f }
    }

    /// Borrow the underlying statement, if one is attached.
    pub fn as_ref(&self) -> Option<&Statement<'stmt>> {
        self.stmt
    }
}
impl<'stmt> Rows<'stmt> {
#[inline]
pub(crate) fn new(stmt: &'stmt Statement<'stmt>) -> Self {
Rows {
stmt: Some(stmt),
arr: Arc::new(None),
row: None,
current_row: 0,
current_batch_row: 0,
}
}
#[inline]
pub(crate) fn get_expected_row(&mut self) -> Result<&Row<'stmt>> {
match self.next()? {
Some(row) => Ok(row),
None => Err(Error::QueryReturnedNoRows),
}
}
}
/// A [`FallibleIterator`] over mapped query results, returned by [`Rows::map`].
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct Map<'stmt, F> {
    rows: Rows<'stmt>,
    /// Closure applied to each row.
    f: F,
}
impl<F, B> FallibleIterator for Map<'_, F>
where
    F: FnMut(&Row<'_>) -> Result<B>,
{
    type Error = Error;
    type Item = B;

    /// Pull the next row and feed it through the mapping closure; either a
    /// row-fetch error or a closure error ends the iteration with `Err`.
    #[inline]
    fn next(&mut self) -> Result<Option<B>> {
        let row = self.rows.next()?;
        row.map(|r| (self.f)(r)).transpose()
    }
}
/// A plain [`Iterator`] over mapped query results, returned by [`Rows::mapped`].
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct MappedRows<'stmt, F> {
    rows: Rows<'stmt>,
    /// Closure applied to each row.
    map: F,
}
impl<T, F> Iterator for MappedRows<'_, F>
where
    F: FnMut(&Row<'_>) -> Result<T>,
{
    type Item = Result<T>;

    /// Yield `Some(Ok(mapped))` per row, `Some(Err(_))` on a fetch or map
    /// failure, and `None` when the result set is exhausted.
    #[inline]
    fn next(&mut self) -> Option<Result<T>> {
        match self.rows.next() {
            Ok(Some(row)) => Some((self.map)(row)),
            Ok(None) => None,
            Err(e) => Some(Err(e)),
        }
    }
}
/// An [`Iterator`] over mapped query results whose closure uses a custom error
/// type, returned by [`Rows::and_then`].
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct AndThenRows<'stmt, F> {
    rows: Rows<'stmt>,
    /// Closure applied to each row; may fail with any `E: From<Error>`.
    map: F,
}
impl<T, E, F> Iterator for AndThenRows<'_, F>
where
    E: convert::From<Error>,
    F: FnMut(&Row<'_>) -> Result<T, E>,
{
    type Item = Result<T, E>;

    /// Yield each row through the closure, widening row-fetch errors into the
    /// caller's error type `E`.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        match self.rows.next() {
            Ok(Some(row)) => Some((self.map)(row)),
            Ok(None) => None,
            Err(e) => Some(Err(E::from(e))),
        }
    }
}
impl<'stmt> FallibleStreamingIterator for Rows<'stmt> {
    type Error = Error;
    type Item = Row<'stmt>;

    /// Step the cursor forward one row, pulling the next record batch from
    /// the statement whenever the buffered batch is exhausted.
    #[inline]
    fn advance(&mut self) -> Result<()> {
        match self.stmt {
            Some(stmt) => {
                if self.current_row < stmt.row_count() {
                    // Buffered batch consumed (or none loaded yet): fetch the next one.
                    if self.current_batch_row >= self.batch_row_count() {
                        self.arr = Arc::new(stmt.step());
                        if self.arr.is_none() {
                            // The statement produced no further batches; stop early
                            // even though row_count suggested more rows.
                            self.row = None;
                            return Ok(());
                        }
                        self.current_batch_row = 0;
                    }
                    // Hand the row a shared handle to the batch plus its offset in it.
                    self.row = Some(Row {
                        stmt,
                        arr: self.arr.clone(),
                        current_row: self.current_batch_row,
                    });
                    self.current_row += 1;
                    self.current_batch_row += 1;
                    Ok(())
                } else {
                    // All rows consumed: clear state so `get` reports exhaustion.
                    self.reset();
                    self.row = None;
                    Ok(())
                }
            }
            None => {
                // No statement attached: nothing to yield.
                self.row = None;
                Ok(())
            }
        }
    }

    /// Return the row produced by the last `advance`, if any.
    #[inline]
    fn get(&self) -> Option<&Row<'stmt>> {
        self.row.as_ref()
    }
}
/// A single row of a result set, borrowing into the record batch buffered by
/// [`Rows`].
pub struct Row<'stmt> {
    pub(crate) stmt: &'stmt Statement<'stmt>,
    /// Shared handle to the record batch this row points into.
    arr: Arc<Option<StructArray>>,
    /// Offset of this row within `arr`.
    current_row: usize,
}
#[allow(clippy::needless_lifetimes)]
impl<'stmt> Row<'stmt> {
    /// Get the value of a particular column of the result row, panicking on
    /// failure. Prefer [`Row::get`] outside of tests and examples.
    pub fn get_unwrap<I: RowIndex, T: FromSql>(&self, idx: I) -> T {
        self.get(idx).unwrap()
    }

    /// Get the value of a particular column of the result row, converted to
    /// `T` via [`FromSql`].
    ///
    /// # Errors
    ///
    /// * `Error::InvalidColumnType` when the stored value cannot be converted
    ///   to `T` (including, with the `uuid` feature, a wrong-sized UUID blob).
    /// * `Error::IntegralValueOutOfRange` when an integral value overflows `T`.
    /// * `Error::FromSqlConversionFailure` for any other conversion error.
    pub fn get<I: RowIndex, T: FromSql>(&self, idx: I) -> Result<T> {
        let idx = idx.idx(self.stmt)?;
        let value = self.value_ref(self.current_row, idx);
        FromSql::column_result(value).map_err(|err| match err {
            FromSqlError::InvalidType => {
                Error::InvalidColumnType(idx, self.stmt.column_name_unwrap(idx).into(), value.data_type())
            }
            FromSqlError::OutOfRange(i) => Error::IntegralValueOutOfRange(idx, i),
            FromSqlError::Other(err) => Error::FromSqlConversionFailure(idx, value.data_type(), err),
            #[cfg(feature = "uuid")]
            FromSqlError::InvalidUuidSize(_) => {
                Error::InvalidColumnType(idx, self.stmt.column_name_unwrap(idx).into(), value.data_type())
            }
        })
    }

    /// Get a borrowed [`ValueRef`] for a particular column without converting.
    pub fn get_ref<I: RowIndex>(&self, idx: I) -> Result<ValueRef<'_>> {
        let idx = idx.idx(self.stmt)?;
        let val_ref = self.value_ref(self.current_row, idx);
        Ok(val_ref)
    }

    /// Read column `col` of batch-local row `row` from the buffered batch.
    ///
    /// Panics if no batch is buffered; a `Row` is only ever constructed (in
    /// `Rows::advance`) after a batch has been loaded.
    fn value_ref(&self, row: usize, col: usize) -> ValueRef<'_> {
        let column = self.arr.as_ref().as_ref().unwrap().column(col);
        Self::value_ref_internal(row, col, column)
    }

    /// Convert entry `row` of the Arrow array `column` into a [`ValueRef`].
    ///
    /// # Panics
    ///
    /// Panics (`unreachable!`/`unimplemented!`/`panic!`) on Arrow data types
    /// and interval/dictionary-key variants this crate does not map.
    pub(crate) fn value_ref_internal(row: usize, col: usize, column: &ArrayRef) -> ValueRef<'_> {
        if column.is_null(row) {
            return ValueRef::Null;
        }
        match column.data_type() {
            DataType::Utf8 => {
                let array = column.as_any().downcast_ref::<array::StringArray>().unwrap();
                ValueRef::from(array.value(row))
            }
            DataType::LargeUtf8 => {
                let array = column.as_any().downcast_ref::<array::LargeStringArray>().unwrap();
                ValueRef::from(array.value(row))
            }
            DataType::Binary => {
                let array = column.as_any().downcast_ref::<array::BinaryArray>().unwrap();
                ValueRef::Blob(array.value(row))
            }
            DataType::LargeBinary => {
                let array = column.as_any().downcast_ref::<array::LargeBinaryArray>().unwrap();
                ValueRef::Blob(array.value(row))
            }
            DataType::FixedSizeBinary(_) => {
                let array = column.as_any().downcast_ref::<FixedSizeBinaryArray>().unwrap();
                ValueRef::Blob(array.value(row))
            }
            DataType::Boolean => {
                let array = column.as_any().downcast_ref::<array::BooleanArray>().unwrap();
                ValueRef::Boolean(array.value(row))
            }
            DataType::Int8 => {
                let array = column.as_any().downcast_ref::<array::Int8Array>().unwrap();
                ValueRef::TinyInt(array.value(row))
            }
            DataType::Int16 => {
                let array = column.as_any().downcast_ref::<array::Int16Array>().unwrap();
                ValueRef::SmallInt(array.value(row))
            }
            DataType::Int32 => {
                let array = column.as_any().downcast_ref::<array::Int32Array>().unwrap();
                ValueRef::Int(array.value(row))
            }
            DataType::Int64 => {
                let array = column.as_any().downcast_ref::<array::Int64Array>().unwrap();
                ValueRef::BigInt(array.value(row))
            }
            DataType::UInt8 => {
                let array = column.as_any().downcast_ref::<array::UInt8Array>().unwrap();
                ValueRef::UTinyInt(array.value(row))
            }
            DataType::UInt16 => {
                let array = column.as_any().downcast_ref::<array::UInt16Array>().unwrap();
                ValueRef::USmallInt(array.value(row))
            }
            DataType::UInt32 => {
                let array = column.as_any().downcast_ref::<array::UInt32Array>().unwrap();
                ValueRef::UInt(array.value(row))
            }
            DataType::UInt64 => {
                let array = column.as_any().downcast_ref::<array::UInt64Array>().unwrap();
                ValueRef::UBigInt(array.value(row))
            }
            DataType::Float16 => {
                // BUG FIX: a Float16 column is backed by `Float16Array`, not
                // `Float32Array`; the previous downcast to `Float32Array`
                // returned `None` and the `unwrap` panicked. Widen the
                // `half::f16` value to f32, since ValueRef has no f16 variant.
                let array = column.as_any().downcast_ref::<array::Float16Array>().unwrap();
                ValueRef::Float(array.value(row).to_f32())
            }
            DataType::Float32 => {
                let array = column.as_any().downcast_ref::<array::Float32Array>().unwrap();
                ValueRef::Float(array.value(row))
            }
            DataType::Float64 => {
                let array = column.as_any().downcast_ref::<array::Float64Array>().unwrap();
                ValueRef::Double(array.value(row))
            }
            DataType::Decimal128(..) => {
                let array = column.as_any().downcast_ref::<array::Decimal128Array>().unwrap();
                // Scale-0 decimals are surfaced as 128-bit integers.
                if array.scale() == 0 {
                    return ValueRef::HugeInt(array.value(row));
                }
                // NOTE(review): `scale()` is i8; DuckDB decimal scales are
                // expected to be non-negative, but a negative scale would wrap
                // in this cast — confirm upstream invariant.
                ValueRef::Decimal(Decimal::from_i128_with_scale(array.value(row), array.scale() as u32))
            }
            // One exhaustive match on the time unit replaces four guarded arms;
            // `TimeUnit` has exactly these four variants.
            DataType::Timestamp(unit, _) => match unit {
                TimeUnit::Second => {
                    let array = column.as_any().downcast_ref::<array::TimestampSecondArray>().unwrap();
                    ValueRef::Timestamp(types::TimeUnit::Second, array.value(row))
                }
                TimeUnit::Millisecond => {
                    let array = column
                        .as_any()
                        .downcast_ref::<array::TimestampMillisecondArray>()
                        .unwrap();
                    ValueRef::Timestamp(types::TimeUnit::Millisecond, array.value(row))
                }
                TimeUnit::Microsecond => {
                    let array = column
                        .as_any()
                        .downcast_ref::<array::TimestampMicrosecondArray>()
                        .unwrap();
                    ValueRef::Timestamp(types::TimeUnit::Microsecond, array.value(row))
                }
                TimeUnit::Nanosecond => {
                    let array = column
                        .as_any()
                        .downcast_ref::<array::TimestampNanosecondArray>()
                        .unwrap();
                    ValueRef::Timestamp(types::TimeUnit::Nanosecond, array.value(row))
                }
            },
            DataType::Date32 => {
                let array = column.as_any().downcast_ref::<array::Date32Array>().unwrap();
                ValueRef::Date32(array.value(row))
            }
            DataType::Time64(TimeUnit::Microsecond) => {
                let array = column.as_any().downcast_ref::<array::Time64MicrosecondArray>().unwrap();
                ValueRef::Time64(types::TimeUnit::Microsecond, array.value(row))
            }
            DataType::Interval(unit) => match unit {
                IntervalUnit::MonthDayNano => {
                    let array = column
                        .as_any()
                        .downcast_ref::<array::IntervalMonthDayNanoArray>()
                        .unwrap();
                    let value = array.value(row);
                    ValueRef::Interval {
                        months: value.months,
                        days: value.days,
                        nanos: value.nanoseconds,
                    }
                }
                // YearMonth / DayTime intervals are not produced by DuckDB here.
                _ => unimplemented!("{:?}", unit),
            },
            // Nested types keep a reference to the whole array plus the row
            // offset; element access is deferred to the ValueRef consumer.
            DataType::LargeList(..) => {
                let arr = column.as_any().downcast_ref::<array::LargeListArray>().unwrap();
                ValueRef::List(ListType::Large(arr), row)
            }
            DataType::List(..) => {
                let arr = column.as_any().downcast_ref::<ListArray>().unwrap();
                ValueRef::List(ListType::Regular(arr), row)
            }
            DataType::Dictionary(key_type, ..) => {
                let column = column.as_any();
                ValueRef::Enum(
                    match key_type.as_ref() {
                        DataType::UInt8 => {
                            EnumType::UInt8(column.downcast_ref::<DictionaryArray<UInt8Type>>().unwrap())
                        }
                        DataType::UInt16 => {
                            EnumType::UInt16(column.downcast_ref::<DictionaryArray<UInt16Type>>().unwrap())
                        }
                        DataType::UInt32 => {
                            EnumType::UInt32(column.downcast_ref::<DictionaryArray<UInt32Type>>().unwrap())
                        }
                        typ => panic!("Unsupported key type: {typ:?}"),
                    },
                    row,
                )
            }
            DataType::Struct(_) => {
                let res = column.as_any().downcast_ref::<StructArray>().unwrap();
                ValueRef::Struct(res, row)
            }
            DataType::Map(..) => {
                let arr = column.as_any().downcast_ref::<MapArray>().unwrap();
                ValueRef::Map(arr, row)
            }
            DataType::FixedSizeList(..) => {
                let arr = column.as_any().downcast_ref::<FixedSizeListArray>().unwrap();
                ValueRef::Array(arr, row)
            }
            DataType::Union(..) => ValueRef::Union(column, row),
            _ => unreachable!("invalid value: {}, {}", col, column.data_type()),
        }
    }

    /// Get a borrowed [`ValueRef`] for a column, panicking on failure.
    /// Prefer [`Row::get_ref`] outside of tests and examples.
    pub fn get_ref_unwrap<I: RowIndex>(&self, idx: I) -> ValueRef<'_> {
        self.get_ref(idx).unwrap()
    }
}
impl<'stmt> AsRef<Statement<'stmt>> for Row<'stmt> {
    /// Borrow the prepared statement this row was produced by.
    #[inline]
    fn as_ref(&self) -> &Statement<'stmt> {
        self.stmt
    }
}
/// Seals [`RowIndex`] so it cannot be implemented outside this crate; only
/// `usize` (positional) and `&str` (by name) are allowed as column indices.
mod sealed {
    pub trait Sealed {}
    impl Sealed for usize {}
    impl Sealed for &str {}
}
/// A trait implemented by types that can index into columns of a row.
///
/// Sealed: only `usize` and `&str` implement it.
pub trait RowIndex: sealed::Sealed {
    /// Resolve `self` into a zero-based column index for `stmt`, or return an
    /// error when the index/name does not exist.
    fn idx(&self, stmt: &Statement<'_>) -> Result<usize>;
}
impl RowIndex for usize {
#[inline]
fn idx(&self, stmt: &Statement<'_>) -> Result<usize> {
if *self >= stmt.column_count() {
Err(Error::InvalidColumnIndex(*self))
} else {
Ok(*self)
}
}
}
impl RowIndex for &'_ str {
    /// Resolve a column by name via the statement's own name lookup.
    #[inline]
    fn idx(&self, stmt: &Statement<'_>) -> Result<usize> {
        stmt.column_index(self)
    }
}
// Implements `TryFrom<&Row>` for one tuple arity of `FromSql` types, reading
// the row's columns in order starting at index 0. A row with fewer columns
// than the tuple has fields fails with the error from `Row::get`.
macro_rules! tuple_try_from_row {
    ($($field:ident),*) => {
        impl<'a, $($field,)*> convert::TryFrom<&'a Row<'a>> for ($($field,)*) where $($field: FromSql,)* {
            type Error = crate::Error;
            #[allow(unused_assignments, unused_variables, unused_mut)]
            fn try_from(row: &'a Row<'a>) -> Result<Self> {
                // `index` walks the columns left to right, one per tuple field.
                let mut index = 0;
                $(
                    #[allow(non_snake_case)]
                    let $field = row.get::<_, $field>(index)?;
                    index += 1;
                )*
                Ok(($($field,)*))
            }
        }
    }
}
// Recursively instantiates `tuple_try_from_row!` for every arity from the
// given identifier list down to the empty tuple.
macro_rules! tuples_try_from_row {
    () => {
        // Base case: the empty tuple.
        tuple_try_from_row!();
    };
    ($first:ident $(, $remaining:ident)*) => {
        // Current arity, then recurse with one identifier fewer.
        tuple_try_from_row!($first $(, $remaining)*);
        tuples_try_from_row!($($remaining),*);
    };
}
// Support `TryFrom<&Row>` for tuples of up to 16 elements.
tuples_try_from_row!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P);
#[cfg(test)]
mod tests {
    #![allow(clippy::redundant_closure)]
    use crate::{Connection, Result};

    // `TryFrom<&Row>` for a 1-tuple; also checks that asking for more columns
    // than the row has fails.
    #[test]
    fn test_try_from_row_for_tuple_1() -> Result<()> {
        use crate::ToSql;
        use std::convert::TryFrom;
        let conn = Connection::open_in_memory()?;
        conn.execute(
            "CREATE TABLE test (a INTEGER)",
            crate::params_from_iter(std::iter::empty::<&dyn ToSql>()),
        )?;
        conn.execute("INSERT INTO test VALUES (42)", [])?;
        let val = conn.query_row("SELECT a FROM test", [], |row| <(u32,)>::try_from(row))?;
        assert_eq!(val, (42,));
        // Requesting two columns from a one-column row must error.
        let fail = conn.query_row("SELECT a FROM test", [], |row| <(u32, u32)>::try_from(row));
        assert!(fail.is_err());
        Ok(())
    }

    // `TryFrom<&Row>` for a 2-tuple, plus the too-many-columns failure case.
    #[test]
    fn test_try_from_row_for_tuple_2() -> Result<()> {
        use std::convert::TryFrom;
        let conn = Connection::open_in_memory()?;
        conn.execute("CREATE TABLE test (a INTEGER, b INTEGER)", [])?;
        conn.execute("INSERT INTO test VALUES (42, 47)", [])?;
        let val = conn.query_row("SELECT a, b FROM test", [], |row| <(u32, u32)>::try_from(row))?;
        assert_eq!(val, (42, 47));
        let fail = conn.query_row("SELECT a, b FROM test", [], |row| <(u32, u32, u32)>::try_from(row));
        assert!(fail.is_err());
        Ok(())
    }

    // `TryFrom<&Row>` at the maximum supported tuple arity of 16; column i
    // should land in tuple position i.
    #[test]
    fn test_try_from_row_for_tuple_16() -> Result<()> {
        use std::convert::TryFrom;
        let create_table = "CREATE TABLE test (
a INTEGER,
b INTEGER,
c INTEGER,
d INTEGER,
e INTEGER,
f INTEGER,
g INTEGER,
h INTEGER,
i INTEGER,
j INTEGER,
k INTEGER,
l INTEGER,
m INTEGER,
n INTEGER,
o INTEGER,
p INTEGER
)";
        let insert_values = "INSERT INTO test VALUES (
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15
)";
        type BigTuple = (
            u32,
            u32,
            u32,
            u32,
            u32,
            u32,
            u32,
            u32,
            u32,
            u32,
            u32,
            u32,
            u32,
            u32,
            u32,
            u32,
        );
        let conn = Connection::open_in_memory()?;
        conn.execute(create_table, [])?;
        conn.execute(insert_values, [])?;
        let val = conn.query_row("SELECT * FROM test", [], |row| BigTuple::try_from(row))?;
        assert_eq!(val.0, 0);
        assert_eq!(val.1, 1);
        assert_eq!(val.2, 2);
        assert_eq!(val.3, 3);
        assert_eq!(val.4, 4);
        assert_eq!(val.5, 5);
        assert_eq!(val.6, 6);
        assert_eq!(val.7, 7);
        assert_eq!(val.8, 8);
        assert_eq!(val.9, 9);
        assert_eq!(val.10, 10);
        assert_eq!(val.11, 11);
        assert_eq!(val.12, 12);
        assert_eq!(val.13, 13);
        assert_eq!(val.14, 14);
        assert_eq!(val.15, 15);
        Ok(())
    }

    // Round-trip FixedSizeBinary data through the arrow vtab; DuckDB surfaces
    // it back as a plain Binary column.
    #[test]
    #[cfg(feature = "vtab-arrow")]
    fn test_fixed_size_binary_via_arrow() -> Result<()> {
        use crate::vtab::arrow::{ArrowVTab, arrow_recordbatch_to_query_params};
        use arrow::array::{Array, ArrayRef, BinaryArray, FixedSizeBinaryArray};
        use arrow::datatypes::{DataType, Field, Schema};
        use arrow::record_batch::RecordBatch;
        use std::sync::Arc;
        let conn = Connection::open_in_memory()?;
        conn.register_table_function::<ArrowVTab>("arrow")?;
        let values = vec![
            vec![1u8, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16],
            vec![16u8, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1],
            vec![0u8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255],
        ];
        let byte_array = FixedSizeBinaryArray::try_from_iter(values.into_iter()).unwrap();
        let arc: ArrayRef = Arc::new(byte_array);
        let schema = Schema::new(vec![Field::new("data", DataType::FixedSizeBinary(16), false)]);
        let batch = RecordBatch::try_new(Arc::new(schema), vec![arc]).unwrap();
        let mut stmt = conn.prepare("SELECT data FROM arrow(?, ?)")?;
        let mut arr = stmt.query_arrow(arrow_recordbatch_to_query_params(batch))?;
        let rb = arr.next().expect("no record batch");
        let column = rb.column(0).as_any().downcast_ref::<BinaryArray>().unwrap();
        assert_eq!(column.len(), 3);
        assert_eq!(
            column.value(0),
            &[1u8, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]
        );
        assert_eq!(
            column.value(1),
            &[16u8, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1]
        );
        assert_eq!(column.value(2), &[0u8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255]);
        Ok(())
    }

    // Null entries in a FixedSizeBinary column: the null slot comes back as
    // valid with zeroed bytes after the vtab round trip.
    #[test]
    #[cfg(feature = "vtab-arrow")]
    fn test_fixed_size_binary_with_nulls_via_arrow() -> Result<()> {
        use crate::vtab::arrow::{ArrowVTab, arrow_recordbatch_to_query_params};
        use arrow::array::{Array, ArrayRef, BinaryArray, FixedSizeBinaryArray};
        use arrow::datatypes::{DataType, Field, Schema};
        use arrow::record_batch::RecordBatch;
        use std::sync::Arc;
        let conn = Connection::open_in_memory()?;
        conn.register_table_function::<ArrowVTab>("arrow")?;
        let values = vec![
            Some(vec![1u8, 2, 3, 4, 5, 6, 7, 8]),
            None,
            Some(vec![9u8, 10, 11, 12, 13, 14, 15, 16]),
        ];
        let byte_array = FixedSizeBinaryArray::try_from_sparse_iter_with_size(values.into_iter(), 8).unwrap();
        let arc: ArrayRef = Arc::new(byte_array);
        let schema = Schema::new(vec![Field::new("data", DataType::FixedSizeBinary(8), true)]);
        let batch = RecordBatch::try_new(Arc::new(schema), vec![arc]).unwrap();
        let mut stmt = conn.prepare("SELECT data FROM arrow(?, ?)")?;
        let mut arr = stmt.query_arrow(arrow_recordbatch_to_query_params(batch))?;
        let rb = arr.next().expect("no record batch");
        let column = rb.column(0).as_any().downcast_ref::<BinaryArray>().unwrap();
        assert_eq!(column.len(), 3);
        assert!(column.is_valid(0));
        assert!(column.is_valid(2));
        assert_eq!(column.value(0), &[1u8, 2, 3, 4, 5, 6, 7, 8]);
        assert_eq!(column.value(1), &[0u8, 0, 0, 0, 0, 0, 0, 0]);
        assert_eq!(column.value(2), &[9u8, 10, 11, 12, 13, 14, 15, 16]);
        Ok(())
    }

    // FixedSizeBinary with a different width (4 bytes), fetched row-by-row
    // through `Row::get` as `Vec<u8>`.
    #[test]
    #[cfg(feature = "vtab-arrow")]
    fn test_fixed_size_binary_different_sizes_via_arrow() -> Result<()> {
        use crate::vtab::arrow::{ArrowVTab, arrow_recordbatch_to_query_params};
        use arrow::array::{ArrayRef, FixedSizeBinaryArray};
        use arrow::datatypes::{DataType, Field, Schema};
        use arrow::record_batch::RecordBatch;
        use std::sync::Arc;
        let conn = Connection::open_in_memory()?;
        conn.register_table_function::<ArrowVTab>("arrow")?;
        let values = vec![vec![1u8, 2, 3, 4], vec![5u8, 6, 7, 8]];
        let byte_array = FixedSizeBinaryArray::try_from_iter(values.into_iter()).unwrap();
        let arc: ArrayRef = Arc::new(byte_array);
        let schema = Schema::new(vec![Field::new("data", DataType::FixedSizeBinary(4), false)]);
        let batch = RecordBatch::try_new(Arc::new(schema), vec![arc]).unwrap();
        let mut stmt = conn.prepare("SELECT data FROM arrow(?, ?)")?;
        let mut rows = stmt.query(arrow_recordbatch_to_query_params(batch))?;
        let row = rows.next()?.unwrap();
        let bytes: Vec<u8> = row.get(0)?;
        assert_eq!(bytes, vec![1u8, 2, 3, 4]);
        let row = rows.next()?.unwrap();
        let bytes: Vec<u8> = row.get(0)?;
        assert_eq!(bytes, vec![5u8, 6, 7, 8]);
        Ok(())
    }

    // `Row::get_ref` should surface FixedSizeBinary data as ValueRef::Blob;
    // a null slot comes back as a zero-filled blob after the round trip.
    #[test]
    #[cfg(feature = "vtab-arrow")]
    fn test_fixed_size_binary_value_ref_via_arrow() -> Result<()> {
        use crate::types::ValueRef;
        use crate::vtab::arrow::{ArrowVTab, arrow_recordbatch_to_query_params};
        use arrow::array::{ArrayRef, FixedSizeBinaryArray};
        use arrow::datatypes::{DataType, Field, Schema};
        use arrow::record_batch::RecordBatch;
        use std::sync::Arc;
        let conn = Connection::open_in_memory()?;
        conn.register_table_function::<ArrowVTab>("arrow")?;
        let values = vec![Some(vec![1u8, 2, 3, 4]), None];
        let byte_array = FixedSizeBinaryArray::try_from_sparse_iter_with_size(values.into_iter(), 4).unwrap();
        let arc: ArrayRef = Arc::new(byte_array);
        let schema = Schema::new(vec![Field::new("data", DataType::FixedSizeBinary(4), true)]);
        let batch = RecordBatch::try_new(Arc::new(schema), vec![arc]).unwrap();
        let mut stmt = conn.prepare("SELECT data FROM arrow(?, ?)")?;
        let mut rows = stmt.query(arrow_recordbatch_to_query_params(batch))?;
        let row = rows.next()?.unwrap();
        let value_ref = row.get_ref(0)?;
        match value_ref {
            ValueRef::Blob(bytes) => {
                assert_eq!(bytes, &[1u8, 2, 3, 4]);
            }
            _ => panic!("Expected Blob ValueRef, got {:?}", value_ref),
        }
        let row = rows.next()?.unwrap();
        let value_ref = row.get_ref(0)?;
        match value_ref {
            ValueRef::Blob(bytes) => {
                assert_eq!(bytes, &[0u8, 0, 0, 0]);
            }
            _ => panic!("Expected Blob ValueRef with zero bytes, got {:?}", value_ref),
        }
        Ok(())
    }

    // A UUID column (stored as fixed-size binary) converts back to `uuid::Uuid`.
    #[cfg(feature = "uuid")]
    #[test]
    fn test_fixed_size_binary_uuid() -> Result<()> {
        use uuid::Uuid;
        let conn = Connection::open_in_memory()?;
        conn.execute_batch("CREATE TABLE test (id UUID)")?;
        let uuid_str = "550e8400-e29b-41d4-a716-446655440000";
        conn.execute("INSERT INTO test VALUES (?)", [uuid_str])?;
        let uuid: Uuid = conn.query_row("SELECT id FROM test", [], |r| r.get(0))?;
        assert_eq!(uuid.to_string(), uuid_str);
        Ok(())
    }

    // Insert a `Uuid` value directly and read it back unchanged.
    #[cfg(feature = "uuid")]
    #[test]
    fn test_fixed_size_binary_uuid_roundtrip() -> Result<()> {
        use uuid::Uuid;
        let conn = Connection::open_in_memory()?;
        conn.execute_batch("CREATE TABLE test (id UUID)")?;
        let original_uuid = Uuid::new_v4();
        conn.execute("INSERT INTO test VALUES (?)", [original_uuid])?;
        let retrieved_uuid: Uuid = conn.query_row("SELECT id FROM test", [], |r| r.get(0))?;
        assert_eq!(original_uuid, retrieved_uuid);
        Ok(())
    }
}