hdbconnect_arrow/lib.rs
//! Apache Arrow integration for the hdbconnect SAP HANA driver.
//!
//! This crate provides zero-copy conversion from HANA `ResultSets` to Apache Arrow
//! `RecordBatches`, enabling efficient data transfer to Python via `PyO3`.
//!
//! # Features
//!
//! - Type-safe HANA-to-Arrow type mapping
//! - Streaming `RecordBatch` iteration for large result sets
//! - Sealed traits for API stability
//! - Generic Associated Types (GATs) for lending iterators (see the sketch below)
//!
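//! The streaming API rests on a lending iterator: each call yields a batch
//! that borrows from the iterator itself, which the standard `Iterator`
//! trait cannot express and GATs can. A minimal sketch of the pattern (the
//! crate's `LendingBatchIterator` may differ in its exact method names):
//!
//! ```rust
//! trait LendingIterator {
//!     /// The yielded item, generic over the lifetime of the borrow.
//!     type Item<'a>
//!     where
//!         Self: 'a;
//!
//!     /// Yields the next item, which may borrow from `self`.
//!     fn next(&mut self) -> Option<Self::Item<'_>>;
//! }
//! ```
//!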
//! # Example
//!
//! ```rust,ignore
//! use hdbconnect_arrow::{Result, BatchConfig, HanaBatchProcessor};
//!
//! // Configure batch processing
//! let config = BatchConfig::default();
//! let schema = /* Arrow schema */;
//! let mut processor = HanaBatchProcessor::new(schema, config);
//!
//! // Process rows
//! for row in result_set {
//!     if let Some(batch) = processor.process_row(&row)? {
//!         // Handle batch
//!     }
//! }
//!
//! // Flush remaining rows
//! if let Some(batch) = processor.flush()? {
//!     // Handle final batch
//! }
//! ```
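//!
//! For result sets small enough to materialize at once, the crate also
//! exports a one-shot conversion. A sketch, assuming `rows_to_record_batch`
//! takes the Arrow schema and the collected rows (see the `conversion`
//! module for the exact signature):
//!
//! ```rust,ignore
//! use hdbconnect_arrow::rows_to_record_batch;
//!
//! // Convert all rows in a single call instead of streaming batches.
//! let batch = rows_to_record_batch(&schema, rows)?;
//! ```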
#![warn(missing_docs)]
#![warn(clippy::all)]
#![warn(clippy::pedantic)]

pub mod builders;
pub mod conversion;
pub mod error;
pub mod schema;
pub mod traits;
pub mod types;

// Re-export main types for convenience
pub use builders::factory::BuilderFactory;
pub use conversion::{HanaBatchProcessor, rows_to_record_batch};
pub use error::{ArrowConversionError, Result};
pub use schema::mapping::SchemaMapper;
pub use traits::builder::HanaCompatibleBuilder;
pub use traits::sealed::FromHanaValue;
pub use traits::streaming::{BatchConfig, BatchProcessor, LendingBatchIterator};
#[cfg(feature = "async")]
pub use types::arrow::FieldMetadataExtAsync;
pub use types::arrow::{FieldMetadataExt, hana_field_to_arrow, hana_type_to_arrow};
pub use types::hana::{
    Binary, Decimal, DecimalPrecision, DecimalScale, HanaTypeCategory, Lob, Numeric, Spatial,
    StringType, Temporal, TypedColumn,
};