// lance_datafusion/utils.rs

use std::borrow::Cow;

use arrow::ffi_stream::ArrowArrayStreamReader;
use arrow_array::{RecordBatch, RecordBatchIterator, RecordBatchReader};
use arrow_schema::{ArrowError, SchemaRef};
use async_trait::async_trait;
use background_iterator::BackgroundIterator;
use datafusion::{
    execution::RecordBatchStream,
    physical_plan::{
        metrics::{Count, ExecutionPlanMetricsSet, MetricBuilder, MetricValue, MetricsSet, Time},
        stream::RecordBatchStreamAdapter,
        SendableRecordBatchStream,
    },
};
use datafusion_common::DataFusionError;
use futures::{stream, StreamExt, TryStreamExt};
use lance_core::datatypes::Schema;
use lance_core::Result;
use tokio::task::spawn;

pub mod background_iterator;

27#[async_trait]
33pub trait StreamingWriteSource: Send {
34 async fn into_stream_and_schema(self) -> Result<(SendableRecordBatchStream, Schema)>
42 where
43 Self: Sized,
44 {
45 let mut stream = self.into_stream();
46 let (stream, arrow_schema, schema) = spawn(async move {
47 let arrow_schema = stream.schema();
48 let mut schema: Schema = Schema::try_from(arrow_schema.as_ref())?;
49 let first_batch = stream.try_next().await?;
50 if let Some(batch) = &first_batch {
51 schema.set_dictionary(batch)?;
52 }
53 let stream = stream::iter(first_batch.map(Ok)).chain(stream);
54 Result::Ok((stream, arrow_schema, schema))
55 })
56 .await
57 .unwrap()?;
58 schema.validate()?;
59 let adapter = RecordBatchStreamAdapter::new(arrow_schema, stream);
60 Ok((Box::pin(adapter), schema))
61 }
62
63 fn arrow_schema(&self) -> SchemaRef;
65
66 fn into_stream(self) -> SendableRecordBatchStream;
70}
71
72impl StreamingWriteSource for ArrowArrayStreamReader {
73 #[inline]
74 fn arrow_schema(&self) -> SchemaRef {
75 RecordBatchReader::schema(self)
76 }
77
78 #[inline]
79 fn into_stream(self) -> SendableRecordBatchStream {
80 reader_to_stream(Box::new(self))
81 }
82}
83
84impl<I> StreamingWriteSource for RecordBatchIterator<I>
85where
86 Self: Send,
87 I: IntoIterator<Item = ::core::result::Result<RecordBatch, ArrowError>> + Send + 'static,
88{
89 #[inline]
90 fn arrow_schema(&self) -> SchemaRef {
91 RecordBatchReader::schema(self)
92 }
93
94 #[inline]
95 fn into_stream(self) -> SendableRecordBatchStream {
96 reader_to_stream(Box::new(self))
97 }
98}
99
100impl<T> StreamingWriteSource for Box<T>
101where
102 T: StreamingWriteSource,
103{
104 #[inline]
105 fn arrow_schema(&self) -> SchemaRef {
106 T::arrow_schema(&**self)
107 }
108
109 #[inline]
110 fn into_stream(self) -> SendableRecordBatchStream {
111 T::into_stream(*self)
112 }
113}
114
115impl StreamingWriteSource for Box<dyn RecordBatchReader + Send> {
116 #[inline]
117 fn arrow_schema(&self) -> SchemaRef {
118 RecordBatchReader::schema(self)
119 }
120
121 #[inline]
122 fn into_stream(self) -> SendableRecordBatchStream {
123 reader_to_stream(self)
124 }
125}
126
127impl StreamingWriteSource for SendableRecordBatchStream {
128 #[inline]
129 fn arrow_schema(&self) -> SchemaRef {
130 RecordBatchStream::schema(&**self)
131 }
132
133 #[inline]
134 fn into_stream(self) -> SendableRecordBatchStream {
135 self
136 }
137}
138
139pub fn reader_to_stream(batches: Box<dyn RecordBatchReader + Send>) -> SendableRecordBatchStream {
143 let arrow_schema = batches.arrow_schema();
144 let stream = RecordBatchStreamAdapter::new(
145 arrow_schema,
146 BackgroundIterator::new(batches)
147 .fuse()
148 .map_err(DataFusionError::from),
149 );
150 Box::pin(stream)
151}
152
/// Convenience lookups over a [`MetricsSet`].
pub trait MetricsExt {
    /// Find a `Count` metric by name, returning a clone of its counter if present.
    fn find_count(&self, name: &str) -> Option<Count>;
    /// Iterate over all `Count` metrics as `(name, counter)` pairs.
    fn iter_counts(&self) -> impl Iterator<Item = (impl AsRef<str>, &Count)>;
}
157
158impl MetricsExt for MetricsSet {
159 fn find_count(&self, metric_name: &str) -> Option<Count> {
160 self.iter().find_map(|m| match m.value() {
161 MetricValue::Count { name, count } => {
162 if name == metric_name {
163 Some(count.clone())
164 } else {
165 None
166 }
167 }
168 _ => None,
169 })
170 }
171
172 fn iter_counts(&self) -> impl Iterator<Item = (impl AsRef<str>, &Count)> {
173 self.iter().filter_map(|m| match m.value() {
174 MetricValue::Count { name, count } => Some((name, count)),
175 _ => None,
176 })
177 }
178}
179
/// Helpers for registering named metrics on an [`ExecutionPlanMetricsSet`].
pub trait ExecutionPlanMetricsSetExt {
    /// Register and return a new `Count` metric for the given partition.
    fn new_count(&self, name: &'static str, partition: usize) -> Count;
    /// Register and return a new `Time` metric for the given partition.
    fn new_time(&self, name: &'static str, partition: usize) -> Time;
}
184
185impl ExecutionPlanMetricsSetExt for ExecutionPlanMetricsSet {
186 fn new_count(&self, name: &'static str, partition: usize) -> Count {
187 let count = Count::new();
188 MetricBuilder::new(self)
189 .with_partition(partition)
190 .build(MetricValue::Count {
191 name: Cow::Borrowed(name),
192 count: count.clone(),
193 });
194 count
195 }
196
197 fn new_time(&self, name: &'static str, partition: usize) -> Time {
198 let time = Time::new();
199 MetricBuilder::new(self)
200 .with_partition(partition)
201 .build(MetricValue::Time {
202 name: Cow::Borrowed(name),
203 time: time.clone(),
204 });
205 time
206 }
207}
208
// Metric-name string keys shared by Lance execution plan nodes; these are the
// names under which the corresponding metrics appear in a `MetricsSet`.
// NOTE(review): descriptions below are inferred from the names — the emitting
// plan nodes are not visible from this file; confirm against them.

/// Number of I/O operations performed.
pub const IOPS_METRIC: &str = "iops";
/// Number of requests issued.
pub const REQUESTS_METRIC: &str = "requests";
/// Number of bytes read.
pub const BYTES_READ_METRIC: &str = "bytes_read";
/// Number of indices loaded.
pub const INDICES_LOADED_METRIC: &str = "indices_loaded";
/// Number of index parts loaded.
pub const PARTS_LOADED_METRIC: &str = "parts_loaded";
/// Number of partitions ranked.
pub const PARTITIONS_RANKED_METRIC: &str = "partitions_ranked";
/// Number of index comparisons performed.
pub const INDEX_COMPARISONS_METRIC: &str = "index_comparisons";
/// Number of fragments scanned.
pub const FRAGMENTS_SCANNED_METRIC: &str = "fragments_scanned";
/// Number of ranges scanned.
pub const RANGES_SCANNED_METRIC: &str = "ranges_scanned";
/// Number of rows scanned.
pub const ROWS_SCANNED_METRIC: &str = "rows_scanned";
/// Time spent waiting on tasks.
pub const TASK_WAIT_TIME_METRIC: &str = "task_wait_time";
/// Number of index deltas searched.
pub const DELTAS_SEARCHED_METRIC: &str = "deltas_searched";
/// Number of partitions searched.
pub const PARTITIONS_SEARCHED_METRIC: &str = "partitions_searched";
/// Time spent searching a scalar index.
pub const SCALAR_INDEX_SEARCH_TIME_METRIC: &str = "search_time";
/// Scalar index serialization time.
pub const SCALAR_INDEX_SER_TIME_METRIC: &str = "ser_time";