// pyo3_arrow/record_batch_reader.rs

use std::fmt::Display;
use std::sync::{Arc, Mutex};

use arrow_array::{ArrayRef, RecordBatchIterator, RecordBatchReader, StructArray};
use arrow_schema::{Field, SchemaRef};
use pyo3::exceptions::{PyIOError, PyStopIteration, PyValueError};
use pyo3::intern;
use pyo3::prelude::*;
use pyo3::types::{PyCapsule, PyTuple, PyType};

use crate::error::PyArrowResult;
use crate::export::{Arro3RecordBatch, Arro3Schema, Arro3Table};
use crate::ffi::from_python::utils::import_stream_pycapsule;
use crate::ffi::to_python::chunked::ArrayIterator;
use crate::ffi::to_python::nanoarrow::to_nanoarrow_array_stream;
use crate::ffi::to_python::to_stream_pycapsule;
use crate::ffi::to_schema_pycapsule;
use crate::input::AnyRecordBatch;
use crate::schema::display_schema;
use crate::{PyRecordBatch, PySchema, PyTable};
21
/// A Python-exposed wrapper around an Arrow [`RecordBatchReader`] stream.
///
/// The reader is stored as `Mutex<Option<...>>`: the `Mutex` provides interior
/// mutability (required because the pyclass is `frozen`), and the `Option` lets
/// the stream be consumed exactly once — `take()` leaves `None`, which the rest
/// of this file treats as "closed stream".
#[pyclass(
    module = "arro3.core._core",
    name = "RecordBatchReader",
    subclass,
    frozen
)]
pub struct PyRecordBatchReader(pub(crate) Mutex<Option<Box<dyn RecordBatchReader + Send>>>);
32
33impl PyRecordBatchReader {
34 pub fn new(reader: Box<dyn RecordBatchReader + Send>) -> Self {
36 Self(Mutex::new(Some(reader)))
37 }
38
39 pub fn from_arrow_pycapsule(capsule: &Bound<PyCapsule>) -> PyResult<Self> {
41 let stream = import_stream_pycapsule(capsule)?;
42 let stream_reader = arrow_array::ffi_stream::ArrowArrayStreamReader::try_new(stream)
43 .map_err(|err| PyValueError::new_err(err.to_string()))?;
44
45 Ok(Self::new(Box::new(stream_reader)))
46 }
47
48 pub fn into_reader(self) -> PyResult<Box<dyn RecordBatchReader + Send>> {
52 let stream = self
53 .0
54 .lock()
55 .unwrap()
56 .take()
57 .ok_or(PyIOError::new_err("Cannot read from closed stream."))?;
58 Ok(stream)
59 }
60
61 pub fn into_table(self) -> PyArrowResult<PyTable> {
63 let stream = self
64 .0
65 .lock()
66 .unwrap()
67 .take()
68 .ok_or(PyIOError::new_err("Cannot read from closed stream."))?;
69 let schema = stream.schema();
70 let mut batches = vec![];
71 for batch in stream {
72 batches.push(batch?);
73 }
74 Ok(PyTable::try_new(batches, schema)?)
75 }
76
77 pub fn schema_ref(&self) -> PyResult<SchemaRef> {
81 let inner = self.0.lock().unwrap();
82 let stream = inner
83 .as_ref()
84 .ok_or(PyIOError::new_err("Stream already closed."))?;
85 Ok(stream.schema())
86 }
87
88 pub fn to_arro3<'py>(&'py self, py: Python<'py>) -> PyResult<Bound<'py, PyAny>> {
90 let arro3_mod = py.import(intern!(py, "arro3.core"))?;
91 arro3_mod
92 .getattr(intern!(py, "RecordBatchReader"))?
93 .call_method1(
94 intern!(py, "from_arrow_pycapsule"),
95 PyTuple::new(py, vec![self.__arrow_c_stream__(py, None)?])?,
96 )
97 }
98
99 pub fn into_arro3(self, py: Python) -> PyResult<Bound<PyAny>> {
101 let arro3_mod = py.import(intern!(py, "arro3.core"))?;
102 let reader = self
103 .0
104 .lock()
105 .unwrap()
106 .take()
107 .ok_or(PyIOError::new_err("Cannot read from closed stream"))?;
108 let capsule = Self::to_stream_pycapsule(py, reader, None)?;
109 arro3_mod
110 .getattr(intern!(py, "RecordBatchReader"))?
111 .call_method1(
112 intern!(py, "from_arrow_pycapsule"),
113 PyTuple::new(py, vec![capsule])?,
114 )
115 }
116
117 pub fn to_nanoarrow<'py>(&'py self, py: Python<'py>) -> PyResult<Bound<'py, PyAny>> {
119 to_nanoarrow_array_stream(py, &self.__arrow_c_stream__(py, None)?)
120 }
121
122 pub fn into_pyarrow(self, py: Python) -> PyResult<Bound<PyAny>> {
126 let pyarrow_mod = py.import(intern!(py, "pyarrow"))?;
127 let record_batch_reader_class = pyarrow_mod.getattr(intern!(py, "RecordBatchReader"))?;
128 record_batch_reader_class.call_method1(
129 intern!(py, "from_stream"),
130 PyTuple::new(py, vec![self.into_pyobject(py)?])?,
131 )
132 }
133
134 pub(crate) fn to_stream_pycapsule<'py>(
135 py: Python<'py>,
136 reader: Box<dyn RecordBatchReader + Send>,
137 requested_schema: Option<Bound<'py, PyCapsule>>,
138 ) -> PyArrowResult<Bound<'py, PyCapsule>> {
139 let schema = reader.schema().clone();
140 let array_reader = reader.into_iter().map(|maybe_batch| {
141 let arr: ArrayRef = Arc::new(StructArray::from(maybe_batch?));
142 Ok(arr)
143 });
144 let array_reader = Box::new(ArrayIterator::new(
145 array_reader,
146 Field::new_struct("", schema.fields().clone(), false)
147 .with_metadata(schema.metadata.clone())
148 .into(),
149 ));
150 to_stream_pycapsule(py, array_reader, requested_schema)
151 }
152}
153
/// Allow any boxed [`RecordBatchReader`] to be converted into the Python
/// wrapper via `.into()` — simply forwards to [`PyRecordBatchReader::new`].
impl From<Box<dyn RecordBatchReader + Send>> for PyRecordBatchReader {
    fn from(value: Box<dyn RecordBatchReader + Send>) -> Self {
        Self::new(value)
    }
}
159
160impl Display for PyRecordBatchReader {
161 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
162 writeln!(f, "arro3.core.RecordBatchReader")?;
163 writeln!(f, "-----------------------")?;
164 if let Ok(schema) = self.schema_ref() {
165 display_schema(&schema, f)
166 } else {
167 writeln!(f, "Closed stream")
168 }
169 }
170}
171
172#[pymethods]
173impl PyRecordBatchReader {
174 fn __arrow_c_schema__<'py>(&'py self, py: Python<'py>) -> PyArrowResult<Bound<'py, PyCapsule>> {
175 to_schema_pycapsule(py, self.schema_ref()?.as_ref())
176 }
177
178 #[pyo3(signature = (requested_schema=None))]
179 fn __arrow_c_stream__<'py>(
180 &'py self,
181 py: Python<'py>,
182 requested_schema: Option<Bound<'py, PyCapsule>>,
183 ) -> PyArrowResult<Bound<'py, PyCapsule>> {
184 let reader = self
185 .0
186 .lock()
187 .unwrap()
188 .take()
189 .ok_or(PyIOError::new_err("Cannot read from closed stream"))?;
190 Self::to_stream_pycapsule(py, reader, requested_schema)
191 }
192
193 fn __iter__(slf: PyRef<Self>) -> PyRef<Self> {
196 slf
197 }
198
199 fn __next__(&self) -> PyArrowResult<Arro3RecordBatch> {
200 self.read_next_batch()
201 }
202
203 fn __repr__(&self) -> String {
204 self.to_string()
205 }
206
207 #[classmethod]
208 fn from_arrow(_cls: &Bound<PyType>, input: AnyRecordBatch) -> PyArrowResult<Self> {
209 let reader = input.into_reader()?;
210 Ok(Self::new(reader))
211 }
212
213 #[classmethod]
214 #[pyo3(name = "from_arrow_pycapsule")]
215 fn from_arrow_pycapsule_py(_cls: &Bound<PyType>, capsule: &Bound<PyCapsule>) -> PyResult<Self> {
216 Self::from_arrow_pycapsule(capsule)
217 }
218
219 #[classmethod]
220 fn from_batches(_cls: &Bound<PyType>, schema: PySchema, batches: Vec<PyRecordBatch>) -> Self {
221 let batches = batches
222 .into_iter()
223 .map(|batch| batch.into_inner())
224 .collect::<Vec<_>>();
225 Self::new(Box::new(RecordBatchIterator::new(
226 batches.into_iter().map(Ok),
227 schema.into_inner(),
228 )))
229 }
230
231 #[classmethod]
232 fn from_stream(_cls: &Bound<PyType>, data: &Bound<PyAny>) -> PyResult<Self> {
233 data.extract()
234 }
235
236 #[getter]
237 fn closed(&self) -> bool {
238 self.0.lock().unwrap().is_none()
239 }
240
241 fn read_all(&self) -> PyArrowResult<Arro3Table> {
242 let stream = self
243 .0
244 .lock()
245 .unwrap()
246 .take()
247 .ok_or(PyIOError::new_err("Cannot read from closed stream."))?;
248 let schema = stream.schema();
249 let mut batches = vec![];
250 for batch in stream {
251 batches.push(batch?);
252 }
253 Ok(PyTable::try_new(batches, schema)?.into())
254 }
255
256 fn read_next_batch(&self) -> PyArrowResult<Arro3RecordBatch> {
257 let mut inner = self.0.lock().unwrap();
258 let stream = inner
259 .as_mut()
260 .ok_or(PyIOError::new_err("Cannot read from closed stream."))?;
261
262 if let Some(next_batch) = stream.next() {
263 Ok(next_batch?.into())
264 } else {
265 Err(PyStopIteration::new_err("").into())
266 }
267 }
268
269 #[getter]
270 fn schema(&self) -> PyResult<Arro3Schema> {
271 Ok(PySchema::new(self.schema_ref()?.clone()).into())
272 }
273}