polars_python/interop/arrow/to_rust.rs

use polars_core::POOL;
use polars_core::prelude::*;
use polars_core::utils::accumulate_dataframes_vertical_unchecked;
use polars_core::utils::arrow::ffi;
use pyo3::ffi::Py_uintptr_t;
use pyo3::prelude::*;
use pyo3::types::PyList;
use rayon::prelude::*;

use crate::error::PyPolarsErr;
use crate::utils::EnterPolarsExt;

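// Imports a single Arrow field definition from a Python object that exposes the
// Arrow C data interface via `_export_to_c` (e.g. a pyarrow `Field`).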
pub fn field_to_rust_arrow(obj: Bound<'_, PyAny>) -> PyResult<ArrowField> {
    let mut schema = Box::new(ffi::ArrowSchema::empty());
    let schema_ptr = schema.as_mut() as *mut ffi::ArrowSchema;

    obj.call_method1("_export_to_c", (schema_ptr as Py_uintptr_t,))?;
    let field = unsafe { ffi::import_field_from_c(schema.as_ref()).map_err(PyPolarsErr::from)? };
    Ok(normalize_arrow_fields(&field))
}

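// Replaces `google:sqlType:` extension types (as emitted by e.g. BigQuery) on
// struct fields with their underlying storage type, so they map onto standard dtypes.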
fn normalize_arrow_fields(field: &ArrowField) -> ArrowField {
    match field {
        ArrowField {
            dtype: ArrowDataType::Struct(fields),
            ..
        } => {
            let mut normalized = false;
            let normalized_fields: Vec<_> = fields
                .iter()
                .map(|f| {
                    if let ArrowDataType::Extension(ext_type) = &f.dtype {
                        if ext_type.name.starts_with("google:sqlType:") {
                            normalized = true;
                            return ArrowField::new(
                                f.name.clone(),
                                ext_type.inner.clone(),
                                f.is_nullable,
                            );
                        }
                    }
                    f.clone()
                })
                .collect();

            if normalized {
                ArrowField::new(
                    field.name.clone(),
                    ArrowDataType::Struct(normalized_fields),
                    field.is_nullable,
                )
            } else {
                field.clone()
            }
        },
        _ => field.clone(),
    }
}

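// Same as `field_to_rust_arrow`, but converts the result into a Polars `Field`.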
pub fn field_to_rust(obj: Bound<'_, PyAny>) -> PyResult<Field> {
    field_to_rust_arrow(obj).map(|f| (&f).into())
}

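// Converts a Python list of Arrow fields (e.g. the fields of a pyarrow schema)
// into a Polars `Schema`, one field at a time.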
pub fn pyarrow_schema_to_rust(obj: &Bound<'_, PyList>) -> PyResult<Schema> {
    obj.into_iter().map(field_to_rust).collect()
}

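// Imports a single Arrow array (data plus schema) from a Python object over the
// Arrow C data interface.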
pub fn array_to_rust(obj: &Bound<PyAny>) -> PyResult<ArrayRef> {
    let mut array = Box::new(ffi::ArrowArray::empty());
    let mut schema = Box::new(ffi::ArrowSchema::empty());

    let array_ptr = array.as_mut() as *mut ffi::ArrowArray;
    let schema_ptr = schema.as_mut() as *mut ffi::ArrowSchema;

    obj.call_method1(
        "_export_to_c",
        (array_ptr as Py_uintptr_t, schema_ptr as Py_uintptr_t),
    )?;

    unsafe {
        let field = ffi::import_field_from_c(schema.as_ref()).map_err(PyPolarsErr::from)?;
        let array = ffi::import_array_from_c(*array, field.dtype).map_err(PyPolarsErr::from)?;
        Ok(array)
    }
}

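// Builds a DataFrame from a slice of Python record batches plus a struct-typed
// Arrow schema (both passed over the C data interface), concatenating the
// resulting per-batch frames vertically.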
pub fn to_rust_df(py: Python, rb: &[Bound<PyAny>], schema: Bound<PyAny>) -> PyResult<DataFrame> {
    let ArrowDataType::Struct(fields) = field_to_rust_arrow(schema)?.dtype else {
        return Err(PyPolarsErr::Other("invalid top-level schema".into()).into());
    };

    let schema = ArrowSchema::from_iter(fields.iter().cloned());

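    // `ArrowSchema` is keyed by field name, so a shorter schema than `fields`
    // means at least one column name occurs more than once; report the duplicates.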
    if schema.len() != fields.len() {
        let mut field_map: PlHashMap<PlSmallStr, u64> = PlHashMap::with_capacity(fields.len());
        fields.iter().for_each(|field| {
            field_map
                .entry(field.name.clone())
                .and_modify(|c| {
                    *c += 1;
                })
                .or_insert(1);
        });
        let duplicate_fields: Vec<_> = field_map
            .into_iter()
            .filter_map(|(k, v)| (v > 1).then_some(k))
            .collect();

        return Err(PyPolarsErr::Polars(PolarsError::Duplicate(
            format!(
                "column appears more than once; names must be unique: {:?}",
                duplicate_fields
            )
            .into(),
        ))
        .into());
    }

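    // No record batches: return an empty frame that still carries the schema.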
    if rb.is_empty() {
        let columns = schema
            .iter_values()
            .map(|field| {
                let field = Field::from(field);
                Series::new_empty(field.name, &field.dtype).into_column()
            })
            .collect::<Vec<_>>();

        return Ok(unsafe { DataFrame::new_no_checks_height_from_first(columns) });
    }

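    // Convert each record batch to a DataFrame. When a batch contains Utf8 or
    // Dictionary arrays, the per-column conversion is dispatched onto the rayon
    // pool, since those arrays require non-trivial conversion work.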
    let dfs = rb
        .iter()
        .map(|rb| {
            let mut run_parallel = false;

            let columns = (0..schema.len())
                .map(|i| {
                    let array = rb.call_method1("column", (i,))?;
                    let arr = array_to_rust(&array)?;
                    run_parallel |= matches!(
                        arr.dtype(),
                        ArrowDataType::Utf8 | ArrowDataType::Dictionary(_, _, _)
                    );
                    Ok(arr)
                })
                .collect::<PyResult<Vec<_>>>()?;

            let columns = if run_parallel {
                py.enter_polars(|| {
                    POOL.install(|| {
                        columns
                            .into_par_iter()
                            .enumerate()
                            .map(|(i, arr)| {
                                let (_, field) = schema.get_at_index(i).unwrap();
                                let s = unsafe {
                                    Series::_try_from_arrow_unchecked_with_md(
                                        field.name.clone(),
                                        vec![arr],
                                        field.dtype(),
                                        field.metadata.as_deref(),
                                    )
                                }
                                .map_err(PyPolarsErr::from)?
                                .into_column();
                                Ok(s)
                            })
                            .collect::<PyResult<Vec<_>>>()
                    })
                })
            } else {
                columns
                    .into_iter()
                    .enumerate()
                    .map(|(i, arr)| {
                        let (_, field) = schema.get_at_index(i).unwrap();
                        let s = unsafe {
                            Series::_try_from_arrow_unchecked_with_md(
                                field.name.clone(),
                                vec![arr],
                                field.dtype(),
                                field.metadata.as_deref(),
                            )
                        }
                        .map_err(PyPolarsErr::from)?
                        .into_column();
                        Ok(s)
                    })
                    .collect::<PyResult<Vec<_>>>()
            }?;

            Ok(unsafe { DataFrame::new_no_checks_height_from_first(columns) })
        })
        .collect::<PyResult<Vec<_>>>()?;

    Ok(accumulate_dataframes_vertical_unchecked(dfs))
}