use std::{collections::HashMap, io::Cursor, str::FromStr};
use datafusion::arrow::ipc::reader::StreamReader;
use nautilus_core::python::to_pyvalue_err;
use nautilus_model::{data::OrderBookDepth10, identifiers::InstrumentId};
use nautilus_serialization::arrow::DecodeFromRecordBatch;
use pyo3::prelude::*;
/// Python-exposed wrangler that decodes Arrow IPC record-batch bytes into
/// [`OrderBookDepth10`] objects for a single instrument.
///
/// The decode metadata is derived once at construction from the instrument ID
/// and precisions, then reused for every batch decoded.
#[pyclass]
#[pyo3_stub_gen::derive::gen_stub_pyclass(module = "nautilus_trader.persistence")]
pub struct OrderBookDepth10DataWrangler {
// Instrument this wrangler decodes data for.
instrument_id: InstrumentId,
// Price decimal precision used when building decode metadata.
price_precision: u8,
// Size decimal precision used when building decode metadata.
size_precision: u8,
// Arrow decode metadata produced by `OrderBookDepth10::get_metadata`.
metadata: HashMap<String, String>,
}
#[pymethods]
#[pyo3_stub_gen::derive::gen_stub_pymethods]
impl OrderBookDepth10DataWrangler {
    /// Creates a new wrangler for the given instrument and precisions.
    ///
    /// # Errors
    ///
    /// Returns a Python `ValueError` if `instrument_id` fails to parse.
    #[new]
    fn py_new(instrument_id: &str, price_precision: u8, size_precision: u8) -> PyResult<Self> {
        let instrument_id = InstrumentId::from_str(instrument_id).map_err(to_pyvalue_err)?;
        // Metadata is computed once here and reused for every decoded batch.
        let metadata =
            OrderBookDepth10::get_metadata(&instrument_id, price_precision, size_precision);
        Ok(Self {
            instrument_id,
            price_precision,
            size_precision,
            metadata,
        })
    }

    /// Returns the instrument ID as a string.
    #[getter]
    fn instrument_id(&self) -> String {
        self.instrument_id.to_string()
    }

    /// Returns the price decimal precision.
    #[getter]
    const fn price_precision(&self) -> u8 {
        self.price_precision
    }

    /// Returns the size decimal precision.
    #[getter]
    const fn size_precision(&self) -> u8 {
        self.size_precision
    }

    /// Decodes Arrow IPC stream bytes into a list of `OrderBookDepth10`.
    ///
    /// All record batches in the stream are decoded and concatenated in order.
    ///
    /// # Errors
    ///
    /// Returns a Python `ValueError` if the bytes are not a valid Arrow IPC
    /// stream, or if any batch fails to decode against this wrangler's metadata.
    fn process_record_batch_bytes(
        &self,
        #[gen_stub(override_type(type_repr = "bytes"))] data: &[u8],
    ) -> PyResult<Vec<OrderBookDepth10>> {
        let cursor = Cursor::new(data);
        // `map_err` + `?` replaces the manual match-and-return-Err boilerplate.
        let reader = StreamReader::try_new(cursor, None).map_err(to_pyvalue_err)?;

        let mut depths = Vec::new();
        for maybe_batch in reader {
            let record_batch = maybe_batch.map_err(to_pyvalue_err)?;
            let batch_depths = OrderBookDepth10::decode_batch(&self.metadata, record_batch)
                .map_err(to_pyvalue_err)?;
            depths.extend(batch_depths);
        }

        Ok(depths)
    }
}