#![allow(missing_docs)]
#![allow(dead_code)]
use serde::{Deserialize, Serialize};
/// In-memory stand-in for a legacy-VTK writer: collects named point- and
/// cell-data arrays and renders a textual summary instead of writing a file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PyVtkWriter {
    /// Target path recorded in the generated header (no file is opened here).
    pub filename: String,
    /// Per-point arrays as (name, values) pairs, in insertion order.
    pub point_data: Vec<(String, Vec<f64>)>,
    /// Per-cell arrays as (name, values) pairs, in insertion order.
    pub cell_data: Vec<(String, Vec<f64>)>,
}
impl PyVtkWriter {
    /// Creates a writer for `filename` with no arrays attached yet.
    pub fn new(filename: impl Into<String>) -> Self {
        let filename = filename.into();
        Self {
            filename,
            point_data: vec![],
            cell_data: vec![],
        }
    }

    /// Appends a named per-point array.
    pub fn add_point_data(&mut self, name: impl Into<String>, data: Vec<f64>) {
        let entry = (name.into(), data);
        self.point_data.push(entry);
    }

    /// Appends a named per-cell array.
    pub fn add_cell_data(&mut self, name: impl Into<String>, data: Vec<f64>) {
        let entry = (name.into(), data);
        self.cell_data.push(entry);
    }

    /// Renders an ASCII summary: a legacy-VTK-style header followed by one
    /// line per registered array (name and length only, not the values).
    pub fn write_ascii(&self) -> String {
        let header = format!(
            "# vtk DataFile Version 3.0\nOxiPhysics output\nASCII\nDATASET UNSTRUCTURED_GRID\nfile={}\n",
            self.filename
        );
        let point_lines: String = self
            .point_data
            .iter()
            .map(|(name, data)| format!("POINT_DATA {} len={}\n", name, data.len()))
            .collect();
        let cell_lines: String = self
            .cell_data
            .iter()
            .map(|(name, data)| format!("CELL_DATA {} len={}\n", name, data.len()))
            .collect();
        header + &point_lines + &cell_lines
    }

    /// Size in bytes a binary dump would occupy: a fixed 64-byte header
    /// allowance, plus the filename, plus 8 bytes per stored f64.
    pub fn write_binary(&self) -> usize {
        let mut total = self.filename.len() + 64;
        for (_, values) in self.point_data.iter().chain(self.cell_data.iter()) {
            total += values.len() * 8;
        }
        total
    }

    /// Number of registered point arrays.
    pub fn n_point_arrays(&self) -> usize {
        self.point_data.len()
    }

    /// Number of registered cell arrays.
    pub fn n_cell_arrays(&self) -> usize {
        self.cell_data.len()
    }
}
impl Default for PyVtkWriter {
fn default() -> Self {
Self::new("output.vtk")
}
}
/// In-memory stand-in for a CSV reader: data is injected via `load_data`
/// rather than parsed from disk.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PyCsvReader {
    /// Source path this reader nominally corresponds to (never opened here).
    pub filename: String,
    /// Column headers, in file order.
    pub headers: Vec<String>,
    /// Data rows; each inner vector is one row of values.
    pub rows: Vec<Vec<f64>>,
}
impl PyCsvReader {
pub fn new(filename: impl Into<String>) -> Self {
Self {
filename: filename.into(),
headers: Vec::new(),
rows: Vec::new(),
}
}
pub fn load_data(&mut self, headers: Vec<String>, rows: Vec<Vec<f64>>) {
self.headers = headers;
self.rows = rows;
}
pub fn read_column(&self, col: usize) -> Vec<f64> {
self.rows
.iter()
.filter_map(|r| r.get(col).copied())
.collect()
}
pub fn read_all_f64(&self) -> Vec<f64> {
self.rows.iter().flat_map(|r| r.iter().copied()).collect()
}
pub fn header_names(&self) -> &[String] {
&self.headers
}
pub fn n_rows(&self) -> usize {
self.rows.len()
}
pub fn n_cols(&self) -> usize {
self.rows.first().map_or(0, |r| r.len())
}
}
impl Default for PyCsvReader {
fn default() -> Self {
Self::new("input.csv")
}
}
/// In-memory CSV writer: rows are buffered and rendered to a string by
/// `flush` instead of being written to `filename`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PyCsvWriter {
    /// Target path recorded for this writer (never opened here).
    pub filename: String,
    /// Rows buffered since the last `flush`.
    pub buffer: Vec<Vec<f64>>,
}
impl PyCsvWriter {
pub fn new(filename: impl Into<String>) -> Self {
Self {
filename: filename.into(),
buffer: Vec::new(),
}
}
pub fn write_row(&mut self, data: Vec<f64>) {
self.buffer.push(data);
}
pub fn flush(&mut self) -> String {
let csv = self
.buffer
.iter()
.map(|row| {
row.iter()
.map(|v| v.to_string())
.collect::<Vec<_>>()
.join(",")
})
.collect::<Vec<_>>()
.join("\n");
self.buffer.clear();
csv
}
pub fn buffered_rows(&self) -> usize {
self.buffer.len()
}
}
impl Default for PyCsvWriter {
fn default() -> Self {
Self::new("output.csv")
}
}
/// In-memory stand-in for an XYZ trajectory reader; atoms are injected via
/// `load_data` rather than parsed from disk.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PyXyzReader {
    /// Source path this reader nominally corresponds to (never opened here).
    pub filename: String,
    /// One species label per atom.
    pub species: Vec<String>,
    /// Flattened coordinates, 3 values (x, y, z) per atom.
    pub pos_flat: Vec<f64>,
}
impl PyXyzReader {
    /// Creates a reader for `filename` with no atoms loaded.
    pub fn new(filename: impl Into<String>) -> Self {
        Self {
            filename: filename.into(),
            species: Vec::new(),
            pos_flat: Vec::new(),
        }
    }

    /// Replaces the stored species labels and flattened positions.
    ///
    /// `pos_flat` is expected to hold 3 coordinates per atom, matching
    /// `species` in length; this is not validated here.
    pub fn load_data(&mut self, species: Vec<String>, pos_flat: Vec<f64>) {
        self.species = species;
        self.pos_flat = pos_flat;
    }

    /// Number of atoms, taken from the species list.
    pub fn n_atoms(&self) -> usize {
        self.species.len()
    }

    /// Flattened coordinates, 3 per atom.
    pub fn positions(&self) -> &[f64] {
        &self.pos_flat
    }

    /// Species labels, one per atom.
    pub fn species(&self) -> &[String] {
        &self.species
    }

    /// Coordinates of atom `i`, or `None` when fewer than `3 * (i + 1)`
    /// coordinates are stored.
    pub fn position_of(&self, i: usize) -> Option<[f64; 3]> {
        // checked arithmetic: `i * 3` previously overflowed (debug panic /
        // release wrap) for absurdly large indices; an out-of-range index
        // now uniformly yields None instead.
        let base = i.checked_mul(3)?;
        let end = base.checked_add(3)?;
        let xyz = self.pos_flat.get(base..end)?;
        Some([xyz[0], xyz[1], xyz[2]])
    }
}
impl Default for PyXyzReader {
fn default() -> Self {
Self::new("atoms.xyz")
}
}
/// Accumulates XYZ-format frames as strings; `as_string` yields the
/// concatenated trajectory. No file is written by this type.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PyXyzWriter {
    /// Target path recorded for this writer (never opened here).
    pub filename: String,
    /// Fully formatted frames, one string per frame.
    pub frames: Vec<String>,
}
impl PyXyzWriter {
pub fn new(filename: impl Into<String>) -> Self {
Self {
filename: filename.into(),
frames: Vec::new(),
}
}
pub fn write_frame(
&mut self,
positions: &[f64],
species: &[String],
comment: impl Into<String>,
) {
let n = species.len();
let mut frame = format!("{}\n{}\n", n, comment.into());
for (i, sp) in species.iter().enumerate() {
let base = i * 3;
let (x, y, z) = if base + 2 < positions.len() {
(positions[base], positions[base + 1], positions[base + 2])
} else {
(0.0, 0.0, 0.0)
};
frame.push_str(&format!("{} {} {} {}\n", sp, x, y, z));
}
self.frames.push(frame);
}
pub fn n_frames(&self) -> usize {
self.frames.len()
}
pub fn as_string(&self) -> String {
self.frames.concat()
}
}
impl Default for PyXyzWriter {
fn default() -> Self {
Self::new("output.xyz")
}
}
/// In-memory stand-in for a LAMMPS dump reader; atom rows and box bounds
/// are injected via `load_data` rather than parsed from disk.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PyLammpsReader {
    /// Source path this reader nominally corresponds to (never opened here).
    pub filename: String,
    /// One row of values per atom (column layout is whatever was loaded).
    pub atom_data: Vec<Vec<f64>>,
    /// Simulation box as [lo, hi] per axis (x, y, z); defaults to a unit box.
    pub box_bounds: [[f64; 2]; 3],
}
impl PyLammpsReader {
pub fn new(filename: impl Into<String>) -> Self {
Self {
filename: filename.into(),
atom_data: Vec::new(),
box_bounds: [[0.0, 1.0]; 3],
}
}
pub fn load_data(&mut self, atom_data: Vec<Vec<f64>>, box_bounds: [[f64; 2]; 3]) {
self.atom_data = atom_data;
self.box_bounds = box_bounds;
}
pub fn read_atoms(&self) -> &[Vec<f64>] {
&self.atom_data
}
pub fn n_atoms(&self) -> usize {
self.atom_data.len()
}
pub fn box_bounds(&self) -> [[f64; 2]; 3] {
self.box_bounds
}
pub fn box_lengths(&self) -> [f64; 3] {
[
self.box_bounds[0][1] - self.box_bounds[0][0],
self.box_bounds[1][1] - self.box_bounds[1][0],
self.box_bounds[2][1] - self.box_bounds[2][0],
]
}
}
impl Default for PyLammpsReader {
fn default() -> Self {
Self::new("dump.lammpstrj")
}
}
/// In-memory stand-in for an HDF5 writer: named datasets and scalar
/// attributes are stored in insertion order; no file is written.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PyHdf5Writer {
    /// Target path recorded for this writer (never opened here).
    pub filename: String,
    /// Named datasets as (name, values) pairs, in insertion order.
    pub datasets: Vec<(String, Vec<f64>)>,
    /// Named scalar attributes as (name, value) pairs, in insertion order.
    pub attributes: Vec<(String, f64)>,
}
impl PyHdf5Writer {
pub fn new(filename: impl Into<String>) -> Self {
Self {
filename: filename.into(),
datasets: Vec::new(),
attributes: Vec::new(),
}
}
pub fn write_dataset(&mut self, name: impl Into<String>, data: Vec<f64>) {
self.datasets.push((name.into(), data));
}
pub fn write_attribute(&mut self, name: impl Into<String>, value: f64) {
self.attributes.push((name.into(), value));
}
pub fn n_datasets(&self) -> usize {
self.datasets.len()
}
pub fn n_attributes(&self) -> usize {
self.attributes.len()
}
pub fn get_dataset(&self, name: &str) -> Option<&Vec<f64>> {
self.datasets
.iter()
.find(|(n, _)| n == name)
.map(|(_, d)| d)
}
pub fn get_attribute(&self, name: &str) -> Option<f64> {
self.attributes
.iter()
.find(|(n, _)| n == name)
.map(|(_, v)| *v)
}
}
impl Default for PyHdf5Writer {
fn default() -> Self {
Self::new("output.h5")
}
}
/// Buffers one summary line per trajectory frame and refuses writes after
/// `close`. No file is written by this type.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PyTrajectoryWriter {
    /// Target path recorded for this writer (never opened here).
    pub filename: String,
    /// Format tag echoed into each frame line (e.g. "xyz").
    pub format: String,
    /// Once true, `write_frame` becomes a no-op.
    pub closed: bool,
    /// Frames accepted so far (not reset by `close`).
    pub frame_count: usize,
    /// Buffered frame summary lines, one per accepted frame.
    pub frame_buffer: Vec<String>,
}
impl PyTrajectoryWriter {
pub fn new(filename: impl Into<String>, format: impl Into<String>) -> Self {
Self {
filename: filename.into(),
format: format.into(),
closed: false,
frame_count: 0,
frame_buffer: Vec::new(),
}
}
pub fn write_frame(&mut self, positions: &[f64], velocities: &[f64], step: u64) {
if self.closed {
return;
}
let frame = format!(
"FRAME step={} n_pos={} n_vel={} fmt={}\n",
step,
positions.len(),
velocities.len(),
self.format
);
self.frame_buffer.push(frame);
self.frame_count += 1;
}
pub fn close(&mut self) {
self.closed = true;
}
pub fn is_closed(&self) -> bool {
self.closed
}
pub fn n_frames(&self) -> usize {
self.frame_count
}
pub fn as_string(&self) -> String {
self.frame_buffer.concat()
}
}
impl Default for PyTrajectoryWriter {
fn default() -> Self {
Self::new("trajectory.xyz", "xyz")
}
}
/// Registers this file's I/O types with a host module identified by `_m`.
/// Currently a no-op placeholder. NOTE(review): presumably intended for a
/// Python-binding registration hook, given the `Py*` type names — confirm.
pub fn register_io_module(_m: &str) {
}
#[cfg(test)]
mod tests {
// Unit tests exercising each reader/writer facade through its public API.
// Grouped by type; each group covers construction, the mutating calls, and
// the derived accessors.
use super::*;
// --- PyVtkWriter ---
#[test]
fn test_vtk_new() {
let w = PyVtkWriter::new("out.vtk");
assert_eq!(w.filename, "out.vtk");
assert_eq!(w.n_point_arrays(), 0);
}
#[test]
fn test_vtk_add_point_data() {
let mut w = PyVtkWriter::default();
w.add_point_data("pressure", vec![1.0, 2.0, 3.0]);
assert_eq!(w.n_point_arrays(), 1);
}
#[test]
fn test_vtk_add_cell_data() {
let mut w = PyVtkWriter::default();
w.add_cell_data("stress", vec![10.0, 20.0]);
assert_eq!(w.n_cell_arrays(), 1);
}
#[test]
fn test_vtk_write_ascii_contains_header() {
let w = PyVtkWriter::new("test.vtk");
let s = w.write_ascii();
assert!(s.contains("vtk DataFile"));
}
#[test]
fn test_vtk_write_ascii_contains_point_data_name() {
let mut w = PyVtkWriter::new("test.vtk");
w.add_point_data("velocity", vec![1.0, 2.0]);
let s = w.write_ascii();
assert!(s.contains("velocity"));
}
#[test]
fn test_vtk_write_binary_size_grows() {
// write_binary reports a size estimate; adding data must enlarge it.
let mut w = PyVtkWriter::new("test.vtk");
let s0 = w.write_binary();
w.add_point_data("p", vec![1.0; 100]);
let s1 = w.write_binary();
assert!(s1 > s0);
}
#[test]
fn test_vtk_default() {
let w = PyVtkWriter::default();
assert!(w.filename.ends_with(".vtk"));
}
// --- PyCsvReader ---
#[test]
fn test_csv_reader_new() {
let r = PyCsvReader::new("data.csv");
assert_eq!(r.filename, "data.csv");
assert_eq!(r.n_rows(), 0);
}
#[test]
fn test_csv_reader_load_and_read_column() {
let mut r = PyCsvReader::default();
r.load_data(
vec!["x".to_string(), "y".to_string()],
vec![vec![1.0, 2.0], vec![3.0, 4.0]],
);
let col0 = r.read_column(0);
assert_eq!(col0, vec![1.0, 3.0]);
}
#[test]
fn test_csv_reader_read_all_f64() {
let mut r = PyCsvReader::default();
r.load_data(vec![], vec![vec![1.0, 2.0], vec![3.0, 4.0]]);
let all = r.read_all_f64();
assert_eq!(all.len(), 4);
}
#[test]
fn test_csv_reader_header_names() {
let mut r = PyCsvReader::default();
r.load_data(vec!["a".to_string(), "b".to_string()], vec![]);
assert_eq!(r.header_names().len(), 2);
}
#[test]
fn test_csv_reader_n_cols() {
let mut r = PyCsvReader::default();
r.load_data(vec![], vec![vec![1.0, 2.0, 3.0]]);
assert_eq!(r.n_cols(), 3);
}
#[test]
fn test_csv_reader_empty_n_cols_zero() {
// n_cols is defined by the first row, so an empty reader reports 0.
let r = PyCsvReader::default();
assert_eq!(r.n_cols(), 0);
}
// --- PyCsvWriter ---
#[test]
fn test_csv_writer_new() {
let w = PyCsvWriter::new("out.csv");
assert_eq!(w.filename, "out.csv");
assert_eq!(w.buffered_rows(), 0);
}
#[test]
fn test_csv_writer_write_row() {
let mut w = PyCsvWriter::default();
w.write_row(vec![1.0, 2.0, 3.0]);
assert_eq!(w.buffered_rows(), 1);
}
#[test]
fn test_csv_writer_flush_clears_buffer() {
let mut w = PyCsvWriter::default();
w.write_row(vec![1.0]);
w.flush();
assert_eq!(w.buffered_rows(), 0);
}
#[test]
fn test_csv_writer_flush_returns_csv() {
let mut w = PyCsvWriter::default();
w.write_row(vec![1.0, 2.0]);
let s = w.flush();
assert!(s.contains("1") && s.contains("2"));
}
#[test]
fn test_csv_writer_default() {
let w = PyCsvWriter::default();
assert!(w.filename.ends_with(".csv"));
}
// --- PyXyzReader ---
#[test]
fn test_xyz_reader_new() {
let r = PyXyzReader::new("mol.xyz");
assert_eq!(r.filename, "mol.xyz");
assert_eq!(r.n_atoms(), 0);
}
#[test]
fn test_xyz_reader_load_and_n_atoms() {
let mut r = PyXyzReader::default();
r.load_data(
vec!["C".to_string(), "H".to_string()],
vec![0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
);
assert_eq!(r.n_atoms(), 2);
}
#[test]
fn test_xyz_reader_positions() {
let mut r = PyXyzReader::default();
r.load_data(vec!["O".to_string()], vec![1.0, 2.0, 3.0]);
assert_eq!(r.positions(), &[1.0, 2.0, 3.0]);
}
#[test]
fn test_xyz_reader_species() {
let mut r = PyXyzReader::default();
r.load_data(vec!["N".to_string()], vec![0.0, 0.0, 0.0]);
assert_eq!(r.species()[0], "N");
}
#[test]
fn test_xyz_reader_position_of() {
let mut r = PyXyzReader::default();
r.load_data(
vec!["C".to_string(), "H".to_string()],
vec![1.0, 2.0, 3.0, 4.0, 5.0, 6.0],
);
let p = r.position_of(0).unwrap();
assert_eq!(p, [1.0, 2.0, 3.0]);
}
#[test]
fn test_xyz_reader_default() {
let r = PyXyzReader::default();
assert!(r.filename.ends_with(".xyz"));
}
// --- PyXyzWriter ---
#[test]
fn test_xyz_writer_new() {
let w = PyXyzWriter::new("out.xyz");
assert_eq!(w.filename, "out.xyz");
assert_eq!(w.n_frames(), 0);
}
#[test]
fn test_xyz_writer_write_frame_increments_count() {
let mut w = PyXyzWriter::default();
w.write_frame(&[0.0, 0.0, 0.0], &["C".to_string()], "frame 0");
assert_eq!(w.n_frames(), 1);
}
#[test]
fn test_xyz_writer_as_string_contains_n_atoms() {
let mut w = PyXyzWriter::default();
w.write_frame(&[0.0, 0.0, 0.0], &["C".to_string()], "test");
assert!(w.as_string().contains('1'));
}
#[test]
fn test_xyz_writer_multiple_frames() {
let mut w = PyXyzWriter::default();
for _ in 0..5 {
w.write_frame(&[0.0, 0.0, 0.0], &["H".to_string()], "");
}
assert_eq!(w.n_frames(), 5);
}
#[test]
fn test_xyz_writer_default() {
let w = PyXyzWriter::default();
assert!(w.filename.ends_with(".xyz"));
}
// --- PyLammpsReader ---
#[test]
fn test_lammps_reader_new() {
let r = PyLammpsReader::new("dump.lammps");
assert_eq!(r.n_atoms(), 0);
}
#[test]
fn test_lammps_reader_load_and_n_atoms() {
let mut r = PyLammpsReader::default();
r.load_data(vec![vec![1.0, 1.0, 0.0, 0.5, 0.5]], [[0.0, 1.0]; 3]);
assert_eq!(r.n_atoms(), 1);
}
#[test]
fn test_lammps_reader_box_bounds() {
let mut r = PyLammpsReader::default();
r.load_data(vec![], [[-5.0, 5.0], [-5.0, 5.0], [-5.0, 5.0]]);
let b = r.box_bounds();
assert_eq!(b[0], [-5.0, 5.0]);
}
#[test]
fn test_lammps_reader_box_lengths() {
let mut r = PyLammpsReader::default();
r.load_data(vec![], [[0.0, 10.0], [0.0, 20.0], [0.0, 30.0]]);
let l = r.box_lengths();
assert_eq!(l, [10.0, 20.0, 30.0]);
}
#[test]
fn test_lammps_reader_read_atoms() {
let mut r = PyLammpsReader::default();
let atom = vec![1.0, 1.0, 0.1, 0.2, 0.3];
r.load_data(vec![atom.clone()], [[0.0, 1.0]; 3]);
assert_eq!(r.read_atoms()[0], atom);
}
#[test]
fn test_lammps_reader_default() {
let r = PyLammpsReader::default();
assert!(!r.filename.is_empty());
}
// --- PyHdf5Writer ---
#[test]
fn test_hdf5_writer_new() {
let w = PyHdf5Writer::new("out.h5");
assert_eq!(w.filename, "out.h5");
assert_eq!(w.n_datasets(), 0);
}
#[test]
fn test_hdf5_writer_write_dataset() {
let mut w = PyHdf5Writer::default();
w.write_dataset("pressure", vec![1.0, 2.0, 3.0]);
assert_eq!(w.n_datasets(), 1);
}
#[test]
fn test_hdf5_writer_write_attribute() {
let mut w = PyHdf5Writer::default();
w.write_attribute("timestep", 0.001);
assert_eq!(w.n_attributes(), 1);
}
#[test]
fn test_hdf5_writer_get_dataset() {
let mut w = PyHdf5Writer::default();
w.write_dataset("vel", vec![1.0, 2.0]);
let d = w.get_dataset("vel").unwrap();
assert_eq!(d.len(), 2);
}
#[test]
fn test_hdf5_writer_get_attribute() {
let mut w = PyHdf5Writer::default();
w.write_attribute("dt", 1e-4);
let v = w.get_attribute("dt").unwrap();
assert!((v - 1e-4).abs() < 1e-12);
}
#[test]
fn test_hdf5_writer_missing_dataset_none() {
let w = PyHdf5Writer::default();
assert!(w.get_dataset("missing").is_none());
}
#[test]
fn test_hdf5_writer_default() {
let w = PyHdf5Writer::default();
assert!(w.filename.ends_with(".h5"));
}
// --- PyTrajectoryWriter ---
#[test]
fn test_trajectory_writer_new() {
let w = PyTrajectoryWriter::new("traj.xyz", "xyz");
assert_eq!(w.format, "xyz");
assert_eq!(w.n_frames(), 0);
}
#[test]
fn test_trajectory_writer_write_frame() {
let mut w = PyTrajectoryWriter::default();
w.write_frame(&[0.0, 0.0, 0.0], &[0.0, 0.0, 0.0], 0);
assert_eq!(w.n_frames(), 1);
}
#[test]
fn test_trajectory_writer_close() {
let mut w = PyTrajectoryWriter::default();
w.close();
assert!(w.is_closed());
}
#[test]
fn test_trajectory_writer_no_write_after_close() {
// Frames written after close() must be dropped silently.
let mut w = PyTrajectoryWriter::default();
w.close();
w.write_frame(&[0.0], &[], 1);
assert_eq!(w.n_frames(), 0);
}
#[test]
fn test_trajectory_writer_as_string_contains_step() {
let mut w = PyTrajectoryWriter::default();
w.write_frame(&[1.0, 2.0, 3.0], &[0.1, 0.2, 0.3], 42);
assert!(w.as_string().contains("42"));
}
#[test]
fn test_trajectory_writer_multiple_frames() {
let mut w = PyTrajectoryWriter::new("t.lammps", "lammps");
for i in 0..10_u64 {
w.write_frame(&[0.0], &[0.0], i);
}
assert_eq!(w.n_frames(), 10);
}
#[test]
fn test_trajectory_writer_default() {
let w = PyTrajectoryWriter::default();
assert!(!w.format.is_empty());
}
// --- module registration ---
#[test]
fn test_register_io_module_no_panic() {
register_io_module("io");
}
}