use scirs2_core::ndarray::{Array, Array2, ArrayD, Dimension};
use std::collections::HashMap;
use std::fs;
use std::path::Path;
use crate::error::{IoError, Result};
use crate::hdf5::{AttributeValue as HDF5AttributeValue, FileMode as HDF5FileMode, HDF5File};
/// Primitive data types a NetCDF variable can hold (classic data model).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum NetCDFDataType {
    /// 8-bit signed integer (`NC_BYTE`).
    Byte,
    /// Single text character (`NC_CHAR`).
    Char,
    /// 16-bit signed integer (`NC_SHORT`).
    Short,
    /// 32-bit signed integer (`NC_INT`).
    Int,
    /// 32-bit IEEE float (`NC_FLOAT`).
    Float,
    /// 64-bit IEEE float (`NC_DOUBLE`).
    Double,
}
/// On-disk container format of a NetCDF file.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum NetCDFFormat {
    /// Classic NetCDF-3 binary format (no HDF5 backend).
    Classic,
    /// NetCDF-4: full enhanced data model stored in an HDF5 container.
    NetCDF4,
    /// NetCDF-4 container restricted to the classic data model.
    NetCDF4Classic,
}
/// In-memory handle for a NetCDF file.
///
/// Metadata (dimensions, variables, attributes) is tracked in memory;
/// NetCDF4/NetCDF4Classic files additionally carry an HDF5 backend that
/// persists datasets and attributes to disk.
pub struct NetCDFFile {
    /// Filesystem path the file was opened or created at.
    #[allow(dead_code)]
    path: String,
    /// Access mode: "r" for read, "w" for write.
    mode: String,
    /// Container format chosen at open/create time.
    format: NetCDFFormat,
    /// Dimension name -> size; `None` marks an unlimited dimension.
    dimensions: HashMap<String, Option<usize>>,
    /// Variable name -> its type, dimensions, and attributes.
    variables: HashMap<String, VariableInfo>,
    /// File-level (global) attributes.
    attributes: HashMap<String, AttributeValue>,
    /// HDF5 storage backend; `Some` only for NetCDF4-family formats.
    hdf5_backend: Option<HDF5File>,
}
/// Metadata describing one variable: its type, the named dimensions it
/// spans, and its per-variable attributes.
#[derive(Debug, Clone)]
struct VariableInfo {
    /// Variable name (kept for debugging; lookup goes through the map key).
    #[allow(dead_code)]
    name: String,
    /// Element type of the variable's data.
    data_type: NetCDFDataType,
    /// Ordered dimension names; sizes are resolved via `NetCDFFile::dimensions`.
    dimensions: Vec<String>,
    /// Per-variable attributes (units, long_name, ...).
    attributes: HashMap<String, AttributeValue>,
}
/// Typed attribute value, mirroring the scalar and array attribute types
/// of the NetCDF data model. Only `String` is currently produced by the
/// public API; the remaining variants exist for future readers.
#[derive(Debug, Clone)]
#[allow(dead_code)]
enum AttributeValue {
    String(String),
    Byte(i8),
    Short(i16),
    Int(i32),
    Float(f32),
    Double(f64),
    ByteArray(Vec<i8>),
    ShortArray(Vec<i16>),
    IntArray(Vec<i32>),
    FloatArray(Vec<f32>),
    DoubleArray(Vec<f64>),
}
/// Options controlling how a NetCDF file is opened or created.
#[derive(Debug, Clone)]
pub struct NetCDFOptions {
    /// Use memory mapping when reading (advisory; see `Default`).
    pub mmap: bool,
    /// Apply scale_factor/add_offset attributes automatically.
    pub auto_scale: bool,
    /// Apply fill-value masking together with scaling.
    pub mask_and_scale: bool,
    /// Access mode string: "r" (read) or "w" (write).
    pub mode: String,
    /// Desired container format.
    pub format: NetCDFFormat,
    /// Enable gzip compression for written datasets (NetCDF4 only).
    pub enable_compression: bool,
    /// Explicit gzip level (0-9); `None` lets the writer choose.
    pub compression_level: Option<u8>,
    /// Enable chunked dataset layout (NetCDF4 only).
    pub enable_chunking: bool,
}
impl Default for NetCDFOptions {
fn default() -> Self {
Self {
mmap: true,
auto_scale: true,
mask_and_scale: true,
mode: "r".to_string(),
format: NetCDFFormat::Classic,
enable_compression: false,
compression_level: None,
enable_chunking: false,
}
}
}
impl NetCDFFile {
pub fn open<P: AsRef<Path>>(path: P, options: Option<NetCDFOptions>) -> Result<Self> {
let opts = options.unwrap_or_default();
let path_str = path.as_ref().to_string_lossy().to_string();
if opts.mode == "r" && !Path::new(&path_str).exists() {
return Err(IoError::FileError(format!("File not found: {}", path_str)));
}
let hdf5_backend = if opts.format == NetCDFFormat::NetCDF4
|| opts.format == NetCDFFormat::NetCDF4Classic
{
if opts.mode == "r" {
Some(HDF5File::open(&path_str, HDF5FileMode::ReadOnly)?)
} else {
None
}
} else {
None
};
Ok(Self {
path: path_str,
mode: opts.mode,
format: opts.format,
dimensions: HashMap::new(),
variables: HashMap::new(),
attributes: HashMap::new(),
hdf5_backend,
})
}
pub fn create<P: AsRef<Path>>(path: P) -> Result<Self> {
Self::create_with_format(path, NetCDFFormat::Classic)
}
pub fn create_with_format<P: AsRef<Path>>(path: P, format: NetCDFFormat) -> Result<Self> {
let opts = NetCDFOptions {
mode: "w".to_string(),
format,
..Default::default()
};
let path_str = path.as_ref().to_string_lossy().to_string();
if let Some(parent) = Path::new(&path_str).parent() {
if !parent.exists() {
fs::create_dir_all(parent).map_err(|e| {
IoError::FileError(format!("Failed to create directories: {}", e))
})?;
}
}
let hdf5_backend =
if format == NetCDFFormat::NetCDF4 || format == NetCDFFormat::NetCDF4Classic {
Some(HDF5File::create(&path_str)?)
} else {
None
};
Ok(Self {
path: path_str,
mode: opts.mode,
format: opts.format,
dimensions: HashMap::new(),
variables: HashMap::new(),
attributes: HashMap::new(),
hdf5_backend,
})
}
pub fn create_dimension(&mut self, name: &str, size: Option<usize>) -> Result<()> {
if self.mode != "w" {
return Err(IoError::ValidationError(
"File not opened in write mode".to_string(),
));
}
self.dimensions.insert(name.to_string(), size);
if let Some(ref mut hdf5) = self.hdf5_backend {
let dim_attr = format!("_dim_{}", name);
let dim_value = match size {
Some(s) => s.to_string(),
None => "unlimited".to_string(),
};
hdf5.root_mut()
.set_attribute(&dim_attr, HDF5AttributeValue::String(dim_value));
}
Ok(())
}
pub fn create_variable(
&mut self,
name: &str,
data_type: NetCDFDataType,
dimensions: &[&str],
) -> Result<()> {
if self.mode != "w" {
return Err(IoError::ValidationError(
"File not opened in write mode".to_string(),
));
}
for &dim in dimensions {
if !self.dimensions.contains_key(dim) {
return Err(IoError::ValidationError(format!(
"Dimension '{}' not defined",
dim
)));
}
}
let var_info = VariableInfo {
name: name.to_string(),
data_type,
dimensions: dimensions.iter().map(|&s| s.to_string()).collect(),
attributes: HashMap::new(),
};
self.variables.insert(name.to_string(), var_info);
if let Some(ref mut hdf5) = self.hdf5_backend {
let var_group_path = format!("_var_{}", name);
let var_group = hdf5.root_mut().create_group(&var_group_path);
var_group.set_attribute(
"data_type",
HDF5AttributeValue::String(format!("{:?}", data_type)),
);
var_group.set_attribute(
"dimensions",
HDF5AttributeValue::StringArray(dimensions.iter().map(|s| s.to_string()).collect()),
);
}
Ok(())
}
pub fn read_variable<T: Clone + Default + 'static>(&self, name: &str) -> Result<ArrayD<T>> {
if self.mode != "r" {
return Err(IoError::ValidationError(
"File not opened in read mode".to_string(),
));
}
let var_info = self
.variables
.get(name)
.ok_or_else(|| IoError::ValidationError(format!("Variable '{}' not found", name)))?;
let shape: Vec<usize> = var_info
.dimensions
.iter()
.map(|dim_name| {
self.dimensions
.get(dim_name)
.unwrap_or(&Some(1))
.unwrap_or(1)
})
.collect();
if let Some(ref hdf5) = self.hdf5_backend {
let array_f64 = self.read_compressed_variable_data(hdf5, name)?;
let data: Vec<T> = self.convert_data_type(&array_f64)?;
return Array::from_shape_vec(array_f64.shape(), data)
.map_err(|e| IoError::FormatError(format!("Failed to create array: {}", e)));
}
let total_size = shape.iter().product();
let data = vec![T::default(); total_size];
Array::from_shape_vec(shape, data)
.map_err(|e| IoError::FormatError(format!("Failed to create array: {}", e)))
}
pub fn write_variable<T: Clone + Into<f64> + std::fmt::Debug, D: Dimension>(
&mut self,
name: &str,
data: &Array<T, D>,
) -> Result<()> {
if self.mode != "w" {
return Err(IoError::ValidationError(
"File not opened in write mode".to_string(),
));
}
if !self.variables.contains_key(name) {
return Err(IoError::ValidationError(format!(
"Variable '{}' not defined",
name
)));
}
let compression_level = self.get_compression_level();
let chunking_enabled = self.is_chunking_enabled();
let chunk_size = if chunking_enabled {
Some(self.calculate_optimal_chunk_size(data.shape()))
} else {
None
};
if let Some(ref mut hdf5) = self.hdf5_backend {
let mut dataset_options = crate::hdf5::DatasetOptions::default();
if let Some(level) = compression_level {
dataset_options.compression.gzip = Some(level);
dataset_options.compression.shuffle = true; }
if let Some(chunk) = chunk_size {
dataset_options.chunk_size = Some(chunk);
}
let has_compression = dataset_options.compression.gzip.is_some();
hdf5.create_dataset_from_array(name, data, Some(dataset_options))?;
if has_compression {
if let Ok(_dataset) = hdf5.get_dataset(name) {
}
}
} else {
}
Ok(())
}
pub fn add_variable_attribute(
&mut self,
var_name: &str,
attr_name: &str,
value: &str,
) -> Result<()> {
if self.mode != "w" {
return Err(IoError::ValidationError(
"File not opened in write mode".to_string(),
));
}
let var_info = self.variables.get_mut(var_name).ok_or_else(|| {
IoError::ValidationError(format!("Variable '{}' not defined", var_name))
})?;
var_info.attributes.insert(
attr_name.to_string(),
AttributeValue::String(value.to_string()),
);
Ok(())
}
fn read_compressed_variable_data(&self, hdf5: &HDF5File, name: &str) -> Result<ArrayD<f64>> {
let array_data = hdf5.read_dataset(name)?;
if let Ok(dataset) = hdf5.get_dataset(name) {
let has_compression = dataset.get_attribute("compression").is_some()
|| dataset.get_attribute("shuffle").is_some()
|| dataset.get_attribute("deflate").is_some();
if has_compression {
if let Some(chunk_attr) = dataset.get_attribute("chunk_sizes") {
self.process_chunked_data(&array_data, chunk_attr)?;
}
}
}
Ok(array_data)
}
fn process_chunked_data(
&self,
array_data: &ArrayD<f64>,
_chunk_attr: &crate::hdf5::AttributeValue,
) -> Result<()> {
let _chunk_info = format!("Processing {} elements in chunked format", array_data.len());
Ok(())
}
fn convert_data_type<T>(&self, arrayf64: &ArrayD<f64>) -> Result<Vec<T>>
where
T: Clone + Default + 'static,
{
let data: Vec<T> = arrayf64
.iter()
.map(|&x| {
let value: Box<dyn std::any::Any> =
if std::any::TypeId::of::<T>() == std::any::TypeId::of::<f64>() {
Box::new(x)
} else if std::any::TypeId::of::<T>() == std::any::TypeId::of::<f32>() {
Box::new(x as f32)
} else if std::any::TypeId::of::<T>() == std::any::TypeId::of::<i32>() {
Box::new(x as i32)
} else if std::any::TypeId::of::<T>() == std::any::TypeId::of::<i16>() {
Box::new(x as i16)
} else if std::any::TypeId::of::<T>() == std::any::TypeId::of::<i8>() {
Box::new(x as i8)
} else {
return T::default();
};
if let Ok(boxed_t) = value.downcast::<T>() {
*boxed_t
} else {
T::default()
}
})
.collect();
Ok(data)
}
fn get_compression_level(&self) -> Option<u8> {
if let Some(AttributeValue::String(level_str)) = self.attributes.get("compression_level") {
level_str.parse().ok()
} else {
match self.format {
NetCDFFormat::NetCDF4 | NetCDFFormat::NetCDF4Classic => Some(6), NetCDFFormat::Classic => None, }
}
}
fn is_chunking_enabled(&self) -> bool {
if let Some(AttributeValue::String(chunking_str)) = self.attributes.get("chunking") {
chunking_str.to_lowercase() == "true" || chunking_str == "1"
} else {
matches!(
self.format,
NetCDFFormat::NetCDF4 | NetCDFFormat::NetCDF4Classic
)
}
}
fn calculate_optimal_chunk_size(&self, shape: &[usize]) -> Vec<usize> {
const TARGET_CHUNK_BYTES: usize = 1024 * 1024;
const ELEMENT_SIZE: usize = 8;
let target_elements = TARGET_CHUNK_BYTES / ELEMENT_SIZE;
if shape.is_empty() {
return vec![1];
}
if shape.len() == 1 {
let chunk_size = (target_elements).min(shape[0]).max(1);
return vec![chunk_size];
}
let total_elements: usize = shape.iter().product();
if total_elements <= target_elements {
return shape.to_vec();
}
let scale_factor =
(target_elements as f64 / total_elements as f64).powf(1.0 / shape.len() as f64);
let mut chunkshape: Vec<usize> = shape
.iter()
.map(|&dim| ((dim as f64 * scale_factor) as usize).max(1))
.collect();
for (i, &max_dim) in shape.iter().enumerate() {
chunkshape[i] = chunkshape[i].min(max_dim);
}
if shape.len() >= 2 {
let time_chunk = (target_elements / shape[1..].iter().product::<usize>()).max(1);
chunkshape[0] = time_chunk.min(shape[0]);
}
chunkshape
}
pub fn add_global_attribute(&mut self, name: &str, value: &str) -> Result<()> {
if self.mode != "w" {
return Err(IoError::ValidationError(
"File not opened in write mode".to_string(),
));
}
self.attributes
.insert(name.to_string(), AttributeValue::String(value.to_string()));
Ok(())
}
pub fn dimensions(&self) -> &HashMap<String, Option<usize>> {
&self.dimensions
}
pub fn variables(&self) -> Vec<String> {
self.variables.keys().cloned().collect()
}
pub fn variable_info(
&self,
name: &str,
) -> Result<(NetCDFDataType, Vec<String>, HashMap<String, String>)> {
let var_info = self
.variables
.get(name)
.ok_or_else(|| IoError::ValidationError(format!("Variable '{}' not found", name)))?;
let mut attributes = HashMap::new();
for (attr_name, attr_value) in &var_info.attributes {
let value = match attr_value {
AttributeValue::String(s) => s.clone(),
AttributeValue::Byte(b) => b.to_string(),
AttributeValue::Short(s) => s.to_string(),
AttributeValue::Int(i) => i.to_string(),
AttributeValue::Float(f) => f.to_string(),
AttributeValue::Double(d) => d.to_string(),
AttributeValue::ByteArray(arr) => format!("{:?}", arr),
AttributeValue::ShortArray(arr) => format!("{:?}", arr),
AttributeValue::IntArray(arr) => format!("{:?}", arr),
AttributeValue::FloatArray(arr) => format!("{:?}", arr),
AttributeValue::DoubleArray(arr) => format!("{:?}", arr),
};
attributes.insert(attr_name.clone(), value);
}
Ok((var_info.data_type, var_info.dimensions.clone(), attributes))
}
pub fn global_attributes(&self) -> HashMap<String, String> {
self.attributes
.iter()
.map(|(name, value)| {
let value_str = match value {
AttributeValue::String(s) => s.clone(),
AttributeValue::Byte(b) => b.to_string(),
AttributeValue::Short(s) => s.to_string(),
AttributeValue::Int(i) => i.to_string(),
AttributeValue::Float(f) => f.to_string(),
AttributeValue::Double(d) => d.to_string(),
AttributeValue::ByteArray(arr) => format!("{:?}", arr),
AttributeValue::ShortArray(arr) => format!("{:?}", arr),
AttributeValue::IntArray(arr) => format!("{:?}", arr),
AttributeValue::FloatArray(arr) => format!("{:?}", arr),
AttributeValue::DoubleArray(arr) => format!("{:?}", arr),
};
(name.clone(), value_str)
})
.collect()
}
pub fn format(&self) -> NetCDFFormat {
self.format
}
pub fn has_hdf5_backend(&self) -> bool {
self.hdf5_backend.is_some()
}
pub fn write_array<T: Clone + Into<f64> + std::fmt::Debug, D: Dimension>(
&mut self,
name: &str,
data: &Array<T, D>,
dimension_names: &[&str],
) -> Result<()> {
if self.format == NetCDFFormat::Classic {
return Err(IoError::ValidationError(
"write_array is only supported for NetCDF4/HDF5 format".to_string(),
));
}
for (i, &dim_name) in dimension_names.iter().enumerate() {
if !self.dimensions.contains_key(dim_name) {
let dim_size = data.shape()[i];
self.create_dimension(dim_name, Some(dim_size))?;
}
}
if !self.variables.contains_key(name) {
self.create_variable(name, NetCDFDataType::Double, dimension_names)?;
}
self.write_variable(name, data)
}
pub fn read_array(&self, name: &str) -> Result<ArrayD<f64>> {
if let Some(backend) = &self.hdf5_backend {
backend.read_dataset(name)
} else {
self.read_variable::<f64>(name)
}
}
pub fn sync(&mut self) -> Result<()> {
if let Some(ref mut hdf5) = self.hdf5_backend {
hdf5.write()?;
}
Ok(())
}
pub fn close(mut self) -> Result<()> {
self.sync()?;
if let Some(hdf5) = self.hdf5_backend {
hdf5.close()?;
}
Ok(())
}
}
/// Create a NetCDF4 file at `path`, populate it with the given global
/// attributes and named datasets (each paired with its dimension names),
/// then close it.
#[allow(dead_code)]
pub fn create_netcdf4_with_data<P: AsRef<Path>>(
    path: P,
    datasets: HashMap<String, (ArrayD<f64>, Vec<String>)>,
    global_attributes: HashMap<String, String>,
) -> Result<()> {
    let mut file = NetCDFFile::create_with_format(path, NetCDFFormat::NetCDF4)?;
    // File-level metadata goes in first.
    for (name, value) in &global_attributes {
        file.add_global_attribute(name, value)?;
    }
    // Then each variable, creating dimensions on demand via write_array.
    for (var_name, (data, dim_names)) in &datasets {
        let dims: Vec<&str> = dim_names.iter().map(String::as_str).collect();
        file.write_array(var_name, data, &dims)?;
    }
    file.close()
}
/// Open a NetCDF file for reading, trying the HDF5-backed NetCDF4 reader
/// first and falling back to the classic format on any failure.
#[allow(dead_code)]
pub fn read_netcdf<P: AsRef<Path>>(path: P) -> Result<NetCDFFile> {
    let path_ref = path.as_ref();
    // Build read-mode options for a candidate format.
    let read_opts = |format| NetCDFOptions {
        format,
        mode: "r".to_string(),
        ..Default::default()
    };
    NetCDFFile::open(path_ref, Some(read_opts(NetCDFFormat::NetCDF4)))
        .or_else(|_| NetCDFFile::open(path_ref, Some(read_opts(NetCDFFormat::Classic))))
}
#[cfg(test)]
mod tests {
    // These tests intentionally reach into NetCDFFile's private fields
    // (mode, dimensions, variables) to verify internal state, so they
    // live in the same module.
    use super::*;

    // Creating a classic file yields an empty, write-mode handle.
    #[test]
    fn test_create_netcdf() {
        let temp_dir = std::env::temp_dir();
        let test_file = temp_dir.join(format!("test_create_netcdf_{}.nc", std::process::id()));
        let test_path = test_file.to_str().expect("path should be valid UTF-8");
        let file = NetCDFFile::create(test_path).expect("Operation failed");
        assert_eq!(file.mode, "w");
        assert_eq!(file.path, test_path);
        assert!(file.dimensions.is_empty());
        assert!(file.variables.is_empty());
        assert!(file.attributes.is_empty());
        drop(file);
        let _ = std::fs::remove_file(test_file);
    }

    // Fixed and unlimited (None) dimensions are recorded with their sizes.
    #[test]
    fn test_add_dimension() {
        let temp_dir = std::env::temp_dir();
        let test_file = temp_dir.join(format!("test_add_dimension_{}.nc", std::process::id()));
        let test_path = test_file.to_str().expect("path should be valid UTF-8");
        let mut file = NetCDFFile::create(test_path).expect("Operation failed");
        file.create_dimension("time", Some(10))
            .expect("Operation failed");
        file.create_dimension("lat", Some(180))
            .expect("Operation failed");
        file.create_dimension("lon", Some(360))
            .expect("Operation failed");
        file.create_dimension("unlimited", None)
            .expect("Operation failed");
        assert_eq!(file.dimensions.len(), 4);
        assert_eq!(
            *file.dimensions.get("time").expect("Operation failed"),
            Some(10)
        );
        assert_eq!(
            *file.dimensions.get("lat").expect("Operation failed"),
            Some(180)
        );
        assert_eq!(
            *file.dimensions.get("lon").expect("Operation failed"),
            Some(360)
        );
        assert_eq!(
            *file.dimensions.get("unlimited").expect("Operation failed"),
            None
        );
        drop(file);
        let _ = std::fs::remove_file(test_file);
    }

    // A variable stores its name, data type, and ordered dimension list.
    #[test]
    fn test_add_variable() {
        let temp_dir = std::env::temp_dir();
        let test_file = temp_dir.join(format!("test_add_variable_{}.nc", std::process::id()));
        let test_path = test_file.to_str().expect("path should be valid UTF-8");
        let mut file = NetCDFFile::create(test_path).expect("Operation failed");
        file.create_dimension("time", Some(10))
            .expect("Operation failed");
        file.create_dimension("lat", Some(180))
            .expect("Operation failed");
        file.create_dimension("lon", Some(360))
            .expect("Operation failed");
        file.create_variable(
            "temperature",
            NetCDFDataType::Float,
            &["time", "lat", "lon"],
        )
        .expect("Operation failed");
        assert_eq!(file.variables.len(), 1);
        assert!(file.variables.contains_key("temperature"));
        let var_info = file.variables.get("temperature").expect("Operation failed");
        assert_eq!(var_info.name, "temperature");
        assert_eq!(var_info.data_type, NetCDFDataType::Float);
        assert_eq!(var_info.dimensions, vec!["time", "lat", "lon"]);
        drop(file);
        let _ = std::fs::remove_file(test_file);
    }

    // Global and per-variable string attributes round-trip through the
    // string-rendered accessors.
    #[test]
    fn test_attributes() {
        let temp_dir = std::env::temp_dir();
        let test_file = temp_dir.join(format!("test_attributes_{}.nc", std::process::id()));
        let test_path = test_file.to_str().expect("path should be valid UTF-8");
        let mut file = NetCDFFile::create(test_path).expect("Operation failed");
        file.create_dimension("x", Some(10))
            .expect("Operation failed");
        file.create_variable("data", NetCDFDataType::Double, &["x"])
            .expect("Operation failed");
        file.add_global_attribute("title", "Test Dataset")
            .expect("Operation failed");
        file.add_global_attribute("author", "SciRS2 Test")
            .expect("Operation failed");
        let global_attrs = file.global_attributes();
        assert!(global_attrs.contains_key("title"));
        assert!(global_attrs.contains_key("author"));
        assert_eq!(global_attrs["title"], "Test Dataset");
        assert_eq!(global_attrs["author"], "SciRS2 Test");
        file.add_variable_attribute("data", "units", "meters")
            .expect("Operation failed");
        file.add_variable_attribute("data", "long_name", "measurement data")
            .expect("Operation failed");
        let (dtype, dims, var_attrs) = file.variable_info("data").expect("Operation failed");
        assert_eq!(dtype, NetCDFDataType::Double);
        assert_eq!(dims, vec!["x"]);
        assert!(var_attrs.contains_key("units"));
        assert!(var_attrs.contains_key("long_name"));
        assert_eq!(var_attrs["units"], "meters");
        assert_eq!(var_attrs["long_name"], "measurement data");
        drop(file);
        let _ = std::fs::remove_file(test_file);
    }

    // Write path is exercised on a classic file; the read path is tested
    // by hand-building a read-mode handle (flipping the private mode and
    // inserting metadata directly), since classic files have no binary
    // reader and return a default-filled array of the declared shape.
    #[test]
    fn test_read_write_variable() {
        let temp_dir = std::env::temp_dir();
        let test_file = temp_dir.join(format!("test_read_write_{}.nc", std::process::id()));
        let test_path = test_file.to_str().expect("path should be valid UTF-8");
        let mut file = NetCDFFile::create(test_path).expect("Operation failed");
        file.create_dimension("x", Some(3))
            .expect("Operation failed");
        file.create_dimension("y", Some(2))
            .expect("Operation failed");
        file.create_variable("data", NetCDFDataType::Float, &["x", "y"])
            .expect("Operation failed");
        let data = Array2::<f32>::zeros((3, 2));
        file.write_variable("data", &data)
            .expect("Operation failed");
        drop(file);
        let _ = std::fs::remove_file(&test_file);
        let read_test_file_path = temp_dir.join(format!("test_read_{}.nc", std::process::id()));
        let read_test_path = read_test_file_path
            .to_str()
            .expect("path should be valid UTF-8");
        let mut read_test_file = NetCDFFile::create(read_test_path).expect("Operation failed");
        read_test_file.mode = "r".to_string();
        read_test_file.dimensions.insert("x".to_string(), Some(3));
        read_test_file.dimensions.insert("y".to_string(), Some(2));
        read_test_file.variables.insert(
            "data".to_string(),
            VariableInfo {
                name: "data".to_string(),
                data_type: NetCDFDataType::Float,
                dimensions: vec!["x".to_string(), "y".to_string()],
                attributes: HashMap::new(),
            },
        );
        let read_data: ArrayD<f32> = read_test_file
            .read_variable("data")
            .expect("Operation failed");
        assert_eq!(read_data.shape(), &[3, 2]);
        drop(read_test_file);
        let _ = std::fs::remove_file(read_test_file_path);
    }

    // NetCDF4 files must carry an HDF5 backend.
    #[test]
    fn test_netcdf4_format_creation() {
        let temp_dir = std::env::temp_dir();
        let test_file = temp_dir.join(format!("test_netcdf4_format_{}.nc", std::process::id()));
        let test_path = test_file.to_str().expect("path should be valid UTF-8");
        let file = NetCDFFile::create_with_format(test_path, NetCDFFormat::NetCDF4)
            .expect("Operation failed");
        assert_eq!(file.format(), NetCDFFormat::NetCDF4);
        assert!(file.has_hdf5_backend());
        drop(file);
        let _ = std::fs::remove_file(test_file);
    }

    // Classic files have no HDF5 backend; NetCDF4 files do.
    #[test]
    fn test_netcdf_format_differences() {
        let temp_dir = std::env::temp_dir();
        let classic_file = temp_dir.join(format!("test_classic_{}.nc", std::process::id()));
        let netcdf4_file = temp_dir.join(format!("test_netcdf4_{}.nc", std::process::id()));
        let classic_path = classic_file.to_str().expect("path should be valid UTF-8");
        let netcdf4_path = netcdf4_file.to_str().expect("path should be valid UTF-8");
        let classic = NetCDFFile::create_with_format(classic_path, NetCDFFormat::Classic)
            .expect("Operation failed");
        let netcdf4 = NetCDFFile::create_with_format(netcdf4_path, NetCDFFormat::NetCDF4)
            .expect("Operation failed");
        assert_eq!(classic.format(), NetCDFFormat::Classic);
        assert_eq!(netcdf4.format(), NetCDFFormat::NetCDF4);
        assert!(!classic.has_hdf5_backend());
        assert!(netcdf4.has_hdf5_backend());
        drop(classic);
        drop(netcdf4);
        let _ = std::fs::remove_file(classic_file);
        let _ = std::fs::remove_file(netcdf4_file);
    }

    // write_array auto-creates dimensions sized from the array and a
    // Double variable when missing.
    #[test]
    fn test_netcdf4_write_array() {
        use scirs2_core::ndarray::array;
        let temp_dir = std::env::temp_dir();
        let test_file = temp_dir.join(format!("test_netcdf4_array_{}.nc", std::process::id()));
        let test_path = test_file.to_str().expect("path should be valid UTF-8");
        let mut file = NetCDFFile::create_with_format(test_path, NetCDFFormat::NetCDF4)
            .expect("Operation failed");
        let data = array![[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]];
        let result = file.write_array("test_data", &data, &["x", "y"]);
        assert!(result.is_ok());
        assert!(file.dimensions().contains_key("x"));
        assert!(file.dimensions().contains_key("y"));
        assert_eq!(file.dimensions()["x"], Some(2));
        assert_eq!(file.dimensions()["y"], Some(3));
        assert!(file.variables().contains(&"test_data".to_string()));
        drop(file);
        let _ = std::fs::remove_file(test_file);
    }

    // End-to-end smoke test of the create_netcdf4_with_data helper.
    #[test]
    fn test_netcdf4_convenience_functions() {
        use scirs2_core::ndarray::array;
        use std::collections::HashMap;
        let temp_dir = std::env::temp_dir();
        let test_file = temp_dir.join(format!("test_convenience_{}.nc", std::process::id()));
        let test_path = test_file.to_str().expect("path should be valid UTF-8");
        let mut datasets = HashMap::new();
        datasets.insert(
            "temperature".to_string(),
            (
                array![[20.0, 21.0], [22.0, 23.0]].into_dyn(),
                vec!["time".to_string(), "location".to_string()],
            ),
        );
        let mut global_attrs = HashMap::new();
        global_attrs.insert("title".to_string(), "Test Data".to_string());
        let result = create_netcdf4_with_data(test_path, datasets, global_attrs);
        assert!(result.is_ok());
        let _ = std::fs::remove_file(test_file);
    }

    // write_array must reject classic-format files with a clear error.
    #[test]
    fn test_classic_netcdf_write_array_error() {
        use scirs2_core::ndarray::array;
        let temp_dir = std::env::temp_dir();
        let test_file = temp_dir.join(format!("test_classic_error_{}.nc", std::process::id()));
        let test_path = test_file.to_str().expect("path should be valid UTF-8");
        let mut file = NetCDFFile::create_with_format(test_path, NetCDFFormat::Classic)
            .expect("Operation failed");
        let data = array![[1.0, 2.0], [3.0, 4.0]];
        let result = file.write_array("test_data", &data, &["x", "y"]);
        assert!(result.is_err());
        assert!(result
            .unwrap_err()
            .to_string()
            .contains("only supported for NetCDF4"));
        drop(file);
        let _ = std::fs::remove_file(test_file);
    }
}