pub fn read_csv_chunked<P, F>(
path: P,
config: Option<CsvReaderConfig>,
chunk_size: usize,
callback: F,
) -> Result<()>
Read a CSV file in chunks to process large files memory-efficiently
§Arguments
path - Path to the CSV file
config - Optional CSV reader configuration
chunk_size - Number of rows to read in each chunk
callback - Function to process each chunk
§Returns
Result<()> - Success or error
§Examples
use scirs2_io::csv::{read_csv_chunked, CsvReaderConfig};
use scirs2_core::ndarray::Array2;

let config = CsvReaderConfig::default();
let mut total_rows = 0;
read_csv_chunked("large_data.csv", Some(config), 1000, |_headers, chunk| {
    // Each chunk holds up to 1000 rows of the file as a 2-D array.
    println!("Processing chunk with {} rows", chunk.shape()[0]);
    total_rows += chunk.shape()[0];
    true // return true to continue processing, false to stop early
}).unwrap();
println!("Total rows processed: {}", total_rows);